diff options
Diffstat (limited to 'xtask')
-rw-r--r-- | xtask/Cargo.toml | 1 | ||||
-rw-r--r-- | xtask/src/bin/pre-commit.rs | 4 | ||||
-rw-r--r-- | xtask/src/boilerplate_gen.rs | 348 | ||||
-rw-r--r-- | xtask/src/codegen.rs | 46 | ||||
-rw-r--r-- | xtask/src/codegen/gen_parser_tests.rs | 155 | ||||
-rw-r--r-- | xtask/src/codegen/gen_syntax.rs | 354 | ||||
-rw-r--r-- | xtask/src/help.rs | 1 | ||||
-rw-r--r-- | xtask/src/lib.rs | 175 | ||||
-rw-r--r-- | xtask/src/main.rs | 16 | ||||
-rw-r--r-- | xtask/tests/tidy-tests/cli.rs | 13 |
10 files changed, 573 insertions, 540 deletions
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 4fc1c744b..023f6a859 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml | |||
@@ -7,7 +7,6 @@ publish = false | |||
7 | 7 | ||
8 | [dependencies] | 8 | [dependencies] |
9 | walkdir = "2.1.3" | 9 | walkdir = "2.1.3" |
10 | itertools = "0.8.0" | ||
11 | pico-args = "0.3.0" | 10 | pico-args = "0.3.0" |
12 | quote = "1.0.2" | 11 | quote = "1.0.2" |
13 | proc-macro2 = "1.0.1" | 12 | proc-macro2 = "1.0.1" |
diff --git a/xtask/src/bin/pre-commit.rs b/xtask/src/bin/pre-commit.rs index 4ee864756..cc6ccb25e 100644 --- a/xtask/src/bin/pre-commit.rs +++ b/xtask/src/bin/pre-commit.rs | |||
@@ -2,10 +2,10 @@ | |||
2 | 2 | ||
3 | use std::process::Command; | 3 | use std::process::Command; |
4 | 4 | ||
5 | use xtask::{project_root, run, run_rustfmt, Overwrite, Result}; | 5 | use xtask::{codegen::Mode, project_root, run, run_rustfmt, Result}; |
6 | 6 | ||
7 | fn main() -> Result<()> { | 7 | fn main() -> Result<()> { |
8 | run_rustfmt(Overwrite)?; | 8 | run_rustfmt(Mode::Overwrite)?; |
9 | update_staged() | 9 | update_staged() |
10 | } | 10 | } |
11 | 11 | ||
diff --git a/xtask/src/boilerplate_gen.rs b/xtask/src/boilerplate_gen.rs index 39f1cae66..e69de29bb 100644 --- a/xtask/src/boilerplate_gen.rs +++ b/xtask/src/boilerplate_gen.rs | |||
@@ -1,348 +0,0 @@ | |||
1 | //! FIXME: write short doc here | ||
2 | |||
3 | use std::{ | ||
4 | collections::BTreeMap, | ||
5 | fs, | ||
6 | io::Write, | ||
7 | process::{Command, Stdio}, | ||
8 | }; | ||
9 | |||
10 | use proc_macro2::{Punct, Spacing}; | ||
11 | use quote::{format_ident, quote}; | ||
12 | use ron; | ||
13 | use serde::Deserialize; | ||
14 | |||
15 | use crate::{project_root, update, Mode, Result, AST, GRAMMAR, SYNTAX_KINDS}; | ||
16 | |||
17 | pub fn generate_boilerplate(mode: Mode) -> Result<()> { | ||
18 | let grammar = project_root().join(GRAMMAR); | ||
19 | let grammar: Grammar = { | ||
20 | let text = fs::read_to_string(grammar)?; | ||
21 | ron::de::from_str(&text)? | ||
22 | }; | ||
23 | |||
24 | let syntax_kinds_file = project_root().join(SYNTAX_KINDS); | ||
25 | let syntax_kinds = generate_syntax_kinds(&grammar)?; | ||
26 | update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?; | ||
27 | |||
28 | let ast_file = project_root().join(AST); | ||
29 | let ast = generate_ast(&grammar)?; | ||
30 | update(ast_file.as_path(), &ast, mode)?; | ||
31 | |||
32 | Ok(()) | ||
33 | } | ||
34 | |||
35 | fn generate_ast(grammar: &Grammar) -> Result<String> { | ||
36 | let nodes = grammar.ast.iter().map(|(name, ast_node)| { | ||
37 | let variants = | ||
38 | ast_node.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>(); | ||
39 | let name = format_ident!("{}", name); | ||
40 | |||
41 | let adt = if variants.is_empty() { | ||
42 | let kind = format_ident!("{}", to_upper_snake_case(&name.to_string())); | ||
43 | quote! { | ||
44 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
45 | pub struct #name { | ||
46 | pub(crate) syntax: SyntaxNode, | ||
47 | } | ||
48 | |||
49 | impl AstNode for #name { | ||
50 | fn can_cast(kind: SyntaxKind) -> bool { | ||
51 | match kind { | ||
52 | #kind => true, | ||
53 | _ => false, | ||
54 | } | ||
55 | } | ||
56 | fn cast(syntax: SyntaxNode) -> Option<Self> { | ||
57 | if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } | ||
58 | } | ||
59 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
60 | } | ||
61 | } | ||
62 | } else { | ||
63 | let kinds = variants | ||
64 | .iter() | ||
65 | .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string()))) | ||
66 | .collect::<Vec<_>>(); | ||
67 | |||
68 | quote! { | ||
69 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
70 | pub enum #name { | ||
71 | #(#variants(#variants),)* | ||
72 | } | ||
73 | |||
74 | #( | ||
75 | impl From<#variants> for #name { | ||
76 | fn from(node: #variants) -> #name { | ||
77 | #name::#variants(node) | ||
78 | } | ||
79 | } | ||
80 | )* | ||
81 | |||
82 | impl AstNode for #name { | ||
83 | fn can_cast(kind: SyntaxKind) -> bool { | ||
84 | match kind { | ||
85 | #(#kinds)|* => true, | ||
86 | _ => false, | ||
87 | } | ||
88 | } | ||
89 | fn cast(syntax: SyntaxNode) -> Option<Self> { | ||
90 | let res = match syntax.kind() { | ||
91 | #( | ||
92 | #kinds => #name::#variants(#variants { syntax }), | ||
93 | )* | ||
94 | _ => return None, | ||
95 | }; | ||
96 | Some(res) | ||
97 | } | ||
98 | fn syntax(&self) -> &SyntaxNode { | ||
99 | match self { | ||
100 | #( | ||
101 | #name::#variants(it) => &it.syntax, | ||
102 | )* | ||
103 | } | ||
104 | } | ||
105 | } | ||
106 | } | ||
107 | }; | ||
108 | |||
109 | let traits = ast_node.traits.iter().map(|trait_name| { | ||
110 | let trait_name = format_ident!("{}", trait_name); | ||
111 | quote!(impl ast::#trait_name for #name {}) | ||
112 | }); | ||
113 | |||
114 | let collections = ast_node.collections.iter().map(|(name, kind)| { | ||
115 | let method_name = format_ident!("{}", name); | ||
116 | let kind = format_ident!("{}", kind); | ||
117 | quote! { | ||
118 | pub fn #method_name(&self) -> AstChildren<#kind> { | ||
119 | AstChildren::new(&self.syntax) | ||
120 | } | ||
121 | } | ||
122 | }); | ||
123 | |||
124 | let options = ast_node.options.iter().map(|attr| { | ||
125 | let method_name = match attr { | ||
126 | Attr::Type(t) => format_ident!("{}", to_lower_snake_case(&t)), | ||
127 | Attr::NameType(n, _) => format_ident!("{}", n), | ||
128 | }; | ||
129 | let ty = match attr { | ||
130 | Attr::Type(t) | Attr::NameType(_, t) => format_ident!("{}", t), | ||
131 | }; | ||
132 | quote! { | ||
133 | pub fn #method_name(&self) -> Option<#ty> { | ||
134 | AstChildren::new(&self.syntax).next() | ||
135 | } | ||
136 | } | ||
137 | }); | ||
138 | |||
139 | quote! { | ||
140 | #adt | ||
141 | |||
142 | #(#traits)* | ||
143 | |||
144 | impl #name { | ||
145 | #(#collections)* | ||
146 | #(#options)* | ||
147 | } | ||
148 | } | ||
149 | }); | ||
150 | |||
151 | let ast = quote! { | ||
152 | use crate::{ | ||
153 | SyntaxNode, SyntaxKind::{self, *}, | ||
154 | ast::{self, AstNode, AstChildren}, | ||
155 | }; | ||
156 | |||
157 | #(#nodes)* | ||
158 | }; | ||
159 | |||
160 | let pretty = reformat(ast)?; | ||
161 | Ok(pretty) | ||
162 | } | ||
163 | |||
164 | fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> { | ||
165 | let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar | ||
166 | .punct | ||
167 | .iter() | ||
168 | .filter(|(token, _name)| token.len() == 1) | ||
169 | .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name))) | ||
170 | .unzip(); | ||
171 | |||
172 | let punctuation_values = grammar.punct.iter().map(|(token, _name)| { | ||
173 | if "{}[]()".contains(token) { | ||
174 | let c = token.chars().next().unwrap(); | ||
175 | quote! { #c } | ||
176 | } else { | ||
177 | let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint)); | ||
178 | quote! { #(#cs)* } | ||
179 | } | ||
180 | }); | ||
181 | let punctuation = | ||
182 | grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
183 | |||
184 | let full_keywords_values = &grammar.keywords; | ||
185 | let full_keywords = | ||
186 | full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw))); | ||
187 | |||
188 | let all_keywords_values = | ||
189 | grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>(); | ||
190 | let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw)); | ||
191 | let all_keywords = all_keywords_values | ||
192 | .iter() | ||
193 | .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name))) | ||
194 | .collect::<Vec<_>>(); | ||
195 | |||
196 | let literals = | ||
197 | grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
198 | |||
199 | let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
200 | |||
201 | let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
202 | |||
203 | let ast = quote! { | ||
204 | #![allow(bad_style, missing_docs, unreachable_pub)] | ||
205 | /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`. | ||
206 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] | ||
207 | #[repr(u16)] | ||
208 | pub enum SyntaxKind { | ||
209 | // Technical SyntaxKinds: they appear temporally during parsing, | ||
210 | // but never end up in the final tree | ||
211 | #[doc(hidden)] | ||
212 | TOMBSTONE, | ||
213 | #[doc(hidden)] | ||
214 | EOF, | ||
215 | #(#punctuation,)* | ||
216 | #(#all_keywords,)* | ||
217 | #(#literals,)* | ||
218 | #(#tokens,)* | ||
219 | #(#nodes,)* | ||
220 | |||
221 | // Technical kind so that we can cast from u16 safely | ||
222 | #[doc(hidden)] | ||
223 | __LAST, | ||
224 | } | ||
225 | use self::SyntaxKind::*; | ||
226 | |||
227 | impl SyntaxKind { | ||
228 | pub fn is_keyword(self) -> bool { | ||
229 | match self { | ||
230 | #(#all_keywords)|* => true, | ||
231 | _ => false, | ||
232 | } | ||
233 | } | ||
234 | |||
235 | pub fn is_punct(self) -> bool { | ||
236 | match self { | ||
237 | #(#punctuation)|* => true, | ||
238 | _ => false, | ||
239 | } | ||
240 | } | ||
241 | |||
242 | pub fn is_literal(self) -> bool { | ||
243 | match self { | ||
244 | #(#literals)|* => true, | ||
245 | _ => false, | ||
246 | } | ||
247 | } | ||
248 | |||
249 | pub fn from_keyword(ident: &str) -> Option<SyntaxKind> { | ||
250 | let kw = match ident { | ||
251 | #(#full_keywords_values => #full_keywords,)* | ||
252 | _ => return None, | ||
253 | }; | ||
254 | Some(kw) | ||
255 | } | ||
256 | |||
257 | pub fn from_char(c: char) -> Option<SyntaxKind> { | ||
258 | let tok = match c { | ||
259 | #(#single_byte_tokens_values => #single_byte_tokens,)* | ||
260 | _ => return None, | ||
261 | }; | ||
262 | Some(tok) | ||
263 | } | ||
264 | } | ||
265 | |||
266 | #[macro_export] | ||
267 | macro_rules! T { | ||
268 | #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)* | ||
269 | #((#all_keywords_idents) => { $crate::SyntaxKind::#all_keywords };)* | ||
270 | } | ||
271 | }; | ||
272 | |||
273 | reformat(ast) | ||
274 | } | ||
275 | |||
276 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | ||
277 | let mut rustfmt = Command::new("rustfmt") | ||
278 | .arg("--config-path") | ||
279 | .arg(project_root().join("rustfmt.toml")) | ||
280 | .stdin(Stdio::piped()) | ||
281 | .stdout(Stdio::piped()) | ||
282 | .spawn()?; | ||
283 | write!(rustfmt.stdin.take().unwrap(), "{}", text)?; | ||
284 | let output = rustfmt.wait_with_output()?; | ||
285 | let stdout = String::from_utf8(output.stdout)?; | ||
286 | let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`"; | ||
287 | Ok(format!("//! {}\n\n{}", preamble, stdout)) | ||
288 | } | ||
289 | |||
290 | #[derive(Deserialize, Debug)] | ||
291 | struct Grammar { | ||
292 | punct: Vec<(String, String)>, | ||
293 | keywords: Vec<String>, | ||
294 | contextual_keywords: Vec<String>, | ||
295 | literals: Vec<String>, | ||
296 | tokens: Vec<String>, | ||
297 | nodes: Vec<String>, | ||
298 | ast: BTreeMap<String, AstNode>, | ||
299 | } | ||
300 | |||
301 | #[derive(Deserialize, Debug)] | ||
302 | struct AstNode { | ||
303 | #[serde(default)] | ||
304 | #[serde(rename = "enum")] | ||
305 | variants: Vec<String>, | ||
306 | |||
307 | #[serde(default)] | ||
308 | traits: Vec<String>, | ||
309 | #[serde(default)] | ||
310 | collections: Vec<(String, String)>, | ||
311 | #[serde(default)] | ||
312 | options: Vec<Attr>, | ||
313 | } | ||
314 | |||
315 | #[derive(Deserialize, Debug)] | ||
316 | #[serde(untagged)] | ||
317 | enum Attr { | ||
318 | Type(String), | ||
319 | NameType(String, String), | ||
320 | } | ||
321 | |||
322 | fn to_upper_snake_case(s: &str) -> String { | ||
323 | let mut buf = String::with_capacity(s.len()); | ||
324 | let mut prev_is_upper = None; | ||
325 | for c in s.chars() { | ||
326 | if c.is_ascii_uppercase() && prev_is_upper == Some(false) { | ||
327 | buf.push('_') | ||
328 | } | ||
329 | prev_is_upper = Some(c.is_ascii_uppercase()); | ||
330 | |||
331 | buf.push(c.to_ascii_uppercase()); | ||
332 | } | ||
333 | buf | ||
334 | } | ||
335 | |||
336 | fn to_lower_snake_case(s: &str) -> String { | ||
337 | let mut buf = String::with_capacity(s.len()); | ||
338 | let mut prev_is_upper = None; | ||
339 | for c in s.chars() { | ||
340 | if c.is_ascii_uppercase() && prev_is_upper == Some(false) { | ||
341 | buf.push('_') | ||
342 | } | ||
343 | prev_is_upper = Some(c.is_ascii_uppercase()); | ||
344 | |||
345 | buf.push(c.to_ascii_lowercase()); | ||
346 | } | ||
347 | buf | ||
348 | } | ||
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs new file mode 100644 index 000000000..948b86719 --- /dev/null +++ b/xtask/src/codegen.rs | |||
@@ -0,0 +1,46 @@ | |||
1 | //! We use code generation heavily in rust-analyzer. | ||
2 | //! | ||
3 | //! Rather than doing it via proc-macros, we use the old-school way of just dumping | ||
4 | //! the source code. | ||
5 | //! | ||
6 | //! This module's submodules define specific bits that we generate. | ||
7 | |||
8 | mod gen_syntax; | ||
9 | mod gen_parser_tests; | ||
10 | |||
11 | use std::{fs, path::Path}; | ||
12 | |||
13 | use crate::Result; | ||
14 | |||
15 | pub use self::{gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax}; | ||
16 | |||
17 | pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron"; | ||
18 | const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar"; | ||
19 | const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok"; | ||
20 | const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err"; | ||
21 | |||
22 | pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs"; | ||
23 | pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs"; | ||
24 | |||
25 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | ||
26 | pub enum Mode { | ||
27 | Overwrite, | ||
28 | Verify, | ||
29 | } | ||
30 | |||
31 | /// A helper to update a file on disk if its contents have changed. | ||
32 | /// With `Mode::Verify`, returns an error instead of writing when the file is out of date. | ||
33 | pub fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> { | ||
34 | match fs::read_to_string(path) { | ||
35 | Ok(ref old_contents) if old_contents == contents => { | ||
36 | return Ok(()); | ||
37 | } | ||
38 | _ => (), | ||
39 | } | ||
40 | if mode == Mode::Verify { | ||
41 | Err(format!("`{}` is not up-to-date", path.display()))?; | ||
42 | } | ||
43 | eprintln!("updating {}", path.display()); | ||
44 | fs::write(path, contents)?; | ||
45 | Ok(()) | ||
46 | } | ||
diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs new file mode 100644 index 000000000..0f550d948 --- /dev/null +++ b/xtask/src/codegen/gen_parser_tests.rs | |||
@@ -0,0 +1,155 @@ | |||
1 | //! This module greps parser's code for specially formatted comments and turns | ||
2 | //! them into tests. | ||
3 | |||
4 | use std::{ | ||
5 | collections::HashMap, | ||
6 | fs, | ||
7 | path::{Path, PathBuf}, | ||
8 | }; | ||
9 | |||
10 | use crate::{ | ||
11 | codegen::{self, update, Mode}, | ||
12 | project_root, Result, | ||
13 | }; | ||
14 | |||
15 | pub fn generate_parser_tests(mode: Mode) -> Result<()> { | ||
16 | let tests = tests_from_dir(&project_root().join(Path::new(codegen::GRAMMAR_DIR)))?; | ||
17 | fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> { | ||
18 | let tests_dir = project_root().join(into); | ||
19 | if !tests_dir.is_dir() { | ||
20 | fs::create_dir_all(&tests_dir)?; | ||
21 | } | ||
22 | // ok is never actually read, but it needs to be specified to create a Test in existing_tests | ||
23 | let existing = existing_tests(&tests_dir, true)?; | ||
24 | for t in existing.keys().filter(|&t| !tests.contains_key(t)) { | ||
25 | panic!("Test is deleted: {}", t); | ||
26 | } | ||
27 | |||
28 | let mut new_idx = existing.len() + 1; | ||
29 | for (name, test) in tests { | ||
30 | let path = match existing.get(name) { | ||
31 | Some((path, _test)) => path.clone(), | ||
32 | None => { | ||
33 | let file_name = format!("{:04}_{}.rs", new_idx, name); | ||
34 | new_idx += 1; | ||
35 | tests_dir.join(file_name) | ||
36 | } | ||
37 | }; | ||
38 | update(&path, &test.text, mode)?; | ||
39 | } | ||
40 | Ok(()) | ||
41 | } | ||
42 | install_tests(&tests.ok, codegen::OK_INLINE_TESTS_DIR, mode)?; | ||
43 | install_tests(&tests.err, codegen::ERR_INLINE_TESTS_DIR, mode) | ||
44 | } | ||
45 | |||
46 | #[derive(Debug)] | ||
47 | struct Test { | ||
48 | pub name: String, | ||
49 | pub text: String, | ||
50 | pub ok: bool, | ||
51 | } | ||
52 | |||
53 | #[derive(Default, Debug)] | ||
54 | struct Tests { | ||
55 | pub ok: HashMap<String, Test>, | ||
56 | pub err: HashMap<String, Test>, | ||
57 | } | ||
58 | |||
59 | fn collect_tests(s: &str) -> Vec<(usize, Test)> { | ||
60 | let mut res = vec![]; | ||
61 | let prefix = "// "; | ||
62 | let lines = s.lines().map(str::trim_start).enumerate(); | ||
63 | |||
64 | let mut block = vec![]; | ||
65 | for (line_idx, line) in lines { | ||
66 | let is_comment = line.starts_with(prefix); | ||
67 | if is_comment { | ||
68 | block.push((line_idx, &line[prefix.len()..])); | ||
69 | } else { | ||
70 | process_block(&mut res, &block); | ||
71 | block.clear(); | ||
72 | } | ||
73 | } | ||
74 | process_block(&mut res, &block); | ||
75 | return res; | ||
76 | |||
77 | fn process_block(acc: &mut Vec<(usize, Test)>, block: &[(usize, &str)]) { | ||
78 | if block.is_empty() { | ||
79 | return; | ||
80 | } | ||
81 | let mut ok = true; | ||
82 | let mut block = block.iter(); | ||
83 | let (start_line, name) = loop { | ||
84 | match block.next() { | ||
85 | Some(&(idx, line)) if line.starts_with("test ") => { | ||
86 | break (idx, line["test ".len()..].to_string()); | ||
87 | } | ||
88 | Some(&(idx, line)) if line.starts_with("test_err ") => { | ||
89 | ok = false; | ||
90 | break (idx, line["test_err ".len()..].to_string()); | ||
91 | } | ||
92 | Some(_) => (), | ||
93 | None => return, | ||
94 | } | ||
95 | }; | ||
96 | let text: String = | ||
97 | block.map(|(_, line)| *line).chain(std::iter::once("")).collect::<Vec<_>>().join("\n"); | ||
98 | assert!(!text.trim().is_empty() && text.ends_with('\n')); | ||
99 | acc.push((start_line, Test { name, text, ok })) | ||
100 | } | ||
101 | } | ||
102 | |||
103 | fn tests_from_dir(dir: &Path) -> Result<Tests> { | ||
104 | let mut res = Tests::default(); | ||
105 | for entry in ::walkdir::WalkDir::new(dir) { | ||
106 | let entry = entry.unwrap(); | ||
107 | if !entry.file_type().is_file() { | ||
108 | continue; | ||
109 | } | ||
110 | if entry.path().extension().unwrap_or_default() != "rs" { | ||
111 | continue; | ||
112 | } | ||
113 | process_file(&mut res, entry.path())?; | ||
114 | } | ||
115 | let grammar_rs = dir.parent().unwrap().join("grammar.rs"); | ||
116 | process_file(&mut res, &grammar_rs)?; | ||
117 | return Ok(res); | ||
118 | fn process_file(res: &mut Tests, path: &Path) -> Result<()> { | ||
119 | let text = fs::read_to_string(path)?; | ||
120 | |||
121 | for (_, test) in collect_tests(&text) { | ||
122 | if test.ok { | ||
123 | if let Some(old_test) = res.ok.insert(test.name.clone(), test) { | ||
124 | Err(format!("Duplicate test: {}", old_test.name))? | ||
125 | } | ||
126 | } else { | ||
127 | if let Some(old_test) = res.err.insert(test.name.clone(), test) { | ||
128 | Err(format!("Duplicate test: {}", old_test.name))? | ||
129 | } | ||
130 | } | ||
131 | } | ||
132 | Ok(()) | ||
133 | } | ||
134 | } | ||
135 | |||
136 | fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> { | ||
137 | let mut res = HashMap::new(); | ||
138 | for file in fs::read_dir(dir)? { | ||
139 | let file = file?; | ||
140 | let path = file.path(); | ||
141 | if path.extension().unwrap_or_default() != "rs" { | ||
142 | continue; | ||
143 | } | ||
144 | let name = { | ||
145 | let file_name = path.file_name().unwrap().to_str().unwrap(); | ||
146 | file_name[5..file_name.len() - 3].to_string() | ||
147 | }; | ||
148 | let text = fs::read_to_string(&path)?; | ||
149 | let test = Test { name: name.clone(), text, ok }; | ||
150 | if let Some(old) = res.insert(name, (path, test)) { | ||
151 | println!("Duplicate test: {:?}", old); | ||
152 | } | ||
153 | } | ||
154 | Ok(res) | ||
155 | } | ||
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs new file mode 100644 index 000000000..6a81c0e4d --- /dev/null +++ b/xtask/src/codegen/gen_syntax.rs | |||
@@ -0,0 +1,354 @@ | |||
1 | //! This module generates the AST datatypes used by rust-analyzer. | ||
2 | //! | ||
3 | //! Specifically, it generates the `SyntaxKind` enum and a number of newtype | ||
4 | //! wrappers around `SyntaxNode` which implement `ra_syntax::AstNode`. | ||
5 | |||
6 | use std::{ | ||
7 | collections::BTreeMap, | ||
8 | fs, | ||
9 | io::Write, | ||
10 | process::{Command, Stdio}, | ||
11 | }; | ||
12 | |||
13 | use proc_macro2::{Punct, Spacing}; | ||
14 | use quote::{format_ident, quote}; | ||
15 | use ron; | ||
16 | use serde::Deserialize; | ||
17 | |||
18 | use crate::{ | ||
19 | codegen::{self, update, Mode}, | ||
20 | project_root, Result, | ||
21 | }; | ||
22 | |||
23 | pub fn generate_syntax(mode: Mode) -> Result<()> { | ||
24 | let grammar = project_root().join(codegen::GRAMMAR); | ||
25 | let grammar: Grammar = { | ||
26 | let text = fs::read_to_string(grammar)?; | ||
27 | ron::de::from_str(&text)? | ||
28 | }; | ||
29 | |||
30 | let syntax_kinds_file = project_root().join(codegen::SYNTAX_KINDS); | ||
31 | let syntax_kinds = generate_syntax_kinds(&grammar)?; | ||
32 | update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?; | ||
33 | |||
34 | let ast_file = project_root().join(codegen::AST); | ||
35 | let ast = generate_ast(&grammar)?; | ||
36 | update(ast_file.as_path(), &ast, mode)?; | ||
37 | |||
38 | Ok(()) | ||
39 | } | ||
40 | |||
41 | fn generate_ast(grammar: &Grammar) -> Result<String> { | ||
42 | let nodes = grammar.ast.iter().map(|(name, ast_node)| { | ||
43 | let variants = | ||
44 | ast_node.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>(); | ||
45 | let name = format_ident!("{}", name); | ||
46 | |||
47 | let adt = if variants.is_empty() { | ||
48 | let kind = format_ident!("{}", to_upper_snake_case(&name.to_string())); | ||
49 | quote! { | ||
50 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
51 | pub struct #name { | ||
52 | pub(crate) syntax: SyntaxNode, | ||
53 | } | ||
54 | |||
55 | impl AstNode for #name { | ||
56 | fn can_cast(kind: SyntaxKind) -> bool { | ||
57 | match kind { | ||
58 | #kind => true, | ||
59 | _ => false, | ||
60 | } | ||
61 | } | ||
62 | fn cast(syntax: SyntaxNode) -> Option<Self> { | ||
63 | if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } | ||
64 | } | ||
65 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
66 | } | ||
67 | } | ||
68 | } else { | ||
69 | let kinds = variants | ||
70 | .iter() | ||
71 | .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string()))) | ||
72 | .collect::<Vec<_>>(); | ||
73 | |||
74 | quote! { | ||
75 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
76 | pub enum #name { | ||
77 | #(#variants(#variants),)* | ||
78 | } | ||
79 | |||
80 | #( | ||
81 | impl From<#variants> for #name { | ||
82 | fn from(node: #variants) -> #name { | ||
83 | #name::#variants(node) | ||
84 | } | ||
85 | } | ||
86 | )* | ||
87 | |||
88 | impl AstNode for #name { | ||
89 | fn can_cast(kind: SyntaxKind) -> bool { | ||
90 | match kind { | ||
91 | #(#kinds)|* => true, | ||
92 | _ => false, | ||
93 | } | ||
94 | } | ||
95 | fn cast(syntax: SyntaxNode) -> Option<Self> { | ||
96 | let res = match syntax.kind() { | ||
97 | #( | ||
98 | #kinds => #name::#variants(#variants { syntax }), | ||
99 | )* | ||
100 | _ => return None, | ||
101 | }; | ||
102 | Some(res) | ||
103 | } | ||
104 | fn syntax(&self) -> &SyntaxNode { | ||
105 | match self { | ||
106 | #( | ||
107 | #name::#variants(it) => &it.syntax, | ||
108 | )* | ||
109 | } | ||
110 | } | ||
111 | } | ||
112 | } | ||
113 | }; | ||
114 | |||
115 | let traits = ast_node.traits.iter().map(|trait_name| { | ||
116 | let trait_name = format_ident!("{}", trait_name); | ||
117 | quote!(impl ast::#trait_name for #name {}) | ||
118 | }); | ||
119 | |||
120 | let collections = ast_node.collections.iter().map(|(name, kind)| { | ||
121 | let method_name = format_ident!("{}", name); | ||
122 | let kind = format_ident!("{}", kind); | ||
123 | quote! { | ||
124 | pub fn #method_name(&self) -> AstChildren<#kind> { | ||
125 | AstChildren::new(&self.syntax) | ||
126 | } | ||
127 | } | ||
128 | }); | ||
129 | |||
130 | let options = ast_node.options.iter().map(|attr| { | ||
131 | let method_name = match attr { | ||
132 | Attr::Type(t) => format_ident!("{}", to_lower_snake_case(&t)), | ||
133 | Attr::NameType(n, _) => format_ident!("{}", n), | ||
134 | }; | ||
135 | let ty = match attr { | ||
136 | Attr::Type(t) | Attr::NameType(_, t) => format_ident!("{}", t), | ||
137 | }; | ||
138 | quote! { | ||
139 | pub fn #method_name(&self) -> Option<#ty> { | ||
140 | AstChildren::new(&self.syntax).next() | ||
141 | } | ||
142 | } | ||
143 | }); | ||
144 | |||
145 | quote! { | ||
146 | #adt | ||
147 | |||
148 | #(#traits)* | ||
149 | |||
150 | impl #name { | ||
151 | #(#collections)* | ||
152 | #(#options)* | ||
153 | } | ||
154 | } | ||
155 | }); | ||
156 | |||
157 | let ast = quote! { | ||
158 | use crate::{ | ||
159 | SyntaxNode, SyntaxKind::{self, *}, | ||
160 | ast::{self, AstNode, AstChildren}, | ||
161 | }; | ||
162 | |||
163 | #(#nodes)* | ||
164 | }; | ||
165 | |||
166 | let pretty = reformat(ast)?; | ||
167 | Ok(pretty) | ||
168 | } | ||
169 | |||
170 | fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> { | ||
171 | let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar | ||
172 | .punct | ||
173 | .iter() | ||
174 | .filter(|(token, _name)| token.len() == 1) | ||
175 | .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name))) | ||
176 | .unzip(); | ||
177 | |||
178 | let punctuation_values = grammar.punct.iter().map(|(token, _name)| { | ||
179 | if "{}[]()".contains(token) { | ||
180 | let c = token.chars().next().unwrap(); | ||
181 | quote! { #c } | ||
182 | } else { | ||
183 | let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint)); | ||
184 | quote! { #(#cs)* } | ||
185 | } | ||
186 | }); | ||
187 | let punctuation = | ||
188 | grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
189 | |||
190 | let full_keywords_values = &grammar.keywords; | ||
191 | let full_keywords = | ||
192 | full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw))); | ||
193 | |||
194 | let all_keywords_values = | ||
195 | grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>(); | ||
196 | let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw)); | ||
197 | let all_keywords = all_keywords_values | ||
198 | .iter() | ||
199 | .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name))) | ||
200 | .collect::<Vec<_>>(); | ||
201 | |||
202 | let literals = | ||
203 | grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
204 | |||
205 | let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
206 | |||
207 | let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>(); | ||
208 | |||
209 | let ast = quote! { | ||
210 | #![allow(bad_style, missing_docs, unreachable_pub)] | ||
211 | /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`. | ||
212 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] | ||
213 | #[repr(u16)] | ||
214 | pub enum SyntaxKind { | ||
215 | // Technical SyntaxKinds: they appear temporarily during parsing, | ||
216 | // but never end up in the final tree | ||
217 | #[doc(hidden)] | ||
218 | TOMBSTONE, | ||
219 | #[doc(hidden)] | ||
220 | EOF, | ||
221 | #(#punctuation,)* | ||
222 | #(#all_keywords,)* | ||
223 | #(#literals,)* | ||
224 | #(#tokens,)* | ||
225 | #(#nodes,)* | ||
226 | |||
227 | // Technical kind so that we can cast from u16 safely | ||
228 | #[doc(hidden)] | ||
229 | __LAST, | ||
230 | } | ||
231 | use self::SyntaxKind::*; | ||
232 | |||
233 | impl SyntaxKind { | ||
234 | pub fn is_keyword(self) -> bool { | ||
235 | match self { | ||
236 | #(#all_keywords)|* => true, | ||
237 | _ => false, | ||
238 | } | ||
239 | } | ||
240 | |||
241 | pub fn is_punct(self) -> bool { | ||
242 | match self { | ||
243 | #(#punctuation)|* => true, | ||
244 | _ => false, | ||
245 | } | ||
246 | } | ||
247 | |||
248 | pub fn is_literal(self) -> bool { | ||
249 | match self { | ||
250 | #(#literals)|* => true, | ||
251 | _ => false, | ||
252 | } | ||
253 | } | ||
254 | |||
255 | pub fn from_keyword(ident: &str) -> Option<SyntaxKind> { | ||
256 | let kw = match ident { | ||
257 | #(#full_keywords_values => #full_keywords,)* | ||
258 | _ => return None, | ||
259 | }; | ||
260 | Some(kw) | ||
261 | } | ||
262 | |||
263 | pub fn from_char(c: char) -> Option<SyntaxKind> { | ||
264 | let tok = match c { | ||
265 | #(#single_byte_tokens_values => #single_byte_tokens,)* | ||
266 | _ => return None, | ||
267 | }; | ||
268 | Some(tok) | ||
269 | } | ||
270 | } | ||
271 | |||
272 | #[macro_export] | ||
273 | macro_rules! T { | ||
274 | #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)* | ||
275 | #((#all_keywords_idents) => { $crate::SyntaxKind::#all_keywords };)* | ||
276 | } | ||
277 | }; | ||
278 | |||
279 | reformat(ast) | ||
280 | } | ||
281 | |||
282 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | ||
283 | let mut rustfmt = Command::new("rustfmt") | ||
284 | .arg("--config-path") | ||
285 | .arg(project_root().join("rustfmt.toml")) | ||
286 | .stdin(Stdio::piped()) | ||
287 | .stdout(Stdio::piped()) | ||
288 | .spawn()?; | ||
289 | write!(rustfmt.stdin.take().unwrap(), "{}", text)?; | ||
290 | let output = rustfmt.wait_with_output()?; | ||
291 | let stdout = String::from_utf8(output.stdout)?; | ||
292 | let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`"; | ||
293 | Ok(format!("//! {}\n\n{}", preamble, stdout)) | ||
294 | } | ||
295 | |||
296 | #[derive(Deserialize, Debug)] | ||
297 | struct Grammar { | ||
298 | punct: Vec<(String, String)>, | ||
299 | keywords: Vec<String>, | ||
300 | contextual_keywords: Vec<String>, | ||
301 | literals: Vec<String>, | ||
302 | tokens: Vec<String>, | ||
303 | nodes: Vec<String>, | ||
304 | ast: BTreeMap<String, AstNode>, | ||
305 | } | ||
306 | |||
307 | #[derive(Deserialize, Debug)] | ||
308 | struct AstNode { | ||
309 | #[serde(default)] | ||
310 | #[serde(rename = "enum")] | ||
311 | variants: Vec<String>, | ||
312 | |||
313 | #[serde(default)] | ||
314 | traits: Vec<String>, | ||
315 | #[serde(default)] | ||
316 | collections: Vec<(String, String)>, | ||
317 | #[serde(default)] | ||
318 | options: Vec<Attr>, | ||
319 | } | ||
320 | |||
321 | #[derive(Deserialize, Debug)] | ||
322 | #[serde(untagged)] | ||
323 | enum Attr { | ||
324 | Type(String), | ||
325 | NameType(String, String), | ||
326 | } | ||
327 | |||
328 | fn to_upper_snake_case(s: &str) -> String { | ||
329 | let mut buf = String::with_capacity(s.len()); | ||
330 | let mut prev_is_upper = None; | ||
331 | for c in s.chars() { | ||
332 | if c.is_ascii_uppercase() && prev_is_upper == Some(false) { | ||
333 | buf.push('_') | ||
334 | } | ||
335 | prev_is_upper = Some(c.is_ascii_uppercase()); | ||
336 | |||
337 | buf.push(c.to_ascii_uppercase()); | ||
338 | } | ||
339 | buf | ||
340 | } | ||
341 | |||
342 | fn to_lower_snake_case(s: &str) -> String { | ||
343 | let mut buf = String::with_capacity(s.len()); | ||
344 | let mut prev_is_upper = None; | ||
345 | for c in s.chars() { | ||
346 | if c.is_ascii_uppercase() && prev_is_upper == Some(false) { | ||
347 | buf.push('_') | ||
348 | } | ||
349 | prev_is_upper = Some(c.is_ascii_uppercase()); | ||
350 | |||
351 | buf.push(c.to_ascii_lowercase()); | ||
352 | } | ||
353 | buf | ||
354 | } | ||
diff --git a/xtask/src/help.rs b/xtask/src/help.rs index 4c6bf6b53..730eb5c61 100644 --- a/xtask/src/help.rs +++ b/xtask/src/help.rs | |||
@@ -13,7 +13,6 @@ SUBCOMMANDS: | |||
13 | format-hook | 13 | format-hook |
14 | fuzz-tests | 14 | fuzz-tests |
15 | codegen | 15 | codegen |
16 | gen-tests | ||
17 | install | 16 | install |
18 | lint"; | 17 | lint"; |
19 | 18 | ||
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index a8685f567..cc69463a9 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -1,9 +1,8 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | mod boilerplate_gen; | 3 | pub mod codegen; |
4 | 4 | ||
5 | use std::{ | 5 | use std::{ |
6 | collections::HashMap, | ||
7 | error::Error, | 6 | error::Error, |
8 | fs, | 7 | fs, |
9 | io::{Error as IoError, ErrorKind}, | 8 | io::{Error as IoError, ErrorKind}, |
@@ -11,72 +10,12 @@ use std::{ | |||
11 | process::{Command, Output, Stdio}, | 10 | process::{Command, Output, Stdio}, |
12 | }; | 11 | }; |
13 | 12 | ||
14 | use itertools::Itertools; | 13 | use crate::codegen::Mode; |
15 | |||
16 | pub use self::boilerplate_gen::generate_boilerplate; | ||
17 | 14 | ||
18 | pub type Result<T> = std::result::Result<T, Box<dyn Error>>; | 15 | pub type Result<T> = std::result::Result<T, Box<dyn Error>>; |
19 | 16 | ||
20 | pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron"; | ||
21 | const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar"; | ||
22 | const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok"; | ||
23 | const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err"; | ||
24 | |||
25 | pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs"; | ||
26 | pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs"; | ||
27 | const TOOLCHAIN: &str = "stable"; | 17 | const TOOLCHAIN: &str = "stable"; |
28 | 18 | ||
29 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | ||
30 | pub enum Mode { | ||
31 | Overwrite, | ||
32 | Verify, | ||
33 | } | ||
34 | pub use Mode::*; | ||
35 | |||
36 | #[derive(Debug)] | ||
37 | pub struct Test { | ||
38 | pub name: String, | ||
39 | pub text: String, | ||
40 | pub ok: bool, | ||
41 | } | ||
42 | |||
43 | pub fn collect_tests(s: &str) -> Vec<(usize, Test)> { | ||
44 | let mut res = vec![]; | ||
45 | let prefix = "// "; | ||
46 | let comment_blocks = s | ||
47 | .lines() | ||
48 | .map(str::trim_start) | ||
49 | .enumerate() | ||
50 | .group_by(|(_idx, line)| line.starts_with(prefix)); | ||
51 | |||
52 | 'outer: for (is_comment, block) in comment_blocks.into_iter() { | ||
53 | if !is_comment { | ||
54 | continue; | ||
55 | } | ||
56 | let mut block = block.map(|(idx, line)| (idx, &line[prefix.len()..])); | ||
57 | |||
58 | let mut ok = true; | ||
59 | let (start_line, name) = loop { | ||
60 | match block.next() { | ||
61 | Some((idx, line)) if line.starts_with("test ") => { | ||
62 | break (idx, line["test ".len()..].to_string()); | ||
63 | } | ||
64 | Some((idx, line)) if line.starts_with("test_err ") => { | ||
65 | ok = false; | ||
66 | break (idx, line["test_err ".len()..].to_string()); | ||
67 | } | ||
68 | Some(_) => (), | ||
69 | None => continue 'outer, | ||
70 | } | ||
71 | }; | ||
72 | let text: String = | ||
73 | itertools::join(block.map(|(_, line)| line).chain(::std::iter::once("")), "\n"); | ||
74 | assert!(!text.trim().is_empty() && text.ends_with('\n')); | ||
75 | res.push((start_line, Test { name, text, ok })) | ||
76 | } | ||
77 | res | ||
78 | } | ||
79 | |||
80 | pub fn project_root() -> PathBuf { | 19 | pub fn project_root() -> PathBuf { |
81 | Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(1).unwrap().to_path_buf() | 20 | Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(1).unwrap().to_path_buf() |
82 | } | 21 | } |
@@ -126,7 +65,7 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> { | |||
126 | _ => install_rustfmt()?, | 65 | _ => install_rustfmt()?, |
127 | }; | 66 | }; |
128 | 67 | ||
129 | if mode == Verify { | 68 | if mode == Mode::Verify { |
130 | run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; | 69 | run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; |
131 | } else { | 70 | } else { |
132 | run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; | 71 | run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; |
@@ -206,37 +145,6 @@ pub fn run_fuzzer() -> Result<()> { | |||
206 | run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax") | 145 | run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax") |
207 | } | 146 | } |
208 | 147 | ||
209 | pub fn gen_tests(mode: Mode) -> Result<()> { | ||
210 | let tests = tests_from_dir(&project_root().join(Path::new(GRAMMAR_DIR)))?; | ||
211 | fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> { | ||
212 | let tests_dir = project_root().join(into); | ||
213 | if !tests_dir.is_dir() { | ||
214 | fs::create_dir_all(&tests_dir)?; | ||
215 | } | ||
216 | // ok is never actually read, but it needs to be specified to create a Test in existing_tests | ||
217 | let existing = existing_tests(&tests_dir, true)?; | ||
218 | for t in existing.keys().filter(|&t| !tests.contains_key(t)) { | ||
219 | panic!("Test is deleted: {}", t); | ||
220 | } | ||
221 | |||
222 | let mut new_idx = existing.len() + 1; | ||
223 | for (name, test) in tests { | ||
224 | let path = match existing.get(name) { | ||
225 | Some((path, _test)) => path.clone(), | ||
226 | None => { | ||
227 | let file_name = format!("{:04}_{}.rs", new_idx, name); | ||
228 | new_idx += 1; | ||
229 | tests_dir.join(file_name) | ||
230 | } | ||
231 | }; | ||
232 | update(&path, &test.text, mode)?; | ||
233 | } | ||
234 | Ok(()) | ||
235 | } | ||
236 | install_tests(&tests.ok, OK_INLINE_TESTS_DIR, mode)?; | ||
237 | install_tests(&tests.err, ERR_INLINE_TESTS_DIR, mode) | ||
238 | } | ||
239 | |||
240 | fn do_run<F>(cmdline: &str, dir: &str, mut f: F) -> Result<Output> | 148 | fn do_run<F>(cmdline: &str, dir: &str, mut f: F) -> Result<Output> |
241 | where | 149 | where |
242 | F: FnMut(&mut Command), | 150 | F: FnMut(&mut Command), |
@@ -253,80 +161,3 @@ where | |||
253 | } | 161 | } |
254 | Ok(output) | 162 | Ok(output) |
255 | } | 163 | } |
256 | |||
257 | #[derive(Default, Debug)] | ||
258 | struct Tests { | ||
259 | pub ok: HashMap<String, Test>, | ||
260 | pub err: HashMap<String, Test>, | ||
261 | } | ||
262 | |||
263 | fn tests_from_dir(dir: &Path) -> Result<Tests> { | ||
264 | let mut res = Tests::default(); | ||
265 | for entry in ::walkdir::WalkDir::new(dir) { | ||
266 | let entry = entry.unwrap(); | ||
267 | if !entry.file_type().is_file() { | ||
268 | continue; | ||
269 | } | ||
270 | if entry.path().extension().unwrap_or_default() != "rs" { | ||
271 | continue; | ||
272 | } | ||
273 | process_file(&mut res, entry.path())?; | ||
274 | } | ||
275 | let grammar_rs = dir.parent().unwrap().join("grammar.rs"); | ||
276 | process_file(&mut res, &grammar_rs)?; | ||
277 | return Ok(res); | ||
278 | fn process_file(res: &mut Tests, path: &Path) -> Result<()> { | ||
279 | let text = fs::read_to_string(path)?; | ||
280 | |||
281 | for (_, test) in collect_tests(&text) { | ||
282 | if test.ok { | ||
283 | if let Some(old_test) = res.ok.insert(test.name.clone(), test) { | ||
284 | Err(format!("Duplicate test: {}", old_test.name))? | ||
285 | } | ||
286 | } else { | ||
287 | if let Some(old_test) = res.err.insert(test.name.clone(), test) { | ||
288 | Err(format!("Duplicate test: {}", old_test.name))? | ||
289 | } | ||
290 | } | ||
291 | } | ||
292 | Ok(()) | ||
293 | } | ||
294 | } | ||
295 | |||
296 | fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> { | ||
297 | let mut res = HashMap::new(); | ||
298 | for file in fs::read_dir(dir)? { | ||
299 | let file = file?; | ||
300 | let path = file.path(); | ||
301 | if path.extension().unwrap_or_default() != "rs" { | ||
302 | continue; | ||
303 | } | ||
304 | let name = { | ||
305 | let file_name = path.file_name().unwrap().to_str().unwrap(); | ||
306 | file_name[5..file_name.len() - 3].to_string() | ||
307 | }; | ||
308 | let text = fs::read_to_string(&path)?; | ||
309 | let test = Test { name: name.clone(), text, ok }; | ||
310 | if let Some(old) = res.insert(name, (path, test)) { | ||
311 | println!("Duplicate test: {:?}", old); | ||
312 | } | ||
313 | } | ||
314 | Ok(res) | ||
315 | } | ||
316 | |||
317 | /// A helper to update file on disk if it has changed. | ||
318 | /// With verify = false, | ||
319 | pub fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> { | ||
320 | match fs::read_to_string(path) { | ||
321 | Ok(ref old_contents) if old_contents == contents => { | ||
322 | return Ok(()); | ||
323 | } | ||
324 | _ => (), | ||
325 | } | ||
326 | if mode == Verify { | ||
327 | Err(format!("`{}` is not up-to-date", path.display()))?; | ||
328 | } | ||
329 | eprintln!("updating {}", path.display()); | ||
330 | fs::write(path, contents)?; | ||
331 | Ok(()) | ||
332 | } | ||
diff --git a/xtask/src/main.rs b/xtask/src/main.rs index c08915aac..db901ced2 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs | |||
@@ -7,8 +7,8 @@ use core::str; | |||
7 | use pico_args::Arguments; | 7 | use pico_args::Arguments; |
8 | use std::{env, path::PathBuf}; | 8 | use std::{env, path::PathBuf}; |
9 | use xtask::{ | 9 | use xtask::{ |
10 | gen_tests, generate_boilerplate, install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, | 10 | codegen::{self, Mode}, |
11 | run_with_output, Cmd, Overwrite, Result, | 11 | install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, run_with_output, Cmd, Result, |
12 | }; | 12 | }; |
13 | 13 | ||
14 | // Latest stable, feel free to send a PR if this lags behind. | 14 | // Latest stable, feel free to send a PR if this lags behind. |
@@ -57,26 +57,20 @@ fn main() -> Result<()> { | |||
57 | }; | 57 | }; |
58 | install(opts)? | 58 | install(opts)? |
59 | } | 59 | } |
60 | "gen-tests" => { | ||
61 | if matches.contains(["-h", "--help"]) { | ||
62 | help::print_no_param_subcommand_help(&subcommand); | ||
63 | return Ok(()); | ||
64 | } | ||
65 | gen_tests(Overwrite)? | ||
66 | } | ||
67 | "codegen" => { | 60 | "codegen" => { |
68 | if matches.contains(["-h", "--help"]) { | 61 | if matches.contains(["-h", "--help"]) { |
69 | help::print_no_param_subcommand_help(&subcommand); | 62 | help::print_no_param_subcommand_help(&subcommand); |
70 | return Ok(()); | 63 | return Ok(()); |
71 | } | 64 | } |
72 | generate_boilerplate(Overwrite)? | 65 | codegen::generate_syntax(Mode::Overwrite)?; |
66 | codegen::generate_parser_tests(Mode::Overwrite)?; | ||
73 | } | 67 | } |
74 | "format" => { | 68 | "format" => { |
75 | if matches.contains(["-h", "--help"]) { | 69 | if matches.contains(["-h", "--help"]) { |
76 | help::print_no_param_subcommand_help(&subcommand); | 70 | help::print_no_param_subcommand_help(&subcommand); |
77 | return Ok(()); | 71 | return Ok(()); |
78 | } | 72 | } |
79 | run_rustfmt(Overwrite)? | 73 | run_rustfmt(Mode::Overwrite)? |
80 | } | 74 | } |
81 | "format-hook" => { | 75 | "format-hook" => { |
82 | if matches.contains(["-h", "--help"]) { | 76 | if matches.contains(["-h", "--help"]) { |
diff --git a/xtask/tests/tidy-tests/cli.rs b/xtask/tests/tidy-tests/cli.rs index 5d8ddea83..543c7d7c4 100644 --- a/xtask/tests/tidy-tests/cli.rs +++ b/xtask/tests/tidy-tests/cli.rs | |||
@@ -1,23 +1,26 @@ | |||
1 | use walkdir::WalkDir; | 1 | use walkdir::WalkDir; |
2 | use xtask::{gen_tests, generate_boilerplate, project_root, run_rustfmt, Verify}; | 2 | use xtask::{ |
3 | codegen::{self, Mode}, | ||
4 | project_root, run_rustfmt, | ||
5 | }; | ||
3 | 6 | ||
4 | #[test] | 7 | #[test] |
5 | fn generated_grammar_is_fresh() { | 8 | fn generated_grammar_is_fresh() { |
6 | if let Err(error) = generate_boilerplate(Verify) { | 9 | if let Err(error) = codegen::generate_syntax(Mode::Verify) { |
7 | panic!("{}. Please update it by running `cargo xtask codegen`", error); | 10 | panic!("{}. Please update it by running `cargo xtask codegen`", error); |
8 | } | 11 | } |
9 | } | 12 | } |
10 | 13 | ||
11 | #[test] | 14 | #[test] |
12 | fn generated_tests_are_fresh() { | 15 | fn generated_tests_are_fresh() { |
13 | if let Err(error) = gen_tests(Verify) { | 16 | if let Err(error) = codegen::generate_parser_tests(Mode::Verify) { |
14 | panic!("{}. Please update tests by running `cargo xtask gen-tests`", error); | 17 | panic!("{}. Please update tests by running `cargo xtask codegen`", error); |
15 | } | 18 | } |
16 | } | 19 | } |
17 | 20 | ||
18 | #[test] | 21 | #[test] |
19 | fn check_code_formatting() { | 22 | fn check_code_formatting() { |
20 | if let Err(error) = run_rustfmt(Verify) { | 23 | if let Err(error) = run_rustfmt(Mode::Verify) { |
21 | panic!("{}. Please format the code by running `cargo format`", error); | 24 | panic!("{}. Please format the code by running `cargo format`", error); |
22 | } | 25 | } |
23 | } | 26 | } |