path: root/crates/ra_tools
author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-08-19 10:37:03 +0100
committer GitHub <[email protected]>                                2019-08-19 10:37:03 +0100
commit    53241928e5866b5aa501710e59b264905beeca1a (patch)
tree      2e1533cdefd25cda25f02711632ae8e206cc68f6 /crates/ra_tools
parent    83413cc9ef73d578f2e42c21f7aa85ef296be8c0 (diff)
parent    038975b348b03dd3063d14cabd30fa71b010baea (diff)
Merge #1697
1697: Use quote instead of tera for boilerplate generation r=matklad a=matklad

Co-authored-by: Aleksey Kladov <[email protected]>
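For context (not part of the commit itself): a minimal, self-contained sketch of the quote-based approach this PR switches to, assuming only the `quote` crate added in Cargo.toml below; the `kinds` list and enum contents are illustrative, not taken from the real grammar.

// Standalone sketch: build Rust source as a TokenStream with quote!/format_ident!
// instead of filling in a tera text template. Not code from this commit.
use quote::{format_ident, quote};

fn main() {
    // Illustrative input; the real generator reads such names from the grammar file.
    let kinds = ["STRUCT_DEF", "ENUM_DEF", "FN_DEF"]
        .iter()
        .map(|name| format_ident!("{}", name))
        .collect::<Vec<_>>();

    // #(...)* repetition replaces the template engine's for-loops.
    let tokens = quote! {
        pub enum SyntaxKind {
            #(#kinds,)*
        }
    };

    // The generator in this PR pipes the tokens through rustfmt; printing them is enough here.
    println!("{}", tokens);
}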
Diffstat (limited to 'crates/ra_tools')
-rw-r--r--  crates/ra_tools/Cargo.toml              |   6
-rw-r--r--  crates/ra_tools/src/boilerplate_gen.rs  | 342
-rw-r--r--  crates/ra_tools/src/lib.rs              |  43
-rw-r--r--  crates/ra_tools/src/main.rs             |   6
-rw-r--r--  crates/ra_tools/tests/cli.rs            |   4
5 files changed, 382 insertions, 19 deletions
diff --git a/crates/ra_tools/Cargo.toml b/crates/ra_tools/Cargo.toml
index 9c5430992..4c9aa1cc3 100644
--- a/crates/ra_tools/Cargo.toml
+++ b/crates/ra_tools/Cargo.toml
@@ -6,7 +6,11 @@ authors = ["rust-analyzer developers"]
 publish = false
 
 [dependencies]
-teraron = "0.1.0"
 walkdir = "2.1.3"
 itertools = "0.8.0"
 clap = "2.32.0"
+quote = "1.0.2"
+proc-macro2 = "1.0.1"
+ron = "0.5.1"
+heck = "0.3.0"
+serde = { version = "1.0.0", features = ["derive"] }
diff --git a/crates/ra_tools/src/boilerplate_gen.rs b/crates/ra_tools/src/boilerplate_gen.rs
new file mode 100644
index 000000000..7ef51e82a
--- /dev/null
+++ b/crates/ra_tools/src/boilerplate_gen.rs
@@ -0,0 +1,342 @@
+use std::{
+    collections::BTreeMap,
+    fs,
+    io::Write,
+    process::{Command, Stdio},
+};
+
+use heck::{ShoutySnakeCase, SnakeCase};
+use proc_macro2::{Punct, Spacing};
+use quote::{format_ident, quote};
+use ron;
+use serde::Deserialize;
+
+use crate::{project_root, update, Mode, Result, AST, GRAMMAR, SYNTAX_KINDS};
+
+pub fn generate_boilerplate(mode: Mode) -> Result<()> {
+    let grammar = project_root().join(GRAMMAR);
+    let grammar: Grammar = {
+        let text = fs::read_to_string(grammar)?;
+        ron::de::from_str(&text)?
+    };
+
+    let syntax_kinds_file = project_root().join(SYNTAX_KINDS);
+    let syntax_kinds = generate_syntax_kinds(&grammar)?;
+    update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;
+
+    let ast_file = project_root().join(AST);
+    let ast = generate_ast(&grammar)?;
+    update(ast_file.as_path(), &ast, mode)?;
+
+    Ok(())
+}
+
+fn generate_ast(grammar: &Grammar) -> Result<String> {
+    let nodes = grammar.ast.iter().map(|(name, ast_node)| {
+        let variants =
+            ast_node.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>();
+        let name = format_ident!("{}", name);
+
+        let kinds = if variants.is_empty() { vec![name.clone()] } else { variants.clone() }
+            .into_iter()
+            .map(|name| format_ident!("{}", name.to_string().to_shouty_snake_case()))
+            .collect::<Vec<_>>();
+
+        let variants = if variants.is_empty() {
+            None
+        } else {
+            let kind_enum = format_ident!("{}Kind", name);
+            Some(quote!(
+                pub enum #kind_enum {
+                    #(#variants(#variants),)*
+                }
+
+                #(
+                    impl From<#variants> for #name {
+                        fn from(node: #variants) -> #name {
+                            #name { syntax: node.syntax }
+                        }
+                    }
+                )*
+
+                impl #name {
+                    pub fn kind(&self) -> #kind_enum {
+                        let syntax = self.syntax.clone();
+                        match syntax.kind() {
+                            #(
+                                #kinds =>
+                                    #kind_enum::#variants(#variants { syntax }),
+                            )*
+                            _ => unreachable!(),
+                        }
+                    }
+                }
+            ))
+        };
+
+        let traits = ast_node.traits.iter().map(|trait_name| {
+            let trait_name = format_ident!("{}", trait_name);
+            quote!(impl ast::#trait_name for #name {})
+        });
+
+        let collections = ast_node.collections.iter().map(|(name, kind)| {
+            let method_name = format_ident!("{}", name);
+            let kind = format_ident!("{}", kind);
+            quote! {
+                pub fn #method_name(&self) -> AstChildren<#kind> {
+                    AstChildren::new(&self.syntax)
+                }
+            }
+        });
+
+        let options = ast_node.options.iter().map(|attr| {
+            let method_name = match attr {
+                Attr::Type(t) => format_ident!("{}", t.to_snake_case()),
+                Attr::NameType(n, _) => format_ident!("{}", n),
+            };
+            let ty = match attr {
+                Attr::Type(t) | Attr::NameType(_, t) => format_ident!("{}", t),
+            };
+            quote! {
+                pub fn #method_name(&self) -> Option<#ty> {
+                    AstChildren::new(&self.syntax).next()
+                }
+            }
+        });
+
+        quote! {
+            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+            pub struct #name {
+                pub(crate) syntax: SyntaxNode,
+            }
+
+            impl AstNode for #name {
+                fn can_cast(kind: SyntaxKind) -> bool {
+                    match kind {
+                        #(#kinds)|* => true,
+                        _ => false,
+                    }
+                }
+                fn cast(syntax: SyntaxNode) -> Option<Self> {
+                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+                }
+                fn syntax(&self) -> &SyntaxNode { &self.syntax }
+            }
+
+            #variants
+
+            #(#traits)*
+
+            impl #name {
+                #(#collections)*
+                #(#options)*
+            }
+        }
+    });
+
+    let ast = quote! {
+        use crate::{
+            SyntaxNode, SyntaxKind::{self, *},
+            ast::{self, AstNode, AstChildren},
+        };
+
+        #(#nodes)*
+    };
+
+    let pretty = reformat(ast)?;
+    Ok(pretty)
+}
+
+fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> {
+    let single_byte_tokens_values =
+        grammar.single_byte_tokens.iter().map(|(token, _name)| token.chars().next().unwrap());
+    let single_byte_tokens = grammar
+        .single_byte_tokens
+        .iter()
+        .map(|(_token, name)| format_ident!("{}", name))
+        .collect::<Vec<_>>();
+
+    let punctuation_values =
+        grammar.single_byte_tokens.iter().chain(grammar.multi_byte_tokens.iter()).map(
+            |(token, _name)| {
+                if "{}[]()".contains(token) {
+                    let c = token.chars().next().unwrap();
+                    quote! { #c }
+                } else {
+                    let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
+                    quote! { #(#cs)* }
+                }
+            },
+        );
+    let punctuation = single_byte_tokens
+        .clone()
+        .into_iter()
+        .chain(grammar.multi_byte_tokens.iter().map(|(_token, name)| format_ident!("{}", name)))
+        .collect::<Vec<_>>();
+
+    let full_keywords_values = &grammar.keywords;
+    let full_keywords =
+        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", kw.to_shouty_snake_case()));
+
+    let all_keywords_values =
+        grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
+    let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
+    let all_keywords = all_keywords_values
+        .iter()
+        .map(|name| format_ident!("{}_KW", name.to_shouty_snake_case()))
+        .collect::<Vec<_>>();
+
+    let literals =
+        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+    let ast = quote! {
+        #![allow(bad_style, missing_docs, unreachable_pub)]
+        use super::SyntaxInfo;
+
+        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
+        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+        #[repr(u16)]
+        pub enum SyntaxKind {
+            // Technical SyntaxKinds: they appear temporally during parsing,
+            // but never end up in the final tree
+            #[doc(hidden)]
+            TOMBSTONE,
+            #[doc(hidden)]
+            EOF,
+            #(#punctuation,)*
+            #(#all_keywords,)*
+            #(#literals,)*
+            #(#tokens,)*
+            #(#nodes,)*
+
+            // Technical kind so that we can cast from u16 safely
+            #[doc(hidden)]
+            __LAST,
+        }
+        use self::SyntaxKind::*;
+
+        impl From<u16> for SyntaxKind {
+            fn from(d: u16) -> SyntaxKind {
+                assert!(d <= (__LAST as u16));
+                unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }
+            }
+        }
+
+        impl From<SyntaxKind> for u16 {
+            fn from(k: SyntaxKind) -> u16 {
+                k as u16
+            }
+        }
+
+        impl SyntaxKind {
+            pub fn is_keyword(self) -> bool {
+                match self {
+                    #(#all_keywords)|* => true,
+                    _ => false,
+                }
+            }
+
+            pub fn is_punct(self) -> bool {
+                match self {
+                    #(#punctuation)|* => true,
+                    _ => false,
+                }
+            }
+
+            pub fn is_literal(self) -> bool {
+                match self {
+                    #(#literals)|* => true,
+                    _ => false,
+                }
+            }
+
+            pub(crate) fn info(self) -> &'static SyntaxInfo {
+                match self {
+                    #(#punctuation => &SyntaxInfo { name: stringify!(#punctuation) },)*
+                    #(#all_keywords => &SyntaxInfo { name: stringify!(#all_keywords) },)*
+                    #(#literals => &SyntaxInfo { name: stringify!(#literals) },)*
+                    #(#tokens => &SyntaxInfo { name: stringify!(#tokens) },)*
+                    #(#nodes => &SyntaxInfo { name: stringify!(#nodes) },)*
+                    TOMBSTONE => &SyntaxInfo { name: "TOMBSTONE" },
+                    EOF => &SyntaxInfo { name: "EOF" },
+                    __LAST => &SyntaxInfo { name: "__LAST" },
+                }
+            }
+
+            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+                let kw = match ident {
+                    #(#full_keywords_values => #full_keywords,)*
+                    _ => return None,
+                };
+                Some(kw)
+            }
+
+            pub fn from_char(c: char) -> Option<SyntaxKind> {
+                let tok = match c {
+                    #(#single_byte_tokens_values => #single_byte_tokens,)*
+                    _ => return None,
+                };
+                Some(tok)
+            }
+        }
+
+        #[macro_export]
+        macro_rules! T {
+            #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)*
+            #((#all_keywords_idents) => { $crate::SyntaxKind::#all_keywords };)*
+        }
+    };
+
+    reformat(ast)
+}
+
+fn reformat(text: impl std::fmt::Display) -> Result<String> {
+    let mut rustfmt = Command::new("rustfmt")
+        .arg("--config-path")
+        .arg(project_root().join("rustfmt.toml"))
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .spawn()?;
+    write!(rustfmt.stdin.take().unwrap(), "{}", text)?;
+    let output = rustfmt.wait_with_output()?;
+    let stdout = String::from_utf8(output.stdout)?;
+    let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`";
+    Ok(format!("// {}\n\n{}", preamble, stdout))
+}
+
+#[derive(Deserialize, Debug)]
+struct Grammar {
+    single_byte_tokens: Vec<(String, String)>,
+    multi_byte_tokens: Vec<(String, String)>,
+    keywords: Vec<String>,
+    contextual_keywords: Vec<String>,
+    literals: Vec<String>,
+    tokens: Vec<String>,
+    nodes: Vec<String>,
+    ast: BTreeMap<String, AstNode>,
+}
+
+#[derive(Deserialize, Debug)]
+struct AstNode {
+    #[serde(default)]
+    #[serde(rename = "enum")]
+    variants: Vec<String>,
+
+    #[serde(default)]
+    traits: Vec<String>,
+    #[serde(default)]
+    collections: Vec<(String, String)>,
+    #[serde(default)]
+    options: Vec<Attr>,
+}
+
+#[derive(Deserialize, Debug)]
+#[serde(untagged)]
+enum Attr {
+    Type(String),
+    NameType(String, String),
+}
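Aside (not part of the diff): the `Grammar` struct at the end of the new file is deserialized from a RON description of the language, read from the path in the `GRAMMAR` constant. The sketch below shows what feeding such a file through `ron` + `serde` looks like; `MiniGrammar` is a trimmed-down stand-in and its field contents are made up, not the real grammar.

// Hypothetical miniature of the RON grammar input consumed by boilerplate_gen;
// field names mirror a subset of the Grammar struct above, values are invented.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct MiniGrammar {
    single_byte_tokens: Vec<(String, String)>,
    keywords: Vec<String>,
    nodes: Vec<String>,
}

fn main() {
    let text = r#"(
        single_byte_tokens: [ (";", "SEMI"), (",", "COMMA") ],
        keywords: ["use", "fn", "struct"],
        nodes: ["SOURCE_FILE", "STRUCT_DEF"],
    )"#;
    let grammar: MiniGrammar = ron::de::from_str(text).unwrap();
    println!("{:?}", grammar);
}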
diff --git a/crates/ra_tools/src/lib.rs b/crates/ra_tools/src/lib.rs
index bb7845f7d..d47660369 100644
--- a/crates/ra_tools/src/lib.rs
+++ b/crates/ra_tools/src/lib.rs
@@ -1,3 +1,5 @@
+mod boilerplate_gen;
+
 use std::{
     collections::HashMap,
     error::Error,
@@ -9,7 +11,7 @@ use std::{
 
 use itertools::Itertools;
 
-pub use teraron::{Mode, Overwrite, Verify};
+pub use self::boilerplate_gen::generate_boilerplate;
 
 pub type Result<T> = std::result::Result<T, Box<dyn Error>>;
 
@@ -18,10 +20,17 @@ const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar";
 const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok";
 const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err";
 
-pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs.tera";
-pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs.tera";
+pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs";
+pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs";
 const TOOLCHAIN: &str = "stable";
 
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum Mode {
+    Overwrite,
+    Verify,
+}
+pub use Mode::*;
+
 #[derive(Debug)]
 pub struct Test {
     pub name: String,
@@ -66,15 +75,6 @@ pub fn collect_tests(s: &str) -> Vec<(usize, Test)> {
     res
 }
 
-pub fn generate(mode: Mode) -> Result<()> {
-    let grammar = project_root().join(GRAMMAR);
-    let syntax_kinds = project_root().join(SYNTAX_KINDS);
-    let ast = project_root().join(AST);
-    teraron::generate(&syntax_kinds, &grammar, mode)?;
-    teraron::generate(&ast, &grammar, mode)?;
-    Ok(())
-}
-
 pub fn project_root() -> PathBuf {
     Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(2).unwrap().to_path_buf()
 }
@@ -227,7 +227,7 @@ pub fn gen_tests(mode: Mode) -> Result<()> {
                     tests_dir.join(file_name)
                 }
             };
-            teraron::update(&path, &test.text, mode)?;
+            update(&path, &test.text, mode)?;
         }
         Ok(())
     }
@@ -311,3 +311,20 @@ fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test
     }
     Ok(res)
 }
+
+/// A helper to update file on disk if it has changed.
+/// With verify = false,
+pub fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
+    match fs::read_to_string(path) {
+        Ok(ref old_contents) if old_contents == contents => {
+            return Ok(());
+        }
+        _ => (),
+    }
+    if mode == Verify {
+        Err(format!("`{}` is not up-to-date", path.display()))?;
+    }
+    eprintln!("updating {}", path.display());
+    fs::write(path, contents)?;
+    Ok(())
+}
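Usage note (illustration only, not part of the diff): the new `Mode` enum lets one code path serve both regeneration (`Overwrite`, used by `cargo gen-syntax` in main.rs below) and a freshness check (`Verify`, used by the cli.rs test). A self-contained restatement of that pattern, with a hypothetical file name and contents:

// Standalone re-statement of the Verify/Overwrite pattern from update() above;
// the path and contents here are invented for the example.
use std::{fs, path::Path};

#[derive(PartialEq, Clone, Copy)]
enum Mode {
    Overwrite,
    Verify,
}

fn update(path: &Path, contents: &str, mode: Mode) -> Result<(), String> {
    if fs::read_to_string(path).map(|old| old == contents).unwrap_or(false) {
        return Ok(()); // file already matches: never rewrite, never fail
    }
    if mode == Mode::Verify {
        return Err(format!("`{}` is not up-to-date", path.display()));
    }
    fs::write(path, contents).map_err(|e| e.to_string()) // regenerate in place
}

fn main() {
    let path = Path::new("generated_example.txt");
    // CI-style check first; on mismatch, regenerate.
    if update(path, "generated contents\n", Mode::Verify).is_err() {
        update(path, "generated contents\n", Mode::Overwrite).unwrap();
    }
}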
diff --git a/crates/ra_tools/src/main.rs b/crates/ra_tools/src/main.rs
index 54d96e446..03cb9d5a7 100644
--- a/crates/ra_tools/src/main.rs
+++ b/crates/ra_tools/src/main.rs
@@ -1,8 +1,8 @@
 use clap::{App, Arg, SubCommand};
 use core::str;
 use ra_tools::{
-    gen_tests, generate, install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, Cmd,
-    Overwrite, Result,
+    gen_tests, generate_boilerplate, install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt,
+    Cmd, Overwrite, Result,
 };
 use std::{env, path::PathBuf};
 
@@ -49,7 +49,7 @@ fn main() -> Result<()> {
             install(opts)?
         }
         ("gen-tests", _) => gen_tests(Overwrite)?,
-        ("gen-syntax", _) => generate(Overwrite)?,
+        ("gen-syntax", _) => generate_boilerplate(Overwrite)?,
         ("format", _) => run_rustfmt(Overwrite)?,
         ("format-hook", _) => install_format_hook()?,
         ("lint", _) => run_clippy()?,
diff --git a/crates/ra_tools/tests/cli.rs b/crates/ra_tools/tests/cli.rs
index ae0eb337d..c672e5788 100644
--- a/crates/ra_tools/tests/cli.rs
+++ b/crates/ra_tools/tests/cli.rs
@@ -1,10 +1,10 @@
 use walkdir::WalkDir;
 
-use ra_tools::{gen_tests, generate, project_root, run_rustfmt, Verify};
+use ra_tools::{gen_tests, generate_boilerplate, project_root, run_rustfmt, Verify};
 
 #[test]
 fn generated_grammar_is_fresh() {
-    if let Err(error) = generate(Verify) {
+    if let Err(error) = generate_boilerplate(Verify) {
         panic!("{}. Please update it by running `cargo gen-syntax`", error);
     }
 }