aboutsummaryrefslogtreecommitdiff
path: root/tools/src
diff options
context:
space:
mode:
Diffstat (limited to 'tools/src')
-rw-r--r--tools/src/bin/collect-tests.rs133
-rw-r--r--tools/src/bin/gen.rs121
-rw-r--r--tools/src/bin/main.rs184
-rw-r--r--tools/src/bin/parse.rs19
4 files changed, 184 insertions, 273 deletions
diff --git a/tools/src/bin/collect-tests.rs b/tools/src/bin/collect-tests.rs
deleted file mode 100644
index a52e7b119..000000000
--- a/tools/src/bin/collect-tests.rs
+++ /dev/null
@@ -1,133 +0,0 @@
1extern crate file;
2extern crate itertools;
3extern crate walkdir;
4
5use walkdir::WalkDir;
6use itertools::Itertools;
7
8use std::path::{Path, PathBuf};
9use std::collections::HashSet;
10use std::fs;
11
12fn main() {
13 let verify = ::std::env::args().any(|arg| arg == "--verify");
14
15 let d = grammar_dir();
16 let tests = tests_from_dir(&d);
17 let existing = existing_tests();
18
19 for t in existing.difference(&tests) {
20 panic!("Test is deleted: {}\n{}", t.name, t.text);
21 }
22
23 let new_tests = tests.difference(&existing);
24 for (i, t) in new_tests.enumerate() {
25 if verify {
26 panic!("Inline test is not recorded: {}", t.name);
27 }
28
29 let name = format!("{:04}_{}.rs", existing.len() + i + 1, t.name);
30 println!("Creating {}", name);
31 let path = inline_tests_dir().join(name);
32 file::put_text(&path, &t.text).unwrap();
33 }
34}
35
/// An inline test extracted from a `// test <name>` comment block.
/// Identity is determined by `name` alone; `text` is the payload.
#[derive(Debug, Eq)]
struct Test {
    name: String,
    text: String,
}

impl PartialEq for Test {
    // Two tests are the same test iff they share a name, regardless of text.
    fn eq(&self, other: &Test) -> bool {
        self.name == other.name
    }
}

impl ::std::hash::Hash for Test {
    // Must stay consistent with `PartialEq`: hash only the name.
    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
        self.name.hash(state)
    }
}
53
54fn tests_from_dir(dir: &Path) -> HashSet<Test> {
55 let mut res = HashSet::new();
56 for entry in WalkDir::new(dir) {
57 let entry = entry.unwrap();
58 if !entry.file_type().is_file() {
59 continue;
60 }
61 if entry.path().extension().unwrap_or_default() != "rs" {
62 continue;
63 }
64 let text = file::get_text(entry.path()).unwrap();
65
66 for test in collect_tests(&text) {
67 if let Some(old_test) = res.replace(test) {
68 panic!("Duplicate test: {}", old_test.name)
69 }
70 }
71 }
72 res
73}
74
/// Extracts inline tests from Rust source text.
///
/// A test is a `// `-prefixed comment block containing a line that starts
/// with `test <name>`; the remaining comment lines become the test's text,
/// guaranteed non-empty and newline-terminated.
fn collect_tests(s: &str) -> Vec<Test> {
    let mut res = vec![];
    let prefix = "// ";
    // Group consecutive lines by whether they are `// ` comment lines.
    let comment_blocks = s.lines()
        .map(str::trim_left)
        .group_by(|line| line.starts_with(prefix));

    'outer: for (is_comment, block) in comment_blocks.into_iter() {
        if !is_comment {
            continue;
        }
        // Strip the `// ` prefix from every line of the comment block.
        let mut block = block.map(|line| &line[prefix.len()..]);

        // Scan forward for the `test <name>` marker; a comment block
        // without one is an ordinary comment and is skipped entirely.
        let name = loop {
            match block.next() {
                Some(line) if line.starts_with("test ") => break line["test ".len()..].to_string(),
                Some(_) => (),
                None => continue 'outer,
            }
        };
        // Remaining lines form the body; chaining "" makes `join` emit a
        // trailing newline, which the assert below relies on.
        let text: String = itertools::join(block.chain(::std::iter::once("")), "\n");
        assert!(!text.trim().is_empty() && text.ends_with("\n"));
        res.push(Test { name, text })
    }
    res
}
101
102fn existing_tests() -> HashSet<Test> {
103 let mut res = HashSet::new();
104 for file in fs::read_dir(&inline_tests_dir()).unwrap() {
105 let file = file.unwrap();
106 let path = file.path();
107 if path.extension().unwrap_or_default() != "rs" {
108 continue;
109 }
110 let name = path.file_name().unwrap().to_str().unwrap();
111 let name = name["0000_".len()..name.len() - 3].to_string();
112 let text = file::get_text(&path).unwrap();
113 res.insert(Test { name, text });
114 }
115 res
116}
117
118fn inline_tests_dir() -> PathBuf {
119 let res = base_dir().join("tests/data/parser/inline");
120 if !res.is_dir() {
121 fs::create_dir_all(&res).unwrap();
122 }
123 res
124}
125
126fn grammar_dir() -> PathBuf {
127 base_dir().join("src/parser/grammar")
128}
129
130fn base_dir() -> PathBuf {
131 let dir = env!("CARGO_MANIFEST_DIR");
132 PathBuf::from(dir).parent().unwrap().to_owned()
133}
diff --git a/tools/src/bin/gen.rs b/tools/src/bin/gen.rs
deleted file mode 100644
index 2d3cd422d..000000000
--- a/tools/src/bin/gen.rs
+++ /dev/null
@@ -1,121 +0,0 @@
1extern crate serde;
2#[macro_use]
3extern crate serde_derive;
4
5extern crate file;
6extern crate ron;
7
8use std::path::PathBuf;
9use std::fmt::Write;
10
11fn main() {
12 let grammar = Grammar::read();
13 let text = grammar.to_syntax_kinds();
14 let target = generated_file();
15 if text != file::get_text(&target).unwrap_or_default() {
16 file::put_text(&target, &text).unwrap();
17 }
18}
19
/// Deserialized shape of `grammar.ron`: the inventory of token, keyword,
/// and node names from which the `SyntaxKind` enum is generated.
#[derive(Deserialize)]
struct Grammar {
    // Keywords; these also feed `from_keyword` in the generated code.
    keywords: Vec<String>,
    // Contextual keywords: included in the enum but deliberately excluded
    // from `from_keyword` (see the NB comment in `to_syntax_kinds`).
    contextual_keywords: Vec<String>,
    // Non-keyword token kinds, emitted first in the enum.
    tokens: Vec<String>,
    // Syntax-tree node kinds, emitted last in the enum.
    nodes: Vec<String>,
}
27
impl Grammar {
    /// Loads and parses `grammar.ron`; panics on I/O or RON errors since
    /// this is a build-time tool.
    fn read() -> Grammar {
        let text = file::get_text(&grammar_file()).unwrap();
        ron::de::from_str(&text).unwrap()
    }

    /// Renders the full source of the generated file: the `SyntaxKind`
    /// enum plus its `info` and `from_keyword` methods. The output text is
    /// built verbatim, so every literal below is part of the file format.
    fn to_syntax_kinds(&self) -> String {
        let mut acc = String::new();
        acc.push_str("#![allow(bad_style, missing_docs, unreachable_pub)]\n");
        acc.push_str("#![cfg_attr(rustfmt, rustfmt_skip)]\n");
        acc.push_str("//! Generated from grammar.ron\n");
        acc.push_str("use super::SyntaxInfo;\n");
        acc.push_str("\n");

        // Enum variant order: tokens, keywords (as *_KW), contextual
        // keywords (as *_KW), then nodes.
        let syntax_kinds: Vec<String> = self.tokens
            .iter()
            .cloned()
            .chain(self.keywords.iter().map(|kw| kw_token(kw)))
            .chain(self.contextual_keywords.iter().map(|kw| kw_token(kw)))
            .chain(self.nodes.iter().cloned())
            .collect();

        // enum SyntaxKind
        acc.push_str("/// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.\n");
        acc.push_str("#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n");
        acc.push_str("pub enum SyntaxKind {\n");
        for kind in syntax_kinds.iter() {
            write!(acc, " {},\n", scream(kind)).unwrap();
        }
        acc.push_str("\n");
        acc.push_str(" // Technical SyntaxKinds: they appear temporally during parsing,\n");
        acc.push_str(" // but never end up in the final tree\n");
        acc.push_str(" #[doc(hidden)]\n");
        acc.push_str(" TOMBSTONE,\n");
        acc.push_str(" #[doc(hidden)]\n");
        acc.push_str(" EOF,\n");
        acc.push_str("}\n");
        acc.push_str("pub(crate) use self::SyntaxKind::*;\n");
        acc.push_str("\n");

        // fn info: maps every kind (including the hidden ones) to a
        // `SyntaxInfo` holding its screaming-case name.
        acc.push_str("impl SyntaxKind {\n");
        acc.push_str(" pub(crate) fn info(self) -> &'static SyntaxInfo {\n");
        acc.push_str(" match self {\n");
        for kind in syntax_kinds.iter() {
            let sname = scream(kind);
            write!(
                acc,
                " {sname} => &SyntaxInfo {{ name: \"{sname}\" }},\n",
                sname = sname
            ).unwrap();
        }
        acc.push_str("\n");
        acc.push_str(" TOMBSTONE => &SyntaxInfo { name: \"TOMBSTONE\" },\n");
        acc.push_str(" EOF => &SyntaxInfo { name: \"EOF\" },\n");
        acc.push_str(" }\n");
        acc.push_str(" }\n");

        // fn from_keyword: ident text -> keyword kind.
        acc.push_str(" pub(crate) fn from_keyword(ident: &str) -> Option<SyntaxKind> {\n");
        acc.push_str(" match ident {\n");
        // NB: no contextual_keywords here!
        for kw in self.keywords.iter() {
            write!(acc, " {:?} => Some({}),\n", kw, kw_token(kw)).unwrap();
        }
        acc.push_str(" _ => None,\n");
        acc.push_str(" }\n");
        acc.push_str(" }\n");
        acc.push_str("}\n");
        acc.push_str("\n");
        acc
    }
}
101
102fn grammar_file() -> PathBuf {
103 base_dir().join("src/grammar.ron")
104}
105
106fn generated_file() -> PathBuf {
107 base_dir().join("src/syntax_kinds/generated.rs")
108}
109
/// Converts a grammar word to SCREAMING_CASE by ASCII-uppercasing every
/// character (non-ASCII characters pass through unchanged).
fn scream(word: &str) -> String {
    // `str::to_ascii_uppercase` is the stdlib equivalent of mapping
    // `char::to_ascii_uppercase` over each char and collecting.
    word.to_ascii_uppercase()
}
113
114fn kw_token(keyword: &str) -> String {
115 format!("{}_KW", scream(keyword))
116}
117
118fn base_dir() -> PathBuf {
119 let dir = env!("CARGO_MANIFEST_DIR");
120 PathBuf::from(dir).parent().unwrap().to_owned()
121}
diff --git a/tools/src/bin/main.rs b/tools/src/bin/main.rs
new file mode 100644
index 000000000..6a9793fff
--- /dev/null
+++ b/tools/src/bin/main.rs
@@ -0,0 +1,184 @@
1extern crate clap;
2#[macro_use]
3extern crate failure;
4extern crate tera;
5extern crate ron;
6extern crate walkdir;
7extern crate itertools;
8
9use std::{
10 fs,
11 path::{Path},
12 collections::HashSet,
13};
14use clap::{App, Arg, SubCommand};
15use itertools::Itertools;
16
17type Result<T> = ::std::result::Result<T, failure::Error>;
18
19const GRAMMAR_DIR: &str = "./src/parser/grammar";
20const INLINE_TESTS_DIR: &str = "tests/data/parser/inline";
21const GRAMMAR: &str = "./src/grammar.ron";
22const SYNTAX_KINDS: &str = "./src/syntax_kinds/generated.rs";
23const SYNTAX_KINDS_TEMPLATE: &str = "./src/syntax_kinds/generated.rs.tera";
24
25fn main() -> Result<()> {
26 let matches = App::new("tasks")
27 .setting(clap::AppSettings::SubcommandRequiredElseHelp)
28 .arg(
29 Arg::with_name("verify")
30 .long("--verify")
31 .help("Verify that generated code is up-to-date")
32 .global(true)
33 )
34 .subcommand(SubCommand::with_name("gen-kinds"))
35 .subcommand(SubCommand::with_name("gen-tests"))
36 .get_matches();
37 match matches.subcommand() {
38 (name, Some(matches)) => run_gen_command(name, matches.is_present("verify"))?,
39 _ => unreachable!(),
40 }
41 Ok(())
42}
43
44fn run_gen_command(name: &str, verify: bool) -> Result<()> {
45 match name {
46 "gen-kinds" => update(Path::new(SYNTAX_KINDS), &get_kinds()?, verify),
47 "gen-tests" => gen_tests(verify),
48 _ => unreachable!(),
49 }
50}
51
52fn update(path: &Path, contents: &str, verify: bool) -> Result<()> {
53 match fs::read_to_string(path) {
54 Ok(ref old_contents) if old_contents == contents => {
55 return Ok(());
56 }
57 _ => (),
58 }
59 if verify {
60 bail!("`{}` is not up-to-date", path.display());
61 }
62 fs::write(path, contents)?;
63 Ok(())
64}
65
66fn get_kinds() -> Result<String> {
67 let grammar = grammar()?;
68 let template = fs::read_to_string(SYNTAX_KINDS_TEMPLATE)?;
69 let ret = tera::Tera::one_off(&template, &grammar, false).map_err(|e| {
70 format_err!("template error: {}", e)
71 })?;
72 Ok(ret)
73}
74
75fn grammar() -> Result<ron::value::Value> {
76 let text = fs::read_to_string(GRAMMAR)?;
77 let ret = ron::de::from_str(&text)?;
78 Ok(ret)
79}
80
81fn gen_tests(verify: bool) -> Result<()> {
82 let tests = tests_from_dir(Path::new(GRAMMAR_DIR))?;
83
84 let inline_tests_dir = Path::new(INLINE_TESTS_DIR);
85 if !inline_tests_dir.is_dir() {
86 fs::create_dir_all(inline_tests_dir)?;
87 }
88 let existing = existing_tests(inline_tests_dir)?;
89
90 for t in existing.difference(&tests) {
91 panic!("Test is deleted: {}\n{}", t.name, t.text);
92 }
93
94 let new_tests = tests.difference(&existing);
95 for (i, t) in new_tests.enumerate() {
96 let name = format!("{:04}_{}.rs", existing.len() + i + 1, t.name);
97 let path = inline_tests_dir.join(name);
98 update(&path, &t.text, verify)?;
99 }
100 Ok(())
101}
102
/// An inline test extracted from a `// test <name>` comment block.
/// Identity is determined by `name` alone; `text` is the payload.
#[derive(Debug, Eq)]
struct Test {
    name: String,
    text: String,
}

impl PartialEq for Test {
    // Two tests are the same test iff they share a name, regardless of text.
    fn eq(&self, other: &Test) -> bool {
        self.name == other.name
    }
}

impl ::std::hash::Hash for Test {
    // Must stay consistent with `PartialEq`: hash only the name.
    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
        self.name.hash(state)
    }
}
120
121fn tests_from_dir(dir: &Path) -> Result<HashSet<Test>> {
122 let mut res = HashSet::new();
123 for entry in ::walkdir::WalkDir::new(dir) {
124 let entry = entry.unwrap();
125 if !entry.file_type().is_file() {
126 continue;
127 }
128 if entry.path().extension().unwrap_or_default() != "rs" {
129 continue;
130 }
131 let text = fs::read_to_string(entry.path())?;
132
133 for test in collect_tests(&text) {
134 if let Some(old_test) = res.replace(test) {
135 bail!("Duplicate test: {}", old_test.name)
136 }
137 }
138 }
139 Ok(res)
140}
141
/// Extracts inline tests from Rust source text.
///
/// A test is a `// `-prefixed comment block containing a line that starts
/// with `test <name>`; the remaining comment lines become the test's text,
/// guaranteed non-empty and newline-terminated.
fn collect_tests(s: &str) -> Vec<Test> {
    let mut res = vec![];
    let prefix = "// ";
    // Group consecutive lines by whether they are `// ` comment lines.
    let comment_blocks = s.lines()
        .map(str::trim_left)
        .group_by(|line| line.starts_with(prefix));

    'outer: for (is_comment, block) in comment_blocks.into_iter() {
        if !is_comment {
            continue;
        }
        // Strip the `// ` prefix from every line of the comment block.
        let mut block = block.map(|line| &line[prefix.len()..]);

        // Scan forward for the `test <name>` marker; a comment block
        // without one is an ordinary comment and is skipped entirely.
        let name = loop {
            match block.next() {
                Some(line) if line.starts_with("test ") => break line["test ".len()..].to_string(),
                Some(_) => (),
                None => continue 'outer,
            }
        };
        // Remaining lines form the body; chaining "" makes `join` emit a
        // trailing newline, which the assert below relies on.
        let text: String = itertools::join(block.chain(::std::iter::once("")), "\n");
        assert!(!text.trim().is_empty() && text.ends_with("\n"));
        res.push(Test { name, text })
    }
    res
}
168
169fn existing_tests(dir: &Path) -> Result<HashSet<Test>> {
170 let mut res = HashSet::new();
171 for file in fs::read_dir(dir)? {
172 let file = file?;
173 let path = file.path();
174 if path.extension().unwrap_or_default() != "rs" {
175 continue;
176 }
177 let name = path.file_name().unwrap().to_str().unwrap();
178 let name = name["0000_".len()..name.len() - 3].to_string();
179 let text = fs::read_to_string(&path)?;
180 res.insert(Test { name, text });
181 }
182 Ok(res)
183}
184
diff --git a/tools/src/bin/parse.rs b/tools/src/bin/parse.rs
deleted file mode 100644
index cb3414711..000000000
--- a/tools/src/bin/parse.rs
+++ /dev/null
@@ -1,19 +0,0 @@
1extern crate libsyntax2;
2
3use std::io::Read;
4
5use libsyntax2::{parse};
6use libsyntax2::utils::dump_tree_green;
7
8fn main() {
9 let text = read_input();
10 let file = parse(text);
11 let tree = dump_tree_green(&file);
12 println!("{}", tree);
13}
14
/// Slurps all of stdin into a `String`; panics on I/O or UTF-8 errors.
fn read_input() -> String {
    let mut buf = String::new();
    ::std::io::stdin().read_to_string(&mut buf).unwrap();
    buf
}