Diffstat (limited to 'crates/ra_tools/src')
-rw-r--r--  crates/ra_tools/src/codegen.rs  147
1 file changed, 146 insertions, 1 deletion
diff --git a/crates/ra_tools/src/codegen.rs b/crates/ra_tools/src/codegen.rs
index 6fd672290..c7f37e20c 100644
--- a/crates/ra_tools/src/codegen.rs
+++ b/crates/ra_tools/src/codegen.rs
@@ -7,6 +7,7 @@ use std::{
 };
 
 use heck::{ShoutySnakeCase, SnakeCase};
+use proc_macro2::{Punct, Spacing};
 use quote::{format_ident, quote};
 use ron;
 use serde::Deserialize;
@@ -23,7 +24,7 @@ pub fn generate(mode: Mode) -> Result<()> {
     let _syntax_kinds = project_root().join(SYNTAX_KINDS);
     let _ast = project_root().join(AST);
 
-    let ast = generate_ast(&grammar)?;
+    let ast = generate_syntax_kinds(&grammar)?;
     println!("{}", ast);
     Ok(())
 }
@@ -144,6 +145,149 @@ fn generate_ast(grammar: &Grammar) -> Result<String> {
     Ok(pretty)
 }
 
+fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> {
+    let single_byte_tokens_values =
+        grammar.single_byte_tokens.iter().map(|(token, _name)| token.chars().next().unwrap());
+    let single_byte_tokens = grammar
+        .single_byte_tokens
+        .iter()
+        .map(|(_token, name)| format_ident!("{}", name))
+        .collect::<Vec<_>>();
+
+    let punctuation_values =
+        grammar.single_byte_tokens.iter().chain(grammar.multi_byte_tokens.iter()).map(
+            |(token, _name)| {
+                if "{}[]()".contains(token) {
+                    let c = token.chars().next().unwrap();
+                    quote! { #c }
+                } else {
+                    let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
+                    quote! { #(#cs)* }
+                }
+            },
+        );
+    let punctuation = single_byte_tokens
+        .clone()
+        .into_iter()
+        .chain(grammar.multi_byte_tokens.iter().map(|(_token, name)| format_ident!("{}", name)))
+        .collect::<Vec<_>>();
+
+    let keywords_values =
+        grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
+    let keywords_idents = keywords_values.iter().map(|kw| format_ident!("{}", kw));
+    let keywords = keywords_values
+        .iter()
+        .map(|name| format_ident!("{}_KW", name.to_shouty_snake_case()))
+        .collect::<Vec<_>>();
+
+    let literals =
+        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+    let ast = quote! {
+        #![allow(bad_style, missing_docs, unreachable_pub)]
+        use super::SyntaxInfo;
+
+        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
+        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+        #[repr(u16)]
+        pub enum SyntaxKind {
+            // Technical SyntaxKinds: they appear temporarily during parsing,
+            // but never end up in the final tree
+            #[doc(hidden)]
+            TOMBSTONE,
+            #[doc(hidden)]
+            EOF,
+            #(#punctuation,)*
+            #(#keywords,)*
+            #(#literals,)*
+            #(#tokens,)*
+            #(#nodes,)*
+
+            // Technical kind so that we can cast from u16 safely
+            #[doc(hidden)]
+            __LAST,
+        }
+        use self::SyntaxKind::*;
+
+        impl From<u16> for SyntaxKind {
+            fn from(d: u16) -> SyntaxKind {
+                assert!(d <= (__LAST as u16));
+                unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }
+            }
+        }
+
+        impl From<SyntaxKind> for u16 {
+            fn from(k: SyntaxKind) -> u16 {
+                k as u16
+            }
+        }
+
+        impl SyntaxKind {
+            pub fn is_keyword(self) -> bool {
+                match self {
+                    #(#keywords)|* => true,
+                    _ => false,
+                }
+            }
+
+            pub fn is_punct(self) -> bool {
+                match self {
+                    #(#punctuation)|* => true,
+                    _ => false,
+                }
+            }
+
+            pub fn is_literal(self) -> bool {
+                match self {
+                    #(#literals)|* => true,
+                    _ => false,
+                }
+            }
+
+            pub(crate) fn info(self) -> &'static SyntaxInfo {
+                match self {
+                    #(#punctuation => &SyntaxInfo { name: stringify!(#punctuation) },)*
+                    #(#keywords => &SyntaxInfo { name: stringify!(#keywords) },)*
+                    #(#literals => &SyntaxInfo { name: stringify!(#literals) },)*
+                    #(#tokens => &SyntaxInfo { name: stringify!(#tokens) },)*
+                    #(#nodes => &SyntaxInfo { name: stringify!(#nodes) },)*
+                    TOMBSTONE => &SyntaxInfo { name: "TOMBSTONE" },
+                    EOF => &SyntaxInfo { name: "EOF" },
+                    __LAST => &SyntaxInfo { name: "__LAST" },
+                }
+            }
+
+            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+                let kw = match ident {
+                    #(#keywords_values => #keywords,)*
+                    _ => return None,
+                };
+                Some(kw)
+            }
+
+            pub fn from_char(c: char) -> Option<SyntaxKind> {
+                let tok = match c {
+                    #(#single_byte_tokens_values => #single_byte_tokens,)*
+                    _ => return None,
+                };
+                Some(tok)
+            }
+        }
+
+        #[macro_export]
+        macro_rules! T {
+            #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)*
+            #((#keywords_idents) => { $crate::SyntaxKind::#keywords };)*
+        }
+    };
+
+    reformat(ast)
+}
+
 fn reformat(text: impl std::fmt::Display) -> Result<String> {
     let mut rustfmt = Command::new("rustfmt")
         .arg("--config-path")
@@ -166,6 +310,7 @@ struct Grammar {
     contextual_keywords: Vec<String>,
     literals: Vec<String>,
     tokens: Vec<String>,
+    nodes: Vec<String>,
     ast: BTreeMap<String, AstNode>,
 }
 
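Note (not part of the patch): to make the `quote!` template easier to follow, below is a hand-expanded miniature of the kind of code it emits, for a hypothetical toy grammar with a single punctuation token (`;` as `SEMI`) and a single keyword (`struct` as `STRUCT_KW`). The variant names, the `Debug` derive, and the simplified non-exported `T!` macro are illustrative assumptions, not the actual output for rust-analyzer's grammar.ron.

#![allow(bad_style, dead_code)]

// Miniature of the generated SyntaxKind enum: #[repr(u16)] plus the __LAST
// sentinel is what makes the u16 -> SyntaxKind transmute below sound.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[repr(u16)]
pub enum SyntaxKind {
    #[doc(hidden)]
    TOMBSTONE,
    #[doc(hidden)]
    EOF,
    SEMI,       // would come from single_byte_tokens
    STRUCT_KW,  // would come from keywords, suffixed with _KW
    #[doc(hidden)]
    __LAST,
}
use self::SyntaxKind::*;

impl From<u16> for SyntaxKind {
    fn from(d: u16) -> SyntaxKind {
        assert!(d <= (__LAST as u16));
        // Sound because of #[repr(u16)] and the bounds check above.
        unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }
    }
}

impl SyntaxKind {
    pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
        match ident {
            "struct" => Some(STRUCT_KW),
            _ => None,
        }
    }

    pub fn from_char(c: char) -> Option<SyntaxKind> {
        match c {
            ';' => Some(SEMI),
            _ => None,
        }
    }
}

// Stand-in for the generated T! macro; the real one is #[macro_export]
// and paths through $crate::SyntaxKind.
macro_rules! T {
    (;) => { SyntaxKind::SEMI };
    (struct) => { SyntaxKind::STRUCT_KW };
}

fn main() {
    assert_eq!(SyntaxKind::from_char(';'), Some(T![;]));
    assert_eq!(SyntaxKind::from_keyword("struct"), Some(T![struct]));
    // Discriminants are assigned in declaration order: TOMBSTONE = 0, ..., STRUCT_KW = 3.
    assert_eq!(SyntaxKind::from(3u16), STRUCT_KW);
}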