//! Generates the `SyntaxKind` enum and the typed AST accessors from the RON
//! grammar description, writing the results to the `SYNTAX_KINDS` and `AST`
//! files via `update`.

use std::{
    collections::BTreeMap,
    fs,
    io::Write,
    process::{Command, Stdio},
};

use heck::{ShoutySnakeCase, SnakeCase};
use proc_macro2::{Punct, Spacing};
use quote::{format_ident, quote};
use ron;
use serde::Deserialize;

use crate::{project_root, update, Mode, Result, AST, GRAMMAR, SYNTAX_KINDS};

/// Reads the grammar description and updates the generated `SyntaxKind` and
/// AST files according to `mode`.
pub fn generate_boilerplate(mode: Mode) -> Result<()> {
    let grammar = project_root().join(GRAMMAR);
    let grammar: Grammar = {
        let text = fs::read_to_string(grammar)?;
        ron::de::from_str(&text)?
    };

    let syntax_kinds_file = project_root().join(SYNTAX_KINDS);
    let syntax_kinds = generate_syntax_kinds(&grammar)?;
    update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;

    let ast_file = project_root().join(AST);
    let ast = generate_ast(&grammar)?;
    update(ast_file.as_path(), &ast, mode)?;

    Ok(())
}

/// Generates the typed AST node definitions (structs, kind enums, trait impls
/// and accessor methods) as formatted Rust source text.
fn generate_ast(grammar: &Grammar) -> Result<String> {
    let nodes = grammar.ast.iter().map(|(name, ast_node)| {
        let variants =
            ast_node.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>();
        let name = format_ident!("{}", name);

        let kinds = if variants.is_empty() { vec![name.clone()] } else { variants.clone() }
            .into_iter()
            .map(|name| format_ident!("{}", name.to_string().to_shouty_snake_case()))
            .collect::<Vec<_>>();

        let variants = if variants.is_empty() {
            None
        } else {
            let kind_enum = format_ident!("{}Kind", name);
            Some(quote!(
                pub enum #kind_enum {
                    #(#variants(#variants),)*
                }

                #(
                impl From<#variants> for #name {
                    fn from(node: #variants) -> #name {
                        #name { syntax: node.syntax }
                    }
                }
                )*

                impl #name {
                    pub fn kind(&self) -> #kind_enum {
                        let syntax = self.syntax.clone();
                        match syntax.kind() {
                            #(
                            #kinds => #kind_enum::#variants(#variants { syntax }),
                            )*
                            _ => unreachable!(),
                        }
                    }
                }
            ))
        };

        let traits = ast_node.traits.iter().map(|trait_name| {
            let trait_name = format_ident!("{}", trait_name);
            quote!(impl ast::#trait_name for #name {})
        });

        let collections = ast_node.collections.iter().map(|(name, kind)| {
            let method_name = format_ident!("{}", name);
            let kind = format_ident!("{}", kind);
            quote! {
                pub fn #method_name(&self) -> AstChildren<#kind> {
                    AstChildren::new(&self.syntax)
                }
            }
        });

        let options = ast_node.options.iter().map(|attr| {
            let method_name = match attr {
                Attr::Type(t) => format_ident!("{}", t.to_snake_case()),
                Attr::NameType(n, _) => format_ident!("{}", n),
            };
            let ty = match attr {
                Attr::Type(t) | Attr::NameType(_, t) => format_ident!("{}", t),
            };
            quote! {
                pub fn #method_name(&self) -> Option<#ty> {
                    AstChildren::new(&self.syntax).next()
                }
            }
        });

        quote! {
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub struct #name {
                pub(crate) syntax: SyntaxNode,
            }

            impl AstNode for #name {
                fn can_cast(kind: SyntaxKind) -> bool {
                    match kind {
                        #(#kinds)|* => true,
                        _ => false,
                    }
                }
                fn cast(syntax: SyntaxNode) -> Option<Self> {
                    if Self::can_cast(syntax.kind()) {
                        Some(Self { syntax })
                    } else {
                        None
                    }
                }
                fn syntax(&self) -> &SyntaxNode {
                    &self.syntax
                }
            }

            #variants

            #(#traits)*

            impl #name {
                #(#collections)*
                #(#options)*
            }
        }
    });

    let ast = quote! {
        use crate::{
            SyntaxNode, SyntaxKind::{self, *},
            ast::{self, AstNode, AstChildren},
        };

        #(#nodes)*
    };

    let pretty = reformat(ast)?;
    Ok(pretty)
}

/// Generates the `SyntaxKind` enum, its helper methods, and the `T!` macro as
/// formatted Rust source text.
fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> {
    let single_byte_tokens_values =
        grammar.single_byte_tokens.iter().map(|(token, _name)| token.chars().next().unwrap());
    let single_byte_tokens = grammar
        .single_byte_tokens
        .iter()
        .map(|(_token, name)| format_ident!("{}", name))
        .collect::<Vec<_>>();

    let punctuation_values =
        grammar.single_byte_tokens.iter().chain(grammar.multi_byte_tokens.iter()).map(
            |(token, _name)| {
                if "{}[]()".contains(token) {
                    let c = token.chars().next().unwrap();
                    quote! { #c }
                } else {
                    let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
                    quote! { #(#cs)* }
                }
            },
        );
    let punctuation = single_byte_tokens
        .clone()
        .into_iter()
        .chain(grammar.multi_byte_tokens.iter().map(|(_token, name)| format_ident!("{}", name)))
        .collect::<Vec<_>>();

    let full_keywords_values = &grammar.keywords;
    let full_keywords =
        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", kw.to_shouty_snake_case()));

    let all_keywords_values =
        grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
    let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
    let all_keywords = all_keywords_values
        .iter()
        .map(|name| format_ident!("{}_KW", name.to_shouty_snake_case()))
        .collect::<Vec<_>>();

    let literals =
        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let ast = quote! {
        #![allow(bad_style, missing_docs, unreachable_pub)]
        use super::SyntaxInfo;

        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
        #[repr(u16)]
        pub enum SyntaxKind {
            // Technical SyntaxKinds: they appear temporarily during parsing,
            // but never end up in the final tree
            #[doc(hidden)]
            TOMBSTONE,
            #[doc(hidden)]
            EOF,
            #(#punctuation,)*
            #(#all_keywords,)*
            #(#literals,)*
            #(#tokens,)*
            #(#nodes,)*

            // Technical kind so that we can cast from u16 safely
            #[doc(hidden)]
            __LAST,
        }
        use self::SyntaxKind::*;

        impl From<u16> for SyntaxKind {
            fn from(d: u16) -> SyntaxKind {
                assert!(d <= (__LAST as u16));
                unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }
            }
        }

        impl From<SyntaxKind> for u16 {
            fn from(k: SyntaxKind) -> u16 {
                k as u16
            }
        }

        impl SyntaxKind {
            pub fn is_keyword(self) -> bool {
                match self {
                    #(#all_keywords)|* => true,
                    _ => false,
                }
            }

            pub fn is_punct(self) -> bool {
                match self {
                    #(#punctuation)|* => true,
                    _ => false,
                }
            }

            pub fn is_literal(self) -> bool {
                match self {
                    #(#literals)|* => true,
                    _ => false,
                }
            }

            pub(crate) fn info(self) -> &'static SyntaxInfo {
                match self {
                    #(#punctuation => &SyntaxInfo { name: stringify!(#punctuation) },)*
                    #(#all_keywords => &SyntaxInfo { name: stringify!(#all_keywords) },)*
                    #(#literals => &SyntaxInfo { name: stringify!(#literals) },)*
                    #(#tokens => &SyntaxInfo { name: stringify!(#tokens) },)*
                    #(#nodes => &SyntaxInfo { name: stringify!(#nodes) },)*
                    TOMBSTONE => &SyntaxInfo { name: "TOMBSTONE" },
                    EOF => &SyntaxInfo { name: "EOF" },
                    __LAST => &SyntaxInfo { name: "__LAST" },
                }
            }

            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
                let kw = match ident {
                    #(#full_keywords_values => #full_keywords,)*
                    _ => return None,
                };
                Some(kw)
            }

            pub fn from_char(c: char) -> Option<SyntaxKind> {
                let tok = match c {
                    #(#single_byte_tokens_values => #single_byte_tokens,)*
                    _ => return None,
                };
                Some(tok)
            }
        }

        #[macro_export]
        macro_rules! T {
            #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)*
            #((#all_keywords_idents) => { $crate::SyntaxKind::#all_keywords };)*
        }
    };

    reformat(ast)
}

/// Pipes the generated code through `rustfmt` and prepends the
/// "do not edit" preamble.
fn reformat(text: impl std::fmt::Display) -> Result<String> {
    let mut rustfmt = Command::new("rustfmt")
        .arg("--config-path")
        .arg(project_root().join("rustfmt.toml"))
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;
    write!(rustfmt.stdin.take().unwrap(), "{}", text)?;
    let output = rustfmt.wait_with_output()?;
    let stdout = String::from_utf8(output.stdout)?;
    let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`";
    Ok(format!("// {}\n\n{}", preamble, stdout))
}

#[derive(Deserialize, Debug)]
struct Grammar {
    single_byte_tokens: Vec<(String, String)>,
    multi_byte_tokens: Vec<(String, String)>,
    keywords: Vec<String>,
    contextual_keywords: Vec<String>,
    literals: Vec<String>,
    tokens: Vec<String>,
    nodes: Vec<String>,
    ast: BTreeMap<String, AstNode>,
}

#[derive(Deserialize, Debug)]
struct AstNode {
    #[serde(default)]
    #[serde(rename = "enum")]
    variants: Vec<String>,
    #[serde(default)]
    traits: Vec<String>,
    #[serde(default)]
    collections: Vec<(String, String)>,
    #[serde(default)]
    options: Vec<Attr>,
}

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Attr {
    Type(String),
    NameType(String, String),
}
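
// Illustrative sketch only: a minimal RON document accepted by the `Grammar`,
// `AstNode`, and `Attr` Deserialize impls above might look roughly like the
// snippet below. The concrete token, keyword, and node names are made up for
// the example; the real grammar file defines the full set.
//
//     Grammar(
//         single_byte_tokens: [(";", "SEMI")],
//         multi_byte_tokens: [("==", "EQEQ")],
//         keywords: ["fn"],
//         contextual_keywords: ["union"],
//         literals: ["INT_NUMBER"],
//         tokens: ["IDENT"],
//         nodes: ["FN_DEF"],
//         ast: {
//             "FnDef": (
//                 traits: ["NameOwner"],
//                 collections: [("params", "Param")],
//                 // untagged `Attr`: a bare string is `Type`, a pair is `NameType`
//                 options: ["BlockExpr", ("ret_type", "RetType")],
//             ),
//         },
//     )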