Diffstat (limited to 'xtask/src/codegen')
-rw-r--r--  xtask/src/codegen/gen_lint_completions.rs  94
-rw-r--r--  xtask/src/codegen/gen_syntax.rs             26
2 files changed, 88 insertions, 32 deletions
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 24dbc6a39..54fcaa0e6 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -1,4 +1,5 @@
 //! Generates descriptors structure for unstable feature from Unstable Book
+use std::borrow::Cow;
 use std::fmt::Write;
 use std::path::{Path, PathBuf};
 
@@ -12,25 +13,75 @@ pub(crate) fn generate_lint_completions() -> Result<()> {
12 cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?; 13 cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
13 } 14 }
14 15
15 let mut contents = String::from("use crate::completions::attribute::LintCompletion;\n\n"); 16 let mut contents = String::from(
16 generate_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?; 17 r#"pub struct Lint {
18 pub label: &'static str,
19 pub description: &'static str,
20}
21
22"#,
23 );
24 generate_lint_descriptor(&mut contents)?;
25 contents.push('\n');
26
27 generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
17 contents.push('\n'); 28 contents.push('\n');
18 29
19 cmd!("curl http://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?; 30 cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
20 generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?; 31 generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
21 let contents = reformat(&contents)?; 32 let contents = reformat(&contents)?;
22 33
23 let destination = 34 let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
24 project_root().join("crates/ide_completion/src/generated_lint_completions.rs");
25 ensure_file_contents(destination.as_path(), &contents)?; 35 ensure_file_contents(destination.as_path(), &contents)?;
26 36
27 Ok(()) 37 Ok(())
28} 38}
29 39
30fn generate_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> { 40fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
31 buf.push_str(r#"pub(super) const FEATURES: &[LintCompletion] = &["#); 41 let stdout = cmd!("rustc -W help").read()?;
42 let start_lints =
43 stdout.find("---- ------- -------").ok_or_else(|| anyhow::format_err!(""))?;
44 let start_lint_groups =
45 stdout.find("---- ---------").ok_or_else(|| anyhow::format_err!(""))?;
46 let end_lints =
47 stdout.find("Lint groups provided by rustc:").ok_or_else(|| anyhow::format_err!(""))?;
48 let end_lint_groups = stdout
49 .find("Lint tools like Clippy can provide additional lints and lint groups.")
50 .ok_or_else(|| anyhow::format_err!(""))?;
51 buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
52 buf.push('\n');
53 let mut lints = stdout[start_lints..end_lints]
54 .lines()
55 .skip(1)
56 .filter(|l| !l.is_empty())
57 .map(|line| {
58 let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
59 let (_default_level, description) =
60 rest.trim().split_once(char::is_whitespace).unwrap();
61 (name.trim(), Cow::Borrowed(description.trim()))
62 })
63 .collect::<Vec<_>>();
64 lints.extend(
65 stdout[start_lint_groups..end_lint_groups].lines().skip(1).filter(|l| !l.is_empty()).map(
66 |line| {
67 let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
68 (name.trim(), format!("lint group for: {}", lints.trim()).into())
69 },
70 ),
71 );
72
73 lints.sort_by(|(ident, _), (ident2, _)| ident.cmp(ident2));
74 lints.into_iter().for_each(|(name, description)| {
75 push_lint_completion(buf, &name.replace("-", "_"), &description)
76 });
77 buf.push_str("];\n");
78 Ok(())
79}
80
81fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
82 buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
32 buf.push('\n'); 83 buf.push('\n');
33 ["language-features", "library-features"] 84 let mut vec = ["language-features", "library-features"]
34 .iter() 85 .iter()
35 .flat_map(|it| WalkDir::new(src_dir.join(it))) 86 .flat_map(|it| WalkDir::new(src_dir.join(it)))
36 .filter_map(|e| e.ok()) 87 .filter_map(|e| e.ok())
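Note on the new generate_lint_descriptor above: it slices the `rustc -W help` output between the table separators and splits each row twice on whitespace, once to take the lint name and once to drop the default level. A minimal, self-contained sketch of that row parsing, using an illustrative input row rather than real rustc output:

    fn parse_lint_row(line: &str) -> (String, String) {
        // First split: lint name vs. the rest of the row.
        let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
        // Second split: default level (allow/warn/deny) vs. description.
        let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
        // Lint names use `-` on the command line but `_` in attributes.
        (name.replace("-", "_"), description.trim().to_string())
    }

    fn main() {
        // Illustrative row; the real rows come from the locally installed rustc.
        let row = "    dead-code  warn  detect unused, unneeded code";
        let (name, description) = parse_lint_row(row);
        assert_eq!(name, "dead_code");
        assert_eq!(description, "detect unused, unneeded code");
    }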
@@ -38,13 +89,16 @@ fn generate_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
             // Get all `.md ` files
             entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
         })
-        .for_each(|entry| {
+        .map(|entry| {
             let path = entry.path();
             let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
             let doc = read_file(path).unwrap();
-
-            push_lint_completion(buf, &feature_ident, &doc);
-        });
+            (feature_ident, doc)
+        })
+        .collect::<Vec<_>>();
+    vec.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
+    vec.into_iter()
+        .for_each(|(feature_ident, doc)| push_lint_completion(buf, &feature_ident, &doc));
     buf.push_str("];\n");
     Ok(())
 }
@@ -55,6 +109,10 @@ struct ClippyLint {
     id: String,
 }
 
+fn unescape(s: &str) -> String {
+    s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
+}
+
 fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
     let file_content = read_file(path)?;
     let mut clippy_lints: Vec<ClippyLint> = vec![];
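The unescape helper added above only has to undo the string escapes that survive the hand-rolled parsing of lints.json. A quick illustration of its effect on an assumed (not real) escaped description:

    fn unescape(s: &str) -> String {
        // Drop escaped quotes, turn literal `\n` sequences into newlines, drop `\r`.
        s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
    }

    fn main() {
        let raw = r#"Checks for \"unwrap\" calls.\nPrefer expect instead.\r"#;
        assert_eq!(unescape(raw), "Checks for unwrap calls.\nPrefer expect instead.");
    }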
@@ -81,12 +139,12 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
                 .strip_prefix(prefix_to_strip)
                 .expect("should be prefixed by what it does")
                 .strip_suffix(suffix_to_strip)
-                .expect("should be suffixed by comma")
-                .into();
+                .map(unescape)
+                .expect("should be suffixed by comma");
         }
     }
-
-    buf.push_str(r#"pub(super) const CLIPPY_LINTS: &[LintCompletion] = &["#);
+    clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
+    buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
     buf.push('\n');
     clippy_lints.into_iter().for_each(|clippy_lint| {
         let lint_ident = format!("clippy::{}", clippy_lint.id);
@@ -102,7 +160,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
 fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
     writeln!(
         buf,
-        r###"    LintCompletion {{
+        r###"    Lint {{
         label: "{}",
         description: r##"{}"##
     }},"###,
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index ba4b24848..5435da76e 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -94,18 +94,16 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
                         support::children(&self.syntax)
                     }
                 }
-            } else {
-                if let Some(token_kind) = field.token_kind() {
-                    quote! {
-                        pub fn #method_name(&self) -> Option<#ty> {
-                            support::token(&self.syntax, #token_kind)
-                        }
-                    }
-                } else {
-                    quote! {
-                        pub fn #method_name(&self) -> Option<#ty> {
-                            support::child(&self.syntax)
-                        }
-                    }
-                }
+            } else if let Some(token_kind) = field.token_kind() {
+                quote! {
+                    pub fn #method_name(&self) -> Option<#ty> {
+                        support::token(&self.syntax, #token_kind)
+                    }
+                }
+            } else {
+                quote! {
+                    pub fn #method_name(&self) -> Option<#ty> {
+                        support::child(&self.syntax)
+                    }
+                }
             }
@@ -260,7 +258,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
     for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
         res.push_str(chunk);
         if let Some(doc) = docs.next() {
-            write_doc_comment(&doc, &mut res);
+            write_doc_comment(doc, &mut res);
         }
     }
 
@@ -296,14 +294,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
 
     let full_keywords_values = &grammar.keywords;
     let full_keywords =
-        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
+        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
 
     let all_keywords_values =
         grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
     let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
     let all_keywords = all_keywords_values
         .iter()
-        .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
+        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
         .collect::<Vec<_>>();
 
     let literals =
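As a reference for the gen_syntax.rs hunks above: collapsing the nested if into else if does not change what the quote! templates emit. The generated accessors in the syntax crate still come out in one of the three shapes shown, e.g. (type and method names here are assumed for illustration):

    impl ExprStmt {
        // Token field: resolved via support::token with the field's token kind.
        pub fn semicolon_token(&self) -> Option<SyntaxToken> {
            support::token(&self.syntax, T![;])
        }
        // Node field: resolved via support::child.
        pub fn expr(&self) -> Option<Expr> {
            support::child(&self.syntax)
        }
    }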