Diffstat (limited to 'crates/syntax')
-rw-r--r--  crates/syntax/src/ast/token_ext.rs  38
-rw-r--r--  crates/syntax/src/ast/traits.rs     38
2 files changed, 35 insertions(+), 41 deletions(-)
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index fa40e64e8..6167d50e2 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -18,12 +18,33 @@ impl ast::Comment {
     }
 
     pub fn prefix(&self) -> &'static str {
-        let &(prefix, _kind) = CommentKind::BY_PREFIX
-            .iter()
-            .find(|&(prefix, kind)| self.kind() == *kind && self.text().starts_with(prefix))
-            .unwrap();
+        let &(prefix, _kind) = CommentKind::with_prefix_from_text(self.text());
         prefix
     }
+
+    pub fn kind_and_prefix(&self) -> &(&'static str, CommentKind) {
+        CommentKind::with_prefix_from_text(self.text())
+    }
+
+    /// Returns the textual content of a doc comment block as a single string.
+    /// That is, strips leading `///` (+ optional 1 character of whitespace),
+    /// trailing `*/`, trailing whitespace and then joins the lines.
+    pub fn doc_comment(&self) -> Option<&str> {
+        match self.kind_and_prefix() {
+            (prefix, CommentKind { shape, doc: Some(_) }) => {
+                let text = &self.text().as_str()[prefix.len()..];
+                let ws = text.chars().next().filter(|c| c.is_whitespace());
+                let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
+                match shape {
+                    CommentShape::Block if text.ends_with("*/") => {
+                        Some(&text[..text.len() - "*/".len()])
+                    }
+                    _ => Some(text),
+                }
+            }
+            _ => None,
+        }
+    }
 }
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -67,12 +88,13 @@ impl CommentKind {
     ];
 
     pub(crate) fn from_text(text: &str) -> CommentKind {
-        let &(_prefix, kind) = CommentKind::BY_PREFIX
-            .iter()
-            .find(|&(prefix, _kind)| text.starts_with(prefix))
-            .unwrap();
+        let &(_prefix, kind) = Self::with_prefix_from_text(text);
         kind
     }
+
+    fn with_prefix_from_text(text: &str) -> &(&'static str, CommentKind) {
+        CommentKind::BY_PREFIX.iter().find(|&(prefix, _kind)| text.starts_with(prefix)).unwrap()
+    }
 }
 
 impl ast::Whitespace {
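
For reference, a standalone sketch (not part of this commit) of the stripping rule that the new `ast::Comment::doc_comment` helper applies: drop the comment prefix, at most one following whitespace character, and, for block comments, a trailing `*/`. The function name `strip_doc` and the `is_block` flag are illustrative only and do not exist in the crate.

    // Illustrative only: mirrors the stripping logic of the new `doc_comment`
    // over plain strings instead of `ast::Comment`.
    fn strip_doc<'a>(text: &'a str, prefix: &str, is_block: bool) -> &'a str {
        // Drop the prefix (`///`, `//!`, `/**`, ...).
        let text = &text[prefix.len()..];
        // Drop at most one leading whitespace character, if present.
        let ws = text.chars().next().filter(|c| c.is_whitespace());
        let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
        // For block comments, drop a trailing `*/`.
        if is_block && text.ends_with("*/") {
            &text[..text.len() - "*/".len()]
        } else {
            text
        }
    }

    fn main() {
        assert_eq!(strip_doc("/// hello", "///", false), "hello");
        assert_eq!(strip_doc("//!docs", "//!", false), "docs");
        // Trailing whitespace before `*/` is kept, as in the new helper.
        assert_eq!(strip_doc("/** hello */", "/**", true), "hello ");
    }
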
diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs
index 0bdc22d95..13a769d51 100644
--- a/crates/syntax/src/ast/traits.rs
+++ b/crates/syntax/src/ast/traits.rs
@@ -91,40 +91,12 @@ impl CommentIter {
     /// That is, strips leading `///` (+ optional 1 character of whitespace),
     /// trailing `*/`, trailing whitespace and then joins the lines.
     pub fn doc_comment_text(self) -> Option<String> {
-        let mut has_comments = false;
-        let docs = self
-            .filter(|comment| comment.kind().doc.is_some())
-            .map(|comment| {
-                has_comments = true;
-                let prefix_len = comment.prefix().len();
-
-                let line: &str = comment.text().as_str();
-
-                // Determine if the prefix or prefix + 1 char is stripped
-                let pos =
-                    if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) {
-                        prefix_len + ws.len_utf8()
-                    } else {
-                        prefix_len
-                    };
-
-                let end = if comment.kind().shape.is_block() && line.ends_with("*/") {
-                    line.len() - 2
-                } else {
-                    line.len()
-                };
-
-                // Note that we do not trim the end of the line here
-                // since whitespace can have special meaning at the end
-                // of a line in markdown.
-                line[pos..end].to_owned()
-            })
-            .join("\n");
-
-        if has_comments {
-            Some(docs)
-        } else {
+        let docs =
+            self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)).join("\n");
+        if docs.is_empty() {
             None
+        } else {
+            Some(docs)
         }
     }
 }
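
A similar sketch (again not part of the commit) of the shape of the rewritten `doc_comment_text`, over a plain iterator of per-comment results instead of `CommentIter`. In the crate the final `.join("\n")` comes from itertools; plain `Vec::join` keeps this sketch dependency-free. As in the commit, the emptiness check runs on the joined string, so an all-empty result maps to `None`.

    // Illustrative only: the join-and-check shape of the new `doc_comment_text`.
    fn doc_comment_text(comments: impl Iterator<Item = Option<String>>) -> Option<String> {
        // Keep only comments that produced doc text, then join with newlines.
        let docs = comments.flatten().collect::<Vec<_>>().join("\n");
        if docs.is_empty() {
            None
        } else {
            Some(docs)
        }
    }

    fn main() {
        let comments = vec![Some("first".to_string()), None, Some("second".to_string())];
        assert_eq!(doc_comment_text(comments.into_iter()), Some("first\nsecond".to_string()));
        assert_eq!(doc_comment_text(std::iter::empty::<Option<String>>()), None);
    }
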