author     Edwin Cheng <[email protected]>   2020-04-04 09:07:22 +0100
committer  Edwin Cheng <[email protected]>   2020-04-09 05:22:45 +0100
commit     6af1015f74ee8395ea7fe8339d8748089f05e3e4 (patch)
tree       7ac7295e2ea65327ada7dcba23b2d06ca27a6b73 /crates
parent     080c983498afcac3eb54028af5c9f8bfe7f2c826 (diff)
Add rustc_server (ra_tt rustc bridge)
Diffstat (limited to 'crates')
 crates/ra_proc_macro_srv/Cargo.toml          |   2 +-
 crates/ra_proc_macro_srv/src/lib.rs          |   3 +
 crates/ra_proc_macro_srv/src/rustc_server.rs | 686 ++++++++++++++
 3 files changed, 690 insertions(+), 1 deletion(-)
diff --git a/crates/ra_proc_macro_srv/Cargo.toml b/crates/ra_proc_macro_srv/Cargo.toml
index 6300d668a..f08de5fc7 100644
--- a/crates/ra_proc_macro_srv/Cargo.toml
+++ b/crates/ra_proc_macro_srv/Cargo.toml
@@ -10,9 +10,9 @@ doctest = false
 
 [dependencies]
 ra_tt = { path = "../ra_tt" }
+ra_mbe = { path = "../ra_mbe" }
 ra_proc_macro = { path = "../ra_proc_macro" }
 
-
 [dev-dependencies]
 cargo_metadata = "0.9.1"
 difference = "2.0.0"
diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs
index 80cfa1174..f376df236 100644
--- a/crates/ra_proc_macro_srv/src/lib.rs
+++ b/crates/ra_proc_macro_srv/src/lib.rs
@@ -14,6 +14,9 @@
 #[doc(hidden)]
 mod proc_macro;
 
+#[doc(hidden)]
+mod rustc_server;
+
 use proc_macro::bridge::client::TokenStream;
 use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
 
diff --git a/crates/ra_proc_macro_srv/src/rustc_server.rs b/crates/ra_proc_macro_srv/src/rustc_server.rs
new file mode 100644
index 000000000..76e6a2752
--- /dev/null
+++ b/crates/ra_proc_macro_srv/src/rustc_server.rs
@@ -0,0 +1,686 @@
//! Rustc proc-macro server implementation with ra_tt
//!
//! Based on the idea from https://github.com/fedochet/rust-proc-macro-expander
//! The proc_macro server backend is `TokenStream`-agnostic, so we can provide
//! any `TokenStream` implementation.
//! The original idea from fedochet uses proc-macro2 as the backend; we use
//! ra_tt instead for better integration with rust-analyzer.
//!
//! FIXME: No span and source file information is implemented yet

use crate::proc_macro::bridge::{self, server};
use ra_tt as tt;

use std::collections::{Bound, HashMap};
use std::hash::Hash;
use std::iter::FromIterator;
use std::str::FromStr;
use std::{ascii, vec::IntoIter};

type Group = tt::Subtree;
type TokenTree = tt::TokenTree;
type Punct = tt::Punct;
type Spacing = tt::Spacing;
type Literal = tt::Literal;
type Span = tt::TokenId;

#[derive(Debug, Clone)]
pub struct TokenStream {
    pub subtree: tt::Subtree,
}

impl TokenStream {
    pub fn new() -> Self {
        TokenStream { subtree: Default::default() }
    }
    pub fn with_subtree(subtree: tt::Subtree) -> Self {
        TokenStream { subtree }
    }
    pub fn is_empty(&self) -> bool {
        self.subtree.token_trees.is_empty()
    }
}

/// Creates a token stream containing a single token tree.
impl From<TokenTree> for TokenStream {
    fn from(tree: TokenTree) -> TokenStream {
        TokenStream { subtree: tt::Subtree { delimiter: None, token_trees: vec![tree] } }
    }
}

/// Collects a number of token trees into a single stream.
impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        trees.into_iter().map(TokenStream::from).collect()
    }
}

/// A "flattening" operation on token streams, collects token trees
/// from multiple token streams into a single stream.
impl FromIterator<TokenStream> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut builder = TokenStreamBuilder::new();
        streams.into_iter().for_each(|stream| builder.push(stream));
        builder.build()
    }
}

impl Extend<TokenTree> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
        self.extend(trees.into_iter().map(TokenStream::from));
    }
}

impl Extend<TokenStream> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        for item in streams {
            self.subtree.token_trees.extend(&mut item.into_iter())
        }
    }
}

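// Illustrative sketch (a hypothetical helper, not part of this commit's API):
// the `From`, `FromIterator` and `Extend` impls above are enough to assemble a
// `TokenStream` directly from plain ra_tt leaves.
#[allow(unused)]
fn _example_build_stream() -> TokenStream {
    let ident = tt::Leaf::from(tt::Ident { text: "foo".into(), id: tt::TokenId::unspecified() });
    let bang = tt::Leaf::from(tt::Punct {
        char: '!',
        spacing: tt::Spacing::Alone,
        id: tt::TokenId::unspecified(),
    });
    // Collect leaves into a stream, then flatten another (empty) stream into it.
    let mut stream: TokenStream =
        vec![TokenTree::from(ident), TokenTree::from(bang)].into_iter().collect();
    stream.extend(vec![TokenStream::new()]);
    stream
}
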
type Level = crate::proc_macro::Level;
type LineColumn = crate::proc_macro::LineColumn;
type SourceFile = crate::proc_macro::SourceFile;

/// A structure representing a diagnostic message and associated children
/// messages.
#[derive(Clone, Debug)]
pub struct Diagnostic {
    level: Level,
    message: String,
    spans: Vec<Span>,
    children: Vec<Diagnostic>,
}

impl Diagnostic {
    /// Creates a new diagnostic with the given `level` and `message`.
    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
    }
}

// The rustc server requires `Ident` to be `Copy`, so we intern the ident data
// and pass around a small `Copy` index (`IdentId`) as a stand-in.
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
pub struct IdentId(u32);

#[derive(Clone, Hash, Eq, PartialEq)]
struct IdentData(tt::Ident);

#[derive(Default)]
struct IdentInterner {
    idents: HashMap<IdentData, u32>,
    ident_data: Vec<IdentData>,
}

impl IdentInterner {
    fn intern(&mut self, data: &IdentData) -> u32 {
        if let Some(index) = self.idents.get(data) {
            return *index;
        }

        let index = self.idents.len() as u32;
        self.ident_data.push(data.clone());
        self.idents.insert(data.clone(), index);
        index
    }

    fn get(&self, index: u32) -> &IdentData {
        &self.ident_data[index as usize]
    }

    #[allow(unused)]
    fn get_mut(&mut self, index: u32) -> &mut IdentData {
        self.ident_data.get_mut(index as usize).expect("Should be consistent")
    }
}

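// Sketch of the interning round trip (a hypothetical helper, for illustration
// only): interning the same ident data twice yields the same `Copy`-able index,
// and `get` recovers the original `tt::Ident`.
#[allow(unused)]
fn _example_intern(interner: &mut IdentInterner) {
    let data = IdentData(tt::Ident { text: "foo".into(), id: tt::TokenId::unspecified() });
    let a = interner.intern(&data);
    let b = interner.intern(&data);
    assert_eq!(a, b);
    assert_eq!(interner.get(a).0.text.as_str(), "foo");
}
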
pub struct TokenStreamBuilder {
    acc: TokenStream,
}

/// Public implementation details for the `TokenStream` type, such as iterators.
pub mod token_stream {
    use super::{tt, TokenStream, TokenTree};
    use std::str::FromStr;

    /// An iterator over `TokenStream`'s `TokenTree`s.
    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
    /// and returns whole groups as token trees.
    impl IntoIterator for TokenStream {
        type Item = TokenTree;
        type IntoIter = super::IntoIter<TokenTree>;

        fn into_iter(self) -> Self::IntoIter {
            self.subtree.token_trees.into_iter()
        }
    }

    type LexError = String;

    /// Attempts to break the string into tokens and parse those tokens into a token stream.
    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
    /// or characters not existing in the language.
    /// All tokens in the parsed stream get `Span::call_site()` spans.
    ///
    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
    /// change these errors into `LexError`s later.
    impl FromStr for TokenStream {
        type Err = LexError;

        fn from_str(src: &str) -> Result<TokenStream, LexError> {
            let (subtree, _token_map) =
                ra_mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;

            let tt: tt::TokenTree = subtree.into();
            Ok(tt.into())
        }
    }

    impl ToString for TokenStream {
        fn to_string(&self) -> String {
            let tt = self.subtree.clone().into();
            to_text(&tt)
        }
    }

    fn to_text(tkn: &tt::TokenTree) -> String {
        match tkn {
            tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.text.clone().into(),
            tt::TokenTree::Leaf(tt::Leaf::Literal(literal)) => literal.text.clone().into(),
            tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => format!("{}", punct.char),
            tt::TokenTree::Subtree(subtree) => {
                let content = subtree
                    .token_trees
                    .iter()
                    .map(|tkn| {
                        let s = to_text(tkn);
                        if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn {
                            if punct.spacing == tt::Spacing::Alone {
                                return s + " ";
                            }
                        }
                        s
                    })
                    .collect::<Vec<_>>()
                    .concat();
                let (open, close) = match subtree.delimiter.map(|it| it.kind) {
                    None => ("", ""),
                    Some(tt::DelimiterKind::Brace) => ("{", "}"),
                    Some(tt::DelimiterKind::Parenthesis) => ("(", ")"),
                    Some(tt::DelimiterKind::Bracket) => ("[", "]"),
                };
                format!("{}{}{}", open, content, close)
            }
        }
    }
}
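
// Round-trip sketch for the `FromStr`/`ToString` impls above (a hypothetical
// helper; it assumes ra_mbe can lex the snippet, which this module's tests do
// not exercise directly):
#[allow(unused)]
fn _example_roundtrip() {
    let stream = TokenStream::from_str("struct S {}").expect("ra_mbe should lex this");
    assert!(stream.to_string().contains("struct"));
}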

impl TokenStreamBuilder {
    fn new() -> TokenStreamBuilder {
        TokenStreamBuilder { acc: TokenStream::new() }
    }

    fn push(&mut self, stream: TokenStream) {
        self.acc.extend(stream.into_iter())
    }

    fn build(self) -> TokenStream {
        self.acc
    }
}

#[derive(Clone)]
pub struct TokenStreamIter {
    trees: IntoIter<TokenTree>,
}

#[derive(Default)]
pub struct Rustc {
    ident_interner: IdentInterner,
    // FIXME: store span information here.
}

impl server::Types for Rustc {
    type TokenStream = TokenStream;
    type TokenStreamBuilder = TokenStreamBuilder;
    type TokenStreamIter = TokenStreamIter;
    type Group = Group;
    type Punct = Punct;
    type Ident = IdentId;
    type Literal = Literal;
    type SourceFile = SourceFile;
    type Diagnostic = Diagnostic;
    type Span = Span;
    type MultiSpan = Vec<Span>;
}

impl server::TokenStream for Rustc {
    fn new(&mut self) -> Self::TokenStream {
        Self::TokenStream::new()
    }

    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
        stream.is_empty()
    }
    fn from_str(&mut self, src: &str) -> Self::TokenStream {
        Self::TokenStream::from_str(src).expect("cannot parse string")
    }
    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
        stream.to_string()
    }
    fn from_token_tree(
        &mut self,
        tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
    ) -> Self::TokenStream {
        match tree {
            bridge::TokenTree::Group(group) => {
                let tree = TokenTree::from(group);
                Self::TokenStream::from_iter(vec![tree])
            }

            bridge::TokenTree::Ident(IdentId(index)) => {
                let IdentData(ident) = self.ident_interner.get(index).clone();
                let ident: tt::Ident = ident;
                let leaf = tt::Leaf::from(ident);
                let tree = TokenTree::from(leaf);
                Self::TokenStream::from_iter(vec![tree])
            }

            bridge::TokenTree::Literal(literal) => {
                let leaf = tt::Leaf::from(literal);
                let tree = TokenTree::from(leaf);
                Self::TokenStream::from_iter(vec![tree])
            }

            bridge::TokenTree::Punct(p) => {
                let leaf = tt::Leaf::from(p);
                let tree = TokenTree::from(leaf);
                Self::TokenStream::from_iter(vec![tree])
            }
        }
    }

    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
        let trees: Vec<TokenTree> = stream.into_iter().collect();
        TokenStreamIter { trees: trees.into_iter() }
    }
}

impl server::TokenStreamBuilder for Rustc {
    fn new(&mut self) -> Self::TokenStreamBuilder {
        Self::TokenStreamBuilder::new()
    }
    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
        builder.push(stream)
    }
    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
        builder.build()
    }
}

impl server::TokenStreamIter for Rustc {
    fn next(
        &mut self,
        iter: &mut Self::TokenStreamIter,
    ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
        iter.trees.next().map(|tree| match tree {
            TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
            TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
                bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
            }
            TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
            TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
        })
    }
}

fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
    let kind = match d {
        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
        bridge::Delimiter::None => return None,
    };
    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
}

fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
    match d.map(|it| it.kind) {
        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
        None => bridge::Delimiter::None,
    }
}

fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
    match spacing {
        bridge::Spacing::Alone => Spacing::Alone,
        bridge::Spacing::Joint => Spacing::Joint,
    }
}

fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
    match spacing {
        Spacing::Alone => bridge::Spacing::Alone,
        Spacing::Joint => bridge::Spacing::Joint,
    }
}

impl server::Group for Rustc {
    fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
        Self::Group {
            delimiter: delim_to_internal(delimiter),
            token_trees: stream.subtree.token_trees,
        }
    }
    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
        delim_to_external(group.delimiter)
    }

    // NOTE: the returned stream does not include the group's delimiter.
    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
        TokenStream {
            subtree: tt::Subtree { delimiter: None, token_trees: group.token_trees.clone() },
        }
    }

    fn span(&mut self, group: &Self::Group) -> Self::Span {
        group.delimiter.map(|it| it.id).unwrap_or_else(|| tt::TokenId::unspecified())
    }

    fn set_span(&mut self, _group: &mut Self::Group, _span: Self::Span) {
        // FIXME handle span
    }

    fn span_open(&mut self, _group: &Self::Group) -> Self::Span {
        // FIXME handle span
        // MySpan(self.span_interner.intern(&MySpanData(group.span_open())))
        tt::TokenId::unspecified()
    }

    fn span_close(&mut self, _group: &Self::Group) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
}

impl server::Punct for Rustc {
    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
        tt::Punct {
            char: ch,
            spacing: spacing_to_internal(spacing),
            id: tt::TokenId::unspecified(),
        }
    }
    fn as_char(&mut self, punct: Self::Punct) -> char {
        punct.char
    }
    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
        spacing_to_external(punct.spacing)
    }
    fn span(&mut self, _punct: Self::Punct) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn with_span(&mut self, punct: Self::Punct, _span: Self::Span) -> Self::Punct {
        // FIXME handle span
        punct
    }
}

impl server::Ident for Rustc {
    fn new(&mut self, string: &str, _span: Self::Span, _is_raw: bool) -> Self::Ident {
        IdentId(
            self.ident_interner.intern(&IdentData(tt::Ident {
                text: string.into(),
                id: tt::TokenId::unspecified(),
            })),
        )
    }

    fn span(&mut self, _ident: Self::Ident) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn with_span(&mut self, ident: Self::Ident, _span: Self::Span) -> Self::Ident {
        // FIXME handle span
        ident
    }
}

impl server::Literal for Rustc {
    // FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
    fn debug(&mut self, literal: &Self::Literal) -> String {
        format!("{:?}", literal)
    }

    fn integer(&mut self, n: &str) -> Self::Literal {
        let n: i128 = n.parse().unwrap();
        Literal { text: n.to_string().into(), id: tt::TokenId::unspecified() }
    }

    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
        macro_rules! def_suffixed_integer {
            ($kind:ident, $($ty:ty),*) => {
                match $kind {
                    $(
                        stringify!($ty) => {
                            let n: $ty = n.parse().unwrap();
                            format!(concat!("{}", stringify!($ty)), n)
                        }
                    )*
                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
                }
            }
        }

        let text =
            def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128};

        Literal { text: text.into(), id: tt::TokenId::unspecified() }
    }

    fn float(&mut self, n: &str) -> Self::Literal {
        let n: f64 = n.parse().unwrap();
        let mut text = f64::to_string(&n);
        if !text.contains('.') {
            text += ".0"
        }
        Literal { text: text.into(), id: tt::TokenId::unspecified() }
    }

    fn f32(&mut self, n: &str) -> Self::Literal {
        let n: f32 = n.parse().unwrap();
        let text = format!("{}f32", n);
        Literal { text: text.into(), id: tt::TokenId::unspecified() }
    }

    fn f64(&mut self, n: &str) -> Self::Literal {
        let n: f64 = n.parse().unwrap();
        let text = format!("{}f64", n);
        Literal { text: text.into(), id: tt::TokenId::unspecified() }
    }

    fn string(&mut self, string: &str) -> Self::Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_debug());
        }
        Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
    }

    fn character(&mut self, ch: char) -> Self::Literal {
        Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
    }

    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
        let string = bytes
            .iter()
            .cloned()
            .flat_map(ascii::escape_default)
            .map(Into::<char>::into)
            .collect::<String>();

        Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
    }

    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
        literal.id
    }

    fn set_span(&mut self, _literal: &mut Self::Literal, _span: Self::Span) {
        // FIXME handle span
    }

    fn subspan(
        &mut self,
        _literal: &Self::Literal,
        _start: Bound<usize>,
        _end: Bound<usize>,
    ) -> Option<Self::Span> {
        // FIXME handle span
        None
    }
}

impl server::SourceFile for Rustc {
    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
        file1.eq(file2)
    }
    fn path(&mut self, file: &Self::SourceFile) -> String {
        String::from(
            file.path().to_str().expect("non-UTF8 file path in `proc_macro::SourceFile::path`"),
        )
    }
    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
        file.is_real()
    }
}

impl server::Diagnostic for Rustc {
    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
        let mut diag = Diagnostic::new(level, msg);
        diag.spans = spans;
        diag
    }

    fn sub(
        &mut self,
        _diag: &mut Self::Diagnostic,
        _level: Level,
        _msg: &str,
        _spans: Self::MultiSpan,
    ) {
        // FIXME handle diagnostic
    }

    fn emit(&mut self, _diag: Self::Diagnostic) {
        // FIXME handle diagnostic
        // diag.emit()
    }
}

impl server::Span for Rustc {
    fn debug(&mut self, span: Self::Span) -> String {
        format!("{:?}", span.0)
    }
    fn def_site(&mut self) -> Self::Span {
        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn call_site(&mut self) -> Self::Span {
        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
        // FIXME handle span
        tt::TokenId::unspecified()
    }
    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
        // let MySpanData(span) = self.span_interner.get(span.0);
        unimplemented!()
    }

    /// A recent feature, not yet in `proc_macro`.
    ///
    /// See PR:
    /// https://github.com/rust-lang/rust/pull/55780
    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
        None
    }

    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
        // FIXME handle span
        None
    }
    fn source(&mut self, span: Self::Span) -> Self::Span {
        // FIXME handle span
        span
    }
    fn start(&mut self, _span: Self::Span) -> LineColumn {
        // FIXME handle span
        LineColumn { line: 0, column: 0 }
    }
    fn end(&mut self, _span: Self::Span) -> LineColumn {
        // FIXME handle span
        LineColumn { line: 0, column: 0 }
    }
    fn join(&mut self, _first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
        None
    }
    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }

    fn mixed_site(&mut self) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }
}

impl server::MultiSpan for Rustc {
    fn new(&mut self) -> Self::MultiSpan {
        // FIXME handle span
        vec![]
    }

    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
        // TODO
        other.push(span)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::proc_macro::bridge::server::Literal;

    #[test]
    fn test_rustc_server_literals() {
        let mut srv = Rustc { ident_interner: IdentInterner::default() };
        assert_eq!(srv.integer("1234").text, "1234");

        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
        assert_eq!(srv.float("0").text, "0.0");
        assert_eq!(srv.float("15684.5867").text, "15684.5867");
        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
        assert_eq!(srv.f64("15684.58").text, "15684.58f64");

        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
        assert_eq!(srv.character('c').text, "'c'");
        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
    }
}