-rw-r--r-- | bench_data/glorious_old_parser | 8562
-rw-r--r-- | crates/ide/src/syntax_highlighting/tests.rs | 37
-rw-r--r-- | crates/syntax/src/tests.rs | 28
-rw-r--r-- | crates/test_utils/src/bench_fixture.rs | 9
4 files changed, 8628 insertions, 8 deletions
diff --git a/bench_data/glorious_old_parser b/bench_data/glorious_old_parser
new file mode 100644
index 000000000..7e900dfeb
--- /dev/null
+++ b/bench_data/glorious_old_parser
@@ -0,0 +1,8562 @@
1 | use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy}; | ||
2 | use crate::ast::{GenericBound, TraitBoundModifier}; | ||
3 | use crate::ast::Unsafety; | ||
4 | use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind}; | ||
5 | use crate::ast::Block; | ||
6 | use crate::ast::{BlockCheckMode, CaptureBy, Movability}; | ||
7 | use crate::ast::{Constness, Crate}; | ||
8 | use crate::ast::Defaultness; | ||
9 | use crate::ast::EnumDef; | ||
10 | use crate::ast::{Expr, ExprKind, RangeLimits}; | ||
11 | use crate::ast::{Field, FnDecl, FnHeader}; | ||
12 | use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy}; | ||
13 | use crate::ast::{GenericParam, GenericParamKind}; | ||
14 | use crate::ast::GenericArg; | ||
15 | use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind}; | ||
16 | use crate::ast::{Label, Lifetime, Lit, LitKind}; | ||
17 | use crate::ast::Local; | ||
18 | use crate::ast::MacStmtStyle; | ||
19 | use crate::ast::{Mac, Mac_, MacDelimiter}; | ||
20 | use crate::ast::{MutTy, Mutability}; | ||
21 | use crate::ast::{Pat, PatKind, PathSegment}; | ||
22 | use crate::ast::{PolyTraitRef, QSelf}; | ||
23 | use crate::ast::{Stmt, StmtKind}; | ||
24 | use crate::ast::{VariantData, StructField}; | ||
25 | use crate::ast::StrStyle; | ||
26 | use crate::ast::SelfKind; | ||
27 | use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax}; | ||
28 | use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds}; | ||
29 | use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar}; | ||
30 | use crate::ast::{UseTree, UseTreeKind}; | ||
31 | use crate::ast::{BinOpKind, UnOp}; | ||
32 | use crate::ast::{RangeEnd, RangeSyntax}; | ||
33 | use crate::{ast, attr}; | ||
34 | use crate::ext::base::DummyResult; | ||
35 | use crate::source_map::{self, SourceMap, Spanned, respan}; | ||
36 | use crate::parse::{self, SeqSep, classify, token}; | ||
37 | use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace}; | ||
38 | use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; | ||
39 | use crate::parse::token::DelimToken; | ||
40 | use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; | ||
41 | use crate::util::parser::{AssocOp, Fixity}; | ||
42 | use crate::print::pprust; | ||
43 | use crate::ptr::P; | ||
44 | use crate::parse::PResult; | ||
45 | use crate::ThinVec; | ||
46 | use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; | ||
47 | use crate::symbol::{Symbol, keywords}; | ||
48 | |||
49 | use errors::{Applicability, DiagnosticBuilder, DiagnosticId}; | ||
50 | use rustc_target::spec::abi::{self, Abi}; | ||
51 | use syntax_pos::{Span, MultiSpan, BytePos, FileName}; | ||
52 | use log::{debug, trace}; | ||
53 | |||
54 | use std::borrow::Cow; | ||
55 | use std::cmp; | ||
56 | use std::mem; | ||
57 | use std::path::{self, Path, PathBuf}; | ||
58 | use std::slice; | ||
59 | |||
60 | #[derive(Debug)] | ||
61 | /// Whether the type alias or associated type is a concrete type or an existential type | ||
62 | pub enum AliasKind { | ||
63 | /// Just a new name for the same type | ||
64 | Weak(P<Ty>), | ||
65 | /// Only trait impls of the type will be usable, not the actual type itself | ||
66 | Existential(GenericBounds), | ||
67 | } | ||
68 | |||
69 | bitflags::bitflags! { | ||
70 | struct Restrictions: u8 { | ||
71 | const STMT_EXPR = 1 << 0; | ||
72 | const NO_STRUCT_LITERAL = 1 << 1; | ||
73 | } | ||
74 | } | ||
75 | |||
76 | type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>); | ||
77 | |||
78 | /// Specifies how to parse a path. | ||
79 | #[derive(Copy, Clone, PartialEq)] | ||
80 | pub enum PathStyle { | ||
81 | /// In some contexts, notably in expressions, paths with generic arguments are ambiguous | ||
82 | /// with something else. For example, in expressions `segment < ....` can be interpreted | ||
83 | /// as a comparison and `segment ( ....` can be interpreted as a function call. | ||
84 | /// In all such contexts the non-path interpretation is preferred by default for practical | ||
85 | /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g. | ||
86 | /// `x<y>` - comparisons, `x::<y>` - unambiguously a path. | ||
87 | Expr, | ||
88 | /// In other contexts, notably in types, no ambiguity exists and paths can be written | ||
89 | /// without the disambiguator, e.g., `x<y>` - unambiguously a path. | ||
90 | /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too. | ||
91 | Type, | ||
92 | /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports, | ||
93 | /// visibilities or attributes. | ||
94 | /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead | ||
95 | /// (paths in "mod" contexts have to be checked later for absence of generic arguments | ||
96 | /// anyway, due to macros), but it is used to avoid weird suggestions about expected | ||
97 | /// tokens when something goes wrong. | ||
98 | Mod, | ||
99 | } | ||
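// Illustrative sketch (not part of the original file): the ambiguity `PathStyle::Expr`
// guards against. In expression position `a < b` parses as a comparison, so generic
// arguments need the `::<...>` disambiguator ("turbofish"); in type position they don't.
#[allow(dead_code)]
fn path_style_examples() {
    let (a, b) = (1i32, 2i32);
    let _comparison: bool = a < b;              // `<` read as a comparison operator
    let _turbofish: Vec<u8> = Vec::<u8>::new(); // expression position needs `::<u8>`
    let _ty: Vec<u8> = Vec::new();              // type position `Vec<u8>` needs no `::`
}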
100 | |||
101 | #[derive(Clone, Copy, PartialEq, Debug)] | ||
102 | enum SemiColonMode { | ||
103 | Break, | ||
104 | Ignore, | ||
105 | Comma, | ||
106 | } | ||
107 | |||
108 | #[derive(Clone, Copy, PartialEq, Debug)] | ||
109 | enum BlockMode { | ||
110 | Break, | ||
111 | Ignore, | ||
112 | } | ||
113 | |||
114 | /// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression | ||
115 | /// dropped into the token stream, which happens while parsing the result of | ||
116 | /// macro expansion). Placement of these is not as complex as I feared it would | ||
117 | /// be. The important thing is to make sure that lookahead doesn't balk at | ||
118 | /// `token::Interpolated` tokens. | ||
119 | macro_rules! maybe_whole_expr { | ||
120 | ($p:expr) => { | ||
121 | if let token::Interpolated(nt) = $p.token.clone() { | ||
122 | match *nt { | ||
123 | token::NtExpr(ref e) | token::NtLiteral(ref e) => { | ||
124 | $p.bump(); | ||
125 | return Ok((*e).clone()); | ||
126 | } | ||
127 | token::NtPath(ref path) => { | ||
128 | $p.bump(); | ||
129 | let span = $p.span; | ||
130 | let kind = ExprKind::Path(None, (*path).clone()); | ||
131 | return Ok($p.mk_expr(span, kind, ThinVec::new())); | ||
132 | } | ||
133 | token::NtBlock(ref block) => { | ||
134 | $p.bump(); | ||
135 | let span = $p.span; | ||
136 | let kind = ExprKind::Block((*block).clone(), None); | ||
137 | return Ok($p.mk_expr(span, kind, ThinVec::new())); | ||
138 | } | ||
139 | _ => {}, | ||
140 | }; | ||
141 | } | ||
142 | } | ||
143 | } | ||
144 | |||
145 | /// Like `maybe_whole_expr!`, but for things other than expressions. | ||
146 | macro_rules! maybe_whole { | ||
147 | ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { | ||
148 | if let token::Interpolated(nt) = $p.token.clone() { | ||
149 | if let token::$constructor($x) = (*nt).clone() { | ||
150 | $p.bump(); | ||
151 | return Ok($e); | ||
152 | } | ||
153 | } | ||
154 | }; | ||
155 | } | ||
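// Illustrative sketch (standalone, simplified): the early-return shape these macros
// expand to. If the current token is already a pre-parsed ("interpolated") fragment of
// the requested kind, the parser takes it as-is instead of re-parsing it token by token.
#[allow(dead_code)]
enum ToyToken { InterpolatedExpr(String), Other(char) }

#[allow(dead_code)]
fn toy_parse_expr(current: &ToyToken) -> Result<String, ()> {
    // Analogue of `maybe_whole_expr!($p)`: bail out early with the whole fragment.
    if let ToyToken::InterpolatedExpr(e) = current {
        return Ok(e.clone());
    }
    // ... otherwise fall through to ordinary token-by-token expression parsing.
    Err(())
}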
156 | |||
157 | fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> { | ||
158 | if let Some(ref mut rhs) = rhs { | ||
159 | lhs.append(rhs); | ||
160 | } | ||
161 | lhs | ||
162 | } | ||
163 | |||
164 | #[derive(Debug, Clone, Copy, PartialEq)] | ||
165 | enum PrevTokenKind { | ||
166 | DocComment, | ||
167 | Comma, | ||
168 | Plus, | ||
169 | Interpolated, | ||
170 | Eof, | ||
171 | Ident, | ||
172 | Other, | ||
173 | } | ||
174 | |||
175 | trait RecoverQPath: Sized { | ||
176 | const PATH_STYLE: PathStyle = PathStyle::Expr; | ||
177 | fn to_ty(&self) -> Option<P<Ty>>; | ||
178 | fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self; | ||
179 | fn to_string(&self) -> String; | ||
180 | } | ||
181 | |||
182 | impl RecoverQPath for Ty { | ||
183 | const PATH_STYLE: PathStyle = PathStyle::Type; | ||
184 | fn to_ty(&self) -> Option<P<Ty>> { | ||
185 | Some(P(self.clone())) | ||
186 | } | ||
187 | fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self { | ||
188 | Self { span: path.span, node: TyKind::Path(qself, path), id: self.id } | ||
189 | } | ||
190 | fn to_string(&self) -> String { | ||
191 | pprust::ty_to_string(self) | ||
192 | } | ||
193 | } | ||
194 | |||
195 | impl RecoverQPath for Pat { | ||
196 | fn to_ty(&self) -> Option<P<Ty>> { | ||
197 | self.to_ty() | ||
198 | } | ||
199 | fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self { | ||
200 | Self { span: path.span, node: PatKind::Path(qself, path), id: self.id } | ||
201 | } | ||
202 | fn to_string(&self) -> String { | ||
203 | pprust::pat_to_string(self) | ||
204 | } | ||
205 | } | ||
206 | |||
207 | impl RecoverQPath for Expr { | ||
208 | fn to_ty(&self) -> Option<P<Ty>> { | ||
209 | self.to_ty() | ||
210 | } | ||
211 | fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self { | ||
212 | Self { span: path.span, node: ExprKind::Path(qself, path), | ||
213 | id: self.id, attrs: self.attrs.clone() } | ||
214 | } | ||
215 | fn to_string(&self) -> String { | ||
216 | pprust::expr_to_string(self) | ||
217 | } | ||
218 | } | ||
219 | |||
220 | /* ident is handled by common.rs */ | ||
221 | |||
222 | #[derive(Clone)] | ||
223 | pub struct Parser<'a> { | ||
224 | pub sess: &'a ParseSess, | ||
225 | /// the current token: | ||
226 | pub token: token::Token, | ||
227 | /// the span of the current token: | ||
228 | pub span: Span, | ||
229 | /// the span of the previous token: | ||
230 | meta_var_span: Option<Span>, | ||
231 | pub prev_span: Span, | ||
232 | /// the previous token kind | ||
233 | prev_token_kind: PrevTokenKind, | ||
234 | restrictions: Restrictions, | ||
235 | /// Used to determine the path to externally loaded source files | ||
236 | crate directory: Directory<'a>, | ||
237 | /// Whether to parse sub-modules in other files. | ||
238 | pub recurse_into_file_modules: bool, | ||
239 | /// Name of the root module this parser originated from. If `None`, then the | ||
240 | /// name is not known. This does not change while the parser is descending | ||
241 | /// into modules, and sub-parsers have new values for this name. | ||
242 | pub root_module_name: Option<String>, | ||
243 | crate expected_tokens: Vec<TokenType>, | ||
244 | token_cursor: TokenCursor, | ||
245 | desugar_doc_comments: bool, | ||
246 | /// Whether we should configure out of line modules as we parse. | ||
247 | pub cfg_mods: bool, | ||
248 | /// This field is used to keep track of how many left angle brackets we have seen. This is | ||
249 | /// required in order to detect extra leading left angle brackets (`<` characters) and error | ||
250 | /// appropriately. | ||
251 | /// | ||
252 | /// See the comments in the `parse_path_segment` function for more details. | ||
253 | crate unmatched_angle_bracket_count: u32, | ||
254 | crate max_angle_bracket_count: u32, | ||
255 | /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery | ||
256 | /// it gets removed from here. Every entry left at the end gets emitted as an independent | ||
257 | /// error. | ||
258 | crate unclosed_delims: Vec<UnmatchedBrace>, | ||
259 | } | ||
260 | |||
261 | |||
262 | #[derive(Clone)] | ||
263 | struct TokenCursor { | ||
264 | frame: TokenCursorFrame, | ||
265 | stack: Vec<TokenCursorFrame>, | ||
266 | } | ||
267 | |||
268 | #[derive(Clone)] | ||
269 | struct TokenCursorFrame { | ||
270 | delim: token::DelimToken, | ||
271 | span: DelimSpan, | ||
272 | open_delim: bool, | ||
273 | tree_cursor: tokenstream::Cursor, | ||
274 | close_delim: bool, | ||
275 | last_token: LastToken, | ||
276 | } | ||
277 | |||
278 | /// This is used in `TokenCursorFrame` above to track tokens that are consumed | ||
279 | /// by the parser, and then that's transitively used to record the tokens that | ||
280 | /// each parsed AST item is created with. | ||
281 | /// | ||
282 | /// Right now this has two states, either collecting tokens or not collecting | ||
283 | /// tokens. If we're collecting tokens we just save everything off into a local | ||
284 | /// `Vec`. Eventually, though, this should likely save tokens from the original | ||
285 | /// token stream and just use slicing of token streams to avoid creating a | ||
286 | /// whole new vector. | ||
287 | /// | ||
288 | /// The second state is where we're passively not recording tokens, but the last | ||
289 | /// token is still tracked for when we want to start recording tokens. This | ||
290 | /// "last token" means that when we start recording tokens we'll want to ensure | ||
291 | /// that this, the first token, is included in the output. | ||
292 | /// | ||
293 | /// You can find some more example usage of this in the `collect_tokens` method | ||
294 | /// on the parser. | ||
295 | #[derive(Clone)] | ||
296 | enum LastToken { | ||
297 | Collecting(Vec<TreeAndJoint>), | ||
298 | Was(Option<TreeAndJoint>), | ||
299 | } | ||
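// Illustrative sketch (standalone): the two states described above. While collecting,
// every consumed tree is pushed; while not collecting, only the most recently consumed
// tree is remembered so a later collection can still include it as its first element.
#[allow(dead_code)]
enum ToyLastToken<T> {
    Collecting(Vec<T>),
    Was(Option<T>),
}

#[allow(dead_code)]
impl<T: Clone> ToyLastToken<T> {
    fn record(&mut self, tree: &T) {
        match self {
            ToyLastToken::Collecting(v) => v.push(tree.clone()),
            ToyLastToken::Was(last) => *last = Some(tree.clone()),
        }
    }
}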
300 | |||
301 | impl TokenCursorFrame { | ||
302 | fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { | ||
303 | TokenCursorFrame { | ||
304 | delim: delim, | ||
305 | span: sp, | ||
306 | open_delim: delim == token::NoDelim, | ||
307 | tree_cursor: tts.clone().into_trees(), | ||
308 | close_delim: delim == token::NoDelim, | ||
309 | last_token: LastToken::Was(None), | ||
310 | } | ||
311 | } | ||
312 | } | ||
313 | |||
314 | impl TokenCursor { | ||
315 | fn next(&mut self) -> TokenAndSpan { | ||
316 | loop { | ||
317 | let tree = if !self.frame.open_delim { | ||
318 | self.frame.open_delim = true; | ||
319 | TokenTree::open_tt(self.frame.span.open, self.frame.delim) | ||
320 | } else if let Some(tree) = self.frame.tree_cursor.next() { | ||
321 | tree | ||
322 | } else if !self.frame.close_delim { | ||
323 | self.frame.close_delim = true; | ||
324 | TokenTree::close_tt(self.frame.span.close, self.frame.delim) | ||
325 | } else if let Some(frame) = self.stack.pop() { | ||
326 | self.frame = frame; | ||
327 | continue | ||
328 | } else { | ||
329 | return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP } | ||
330 | }; | ||
331 | |||
332 | match self.frame.last_token { | ||
333 | LastToken::Collecting(ref mut v) => v.push(tree.clone().into()), | ||
334 | LastToken::Was(ref mut t) => *t = Some(tree.clone().into()), | ||
335 | } | ||
336 | |||
337 | match tree { | ||
338 | TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp }, | ||
339 | TokenTree::Delimited(sp, delim, tts) => { | ||
340 | let frame = TokenCursorFrame::new(sp, delim, &tts); | ||
341 | self.stack.push(mem::replace(&mut self.frame, frame)); | ||
342 | } | ||
343 | } | ||
344 | } | ||
345 | } | ||
346 | |||
347 | fn next_desugared(&mut self) -> TokenAndSpan { | ||
348 | let (sp, name) = match self.next() { | ||
349 | TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name), | ||
350 | tok => return tok, | ||
351 | }; | ||
352 | |||
353 | let stripped = strip_doc_comment_decoration(&name.as_str()); | ||
354 | |||
355 | // Searches for the occurrences of `"#*` and returns the minimum number of `#`s | ||
356 | // required to wrap the text. | ||
357 | let mut num_of_hashes = 0; | ||
358 | let mut count = 0; | ||
359 | for ch in stripped.chars() { | ||
360 | count = match ch { | ||
361 | '"' => 1, | ||
362 | '#' if count > 0 => count + 1, | ||
363 | _ => 0, | ||
364 | }; | ||
365 | num_of_hashes = cmp::max(num_of_hashes, count); | ||
366 | } | ||
367 | |||
368 | let delim_span = DelimSpan::from_single(sp); | ||
369 | let body = TokenTree::Delimited( | ||
370 | delim_span, | ||
371 | token::Bracket, | ||
372 | [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)), | ||
373 | TokenTree::Token(sp, token::Eq), | ||
374 | TokenTree::Token(sp, token::Literal( | ||
375 | token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None)) | ||
376 | ] | ||
377 | .iter().cloned().collect::<TokenStream>().into(), | ||
378 | ); | ||
379 | |||
380 | self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new( | ||
381 | delim_span, | ||
382 | token::NoDelim, | ||
383 | &if doc_comment_style(&name.as_str()) == AttrStyle::Inner { | ||
384 | [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body] | ||
385 | .iter().cloned().collect::<TokenStream>().into() | ||
386 | } else { | ||
387 | [TokenTree::Token(sp, token::Pound), body] | ||
388 | .iter().cloned().collect::<TokenStream>().into() | ||
389 | }, | ||
390 | ))); | ||
391 | |||
392 | self.next() | ||
393 | } | ||
394 | } | ||
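// Illustrative sketch (standalone): the `num_of_hashes` scan in `next_desugared` above.
// It finds the longest run of `#`s immediately following a `"` in the doc text, which is
// the number of `#`s a raw string literal needs so the text cannot terminate it early.
#[allow(dead_code)]
fn min_raw_string_hashes(text: &str) -> u16 {
    let mut num_of_hashes = 0;
    let mut count = 0;
    for ch in text.chars() {
        count = match ch {
            '"' => 1,
            '#' if count > 0 => count + 1,
            _ => 0,
        };
        num_of_hashes = std::cmp::max(num_of_hashes, count);
    }
    num_of_hashes
}
// For a doc comment containing `"#`, this returns 2, so an `r##"..."##` literal is used.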
395 | |||
396 | #[derive(Clone, PartialEq)] | ||
397 | crate enum TokenType { | ||
398 | Token(token::Token), | ||
399 | Keyword(keywords::Keyword), | ||
400 | Operator, | ||
401 | Lifetime, | ||
402 | Ident, | ||
403 | Path, | ||
404 | Type, | ||
405 | Const, | ||
406 | } | ||
407 | |||
408 | impl TokenType { | ||
409 | fn to_string(&self) -> String { | ||
410 | match *self { | ||
411 | TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)), | ||
412 | TokenType::Keyword(kw) => format!("`{}`", kw.name()), | ||
413 | TokenType::Operator => "an operator".to_string(), | ||
414 | TokenType::Lifetime => "lifetime".to_string(), | ||
415 | TokenType::Ident => "identifier".to_string(), | ||
416 | TokenType::Path => "path".to_string(), | ||
417 | TokenType::Type => "type".to_string(), | ||
418 | TokenType::Const => "const".to_string(), | ||
419 | } | ||
420 | } | ||
421 | } | ||
422 | |||
423 | /// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`, | ||
424 | /// `IDENT<<u8 as Trait>::AssocTy>`. | ||
425 | /// | ||
426 | /// Types can also be of the form `IDENT(u8, u8) -> u8`; however, this assumes | ||
427 | /// that `IDENT` is not the ident of a fn trait. | ||
428 | fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { | ||
429 | t == &token::ModSep || t == &token::Lt || | ||
430 | t == &token::BinOp(token::Shl) | ||
431 | } | ||
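// Illustrative sketch (standalone): the type shapes the predicate above is probing for
// after an identifier, plus the parenthesized form that is reserved for the `Fn` traits.
#[allow(dead_code)]
mod type_continuation_examples {
    type PathAndGenerics = std::vec::Vec<u8>;                  // IDENT::a::b, IDENT<u8>
    type LeadingShl = Vec<<Option<u8> as IntoIterator>::Item>; // IDENT<<u8 as Trait>::AssocTy>
    type FnTraitSugar = Box<dyn Fn(u8, u8) -> u8>;             // IDENT(u8, u8) -> u8
}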
432 | |||
433 | /// Information about the path to a module. | ||
434 | pub struct ModulePath { | ||
435 | name: String, | ||
436 | path_exists: bool, | ||
437 | pub result: Result<ModulePathSuccess, Error>, | ||
438 | } | ||
439 | |||
440 | pub struct ModulePathSuccess { | ||
441 | pub path: PathBuf, | ||
442 | pub directory_ownership: DirectoryOwnership, | ||
443 | warn: bool, | ||
444 | } | ||
445 | |||
446 | pub enum Error { | ||
447 | FileNotFoundForModule { | ||
448 | mod_name: String, | ||
449 | default_path: String, | ||
450 | secondary_path: String, | ||
451 | dir_path: String, | ||
452 | }, | ||
453 | DuplicatePaths { | ||
454 | mod_name: String, | ||
455 | default_path: String, | ||
456 | secondary_path: String, | ||
457 | }, | ||
458 | UselessDocComment, | ||
459 | InclusiveRangeWithNoEnd, | ||
460 | } | ||
461 | |||
462 | impl Error { | ||
463 | fn span_err<S: Into<MultiSpan>>(self, | ||
464 | sp: S, | ||
465 | handler: &errors::Handler) -> DiagnosticBuilder<'_> { | ||
466 | match self { | ||
467 | Error::FileNotFoundForModule { ref mod_name, | ||
468 | ref default_path, | ||
469 | ref secondary_path, | ||
470 | ref dir_path } => { | ||
471 | let mut err = struct_span_err!(handler, sp, E0583, | ||
472 | "file not found for module `{}`", mod_name); | ||
473 | err.help(&format!("name the file either {} or {} inside the directory \"{}\"", | ||
474 | default_path, | ||
475 | secondary_path, | ||
476 | dir_path)); | ||
477 | err | ||
478 | } | ||
479 | Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { | ||
480 | let mut err = struct_span_err!(handler, sp, E0584, | ||
481 | "file for module `{}` found at both {} and {}", | ||
482 | mod_name, | ||
483 | default_path, | ||
484 | secondary_path); | ||
485 | err.help("delete or rename one of them to remove the ambiguity"); | ||
486 | err | ||
487 | } | ||
488 | Error::UselessDocComment => { | ||
489 | let mut err = struct_span_err!(handler, sp, E0585, | ||
490 | "found a documentation comment that doesn't document anything"); | ||
491 | err.help("doc comments must come before what they document, maybe a comment was \ | ||
492 | intended with `//`?"); | ||
493 | err | ||
494 | } | ||
495 | Error::InclusiveRangeWithNoEnd => { | ||
496 | let mut err = struct_span_err!(handler, sp, E0586, | ||
497 | "inclusive range with no end"); | ||
498 | err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)"); | ||
499 | err | ||
500 | } | ||
501 | } | ||
502 | } | ||
503 | } | ||
504 | |||
505 | #[derive(Debug)] | ||
506 | enum LhsExpr { | ||
507 | NotYetParsed, | ||
508 | AttributesParsed(ThinVec<Attribute>), | ||
509 | AlreadyParsed(P<Expr>), | ||
510 | } | ||
511 | |||
512 | impl From<Option<ThinVec<Attribute>>> for LhsExpr { | ||
513 | fn from(o: Option<ThinVec<Attribute>>) -> Self { | ||
514 | if let Some(attrs) = o { | ||
515 | LhsExpr::AttributesParsed(attrs) | ||
516 | } else { | ||
517 | LhsExpr::NotYetParsed | ||
518 | } | ||
519 | } | ||
520 | } | ||
521 | |||
522 | impl From<P<Expr>> for LhsExpr { | ||
523 | fn from(expr: P<Expr>) -> Self { | ||
524 | LhsExpr::AlreadyParsed(expr) | ||
525 | } | ||
526 | } | ||
527 | |||
528 | /// Creates a placeholder argument. | ||
529 | fn dummy_arg(span: Span) -> Arg { | ||
530 | let ident = Ident::new(keywords::Invalid.name(), span); | ||
531 | let pat = P(Pat { | ||
532 | id: ast::DUMMY_NODE_ID, | ||
533 | node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), | ||
534 | span, | ||
535 | }); | ||
536 | let ty = Ty { | ||
537 | node: TyKind::Err, | ||
538 | span, | ||
539 | id: ast::DUMMY_NODE_ID | ||
540 | }; | ||
541 | Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID } | ||
542 | } | ||
543 | |||
544 | #[derive(Copy, Clone, Debug)] | ||
545 | enum TokenExpectType { | ||
546 | Expect, | ||
547 | NoExpect, | ||
548 | } | ||
549 | |||
550 | impl<'a> Parser<'a> { | ||
551 | pub fn new(sess: &'a ParseSess, | ||
552 | tokens: TokenStream, | ||
553 | directory: Option<Directory<'a>>, | ||
554 | recurse_into_file_modules: bool, | ||
555 | desugar_doc_comments: bool) | ||
556 | -> Self { | ||
557 | let mut parser = Parser { | ||
558 | sess, | ||
559 | token: token::Whitespace, | ||
560 | span: syntax_pos::DUMMY_SP, | ||
561 | prev_span: syntax_pos::DUMMY_SP, | ||
562 | meta_var_span: None, | ||
563 | prev_token_kind: PrevTokenKind::Other, | ||
564 | restrictions: Restrictions::empty(), | ||
565 | recurse_into_file_modules, | ||
566 | directory: Directory { | ||
567 | path: Cow::from(PathBuf::new()), | ||
568 | ownership: DirectoryOwnership::Owned { relative: None } | ||
569 | }, | ||
570 | root_module_name: None, | ||
571 | expected_tokens: Vec::new(), | ||
572 | token_cursor: TokenCursor { | ||
573 | frame: TokenCursorFrame::new( | ||
574 | DelimSpan::dummy(), | ||
575 | token::NoDelim, | ||
576 | &tokens.into(), | ||
577 | ), | ||
578 | stack: Vec::new(), | ||
579 | }, | ||
580 | desugar_doc_comments, | ||
581 | cfg_mods: true, | ||
582 | unmatched_angle_bracket_count: 0, | ||
583 | max_angle_bracket_count: 0, | ||
584 | unclosed_delims: Vec::new(), | ||
585 | }; | ||
586 | |||
587 | let tok = parser.next_tok(); | ||
588 | parser.token = tok.tok; | ||
589 | parser.span = tok.sp; | ||
590 | |||
591 | if let Some(directory) = directory { | ||
592 | parser.directory = directory; | ||
593 | } else if !parser.span.is_dummy() { | ||
594 | if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) { | ||
595 | path.pop(); | ||
596 | parser.directory.path = Cow::from(path); | ||
597 | } | ||
598 | } | ||
599 | |||
600 | parser.process_potential_macro_variable(); | ||
601 | parser | ||
602 | } | ||
603 | |||
604 | fn next_tok(&mut self) -> TokenAndSpan { | ||
605 | let mut next = if self.desugar_doc_comments { | ||
606 | self.token_cursor.next_desugared() | ||
607 | } else { | ||
608 | self.token_cursor.next() | ||
609 | }; | ||
610 | if next.sp.is_dummy() { | ||
611 | // Tweak the location for better diagnostics, but keep syntactic context intact. | ||
612 | next.sp = self.prev_span.with_ctxt(next.sp.ctxt()); | ||
613 | } | ||
614 | next | ||
615 | } | ||
616 | |||
617 | /// Converts the current token to a string using `self`'s reader. | ||
618 | pub fn this_token_to_string(&self) -> String { | ||
619 | pprust::token_to_string(&self.token) | ||
620 | } | ||
621 | |||
622 | fn token_descr(&self) -> Option<&'static str> { | ||
623 | Some(match &self.token { | ||
624 | t if t.is_special_ident() => "reserved identifier", | ||
625 | t if t.is_used_keyword() => "keyword", | ||
626 | t if t.is_unused_keyword() => "reserved keyword", | ||
627 | token::DocComment(..) => "doc comment", | ||
628 | _ => return None, | ||
629 | }) | ||
630 | } | ||
631 | |||
632 | fn this_token_descr(&self) -> String { | ||
633 | if let Some(prefix) = self.token_descr() { | ||
634 | format!("{} `{}`", prefix, self.this_token_to_string()) | ||
635 | } else { | ||
636 | format!("`{}`", self.this_token_to_string()) | ||
637 | } | ||
638 | } | ||
639 | |||
640 | fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> { | ||
641 | let token_str = pprust::token_to_string(t); | ||
642 | Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str))) | ||
643 | } | ||
644 | |||
645 | crate fn unexpected<T>(&mut self) -> PResult<'a, T> { | ||
646 | match self.expect_one_of(&[], &[]) { | ||
647 | Err(e) => Err(e), | ||
648 | Ok(_) => unreachable!(), | ||
649 | } | ||
650 | } | ||
651 | |||
652 | /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. | ||
653 | pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { | ||
654 | if self.expected_tokens.is_empty() { | ||
655 | if self.token == *t { | ||
656 | self.bump(); | ||
657 | Ok(false) | ||
658 | } else { | ||
659 | let token_str = pprust::token_to_string(t); | ||
660 | let this_token_str = self.this_token_descr(); | ||
661 | let mut err = self.fatal(&format!("expected `{}`, found {}", | ||
662 | token_str, | ||
663 | this_token_str)); | ||
664 | |||
665 | let sp = if self.token == token::Token::Eof { | ||
666 | // EOF, don't want to point at the following char, but rather the last token | ||
667 | self.prev_span | ||
668 | } else { | ||
669 | self.sess.source_map().next_point(self.prev_span) | ||
670 | }; | ||
671 | let label_exp = format!("expected `{}`", token_str); | ||
672 | match self.recover_closing_delimiter(&[t.clone()], err) { | ||
673 | Err(e) => err = e, | ||
674 | Ok(recovered) => { | ||
675 | return Ok(recovered); | ||
676 | } | ||
677 | } | ||
678 | let cm = self.sess.source_map(); | ||
679 | match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { | ||
680 | (Ok(ref a), Ok(ref b)) if a.line == b.line => { | ||
681 | // When the spans are in the same line, it means that the only content | ||
682 | // between them is whitespace, point only at the found token. | ||
683 | err.span_label(self.span, label_exp); | ||
684 | } | ||
685 | _ => { | ||
686 | err.span_label(sp, label_exp); | ||
687 | err.span_label(self.span, "unexpected token"); | ||
688 | } | ||
689 | } | ||
690 | Err(err) | ||
691 | } | ||
692 | } else { | ||
693 | self.expect_one_of(slice::from_ref(t), &[]) | ||
694 | } | ||
695 | } | ||
696 | |||
697 | fn recover_closing_delimiter( | ||
698 | &mut self, | ||
699 | tokens: &[token::Token], | ||
700 | mut err: DiagnosticBuilder<'a>, | ||
701 | ) -> PResult<'a, bool> { | ||
702 | let mut pos = None; | ||
703 | // we want to use the last closing delim that would apply | ||
704 | for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { | ||
705 | if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) | ||
706 | && Some(self.span) > unmatched.unclosed_span | ||
707 | { | ||
708 | pos = Some(i); | ||
709 | } | ||
710 | } | ||
711 | match pos { | ||
712 | Some(pos) => { | ||
713 | // Recover and assume that the detected unclosed delimiter was meant for | ||
714 | // this location. Emit the diagnostic and act as if the delimiter was | ||
715 | // present for the parser's sake. | ||
716 | |||
717 | // Don't attempt to recover from this unclosed delimiter more than once. | ||
718 | let unmatched = self.unclosed_delims.remove(pos); | ||
719 | let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); | ||
720 | |||
721 | // We want to suggest the inclusion of the closing delimiter where it makes | ||
722 | // the most sense, which is immediately after the last token: | ||
723 | // | ||
724 | // {foo(bar {}} | ||
725 | // - ^ | ||
726 | // | | | ||
727 | // | help: `)` may belong here (FIXME: #58270) | ||
728 | // | | ||
729 | // unclosed delimiter | ||
730 | if let Some(sp) = unmatched.unclosed_span { | ||
731 | err.span_label(sp, "unclosed delimiter"); | ||
732 | } | ||
733 | err.span_suggestion_short( | ||
734 | self.sess.source_map().next_point(self.prev_span), | ||
735 | &format!("{} may belong here", delim.to_string()), | ||
736 | delim.to_string(), | ||
737 | Applicability::MaybeIncorrect, | ||
738 | ); | ||
739 | err.emit(); | ||
740 | self.expected_tokens.clear(); // reduce errors | ||
741 | Ok(true) | ||
742 | } | ||
743 | _ => Err(err), | ||
744 | } | ||
745 | } | ||
746 | |||
747 | /// Expects the next token to be an edible or inedible token. If edible, | ||
748 | /// consumes it; if inedible, returns without consuming | ||
749 | /// anything. Signals a fatal error if the next token is unexpected. | ||
750 | pub fn expect_one_of( | ||
751 | &mut self, | ||
752 | edible: &[token::Token], | ||
753 | inedible: &[token::Token], | ||
754 | ) -> PResult<'a, bool /* recovered */> { | ||
755 | fn tokens_to_string(tokens: &[TokenType]) -> String { | ||
756 | let mut i = tokens.iter(); | ||
757 | // This might be a sign we need a connect method on Iterator. | ||
758 | let b = i.next() | ||
759 | .map_or(String::new(), |t| t.to_string()); | ||
760 | i.enumerate().fold(b, |mut b, (i, a)| { | ||
761 | if tokens.len() > 2 && i == tokens.len() - 2 { | ||
762 | b.push_str(", or "); | ||
763 | } else if tokens.len() == 2 && i == tokens.len() - 2 { | ||
764 | b.push_str(" or "); | ||
765 | } else { | ||
766 | b.push_str(", "); | ||
767 | } | ||
768 | b.push_str(&a.to_string()); | ||
769 | b | ||
770 | }) | ||
771 | } | ||
772 | if edible.contains(&self.token) { | ||
773 | self.bump(); | ||
774 | Ok(false) | ||
775 | } else if inedible.contains(&self.token) { | ||
776 | // leave it in the input | ||
777 | Ok(false) | ||
778 | } else { | ||
779 | let mut expected = edible.iter() | ||
780 | .map(|x| TokenType::Token(x.clone())) | ||
781 | .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) | ||
782 | .chain(self.expected_tokens.iter().cloned()) | ||
783 | .collect::<Vec<_>>(); | ||
784 | expected.sort_by_cached_key(|x| x.to_string()); | ||
785 | expected.dedup(); | ||
786 | let expect = tokens_to_string(&expected[..]); | ||
787 | let actual = self.this_token_to_string(); | ||
788 | let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { | ||
789 | let short_expect = if expected.len() > 6 { | ||
790 | format!("{} possible tokens", expected.len()) | ||
791 | } else { | ||
792 | expect.clone() | ||
793 | }; | ||
794 | (format!("expected one of {}, found `{}`", expect, actual), | ||
795 | (self.sess.source_map().next_point(self.prev_span), | ||
796 | format!("expected one of {} here", short_expect))) | ||
797 | } else if expected.is_empty() { | ||
798 | (format!("unexpected token: `{}`", actual), | ||
799 | (self.prev_span, "unexpected token after this".to_string())) | ||
800 | } else { | ||
801 | (format!("expected {}, found `{}`", expect, actual), | ||
802 | (self.sess.source_map().next_point(self.prev_span), | ||
803 | format!("expected {} here", expect))) | ||
804 | }; | ||
805 | let mut err = self.fatal(&msg_exp); | ||
806 | if self.token.is_ident_named("and") { | ||
807 | err.span_suggestion_short( | ||
808 | self.span, | ||
809 | "use `&&` instead of `and` for the boolean operator", | ||
810 | "&&".to_string(), | ||
811 | Applicability::MaybeIncorrect, | ||
812 | ); | ||
813 | } | ||
814 | if self.token.is_ident_named("or") { | ||
815 | err.span_suggestion_short( | ||
816 | self.span, | ||
817 | "use `||` instead of `or` for the boolean operator", | ||
818 | "||".to_string(), | ||
819 | Applicability::MaybeIncorrect, | ||
820 | ); | ||
821 | } | ||
822 | let sp = if self.token == token::Token::Eof { | ||
823 | // This is EOF, don't want to point at the following char, but rather the last token | ||
824 | self.prev_span | ||
825 | } else { | ||
826 | label_sp | ||
827 | }; | ||
828 | match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { | ||
829 | TokenType::Token(t) => Some(t.clone()), | ||
830 | _ => None, | ||
831 | }).collect::<Vec<_>>(), err) { | ||
832 | Err(e) => err = e, | ||
833 | Ok(recovered) => { | ||
834 | return Ok(recovered); | ||
835 | } | ||
836 | } | ||
837 | |||
838 | let cm = self.sess.source_map(); | ||
839 | match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { | ||
840 | (Ok(ref a), Ok(ref b)) if a.line == b.line => { | ||
841 | // When the spans are in the same line, it means that the only content between | ||
842 | // them is whitespace, point at the found token in that case: | ||
843 | // | ||
844 | // X | () => { syntax error }; | ||
845 | // | ^^^^^ expected one of 8 possible tokens here | ||
846 | // | ||
847 | // instead of having: | ||
848 | // | ||
849 | // X | () => { syntax error }; | ||
850 | // | -^^^^^ unexpected token | ||
851 | // | | | ||
852 | // | expected one of 8 possible tokens here | ||
853 | err.span_label(self.span, label_exp); | ||
854 | } | ||
855 | _ if self.prev_span == syntax_pos::DUMMY_SP => { | ||
856 | // Account for macro context where the previous span might not be | ||
857 | // available to avoid incorrect output (#54841). | ||
858 | err.span_label(self.span, "unexpected token"); | ||
859 | } | ||
860 | _ => { | ||
861 | err.span_label(sp, label_exp); | ||
862 | err.span_label(self.span, "unexpected token"); | ||
863 | } | ||
864 | } | ||
865 | Err(err) | ||
866 | } | ||
867 | } | ||
868 | |||
869 | /// Returns the span of `expr` if it was not interpolated, or the span of the interpolated token. | ||
870 | fn interpolated_or_expr_span(&self, | ||
871 | expr: PResult<'a, P<Expr>>) | ||
872 | -> PResult<'a, (Span, P<Expr>)> { | ||
873 | expr.map(|e| { | ||
874 | if self.prev_token_kind == PrevTokenKind::Interpolated { | ||
875 | (self.prev_span, e) | ||
876 | } else { | ||
877 | (e.span, e) | ||
878 | } | ||
879 | }) | ||
880 | } | ||
881 | |||
882 | fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { | ||
883 | let mut err = self.struct_span_err(self.span, | ||
884 | &format!("expected identifier, found {}", | ||
885 | self.this_token_descr())); | ||
886 | if let token::Ident(ident, false) = &self.token { | ||
887 | if ident.is_reserved() && !ident.is_path_segment_keyword() && | ||
888 | ident.name != keywords::Underscore.name() | ||
889 | { | ||
890 | err.span_suggestion( | ||
891 | self.span, | ||
892 | "you can escape reserved keywords to use them as identifiers", | ||
893 | format!("r#{}", ident), | ||
894 | Applicability::MaybeIncorrect, | ||
895 | ); | ||
896 | } | ||
897 | } | ||
898 | if let Some(token_descr) = self.token_descr() { | ||
899 | err.span_label(self.span, format!("expected identifier, found {}", token_descr)); | ||
900 | } else { | ||
901 | err.span_label(self.span, "expected identifier"); | ||
902 | if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { | ||
903 | err.span_suggestion( | ||
904 | self.span, | ||
905 | "remove this comma", | ||
906 | String::new(), | ||
907 | Applicability::MachineApplicable, | ||
908 | ); | ||
909 | } | ||
910 | } | ||
911 | err | ||
912 | } | ||
913 | |||
914 | pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { | ||
915 | self.parse_ident_common(true) | ||
916 | } | ||
917 | |||
918 | fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { | ||
919 | match self.token { | ||
920 | token::Ident(ident, _) => { | ||
921 | if self.token.is_reserved_ident() { | ||
922 | let mut err = self.expected_ident_found(); | ||
923 | if recover { | ||
924 | err.emit(); | ||
925 | } else { | ||
926 | return Err(err); | ||
927 | } | ||
928 | } | ||
929 | let span = self.span; | ||
930 | self.bump(); | ||
931 | Ok(Ident::new(ident.name, span)) | ||
932 | } | ||
933 | _ => { | ||
934 | Err(if self.prev_token_kind == PrevTokenKind::DocComment { | ||
935 | self.span_fatal_err(self.prev_span, Error::UselessDocComment) | ||
936 | } else { | ||
937 | self.expected_ident_found() | ||
938 | }) | ||
939 | } | ||
940 | } | ||
941 | } | ||
942 | |||
943 | /// Checks if the next token is `tok`, and returns `true` if so. | ||
944 | /// | ||
945 | /// This method will automatically add `tok` to `expected_tokens` if `tok` is not | ||
946 | /// encountered. | ||
947 | crate fn check(&mut self, tok: &token::Token) -> bool { | ||
948 | let is_present = self.token == *tok; | ||
949 | if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } | ||
950 | is_present | ||
951 | } | ||
952 | |||
953 | /// Consumes the token `tok` if it exists. Returns whether the given token was present. | ||
954 | pub fn eat(&mut self, tok: &token::Token) -> bool { | ||
955 | let is_present = self.check(tok); | ||
956 | if is_present { self.bump() } | ||
957 | is_present | ||
958 | } | ||
959 | |||
960 | fn check_keyword(&mut self, kw: keywords::Keyword) -> bool { | ||
961 | self.expected_tokens.push(TokenType::Keyword(kw)); | ||
962 | self.token.is_keyword(kw) | ||
963 | } | ||
964 | |||
965 | /// If the next token is the given keyword, eats it and returns | ||
966 | /// `true`. Otherwise, returns `false`. | ||
967 | pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool { | ||
968 | if self.check_keyword(kw) { | ||
969 | self.bump(); | ||
970 | true | ||
971 | } else { | ||
972 | false | ||
973 | } | ||
974 | } | ||
975 | |||
976 | fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool { | ||
977 | if self.token.is_keyword(kw) { | ||
978 | self.bump(); | ||
979 | true | ||
980 | } else { | ||
981 | false | ||
982 | } | ||
983 | } | ||
984 | |||
985 | /// Expects the next token to be the given keyword and eats it. | ||
986 | /// Signals an error if the next token is not that keyword, | ||
987 | /// without consuming anything. | ||
988 | fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> { | ||
989 | if !self.eat_keyword(kw) { | ||
990 | self.unexpected() | ||
991 | } else { | ||
992 | Ok(()) | ||
993 | } | ||
994 | } | ||
995 | |||
996 | fn check_ident(&mut self) -> bool { | ||
997 | if self.token.is_ident() { | ||
998 | true | ||
999 | } else { | ||
1000 | self.expected_tokens.push(TokenType::Ident); | ||
1001 | false | ||
1002 | } | ||
1003 | } | ||
1004 | |||
1005 | fn check_path(&mut self) -> bool { | ||
1006 | if self.token.is_path_start() { | ||
1007 | true | ||
1008 | } else { | ||
1009 | self.expected_tokens.push(TokenType::Path); | ||
1010 | false | ||
1011 | } | ||
1012 | } | ||
1013 | |||
1014 | fn check_type(&mut self) -> bool { | ||
1015 | if self.token.can_begin_type() { | ||
1016 | true | ||
1017 | } else { | ||
1018 | self.expected_tokens.push(TokenType::Type); | ||
1019 | false | ||
1020 | } | ||
1021 | } | ||
1022 | |||
1023 | fn check_const_arg(&mut self) -> bool { | ||
1024 | if self.token.can_begin_const_arg() { | ||
1025 | true | ||
1026 | } else { | ||
1027 | self.expected_tokens.push(TokenType::Const); | ||
1028 | false | ||
1029 | } | ||
1030 | } | ||
1031 | |||
1032 | /// Expects and consumes a `+`. If `+=` is seen, replaces it with a `=` | ||
1033 | /// and continues. If a `+` is not seen, returns `false`. | ||
1034 | /// | ||
1035 | /// This is used when token-splitting `+=` into `+`. | ||
1036 | /// See issue #47856 for an example of when this may occur. | ||
1037 | fn eat_plus(&mut self) -> bool { | ||
1038 | self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); | ||
1039 | match self.token { | ||
1040 | token::BinOp(token::Plus) => { | ||
1041 | self.bump(); | ||
1042 | true | ||
1043 | } | ||
1044 | token::BinOpEq(token::Plus) => { | ||
1045 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1046 | self.bump_with(token::Eq, span); | ||
1047 | true | ||
1048 | } | ||
1049 | _ => false, | ||
1050 | } | ||
1051 | } | ||
1052 | |||
1053 | |||
1054 | /// Returns `true` if the next token is either `+` or `+=`; | ||
1055 | /// otherwise returns `false`. | ||
1056 | fn check_plus(&mut self) -> bool { | ||
1057 | if self.token.is_like_plus() { | ||
1058 | true | ||
1059 | } | ||
1060 | else { | ||
1061 | self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); | ||
1062 | false | ||
1063 | } | ||
1064 | } | ||
1065 | |||
1066 | /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single | ||
1067 | /// `&` and continues. If an `&` is not seen, signals an error. | ||
1068 | fn expect_and(&mut self) -> PResult<'a, ()> { | ||
1069 | self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); | ||
1070 | match self.token { | ||
1071 | token::BinOp(token::And) => { | ||
1072 | self.bump(); | ||
1073 | Ok(()) | ||
1074 | } | ||
1075 | token::AndAnd => { | ||
1076 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1077 | Ok(self.bump_with(token::BinOp(token::And), span)) | ||
1078 | } | ||
1079 | _ => self.unexpected() | ||
1080 | } | ||
1081 | } | ||
1082 | |||
1083 | /// Expects and consumes an `|`. If `||` is seen, replaces it with a single | ||
1084 | /// `|` and continues. If an `|` is not seen, signals an error. | ||
1085 | fn expect_or(&mut self) -> PResult<'a, ()> { | ||
1086 | self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); | ||
1087 | match self.token { | ||
1088 | token::BinOp(token::Or) => { | ||
1089 | self.bump(); | ||
1090 | Ok(()) | ||
1091 | } | ||
1092 | token::OrOr => { | ||
1093 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1094 | Ok(self.bump_with(token::BinOp(token::Or), span)) | ||
1095 | } | ||
1096 | _ => self.unexpected() | ||
1097 | } | ||
1098 | } | ||
1099 | |||
1100 | fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) { | ||
1101 | match suffix { | ||
1102 | None => {/* everything ok */} | ||
1103 | Some(suf) => { | ||
1104 | let text = suf.as_str(); | ||
1105 | if text.is_empty() { | ||
1106 | self.span_bug(sp, "found empty literal suffix in Some") | ||
1107 | } | ||
1108 | let msg = format!("{} with a suffix is invalid", kind); | ||
1109 | self.struct_span_err(sp, &msg) | ||
1110 | .span_label(sp, msg) | ||
1111 | .emit(); | ||
1112 | } | ||
1113 | } | ||
1114 | } | ||
1115 | |||
1116 | /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single | ||
1117 | /// `<` and continues. If `<-` is seen, replaces it with a single `-` | ||
1118 | /// and continues. If a `<` is not seen, returns `false`. | ||
1119 | /// | ||
1120 | /// This is meant to be used when parsing generics on a path to get the | ||
1121 | /// starting token. | ||
1122 | fn eat_lt(&mut self) -> bool { | ||
1123 | self.expected_tokens.push(TokenType::Token(token::Lt)); | ||
1124 | let ate = match self.token { | ||
1125 | token::Lt => { | ||
1126 | self.bump(); | ||
1127 | true | ||
1128 | } | ||
1129 | token::BinOp(token::Shl) => { | ||
1130 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1131 | self.bump_with(token::Lt, span); | ||
1132 | true | ||
1133 | } | ||
1134 | token::LArrow => { | ||
1135 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1136 | self.bump_with(token::BinOp(token::Minus), span); | ||
1137 | true | ||
1138 | } | ||
1139 | _ => false, | ||
1140 | }; | ||
1141 | |||
1142 | if ate { | ||
1143 | // See doc comment for `unmatched_angle_bracket_count`. | ||
1144 | self.unmatched_angle_bracket_count += 1; | ||
1145 | self.max_angle_bracket_count += 1; | ||
1146 | debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); | ||
1147 | } | ||
1148 | |||
1149 | ate | ||
1150 | } | ||
1151 | |||
1152 | fn expect_lt(&mut self) -> PResult<'a, ()> { | ||
1153 | if !self.eat_lt() { | ||
1154 | self.unexpected() | ||
1155 | } else { | ||
1156 | Ok(()) | ||
1157 | } | ||
1158 | } | ||
1159 | |||
1160 | /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it | ||
1161 | /// with a single `>` and continues. If a `>` is not seen, signals an error. | ||
1162 | fn expect_gt(&mut self) -> PResult<'a, ()> { | ||
1163 | self.expected_tokens.push(TokenType::Token(token::Gt)); | ||
1164 | let ate = match self.token { | ||
1165 | token::Gt => { | ||
1166 | self.bump(); | ||
1167 | Some(()) | ||
1168 | } | ||
1169 | token::BinOp(token::Shr) => { | ||
1170 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1171 | Some(self.bump_with(token::Gt, span)) | ||
1172 | } | ||
1173 | token::BinOpEq(token::Shr) => { | ||
1174 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1175 | Some(self.bump_with(token::Ge, span)) | ||
1176 | } | ||
1177 | token::Ge => { | ||
1178 | let span = self.span.with_lo(self.span.lo() + BytePos(1)); | ||
1179 | Some(self.bump_with(token::Eq, span)) | ||
1180 | } | ||
1181 | _ => None, | ||
1182 | }; | ||
1183 | |||
1184 | match ate { | ||
1185 | Some(_) => { | ||
1186 | // See doc comment for `unmatched_angle_bracket_count`. | ||
1187 | if self.unmatched_angle_bracket_count > 0 { | ||
1188 | self.unmatched_angle_bracket_count -= 1; | ||
1189 | debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); | ||
1190 | } | ||
1191 | |||
1192 | Ok(()) | ||
1193 | }, | ||
1194 | None => self.unexpected(), | ||
1195 | } | ||
1196 | } | ||
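// Illustrative sketch (standalone): why `expect_gt` splits `>>`. In nested generics the
// lexer emits one shift token for the two closing angle brackets, so the parser consumes
// a single `>` out of it and leaves a plain `>` behind (likewise for `>=` and `>>=`).
#[allow(dead_code)]
fn nested_generics_example() -> Vec<Vec<u8>> { // the trailing `>>` reaches the parser as one token
    Vec::new()
}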
1197 | |||
1198 | /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, | ||
1199 | /// passes through any errors encountered. Used for error recovery. | ||
1200 | fn eat_to_tokens(&mut self, kets: &[&token::Token]) { | ||
1201 | let handler = self.diagnostic(); | ||
1202 | |||
1203 | if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets, | ||
1204 | SeqSep::none(), | ||
1205 | TokenExpectType::Expect, | ||
1206 | |p| Ok(p.parse_token_tree())) { | ||
1207 | handler.cancel(err); | ||
1208 | } | ||
1209 | } | ||
1210 | |||
1211 | /// Parses a sequence, including the closing delimiter. The function | ||
1212 | /// `f` must consume tokens until reaching the next separator or | ||
1213 | /// closing bracket. | ||
1214 | pub fn parse_seq_to_end<T, F>(&mut self, | ||
1215 | ket: &token::Token, | ||
1216 | sep: SeqSep, | ||
1217 | f: F) | ||
1218 | -> PResult<'a, Vec<T>> where | ||
1219 | F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, | ||
1220 | { | ||
1221 | let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; | ||
1222 | if !recovered { | ||
1223 | self.bump(); | ||
1224 | } | ||
1225 | Ok(val) | ||
1226 | } | ||
1227 | |||
1228 | /// Parses a sequence, not including the closing delimiter. The function | ||
1229 | /// `f` must consume tokens until reaching the next separator or | ||
1230 | /// closing bracket. | ||
1231 | pub fn parse_seq_to_before_end<T, F>( | ||
1232 | &mut self, | ||
1233 | ket: &token::Token, | ||
1234 | sep: SeqSep, | ||
1235 | f: F, | ||
1236 | ) -> PResult<'a, (Vec<T>, bool)> | ||
1237 | where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> | ||
1238 | { | ||
1239 | self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) | ||
1240 | } | ||
1241 | |||
1242 | fn parse_seq_to_before_tokens<T, F>( | ||
1243 | &mut self, | ||
1244 | kets: &[&token::Token], | ||
1245 | sep: SeqSep, | ||
1246 | expect: TokenExpectType, | ||
1247 | mut f: F, | ||
1248 | ) -> PResult<'a, (Vec<T>, bool /* recovered */)> | ||
1249 | where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> | ||
1250 | { | ||
1251 | let mut first = true; | ||
1252 | let mut recovered = false; | ||
1253 | let mut v = vec![]; | ||
1254 | while !kets.iter().any(|k| { | ||
1255 | match expect { | ||
1256 | TokenExpectType::Expect => self.check(k), | ||
1257 | TokenExpectType::NoExpect => self.token == **k, | ||
1258 | } | ||
1259 | }) { | ||
1260 | match self.token { | ||
1261 | token::CloseDelim(..) | token::Eof => break, | ||
1262 | _ => {} | ||
1263 | }; | ||
1264 | if let Some(ref t) = sep.sep { | ||
1265 | if first { | ||
1266 | first = false; | ||
1267 | } else { | ||
1268 | match self.expect(t) { | ||
1269 | Ok(false) => {} | ||
1270 | Ok(true) => { | ||
1271 | recovered = true; | ||
1272 | break; | ||
1273 | } | ||
1274 | Err(mut e) => { | ||
1275 | // Attempt to keep parsing if it was a similar separator | ||
1276 | if let Some(ref tokens) = t.similar_tokens() { | ||
1277 | if tokens.contains(&self.token) { | ||
1278 | self.bump(); | ||
1279 | } | ||
1280 | } | ||
1281 | e.emit(); | ||
1282 | // Attempt to keep parsing if it was an omitted separator | ||
1283 | match f(self) { | ||
1284 | Ok(t) => { | ||
1285 | v.push(t); | ||
1286 | continue; | ||
1287 | }, | ||
1288 | Err(mut e) => { | ||
1289 | e.cancel(); | ||
1290 | break; | ||
1291 | } | ||
1292 | } | ||
1293 | } | ||
1294 | } | ||
1295 | } | ||
1296 | } | ||
1297 | if sep.trailing_sep_allowed && kets.iter().any(|k| { | ||
1298 | match expect { | ||
1299 | TokenExpectType::Expect => self.check(k), | ||
1300 | TokenExpectType::NoExpect => self.token == **k, | ||
1301 | } | ||
1302 | }) { | ||
1303 | break; | ||
1304 | } | ||
1305 | |||
1306 | let t = f(self)?; | ||
1307 | v.push(t); | ||
1308 | } | ||
1309 | |||
1310 | Ok((v, recovered)) | ||
1311 | } | ||
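// Illustrative sketch (standalone, greatly simplified): the loop shape above, parsing
// separator-delimited elements, tolerating a trailing separator, and stopping at the
// closing token. Error recovery and token trees are omitted.
#[allow(dead_code)]
fn toy_parse_comma_list(tokens: &[&str], close: &str) -> Vec<String> {
    let mut out = Vec::new();
    let mut first = true;
    let mut i = 0;
    while i < tokens.len() && tokens[i] != close {
        if !first {
            assert_eq!(tokens[i], ",", "expected separator"); // analogue of `self.expect(t)?`
            i += 1;
            if i < tokens.len() && tokens[i] == close {
                break; // trailing separator allowed, like `sep.trailing_sep_allowed`
            }
        }
        first = false;
        out.push(tokens[i].to_string()); // analogue of `let t = f(self)?; v.push(t);`
        i += 1;
    }
    out
}
// toy_parse_comma_list(&["a", ",", "b", ",", ")"], ")") yields ["a", "b"].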
1312 | |||
1313 | /// Parses a sequence, including the closing delimiter. The function | ||
1314 | /// `f` must consume tokens until reaching the next separator or | ||
1315 | /// closing bracket. | ||
1316 | fn parse_unspanned_seq<T, F>( | ||
1317 | &mut self, | ||
1318 | bra: &token::Token, | ||
1319 | ket: &token::Token, | ||
1320 | sep: SeqSep, | ||
1321 | f: F, | ||
1322 | ) -> PResult<'a, Vec<T>> where | ||
1323 | F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, | ||
1324 | { | ||
1325 | self.expect(bra)?; | ||
1326 | let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; | ||
1327 | if !recovered { | ||
1328 | self.eat(ket); | ||
1329 | } | ||
1330 | Ok(result) | ||
1331 | } | ||
1332 | |||
1333 | /// Advances the parser by one token. | ||
1334 | pub fn bump(&mut self) { | ||
1335 | if self.prev_token_kind == PrevTokenKind::Eof { | ||
1336 | // Bumping after EOF is a bad sign, usually an infinite loop. | ||
1337 | self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); | ||
1338 | } | ||
1339 | |||
1340 | self.prev_span = self.meta_var_span.take().unwrap_or(self.span); | ||
1341 | |||
1342 | // Record last token kind for possible error recovery. | ||
1343 | self.prev_token_kind = match self.token { | ||
1344 | token::DocComment(..) => PrevTokenKind::DocComment, | ||
1345 | token::Comma => PrevTokenKind::Comma, | ||
1346 | token::BinOp(token::Plus) => PrevTokenKind::Plus, | ||
1347 | token::Interpolated(..) => PrevTokenKind::Interpolated, | ||
1348 | token::Eof => PrevTokenKind::Eof, | ||
1349 | token::Ident(..) => PrevTokenKind::Ident, | ||
1350 | _ => PrevTokenKind::Other, | ||
1351 | }; | ||
1352 | |||
1353 | let next = self.next_tok(); | ||
1354 | self.span = next.sp; | ||
1355 | self.token = next.tok; | ||
1356 | self.expected_tokens.clear(); | ||
1357 | // check after each token | ||
1358 | self.process_potential_macro_variable(); | ||
1359 | } | ||
1360 | |||
1361 | /// Advances the parser using the provided token as the next one. Use this when | ||
1362 | /// consuming part of a token, for example a single `<` from `<<`. | ||
1363 | fn bump_with(&mut self, next: token::Token, span: Span) { | ||
1364 | self.prev_span = self.span.with_hi(span.lo()); | ||
1365 | // It would be incorrect to record the kind of the current token, but | ||
1366 | // fortunately for tokens currently using `bump_with`, the | ||
1367 | // prev_token_kind will be of no use anyway. | ||
1368 | self.prev_token_kind = PrevTokenKind::Other; | ||
1369 | self.span = span; | ||
1370 | self.token = next; | ||
1371 | self.expected_tokens.clear(); | ||
1372 | } | ||
1373 | |||
1374 | pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where | ||
1375 | F: FnOnce(&token::Token) -> R, | ||
1376 | { | ||
1377 | if dist == 0 { | ||
1378 | return f(&self.token) | ||
1379 | } | ||
1380 | |||
1381 | f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { | ||
1382 | Some(tree) => match tree { | ||
1383 | TokenTree::Token(_, tok) => tok, | ||
1384 | TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim), | ||
1385 | }, | ||
1386 | None => token::CloseDelim(self.token_cursor.frame.delim), | ||
1387 | }) | ||
1388 | } | ||
1389 | |||
1390 | fn look_ahead_span(&self, dist: usize) -> Span { | ||
1391 | if dist == 0 { | ||
1392 | return self.span | ||
1393 | } | ||
1394 | |||
1395 | match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { | ||
1396 | Some(TokenTree::Token(span, _)) => span, | ||
1397 | Some(TokenTree::Delimited(span, ..)) => span.entire(), | ||
1398 | None => self.look_ahead_span(dist - 1), | ||
1399 | } | ||
1400 | } | ||
1401 | pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { | ||
1402 | self.sess.span_diagnostic.struct_span_fatal(self.span, m) | ||
1403 | } | ||
1404 | pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { | ||
1405 | self.sess.span_diagnostic.struct_span_fatal(sp, m) | ||
1406 | } | ||
1407 | fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> { | ||
1408 | err.span_err(sp, self.diagnostic()) | ||
1409 | } | ||
1410 | fn bug(&self, m: &str) -> ! { | ||
1411 | self.sess.span_diagnostic.span_bug(self.span, m) | ||
1412 | } | ||
1413 | fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { | ||
1414 | self.sess.span_diagnostic.span_err(sp, m) | ||
1415 | } | ||
1416 | fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { | ||
1417 | self.sess.span_diagnostic.struct_span_err(sp, m) | ||
1418 | } | ||
1419 | crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! { | ||
1420 | self.sess.span_diagnostic.span_bug(sp, m) | ||
1421 | } | ||
1422 | |||
1423 | fn cancel(&self, err: &mut DiagnosticBuilder<'_>) { | ||
1424 | self.sess.span_diagnostic.cancel(err) | ||
1425 | } | ||
1426 | |||
1427 | crate fn diagnostic(&self) -> &'a errors::Handler { | ||
1428 | &self.sess.span_diagnostic | ||
1429 | } | ||
1430 | |||
1431 | /// Is the current token one of the keywords that signals a bare function type? | ||
1432 | fn token_is_bare_fn_keyword(&mut self) -> bool { | ||
1433 | self.check_keyword(keywords::Fn) || | ||
1434 | self.check_keyword(keywords::Unsafe) || | ||
1435 | self.check_keyword(keywords::Extern) | ||
1436 | } | ||
1437 | |||
1438 | /// Parses a `TyKind::BareFn` type. | ||
1439 | fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> { | ||
1440 | /* | ||
1441 | |||
1442 | [unsafe] [extern "ABI"] fn (S) -> T | ||
1443 | ^~~~^ ^~~~^ ^~^ ^ | ||
1444 | | | | | | ||
1445 | | | | Return type | ||
1446 | | | Argument types | ||
1447 | | | | ||
1448 | | ABI | ||
1449 | Function Style | ||
1450 | */ | ||
1451 | |||
1452 | let unsafety = self.parse_unsafety(); | ||
1453 | let abi = if self.eat_keyword(keywords::Extern) { | ||
1454 | self.parse_opt_abi()?.unwrap_or(Abi::C) | ||
1455 | } else { | ||
1456 | Abi::Rust | ||
1457 | }; | ||
1458 | |||
1459 | self.expect_keyword(keywords::Fn)?; | ||
1460 | let (inputs, variadic) = self.parse_fn_args(false, true)?; | ||
1461 | let ret_ty = self.parse_ret_ty(false)?; | ||
1462 | let decl = P(FnDecl { | ||
1463 | inputs, | ||
1464 | output: ret_ty, | ||
1465 | variadic, | ||
1466 | }); | ||
1467 | Ok(TyKind::BareFn(P(BareFnTy { | ||
1468 | abi, | ||
1469 | unsafety, | ||
1470 | generic_params, | ||
1471 | decl, | ||
1472 | }))) | ||
1473 | } | ||
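// Illustrative sketch (standalone): the surface syntax this method parses, matching the
// diagram above: optional `unsafe`, optional `extern "ABI"`, then `fn(args) -> ret`.
#[allow(dead_code)]
fn takes_bare_fn(callback: unsafe extern "C" fn(u8, u8) -> u8) -> usize {
    callback as usize // nothing interesting; the parameter's bare fn type is the point
}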
1474 | |||
1475 | /// Parses asyncness: `async` or nothing. | ||
1476 | fn parse_asyncness(&mut self) -> IsAsync { | ||
1477 | if self.eat_keyword(keywords::Async) { | ||
1478 | IsAsync::Async { | ||
1479 | closure_id: ast::DUMMY_NODE_ID, | ||
1480 | return_impl_trait_id: ast::DUMMY_NODE_ID, | ||
1481 | } | ||
1482 | } else { | ||
1483 | IsAsync::NotAsync | ||
1484 | } | ||
1485 | } | ||
1486 | |||
1487 | /// Parses unsafety: `unsafe` or nothing. | ||
1488 | fn parse_unsafety(&mut self) -> Unsafety { | ||
1489 | if self.eat_keyword(keywords::Unsafe) { | ||
1490 | Unsafety::Unsafe | ||
1491 | } else { | ||
1492 | Unsafety::Normal | ||
1493 | } | ||
1494 | } | ||
1495 | |||
1496 | /// Parses an item in a trait declaration. | ||
1497 | pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> { | ||
1498 | maybe_whole!(self, NtTraitItem, |x| x); | ||
1499 | let attrs = self.parse_outer_attributes()?; | ||
1500 | let (mut item, tokens) = self.collect_tokens(|this| { | ||
1501 | this.parse_trait_item_(at_end, attrs) | ||
1502 | })?; | ||
1503 | // See `parse_item` for why this clause is here. | ||
1504 | if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { | ||
1505 | item.tokens = Some(tokens); | ||
1506 | } | ||
1507 | Ok(item) | ||
1508 | } | ||
1509 | |||
1510 | fn parse_trait_item_(&mut self, | ||
1511 | at_end: &mut bool, | ||
1512 | mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> { | ||
1513 | let lo = self.span; | ||
1514 | |||
1515 | let (name, node, generics) = if self.eat_keyword(keywords::Type) { | ||
1516 | self.parse_trait_item_assoc_ty()? | ||
1517 | } else if self.is_const_item() { | ||
1518 | self.expect_keyword(keywords::Const)?; | ||
1519 | let ident = self.parse_ident()?; | ||
1520 | self.expect(&token::Colon)?; | ||
1521 | let ty = self.parse_ty()?; | ||
1522 | let default = if self.eat(&token::Eq) { | ||
1523 | let expr = self.parse_expr()?; | ||
1524 | self.expect(&token::Semi)?; | ||
1525 | Some(expr) | ||
1526 | } else { | ||
1527 | self.expect(&token::Semi)?; | ||
1528 | None | ||
1529 | }; | ||
1530 | (ident, TraitItemKind::Const(ty, default), ast::Generics::default()) | ||
1531 | } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? { | ||
1532 | // trait item macro. | ||
1533 | (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default()) | ||
1534 | } else { | ||
1535 | let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?; | ||
1536 | |||
1537 | let ident = self.parse_ident()?; | ||
1538 | let mut generics = self.parse_generics()?; | ||
1539 | |||
1540 | let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| { | ||
1541 | // This is somewhat dubious; we don't want to allow | ||
1542 | // argument names to be left off if there is a | ||
1543 | // definition... | ||
1544 | |||
1545 | // We don't allow argument names to be left off in edition 2018. | ||
1546 | p.parse_arg_general(p.span.rust_2018(), true) | ||
1547 | })?; | ||
1548 | generics.where_clause = self.parse_where_clause()?; | ||
1549 | |||
1550 | let sig = ast::MethodSig { | ||
1551 | header: FnHeader { | ||
1552 | unsafety, | ||
1553 | constness, | ||
1554 | abi, | ||
1555 | asyncness, | ||
1556 | }, | ||
1557 | decl: d, | ||
1558 | }; | ||
1559 | |||
1560 | let body = match self.token { | ||
1561 | token::Semi => { | ||
1562 | self.bump(); | ||
1563 | *at_end = true; | ||
1564 | debug!("parse_trait_methods(): parsing required method"); | ||
1565 | None | ||
1566 | } | ||
1567 | token::OpenDelim(token::Brace) => { | ||
1568 | debug!("parse_trait_methods(): parsing provided method"); | ||
1569 | *at_end = true; | ||
1570 | let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; | ||
1571 | attrs.extend(inner_attrs.iter().cloned()); | ||
1572 | Some(body) | ||
1573 | } | ||
1574 | token::Interpolated(ref nt) => { | ||
1575 | match **nt { | ||
1576 | token::NtBlock(..) => { | ||
1577 | *at_end = true; | ||
1578 | let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; | ||
1579 | attrs.extend(inner_attrs.iter().cloned()); | ||
1580 | Some(body) | ||
1581 | } | ||
1582 | _ => { | ||
1583 | let token_str = self.this_token_descr(); | ||
1584 | let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", | ||
1585 | token_str)); | ||
1586 | err.span_label(self.span, "expected `;` or `{`"); | ||
1587 | return Err(err); | ||
1588 | } | ||
1589 | } | ||
1590 | } | ||
1591 | _ => { | ||
1592 | let token_str = self.this_token_descr(); | ||
1593 | let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", | ||
1594 | token_str)); | ||
1595 | err.span_label(self.span, "expected `;` or `{`"); | ||
1596 | return Err(err); | ||
1597 | } | ||
1598 | }; | ||
1599 | (ident, ast::TraitItemKind::Method(sig, body), generics) | ||
1600 | }; | ||
1601 | |||
1602 | Ok(TraitItem { | ||
1603 | id: ast::DUMMY_NODE_ID, | ||
1604 | ident: name, | ||
1605 | attrs, | ||
1606 | generics, | ||
1607 | node, | ||
1608 | span: lo.to(self.prev_span), | ||
1609 | tokens: None, | ||
1610 | }) | ||
1611 | } | ||
1612 | |||
1613 | /// Parses an optional return type `[ -> TY ]` in a function declaration. | ||
1614 | fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> { | ||
1615 | if self.eat(&token::RArrow) { | ||
1616 | Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?)) | ||
1617 | } else { | ||
1618 | Ok(FunctionRetTy::Default(self.span.shrink_to_lo())) | ||
1619 | } | ||
1620 | } | ||
1621 | |||
1622 | /// Parses a type. | ||
1623 | pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> { | ||
1624 | self.parse_ty_common(true, true) | ||
1625 | } | ||
1626 | |||
1627 | /// Parses a type in restricted contexts where `+` is not permitted. | ||
1628 | /// | ||
1629 | /// Example 1: `&'a TYPE` | ||
1630 | /// `+` is prohibited to maintain operator priority (P(+) < P(&)). | ||
1631 | /// Example 2: `value1 as TYPE + value2` | ||
1632 | /// `+` is prohibited to avoid interactions with expression grammar. | ||
1633 | fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> { | ||
1634 | self.parse_ty_common(false, true) | ||
1635 | } | ||
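// Illustration: without this restriction, a type like `&'a dyn Trait + Send` would be
// ambiguous; since `&` binds tighter than `+`, the bound list has to be written as
// `&'a (dyn Trait + Send)`.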
1636 | |||
1637 | fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool) | ||
1638 | -> PResult<'a, P<Ty>> { | ||
1639 | maybe_whole!(self, NtTy, |x| x); | ||
1640 | |||
1641 | let lo = self.span; | ||
1642 | let mut impl_dyn_multi = false; | ||
1643 | let node = if self.eat(&token::OpenDelim(token::Paren)) { | ||
1644 | // `(TYPE)` is a parenthesized type. | ||
1645 | // `(TYPE,)` is a tuple with a single field of type TYPE. | ||
1646 | let mut ts = vec![]; | ||
1647 | let mut last_comma = false; | ||
1648 | while self.token != token::CloseDelim(token::Paren) { | ||
1649 | ts.push(self.parse_ty()?); | ||
1650 | if self.eat(&token::Comma) { | ||
1651 | last_comma = true; | ||
1652 | } else { | ||
1653 | last_comma = false; | ||
1654 | break; | ||
1655 | } | ||
1656 | } | ||
1657 | let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus; | ||
1658 | self.expect(&token::CloseDelim(token::Paren))?; | ||
1659 | |||
1660 | if ts.len() == 1 && !last_comma { | ||
1661 | let ty = ts.into_iter().nth(0).unwrap().into_inner(); | ||
1662 | let maybe_bounds = allow_plus && self.token.is_like_plus(); | ||
1663 | match ty.node { | ||
1664 | // `(TY_BOUND_NOPAREN) + BOUND + ...`. | ||
1665 | TyKind::Path(None, ref path) if maybe_bounds => { | ||
1666 | self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)? | ||
1667 | } | ||
1668 | TyKind::TraitObject(ref bounds, TraitObjectSyntax::None) | ||
1669 | if maybe_bounds && bounds.len() == 1 && !trailing_plus => { | ||
1670 | let path = match bounds[0] { | ||
1671 | GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(), | ||
1672 | GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"), | ||
1673 | }; | ||
1674 | self.parse_remaining_bounds(Vec::new(), path, lo, true)? | ||
1675 | } | ||
1676 | // `(TYPE)` | ||
1677 | _ => TyKind::Paren(P(ty)) | ||
1678 | } | ||
1679 | } else { | ||
1680 | TyKind::Tup(ts) | ||
1681 | } | ||
1682 | } else if self.eat(&token::Not) { | ||
1683 | // Never type `!` | ||
1684 | TyKind::Never | ||
1685 | } else if self.eat(&token::BinOp(token::Star)) { | ||
1686 | // Raw pointer | ||
1687 | TyKind::Ptr(self.parse_ptr()?) | ||
1688 | } else if self.eat(&token::OpenDelim(token::Bracket)) { | ||
1689 | // Array or slice | ||
1690 | let t = self.parse_ty()?; | ||
1691 | // Parse optional `; EXPR` in `[TYPE; EXPR]` | ||
1692 | let t = match self.maybe_parse_fixed_length_of_vec()? { | ||
1693 | None => TyKind::Slice(t), | ||
1694 | Some(length) => TyKind::Array(t, AnonConst { | ||
1695 | id: ast::DUMMY_NODE_ID, | ||
1696 | value: length, | ||
1697 | }), | ||
1698 | }; | ||
1699 | self.expect(&token::CloseDelim(token::Bracket))?; | ||
1700 | t | ||
1701 | } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) { | ||
1702 | // Reference | ||
1703 | self.expect_and()?; | ||
1704 | self.parse_borrowed_pointee()? | ||
1705 | } else if self.eat_keyword_noexpect(keywords::Typeof) { | ||
1706 | // `typeof(EXPR)` | ||
1707 | // To avoid ambiguity, the expression must be surrounded by parens. | ||
1708 | self.expect(&token::OpenDelim(token::Paren))?; | ||
1709 | let e = AnonConst { | ||
1710 | id: ast::DUMMY_NODE_ID, | ||
1711 | value: self.parse_expr()?, | ||
1712 | }; | ||
1713 | self.expect(&token::CloseDelim(token::Paren))?; | ||
1714 | TyKind::Typeof(e) | ||
1715 | } else if self.eat_keyword(keywords::Underscore) { | ||
1716 | // A type to be inferred `_` | ||
1717 | TyKind::Infer | ||
1718 | } else if self.token_is_bare_fn_keyword() { | ||
1719 | // Function pointer type | ||
1720 | self.parse_ty_bare_fn(Vec::new())? | ||
1721 | } else if self.check_keyword(keywords::For) { | ||
1722 | // Function pointer type or bound list (trait object type) starting with a poly-trait. | ||
1723 | // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` | ||
1724 | // `for<'lt> Trait1<'lt> + Trait2 + 'a` | ||
1725 | let lo = self.span; | ||
1726 | let lifetime_defs = self.parse_late_bound_lifetime_defs()?; | ||
1727 | if self.token_is_bare_fn_keyword() { | ||
1728 | self.parse_ty_bare_fn(lifetime_defs)? | ||
1729 | } else { | ||
1730 | let path = self.parse_path(PathStyle::Type)?; | ||
1731 | let parse_plus = allow_plus && self.check_plus(); | ||
1732 | self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)? | ||
1733 | } | ||
1734 | } else if self.eat_keyword(keywords::Impl) { | ||
1735 | // Always parse bounds greedily for better error recovery. | ||
1736 | let bounds = self.parse_generic_bounds(None)?; | ||
1737 | impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; | ||
1738 | TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds) | ||
1739 | } else if self.check_keyword(keywords::Dyn) && | ||
1740 | (self.span.rust_2018() || | ||
1741 | self.look_ahead(1, |t| t.can_begin_bound() && | ||
1742 | !can_continue_type_after_non_fn_ident(t))) { | ||
1743 | self.bump(); // `dyn` | ||
1744 | // Always parse bounds greedily for better error recovery. | ||
1745 | let bounds = self.parse_generic_bounds(None)?; | ||
1746 | impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; | ||
1747 | TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn) | ||
1748 | } else if self.check(&token::Question) || | ||
1749 | self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) { | ||
1750 | // Bound list (trait object type) | ||
1751 | TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?, | ||
1752 | TraitObjectSyntax::None) | ||
1753 | } else if self.eat_lt() { | ||
1754 | // Qualified path | ||
1755 | let (qself, path) = self.parse_qpath(PathStyle::Type)?; | ||
1756 | TyKind::Path(Some(qself), path) | ||
1757 | } else if self.token.is_path_start() { | ||
1758 | // Simple path | ||
1759 | let path = self.parse_path(PathStyle::Type)?; | ||
1760 | if self.eat(&token::Not) { | ||
1761 | // Macro invocation in type position | ||
1762 | let (delim, tts) = self.expect_delimited_token_tree()?; | ||
1763 | let node = Mac_ { path, tts, delim }; | ||
1764 | TyKind::Mac(respan(lo.to(self.prev_span), node)) | ||
1765 | } else { | ||
1766 | // Just a type path or bound list (trait object type) starting with a trait. | ||
1767 | // `Type` | ||
1768 | // `Trait1 + Trait2 + 'a` | ||
1769 | if allow_plus && self.check_plus() { | ||
1770 | self.parse_remaining_bounds(Vec::new(), path, lo, true)? | ||
1771 | } else { | ||
1772 | TyKind::Path(None, path) | ||
1773 | } | ||
1774 | } | ||
1775 | } else { | ||
1776 | let msg = format!("expected type, found {}", self.this_token_descr()); | ||
1777 | return Err(self.fatal(&msg)); | ||
1778 | }; | ||
1779 | |||
1780 | let span = lo.to(self.prev_span); | ||
1781 | let ty = Ty { node, span, id: ast::DUMMY_NODE_ID }; | ||
1782 | |||
1783 | // Try to recover from use of `+` with incorrect priority. | ||
1784 | self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty); | ||
1785 | self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?; | ||
1786 | let ty = self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)?; | ||
1787 | |||
1788 | Ok(P(ty)) | ||
1789 | } | ||
1790 | |||
1791 | fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path, | ||
1792 | lo: Span, parse_plus: bool) -> PResult<'a, TyKind> { | ||
1793 | let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span)); | ||
1794 | let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)]; | ||
1795 | if parse_plus { | ||
1796 | self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded | ||
1797 | bounds.append(&mut self.parse_generic_bounds(None)?); | ||
1798 | } | ||
1799 | Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None)) | ||
1800 | } | ||
1801 | |||
1802 | fn maybe_report_ambiguous_plus(&mut self, allow_plus: bool, impl_dyn_multi: bool, ty: &Ty) { | ||
1803 | if !allow_plus && impl_dyn_multi { | ||
1804 | let sum_with_parens = format!("({})", pprust::ty_to_string(&ty)); | ||
1805 | self.struct_span_err(ty.span, "ambiguous `+` in a type") | ||
1806 | .span_suggestion( | ||
1807 | ty.span, | ||
1808 | "use parentheses to disambiguate", | ||
1809 | sum_with_parens, | ||
1810 | Applicability::MachineApplicable | ||
1811 | ).emit(); | ||
1812 | } | ||
1813 | } | ||
1814 | |||
1815 | fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PResult<'a, ()> { | ||
1816 | // Do not add `+` to expected tokens. | ||
1817 | if !allow_plus || !self.token.is_like_plus() { | ||
1818 | return Ok(()) | ||
1819 | } | ||
1820 | |||
1821 | self.bump(); // `+` | ||
1822 | let bounds = self.parse_generic_bounds(None)?; | ||
1823 | let sum_span = ty.span.to(self.prev_span); | ||
1824 | |||
1825 | let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178, | ||
1826 | "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty)); | ||
1827 | |||
1828 | match ty.node { | ||
1829 | TyKind::Rptr(ref lifetime, ref mut_ty) => { | ||
1830 | let sum_with_parens = pprust::to_string(|s| { | ||
1831 | use crate::print::pprust::PrintState; | ||
1832 | |||
1833 | s.s.word("&")?; | ||
1834 | s.print_opt_lifetime(lifetime)?; | ||
1835 | s.print_mutability(mut_ty.mutbl)?; | ||
1836 | s.popen()?; | ||
1837 | s.print_type(&mut_ty.ty)?; | ||
1838 | s.print_type_bounds(" +", &bounds)?; | ||
1839 | s.pclose() | ||
1840 | }); | ||
1841 | err.span_suggestion( | ||
1842 | sum_span, | ||
1843 | "try adding parentheses", | ||
1844 | sum_with_parens, | ||
1845 | Applicability::MachineApplicable | ||
1846 | ); | ||
1847 | } | ||
1848 | TyKind::Ptr(..) | TyKind::BareFn(..) => { | ||
1849 | err.span_label(sum_span, "perhaps you forgot parentheses?"); | ||
1850 | } | ||
1851 | _ => { | ||
1852 | err.span_label(sum_span, "expected a path"); | ||
1853 | }, | ||
1854 | } | ||
1855 | err.emit(); | ||
1856 | Ok(()) | ||
1857 | } | ||
1858 | |||
1859 | // Try to recover from associated item paths like `[T]::AssocItem`/`(T, U)::AssocItem`. | ||
1860 | fn maybe_recover_from_bad_qpath<T: RecoverQPath>(&mut self, base: T, allow_recovery: bool) | ||
1861 | -> PResult<'a, T> { | ||
1862 | // Do not add `::` to expected tokens. | ||
1863 | if !allow_recovery || self.token != token::ModSep { | ||
1864 | return Ok(base); | ||
1865 | } | ||
1866 | let ty = match base.to_ty() { | ||
1867 | Some(ty) => ty, | ||
1868 | None => return Ok(base), | ||
1869 | }; | ||
1870 | |||
1871 | self.bump(); // `::` | ||
1872 | let mut segments = Vec::new(); | ||
1873 | self.parse_path_segments(&mut segments, T::PATH_STYLE, true)?; | ||
1874 | |||
1875 | let span = ty.span.to(self.prev_span); | ||
1876 | let path_span = span.to(span); // use an empty path since `position` == 0 | ||
1877 | let recovered = base.to_recovered( | ||
1878 | Some(QSelf { ty, path_span, position: 0 }), | ||
1879 | ast::Path { segments, span }, | ||
1880 | ); | ||
1881 | |||
1882 | self.diagnostic() | ||
1883 | .struct_span_err(span, "missing angle brackets in associated item path") | ||
1884 | .span_suggestion( // this is a best-effort recovery | ||
1885 | span, "try", recovered.to_string(), Applicability::MaybeIncorrect | ||
1886 | ).emit(); | ||
1887 | |||
1888 | Ok(recovered) | ||
1889 | } | ||
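// Illustration: for the erroneous input `[T]::AssocItem`, this recovery re-parses it
// as the qualified path `<[T]>::AssocItem` and emits that spelling as the suggestion.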
1890 | |||
1891 | fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> { | ||
1892 | let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None }; | ||
1893 | let mutbl = self.parse_mutability(); | ||
1894 | let ty = self.parse_ty_no_plus()?; | ||
1895 | return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl })); | ||
1896 | } | ||
1897 | |||
1898 | fn parse_ptr(&mut self) -> PResult<'a, MutTy> { | ||
1899 | let mutbl = if self.eat_keyword(keywords::Mut) { | ||
1900 | Mutability::Mutable | ||
1901 | } else if self.eat_keyword(keywords::Const) { | ||
1902 | Mutability::Immutable | ||
1903 | } else { | ||
1904 | let span = self.prev_span; | ||
1905 | let msg = "expected mut or const in raw pointer type"; | ||
1906 | self.struct_span_err(span, msg) | ||
1907 | .span_label(span, msg) | ||
1908 | .help("use `*mut T` or `*const T` as appropriate") | ||
1909 | .emit(); | ||
1910 | Mutability::Immutable | ||
1911 | }; | ||
1912 | let t = self.parse_ty_no_plus()?; | ||
1913 | Ok(MutTy { ty: t, mutbl: mutbl }) | ||
1914 | } | ||
1915 | |||
1916 | fn is_named_argument(&mut self) -> bool { | ||
1917 | let offset = match self.token { | ||
1918 | token::Interpolated(ref nt) => match **nt { | ||
1919 | token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), | ||
1920 | _ => 0, | ||
1921 | } | ||
1922 | token::BinOp(token::And) | token::AndAnd => 1, | ||
1923 | _ if self.token.is_keyword(keywords::Mut) => 1, | ||
1924 | _ => 0, | ||
1925 | }; | ||
1926 | |||
1927 | self.look_ahead(offset, |t| t.is_ident()) && | ||
1928 | self.look_ahead(offset + 1, |t| t == &token::Colon) | ||
1929 | } | ||
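// Illustration: in `fn f(x: u32)` the lookahead sees an identifier followed by `:`
// and this returns true; for the anonymous form `fn f(u32)` it returns false.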
1930 | |||
1931 | /// Skips unexpected attributes and doc comments in this position and emits an appropriate | ||
1932 | /// error. | ||
1933 | fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { | ||
1934 | if let token::DocComment(_) = self.token { | ||
1935 | let mut err = self.diagnostic().struct_span_err( | ||
1936 | self.span, | ||
1937 | &format!("documentation comments cannot be applied to {}", applied_to), | ||
1938 | ); | ||
1939 | err.span_label(self.span, "doc comments are not allowed here"); | ||
1940 | err.emit(); | ||
1941 | self.bump(); | ||
1942 | } else if self.token == token::Pound && self.look_ahead(1, |t| { | ||
1943 | *t == token::OpenDelim(token::Bracket) | ||
1944 | }) { | ||
1945 | let lo = self.span; | ||
1946 | // Skip every token until next possible arg. | ||
1947 | while self.token != token::CloseDelim(token::Bracket) { | ||
1948 | self.bump(); | ||
1949 | } | ||
1950 | let sp = lo.to(self.span); | ||
1951 | self.bump(); | ||
1952 | let mut err = self.diagnostic().struct_span_err( | ||
1953 | sp, | ||
1954 | &format!("attributes cannot be applied to {}", applied_to), | ||
1955 | ); | ||
1956 | err.span_label(sp, "attributes are not allowed here"); | ||
1957 | err.emit(); | ||
1958 | } | ||
1959 | } | ||
1960 | |||
1961 | /// This version of `parse_arg` doesn't necessarily require identifier names. | ||
1962 | fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> { | ||
1963 | maybe_whole!(self, NtArg, |x| x); | ||
1964 | |||
1965 | if let Ok(Some(_)) = self.parse_self_arg() { | ||
1966 | let mut err = self.struct_span_err(self.prev_span, | ||
1967 | "unexpected `self` argument in function"); | ||
1968 | err.span_label(self.prev_span, | ||
1969 | "`self` is only valid as the first argument of an associated function"); | ||
1970 | return Err(err); | ||
1971 | } | ||
1972 | |||
1973 | let (pat, ty) = if require_name || self.is_named_argument() { | ||
1974 | debug!("parse_arg_general parse_pat (require_name:{})", | ||
1975 | require_name); | ||
1976 | self.eat_incorrect_doc_comment("method arguments"); | ||
1977 | let pat = self.parse_pat(Some("argument name"))?; | ||
1978 | |||
1979 | if let Err(mut err) = self.expect(&token::Colon) { | ||
1980 | // If we find a pattern followed by an identifier, it could be an (incorrect) | ||
1981 | // C-style parameter declaration. | ||
1982 | if self.check_ident() && self.look_ahead(1, |t| { | ||
1983 | *t == token::Comma || *t == token::CloseDelim(token::Paren) | ||
1984 | }) { | ||
1985 | let ident = self.parse_ident().unwrap(); | ||
1986 | let span = pat.span.with_hi(ident.span.hi()); | ||
1987 | |||
1988 | err.span_suggestion( | ||
1989 | span, | ||
1990 | "declare the type after the parameter binding", | ||
1991 | String::from("<identifier>: <type>"), | ||
1992 | Applicability::HasPlaceholders, | ||
1993 | ); | ||
1994 | } else if require_name && is_trait_item { | ||
1995 | if let PatKind::Ident(_, ident, _) = pat.node { | ||
1996 | err.span_suggestion( | ||
1997 | pat.span, | ||
1998 | "explicitly ignore parameter", | ||
1999 | format!("_: {}", ident), | ||
2000 | Applicability::MachineApplicable, | ||
2001 | ); | ||
2002 | } | ||
2003 | |||
2004 | err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); | ||
2005 | } | ||
2006 | |||
2007 | return Err(err); | ||
2008 | } | ||
2009 | |||
2010 | self.eat_incorrect_doc_comment("a method argument's type"); | ||
2011 | (pat, self.parse_ty()?) | ||
2012 | } else { | ||
2013 | debug!("parse_arg_general ident_to_pat"); | ||
2014 | let parser_snapshot_before_ty = self.clone(); | ||
2015 | self.eat_incorrect_doc_comment("a method argument's type"); | ||
2016 | let mut ty = self.parse_ty(); | ||
2017 | if ty.is_ok() && self.token != token::Comma && | ||
2018 | self.token != token::CloseDelim(token::Paren) { | ||
2019 | // This wasn't actually a type, but a pattern looking like a type, | ||
2020 | // so we are going to rollback and re-parse for recovery. | ||
2021 | ty = self.unexpected(); | ||
2022 | } | ||
2023 | match ty { | ||
2024 | Ok(ty) => { | ||
2025 | let ident = Ident::new(keywords::Invalid.name(), self.prev_span); | ||
2026 | let pat = P(Pat { | ||
2027 | id: ast::DUMMY_NODE_ID, | ||
2028 | node: PatKind::Ident( | ||
2029 | BindingMode::ByValue(Mutability::Immutable), ident, None), | ||
2030 | span: ty.span, | ||
2031 | }); | ||
2032 | (pat, ty) | ||
2033 | } | ||
2034 | Err(mut err) => { | ||
2035 | // Recover from attempting to parse the argument as a type without pattern. | ||
2036 | err.cancel(); | ||
2037 | mem::replace(self, parser_snapshot_before_ty); | ||
2038 | let pat = self.parse_pat(Some("argument name"))?; | ||
2039 | self.expect(&token::Colon)?; | ||
2040 | let ty = self.parse_ty()?; | ||
2041 | |||
2042 | let mut err = self.diagnostic().struct_span_err_with_code( | ||
2043 | pat.span, | ||
2044 | "patterns aren't allowed in methods without bodies", | ||
2045 | DiagnosticId::Error("E0642".into()), | ||
2046 | ); | ||
2047 | err.span_suggestion_short( | ||
2048 | pat.span, | ||
2049 | "give this argument a name or use an underscore to ignore it", | ||
2050 | "_".to_owned(), | ||
2051 | Applicability::MachineApplicable, | ||
2052 | ); | ||
2053 | err.emit(); | ||
2054 | |||
2055 | // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. | ||
2056 | let pat = P(Pat { | ||
2057 | node: PatKind::Wild, | ||
2058 | span: pat.span, | ||
2059 | id: ast::DUMMY_NODE_ID | ||
2060 | }); | ||
2061 | (pat, ty) | ||
2062 | } | ||
2063 | } | ||
2064 | }; | ||
2065 | |||
2066 | Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID }) | ||
2067 | } | ||
2068 | |||
2069 | /// Parses a single function argument. | ||
2070 | crate fn parse_arg(&mut self) -> PResult<'a, Arg> { | ||
2071 | self.parse_arg_general(true, false) | ||
2072 | } | ||
2073 | |||
2074 | /// Parses an argument in a lambda header (e.g., `|arg, arg|`). | ||
2075 | fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { | ||
2076 | let pat = self.parse_pat(Some("argument name"))?; | ||
2077 | let t = if self.eat(&token::Colon) { | ||
2078 | self.parse_ty()? | ||
2079 | } else { | ||
2080 | P(Ty { | ||
2081 | id: ast::DUMMY_NODE_ID, | ||
2082 | node: TyKind::Infer, | ||
2083 | span: self.prev_span, | ||
2084 | }) | ||
2085 | }; | ||
2086 | Ok(Arg { | ||
2087 | ty: t, | ||
2088 | pat, | ||
2089 | id: ast::DUMMY_NODE_ID | ||
2090 | }) | ||
2091 | } | ||
2092 | |||
2093 | fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> { | ||
2094 | if self.eat(&token::Semi) { | ||
2095 | Ok(Some(self.parse_expr()?)) | ||
2096 | } else { | ||
2097 | Ok(None) | ||
2098 | } | ||
2099 | } | ||
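// Illustration: in `parse_ty_common`, `[u8]` leaves this as `None` and becomes
// `TyKind::Slice`, while `[u8; 32]` returns the length expression `32` and becomes
// `TyKind::Array`.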
2100 | |||
2101 | /// Matches `token_lit = LIT_INTEGER | ...`. | ||
2102 | fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { | ||
2103 | let out = match self.token { | ||
2104 | token::Interpolated(ref nt) => match **nt { | ||
2105 | token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { | ||
2106 | ExprKind::Lit(ref lit) => { lit.node.clone() } | ||
2107 | _ => { return self.unexpected_last(&self.token); } | ||
2108 | }, | ||
2109 | _ => { return self.unexpected_last(&self.token); } | ||
2110 | }, | ||
2111 | token::Literal(lit, suf) => { | ||
2112 | let diag = Some((self.span, &self.sess.span_diagnostic)); | ||
2113 | let (suffix_illegal, result) = parse::lit_token(lit, suf, diag); | ||
2114 | |||
2115 | if suffix_illegal { | ||
2116 | let sp = self.span; | ||
2117 | self.expect_no_suffix(sp, lit.literal_name(), suf) | ||
2118 | } | ||
2119 | |||
2120 | result.unwrap() | ||
2121 | } | ||
2122 | token::Dot if self.look_ahead(1, |t| match t { | ||
2123 | token::Literal(parse::token::Lit::Integer(_) , _) => true, | ||
2124 | _ => false, | ||
2125 | }) => { // recover from `let x = .4;` | ||
2126 | let lo = self.span; | ||
2127 | self.bump(); | ||
2128 | if let token::Literal( | ||
2129 | parse::token::Lit::Integer(val), | ||
2130 | suffix, | ||
2131 | ) = self.token { | ||
2132 | let suffix = suffix.and_then(|s| { | ||
2133 | let s = s.as_str().get(); | ||
2134 | if ["f32", "f64"].contains(&s) { | ||
2135 | Some(s) | ||
2136 | } else { | ||
2137 | None | ||
2138 | } | ||
2139 | }).unwrap_or(""); | ||
2140 | self.bump(); | ||
2141 | let sp = lo.to(self.prev_span); | ||
2142 | let mut err = self.diagnostic() | ||
2143 | .struct_span_err(sp, "float literals must have an integer part"); | ||
2144 | err.span_suggestion( | ||
2145 | sp, | ||
2146 | "must have an integer part", | ||
2147 | format!("0.{}{}", val, suffix), | ||
2148 | Applicability::MachineApplicable, | ||
2149 | ); | ||
2150 | err.emit(); | ||
2151 | return Ok(match suffix { | ||
2152 | "f32" => ast::LitKind::Float(val, ast::FloatTy::F32), | ||
2153 | "f64" => ast::LitKind::Float(val, ast::FloatTy::F64), | ||
2154 | _ => ast::LitKind::FloatUnsuffixed(val), | ||
2155 | }); | ||
2156 | } else { | ||
2157 | unreachable!(); | ||
2158 | }; | ||
2159 | } | ||
2160 | _ => { return self.unexpected_last(&self.token); } | ||
2161 | }; | ||
2162 | |||
2163 | self.bump(); | ||
2164 | Ok(out) | ||
2165 | } | ||
2166 | |||
2167 | /// Matches `lit = true | false | token_lit`. | ||
2168 | crate fn parse_lit(&mut self) -> PResult<'a, Lit> { | ||
2169 | let lo = self.span; | ||
2170 | let lit = if self.eat_keyword(keywords::True) { | ||
2171 | LitKind::Bool(true) | ||
2172 | } else if self.eat_keyword(keywords::False) { | ||
2173 | LitKind::Bool(false) | ||
2174 | } else { | ||
2175 | let lit = self.parse_lit_token()?; | ||
2176 | lit | ||
2177 | }; | ||
2178 | Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) }) | ||
2179 | } | ||
2180 | |||
2181 | /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). | ||
2182 | crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { | ||
2183 | maybe_whole_expr!(self); | ||
2184 | |||
2185 | let minus_lo = self.span; | ||
2186 | let minus_present = self.eat(&token::BinOp(token::Minus)); | ||
2187 | let lo = self.span; | ||
2188 | let literal = self.parse_lit()?; | ||
2189 | let hi = self.prev_span; | ||
2190 | let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); | ||
2191 | |||
2192 | if minus_present { | ||
2193 | let minus_hi = self.prev_span; | ||
2194 | let unary = self.mk_unary(UnOp::Neg, expr); | ||
2195 | Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new())) | ||
2196 | } else { | ||
2197 | Ok(expr) | ||
2198 | } | ||
2199 | } | ||
2200 | |||
2201 | fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { | ||
2202 | match self.token { | ||
2203 | token::Ident(ident, _) if self.token.is_path_segment_keyword() => { | ||
2204 | let span = self.span; | ||
2205 | self.bump(); | ||
2206 | Ok(Ident::new(ident.name, span)) | ||
2207 | } | ||
2208 | _ => self.parse_ident(), | ||
2209 | } | ||
2210 | } | ||
2211 | |||
2212 | fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { | ||
2213 | match self.token { | ||
2214 | token::Ident(ident, false) if ident.name == keywords::Underscore.name() => { | ||
2215 | let span = self.span; | ||
2216 | self.bump(); | ||
2217 | Ok(Ident::new(ident.name, span)) | ||
2218 | } | ||
2219 | _ => self.parse_ident(), | ||
2220 | } | ||
2221 | } | ||
2222 | |||
2223 | /// Parses a qualified path. | ||
2224 | /// Assumes that the leading `<` has been parsed already. | ||
2225 | /// | ||
2226 | /// `qualified_path = <type [as trait_ref]>::path` | ||
2227 | /// | ||
2228 | /// # Examples | ||
2229 | /// `<T>::default` | ||
2230 | /// `<T as U>::a` | ||
2231 | /// `<T as U>::F::a<S>` (without disambiguator) | ||
2232 | /// `<T as U>::F::a::<S>` (with disambiguator) | ||
2233 | fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> { | ||
2234 | let lo = self.prev_span; | ||
2235 | let ty = self.parse_ty()?; | ||
2236 | |||
2237 | // `path` will contain the prefix of the path up to the `>`, | ||
2238 | // if any (e.g., `U` in the `<T as U>::*` examples | ||
2239 | // above). `path_span` has the span of that path, or an empty | ||
2240 | // span in the case of something like `<T>::Bar`. | ||
2241 | let (mut path, path_span); | ||
2242 | if self.eat_keyword(keywords::As) { | ||
2243 | let path_lo = self.span; | ||
2244 | path = self.parse_path(PathStyle::Type)?; | ||
2245 | path_span = path_lo.to(self.prev_span); | ||
2246 | } else { | ||
2247 | path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP }; | ||
2248 | path_span = self.span.to(self.span); | ||
2249 | } | ||
2250 | |||
2251 | // See doc comment for `unmatched_angle_bracket_count`. | ||
2252 | self.expect(&token::Gt)?; | ||
2253 | if self.unmatched_angle_bracket_count > 0 { | ||
2254 | self.unmatched_angle_bracket_count -= 1; | ||
2255 | debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count); | ||
2256 | } | ||
2257 | |||
2258 | self.expect(&token::ModSep)?; | ||
2259 | |||
2260 | let qself = QSelf { ty, path_span, position: path.segments.len() }; | ||
2261 | self.parse_path_segments(&mut path.segments, style, true)?; | ||
2262 | |||
2263 | Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) })) | ||
2264 | } | ||
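// Illustration: for `<T as a::b::Trait>::Assoc`, `qself.ty` is `T`, the returned path
// is `a::b::Trait::Assoc`, and `qself.position` is 3, i.e. the number of leading
// segments that belong to the trait reference.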
2265 | |||
2266 | /// Parses simple paths. | ||
2267 | /// | ||
2268 | /// `path = [::] segment+` | ||
2269 | /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]` | ||
2270 | /// | ||
2271 | /// # Examples | ||
2272 | /// `a::b::C<D>` (without disambiguator) | ||
2273 | /// `a::b::C::<D>` (with disambiguator) | ||
2274 | /// `Fn(Args)` (without disambiguator) | ||
2275 | /// `Fn::(Args)` (with disambiguator) | ||
2276 | pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { | ||
2277 | self.parse_path_common(style, true) | ||
2278 | } | ||
2279 | |||
2280 | crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool) | ||
2281 | -> PResult<'a, ast::Path> { | ||
2282 | maybe_whole!(self, NtPath, |path| { | ||
2283 | if style == PathStyle::Mod && | ||
2284 | path.segments.iter().any(|segment| segment.args.is_some()) { | ||
2285 | self.diagnostic().span_err(path.span, "unexpected generic arguments in path"); | ||
2286 | } | ||
2287 | path | ||
2288 | }); | ||
2289 | |||
2290 | let lo = self.meta_var_span.unwrap_or(self.span); | ||
2291 | let mut segments = Vec::new(); | ||
2292 | let mod_sep_ctxt = self.span.ctxt(); | ||
2293 | if self.eat(&token::ModSep) { | ||
2294 | segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))); | ||
2295 | } | ||
2296 | self.parse_path_segments(&mut segments, style, enable_warning)?; | ||
2297 | |||
2298 | Ok(ast::Path { segments, span: lo.to(self.prev_span) }) | ||
2299 | } | ||
2300 | |||
2301 | /// Like `parse_path`, but also supports parsing `Word` meta items into paths for | ||
2302 | /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` | ||
2303 | /// attributes. | ||
2304 | pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { | ||
2305 | let meta_ident = match self.token { | ||
2306 | token::Interpolated(ref nt) => match **nt { | ||
2307 | token::NtMeta(ref meta) => match meta.node { | ||
2308 | ast::MetaItemKind::Word => Some(meta.ident.clone()), | ||
2309 | _ => None, | ||
2310 | }, | ||
2311 | _ => None, | ||
2312 | }, | ||
2313 | _ => None, | ||
2314 | }; | ||
2315 | if let Some(path) = meta_ident { | ||
2316 | self.bump(); | ||
2317 | return Ok(path); | ||
2318 | } | ||
2319 | self.parse_path(style) | ||
2320 | } | ||
2321 | |||
2322 | fn parse_path_segments(&mut self, | ||
2323 | segments: &mut Vec<PathSegment>, | ||
2324 | style: PathStyle, | ||
2325 | enable_warning: bool) | ||
2326 | -> PResult<'a, ()> { | ||
2327 | loop { | ||
2328 | let segment = self.parse_path_segment(style, enable_warning)?; | ||
2329 | if style == PathStyle::Expr { | ||
2330 | // In order to check for trailing angle brackets, we must have finished | ||
2331 | // recursing (`parse_path_segment` can indirectly call this function), | ||
2332 | // that is, the next token must be the highlighted part of the below example: | ||
2333 | // | ||
2334 | // `Foo::<Bar as Baz<T>>::Qux` | ||
2335 | // ^ here | ||
2336 | // | ||
2337 | // As opposed to the below highlight (if we had only finished the first | ||
2338 | // recursion): | ||
2339 | // | ||
2340 | // `Foo::<Bar as Baz<T>>::Qux` | ||
2341 | // ^ here | ||
2342 | // | ||
2343 | // `PathStyle::Expr` is only provided at the root invocation and never passed | ||
2344 | // down when `parse_path_segment` recurses, so it can be checked here to | ||
2345 | // maintain this invariant. | ||
2346 | self.check_trailing_angle_brackets(&segment, token::ModSep); | ||
2347 | } | ||
2348 | segments.push(segment); | ||
2349 | |||
2350 | if self.is_import_coupler() || !self.eat(&token::ModSep) { | ||
2351 | return Ok(()); | ||
2352 | } | ||
2353 | } | ||
2354 | } | ||
2355 | |||
2356 | fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool) | ||
2357 | -> PResult<'a, PathSegment> { | ||
2358 | let ident = self.parse_path_segment_ident()?; | ||
2359 | |||
2360 | let is_args_start = |token: &token::Token| match *token { | ||
2361 | token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) => true, | ||
2362 | _ => false, | ||
2363 | }; | ||
2364 | let check_args_start = |this: &mut Self| { | ||
2365 | this.expected_tokens.extend_from_slice( | ||
2366 | &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))] | ||
2367 | ); | ||
2368 | is_args_start(&this.token) | ||
2369 | }; | ||
2370 | |||
2371 | Ok(if style == PathStyle::Type && check_args_start(self) || | ||
2372 | style != PathStyle::Mod && self.check(&token::ModSep) | ||
2373 | && self.look_ahead(1, |t| is_args_start(t)) { | ||
2374 | // Generic arguments are found - `<`, `(`, `::<` or `::(`. | ||
2375 | if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning { | ||
2376 | self.diagnostic().struct_span_warn(self.prev_span, "unnecessary path disambiguator") | ||
2377 | .span_label(self.prev_span, "try removing `::`").emit(); | ||
2378 | } | ||
2379 | let lo = self.span; | ||
2380 | |||
2381 | // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If | ||
2382 | // it isn't, then we reset the unmatched angle bracket count as we're about to start | ||
2383 | // parsing a new path. | ||
2384 | if style == PathStyle::Expr { | ||
2385 | self.unmatched_angle_bracket_count = 0; | ||
2386 | self.max_angle_bracket_count = 0; | ||
2387 | } | ||
2388 | |||
2389 | let args = if self.eat_lt() { | ||
2390 | // `<'a, T, A = U>` | ||
2391 | let (args, bindings) = | ||
2392 | self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?; | ||
2393 | self.expect_gt()?; | ||
2394 | let span = lo.to(self.prev_span); | ||
2395 | AngleBracketedArgs { args, bindings, span }.into() | ||
2396 | } else { | ||
2397 | // `(T, U) -> R` | ||
2398 | self.bump(); // `(` | ||
2399 | let (inputs, recovered) = self.parse_seq_to_before_tokens( | ||
2400 | &[&token::CloseDelim(token::Paren)], | ||
2401 | SeqSep::trailing_allowed(token::Comma), | ||
2402 | TokenExpectType::Expect, | ||
2403 | |p| p.parse_ty())?; | ||
2404 | if !recovered { | ||
2405 | self.bump(); // `)` | ||
2406 | } | ||
2407 | let span = lo.to(self.prev_span); | ||
2408 | let output = if self.eat(&token::RArrow) { | ||
2409 | Some(self.parse_ty_common(false, false)?) | ||
2410 | } else { | ||
2411 | None | ||
2412 | }; | ||
2413 | ParenthesizedArgs { inputs, output, span }.into() | ||
2414 | }; | ||
2415 | |||
2416 | PathSegment { ident, args, id: ast::DUMMY_NODE_ID } | ||
2417 | } else { | ||
2418 | // Generic arguments are not found. | ||
2419 | PathSegment::from_ident(ident) | ||
2420 | }) | ||
2421 | } | ||
2422 | |||
2423 | crate fn check_lifetime(&mut self) -> bool { | ||
2424 | self.expected_tokens.push(TokenType::Lifetime); | ||
2425 | self.token.is_lifetime() | ||
2426 | } | ||
2427 | |||
2428 | /// Parses a single lifetime `'a` or panics. | ||
2429 | crate fn expect_lifetime(&mut self) -> Lifetime { | ||
2430 | if let Some(ident) = self.token.lifetime() { | ||
2431 | let span = self.span; | ||
2432 | self.bump(); | ||
2433 | Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID } | ||
2434 | } else { | ||
2435 | self.span_bug(self.span, "not a lifetime") | ||
2436 | } | ||
2437 | } | ||
2438 | |||
2439 | fn eat_label(&mut self) -> Option<Label> { | ||
2440 | if let Some(ident) = self.token.lifetime() { | ||
2441 | let span = self.span; | ||
2442 | self.bump(); | ||
2443 | Some(Label { ident: Ident::new(ident.name, span) }) | ||
2444 | } else { | ||
2445 | None | ||
2446 | } | ||
2447 | } | ||
2448 | |||
2449 | /// Parses mutability (`mut` or nothing). | ||
2450 | fn parse_mutability(&mut self) -> Mutability { | ||
2451 | if self.eat_keyword(keywords::Mut) { | ||
2452 | Mutability::Mutable | ||
2453 | } else { | ||
2454 | Mutability::Immutable | ||
2455 | } | ||
2456 | } | ||
2457 | |||
2458 | fn parse_field_name(&mut self) -> PResult<'a, Ident> { | ||
2459 | if let token::Literal(token::Integer(name), None) = self.token { | ||
2460 | self.bump(); | ||
2461 | Ok(Ident::new(name, self.prev_span)) | ||
2462 | } else { | ||
2463 | self.parse_ident_common(false) | ||
2464 | } | ||
2465 | } | ||
2466 | |||
2467 | /// Parses `ident (COLON expr)?`. | ||
2468 | fn parse_field(&mut self) -> PResult<'a, Field> { | ||
2469 | let attrs = self.parse_outer_attributes()?; | ||
2470 | let lo = self.span; | ||
2471 | |||
2472 | // Check if a colon exists one token ahead. This means we're parsing a fieldname. | ||
2473 | let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| { | ||
2474 | t == &token::Colon || t == &token::Eq | ||
2475 | }) { | ||
2476 | let fieldname = self.parse_field_name()?; | ||
2477 | |||
2478 | // Check for an equals token. This means the source incorrectly attempts to | ||
2479 | // initialize a field with an `=` rather than a colon. | ||
2480 | if self.token == token::Eq { | ||
2481 | self.diagnostic() | ||
2482 | .struct_span_err(self.span, "expected `:`, found `=`") | ||
2483 | .span_suggestion( | ||
2484 | fieldname.span.shrink_to_hi().to(self.span), | ||
2485 | "replace equals symbol with a colon", | ||
2486 | ":".to_string(), | ||
2487 | Applicability::MachineApplicable, | ||
2488 | ) | ||
2489 | .emit(); | ||
2490 | } | ||
2491 | self.bump(); // `:` | ||
2492 | (fieldname, self.parse_expr()?, false) | ||
2493 | } else { | ||
2494 | let fieldname = self.parse_ident_common(false)?; | ||
2495 | |||
2496 | // Mimic `x: x` for the `x` field shorthand. | ||
2497 | let path = ast::Path::from_ident(fieldname); | ||
2498 | let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new()); | ||
2499 | (fieldname, expr, true) | ||
2500 | }; | ||
2501 | Ok(ast::Field { | ||
2502 | ident: fieldname, | ||
2503 | span: lo.to(expr.span), | ||
2504 | expr, | ||
2505 | is_shorthand, | ||
2506 | attrs: attrs.into(), | ||
2507 | }) | ||
2508 | } | ||
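// Illustration: in `S { x: 1, y }`, the `x` field takes the `fieldname: expr` branch,
// while `y` takes the shorthand branch and is expanded to the equivalent of `y: y`.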
2509 | |||
2510 | fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> { | ||
2511 | P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID }) | ||
2512 | } | ||
2513 | |||
2514 | fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind { | ||
2515 | ExprKind::Unary(unop, expr) | ||
2516 | } | ||
2517 | |||
2518 | fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind { | ||
2519 | ExprKind::Binary(binop, lhs, rhs) | ||
2520 | } | ||
2521 | |||
2522 | fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind { | ||
2523 | ExprKind::Call(f, args) | ||
2524 | } | ||
2525 | |||
2526 | fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind { | ||
2527 | ExprKind::Index(expr, idx) | ||
2528 | } | ||
2529 | |||
2530 | fn mk_range(&mut self, | ||
2531 | start: Option<P<Expr>>, | ||
2532 | end: Option<P<Expr>>, | ||
2533 | limits: RangeLimits) | ||
2534 | -> PResult<'a, ast::ExprKind> { | ||
2535 | if end.is_none() && limits == RangeLimits::Closed { | ||
2536 | Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd)) | ||
2537 | } else { | ||
2538 | Ok(ExprKind::Range(start, end, limits)) | ||
2539 | } | ||
2540 | } | ||
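// Illustration: `1..` and `1..2` are accepted here, while `1..=` (a closed range with
// no end) is rejected with `Error::InclusiveRangeWithNoEnd`.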
2541 | |||
2542 | fn mk_assign_op(&mut self, binop: ast::BinOp, | ||
2543 | lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind { | ||
2544 | ExprKind::AssignOp(binop, lhs, rhs) | ||
2545 | } | ||
2546 | |||
2547 | pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> { | ||
2548 | P(Expr { | ||
2549 | id: ast::DUMMY_NODE_ID, | ||
2550 | node: ExprKind::Mac(source_map::Spanned {node: m, span: span}), | ||
2551 | span, | ||
2552 | attrs, | ||
2553 | }) | ||
2554 | } | ||
2555 | |||
2556 | fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { | ||
2557 | let delim = match self.token { | ||
2558 | token::OpenDelim(delim) => delim, | ||
2559 | _ => { | ||
2560 | let msg = "expected open delimiter"; | ||
2561 | let mut err = self.fatal(msg); | ||
2562 | err.span_label(self.span, msg); | ||
2563 | return Err(err) | ||
2564 | } | ||
2565 | }; | ||
2566 | let tts = match self.parse_token_tree() { | ||
2567 | TokenTree::Delimited(_, _, tts) => tts, | ||
2568 | _ => unreachable!(), | ||
2569 | }; | ||
2570 | let delim = match delim { | ||
2571 | token::Paren => MacDelimiter::Parenthesis, | ||
2572 | token::Bracket => MacDelimiter::Bracket, | ||
2573 | token::Brace => MacDelimiter::Brace, | ||
2574 | token::NoDelim => self.bug("unexpected no delimiter"), | ||
2575 | }; | ||
2576 | Ok((delim, tts.into())) | ||
2577 | } | ||
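// Illustration: `m!(a)`, `m![a]` and `m! { a }` yield `MacDelimiter::Parenthesis`,
// `Bracket` and `Brace` respectively; the `NoDelim` arm is treated as a parser bug.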
2578 | |||
2579 | /// At the bottom (top?) of the precedence hierarchy, | ||
2580 | /// parses things like parenthesized exprs, macros, `return`, etc. | ||
2581 | /// | ||
2582 | /// N.B., this does not parse outer attributes, and is private because it only works | ||
2583 | /// correctly if called from `parse_dot_or_call_expr()`. | ||
2584 | fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> { | ||
2585 | maybe_whole_expr!(self); | ||
2586 | |||
2587 | // Outer attributes are already parsed and will be | ||
2588 | // added to the return value after the fact. | ||
2589 | // | ||
2590 | // Therefore, prevent sub-parser from parsing | ||
2591 | // attributes by giving them an empty "already parsed" list. | ||
2592 | let mut attrs = ThinVec::new(); | ||
2593 | |||
2594 | let lo = self.span; | ||
2595 | let mut hi = self.span; | ||
2596 | |||
2597 | let ex: ExprKind; | ||
2598 | |||
2599 | // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr(). | ||
2600 | match self.token { | ||
2601 | token::OpenDelim(token::Paren) => { | ||
2602 | self.bump(); | ||
2603 | |||
2604 | attrs.extend(self.parse_inner_attributes()?); | ||
2605 | |||
2606 | // (e) is parenthesized e | ||
2607 | // (e,) is a tuple with only one field, e | ||
2608 | let mut es = vec![]; | ||
2609 | let mut trailing_comma = false; | ||
2610 | let mut recovered = false; | ||
2611 | while self.token != token::CloseDelim(token::Paren) { | ||
2612 | es.push(self.parse_expr()?); | ||
2613 | recovered = self.expect_one_of( | ||
2614 | &[], | ||
2615 | &[token::Comma, token::CloseDelim(token::Paren)], | ||
2616 | )?; | ||
2617 | if self.eat(&token::Comma) { | ||
2618 | trailing_comma = true; | ||
2619 | } else { | ||
2620 | trailing_comma = false; | ||
2621 | break; | ||
2622 | } | ||
2623 | } | ||
2624 | if !recovered { | ||
2625 | self.bump(); | ||
2626 | } | ||
2627 | |||
2628 | hi = self.prev_span; | ||
2629 | ex = if es.len() == 1 && !trailing_comma { | ||
2630 | ExprKind::Paren(es.into_iter().nth(0).unwrap()) | ||
2631 | } else { | ||
2632 | ExprKind::Tup(es) | ||
2633 | }; | ||
2634 | } | ||
2635 | token::OpenDelim(token::Brace) => { | ||
2636 | return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs); | ||
2637 | } | ||
2638 | token::BinOp(token::Or) | token::OrOr => { | ||
2639 | return self.parse_lambda_expr(attrs); | ||
2640 | } | ||
2641 | token::OpenDelim(token::Bracket) => { | ||
2642 | self.bump(); | ||
2643 | |||
2644 | attrs.extend(self.parse_inner_attributes()?); | ||
2645 | |||
2646 | if self.eat(&token::CloseDelim(token::Bracket)) { | ||
2647 | // Empty vector. | ||
2648 | ex = ExprKind::Array(Vec::new()); | ||
2649 | } else { | ||
2650 | // Nonempty vector. | ||
2651 | let first_expr = self.parse_expr()?; | ||
2652 | if self.eat(&token::Semi) { | ||
2653 | // Repeating array syntax: [ 0; 512 ] | ||
2654 | let count = AnonConst { | ||
2655 | id: ast::DUMMY_NODE_ID, | ||
2656 | value: self.parse_expr()?, | ||
2657 | }; | ||
2658 | self.expect(&token::CloseDelim(token::Bracket))?; | ||
2659 | ex = ExprKind::Repeat(first_expr, count); | ||
2660 | } else if self.eat(&token::Comma) { | ||
2661 | // Vector with two or more elements. | ||
2662 | let remaining_exprs = self.parse_seq_to_end( | ||
2663 | &token::CloseDelim(token::Bracket), | ||
2664 | SeqSep::trailing_allowed(token::Comma), | ||
2665 | |p| Ok(p.parse_expr()?) | ||
2666 | )?; | ||
2667 | let mut exprs = vec![first_expr]; | ||
2668 | exprs.extend(remaining_exprs); | ||
2669 | ex = ExprKind::Array(exprs); | ||
2670 | } else { | ||
2671 | // Vector with one element. | ||
2672 | self.expect(&token::CloseDelim(token::Bracket))?; | ||
2673 | ex = ExprKind::Array(vec![first_expr]); | ||
2674 | } | ||
2675 | } | ||
2676 | hi = self.prev_span; | ||
2677 | } | ||
2678 | _ => { | ||
2679 | if self.eat_lt() { | ||
2680 | let (qself, path) = self.parse_qpath(PathStyle::Expr)?; | ||
2681 | hi = path.span; | ||
2682 | return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs)); | ||
2683 | } | ||
2684 | if self.span.rust_2018() && self.check_keyword(keywords::Async) | ||
2685 | { | ||
2686 | if self.is_async_block() { // check for `async {` and `async move {` | ||
2687 | return self.parse_async_block(attrs); | ||
2688 | } else { | ||
2689 | return self.parse_lambda_expr(attrs); | ||
2690 | } | ||
2691 | } | ||
2692 | if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) { | ||
2693 | return self.parse_lambda_expr(attrs); | ||
2694 | } | ||
2695 | if self.eat_keyword(keywords::If) { | ||
2696 | return self.parse_if_expr(attrs); | ||
2697 | } | ||
2698 | if self.eat_keyword(keywords::For) { | ||
2699 | let lo = self.prev_span; | ||
2700 | return self.parse_for_expr(None, lo, attrs); | ||
2701 | } | ||
2702 | if self.eat_keyword(keywords::While) { | ||
2703 | let lo = self.prev_span; | ||
2704 | return self.parse_while_expr(None, lo, attrs); | ||
2705 | } | ||
2706 | if let Some(label) = self.eat_label() { | ||
2707 | let lo = label.ident.span; | ||
2708 | self.expect(&token::Colon)?; | ||
2709 | if self.eat_keyword(keywords::While) { | ||
2710 | return self.parse_while_expr(Some(label), lo, attrs) | ||
2711 | } | ||
2712 | if self.eat_keyword(keywords::For) { | ||
2713 | return self.parse_for_expr(Some(label), lo, attrs) | ||
2714 | } | ||
2715 | if self.eat_keyword(keywords::Loop) { | ||
2716 | return self.parse_loop_expr(Some(label), lo, attrs) | ||
2717 | } | ||
2718 | if self.token == token::OpenDelim(token::Brace) { | ||
2719 | return self.parse_block_expr(Some(label), | ||
2720 | lo, | ||
2721 | BlockCheckMode::Default, | ||
2722 | attrs); | ||
2723 | } | ||
2724 | let msg = "expected `while`, `for`, `loop` or `{` after a label"; | ||
2725 | let mut err = self.fatal(msg); | ||
2726 | err.span_label(self.span, msg); | ||
2727 | return Err(err); | ||
2728 | } | ||
2729 | if self.eat_keyword(keywords::Loop) { | ||
2730 | let lo = self.prev_span; | ||
2731 | return self.parse_loop_expr(None, lo, attrs); | ||
2732 | } | ||
2733 | if self.eat_keyword(keywords::Continue) { | ||
2734 | let label = self.eat_label(); | ||
2735 | let ex = ExprKind::Continue(label); | ||
2736 | let hi = self.prev_span; | ||
2737 | return Ok(self.mk_expr(lo.to(hi), ex, attrs)); | ||
2738 | } | ||
2739 | if self.eat_keyword(keywords::Match) { | ||
2740 | let match_sp = self.prev_span; | ||
2741 | return self.parse_match_expr(attrs).map_err(|mut err| { | ||
2742 | err.span_label(match_sp, "while parsing this match expression"); | ||
2743 | err | ||
2744 | }); | ||
2745 | } | ||
2746 | if self.eat_keyword(keywords::Unsafe) { | ||
2747 | return self.parse_block_expr( | ||
2748 | None, | ||
2749 | lo, | ||
2750 | BlockCheckMode::Unsafe(ast::UserProvided), | ||
2751 | attrs); | ||
2752 | } | ||
2753 | if self.is_do_catch_block() { | ||
2754 | let mut db = self.fatal("found removed `do catch` syntax"); | ||
2755 | db.help("Following RFC #2388, the new non-placeholder syntax is `try`"); | ||
2756 | return Err(db); | ||
2757 | } | ||
2758 | if self.is_try_block() { | ||
2759 | let lo = self.span; | ||
2760 | assert!(self.eat_keyword(keywords::Try)); | ||
2761 | return self.parse_try_block(lo, attrs); | ||
2762 | } | ||
2763 | if self.eat_keyword(keywords::Return) { | ||
2764 | if self.token.can_begin_expr() { | ||
2765 | let e = self.parse_expr()?; | ||
2766 | hi = e.span; | ||
2767 | ex = ExprKind::Ret(Some(e)); | ||
2768 | } else { | ||
2769 | ex = ExprKind::Ret(None); | ||
2770 | } | ||
2771 | } else if self.eat_keyword(keywords::Break) { | ||
2772 | let label = self.eat_label(); | ||
2773 | let e = if self.token.can_begin_expr() | ||
2774 | && !(self.token == token::OpenDelim(token::Brace) | ||
2775 | && self.restrictions.contains( | ||
2776 | Restrictions::NO_STRUCT_LITERAL)) { | ||
2777 | Some(self.parse_expr()?) | ||
2778 | } else { | ||
2779 | None | ||
2780 | }; | ||
2781 | ex = ExprKind::Break(label, e); | ||
2782 | hi = self.prev_span; | ||
2783 | } else if self.eat_keyword(keywords::Yield) { | ||
2784 | if self.token.can_begin_expr() { | ||
2785 | let e = self.parse_expr()?; | ||
2786 | hi = e.span; | ||
2787 | ex = ExprKind::Yield(Some(e)); | ||
2788 | } else { | ||
2789 | ex = ExprKind::Yield(None); | ||
2790 | } | ||
2791 | } else if self.token.is_keyword(keywords::Let) { | ||
2792 | // Catch this syntax error here, instead of in `parse_ident`, so | ||
2793 | // that we can explicitly mention that `let` is not to be used as an expression. | ||
2794 | let mut db = self.fatal("expected expression, found statement (`let`)"); | ||
2795 | db.span_label(self.span, "expected expression"); | ||
2796 | db.note("variable declaration using `let` is a statement"); | ||
2797 | return Err(db); | ||
2798 | } else if self.token.is_path_start() { | ||
2799 | let pth = self.parse_path(PathStyle::Expr)?; | ||
2800 | |||
2801 | // `!`, as an operator, is prefix, so we know this isn't that | ||
2802 | if self.eat(&token::Not) { | ||
2803 | // MACRO INVOCATION expression | ||
2804 | let (delim, tts) = self.expect_delimited_token_tree()?; | ||
2805 | let hi = self.prev_span; | ||
2806 | let node = Mac_ { path: pth, tts, delim }; | ||
2807 | return Ok(self.mk_mac_expr(lo.to(hi), node, attrs)) | ||
2808 | } | ||
2809 | if self.check(&token::OpenDelim(token::Brace)) { | ||
2810 | // This is a struct literal, unless we're prohibited | ||
2811 | // from parsing struct literals here. | ||
2812 | let prohibited = self.restrictions.contains( | ||
2813 | Restrictions::NO_STRUCT_LITERAL | ||
2814 | ); | ||
2815 | if !prohibited { | ||
2816 | return self.parse_struct_expr(lo, pth, attrs); | ||
2817 | } | ||
2818 | } | ||
2819 | |||
2820 | hi = pth.span; | ||
2821 | ex = ExprKind::Path(None, pth); | ||
2822 | } else { | ||
2823 | if !self.unclosed_delims.is_empty() && self.check(&token::Semi) { | ||
2824 | // Don't complain about bare semicolons after unclosed braces | ||
2825 | // recovery in order to keep the error count down. Fixing the | ||
2826 | // delimiters will possibly also fix the bare semicolon found in | ||
2827 | // expression context. For example, silence the following error: | ||
2828 | // ``` | ||
2829 | // error: expected expression, found `;` | ||
2830 | // --> file.rs:2:13 | ||
2831 | // | | ||
2832 | // 2 | foo(bar(; | ||
2833 | // | ^ expected expression | ||
2834 | // ``` | ||
2835 | self.bump(); | ||
2836 | return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new())); | ||
2837 | } | ||
2838 | match self.parse_literal_maybe_minus() { | ||
2839 | Ok(expr) => { | ||
2840 | hi = expr.span; | ||
2841 | ex = expr.node.clone(); | ||
2842 | } | ||
2843 | Err(mut err) => { | ||
2844 | self.cancel(&mut err); | ||
2845 | let msg = format!("expected expression, found {}", | ||
2846 | self.this_token_descr()); | ||
2847 | let mut err = self.fatal(&msg); | ||
2848 | err.span_label(self.span, "expected expression"); | ||
2849 | return Err(err); | ||
2850 | } | ||
2851 | } | ||
2852 | } | ||
2853 | } | ||
2854 | } | ||
2855 | |||
2856 | let expr = Expr { node: ex, span: lo.to(hi), id: ast::DUMMY_NODE_ID, attrs }; | ||
2857 | let expr = self.maybe_recover_from_bad_qpath(expr, true)?; | ||
2858 | |||
2859 | return Ok(P(expr)); | ||
2860 | } | ||
2861 | |||
2862 | fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>) | ||
2863 | -> PResult<'a, P<Expr>> { | ||
2864 | let struct_sp = lo.to(self.prev_span); | ||
2865 | self.bump(); | ||
2866 | let mut fields = Vec::new(); | ||
2867 | let mut base = None; | ||
2868 | |||
2869 | attrs.extend(self.parse_inner_attributes()?); | ||
2870 | |||
2871 | while self.token != token::CloseDelim(token::Brace) { | ||
2872 | if self.eat(&token::DotDot) { | ||
2873 | let exp_span = self.prev_span; | ||
2874 | match self.parse_expr() { | ||
2875 | Ok(e) => { | ||
2876 | base = Some(e); | ||
2877 | } | ||
2878 | Err(mut e) => { | ||
2879 | e.emit(); | ||
2880 | self.recover_stmt(); | ||
2881 | } | ||
2882 | } | ||
2883 | if self.token == token::Comma { | ||
2884 | let mut err = self.sess.span_diagnostic.mut_span_err( | ||
2885 | exp_span.to(self.prev_span), | ||
2886 | "cannot use a comma after the base struct", | ||
2887 | ); | ||
2888 | err.span_suggestion_short( | ||
2889 | self.span, | ||
2890 | "remove this comma", | ||
2891 | String::new(), | ||
2892 | Applicability::MachineApplicable | ||
2893 | ); | ||
2894 | err.note("the base struct must always be the last field"); | ||
2895 | err.emit(); | ||
2896 | self.recover_stmt(); | ||
2897 | } | ||
2898 | break; | ||
2899 | } | ||
2900 | |||
2901 | let mut recovery_field = None; | ||
2902 | if let token::Ident(ident, _) = self.token { | ||
2903 | if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) { | ||
2904 | // Use in case of error after field-looking code: `S { foo: () with a }` | ||
2905 | let mut ident = ident.clone(); | ||
2906 | ident.span = self.span; | ||
2907 | recovery_field = Some(ast::Field { | ||
2908 | ident, | ||
2909 | span: self.span, | ||
2910 | expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()), | ||
2911 | is_shorthand: false, | ||
2912 | attrs: ThinVec::new(), | ||
2913 | }); | ||
2914 | } | ||
2915 | } | ||
2916 | let mut parsed_field = None; | ||
2917 | match self.parse_field() { | ||
2918 | Ok(f) => parsed_field = Some(f), | ||
2919 | Err(mut e) => { | ||
2920 | e.span_label(struct_sp, "while parsing this struct"); | ||
2921 | e.emit(); | ||
2922 | |||
2923 | // If the next token is a comma, then try to parse | ||
2924 | // what comes next as additional fields, rather than | ||
2925 | // bailing out until next `}`. | ||
2926 | if self.token != token::Comma { | ||
2927 | self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore); | ||
2928 | if self.token != token::Comma { | ||
2929 | break; | ||
2930 | } | ||
2931 | } | ||
2932 | } | ||
2933 | } | ||
2934 | |||
2935 | match self.expect_one_of(&[token::Comma], | ||
2936 | &[token::CloseDelim(token::Brace)]) { | ||
2937 | Ok(_) => if let Some(f) = parsed_field.or(recovery_field) { | ||
2938 | // only include the field if there's no parse error for the field name | ||
2939 | fields.push(f); | ||
2940 | } | ||
2941 | Err(mut e) => { | ||
2942 | if let Some(f) = recovery_field { | ||
2943 | fields.push(f); | ||
2944 | } | ||
2945 | e.span_label(struct_sp, "while parsing this struct"); | ||
2946 | e.emit(); | ||
2947 | self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore); | ||
2948 | self.eat(&token::Comma); | ||
2949 | } | ||
2950 | } | ||
2951 | } | ||
2952 | |||
2953 | let span = lo.to(self.span); | ||
2954 | self.expect(&token::CloseDelim(token::Brace))?; | ||
2955 | return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs)); | ||
2956 | } | ||
2957 | |||
2958 | fn parse_or_use_outer_attributes(&mut self, | ||
2959 | already_parsed_attrs: Option<ThinVec<Attribute>>) | ||
2960 | -> PResult<'a, ThinVec<Attribute>> { | ||
2961 | if let Some(attrs) = already_parsed_attrs { | ||
2962 | Ok(attrs) | ||
2963 | } else { | ||
2964 | self.parse_outer_attributes().map(|a| a.into()) | ||
2965 | } | ||
2966 | } | ||
2967 | |||
2968 | /// Parses a block or unsafe block. | ||
2969 | fn parse_block_expr(&mut self, opt_label: Option<Label>, | ||
2970 | lo: Span, blk_mode: BlockCheckMode, | ||
2971 | outer_attrs: ThinVec<Attribute>) | ||
2972 | -> PResult<'a, P<Expr>> { | ||
2973 | self.expect(&token::OpenDelim(token::Brace))?; | ||
2974 | |||
2975 | let mut attrs = outer_attrs; | ||
2976 | attrs.extend(self.parse_inner_attributes()?); | ||
2977 | |||
2978 | let blk = self.parse_block_tail(lo, blk_mode)?; | ||
2979 | return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs)); | ||
2980 | } | ||
2981 | |||
2982 | /// Parses `a.b` or `a(13)` or `a[4]` or just `a`. | ||
2983 | fn parse_dot_or_call_expr(&mut self, | ||
2984 | already_parsed_attrs: Option<ThinVec<Attribute>>) | ||
2985 | -> PResult<'a, P<Expr>> { | ||
2986 | let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; | ||
2987 | |||
2988 | let b = self.parse_bottom_expr(); | ||
2989 | let (span, b) = self.interpolated_or_expr_span(b)?; | ||
2990 | self.parse_dot_or_call_expr_with(b, span, attrs) | ||
2991 | } | ||
2992 | |||
2993 | fn parse_dot_or_call_expr_with(&mut self, | ||
2994 | e0: P<Expr>, | ||
2995 | lo: Span, | ||
2996 | mut attrs: ThinVec<Attribute>) | ||
2997 | -> PResult<'a, P<Expr>> { | ||
2998 | // Stitch the list of outer attributes onto the return value. | ||
2999 | // A little bit ugly, but the best way given the current code | ||
3000 | // structure | ||
3001 | self.parse_dot_or_call_expr_with_(e0, lo) | ||
3002 | .map(|expr| | ||
3003 | expr.map(|mut expr| { | ||
3004 | attrs.extend::<Vec<_>>(expr.attrs.into()); | ||
3005 | expr.attrs = attrs; | ||
3006 | match expr.node { | ||
3007 | ExprKind::If(..) | ExprKind::IfLet(..) => { | ||
3008 | if !expr.attrs.is_empty() { | ||
3009 | // Just point to the first attribute in there... | ||
3010 | let span = expr.attrs[0].span; | ||
3011 | |||
3012 | self.span_err(span, | ||
3013 | "attributes are not yet allowed on `if` \ | ||
3014 | expressions"); | ||
3015 | } | ||
3016 | } | ||
3017 | _ => {} | ||
3018 | } | ||
3019 | expr | ||
3020 | }) | ||
3021 | ) | ||
3022 | } | ||
3023 | |||
3024 | // Assuming we have just parsed `.`, continue parsing into an expression. | ||
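// For example (illustrative): `x.foo(a)` is turned into an `ExprKind::MethodCall` with
// `x` inserted as the first argument, while a plain `x.foo` becomes `ExprKind::Field`.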
3025 | fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { | ||
3026 | let segment = self.parse_path_segment(PathStyle::Expr, true)?; | ||
3027 | self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren)); | ||
3028 | |||
3029 | Ok(match self.token { | ||
3030 | token::OpenDelim(token::Paren) => { | ||
3031 | // Method call `expr.f()` | ||
3032 | let mut args = self.parse_unspanned_seq( | ||
3033 | &token::OpenDelim(token::Paren), | ||
3034 | &token::CloseDelim(token::Paren), | ||
3035 | SeqSep::trailing_allowed(token::Comma), | ||
3036 | |p| Ok(p.parse_expr()?) | ||
3037 | )?; | ||
3038 | args.insert(0, self_arg); | ||
3039 | |||
3040 | let span = lo.to(self.prev_span); | ||
3041 | self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new()) | ||
3042 | } | ||
3043 | _ => { | ||
3044 | // Field access `expr.f` | ||
3045 | if let Some(args) = segment.args { | ||
3046 | self.span_err(args.span(), | ||
3047 | "field expressions may not have generic arguments"); | ||
3048 | } | ||
3049 | |||
3050 | let span = lo.to(self.prev_span); | ||
3051 | self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new()) | ||
3052 | } | ||
3053 | }) | ||
3054 | } | ||
3055 | |||
3056 | /// This function checks if there are trailing angle brackets and produces | ||
3057 | /// a diagnostic to suggest removing them. | ||
3058 | /// | ||
3059 | /// ```ignore (diagnostic) | ||
3060 | /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>(); | ||
3061 | /// ^^ help: remove extra angle brackets | ||
3062 | /// ``` | ||
3063 | fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) { | ||
3064 | // This function is intended to be invoked after parsing a path segment where there are two | ||
3065 | // cases: | ||
3066 | // | ||
3067 | // 1. A specific token is expected after the path segment. | ||
3068 | // e.g. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call), | ||
3069 | // `Foo::`, or `Foo::<Bar>::` (mod sep - continued path). | ||
3070 | // 2. No specific token is expected after the path segment. | ||
3071 | // e.g. `x.foo` (field access) | ||
3072 | // | ||
3073 | // This function is called after parsing `.foo` and before parsing the token `end` (if | ||
3074 | // present). This includes any angle bracket arguments, such as `.foo::<u32>` or | ||
3075 | // `Foo::<Bar>`. | ||
3076 | |||
3077 | // We only care about trailing angle brackets if we previously parsed angle bracket | ||
3078 | // arguments. This helps stop us incorrectly suggesting that extra angle brackets be | ||
3079 | // removed in this case: | ||
3080 | // | ||
3081 | // `x.foo >> (3)` (where `x.foo` is a `u32` for example) | ||
3082 | // | ||
3083 | // This case is particularly tricky as we won't notice it just looking at the tokens - | ||
3084 | // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will | ||
3085 | // have already been parsed): | ||
3086 | // | ||
3087 | // `x.foo::<u32>>>(3)` | ||
3088 | let parsed_angle_bracket_args = segment.args | ||
3089 | .as_ref() | ||
3090 | .map(|args| args.is_angle_bracketed()) | ||
3091 | .unwrap_or(false); | ||
3092 | |||
3093 | debug!( | ||
3094 | "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}", | ||
3095 | parsed_angle_bracket_args, | ||
3096 | ); | ||
3097 | if !parsed_angle_bracket_args { | ||
3098 | return; | ||
3099 | } | ||
3100 | |||
3101 | // Keep the span at the start so we can highlight the sequence of `>` characters to be | ||
3102 | // removed. | ||
3103 | let lo = self.span; | ||
3104 | |||
3105 | // We need to look ahead to see if we have `>` characters without moving the cursor forward | ||
3106 | // (since we might have the field access case and the characters we're eating are | ||
3107 | // actual operators and not trailing characters - i.e., `x.foo >> 3`). | ||
3108 | let mut position = 0; | ||
3109 | |||
3110 | // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how | ||
3111 | // many of each (so we can correctly pluralize our error messages) and continue to | ||
3112 | // advance. | ||
3113 | let mut number_of_shr = 0; | ||
3114 | let mut number_of_gt = 0; | ||
3115 | while self.look_ahead(position, |t| { | ||
3116 | trace!("check_trailing_angle_brackets: t={:?}", t); | ||
3117 | if *t == token::BinOp(token::BinOpToken::Shr) { | ||
3118 | number_of_shr += 1; | ||
3119 | true | ||
3120 | } else if *t == token::Gt { | ||
3121 | number_of_gt += 1; | ||
3122 | true | ||
3123 | } else { | ||
3124 | false | ||
3125 | } | ||
3126 | }) { | ||
3127 | position += 1; | ||
3128 | } | ||
3129 | |||
3130 | // If we didn't find any trailing `>` characters, then we have nothing to error about. | ||
3131 | debug!( | ||
3132 | "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}", | ||
3133 | number_of_gt, number_of_shr, | ||
3134 | ); | ||
3135 | if number_of_gt < 1 && number_of_shr < 1 { | ||
3136 | return; | ||
3137 | } | ||
3138 | |||
3139 | // Finally, double check that we have our end token as otherwise this is the | ||
3140 | // second case. | ||
3141 | if self.look_ahead(position, |t| { | ||
3142 | trace!("check_trailing_angle_brackets: t={:?}", t); | ||
3143 | *t == end | ||
3144 | }) { | ||
3145 | // Eat from where we started until the end token so that parsing can continue | ||
3146 | // as if we didn't have those extra angle brackets. | ||
3147 | self.eat_to_tokens(&[&end]); | ||
3148 | let span = lo.until(self.span); | ||
3149 | |||
3150 | let plural = number_of_gt > 1 || number_of_shr >= 1; | ||
3151 | self.diagnostic() | ||
3152 | .struct_span_err( | ||
3153 | span, | ||
3154 | &format!("unmatched angle bracket{}", if plural { "s" } else { "" }), | ||
3155 | ) | ||
3156 | .span_suggestion( | ||
3157 | span, | ||
3158 | &format!("remove extra angle bracket{}", if plural { "s" } else { "" }), | ||
3159 | String::new(), | ||
3160 | Applicability::MachineApplicable, | ||
3161 | ) | ||
3162 | .emit(); | ||
3163 | } | ||
3164 | } | ||
3165 | |||
3166 | fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { | ||
3167 | let mut e = e0; | ||
3168 | let mut hi; | ||
3169 | loop { | ||
3170 | // expr? | ||
3171 | while self.eat(&token::Question) { | ||
3172 | let hi = self.prev_span; | ||
3173 | e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new()); | ||
3174 | } | ||
3175 | |||
3176 | // expr.f | ||
3177 | if self.eat(&token::Dot) { | ||
3178 | match self.token { | ||
3179 | token::Ident(..) => { | ||
3180 | e = self.parse_dot_suffix(e, lo)?; | ||
3181 | } | ||
3182 | token::Literal(token::Integer(name), _) => { | ||
3183 | let span = self.span; | ||
3184 | self.bump(); | ||
3185 | let field = ExprKind::Field(e, Ident::new(name, span)); | ||
3186 | e = self.mk_expr(lo.to(span), field, ThinVec::new()); | ||
3187 | } | ||
3188 | token::Literal(token::Float(n), _suf) => { | ||
3189 | self.bump(); | ||
3190 | let fstr = n.as_str(); | ||
3191 | let mut err = self.diagnostic() | ||
3192 | .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n)); | ||
3193 | err.span_label(self.prev_span, "unexpected token"); | ||
3194 | if fstr.chars().all(|x| "0123456789.".contains(x)) { | ||
3195 | let float = match fstr.parse::<f64>().ok() { | ||
3196 | Some(f) => f, | ||
3197 | None => continue, | ||
3198 | }; | ||
3199 | let sugg = pprust::to_string(|s| { | ||
3200 | use crate::print::pprust::PrintState; | ||
3201 | s.popen()?; | ||
3202 | s.print_expr(&e)?; | ||
3203 | s.s.word(".")?; | ||
3204 | s.print_usize(float.trunc() as usize)?; | ||
3205 | s.pclose()?; | ||
3206 | s.s.word(".")?; | ||
3207 | s.s.word(fstr.splitn(2, ".").last().unwrap().to_string()) | ||
3208 | }); | ||
3209 | err.span_suggestion( | ||
3210 | lo.to(self.prev_span), | ||
3211 | "try parenthesizing the first index", | ||
3212 | sugg, | ||
3213 | Applicability::MachineApplicable | ||
3214 | ); | ||
3215 | } | ||
3216 | return Err(err); | ||
3217 | |||
3218 | } | ||
3219 | _ => { | ||
3220 | // FIXME Could factor this out into non_fatal_unexpected or something. | ||
3221 | let actual = self.this_token_to_string(); | ||
3222 | self.span_err(self.span, &format!("unexpected token: `{}`", actual)); | ||
3223 | } | ||
3224 | } | ||
3225 | continue; | ||
3226 | } | ||
3227 | if self.expr_is_complete(&e) { break; } | ||
3228 | match self.token { | ||
3229 | // expr(...) | ||
3230 | token::OpenDelim(token::Paren) => { | ||
3231 | let es = self.parse_unspanned_seq( | ||
3232 | &token::OpenDelim(token::Paren), | ||
3233 | &token::CloseDelim(token::Paren), | ||
3234 | SeqSep::trailing_allowed(token::Comma), | ||
3235 | |p| Ok(p.parse_expr()?) | ||
3236 | )?; | ||
3237 | hi = self.prev_span; | ||
3238 | |||
3239 | let nd = self.mk_call(e, es); | ||
3240 | e = self.mk_expr(lo.to(hi), nd, ThinVec::new()); | ||
3241 | } | ||
3242 | |||
3243 | // expr[...] | ||
3244 | // Could be either an index expression or a slicing expression. | ||
3245 | token::OpenDelim(token::Bracket) => { | ||
3246 | self.bump(); | ||
3247 | let ix = self.parse_expr()?; | ||
3248 | hi = self.span; | ||
3249 | self.expect(&token::CloseDelim(token::Bracket))?; | ||
3250 | let index = self.mk_index(e, ix); | ||
3251 | e = self.mk_expr(lo.to(hi), index, ThinVec::new()) | ||
3252 | } | ||
3253 | _ => return Ok(e) | ||
3254 | } | ||
3255 | } | ||
3256 | return Ok(e); | ||
3257 | } | ||
3258 | |||
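// Illustrative note: the helper below replaces an interpolated `NtIdent`/`NtLifetime`
// token with the corresponding plain `Ident`/`Lifetime` token, and a `$name` that
// survives to this point is reported as an "unknown macro variable".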
3259 | crate fn process_potential_macro_variable(&mut self) { | ||
3260 | let (token, span) = match self.token { | ||
3261 | token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && | ||
3262 | self.look_ahead(1, |t| t.is_ident()) => { | ||
3263 | self.bump(); | ||
3264 | let name = match self.token { | ||
3265 | token::Ident(ident, _) => ident, | ||
3266 | _ => unreachable!() | ||
3267 | }; | ||
3268 | let mut err = self.fatal(&format!("unknown macro variable `{}`", name)); | ||
3269 | err.span_label(self.span, "unknown macro variable"); | ||
3270 | err.emit(); | ||
3271 | self.bump(); | ||
3272 | return | ||
3273 | } | ||
3274 | token::Interpolated(ref nt) => { | ||
3275 | self.meta_var_span = Some(self.span); | ||
3276 | // Interpolated identifier and lifetime tokens are replaced with usual identifier | ||
3277 | // and lifetime tokens, so the former are never encountered during normal parsing. | ||
3278 | match **nt { | ||
3279 | token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span), | ||
3280 | token::NtLifetime(ident) => (token::Lifetime(ident), ident.span), | ||
3281 | _ => return, | ||
3282 | } | ||
3283 | } | ||
3284 | _ => return, | ||
3285 | }; | ||
3286 | self.token = token; | ||
3287 | self.span = span; | ||
3288 | } | ||
3289 | |||
3290 | /// Parses a single token tree from the input. | ||
3291 | crate fn parse_token_tree(&mut self) -> TokenTree { | ||
3292 | match self.token { | ||
3293 | token::OpenDelim(..) => { | ||
3294 | let frame = mem::replace(&mut self.token_cursor.frame, | ||
3295 | self.token_cursor.stack.pop().unwrap()); | ||
3296 | self.span = frame.span.entire(); | ||
3297 | self.bump(); | ||
3298 | TokenTree::Delimited( | ||
3299 | frame.span, | ||
3300 | frame.delim, | ||
3301 | frame.tree_cursor.stream.into(), | ||
3302 | ) | ||
3303 | }, | ||
3304 | token::CloseDelim(_) | token::Eof => unreachable!(), | ||
3305 | _ => { | ||
3306 | let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span); | ||
3307 | self.bump(); | ||
3308 | TokenTree::Token(span, token) | ||
3309 | } | ||
3310 | } | ||
3311 | } | ||
3312 | |||
3313 | // Parses a stream of tokens into a list of `TokenTree`s, | ||
3314 | // up to EOF. | ||
3315 | pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> { | ||
3316 | let mut tts = Vec::new(); | ||
3317 | while self.token != token::Eof { | ||
3318 | tts.push(self.parse_token_tree()); | ||
3319 | } | ||
3320 | Ok(tts) | ||
3321 | } | ||
3322 | |||
3323 | pub fn parse_tokens(&mut self) -> TokenStream { | ||
3324 | let mut result = Vec::new(); | ||
3325 | loop { | ||
3326 | match self.token { | ||
3327 | token::Eof | token::CloseDelim(..) => break, | ||
3328 | _ => result.push(self.parse_token_tree().into()), | ||
3329 | } | ||
3330 | } | ||
3331 | TokenStream::new(result) | ||
3332 | } | ||
3333 | |||
3334 | /// Parses a prefix-unary-operator expression. | ||
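/// e.g. `!x`, `-x`, `*ptr`, `&mut val`, or `box expr`; anything that is not a recognized
/// prefix form falls through to `parse_dot_or_call_expr`.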
3335 | fn parse_prefix_expr(&mut self, | ||
3336 | already_parsed_attrs: Option<ThinVec<Attribute>>) | ||
3337 | -> PResult<'a, P<Expr>> { | ||
3338 | let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; | ||
3339 | let lo = self.span; | ||
3340 | // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() | ||
3341 | let (hi, ex) = match self.token { | ||
3342 | token::Not => { | ||
3343 | self.bump(); | ||
3344 | let e = self.parse_prefix_expr(None); | ||
3345 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3346 | (lo.to(span), self.mk_unary(UnOp::Not, e)) | ||
3347 | } | ||
3348 | // Suggest `!` for bitwise negation when encountering a `~` | ||
3349 | token::Tilde => { | ||
3350 | self.bump(); | ||
3351 | let e = self.parse_prefix_expr(None); | ||
3352 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3353 | let span_of_tilde = lo; | ||
3354 | let mut err = self.diagnostic() | ||
3355 | .struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator"); | ||
3356 | err.span_suggestion_short( | ||
3357 | span_of_tilde, | ||
3358 | "use `!` to perform bitwise negation", | ||
3359 | "!".to_owned(), | ||
3360 | Applicability::MachineApplicable | ||
3361 | ); | ||
3362 | err.emit(); | ||
3363 | (lo.to(span), self.mk_unary(UnOp::Not, e)) | ||
3364 | } | ||
3365 | token::BinOp(token::Minus) => { | ||
3366 | self.bump(); | ||
3367 | let e = self.parse_prefix_expr(None); | ||
3368 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3369 | (lo.to(span), self.mk_unary(UnOp::Neg, e)) | ||
3370 | } | ||
3371 | token::BinOp(token::Star) => { | ||
3372 | self.bump(); | ||
3373 | let e = self.parse_prefix_expr(None); | ||
3374 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3375 | (lo.to(span), self.mk_unary(UnOp::Deref, e)) | ||
3376 | } | ||
3377 | token::BinOp(token::And) | token::AndAnd => { | ||
3378 | self.expect_and()?; | ||
3379 | let m = self.parse_mutability(); | ||
3380 | let e = self.parse_prefix_expr(None); | ||
3381 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3382 | (lo.to(span), ExprKind::AddrOf(m, e)) | ||
3383 | } | ||
3384 | token::Ident(..) if self.token.is_keyword(keywords::In) => { | ||
3385 | self.bump(); | ||
3386 | let place = self.parse_expr_res( | ||
3387 | Restrictions::NO_STRUCT_LITERAL, | ||
3388 | None, | ||
3389 | )?; | ||
3390 | let blk = self.parse_block()?; | ||
3391 | let span = blk.span; | ||
3392 | let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new()); | ||
3393 | (lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr)) | ||
3394 | } | ||
3395 | token::Ident(..) if self.token.is_keyword(keywords::Box) => { | ||
3396 | self.bump(); | ||
3397 | let e = self.parse_prefix_expr(None); | ||
3398 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3399 | (lo.to(span), ExprKind::Box(e)) | ||
3400 | } | ||
3401 | token::Ident(..) if self.token.is_ident_named("not") => { | ||
3402 | // `not` is just an ordinary identifier in Rust-the-language, | ||
3403 | // but as `rustc`-the-compiler, we can issue clever diagnostics | ||
3404 | // for confused users who really want to say `!` | ||
3405 | let token_cannot_continue_expr = |t: &token::Token| match *t { | ||
3406 | // These tokens can start an expression after `!`, but | ||
3407 | // can't continue an expression after an ident | ||
3408 | token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), | ||
3409 | token::Literal(..) | token::Pound => true, | ||
3410 | token::Interpolated(ref nt) => match **nt { | ||
3411 | token::NtIdent(..) | token::NtExpr(..) | | ||
3412 | token::NtBlock(..) | token::NtPath(..) => true, | ||
3413 | _ => false, | ||
3414 | }, | ||
3415 | _ => false | ||
3416 | }; | ||
3417 | let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr); | ||
3418 | if cannot_continue_expr { | ||
3419 | self.bump(); | ||
3420 | // Emit the error ... | ||
3421 | let mut err = self.diagnostic() | ||
3422 | .struct_span_err(self.span, | ||
3423 | &format!("unexpected {} after identifier", | ||
3424 | self.this_token_descr())); | ||
3425 | // span the `not` plus trailing whitespace to avoid | ||
3426 | // trailing whitespace after the `!` in our suggestion | ||
3427 | let to_replace = self.sess.source_map() | ||
3428 | .span_until_non_whitespace(lo.to(self.span)); | ||
3429 | err.span_suggestion_short( | ||
3430 | to_replace, | ||
3431 | "use `!` to perform logical negation", | ||
3432 | "!".to_owned(), | ||
3433 | Applicability::MachineApplicable | ||
3434 | ); | ||
3435 | err.emit(); | ||
3436 | // —and recover! (just as if we were in the block | ||
3437 | // for the `token::Not` arm) | ||
3438 | let e = self.parse_prefix_expr(None); | ||
3439 | let (span, e) = self.interpolated_or_expr_span(e)?; | ||
3440 | (lo.to(span), self.mk_unary(UnOp::Not, e)) | ||
3441 | } else { | ||
3442 | return self.parse_dot_or_call_expr(Some(attrs)); | ||
3443 | } | ||
3444 | } | ||
3445 | _ => { return self.parse_dot_or_call_expr(Some(attrs)); } | ||
3446 | }; | ||
3447 | return Ok(self.mk_expr(lo.to(hi), ex, attrs)); | ||
3448 | } | ||
3449 | |||
3450 | /// Parses an associative expression. | ||
3451 | /// | ||
3452 | /// This parses an expression accounting for associativity and precedence of the operators in | ||
3453 | /// the expression. | ||
3454 | #[inline] | ||
3455 | fn parse_assoc_expr(&mut self, | ||
3456 | already_parsed_attrs: Option<ThinVec<Attribute>>) | ||
3457 | -> PResult<'a, P<Expr>> { | ||
3458 | self.parse_assoc_expr_with(0, already_parsed_attrs.into()) | ||
3459 | } | ||
3460 | |||
3461 | /// Parses an associative expression with operators of at least `min_prec` precedence. | ||
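/// Illustrative walk-through: for `1 + 2 * 3`, the outer call parses `1`, sees `+`, and
/// (being left-associative) recurses with `min_prec = precedence(+) + 1`; the inner call
/// accepts the higher-precedence `*`, returns `2 * 3`, and the result is `1 + (2 * 3)`.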
3462 | fn parse_assoc_expr_with(&mut self, | ||
3463 | min_prec: usize, | ||
3464 | lhs: LhsExpr) | ||
3465 | -> PResult<'a, P<Expr>> { | ||
3466 | let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs { | ||
3467 | expr | ||
3468 | } else { | ||
3469 | let attrs = match lhs { | ||
3470 | LhsExpr::AttributesParsed(attrs) => Some(attrs), | ||
3471 | _ => None, | ||
3472 | }; | ||
3473 | if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) { | ||
3474 | return self.parse_prefix_range_expr(attrs); | ||
3475 | } else { | ||
3476 | self.parse_prefix_expr(attrs)? | ||
3477 | } | ||
3478 | }; | ||
3479 | |||
3480 | if self.expr_is_complete(&lhs) { | ||
3481 | // Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071 | ||
3482 | return Ok(lhs); | ||
3483 | } | ||
3484 | self.expected_tokens.push(TokenType::Operator); | ||
3485 | while let Some(op) = AssocOp::from_token(&self.token) { | ||
3486 | |||
3487 | // Adjust the span for interpolated LHS to point to the `$lhs` token and not to what | ||
3488 | // it refers to. Interpolated identifiers are unwrapped early and never show up here | ||
3489 | // as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process | ||
3490 | // it as "interpolated", it doesn't change the answer for non-interpolated idents. | ||
3491 | let lhs_span = match (self.prev_token_kind, &lhs.node) { | ||
3492 | (PrevTokenKind::Interpolated, _) => self.prev_span, | ||
3493 | (PrevTokenKind::Ident, &ExprKind::Path(None, ref path)) | ||
3494 | if path.segments.len() == 1 => self.prev_span, | ||
3495 | _ => lhs.span, | ||
3496 | }; | ||
3497 | |||
3498 | let cur_op_span = self.span; | ||
3499 | let restrictions = if op.is_assign_like() { | ||
3500 | self.restrictions & Restrictions::NO_STRUCT_LITERAL | ||
3501 | } else { | ||
3502 | self.restrictions | ||
3503 | }; | ||
3504 | if op.precedence() < min_prec { | ||
3505 | break; | ||
3506 | } | ||
3507 | // Check for deprecated `...` syntax | ||
3508 | if self.token == token::DotDotDot && op == AssocOp::DotDotEq { | ||
3509 | self.err_dotdotdot_syntax(self.span); | ||
3510 | } | ||
3511 | |||
3512 | self.bump(); | ||
3513 | if op.is_comparison() { | ||
3514 | self.check_no_chained_comparison(&lhs, &op); | ||
3515 | } | ||
3516 | // Special cases: | ||
3517 | if op == AssocOp::As { | ||
3518 | lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?; | ||
3519 | continue | ||
3520 | } else if op == AssocOp::Colon { | ||
3521 | lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) { | ||
3522 | Ok(lhs) => lhs, | ||
3523 | Err(mut err) => { | ||
3524 | err.span_label(self.span, | ||
3525 | "expecting a type here because of type ascription"); | ||
3526 | let cm = self.sess.source_map(); | ||
3527 | let cur_pos = cm.lookup_char_pos(self.span.lo()); | ||
3528 | let op_pos = cm.lookup_char_pos(cur_op_span.hi()); | ||
3529 | if cur_pos.line != op_pos.line { | ||
3530 | err.span_suggestion( | ||
3531 | cur_op_span, | ||
3532 | "try using a semicolon", | ||
3533 | ";".to_string(), | ||
3534 | Applicability::MaybeIncorrect // speculative | ||
3535 | ); | ||
3536 | } | ||
3537 | return Err(err); | ||
3538 | } | ||
3539 | }; | ||
3540 | continue | ||
3541 | } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq { | ||
3542 | // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to | ||
3543 | // generalise it to the Fixity::None code. | ||
3544 | // | ||
3545 | // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=`. The other | ||
3546 | // two variants are handled with the `parse_prefix_range_expr` call above. | ||
3547 | let rhs = if self.is_at_start_of_range_notation_rhs() { | ||
3548 | Some(self.parse_assoc_expr_with(op.precedence() + 1, | ||
3549 | LhsExpr::NotYetParsed)?) | ||
3550 | } else { | ||
3551 | None | ||
3552 | }; | ||
3553 | let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs { | ||
3554 | x.span | ||
3555 | } else { | ||
3556 | cur_op_span | ||
3557 | }); | ||
3558 | let limits = if op == AssocOp::DotDot { | ||
3559 | RangeLimits::HalfOpen | ||
3560 | } else { | ||
3561 | RangeLimits::Closed | ||
3562 | }; | ||
3563 | |||
3564 | let r = self.mk_range(Some(lhs), rhs, limits)?; | ||
3565 | lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new()); | ||
3566 | break | ||
3567 | } | ||
3568 | |||
3569 | let rhs = match op.fixity() { | ||
3570 | Fixity::Right => self.with_res( | ||
3571 | restrictions - Restrictions::STMT_EXPR, | ||
3572 | |this| { | ||
3573 | this.parse_assoc_expr_with(op.precedence(), | ||
3574 | LhsExpr::NotYetParsed) | ||
3575 | }), | ||
3576 | Fixity::Left => self.with_res( | ||
3577 | restrictions - Restrictions::STMT_EXPR, | ||
3578 | |this| { | ||
3579 | this.parse_assoc_expr_with(op.precedence() + 1, | ||
3580 | LhsExpr::NotYetParsed) | ||
3581 | }), | ||
3582 | // We currently have no non-associative operators that are not handled above by | ||
3583 | // the special cases. The code is here only for future convenience. | ||
3584 | Fixity::None => self.with_res( | ||
3585 | restrictions - Restrictions::STMT_EXPR, | ||
3586 | |this| { | ||
3587 | this.parse_assoc_expr_with(op.precedence() + 1, | ||
3588 | LhsExpr::NotYetParsed) | ||
3589 | }), | ||
3590 | }?; | ||
3591 | |||
3592 | // Make sure that the span of the parent node is larger than the span of lhs and rhs, | ||
3593 | // including the attributes. | ||
3594 | let lhs_span = lhs | ||
3595 | .attrs | ||
3596 | .iter() | ||
3597 | .filter(|a| a.style == AttrStyle::Outer) | ||
3598 | .next() | ||
3599 | .map_or(lhs_span, |a| a.span); | ||
3600 | let span = lhs_span.to(rhs.span); | ||
3601 | lhs = match op { | ||
3602 | AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | | ||
3603 | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | | ||
3604 | AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight | | ||
3605 | AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual | | ||
3606 | AssocOp::Greater | AssocOp::GreaterEqual => { | ||
3607 | let ast_op = op.to_ast_binop().unwrap(); | ||
3608 | let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); | ||
3609 | self.mk_expr(span, binary, ThinVec::new()) | ||
3610 | } | ||
3611 | AssocOp::Assign => | ||
3612 | self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), | ||
3613 | AssocOp::ObsoleteInPlace => | ||
3614 | self.mk_expr(span, ExprKind::ObsoleteInPlace(lhs, rhs), ThinVec::new()), | ||
3615 | AssocOp::AssignOp(k) => { | ||
3616 | let aop = match k { | ||
3617 | token::Plus => BinOpKind::Add, | ||
3618 | token::Minus => BinOpKind::Sub, | ||
3619 | token::Star => BinOpKind::Mul, | ||
3620 | token::Slash => BinOpKind::Div, | ||
3621 | token::Percent => BinOpKind::Rem, | ||
3622 | token::Caret => BinOpKind::BitXor, | ||
3623 | token::And => BinOpKind::BitAnd, | ||
3624 | token::Or => BinOpKind::BitOr, | ||
3625 | token::Shl => BinOpKind::Shl, | ||
3626 | token::Shr => BinOpKind::Shr, | ||
3627 | }; | ||
3628 | let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs); | ||
3629 | self.mk_expr(span, aopexpr, ThinVec::new()) | ||
3630 | } | ||
3631 | AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => { | ||
3632 | self.bug("AssocOp should have been handled by special case") | ||
3633 | } | ||
3634 | }; | ||
3635 | |||
3636 | if op.fixity() == Fixity::None { break } | ||
3637 | } | ||
3638 | Ok(lhs) | ||
3639 | } | ||
3640 | |||
3641 | fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span, | ||
3642 | expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind) | ||
3643 | -> PResult<'a, P<Expr>> { | ||
3644 | let mk_expr = |this: &mut Self, rhs: P<Ty>| { | ||
3645 | this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new()) | ||
3646 | }; | ||
3647 | |||
3648 | // Save the state of the parser before parsing the type normally, in case there is a | ||
3649 | // LessThan comparison after this cast. | ||
3650 | let parser_snapshot_before_type = self.clone(); | ||
3651 | match self.parse_ty_no_plus() { | ||
3652 | Ok(rhs) => { | ||
3653 | Ok(mk_expr(self, rhs)) | ||
3654 | } | ||
3655 | Err(mut type_err) => { | ||
3656 | // Rewind to before attempting to parse the type with generics, to recover | ||
3657 | // from situations like `x as usize < y` in which we first tried to parse | ||
3658 | // `usize < y` as a type with generic arguments. | ||
3659 | let parser_snapshot_after_type = self.clone(); | ||
3660 | mem::replace(self, parser_snapshot_before_type); | ||
3661 | |||
3662 | match self.parse_path(PathStyle::Expr) { | ||
3663 | Ok(path) => { | ||
3664 | let (op_noun, op_verb) = match self.token { | ||
3665 | token::Lt => ("comparison", "comparing"), | ||
3666 | token::BinOp(token::Shl) => ("shift", "shifting"), | ||
3667 | _ => { | ||
3668 | // We can end up here even without `<` being the next token, for | ||
3669 | // example because `parse_ty_no_plus` returns `Err` on keywords, | ||
3670 | // but `parse_path` returns `Ok` on them due to error recovery. | ||
3671 | // Return original error and parser state. | ||
3672 | mem::replace(self, parser_snapshot_after_type); | ||
3673 | return Err(type_err); | ||
3674 | } | ||
3675 | }; | ||
3676 | |||
3677 | // Successfully parsed the type path leaving a `<` yet to parse. | ||
3678 | type_err.cancel(); | ||
3679 | |||
3680 | // Report non-fatal diagnostics, keep `x as usize` as an expression | ||
3681 | // in AST and continue parsing. | ||
3682 | let msg = format!("`<` is interpreted as a start of generic \ | ||
3683 | arguments for `{}`, not a {}", path, op_noun); | ||
3684 | let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg); | ||
3685 | err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span), | ||
3686 | "interpreted as generic arguments"); | ||
3687 | err.span_label(self.span, format!("not interpreted as {}", op_noun)); | ||
3688 | |||
3689 | let expr = mk_expr(self, P(Ty { | ||
3690 | span: path.span, | ||
3691 | node: TyKind::Path(None, path), | ||
3692 | id: ast::DUMMY_NODE_ID | ||
3693 | })); | ||
3694 | |||
3695 | let expr_str = self.sess.source_map().span_to_snippet(expr.span) | ||
3696 | .unwrap_or_else(|_| pprust::expr_to_string(&expr)); | ||
3697 | err.span_suggestion( | ||
3698 | expr.span, | ||
3699 | &format!("try {} the cast value", op_verb), | ||
3700 | format!("({})", expr_str), | ||
3701 | Applicability::MachineApplicable | ||
3702 | ); | ||
3703 | err.emit(); | ||
3704 | |||
3705 | Ok(expr) | ||
3706 | } | ||
3707 | Err(mut path_err) => { | ||
3708 | // Couldn't parse as a path, return original error and parser state. | ||
3709 | path_err.cancel(); | ||
3710 | mem::replace(self, parser_snapshot_after_type); | ||
3711 | Err(type_err) | ||
3712 | } | ||
3713 | } | ||
3714 | } | ||
3715 | } | ||
3716 | } | ||
3717 | |||
3718 | /// Produce an error if comparison operators are chained (RFC #558). | ||
3719 | /// We only need to check lhs, not rhs, because all comparison ops | ||
3720 | /// have the same precedence and are left-associative. | ||
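/// e.g. `a < b < c` parses as `(a < b) < c` and is reported here; the likely intent is
/// either `a < b && b < c` or a generic path such as `f::<u32>(x)`, which the help
/// messages below suggest.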
3721 | fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) { | ||
3722 | debug_assert!(outer_op.is_comparison(), | ||
3723 | "check_no_chained_comparison: {:?} is not comparison", | ||
3724 | outer_op); | ||
3725 | match lhs.node { | ||
3726 | ExprKind::Binary(op, _, _) if op.node.is_comparison() => { | ||
3727 | // respan to include both operators | ||
3728 | let op_span = op.span.to(self.span); | ||
3729 | let mut err = self.diagnostic().struct_span_err(op_span, | ||
3730 | "chained comparison operators require parentheses"); | ||
3731 | if op.node == BinOpKind::Lt && | ||
3732 | *outer_op == AssocOp::Less || // Include `<` to provide this recommendation | ||
3733 | *outer_op == AssocOp::Greater // even in a case like the following: | ||
3734 | { // Foo<Bar<Baz<Qux, ()>>> | ||
3735 | err.help( | ||
3736 | "use `::<...>` instead of `<...>` if you meant to specify type arguments"); | ||
3737 | err.help("or use `(...)` if you meant to specify fn arguments"); | ||
3738 | } | ||
3739 | err.emit(); | ||
3740 | } | ||
3741 | _ => {} | ||
3742 | } | ||
3743 | } | ||
3744 | |||
3745 | /// Parses the prefix forms of range notation: `..expr`, `..`, `..=expr`. | ||
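/// Illustrative mapping (mirroring the `limits` computation below): `..hi` and `..`
/// yield `RangeLimits::HalfOpen`, while `..=hi` yields `RangeLimits::Closed`; the
/// deprecated `...` is reported via `err_dotdotdot_syntax` but otherwise treated like `..=`.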
3746 | fn parse_prefix_range_expr(&mut self, | ||
3747 | already_parsed_attrs: Option<ThinVec<Attribute>>) | ||
3748 | -> PResult<'a, P<Expr>> { | ||
3749 | // Check for deprecated `...` syntax | ||
3750 | if self.token == token::DotDotDot { | ||
3751 | self.err_dotdotdot_syntax(self.span); | ||
3752 | } | ||
3753 | |||
3754 | debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token), | ||
3755 | "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq", | ||
3756 | self.token); | ||
3757 | let tok = self.token.clone(); | ||
3758 | let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; | ||
3759 | let lo = self.span; | ||
3760 | let mut hi = self.span; | ||
3761 | self.bump(); | ||
3762 | let opt_end = if self.is_at_start_of_range_notation_rhs() { | ||
3763 | // RHS must be parsed with more associativity than the dots. | ||
3764 | let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1; | ||
3765 | Some(self.parse_assoc_expr_with(next_prec, | ||
3766 | LhsExpr::NotYetParsed) | ||
3767 | .map(|x|{ | ||
3768 | hi = x.span; | ||
3769 | x | ||
3770 | })?) | ||
3771 | } else { | ||
3772 | None | ||
3773 | }; | ||
3774 | let limits = if tok == token::DotDot { | ||
3775 | RangeLimits::HalfOpen | ||
3776 | } else { | ||
3777 | RangeLimits::Closed | ||
3778 | }; | ||
3779 | |||
3780 | let r = self.mk_range(None, opt_end, limits)?; | ||
3781 | Ok(self.mk_expr(lo.to(hi), r, attrs)) | ||
3782 | } | ||
3783 | |||
3784 | fn is_at_start_of_range_notation_rhs(&self) -> bool { | ||
3785 | if self.token.can_begin_expr() { | ||
3786 | // parse `for i in 1.. { }` as an infinite loop, not as `for i in (1..{})`. | ||
3787 | if self.token == token::OpenDelim(token::Brace) { | ||
3788 | return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); | ||
3789 | } | ||
3790 | true | ||
3791 | } else { | ||
3792 | false | ||
3793 | } | ||
3794 | } | ||
3795 | |||
3796 | /// Parses an `if` or `if let` expression (`if` token already eaten). | ||
3797 | fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { | ||
3798 | if self.check_keyword(keywords::Let) { | ||
3799 | return self.parse_if_let_expr(attrs); | ||
3800 | } | ||
3801 | let lo = self.prev_span; | ||
3802 | let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; | ||
3803 | |||
3804 | // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then | ||
3805 | // verify that the last statement is either an implicit return (no `;`) or an explicit | ||
3806 | // return. This won't catch blocks with an explicit `return`, but that would be caught by | ||
3807 | // the dead code lint. | ||
3808 | if self.eat_keyword(keywords::Else) || !cond.returns() { | ||
3809 | let sp = self.sess.source_map().next_point(lo); | ||
3810 | let mut err = self.diagnostic() | ||
3811 | .struct_span_err(sp, "missing condition for `if` statement"); | ||
3812 | err.span_label(sp, "expected if condition here"); | ||
3813 | return Err(err) | ||
3814 | } | ||
3815 | let not_block = self.token != token::OpenDelim(token::Brace); | ||
3816 | let thn = self.parse_block().map_err(|mut err| { | ||
3817 | if not_block { | ||
3818 | err.span_label(lo, "this `if` statement has a condition, but no block"); | ||
3819 | } | ||
3820 | err | ||
3821 | })?; | ||
3822 | let mut els: Option<P<Expr>> = None; | ||
3823 | let mut hi = thn.span; | ||
3824 | if self.eat_keyword(keywords::Else) { | ||
3825 | let elexpr = self.parse_else_expr()?; | ||
3826 | hi = elexpr.span; | ||
3827 | els = Some(elexpr); | ||
3828 | } | ||
3829 | Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs)) | ||
3830 | } | ||
3831 | |||
3832 | /// Parses an `if let` expression (`if` token already eaten). | ||
3833 | fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>) | ||
3834 | -> PResult<'a, P<Expr>> { | ||
3835 | let lo = self.prev_span; | ||
3836 | self.expect_keyword(keywords::Let)?; | ||
3837 | let pats = self.parse_pats()?; | ||
3838 | self.expect(&token::Eq)?; | ||
3839 | let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; | ||
3840 | let thn = self.parse_block()?; | ||
3841 | let (hi, els) = if self.eat_keyword(keywords::Else) { | ||
3842 | let expr = self.parse_else_expr()?; | ||
3843 | (expr.span, Some(expr)) | ||
3844 | } else { | ||
3845 | (thn.span, None) | ||
3846 | }; | ||
3847 | Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs)) | ||
3848 | } | ||
3849 | |||
3850 | /// Parses `move |args| expr`. | ||
3851 | fn parse_lambda_expr(&mut self, | ||
3852 | attrs: ThinVec<Attribute>) | ||
3853 | -> PResult<'a, P<Expr>> | ||
3854 | { | ||
3855 | let lo = self.span; | ||
3856 | let movability = if self.eat_keyword(keywords::Static) { | ||
3857 | Movability::Static | ||
3858 | } else { | ||
3859 | Movability::Movable | ||
3860 | }; | ||
3861 | let asyncness = if self.span.rust_2018() { | ||
3862 | self.parse_asyncness() | ||
3863 | } else { | ||
3864 | IsAsync::NotAsync | ||
3865 | }; | ||
3866 | let capture_clause = if self.eat_keyword(keywords::Move) { | ||
3867 | CaptureBy::Value | ||
3868 | } else { | ||
3869 | CaptureBy::Ref | ||
3870 | }; | ||
3871 | let decl = self.parse_fn_block_decl()?; | ||
3872 | let decl_hi = self.prev_span; | ||
3873 | let body = match decl.output { | ||
3874 | FunctionRetTy::Default(_) => { | ||
3875 | let restrictions = self.restrictions - Restrictions::STMT_EXPR; | ||
3876 | self.parse_expr_res(restrictions, None)? | ||
3877 | }, | ||
3878 | _ => { | ||
3879 | // If an explicit return type is given, require a | ||
3880 | // block to appear (RFC 968). | ||
3881 | let body_lo = self.span; | ||
3882 | self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())? | ||
3883 | } | ||
3884 | }; | ||
3885 | |||
3886 | Ok(self.mk_expr( | ||
3887 | lo.to(body.span), | ||
3888 | ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)), | ||
3889 | attrs)) | ||
3890 | } | ||
3891 | |||
3892 | // `else` token already eaten | ||
3893 | fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> { | ||
3894 | if self.eat_keyword(keywords::If) { | ||
3895 | return self.parse_if_expr(ThinVec::new()); | ||
3896 | } else { | ||
3897 | let blk = self.parse_block()?; | ||
3898 | return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new())); | ||
3899 | } | ||
3900 | } | ||
3901 | |||
3902 | /// Parses a `for ... in` expression (`for` token already eaten). | ||
3903 | fn parse_for_expr(&mut self, opt_label: Option<Label>, | ||
3904 | span_lo: Span, | ||
3905 | mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { | ||
3906 | // Parse: `for <src_pat> in <src_expr> <src_loop_block>` | ||
3907 | |||
3908 | let pat = self.parse_top_level_pat()?; | ||
3909 | if !self.eat_keyword(keywords::In) { | ||
3910 | let in_span = self.prev_span.between(self.span); | ||
3911 | let mut err = self.sess.span_diagnostic | ||
3912 | .struct_span_err(in_span, "missing `in` in `for` loop"); | ||
3913 | err.span_suggestion_short( | ||
3914 | in_span, "try adding `in` here", " in ".into(), | ||
3915 | // has been misleading, at least in the past (closed Issue #48492) | ||
3916 | Applicability::MaybeIncorrect | ||
3917 | ); | ||
3918 | err.emit(); | ||
3919 | } | ||
3920 | let in_span = self.prev_span; | ||
3921 | if self.eat_keyword(keywords::In) { | ||
3922 | // a common typo: `for _ in in bar {}` | ||
3923 | let mut err = self.sess.span_diagnostic.struct_span_err( | ||
3924 | self.prev_span, | ||
3925 | "expected iterable, found keyword `in`", | ||
3926 | ); | ||
3927 | err.span_suggestion_short( | ||
3928 | in_span.until(self.prev_span), | ||
3929 | "remove the duplicated `in`", | ||
3930 | String::new(), | ||
3931 | Applicability::MachineApplicable, | ||
3932 | ); | ||
3933 | err.note("if you meant to use emplacement syntax, it is obsolete (for now, anyway)"); | ||
3934 | err.note("for more information on the status of emplacement syntax, see <\ | ||
3935 | https://github.com/rust-lang/rust/issues/27779#issuecomment-378416911>"); | ||
3936 | err.emit(); | ||
3937 | } | ||
3938 | let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; | ||
3939 | let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; | ||
3940 | attrs.extend(iattrs); | ||
3941 | |||
3942 | let hi = self.prev_span; | ||
3943 | Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs)) | ||
3944 | } | ||
3945 | |||
3946 | /// Parses a `while` or `while let` expression (`while` token already eaten). | ||
3947 | fn parse_while_expr(&mut self, opt_label: Option<Label>, | ||
3948 | span_lo: Span, | ||
3949 | mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { | ||
3950 | if self.token.is_keyword(keywords::Let) { | ||
3951 | return self.parse_while_let_expr(opt_label, span_lo, attrs); | ||
3952 | } | ||
3953 | let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; | ||
3954 | let (iattrs, body) = self.parse_inner_attrs_and_block()?; | ||
3955 | attrs.extend(iattrs); | ||
3956 | let span = span_lo.to(body.span); | ||
3957 | return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs)); | ||
3958 | } | ||
3959 | |||
3960 | /// Parses a `while let` expression (`while` token already eaten). | ||
3961 | fn parse_while_let_expr(&mut self, opt_label: Option<Label>, | ||
3962 | span_lo: Span, | ||
3963 | mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { | ||
3964 | self.expect_keyword(keywords::Let)?; | ||
3965 | let pats = self.parse_pats()?; | ||
3966 | self.expect(&token::Eq)?; | ||
3967 | let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; | ||
3968 | let (iattrs, body) = self.parse_inner_attrs_and_block()?; | ||
3969 | attrs.extend(iattrs); | ||
3970 | let span = span_lo.to(body.span); | ||
3971 | return Ok(self.mk_expr(span, ExprKind::WhileLet(pats, expr, body, opt_label), attrs)); | ||
3972 | } | ||
3973 | |||
3974 | // parse `loop {...}`, `loop` token already eaten | ||
3975 | fn parse_loop_expr(&mut self, opt_label: Option<Label>, | ||
3976 | span_lo: Span, | ||
3977 | mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { | ||
3978 | let (iattrs, body) = self.parse_inner_attrs_and_block()?; | ||
3979 | attrs.extend(iattrs); | ||
3980 | let span = span_lo.to(body.span); | ||
3981 | Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs)) | ||
3982 | } | ||
3983 | |||
3984 | /// Parses an `async {...}` or `async move {...}` block expression. | ||
3985 | pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) | ||
3986 | -> PResult<'a, P<Expr>> | ||
3987 | { | ||
3988 | let span_lo = self.span; | ||
3989 | self.expect_keyword(keywords::Async)?; | ||
3990 | let capture_clause = if self.eat_keyword(keywords::Move) { | ||
3991 | CaptureBy::Value | ||
3992 | } else { | ||
3993 | CaptureBy::Ref | ||
3994 | }; | ||
3995 | let (iattrs, body) = self.parse_inner_attrs_and_block()?; | ||
3996 | attrs.extend(iattrs); | ||
3997 | Ok(self.mk_expr( | ||
3998 | span_lo.to(body.span), | ||
3999 | ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs)) | ||
4000 | } | ||
4001 | |||
4002 | /// Parses a `try {...}` expression (`try` token already eaten). | ||
4003 | fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>) | ||
4004 | -> PResult<'a, P<Expr>> | ||
4005 | { | ||
4006 | let (iattrs, body) = self.parse_inner_attrs_and_block()?; | ||
4007 | attrs.extend(iattrs); | ||
4008 | Ok(self.mk_expr(span_lo.to(body.span), ExprKind::TryBlock(body), attrs)) | ||
4009 | } | ||
4010 | |||
4011 | // `match` token already eaten | ||
4012 | fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { | ||
4013 | let match_span = self.prev_span; | ||
4014 | let lo = self.prev_span; | ||
4015 | let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, | ||
4016 | None)?; | ||
4017 | if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { | ||
4018 | if self.token == token::Token::Semi { | ||
4019 | e.span_suggestion_short( | ||
4020 | match_span, | ||
4021 | "try removing this `match`", | ||
4022 | String::new(), | ||
4023 | Applicability::MaybeIncorrect // speculative | ||
4024 | ); | ||
4025 | } | ||
4026 | return Err(e) | ||
4027 | } | ||
4028 | attrs.extend(self.parse_inner_attributes()?); | ||
4029 | |||
4030 | let mut arms: Vec<Arm> = Vec::new(); | ||
4031 | while self.token != token::CloseDelim(token::Brace) { | ||
4032 | match self.parse_arm() { | ||
4033 | Ok(arm) => arms.push(arm), | ||
4034 | Err(mut e) => { | ||
4035 | // Recover by skipping to the end of the block. | ||
4036 | e.emit(); | ||
4037 | self.recover_stmt(); | ||
4038 | let span = lo.to(self.span); | ||
4039 | if self.token == token::CloseDelim(token::Brace) { | ||
4040 | self.bump(); | ||
4041 | } | ||
4042 | return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs)); | ||
4043 | } | ||
4044 | } | ||
4045 | } | ||
4046 | let hi = self.span; | ||
4047 | self.bump(); | ||
4048 | return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); | ||
4049 | } | ||
4050 | |||
4051 | crate fn parse_arm(&mut self) -> PResult<'a, Arm> { | ||
4052 | maybe_whole!(self, NtArm, |x| x); | ||
4053 | |||
4054 | let attrs = self.parse_outer_attributes()?; | ||
4055 | let pats = self.parse_pats()?; | ||
4056 | let guard = if self.eat_keyword(keywords::If) { | ||
4057 | Some(Guard::If(self.parse_expr()?)) | ||
4058 | } else { | ||
4059 | None | ||
4060 | }; | ||
4061 | let arrow_span = self.span; | ||
4062 | self.expect(&token::FatArrow)?; | ||
4063 | let arm_start_span = self.span; | ||
4064 | |||
4065 | let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None) | ||
4066 | .map_err(|mut err| { | ||
4067 | err.span_label(arrow_span, "while parsing the `match` arm starting here"); | ||
4068 | err | ||
4069 | })?; | ||
4070 | |||
4071 | let require_comma = classify::expr_requires_semi_to_be_stmt(&expr) | ||
4072 | && self.token != token::CloseDelim(token::Brace); | ||
4073 | |||
4074 | if require_comma { | ||
4075 | let cm = self.sess.source_map(); | ||
4076 | self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) | ||
4077 | .map_err(|mut err| { | ||
4078 | match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) { | ||
4079 | (Ok(ref expr_lines), Ok(ref arm_start_lines)) | ||
4080 | if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col | ||
4081 | && expr_lines.lines.len() == 2 | ||
4082 | && self.token == token::FatArrow => { | ||
4083 | // We check whether there's any trailing code in the parse span; | ||
4084 | // if there isn't, we very likely have the following: | ||
4085 | // | ||
4086 | // X | &Y => "y" | ||
4087 | // | -- - missing comma | ||
4088 | // | | | ||
4089 | // | arrow_span | ||
4090 | // X | &X => "x" | ||
4091 | // | - ^^ self.span | ||
4092 | // | | | ||
4093 | // | parsed until here as `"y" & X` | ||
4094 | err.span_suggestion_short( | ||
4095 | cm.next_point(arm_start_span), | ||
4096 | "missing a comma here to end this `match` arm", | ||
4097 | ",".to_owned(), | ||
4098 | Applicability::MachineApplicable | ||
4099 | ); | ||
4100 | } | ||
4101 | _ => { | ||
4102 | err.span_label(arrow_span, | ||
4103 | "while parsing the `match` arm starting here"); | ||
4104 | } | ||
4105 | } | ||
4106 | err | ||
4107 | })?; | ||
4108 | } else { | ||
4109 | self.eat(&token::Comma); | ||
4110 | } | ||
4111 | |||
4112 | Ok(ast::Arm { | ||
4113 | attrs, | ||
4114 | pats, | ||
4115 | guard, | ||
4116 | body: expr, | ||
4117 | }) | ||
4118 | } | ||
4119 | |||
4120 | /// Parses an expression. | ||
4121 | #[inline] | ||
4122 | pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> { | ||
4123 | self.parse_expr_res(Restrictions::empty(), None) | ||
4124 | } | ||
4125 | |||
4126 | /// Evaluates the closure with restrictions in place. | ||
4127 | /// | ||
4128 | /// After the closure is evaluated, restrictions are reset. | ||
4129 | fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T | ||
4130 | where F: FnOnce(&mut Self) -> T | ||
4131 | { | ||
4132 | let old = self.restrictions; | ||
4133 | self.restrictions = r; | ||
4134 | let r = f(self); | ||
4135 | self.restrictions = old; | ||
4136 | return r; | ||
4137 | |||
4138 | } | ||
4139 | |||
4140 | /// Parses an expression, subject to the given restrictions. | ||
4141 | #[inline] | ||
4142 | fn parse_expr_res(&mut self, r: Restrictions, | ||
4143 | already_parsed_attrs: Option<ThinVec<Attribute>>) | ||
4144 | -> PResult<'a, P<Expr>> { | ||
4145 | self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs)) | ||
4146 | } | ||
4147 | |||
4148 | /// Parses the RHS of a local variable declaration (e.g., '= 14;'). | ||
4149 | fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> { | ||
4150 | if self.eat(&token::Eq) { | ||
4151 | Ok(Some(self.parse_expr()?)) | ||
4152 | } else if skip_eq { | ||
4153 | Ok(Some(self.parse_expr()?)) | ||
4154 | } else { | ||
4155 | Ok(None) | ||
4156 | } | ||
4157 | } | ||
4158 | |||
4159 | /// Parses one or more patterns, separated by `|`. | ||
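/// e.g. the alternatives in a `match` arm such as `Some(0) | None`, or the pattern of an
/// `if let`/`while let`; a leading `|` (as in `| A | B`) is also accepted.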
4160 | fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> { | ||
4161 | // Allow a '|' before the pats (RFC 1925 + RFC 2530) | ||
4162 | self.eat(&token::BinOp(token::Or)); | ||
4163 | |||
4164 | let mut pats = Vec::new(); | ||
4165 | loop { | ||
4166 | pats.push(self.parse_top_level_pat()?); | ||
4167 | |||
4168 | if self.token == token::OrOr { | ||
4169 | let mut err = self.struct_span_err(self.span, | ||
4170 | "unexpected token `||` after pattern"); | ||
4171 | err.span_suggestion( | ||
4172 | self.span, | ||
4173 | "use a single `|` to specify multiple patterns", | ||
4174 | "|".to_owned(), | ||
4175 | Applicability::MachineApplicable | ||
4176 | ); | ||
4177 | err.emit(); | ||
4178 | self.bump(); | ||
4179 | } else if self.eat(&token::BinOp(token::Or)) { | ||
4180 | // This is a No-op. Continue the loop to parse the next | ||
4181 | // pattern. | ||
4182 | } else { | ||
4183 | return Ok(pats); | ||
4184 | } | ||
4185 | }; | ||
4186 | } | ||
4187 | |||
4188 | // Parses a parenthesized list of patterns like | ||
4189 | // `()`, `(p)`, `(p,)`, `(p, q)`, or `(p, .., q)`. Returns: | ||
4190 | // - a vector of the patterns that were parsed | ||
4191 | // - an option indicating the index of the `..` element | ||
4192 | // - a boolean indicating whether a trailing comma was present. | ||
4193 | // Trailing commas are significant because (p) and (p,) are different patterns. | ||
4194 | fn parse_parenthesized_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> { | ||
4195 | self.expect(&token::OpenDelim(token::Paren))?; | ||
4196 | let result = self.parse_pat_list()?; | ||
4197 | self.expect(&token::CloseDelim(token::Paren))?; | ||
4198 | Ok(result) | ||
4199 | } | ||
4200 | |||
4201 | fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> { | ||
4202 | let mut fields = Vec::new(); | ||
4203 | let mut ddpos = None; | ||
4204 | let mut trailing_comma = false; | ||
4205 | loop { | ||
4206 | if self.eat(&token::DotDot) { | ||
4207 | if ddpos.is_none() { | ||
4208 | ddpos = Some(fields.len()); | ||
4209 | } else { | ||
4210 | // Emit a friendly error, ignore `..` and continue parsing | ||
4211 | self.struct_span_err( | ||
4212 | self.prev_span, | ||
4213 | "`..` can only be used once per tuple or tuple struct pattern", | ||
4214 | ) | ||
4215 | .span_label(self.prev_span, "can only be used once per pattern") | ||
4216 | .emit(); | ||
4217 | } | ||
4218 | } else if !self.check(&token::CloseDelim(token::Paren)) { | ||
4219 | fields.push(self.parse_pat(None)?); | ||
4220 | } else { | ||
4221 | break | ||
4222 | } | ||
4223 | |||
4224 | trailing_comma = self.eat(&token::Comma); | ||
4225 | if !trailing_comma { | ||
4226 | break | ||
4227 | } | ||
4228 | } | ||
4229 | |||
4230 | if ddpos == Some(fields.len()) && trailing_comma { | ||
4231 | // `..` needs to be followed by `)` or `, pat`, `..,)` is disallowed. | ||
4232 | let msg = "trailing comma is not permitted after `..`"; | ||
4233 | self.struct_span_err(self.prev_span, msg) | ||
4234 | .span_label(self.prev_span, msg) | ||
4235 | .emit(); | ||
4236 | } | ||
4237 | |||
4238 | Ok((fields, ddpos, trailing_comma)) | ||
4239 | } | ||
4240 | |||
4241 | fn parse_pat_vec_elements( | ||
4242 | &mut self, | ||
4243 | ) -> PResult<'a, (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> { | ||
4244 | let mut before = Vec::new(); | ||
4245 | let mut slice = None; | ||
4246 | let mut after = Vec::new(); | ||
4247 | let mut first = true; | ||
4248 | let mut before_slice = true; | ||
4249 | |||
4250 | while self.token != token::CloseDelim(token::Bracket) { | ||
4251 | if first { | ||
4252 | first = false; | ||
4253 | } else { | ||
4254 | self.expect(&token::Comma)?; | ||
4255 | |||
4256 | if self.token == token::CloseDelim(token::Bracket) | ||
4257 | && (before_slice || !after.is_empty()) { | ||
4258 | break | ||
4259 | } | ||
4260 | } | ||
4261 | |||
4262 | if before_slice { | ||
4263 | if self.eat(&token::DotDot) { | ||
4264 | |||
4265 | if self.check(&token::Comma) || | ||
4266 | self.check(&token::CloseDelim(token::Bracket)) { | ||
4267 | slice = Some(P(Pat { | ||
4268 | id: ast::DUMMY_NODE_ID, | ||
4269 | node: PatKind::Wild, | ||
4270 | span: self.prev_span, | ||
4271 | })); | ||
4272 | before_slice = false; | ||
4273 | } | ||
4274 | continue | ||
4275 | } | ||
4276 | } | ||
4277 | |||
4278 | let subpat = self.parse_pat(None)?; | ||
4279 | if before_slice && self.eat(&token::DotDot) { | ||
4280 | slice = Some(subpat); | ||
4281 | before_slice = false; | ||
4282 | } else if before_slice { | ||
4283 | before.push(subpat); | ||
4284 | } else { | ||
4285 | after.push(subpat); | ||
4286 | } | ||
4287 | } | ||
4288 | |||
4289 | Ok((before, slice, after)) | ||
4290 | } | ||
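For reference, the slice-pattern shapes this routine splits into `before`, `slice`, and `after`. The bare `subpat..` subslice form handled here is the pre-stabilization syntax; stable Rust spells it `subpat @ ..`, which is what this sketch uses:

    fn main() {
        let xs = [1, 2, 3, 4, 5];
        // `before` = [first], `slice` = a wildcard, `after` = [last]
        let [first, .., last] = xs;
        assert_eq!((first, last), (1, 5));
        // Stable spelling of a named subslice (this old parser accepted `middle..`):
        let [head, middle @ .., tail] = xs;
        assert_eq!((head, middle.len(), tail), (1, 3, 5));
    }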
4291 | |||
4292 | fn parse_pat_field( | ||
4293 | &mut self, | ||
4294 | lo: Span, | ||
4295 | attrs: Vec<Attribute> | ||
4296 | ) -> PResult<'a, source_map::Spanned<ast::FieldPat>> { | ||
4297 | // Check whether a colon exists one token ahead; if so, we're parsing a field name. | ||
4298 | let hi; | ||
4299 | let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { | ||
4300 | // Parsing a pattern of the form "fieldname: pat" | ||
4301 | let fieldname = self.parse_field_name()?; | ||
4302 | self.bump(); | ||
4303 | let pat = self.parse_pat(None)?; | ||
4304 | hi = pat.span; | ||
4305 | (pat, fieldname, false) | ||
4306 | } else { | ||
4307 | // Parsing a pattern of the form "(box) (ref) (mut) fieldname" | ||
4308 | let is_box = self.eat_keyword(keywords::Box); | ||
4309 | let boxed_span = self.span; | ||
4310 | let is_ref = self.eat_keyword(keywords::Ref); | ||
4311 | let is_mut = self.eat_keyword(keywords::Mut); | ||
4312 | let fieldname = self.parse_ident()?; | ||
4313 | hi = self.prev_span; | ||
4314 | |||
4315 | let bind_type = match (is_ref, is_mut) { | ||
4316 | (true, true) => BindingMode::ByRef(Mutability::Mutable), | ||
4317 | (true, false) => BindingMode::ByRef(Mutability::Immutable), | ||
4318 | (false, true) => BindingMode::ByValue(Mutability::Mutable), | ||
4319 | (false, false) => BindingMode::ByValue(Mutability::Immutable), | ||
4320 | }; | ||
4321 | let fieldpat = P(Pat { | ||
4322 | id: ast::DUMMY_NODE_ID, | ||
4323 | node: PatKind::Ident(bind_type, fieldname, None), | ||
4324 | span: boxed_span.to(hi), | ||
4325 | }); | ||
4326 | |||
4327 | let subpat = if is_box { | ||
4328 | P(Pat { | ||
4329 | id: ast::DUMMY_NODE_ID, | ||
4330 | node: PatKind::Box(fieldpat), | ||
4331 | span: lo.to(hi), | ||
4332 | }) | ||
4333 | } else { | ||
4334 | fieldpat | ||
4335 | }; | ||
4336 | (subpat, fieldname, true) | ||
4337 | }; | ||
4338 | |||
4339 | Ok(source_map::Spanned { | ||
4340 | span: lo.to(hi), | ||
4341 | node: ast::FieldPat { | ||
4342 | ident: fieldname, | ||
4343 | pat: subpat, | ||
4344 | is_shorthand, | ||
4345 | attrs: attrs.into(), | ||
4346 | } | ||
4347 | }) | ||
4348 | } | ||
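The two field-pattern shapes handled above (`fieldname: pat` and the shorthand with optional `box`/`ref`/`mut`), shown in user syntax; a minimal illustration, not part of the parser:

    struct Point { x: i32, y: i32 }

    fn main() {
        let p = Point { x: 1, y: 2 };
        // `x: a` is the explicit "fieldname: pat" form; `ref y` is the shorthand form.
        let Point { x: a, ref y } = p;
        assert_eq!((a, *y), (1, 2));
    }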
4349 | |||
4350 | /// Parses the fields of a struct-like pattern. | ||
4351 | fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> { | ||
4352 | let mut fields = Vec::new(); | ||
4353 | let mut etc = false; | ||
4354 | let mut ate_comma = true; | ||
4355 | let mut delayed_err: Option<DiagnosticBuilder<'a>> = None; | ||
4356 | let mut etc_span = None; | ||
4357 | |||
4358 | while self.token != token::CloseDelim(token::Brace) { | ||
4359 | let attrs = self.parse_outer_attributes()?; | ||
4360 | let lo = self.span; | ||
4361 | |||
4362 | // Check that a comma comes after every field. | ||
4363 | if !ate_comma { | ||
4364 | let err = self.struct_span_err(self.prev_span, "expected `,`"); | ||
4365 | if let Some(mut delayed) = delayed_err { | ||
4366 | delayed.emit(); | ||
4367 | } | ||
4368 | return Err(err); | ||
4369 | } | ||
4370 | ate_comma = false; | ||
4371 | |||
4372 | if self.check(&token::DotDot) || self.token == token::DotDotDot { | ||
4373 | etc = true; | ||
4374 | let mut etc_sp = self.span; | ||
4375 | |||
4376 | if self.token == token::DotDotDot { // Issue #46718 | ||
4377 | // Accept `...` as if it were `..` to avoid further errors | ||
4378 | let mut err = self.struct_span_err(self.span, | ||
4379 | "expected field pattern, found `...`"); | ||
4380 | err.span_suggestion( | ||
4381 | self.span, | ||
4382 | "to omit remaining fields, use one fewer `.`", | ||
4383 | "..".to_owned(), | ||
4384 | Applicability::MachineApplicable | ||
4385 | ); | ||
4386 | err.emit(); | ||
4387 | } | ||
4388 | self.bump(); // `..` or `...` | ||
4389 | |||
4390 | if self.token == token::CloseDelim(token::Brace) { | ||
4391 | etc_span = Some(etc_sp); | ||
4392 | break; | ||
4393 | } | ||
4394 | let token_str = self.this_token_descr(); | ||
4395 | let mut err = self.fatal(&format!("expected `}}`, found {}", token_str)); | ||
4396 | |||
4397 | err.span_label(self.span, "expected `}`"); | ||
4398 | let mut comma_sp = None; | ||
4399 | if self.token == token::Comma { // Issue #49257 | ||
4400 | etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span)); | ||
4401 | err.span_label(etc_sp, | ||
4402 | "`..` must be at the end and cannot have a trailing comma"); | ||
4403 | comma_sp = Some(self.span); | ||
4404 | self.bump(); | ||
4405 | ate_comma = true; | ||
4406 | } | ||
4407 | |||
4408 | etc_span = Some(etc_sp.until(self.span)); | ||
4409 | if self.token == token::CloseDelim(token::Brace) { | ||
4410 | // If the struct looks otherwise well formed, recover and continue. | ||
4411 | if let Some(sp) = comma_sp { | ||
4412 | err.span_suggestion_short( | ||
4413 | sp, | ||
4414 | "remove this comma", | ||
4415 | String::new(), | ||
4416 | Applicability::MachineApplicable, | ||
4417 | ); | ||
4418 | } | ||
4419 | err.emit(); | ||
4420 | break; | ||
4421 | } else if self.token.is_ident() && ate_comma { | ||
4422 | // Accept fields coming after `..,`. | ||
4423 | // This way we avoid "pattern missing fields" errors afterwards. | ||
4424 | // We delay this error until the end in order to have a span for a | ||
4425 | // suggested fix. | ||
4426 | if let Some(mut delayed_err) = delayed_err { | ||
4427 | delayed_err.emit(); | ||
4428 | return Err(err); | ||
4429 | } else { | ||
4430 | delayed_err = Some(err); | ||
4431 | } | ||
4432 | } else { | ||
4433 | if let Some(mut err) = delayed_err { | ||
4434 | err.emit(); | ||
4435 | } | ||
4436 | return Err(err); | ||
4437 | } | ||
4438 | } | ||
4439 | |||
4440 | fields.push(match self.parse_pat_field(lo, attrs) { | ||
4441 | Ok(field) => field, | ||
4442 | Err(err) => { | ||
4443 | if let Some(mut delayed_err) = delayed_err { | ||
4444 | delayed_err.emit(); | ||
4445 | } | ||
4446 | return Err(err); | ||
4447 | } | ||
4448 | }); | ||
4449 | ate_comma = self.eat(&token::Comma); | ||
4450 | } | ||
4451 | |||
4452 | if let Some(mut err) = delayed_err { | ||
4453 | if let Some(etc_span) = etc_span { | ||
4454 | err.multipart_suggestion( | ||
4455 | "move the `..` to the end of the field list", | ||
4456 | vec![ | ||
4457 | (etc_span, String::new()), | ||
4458 | (self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })), | ||
4459 | ], | ||
4460 | Applicability::MachineApplicable, | ||
4461 | ); | ||
4462 | } | ||
4463 | err.emit(); | ||
4464 | } | ||
4465 | return Ok((fields, etc)); | ||
4466 | } | ||
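In user syntax, the struct-pattern behaviors this loop implements; the commented-out lines show shapes it only recovers from with an error:

    #[allow(dead_code)]
    struct Config { host: String, port: u16, verbose: bool }

    fn main() {
        let cfg = Config { host: "localhost".into(), port: 80, verbose: true };
        let Config { port, .. } = cfg;       // `..` omits the remaining fields
        assert_eq!(port, 80);
        // let Config { ..., port } = cfg;   // error: `...` is not a field pattern
        // let Config { .., port } = cfg;    // error: `..` must come last
    }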
4467 | |||
4468 | fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> { | ||
4469 | if self.token.is_path_start() { | ||
4470 | let lo = self.span; | ||
4471 | let (qself, path) = if self.eat_lt() { | ||
4472 | // Parse a qualified path | ||
4473 | let (qself, path) = self.parse_qpath(PathStyle::Expr)?; | ||
4474 | (Some(qself), path) | ||
4475 | } else { | ||
4476 | // Parse an unqualified path | ||
4477 | (None, self.parse_path(PathStyle::Expr)?) | ||
4478 | }; | ||
4479 | let hi = self.prev_span; | ||
4480 | Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new())) | ||
4481 | } else { | ||
4482 | self.parse_literal_maybe_minus() | ||
4483 | } | ||
4484 | } | ||
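Both kinds of range endpoint this helper covers, a path (for instance a named constant) or a literal with an optional minus, shown as ordinary patterns:

    const LIMIT: i32 = 100;

    fn classify(n: i32) -> &'static str {
        match n {
            i32::MIN..=-1 => "negative",   // literal endpoint with a leading minus
            0..=LIMIT => "small",          // path (a named constant) as an endpoint
            _ => "large",
        }
    }

    fn main() {
        assert_eq!(classify(-3), "negative");
        assert_eq!(classify(42), "small");
        assert_eq!(classify(1000), "large");
    }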
4485 | |||
4486 | // Helper function to decide whether to parse as an ident binding or to try to do | ||
4487 | // something more complex, like range patterns. | ||
4488 | fn parse_as_ident(&mut self) -> bool { | ||
4489 | self.look_ahead(1, |t| match *t { | ||
4490 | token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) | | ||
4491 | token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false), | ||
4492 | // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the | ||
4493 | // range pattern branch | ||
4494 | token::DotDot => None, | ||
4495 | _ => Some(true), | ||
4496 | }).unwrap_or_else(|| self.look_ahead(2, |t| match *t { | ||
4497 | token::Comma | token::CloseDelim(token::Bracket) => true, | ||
4498 | _ => false, | ||
4499 | })) | ||
4500 | } | ||
4501 | |||
4502 | /// A wrapper around `parse_pat` with some special error handling for the | ||
4503 | /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast | ||
4504 | /// to subpatterns within such). | ||
4505 | fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> { | ||
4506 | let pat = self.parse_pat(None)?; | ||
4507 | if self.token == token::Comma { | ||
4508 | // An unexpected comma after a top-level pattern is a clue that the | ||
4509 | // user (perhaps more accustomed to some other language) forgot the | ||
4510 | // parentheses in what should have been a tuple pattern; return a | ||
4511 | // suggestion-enhanced error here rather than choking on the comma | ||
4512 | // later. | ||
4513 | let comma_span = self.span; | ||
4514 | self.bump(); | ||
4515 | if let Err(mut err) = self.parse_pat_list() { | ||
4516 | // We didn't expect this to work anyway; we just wanted | ||
4517 | // to advance to the end of the comma-sequence so we know | ||
4518 | // the span to suggest parenthesizing | ||
4519 | err.cancel(); | ||
4520 | } | ||
4521 | let seq_span = pat.span.to(self.prev_span); | ||
4522 | let mut err = self.struct_span_err(comma_span, | ||
4523 | "unexpected `,` in pattern"); | ||
4524 | if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) { | ||
4525 | err.span_suggestion( | ||
4526 | seq_span, | ||
4527 | "try adding parentheses to match on a tuple..", | ||
4528 | format!("({})", seq_snippet), | ||
4529 | Applicability::MachineApplicable | ||
4530 | ).span_suggestion( | ||
4531 | seq_span, | ||
4532 | "..or a vertical bar to match on multiple alternatives", | ||
4533 | format!("{}", seq_snippet.replace(",", " |")), | ||
4534 | Applicability::MachineApplicable | ||
4535 | ); | ||
4536 | } | ||
4537 | return Err(err); | ||
4538 | } | ||
4539 | Ok(pat) | ||
4540 | } | ||
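Roughly what this recovery targets: a comma-separated "pattern" where exactly one pattern is expected, together with the two rewrites the suggestions point at (an assumed illustration, not taken from rustc's test suite):

    fn main() {
        let point = (3, 4);
        // A user might write `let x, y = point;` expecting tuple destructuring;
        // the first suggestion is to parenthesize:
        let (x, y) = point;
        assert_eq!(x + y, 7);

        // In a match arm, the other suggestion applies: use `|` for alternatives.
        let n = 2;
        match n {
            1 | 2 | 3 => println!("small"),
            _ => println!("other"),
        }
    }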
4541 | |||
4542 | /// Parses a pattern. | ||
4543 | pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> { | ||
4544 | self.parse_pat_with_range_pat(true, expected) | ||
4545 | } | ||
4546 | |||
4547 | /// Parses a pattern, with a setting for whether modern range patterns (e.g., `a..=b`, | ||
4548 | /// `a..b`) are allowed. | ||
4549 | fn parse_pat_with_range_pat( | ||
4550 | &mut self, | ||
4551 | allow_range_pat: bool, | ||
4552 | expected: Option<&'static str>, | ||
4553 | ) -> PResult<'a, P<Pat>> { | ||
4554 | maybe_whole!(self, NtPat, |x| x); | ||
4555 | |||
4556 | let lo = self.span; | ||
4557 | let pat; | ||
4558 | match self.token { | ||
4559 | token::BinOp(token::And) | token::AndAnd => { | ||
4560 | // Parse &pat / &mut pat | ||
4561 | self.expect_and()?; | ||
4562 | let mutbl = self.parse_mutability(); | ||
4563 | if let token::Lifetime(ident) = self.token { | ||
4564 | let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", | ||
4565 | ident)); | ||
4566 | err.span_label(self.span, "unexpected lifetime"); | ||
4567 | return Err(err); | ||
4568 | } | ||
4569 | let subpat = self.parse_pat_with_range_pat(false, expected)?; | ||
4570 | pat = PatKind::Ref(subpat, mutbl); | ||
4571 | } | ||
4572 | token::OpenDelim(token::Paren) => { | ||
4573 | // Parse (pat,pat,pat,...) as tuple pattern | ||
4574 | let (fields, ddpos, trailing_comma) = self.parse_parenthesized_pat_list()?; | ||
4575 | pat = if fields.len() == 1 && ddpos.is_none() && !trailing_comma { | ||
4576 | PatKind::Paren(fields.into_iter().nth(0).unwrap()) | ||
4577 | } else { | ||
4578 | PatKind::Tuple(fields, ddpos) | ||
4579 | }; | ||
4580 | } | ||
4581 | token::OpenDelim(token::Bracket) => { | ||
4582 | // Parse [pat,pat,...] as slice pattern | ||
4583 | self.bump(); | ||
4584 | let (before, slice, after) = self.parse_pat_vec_elements()?; | ||
4585 | self.expect(&token::CloseDelim(token::Bracket))?; | ||
4586 | pat = PatKind::Slice(before, slice, after); | ||
4587 | } | ||
4588 | // At this point, token != &, &&, (, [ | ||
4589 | _ => if self.eat_keyword(keywords::Underscore) { | ||
4590 | // Parse _ | ||
4591 | pat = PatKind::Wild; | ||
4592 | } else if self.eat_keyword(keywords::Mut) { | ||
4593 | // Parse mut ident @ pat / mut ref ident @ pat | ||
4594 | let mutref_span = self.prev_span.to(self.span); | ||
4595 | let binding_mode = if self.eat_keyword(keywords::Ref) { | ||
4596 | self.diagnostic() | ||
4597 | .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect") | ||
4598 | .span_suggestion( | ||
4599 | mutref_span, | ||
4600 | "try switching the order", | ||
4601 | "ref mut".into(), | ||
4602 | Applicability::MachineApplicable | ||
4603 | ).emit(); | ||
4604 | BindingMode::ByRef(Mutability::Mutable) | ||
4605 | } else { | ||
4606 | BindingMode::ByValue(Mutability::Mutable) | ||
4607 | }; | ||
4608 | pat = self.parse_pat_ident(binding_mode)?; | ||
4609 | } else if self.eat_keyword(keywords::Ref) { | ||
4610 | // Parse ref ident @ pat / ref mut ident @ pat | ||
4611 | let mutbl = self.parse_mutability(); | ||
4612 | pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?; | ||
4613 | } else if self.eat_keyword(keywords::Box) { | ||
4614 | // Parse box pat | ||
4615 | let subpat = self.parse_pat_with_range_pat(false, None)?; | ||
4616 | pat = PatKind::Box(subpat); | ||
4617 | } else if self.token.is_ident() && !self.token.is_reserved_ident() && | ||
4618 | self.parse_as_ident() { | ||
4619 | // Parse ident @ pat | ||
4620 | // This can give false positives and parse nullary enums; | ||
4621 | // they are dealt with later in resolve. | ||
4622 | let binding_mode = BindingMode::ByValue(Mutability::Immutable); | ||
4623 | pat = self.parse_pat_ident(binding_mode)?; | ||
4624 | } else if self.token.is_path_start() { | ||
4625 | // Parse pattern starting with a path | ||
4626 | let (qself, path) = if self.eat_lt() { | ||
4627 | // Parse a qualified path | ||
4628 | let (qself, path) = self.parse_qpath(PathStyle::Expr)?; | ||
4629 | (Some(qself), path) | ||
4630 | } else { | ||
4631 | // Parse an unqualified path | ||
4632 | (None, self.parse_path(PathStyle::Expr)?) | ||
4633 | }; | ||
4634 | match self.token { | ||
4635 | token::Not if qself.is_none() => { | ||
4636 | // Parse macro invocation | ||
4637 | self.bump(); | ||
4638 | let (delim, tts) = self.expect_delimited_token_tree()?; | ||
4639 | let mac = respan(lo.to(self.prev_span), Mac_ { path, tts, delim }); | ||
4640 | pat = PatKind::Mac(mac); | ||
4641 | } | ||
4642 | token::DotDotDot | token::DotDotEq | token::DotDot => { | ||
4643 | let end_kind = match self.token { | ||
4644 | token::DotDot => RangeEnd::Excluded, | ||
4645 | token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot), | ||
4646 | token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq), | ||
4647 | _ => panic!("can only parse `..`/`...`/`..=` for ranges \ | ||
4648 | (checked above)"), | ||
4649 | }; | ||
4650 | let op_span = self.span; | ||
4651 | // Parse range | ||
4652 | let span = lo.to(self.prev_span); | ||
4653 | let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); | ||
4654 | self.bump(); | ||
4655 | let end = self.parse_pat_range_end()?; | ||
4656 | let op = Spanned { span: op_span, node: end_kind }; | ||
4657 | pat = PatKind::Range(begin, end, op); | ||
4658 | } | ||
4659 | token::OpenDelim(token::Brace) => { | ||
4660 | if qself.is_some() { | ||
4661 | let msg = "unexpected `{` after qualified path"; | ||
4662 | let mut err = self.fatal(msg); | ||
4663 | err.span_label(self.span, msg); | ||
4664 | return Err(err); | ||
4665 | } | ||
4666 | // Parse struct pattern | ||
4667 | self.bump(); | ||
4668 | let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| { | ||
4669 | e.emit(); | ||
4670 | self.recover_stmt(); | ||
4671 | (vec![], false) | ||
4672 | }); | ||
4673 | self.bump(); | ||
4674 | pat = PatKind::Struct(path, fields, etc); | ||
4675 | } | ||
4676 | token::OpenDelim(token::Paren) => { | ||
4677 | if qself.is_some() { | ||
4678 | let msg = "unexpected `(` after qualified path"; | ||
4679 | let mut err = self.fatal(msg); | ||
4680 | err.span_label(self.span, msg); | ||
4681 | return Err(err); | ||
4682 | } | ||
4683 | // Parse tuple struct or enum pattern | ||
4684 | let (fields, ddpos, _) = self.parse_parenthesized_pat_list()?; | ||
4685 | pat = PatKind::TupleStruct(path, fields, ddpos) | ||
4686 | } | ||
4687 | _ => pat = PatKind::Path(qself, path), | ||
4688 | } | ||
4689 | } else { | ||
4690 | // Try to parse everything else as literal with optional minus | ||
4691 | match self.parse_literal_maybe_minus() { | ||
4692 | Ok(begin) => { | ||
4693 | let op_span = self.span; | ||
4694 | if self.check(&token::DotDot) || self.check(&token::DotDotEq) || | ||
4695 | self.check(&token::DotDotDot) { | ||
4696 | let end_kind = if self.eat(&token::DotDotDot) { | ||
4697 | RangeEnd::Included(RangeSyntax::DotDotDot) | ||
4698 | } else if self.eat(&token::DotDotEq) { | ||
4699 | RangeEnd::Included(RangeSyntax::DotDotEq) | ||
4700 | } else if self.eat(&token::DotDot) { | ||
4701 | RangeEnd::Excluded | ||
4702 | } else { | ||
4703 | panic!("impossible case: we already matched \ | ||
4704 | on a range-operator token") | ||
4705 | }; | ||
4706 | let end = self.parse_pat_range_end()?; | ||
4707 | let op = Spanned { span: op_span, node: end_kind }; | ||
4708 | pat = PatKind::Range(begin, end, op); | ||
4709 | } else { | ||
4710 | pat = PatKind::Lit(begin); | ||
4711 | } | ||
4712 | } | ||
4713 | Err(mut err) => { | ||
4714 | self.cancel(&mut err); | ||
4715 | let expected = expected.unwrap_or("pattern"); | ||
4716 | let msg = format!( | ||
4717 | "expected {}, found {}", | ||
4718 | expected, | ||
4719 | self.this_token_descr(), | ||
4720 | ); | ||
4721 | let mut err = self.fatal(&msg); | ||
4722 | err.span_label(self.span, format!("expected {}", expected)); | ||
4723 | return Err(err); | ||
4724 | } | ||
4725 | } | ||
4726 | } | ||
4727 | } | ||
4728 | |||
4729 | let pat = Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID }; | ||
4730 | let pat = self.maybe_recover_from_bad_qpath(pat, true)?; | ||
4731 | |||
4732 | if !allow_range_pat { | ||
4733 | match pat.node { | ||
4734 | PatKind::Range( | ||
4735 | _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. } | ||
4736 | ) => {}, | ||
4737 | PatKind::Range(..) => { | ||
4738 | let mut err = self.struct_span_err( | ||
4739 | pat.span, | ||
4740 | "the range pattern here has ambiguous interpretation", | ||
4741 | ); | ||
4742 | err.span_suggestion( | ||
4743 | pat.span, | ||
4744 | "add parentheses to clarify the precedence", | ||
4745 | format!("({})", pprust::pat_to_string(&pat)), | ||
4746 | // "ambiguous interpretation" implies that we have to be guessing | ||
4747 | Applicability::MaybeIncorrect | ||
4748 | ); | ||
4749 | return Err(err); | ||
4750 | } | ||
4751 | _ => {} | ||
4752 | } | ||
4753 | } | ||
4754 | |||
4755 | Ok(P(pat)) | ||
4756 | } | ||
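One consequence of passing `allow_range_pat = false` under `&`, `&&`, and `box`: a range subpattern in that position is rejected with the parenthesization suggestion above. A user-level sketch of the accepted form:

    fn main() {
        let r = &5;
        match r {
            // `&0..=9` is the ambiguous form this code rejects with an
            // "add parentheses" suggestion; the parenthesized form is accepted.
            &(0..=9) => println!("digit"),
            _ => println!("other"),
        }
    }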
4757 | |||
4758 | /// Parses `ident` or `ident @ pat`. | ||
4759 | /// Used by the `copy foo` and `ref foo` patterns to give a good | ||
4760 | /// error message when parsing mistakes like `ref foo(a, b)`. | ||
4761 | fn parse_pat_ident(&mut self, | ||
4762 | binding_mode: ast::BindingMode) | ||
4763 | -> PResult<'a, PatKind> { | ||
4764 | let ident = self.parse_ident()?; | ||
4765 | let sub = if self.eat(&token::At) { | ||
4766 | Some(self.parse_pat(Some("binding pattern"))?) | ||
4767 | } else { | ||
4768 | None | ||
4769 | }; | ||
4770 | |||
4771 | // Just to be friendly: if they write something like | ||
4772 | // `ref Some(i)` | ||
4773 | // we end up here with `(` as the current token, which shortly | ||
4774 | // leads to a parse error. Note that if there is no explicit | ||
4775 | // binding mode then we do not end up here, because the lookahead | ||
4776 | // will direct us over to `parse_enum_variant()`. | ||
4777 | if self.token == token::OpenDelim(token::Paren) { | ||
4778 | return Err(self.span_fatal( | ||
4779 | self.prev_span, | ||
4780 | "expected identifier, found enum pattern")) | ||
4781 | } | ||
4782 | |||
4783 | Ok(PatKind::Ident(binding_mode, ident, sub)) | ||
4784 | } | ||
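The `ident @ subpattern` form this routine produces, plus a note on the mistake its special error targets (a small illustration):

    fn main() {
        let n = 4;
        match n {
            small @ 0..=5 => println!("got a small value: {}", small),
            _ => println!("something else"),
        }
        // `ref foo(a, b)` is the kind of slip this function reports as
        // "expected identifier, found enum pattern".
    }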
4785 | |||
4786 | /// Parses a local variable declaration. | ||
4787 | fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> { | ||
4788 | let lo = self.prev_span; | ||
4789 | let pat = self.parse_top_level_pat()?; | ||
4790 | |||
4791 | let (err, ty) = if self.eat(&token::Colon) { | ||
4792 | // Save the state of the parser before parsing the type normally, in case the user | ||
4793 | // typed `:` where they meant `=`. | ||
4794 | let parser_snapshot_before_type = self.clone(); | ||
4795 | let colon_sp = self.prev_span; | ||
4796 | match self.parse_ty() { | ||
4797 | Ok(ty) => (None, Some(ty)), | ||
4798 | Err(mut err) => { | ||
4799 | // Rewind to before attempting to parse the type and continue parsing | ||
4800 | let parser_snapshot_after_type = self.clone(); | ||
4801 | mem::replace(self, parser_snapshot_before_type); | ||
4802 | |||
4803 | let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap(); | ||
4804 | err.span_label(pat.span, format!("while parsing the type for `{}`", snippet)); | ||
4805 | (Some((parser_snapshot_after_type, colon_sp, err)), None) | ||
4806 | } | ||
4807 | } | ||
4808 | } else { | ||
4809 | (None, None) | ||
4810 | }; | ||
4811 | let init = match (self.parse_initializer(err.is_some()), err) { | ||
4812 | (Ok(init), None) => { // init parsed, ty parsed | ||
4813 | init | ||
4814 | } | ||
4815 | (Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error | ||
4816 | // The "type" could be parsed as if it were the initializer, so it is likely there was a | ||
4817 | // typo in the code (`:` instead of `=`). Add a suggestion and emit the error. | ||
4818 | err.span_suggestion_short( | ||
4819 | colon_sp, | ||
4820 | "use `=` if you meant to assign", | ||
4821 | "=".to_string(), | ||
4822 | Applicability::MachineApplicable | ||
4823 | ); | ||
4824 | err.emit(); | ||
4825 | // As this was parsed successfully, continue as if the code has been fixed for the | ||
4826 | // rest of the file. It will still fail due to the emitted error, but we avoid | ||
4827 | // extra noise. | ||
4828 | init | ||
4829 | } | ||
4830 | (Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error | ||
4831 | init_err.cancel(); | ||
4832 | // Couldn't parse the type nor the initializer, only raise the type error and | ||
4833 | // return to the parser state before parsing the type as the initializer. | ||
4834 | // let x: <parse_error>; | ||
4835 | mem::replace(self, snapshot); | ||
4836 | return Err(ty_err); | ||
4837 | } | ||
4838 | (Err(err), None) => { // init error, ty parsed | ||
4839 | // Couldn't parse the initializer and we're not attempting to recover a failed | ||
4840 | // parse of the type, return the error. | ||
4841 | return Err(err); | ||
4842 | } | ||
4843 | }; | ||
4844 | let hi = if self.token == token::Semi { | ||
4845 | self.span | ||
4846 | } else { | ||
4847 | self.prev_span | ||
4848 | }; | ||
4849 | Ok(P(ast::Local { | ||
4850 | ty, | ||
4851 | pat, | ||
4852 | init, | ||
4853 | id: ast::DUMMY_NODE_ID, | ||
4854 | span: lo.to(hi), | ||
4855 | attrs, | ||
4856 | })) | ||
4857 | } | ||
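The `:`-for-`=` recovery above in user terms; the mistyped line is shown only as a comment, and the names are invented for the example:

    fn main() {
        // Mistyped form the parser recovers from with "use `=` if you meant to assign":
        //     let total: 40 + 2;
        // Fixed form, plus the fully annotated variant the `:` branch normally handles:
        let total = 40 + 2;
        let annotated: i32 = 40 + 2;
        assert_eq!(total, annotated);
    }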
4858 | |||
4859 | /// Parses a structure field. | ||
4860 | fn parse_name_and_ty(&mut self, | ||
4861 | lo: Span, | ||
4862 | vis: Visibility, | ||
4863 | attrs: Vec<Attribute>) | ||
4864 | -> PResult<'a, StructField> { | ||
4865 | let name = self.parse_ident()?; | ||
4866 | self.expect(&token::Colon)?; | ||
4867 | let ty = self.parse_ty()?; | ||
4868 | Ok(StructField { | ||
4869 | span: lo.to(self.prev_span), | ||
4870 | ident: Some(name), | ||
4871 | vis, | ||
4872 | id: ast::DUMMY_NODE_ID, | ||
4873 | ty, | ||
4874 | attrs, | ||
4875 | }) | ||
4876 | } | ||
4877 | |||
4878 | /// Emits an expected-item-after-attributes error. | ||
4879 | fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> { | ||
4880 | let message = match attrs.last() { | ||
4881 | Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment", | ||
4882 | _ => "expected item after attributes", | ||
4883 | }; | ||
4884 | |||
4885 | let mut err = self.diagnostic().struct_span_err(self.prev_span, message); | ||
4886 | if attrs.last().unwrap().is_sugared_doc { | ||
4887 | err.span_label(self.prev_span, "this doc comment doesn't document anything"); | ||
4888 | } | ||
4889 | Err(err) | ||
4890 | } | ||
4891 | |||
4892 | /// Parses a statement. This stops just before trailing semicolons on everything but items. | ||
4893 | /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed. | ||
4894 | pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> { | ||
4895 | Ok(self.parse_stmt_(true)) | ||
4896 | } | ||
4897 | |||
4898 | // Eat tokens until we can be relatively sure we reached the end of the | ||
4899 | // statement. This is something of a best-effort heuristic. | ||
4900 | // | ||
4901 | // We terminate when we find an unmatched `}` (without consuming it). | ||
4902 | fn recover_stmt(&mut self) { | ||
4903 | self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore) | ||
4904 | } | ||
4905 | |||
4906 | // If `break_on_semi` is `Break`, then we will stop consuming tokens after | ||
4907 | // finding (and consuming) |