use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
use crate::ast::{GenericBound, TraitBoundModifier};
use crate::ast::Unsafety;
use crate::ast::{Mod, AnonConst, Arg, Arm, Guard, Attribute, BindingMode, TraitItemKind};
use crate::ast::Block;
use crate::ast::{BlockCheckMode, CaptureBy, Movability};
use crate::ast::{Constness, Crate};
use crate::ast::Defaultness;
use crate::ast::EnumDef;
use crate::ast::{Expr, ExprKind, RangeLimits};
use crate::ast::{Field, FnDecl, FnHeader};
use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use crate::ast::{GenericParam, GenericParamKind};
use crate::ast::GenericArg;
use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
use crate::ast::{Label, Lifetime, Lit, LitKind};
use crate::ast::Local;
use crate::ast::MacStmtStyle;
use crate::ast::{Mac, Mac_, MacDelimiter};
use crate::ast::{MutTy, Mutability};
use crate::ast::{Pat, PatKind, PathSegment};
use crate::ast::{PolyTraitRef, QSelf};
use crate::ast::{Stmt, StmtKind};
use crate::ast::{VariantData, StructField};
use crate::ast::StrStyle;
use crate::ast::SelfKind;
use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
use crate::ast::{UseTree, UseTreeKind};
use crate::ast::{BinOpKind, UnOp};
use crate::ast::{RangeEnd, RangeSyntax};
use crate::{ast, attr};
use crate::ext::base::DummyResult;
use crate::source_map::{self, SourceMap, Spanned, respan};
use crate::parse::{self, SeqSep, classify, token};
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::token::DelimToken;
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use crate::util::parser::{AssocOp, Fixity};
use crate::print::pprust;
use crate::ptr::P;
use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::symbol::{Symbol, keywords};

use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{Span, MultiSpan, BytePos, FileName};
use log::{debug, trace};

use std::borrow::Cow;
use std::cmp;
use std::mem;
use std::path::{self, Path, PathBuf};
use std::slice;

#[derive(Debug)]
/// Whether the type alias or associated type is a concrete type or an existential type
pub enum AliasKind {
    /// Just a new name for the same type
    Weak(P<Ty>),
    /// Only trait impls of the type will be usable, not the actual type itself
    Existential(GenericBounds),
}

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
    }
}

type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);

/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
    /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
    /// with something else. For example, in expressions `segment < ....` can be interpreted
    /// as a comparison and `segment ( ....` can be interpreted as a function call.
    /// In all such contexts the non-path interpretation is preferred by default for practical
    /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
    /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
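    ///
    /// For example (an illustrative sketch, not from the original comment):
    ///
    /// ```text
    /// f(a < b, c > (d));   // without `::` this parses as two comparisons
    /// f(a::<b, c>(d));     // `::<` disambiguates: a call with generic args
    /// ```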
    Expr,
    /// In other contexts, notably in types, no ambiguity exists and paths can be written
    /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
    /// Paths with disambiguators are still accepted, `x::<y>` - unambiguously a path too.
    Type,
    /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
    /// visibilities or attributes.
    /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
    /// (paths in "mod" contexts have to be checked later for absence of generic arguments
    /// anyway, due to macros), but it is used to avoid weird suggestions about expected
    /// tokens when something goes wrong.
    Mod,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BlockMode {
    Break,
    Ignore,
}

/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = $p.token.clone() {
            match *nt {
                token::NtExpr(ref e) | token::NtLiteral(ref e) => {
                    $p.bump();
                    return Ok((*e).clone());
                }
                token::NtPath(ref path) => {
                    $p.bump();
                    let span = $p.span;
                    let kind = ExprKind::Path(None, (*path).clone());
                    return Ok($p.mk_expr(span, kind, ThinVec::new()));
                }
                token::NtBlock(ref block) => {
                    $p.bump();
                    let span = $p.span;
                    let kind = ExprKind::Block((*block).clone(), None);
                    return Ok($p.mk_expr(span, kind, ThinVec::new()));
                }
                _ => {},
            };
        }
    }
}
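// An illustrative sketch (not from the original source) of where interpolated
// tokens come from: given
//
//     macro_rules! add_one { ($e:expr) => { $e + 1 } }
//     add_one!(2 * 3)
//
// the `$e` fragment re-enters the parser inside the expanded `$e + 1` as a
// single `token::Interpolated(NtExpr(..))` token wrapping the already-parsed
// `2 * 3`, which `maybe_whole_expr!` unwraps without re-parsing.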
/// As maybe_whole_expr, but for things other than expressions
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = $p.token.clone() {
            if let token::$constructor($x) = (*nt).clone() {
                $p.bump();
                return Ok($e);
            }
        }
    };
}

fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
    if let Some(ref mut rhs) = rhs {
        lhs.append(rhs);
    }
    lhs
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum PrevTokenKind {
    DocComment,
    Comma,
    Plus,
    Interpolated,
    Eof,
    Ident,
    Other,
}

trait RecoverQPath: Sized {
    const PATH_STYLE: PathStyle = PathStyle::Expr;
    fn to_ty(&self) -> Option<P<Ty>>;
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self;
    fn to_string(&self) -> String;
}

impl RecoverQPath for Ty {
    const PATH_STYLE: PathStyle = PathStyle::Type;
    fn to_ty(&self) -> Option<P<Ty>> {
        Some(P(self.clone()))
    }
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
        Self { span: path.span, node: TyKind::Path(qself, path), id: self.id }
    }
    fn to_string(&self) -> String {
        pprust::ty_to_string(self)
    }
}

impl RecoverQPath for Pat {
    fn to_ty(&self) -> Option<P<Ty>> {
        self.to_ty()
    }
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
        Self { span: path.span, node: PatKind::Path(qself, path), id: self.id }
    }
    fn to_string(&self) -> String {
        pprust::pat_to_string(self)
    }
}

impl RecoverQPath for Expr {
    fn to_ty(&self) -> Option<P<Ty>> {
        self.to_ty()
    }
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
        Self { span: path.span, node: ExprKind::Path(qself, path),
               id: self.id, attrs: self.attrs.clone() }
    }
    fn to_string(&self) -> String {
        pprust::expr_to_string(self)
    }
}
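// An illustrative sketch (not from the original source): these impls power the
// recovery in `maybe_recover_from_bad_qpath` further down in this file. For
// input like
//
//     [u8]::clone(&x)
//
// the parser re-interprets the non-path base as a qualified path, and
// `to_recovered` rebuilds the node so the suggestion can print
//
//     <[u8]>::clone(&x)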
/* ident is handled by common.rs */

#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// the current token:
    pub token: token::Token,
    /// the span of the current token:
    pub span: Span,
    /// the span of the previous token:
    meta_var_span: Option<Span>,
    pub prev_span: Span,
    /// the previous token kind
    prev_token_kind: PrevTokenKind,
    restrictions: Restrictions,
    /// Used to determine the path to externally loaded source files
    crate directory: Directory<'a>,
    /// Whether to parse sub-modules in other files.
    pub recurse_into_file_modules: bool,
    /// Name of the root module this parser originated from. If `None`, then the
    /// name is not known. This does not change while the parser is descending
    /// into modules, and sub-parsers have new values for this name.
    pub root_module_name: Option<String>,
    crate expected_tokens: Vec<TokenType>,
    token_cursor: TokenCursor,
    desugar_doc_comments: bool,
    /// Whether we should configure out of line modules as we parse.
    pub cfg_mods: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    crate unmatched_angle_bracket_count: u32,
    crate max_angle_bracket_count: u32,
    /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
    /// it gets removed from here. Every entry left at the end gets emitted as an independent
    /// error.
    crate unclosed_delims: Vec<UnmatchedBrace>,
}

#[derive(Clone)]
struct TokenCursor {
    frame: TokenCursorFrame,
    stack: Vec<TokenCursorFrame>,
}

#[derive(Clone)]
struct TokenCursorFrame {
    delim: token::DelimToken,
    span: DelimSpan,
    open_delim: bool,
    tree_cursor: tokenstream::Cursor,
    close_delim: bool,
    last_token: LastToken,
}

/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parse AST item is created with.
///
/// Right now this has two states, either collecting tokens or not collecting
/// tokens. If we're collecting tokens we just save everything off into a local
/// `Vec`. This should eventually though likely save tokens from the original
/// token stream and just use slicing of token streams to avoid creation of a
/// whole new vector.
///
/// The second state is where we're passively not recording tokens, but the last
/// token is still tracked for when we want to start recording tokens. This
/// "last token" means that when we start recording tokens we'll want to ensure
/// that this, the first token, is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
enum LastToken {
    Collecting(Vec<TreeAndJoint>),
    Was(Option<TreeAndJoint>),
}

impl TokenCursorFrame {
    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
        TokenCursorFrame {
            delim: delim,
            span: sp,
            open_delim: delim == token::NoDelim,
            tree_cursor: tts.clone().into_trees(),
            close_delim: delim == token::NoDelim,
            last_token: LastToken::Was(None),
        }
    }
}

impl TokenCursor {
    fn next(&mut self) -> TokenAndSpan {
        loop {
            let tree = if !self.frame.open_delim {
                self.frame.open_delim = true;
                TokenTree::open_tt(self.frame.span.open, self.frame.delim)
            } else if let Some(tree) = self.frame.tree_cursor.next() {
                tree
            } else if !self.frame.close_delim {
                self.frame.close_delim = true;
                TokenTree::close_tt(self.frame.span.close, self.frame.delim)
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
                continue
            } else {
                return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
            };

            match self.frame.last_token {
                LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
                LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
            }

            match tree {
                TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
                TokenTree::Delimited(sp, delim, tts) => {
                    let frame = TokenCursorFrame::new(sp, delim, &tts);
                    self.stack.push(mem::replace(&mut self.frame, frame));
                }
            }
        }
    }

    fn next_desugared(&mut self) -> TokenAndSpan {
        let (sp, name) = match self.next() {
            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
            tok => return tok,
        };

        let stripped = strip_doc_comment_decoration(&name.as_str());

        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in stripped.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        let delim_span = DelimSpan::from_single(sp);
        let body = TokenTree::Delimited(
            delim_span,
            token::Bracket,
            [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
             TokenTree::Token(sp, token::Eq),
             TokenTree::Token(sp, token::Literal(
                token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
            ]
            .iter().cloned().collect::<TokenStream>().into(),
        );

        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
            delim_span,
            token::NoDelim,
            &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
                    .iter().cloned().collect::<TokenStream>().into()
            } else {
                [TokenTree::Token(sp, token::Pound), body]
                    .iter().cloned().collect::<TokenStream>().into()
            },
        )));

        self.next()
    }
}
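// A minimal standalone sketch (not part of the original source) of the `"#*`
// counting in `next_desugared` above; `required_hashes` is a hypothetical
// name. It computes how many `#`s a raw string needs to safely wrap `s`:
//
//     fn required_hashes(s: &str) -> usize {
//         let (mut required, mut count) = (0, 0);
//         for ch in s.chars() {
//             count = match ch {
//                 '"' => 1,                      // a quote alone needs one `#`
//                 '#' if count > 0 => count + 1, // each `#` right after it needs one more
//                 _ => 0,
//             };
//             required = std::cmp::max(required, count);
//         }
//         required
//     }
//
//     assert_eq!(required_hashes("plain text"), 0);    // r"plain text"
//     assert_eq!(required_hashes("say \"hi\""), 1);    // r#"say "hi""#
//     assert_eq!(required_hashes("quote: \"#"), 2);    // r##"quote: "#"##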
#[derive(Clone, PartialEq)]
crate enum TokenType {
    Token(token::Token),
    Keyword(keywords::Keyword),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}

impl TokenType {
    fn to_string(&self) -> String {
        match *self {
            TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw.name()),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
            TokenType::Const => "const".to_string(),
        }
    }
}

/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
/// `IDENT<<u8 as Trait>::AssocTy>`.
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
    t == &token::ModSep || t == &token::Lt ||
    t == &token::BinOp(token::Shl)
}
/// Information about the path to a module.
pub struct ModulePath {
    name: String,
    path_exists: bool,
    pub result: Result<ModulePathSuccess, Error>,
}

pub struct ModulePathSuccess {
    pub path: PathBuf,
    pub directory_ownership: DirectoryOwnership,
    warn: bool,
}

pub enum Error {
    FileNotFoundForModule {
        mod_name: String,
        default_path: String,
        secondary_path: String,
        dir_path: String,
    },
    DuplicatePaths {
        mod_name: String,
        default_path: String,
        secondary_path: String,
    },
    UselessDocComment,
    InclusiveRangeWithNoEnd,
}

impl Error {
    fn span_err<S: Into<MultiSpan>>(self, sp: S, handler: &errors::Handler) -> DiagnosticBuilder<'_> {
        match self {
            Error::FileNotFoundForModule { ref mod_name,
                                           ref default_path,
                                           ref secondary_path,
                                           ref dir_path } => {
                let mut err = struct_span_err!(handler, sp, E0583,
                                               "file not found for module `{}`", mod_name);
                err.help(&format!("name the file either {} or {} inside the directory \"{}\"",
                                  default_path, secondary_path, dir_path));
                err
            }
            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
                let mut err = struct_span_err!(handler, sp, E0584,
                                               "file for module `{}` found at both {} and {}",
                                               mod_name, default_path, secondary_path);
                err.help("delete or rename one of them to remove the ambiguity");
                err
            }
            Error::UselessDocComment => {
                let mut err = struct_span_err!(handler, sp, E0585,
                    "found a documentation comment that doesn't document anything");
                err.help("doc comments must come before what they document, maybe a comment was \
                          intended with `//`?");
                err
            }
            Error::InclusiveRangeWithNoEnd => {
                let mut err = struct_span_err!(handler, sp, E0586,
                                               "inclusive range with no end");
                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
                err
            }
        }
    }
}

#[derive(Debug)]
enum LhsExpr {
    NotYetParsed,
    AttributesParsed(ThinVec<Attribute>),
    AlreadyParsed(P<Expr>),
}

impl From<Option<ThinVec<Attribute>>> for LhsExpr {
    fn from(o: Option<ThinVec<Attribute>>) -> Self {
        if let Some(attrs) = o {
            LhsExpr::AttributesParsed(attrs)
        } else {
            LhsExpr::NotYetParsed
        }
    }
}

impl From<P<Expr>> for LhsExpr {
    fn from(expr: P<Expr>) -> Self {
        LhsExpr::AlreadyParsed(expr)
    }
}

/// Creates a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
    let ident = Ident::new(keywords::Invalid.name(), span);
    let pat = P(Pat {
        id: ast::DUMMY_NODE_ID,
        node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
        span,
    });
    let ty = Ty {
        node: TyKind::Err,
        span,
        id: ast::DUMMY_NODE_ID
    };
    Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID }
}

#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

impl<'a> Parser<'a> {
    pub fn new(sess: &'a ParseSess,
               tokens: TokenStream,
               directory: Option<Directory<'a>>,
               recurse_into_file_modules: bool,
               desugar_doc_comments: bool)
               -> Self {
        let mut parser = Parser {
            sess,
            token: token::Whitespace,
            span: syntax_pos::DUMMY_SP,
            prev_span: syntax_pos::DUMMY_SP,
            meta_var_span: None,
            prev_token_kind: PrevTokenKind::Other,
            restrictions: Restrictions::empty(),
            recurse_into_file_modules,
            directory: Directory {
                path: Cow::from(PathBuf::new()),
                ownership: DirectoryOwnership::Owned { relative: None }
            },
            root_module_name: None,
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                frame: TokenCursorFrame::new(
                    DelimSpan::dummy(),
                    token::NoDelim,
                    &tokens.into(),
                ),
                stack: Vec::new(),
            },
            desugar_doc_comments,
            cfg_mods: true,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
        };

        let tok = parser.next_tok();
        parser.token = tok.tok;
        parser.span = tok.sp;

        if let Some(directory) = directory {
            parser.directory = directory;
        } else if !parser.span.is_dummy() {
            if let FileName::Real(mut path) =
                    sess.source_map().span_to_unmapped_path(parser.span) {
                path.pop();
                parser.directory.path = Cow::from(path);
            }
        }

        parser.process_potential_macro_variable();
        parser
    }

    fn next_tok(&mut self) -> TokenAndSpan {
        let mut next = if self.desugar_doc_comments {
            self.token_cursor.next_desugared()
        } else {
            self.token_cursor.next()
        };
        if next.sp.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
        }
        next
    }

    /// Converts the current token to a string using `self`'s reader.
    pub fn this_token_to_string(&self) -> String {
        pprust::token_to_string(&self.token)
    }

    fn token_descr(&self) -> Option<&'static str> {
        Some(match &self.token {
            t if t.is_special_ident() => "reserved identifier",
            t if t.is_used_keyword() => "keyword",
            t if t.is_unused_keyword() => "reserved keyword",
            token::DocComment(..) => "doc comment",
            _ => return None,
        })
    }

    fn this_token_descr(&self) -> String {
        if let Some(prefix) = self.token_descr() {
            format!("{} `{}`", prefix, self.this_token_to_string())
        } else {
            format!("`{}`", self.this_token_to_string())
        }
    }

    fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
        let token_str = pprust::token_to_string(t);
        Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
    }

    crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            Ok(_) => unreachable!(),
        }
    }

    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
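    ///
    /// An illustrative sketch (not from the original comment) of the contract,
    /// using names from this file:
    ///
    /// ```text
    /// match parser.expect(&token::Semi) {
    ///     Ok(false) => { /* `;` was present and consumed */ }
    ///     Ok(true)  => { /* `;` was missing, but error recovery (e.g. an
    ///                       unclosed-delimiter suggestion) papered over it */ }
    ///     Err(err)  => { /* fatal: a diagnostic to emit or cancel */ }
    /// }
    /// ```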
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); Ok(false) } else { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `{}`, found {}", token_str, this_token_str)); let sp = if self.token == token::Token::Eof { // EOF, don't want to point at the following char, but rather the last token self.prev_span } else { self.sess.source_map().next_point(self.prev_span) }; let label_exp = format!("expected `{}`", token_str); match self.recover_closing_delimiter(&[t.clone()], err) { Err(e) => err = e, Ok(recovered) => { return Ok(recovered); } } let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content // between them is whitespace, point only at the found token. err.span_label(self.span, label_exp); } _ => { err.span_label(sp, label_exp); err.span_label(self.span, "unexpected token"); } } Err(err) } } else { self.expect_one_of(slice::from_ref(t), &[]) } } fn recover_closing_delimiter( &mut self, tokens: &[token::Token], mut err: DiagnosticBuilder<'a>, ) -> PResult<'a, bool> { let mut pos = None; // we want to use the last closing delim that would apply for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) && Some(self.span) > unmatched.unclosed_span { pos = Some(i); } } match pos { Some(pos) => { // Recover and assume that the detected unclosed delimiter was meant for // this location. Emit the diagnostic and act as if the delimiter was // present for the parser's sake. // Don't attempt to recover from this unclosed delimiter more than once. let unmatched = self.unclosed_delims.remove(pos); let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); // We want to suggest the inclusion of the closing delimiter where it makes // the most sense, which is immediately after the last token: // // {foo(bar {}} // - ^ // | | // | help: `)` may belong here (FIXME: #58270) // | // unclosed delimiter if let Some(sp) = unmatched.unclosed_span { err.span_label(sp, "unclosed delimiter"); } err.span_suggestion_short( self.sess.source_map().next_point(self.prev_span), &format!("{} may belong here", delim.to_string()), delim.to_string(), Applicability::MaybeIncorrect, ); err.emit(); self.expected_tokens.clear(); // reduce errors Ok(true) } _ => Err(err), } } /// Expect next token to be edible or inedible token. If edible, /// then consume it; if inedible, then return without consuming /// anything. Signal a fatal error if next token is unexpected. pub fn expect_one_of( &mut self, edible: &[token::Token], inedible: &[token::Token], ) -> PResult<'a, bool /* recovered */> { fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. 
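            // Illustrative sketch of the output (not from the original source):
            //   ["`,`"]                -> "`,`"
            //   ["`,`", "`;`"]         -> "`,` or `;`"
            //   ["`,`", "`;`", "`}`"]  -> "`,`, `;`, or `}`"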
let b = i.next() .map_or(String::new(), |t| t.to_string()); i.enumerate().fold(b, |mut b, (i, a)| { if tokens.len() > 2 && i == tokens.len() - 2 { b.push_str(", or "); } else if tokens.len() == 2 && i == tokens.len() - 2 { b.push_str(" or "); } else { b.push_str(", "); } b.push_str(&a.to_string()); b }) } if edible.contains(&self.token) { self.bump(); Ok(false) } else if inedible.contains(&self.token) { // leave it in the input Ok(false) } else { let mut expected = edible.iter() .map(|x| TokenType::Token(x.clone())) .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) .chain(self.expected_tokens.iter().cloned()) .collect::>(); expected.sort_by_cached_key(|x| x.to_string()); expected.dedup(); let expect = tokens_to_string(&expected[..]); let actual = self.this_token_to_string(); let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { let short_expect = if expected.len() > 6 { format!("{} possible tokens", expected.len()) } else { expect.clone() }; (format!("expected one of {}, found `{}`", expect, actual), (self.sess.source_map().next_point(self.prev_span), format!("expected one of {} here", short_expect))) } else if expected.is_empty() { (format!("unexpected token: `{}`", actual), (self.prev_span, "unexpected token after this".to_string())) } else { (format!("expected {}, found `{}`", expect, actual), (self.sess.source_map().next_point(self.prev_span), format!("expected {} here", expect))) }; let mut err = self.fatal(&msg_exp); if self.token.is_ident_named("and") { err.span_suggestion_short( self.span, "use `&&` instead of `and` for the boolean operator", "&&".to_string(), Applicability::MaybeIncorrect, ); } if self.token.is_ident_named("or") { err.span_suggestion_short( self.span, "use `||` instead of `or` for the boolean operator", "||".to_string(), Applicability::MaybeIncorrect, ); } let sp = if self.token == token::Token::Eof { // This is EOF, don't want to point at the following char, but rather the last token self.prev_span } else { label_sp }; match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { TokenType::Token(t) => Some(t.clone()), _ => None, }).collect::>(), err) { Err(e) => err = e, Ok(recovered) => { return Ok(recovered); } } let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content between // them is whitespace, point at the found token in that case: // // X | () => { syntax error }; // | ^^^^^ expected one of 8 possible tokens here // // instead of having: // // X | () => { syntax error }; // | -^^^^^ unexpected token // | | // | expected one of 8 possible tokens here err.span_label(self.span, label_exp); } _ if self.prev_span == syntax_pos::DUMMY_SP => { // Account for macro context where the previous span might not be // available to avoid incorrect output (#54841). err.span_label(self.span, "unexpected token"); } _ => { err.span_label(sp, label_exp); err.span_label(self.span, "unexpected token"); } } Err(err) } } /// Returns the span of expr, if it was not interpolated or the span of the interpolated token. 
fn interpolated_or_expr_span(&self, expr: PResult<'a, P>) -> PResult<'a, (Span, P)> { expr.map(|e| { if self.prev_token_kind == PrevTokenKind::Interpolated { (self.prev_span, e) } else { (e.span, e) } }) } fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { let mut err = self.struct_span_err(self.span, &format!("expected identifier, found {}", self.this_token_descr())); if let token::Ident(ident, false) = &self.token { if ident.is_reserved() && !ident.is_path_segment_keyword() && ident.name != keywords::Underscore.name() { err.span_suggestion( self.span, "you can escape reserved keywords to use them as identifiers", format!("r#{}", ident), Applicability::MaybeIncorrect, ); } } if let Some(token_descr) = self.token_descr() { err.span_label(self.span, format!("expected identifier, found {}", token_descr)); } else { err.span_label(self.span, "expected identifier"); if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { err.span_suggestion( self.span, "remove this comma", String::new(), Applicability::MachineApplicable, ); } } err } pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { self.parse_ident_common(true) } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { match self.token { token::Ident(ident, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); if recover { err.emit(); } else { return Err(err); } } let span = self.span; self.bump(); Ok(Ident::new(ident.name, span)) } _ => { Err(if self.prev_token_kind == PrevTokenKind::DocComment { self.span_fatal_err(self.prev_span, Error::UselessDocComment) } else { self.expected_ident_found() }) } } } /// Checks if the next token is `tok`, and returns `true` if so. /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. crate fn check(&mut self, tok: &token::Token) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present } /// Consumes a token 'tok' if it exists. Returns whether the given token was present. pub fn eat(&mut self, tok: &token::Token) -> bool { let is_present = self.check(tok); if is_present { self.bump() } is_present } fn check_keyword(&mut self, kw: keywords::Keyword) -> bool { self.expected_tokens.push(TokenType::Keyword(kw)); self.token.is_keyword(kw) } /// If the next token is the given keyword, eats it and returns /// `true`. Otherwise, returns `false`. pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool { if self.check_keyword(kw) { self.bump(); true } else { false } } fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool { if self.token.is_keyword(kw) { self.bump(); true } else { false } } /// If the given word is not a keyword, signals an error. /// If the next token is not the given word, signals an error. /// Otherwise, eats it. 
fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> { if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) } } fn check_ident(&mut self) -> bool { if self.token.is_ident() { true } else { self.expected_tokens.push(TokenType::Ident); false } } fn check_path(&mut self) -> bool { if self.token.is_path_start() { true } else { self.expected_tokens.push(TokenType::Path); false } } fn check_type(&mut self) -> bool { if self.token.can_begin_type() { true } else { self.expected_tokens.push(TokenType::Type); false } } fn check_const_arg(&mut self) -> bool { if self.token.can_begin_const_arg() { true } else { self.expected_tokens.push(TokenType::Const); false } } /// Expects and consumes a `+`. if `+=` is seen, replaces it with a `=` /// and continues. If a `+` is not seen, returns `false`. /// /// This is used when token-splitting `+=` into `+`. /// See issue #47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); match self.token { token::BinOp(token::Plus) => { self.bump(); true } token::BinOpEq(token::Plus) => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); self.bump_with(token::Eq, span); true } _ => false, } } /// Checks to see if the next token is either `+` or `+=`. /// Otherwise returns `false`. fn check_plus(&mut self) -> bool { if self.token.is_like_plus() { true } else { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); false } } /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single /// `&` and continues. If an `&` is not seen, signals an error. fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); match self.token { token::BinOp(token::And) => { self.bump(); Ok(()) } token::AndAnd => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::And), span)) } _ => self.unexpected() } } /// Expects and consumes an `|`. If `||` is seen, replaces it with a single /// `|` and continues. If an `|` is not seen, signals an error. fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); match self.token { token::BinOp(token::Or) => { self.bump(); Ok(()) } token::OrOr => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::Or), span)) } _ => self.unexpected() } } fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option) { match suffix { None => {/* everything ok */} Some(suf) => { let text = suf.as_str(); if text.is_empty() { self.span_bug(sp, "found empty literal suffix in Some") } let msg = format!("{} with a suffix is invalid", kind); self.struct_span_err(sp, &msg) .span_label(sp, msg) .emit(); } } } /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single /// `<` and continue. If `<-` is seen, replaces it with a single `<` /// and continue. If a `<` is not seen, returns false. /// /// This is meant to be used when parsing generics on a path to get the /// starting token. 
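    ///
    /// For example (illustrative): in `Vec<<u8 as Trait>::Assoc>` the lexer
    /// produces a single `<<` (`Shl`) token; `eat_lt` consumes the first `<`
    /// by calling `bump_with` with a `Lt` token whose span starts one byte
    /// further in, leaving a plain `<` as the current token for the next call.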
fn eat_lt(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::Lt)); let ate = match self.token { token::Lt => { self.bump(); true } token::BinOp(token::Shl) => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); self.bump_with(token::Lt, span); true } token::LArrow => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); self.bump_with(token::BinOp(token::Minus), span); true } _ => false, }; if ate { // See doc comment for `unmatched_angle_bracket_count`. self.unmatched_angle_bracket_count += 1; self.max_angle_bracket_count += 1; debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); } ate } fn expect_lt(&mut self) -> PResult<'a, ()> { if !self.eat_lt() { self.unexpected() } else { Ok(()) } } /// Expects and consumes a single `>` token. if a `>>` is seen, replaces it /// with a single `>` and continues. If a `>` is not seen, signals an error. fn expect_gt(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::Gt)); let ate = match self.token { token::Gt => { self.bump(); Some(()) } token::BinOp(token::Shr) => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Some(self.bump_with(token::Gt, span)) } token::BinOpEq(token::Shr) => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Some(self.bump_with(token::Ge, span)) } token::Ge => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Some(self.bump_with(token::Eq, span)) } _ => None, }; match ate { Some(_) => { // See doc comment for `unmatched_angle_bracket_count`. if self.unmatched_angle_bracket_count > 0 { self.unmatched_angle_bracket_count -= 1; debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); } Ok(()) }, None => self.unexpected(), } } /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. fn eat_to_tokens(&mut self, kets: &[&token::Token]) { let handler = self.diagnostic(); if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| Ok(p.parse_token_tree())) { handler.cancel(err); } } /// Parses a sequence, including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_end(&mut self, ket: &token::Token, sep: SeqSep, f: F) -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; if !recovered { self.bump(); } Ok(val) } /// Parses a sequence, not including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_before_end( &mut self, ket: &token::Token, sep: SeqSep, f: F, ) -> PResult<'a, (Vec, bool)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } fn parse_seq_to_before_tokens( &mut self, kets: &[&token::Token], sep: SeqSep, expect: TokenExpectType, mut f: F, ) -> PResult<'a, (Vec, bool /* recovered */)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { let mut first = true; let mut recovered = false; let mut v = vec![]; while !kets.iter().any(|k| { match expect { TokenExpectType::Expect => self.check(k), TokenExpectType::NoExpect => self.token == **k, } }) { match self.token { token::CloseDelim(..) 
| token::Eof => break, _ => {} }; if let Some(ref t) = sep.sep { if first { first = false; } else { match self.expect(t) { Ok(false) => {} Ok(true) => { recovered = true; break; } Err(mut e) => { // Attempt to keep parsing if it was a similar separator if let Some(ref tokens) = t.similar_tokens() { if tokens.contains(&self.token) { self.bump(); } } e.emit(); // Attempt to keep parsing if it was an omitted separator match f(self) { Ok(t) => { v.push(t); continue; }, Err(mut e) => { e.cancel(); break; } } } } } } if sep.trailing_sep_allowed && kets.iter().any(|k| { match expect { TokenExpectType::Expect => self.check(k), TokenExpectType::NoExpect => self.token == **k, } }) { break; } let t = f(self)?; v.push(t); } Ok((v, recovered)) } /// Parses a sequence, including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. fn parse_unspanned_seq( &mut self, bra: &token::Token, ket: &token::Token, sep: SeqSep, f: F, ) -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { self.expect(bra)?; let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; if !recovered { self.eat(ket); } Ok(result) } /// Advance the parser by one token pub fn bump(&mut self) { if self.prev_token_kind == PrevTokenKind::Eof { // Bumping after EOF is a bad sign, usually an infinite loop. self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); } self.prev_span = self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. self.prev_token_kind = match self.token { token::DocComment(..) => PrevTokenKind::DocComment, token::Comma => PrevTokenKind::Comma, token::BinOp(token::Plus) => PrevTokenKind::Plus, token::Interpolated(..) => PrevTokenKind::Interpolated, token::Eof => PrevTokenKind::Eof, token::Ident(..) => PrevTokenKind::Ident, _ => PrevTokenKind::Other, }; let next = self.next_tok(); self.span = next.sp; self.token = next.tok; self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); } /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. fn bump_with(&mut self, next: token::Token, span: Span) { self.prev_span = self.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. 
self.prev_token_kind = PrevTokenKind::Other; self.span = span; self.token = next; self.expected_tokens.clear(); } pub fn look_ahead(&self, dist: usize, f: F) -> R where F: FnOnce(&token::Token) -> R, { if dist == 0 { return f(&self.token) } f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { TokenTree::Token(_, tok) => tok, TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim), }, None => token::CloseDelim(self.token_cursor.frame.delim), }) } fn look_ahead_span(&self, dist: usize) -> Span { if dist == 0 { return self.span } match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { Some(TokenTree::Token(span, _)) => span, Some(TokenTree::Delimited(span, ..)) => span.entire(), None => self.look_ahead_span(dist - 1), } } pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_fatal(self.span, m) } pub fn span_fatal>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_fatal(sp, m) } fn span_fatal_err>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> { err.span_err(sp, self.diagnostic()) } fn bug(&self, m: &str) -> ! { self.sess.span_diagnostic.span_bug(self.span, m) } fn span_err>(&self, sp: S, m: &str) { self.sess.span_diagnostic.span_err(sp, m) } fn struct_span_err>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_err(sp, m) } crate fn span_bug>(&self, sp: S, m: &str) -> ! { self.sess.span_diagnostic.span_bug(sp, m) } fn cancel(&self, err: &mut DiagnosticBuilder<'_>) { self.sess.span_diagnostic.cancel(err) } crate fn diagnostic(&self) -> &'a errors::Handler { &self.sess.span_diagnostic } /// Is the current token one of the keywords that signals a bare function type? fn token_is_bare_fn_keyword(&mut self) -> bool { self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Unsafe) || self.check_keyword(keywords::Extern) } /// Parses a `TyKind::BareFn` type. fn parse_ty_bare_fn(&mut self, generic_params: Vec) -> PResult<'a, TyKind> { /* [unsafe] [extern "ABI"] fn (S) -> T ^~~~^ ^~~~^ ^~^ ^ | | | | | | | Return type | | Argument types | | | ABI Function Style */ let unsafety = self.parse_unsafety(); let abi = if self.eat_keyword(keywords::Extern) { self.parse_opt_abi()?.unwrap_or(Abi::C) } else { Abi::Rust }; self.expect_keyword(keywords::Fn)?; let (inputs, variadic) = self.parse_fn_args(false, true)?; let ret_ty = self.parse_ret_ty(false)?; let decl = P(FnDecl { inputs, output: ret_ty, variadic, }); Ok(TyKind::BareFn(P(BareFnTy { abi, unsafety, generic_params, decl, }))) } /// Parses asyncness: `async` or nothing. fn parse_asyncness(&mut self) -> IsAsync { if self.eat_keyword(keywords::Async) { IsAsync::Async { closure_id: ast::DUMMY_NODE_ID, return_impl_trait_id: ast::DUMMY_NODE_ID, } } else { IsAsync::NotAsync } } /// Parses unsafety: `unsafe` or nothing. fn parse_unsafety(&mut self) -> Unsafety { if self.eat_keyword(keywords::Unsafe) { Unsafety::Unsafe } else { Unsafety::Normal } } /// Parses the items in a trait declaration. pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> { maybe_whole!(self, NtTraitItem, |x| x); let attrs = self.parse_outer_attributes()?; let (mut item, tokens) = self.collect_tokens(|this| { this.parse_trait_item_(at_end, attrs) })?; // See `parse_item` for why this clause is here. 
if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { item.tokens = Some(tokens); } Ok(item) } fn parse_trait_item_(&mut self, at_end: &mut bool, mut attrs: Vec) -> PResult<'a, TraitItem> { let lo = self.span; let (name, node, generics) = if self.eat_keyword(keywords::Type) { self.parse_trait_item_assoc_ty()? } else if self.is_const_item() { self.expect_keyword(keywords::Const)?; let ident = self.parse_ident()?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; let default = if self.eat(&token::Eq) { let expr = self.parse_expr()?; self.expect(&token::Semi)?; Some(expr) } else { self.expect(&token::Semi)?; None }; (ident, TraitItemKind::Const(ty, default), ast::Generics::default()) } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? { // trait item macro. (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default()) } else { let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?; let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| { // This is somewhat dubious; We don't want to allow // argument names to be left off if there is a // definition... // We don't allow argument names to be left off in edition 2018. p.parse_arg_general(p.span.rust_2018(), true) })?; generics.where_clause = self.parse_where_clause()?; let sig = ast::MethodSig { header: FnHeader { unsafety, constness, abi, asyncness, }, decl: d, }; let body = match self.token { token::Semi => { self.bump(); *at_end = true; debug!("parse_trait_methods(): parsing required method"); None } token::OpenDelim(token::Brace) => { debug!("parse_trait_methods(): parsing provided method"); *at_end = true; let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(inner_attrs.iter().cloned()); Some(body) } token::Interpolated(ref nt) => { match **nt { token::NtBlock(..) => { *at_end = true; let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(inner_attrs.iter().cloned()); Some(body) } _ => { let token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); err.span_label(self.span, "expected `;` or `{`"); return Err(err); } } } _ => { let token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); err.span_label(self.span, "expected `;` or `{`"); return Err(err); } }; (ident, ast::TraitItemKind::Method(sig, body), generics) }; Ok(TraitItem { id: ast::DUMMY_NODE_ID, ident: name, attrs, generics, node, span: lo.to(self.prev_span), tokens: None, }) } /// Parses an optional return type `[ -> TY ]` in a function declaration. fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?)) } else { Ok(FunctionRetTy::Default(self.span.shrink_to_lo())) } } /// Parses a type. pub fn parse_ty(&mut self) -> PResult<'a, P> { self.parse_ty_common(true, true) } /// Parses a type in restricted contexts where `+` is not permitted. /// /// Example 1: `&'a TYPE` /// `+` is prohibited to maintain operator priority (P(+) < P(&)). /// Example 2: `value1 as TYPE + value2` /// `+` is prohibited to avoid interactions with expression grammar. 
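    ///
    /// For example (illustrative): `&dyn Display + Send` is rejected because
    /// `&` binds tighter than `+`; the trait object must be written as
    /// `&(dyn Display + Send)`. See `maybe_recover_from_bad_type_plus` below
    /// for the recovery that suggests exactly these parentheses.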
fn parse_ty_no_plus(&mut self) -> PResult<'a, P> { self.parse_ty_common(false, true) } fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool) -> PResult<'a, P> { maybe_whole!(self, NtTy, |x| x); let lo = self.span; let mut impl_dyn_multi = false; let node = if self.eat(&token::OpenDelim(token::Paren)) { // `(TYPE)` is a parenthesized type. // `(TYPE,)` is a tuple with a single field of type TYPE. let mut ts = vec![]; let mut last_comma = false; while self.token != token::CloseDelim(token::Paren) { ts.push(self.parse_ty()?); if self.eat(&token::Comma) { last_comma = true; } else { last_comma = false; break; } } let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus; self.expect(&token::CloseDelim(token::Paren))?; if ts.len() == 1 && !last_comma { let ty = ts.into_iter().nth(0).unwrap().into_inner(); let maybe_bounds = allow_plus && self.token.is_like_plus(); match ty.node { // `(TY_BOUND_NOPAREN) + BOUND + ...`. TyKind::Path(None, ref path) if maybe_bounds => { self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)? } TyKind::TraitObject(ref bounds, TraitObjectSyntax::None) if maybe_bounds && bounds.len() == 1 && !trailing_plus => { let path = match bounds[0] { GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(), GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"), }; self.parse_remaining_bounds(Vec::new(), path, lo, true)? } // `(TYPE)` _ => TyKind::Paren(P(ty)) } } else { TyKind::Tup(ts) } } else if self.eat(&token::Not) { // Never type `!` TyKind::Never } else if self.eat(&token::BinOp(token::Star)) { // Raw pointer TyKind::Ptr(self.parse_ptr()?) } else if self.eat(&token::OpenDelim(token::Bracket)) { // Array or slice let t = self.parse_ty()?; // Parse optional `; EXPR` in `[TYPE; EXPR]` let t = match self.maybe_parse_fixed_length_of_vec()? { None => TyKind::Slice(t), Some(length) => TyKind::Array(t, AnonConst { id: ast::DUMMY_NODE_ID, value: length, }), }; self.expect(&token::CloseDelim(token::Bracket))?; t } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) { // Reference self.expect_and()?; self.parse_borrowed_pointee()? } else if self.eat_keyword_noexpect(keywords::Typeof) { // `typeof(EXPR)` // In order to not be ambiguous, the type must be surrounded by parens. self.expect(&token::OpenDelim(token::Paren))?; let e = AnonConst { id: ast::DUMMY_NODE_ID, value: self.parse_expr()?, }; self.expect(&token::CloseDelim(token::Paren))?; TyKind::Typeof(e) } else if self.eat_keyword(keywords::Underscore) { // A type to be inferred `_` TyKind::Infer } else if self.token_is_bare_fn_keyword() { // Function pointer type self.parse_ty_bare_fn(Vec::new())? } else if self.check_keyword(keywords::For) { // Function pointer type or bound list (trait object type) starting with a poly-trait. // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; if self.token_is_bare_fn_keyword() { self.parse_ty_bare_fn(lifetime_defs)? } else { let path = self.parse_path(PathStyle::Type)?; let parse_plus = allow_plus && self.check_plus(); self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)? } } else if self.eat_keyword(keywords::Impl) { // Always parse bounds greedily for better error recovery. 
let bounds = self.parse_generic_bounds(None)?; impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds) } else if self.check_keyword(keywords::Dyn) && (self.span.rust_2018() || self.look_ahead(1, |t| t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t))) { self.bump(); // `dyn` // Always parse bounds greedily for better error recovery. let bounds = self.parse_generic_bounds(None)?; impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn) } else if self.check(&token::Question) || self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) { // Bound list (trait object type) TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?, TraitObjectSyntax::None) } else if self.eat_lt() { // Qualified path let (qself, path) = self.parse_qpath(PathStyle::Type)?; TyKind::Path(Some(qself), path) } else if self.token.is_path_start() { // Simple path let path = self.parse_path(PathStyle::Type)?; if self.eat(&token::Not) { // Macro invocation in type position let (delim, tts) = self.expect_delimited_token_tree()?; let node = Mac_ { path, tts, delim }; TyKind::Mac(respan(lo.to(self.prev_span), node)) } else { // Just a type path or bound list (trait object type) starting with a trait. // `Type` // `Trait1 + Trait2 + 'a` if allow_plus && self.check_plus() { self.parse_remaining_bounds(Vec::new(), path, lo, true)? } else { TyKind::Path(None, path) } } } else { let msg = format!("expected type, found {}", self.this_token_descr()); return Err(self.fatal(&msg)); }; let span = lo.to(self.prev_span); let ty = Ty { node, span, id: ast::DUMMY_NODE_ID }; // Try to recover from use of `+` with incorrect priority. self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty); self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?; let ty = self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)?; Ok(P(ty)) } fn parse_remaining_bounds(&mut self, generic_params: Vec, path: ast::Path, lo: Span, parse_plus: bool) -> PResult<'a, TyKind> { let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span)); let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)]; if parse_plus { self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded bounds.append(&mut self.parse_generic_bounds(None)?); } Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None)) } fn maybe_report_ambiguous_plus(&mut self, allow_plus: bool, impl_dyn_multi: bool, ty: &Ty) { if !allow_plus && impl_dyn_multi { let sum_with_parens = format!("({})", pprust::ty_to_string(&ty)); self.struct_span_err(ty.span, "ambiguous `+` in a type") .span_suggestion( ty.span, "use parentheses to disambiguate", sum_with_parens, Applicability::MachineApplicable ).emit(); } } fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PResult<'a, ()> { // Do not add `+` to expected tokens. 
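        // An illustrative sketch of this recovery (not from the original
        // source): for input like
        //
        //     fn f(t: &Foo + Bar) {}
        //
        // the parser finishes `&Foo`, sees the stray `+`, and emits E0178
        // with the suggestion
        //
        //     fn f(t: &(Foo + Bar)) {}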
if !allow_plus || !self.token.is_like_plus() { return Ok(()) } self.bump(); // `+` let bounds = self.parse_generic_bounds(None)?; let sum_span = ty.span.to(self.prev_span); let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178, "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty)); match ty.node { TyKind::Rptr(ref lifetime, ref mut_ty) => { let sum_with_parens = pprust::to_string(|s| { use crate::print::pprust::PrintState; s.s.word("&")?; s.print_opt_lifetime(lifetime)?; s.print_mutability(mut_ty.mutbl)?; s.popen()?; s.print_type(&mut_ty.ty)?; s.print_type_bounds(" +", &bounds)?; s.pclose() }); err.span_suggestion( sum_span, "try adding parentheses", sum_with_parens, Applicability::MachineApplicable ); } TyKind::Ptr(..) | TyKind::BareFn(..) => { err.span_label(sum_span, "perhaps you forgot parentheses?"); } _ => { err.span_label(sum_span, "expected a path"); }, } err.emit(); Ok(()) } // Try to recover from associated item paths like `[T]::AssocItem`/`(T, U)::AssocItem`. fn maybe_recover_from_bad_qpath(&mut self, base: T, allow_recovery: bool) -> PResult<'a, T> { // Do not add `::` to expected tokens. if !allow_recovery || self.token != token::ModSep { return Ok(base); } let ty = match base.to_ty() { Some(ty) => ty, None => return Ok(base), }; self.bump(); // `::` let mut segments = Vec::new(); self.parse_path_segments(&mut segments, T::PATH_STYLE, true)?; let span = ty.span.to(self.prev_span); let path_span = span.to(span); // use an empty path since `position` == 0 let recovered = base.to_recovered( Some(QSelf { ty, path_span, position: 0 }), ast::Path { segments, span }, ); self.diagnostic() .struct_span_err(span, "missing angle brackets in associated item path") .span_suggestion( // this is a best-effort recovery span, "try", recovered.to_string(), Applicability::MaybeIncorrect ).emit(); Ok(recovered) } fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> { let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None }; let mutbl = self.parse_mutability(); let ty = self.parse_ty_no_plus()?; return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl })); } fn parse_ptr(&mut self) -> PResult<'a, MutTy> { let mutbl = if self.eat_keyword(keywords::Mut) { Mutability::Mutable } else if self.eat_keyword(keywords::Const) { Mutability::Immutable } else { let span = self.prev_span; let msg = "expected mut or const in raw pointer type"; self.struct_span_err(span, msg) .span_label(span, msg) .help("use `*mut T` or `*const T` as appropriate") .emit(); Mutability::Immutable }; let t = self.parse_ty_no_plus()?; Ok(MutTy { ty: t, mutbl: mutbl }) } fn is_named_argument(&mut self) -> bool { let offset = match self.token { token::Interpolated(ref nt) => match **nt { token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), _ => 0, } token::BinOp(token::And) | token::AndAnd => 1, _ if self.token.is_keyword(keywords::Mut) => 1, _ => 0, }; self.look_ahead(offset, |t| t.is_ident()) && self.look_ahead(offset + 1, |t| t == &token::Colon) } /// Skips unexpected attributes and doc comments in this position and emits an appropriate /// error. 
fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { if let token::DocComment(_) = self.token { let mut err = self.diagnostic().struct_span_err( self.span, &format!("documentation comments cannot be applied to {}", applied_to), ); err.span_label(self.span, "doc comments are not allowed here"); err.emit(); self.bump(); } else if self.token == token::Pound && self.look_ahead(1, |t| { *t == token::OpenDelim(token::Bracket) }) { let lo = self.span; // Skip every token until next possible arg. while self.token != token::CloseDelim(token::Bracket) { self.bump(); } let sp = lo.to(self.span); self.bump(); let mut err = self.diagnostic().struct_span_err( sp, &format!("attributes cannot be applied to {}", applied_to), ); err.span_label(sp, "attributes are not allowed here"); err.emit(); } } /// This version of parse arg doesn't necessarily require identifier names. fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> { maybe_whole!(self, NtArg, |x| x); if let Ok(Some(_)) = self.parse_self_arg() { let mut err = self.struct_span_err(self.prev_span, "unexpected `self` argument in function"); err.span_label(self.prev_span, "`self` is only valid as the first argument of an associated function"); return Err(err); } let (pat, ty) = if require_name || self.is_named_argument() { debug!("parse_arg_general parse_pat (require_name:{})", require_name); self.eat_incorrect_doc_comment("method arguments"); let pat = self.parse_pat(Some("argument name"))?; if let Err(mut err) = self.expect(&token::Colon) { // If we find a pattern followed by an identifier, it could be an (incorrect) // C-style parameter declaration. if self.check_ident() && self.look_ahead(1, |t| { *t == token::Comma || *t == token::CloseDelim(token::Paren) }) { let ident = self.parse_ident().unwrap(); let span = pat.span.with_hi(ident.span.hi()); err.span_suggestion( span, "declare the type after the parameter binding", String::from(": "), Applicability::HasPlaceholders, ); } else if require_name && is_trait_item { if let PatKind::Ident(_, ident, _) = pat.node { err.span_suggestion( pat.span, "explicitly ignore parameter", format!("_: {}", ident), Applicability::MachineApplicable, ); } err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); } return Err(err); } self.eat_incorrect_doc_comment("a method argument's type"); (pat, self.parse_ty()?) } else { debug!("parse_arg_general ident_to_pat"); let parser_snapshot_before_ty = self.clone(); self.eat_incorrect_doc_comment("a method argument's type"); let mut ty = self.parse_ty(); if ty.is_ok() && self.token != token::Comma && self.token != token::CloseDelim(token::Paren) { // This wasn't actually a type, but a pattern looking like a type, // so we are going to rollback and re-parse for recovery. ty = self.unexpected(); } match ty { Ok(ty) => { let ident = Ident::new(keywords::Invalid.name(), self.prev_span); let pat = P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Ident( BindingMode::ByValue(Mutability::Immutable), ident, None), span: ty.span, }); (pat, ty) } Err(mut err) => { // Recover from attempting to parse the argument as a type without pattern. 
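                    // An illustrative sketch (not from the original source):
                    // for a bodyless trait method such as
                    //
                    //     trait T { fn f(ref arg: u8); }
                    //
                    // `ref arg` fails to parse as a type, so the parser restores
                    // the snapshot, re-parses `ref arg: u8` as pattern + type,
                    // and emits E0642 suggesting `_: u8` instead.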
err.cancel(); mem::replace(self, parser_snapshot_before_ty); let pat = self.parse_pat(Some("argument name"))?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; let mut err = self.diagnostic().struct_span_err_with_code( pat.span, "patterns aren't allowed in methods without bodies", DiagnosticId::Error("E0642".into()), ); err.span_suggestion_short( pat.span, "give this argument a name or use an underscore to ignore it", "_".to_owned(), Applicability::MachineApplicable, ); err.emit(); // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. let pat = P(Pat { node: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID }); (pat, ty) } } }; Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID }) } /// Parses a single function argument. crate fn parse_arg(&mut self) -> PResult<'a, Arg> { self.parse_arg_general(true, false) } /// Parses an argument in a lambda header (e.g., `|arg, arg|`). fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { let pat = self.parse_pat(Some("argument name"))?; let t = if self.eat(&token::Colon) { self.parse_ty()? } else { P(Ty { id: ast::DUMMY_NODE_ID, node: TyKind::Infer, span: self.prev_span, }) }; Ok(Arg { ty: t, pat, id: ast::DUMMY_NODE_ID }) } fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option>> { if self.eat(&token::Semi) { Ok(Some(self.parse_expr()?)) } else { Ok(None) } } /// Matches `token_lit = LIT_INTEGER | ...`. fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { token::Interpolated(ref nt) => match **nt { token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { ExprKind::Lit(ref lit) => { lit.node.clone() } _ => { return self.unexpected_last(&self.token); } }, _ => { return self.unexpected_last(&self.token); } }, token::Literal(lit, suf) => { let diag = Some((self.span, &self.sess.span_diagnostic)); let (suffix_illegal, result) = parse::lit_token(lit, suf, diag); if suffix_illegal { let sp = self.span; self.expect_no_suffix(sp, lit.literal_name(), suf) } result.unwrap() } token::Dot if self.look_ahead(1, |t| match t { token::Literal(parse::token::Lit::Integer(_) , _) => true, _ => false, }) => { // recover from `let x = .4;` let lo = self.span; self.bump(); if let token::Literal( parse::token::Lit::Integer(val), suffix, ) = self.token { let suffix = suffix.and_then(|s| { let s = s.as_str().get(); if ["f32", "f64"].contains(&s) { Some(s) } else { None } }).unwrap_or(""); self.bump(); let sp = lo.to(self.prev_span); let mut err = self.diagnostic() .struct_span_err(sp, "float literals must have an integer part"); err.span_suggestion( sp, "must have an integer part", format!("0.{}{}", val, suffix), Applicability::MachineApplicable, ); err.emit(); return Ok(match suffix { "f32" => ast::LitKind::Float(val, ast::FloatTy::F32), "f64" => ast::LitKind::Float(val, ast::FloatTy::F64), _ => ast::LitKind::FloatUnsuffixed(val), }); } else { unreachable!(); }; } _ => { return self.unexpected_last(&self.token); } }; self.bump(); Ok(out) } /// Matches `lit = true | false | token_lit`. crate fn parse_lit(&mut self) -> PResult<'a, Lit> { let lo = self.span; let lit = if self.eat_keyword(keywords::True) { LitKind::Bool(true) } else if self.eat_keyword(keywords::False) { LitKind::Bool(false) } else { let lit = self.parse_lit_token()?; lit }; Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) }) } /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). 
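    ///
    /// For example (illustrative): in a pattern such as `match x { -1 => {} }`,
    /// the `-1` is parsed here as a unary negation expression wrapping the
    /// literal `1`.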
    crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
        maybe_whole_expr!(self);

        let minus_lo = self.span;
        let minus_present = self.eat(&token::BinOp(token::Minus));
        let lo = self.span;
        let literal = self.parse_lit()?;
        let hi = self.prev_span;
        let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());

        if minus_present {
            let minus_hi = self.prev_span;
            let unary = self.mk_unary(UnOp::Neg, expr);
            Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new()))
        } else {
            Ok(expr)
        }
    }

    fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
        match self.token {
            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
                let span = self.span;
                self.bump();
                Ok(Ident::new(ident.name, span))
            }
            _ => self.parse_ident(),
        }
    }

    fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
        match self.token {
            token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
                let span = self.span;
                self.bump();
                Ok(Ident::new(ident.name, span))
            }
            _ => self.parse_ident(),
        }
    }

    /// Parses a qualified path.
    /// Assumes that the leading `<` has been parsed already.
    ///
    /// `qualified_path = <type [as trait_ref]>::path`
    ///
    /// # Examples
    /// `<T>::default`
    /// `<T as U>::a`
    /// `<T as U>::F::a<S>` (without disambiguator)
    /// `<T as U>::F::a::<S>` (with disambiguator)
    fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
        let lo = self.prev_span;
        let ty = self.parse_ty()?;

        // `path` will contain the prefix of the path up to the `>`,
        // if any (e.g., `U` in the `<T as U>::*` examples
        // above). `path_span` has the span of that path, or an empty
        // span in the case of something like `<T>::Bar`.
        let (mut path, path_span);
        if self.eat_keyword(keywords::As) {
            let path_lo = self.span;
            path = self.parse_path(PathStyle::Type)?;
            path_span = path_lo.to(self.prev_span);
        } else {
            path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
            path_span = self.span.to(self.span);
        }

        // See doc comment for `unmatched_angle_bracket_count`.
        self.expect(&token::Gt)?;
        if self.unmatched_angle_bracket_count > 0 {
            self.unmatched_angle_bracket_count -= 1;
            debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
        }

        self.expect(&token::ModSep)?;

        let qself = QSelf { ty, path_span, position: path.segments.len() };
        self.parse_path_segments(&mut path.segments, style, true)?;

        Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
    }

    /// Parses simple paths.
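    ///
    /// Editorial note: "simple" here means there is no `<type [as trait_ref]>`
    /// qualified-self prefix; paths of that form are handled by `parse_qpath` above.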
    ///
    /// `path = [::] segment+`
    /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
    ///
    /// # Examples
    /// `a::b::C<D>` (without disambiguator)
    /// `a::b::C::<D>` (with disambiguator)
    /// `Fn(Args)` (without disambiguator)
    /// `Fn::(Args)` (with disambiguator)
    pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
        self.parse_path_common(style, true)
    }

    crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool)
                               -> PResult<'a, ast::Path> {
        maybe_whole!(self, NtPath, |path| {
            if style == PathStyle::Mod &&
               path.segments.iter().any(|segment| segment.args.is_some()) {
                self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
            }
            path
        });

        let lo = self.meta_var_span.unwrap_or(self.span);
        let mut segments = Vec::new();
        let mod_sep_ctxt = self.span.ctxt();
        if self.eat(&token::ModSep) {
            segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
        }
        self.parse_path_segments(&mut segments, style, enable_warning)?;

        Ok(ast::Path { segments, span: lo.to(self.prev_span) })
    }

    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
    /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
    /// attributes.
    pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
        let meta_ident = match self.token {
            token::Interpolated(ref nt) => match **nt {
                token::NtMeta(ref meta) => match meta.node {
                    ast::MetaItemKind::Word => Some(meta.ident.clone()),
                    _ => None,
                },
                _ => None,
            },
            _ => None,
        };
        if let Some(path) = meta_ident {
            self.bump();
            return Ok(path);
        }
        self.parse_path(style)
    }

    fn parse_path_segments(&mut self,
                           segments: &mut Vec<PathSegment>,
                           style: PathStyle,
                           enable_warning: bool)
                           -> PResult<'a, ()> {
        loop {
            let segment = self.parse_path_segment(style, enable_warning)?;
            if style == PathStyle::Expr {
                // In order to check for trailing angle brackets, we must have finished
                // recursing (`parse_path_segment` can indirectly call this function),
                // that is, the next token must be the highlighted part of the below example:
                //
                // `Foo::<Bar as Baz<T>>::Qux`
                //                      ^ here
                //
                // As opposed to the below highlight (if we had only finished the first
                // recursion):
                //
                // `Foo::<Bar as Baz<T>>::Qux`
                //                     ^ here
                //
                // `PathStyle::Expr` is only provided at the root invocation and never in
                // `parse_path_segment` to recurse and therefore can be checked to maintain
                // this invariant.
                self.check_trailing_angle_brackets(&segment, token::ModSep);
            }
            segments.push(segment);

            if self.is_import_coupler() || !self.eat(&token::ModSep) {
                return Ok(());
            }
        }
    }

    fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool)
                          -> PResult<'a, PathSegment> {
        let ident = self.parse_path_segment_ident()?;

        let is_args_start = |token: &token::Token| match *token {
            token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) => true,
            _ => false,
        };
        let check_args_start = |this: &mut Self| {
            this.expected_tokens.extend_from_slice(
                &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
            );
            is_args_start(&this.token)
        };

        Ok(if style == PathStyle::Type && check_args_start(self) ||
              style != PathStyle::Mod && self.check(&token::ModSep)
                                      && self.look_ahead(1, |t| is_args_start(t)) {
            // Generic arguments are found - `<`, `(`, `::<` or `::(`.
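            //
            // Added commentary (illustrative): `Vec<u8>` begins its arguments with `<`
            // and `Fn(u8) -> u8` with `(`; per the condition above, those bare forms are
            // accepted only in `PathStyle::Type`, while the disambiguated `::<` / `::(`
            // forms (e.g., `Vec::<u8>`) are accepted in any non-`Mod` style.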
            if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning {
                self.diagnostic().struct_span_warn(self.prev_span, "unnecessary path disambiguator")
                    .span_label(self.prev_span, "try removing `::`").emit();
            }
            let lo = self.span;

            // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
            // it isn't, then we reset the unmatched angle bracket count as we're about to start
            // parsing a new path.
            if style == PathStyle::Expr {
                self.unmatched_angle_bracket_count = 0;
                self.max_angle_bracket_count = 0;
            }

            let args = if self.eat_lt() {
                // `<'a, T, A = U>`
                let (args, bindings) =
                    self.parse_generic_args_with_leading_angle_bracket_recovery(style, lo)?;
                self.expect_gt()?;
                let span = lo.to(self.prev_span);
                AngleBracketedArgs { args, bindings, span }.into()
            } else {
                // `(T, U) -> R`
                self.bump(); // `(`
                let (inputs, recovered) = self.parse_seq_to_before_tokens(
                    &[&token::CloseDelim(token::Paren)],
                    SeqSep::trailing_allowed(token::Comma),
                    TokenExpectType::Expect,
                    |p| p.parse_ty())?;
                if !recovered {
                    self.bump(); // `)`
                }
                let span = lo.to(self.prev_span);
                let output = if self.eat(&token::RArrow) {
                    Some(self.parse_ty_common(false, false)?)
                } else {
                    None
                };
                ParenthesizedArgs { inputs, output, span }.into()
            };

            PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
        } else {
            // Generic arguments are not found.
            PathSegment::from_ident(ident)
        })
    }

    crate fn check_lifetime(&mut self) -> bool {
        self.expected_tokens.push(TokenType::Lifetime);
        self.token.is_lifetime()
    }

    /// Parses a single lifetime `'a` or panics.
    crate fn expect_lifetime(&mut self) -> Lifetime {
        if let Some(ident) = self.token.lifetime() {
            let span = self.span;
            self.bump();
            Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
        } else {
            self.span_bug(self.span, "not a lifetime")
        }
    }

    fn eat_label(&mut self) -> Option<Label>