diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index a0cf20b0c6394..c0641edc080d6 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -62,26 +62,6 @@ pub enum InvisibleSource { // Converted from `proc_macro::Delimiter` in // `proc_macro::Delimiter::to_internal`, i.e. returned by a proc macro. ProcMacro, - - // Converted from `TokenKind::Interpolated` in - // `TokenStream::flatten_token`. Treated similarly to `ProcMacro`. - FlattenToken, -} - -impl Delimiter { - // Should the parser skip these delimiters? Only happens for certain kinds - // of invisible delimiters. Once all interpolated nonterminals are removed, - // the answer should become `false` for all kinds, whereupon this function - // can be removed. - pub fn skip(&self) -> bool { - match self { - Delimiter::Invisible(src) => match src { - InvisibleSource::FlattenToken | InvisibleSource::ProcMacro => true, - InvisibleSource::MetaVar(_) => false, - }, - Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false, - } - } } // Note that the suffix is *not* considered when deciding the `LitKind` in this @@ -136,21 +116,8 @@ impl Lit { match token.uninterpolate().kind { Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), Literal(token_lit) => Some(token_lit), - OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(NonterminalKind::Literal))) => { - panic!("njn: FROM_TOKEN (1)"); - // if let NtExpr(expr) | NtLiteral(expr) = &**nt - // && let ast::ExprKind::Lit(token_lit) = expr.kind => - // { - // Some(token_lit) - // } - } - OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(NonterminalKind::Expr))) => { - panic!("njn: FROM_TOKEN (2)"); - // if let NtExpr(expr) | NtLiteral(expr) = &**nt - // && let ast::ExprKind::Lit(token_lit) = expr.kind => - // { - // Some(token_lit) - // } + OpenDelim(Delimiter::Invisible(source)) => { + panic!("njn: from_token {source:?}"); } _ => None, } @@ -415,8 +382,8 @@ impl Token { match self.kind { InterpolatedIdent(_, _, uninterpolated_span) | InterpolatedLifetime(_, uninterpolated_span) => uninterpolated_span, - OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(kind))) => { - panic!("njn: uninterpolated_span {kind:?}"); + OpenDelim(Delimiter::Invisible(source)) => { + panic!("njn: uninterpolated_span {source:?}"); } _ => self.span, } @@ -473,8 +440,8 @@ impl Token { NonterminalKind::Expr | NonterminalKind::Literal | NonterminalKind::Path - ))) - => true, + ))) | + OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => false, } } @@ -501,7 +468,8 @@ impl Token { NonterminalKind::PatWithOr | NonterminalKind::Path | NonterminalKind::Literal - ))) => true, + ))) | + OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => false, } } @@ -524,7 +492,8 @@ impl Token { OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar( NonterminalKind::Ty | NonterminalKind::Path - ))) => true, + ))) | + OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => false, } } @@ -536,6 +505,7 @@ impl Token { OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar( NonterminalKind::Block | NonterminalKind::Expr | NonterminalKind::Literal, ))) => true, + OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => self.can_begin_literal_maybe_minus(), } } @@ -592,7 +562,8 @@ impl Token { Ident(name, false) if name.is_bool_lit() => true, OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar( NonterminalKind::Literal | NonterminalKind::Expr, - ))) => 
true, + ))) + | OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => false, } } @@ -658,6 +629,7 @@ impl Token { /// Would `maybe_reparse_metavar_expr` in `parser.rs` return `Ok(..)`? /// That is, is this a pre-parsed expression dropped into the token stream /// (which happens while parsing the result of macro expansion)? + // njn: proc macro? pub fn is_metavar_expr(&self) -> bool { matches!( self.is_metavar_seq(), @@ -672,6 +644,7 @@ impl Token { /// Are we at a block from a metavar (`$b:block`)? pub fn is_metavar_block(&self) -> bool { + // njn: handle proc-macro here too? matches!(self.is_metavar_seq(), Some(NonterminalKind::Block)) } @@ -687,6 +660,7 @@ impl Token { pub fn is_path_start(&self) -> bool { self == &ModSep || self.is_qpath_start() + // njn: proc macro? || matches!(self.is_metavar_seq(), Some(NonterminalKind::Path)) || self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident() @@ -760,6 +734,13 @@ impl Token { } } + /// Is this an invisible open delimiter at the start of a token sequence + /// from a proc macro? + // njn: need to use this more + pub fn is_proc_macro_seq(&self) -> bool { + matches!(self.kind, OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro))) + } + pub fn glue(&self, joint: &Token) -> Option { let kind = match self.kind { Eq => match joint.kind { diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 1edcbd6f02d69..22d3f1e01b8d5 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -15,7 +15,7 @@ use crate::ast::AttrStyle; use crate::ast_traits::{HasAttrs, HasSpan, HasTokens}; -use crate::token::{self, Delimiter, InvisibleSource, Token, TokenKind}; +use crate::token::{self, Delimiter, InvisibleSource, NonterminalKind, Token, TokenKind}; use crate::AttrVec; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; @@ -456,9 +456,10 @@ impl TokenStream { Token::new(token::Ident(name, is_raw), uninterpolated_span), spacing, ), + // njn: not actually a metavar, but we know the kind token::InterpolatedLifetime(name, uninterpolated_span) => TokenTree::Delimited( DelimSpan::from_single(token.span), - Delimiter::Invisible(InvisibleSource::FlattenToken), + Delimiter::Invisible(InvisibleSource::MetaVar(NonterminalKind::Lifetime)), TokenStream::token_alone(token::Lifetime(name), uninterpolated_span), ), _ => TokenTree::Token(*token, spacing), @@ -474,6 +475,7 @@ impl TokenStream { } } + // njn: do we still need this? #[must_use] pub fn flattened(&self) -> TokenStream { fn can_skip(stream: &TokenStream) -> bool { diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index eb6e92965b93b..50773a653ac40 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -721,8 +721,7 @@ fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool { ident == sym::compile_error && let mbe::TokenTree::Token(bang) = bang && let TokenKind::Not = bang.kind && - let mbe::TokenTree::Delimited(_, del) = args && - !del.delim.skip() + let mbe::TokenTree::Delimited(..) = args { true } else { diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 8e3395bf30370..ffe0b1dcb3201 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -147,17 +147,9 @@ fn parse_tree<'a>( match tree { // `tree` is a `$` token. 
Look at the next token in `trees` &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => { - // FIXME: Handle `Invisible`-delimited groups in a more systematic way - // during parsing. - let mut next = outer_trees.next(); - let mut trees: Box>; - match next { - Some(tokenstream::TokenTree::Delimited(_, delim, tts)) if delim.skip() => { - trees = Box::new(tts.trees()); - next = trees.next(); - } - _ => trees = Box::new(outer_trees), - } + let next = outer_trees.next(); + let mut trees: Box> = + Box::new(outer_trees); match next { // `tree` is followed by a delimited set of token trees. diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 7d3266f451cdb..f2687a8440b58 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -709,6 +709,7 @@ impl<'a> Parser<'a> { } /// Returns the span of expr if it was not interpolated, or the span of the interpolated token. + // njn: needs adjusting for ProcMacro? fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { TokenKind::InterpolatedIdent(..) | TokenKind::InterpolatedLifetime(..) => { @@ -1363,16 +1364,39 @@ impl<'a> Parser<'a> { /// correctly if called from `parse_dot_or_call_expr()`. fn parse_expr_bottom(&mut self) -> PResult<'a, P> { maybe_recover_from_interpolated_ty_qpath!(self, true); - //eprintln!("AAA {:?}", self.token); + //eprintln!("AAA {:?}", self.token.kind); maybe_reparse_metavar_expr!(self); - //eprintln!("BBB {:?}", self.token); + //eprintln!("BBB {:?}", self.token.kind); + + if let token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) = self.token.kind + { + //eprintln!("BUMP {:?}", self.token.kind); + self.bump(); + // njn: parse_expr_force_collect? + match self.parse_expr() { + Ok(expr) => { + match self.expect(&token::CloseDelim(Delimiter::Invisible( + InvisibleSource::ProcMacro, + ))) { + Ok(_) => { + return Ok(expr); + } + Err(_) => panic!("njn: no invisible close delim: {:?}", self.token), + } + } + Err(_) => { + panic!("njn: bad expr parse"); + } + } + } + //eprintln!("CCC {:?}", self.token.kind); // Outer attributes are already parsed and will be // added to the return value after the fact. // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. let lo = self.token.span; - if let token::Literal(_) = self.token.kind { + let res = if let token::Literal(_) = self.token.kind { // This match arm is a special-case of the `_` match arm below and // could be removed without changing functionality, but it's faster // to have it here, especially for programs with large constants. @@ -1473,7 +1497,9 @@ impl<'a> Parser<'a> { } } else { self.parse_expr_lit() - } + }; + //eprintln!("DDD {:?} -> {:#?}", self.token.kind, res); + res } fn parse_expr_lit(&mut self) -> PResult<'a, P> { @@ -1964,6 +1990,7 @@ impl<'a> Parser<'a> { &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { + // njn: proc macro too? 
if let Some(NonterminalKind::Expr | NonterminalKind::Literal) = self.token.is_metavar_seq() { // njn: not checking for ExprKind::Err @@ -2090,6 +2117,9 @@ impl<'a> Parser<'a> { None } } + token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => { + panic!("njn: maybe_parse_token_lit"); + } _ => None, } } @@ -2157,6 +2187,25 @@ impl<'a> Parser<'a> { pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P> { maybe_reparse_metavar_expr!(self); + // if let token::OpenDelim(Delimiter::Invisible) = self.token.kind { + // // njn: need parse_expr for negative case? + // self.bump(); + // // njn: parse_expr_force_collect? + // match self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus()) { + // Ok(expr) => { + // match self.expect(&token::CloseDelim(Delimiter::Invisible)) { + // Ok(_) => { + // return Ok(expr); + // } + // Err(_) => panic!("njn: no invisible close delim: {:?}", self.token), + // } + // } + // Err(_) => { + // panic!("njn: bad expr parse"); + // } + // } + // } + let lo = self.token.span; let minus_present = self.eat(&token::BinOp(token::Minus)); let (token_lit, span) = self.parse_token_lit()?; diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 431a055d5550a..3e330cc503143 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -130,6 +130,36 @@ impl<'a> Parser<'a> { return Ok(Some(item.into_inner())); } + if let token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) = self.token.kind + { + //eprintln!("ITEM BUMP {:?}", self.token.kind); + self.bump(); + match self.parse_item(ForceCollect::Yes) { + Ok(Some(mut item)) => { + match self.expect(&token::CloseDelim(Delimiter::Invisible( + InvisibleSource::ProcMacro, + ))) { + Ok(_) => { + attrs.prepend_to_nt_inner(&mut item.attrs); + return Ok(Some(item.into_inner())); + } + Err(_) => panic!("njn: no invisible close delim 1: {:?}", self.token), + } + } + Ok(None) => { + panic!("njn: missing item {:?}", self.token); + // match self.expect(&token::CloseDelim(Delimiter::Invisible)) { + // Ok(_) => return Ok(None), + // // njn: hitting on tests/ui/proc-macro/issue-75734-pp-paren.rs, hmm + // Err(_) => panic!("njn: no invisible close delim 2: {:?}", self.token), + // } + } + Err(_) => { + panic!("njn: bad item parse"); + } + } + } + let item = self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| { let item = diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 7e8b759505ce7..a508147d23a6c 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -282,39 +282,28 @@ impl TokenCursor { /// This always-inlined version should only be used on hot code paths. #[inline(always)] fn inlined_next(&mut self) -> (Token, Spacing) { - loop { - // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix - // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions - // below can be removed. 
- if let Some(tree) = self.tree_cursor.next_ref() { - match tree { - &TokenTree::Token(token, spacing) => { - debug_assert!(!matches!( - token.kind, - token::OpenDelim(_) | token::CloseDelim(_) - )); - return (token, spacing); - } - &TokenTree::Delimited(sp, delim, ref tts) => { - let trees = tts.clone().into_trees(); - self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp)); - if !delim.skip() { - return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone); - } - // No open delimiter to return; continue on to the next iteration. - } - }; - } else if let Some((tree_cursor, delim, span)) = self.stack.pop() { - // We have exhausted this token stream. Move back to its parent token stream. - self.tree_cursor = tree_cursor; - if !delim.skip() { - return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone); + if let Some(tree) = self.tree_cursor.next_ref() { + match tree { + &TokenTree::Token(token, spacing) => { + debug_assert!(!matches!( + token.kind, + token::OpenDelim(_) | token::CloseDelim(_) + )); + (token, spacing) + } + &TokenTree::Delimited(sp, delim, ref tts) => { + let trees = tts.clone().into_trees(); + self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp)); + (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone) } - // No close delimiter to return; continue on to the next iteration. - } else { - // We have exhausted the outermost token stream. - return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone); } + } else if let Some((tree_cursor, delim, span)) = self.stack.pop() { + // We have exhausted this token stream. Move back to its parent token stream. + self.tree_cursor = tree_cursor; + (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone) + } else { + // We have exhausted the outermost token stream. + (Token::new(token::Eof, DUMMY_SP), Spacing::Alone) } } } @@ -1117,10 +1106,6 @@ impl<'a> Parser<'a> { next.0.span = fallback_span.with_ctxt(next.0.span.ctxt()); //eprintln!("fallback {:?}", next.0.span); } - debug_assert!(!matches!( - next.0.kind, - token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip() - )); self.inlined_bump_with(1, next) } @@ -1132,52 +1117,38 @@ impl<'a> Parser<'a> { return looker(&self.token); } - if let Some(&(_, delim, span)) = self.token_cursor.stack.last() - && !delim.skip() - { - // We are not in the outermost token stream, and the token stream - // we are in has non-skipped delimiters. Look for skipped - // delimiters in the lookahead range. + // njn: more simplification here? + + if let Some(&(_, delim, span)) = self.token_cursor.stack.last() { + // We are not in the outermost token stream. Do lookahead by plain + // indexing. let tree_cursor = &self.token_cursor.tree_cursor; - let any_skip = (0..dist).any(|i| { - let token = tree_cursor.look_ahead(i); - matches!(token, Some(TokenTree::Delimited(_, delim, _)) if delim.skip()) - }); - if !any_skip { - // There were no skipped delimiters. Do lookahead by plain indexing. - return match tree_cursor.look_ahead(dist - 1) { - Some(tree) => { - // Indexing stayed within the current token stream. - match tree { - TokenTree::Token(token, _) => looker(token), - TokenTree::Delimited(dspan, delim, _) => { - looker(&Token::new(token::OpenDelim(*delim), dspan.open)) - } + return match tree_cursor.look_ahead(dist - 1) { + Some(tree) => { + // Indexing stayed within the current token stream. 
+ match tree { + TokenTree::Token(token, _) => looker(token), + TokenTree::Delimited(dspan, delim, _) => { + looker(&Token::new(token::OpenDelim(*delim), dspan.open)) } } - None => { - // Indexing went past the end of the current token - // stream. Use the close delimiter, no matter how far - // ahead `dist` went. - looker(&Token::new(token::CloseDelim(delim), span.close)) - } - }; - } + } + None => { + // Indexing went past the end of the current token + // stream. Use the close delimiter, no matter how far + // ahead `dist` went. + looker(&Token::new(token::CloseDelim(delim), span.close)) + } + }; } // We are in a more complex case. Just clone the token cursor and use - // `next`, skipping delimiters as necessary. Slow but simple. + // `next`. Slow but simple. let mut cursor = self.token_cursor.clone(); let mut i = 0; let mut token = Token::dummy(); while i < dist { token = cursor.next().0; - if matches!( - token.kind, - token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip() - ) { - continue; - } i += 1; } looker(&token) @@ -1412,6 +1383,9 @@ impl<'a> Parser<'a> { /// so emit a proper diagnostic. // Public for rustfmt usage. pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> { + // njn: possible empty `vis` causes problems -- if you see + // open-invis-proc-macro, could be a vis, could be the following item? + // Hard to tell without the metavar kind of the open delim if let Some(vis) = maybe_reparse_metavar_seq!( self, NonterminalKind::Vis, diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index e71893b707113..f91bc6646875b 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -56,7 +56,7 @@ impl<'a> Parser<'a> { | token::Ident(..) | token::InterpolatedIdent(..) | token::InterpolatedLifetime(..) - | token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(_))) => true, + | token::OpenDelim(Delimiter::Invisible(_)) => true, _ => token.can_begin_type(), }, NonterminalKind::Block => match &token.kind { @@ -77,6 +77,7 @@ impl<'a> Parser<'a> { unreachable!() } }, + token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, token::InterpolatedLifetime(..) => true, _ => false, }, @@ -85,6 +86,7 @@ impl<'a> Parser<'a> { token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(kind))) => { may_be_ident(*kind) } + token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => false, }, NonterminalKind::PatParam { .. 
} | NonterminalKind::PatWithOr => match &token.kind { @@ -106,6 +108,7 @@ impl<'a> Parser<'a> { token::OpenDelim(Delimiter::Invisible(InvisibleSource::MetaVar(kind))) => { may_be_ident(*kind) } + token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) => true, _ => false, }, NonterminalKind::Lifetime => match &token.kind { diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index d64b3afe1a8a5..ffcd1500ab368 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -12,7 +12,7 @@ use crate::errors::{ use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_reparse_metavar_seq}; use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, NonterminalKind}; +use rustc_ast::token::{self, Delimiter, InvisibleSource, NonterminalKind}; use rustc_ast::{ self as ast, AttrVec, BindingAnnotation, ByRef, Expr, ExprKind, MacCall, Mutability, Pat, PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax, @@ -355,6 +355,32 @@ impl<'a> Parser<'a> { return Ok(pat); } + if let token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) = self.token.kind + { + //eprintln!("PAT BUMP {:?}", self.token.kind); + self.bump(); + match self.parse_pat_allow_top_alt( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + ) { + Ok(pat) => { + match self.expect(&token::CloseDelim(Delimiter::Invisible( + InvisibleSource::ProcMacro, + ))) { + Ok(_) => { + return Ok(pat); + } + Err(_) => panic!("njn: no invisible close delim: {:?}", self.token), + } + } + Err(_) => { + panic!("njn: bad pat parse"); + } + } + } + let mut lo = self.token.span; if self.token.is_keyword(kw::Let) && self.look_ahead(1, |tok| tok.can_begin_pattern()) { @@ -609,6 +635,7 @@ impl<'a> Parser<'a> { self.recover_additional_muts(); if let Some(NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr) = + // njn: proc macro? self.token.is_metavar_seq() { self.expected_ident_found_err().emit(); diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 294d051c40963..5c40877054537 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -190,6 +190,7 @@ impl<'a> Parser<'a> { return Ok(path.into_inner()); } + // njn: proc macro? if let Some(NonterminalKind::Ty) = self.token.is_metavar_seq() { let mut self2 = self.clone(); let ty = diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 9e5a9f8984a31..c682fd552ed81 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -12,7 +12,7 @@ use crate::maybe_reparse_metavar_seq; use ast::Label; use rustc_ast as ast; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, NonterminalKind, TokenKind}; +use rustc_ast::token::{self, Delimiter, InvisibleSource, NonterminalKind, TokenKind}; use rustc_ast::util::classify; use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle}; use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt}; @@ -61,6 +61,51 @@ impl<'a> Parser<'a> { return Ok(Some(stmt.into_inner())); } + // njn: We have this statement: + // <<1 + 2>> * 3; + // When the <<>> is known to be an expression, we don't try to parse it + // as a statement here. Instead we end up parsing the entire `<<1 + 2>> + // * 3` as an expression. 
+ // But when the <<>> isn't known to be an expression, we currently try + // to parse it as a statement, and the `<<1 + 2>>` gets parsed as a + // Stmt::Expr, and then the `* 3` part is unexpected and causes an + // error. + // + // Fix: try to parse as a statement at the end? parse it as an expression + if let token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) = self.token.kind + { + //eprintln!("STMT BUMP {:?}", self.token.kind); + self.bump(); + match self.parse_stmt(ForceCollect::Yes) { + Ok(Some(mut stmt)) => { + //eprintln!("STMT MID {:?}", self.token.kind); + match self.expect(&token::CloseDelim(Delimiter::Invisible( + InvisibleSource::ProcMacro, + ))) { + Ok(_) => { + //eprintln!("STMT END {:?} -> {:#?}", self.token.kind, stmt); + stmt.visit_attrs(|stmt_attrs| { + attrs.prepend_to_nt_inner(stmt_attrs); + }); + return Ok(Some(stmt)); + } + Err(_) => panic!("njn: no invisible close delim 1: {:?}", self.token), + } + } + Ok(None) => { + panic!("njn: missing stmt {:?}", self.token); + // match self.expect(&token::CloseDelim(Delimiter::Invisible)) { + // Ok(_) => return Ok(None), + // // njn: hitting on tests/ui/proc-macro/issue-75734-pp-paren.rs, hmm + // Err(_) => panic!("njn: no invisible close delim 2: {:?}", self.token), + // } + } + Err(_) => { + panic!("njn: bad stmt parse"); + } + } + } + if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) { self.bump(); let mut_let_span = lo.to(self.token.span); diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 25ca37901bf9d..89c0230119b64 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -10,7 +10,7 @@ use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_reparse_metavar_seq} use ast::DUMMY_NODE_ID; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, InvisibleSource, NonterminalKind, Token, TokenKind}; use rustc_ast::util::case::Case; use rustc_ast::{ self as ast, BareFnTy, BoundPolarity, FnRetTy, GenericBound, GenericBounds, GenericParam, @@ -263,6 +263,27 @@ impl<'a> Parser<'a> { return Ok(ty); } + if let token::OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro)) = self.token.kind + { + //eprintln!("TY BUMP {:?}", self.token.kind); + self.bump(); + match self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover()) { + Ok(ty) => { + match self.expect(&token::CloseDelim(Delimiter::Invisible( + InvisibleSource::ProcMacro, + ))) { + Ok(_) => { + return Ok(ty); + } + Err(_) => panic!("njn: no invisible close delim: {:?}", self.token), + } + } + Err(_) => { + panic!("njn: bad ty parse"); + } + } + } + let lo = self.token.span; let mut impl_dyn_multi = false; let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { diff --git a/tests/ui/proc-macro/issue-75734-pp-paren.rs b/tests/ui/proc-macro/issue-75734-pp-paren.rs index faa93787d1385..26710bd9ac93d 100644 --- a/tests/ui/proc-macro/issue-75734-pp-paren.rs +++ b/tests/ui/proc-macro/issue-75734-pp-paren.rs @@ -14,7 +14,7 @@ extern crate test_macros; macro_rules! mul_2 { ($val:expr) => { - print_bang!($val * 2); + print_bang!($val * 3); }; } @@ -22,5 +22,5 @@ macro_rules! mul_2 { #[print_attr] fn main() { &|_: u8| {}; - mul_2!(1 + 1); + mul_2!(1 + 2); }
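
The `mul_2!`/`print_bang!` test tweak above and the `<<1 + 2>> * 3` discussion in `parse_stmt` both hinge on the same grouping property. As a self-contained analogue (no proc macros and none of the compiler internals touched by this patch; `times_three!` is a made-up macro used only for illustration), this is the behaviour the invisible delimiters have to preserve:

// Stand-alone analogue of the grouping problem: an `$e:expr` capture must act
// as a single grouped operand when substituted next to a higher-precedence
// operator, which is exactly what an invisible-delimited group encodes in the
// token stream.
macro_rules! times_three {
    ($e:expr) => { $e * 3 };
}

fn main() {
    // Must evaluate as (1 + 2) * 3 = 9, not as 1 + 2 * 3 = 7.
    assert_eq!(times_three!(1 + 2), 9);
}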
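
The reparse blocks added to the expression, item, pattern, statement and type parsers all follow one shape: bump past `OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro))`, run the normal parser for that nonterminal, then expect the matching invisible close delimiter. A minimal sketch of how that could be factored into a single helper follows; `reparse_proc_macro_group` is a hypothetical name, error recovery (the current `panic!("njn: ...")` placeholders) is deliberately left to the caller, and the only new API it leans on is the `Token::is_proc_macro_seq` added in this patch.

impl<'a> Parser<'a> {
    // Hypothetical helper, not part of the patch: returns `None` if the
    // current token is not the invisible open delimiter that marks
    // proc-macro output, otherwise reparses the pre-parsed group with the
    // supplied parser and checks for the matching close delimiter.
    fn reparse_proc_macro_group<T>(
        &mut self,
        parse: impl FnOnce(&mut Self) -> PResult<'a, T>,
    ) -> Option<PResult<'a, T>> {
        if !self.token.is_proc_macro_seq() {
            return None;
        }
        // Eat `OpenDelim(Delimiter::Invisible(InvisibleSource::ProcMacro))`.
        self.bump();
        Some(parse(self).and_then(|parsed| {
            // The group must end with the matching invisible close delimiter.
            self.expect(&token::CloseDelim(Delimiter::Invisible(
                InvisibleSource::ProcMacro,
            )))?;
            Ok(parsed)
        }))
    }
}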