Rollup merge of rust-lang#61616 - petrochenkov:parsderef, r=oli-obk
parser: Remove `Deref` impl from `Parser`

Follow up to rust-lang#61541

You now have to write `self.token.span` instead of `self.span` in the parser, which is not as nice, but not too bad either.
It is probably still better than having people use both spellings and being confused about where `span` is actually defined.

r? @oli-obk @estebank
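
For readers skimming the diff below, here is a minimal, self-contained sketch of the pattern being removed, using simplified stand-in types (`Span`, `Token`, and `Parser` here are not the real rustc definitions). With the `Deref` impl in place, `parser.span` auto-derefs to `parser.token.span`; this commit deletes that impl and spells out the field access everywhere.

```rust
use std::ops::Deref;

// Simplified stand-ins for the real rustc types.
#[derive(Clone, Copy)]
struct Span(u32);

#[derive(Clone, Copy)]
struct Token {
    span: Span,
}

struct Parser {
    token: Token,
}

// Before this change, `Parser` deref'd to its current token, so `self.span`
// silently meant `self.token.span`.
impl Deref for Parser {
    type Target = Token;
    fn deref(&self) -> &Token {
        &self.token
    }
}

fn main() {
    let parser = Parser { token: Token { span: Span(0) } };
    let _implicit = parser.span;       // compiles only via the `Deref` impl (old style)
    let _explicit = parser.token.span; // the explicit spelling this commit uses everywhere
}
```

With the impl removed, only the explicit `parser.token.span` form compiles, which is the mechanical change repeated in each hunk below.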
Centril committed Jun 8, 2019
2 parents 5062ad3 + 3dbee57 commit ae487e0
Showing 13 changed files with 256 additions and 266 deletions.
4 changes: 2 additions & 2 deletions src/libsyntax/attr/mod.rs
@@ -735,9 +735,9 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
raw_attr.clone(),
);

- let start_span = parser.span;
+ let start_span = parser.token.span;
let (path, tokens) = panictry!(parser.parse_meta_item_unrestricted());
- let end_span = parser.span;
+ let end_span = parser.token.span;
if parser.token != token::Eof {
parse_sess.span_diagnostic
.span_err(start_span.to(end_span), "invalid crate attribute");
2 changes: 1 addition & 1 deletion src/libsyntax/config.rs
@@ -121,7 +121,7 @@ impl<'a> StripUnconfigured<'a> {
let mut expanded_attrs = Vec::with_capacity(1);

while !parser.check(&token::CloseDelim(token::Paren)) {
- let lo = parser.span.lo();
+ let lo = parser.token.span.lo();
let (path, tokens) = parser.parse_meta_item_unrestricted()?;
expanded_attrs.push((path, tokens, parser.prev_span.with_lo(lo)));
parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?;
2 changes: 1 addition & 1 deletion src/libsyntax/ext/expand.rs
@@ -1041,7 +1041,7 @@ impl<'a> Parser<'a> {
let msg = format!("macro expansion ignores token `{}` and any following",
self.this_token_to_string());
// Avoid emitting backtrace info twice.
- let def_site_span = self.span.with_ctxt(SyntaxContext::empty());
+ let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty());
let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
err.span_label(span, "caused by the macro expansion here");
let msg = format!(
2 changes: 1 addition & 1 deletion src/libsyntax/ext/source_util.rs
@@ -105,7 +105,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstrea
while self.p.token != token::Eof {
match panictry!(self.p.parse_item()) {
Some(item) => ret.push(item),
- None => self.p.diagnostic().span_fatal(self.p.span,
+ None => self.p.diagnostic().span_fatal(self.p.token.span,
&format!("expected item, found `{}`",
self.p.this_token_to_string()))
.raise()
14 changes: 7 additions & 7 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -675,7 +675,7 @@ pub fn parse(
//
// This MatcherPos instance is allocated on the stack. All others -- and
// there are frequently *no* others! -- are allocated on the heap.
- let mut initial = initial_matcher_pos(ms, parser.span);
+ let mut initial = initial_matcher_pos(ms, parser.token.span);
let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
let mut next_items = Vec::new();

@@ -721,15 +721,15 @@ pub fn parse(
return nameize(sess, ms, matches);
} else if eof_items.len() > 1 {
return Error(
- parser.span,
+ parser.token.span,
"ambiguity: multiple successful parses".to_string(),
);
} else {
return Failure(
- Token::new(token::Eof, if parser.span.is_dummy() {
- parser.span
+ Token::new(token::Eof, if parser.token.span.is_dummy() {
+ parser.token.span
} else {
- sess.source_map().next_point(parser.span)
+ sess.source_map().next_point(parser.token.span)
}),
"missing tokens in macro arguments",
);
@@ -753,7 +753,7 @@ pub fn parse(
.join(" or ");

return Error(
- parser.span,
+ parser.token.span,
format!(
"local ambiguity: multiple parsing options: {}",
match next_items.len() {
@@ -927,7 +927,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal {
sym::ty => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
- let span = p.span;
+ let span = p.token.span;
p.bump();
token::NtIdent(Ident::new(name, span), is_raw)
} else {
4 changes: 2 additions & 2 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -47,7 +47,7 @@ impl<'a> ParserAnyMacro<'a> {
let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
if !e.span.is_dummy() { // early end of macro arm (#52866)
- e.replace_span_with(parser.sess.source_map().next_point(parser.span));
+ e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
}
let msg = &e.message[0];
e.message[0] = (
@@ -63,7 +63,7 @@ impl<'a> ParserAnyMacro<'a> {
if parser.sess.source_map().span_to_filename(arm_span).is_real() {
e.span_label(arm_span, "in this macro arm");
}
- } else if !parser.sess.source_map().span_to_filename(parser.span).is_real() {
+ } else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() {
e.span_label(site_span, "in this macro invocation");
}
e
12 changes: 6 additions & 6 deletions src/libsyntax/parse/attr.rs
@@ -39,7 +39,7 @@ impl<'a> Parser<'a> {
just_parsed_doc_comment = false;
}
token::DocComment(s) => {
- let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
+ let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span);
if attr.style != ast::AttrStyle::Outer {
let mut err = self.fatal("expected outer doc comment");
err.note("inner doc comments like this (starting with \
@@ -83,7 +83,7 @@ impl<'a> Parser<'a> {
self.token);
let (span, path, tokens, style) = match self.token.kind {
token::Pound => {
- let lo = self.span;
+ let lo = self.token.span;
self.bump();

if let InnerAttributeParsePolicy::Permitted = inner_parse_policy {
@@ -93,7 +93,7 @@ impl<'a> Parser<'a> {
self.bump();
if let InnerAttributeParsePolicy::NotPermitted { reason } = inner_parse_policy
{
- let span = self.span;
+ let span = self.token.span;
self.diagnostic()
.struct_span_err(span, reason)
.note("inner attributes, like `#![no_std]`, annotate the item \
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
}
token::DocComment(s) => {
// we need to get the position of this token before we bump.
- let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
+ let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span);
if attr.style == ast::AttrStyle::Inner {
attrs.push(attr);
self.bump();
@@ -249,7 +249,7 @@ impl<'a> Parser<'a> {
return Ok(meta);
}

- let lo = self.span;
+ let lo = self.token.span;
let path = self.parse_path(PathStyle::Mod)?;
let node = self.parse_meta_item_kind()?;
let span = lo.to(self.prev_span);
@@ -284,7 +284,7 @@ impl<'a> Parser<'a> {

let found = self.this_token_to_string();
let msg = format!("expected unsuffixed literal or identifier, found `{}`", found);
- Err(self.diagnostic().struct_span_err(self.span, &msg))
+ Err(self.diagnostic().struct_span_err(self.token.span, &msg))
}

/// matches meta_seq = ( COMMASEP(meta_item_inner) )