Move jointness info from TokenStream to Token
Part of #63689.
matklad committed Aug 14, 2020
1 parent 5e3f1b1 commit 922ec17
Showing 13 changed files with 160 additions and 156 deletions.
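At a high level, this commit moves the jointness ("spacing") bit off the token stream — where each entry was a `(TokenTree, IsJoint)` pair — and onto the `Token` itself as a `spacing: Spacing` field. The sketch below is a simplified, self-contained model of the before/after shapes; the type names are stand-ins for illustration, not the real `rustc_ast` definitions.

// Simplified stand-ins for illustration; the real types live in
// src/librustc_ast/token.rs and src/librustc_ast/tokenstream.rs.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Spacing {
    Alone,
    Joint,
}

// Before this commit: jointness was a property of a tree's position in the stream.
#[allow(dead_code)]
mod before {
    #[derive(Clone, Copy, Debug, PartialEq)]
    pub enum IsJoint {
        Joint,
        NonJoint,
    }
    #[derive(Clone, Debug)]
    pub struct Token {
        pub text: String,
    }
    #[derive(Clone, Debug)]
    pub enum TokenTree {
        Token(Token),
    }
    pub type TreeAndJoint = (TokenTree, IsJoint);
    pub struct TokenStream(pub Vec<TreeAndJoint>);
}

// After this commit: jointness travels with each token.
mod after {
    use super::Spacing;
    #[derive(Clone, Debug)]
    pub struct Token {
        pub text: String,
        pub spacing: Spacing,
    }
    #[derive(Clone, Debug)]
    pub enum TokenTree {
        Token(Token),
    }
    pub struct TokenStream(pub Vec<TokenTree>);
}

fn main() {
    // "&&x" modeled as two joint `&` tokens followed by an `x` that stands alone.
    let stream = after::TokenStream(vec![
        after::TokenTree::Token(after::Token { text: "&".into(), spacing: Spacing::Joint }),
        after::TokenTree::Token(after::Token { text: "&".into(), spacing: Spacing::Alone }),
        after::TokenTree::Token(after::Token { text: "x".into(), spacing: Spacing::Alone }),
    ]);
    for after::TokenTree::Token(token) in &stream.0 {
        println!("{} is {:?}", token.text, token.spacing);
    }
}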
20 changes: 12 additions & 8 deletions src/librustc_ast/attr/mod.rs
@@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment};
use crate::mut_visit::visit_clobber;
use crate::ptr::P;
use crate::token::{self, CommentKind, Token};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};

use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::{BytePos, Spanned};
@@ -362,17 +362,17 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
}

impl MetaItem {
fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
fn token_trees_and_joints(&self) -> Vec<TokenTree> {
let mut idents = vec![];
let mut last_pos = BytePos(0 as u32);
for (i, segment) in self.path.segments.iter().enumerate() {
let is_first = i == 0;
if !is_first {
let mod_sep_span =
Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt());
idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
idents.push(TokenTree::token(token::ModSep, mod_sep_span));
}
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)));
last_pos = segment.ident.span.hi();
}
idents.extend(self.kind.token_trees_and_joints(self.span));
@@ -388,6 +388,7 @@ impl MetaItem {
Some(TokenTree::Token(Token {
kind: kind @ (token::Ident(..) | token::ModSep),
span,
spacing: _,
})) => 'arm: {
let mut segments = if let token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek()
@@ -401,8 +402,11 @@ impl MetaItem {
vec![PathSegment::path_root(span)]
};
loop {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) =
tokens.next().map(TokenTree::uninterpolate)
if let Some(TokenTree::Token(Token {
kind: token::Ident(name, _),
span,
spacing: _,
})) = tokens.next().map(TokenTree::uninterpolate)
{
segments.push(PathSegment::from_ident(Ident::new(name, span)));
} else {
@@ -459,7 +463,7 @@ impl MetaItemKind {
}
}

fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
fn token_trees_and_joints(&self, span: Span) -> Vec<TokenTree> {
match *self {
MetaItemKind::Word => vec![],
MetaItemKind::NameValue(ref lit) => {
@@ -554,7 +558,7 @@ impl NestedMetaItem {
}
}

fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
fn token_trees_and_joints(&self) -> Vec<TokenTree> {
match *self {
NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(),
NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()],
4 changes: 2 additions & 2 deletions src/librustc_ast/mut_visit.rs
@@ -640,14 +640,14 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {

pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
visit_vec(tts, |tree| vis.visit_tt(tree));
}

// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
// In practice the ident part is not actually used by specific visitors right now,
// but there's a test below checking that it works.
pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
let Token { kind, span } = t;
let Token { kind, span, spacing: _ } = t;
match kind {
token::Ident(name, _) | token::Lifetime(name) => {
let mut ident = Ident::new(*name, *span);
29 changes: 26 additions & 3 deletions src/librustc_ast/token.rs
@@ -267,10 +267,24 @@ pub enum TokenKind {
#[cfg(target_arch = "x86_64")]
rustc_data_structures::static_assert_size!(TokenKind, 16);

// FIXME: remove the dedicated joint token kinds in favor of `Spacing`, to match `proc_macro`.
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum Spacing {
Alone,
Joint,
}

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub struct Token {
pub kind: TokenKind,
pub span: Span,
/// Is this operator token immediately followed by another operator token,
/// without whitespace?
///
/// At the moment, this field is only used for proc_macros, and joint tokens
/// are usually represented by dedicated token kinds. We want to transition
/// to using jointness everywhere though (#63689).
pub spacing: Spacing,
}

impl TokenKind {
@@ -324,7 +338,11 @@ impl TokenKind {

impl Token {
pub fn new(kind: TokenKind, span: Span) -> Self {
Token { kind, span }
Token::with_spacing(kind, span, Spacing::Alone)
}

pub fn with_spacing(kind: TokenKind, span: Span, spacing: Spacing) -> Self {
Token { kind, span, spacing }
}

/// Some token that will be thrown away later.
@@ -622,6 +640,9 @@ impl Token {
}

pub fn glue(&self, joint: &Token) -> Option<Token> {
if self.spacing == Spacing::Alone {
return None;
}
let kind = match self.kind {
Eq => match joint.kind {
Eq => EqEq,
@@ -678,7 +699,9 @@ impl Token {
| Shebang(..) | Unknown(..) | Eof => return None,
};

Some(Token::new(kind, self.span.to(joint.span)))
let mut token = Token::new(kind, self.span.to(joint.span));
token.spacing = joint.spacing;
Some(token)
}
}

@@ -709,7 +732,7 @@ pub enum Nonterminal {

// `Nonterminal` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
rustc_data_structures::static_assert_size!(Nonterminal, 40);
rustc_data_structures::static_assert_size!(Nonterminal, 48);

#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum NonterminalKind {
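The new `glue` contract can be summarized as: gluing only happens when the left-hand token carries `Spacing::Joint`, and the glued token inherits the right-hand token's spacing. Below is a minimal sketch of that contract written against the rustc-internal API shown above; it only builds inside the compiler tree, and `rustc_span::DUMMY_SP` stands in for real spans.

// Sketch only: rustc_ast and rustc_span are internal compiler crates.
use rustc_ast::token::{self, Spacing, Token};
use rustc_span::DUMMY_SP;

fn glue_demo() {
    // `=` written immediately before another `=` (no whitespace): the first token
    // carries Spacing::Joint, so the pair can be glued into `==`.
    let joint_eq = Token::with_spacing(token::Eq, DUMMY_SP, Spacing::Joint);
    let next_eq = Token::with_spacing(token::Eq, DUMMY_SP, Spacing::Alone);
    let glued = joint_eq.glue(&next_eq).expect("joint `=` `=` glues to `==`");
    assert!(matches!(glued.kind, token::EqEq));
    // The result takes its spacing from the right-hand token.
    assert_eq!(glued.spacing, Spacing::Alone);

    // With whitespace in between, the first `=` is Alone, so nothing is glued.
    let lone_eq = Token::with_spacing(token::Eq, DUMMY_SP, Spacing::Alone);
    assert!(lone_eq.glue(&next_eq).is_none());
}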
79 changes: 24 additions & 55 deletions src/librustc_ast/tokenstream.rs
@@ -13,7 +13,7 @@
//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.

use crate::token::{self, DelimToken, Token, TokenKind};
use crate::token::{self, DelimToken, Spacing, Token, TokenKind};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
@@ -82,10 +82,6 @@ impl TokenTree {
}
}

pub fn joint(self) -> TokenStream {
TokenStream::new(vec![(self, Joint)])
}

pub fn token(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span))
}
@@ -125,22 +121,12 @@
/// instead of a representation of the abstract syntax tree.
/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
#[derive(Clone, Debug, Default, RustcEncodable, RustcDecodable)]
pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);

pub type TreeAndJoint = (TokenTree, IsJoint);
pub struct TokenStream(pub Lrc<Vec<TokenTree>>);

// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
rustc_data_structures::static_assert_size!(TokenStream, 8);

#[derive(Clone, Copy, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum IsJoint {
Joint,
NonJoint,
}

use IsJoint::*;

impl TokenStream {
/// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
/// separating the two arguments with a comma for diagnostic suggestions.
@@ -151,22 +137,22 @@
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts, &next) {
(_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
(_, TokenTree::Token(Token { kind: token::Comma, .. })) => continue,
(
(TokenTree::Token(token_left), NonJoint),
(TokenTree::Token(token_right), _),
TokenTree::Token(token_left @ Token { spacing: Spacing::Alone, .. }),
TokenTree::Token(token_right),
) if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit())
&& ((token_right.is_ident() && !token_right.is_reserved_ident())
|| token_right.is_lit()) =>
{
token_left.span
}
((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
(TokenTree::Delimited(sp, ..), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
let comma = (TokenTree::token(token::Comma, sp), NonJoint);
let comma = TokenTree::token(token::Comma, sp);
suggestion = Some((pos, comma, sp));
}
}
@@ -184,19 +170,13 @@

impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream::new(vec![(tree, NonJoint)])
}
}

impl From<TokenTree> for TreeAndJoint {
fn from(tree: TokenTree) -> TreeAndJoint {
(tree, NonJoint)
TokenStream::new(vec![(tree)])
}
}

impl iter::FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>())
TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TokenTree>>())
}
}

@@ -209,7 +189,7 @@ impl PartialEq<TokenStream> for TokenStream {
}

impl TokenStream {
pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
pub fn new(streams: Vec<TokenTree>) -> TokenStream {
TokenStream(Lrc::new(streams))
}

@@ -224,8 +204,8 @@ impl TokenStream {
pub fn span(&self) -> Option<Span> {
match &**self.0 {
[] => None,
[(tt, _)] => Some(tt.span()),
[(tt_start, _), .., (tt_end, _)] => Some(tt_start.span().to(tt_end.span())),
[tt] => Some(tt.span()),
[tt_start, .., tt_end] => Some(tt_start.span().to(tt_end.span())),
}
}

@@ -290,18 +270,12 @@

pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
TokenStream(Lrc::new(
self.0
.iter()
.enumerate()
.map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
.collect(),
self.0.iter().enumerate().map(|(i, tree)| f(i, tree.clone())).collect(),
))
}

pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
TokenStream(Lrc::new(
self.0.iter().map(|(tree, is_joint)| (f(tree.clone()), *is_joint)).collect(),
))
TokenStream(Lrc::new(self.0.iter().map(|tree| f(tree.clone())).collect()))
}
}

@@ -320,11 +294,11 @@ impl TokenStreamBuilder {
// If `self` is not empty and the last tree within the last stream is a
// token tree marked with `Joint`...
if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
if let Some(TokenTree::Token(last_token)) = last_stream_lrc.last() {
// ...and `stream` is not empty and the first tree within it is
// a token tree...
let TokenStream(ref mut stream_lrc) = stream;
if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
if let Some(TokenTree::Token(token)) = stream_lrc.first() {
// ...and the two tokens can be glued together...
if let Some(glued_tok) = last_token.glue(&token) {
// ...then do so, by overwriting the last token
@@ -337,8 +311,7 @@ impl TokenStreamBuilder {
// Overwrite the last token tree with the merged
// token.
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
*last_vec_mut.last_mut().unwrap() =
(TokenTree::Token(glued_tok), *is_joint);
*last_vec_mut.last_mut().unwrap() = TokenTree::Token(glued_tok);

// Remove the first token tree from `stream`. (This
// is almost always the only tree in `stream`.)
@@ -375,23 +348,19 @@ impl Iterator for Cursor {
type Item = TokenTree;

fn next(&mut self) -> Option<TokenTree> {
self.next_with_joint().map(|(tree, _)| tree)
}
}

impl Cursor {
fn new(stream: TokenStream) -> Self {
Cursor { stream, index: 0 }
}

pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
if self.index < self.stream.len() {
self.index += 1;
Some(self.stream.0[self.index - 1].clone())
} else {
None
}
}
}

impl Cursor {
fn new(stream: TokenStream) -> Self {
Cursor { stream, index: 0 }
}

pub fn append(&mut self, new_stream: TokenStream) {
if new_stream.is_empty() {
@@ -404,7 +373,7 @@ impl Cursor {
}

pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree.clone())
self.stream.0[self.index..].get(n).cloned()
}
}

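With `TreeAndJoint` and `IsJoint` gone, a stream is just an `Lrc<Vec<TokenTree>>`, and consumers read jointness off each `Token` rather than off a `(tree, is_joint)` pair. Here is a small sketch of such a consumer, using only items visible in this diff; it is not buildable outside the compiler tree, and the helper name `spacing_report` is made up for the example.

// Sketch only: walks a post-commit TokenStream and records each token's spacing.
use rustc_ast::token::Spacing;
use rustc_ast::tokenstream::{TokenStream, TokenTree};

fn spacing_report(stream: &TokenStream) -> Vec<(String, Spacing)> {
    let mut report = Vec::new();
    for tree in stream.0.iter() {
        match tree {
            // Jointness now lives on the token itself.
            TokenTree::Token(token) => report.push((format!("{:?}", token.kind), token.spacing)),
            // Delimited groups carry no spacing of their own; recurse into their contents.
            TokenTree::Delimited(_span, _delim, inner) => report.extend(spacing_report(inner)),
        }
    }
    report
}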