Auto merge of #80993 - Aaron1011:collect-set-tokens, r=petrochenkov
Set tokens on AST node in `collect_tokens`

A new `HasTokens` trait is introduced, which is used to move logic from
the callers of `collect_tokens` into the body of `collect_tokens`.

In addition to reducing duplication, this paves the way for PR #80689,
which needs to perform additional logic during token collection.
bors committed Jan 15, 2021
2 parents 3419da8 + a961e67 commit dcf622e
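To make the refactor concrete before diving into the diff, here is a minimal, compilable sketch of the new shape. Everything below is a simplified stand-in invented for illustration (`LazyTokenStream` here is just a string wrapper), not the real `rustc_ast`/`rustc_parse` definitions:

// Sketch only: stand-in types, not rustc's real definitions.
struct LazyTokenStream(String);

trait HasTokens {
    fn finalize_tokens(&mut self, tokens: LazyTokenStream);
}

#[derive(Default)]
struct Expr {
    tokens: Option<LazyTokenStream>,
}

impl HasTokens for Expr {
    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
        // Keep tokens captured earlier (e.g. from an eaten nonterminal);
        // only fill the slot if it is still empty.
        if self.tokens.is_none() {
            self.tokens = Some(tokens);
        }
    }
}

// Forward to the inner value, so closures returning `Option<...>`
// (like `parse_item`) satisfy the bound unchanged.
impl<T: HasTokens> HasTokens for Option<T> {
    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
        if let Some(inner) = self {
            inner.finalize_tokens(tokens);
        }
    }
}

struct Parser;

impl Parser {
    // Post-PR shape: the parsed node comes back with its tokens already
    // attached, instead of as a `(node, Option<LazyTokenStream>)` pair
    // that every caller had to stitch together itself.
    fn collect_tokens<R: HasTokens>(
        &mut self,
        f: impl FnOnce(&mut Self) -> Result<R, String>,
    ) -> Result<R, String> {
        let mut ret = f(self)?;
        ret.finalize_tokens(LazyTokenStream("captured tokens".to_string()));
        Ok(ret)
    }
}

fn main() {
    let mut p = Parser;
    let expr = p.collect_tokens(|_| Ok(Expr::default())).unwrap();
    assert!(expr.tokens.is_some());
}

The `is_none()` guard is the key design point: a node that already captured tokens (for example, via an eaten nonterminal) keeps them, which is exactly the behavior the per-call-site code below used to implement by hand.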
Showing 7 changed files with 101 additions and 147 deletions.
76 changes: 66 additions & 10 deletions compiler/rustc_ast/src/ast.rs
@@ -925,16 +925,6 @@ impl Stmt {
}
}

pub fn set_tokens(&mut self, tokens: Option<LazyTokenStream>) {
match self.kind {
StmtKind::Local(ref mut local) => local.tokens = tokens,
StmtKind::Item(ref mut item) => item.tokens = tokens,
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens,
StmtKind::Empty => {}
StmtKind::MacCall(ref mut mac) => mac.tokens = tokens,
}
}

pub fn has_trailing_semicolon(&self) -> bool {
match &self.kind {
StmtKind::Semi(_) => true,
@@ -2890,3 +2880,69 @@ impl TryFrom<ItemKind> for ForeignItemKind {
}

pub type ForeignItem = Item<ForeignItemKind>;

pub trait HasTokens {
/// Called by `Parser::collect_tokens` to store the collected
/// tokens inside an AST node
fn finalize_tokens(&mut self, tokens: LazyTokenStream);
}

impl<T: HasTokens + 'static> HasTokens for P<T> {
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
(**self).finalize_tokens(tokens);
}
}

impl<T: HasTokens> HasTokens for Option<T> {
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
if let Some(inner) = self {
inner.finalize_tokens(tokens);
}
}
}

impl HasTokens for Attribute {
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
match &mut self.kind {
AttrKind::Normal(_, attr_tokens) => {
if attr_tokens.is_none() {
*attr_tokens = Some(tokens);
}
}
AttrKind::DocComment(..) => {
panic!("Called finalize_tokens on doc comment attr {:?}", self)
}
}
}
}

impl HasTokens for Stmt {
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
let stmt_tokens = match self.kind {
StmtKind::Local(ref mut local) => &mut local.tokens,
StmtKind::Item(ref mut item) => &mut item.tokens,
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => &mut expr.tokens,
StmtKind::Empty => return,
StmtKind::MacCall(ref mut mac) => &mut mac.tokens,
};
if stmt_tokens.is_none() {
*stmt_tokens = Some(tokens);
}
}
}

macro_rules! derive_has_tokens {
($($ty:path),*) => { $(
impl HasTokens for $ty {
fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
if self.tokens.is_none() {
self.tokens = Some(tokens);
}
}
}
)* }
}

derive_has_tokens! {
Item, Expr, Ty, AttrItem, Visibility, Path, Block, Pat
}
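For readers less familiar with declarative macros, `derive_has_tokens!` is a purely mechanical expansion: each listed type gets the same impl, filling its `tokens` field only when nothing was captured earlier. The generated impl for `Expr`, for instance, is exactly:

impl HasTokens for Expr {
    fn finalize_tokens(&mut self, tokens: LazyTokenStream) {
        if self.tokens.is_none() {
            self.tokens = Some(tokens);
        }
    }
}

and likewise for the other listed types, each of which stores a `tokens: Option<LazyTokenStream>` field.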
16 changes: 4 additions & 12 deletions compiler/rustc_parse/src/parser/attr.rs
@@ -89,7 +89,7 @@ impl<'a> Parser<'a> {
inner_parse_policy, self.token
);
let lo = self.token.span;
let ((item, style, span), tokens) = self.collect_tokens(|this| {
self.collect_tokens(|this| {
if this.eat(&token::Pound) {
let style = if this.eat(&token::Not) {
ast::AttrStyle::Inner
@@ -107,15 +107,13 @@ impl<'a> Parser<'a> {
this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
}

Ok((item, style, attr_sp))
Ok(attr::mk_attr_from_item(item, None, style, attr_sp))
} else {
let token_str = pprust::token_to_string(&this.token);
let msg = &format!("expected `#`, found `{}`", token_str);
Err(this.struct_span_err(this.token.span, msg))
}
})?;

Ok(attr::mk_attr_from_item(item, tokens, style, span))
})
}

pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) {
@@ -165,13 +163,7 @@ impl<'a> Parser<'a> {
let args = this.parse_attr_args()?;
Ok(ast::AttrItem { path, args, tokens: None })
};
if capture_tokens {
let (mut item, tokens) = self.collect_tokens(do_parse)?;
item.tokens = tokens;
item
} else {
do_parse(self)?
}
if capture_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }?
})
}

20 changes: 4 additions & 16 deletions compiler/rustc_parse/src/parser/expr.rs
@@ -472,7 +472,8 @@ impl<'a> Parser<'a> {
/// Parses a prefix-unary-operator expr.
fn parse_prefix_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(attrs)?;
self.maybe_collect_tokens(super::attr::maybe_needs_tokens(&attrs), |this| {
let needs_tokens = super::attr::maybe_needs_tokens(&attrs);
let do_parse = |this: &mut Parser<'a>| {
let lo = this.token.span;
// Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
let (hi, ex) = match this.token.uninterpolate().kind {
@@ -488,7 +489,8 @@ impl<'a> Parser<'a> {
_ => return this.parse_dot_or_call_expr(Some(attrs)),
}?;
Ok(this.mk_expr(lo.to(hi), ex, attrs))
})
};
if needs_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }
}

fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
@@ -1125,20 +1127,6 @@ impl<'a> Parser<'a> {
}
}

fn maybe_collect_tokens(
&mut self,
needs_tokens: bool,
f: impl FnOnce(&mut Self) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
if needs_tokens {
let (mut expr, tokens) = self.collect_tokens(f)?;
expr.tokens = tokens;
Ok(expr)
} else {
f(self)
}
}

fn parse_lit_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
match self.parse_opt_lit() {
14 changes: 1 addition & 13 deletions compiler/rustc_parse/src/parser/item.rs
@@ -125,19 +125,7 @@ impl<'a> Parser<'a> {
item
};

let (mut item, tokens) = if needs_tokens {
let (item, tokens) = self.collect_tokens(parse_item)?;
(item, tokens)
} else {
(parse_item(self)?, None)
};
if let Some(item) = &mut item {
// If we captured tokens during parsing (due to encountering an `NtItem`),
// use those instead
if item.tokens.is_none() {
item.tokens = tokens;
}
}
let item = if needs_tokens { self.collect_tokens(parse_item) } else { parse_item(self) }?;

self.unclosed_delims.append(&mut unclosed_delims);
Ok(item)
13 changes: 7 additions & 6 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -19,8 +19,8 @@ use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
use rustc_ast::{Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
@@ -1234,10 +1234,10 @@ impl<'a> Parser<'a> {
/// This restriction shouldn't be an issue in practice,
/// since this function is used to record the tokens for
/// a parsed AST item, which always has matching delimiters.
pub fn collect_tokens<R>(
pub fn collect_tokens<R: HasTokens>(
&mut self,
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
) -> PResult<'a, (R, Option<LazyTokenStream>)> {
) -> PResult<'a, R> {
let start_token = (self.token.clone(), self.token_spacing);
let cursor_snapshot = TokenCursor {
frame: self.token_cursor.frame.clone(),
@@ -1249,7 +1249,7 @@ impl<'a> Parser<'a> {
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
};

let ret = f(self)?;
let mut ret = f(self)?;

// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
// and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1319,7 +1319,8 @@ impl<'a> Parser<'a> {
trailing_semi: false,
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
};
Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
Ok(ret)
}

/// `::{` or `::*`
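A note on the new `R: HasTokens` bound (a sketch of the type reasoning, not text from the commit): it is what lets the callers in `nonterminal.rs` below hand `collect_tokens` closures returning `Option<P<Item>>` or `Option<Stmt>` directly, because the blanket impls added in `ast.rs` compose:

// Why `self.collect_tokens(|this| this.parse_item())?` type-checks:
//   Item: HasTokens              via derive_has_tokens!
//   P<Item>: HasTokens           via impl<T: HasTokens + 'static> HasTokens for P<T>
//   Option<P<Item>>: HasTokens   via impl<T: HasTokens> HasTokens for Option<T>
// A `None` result makes finalize_tokens a no-op, which is why the
// Some/None match can happen after token collection.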
99 changes: 18 additions & 81 deletions compiler/rustc_parse/src/parser/nonterminal.rs
@@ -99,80 +99,34 @@ impl<'a> Parser<'a> {
// we always capture tokens for any `Nonterminal` which needs them.
Ok(match kind {
NonterminalKind::Item => match self.collect_tokens(|this| this.parse_item())? {
(Some(mut item), tokens) => {
// If we captured tokens during parsing (due to outer attributes),
// use those.
if item.tokens.is_none() {
item.tokens = tokens;
}
token::NtItem(item)
}
(None, _) => {
Some(item) => token::NtItem(item),
None => {
return Err(self.struct_span_err(self.token.span, "expected an item keyword"));
}
},
NonterminalKind::Block => {
let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?;
// We have eaten an NtBlock, which could already have tokens
if block.tokens.is_none() {
block.tokens = tokens;
}
token::NtBlock(block)
token::NtBlock(self.collect_tokens(|this| this.parse_block())?)
}
NonterminalKind::Stmt => {
let (stmt, tokens) = self.collect_tokens(|this| this.parse_stmt())?;
match stmt {
Some(mut s) => {
if s.tokens().is_none() {
s.set_tokens(tokens);
}
token::NtStmt(s)
}
None => {
return Err(self.struct_span_err(self.token.span, "expected a statement"));
}
NonterminalKind::Stmt => match self.collect_tokens(|this| this.parse_stmt())? {
Some(s) => token::NtStmt(s),
None => {
return Err(self.struct_span_err(self.token.span, "expected a statement"));
}
}
},
NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
let (mut pat, tokens) = self.collect_tokens(|this| match kind {
token::NtPat(self.collect_tokens(|this| match kind {
NonterminalKind::Pat2018 { .. } => this.parse_pat(None),
NonterminalKind::Pat2021 { .. } => {
this.parse_top_pat(GateOr::Yes, RecoverComma::No)
}
_ => unreachable!(),
})?;
// We have eaten an NtPat, which could already have tokens
if pat.tokens.is_none() {
pat.tokens = tokens;
}
token::NtPat(pat)
}
NonterminalKind::Expr => {
let (mut expr, tokens) = self.collect_tokens(|this| this.parse_expr())?;
// If we captured tokens during parsing (due to outer attributes),
// use those.
if expr.tokens.is_none() {
expr.tokens = tokens;
}
token::NtExpr(expr)
})?)
}
NonterminalKind::Expr => token::NtExpr(self.collect_tokens(|this| this.parse_expr())?),
NonterminalKind::Literal => {
let (mut lit, tokens) =
self.collect_tokens(|this| this.parse_literal_maybe_minus())?;
// We have eaten a nonterminal, which could already have tokens
if lit.tokens.is_none() {
lit.tokens = tokens;
}
token::NtLiteral(lit)
}
NonterminalKind::Ty => {
let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?;
// We have eaten an NtTy, which could already have tokens
if ty.tokens.is_none() {
ty.tokens = tokens;
}
token::NtTy(ty)
token::NtLiteral(self.collect_tokens(|this| this.parse_literal_maybe_minus())?)
}
NonterminalKind::Ty => token::NtTy(self.collect_tokens(|this| this.parse_ty())?),
// this could be handled like a token, since it is one
NonterminalKind::Ident => {
if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
Expand All @@ -185,32 +139,15 @@ impl<'a> Parser<'a> {
}
}
NonterminalKind::Path => {
let (mut path, tokens) =
self.collect_tokens(|this| this.parse_path(PathStyle::Type))?;
// We have eaten an NtPath, which could already have tokens
if path.tokens.is_none() {
path.tokens = tokens;
}
token::NtPath(path)
token::NtPath(self.collect_tokens(|this| this.parse_path(PathStyle::Type))?)
}
NonterminalKind::Meta => {
let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?;
// We may have eaten a nonterminal, which could already have tokens
if attr.tokens.is_none() {
attr.tokens = tokens;
}
token::NtMeta(P(attr))
token::NtMeta(P(self.collect_tokens(|this| this.parse_attr_item(false))?))
}
NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
NonterminalKind::Vis => {
let (mut vis, tokens) =
self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?;
// We may have eaten an `NtVis`, which could already have tokens
if vis.tokens.is_none() {
vis.tokens = tokens;
}
token::NtVis(vis)
}
NonterminalKind::Vis => token::NtVis(
self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?,
),
NonterminalKind::Lifetime => {
if self.check_lifetime() {
token::NtLifetime(self.expect_lifetime().ident)
10 changes: 1 addition & 9 deletions compiler/rustc_parse/src/parser/stmt.rs
@@ -89,15 +89,7 @@ impl<'a> Parser<'a> {
};

let stmt = if has_attrs {
let (mut stmt, tokens) = self.collect_tokens(parse_stmt_inner)?;
if let Some(stmt) = &mut stmt {
// If we already have tokens (e.g. due to encountering an `NtStmt`),
// use those instead.
if stmt.tokens().is_none() {
stmt.set_tokens(tokens);
}
}
stmt
self.collect_tokens(parse_stmt_inner)?
} else {
parse_stmt_inner(self)?
};