Auto merge of rust-lang#125174 - nnethercote:less-ast-pretty-printing…, r=<try>

Print `token::Interpolated` with token stream pretty printing.

This is a step towards removing `token::Interpolated` (rust-lang#124141). It unavoidably changes the output of the `stringify!` macro, generally for the better.

r? `@petrochenkov`
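A minimal, hypothetical sketch (not part of the commit) of where the change is observable: an `$e:expr` fragment reaches `stringify!` as a `token::Interpolated` nonterminal, and with this change it is rendered from its token stream rather than via AST pretty printing, so the exact spacing of the output can shift. The macro and input below are made up for illustration.

```rust
// Illustrative only: the captured `$e:expr` fragment is the kind of
// interpolated token whose printing this commit changes.
macro_rules! stringify_expr {
    ($e:expr) => {
        stringify!($e)
    };
}

fn main() {
    // Prints a pretty-printed rendering of the captured expression,
    // e.g. "1 + 2 * 3"; the exact spacing depends on the compiler version.
    println!("{}", stringify_expr!(1 + 2 * 3));
}
```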
bors committed May 17, 2024
2 parents 8af67ba + 9c693ad commit e07dc21
Showing 29 changed files with 221 additions and 312 deletions.
4 changes: 2 additions & 2 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -661,11 +661,11 @@ impl TokenStream {
if attr_style == AttrStyle::Inner {
vec![
TokenTree::token_joint(token::Pound, span),
-TokenTree::token_alone(token::Not, span),
+TokenTree::token_joint_hidden(token::Not, span),
body,
]
} else {
-vec![TokenTree::token_alone(token::Pound, span), body]
+vec![TokenTree::token_joint_hidden(token::Pound, span), body]
}
}
}
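For context on the switch from `token_alone` to `token_joint_hidden` above, here is a paraphrased sketch (not taken from this diff) of the `Spacing` variants in `rustc_ast::tokenstream` around this time; `JointHidden` asks the printer to emit the token flush against the next one, which is why the `#` and `!` of an attribute move away from `Alone`.

```rust
// Paraphrased, abridged sketch of rustc_ast::tokenstream::Spacing; see the
// compiler source for the authoritative definition and doc comments.
pub enum Spacing {
    /// Printed with following whitespace.
    Alone,
    /// Immediately followed by punctuation it could be glued with
    /// (e.g. `+` followed by `=`).
    Joint,
    /// Immediately followed by another token without forming a compound
    /// token; printed with no space in between (e.g. `#` then `[`).
    JointHidden,
}
```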
30 changes: 13 additions & 17 deletions compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -524,7 +524,10 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
}

-fn peek_comment<'b>(&'b self) -> Option<&'b Comment> where 'a: 'b {
+fn peek_comment<'b>(&'b self) -> Option<&'b Comment>
+where
+'a: 'b,
+{
self.comments().and_then(|c| c.peek())
}

@@ -849,18 +852,11 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}

fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
-match nt {
-token::NtExpr(e) => self.expr_to_string(e),
-token::NtMeta(e) => self.attr_item_to_string(e),
-token::NtTy(e) => self.ty_to_string(e),
-token::NtPath(e) => self.path_to_string(e),
-token::NtItem(e) => self.item_to_string(e),
-token::NtBlock(e) => self.block_to_string(e),
-token::NtStmt(e) => self.stmt_to_string(e),
-token::NtPat(e) => self.pat_to_string(e),
-token::NtLiteral(e) => self.expr_to_string(e),
-token::NtVis(e) => self.vis_to_string(e),
-}
+// We convert the AST fragment to a token stream and pretty print that,
+// rather than using AST pretty printing, because `Nonterminal` is
+// slated for removal in #124141. (This method will also then be
+// removed.)
+self.tts_to_string(&TokenStream::from_nonterminal_ast(nt))
}

/// Print the token kind precisely, without converting `$crate` into its respective crate name.
@@ -994,6 +990,10 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
Self::to_string(|s| s.print_attr_item(ai, ai.path.span))
}

+fn tts_to_string(&self, tokens: &TokenStream) -> String {
+Self::to_string(|s| s.print_tts(tokens, false))
+}

fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
let mut printer = State::new();
f(&mut printer);
@@ -2039,10 +2039,6 @@ impl<'a> State<'a> {
})
}

-pub(crate) fn tts_to_string(&self, tokens: &TokenStream) -> String {
-Self::to_string(|s| s.print_tts(tokens, false))
-}

pub(crate) fn path_segment_to_string(&self, p: &ast::PathSegment) -> String {
Self::to_string(|s| s.print_path_segment(p, false))
}
4 changes: 2 additions & 2 deletions compiler/rustc_builtin_macros/src/assert/context.rs
@@ -153,7 +153,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
fn build_panic(&self, expr_str: &str, panic_path: Path) -> P<Expr> {
let escaped_expr_str = escape_to_fmt(expr_str);
let initial = [
-TokenTree::token_joint_hidden(
+TokenTree::token_joint(
token::Literal(token::Lit {
kind: token::LitKind::Str,
symbol: Symbol::intern(&if self.fmt_string.is_empty() {
@@ -172,7 +172,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
];
let captures = self.capture_decls.iter().flat_map(|cap| {
[
-TokenTree::token_joint_hidden(
+TokenTree::token_joint(
token::Ident(cap.ident.name, IdentIsRaw::No),
cap.ident.span,
),
12 changes: 6 additions & 6 deletions compiler/rustc_expand/src/mbe.rs
@@ -14,7 +14,7 @@ mod transcribe;

use metavar_expr::MetaVarExpr;
use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpacing, DelimSpan};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing};
use rustc_macros::{Decodable, Encodable};
use rustc_span::symbol::Ident;
use rustc_span::Span;
@@ -68,7 +68,7 @@ pub(crate) enum KleeneOp {
/// `MetaVarExpr` are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, PartialEq, Encodable, Decodable)]
enum TokenTree {
-Token(Token),
+Token(Token, Spacing),
/// A delimited sequence, e.g. `($e:expr)` (RHS) or `{ $e }` (LHS).
Delimited(DelimSpan, DelimSpacing, Delimited),
/// A kleene-style repetition sequence, e.g. `$($e:expr)*` (RHS) or `$($e),*` (LHS).
@@ -90,15 +90,15 @@ impl TokenTree {
/// Returns `true` if the given token tree is a token of the given kind.
fn is_token(&self, expected_kind: &TokenKind) -> bool {
match self {
-TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
+TokenTree::Token(Token { kind: actual_kind, .. }, _) => actual_kind == expected_kind,
_ => false,
}
}

/// Retrieves the `TokenTree`'s span.
fn span(&self) -> Span {
match *self {
-TokenTree::Token(Token { span, .. })
+TokenTree::Token(Token { span, .. }, _)
| TokenTree::MetaVar(span, _)
| TokenTree::MetaVarDecl(span, _, _) => span,
TokenTree::Delimited(span, ..)
@@ -107,7 +107,7 @@
}
}

-fn token(kind: TokenKind, span: Span) -> TokenTree {
-TokenTree::Token(Token::new(kind, span))
+fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
+TokenTree::Token(Token::new(kind, span), Spacing::Alone)
}
}
8 changes: 4 additions & 4 deletions compiler/rustc_expand/src/mbe/macro_check.rs
@@ -412,7 +412,7 @@ fn check_nested_occurrences(
match (state, tt) {
(
NestedMacroState::Empty,
-&TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }),
+&TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }, _),
) => {
if name == kw::MacroRules {
state = NestedMacroState::MacroRules;
@@ -422,13 +422,13 @@
}
(
NestedMacroState::MacroRules,
-&TokenTree::Token(Token { kind: TokenKind::Not, .. }),
+&TokenTree::Token(Token { kind: TokenKind::Not, .. }, _),
) => {
state = NestedMacroState::MacroRulesNot;
}
(
NestedMacroState::MacroRulesNot,
-&TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
+&TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }, _),
) => {
state = NestedMacroState::MacroRulesNotName;
}
@@ -459,7 +459,7 @@
}
(
NestedMacroState::Macro,
-&TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
+&TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }, _),
) => {
state = NestedMacroState::MacroName;
}
2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -180,7 +180,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
) {
for tt in tts {
match tt {
-TokenTree::Token(token) => {
+TokenTree::Token(token, _) => {
locs.push(MatcherLoc::Token { token: token.clone() });
}
TokenTree::Delimited(span, _, delimited) => {
37 changes: 20 additions & 17 deletions compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -11,7 +11,7 @@ use crate::mbe::transcribe::transcribe;
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream};
+use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
@@ -402,7 +402,7 @@ pub fn compile_declarative_macro(
mbe::SequenceRepetition {
tts: vec![
mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-mbe::TokenTree::token(token::FatArrow, def.span),
+mbe::TokenTree::token_alone(token::FatArrow, def.span),
mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
],
separator: Some(Token::new(
@@ -417,7 +417,7 @@
mbe::TokenTree::Sequence(
DelimSpan::dummy(),
mbe::SequenceRepetition {
-tts: vec![mbe::TokenTree::token(
+tts: vec![mbe::TokenTree::token_alone(
if macro_rules { token::Semi } else { token::Comma },
def.span,
)],
@@ -627,10 +627,11 @@ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
while let Some(tt) = iter.next() {
match tt {
mbe::TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => {}
-mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
+mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }, _) => {
let mut now = t;
while let Some(&mbe::TokenTree::Token(
next @ Token { kind: DocComment(..), .. },
+_,
)) = iter.peek()
{
now = next;
@@ -696,10 +697,10 @@ fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
match rhs {
mbe::TokenTree::Delimited(.., d) => {
let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
-if let mbe::TokenTree::Token(ident) = ident
+if let mbe::TokenTree::Token(ident, _) = ident
&& let TokenKind::Ident(ident, _) = ident.kind
&& ident == sym::compile_error
-&& let mbe::TokenTree::Token(bang) = bang
+&& let mbe::TokenTree::Token(bang, _) = bang
&& let TokenKind::Not = bang.kind
&& let mbe::TokenTree::Delimited(.., del) = args
&& del.delim != Delimiter::Invisible
@@ -896,7 +897,9 @@ enum TtHandle<'tt> {

impl<'tt> TtHandle<'tt> {
fn from_token(tok: Token) -> Self {
-TtHandle::Token(mbe::TokenTree::Token(tok))
+// `Spacing::Alone` is pessimistic but changing it has no effect on the
+// current test suite.
+TtHandle::Token(mbe::TokenTree::Token(tok, Spacing::Alone))
}

fn from_token_kind(kind: TokenKind, span: Span) -> Self {
@@ -925,8 +928,8 @@ impl<'tt> Clone for TtHandle<'tt> {

// This variant *must* contain a `mbe::TokenTree::Token`, and not
// any other variant of `mbe::TokenTree`.
-TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+TtHandle::Token(mbe::TokenTree::Token(tok, spacing)) => {
+TtHandle::Token(mbe::TokenTree::Token(tok.clone(), *spacing))
}

_ => unreachable!(),
@@ -1143,7 +1146,7 @@ fn check_matcher_core<'tt>(
// whereas macros from an external crate have a dummy id.
if def.id != DUMMY_NODE_ID
&& matches!(kind, NonterminalKind::PatParam { inferred: true })
-&& matches!(next_token, TokenTree::Token(token) if token.kind == BinOp(token::BinOpToken::Or))
+&& matches!(next_token, TokenTree::Token(token, _) if token.kind == BinOp(token::BinOpToken::Or))
{
// It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
@@ -1274,7 +1277,7 @@ enum IsInFollow {
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
use mbe::TokenTree;

-if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
+if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }, _) = *tok {
// closing a token tree can never be matched by any fragment;
// iow, we always require that `(` and `)` match, etc.
IsInFollow::Yes
@@ -1293,7 +1296,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
NonterminalKind::Stmt | NonterminalKind::Expr => {
const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
match tok {
-TokenTree::Token(token) => match token.kind {
+TokenTree::Token(token, _) => match token.kind {
FatArrow | Comma | Semi => IsInFollow::Yes,
_ => IsInFollow::No(TOKENS),
},
@@ -1303,7 +1306,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
NonterminalKind::PatParam { .. } => {
const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
match tok {
-TokenTree::Token(token) => match token.kind {
+TokenTree::Token(token, _) => match token.kind {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
@@ -1316,7 +1319,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
NonterminalKind::PatWithOr => {
const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
match tok {
-TokenTree::Token(token) => match token.kind {
+TokenTree::Token(token, _) => match token.kind {
FatArrow | Comma | Eq => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
@@ -1332,7 +1335,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
"`where`",
];
match tok {
-TokenTree::Token(token) => match token.kind {
+TokenTree::Token(token, _) => match token.kind {
OpenDelim(Delimiter::Brace)
| OpenDelim(Delimiter::Bracket)
| Comma
@@ -1369,7 +1372,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
// Explicitly disallow `priv`, on the off chance it comes back.
const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
match tok {
-TokenTree::Token(token) => match token.kind {
+TokenTree::Token(token, _) => match token.kind {
Comma => IsInFollow::Yes,
Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
@@ -1395,7 +1398,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {

fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
match tt {
-mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
+mbe::TokenTree::Token(token, _) => pprust::token_to_string(token).into(),
mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"),
mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"),
15 changes: 9 additions & 6 deletions compiler/rustc_expand/src/mbe/quoted.rs
@@ -180,7 +180,7 @@ fn parse_tree<'a>(
err.emit();
// Returns early the same read `$` to avoid spanning
// unrelated diagnostics that could be performed afterwards
-return TokenTree::token(token::Dollar, span);
+return TokenTree::token_alone(token::Dollar, span);
}
Ok(elem) => {
maybe_emit_macro_metavar_expr_feature(
@@ -220,11 +220,14 @@

// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
// special metavariable that names the crate of the invocation.
-Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
+Some(tokenstream::TokenTree::Token(token, spacing)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
let span = ident.span.with_lo(span.lo());
if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) {
-TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
+TokenTree::Token(
+Token::new(token::Ident(kw::DollarCrate, is_raw), span),
+*spacing,
+)
} else {
TokenTree::MetaVar(span, ident)
}
@@ -240,7 +243,7 @@
} else {
maybe_emit_macro_metavar_expr_feature(features, sess, span);
}
-TokenTree::token(token::Dollar, span)
+TokenTree::token_alone(token::Dollar, span)
}

// `tree` is followed by some other token. This is an error.
@@ -252,12 +255,12 @@
}

// There are no more tokens. Just return the `$` we already have.
-None => TokenTree::token(token::Dollar, span),
+None => TokenTree::token_alone(token::Dollar, span),
}
}

// `tree` is an arbitrary token. Keep it.
-tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+tokenstream::TokenTree::Token(token, spacing) => TokenTree::Token(token.clone(), *spacing),

// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
// descend into the delimited set and further parse it.
4 changes: 2 additions & 2 deletions compiler/rustc_expand/src/mbe/transcribe.rs
@@ -315,10 +315,10 @@ pub(super) fn transcribe<'a>(

// Nothing much to do here. Just push the token to the result, being careful to
// preserve syntax context.
-mbe::TokenTree::Token(token) => {
+mbe::TokenTree::Token(token, spacing) => {
let mut token = token.clone();
mut_visit::visit_token(&mut token, &mut marker);
-let tt = TokenTree::Token(token, Spacing::Alone);
+let tt = TokenTree::Token(token, *spacing);
result.push(tt);
}

