syntax: Remove duplicate span from token::Ident
petrochenkov committed Jun 6, 2019
1 parent 4c5d773 commit f745e5f
Showing 21 changed files with 181 additions and 184 deletions.
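At a glance, the change is that `token::Ident` no longer carries a full `Ident` (name plus span); it carries only the interned name, and the span lives solely on the enclosing `Token`. The sketch below is an illustrative, self-contained reconstruction of that shape using stub types — it is not the actual rustc definitions, and the stub names (`OldTokenKind`, `NewTokenKind`, the toy `Symbol`/`Span`/`Ident`) are assumptions for demonstration only.

```rust
// Illustrative sketch of the refactor, with stub types standing in for
// rustc's `Symbol`, `Span`, and `Ident`. Not the real compiler definitions.

#[derive(Clone, Copy, PartialEq, Debug)]
struct Symbol(u32); // an interned name (in libsyntax, `Name` is the interned symbol)
#[derive(Clone, Copy, PartialEq, Debug)]
struct Span { lo: u32, hi: u32 }
#[derive(Clone, Copy, PartialEq, Debug)]
struct Ident { name: Symbol, span: Span }

// Before: the identifier token kind stored a whole `Ident`, so its span
// duplicated the span already stored on the enclosing `Token`.
#[allow(dead_code)]
enum OldTokenKind {
    Ident(Ident, /* is_raw */ bool),
}

// After: the token kind stores only the interned name; the span comes from `Token`.
#[derive(Clone, Copy)]
enum NewTokenKind {
    Ident(Symbol, /* is_raw */ bool),
}

struct Token { kind: NewTokenKind, span: Span }

impl Token {
    // Callers that still need a full `Ident` rebuild it from the two pieces,
    // mirroring the `Ident::new(name, span)` pattern seen in the hunks below.
    fn ident(&self) -> Option<(Ident, bool)> {
        match self.kind {
            NewTokenKind::Ident(name, is_raw) => Some((Ident { name, span: self.span }, is_raw)),
        }
    }
}

fn main() {
    let tok = Token { kind: NewTokenKind::Ident(Symbol(0), false), span: Span { lo: 21, hi: 23 } };
    let (ident, is_raw) = tok.ident().unwrap();
    assert_eq!(ident.span, tok.span); // the span is no longer stored twice
    assert!(!is_raw);
}
```

The hunks below follow this pattern throughout: pattern matches change from `token::Ident(ident, is_raw)` to `token::Ident(name, is_raw)`, and call sites that need an `Ident` reconstruct it from the name and the token's span.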
4 changes: 2 additions & 2 deletions src/librustc/ich/impls_syntax.rs
@@ -353,8 +353,8 @@ impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
             }
             token::Literal(lit) => lit.hash_stable(hcx, hasher),

-            token::Ident(ident, is_raw) => {
-                ident.name.hash_stable(hcx, hasher);
+            token::Ident(name, is_raw) => {
+                name.hash_stable(hcx, hasher);
                 is_raw.hash_stable(hcx, hasher);
             }
             token::Lifetime(name) => name.hash_stable(hcx, hasher),
4 changes: 2 additions & 2 deletions src/librustdoc/html/highlight.rs
@@ -325,8 +325,8 @@ impl<'a> Classifier<'a> {
             }

             // Keywords are also included in the identifier set.
-            token::Ident(ident, is_raw) => {
-                match ident.name {
+            token::Ident(name, is_raw) => {
+                match name {
                     kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,

                     kw::SelfLower | kw::SelfUpper => Class::Self_,
10 changes: 5 additions & 5 deletions src/libsyntax/attr/mod.rs
@@ -482,19 +482,19 @@ impl MetaItem {
         let path = match tokens.next() {
             Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
             Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
-                let mut segments = if let token::Ident(ident, _) = kind {
+                let mut segments = if let token::Ident(name, _) = kind {
                     if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
-                        vec![PathSegment::from_ident(ident.with_span_pos(span))]
+                        vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
-                        break 'arm Path::from_ident(ident.with_span_pos(span));
+                        break 'arm Path::from_ident(Ident::new(name, span));
                     }
                 } else {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() {
-                        segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() {
+                        segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
18 changes: 9 additions & 9 deletions src/libsyntax/diagnostics/plugin.rs
@@ -39,7 +39,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
     };

     ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
-        match diagnostics.get_mut(&code.name) {
+        match diagnostics.get_mut(&code) {
             // Previously used errors.
             Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => {
                 ecx.struct_span_warn(span, &format!(
@@ -72,10 +72,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         token_tree.get(1),
         token_tree.get(2)
     ) {
-        (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => {
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => {
            (code, None)
        },
-        (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })),
+        (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
            (code, Some(symbol))
@@ -112,7 +112,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
            description,
            use_site: None
        };
-        if diagnostics.insert(code.name, info).is_some() {
+        if diagnostics.insert(code, info).is_some() {
            ecx.span_err(span, &format!(
                "diagnostic code {} already registered", code
            ));
@@ -140,13 +140,13 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                          token_tree: &[TokenTree])
                                          -> Box<dyn MacResult+'cx> {
    assert_eq!(token_tree.len(), 3);
-    let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
+    let (crate_name, ident) = match (&token_tree[0], &token_tree[2]) {
        (
            // Crate name.
-            &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }),
+            &TokenTree::Token(Token { kind: token::Ident(crate_name, _), .. }),
            // DIAGNOSTICS ident.
-            &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. })
-        ) => (*&crate_name, name),
+            &TokenTree::Token(Token { kind: token::Ident(name, _), span })
+        ) => (crate_name, Ident::new(name, span)),
        _ => unreachable!()
    };

@@ -209,7 +209,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,

    MacEager::items(smallvec![
        P(ast::Item {
-            ident: *name,
+            ident,
            attrs: Vec::new(),
            id: ast::DUMMY_NODE_ID,
            node: ast::ItemKind::Const(
2 changes: 1 addition & 1 deletion src/libsyntax/ext/base.rs
@@ -269,7 +269,7 @@ impl<F> TTMacroExpander for F
            if let token::Interpolated(nt) = &token.kind {
                if let token::NtIdent(ident, is_raw) = **nt {
                    *tt = tokenstream::TokenTree::token(ident.span,
-                                                        token::Ident(ident, is_raw));
+                                                        token::Ident(ident.name, is_raw));
                }
            }
        }
39 changes: 18 additions & 21 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -74,7 +74,7 @@ pub use NamedMatch::*;
 pub use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;

-use crate::ast::Ident;
+use crate::ast::{Ident, Name};
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
@@ -429,8 +429,8 @@ pub fn parse_failure_msg(tok: TokenKind) -> String {

 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
 fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool {
-    if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
-        id1.name == id2.name && is_raw1 == is_raw2
+    if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) {
+        name1 == name2 && is_raw1 == is_raw2
     } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) {
         name1 == name2
     } else {
@@ -466,8 +466,7 @@ fn inner_parse_loop<'root, 'tt>(
     next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
     eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    token: &TokenKind,
-    span: syntax_pos::Span,
+    token: &Token,
 ) -> ParseResult<()> {
     // Pop items from `cur_items` until it is empty.
     while let Some(mut item) = cur_items.pop() {
@@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>(
                // Add matches from this repetition to the `matches` of `up`
                for idx in item.match_lo..item.match_hi {
                    let sub = item.matches[idx].clone();
-                    let span = DelimSpan::from_pair(item.sp_open, span);
+                    let span = DelimSpan::from_pair(item.sp_open, token.span);
                    new_pos.push_match(idx, MatchedSeq(sub, span));
                }

@@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>(
                TokenTree::MetaVarDecl(_, _, id) => {
                    // Built-in nonterminals never start with these tokens,
                    // so we can eliminate them from consideration.
-                    if may_begin_with(id.name, token) {
+                    if may_begin_with(token, id.name) {
                        bb_items.push(item);
                    }
                }
@@ -698,7 +697,6 @@ pub fn parse(
            &mut eof_items,
            &mut bb_items,
            &parser.token,
-            parser.span,
        ) {
            Success(_) => {}
            Failure(token, msg) => return Failure(token, msg),
@@ -806,10 +804,9 @@ pub fn parse(

 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> {
+fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
     match *token {
-        token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
-            Some((ident, is_raw)),
+        token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
         _ => None,
     }
 }
@@ -818,7 +815,7 @@ fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> {
 ///
 /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
 /// token. Be conservative (return true) if not sure.
-fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
+fn may_begin_with(token: &Token, name: Name) -> bool {
     /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
     fn may_be_ident(nt: &token::Nonterminal) -> bool {
         match *nt {
@@ -830,14 +827,14 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
     match name {
         sym::expr => token.can_begin_expr(),
         sym::ty => token.can_begin_type(),
-        sym::ident => get_macro_ident(token).is_some(),
+        sym::ident => get_macro_name(token).is_some(),
         sym::literal => token.can_begin_literal_or_bool(),
-        sym::vis => match *token {
+        sym::vis => match token.kind {
            // The follow-set of :vis + "priv" keyword + interpolated
            token::Comma | token::Ident(..) | token::Interpolated(_) => true,
            _ => token.can_begin_type(),
        },
-        sym::block => match *token {
+        sym::block => match token.kind {
            token::OpenDelim(token::Brace) => true,
            token::Interpolated(ref nt) => match **nt {
                token::NtItem(_)
@@ -851,15 +848,15 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
            },
            _ => false,
        },
-        sym::path | sym::meta => match *token {
+        sym::path | sym::meta => match token.kind {
            token::ModSep | token::Ident(..) => true,
            token::Interpolated(ref nt) => match **nt {
                token::NtPath(_) | token::NtMeta(_) => true,
                _ => may_be_ident(&nt),
            },
            _ => false,
        },
-        sym::pat => match *token {
+        sym::pat => match token.kind {
            token::Ident(..) |                  // box, ref, mut, and other identifiers (can stricten)
            token::OpenDelim(token::Paren) |    // tuple pattern
            token::OpenDelim(token::Bracket) |  // slice pattern
@@ -875,15 +872,15 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool {
            token::Interpolated(ref nt) => may_be_ident(nt),
            _ => false,
        },
-        sym::lifetime => match *token {
+        sym::lifetime => match token.kind {
            token::Lifetime(_) => true,
            token::Interpolated(ref nt) => match **nt {
                token::NtLifetime(_) | token::NtTT(_) => true,
                _ => false,
            },
            _ => false,
        },
-        _ => match *token {
+        _ => match token.kind {
            token::CloseDelim(_) => false,
            _ => true,
        },
@@ -929,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal {
        sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())),
        sym::ty => token::NtTy(panictry!(p.parse_ty())),
        // this could be handled like a token, since it is one
-        sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
+        sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
            let span = p.span;
            p.bump();
-            token::NtIdent(Ident::new(ident.name, span), is_raw)
+            token::NtIdent(Ident::new(name, span), is_raw)
        } else {
            let token_str = pprust::token_to_string(&p.token);
            p.fatal(&format!("expected ident, found {}", &token_str)).emit();
12 changes: 5 additions & 7 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -1046,8 +1046,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::If ||
-                                           i.name == kw::In => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
                        _ => IsInFollow::No(tokens),
                    },
                    _ => IsInFollow::No(tokens),
@@ -1064,8 +1063,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                        OpenDelim(token::DelimToken::Bracket) |
                        Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
                        BinOp(token::Or) => IsInFollow::Yes,
-                        Ident(i, false) if i.name == kw::As ||
-                                           i.name == kw::Where => IsInFollow::Yes,
+                        Ident(name, false) if name == kw::As ||
+                                              name == kw::Where => IsInFollow::Yes,
                        _ => IsInFollow::No(tokens),
                    },
                    TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
@@ -1092,9 +1091,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
-                        Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
-                            IsInFollow::Yes,
+                        Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
-                        ref tok => if tok.can_begin_type() {
+                        _ => if token.can_begin_type() {
                            IsInFollow::Yes
                        } else {
                            IsInFollow::No(tokens)
5 changes: 2 additions & 3 deletions src/libsyntax/ext/tt/quoted.rs
@@ -323,10 +323,9 @@ where
            // metavariable that names the crate of the invocation.
            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                let (ident, is_raw) = token.ident().unwrap();
-                let span = token.span.with_lo(span.lo());
+                let span = ident.span.with_lo(span.lo());
                if ident.name == kw::Crate && !is_raw {
-                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
                } else {
                    TokenTree::MetaVar(span, ident)
                }
1 change: 0 additions & 1 deletion src/libsyntax/mut_visit.rs
@@ -598,7 +598,6 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
 // apply ident visitor if it's an ident, apply other visits to interpolated nodes
 pub fn noop_visit_token<T: MutVisitor>(t: &mut TokenKind, vis: &mut T) {
     match t {
-        token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);
6 changes: 3 additions & 3 deletions src/libsyntax/parse/diagnostics.rs
@@ -201,12 +201,12 @@ impl<'a> Parser<'a> {
            self.span,
            &format!("expected identifier, found {}", self.this_token_descr()),
        );
-        if let token::Ident(ident, false) = &self.token.kind {
-            if ident.is_raw_guess() {
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.span).is_raw_guess() {
                err.span_suggestion(
                    self.span,
                    "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", ident),
+                    format!("r#{}", name),
                    Applicability::MaybeIncorrect,
                );
            }
22 changes: 6 additions & 16 deletions src/libsyntax/parse/lexer/mod.rs
@@ -1,4 +1,4 @@
-use crate::ast::{self, Ident};
+use crate::ast;
 use crate::parse::ParseSess;
 use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};
@@ -61,15 +61,6 @@ impl<'a> StringReader<'a> {
        (real, raw)
    }

-    fn mk_ident(&self, string: &str) -> Ident {
-        let mut ident = Ident::from_str(string);
-        if let Some(span) = self.override_span {
-            ident.span = span;
-        }
-
-        ident
-    }
-
    fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
        match res {
            Ok(tok) => tok,
@@ -858,17 +849,17 @@ impl<'a> StringReader<'a> {

        return Ok(self.with_str_from(start, |string| {
            // FIXME: perform NFKC normalization here. (Issue #2253)
-            let ident = self.mk_ident(string);
+            let name = ast::Name::intern(string);

            if is_raw_ident {
                let span = self.mk_sp(raw_start, self.pos);
-                if !ident.can_be_raw() {
-                    self.err_span(span, &format!("`{}` cannot be a raw identifier", ident));
+                if !name.can_be_raw() {
+                    self.err_span(span, &format!("`{}` cannot be a raw identifier", name));
                }
                self.sess.raw_identifier_spans.borrow_mut().push(span);
            }

-            token::Ident(ident, is_raw_ident)
+            token::Ident(name, is_raw_ident)
        }));
    }
}
@@ -1567,12 +1558,11 @@ mod tests {
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                .to_string());
-        let id = Ident::from_str("fn");
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        let tok1 = string_reader.next_token();
        let tok2 = Token::new(
-            token::Ident(id, false),
+            token::Ident(Symbol::intern("fn"), false),
            Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
        );
        assert_eq!(tok1.kind, tok2.kind);
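For the lexer hunk just above: instead of building an `Ident` through the removed `mk_ident` helper, the reader interns the scanned string directly as a `Name`/`Symbol` and lets the token's own span carry the location. The sketch below is a hedged, self-contained illustration of string interning with a toy interner — the names (`Interner`, `Symbol`) and the data layout are assumptions for demonstration, not rustc's real arena-backed interner.

```rust
use std::collections::HashMap;

// Toy symbol interner; equal strings always map to the same symbol id.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Symbol(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, u32>,
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, string: &str) -> Symbol {
        if let Some(&idx) = self.names.get(string) {
            return Symbol(idx); // already interned: the same `Symbol` comes back
        }
        let idx = self.strings.len() as u32;
        self.strings.push(string.to_string());
        self.names.insert(string.to_string(), idx);
        Symbol(idx)
    }
}

fn main() {
    let mut interner = Interner::default();
    // The lexer would now emit `token::Ident(name, is_raw)` with `name`
    // interned like this, while the span is tracked on the `Token` itself.
    let a = interner.intern("fn");
    let b = interner.intern("fn");
    assert_eq!(a, b); // interning is idempotent: equal strings, equal symbols
}
```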