
Commit 25b0514

syntax: Remove Deref impl from Token
1 parent 0ca3c2f commit 25b0514

File tree

10 files changed: +45 −66 lines changed

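In short: `Token` previously carried a `Deref<Target = TokenKind>` impl (marked FIXME in the source), so a `&Token` silently coerced to `&TokenKind` at call sites. This commit deletes that impl and instead moves helpers to take `&Token` (or has callers name `.kind` explicitly). A minimal standalone sketch of the two styles, using simplified stand-in types rather than the real libsyntax definitions:

use std::ops::Deref;

#[derive(PartialEq)]
enum TokenKind { Eq, Comma }

struct Token { kind: TokenKind /* plus a Span in the real type */ }

// Pre-commit state: the temporary Deref impl let `&Token` stand in for `&TokenKind`.
impl Deref for Token {
    type Target = TokenKind;
    fn deref(&self) -> &Self::Target { &self.kind }
}

// Pre-commit helper style: takes the kind and relies on deref coercion at call sites.
fn is_comma_kind(kind: &TokenKind) -> bool { *kind == TokenKind::Comma }

// Post-commit helper style: takes the whole token and reads `.kind` itself.
fn is_comma(token: &Token) -> bool { token.kind == TokenKind::Comma }

fn main() {
    let comma = Token { kind: TokenKind::Comma };
    let eq = Token { kind: TokenKind::Eq };
    assert!(is_comma_kind(&comma));      // only compiles while the Deref impl exists
    assert!(is_comma_kind(&comma.kind)); // explicit `.kind`, works either way
    assert!(is_comma(&comma) && !is_comma(&eq));
}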

src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 6 additions & 6 deletions
@@ -78,7 +78,7 @@ use crate::ast::{Ident, Name};
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};
@@ -417,12 +417,12 @@ fn nameize<I: Iterator<Item = NamedMatch>>(

 /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
 /// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: TokenKind) -> String {
-    match tok {
+pub fn parse_failure_msg(tok: &Token) -> String {
+    match tok.kind {
         token::Eof => "unexpected end of macro invocation".to_string(),
         _ => format!(
             "no rules expected the token `{}`",
-            pprust::token_to_string(&tok)
+            pprust::token_to_string(tok)
         ),
     }
 }
@@ -804,8 +804,8 @@ pub fn parse(

 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
-    match *token {
+fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
+    match token.kind {
         token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
         _ => None,
     }

src/libsyntax/ext/tt/macro_rules.rs

Lines changed: 2 additions & 2 deletions
@@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,

     let (token, label) = best_failure.expect("ran no matchers");
     let span = token.span.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
     err.span_label(span, label);
     if let Some(sp) = def_span {
         if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
@@ -288,7 +288,7 @@ pub fn compile(
     let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
         Success(m) => m,
         Failure(token, msg) => {
-            let s = parse_failure_msg(token.kind);
+            let s = parse_failure_msg(&token);
             let sp = token.span.substitute_dummy(def.span);
             let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
             err.span_label(sp, msg);

src/libsyntax/ext/tt/quoted.rs

Lines changed: 5 additions & 15 deletions
@@ -23,24 +23,14 @@ pub struct Delimited {
 }

 impl Delimited {
-    /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> TokenKind {
-        token::OpenDelim(self.delim)
-    }
-
-    /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> TokenKind {
-        token::CloseDelim(self.delim)
-    }
-
     /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = if span.is_dummy() {
             span
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(self.open_token(), open_span)
+        TokenTree::token(token::OpenDelim(self.delim), open_span)
     }

     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(self.close_token(), close_span)
+        TokenTree::token(token::CloseDelim(self.delim), close_span)
     }
 }

@@ -282,7 +272,7 @@ where
         Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
             // Must have `(` not `{` or `[`
             if delim != token::Paren {
-                let tok = pprust::token_to_string(&token::OpenDelim(delim));
+                let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                 let msg = format!("expected `(`, found `{}`", tok);
                 sess.span_diagnostic.span_err(span.entire(), &msg);
             }
@@ -371,8 +361,8 @@ where

 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
-    match *token {
+fn kleene_op(token: &Token) -> Option<KleeneOp> {
+    match token.kind {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
         token::Question => Some(KleeneOp::ZeroOrOne),

src/libsyntax/parse/diagnostics.rs

Lines changed: 1 addition & 1 deletion
@@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
         &mut self,
         t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
-        let token_str = pprust::token_to_string(t);
+        let token_str = pprust::token_kind_to_string(t);
         let this_token_str = self.this_token_descr();
         let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.

src/libsyntax/parse/lexer/tokentrees.rs

Lines changed: 1 addition & 1 deletion
@@ -211,7 +211,7 @@ impl<'a> TokenTreesReader<'a> {
                 let raw = self.string_reader.peek_span_src_raw;
                 self.real_token();
                 let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
-                    && token::is_op(&self.token);
+                    && self.token.is_op();
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }

src/libsyntax/parse/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@ use crate::parse::parser::emit_unclosed_delims;
 use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
-use crate::print::pprust::token_to_string;
+use crate::print::pprust;

 use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use rustc_data_structures::sync::{Lrc, Lock};
@@ -312,7 +312,7 @@ pub fn maybe_file_to_stream(
     for unmatched in unmatched_braces {
         let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            token_to_string(&token::CloseDelim(unmatched.found_delim)),
+            pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         db.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {

src/libsyntax/parse/parser.rs

Lines changed: 9 additions & 9 deletions
@@ -401,7 +401,7 @@ crate enum TokenType {
 impl TokenType {
     crate fn to_string(&self) -> String {
         match *self {
-            TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
+            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
             TokenType::Operator => "an operator".to_string(),
             TokenType::Lifetime => "lifetime".to_string(),
@@ -418,7 +418,7 @@ impl TokenType {
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
     t == &token::ModSep || t == &token::Lt ||
     t == &token::BinOp(token::Shl)
 }
@@ -586,10 +586,10 @@ impl<'a> Parser<'a> {
         edible: &[TokenKind],
         inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
-        if edible.contains(&self.token) {
+        if edible.contains(&self.token.kind) {
             self.bump();
             Ok(false)
-        } else if inedible.contains(&self.token) {
+        } else if inedible.contains(&self.token.kind) {
             // leave it in the input
             Ok(false)
         } else if self.last_unexpected_token_span == Some(self.token.span) {
@@ -951,7 +951,7 @@ impl<'a> Parser<'a> {
                     Err(mut e) => {
                         // Attempt to keep parsing if it was a similar separator
                         if let Some(ref tokens) = t.similar_tokens() {
-                            if tokens.contains(&self.token) {
+                            if tokens.contains(&self.token.kind) {
                                 self.bump();
                             }
                         }
@@ -1756,7 +1756,7 @@ impl<'a> Parser<'a> {
     fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;

-        let is_args_start = |token: &TokenKind| match *token {
+        let is_args_start = |token: &Token| match token.kind {
             token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
             | token::LArrow => true,
             _ => false,
@@ -2822,7 +2822,7 @@ impl<'a> Parser<'a> {
             LhsExpr::AttributesParsed(attrs) => Some(attrs),
             _ => None,
         };
-        if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
+        if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
             return self.parse_prefix_range_expr(attrs);
         } else {
             self.parse_prefix_expr(attrs)?
@@ -3099,7 +3099,7 @@ impl<'a> Parser<'a> {
             self.err_dotdotdot_syntax(self.token.span);
         }

-        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
+        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
                       "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
                       self.token);
         let tok = self.token.clone();
@@ -7867,7 +7867,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
     for unmatched in unclosed_delims.iter() {
         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
             "incorrect close delimiter: `{}`",
-            pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
+            pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
         ));
         err.span_label(unmatched.found_span, "incorrect close delimiter");
         if let Some(sp) = unmatched.candidate_span {
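
One detail worth flagging: comparisons like `t == &token::ModSep` above keep compiling with `t: &Token` because `Token` has an `impl PartialEq<TokenKind>` (kept in token.rs below), so no Deref coercion is involved. A minimal standalone sketch of that pattern, with simplified stand-in types rather than the real libsyntax definitions:

#[derive(PartialEq)]
enum TokenKind { ModSep, Lt, Comma }

struct Token { kind: TokenKind }

// Retained after this commit: a Token can be compared against a bare TokenKind.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, rhs: &TokenKind) -> bool { self.kind == *rhs }
}

// Mirrors the parser.rs helper: `&Token == &TokenKind` goes through the impl
// above, so removing Deref does not break these comparisons.
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
    t == &TokenKind::ModSep || t == &TokenKind::Lt
}

fn main() {
    assert!(can_continue_type_after_non_fn_ident(&Token { kind: TokenKind::ModSep }));
    assert!(!can_continue_type_after_non_fn_ident(&Token { kind: TokenKind::Comma }));
}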

src/libsyntax/parse/token.rs

Lines changed: 11 additions & 18 deletions
@@ -17,7 +17,6 @@ use log::info;

 use std::fmt;
 use std::mem;
-use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -553,11 +552,11 @@ impl TokenKind {
 impl Token {
     // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
-    crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
-        if mem::discriminant(&self.kind) != mem::discriminant(other) {
+    crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
+        if mem::discriminant(&self.kind) != mem::discriminant(&other.kind) {
             return false
         }
-        match (&self.kind, other) {
+        match (&self.kind, &other.kind) {
             (&Eq, &Eq) |
             (&Lt, &Lt) |
             (&Le, &Le) |
@@ -631,14 +630,6 @@ impl PartialEq<TokenKind> for Token {
     }
 }

-// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
-impl Deref for Token {
-    type Target = TokenKind;
-    fn deref(&self) -> &Self::Target {
-        &self.kind
-    }
-}
-
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
@@ -778,12 +769,14 @@ impl Nonterminal {
     }
 }

-crate fn is_op(tok: &TokenKind) -> bool {
-    match *tok {
-        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
-        Ident(..) | Lifetime(..) | Interpolated(..) |
-        Whitespace | Comment | Shebang(..) | Eof => false,
-        _ => true,
+impl Token {
+    crate fn is_op(&self) -> bool {
+        match self.kind {
+            OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
+            Ident(..) | Lifetime(..) | Interpolated(..) |
+            Whitespace | Comment | Shebang(..) | Eof => false,
+            _ => true,
+        }
     }
 }

src/libsyntax/print/pprust.rs

Lines changed: 8 additions & 4 deletions
@@ -6,7 +6,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
 use crate::util::parser::{self, AssocOp, Fixity};
 use crate::attr;
 use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Nonterminal, TokenKind};
+use crate::parse::token::{self, BinOpToken, Nonterminal, Token, TokenKind};
 use crate::parse::lexer::comments;
 use crate::parse::{self, ParseSess};
 use crate::print::pp::{self, Breaks};
@@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
     out
 }

-pub fn token_to_string(tok: &TokenKind) -> String {
+pub fn token_kind_to_string(tok: &TokenKind) -> String {
     match *tok {
         token::Eq => "=".to_string(),
         token::Lt => "<".to_string(),
@@ -250,6 +250,10 @@ pub fn token_to_string(tok: &TokenKind) -> String {
     }
 }

+pub fn token_to_string(token: &Token) -> String {
+    token_kind_to_string(&token.kind)
+}
+
 pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
     match *nt {
         token::NtExpr(ref e) => expr_to_string(e),
@@ -734,11 +738,11 @@ pub trait PrintState<'a> {
                 }
             }
             TokenTree::Delimited(_, delim, tts) => {
-                self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
+                self.writer().word(token_kind_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
                 self.print_tts(tts)?;
                 self.writer().space()?;
-                self.writer().word(token_to_string(&token::CloseDelim(delim)))
+                self.writer().word(token_kind_to_string(&token::CloseDelim(delim)))
             },
         }
     }
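
The pprust change above leaves two entry points: `token_kind_to_string` for a bare `TokenKind` (e.g. a synthesized delimiter) and a new `token_to_string` wrapper for a whole `&Token`. A standalone sketch of the same split, again with simplified stand-in types rather than the real pprust code:

enum TokenKind { Eq, Comma, OpenParen }

struct Token { kind: TokenKind }

// Formerly named `token_to_string`: printing is still keyed on the kind alone.
fn token_kind_to_string(kind: &TokenKind) -> String {
    match kind {
        TokenKind::Eq => "=".to_string(),
        TokenKind::Comma => ",".to_string(),
        TokenKind::OpenParen => "(".to_string(),
    }
}

// New thin wrapper: accepts a whole token and delegates to its kind.
fn token_to_string(token: &Token) -> String {
    token_kind_to_string(&token.kind)
}

fn main() {
    let tok = Token { kind: TokenKind::Comma };
    assert_eq!(token_to_string(&tok), ",");
    // Callers that only have a TokenKind (like the delimiter printing above)
    // use the kind-level function directly.
    assert_eq!(token_kind_to_string(&TokenKind::OpenParen), "(");
    assert_eq!(token_kind_to_string(&TokenKind::Eq), "=");
}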

src/libsyntax/tokenstream.rs

Lines changed: 0 additions & 8 deletions
@@ -126,14 +126,6 @@ impl TokenTree {
         }
     }

-    /// Indicates if the stream is a token that is equal to the provided token.
-    pub fn eq_token(&self, t: TokenKind) -> bool {
-        match self {
-            TokenTree::Token(token) => *token == t,
-            _ => false,
-        }
-    }
-
     pub fn joint(self) -> TokenStream {
         TokenStream::new(vec![(self, Joint)])
     }
