
Commit 8fc2809

Impl Copy for Token and TokenKind.
1 parent 2c24f50 commit 8fc2809
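
The call-site pattern repeated throughout this diff is mechanical: once `Token` and `TokenKind` derive `Copy`, a `token.clone()` through a shared reference can become a plain dereference (`*token`), or the value can simply be passed by value. A minimal sketch of that pattern, using simplified stand-in types rather than the compiler's real `Token`/`TokenKind` definitions:

#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKind {
    Eq,
    Eof,
}

#[derive(Clone, Copy, PartialEq, Debug)]
struct Token {
    kind: TokenKind,
}

// With `Copy` derived, dereferencing a shared reference yields an owned
// bitwise copy; without it, call sites have to write `token.clone()`.
fn copy_out(token: &Token) -> Token {
    *token
}

fn main() {
    let eq = Token { kind: TokenKind::Eq };
    let eof = Token { kind: TokenKind::Eof };
    assert_eq!(copy_out(&eq), eq);
    assert_ne!(copy_out(&eof), eq);
}

Deriving `Copy` is only possible because every field of `Token` and every `TokenKind` variant payload is itself `Copy`.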

21 files changed (+52 -57 lines)


compiler/rustc_ast/src/token.rs (+2 -2)

@@ -259,7 +259,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
             .contains(&name)
 }
 
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
@@ -329,7 +329,7 @@ pub enum TokenKind {
     Eof,
 }
 
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,

compiler/rustc_ast/src/tokenstream.rs (+1 -1)

@@ -461,7 +461,7 @@ impl TokenStream {
                 Delimiter::Invisible(InvisibleSource::FlattenToken),
                 TokenStream::token_alone(token::Lifetime(name), uninterpolated_span),
             ),
-            _ => TokenTree::Token(token.clone(), spacing),
+            _ => TokenTree::Token(*token, spacing),
         }
     }

compiler/rustc_expand/src/mbe/diagnostics.rs (+1 -1)

@@ -147,7 +147,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
                 .map_or(true, |failure| failure.is_better_position(*approx_position))
             {
                 self.best_failure = Some(BestFailure {
-                    token: token.clone(),
+                    token: *token,
                     position_in_tokenstream: *approx_position,
                     msg,
                     remaining_matcher: self

compiler/rustc_expand/src/mbe/macro_parser.rs (+2 -2)

@@ -181,7 +181,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
     for tt in tts {
         match tt {
             TokenTree::Token(token) => {
-                locs.push(MatcherLoc::Token { token: token.clone() });
+                locs.push(MatcherLoc::Token { token: *token });
             }
             TokenTree::Delimited(span, delimited) => {
                 let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -645,7 +645,7 @@ impl TtParser {
                 // There are no possible next positions AND we aren't waiting for the black-box
                 // parser: syntax error.
                 return Failure(T::build_failure(
-                    parser.token.clone(),
+                    parser.token,
                     parser.approx_token_stream_pos(),
                     "no rules expected this token in macro call",
                 ));

compiler/rustc_expand/src/mbe/macro_rules.rs (+4 -4)

@@ -807,7 +807,7 @@ impl<'tt> FirstSets<'tt> {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -870,7 +870,7 @@ impl<'tt> FirstSets<'tt> {
                         // If the sequence contents can be empty, then the first
                         // token could be the separator token itself.
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         assert!(first.maybe_empty);
@@ -946,7 +946,7 @@ impl<'tt> Clone for TtHandle<'tt> {
             // This variant *must* contain a `mbe::TokenTree::Token`, and not
             // any other variant of `mbe::TokenTree`.
             TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+                TtHandle::Token(mbe::TokenTree::Token(*tok))
             }
 
             _ => unreachable!(),
@@ -1120,7 +1120,7 @@ fn check_matcher_core<'tt>(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(*sep));
                     &new
                 } else {
                     &suffix_first

compiler/rustc_expand/src/mbe/quoted.rs (+2 -2)

@@ -257,7 +257,7 @@ fn parse_tree<'a>(
         }
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
@@ -294,7 +294,7 @@ fn parse_kleene_op<'a>(
     match input.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) {
             Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token.clone())),
+            None => Ok(Err(*token)),
         },
         tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
     }

compiler/rustc_expand/src/mbe/transcribe.rs (+2 -2)

@@ -128,7 +128,7 @@ pub(super) fn transcribe<'a>(
                 if repeat_idx < repeat_len {
                     *idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
+                        result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -330,7 +330,7 @@ pub(super) fn transcribe<'a>(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut token = token.clone();
+                let mut token = *token;
                 mut_visit::visit_token(&mut token, &mut marker);
                 let tt = TokenTree::Token(token, Spacing::Alone);
                 result.push(tt);

compiler/rustc_parse/src/lexer/unicode_chars.rs (+1 -1)

@@ -377,7 +377,7 @@ pub(super) fn check_for_substitution(
            ascii_name,
        })
    };
-    (token.clone(), sugg)
+    (*token, sugg)
 }
 
 /// Extract string if found at current position with given delimiters

compiler/rustc_parse/src/parser/attr.rs (+1 -1)

@@ -411,7 +411,7 @@ impl<'a> Parser<'a> {
            Err(err) => err.cancel(),
        }
 
-        Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
+        Err(InvalidMetaItem { span: self.token.span, token: self.token }
            .into_diagnostic(&self.sess.span_diagnostic))
    }
 }

compiler/rustc_parse/src/parser/attr_wrapper.rs (+7 -8)

@@ -104,13 +104,12 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain((0..self.num_calls).map(|_| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
+        let tokens = std::iter::once((FlatToken::Token(self.start_token.0), self.start_token.1))
+            .chain((0..self.num_calls).map(|_| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls);
 
         if !self.replace_ranges.is_empty() {
             let mut tokens: Vec<_> = tokens.collect();
@@ -211,7 +210,7 @@ impl<'a> Parser<'a> {
             return Ok(f(self, attrs.attrs)?.0);
         }
 
-        let start_token = (self.token.clone(), self.token_spacing);
+        let start_token = (self.token, self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;

compiler/rustc_parse/src/parser/diagnostics.rs (+5 -5)

@@ -301,7 +301,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;
 
         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, false)) = self.token.ident()
@@ -362,7 +362,7 @@ impl<'a> Parser<'a> {
        // if the previous token is a valid keyword
        // that might use a generic, then suggest a correct
        // generic placement (later on)
-        let maybe_keyword = self.prev_token.clone();
+        let maybe_keyword = self.prev_token;
        if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
            // if we have a valid keyword, attempt to parse generics
            // also obtain the keywords symbol
@@ -474,7 +474,7 @@ impl<'a> Parser<'a> {
                }
                false
            }
-            if token != parser::TokenType::Token(self.token.kind.clone()) {
+            if token != parser::TokenType::Token(self.token.kind) {
                let eq = is_ident_eq_keyword(&self.token.kind, &token);
                // if the suggestion is a keyword and the found token is an ident,
                // the content of which are equal to the suggestion's content,
@@ -533,7 +533,7 @@ impl<'a> Parser<'a> {
            // let y = 42;
            self.sess.emit_err(ExpectedSemi {
                span: self.token.span,
-                token: self.token.clone(),
+                token: self.token,
                unexpected_token_label: None,
                sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
            });
@@ -558,7 +558,7 @@ impl<'a> Parser<'a> {
            let span = self.prev_token.span.shrink_to_hi();
            self.sess.emit_err(ExpectedSemi {
                span,
-                token: self.token.clone(),
+                token: self.token,
                unexpected_token_label: Some(self.token.span),
                sugg: ExpectedSemiSugg::AddSemi(span),
            });

compiler/rustc_parse/src/parser/expr.rs (+4 -4)

@@ -430,7 +430,7 @@ impl<'a> Parser<'a> {
    fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
        self.sess.emit_err(errors::FoundExprWouldBeStmt {
            span: self.token.span,
-            token: self.token.clone(),
+            token: self.token,
            suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
        });
    }
@@ -685,7 +685,7 @@ impl<'a> Parser<'a> {
 
    /// Recover on `not expr` in favor of `!expr`.
    fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let negated_token = self.look_ahead(1, |t| t.clone());
+        let negated_token = self.look_ahead(1, |t| *t);
 
        let sub_diag = if negated_token.is_numeric_lit() {
            errors::NotAsNegationOperatorSub::SuggestNotBitwise
@@ -1544,7 +1544,7 @@ impl<'a> Parser<'a> {
    }
 
    fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
-        let maybe_eq_tok = self.prev_token.clone();
+        let maybe_eq_tok = self.prev_token;
        let (qself, path) = if self.eat_lt() {
            let lt_span = self.prev_token.span;
            let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
@@ -1983,7 +1983,7 @@ impl<'a> Parser<'a> {
        // err.downgrade_to_delayed_bug();
        // return Err(err);
        // }
-        let token = self.token.clone();
+        let token = self.token;
        let err = |self_: &Self| {
            let msg = format!("unexpected token: {}", super::token_descr(&token));
            self_.struct_span_err(token.span, msg)

compiler/rustc_parse/src/parser/item.rs (+2 -3)

@@ -1560,8 +1560,7 @@ impl<'a> Parser<'a> {
            self.expect_semi()?;
            body
        } else {
-            let err =
-                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
            return Err(err.into_diagnostic(&self.sess.span_diagnostic));
        };
 
@@ -2114,7 +2113,7 @@ impl<'a> Parser<'a> {
            || self.token.is_keyword(kw::Union))
            && self.look_ahead(1, |t| t.is_ident())
        {
-            let kw_token = self.token.clone();
+            let kw_token = self.token;
            let kw_str = pprust::token_to_string(&kw_token);
            let item = self.parse_item(ForceCollect::No)?;
            self.sess.emit_err(errors::NestedAdt {

compiler/rustc_parse/src/parser/mod.rs (+4 -4)

@@ -288,12 +288,12 @@ impl TokenCursor {
            // below can be removed.
            if let Some(tree) = self.tree_cursor.next_ref() {
                match tree {
-                    &TokenTree::Token(ref token, spacing) => {
+                    &TokenTree::Token(token, spacing) => {
                        debug_assert!(!matches!(
                            token.kind,
                            token::OpenDelim(_) | token::CloseDelim(_)
                        ));
-                        return (token.clone(), spacing);
+                        return (token, spacing);
                    }
                    &TokenTree::Delimited(sp, delim, ref tts) => {
                        let trees = tts.clone().into_trees();
@@ -564,7 +564,7 @@ impl<'a> Parser<'a> {
    fn check(&mut self, tok: &TokenKind) -> bool {
        let is_present = self.token == *tok;
        if !is_present {
-            self.expected_tokens.push(TokenType::Token(tok.clone()));
+            self.expected_tokens.push(TokenType::Token(*tok));
        }
        is_present
    }
@@ -1378,7 +1378,7 @@ impl<'a> Parser<'a> {
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => {
                self.bump();
-                TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
+                TokenTree::Token(self.prev_token, Spacing::Alone)
            }
        }
    }

compiler/rustc_parse/src/parser/nonterminal.rs (+3 -6)

@@ -172,11 +172,8 @@ impl<'a> Parser<'a> {
                    self.bump();
                    Ok(ParseNtResult::Ident(ident, is_raw))
                } else {
-                    Err(UnexpectedNonterminal::Ident {
-                        span: self.token.span,
-                        token: self.token.clone(),
-                    }
-                    .into_diagnostic(&self.sess.span_diagnostic))
+                    Err(UnexpectedNonterminal::Ident { span: self.token.span, token: self.token }
+                        .into_diagnostic(&self.sess.span_diagnostic))
                }
            }
            NonterminalKind::Path => Ok(ParseNtResult::Path(P(
@@ -193,7 +190,7 @@ impl<'a> Parser<'a> {
                } else {
                    Err(UnexpectedNonterminal::Lifetime {
                        span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                    }
                    .into_diagnostic(&self.sess.span_diagnostic))
                }

compiler/rustc_parse/src/parser/pat.rs (+3 -3)

@@ -315,7 +315,7 @@ impl<'a> Parser<'a> {
            self.sess.emit_err(TrailingVertNotAllowed {
                span: self.token.span,
                start: lo,
-                token: self.token.clone(),
+                token: self.token,
                note_double_vert: matches!(self.token.kind, token::OrOr).then_some(()),
            });
            self.bump();
@@ -1000,8 +1000,8 @@ impl<'a> Parser<'a> {
                etc = true;
                let mut etc_sp = self.token.span;
                if first_etc_and_maybe_comma_span.is_none() {
-                    if let Some(comma_tok) = self
-                        .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+                    if let Some(comma_tok) =
+                        self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None })
                    {
                        let nw_span = self
                            .sess

compiler/rustc_parse/src/parser/stmt.rs (+1 -1)

@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
            return Ok((AttrVec::new(), block));
        }
 
-        let maybe_ident = self.prev_token.clone();
+        let maybe_ident = self.prev_token;
        self.maybe_recover_unexpected_block_label();
        if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
            return self.error_block_no_opening_brace();

compiler/rustc_parse/src/parser/ty.rs (+2 -2)

@@ -493,7 +493,7 @@ impl<'a> Parser<'a> {
 
            // Recovery
            mutbl = Mutability::Mut;
-            let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing);
+            let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing);
            self.bump();
            self.bump_with(5, (dyn_tok, dyn_tok_sp));
        }
@@ -724,7 +724,7 @@ impl<'a> Parser<'a> {
    /// ```
    fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
        let lo = self.token.span;
-        let leading_token = self.prev_token.clone();
+        let leading_token = self.prev_token;
        let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
        let inner_lo = self.token.span;

src/librustdoc/clean/mod.rs (+1 -1)

@@ -2490,7 +2490,7 @@ fn filter_tokens_from_list(
            }
            token if should_retain(token) => {
                skip_next_comma = false;
-                tokens.push(token.clone());
+                tokens.push(token);
            }
            _ => {
                skip_next_comma = true;

src/tools/rust-analyzer/crates/ide/src/expand_macro.rs (+1 -1)

@@ -40,7 +40,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
    // struct Bar;
    // ```
 
-    let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+    let derive = sema.descend_into_macros(tok).into_iter().find_map(|descended| {
        let hir_file = sema.hir_file_for(&descended.parent()?);
        if !hir_file.is_derive_attr_pseudo_expansion(db) {
            return None;
