@@ -35,10 +35,11 @@ fn t1() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        let mut string_reader = setup(&sm,
-                                      &sh,
-                                      "/* my source file */ fn main() { println!(\"zebra\"); }\n"
-                                          .to_string());
+        let mut string_reader = setup(
+            &sm,
+            &sh,
+            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
+        );
         assert_eq!(string_reader.next_token(), token::Comment);
         assert_eq!(string_reader.next_token(), token::Whitespace);
         let tok1 = string_reader.next_token();
@@ -134,8 +135,10 @@ fn character_a() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
-                   mk_lit(token::Char, "a", None));
+        assert_eq!(
+            setup(&sm, &sh, "'a'".to_string()).next_token(),
+            mk_lit(token::Char, "a", None),
+        );
     })
 }
 
@@ -144,8 +147,10 @@ fn character_space() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
-                   mk_lit(token::Char, " ", None));
+        assert_eq!(
+            setup(&sm, &sh, "' '".to_string()).next_token(),
+            mk_lit(token::Char, " ", None),
+        );
     })
 }
 
@@ -154,8 +159,10 @@ fn character_escaped() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
-                   mk_lit(token::Char, "\\n", None));
+        assert_eq!(
+            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
+            mk_lit(token::Char, "\\n", None),
+        );
     })
 }
 
@@ -164,8 +171,10 @@ fn lifetime_name() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
-                   token::Lifetime(Symbol::intern("'abc")));
+        assert_eq!(
+            setup(&sm, &sh, "'abc".to_string()).next_token(),
+            token::Lifetime(Symbol::intern("'abc")),
+        );
     })
 }
 
@@ -174,8 +183,10 @@ fn raw_string() {
     with_default_globals(|| {
         let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
         let sh = mk_sess(sm.clone());
-        assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
-                   mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
+        assert_eq!(
+            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
+            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
+        );
     })
 }
 
@@ -186,11 +197,15 @@ fn literal_suffixes() {
         let sh = mk_sess(sm.clone());
         macro_rules! test {
            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
-                           mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
+                assert_eq!(
+                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
+                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
+                );
                 // with a whitespace separator:
-                assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
-                           mk_lit(token::$tok_type, $tok_contents, None));
+                assert_eq!(
+                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
+                    mk_lit(token::$tok_type, $tok_contents, None),
+                );
             }}
         }
 
@@ -204,12 +219,18 @@ fn literal_suffixes() {
         test!("1.0", Float, "1.0");
         test!("1.0e10", Float, "1.0e10");
 
-        assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
-                   mk_lit(token::Integer, "2", Some("us")));
-        assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
-                   mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-        assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
-                   mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
+        assert_eq!(
+            setup(&sm, &sh, "2us".to_string()).next_token(),
+            mk_lit(token::Integer, "2", Some("us")),
+        );
+        assert_eq!(
+            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
+            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
+        );
+        assert_eq!(
+            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
+            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
+        );
     })
 }
 