@@ -72,7 +72,7 @@ pub enum TokenError {

#[derive(Debug)]
pub struct TokenStream<'src> {
-    tokens: Vec<(Token<'src>, usize, usize)>,
+    tokens: Vec<(Token<'src>, u32, u32)>,
}

#[derive(Debug, PartialEq)]
@@ -108,8 +108,8 @@ impl Token<'_> {
    const OPERATORS: &'static str = "=+-*/%&|^!<>.";
    const BRACKETS: &'static str = "()[]{}";

-    fn parse(source: &str, start_of_file: bool) -> Result<(Token, usize), TokenError> {
-        let mut chars = source.char_indices().peekable();
+    fn parse(source: &str, start_of_file: bool) -> Result<(Token, u32), TokenError> {
+        let mut chars = source.char_indices().map(|(i, c)| (i as u32, c)).peekable();
        'chr: while let Some((start, c)) = chars.next() {
            return match c {
                '#' => {
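
The signature change above narrows every byte offset that `parse` returns from `usize` to `u32`, and the `map` over `char_indices` does that narrowing once, up front. A minimal standalone sketch of the pattern (illustrative only, not code from this repository; it assumes the source text is shorter than `u32::MAX` bytes):

// Sketch: yield (u32 byte offset, char) pairs instead of (usize, char).
// Assumes the input is shorter than u32::MAX bytes; offsets would truncate otherwise.
fn u32_char_indices(source: &str) -> impl Iterator<Item = (u32, char)> + '_ {
    source.char_indices().map(|(i, c)| (i as u32, c))
}

fn main() {
    for (i, c) in u32_char_indices("let x").take(3) {
        println!("{i}: {c}"); // 0: l, 1: e, 2: t
    }
}
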
@@ -126,7 +126,7 @@ impl Token<'_> {
                        return match chars.next() {
                            Some((_, '\t')) => continue,
                            Some((_, ' ')) => Err(TokenError::SpaceInIndent),
-                            Some(r) => Ok((Token::Indent(i), start + (i + 1 - s) as usize)),
+                            Some(r) => Ok((Token::Indent(i), start + (i + 1 - s) as u32)),
                            None => Err(TokenError::Empty),
                        };
                    }
@@ -145,7 +145,7 @@ impl Token<'_> {
                '"' => loop {
                    if let Some((i, c)) = chars.next() {
                        if c == '"' {
-                            let s = &source[start + 1..i];
+                            let s = &source[start as usize + 1..i as usize];
                            break Ok((Token::String(s), i + 1));
                        }
                    } else {
@@ -192,14 +192,15 @@ impl Token<'_> {
                    }
                }
                _ if c.is_digit(10) => {
+                    let start = start as usize;
                    let mut dot_encountered = false;
                    let mut prev_was_dot = false;
                    loop {
                        if let Some((i, c)) = chars.next() {
                            if !c.is_alphanumeric() && c != '_' {
                                if dot_encountered || c != '.' {
                                    let i = if prev_was_dot { i - 1 } else { i };
-                                    let s = &source[start..i];
+                                    let s = &source[start..i as usize];
                                    break Ok((Token::Number(s), i));
                                } else {
                                    dot_encountered = true;
@@ -210,22 +211,23 @@ impl Token<'_> {
                            }
                        } else {
                            let s = &source[start..];
-                            break Ok((Token::Number(s), source.len()));
+                            break Ok((Token::Number(s), source.len() as u32));
                        }
                    }
                }
                _ => {
+                    let start = start as usize;
                    let (s, i) = loop {
                        if let Some((i, c)) = chars.next() {
                            if c.is_whitespace()
                                || Self::OPERATORS.contains(c)
                                || Self::BRACKETS.contains(c)
                                || c == ','
                            {
-                                break (&source[start..i], i);
+                                break (&source[start..i as usize], i);
                            }
                        } else {
-                            break (&source[start..], source.len());
+                            break (&source[start..], source.len() as u32);
                        }
                    };
                    Ok((
@@ -242,7 +244,7 @@ impl Token<'_> {
                            "pass" => Token::Pass,
                            _ => Token::Name(s),
                        },
-                        i,
+                        i as u32,
                    ))
                }
            };
@@ -260,13 +262,17 @@ impl<'src> TokenStream<'src> {
        loop {
            match Token::parse(source, start) {
                Ok((tk, len)) => {
-                    if let Token::Indent(i) = tk {
+                    let prev_col = if let Token::Indent(i) = tk {
                        line += 1;
-                        column = 0;
-                    }
-                    column += len;
-                    tokens.push((tk, line, column));
-                    source = &source[len..];
+                        column = i as u32 + 1;
+                        1
+                    } else {
+                        let c = column;
+                        column += len;
+                        c
+                    };
+                    tokens.push((tk, line, prev_col));
+                    source = &source[len as usize..];
                    start = false;
                }
                Err(e) => {
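
The rewrite above records each token at the column where it starts: an Indent bumps the line, sets the running column to one past the indent width, and is itself reported at column 1, while every other token is reported at the current column before it advances by the token's length. A standalone sketch of that bookkeeping with placeholder types (illustrative only, not this crate's API):

// Placeholder token type; the real crate's Token carries more variants.
enum Tok {
    Indent(u32),
    Other,
}

// Returns the (line, column) to record for `tk`, updating the running counters.
fn record(tk: &Tok, len: u32, line: &mut u32, column: &mut u32) -> (u32, u32) {
    let col = if let Tok::Indent(i) = tk {
        *line += 1;
        *column = *i + 1; // the next token starts right after the indentation
        1                 // the indent itself is reported at column 1
    } else {
        let c = *column;  // where this token starts
        *column += len;   // advance past it for the next token
        c
    };
    (*line, col)
}

fn main() {
    let (mut line, mut column) = (1, 1);
    let pos = record(&Tok::Other, 3, &mut line, &mut column); // a 3-char token
    assert_eq!(pos, (1, 1));
    assert_eq!(column, 4); // the next token would start at column 4
    let pos = record(&Tok::Indent(2), 1, &mut line, &mut column);
    assert_eq!(pos, (2, 1));
    assert_eq!(column, 3);
}
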
@@ -281,12 +287,12 @@ impl<'src> TokenStream<'src> {
        }
    }

-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Token<'src>, usize, usize)> + '_ {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Token<'src>, u32, u32)> + '_ {
        self.tokens.iter().cloned()
    }

    /// Removes redundant tokens, such as multiple Indents in a row. It also shrinks the vec
-    fn remove_redundant(tokens: &mut Vec<(Token, usize, usize)>) {
+    fn remove_redundant(tokens: &mut Vec<(Token, u32, u32)>) {
        // Remove trailing newlines
        while let Some((Token::Indent(_), ..)) = tokens.last() {
            tokens.pop().unwrap();
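
The doc comment on `remove_redundant` describes more than the trailing-newline trim visible in this hunk. One plausible reading of it, sketched against a placeholder token type (illustrative only, not the repository's implementation):

// Placeholder type standing in for this crate's Token; illustrative only.
#[derive(Debug, PartialEq)]
enum Tok {
    Indent(u32),
    Word(&'static str),
}

fn remove_redundant(tokens: &mut Vec<(Tok, u32, u32)>) {
    // Remove trailing newlines (Indent tokens at the very end).
    while let Some((Tok::Indent(_), ..)) = tokens.last() {
        tokens.pop().unwrap();
    }
    // Collapse runs of consecutive Indents, keeping the last one of each run,
    // since that is the indentation that applies to the following token.
    let mut i = 0;
    while i + 1 < tokens.len() {
        if matches!(tokens[i].0, Tok::Indent(_)) && matches!(tokens[i + 1].0, Tok::Indent(_)) {
            tokens.remove(i);
        } else {
            i += 1;
        }
    }
    // "It also shrinks the vec."
    tokens.shrink_to_fit();
}

fn main() {
    let mut toks = vec![
        (Tok::Word("a"), 1, 1),
        (Tok::Indent(0), 2, 1),
        (Tok::Indent(1), 3, 1),
        (Tok::Word("b"), 3, 2),
        (Tok::Indent(0), 4, 1),
    ];
    remove_redundant(&mut toks);
    assert_eq!(
        toks,
        vec![(Tok::Word("a"), 1, 1), (Tok::Indent(1), 3, 1), (Tok::Word("b"), 3, 2)]
    );
}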