Skip to content

Commit a0c5b6c

Browse files
committed
Change line/column type to u32, make sure col points before Token
Actually, it points a bit too far forward (it includes leading spaces), but that can be fixed later.
1 parent cc2143d commit a0c5b6c

File tree

2 files changed

+31
-25
lines changed

2 files changed

+31
-25
lines changed

script/src/ast.rs

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -69,8 +69,8 @@ pub(crate) enum Expression<'src> {
6969
#[derive(Debug)]
7070
pub struct Error {
7171
error: ErrorType,
72-
line: usize,
73-
column: usize,
72+
line: u32,
73+
column: u32,
7474
rust_src_line: u32,
7575
}
7676

@@ -124,7 +124,7 @@ impl<'src> Script<'src> {
124124
}
125125
}
126126

127-
type TokenGroup<'src> = (Token<'src>, usize, usize);
127+
type TokenGroup<'src> = (Token<'src>, u32, u32);
128128

129129
impl<'src> Function<'src> {
130130
fn parse(tokens: &mut impl Iterator<Item = TokenGroup<'src>>) -> Result<Self, Error> {
@@ -173,8 +173,8 @@ impl<'src> Function<'src> {
173173
fn parse_block(
174174
tokens: &mut impl Iterator<Item = TokenGroup<'src>>,
175175
expected_indent: u8,
176-
mut line: usize,
177-
mut column: usize,
176+
mut line: u32,
177+
mut column: u32,
178178
) -> Result<(Lines<'src>, u8), Error> {
179179
let mut lines = Lines::new();
180180
loop {
@@ -256,7 +256,7 @@ impl<'src> Function<'src> {
256256
impl<'src> Expression<'src> {
257257
fn parse(
258258
pre: Token<'src>,
259-
tokens: &mut impl Iterator<Item = (Token<'src>, usize, usize)>,
259+
tokens: &mut impl Iterator<Item = (Token<'src>, u32, u32)>,
260260
) -> Result<(Self, Option<Token<'src>>), Error> {
261261
let (lhs, last_tk) = match pre {
262262
Token::BracketRoundOpen => match tokens.next() {
@@ -387,7 +387,7 @@ impl<'src> Expression<'src> {
387387
}
388388

389389
impl Error {
390-
fn new<T>(error: ErrorType, line: usize, column: usize, rust_src_line: u32) -> Result<T, Self> {
390+
fn new<T>(error: ErrorType, line: u32, column: u32, rust_src_line: u32) -> Result<T, Self> {
391391
Err(Self {
392392
error,
393393
line,

script/src/tokenizer.rs

Lines changed: 24 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -72,7 +72,7 @@ pub enum TokenError {
7272

7373
#[derive(Debug)]
7474
pub struct TokenStream<'src> {
75-
tokens: Vec<(Token<'src>, usize, usize)>,
75+
tokens: Vec<(Token<'src>, u32, u32)>,
7676
}
7777

7878
#[derive(Debug, PartialEq)]
@@ -108,8 +108,8 @@ impl Token<'_> {
108108
const OPERATORS: &'static str = "=+-*/%&|^!<>.";
109109
const BRACKETS: &'static str = "()[]{}";
110110

111-
fn parse(source: &str, start_of_file: bool) -> Result<(Token, usize), TokenError> {
112-
let mut chars = source.char_indices().peekable();
111+
fn parse(source: &str, start_of_file: bool) -> Result<(Token, u32), TokenError> {
112+
let mut chars = source.char_indices().map(|(i, c)| (i as u32, c)).peekable();
113113
'chr: while let Some((start, c)) = chars.next() {
114114
return match c {
115115
'#' => {
@@ -126,7 +126,7 @@ impl Token<'_> {
126126
return match chars.next() {
127127
Some((_, '\t')) => continue,
128128
Some((_, ' ')) => Err(TokenError::SpaceInIndent),
129-
Some(r) => Ok((Token::Indent(i), start + (i + 1 - s) as usize)),
129+
Some(r) => Ok((Token::Indent(i), start + (i + 1 - s) as u32)),
130130
None => Err(TokenError::Empty),
131131
};
132132
}
@@ -145,7 +145,7 @@ impl Token<'_> {
145145
'"' => loop {
146146
if let Some((i, c)) = chars.next() {
147147
if c == '"' {
148-
let s = &source[start + 1..i];
148+
let s = &source[start as usize + 1..i as usize];
149149
break Ok((Token::String(s), i + 1));
150150
}
151151
} else {
@@ -192,14 +192,15 @@ impl Token<'_> {
192192
}
193193
}
194194
_ if c.is_digit(10) => {
195+
let start = start as usize;
195196
let mut dot_encountered = false;
196197
let mut prev_was_dot = false;
197198
loop {
198199
if let Some((i, c)) = chars.next() {
199200
if !c.is_alphanumeric() && c != '_' {
200201
if dot_encountered || c != '.' {
201202
let i = if prev_was_dot { i - 1 } else { i };
202-
let s = &source[start..i];
203+
let s = &source[start..i as usize];
203204
break Ok((Token::Number(s), i));
204205
} else {
205206
dot_encountered = true;
@@ -210,22 +211,23 @@ impl Token<'_> {
210211
}
211212
} else {
212213
let s = &source[start..];
213-
break Ok((Token::Number(s), source.len()));
214+
break Ok((Token::Number(s), source.len() as u32));
214215
}
215216
}
216217
}
217218
_ => {
219+
let start = start as usize;
218220
let (s, i) = loop {
219221
if let Some((i, c)) = chars.next() {
220222
if c.is_whitespace()
221223
|| Self::OPERATORS.contains(c)
222224
|| Self::BRACKETS.contains(c)
223225
|| c == ','
224226
{
225-
break (&source[start..i], i);
227+
break (&source[start..i as usize], i);
226228
}
227229
} else {
228-
break (&source[start..], source.len());
230+
break (&source[start..], source.len() as u32);
229231
}
230232
};
231233
Ok((
@@ -242,7 +244,7 @@ impl Token<'_> {
242244
"pass" => Token::Pass,
243245
_ => Token::Name(s),
244246
},
245-
i,
247+
i as u32,
246248
))
247249
}
248250
};
@@ -260,13 +262,17 @@ impl<'src> TokenStream<'src> {
260262
loop {
261263
match Token::parse(source, start) {
262264
Ok((tk, len)) => {
263-
if let Token::Indent(i) = tk {
265+
let prev_col = if let Token::Indent(i) = tk {
264266
line += 1;
265-
column = 0;
266-
}
267-
column += len;
268-
tokens.push((tk, line, column));
269-
source = &source[len..];
267+
column = i as u32 + 1;
268+
1
269+
} else {
270+
let c = column;
271+
column += len;
272+
c
273+
};
274+
tokens.push((tk, line, prev_col));
275+
source = &source[len as usize..];
270276
start = false;
271277
}
272278
Err(e) => {
@@ -281,12 +287,12 @@ impl<'src> TokenStream<'src> {
281287
}
282288
}
283289

284-
pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Token<'src>, usize, usize)> + '_ {
290+
pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Token<'src>, u32, u32)> + '_ {
285291
self.tokens.iter().cloned()
286292
}
287293

288294
/// Removes redundant tokens, such as multiple Indents in a row. It also shrinks the vec
289-
fn remove_redundant(tokens: &mut Vec<(Token, usize, usize)>) {
295+
fn remove_redundant(tokens: &mut Vec<(Token, u32, u32)>) {
290296
// Remove trailing newlines
291297
while let Some((Token::Indent(_), ..)) = tokens.last() {
292298
tokens.pop().unwrap();

0 commit comments

Comments (0)