
Commit f03543e

Improved tokenizer performance by reducing complexity from O(n²) to O(n) (#24)
Fixes #23
1 parent b84db2d commit f03543e


1 file changed: +8 -8 lines changed

src/tokenizer.rs (+8 -8)
@@ -13,15 +13,15 @@ use unicode_categories::UnicodeCategories;
 pub(crate) fn tokenize(mut input: &str) -> Vec<Token<'_>> {
     let mut tokens: Vec<Token> = Vec::new();
 
+    let mut last_reserved_token = None;
+
     // Keep processing the string until it is empty
-    while let Ok(result) = get_next_token(
-        input,
-        tokens.last().cloned(),
-        tokens
-            .iter()
-            .rfind(|token| token.kind == TokenKind::Reserved)
-            .cloned(),
-    ) {
+    while let Ok(result) =
+        get_next_token(input, tokens.last().cloned(), last_reserved_token.clone())
+    {
+        if result.1.kind == TokenKind::Reserved {
+            last_reserved_token = Some(result.1.clone());
+        }
         input = result.0;
 
         tokens.push(result.1);
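
Why this is O(n) instead of O(n²): the old loop called tokens.iter().rfind(|token| token.kind == TokenKind::Reserved) on every iteration, re-scanning every token produced so far just to find the most recent reserved one, so tokenizing n tokens cost on the order of n² token comparisons. The new loop caches that token in last_reserved_token and updates it whenever a reserved token is emitted, so each iteration does only constant extra work. Below is a minimal, self-contained sketch of the same caching pattern; Kind, Token, scan_one, and the driver are hypothetical stand-ins for illustration, not the crate's real types or the actual get_next_token signature.

// Minimal sketch (hypothetical types): cache the most recent "reserved" token
// instead of re-scanning the token list on every iteration.
#[derive(Clone, Debug, PartialEq)]
enum Kind {
    Reserved,
    Word,
}

#[derive(Clone, Debug)]
struct Token {
    kind: Kind,
    text: String,
}

// Hypothetical scanner: peel one whitespace-separated word off the input and
// classify it; the previously seen reserved token is available as context.
fn scan_one<'a>(input: &'a str, _last_reserved: Option<&Token>) -> Option<(&'a str, Token)> {
    let trimmed = input.trim_start();
    if trimmed.is_empty() {
        return None;
    }
    let end = trimmed.find(char::is_whitespace).unwrap_or(trimmed.len());
    let (word, rest) = trimmed.split_at(end);
    let kind = if word.eq_ignore_ascii_case("select") || word.eq_ignore_ascii_case("from") {
        Kind::Reserved
    } else {
        Kind::Word
    };
    Some((rest, Token { kind, text: word.to_string() }))
}

fn tokenize(mut input: &str) -> Vec<Token> {
    let mut tokens = Vec::new();
    // O(n): remember the last reserved token as we go, instead of calling
    // tokens.iter().rfind(...) on every iteration (which made the loop O(n²)).
    let mut last_reserved: Option<Token> = None;

    while let Some((rest, token)) = scan_one(input, last_reserved.as_ref()) {
        if token.kind == Kind::Reserved {
            last_reserved = Some(token.clone());
        }
        input = rest;
        tokens.push(token);
    }
    tokens
}

fn main() {
    println!("{:?}", tokenize("SELECT id FROM users"));
}

Cloning the cached token once per reserved keyword is cheap compared with re-walking the whole token list, which is the trade this commit makes.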
