Skip to content

Commit 9a1958f

Browse files
committed
Implement caching of the next token in the tokenizer (10–15% faster)
1 parent 57b5c1a commit 9a1958f

File tree

1 file changed

+13
-1
lines changed

1 file changed

+13
-1
lines changed

src/tokenizer.rs

Lines changed: 13 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -28,6 +28,7 @@ pub struct TokenStream<'a> {
2828
buf: &'a str,
2929
position: Pos,
3030
off: usize,
31+
next_state: Option<(usize, Token<'a>, usize, Pos)>,
3132
}
3233

3334
#[derive(Clone, Debug)]
@@ -43,10 +44,20 @@ impl<'a> StreamOnce for TokenStream<'a> {
4344
type Error = Errors<Token<'a>, Token<'a>, Pos>;
4445

4546
fn uncons(&mut self) -> Result<Self::Item, Error<Token<'a>, Token<'a>>> {
47+
if let Some((at, tok, off, pos)) = self.next_state {
48+
if at == self.off {
49+
self.off = off;
50+
self.position = pos;
51+
return Ok(tok);
52+
}
53+
}
54+
let old_pos = self.off;
4655
let (kind, len) = self.peek_token()?;
4756
let value = &self.buf[self.off-len..self.off];
4857
self.skip_whitespace();
49-
Ok(Token { kind, value })
58+
let token = Token { kind, value };
59+
self.next_state = Some((old_pos, token, self.off, self.position));
60+
Ok(token)
5061
}
5162
}
5263

@@ -111,6 +122,7 @@ impl<'a> TokenStream<'a> {
111122
buf: s,
112123
position: Pos { line: 1, column: 1 },
113124
off: 0,
125+
next_state: None,
114126
};
115127
me.skip_whitespace();
116128
return me;

0 commit comments

Comments (0)