Skip to content

Commit 0ce9850

Browse files
committed
ci(benchmarks/lexer): ensure next_token is inlined into lexer benchmark (#15519)
Preparatory step for #15513. That PR was showing a massive slowdown on lexer benchmarks, but it was only due to the change in that PR resulting in `next_token` not being inlined into the lexer benchmark. Add a separate function `next_token_for_benchmarks` which has identical content to `next_token`, but is marked `#[inline(always)]`, and use it in the lexer benchmark instead. This fixes the problem with the benchmark in #15513.
1 parent 4a53d09 commit 0ce9850

File tree

2 files changed

+14
-1
lines changed

2 files changed

+14
-1
lines changed

crates/oxc_parser/src/lexer/mod.rs

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -214,6 +214,16 @@ impl<'a> Lexer<'a> {
214214
self.finish_next(kind)
215215
}
216216

217+
// This is a workaround for a problem where `next_token` is not inlined in lexer benchmark.
218+
// Must be kept in sync with `next_token` above, and contain exactly the same code.
219+
#[cfg(feature = "benchmarking")]
220+
#[expect(clippy::inline_always)]
221+
#[inline(always)]
222+
pub fn next_token_for_benchmarks(&mut self) -> Token {
223+
let kind = self.read_next_token();
224+
self.finish_next(kind)
225+
}
226+
217227
fn finish_next(&mut self, kind: Kind) -> Token {
218228
self.token.set_kind(kind);
219229
self.token.set_end(self.offset());

tasks/benchmark/benches/lexer.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,10 @@ fn lex_whole_file<'a>(
7373
) -> Lexer<'a> {
7474
let mut lexer = Lexer::new_for_benchmarks(allocator, source_text, source_type);
7575
if lexer.first_token().kind() != Kind::Eof {
76-
while lexer.next_token().kind() != Kind::Eof {}
76+
// Use `next_token_for_benchmarks` instead of `next_token`, to work around problem
77+
// where `next_token` wasn't inlined here.
78+
// `next_token_for_benchmarks` is identical to `next_token`, but is marked `#[inline(always)]`.
79+
while lexer.next_token_for_benchmarks().kind() != Kind::Eof {}
7780
}
7881
lexer
7982
}

0 commit comments

Comments
 (0)