diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index de7cfc228396f9..ca946482f3977b 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -153,6 +153,13 @@ fn NewLexer_(
         code_point: CodePoint = -1,
         identifier: []const u8 = "",
         jsx_pragma: JSXPragma = .{},
+        bun_pragma: enum {
+            none,
+            bun,
+            bun_cjs,
+            bytecode,
+            bytecode_cjs,
+        } = .none,
         source_mapping_url: ?js_ast.Span = null,
         number: f64 = 0.0,
         rescan_close_brace_as_template_token: bool = false,
@@ -1950,7 +1957,9 @@ fn NewLexer_(
                         // }
                     }
 
-                    if (strings.hasPrefixWithWordBoundary(chunk, "jsx")) {
+                    if (lexer.bun_pragma == .none and strings.hasPrefixWithWordBoundary(chunk, "bun")) {
+                        lexer.bun_pragma = .bun;
+                    } else if (strings.hasPrefixWithWordBoundary(chunk, "jsx")) {
                         if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsx", chunk)) |span| {
                             lexer.jsx_pragma._jsx = span;
                         }
@@ -1970,6 +1979,10 @@ fn NewLexer_(
                         if (PragmaArg.scan(.no_space_first, lexer.start + i + 1, " sourceMappingURL=", chunk)) |span| {
                             lexer.source_mapping_url = span;
                         }
+                    } else if ((lexer.bun_pragma == .bun or lexer.bun_pragma == .bun_cjs) and strings.hasPrefixWithWordBoundary(chunk, "bytecode")) {
+                        lexer.bun_pragma = if (lexer.bun_pragma == .bun) .bytecode else .bytecode_cjs;
+                    } else if ((lexer.bun_pragma == .bytecode or lexer.bun_pragma == .bun) and strings.hasPrefixWithWordBoundary(chunk, "bun-cjs")) {
+                        lexer.bun_pragma = if (lexer.bun_pragma == .bytecode) .bytecode_cjs else .bun_cjs;
                     }
                 },
                 else => {},
@@ -1999,7 +2012,9 @@ fn NewLexer_(
                         }
                     }
 
-                    if (strings.hasPrefixWithWordBoundary(chunk, "jsx")) {
+                    if (lexer.bun_pragma == .none and strings.hasPrefixWithWordBoundary(chunk, "bun")) {
+                        lexer.bun_pragma = .bun;
+                    } else if (strings.hasPrefixWithWordBoundary(chunk, "jsx")) {
                         if (PragmaArg.scan(.skip_space_first, lexer.start + i + 1, "jsx", chunk)) |span| {
                             lexer.jsx_pragma._jsx = span;
                         }
@@ -2019,6 +2034,10 @@ fn NewLexer_(
                         if (PragmaArg.scan(.no_space_first, lexer.start + i + 1, " sourceMappingURL=", chunk)) |span| {
                             lexer.source_mapping_url = span;
                         }
+                    } else if ((lexer.bun_pragma == .bun or lexer.bun_pragma == .bun_cjs) and strings.hasPrefixWithWordBoundary(chunk, "bytecode")) {
+                        lexer.bun_pragma = if (lexer.bun_pragma == .bun) .bytecode else .bytecode_cjs;
+                    } else if ((lexer.bun_pragma == .bytecode or lexer.bun_pragma == .bun) and strings.hasPrefixWithWordBoundary(chunk, "bun-cjs")) {
+                        lexer.bun_pragma = if (lexer.bun_pragma == .bytecode) .bytecode_cjs else .bun_cjs;
                     }
                 },
                 else => {},
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 231a1cf9e6cc50..c21d275903c2cc 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -3,7 +3,6 @@
 /// ** you must also increment the `expected_version` in RuntimeTranspilerCache.zig **
 /// ** IMPORTANT **
 pub const std = @import("std");
-const bun = @import("root").bun;
 pub const logger = bun.logger;
 pub const js_lexer = bun.js_lexer;
 pub const importRecord = @import("./import_record.zig");
@@ -16,6 +15,7 @@ pub const RuntimeImports = _runtime.Runtime.Imports;
 pub const RuntimeFeatures = _runtime.Runtime.Features;
 pub const RuntimeNames = _runtime.Runtime.Names;
 pub const fs = @import("./fs.zig");
+const bun = @import("root").bun;
 const string = bun.string;
 const Output = bun.Output;
 const Global = bun.Global;
@@ -3226,12 +3226,16 @@ pub const Parser = struct {
         }
 
         // Detect a leading "// @bun" pragma
-        if (self.options.features.dont_bundle_twice) {
-            if (self.hasBunPragma()) |pragma| {
-                return js_ast.Result{
-                    .already_bundled = pragma,
-                };
-            }
+        if (p.lexer.bun_pragma != .none and p.options.features.dont_bundle_twice) {
+            return js_ast.Result{
+                .already_bundled = switch (p.lexer.bun_pragma) {
+                    .bun => .bun,
+                    .bytecode => .bytecode,
+                    .bytecode_cjs => .bytecode_cjs,
+                    .bun_cjs => .bun_cjs,
+                    else => unreachable,
+                },
+            };
         }
 
         // We must check the cache only after we've consumed the hashbang and leading // @bun pragma
@@ -4278,45 +4282,6 @@ pub const Parser = struct {
             .log = log,
         };
     }
-
-    const PragmaState = packed struct { seen_cjs: bool = false, seen_bytecode: bool = false };
-
-    fn hasBunPragma(self: *const Parser) ?js_ast.Result.AlreadyBundled {
-        const BUN_PRAGMA = "// @bun";
-        var cursor: usize = 0;
-
-        const contents = self.lexer.source.contents;
-        if (!bun.strings.startsWith(contents[cursor..], BUN_PRAGMA)) return null;
-        cursor += BUN_PRAGMA.len;
-
-        var state: PragmaState = .{};
-
-        while (cursor < self.lexer.end) : (cursor += 1) {
-            switch (contents[cursor]) {
-                '\n' => break,
-                '@' => {
-                    cursor += 1;
-                    if (cursor >= contents.len) break;
-                    if (contents[cursor] != 'b') continue;
-                    const slice = contents[cursor..];
-                    if (bun.strings.startsWith(slice, "bun-cjs")) {
-                        state.seen_cjs = true;
-                        cursor += "bun-cjs".len;
-                    } else if (bun.strings.startsWith(slice, "bytecode")) {
-                        state.seen_bytecode = true;
-                        cursor += "bytecode".len;
-                    }
-                },
-                else => {},
-            }
-        }
-
-        if (state.seen_cjs) {
-            return if (state.seen_bytecode) .bytecode_cjs else .bun_cjs;
-        } else {
-            return if (state.seen_bytecode) .bytecode else .bun;
-        }
-    }
 };
 
 const FindLabelSymbolResult = struct { ref: Ref, is_loop: bool, found: bool = false };
diff --git a/test/bundler/transpiler/bun-pragma.test.ts b/test/bundler/transpiler/bun-pragma.test.ts
deleted file mode 100644
index d9c9243d8b77f2..00000000000000
--- a/test/bundler/transpiler/bun-pragma.test.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-import path from "path";
-import { bunExe, bunEnv } from "harness";
-
-const fixturePath = (name: string): string => path.join(import.meta.dirname, "fixtures", name);
-
-describe("@bun pragma", () => {
-  it("is not detected when embedded in a URL", async () => {
-    const res = Bun.spawn({
-      cmd: [bunExe(), "run", fixturePath("bun-in-url.ts")],
-      stdio: ["ignore", "ignore", "ignore"],
-    });
-    await res.exited;
-    expect(res.exitCode).toBe(0);
-  });
-});
diff --git a/test/bundler/transpiler/fixtures/bun-in-url.ts b/test/bundler/transpiler/fixtures/bun-in-url.ts
deleted file mode 100644
index 64af8d8e3bbf05..00000000000000
--- a/test/bundler/transpiler/fixtures/bun-in-url.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-// https://bun.sh/docs/api/http#bun-serve
-const a: string = "hello";
-console.log(a);
-
-// '#bun' spotted in first comment but it's not a valid bun pragma
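
For reference, the leading comment forms that the new bun_pragma states correspond to, sketched as an illustrative TypeScript file; this is inferred from the enum variants and state transitions in the patch above, not an excerpt from the repository, and the trailing export is only there to make the sketch a valid module.

// @bun                      -> lexer.bun_pragma becomes .bun
// @bun @bun-cjs             -> .bun, then the "bun-cjs" chunk upgrades it to .bun_cjs
// @bun @bytecode            -> .bun, then "bytecode" upgrades it to .bytecode
// @bun @bytecode @bun-cjs   -> .bytecode, then "bun-cjs" upgrades it to .bytecode_cjs
//
// When options.features.dont_bundle_twice is enabled and the state is not .none,
// the parser returns js_ast.Result with .already_bundled set to the variant of
// the same name instead of transpiling the file again.
export {};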