diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 44b394bfcc0cea..3f7524ef57234c 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -2579,7 +2579,7 @@ pub const ParseTask = struct {
            .json => {
                const trace = tracer(@src(), "ParseJSON");
                defer trace.end();
-               const root = (try resolver.caches.json.parseJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
+               const root = (try resolver.caches.json.parsePackageJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
                return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
            },
            .toml => {
diff --git a/src/cache.zig b/src/cache.zig
index 43273e9bb5f3f6..4cefe3b9f02d8f 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -314,6 +314,10 @@ pub const Json = struct {
        return try parse(cache, log, source, allocator, json_parser.ParseJSON);
    }

+   pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
+       return try parse(cache, log, source, allocator, json_parser.ParseTSConfig);
+   }
+
    pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
        return try parse(cache, log, source, allocator, json_parser.ParseTSConfig);
    }
diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig
index 79d5de7c42c420..34558a1b6a2d6a 100644
--- a/src/cli/bunx_command.zig
+++ b/src/cli/bunx_command.zig
@@ -81,7 +81,7 @@ pub const BunxCommand = struct {
        bun.JSAst.Expr.Data.Store.create(default_allocator);
        bun.JSAst.Stmt.Data.Store.create(default_allocator);

-       const expr = try bun.JSON.ParseJSONUTF8(&source, bundler.log, bundler.allocator);
+       const expr = try bun.JSON.ParsePackageJSONUTF8(&source, bundler.log, bundler.allocator);

        // choose the first package that fits
        if (expr.get("bin")) |bin_expr| {
diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig
index d5d5bfb09b0904..5de7fd8b5bdc45 100644
--- a/src/cli/filter_arg.zig
+++ b/src/cli/filter_arg.zig
@@ -66,7 +66,7 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge
        };
        defer allocator.free(json_source.contents);

-       const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator);
+       const json = try json_parser.ParsePackageJSONUTF8(&json_source, log, allocator);

        const prop = json.asProperty("workspaces") orelse continue;

diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig
index c878e28e784587..5e2baeb9f17beb 100644
--- a/src/cli/init_command.zig
+++ b/src/cli/init_command.zig
@@ -12,7 +12,7 @@ const std = @import("std");
 const open = @import("../open.zig");
 const CLI = @import("../cli.zig");
 const Fs = @import("../fs.zig");
-const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8;
+const ParseJSON = @import("../json_parser.zig").ParsePackageJSONUTF8;
 const js_parser = bun.js_parser;
 const js_ast = bun.JSAst;
 const linker = @import("../linker.zig");
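The call sites above (the bundler, `bunx`, `--filter`, and `bun init`) all switch from the strict JSON entry points to the package.json-specific ones. For context, this is the input shape that motivates the swap; strict `JSON.parse` rejects it outright. The snippet below is an illustration only, not code from this PR:

```ts
// Illustration only (not from this PR): strict JSON parsing rejects the
// package.json shape that the parsePackageJSON/ParsePackageJSONUTF8 call
// sites above now accept.
const jsoncPackageJson = `{
  // comments like this one
  "name": "demo",
  "version": "0.0.1",
  "main": "./index.js",
}`;

try {
  JSON.parse(jsoncPackageJson);
} catch (err) {
  // Both the comment and the trailing comma are strict-JSON syntax errors.
  console.log("JSON.parse rejected it:", (err as Error).message);
}
```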
diff --git a/src/install/install.zig b/src/install/install.zig
index f135cfd57a4bd6..67d12022a2e6f3 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -5127,7 +5127,7 @@ pub const PackageManager = struct {
                data.json_buf,
            );
            initializeStore();
-           const json = json_parser.ParseJSONUTF8(
+           const json = json_parser.ParsePackageJSONUTF8(
                &package_json_source,
                manager.log,
                manager.allocator,
@@ -7102,7 +7102,7 @@ pub const PackageManager = struct {
            const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf);
            const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]);
            initializeStore();
-           const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator);
+           const json = try json_parser.ParsePackageJSONUTF8(&json_source, ctx.log, ctx.allocator);
            if (json.asProperty("workspaces")) |prop| {
                const json_array = switch (prop.expr.data) {
                    .e_array => |arr| arr,
@@ -8396,7 +8396,7 @@ pub const PackageManager = struct {
            current_package_json_buf[current_package_json_contents_len - 1] == '\n';

        initializeStore();
-       var current_package_json = json_parser.ParseJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
+       var current_package_json = json_parser.ParsePackageJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
            switch (Output.enable_ansi_colors) {
                inline else => |enable_ansi_colors| {
                    ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
@@ -8547,7 +8547,7 @@ pub const PackageManager = struct {

        // Now, we _re_ parse our in-memory edited package.json
        // so we can commit the version we changed from the lockfile
-       current_package_json = json_parser.ParseJSONUTF8(&source, ctx.log, manager.allocator) catch |err| {
+       current_package_json = json_parser.ParsePackageJSONUTF8(&source, ctx.log, manager.allocator) catch |err| {
            Output.prettyErrorln("error: package.json failed to parse due to error {s}", .{@errorName(err)});
            Global.exit(1);
            return;
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 75f3629eeac334..83169003129468 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -2669,7 +2669,7 @@ pub const Package = extern struct {
            };

            initializeStore();
-           break :brk try json_parser.ParseJSONUTF8(
+           break :brk try json_parser.ParsePackageJSONUTF8(
                &json_src,
                log,
                allocator,
@@ -3479,7 +3479,7 @@ pub const Package = extern struct {
        comptime features: Features,
    ) !void {
        initializeStore();
-       const json = json_parser.ParseJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| {
+       const json = json_parser.ParsePackageJSONUTF8(&source, log, allocator) catch |err| {
            switch (Output.enable_ansi_colors) {
                inline else => |enable_ansi_colors| {
                    log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
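With `install.zig` and `lockfile.zig` routed through the lenient parser, a plain `bun install` should tolerate the same syntax. A rough standalone sketch of that behavior follows; the temp-dir layout and the dependency-free package.json are mine, not from the PR, and it assumes `bun` is on PATH:

```ts
// Standalone sketch (assumptions: `bun` on PATH, writable temp dir).
import { mkdtempSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { spawnSync } from "bun";

const dir = mkdtempSync(join(tmpdir(), "bun-install-jsonc-"));
writeFileSync(
  join(dir, "package.json"),
  `{
  // no dependencies on purpose, so nothing is fetched from a registry
  "name": "demo",
  "version": "0.0.1",
}`,
);

const { exitCode, stderr } = spawnSync({
  cmd: ["bun", "install"],
  cwd: dir,
  stderr: "pipe",
});
console.log(exitCode === 0 ? "bun install parsed the commented package.json" : stderr.toString());
```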
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 8110aa3c53753e..ad3cc27da2a6a4 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -378,6 +378,7 @@ pub const PackageJSONVersionChecker = struct {
        .is_json = true,
        .json_warn_duplicate_keys = false,
        .allow_trailing_commas = true,
+       .allow_comments = true,
    };

    pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser {
@@ -732,7 +733,44 @@ pub fn ParseJSON(
    return try parser.parseExpr(false, false);
 }

-/// Parse JSON
+/// Parse Package JSON
+/// Allow trailing commas & comments.
+/// This eagerly transcodes UTF-16 strings into UTF-8 strings
+/// Use this when the text may need to be reprinted to disk as JSON (and not as JavaScript)
+/// Eagerly converting UTF-8 to UTF-16 can cause a performance issue
+pub fn ParsePackageJSONUTF8(
+    source: *const logger.Source,
+    log: *logger.Log,
+    allocator: std.mem.Allocator,
+) !Expr {
+    const len = source.contents.len;
+
+    switch (len) {
+        // This is to be consistent with how disabled JS files are handled
+        0 => {
+            return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data };
+        },
+        // Fast path for the trivial two-byte cases
+        2 => {
+            if (strings.eqlComptime(source.contents[0..2], "\"\"") or strings.eqlComptime(source.contents[0..2], "''")) {
+                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data };
+            } else if (strings.eqlComptime(source.contents[0..2], "{}")) {
+                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data };
+            } else if (strings.eqlComptime(source.contents[0..2], "[]")) {
+                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data };
+            }
+        },
+        else => {},
+    }
+
+    var parser = try TSConfigParser.init(allocator, source.*, log);
+    bun.assert(parser.source().contents.len > 0);
+
+    return try parser.parseExpr(false, true);
+}
+
+/// Parse Package JSON
+/// Allow trailing commas & comments.
 /// This eagerly transcodes UTF-16 strings into UTF-8 strings
 /// Use this when the text may need to be reprinted to disk as JSON (and not as JavaScript)
 /// Eagerly converting UTF-8 to UTF-16 can cause a performance issue
@@ -794,6 +832,8 @@ pub fn ParseJSONUTF8AlwaysDecode(
    var parser = try JSONLikeParser(.{
        .is_json = true,
        .always_decode_escape_sequences = true,
+       .allow_comments = true,
+       .allow_trailing_commas = true,
    }).init(allocator, source.*, log);
    if (comptime Environment.allow_assert) {
        bun.assert(parser.source().contents.len > 0);
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index ac13ca954e11a5..b99ef46aeaecc6 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -617,7 +617,7 @@ pub const PackageJSON = struct {
        var json_source = logger.Source.initPathString(key_path.text, entry.contents);
        json_source.path.pretty = r.prettyPath(json_source.path);

-       const json: js_ast.Expr = (r.caches.json.parseJSON(r.log, json_source, allocator) catch |err| {
+       const json: js_ast.Expr = (r.caches.json.parsePackageJSON(r.log, json_source, allocator) catch |err| {
            if (Environment.isDebug) {
                Output.printError("{s}: JSON parse error: {s}", .{ package_json_path, @errorName(err) });
            }
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 92afb977cec59a..fbe669c4be3c67 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -731,7 +731,7 @@ pub const Resolver = struct {
        // support passing a package.json or path to a package
        const pkg: *const PackageJSON = result.package_json orelse r.packageJSONForResolvedNodeModuleWithIgnoreMissingName(&result, true) orelse return error.MissingPackageJSON;

-       const json = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;
+       const json = (try r.caches.json.parsePackageJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;

        pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference);
        const dir = pkg.source.path.sourceDir();
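`package_json.zig` and `resolver.zig` sit on the module-resolution path, so the new leniency should be observable from `Bun.resolveSync` as well. A minimal sketch under assumed paths (the `demo-pkg` layout is hypothetical; run it with Bun):

```ts
// Minimal sketch (hypothetical layout): resolve a bare specifier whose
// package.json "main" sits behind a comment and a trailing comma.
import { mkdirSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

const root = join(tmpdir(), "resolve-jsonc-demo");
mkdirSync(join(root, "node_modules", "demo-pkg"), { recursive: true });
writeFileSync(
  join(root, "node_modules", "demo-pkg", "package.json"),
  `{
  /* block comment */
  "main": "./custom-main.js",
}`,
);
writeFileSync(join(root, "node_modules", "demo-pkg", "custom-main.js"), "module.exports = () => 123;");

// Bun.resolveSync(specifier, parentDir) walks node_modules and reads "main".
console.log(Bun.resolveSync("demo-pkg", root));
```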
diff --git a/test/bundler/esbuild/packagejson.test.ts b/test/bundler/esbuild/packagejson.test.ts
index fcb2c10eccb2a2..0b065f697b7b57 100644
--- a/test/bundler/esbuild/packagejson.test.ts
+++ b/test/bundler/esbuild/packagejson.test.ts
@@ -28,7 +28,7 @@ describe("bundler", () => {
       stdout: "123",
     },
   });
-  itBundled("packagejson/BadMain", {
+  itBundled("packagejson/CommentsAndTrailingCommas", {
     files: {
       "/Users/user/project/src/entry.js": /* js */ `
         import fn from 'demo-pkg'
@@ -36,10 +36,13 @@ describe("bundler", () => {
       `,
       "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
         {
-          "main": "./does-not-exist.js"
+          // very comment!!
+          /** even multi-line comment!! */
+          /** such feature much compatible very ecosystem */
+          "main": "./custom-main.js",
         }
       `,
-      "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
+      "/Users/user/project/node_modules/demo-pkg/custom-main.js": /* js */ `
         module.exports = function() {
           return 123
         }
@@ -49,30 +52,7 @@ describe("bundler", () => {
       stdout: "123",
     },
   });
-  itBundled("packagejson/SyntaxErrorComment", {
-    todo: true,
-    files: {
-      "/Users/user/project/src/entry.js": /* js */ `
-        import fn from 'demo-pkg'
-        console.log(fn())
-      `,
-      "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
-        {
-          // Single-line comment
-          "a": 1
-        }
-      `,
-      "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
-        module.exports = function() {
-          return 123
-        }
-      `,
-    },
-    bundleErrors: {
-      "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support comments"],
-    },
-  });
-  itBundled("packagejson/SyntaxErrorTrailingComma", {
+  itBundled("packagejson/BadMain", {
     files: {
       "/Users/user/project/src/entry.js": /* js */ `
         import fn from 'demo-pkg'
@@ -80,8 +60,7 @@ describe("bundler", () => {
       `,
       "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
         {
-          "a": 1,
-          "b": 2,
+          "main": "./does-not-exist.js"
         }
       `,
       "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
@@ -90,10 +69,55 @@ describe("bundler", () => {
         }
       `,
     },
-    bundleErrors: {
-      "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support trailing commas"],
+    run: {
+      stdout: "123",
     },
   });
+  // itBundled("packagejson/SyntaxErrorComment", {
+  //   todo: true,
+  //   files: {
+  //     "/Users/user/project/src/entry.js": /* js */ `
+  //       import fn from 'demo-pkg'
+  //       console.log(fn())
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
+  //       {
+  //         // Single-line comment
+  //         "a": 1
+  //       }
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
+  //       module.exports = function() {
+  //         return 123
+  //       }
+  //     `,
+  //   },
+  //   bundleErrors: {
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support comments"],
+  //   },
+  // });
+  // itBundled("packagejson/SyntaxErrorTrailingComma", {
+  //   files: {
+  //     "/Users/user/project/src/entry.js": /* js */ `
+  //       import fn from 'demo-pkg'
+  //       console.log(fn())
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
+  //       {
+  //         "a": 1,
+  //         "b": 2,
+  //       }
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
+  //       module.exports = function() {
+  //         return 123
+  //       }
+  //     `,
+  //   },
+  //   bundleErrors: {
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support trailing commas"],
+  //   },
+  // });
   itBundled("packagejson/Module", {
     // GENERATED
     files: {
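The rewritten bundler test above goes through the `itBundled` harness; roughly the same scenario can be reproduced with the public `Bun.build` API. A sketch under assumed paths (the directory layout is mine, not part of the PR):

```ts
// Sketch only (hypothetical layout): bundle an entry that imports a package
// whose package.json contains a comment and a trailing comma.
import { mkdirSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

const root = join(tmpdir(), "bundle-jsonc-demo");
mkdirSync(join(root, "node_modules", "demo-pkg"), { recursive: true });
writeFileSync(join(root, "entry.js"), "import fn from 'demo-pkg';\nconsole.log(fn());");
writeFileSync(
  join(root, "node_modules", "demo-pkg", "package.json"),
  `{
  // comment and trailing comma, as in the fixture above
  "main": "./custom-main.js",
}`,
);
writeFileSync(
  join(root, "node_modules", "demo-pkg", "custom-main.js"),
  "module.exports = function () { return 123; };",
);

const result = await Bun.build({
  entrypoints: [join(root, "entry.js")],
  outdir: join(root, "out"),
});
console.log(result.success ? "bundled despite the commented package.json" : result.logs);
```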
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index 7e8c1c1e8b6c56..4eab91b7c7818b 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -44,6 +44,47 @@ afterAll(dummyAfterAll);
 beforeEach(dummyBeforeEach);
 afterEach(dummyAfterEach);

+it("should not error when package.json has comments and trailing commas", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    `
+  {
+    // such comment!
+    "name": "foo",
+    /** even multi-line comment!! */
+    "version": "0.0.1",
+    "dependencies": {
+      "bar": "^1",
+    },
+  }
+`,
+  );
+  const { stdout, stderr, exited } = spawn({
+    cmd: [bunExe(), "install"],
+    cwd: package_dir,
+    stdout: "pipe",
+    stdin: "pipe",
+    stderr: "pipe",
+    env,
+  });
+  expect(stderr).toBeDefined();
+  const err = await new Response(stderr).text();
+  expect(err).toContain('error: No version matching "^1" found for specifier "bar" (but package exists)');
+  expect(stdout).toBeDefined();
+  expect(await new Response(stdout).text()).toBeEmpty();
+  expect(await exited).toBe(1);
+  expect(urls.sort()).toEqual([`${root_url}/bar`]);
+  expect(requested).toBe(1);
+  try {
+    await access(join(package_dir, "bun.lockb"));
+    expect(() => {}).toThrow();
+  } catch (err: any) {
+    expect(err.code).toBe("ENOENT");
+  }
+});
+
 describe("chooses", () => {
   async function runTest(latest: string, range: string, chosen = "0.0.5") {
     const exeName: string = {
diff --git a/test/cli/install/bun-run.test.ts b/test/cli/install/bun-run.test.ts
index efbfbe0be7f23c..9b8c8cb08f02e2 100644
--- a/test/cli/install/bun-run.test.ts
+++ b/test/cli/install/bun-run.test.ts
@@ -17,15 +17,19 @@ beforeEach(async () => {
 for (let withRun of [false, true]) {
   describe(withRun ? "bun run" : "bun", () => {
     describe("should work with .", () => {
-      it("respecting 'main' field", async () => {
+      it("respecting 'main' field and allowing trailing commas/comments in package.json", async () => {
         await writeFile(join(run_dir, "test.js"), "console.log('Hello, world!');");
         await writeFile(
           join(run_dir, "package.json"),
-          JSON.stringify({
-            name: "test",
-            version: "0.0.0",
-            main: "test.js",
-          }),
+          `{
+            // single-line comment
+            "name": "test",
+            /** even multi-line comment!!
+             * such feature much compatible very ecosystem
+             */
+            "version": "0.0.0",
+            "main": "test.js",
+          }`,
         );
         const { stdout, stderr, exitCode } = spawnSync({
           cmd: [bunExe(), withRun ? "run" : "", "."].filter(Boolean),
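For completeness, the `bun run .` scenario from the last test can also be exercised as a standalone script rather than through the test harness. A sketch with hypothetical paths (assumes `bun` is on PATH):

```ts
// Standalone sketch (assumptions: `bun` on PATH, writable temp dir): `bun run .`
// resolves "main" from a package.json that contains a comment and a trailing comma.
import { mkdtempSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { spawnSync } from "bun";

const dir = mkdtempSync(join(tmpdir(), "bun-run-jsonc-"));
writeFileSync(join(dir, "main.js"), "console.log('Hello, world!');");
writeFileSync(
  join(dir, "package.json"),
  `{
  // "main" is read from a commented package.json, as in the test above
  "name": "demo",
  "version": "0.0.0",
  "main": "main.js",
}`,
);

const { stdout, exitCode } = spawnSync({
  cmd: ["bun", "run", "."],
  cwd: dir,
  stdout: "pipe",
});
console.log(exitCode === 0 ? stdout.toString().trim() : "bun run failed");
```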