From 6beceacf04d4e1dedf0e2f07a0e160bda66e1682 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Wed, 29 Jan 2025 15:37:36 -0800 Subject: [PATCH 01/28] begin --- .vscode/settings.json | 2 +- src/bake/DevServer.zig | 271 ++++++++++++++++---------- src/bake/bake.zig | 20 +- src/bun.js/api/server.zig | 37 +++- src/bun.js/api/server/HTMLBundle.zig | 19 +- src/bun.js/api/server/StaticRoute.zig | 6 +- src/bun.zig | 4 + src/feature_flags.zig | 5 +- 8 files changed, 238 insertions(+), 126 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index eca14849b66fe7..8d2afd7e72530f 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -90,7 +90,7 @@ "editor.defaultFormatter": "esbenp.prettier-vscode", }, "[jsonc]": { - "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.defaultFormatter": "vscode.json-language-features", }, // Markdown diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 04a8d5f23b714a..439a2098f1bc28 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -17,6 +17,9 @@ pub const Options = struct { vm: *VirtualMachine, framework: bake.Framework, bundler_options: bake.SplitBundlerOptions, + /// When set, nothing is ever bundled for the server-side, + /// and DevSever acts purely as a frontend bundler. + frontend_only: bool = false, // Debugging features dump_sources: ?[]const u8 = if (Environment.isDebug) ".bake-debug" else null, @@ -76,6 +79,7 @@ bundling_failures: std.ArrayHashMapUnmanaged( SerializedFailure.ArrayHashContextViaOwner, false, ) = .{}, +frontend_only: bool, // These values are handles to the functions in `hmr-runtime-server.ts`. // For type definitions, see `./bake.private.d.ts` @@ -142,7 +146,7 @@ dump_dir: if (bun.FeatureFlags.bake_debugging_features) ?std.fs.Dir else void, emit_visualizer_events: u32, has_pre_crash_handler: bool, -pub const internal_prefix = "/_bun"; +pub const internal_prefix = "/:bun:"; pub const client_prefix = internal_prefix ++ "/client"; pub const asset_prefix = internal_prefix ++ "/asset"; pub const css_prefix = internal_prefix ++ "/css"; @@ -150,39 +154,52 @@ pub const css_prefix = internal_prefix ++ "/css"; pub const RouteBundle = struct { pub const Index = bun.GenericIndex(u30, RouteBundle); - route: Route.Index, - + /// There are two distinct types of route bundles. server_state: State, - + data: union(enum) { + /// FrameworkRouter provided route + framework: Framework, + /// HTMLBundle provided route + html: HTML, + }, /// Used to communicate over WebSocket the pattern. The HMR client contains code /// to match this against the URL bar to determine if a reloaded route applies. - full_pattern: []const u8, + full_pattern: bun.CowString, /// Generated lazily when the client JS is requested (HTTP GET /_bun/client/*.js), /// which is only needed when a hard-reload is performed. /// /// Freed when a client module updates. client_bundle: ?[]const u8, - /// Contain the list of serialized failures. Hashmap allows for - /// efficient lookup and removal of failing files. - /// When state == .evaluation_failure, this is popualted with that error. - evaluate_failure: ?SerializedFailure, - - // TODO: micro-opt: use a singular strong - - /// Cached to avoid re-creating the array every request. - /// Invalidated when a layout is added or removed from this route. - cached_module_list: JSC.Strong, - /// Cached to avoid re-creating the string every request. - /// Invalidated when any client file associated with the route is updated. 
- cached_client_bundle_url: JSC.Strong, - /// Cached to avoid re-creating the array every request. - /// Invalidated when the list of CSS files changes. - cached_css_file_array: JSC.Strong, /// Reference count of how many HmrSockets say they are on this route. This /// allows hot-reloading events to reduce the amount of times it traces the /// graph. - active_viewers: usize, + active_viewers: u32, + + const Framework = struct { + route_index: Route.Index, + + // TODO: micro-opt: use a singular strong + /// Cached to avoid re-creating the array every request. + /// Invalidated when a layout is added or removed from this route. + cached_module_list: JSC.Strong, + /// Cached to avoid re-creating the string every request. + /// Invalidated when any client file associated with the route is updated. + cached_client_bundle_url: JSC.Strong, + /// Cached to avoid re-creating the array every request. + /// Invalidated when the list of CSS files changes. + cached_css_file_array: JSC.Strong, + + /// Contain the list of serialized failures. Hashmap allows for + /// efficient lookup and removal of failing files. + /// When state == .evaluation_failure, this is popualted with that error. + evaluate_failure: ?SerializedFailure, + }; + + const HTML = struct { + /// DevServer increments the ref count of this bundle + html_bundle: *HTMLBundle, + }; /// A union is not used so that `bundler_failure_logs` can re-use memory, as /// this state frequently changes between `loaded` and the failure variants. @@ -209,6 +226,35 @@ pub const RouteBundle = struct { }; }; +pub const RouteIdentifier = union(enum) { + /// FrameworkRouter provides a fullstack server-side route + framework: FrameworkRouter.Route.Index, + /// HTMLBundle provides a frontend-only route, SPA-style + html: *HTMLBundle, + + /// May allocate memory + pub fn pattern(id: RouteIdentifier, dev: *DevServer) !bun.CowString { + return switch (id) { + .framework => |index| full_pattern: { + var buf = bake.PatternBuffer.empty; + var current: *Route = dev.router.routePtr(index); + // This loop is done to avoid prepending `/` at the root + // if there is more than one component. + buf.prependPart(current.part); + if (current.parent.unwrap()) |first| { + current = dev.router.routePtr(first); + while (current.parent.unwrap()) |next| { + buf.prependPart(current.part); + current = dev.router.routePtr(next); + } + } + break :full_pattern try bun.CowString.initDupe(buf.slice(), dev.allocator); + }, + .html => |html_bundle| bun.CowString.initNeverFree(html_bundle.pattern), + }; + } +}; + /// DevServer is stored on the heap, storing its allocator. 
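The `pattern` helper above rebuilds a route's URL pattern by walking parent links from the matched route toward the root, prepending each part and skipping the root so a lone `/` is not doubled. A minimal standalone sketch of that walk, using a plain parent-indexed array; `Route` and `buildPattern` here are illustrative stand-ins, not Bun's types:

const std = @import("std");

// Hypothetical stand-in for a filesystem route node.
const Route = struct {
    part: []const u8, // e.g. "/users" or "/:id"
    parent: ?usize, // index of the parent route, null at the root
};

/// Build the full pattern by prepending parts from leaf to root,
/// never prepending the root "/" when the route has more than one component.
fn buildPattern(alloc: std.mem.Allocator, routes: []const Route, leaf: usize) ![]u8 {
    var buf = std.ArrayList(u8).init(alloc);
    defer buf.deinit();
    // Always keep the leaf's own part, so a root-only route stays "/".
    try buf.insertSlice(0, routes[leaf].part);
    if (routes[leaf].parent) |first| {
        var current = first;
        // Prepend ancestors, stopping before the root so "/" is not doubled.
        while (routes[current].parent) |next| {
            try buf.insertSlice(0, routes[current].part);
            current = next;
        }
    }
    return buf.toOwnedSlice();
}

test "nested route pattern" {
    const routes = [_]Route{
        .{ .part = "/", .parent = null },
        .{ .part = "/users", .parent = 0 },
        .{ .part = "/:id", .parent = 1 },
    };
    const pattern = try buildPattern(std.testing.allocator, &routes, 2);
    defer std.testing.allocator.free(pattern);
    try std.testing.expectEqualStrings("/users/:id", pattern);
}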
pub fn init(options: Options) bun.JSOOM!*DevServer { const allocator = bun.default_allocator; @@ -247,8 +293,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"), .css_files = .{}, .route_js_payloads = .{}, - // .assets = .{}, - + .frontend_only = options.frontend_only, .client_graph = IncrementalGraph(.client).empty, .server_graph = IncrementalGraph(.server).empty, .incremental_result = IncrementalResult.empty, @@ -261,7 +306,6 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .reload_event = null, .requests = .{}, }, - .log = bun.logger.Log.init(allocator), .server_bundler = undefined, @@ -391,6 +435,8 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { } hash.update(&.{0}); + bun.writeAnyToHasher(&hash, options.frontend_only); + break :hash_key std.fmt.bytesToHex(std.mem.asBytes(&hash.final()), .lower); }; @@ -400,7 +446,9 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { assert(try dev.client_graph.insertStale(rfr.import_source, false) == IncrementalGraph(.client).react_refresh_index); } - dev.initServerRuntime(); + if (!options.frontend_only) { + dev.initServerRuntime(); + } // Initialize FrameworkRouter dev.router = router: { @@ -439,10 +487,12 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { break :router try FrameworkRouter.initEmpty(dev.root, types.items, allocator); }; - // TODO: move scanning to be one tick after server startup. this way the - // line saying the server is ready shows quicker, and route errors show up - // after that line. - try dev.scanInitialRoutes(); + if (options.frontend_only) { + // TODO: move scanning to be one tick after server startup. this way the + // line saying the server is ready shows quicker, and route errors show up + // after that line. 
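The hash-key block a few lines up folds the configuration inputs, now including `frontend_only`, into one hash and renders it as lowercase hex so cached state is keyed by the whole configuration. A standalone sketch of that keying, assuming `std.hash.Wyhash` purely as a stand-in for whatever hasher DevServer actually uses; `configHashKey` is a made-up name:

const std = @import("std");

/// Derive a hex cache key from the parts of a configuration that affect output.
fn configHashKey(entry_points: []const []const u8, frontend_only: bool) [16]u8 {
    var hasher = std.hash.Wyhash.init(0);
    for (entry_points) |file| {
        hasher.update(file);
        hasher.update(&[_]u8{0}); // separator so {"ab","c"} hashes unlike {"a","bc"}
    }
    // Mix scalar options in as raw bytes, similar in spirit to writeAnyToHasher.
    hasher.update(std.mem.asBytes(&frontend_only));
    const digest = hasher.final();
    return std.fmt.bytesToHex(std.mem.asBytes(&digest).*, .lower);
}

test "key changes when frontend_only flips" {
    const files = [_][]const u8{ "routes/index.tsx", "routes/about.tsx" };
    const a = configHashKey(&files, false);
    const b = configHashKey(&files, true);
    try std.testing.expect(!std.mem.eql(u8, &a, &b));
}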
+ try dev.scanInitialRoutes(); + } if (bun.FeatureFlags.bake_debugging_features and dev.has_pre_crash_handler) try bun.crash_handler.appendPreCrashHandler(DevServer, dev, dumpStateDueToCrash); @@ -526,7 +576,7 @@ pub fn deinit(dev: *DevServer) void { // bun.todoPanic(@src(), "bake.DevServer.deinit()", .{}); } -fn onJsRequest(dev: *DevServer, req: *Request, resp: *Response) void { +fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { const maybe_route = route: { const route_id = req.parameter(0); if (!bun.strings.hasSuffixComptime(route_id, ".js")) @@ -546,7 +596,7 @@ fn onJsRequest(dev: *DevServer, req: *Request, resp: *Response) void { } } -fn onAssetRequest(dev: *DevServer, req: *Request, resp: *Response) void { +fn onAssetRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { _ = dev; _ = req; _ = resp; @@ -558,7 +608,7 @@ fn onAssetRequest(dev: *DevServer, req: *Request, resp: *Response) void { } -fn onCssRequest(dev: *DevServer, req: *Request, resp: *Response) void { +fn onCssRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { const param = req.parameter(0); if (!bun.strings.hasSuffixComptime(param, ".css")) return req.setYield(true); @@ -583,11 +633,11 @@ fn parseHexToInt(comptime T: type, slice: []const u8) ?T { return @bitCast(out); } -fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: *Response) void { +fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: AnyResponse) void { resp.corked(onIncrementalVisualizerCorked, .{resp}); } -fn onIncrementalVisualizerCorked(resp: *Response) void { +fn onIncrementalVisualizerCorked(resp: AnyResponse) void { const code = if (Environment.codegen_embed) @embedFile("incremental_visualizer.html") else @@ -598,12 +648,12 @@ fn onIncrementalVisualizerCorked(resp: *Response) void { fn ensureRouteIsBundled( dev: *DevServer, - route_index: Route.Index, + id: RouteIdentifier, kind: DeferredRequest.Data.Tag, req: *Request, - resp: *Response, + resp: AnyResponse, ) bun.OOM!void { - const route_bundle_index = try dev.getOrPutRouteBundle(route_index); + const route_bundle_index = try dev.getOrPutRouteBundle(id); // TODO: Zig 0.14 gets labelled continue: // - Remove the `while` @@ -640,7 +690,7 @@ fn ensureRouteIsBundled( var entry_points: EntryPointList = EntryPointList.empty; defer entry_points.deinit(temp_alloc); - dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_index) catch bun.outOfMemory(); + dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, id) catch bun.outOfMemory(); if (entry_points.set.count() == 0) { if (dev.bundling_failures.count() > 0) { @@ -722,20 +772,28 @@ fn ensureRouteIsBundled( } } -fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, route_index: Route.Index) bun.OOM!void { +fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, id: RouteIdentifier) bun.OOM!void { const server_file_names = dev.server_graph.bundled_files.keys(); const client_file_names = dev.client_graph.bundled_files.keys(); // Build a list of all files that have not yet been bundled. 
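URLs under the internal prefix embed a fixed-width hex identifier (16 hex characters before the `.css` or `.js` extension) that `parseHexToInt` turns back into an integer key. A rough standalone equivalent for the CSS case; note the real helper bit-casts the decoded bytes, so its byte order differs from a plain `parseInt`, and `parseCssAssetId` is an invented name for illustration:

const std = @import("std");

/// Parse a URL path segment like "a1b2c3d4e5f60708.css" back into a u64 key.
/// Returns null when the extension or the 16-character hex id is malformed.
fn parseCssAssetId(param: []const u8) ?u64 {
    if (!std.mem.endsWith(u8, param, ".css")) return null;
    const hex = param[0 .. param.len - ".css".len];
    if (hex.len != 16) return null;
    return std.fmt.parseInt(u64, hex, 16) catch null;
}

test "css asset id round trip" {
    const id: u64 = 0xa1b2c3d4e5f60708;
    var name_buf: [20]u8 = undefined;
    const name = try std.fmt.bufPrint(&name_buf, "{x:0>16}.css", .{id});
    try std.testing.expectEqual(@as(?u64, id), parseCssAssetId(name));
    try std.testing.expectEqual(@as(?u64, null), parseCssAssetId("nope.css"));
}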
- var route = dev.router.routePtr(route_index); - const router_type = dev.router.typePtr(route.type); - try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, router_type.server_file); - try dev.appendOpaqueEntryPoint(client_file_names, entry_points, alloc, .client, router_type.client_file); - try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_page); - try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_layout); - while (route.parent.unwrap()) |parent_index| { - route = dev.router.routePtr(parent_index); - try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_layout); + switch (id) { + .framework => |route_index| { + var route = dev.router.routePtr(route_index); + const router_type = dev.router.typePtr(route.type); + try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, router_type.server_file); + try dev.appendOpaqueEntryPoint(client_file_names, entry_points, alloc, .client, router_type.client_file); + try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_page); + try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_layout); + while (route.parent.unwrap()) |parent_index| { + route = dev.router.routePtr(parent_index); + try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_layout); + } + }, + .html => |html| { + _ = html; + @panic("TODO"); + }, } } @@ -743,14 +801,25 @@ fn onRequestWithBundle( dev: *DevServer, route_bundle_index: RouteBundle.Index, req: bun.JSC.API.SavedRequest.Union, - resp: *Response, + resp: AnyResponse, ) void { - const server_request_callback = dev.server_fetch_function_callback.get() orelse - unreachable; // did not bundle - const route_bundle = dev.routeBundlePtr(route_bundle_index); + switch (route_bundle.data) { + .framework => |*fw| dev.onFrameworkRequestWithBundle(fw, req, resp), + .html => @panic("walaa"), + } +} + +fn onFrameworkRequestWithBundle( + dev: *DevServer, + bundle: *RouteBundle.Framework, + req: bun.JSC.API.SavedRequest.Union, + resp: AnyResponse, +) void { + const server_request_callback = dev.server_fetch_function_callback.get() orelse + unreachable; // did not initialize server code - const router_type = dev.router.typePtr(dev.router.routePtr(route_bundle.route).type); + const router_type = dev.router.typePtr(dev.router.routePtr(bundle.route_index).type); dev.server.?.onRequestFromSaved( req, @@ -766,17 +835,17 @@ fn onRequestWithBundle( break :str str; }, // routeModules - route_bundle.cached_module_list.get() orelse arr: { + bundle.cached_module_list.get() orelse arr: { const global = dev.vm.global; const keys = dev.server_graph.bundled_files.keys(); var n: usize = 1; - var route = dev.router.routePtr(route_bundle.route); + var route = dev.router.routePtr(bundle.route_index); while (true) { if (route.file_layout != .none) n += 1; route = dev.router.routePtr(route.parent.unwrap() orelse break); } const arr = JSValue.createEmptyArray(global, n); - route = dev.router.routePtr(route_bundle.route); + route = dev.router.routePtr(bundle.route_index); var route_name = bun.String.createUTF8(dev.relativePath(keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()])); arr.putIndex(global, 0, route_name.transferToJS(global)); n = 1; @@ -788,13 +857,13 @@ fn onRequestWithBundle( } route = dev.router.routePtr(route.parent.unwrap() orelse break); } - 
route_bundle.cached_module_list = JSC.Strong.create(arr, global); + bundle.cached_module_list = JSC.Strong.create(arr, global); break :arr arr; }, // clientId - route_bundle.cached_client_bundle_url.get() orelse str: { + bundle.cached_client_bundle_url.get() orelse str: { const id, const route_index: Route.Index.Optional = if (router_type.client_file != .none) - .{ std.crypto.random.int(u64), route_bundle.route.toOptional() } + .{ std.crypto.random.int(u64), bundle.route_index.toOptional() } else // When there is no framework-provided client code, generate // a JS file so that the hot-reloading code can reload the @@ -804,20 +873,20 @@ fn onRequestWithBundle( const str = bun.String.createFormat(client_prefix ++ "/route.{}.js", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&id))}) catch bun.outOfMemory(); defer str.deref(); const js = str.toJS(dev.vm.global); - route_bundle.cached_client_bundle_url = JSC.Strong.create(js, dev.vm.global); + bundle.cached_client_bundle_url = JSC.Strong.create(js, dev.vm.global); break :str js; }, // styles - route_bundle.cached_css_file_array.get() orelse arr: { - const js = dev.generateCssJSArray(route_bundle) catch bun.outOfMemory(); - route_bundle.cached_css_file_array = JSC.Strong.create(js, dev.vm.global); + bundle.cached_css_file_array.get() orelse arr: { + const js = dev.generateCssJSArray(bundle) catch bun.outOfMemory(); + bundle.cached_css_file_array = JSC.Strong.create(js, dev.vm.global); break :arr js; }, }, ); } -pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: *Response) void { +pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: AnyResponse) void { const route_bundle = dev.routeBundlePtr(bundle_index); const code = route_bundle.client_bundle orelse code: { const code = dev.generateClientBundle(route_bundle) catch bun.outOfMemory(); @@ -827,7 +896,7 @@ pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, r sendTextFile(code, MimeType.javascript.value, resp); } -pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: *App.Response) void { +pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: *AnyResponse) void { if (req.header("open-in-editor") == null) { resp.writeStatus("501 Not Implemented"); resp.end("Viewing source without opening in editor is not implemented yet!", false); @@ -863,7 +932,7 @@ const DeferredRequest = struct { const Data = union(enum) { server_handler: bun.JSC.API.SavedRequest, - js_payload: *Response, + js_payload: AnyResponse, const Tag = @typeInfo(Data).Union.tag_type.?; }; @@ -1404,8 +1473,8 @@ pub fn finalizeBundle( } if (bundle.active_viewers == 0 or !will_hear_hot_update) continue; try w.writeInt(i32, @intCast(i), .little); - try w.writeInt(u32, @intCast(bundle.full_pattern.len), .little); - try w.writeAll(bundle.full_pattern); + try w.writeInt(u32, @intCast(bundle.full_pattern.flags.len), .little); + try w.writeAll(bundle.full_pattern.slice()); // If no edges were changed, then it is impossible to // change the list of CSS files. 
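The writer calls above emit the hot-update payload as a little-endian binary frame per affected route: an i32 route-bundle index, a u32 length-prefixed pattern, then either a CSS section or a -1 sentinel when no edges changed. A small illustrative sketch of that framing with `std.io`; the exact wire format is DevServer's own, and `writeRouteUpdate` plus the u64 id list are assumptions made for the example:

const std = @import("std");

/// Write one route entry of a hypothetical hot-update frame.
/// `css_ids` is null when the route's CSS list is known to be unchanged.
fn writeRouteUpdate(w: anytype, route_index: i32, pattern: []const u8, css_ids: ?[]const u64) !void {
    try w.writeInt(i32, route_index, .little);
    try w.writeInt(u32, @intCast(pattern.len), .little);
    try w.writeAll(pattern);
    if (css_ids) |ids| {
        try w.writeInt(i32, @intCast(ids.len), .little);
        for (ids) |id| try w.writeInt(u64, id, .little);
    } else {
        // Sentinel: no edges changed, so the CSS list could not have changed.
        try w.writeInt(i32, -1, .little);
    }
}

test "frame layout" {
    var buf: [64]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);
    try writeRouteUpdate(stream.writer(), 3, "/users/:id", null);
    const written = stream.getWritten();
    // 4 (index) + 4 (length) + 10 (pattern) + 4 (sentinel)
    try std.testing.expectEqual(@as(usize, 22), written.len);
    try std.testing.expectEqual(@as(i32, 3), std.mem.readInt(i32, written[0..4], .little));
}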
@@ -1460,7 +1529,7 @@ pub fn finalizeBundle( const rb = dev.routeBundlePtr(req.route_bundle_index); rb.server_state = .possible_bundling_failures; - const resp: *Response = switch (req.data) { + const resp: AnyResponse = switch (req.data) { .server_handler => |*saved| brk: { const resp = saved.response.TCP; saved.deinit(); @@ -1548,7 +1617,7 @@ fn startNextBundleIfPresent(dev: *DevServer) void { dev.current_bundle = null; dev.log.clearAndFree(); dev.current_bundle_requests.clearRetainingCapacity(); - dev.emitVisualizerMessageIfNeeded() catch {}; + dev.emitVisualizerMessageIfNeeded(); // If there were pending requests, begin another bundle. if (dev.next_bundle.reload_event != null or dev.next_bundle.requests.items.len > 0) { @@ -1685,7 +1754,7 @@ pub fn routeBundlePtr(dev: *DevServer, idx: RouteBundle.Index) *RouteBundle { return &dev.route_bundles.items[idx.get()]; } -fn onRequest(dev: *DevServer, req: *Request, resp: *Response) void { +fn onRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { var params: FrameworkRouter.MatchedParams = undefined; if (dev.router.matchSlow(req.url(), ¶ms)) |route_index| { dev.ensureRouteIsBundled(route_index, .server_handler, req, resp) catch bun.outOfMemory(); @@ -1703,26 +1772,16 @@ fn onRequest(dev: *DevServer, req: *Request, resp: *Response) void { sendBuiltInNotFound(resp); } -fn getOrPutRouteBundle(dev: *DevServer, route: Route.Index) !RouteBundle.Index { +pub fn respondForHTMLBundle(dev: *DevServer, hbr: *HTMLBundle.HTMLBundleRoute, req: *uws.Request, resp: AnyResponse) void { + dev.ensureRouteIsBundled(hbr.dev_server_id.?, .server_handler, req, resp); +} + +fn getOrPutRouteBundle(dev: *DevServer, route: RouteIdentifier) !RouteBundle.Index { if (dev.router.routePtr(route).bundle.unwrap()) |bundle_index| return bundle_index; - const full_pattern = full_pattern: { - var buf = bake.PatternBuffer.empty; - var current: *Route = dev.router.routePtr(route); - // This loop is done to avoid prepending `/` at the root - // if there is more than one component. 
- buf.prependPart(current.part); - if (current.parent.unwrap()) |first| { - current = dev.router.routePtr(first); - while (current.parent.unwrap()) |next| { - buf.prependPart(current.part); - current = dev.router.routePtr(next); - } - } - break :full_pattern try dev.allocator.dupe(u8, buf.slice()); - }; - errdefer dev.allocator.free(full_pattern); + const full_pattern = route.pattern(dev); + errdefer full_pattern.deinit(dev.allocator); try dev.route_bundles.append(dev.allocator, .{ .route = route, @@ -1740,7 +1799,7 @@ fn getOrPutRouteBundle(dev: *DevServer, route: Route.Index) !RouteBundle.Index { return bundle_index; } -fn sendTextFile(code: []const u8, content_type: []const u8, resp: *Response) void { +fn sendTextFile(code: []const u8, content_type: []const u8, resp: AnyResponse) void { if (code.len == 0) { resp.writeStatus("202 No Content"); resp.writeHeaderInt("Content-Length", 0); @@ -1764,7 +1823,7 @@ const ErrorPageKind = enum { fn sendSerializedFailures( dev: *DevServer, - resp: *Response, + resp: AnyResponse, failures: []const SerializedFailure, kind: ErrorPageKind, ) void { @@ -1819,7 +1878,7 @@ fn sendSerializedFailures( } } -fn sendBuiltInNotFound(resp: *Response) void { +fn sendBuiltInNotFound(resp: AnyResponse) void { const message = "404 Not Found"; resp.writeStatus("404 Not Found"); resp.end(message, true); @@ -3497,7 +3556,7 @@ fn dumpBundle(dump_dir: std.fs.Dir, side: bake.Graph, rel_path: []const u8, chun try bufw.flush(); } -fn emitVisualizerMessageIfNeeded(dev: *DevServer) !void { +fn emitVisualizerMessageIfNeeded(dev: *DevServer) void { if (!bun.FeatureFlags.bake_debugging_features) return; if (dev.emit_visualizer_events == 0) return; @@ -3505,7 +3564,7 @@ fn emitVisualizerMessageIfNeeded(dev: *DevServer) !void { var payload = try std.ArrayList(u8).initCapacity(sfb.get(), 65536); defer payload.deinit(); - try dev.writeVisualizerMessage(&payload); + dev.writeVisualizerMessage(&payload) catch return; // visualizer does not get an update if it OOMs dev.publish(.visualizer, payload.items, .binary); } @@ -3558,7 +3617,7 @@ fn writeVisualizerMessage(dev: *DevServer, payload: *std.ArrayList(u8)) !void { pub fn onWebSocketUpgrade( dev: *DevServer, - res: *Response, + res: AnyResponse, req: *Request, upgrade_ctx: *uws.uws_socket_context_t, id: usize, @@ -3783,7 +3842,7 @@ const HmrSocket = struct { switch (field) { .visualizer => { s.dev.emit_visualizer_events += 1; - s.dev.emitVisualizerMessageIfNeeded() catch bun.outOfMemory(); + s.dev.emitVisualizerMessageIfNeeded(); }, else => {}, } @@ -4255,11 +4314,13 @@ pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []? } pub fn onWatchError(_: *DevServer, err: bun.sys.Error) void { - // TODO: how to recover? the watcher can't just ... crash???????? - Output.err(@as(bun.C.E, @enumFromInt(err.errno)), "Watcher crashed", .{}); - if (bun.Environment.isDebug) { - bun.todoPanic(@src(), "Watcher crash", .{}); + if (err.path.len > 0) { + Output.err(err, "failed to watch {} for hot-reloading", .{bun.fmt.quote(err.path)}); + } else { + Output.err(err, "failed to watch files for hot-reloading", .{}); } + Output.warn("The development server is still running, but hot-reloading is disabled until a restart.", .{}); + // TODO: attempt to automatically restart the watcher thread, perhaps wait for next request. 
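`emitVisualizerMessageIfNeeded` above sizes its payload with `std.heap.stackFallback`, so small messages are serialized on the stack and only oversized ones touch the general-purpose allocator. A self-contained sketch of that allocation pattern; `buildMessage` is a hypothetical helper, not DevServer code:

const std = @import("std");

/// Serialize a message into a buffer that lives on the stack for small
/// payloads and falls back to `gpa` only when 4 KiB is not enough.
fn buildMessage(gpa: std.mem.Allocator, parts: []const []const u8) ![]u8 {
    var sfb = std.heap.stackFallback(4096, gpa);
    const alloc = sfb.get();

    var payload = std.ArrayList(u8).init(alloc);
    defer payload.deinit();
    for (parts) |part| try payload.appendSlice(part);

    // Copy out with the caller's allocator; the stack buffer dies with this frame.
    return gpa.dupe(u8, payload.items);
}

test "small message stays correct" {
    const msg = try buildMessage(std.testing.allocator, &.{ "hello", " ", "world" });
    defer std.testing.allocator.free(msg);
    try std.testing.expectEqualStrings("hello world", msg);
}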
} pub fn publish(dev: *DevServer, topic: HmrTopic, message: []const u8, opcode: uws.Opcode) void { @@ -4327,8 +4388,8 @@ fn fromOpaqueFileId(comptime side: bake.Side, id: OpaqueFileId) IncrementalGraph return IncrementalGraph(side).FileIndex.init(@intCast(id.get())); } +/// Returns posix style path, suitible for URLs and reproducible hashes. fn relativePath(dev: *const DevServer, path: []const u8) []const u8 { - // TODO: windows slash normalization bun.assert(dev.root[dev.root.len - 1] != '/'); if (path.len >= dev.root.len + 1 and path[dev.root.len] == '/' and @@ -4337,7 +4398,7 @@ fn relativePath(dev: *const DevServer, path: []const u8) []const u8 { return path[dev.root.len + 1 ..]; } const rel = bun.path.relative(dev.root, path); - // `rel` is owned by a mutable threadlocal buffer in the path code. + // @constCast: `rel` is owned by a mutable threadlocal buffer in the path code. bun.path.platformToPosixInPlace(u8, @constCast(rel)); return rel; } @@ -4465,13 +4526,11 @@ const Transpiler = bun.transpiler.Transpiler; const BundleV2 = bun.bundle_v2.BundleV2; const Define = bun.options.Define; -const OutputFile = bun.options.OutputFile; const uws = bun.uws; -const App = uws.NewApp(false); const AnyWebSocket = uws.AnyWebSocket; const Request = uws.Request; -const Response = App.Response; +const AnyResponse = bun.uws.AnyResponse; const MimeType = bun.http.MimeType; @@ -4481,7 +4540,7 @@ const JSValue = JSC.JSValue; const VirtualMachine = JSC.VirtualMachine; const JSModuleLoader = JSC.JSModuleLoader; const EventLoopHandle = JSC.EventLoopHandle; -const JSInternalPromise = JSC.JSInternalPromise; +const HTMLBundle = JSC.API.HTMLBundle; const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena; const Chunk = bun.bundle_v2.Chunk; diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 5bd4a162434ca7..4686fe0e4a9f21 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -12,6 +12,7 @@ pub const api_name = "app"; /// Zig version of the TS definition 'Bake.Options' in 'bake.d.ts' pub const UserOptions = struct { + /// This arena contains some miscellaneous allocations at startup arena: std.heap.ArenaAllocator, allocations: StringRefList, @@ -19,6 +20,8 @@ pub const UserOptions = struct { framework: Framework, bundler_options: SplitBundlerOptions, + frontend_only: bool = false, + pub fn deinit(options: *UserOptions) void { options.arena.deinit(); options.allocations.free(); @@ -75,9 +78,11 @@ pub const UserOptions = struct { }; /// Each string stores its allocator since some may hold reference counts to JSC -const StringRefList = struct { +pub const StringRefList = struct { strings: std.ArrayListUnmanaged(ZigString.Slice), + pub const empty: StringRefList = .{ .strings = .{} }; + pub fn track(al: *StringRefList, str: ZigString.Slice) [:0]const u8 { al.strings.append(bun.default_allocator, str) catch bun.outOfMemory(); return str.sliceZ(); @@ -87,8 +92,6 @@ const StringRefList = struct { for (al.strings.items) |item| item.deinit(); al.strings.clearAndFree(bun.default_allocator); } - - pub const empty: StringRefList = .{ .strings = .{} }; }; pub const SplitBundlerOptions = struct { @@ -220,6 +223,14 @@ pub const Framework = struct { }; } + pub const none: Framework = .{ + .is_built_in_react = false, + .file_system_router_types = &.{}, + .server_components = null, + .react_fast_refresh = null, + .built_in_modules = .{}, + }; + pub const FileSystemRouterType = struct { root: []const u8, prefix: []const u8, @@ -648,7 +659,7 @@ pub inline fn getHmrRuntime(side: Side) [:0]const u8 { } else 
switch (side) { .client => bun.runtimeEmbedFile(.codegen_eager, "bake.client.js"), - // may not be live-reloaded + // server runtime is loaded once .server => bun.runtimeEmbedFile(.codegen, "bake.server.js"), }; } @@ -799,6 +810,5 @@ const Environment = bun.Environment; const JSC = bun.JSC; const JSValue = JSC.JSValue; -const validators = bun.JSC.Node.validators; const ZigString = JSC.ZigString; const Plugin = JSC.API.JSBundler.Plugin; diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index aa087a29f0ab10..2ef9832e484e7f 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -175,7 +175,9 @@ pub fn writeStatus(comptime ssl: bool, resp_ptr: ?*uws.NewApp(ssl).Response, sta } } +// TODO: rename to StaticBlobRoute, rename AnyStaticRoute to StaticRoute const StaticRoute = @import("./server/StaticRoute.zig"); + const HTMLBundle = JSC.API.HTMLBundle; const HTMLBundleRoute = HTMLBundle.HTMLBundleRoute; pub const AnyStaticRoute = union(enum) { @@ -286,6 +288,8 @@ pub const ServerConfig = struct { cost += this.base_url.href.len; return cost; } + + // TODO: rename to StaticRoute.Entry pub const StaticRouteEntry = struct { path: []const u8, route: AnyStaticRoute, @@ -1151,16 +1155,15 @@ pub const ServerConfig = struct { while (try iter.next()) |key| { const path, const is_ascii = key.toOwnedSliceReturningAllASCII(bun.default_allocator) catch bun.outOfMemory(); + errdefer bun.default_allocator.free(path); const value = iter.value; if (path.len == 0 or path[0] != '/') { - bun.default_allocator.free(path); return global.throwInvalidArguments("Invalid static route \"{s}\". path must start with '/'", .{path}); } if (!is_ascii) { - bun.default_allocator.free(path); return global.throwInvalidArguments("Invalid static route \"{s}\". Please encode all non-ASCII characters in the path.", .{path}); } @@ -1170,6 +1173,27 @@ pub const ServerConfig = struct { .route = route, }) catch bun.outOfMemory(); } + + // When HTML bundles are provided, ensure DevServer options are ready + // The precense of these options + if (dedupe_html_bundle_map.count() > 0) { + args.bake = .{ + .arena = std.heap.ArenaAllocator.init(bun.default_allocator), + .allocations = bun.bake.StringRefList.empty, + + // TODO: this should be the dir with bunfig?? + .root = bun.fs.FileSystem.instance.top_level_dir, + // TODO: framework / react fast refresh + // probably specify framework details through bunfig, + // but also it would be very nice to have built-in + // support to just load node_modules/react-refresh if + // react is installed. maybe even ship a fallback copy + // of rfr with bun so it always "just works" + .framework = bun.bake.Framework.none, + .frontend_only = true, + .bundler_options = bun.bake.SplitBundlerOptions.empty, + }; + } } if (global.hasException()) return error.JSError; @@ -1276,6 +1300,10 @@ pub const ServerConfig = struct { if (global.hasException()) return error.JSError; if (try arg.getTruthy(global, "app")) |bake_args_js| { + if (args.bake != null) { + // "app" is likely to be removed in favor of the HTML loader. 
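Earlier in this hunk each static route key is validated the same way: it must be non-empty, start with '/', and be pure ASCII so it can be registered directly as a route pattern. A hedged standalone version of those checks; the error names and `validateStaticPath` are invented for the example:

const std = @import("std");

const StaticPathError = error{ MissingLeadingSlash, NonAsciiPath };

/// Validate a user-supplied static route path the way the config parser does:
/// reject empty paths, paths not starting with '/', and non-ASCII bytes.
fn validateStaticPath(path: []const u8) StaticPathError!void {
    if (path.len == 0 or path[0] != '/') return error.MissingLeadingSlash;
    for (path) |byte| {
        if (byte >= 0x80) return error.NonAsciiPath;
    }
}

test "static route path rules" {
    try validateStaticPath("/assets/logo.png");
    try std.testing.expectError(error.MissingLeadingSlash, validateStaticPath("assets"));
    try std.testing.expectError(error.NonAsciiPath, validateStaticPath("/caf\xc3\xa9"));
}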
+ return global.throwInvalidArguments("'app' + HTML loader not supported.", .{}); + } if (!bun.FeatureFlags.bake()) { return global.throwInvalidArguments("To use the experimental \"app\" option, upgrade to the canary build of bun via \"bun upgrade --canary\"", .{}); } @@ -6024,6 +6052,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp return .pending; } + // rename to loadAndResolvePlugins pub fn getPluginsAsync( this: *ThisServer, bundle: *HTMLBundleRoute, @@ -6863,7 +6892,9 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp errdefer bun.default_allocator.free(base_url); const dev_server = if (config.bake) |*bake_options| dev_server: { - bun.bake.printWarning(); + if (!bake_options.frontend_only) { + bun.bake.printWarning(); + } break :dev_server try bun.bake.DevServer.init(.{ .arena = bake_options.arena.allocator(), diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index 02db1b48968ed3..cc6e76fc4cdacf 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -1,5 +1,9 @@ -// This is a description of what the build will be. -// It doesn't do the build. +//! This is a description of what the build will be. +//! It doesn't perform the build itself +pub const HTMLBundle = @This(); +pub usingnamespace JSC.Codegen.JSHTMLBundle; +// HTMLBundle can be owned by JavaScript as well as any number of Server instances. +pub usingnamespace bun.NewRefCounted(HTMLBundle, deinit); ref_count: u32 = 1, globalObject: *JSGlobalObject, @@ -11,7 +15,7 @@ plugins: union(enum) { }, bunfig_dir: []const u8, -/// Initialize an HTMLBundle.a +/// Initialize an HTMLBundle. /// /// `plugins` is array of serve plugins defined in the bunfig.toml file. They will be resolved and loaded. /// `bunfig_path` is the path to the bunfig.toml configuration file. It used to resolve the plugins relative @@ -58,6 +62,8 @@ pub const HTMLBundleRoute = struct { ref_count: u32 = 1, server: ?AnyServer = null, value: Value = .pending_plugins, + /// Written and read by DevServer to identify if this route has been registered with the bundler. + dev_server_id: *bun.bake.DevServer.RouteBundle.Index = 0, pub fn memoryCost(this: *const HTMLBundleRoute) usize { var cost: usize = 0; @@ -141,6 +147,10 @@ pub const HTMLBundleRoute = struct { }; if (server.config().development) { + if (server.getDevServer()) |dev| { + dev.respondForHTMLBundle(); + } + // TODO: actually implement proper watch mode instead of "rebuild on every request" if (this.value == .html) { this.value.html.deref(); @@ -543,8 +553,6 @@ pub const HTMLBundleRoute = struct { }; }; -pub usingnamespace JSC.Codegen.JSHTMLBundle; -pub usingnamespace bun.NewRefCounted(HTMLBundle, deinit); const bun = @import("root").bun; const std = @import("std"); const JSC = bun.JSC; @@ -552,7 +560,6 @@ const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const JSString = JSC.JSString; const JSValueRef = JSC.JSValueRef; -const HTMLBundle = @This(); const JSBundler = JSC.API.JSBundler; const HTTPResponse = bun.uws.AnyResponse; const uws = bun.uws; diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index 31336978771a8d..83c20e834f5cf9 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -1,4 +1,5 @@ -const std = @import("std"); +//! A static route serving a response object's blob. 
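HTMLBundle above is reference counted because it can be owned by JavaScript and by any number of Server instances at once. A minimal sketch of that intrusive ref-count idea, standing in for `bun.NewRefCounted` rather than reproducing it; `SharedBundle` is illustrative only:

const std = @import("std");

/// Several owners (a JS wrapper, one or more servers) share one heap object,
/// and the last deref frees it.
const SharedBundle = struct {
    ref_count: u32 = 1,
    path: []const u8,

    fn ref(this: *SharedBundle) void {
        this.ref_count += 1;
    }

    fn deref(this: *SharedBundle, alloc: std.mem.Allocator) void {
        this.ref_count -= 1;
        if (this.ref_count == 0) alloc.destroy(this);
    }
};

test "last owner frees the bundle" {
    const alloc = std.testing.allocator;
    const bundle = try alloc.create(SharedBundle);
    bundle.* = .{ .path = "index.html" };
    bundle.ref(); // a second owner, e.g. a server's route table
    bundle.deref(alloc); // first owner drops it
    bundle.deref(alloc); // second owner drops it; memory is released here
}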
+const StaticRoute = @This(); server: ?AnyServer = null, status_code: u16, @@ -286,8 +287,7 @@ fn renderMetadata(this: *StaticRoute, resp: HTTPResponse) void { this.doWriteHeaders(resp); } -const StaticRoute = @This(); - +const std = @import("std"); const bun = @import("root").bun; const Api = @import("../../../api/schema.zig").Api; diff --git a/src/bun.zig b/src/bun.zig index a61b2373dab8c7..b8c1d760fa4273 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -4415,6 +4415,10 @@ pub fn CowSlice(T: type) type { }; } + pub fn initDupe(data: []const T, allocator: Allocator) !@This() { + return initOwned(try allocator.dupe(T, data), allocator); + } + /// `.deinit` will not free memory from this slice. pub fn initNeverFree(data: []const T) @This() { return .{ diff --git a/src/feature_flags.zig b/src/feature_flags.zig index 4def9f6352ed93..a60b850e34d890 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -155,12 +155,13 @@ pub fn isLibdeflateEnabled() bool { return !bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_NO_LIBDEFLATE"); } -/// Enable Bun Kit's experimental bundler tools. +/// Enable the "app" option in Bun.serve. This option will likely be removed +/// in favor of HTML loaders and configuring framework options in bunfig.toml pub fn bake() bool { // In canary or if an environment variable is specified. return env.is_canary or env.isDebug or bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_EXPERIMENTAL_BAKE"); } -/// Additional debugging features for Bake, such as the incremental visualizer. +/// Additional debugging features for bake.DevServer, such as the incremental visualizer. /// To use them, extra flags are passed in addition to this one. pub const bake_debugging_features = env.is_canary or env.isDebug; From d8c68a33505b9c8a43e703fa025b21cf835d6d5c Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Wed, 29 Jan 2025 16:40:09 -0800 Subject: [PATCH 02/28] compile with html bundle support + todos --- src/bake/DevServer.zig | 320 ++++++++++++++++----------- src/bun.js/api/server.zig | 14 +- src/bun.js/api/server/HTMLBundle.zig | 11 +- src/bun.zig | 11 +- src/deps/uws.zig | 7 +- 5 files changed, 217 insertions(+), 146 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 439a2098f1bc28..e589f2ba582abf 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -154,8 +154,8 @@ pub const css_prefix = internal_prefix ++ "/css"; pub const RouteBundle = struct { pub const Index = bun.GenericIndex(u30, RouteBundle); - /// There are two distinct types of route bundles. server_state: State, + /// There are two distinct types of route bundles. data: union(enum) { /// FrameworkRouter provided route framework: Framework, @@ -198,7 +198,7 @@ pub const RouteBundle = struct { const HTML = struct { /// DevServer increments the ref count of this bundle - html_bundle: *HTMLBundle, + html_bundle: *HTMLBundle.HTMLBundleRoute, }; /// A union is not used so that `bundler_failure_logs` can re-use memory, as @@ -224,35 +224,56 @@ pub const RouteBundle = struct { /// at fault of bundling, nor would re-bundling change anything. 
loaded, }; -}; -pub const RouteIdentifier = union(enum) { - /// FrameworkRouter provides a fullstack server-side route - framework: FrameworkRouter.Route.Index, - /// HTMLBundle provides a frontend-only route, SPA-style - html: *HTMLBundle, - - /// May allocate memory - pub fn pattern(id: RouteIdentifier, dev: *DevServer) !bun.CowString { - return switch (id) { - .framework => |index| full_pattern: { - var buf = bake.PatternBuffer.empty; - var current: *Route = dev.router.routePtr(index); - // This loop is done to avoid prepending `/` at the root - // if there is more than one component. - buf.prependPart(current.part); - if (current.parent.unwrap()) |first| { - current = dev.router.routePtr(first); - while (current.parent.unwrap()) |next| { - buf.prependPart(current.part); - current = dev.router.routePtr(next); - } - } - break :full_pattern try bun.CowString.initDupe(buf.slice(), dev.allocator); - }, - .html => |html_bundle| bun.CowString.initNeverFree(html_bundle.pattern), + /// This identifier is used to refer to a RouteBundle that does not have it's + /// index known, or may not be initialized yet. + pub const Identifier = union(enum) { + /// FrameworkRouter provides a fullstack server-side route + framework: FrameworkRouter.Route.Index, + /// HTMLBundle provides a frontend-only route, SPA-style + html: *HTMLBundle.HTMLBundleRoute, + + pub const Packed = packed struct(u32) { + tag: enum(u1) { framework, html }, + data: u31, + + pub fn encode(id: Identifier) Packed { + return switch (id) { + .framework => |index| .{ .tag = .framework, .data = index.get() }, + .html => |html_bundle| .{ .tag = .html, .data = @truncate(@intFromPtr(html_bundle)) }, + }; + } + + pub fn decode(id: Packed) Identifier { + return switch (id.tag) { + .framework => |index| .{ .framework = Route.Index.init(index) }, + .html => |html_bundle| .{ .html = @ptrFromInt(html_bundle) }, + }; + } }; - } + + /// May allocate memory + pub fn pattern(id: Identifier, dev: *DevServer) !bun.CowString { + return switch (id) { + .framework => |index| full_pattern: { + var buf = bake.PatternBuffer.empty; + var current: *Route = dev.router.routePtr(index); + // This loop is done to avoid prepending `/` at the root + // if there is more than one component. + buf.prependPart(current.part); + if (current.parent.unwrap()) |first| { + current = dev.router.routePtr(first); + while (current.parent.unwrap()) |next| { + buf.prependPart(current.part); + current = dev.router.routePtr(next); + } + } + break :full_pattern try bun.CowString.initDupe(buf.slice(), dev.allocator); + }, + .html => |html_bundle| bun.CowString.initNeverFree(html_bundle.pattern), + }; + } + }; }; /// DevServer is stored on the heap, storing its allocator. 
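`RouteBundle.Identifier.Packed` above squeezes a one-bit tag plus a payload into 32 bits so the identifier can travel through small protocol fields. A self-contained sketch of the same encode/decode shape; it stores a 31-bit index for both cases, which round-trips losslessly, rather than a truncated pointer:

const std = @import("std");

const Identifier = union(enum) {
    framework: u31, // index into the framework router's route list
    html: u31, // index into a hypothetical HTML bundle list

    const Packed = packed struct(u32) {
        tag: enum(u1) { framework, html },
        data: u31,

        fn encode(id: Identifier) Packed {
            return switch (id) {
                .framework => |index| .{ .tag = .framework, .data = index },
                .html => |index| .{ .tag = .html, .data = index },
            };
        }

        fn decode(p: Packed) Identifier {
            return switch (p.tag) {
                .framework => .{ .framework = p.data },
                .html => .{ .html = p.data },
            };
        }
    };
};

test "round trips through 32 bits" {
    try std.testing.expect(@sizeOf(Identifier.Packed) == 4);
    const id: Identifier = .{ .html = 7 };
    const packed_id = Identifier.Packed.encode(id);
    const decoded = Identifier.Packed.decode(packed_id);
    try std.testing.expect(decoded == .html and decoded.html == 7);
}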
@@ -547,22 +568,22 @@ pub fn attachRoutes(dev: *DevServer, server: anytype) !void { bun.todoPanic(@src(), "DevServer does not support SSL yet", .{}); } - app.get(client_prefix ++ "/:route", *DevServer, dev, onJsRequest); - app.get(asset_prefix ++ "/:asset", *DevServer, dev, onAssetRequest); - app.get(css_prefix ++ "/:asset", *DevServer, dev, onCssRequest); - app.get(internal_prefix ++ "/src/*", *DevServer, dev, onSrcRequest); + // app.get(client_prefix ++ "/:route", *DevServer, dev, onJsRequest); + // app.get(asset_prefix ++ "/:asset", *DevServer, dev, onAssetRequest); + // app.get(css_prefix ++ "/:asset", *DevServer, dev, onCssRequest); + // app.get(internal_prefix ++ "/src/*", *DevServer, dev, onSrcRequest); - app.ws( - internal_prefix ++ "/hmr", - dev, - 0, - uws.WebSocketBehavior.Wrap(DevServer, HmrSocket, false).apply(.{}), - ); + // app.ws( + // internal_prefix ++ "/hmr", + // dev, + // 0, + // uws.WebSocketBehavior.Wrap(DevServer, HmrSocket, false).apply(.{}), + // ); - if (bun.FeatureFlags.bake_debugging_features) - app.get(internal_prefix ++ "/incremental_visualizer", *DevServer, dev, onIncrementalVisualizer); + // if (bun.FeatureFlags.bake_debugging_features) + // app.get(internal_prefix ++ "/incremental_visualizer", *DevServer, dev, onIncrementalVisualizer); - app.any("/*", *DevServer, dev, onRequest); + // app.any("/*", *DevServer, dev, onRequest); } pub fn deinit(dev: *DevServer) void { @@ -648,7 +669,7 @@ fn onIncrementalVisualizerCorked(resp: AnyResponse) void { fn ensureRouteIsBundled( dev: *DevServer, - id: RouteIdentifier, + id: RouteBundle.Identifier, kind: DeferredRequest.Data.Tag, req: *Request, resp: AnyResponse, @@ -673,8 +694,8 @@ fn ensureRouteIsBundled( .data = switch (kind) { .js_payload => .{ .js_payload = resp }, .server_handler => .{ - .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp, null) orelse return) - .save(dev.vm.global, req, resp), + .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) + .save(dev.vm.global, req, resp.TCP), }, }, }; @@ -690,7 +711,7 @@ fn ensureRouteIsBundled( var entry_points: EntryPointList = EntryPointList.empty; defer entry_points.deinit(temp_alloc); - dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, id) catch bun.outOfMemory(); + dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_bundle_index) catch bun.outOfMemory(); if (entry_points.set.count() == 0) { if (dev.bundling_failures.count() > 0) { @@ -726,8 +747,8 @@ fn ensureRouteIsBundled( .data = switch (kind) { .js_payload => .{ .js_payload = resp }, .server_handler => .{ - .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp, null) orelse return) - .save(dev.vm.global, req, resp), + .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) + .save(dev.vm.global, req, resp.TCP), }, }, }; @@ -754,7 +775,7 @@ fn ensureRouteIsBundled( resp.corked(sendSerializedFailures, .{ dev, resp, - (&(dev.routeBundlePtr(route_bundle_index).evaluate_failure orelse @panic("missing error")))[0..1], + (&(dev.routeBundlePtr(route_bundle_index).data.framework.evaluate_failure.?))[0..1], .evaluation, }); return; @@ -772,14 +793,14 @@ fn ensureRouteIsBundled( } } -fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, id: RouteIdentifier) bun.OOM!void { +fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, rbi: 
RouteBundle.Index) bun.OOM!void { const server_file_names = dev.server_graph.bundled_files.keys(); const client_file_names = dev.client_graph.bundled_files.keys(); // Build a list of all files that have not yet been bundled. - switch (id) { - .framework => |route_index| { - var route = dev.router.routePtr(route_index); + switch (dev.routeBundlePtr(rbi).data) { + .framework => |*bundle| { + var route = dev.router.routePtr(bundle.route_index); const router_type = dev.router.typePtr(route.type); try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, router_type.server_file); try dev.appendOpaqueEntryPoint(client_file_names, entry_points, alloc, .client, router_type.client_file); @@ -790,7 +811,7 @@ fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointLi try dev.appendOpaqueEntryPoint(server_file_names, entry_points, alloc, .server, route.file_layout); } }, - .html => |html| { + .html => |*html| { _ = html; @panic("TODO"); }, @@ -805,13 +826,14 @@ fn onRequestWithBundle( ) void { const route_bundle = dev.routeBundlePtr(route_bundle_index); switch (route_bundle.data) { - .framework => |*fw| dev.onFrameworkRequestWithBundle(fw, req, resp), + .framework => |*fw| dev.onFrameworkRequestWithBundle(route_bundle, fw, req, resp), .html => @panic("walaa"), } } fn onFrameworkRequestWithBundle( dev: *DevServer, + route_bundle: *RouteBundle, bundle: *RouteBundle.Framework, req: bun.JSC.API.SavedRequest.Union, resp: AnyResponse, @@ -878,7 +900,7 @@ fn onFrameworkRequestWithBundle( }, // styles bundle.cached_css_file_array.get() orelse arr: { - const js = dev.generateCssJSArray(bundle) catch bun.outOfMemory(); + const js = dev.generateCssJSArray(route_bundle) catch bun.outOfMemory(); bundle.cached_css_file_array = JSC.Strong.create(js, dev.vm.global); break :arr js; }, @@ -1075,6 +1097,9 @@ fn indexFailures(dev: *DevServer) !void { fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]const u8 { assert(route_bundle.client_bundle == null); assert(route_bundle.server_state == .loaded); // page is unfit to load + { + return "console.log('TODO')"; + } dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); @@ -1099,7 +1124,8 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]c } fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.JSValue { - if (Environment.allow_assert) assert(!route_bundle.cached_css_file_array.has()); + assert(route_bundle.data == .framework); // a JSC.JSValue has no purpose, and therefore isn't implemented. 
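The request path above leans on one recurring shape: `cached.get() orelse` compute the value, stash it, and fall through, with invalidation clearing the cache whenever the route's files change. A small sketch of that memoization using a plain optional instead of `JSC.Strong`; `RouteCache` and its fields are illustrative only:

const std = @import("std");

/// Per-route cache that is built on first use and cleared on invalidation,
/// mirroring how the cached client bundle and CSS list behave.
const RouteCache = struct {
    client_bundle: ?[]u8 = null,
    generation: u32 = 0,

    fn getOrGenerate(this: *RouteCache, alloc: std.mem.Allocator) ![]const u8 {
        return this.client_bundle orelse blk: {
            const fresh = try std.fmt.allocPrint(alloc, "// bundle generation {d}\n", .{this.generation});
            this.client_bundle = fresh;
            break :blk fresh;
        };
    }

    /// Called when a file belonging to this route is edited.
    fn invalidate(this: *RouteCache, alloc: std.mem.Allocator) void {
        if (this.client_bundle) |old| alloc.free(old);
        this.client_bundle = null;
        this.generation += 1;
    }
};

test "regenerates only after invalidation" {
    const alloc = std.testing.allocator;
    var cache = RouteCache{};
    defer cache.invalidate(alloc);

    const first = try cache.getOrGenerate(alloc);
    const again = try cache.getOrGenerate(alloc);
    try std.testing.expectEqual(first.ptr, again.ptr); // cached, not rebuilt

    cache.invalidate(alloc);
    const rebuilt = try cache.getOrGenerate(alloc);
    try std.testing.expectEqualStrings("// bundle generation 1\n", rebuilt);
}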
+ if (Environment.allow_assert) assert(!route_bundle.data.framework.cached_css_file_array.has()); assert(route_bundle.server_state == .loaded); // page is unfit to load dev.graph_safety_lock.lock(); @@ -1127,26 +1153,34 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.J } fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, gts: *GraphTraceState, goal: TraceImportGoal) !void { - var route = dev.router.routePtr(route_bundle.route); - const router_type = dev.router.typePtr(route.type); + switch (route_bundle.data) { + .framework => |fw| { + var route = dev.router.routePtr(fw.route_index); + const router_type = dev.router.typePtr(route.type); - // Both framework entry points are considered - try dev.server_graph.traceImports(fromOpaqueFileId(.server, router_type.server_file), gts, .{ .find_css = true }); - if (router_type.client_file.unwrap()) |id| { - try dev.client_graph.traceImports(fromOpaqueFileId(.client, id), gts, goal); - } + // Both framework entry points are considered + try dev.server_graph.traceImports(fromOpaqueFileId(.server, router_type.server_file), gts, .{ .find_css = true }); + if (router_type.client_file.unwrap()) |id| { + try dev.client_graph.traceImports(fromOpaqueFileId(.client, id), gts, goal); + } - // The route file is considered - if (route.file_page.unwrap()) |id| { - try dev.server_graph.traceImports(fromOpaqueFileId(.server, id), gts, goal); - } + // The route file is considered + if (route.file_page.unwrap()) |id| { + try dev.server_graph.traceImports(fromOpaqueFileId(.server, id), gts, goal); + } - // For all parents, the layout is considered - while (true) { - if (route.file_layout.unwrap()) |id| { - try dev.server_graph.traceImports(fromOpaqueFileId(.server, id), gts, goal); - } - route = dev.router.routePtr(route.parent.unwrap() orelse break); + // For all parents, the layout is considered + while (true) { + if (route.file_layout.unwrap()) |id| { + try dev.server_graph.traceImports(fromOpaqueFileId(.server, id), gts, goal); + } + route = dev.router.routePtr(route.parent.unwrap() orelse break); + } + }, + .html => |html| { + _ = html; + @panic("TODO"); + }, } } @@ -1467,32 +1501,38 @@ pub fn finalizeBundle( var it = route_bits.iterator(.{ .kind = .set }); // List 2 while (it.next()) |i| { - const bundle = dev.routeBundlePtr(RouteBundle.Index.init(@intCast(i))); - if (dev.incremental_result.had_adjusted_edges) { - bundle.cached_css_file_array.clear(); - } - if (bundle.active_viewers == 0 or !will_hear_hot_update) continue; - try w.writeInt(i32, @intCast(i), .little); - try w.writeInt(u32, @intCast(bundle.full_pattern.flags.len), .little); - try w.writeAll(bundle.full_pattern.slice()); - - // If no edges were changed, then it is impossible to - // change the list of CSS files. - if (dev.incremental_result.had_adjusted_edges) { - gts.clear(); - try dev.traceAllRouteImports(bundle, >s, .{ .find_css = true }); - const names = dev.client_graph.current_css_files.items; - - try w.writeInt(i32, @intCast(names.len), .little); - for (names) |name| { - const css_prefix_slash = css_prefix ++ "/"; - // These slices are url pathnames. 
The ID can be extracted - bun.assert(name.len == (css_prefix_slash ++ ".css").len + 16); - bun.assert(bun.strings.hasPrefix(name, css_prefix_slash)); - try w.writeAll(name[css_prefix_slash.len..][0..16]); - } - } else { - try w.writeInt(i32, -1, .little); + const route_bundle = dev.routeBundlePtr(RouteBundle.Index.init(@intCast(i))); + switch (route_bundle.data) { + .framework => |*fw_bundle| { + if (dev.incremental_result.had_adjusted_edges) { + fw_bundle.cached_css_file_array.clear(); + } + if (route_bundle.active_viewers == 0 or !will_hear_hot_update) continue; + try w.writeInt(i32, @intCast(i), .little); + try w.writeInt(u32, @intCast(route_bundle.full_pattern.flags.len), .little); + try w.writeAll(route_bundle.full_pattern.slice()); + + // If no edges were changed, then it is impossible to + // change the list of CSS files. + if (dev.incremental_result.had_adjusted_edges) { + gts.clear(); + try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); + const names = dev.client_graph.current_css_files.items; + + try w.writeInt(i32, @intCast(names.len), .little); + for (names) |name| { + const css_prefix_slash = css_prefix ++ "/"; + // These slices are url pathnames. The ID can be extracted + const css_hash_len = 16; + bun.assert(name.len == (css_prefix_slash ++ ".css").len + css_hash_len); + bun.assert(bun.strings.hasPrefix(name, css_prefix_slash)); + try w.writeAll(name[css_prefix_slash.len..][0..css_hash_len]); + } + } else { + try w.writeInt(i32, -1, .little); + } + }, + .html => @panic("TODO"), } } } @@ -1531,7 +1571,7 @@ pub fn finalizeBundle( const resp: AnyResponse = switch (req.data) { .server_handler => |*saved| brk: { - const resp = saved.response.TCP; + const resp = saved.response; saved.deinit(); break :brk resp; }, @@ -1575,17 +1615,19 @@ pub fn finalizeBundle( const file_name: ?[]const u8, const total_count: usize = if (current_bundle.had_reload_event) .{ null, 0 } else first_route_file_name: { - const opaque_id = dev.router.routePtr( - dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index) - .route, - ).file_page.unwrap() orelse - break :first_route_file_name .{ null, 0 }; - const server_index = fromOpaqueFileId(.server, opaque_id); - - break :first_route_file_name .{ - dev.relativePath(dev.server_graph.bundled_files.keys()[server_index.get()]), - 0, - }; + // TODO: + break :first_route_file_name .{ null, 0 }; + // const opaque_id = dev.router.routePtr( + // dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index) + // .route, + // ).file_page.unwrap() orelse + // break :first_route_file_name .{ null, 0 }; + // const server_index = fromOpaqueFileId(.server, opaque_id); + + // break :first_route_file_name .{ + // dev.relativePath(dev.server_graph.bundled_files.keys()[server_index.get()]), + // 0, + // }; }; if (file_name) |name| { Output.prettyError(": {s}", .{name}); @@ -1606,7 +1648,7 @@ pub fn finalizeBundle( rb.server_state = .loaded; switch (req.data) { - .server_handler => |saved| dev.onRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response.TCP), + .server_handler => |saved| dev.onRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response), .js_payload => |resp| dev.onJsRequestWithBundle(req.route_bundle_index, resp), } } @@ -1638,7 +1680,7 @@ fn startNextBundleIfPresent(dev: *DevServer) void { for (dev.next_bundle.route_queue.keys()) |route_bundle_index| { const rb = dev.routeBundlePtr(route_bundle_index); rb.server_state = .bundling; - 
dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, rb.route) catch bun.outOfMemory(); + dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_bundle_index) catch bun.outOfMemory(); } dev.startAsyncBundle( @@ -1773,29 +1815,41 @@ fn onRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { } pub fn respondForHTMLBundle(dev: *DevServer, hbr: *HTMLBundle.HTMLBundleRoute, req: *uws.Request, resp: AnyResponse) void { - dev.ensureRouteIsBundled(hbr.dev_server_id.?, .server_handler, req, resp); + dev.ensureRouteIsBundled(.{ .html = hbr }, .server_handler, req, resp) catch bun.outOfMemory(); } -fn getOrPutRouteBundle(dev: *DevServer, route: RouteIdentifier) !RouteBundle.Index { - if (dev.router.routePtr(route).bundle.unwrap()) |bundle_index| +fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.Identifier) !RouteBundle.Index { + const index_location: *RouteBundle.Index.Optional = switch (route) { + .framework => |route_index| &dev.router.routePtr(route_index).bundle, + .html => |html| &html.dev_server_id, + }; + if (index_location.unwrap()) |bundle_index| { return bundle_index; + } - const full_pattern = route.pattern(dev); + const full_pattern = try route.pattern(dev); errdefer full_pattern.deinit(dev.allocator); try dev.route_bundles.append(dev.allocator, .{ - .route = route, + .data = switch (route) { + .framework => |route_index| .{ .framework = .{ + .route_index = route_index, + .evaluate_failure = null, + .cached_module_list = .{}, + .cached_client_bundle_url = .{}, + .cached_css_file_array = .{}, + } }, + .html => |html| .{ .html = .{ + .html_bundle = html, + } }, + }, .server_state = .unqueued, .full_pattern = full_pattern, .client_bundle = null, - .evaluate_failure = null, - .cached_module_list = .{}, - .cached_client_bundle_url = .{}, - .cached_css_file_array = .{}, .active_viewers = 0, }); const bundle_index = RouteBundle.Index.init(@intCast(dev.route_bundles.items.len - 1)); - dev.router.routePtr(route).bundle = bundle_index.toOptional(); + index_location.* = bundle_index.toOptional(); return bundle_index; } @@ -1826,6 +1880,17 @@ fn sendSerializedFailures( resp: AnyResponse, failures: []const SerializedFailure, kind: ErrorPageKind, +) void { + switch (resp) { + inline else => |r| sendSerializedFailuresInner(dev, r, failures, kind), + } +} + +fn sendSerializedFailuresInner( + dev: *DevServer, + resp: anytype, + failures: []const SerializedFailure, + kind: ErrorPageKind, ) void { resp.writeStatus("500 Internal Server Error"); resp.writeHeader("Content-Type", MimeType.html.value); @@ -3561,7 +3626,8 @@ fn emitVisualizerMessageIfNeeded(dev: *DevServer) void { if (dev.emit_visualizer_events == 0) return; var sfb = std.heap.stackFallback(65536, bun.default_allocator); - var payload = try std.ArrayList(u8).initCapacity(sfb.get(), 65536); + var payload = std.ArrayList(u8).initCapacity(sfb.get(), 65536) catch + unreachable; // enough capacity on the stack defer payload.deinit(); dev.writeVisualizerMessage(&payload) catch return; // visualizer does not get an update if it OOMs @@ -3862,8 +3928,8 @@ const HmrSocket = struct { .set_url => { const pattern = msg[1..]; var params: FrameworkRouter.MatchedParams = undefined; - if (s.dev.router.matchSlow(pattern, ¶ms)) |route| { - const rbi = s.dev.getOrPutRouteBundle(route) catch bun.outOfMemory(); + if (s.dev.router.matchSlow(pattern, ¶ms)) |route_index| { + const rbi = s.dev.getOrPutRouteBundle(.{ .framework = route_index }) catch bun.outOfMemory(); if (s.active_route.unwrap()) |old| { if (old == rbi) 
return; s.dev.routeBundlePtr(old).active_viewers -= 1; diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 2ef9832e484e7f..0d4767897cb95a 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -7762,19 +7762,17 @@ pub const AnyServer = union(enum) { }; } - // TODO: support TLS pub fn onRequestFromSaved( this: AnyServer, req: SavedRequest.Union, - resp: *uws.NewApp(false).Response, + resp: uws.AnyResponse, callback: JSC.JSValue, comptime extra_arg_count: usize, extra_args: [extra_arg_count]JSValue, ) void { return switch (this) { - inline else => |server| server.onRequestFromSaved(req, resp, callback, extra_arg_count, extra_args), - .HTTPSServer => @panic("TODO: https"), - .DebugHTTPSServer => @panic("TODO: https"), + inline .HTTPServer, .DebugHTTPServer => |server| server.onRequestFromSaved(req, resp.TCP, callback, extra_arg_count, extra_args), + inline .HTTPSServer, .DebugHTTPSServer => |server| server.onRequestFromSaved(req, resp.SSL, callback, extra_arg_count, extra_args), }; } @@ -7783,6 +7781,12 @@ pub const AnyServer = union(enum) { inline else => |server| server.app.?.numSubscribers(topic), }; } + + pub fn devServer(this: AnyServer) ?*bun.bake.DevServer { + return switch (this) { + inline else => |server| server.dev_server, + }; + } }; const welcome_page_html_gz = @embedFile("welcome-page.html.gz"); diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index cc6e76fc4cdacf..d7798f045ae619 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -63,7 +63,8 @@ pub const HTMLBundleRoute = struct { server: ?AnyServer = null, value: Value = .pending_plugins, /// Written and read by DevServer to identify if this route has been registered with the bundler. - dev_server_id: *bun.bake.DevServer.RouteBundle.Index = 0, + dev_server_id: bun.bake.DevServer.RouteBundle.Index.Optional = .none, + pattern: []const u8, pub fn memoryCost(this: *const HTMLBundleRoute) usize { var cost: usize = 0; @@ -80,6 +81,7 @@ pub const HTMLBundleRoute = struct { .ref_count = 1, .server = null, .value = .pending_plugins, + .pattern = "/", // TODO: design flaw: HTMLBundleRoute can be present at multiple paths }); } @@ -147,11 +149,12 @@ pub const HTMLBundleRoute = struct { }; if (server.config().development) { - if (server.getDevServer()) |dev| { - dev.respondForHTMLBundle(); + if (server.devServer()) |dev| { + dev.respondForHTMLBundle(this, req, resp); + return; } - // TODO: actually implement proper watch mode instead of "rebuild on every request" + // Simple development workflow which rebundles on every request. if (this.value == .html) { this.value.html.deref(); this.value = .pending_plugins; diff --git a/src/bun.zig b/src/bun.zig index b8c1d760fa4273..bb37179649c0d8 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -4360,7 +4360,7 @@ pub const OSPathBufferPool = if (Environment.isWindows) WPathBufferPool else Pat pub const S3 = @import("./s3/client.zig"); -const CowString = CowSlice(u8); +pub const CowString = CowSlice(u8); /// "Copy on write" slice. There are many instances when it is desired to re-use /// a slice, but doing so would make it unknown if that slice should be freed. 
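`AnyServer.onRequestFromSaved` and `devServer` above use the `inline` switch form, so one branch is instantiated per variant and the right concrete server type is used without duplicating the call site. A standalone sketch of that dispatch over a tagged union; `HttpResponse` and `TlsResponse` are stand-ins, not uws types:

const std = @import("std");

const HttpResponse = struct {
    fn writeStatus(_: *HttpResponse, status: []const u8) void {
        std.debug.print("plain: {s}\n", .{status});
    }
};

const TlsResponse = struct {
    fn writeStatus(_: *TlsResponse, status: []const u8) void {
        std.debug.print("tls:   {s}\n", .{status});
    }
};

/// Stand-in for an AnyResponse-style union over the SSL and non-SSL response
/// types; `inline else` generates a monomorphized branch per variant.
const AnyResponse = union(enum) {
    plain: *HttpResponse,
    tls: *TlsResponse,

    fn writeStatus(this: AnyResponse, status: []const u8) void {
        switch (this) {
            inline else => |resp| resp.writeStatus(status),
        }
    }
};

pub fn main() void {
    var plain = HttpResponse{};
    var tls = TlsResponse{};
    const responses = [_]AnyResponse{ .{ .plain = &plain }, .{ .tls = &tls } };
    for (responses) |resp| resp.writeStatus("200 OK");
}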
@@ -4407,11 +4407,11 @@ pub fn CowSlice(T: type) type { .len = @intCast(data.len), }, .debug = if (cow_str_assertions) - bun.new(DebugData(.{ + bun.new(DebugData, .{ .mutex = .{}, .allocator = allocator, .borrows = 0, - })), + }), }; } @@ -4457,7 +4457,10 @@ pub fn CowSlice(T: type) type { if (cow_str_assertions) if (str.debug) |debug| { debug.mutex.lock(); defer debug.mutex.unlock(); - bun.assert(debug.allocator == allocator); + bun.assert( + debug.allocator.ptr == allocator.ptr and + debug.allocator.vtable == allocator.vtable, + ); if (str.flags.is_owned) { bun.assert(debug.borrows == 0); // active borrows become invalid data } else { diff --git a/src/deps/uws.zig b/src/deps/uws.zig index f08d381e0b65e3..b4f72c02b130c2 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -3221,12 +3221,7 @@ pub const AnyResponse = union(enum) { }; } - pub fn write(this: AnyResponse, data: []const u8) void { - return switch (this) { - .SSL => |resp| resp.write(data), - .TCP => |resp| resp.write(data), - }; - } + pub const write = @compileError("this function is not provided to discourage repeatedly checking the response type. use `switch(...) { inline else => ... }` so that multiple calls"); pub fn end(this: AnyResponse, data: []const u8, close_connection: bool) void { return switch (this) { From e80063eccc2c5c3632ea0edce511e813d30c8d13 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Wed, 29 Jan 2025 18:01:20 -0800 Subject: [PATCH 03/28] waaa --- src/bake/DevServer.zig | 158 ++++++++++++++++++++++++-------------- src/bundler/bundle_v2.zig | 9 ++- src/deps/uws.zig | 5 +- 3 files changed, 107 insertions(+), 65 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index e589f2ba582abf..eb9dce5db11e15 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -561,29 +561,37 @@ fn scanInitialRoutes(dev: *DevServer) !void { pub fn attachRoutes(dev: *DevServer, server: anytype) !void { dev.server = bun.JSC.API.AnyServer.from(server); const app = server.app.?; + const Server = @typeInfo(@TypeOf(server)).Pointer.child; + const is_ssl = @typeInfo(@TypeOf(app)).Pointer.child.is_ssl; + + app.get(client_prefix ++ "/:route", *DevServer, dev, wrapGenericRequestHandler(onJsRequest, is_ssl)); + app.get(asset_prefix ++ "/:asset", *DevServer, dev, wrapGenericRequestHandler(onAssetRequest, is_ssl)); + app.get(css_prefix ++ "/:asset", *DevServer, dev, wrapGenericRequestHandler(onCssRequest, is_ssl)); + app.get(internal_prefix ++ "/src/*", *DevServer, dev, wrapGenericRequestHandler(onSrcRequest, is_ssl)); + + app.ws( + internal_prefix ++ "/hmr", + dev, + 0, + uws.WebSocketBehavior.Wrap(DevServer, HmrSocket, is_ssl).apply(.{}), + ); - // For this to work, the route handlers need to be augmented to use the comptime - // SSL parameter. It's worth considering removing the SSL boolean. 
- if (@TypeOf(app) == *uws.NewApp(true)) { - bun.todoPanic(@src(), "DevServer does not support SSL yet", .{}); + if (bun.FeatureFlags.bake_debugging_features) { + app.get( + internal_prefix ++ "/incremental_visualizer", + *DevServer, + dev, + wrapGenericRequestHandler(onIncrementalVisualizer, is_ssl), + ); } - // app.get(client_prefix ++ "/:route", *DevServer, dev, onJsRequest); - // app.get(asset_prefix ++ "/:asset", *DevServer, dev, onAssetRequest); - // app.get(css_prefix ++ "/:asset", *DevServer, dev, onCssRequest); - // app.get(internal_prefix ++ "/src/*", *DevServer, dev, onSrcRequest); - - // app.ws( - // internal_prefix ++ "/hmr", - // dev, - // 0, - // uws.WebSocketBehavior.Wrap(DevServer, HmrSocket, false).apply(.{}), - // ); - - // if (bun.FeatureFlags.bake_debugging_features) - // app.get(internal_prefix ++ "/incremental_visualizer", *DevServer, dev, onIncrementalVisualizer); - - // app.any("/*", *DevServer, dev, onRequest); + // Only attach a catch-all handler if the framework has filesystem router + // types. Otherwise, this can just be Bun.serve's default handler. + if (dev.framework.file_system_router_types.len > 0) { + app.any("/*", *DevServer, dev, wrapGenericRequestHandler(onRequest, is_ssl)); + } else { + app.any("/*", *Server, server, Server.onRequest); + } } pub fn deinit(dev: *DevServer) void { @@ -611,13 +619,16 @@ fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { }; if (maybe_route.unwrap()) |route| { + { + @panic("TODO"); + } dev.ensureRouteIsBundled(route, .js_payload, req, resp) catch bun.outOfMemory(); } else { @panic("TODO: generate client bundle with no source files"); } } -fn onAssetRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { +fn onAssetRequest(dev: *DevServer, req: *Request, resp: anytype) void { _ = dev; _ = req; _ = resp; @@ -654,11 +665,29 @@ fn parseHexToInt(comptime T: type, slice: []const u8) ?T { return @bitCast(out); } -fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: AnyResponse) void { +inline fn wrapGenericRequestHandler( + comptime handler: anytype, + comptime is_ssl: bool, +) fn ( + dev: *DevServer, + req: *Request, + resp: *uws.NewApp(is_ssl).Response, +) void { + const fn_info = @typeInfo(@TypeOf(handler)).Fn; + assert(fn_info.params.len == 3); + const uses_any_response = if (fn_info.params[2].type) |t| t == AnyResponse else false; + return struct { + fn handle(dev: *DevServer, req: *Request, resp: *uws.NewApp(is_ssl).Response) void { + handler(dev, req, if (uses_any_response) AnyResponse.init(resp) else resp); + } + }.handle; +} + +fn onIncrementalVisualizer(_: *DevServer, _: *Request, resp: anytype) void { resp.corked(onIncrementalVisualizerCorked, .{resp}); } -fn onIncrementalVisualizerCorked(resp: AnyResponse) void { +fn onIncrementalVisualizerCorked(resp: anytype) void { const code = if (Environment.codegen_embed) @embedFile("incremental_visualizer.html") else @@ -812,8 +841,7 @@ fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointLi } }, .html => |*html| { - _ = html; - @panic("TODO"); + try entry_points.append(alloc, html.html_bundle.html_bundle.path, .{ .client = true }); }, } } @@ -918,7 +946,7 @@ pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, r sendTextFile(code, MimeType.javascript.value, resp); } -pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: *AnyResponse) void { +pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: anytype) void { if (req.header("open-in-editor") == null) { 
resp.writeStatus("501 Not Implemented"); resp.end("Viewing source without opening in editor is not implemented yet!", false); @@ -980,24 +1008,23 @@ fn startAsyncBundle( ast_memory_allocator.reset(); ast_memory_allocator.push(); - if (dev.framework.server_components == null) { - // The handling of the dependency graphs are SLIGHTLY different when - // server components are disabled. It's subtle, but enough that it - // would be incorrect to even try to run a build. - bun.todoPanic(@src(), "support non-server components build", .{}); - } - const bv2 = try BundleV2.init( - &dev.server_bundler, - if (dev.framework.server_components != null) .{ - .framework = dev.framework, - .client_bundler = &dev.client_bundler, - .ssr_bundler = &dev.ssr_bundler, - .plugins = dev.bundler_options.plugin, - } else @panic("TODO: support non-server components"), + if (dev.frontend_only) + &dev.client_bundler + else + &dev.server_bundler, + if (dev.frontend_only) + null + else + .{ + .framework = dev.framework, + .client_bundler = &dev.client_bundler, + .ssr_bundler = &dev.ssr_bundler, + .plugins = dev.bundler_options.plugin, + }, allocator, .{ .js = dev.vm.eventLoop() }, - false, // reloading is handled separately + false, // watching is handled separately JSC.WorkPool.get(), heap, ); @@ -1796,19 +1823,28 @@ pub fn routeBundlePtr(dev: *DevServer, idx: RouteBundle.Index) *RouteBundle { return &dev.route_bundles.items[idx.get()]; } -fn onRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { +fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void { var params: FrameworkRouter.MatchedParams = undefined; if (dev.router.matchSlow(req.url(), ¶ms)) |route_index| { - dev.ensureRouteIsBundled(route_index, .server_handler, req, resp) catch bun.outOfMemory(); + dev.ensureRouteIsBundled( + .{ .framework = route_index }, + .server_handler, + req, + AnyResponse.init(resp), + ) catch bun.outOfMemory(); return; } switch (dev.server.?) { - inline .DebugHTTPServer, .HTTPServer => |s| if (s.config.onRequest != .zero) { - s.onRequest(req, resp); - return; + inline else => |s| { + if (@typeInfo(@TypeOf(s.app.?)).Pointer.child.Response != @typeInfo(@TypeOf(resp)).Pointer.child) { + unreachable; // mismatch between `is_ssl` with server and response types. optimize these checks out. 
+ } + if (s.config.onRequest != .zero) { + s.onRequest(req, resp); + return; + } }, - else => @panic("TODO: HTTPS"), } sendBuiltInNotFound(resp); @@ -1853,17 +1889,21 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.Identifier) !RouteBun return bundle_index; } -fn sendTextFile(code: []const u8, content_type: []const u8, resp: AnyResponse) void { - if (code.len == 0) { - resp.writeStatus("202 No Content"); - resp.writeHeaderInt("Content-Length", 0); - resp.end("", true); - return; - } +fn sendTextFile(code: []const u8, content_type: []const u8, any_resp: AnyResponse) void { + switch (any_resp) { + inline else => |resp| { + if (code.len == 0) { + resp.writeStatus("202 No Content"); + resp.writeHeaderInt("Content-Length", 0); + resp.end("", true); + return; + } - resp.writeStatus("200 OK"); - resp.writeHeader("Content-Type", content_type); - resp.end(code, true); // TODO: You should never call res.end(huge buffer) + resp.writeStatus("200 OK"); + resp.writeHeader("Content-Type", content_type); + resp.end(code, true); // TODO: You should never call res.end(huge buffer) + }, + } } const ErrorPageKind = enum { @@ -1943,7 +1983,7 @@ fn sendSerializedFailuresInner( } } -fn sendBuiltInNotFound(resp: AnyResponse) void { +fn sendBuiltInNotFound(resp: anytype) void { const message = "404 Not Found"; resp.writeStatus("404 Not Found"); resp.end(message, true); @@ -3683,7 +3723,7 @@ fn writeVisualizerMessage(dev: *DevServer, payload: *std.ArrayList(u8)) !void { pub fn onWebSocketUpgrade( dev: *DevServer, - res: AnyResponse, + res: anytype, req: *Request, upgrade_ctx: *uws.uws_socket_context_t, id: usize, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 066843e3231266..8b4cc7e5303378 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -977,10 +977,11 @@ pub const BundleV2 = struct { this.framework = bo.framework; this.linker.framework = &this.framework.?; this.plugins = bo.plugins; - bun.assert(transpiler.options.server_components); - bun.assert(this.client_bundler.options.server_components); - if (bo.framework.server_components.?.separate_ssr_graph) - bun.assert(this.ssr_bundler.options.server_components); + if (transpiler.options.server_components) { + bun.assert(this.client_bundler.options.server_components); + if (bo.framework.server_components.?.separate_ssr_graph) + bun.assert(this.ssr_bundler.options.server_components); + } } this.linker.graph.allocator = this.graph.heap.allocator(); this.graph.allocator = this.linker.graph.allocator; diff --git a/src/deps/uws.zig b/src/deps/uws.zig index b4f72c02b130c2..6926d96b9c436f 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -3192,7 +3192,7 @@ pub const AnyResponse = union(enum) { SSL: *NewApp(true).Response, TCP: *NewApp(false).Response, - pub fn init(response: anytype) AnyResponse { + pub inline fn init(response: anytype) AnyResponse { return switch (@TypeOf(response)) { *NewApp(true).Response => .{ .SSL = response }, *NewApp(false).Response => .{ .TCP = response }, @@ -3353,7 +3353,8 @@ pub const AnyResponse = union(enum) { }; pub fn NewApp(comptime ssl: bool) type { return opaque { - const ssl_flag = @as(i32, @intFromBool(ssl)); + pub const is_ssl = ssl; + const ssl_flag: i32 = @intFromBool(ssl); const ThisApp = @This(); pub fn close(this: *ThisApp) void { From 0b1992c173445f0631cc6511c1b15b319a5deae2 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Thu, 30 Jan 2025 18:09:26 -0800 Subject: [PATCH 04/28] extremely primative html dev server --- cmake/tools/SetupBun.cmake | 7 + 
src/HTMLScanner.zig | 75 ++-- src/bake/DevServer.zig | 518 ++++++++++++++++-------- src/bake/bun-framework-react/client.tsx | 7 - src/bake/client/overlay.ts | 2 +- src/bake/client/websocket.ts | 76 ++-- src/bake/hmr-runtime-client.ts | 15 +- src/bake/hmr-runtime-server.ts | 1 - src/bake/incremental_visualizer.html | 60 ++- src/bake/macros.ts | 28 +- src/bun.js/api/server.zig | 1 + src/bun.js/api/server/HTMLBundle.zig | 11 +- src/bundler/bundle_v2.zig | 214 ++++++---- src/cli.zig | 1 + src/deps/libuv.zig | 3 +- src/deps/lol-html.zig | 4 +- src/deps/uws.zig | 6 +- src/import_record.zig | 2 - 18 files changed, 677 insertions(+), 354 deletions(-) diff --git a/cmake/tools/SetupBun.cmake b/cmake/tools/SetupBun.cmake index 5377eb1cff1a19..837248f65f74e8 100644 --- a/cmake/tools/SetupBun.cmake +++ b/cmake/tools/SetupBun.cmake @@ -9,6 +9,13 @@ find_command( >=1.1.26 ) +if (NOT CI) + # If node.js is not installed, it is extremely easy to make this path point to + # a tempdir such as /private/tmp/bun-node-ce532901c/bun, which may cause this + # CMake configuration break after tempdir is cleaned up (ex. after reboot). + get_filename_component(BUN_EXECUTABLE ${BUN_EXECUTABLE} REALPATH) +endif() + # If this is not set, some advanced features are not checked. # https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072 setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1) diff --git a/src/HTMLScanner.zig b/src/HTMLScanner.zig index b029e3dd46f6c6..8bc6160df6f87a 100644 --- a/src/HTMLScanner.zig +++ b/src/HTMLScanner.zig @@ -80,7 +80,7 @@ pub fn scan(this: *HTMLScanner, input: []const u8) !void { try processor.run(this, input); } -pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type { +pub fn HTMLProcessor(comptime T: type, comptime visit_head_and_body: bool) type { return struct { const TagHandler = struct { /// CSS selector to match elements @@ -95,7 +95,7 @@ pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type is_head_or_html: bool = false, }; - const tag_handlers_ = [_]TagHandler{ + const tag_handlers = [_]TagHandler{ // Module scripts with src .{ .selector = "script[src]", @@ -208,16 +208,6 @@ pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type // }, }; - const html_head_tag_handler: TagHandler = .{ - .selector = "head", - .has_content = false, - .url_attribute = "", - .kind = .stmt, - .is_head_or_html = true, - }; - - const tag_handlers = if (add_head_or_html_tag) tag_handlers_ ++ [_]TagHandler{html_head_tag_handler} else tag_handlers_; - fn generateHandlerForTag(comptime tag_info: TagHandler) fn (*T, *lol.Element) bool { const Handler = struct { pub fn handle(this: *T, element: *lol.Element) bool { @@ -232,13 +222,6 @@ pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type } } } - - if (comptime add_head_or_html_tag) { - if (tag_info.is_head_or_html) { - T.onHEADTag(this, element); - } - } - return false; } }; @@ -248,18 +231,16 @@ pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type pub fn run(this: *T, input: []const u8) !void { var builder = lol.HTMLRewriter.Builder.init(); defer builder.deinit(); - var selectors = try std.ArrayList(*lol.HTMLSelector).initCapacity(this.allocator, tag_handlers.len); - defer { - for (selectors.items) |selector| { - selector.deinit(); - } - selectors.deinit(); - } + + var selectors: std.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_head_and_body) 2 else 0) = .{}; + defer for 
(selectors.slice()) |selector| { + selector.deinit(); + }; + // Add handlers for each tag type inline for (tag_handlers) |tag_info| { const selector = try lol.HTMLSelector.parse(tag_info.selector); - try selectors.append(selector); - + selectors.appendAssumeCapacity(selector); try builder.addElementContentHandlers( selector, T, @@ -274,6 +255,38 @@ pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type ); } + if (visit_head_and_body) { + const head_selector = try lol.HTMLSelector.parse("head"); + selectors.appendAssumeCapacity(head_selector); + try builder.addElementContentHandlers( + head_selector, + T, + T.onHeadTag, + this, + void, + null, + null, + void, + null, + null, + ); + + const body_selector = try lol.HTMLSelector.parse("body"); + selectors.appendAssumeCapacity(body_selector); + try builder.addElementContentHandlers( + body_selector, + T, + T.onBodyTag, + this, + void, + null, + null, + void, + null, + null, + ); + } + const memory_settings = lol.MemorySettings{ .preallocated_parsing_buffer_size = @max(input.len / 4, 1024), .max_allowed_memory_usage = 1024 * 1024 * 10, @@ -294,11 +307,7 @@ pub fn HTMLProcessor(comptime T: type, comptime add_head_or_html_tag: bool) type false, T, this, - struct { - fn write(self: *T, bytes: []const u8) void { - self.onWriteHTML(bytes); - } - }.write, + T.onWriteHTML, struct { fn done(_: *T) void {} }.done, diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index eb9dce5db11e15..05e06d3b24a3f9 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -58,16 +58,19 @@ client_graph: IncrementalGraph(.client), server_graph: IncrementalGraph(.server), /// State populated during bundling and hot updates. Often cleared incremental_result: IncrementalResult, -/// Quickly retrieve a route's index from its entry point file. These are -/// populated as the routes are discovered. The route may not be bundled OR +/// Quickly retrieve a framework route's index from its entry point file. These +/// are populated as the routes are discovered. The route may not be bundled OR /// navigatable, such as the case where a layout's index is looked up. route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.server).FileIndex, RouteIndexAndRecurseFlag), +/// Quickly retrieve an HTML route's index from its incremental graph index. +// TODO: store this in IncrementalGraph(.client).File instead of this hash map. +html_route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.client).FileIndex, RouteBundle.Index), /// CSS files are accessible via `/_bun/css/.css` /// Value is bundled code owned by `dev.allocator` css_files: AutoArrayHashMapUnmanaged(u64, []const u8), /// JS files are accessible via `/_bun/client/route..js` /// These are randomly generated to avoid possible browser caching of old assets. -route_js_payloads: AutoArrayHashMapUnmanaged(u64, Route.Index.Optional), +route_js_payloads: AutoArrayHashMapUnmanaged(u64, RouteBundle.Index.Optional), // /// Assets are accessible via `/_bun/asset/` // assets: bun.StringArrayHashMapUnmanaged(u64, Asset), /// All bundling failures are stored until a file is saved and rebuilt. 
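// Editor's note: an illustrative sketch, not part of the patch. It spells out the two
// lookup directions the maps above provide, using only fields and helpers that appear
// in this patch; the helper names are hypothetical.
fn findHtmlRouteBundle(dev: *DevServer, client_file: IncrementalGraph(.client).FileIndex) ?*RouteBundle {
    // IncrementalGraph file index -> RouteBundle that serves this HTML entry point.
    const bundle_index = dev.html_route_lookup.get(client_file) orelse return null;
    return dev.routeBundlePtr(bundle_index);
}

fn findRouteForJsPayload(dev: *DevServer, generated_id: u64) ?RouteBundle.Index {
    // Randomly generated script id (taken from the requested URL) -> RouteBundle.
    return (dev.route_js_payloads.get(generated_id) orelse return null).unwrap();
}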
@@ -113,7 +116,7 @@ log: Log, current_bundle: ?struct { bv2: *BundleV2, /// Information BundleV2 needs to finalize the bundle - start_data: bun.bundle_v2.BakeBundleStart, + start_data: bun.bundle_v2.DevServerInput, /// Started when the bundle was queued timer: std.time.Timer, /// If any files in this bundle were due to hot-reloading, some extra work @@ -146,7 +149,7 @@ dump_dir: if (bun.FeatureFlags.bake_debugging_features) ?std.fs.Dir else void, emit_visualizer_events: u32, has_pre_crash_handler: bool, -pub const internal_prefix = "/:bun:"; +pub const internal_prefix = "/_bun"; pub const client_prefix = internal_prefix ++ "/client"; pub const asset_prefix = internal_prefix ++ "/asset"; pub const css_prefix = internal_prefix ++ "/css"; @@ -176,15 +179,16 @@ pub const RouteBundle = struct { /// graph. active_viewers: u32, - const Framework = struct { + pub const Framework = struct { route_index: Route.Index, // TODO: micro-opt: use a singular strong + /// Cached to avoid re-creating the array every request. - /// Invalidated when a layout is added or removed from this route. + /// TODO: Invalidated when a layout is added or removed from this route. cached_module_list: JSC.Strong, /// Cached to avoid re-creating the string every request. - /// Invalidated when any client file associated with the route is updated. + /// TODO: Invalidated when any client file associated with the route is updated. cached_client_bundle_url: JSC.Strong, /// Cached to avoid re-creating the array every request. /// Invalidated when the list of CSS files changes. @@ -196,14 +200,34 @@ pub const RouteBundle = struct { evaluate_failure: ?SerializedFailure, }; - const HTML = struct { + pub const HTML = struct { /// DevServer increments the ref count of this bundle html_bundle: *HTMLBundle.HTMLBundleRoute, + bundled_file: IncrementalGraph(.client).FileIndex, + /// Invalidated when the HTML file is modified, but not it's imports. + /// The style tag is injected here. + head_end_tag_index: ByteOffset.Optional, + /// Invalidated when the HTML file is modified, but not it's imports. + /// The script tag is injected here. + body_end_tag_index: ByteOffset.Optional, + /// The HTML file bundled, from the bundler. + bundled_html_text: ?[]const u8, + /// Invalidated when + /// - The HTML file itself modified. + /// - The list of CSS files changes. + /// - TODO: Any downstream file is rebundled. + cached_response_body: ?[]const u8, + /// Hash used for the client script tag. + // TODO: do not make this lazy + client_script_uid: ScriptUid.Optional, + + const ByteOffset = bun.GenericIndex(u32, u8); + const ScriptUid = bun.GenericIndex(u64, "an entry in route_js_payloads"); }; /// A union is not used so that `bundler_failure_logs` can re-use memory, as /// this state frequently changes between `loaded` and the failure variants. - const State = enum { + pub const State = enum { /// In development mode, routes are lazily built. This state implies a /// build of this route has never been run. It is possible to bundle the /// route entry point and still have an unqueued route if another route @@ -225,54 +249,15 @@ pub const RouteBundle = struct { loaded, }; - /// This identifier is used to refer to a RouteBundle that does not have it's - /// index known, or may not be initialized yet. - pub const Identifier = union(enum) { + /// Used as the input to some functions which may already have a + /// RouteBundle.Index, but also lookup an entry or init a new one. + pub const MaybeIndex = union(enum) { + /// Already inserted. 
This prevents an extra loopback to lookup. + resolved: RouteBundle.Index, /// FrameworkRouter provides a fullstack server-side route framework: FrameworkRouter.Route.Index, /// HTMLBundle provides a frontend-only route, SPA-style html: *HTMLBundle.HTMLBundleRoute, - - pub const Packed = packed struct(u32) { - tag: enum(u1) { framework, html }, - data: u31, - - pub fn encode(id: Identifier) Packed { - return switch (id) { - .framework => |index| .{ .tag = .framework, .data = index.get() }, - .html => |html_bundle| .{ .tag = .html, .data = @truncate(@intFromPtr(html_bundle)) }, - }; - } - - pub fn decode(id: Packed) Identifier { - return switch (id.tag) { - .framework => |index| .{ .framework = Route.Index.init(index) }, - .html => |html_bundle| .{ .html = @ptrFromInt(html_bundle) }, - }; - } - }; - - /// May allocate memory - pub fn pattern(id: Identifier, dev: *DevServer) !bun.CowString { - return switch (id) { - .framework => |index| full_pattern: { - var buf = bake.PatternBuffer.empty; - var current: *Route = dev.router.routePtr(index); - // This loop is done to avoid prepending `/` at the root - // if there is more than one component. - buf.prependPart(current.part); - if (current.parent.unwrap()) |first| { - current = dev.router.routePtr(first); - while (current.parent.unwrap()) |next| { - buf.prependPart(current.part); - current = dev.router.routePtr(next); - } - } - break :full_pattern try bun.CowString.initDupe(buf.slice(), dev.allocator); - }, - .html => |html_bundle| bun.CowString.initNeverFree(html_bundle.pattern), - }; - } }; }; @@ -320,6 +305,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .incremental_result = IncrementalResult.empty, .route_lookup = .{}, .route_bundles = .{}, + .html_route_lookup = .{}, .current_bundle = null, .current_bundle_requests = .{}, .next_bundle = .{ @@ -619,10 +605,7 @@ fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { }; if (maybe_route.unwrap()) |route| { - { - @panic("TODO"); - } - dev.ensureRouteIsBundled(route, .js_payload, req, resp) catch bun.outOfMemory(); + dev.ensureRouteIsBundled(.{ .resolved = route }, .js_payload, req, resp) catch bun.outOfMemory(); } else { @panic("TODO: generate client bundle with no source files"); } @@ -698,12 +681,12 @@ fn onIncrementalVisualizerCorked(resp: anytype) void { fn ensureRouteIsBundled( dev: *DevServer, - id: RouteBundle.Identifier, + maybe_index: RouteBundle.MaybeIndex, kind: DeferredRequest.Data.Tag, req: *Request, resp: AnyResponse, ) bun.OOM!void { - const route_bundle_index = try dev.getOrPutRouteBundle(id); + const route_bundle_index = try dev.getOrPutRouteBundle(maybe_index); // TODO: Zig 0.14 gets labelled continue: // - Remove the `while` @@ -721,10 +704,16 @@ fn ensureRouteIsBundled( const deferred: DeferredRequest = .{ .route_bundle_index = route_bundle_index, .data = switch (kind) { - .js_payload => .{ .js_payload = resp }, - .server_handler => .{ - .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) - .save(dev.vm.global, req, resp.TCP), + inline .js_payload, .bundled_html_page => |tag| brk: { + resp.onAborted(*DeferredRequest, DeferredRequest.onAbort, undefined); // TODO: pass stable pointer. 
+ break :brk @unionInit(DeferredRequest.Data, @tagName(tag), resp); + }, + .server_handler => brk: { + assert(maybe_index == .framework); + break :brk .{ + .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) + .save(dev.vm.global, req, resp.TCP), + }; }, }, }; @@ -755,16 +744,9 @@ fn ensureRouteIsBundled( entry_points, false, std.time.Timer.start() catch @panic("timers unsupported"), - ) catch |err| { - if (dev.log.hasAny()) { - dev.log.print(Output.errorWriterBuffered()) catch {}; - Output.flush(); - } - Output.panic("Fatal error while initializing bundle job: {}", .{err}); - }; - - dev.routeBundlePtr(route_bundle_index).server_state = .bundling; + ) catch bun.outOfMemory(); } + dev.routeBundlePtr(route_bundle_index).server_state = .bundling; return; }, .bundling => { @@ -775,6 +757,7 @@ fn ensureRouteIsBundled( .route_bundle_index = route_bundle_index, .data = switch (kind) { .js_payload => .{ .js_payload = resp }, + .bundled_html_page => .{ .bundled_html_page = resp }, .server_handler => .{ .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) .save(dev.vm.global, req, resp.TCP), @@ -817,7 +800,8 @@ fn ensureRouteIsBundled( } switch (kind) { - .server_handler => dev.onRequestWithBundle(route_bundle_index, .{ .stack = req }, resp), + .server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp), + .bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp), .js_payload => dev.onJsRequestWithBundle(route_bundle_index, resp), } } @@ -846,26 +830,16 @@ fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointLi } } -fn onRequestWithBundle( +fn onFrameworkRequestWithBundle( dev: *DevServer, route_bundle_index: RouteBundle.Index, req: bun.JSC.API.SavedRequest.Union, resp: AnyResponse, ) void { const route_bundle = dev.routeBundlePtr(route_bundle_index); - switch (route_bundle.data) { - .framework => |*fw| dev.onFrameworkRequestWithBundle(route_bundle, fw, req, resp), - .html => @panic("walaa"), - } -} + assert(route_bundle.data == .framework); + const bundle = &route_bundle.data.framework; -fn onFrameworkRequestWithBundle( - dev: *DevServer, - route_bundle: *RouteBundle, - bundle: *RouteBundle.Framework, - req: bun.JSC.API.SavedRequest.Union, - resp: AnyResponse, -) void { const server_request_callback = dev.server_fetch_function_callback.get() orelse unreachable; // did not initialize server code @@ -912,8 +886,8 @@ fn onFrameworkRequestWithBundle( }, // clientId bundle.cached_client_bundle_url.get() orelse str: { - const id, const route_index: Route.Index.Optional = if (router_type.client_file != .none) - .{ std.crypto.random.int(u64), bundle.route_index.toOptional() } + const id, const route_index: RouteBundle.Index.Optional = if (router_type.client_file != .none) + .{ std.crypto.random.int(u64), route_bundle_index.toOptional() } else // When there is no framework-provided client code, generate // a JS file so that the hot-reloading code can reload the @@ -936,6 +910,122 @@ fn onFrameworkRequestWithBundle( ); } +fn onHtmlRequestWithBundle(dev: *DevServer, route_bundle_index: RouteBundle.Index, resp: AnyResponse) void { + const route_bundle = dev.routeBundlePtr(route_bundle_index); + assert(route_bundle.data == .html); + const html = &route_bundle.data.html; + const payload = getHTMLPayload(dev, route_bundle_index, route_bundle, html) catch bun.outOfMemory(); + sendTextFile(payload, MimeType.html.value, resp); 
+}
+
+fn getHTMLPayload(dev: *DevServer, route_bundle_index: RouteBundle.Index, route_bundle: *RouteBundle, html: *RouteBundle.HTML) bun.OOM![]const u8 {
+    assert(route_bundle.server_state == .loaded); // if not loaded, the following values won't be initialized
+    assert(html.html_bundle.dev_server_id.unwrap() == route_bundle_index);
+    if (html.cached_response_body) |slice| {
+        return slice;
+    }
+    const head_end_tag_index = (html.head_end_tag_index.unwrap() orelse unreachable).get();
+    const body_end_tag_index = (html.body_end_tag_index.unwrap() orelse unreachable).get();
+    const bundled_html = html.bundled_html_text orelse unreachable;
+
+    // The bundler records two offsets in development mode, splitting the HTML
+    // file into three chunks. DevServer is able to insert style/script tags
+    // using the information available in IncrementalGraph. This approach
+    // allows downstream files to update without re-bundling the HTML file.
+    //
+    // <!DOCTYPE html>
+    // <html>
+    //   <head>
+    //     <title>Single Page Web App</title>
+    //     {head_end_tag_index}
+    //   </head>
+    //   <body>
+ // {body_end_tag_index} + // + const before_head_end = bundled_html[0..head_end_tag_index]; + const before_body_end = bundled_html[head_end_tag_index..body_end_tag_index]; + const after_body_end = bundled_html[body_end_tag_index..]; + + // client_script_uid's value is lazily initialized. + const client_script_uid = if (html.client_script_uid.unwrap()) |id| id.get() else brk: { + const id = std.crypto.random.int(u64); + dev.route_js_payloads.put(dev.allocator, id, route_bundle_index.toOptional()) catch bun.outOfMemory(); + html.client_script_uid = RouteBundle.HTML.ScriptUid.init(id).toOptional(); + break :brk id; + }; + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + // Prepare bitsets for tracing + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + const sfa = sfa_state.get(); + var gts = try dev.initGraphTraceState(sfa); + defer gts.deinit(sfa); + // Run tracing + dev.client_graph.reset(); + try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); + + const names = dev.client_graph.current_css_files.items; + + var payload_size = bundled_html.len; + for (names) |name| { + payload_size += "".len; + payload_size += name.len; + } + payload_size += "".len + client_prefix.len + "/route.0000000000000000.js".len; + + var array: std.ArrayListUnmanaged(u8) = try std.ArrayListUnmanaged(u8).initCapacity(dev.allocator, payload_size); + errdefer array.deinit(dev.allocator); + array.appendSliceAssumeCapacity(before_head_end); + // Insert all link tags before "" + for (names) |name| { + array.appendSliceAssumeCapacity(""); + } + array.appendSliceAssumeCapacity(before_body_end); + // Insert the client script tag before "" + array.appendSliceAssumeCapacity(""); + array.appendSliceAssumeCapacity(after_body_end); + assert(array.items.len == array.capacity); // incorrect memory allocation size + html.cached_response_body = array.items; + return array.items; +} + +fn getJavaScriptCodeForHTMLFile( + dev: *DevServer, + index: bun.JSAst.Index, + import_records: []bun.BabyList(bun.ImportRecord), + input_file_sources: []bun.logger.Source, +) bun.OOM![]const u8 { + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + const sfa = sfa_state.get(); + var array: std.ArrayListUnmanaged(u8) = std.ArrayListUnmanaged(u8).initCapacity(sfa, 65536) catch bun.outOfMemory(); + defer array.deinit(sfa); + const w = array.writer(sfa); + + try w.writeAll(" "); + try bun.js_printer.writeJSONString(input_file_sources[index.get()].path.pretty, @TypeOf(w), w, .utf8); + try w.writeAll("(m) {\n "); + for (import_records[index.get()].slice()) |import| { + try w.writeAll("m.dynamicImport("); + try bun.js_printer.writeJSONString(import.path.pretty, @TypeOf(w), w, .utf8); + try w.writeAll(");\n "); + } + try w.writeAll("},\n"); + + // Avoid-recloning if it is was moved to the hap + return if (array.items.ptr == &sfa_state.buffer) + try bun.default_allocator.dupe(u8, array.items) + else + array.items; +} + pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: AnyResponse) void { const route_bundle = dev.routeBundlePtr(bundle_index); const code = route_bundle.client_bundle orelse code: { @@ -981,11 +1071,21 @@ const DeferredRequest = struct { data: Data, const Data = union(enum) { + /// For a .framework route. This says to call and render the page. server_handler: bun.JSC.API.SavedRequest, + /// For a .html route. Serve the bundled HTML page. + bundled_html_page: AnyResponse, + /// Serve the JavaScript payload for this route. 
js_payload: AnyResponse, const Tag = @typeInfo(Data).Union.tag_type.?; }; + + fn onAbort(this: *DeferredRequest, resp: AnyResponse) void { + _ = this; + _ = resp; + @panic("TODO"); + } }; fn startAsyncBundle( @@ -1009,19 +1109,13 @@ fn startAsyncBundle( ast_memory_allocator.push(); const bv2 = try BundleV2.init( - if (dev.frontend_only) - &dev.client_bundler - else - &dev.server_bundler, - if (dev.frontend_only) - null - else - .{ - .framework = dev.framework, - .client_bundler = &dev.client_bundler, - .ssr_bundler = &dev.ssr_bundler, - .plugins = dev.bundler_options.plugin, - }, + &dev.server_bundler, + .{ + .framework = dev.framework, + .client_bundler = &dev.client_bundler, + .ssr_bundler = &dev.ssr_bundler, + .plugins = dev.bundler_options.plugin, + }, allocator, .{ .js = dev.vm.eventLoop() }, false, // watching is handled separately @@ -1124,9 +1218,6 @@ fn indexFailures(dev: *DevServer) !void { fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]const u8 { assert(route_bundle.client_bundle == null); assert(route_bundle.server_state == .loaded); // page is unfit to load - { - return "console.log('TODO')"; - } dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); @@ -1141,12 +1232,17 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]c dev.client_graph.reset(); try dev.traceAllRouteImports(route_bundle, >s, .{ .find_client_modules = true }); - const client_file = dev.router.typePtr(dev.router.routePtr(route_bundle.route).type).client_file.unwrap() orelse - @panic("No client side entrypoint in client bundle"); + const client_file: ?IncrementalGraph(.client).FileIndex = switch (route_bundle.data) { + .framework => |fw| if (dev.router.typePtr(dev.router.routePtr(fw.route_index).type).client_file.unwrap()) |ofi| + fromOpaqueFileId(.client, ofi) + else + null, + .html => |html| html.bundled_file, + }; return dev.client_graph.takeBundle( .initial_response, - dev.relativePath(dev.client_graph.bundled_files.keys()[fromOpaqueFileId(.client, client_file).get()]), + if (client_file) |index| dev.relativePath(dev.client_graph.bundled_files.keys()[index.get()]) else "", ); } @@ -1205,8 +1301,7 @@ fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, gts: *Graph } }, .html => |html| { - _ = html; - @panic("TODO"); + try dev.client_graph.traceImports(html.bundled_file, gts, goal); }, } } @@ -1268,8 +1363,9 @@ pub const HotUpdateContext = struct { pub fn finalizeBundle( dev: *DevServer, bv2: *bun.bundle_v2.BundleV2, - result: bun.bundle_v2.BakeBundleOutput, + result: bun.bundle_v2.DevServerOutput, ) bun.OOM!void { + // TODO: this leaks BundleV2 defer dev.startNextBundleIfPresent(); const current_bundle = &dev.current_bundle.?; @@ -1356,6 +1452,33 @@ pub fn finalizeBundle( } } + // TODO: consider storing something in `result.html_files.values()` as a + // means to removing the hashmap lookup in `dev.html_route_lookup` + for (result.htmlChunks()) |*chunk| { + const index = bun.JSAst.Index.init(chunk.entry_point.source_index); + const compile_result = chunk.compile_results_for_chunk[0].html; + const generated_js = try dev.getJavaScriptCodeForHTMLFile(index, import_records, input_file_sources); + try dev.client_graph.receiveChunk(&ctx, index, generated_js, .js, false); + const client_index = ctx.getCachedIndex(.client, index).*; + const route_bundle_index = dev.html_route_lookup.get(client_index) orelse + @panic("Route for HTML file was not registered"); + const route_bundle = dev.routeBundlePtr(route_bundle_index); + 
assert(route_bundle.data.html.bundled_file == client_index); + const html = &route_bundle.data.html; + + if (html.cached_response_body) |slice| { + dev.allocator.free(slice); + html.cached_response_body = null; + } + if (html.bundled_html_text) |slice| { + dev.allocator.free(slice); + } + html.bundled_html_text = compile_result.code; + + html.head_end_tag_index = RouteBundle.HTML.ByteOffset.init(compile_result.offsets.head_end_tag).toOptional(); + html.body_end_tag_index = RouteBundle.HTML.ByteOffset.init(compile_result.offsets.body_end_tag).toOptional(); + } + var gts = try dev.initGraphTraceState(bv2.graph.allocator); defer gts.deinit(bv2.graph.allocator); ctx.gts = >s; @@ -1372,9 +1495,14 @@ pub fn finalizeBundle( .client => try dev.client_graph.processChunkDependencies(&ctx, part_range.source_index, bv2.graph.allocator), } } + for (result.htmlChunks()) |*chunk| { + const index = bun.JSAst.Index.init(chunk.entry_point.source_index); + try dev.client_graph.processChunkDependencies(&ctx, index, bv2.graph.allocator); + } for (result.cssChunks(), result.css_file_list.values()) |*chunk, metadata| { const index = bun.JSAst.Index.init(chunk.entry_point.source_index); - // TODO: index css deps + // TODO: index css deps. this must add all recursively referenced files + // as dependencies of the entry point, instead of building a large tree. _ = index; _ = metadata; } @@ -1396,7 +1524,7 @@ pub fn finalizeBundle( } // Load all new chunks into the server runtime. - if (dev.server_graph.current_chunk_len > 0) { + if (!dev.frontend_only and dev.server_graph.current_chunk_len > 0) { const server_bundle = try dev.server_graph.takeBundle(.hmr_chunk, ""); defer dev.allocator.free(server_bundle); @@ -1529,37 +1657,38 @@ pub fn finalizeBundle( // List 2 while (it.next()) |i| { const route_bundle = dev.routeBundlePtr(RouteBundle.Index.init(@intCast(i))); - switch (route_bundle.data) { - .framework => |*fw_bundle| { - if (dev.incremental_result.had_adjusted_edges) { - fw_bundle.cached_css_file_array.clear(); - } - if (route_bundle.active_viewers == 0 or !will_hear_hot_update) continue; - try w.writeInt(i32, @intCast(i), .little); - try w.writeInt(u32, @intCast(route_bundle.full_pattern.flags.len), .little); - try w.writeAll(route_bundle.full_pattern.slice()); - - // If no edges were changed, then it is impossible to - // change the list of CSS files. - if (dev.incremental_result.had_adjusted_edges) { - gts.clear(); - try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); - const names = dev.client_graph.current_css_files.items; - - try w.writeInt(i32, @intCast(names.len), .little); - for (names) |name| { - const css_prefix_slash = css_prefix ++ "/"; - // These slices are url pathnames. 
The ID can be extracted - const css_hash_len = 16; - bun.assert(name.len == (css_prefix_slash ++ ".css").len + css_hash_len); - bun.assert(bun.strings.hasPrefix(name, css_prefix_slash)); - try w.writeAll(name[css_prefix_slash.len..][0..css_hash_len]); - } - } else { - try w.writeInt(i32, -1, .little); - } - }, - .html => @panic("TODO"), + if (dev.incremental_result.had_adjusted_edges) { + switch (route_bundle.data) { + .framework => |*fw_bundle| fw_bundle.cached_css_file_array.clear(), + .html => |*html| if (html.cached_response_body) |slice| { + dev.allocator.free(slice); + html.cached_response_body = null; + }, + } + } + if (route_bundle.active_viewers == 0 or !will_hear_hot_update) continue; + try w.writeInt(i32, @intCast(i), .little); + try w.writeInt(u32, @intCast(route_bundle.full_pattern.flags.len), .little); + try w.writeAll(route_bundle.full_pattern.slice()); + + // If no edges were changed, then it is impossible to + // change the list of CSS files. + if (dev.incremental_result.had_adjusted_edges) { + gts.clear(); + try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); + const names = dev.client_graph.current_css_files.items; + + try w.writeInt(i32, @intCast(names.len), .little); + for (names) |name| { + const css_prefix_slash = css_prefix ++ "/"; + // These slices are url pathnames. The ID can be extracted + const css_hash_len = 16; + bun.assert(name.len == (css_prefix_slash ++ ".css").len + css_hash_len); + bun.assert(bun.strings.hasPrefix(name, css_prefix_slash)); + try w.writeAll(name[css_prefix_slash.len..][0..css_hash_len]); + } + } else { + try w.writeInt(i32, -1, .little); } } } @@ -1602,7 +1731,7 @@ pub fn finalizeBundle( saved.deinit(); break :brk resp; }, - .js_payload => |resp| resp, + .js_payload, .bundled_html_page => |resp| resp, }; resp.corked(sendSerializedFailures, .{ @@ -1675,7 +1804,8 @@ pub fn finalizeBundle( rb.server_state = .loaded; switch (req.data) { - .server_handler => |saved| dev.onRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response), + .server_handler => |saved| dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response), + .bundled_html_page => |resp| dev.onHtmlRequestWithBundle(req.route_bundle_index, resp), .js_payload => |resp| dev.onJsRequestWithBundle(req.route_bundle_index, resp), } } @@ -1850,12 +1980,15 @@ fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void { sendBuiltInNotFound(resp); } -pub fn respondForHTMLBundle(dev: *DevServer, hbr: *HTMLBundle.HTMLBundleRoute, req: *uws.Request, resp: AnyResponse) void { - dev.ensureRouteIsBundled(.{ .html = hbr }, .server_handler, req, resp) catch bun.outOfMemory(); +pub fn respondForHTMLBundle(dev: *DevServer, html: *HTMLBundle.HTMLBundleRoute, req: *uws.Request, resp: AnyResponse) void { + dev.ensureRouteIsBundled(.{ .html = html }, .bundled_html_page, req, resp) catch bun.outOfMemory(); } -fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.Identifier) !RouteBundle.Index { +fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.MaybeIndex) !RouteBundle.Index { const index_location: *RouteBundle.Index.Optional = switch (route) { + // Already inserted, return. 
+ .resolved => |idx| return idx, + .framework => |route_index| &dev.router.routePtr(route_index).bundle, .html => |html| &html.dev_server_id, }; @@ -1863,11 +1996,34 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.Identifier) !RouteBun return bundle_index; } - const full_pattern = try route.pattern(dev); + const full_pattern = switch (route) { + .resolved => unreachable, // returned already + .framework => |index| full_pattern: { + var buf = bake.PatternBuffer.empty; + var current: *Route = dev.router.routePtr(index); + // This loop is done to avoid prepending `/` at the root + // if there is more than one component. + buf.prependPart(current.part); + if (current.parent.unwrap()) |first| { + current = dev.router.routePtr(first); + while (current.parent.unwrap()) |next| { + buf.prependPart(current.part); + current = dev.router.routePtr(next); + } + } + break :full_pattern try bun.CowString.initDupe(buf.slice(), dev.allocator); + }, + .html => |html_bundle| bun.CowString.initNeverFree(html_bundle.pattern), + }; errdefer full_pattern.deinit(dev.allocator); - try dev.route_bundles.append(dev.allocator, .{ + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + try dev.route_bundles.ensureUnusedCapacity(dev.allocator, 1); + dev.route_bundles.appendAssumeCapacity(.{ .data = switch (route) { + .resolved => unreachable, // returned already .framework => |route_index| .{ .framework = .{ .route_index = route_index, .evaluate_failure = null, @@ -1875,9 +2031,23 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.Identifier) !RouteBun .cached_client_bundle_url = .{}, .cached_css_file_array = .{}, } }, - .html => |html| .{ .html = .{ - .html_bundle = html, - } }, + .html => |html| brk: { + const incremental_graph_index = try dev.client_graph.insertStaleExtra(html.html_bundle.path, false, true); + try dev.html_route_lookup.put( + dev.allocator, + incremental_graph_index, + RouteBundle.Index.init(@intCast(dev.route_bundles.items.len)), + ); + break :brk .{ .html = .{ + .html_bundle = html, + .bundled_file = incremental_graph_index, + .head_end_tag_index = .none, + .body_end_tag_index = .none, + .cached_response_body = null, + .bundled_html_text = null, + .client_script_uid = .none, + } }; + }, }, .server_state = .unqueued, .full_pattern = full_pattern, @@ -1993,6 +2163,8 @@ const FileKind = enum(u2) { /// Files that failed to bundle or do not exist on disk will appear in the /// graph as "unknown". unknown, + /// Stores JavaScript code. This field is also used for HTML files, where + /// the associated JS just calls `require` to emulate having script tags. js, css, asset, @@ -2121,7 +2293,7 @@ pub fn IncrementalGraph(side: bake.Side) type { code_len: u32, flags: Flags, - const Flags = struct { + const Flags = packed struct(u32) { /// If the file has an error, the failure can be looked up /// in the `.failures` map. failed: bool, @@ -2131,8 +2303,11 @@ pub fn IncrementalGraph(side: bake.Side) type { /// This is a file is an entry point to the framework. /// Changing this will always cause a full page reload. 
is_special_framework_file: bool, + /// If this file has a HTML RouteBundle and associated entry in `html_route_lookup` + is_html_route: bool, /// CSS and Asset files get special handling kind: FileKind, + unused: enum(u26) { unused } = .unused, }; comptime { @@ -2188,6 +2363,11 @@ pub fn IncrementalGraph(side: bake.Side) type { return if (g.bundled_files.getIndex(path)) |i| FileIndex.init(@intCast(i)) else null; } + /// Prefer calling .values() and indexing manually if accessing more than one + pub fn getFileByIndex(g: *@This(), index: FileIndex) File { + return g.bundled_files.values()[index.get()]; + } + /// Tracks a bundled code chunk for cross-bundle chunks, /// ensuring it has an entry in `bundled_files`. /// @@ -2284,6 +2464,7 @@ pub fn IncrementalGraph(side: bake.Side) type { .failed = false, .is_hmr_root = ctx.server_to_client_bitset.isSet(index.get()), .is_special_framework_file = false, + .is_html_route = false, .kind = kind, }; if (kind == .css) { @@ -2678,7 +2859,9 @@ pub fn IncrementalGraph(side: bake.Side) type { try g.first_import.append(g.owner().allocator, .none); } else { if (side == .server) { - if (is_route) gop.value_ptr.*.is_route = is_route; + if (is_route) gop.value_ptr.*.is_route = true; + } else { + if (is_route) gop.value_ptr.*.flags.is_html_route = true; } } @@ -2692,6 +2875,7 @@ pub fn IncrementalGraph(side: bake.Side) type { .failed = false, .is_hmr_root = false, .is_special_framework_file = false, + .is_html_route = is_route, .kind = .unknown, }); }, @@ -2775,6 +2959,7 @@ pub fn IncrementalGraph(side: bake.Side) type { .failed = true, .is_hmr_root = false, .is_special_framework_file = false, + .is_html_route = false, .kind = .unknown, }); }, @@ -2921,12 +3106,16 @@ pub fn IncrementalGraph(side: bake.Side) type { .initial_response => { const fw = g.owner().framework; try w.writeAll("}, {\n main: "); - try bun.js_printer.writeJSONString( - g.owner().relativePath(initial_response_entry_point), - @TypeOf(w), - w, - .utf8, - ); + if (initial_response_entry_point.len > 0) { + try bun.js_printer.writeJSONString( + g.owner().relativePath(initial_response_entry_point), + @TypeOf(w), + w, + .utf8, + ); + } else { + try w.writeAll("null"); + } switch (side) { .client => { try w.writeAll(",\n version: \""); @@ -3699,7 +3888,7 @@ fn writeVisualizerMessage(dev: *DevServer, payload: *std.ArrayList(u8)) !void { })); try w.writeByte(@intFromBool(side == .server and v.is_rsc)); try w.writeByte(@intFromBool(side == .server and v.is_ssr)); - try w.writeByte(@intFromBool(side == .server and v.is_route)); + try w.writeByte(@intFromBool(if (side == .server) v.is_route else v.flags.is_html_route)); try w.writeByte(@intFromBool(side == .client and v.flags.is_special_framework_file)); try w.writeByte(@intFromBool(switch (side) { .server => v.is_client_component_boundary, @@ -4523,8 +4712,9 @@ fn dumpStateDueToCrash(dev: *DevServer) !void { defer file.close(); const start, const end = comptime brk: { + @setEvalBranchQuota(5000); const visualizer = @embedFile("incremental_visualizer.html"); - const i = (std.mem.indexOf(u8, visualizer, "", .{js_chunk.unique_key}) catch bun.outOfMemory(); + defer allocator.free(script); + element.append(script, true) catch bun.outOfMemory(); + } + } else { + element.onEndTag(endHeadTagHandler, this) catch return true; } - if (this.chunk.getJSChunkForHTML(this.chunks)) |js_chunk| { - const script = std.fmt.allocPrintZ(allocator, "", .{js_chunk.unique_key}) catch bun.outOfMemory(); - defer allocator.free(script); - element.append(script, true) catch 
bun.outOfMemory(); + return false; + } + + pub fn onBodyTag(this: *@This(), element: *lol.Element) bool { + if (this.linker.dev_server != null) { + element.onEndTag(endBodyTagHandler, this) catch return true; } + return false; } - const processor = HTMLScanner.HTMLProcessor(@This(), true); + fn endHeadTagHandler(_: *lol.EndTag, opaque_this: ?*anyopaque) callconv(.C) lol.Directive { + const this: *@This() = @alignCast(@ptrCast(opaque_this.?)); + this.head_end_tag_index = @intCast(this.output.items.len); + return .@"continue"; + } - pub fn run(this: *@This(), input: []const u8) !void { - processor.run(this, input) catch bun.outOfMemory(); + fn endBodyTagHandler(_: *lol.EndTag, opaque_this: ?*anyopaque) callconv(.C) lol.Directive { + const this: *@This() = @alignCast(@ptrCast(opaque_this.?)); + this.body_end_tag_index = @intCast(this.output.items.len); + return .@"continue"; } }; - var html_loader = HTMLLoader{ + var html_loader: HTMLLoader = .{ .linker = c, .source_index = chunk.entry_point.source_index, .import_records = import_records[chunk.entry_point.source_index].slice(), @@ -10199,20 +10236,25 @@ pub const LinkerContext = struct { .current_import_record_index = 0, }; - html_loader.run(sources[chunk.entry_point.source_index].contents) catch bun.outOfMemory(); + HTMLScanner.HTMLProcessor(HTMLLoader, true).run( + &html_loader, + sources[chunk.entry_point.source_index].contents, + ) catch bun.outOfMemory(); return .{ .html = .{ .code = html_loader.output.items, .source_index = chunk.entry_point.source_index, + .offsets = .{ + .head_end_tag = html_loader.head_end_tag_index, + .body_end_tag = html_loader.body_end_tag_index, + }, }, }; } fn postProcessHTMLChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chunk: *Chunk) !void { - // This is where we split output into pieces - const c = ctx.c; var j = StringJoiner{ .allocator = worker.allocator, @@ -13351,6 +13393,7 @@ pub const LinkerContext = struct { // // - Reuse unchanged parts to assemble the full bundle if Cmd+R is used in the browser // - Send only the newly changed code through a socket. + // - Use IncrementalGraph to have full knowledge of referenced CSS files. // // When this isn't the initial bundle, concatenation as usual would produce a // broken module. It is DevServer's job to create and send HMR patches. 
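// Editor's note: an illustrative sketch, not part of the patch. The two consumption
// modes described above correspond to the takeBundle calls DevServer.zig already makes
// in this series; `entry_path` is a placeholder for a route's client entry point.
fn exampleTakeBundles(dev: *DevServer, entry_path: []const u8) !void {
    // Initial response: a full, self-contained bundle that names its main module.
    const initial = try dev.client_graph.takeBundle(.initial_response, dev.relativePath(entry_path));
    defer dev.allocator.free(initial);

    // Hot update: only the newly changed code, evaluated by the HMR runtime as a patch.
    const patch = try dev.server_graph.takeBundle(.hmr_chunk, "");
    defer dev.allocator.free(patch);
}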
@@ -15918,7 +15961,6 @@ pub const Chunk = struct { pub const CssImportOrder = struct { conditions: BabyList(bun.css.ImportConditions) = .{}, - // TODO: unfuck this condition_import_records: BabyList(ImportRecord) = .{}, kind: union(enum) { @@ -16006,27 +16048,16 @@ pub const Chunk = struct { pub const ImportsFromOtherChunks = std.AutoArrayHashMapUnmanaged(Index.Int, CrossChunkImport.Item.List); - pub const ContentKind = enum { - javascript, - css, - html, - }; - - pub const HtmlChunk = struct {}; - - pub const Content = union(ContentKind) { + pub const Content = union(enum) { javascript: JavaScriptChunk, css: CssChunk, - html: HtmlChunk, + html, pub fn sourcemap(this: *const Content, default: options.SourceMapOption) options.SourceMapOption { return switch (this.*) { .javascript => default, - // TODO: - .css => options.SourceMapOption.none, - - // probably never - .html => options.SourceMapOption.none, + .css => .none, // TODO: css source maps + .html => .none, }; } @@ -16124,6 +16155,13 @@ pub const CompileResult = union(enum) { html: struct { source_index: Index.Int, code: []const u8, + /// Offsets are used for DevServer to inject resources without re-bundling + offsets: struct { + /// The index of the "<" byte of "" + head_end_tag: u32, + /// The index of the "<" byte of "" + body_end_tag: u32, + }, }, pub const empty = CompileResult{ @@ -16245,11 +16283,9 @@ fn getRedirectId(id: u32) ?u32 { if (id == std.math.maxInt(u32)) { return null; } - return id; } -// TODO: this needs to also update `define` and `external`. This whole setup needs to be more resilient. fn targetFromHashbang(buffer: []const u8) ?options.Target { if (buffer.len > "#!/usr/bin/env bun".len) { if (strings.hasPrefixComptime(buffer, "#!/usr/bin/env bun")) { @@ -16259,7 +16295,6 @@ fn targetFromHashbang(buffer: []const u8) ?options.Target { } } } - return null; } @@ -16607,21 +16642,26 @@ pub const CssEntryPointMeta = struct { }; /// The lifetime of this structure is tied to the transpiler's arena -pub const BakeBundleStart = struct { +pub const DevServerInput = struct { css_entry_points: std.AutoArrayHashMapUnmanaged(Index, CssEntryPointMeta), }; /// The lifetime of this structure is tied to the transpiler's arena -pub const BakeBundleOutput = struct { +pub const DevServerOutput = struct { chunks: []Chunk, css_file_list: std.AutoArrayHashMapUnmanaged(Index, CssEntryPointMeta), + html_files: std.AutoArrayHashMapUnmanaged(Index, void), - pub fn jsPseudoChunk(out: BakeBundleOutput) *Chunk { + pub fn jsPseudoChunk(out: DevServerOutput) *Chunk { return &out.chunks[0]; } - pub fn cssChunks(out: BakeBundleOutput) []Chunk { - return out.chunks[1..]; + pub fn cssChunks(out: DevServerOutput) []Chunk { + return out.chunks[1..][0..out.css_file_list.count()]; + } + + pub fn htmlChunks(out: DevServerOutput) []Chunk { + return out.chunks[1 + out.css_file_list.count() ..][0..out.html_files.count()]; } }; diff --git a/src/cli.zig b/src/cli.zig index 0f1bad446ed8ee..63cd0a783cb271 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -243,6 +243,7 @@ pub const Arguments = struct { clap.parseParam("--throw-deprecation Determine whether or not deprecation warnings result in errors.") catch unreachable, clap.parseParam("--title Set the process title") catch unreachable, clap.parseParam("--zero-fill-buffers Boolean to force Buffer.allocUnsafe(size) to be zero-filled.") catch unreachable, + clap.parseParam("--no-hmr Disable Hot-module-replacement when using HTML imports with Bun.serve") catch unreachable, }; const auto_or_run_params = [_]ParamType{ 
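// Editor's note: an illustrative sketch, not part of the patch, restating the chunk
// layout that DevServerOutput (added in the bundle_v2.zig hunk above) encodes:
// chunk 0 is the JS pseudo-chunk, followed by one chunk per CSS entry point, then
// one chunk per HTML entry point. The concrete counts in the comment are hypothetical.
fn exampleChunkLayout(out: DevServerOutput) void {
    // With e.g. 2 CSS entry points and 1 HTML file:
    //   out.chunks[0]     -> out.jsPseudoChunk()
    //   out.chunks[1..3]  -> out.cssChunks()
    //   out.chunks[3..4]  -> out.htmlChunks()
    std.debug.assert(out.chunks.len == 1 + out.css_file_list.count() + out.html_files.count());
    std.debug.assert(out.cssChunks().len == out.css_file_list.count());
    std.debug.assert(out.htmlChunks().len == out.html_files.count());
}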
diff --git a/src/deps/libuv.zig b/src/deps/libuv.zig index a07f095a2615ec..199c4898e56f64 100644 --- a/src/deps/libuv.zig +++ b/src/deps/libuv.zig @@ -2274,8 +2274,7 @@ pub const uv_stdio_container_t = struct_uv_stdio_container_s; pub const uv_process_options_t = extern struct { exit_cb: uv_exit_cb, file: [*:0]const u8, - // TODO(@paperdave): upstream changing libuv's args to const - // it is not mutated in any of their code + // In libuv, this is not 'const', but they never mutate it. args: [*:null]?[*:0]const u8, env: [*:null]?[*:0]const u8, cwd: [*:0]const u8, diff --git a/src/deps/lol-html.zig b/src/deps/lol-html.zig index 303588edc09276..489f2e99c04d6a 100644 --- a/src/deps/lol-html.zig +++ b/src/deps/lol-html.zig @@ -732,8 +732,8 @@ pub const Comment = opaque { }; pub const Directive = enum(c_uint) { - stop = 0, - @"continue" = 1, + @"continue" = 0, + stop = 1, }; pub const lol_html_comment_handler_t = *const fn (*Comment, ?*anyopaque) callconv(.C) Directive; pub const lol_html_text_handler_handler_t = *const fn (*TextChunk, ?*anyopaque) callconv(.C) Directive; diff --git a/src/deps/uws.zig b/src/deps/uws.zig index 6926d96b9c436f..26467462c640e3 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -3288,7 +3288,7 @@ pub const AnyResponse = union(enum) { }; } - pub fn onAborted(this: AnyResponse, comptime UserDataType: type, comptime handler: fn (UserDataType, AnyResponse) void, opcional_data: UserDataType) void { + pub fn onAborted(this: AnyResponse, comptime UserDataType: type, comptime handler: fn (UserDataType, AnyResponse) void, optional_data: UserDataType) void { const wrapper = struct { pub fn ssl_handler(user_data: UserDataType, resp: *NewApp(true).Response) void { handler(user_data, .{ .SSL = resp }); @@ -3298,8 +3298,8 @@ pub const AnyResponse = union(enum) { } }; return switch (this) { - .SSL => |resp| resp.onAborted(UserDataType, wrapper.ssl_handler, opcional_data), - .TCP => |resp| resp.onAborted(UserDataType, wrapper.tcp_handler, opcional_data), + .SSL => |resp| resp.onAborted(UserDataType, wrapper.ssl_handler, optional_data), + .TCP => |resp| resp.onAborted(UserDataType, wrapper.tcp_handler, optional_data), }; } diff --git a/src/import_record.zig b/src/import_record.zig index a81f5f180b073d..ab1ff7958dbd27 100644 --- a/src/import_record.zig +++ b/src/import_record.zig @@ -196,8 +196,6 @@ pub const ImportRecord = struct { with_type_toml, with_type_file, - tailwind, - pub fn loader(this: Tag) ?bun.options.Loader { return switch (this) { .with_type_sqlite => .sqlite, From cb17fb5728a8b8a8199787c8309766b3bde822a6 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 31 Jan 2025 14:32:15 -0800 Subject: [PATCH 05/28] rfr --- build.zig | 1 + bun.lock | 7 +- package.json | 9 +- src/bake/DevServer.zig | 21 ++- src/bake/bake.zig | 37 +++++ src/bun.js/api/server.zig | 18 +-- src/bundler/bundle_v2.zig | 29 ++-- src/codegen/bake-codegen.ts | 19 ++- src/js_ast.zig | 12 +- src/js_parser.zig | 299 ++++++++++++++++++++---------------- 10 files changed, 276 insertions(+), 176 deletions(-) diff --git a/build.zig b/build.zig index 01ebdaaeea9d2d..47e057f6f5a495 100644 --- a/build.zig +++ b/build.zig @@ -536,6 +536,7 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void { .{ .file = "bake.client.js", .import = "bake-codegen/bake.client.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bake.error.js", .import = "bake-codegen/bake.error.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bake.server.js", .import = "bake-codegen/bake.server.js", .enable = 
opts.shouldEmbedCode() }, + .{ .file = "bake.react-refresh-prebuilt.js", .import = "bake-codegen/bake.react-refresh-prebuilt.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bun-error/index.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bun-error/bun-error.css", .enable = opts.shouldEmbedCode() }, .{ .file = "fallback-decoder.js", .enable = opts.shouldEmbedCode() }, diff --git a/bun.lock b/bun.lock index b251272571f4a8..8df43f61548d29 100644 --- a/bun.lock +++ b/bun.lock @@ -1,5 +1,5 @@ { - "lockfileVersion": 0, + "lockfileVersion": 1, "workspaces": { "": { "name": "bun", @@ -21,6 +21,7 @@ "prettier-plugin-organize-imports": "^4.0.0", "react": "^18.3.1", "react-dom": "^18.3.1", + "react-refresh": "^0.16.0", "source-map-js": "^1.2.0", "typescript": "^5.7.2", }, @@ -257,7 +258,7 @@ "builtins": ["builtins@1.0.3", "", {}, "sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ=="], - "bun-types": ["bun-types@workspace:packages/bun-types", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" }, "devDependencies": { "@biomejs/biome": "^1.5.3", "@definitelytyped/dtslint": "^0.0.199", "@definitelytyped/eslint-plugin": "^0.0.197", "typescript": "^5.0.2" } }], + "bun-types": ["bun-types@workspace:packages/bun-types"], "call-bind": ["call-bind@1.0.7", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.1" } }, "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w=="], @@ -707,6 +708,8 @@ "react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="], + "react-refresh": ["react-refresh@0.16.0", "", {}, "sha512-FPvF2XxTSikpJxcr+bHut2H4gJ17+18Uy20D5/F+SKzFap62R3cM5wH6b8WN3LyGSYeQilLEcJcR1fjBSI2S1A=="], + "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], "regexp.prototype.flags": ["regexp.prototype.flags@1.5.2", "", { "dependencies": { "call-bind": "^1.0.6", "define-properties": "^1.2.1", "es-errors": "^1.3.0", "set-function-name": "^2.0.1" } }, "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw=="], diff --git a/package.json b/package.json index 1055954b3bce54..72d0ca72464d3e 100644 --- a/package.json +++ b/package.json @@ -6,11 +6,14 @@ "./packages/bun-types" ], "devDependencies": { + "@mdn/browser-compat-data": "~5.5.28", "@types/bun": "*", "@types/react": "^18.3.3", "@typescript-eslint/eslint-plugin": "^7.11.0", "@typescript-eslint/parser": "^7.11.0", "@vscode/debugadapter": "^1.65.0", + "autoprefixer": "^10.4.19", + "caniuse-lite": "^1.0.30001620", "esbuild": "^0.21.4", "eslint": "^9.4.0", "eslint-config-prettier": "^9.1.0", @@ -20,11 +23,9 @@ "prettier-plugin-organize-imports": "^4.0.0", "react": "^18.3.1", "react-dom": "^18.3.1", + "react-refresh": "^0.16.0", "source-map-js": "^1.2.0", - "typescript": "^5.7.2", - "caniuse-lite": "^1.0.30001620", - "autoprefixer": "^10.4.19", - "@mdn/browser-compat-data": "~5.5.28" + "typescript": "^5.7.2" }, "resolutions": { "bun-types": "workspace:packages/bun-types" diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig 
index 05e06d3b24a3f9..f5c738f518c86a 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -1013,7 +1013,7 @@ fn getJavaScriptCodeForHTMLFile( try bun.js_printer.writeJSONString(input_file_sources[index.get()].path.pretty, @TypeOf(w), w, .utf8); try w.writeAll("(m) {\n "); for (import_records[index.get()].slice()) |import| { - try w.writeAll("m.dynamicImport("); + try w.writeAll(" m.dynamicImport("); try bun.js_printer.writeJSONString(import.path.pretty, @TypeOf(w), w, .utf8); try w.writeAll(");\n "); } @@ -1232,6 +1232,18 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]c dev.client_graph.reset(); try dev.traceAllRouteImports(route_bundle, >s, .{ .find_client_modules = true }); + if (dev.framework.react_fast_refresh) |rfr| brk: { + const rfr_index = dev.client_graph.getFileIndex(rfr.import_source) orelse + break :brk; + if (!dev.client_graph.stale_files.isSet(rfr_index.get())) { + try dev.client_graph.traceImports( + rfr_index, + >s, + .{ .find_client_modules = true }, + ); + } + } + const client_file: ?IncrementalGraph(.client).FileIndex = switch (route_bundle.data) { .framework => |fw| if (dev.router.typePtr(dev.router.routePtr(fw.route_index).type).client_file.unwrap()) |ofi| fromOpaqueFileId(.client, ofi) @@ -4548,6 +4560,8 @@ pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []? // TODO: alot of code is missing // TODO: story for busting resolution cache smartly? for (events) |event| { + // TODO: why does this out of bounds when you delete every file in the directory? + if (event.index >= file_paths.len) continue; const file_path = file_paths[event.index]; const update_count = counts[event.index] + 1; counts[event.index] = update_count; @@ -4686,6 +4700,11 @@ fn fromOpaqueFileId(comptime side: bake.Side, id: OpaqueFileId) IncrementalGraph /// Returns posix style path, suitible for URLs and reproducible hashes. fn relativePath(dev: *const DevServer, path: []const u8) []const u8 { bun.assert(dev.root[dev.root.len - 1] != '/'); + + if (!std.fs.path.isAbsolute(path)) { + return path; + } + if (path.len >= dev.root.len + 1 and path[dev.root.len] == '/' and bun.strings.startsWith(path, dev.root)) diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 4686fe0e4a9f21..3c0243791d6cac 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -223,6 +223,36 @@ pub const Framework = struct { }; } + /// Default that requires no packages or configuration. + /// - If `react-refresh` is installed, enable react fast refresh with it. + /// - Otherwise, if `react` is installed, use a bundled copy of + /// react-refresh so that it still works. + /// The provided allocator is not stored. + pub fn auto(arena: std.mem.Allocator, resolver: *bun.resolver.Resolver) !Framework { + var fw: Framework = Framework.none; + + if (resolveOrNull(resolver, "react-refresh/runtime")) |rfr| { + fw.react_fast_refresh = .{ + .import_source = rfr, + }; + } else if (resolveOrNull(resolver, "react")) |_| { + fw.react_fast_refresh = .{ + .import_source = "react-refresh/runtime", + }; + try fw.built_in_modules.put( + arena, + "react-refresh/runtime", + if (Environment.codegen_embed) + .{ .code = @embedFile("bake.react-refresh-prebuilt.js") } + else + .{ .code = bun.runtimeEmbedFile(.codegen, "bake.react-refresh-prebuilt.js") }, + ); + } + + return fw; + } + + /// Unopiniated default. 
pub const none: Framework = .{ .is_built_in_react = false, .file_system_router_types = &.{}, @@ -317,6 +347,13 @@ pub const Framework = struct { path.* = result.path().?.text; } + inline fn resolveOrNull(r: *bun.resolver.Resolver, path: []const u8) ?[]const u8 { + return (r.resolve(r.fs.top_level_dir, path, .stmt) catch { + r.log.reset(); + return null; + }).pathConst().?.text; + } + fn fromJS( opts: JSValue, global: *JSC.JSGlobalObject, diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 934b30f3f6e816..ec6b86bde81a51 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1177,19 +1177,15 @@ pub const ServerConfig = struct { // When HTML bundles are provided, ensure DevServer options are ready // The precense of these options if (dedupe_html_bundle_map.count() > 0) { + // TODO: this should be the dir with bunfig?? + const root = bun.fs.FileSystem.instance.top_level_dir; + var arena = std.heap.ArenaAllocator.init(bun.default_allocator); + const framework = try bun.bake.Framework.auto(arena.allocator(), &global.bunVM().transpiler.resolver); args.bake = .{ - .arena = std.heap.ArenaAllocator.init(bun.default_allocator), + .arena = arena, .allocations = bun.bake.StringRefList.empty, - - // TODO: this should be the dir with bunfig?? - .root = bun.fs.FileSystem.instance.top_level_dir, - // TODO: framework / react fast refresh - // probably specify framework details through bunfig, - // but also it would be very nice to have built-in - // support to just load node_modules/react-refresh if - // react is installed. maybe even ship a fallback copy - // of rfr with bun so it always "just works" - .framework = bun.bake.Framework.none, + .root = root, + .framework = framework, .frontend_only = true, .bundler_options = bun.bake.SplitBundlerOptions.empty, }; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index dc4be37be77109..3f0a71b23e3a46 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -4551,6 +4551,7 @@ pub const ParseTask = struct { transpiler.options.react_fast_refresh and loader.isJSX() and !source.path.isNodeModule(); + std.debug.print("{s} - {}\n", .{ source.path.text, opts.features.react_fast_refresh }); opts.features.server_components = if (transpiler.options.server_components) switch (target) { .browser => .client_side, @@ -12342,20 +12343,6 @@ pub const LinkerContext = struct { }); const module_id = Expr.initIdentifier(ast.module_ref, Logger.Loc.Empty); - // add a marker for the client runtime to tell that this is an ES module - if (ast.exports_kind == .esm) { - try stmts.inside_wrapper_prefix.append(Stmt.alloc(S.SExpr, .{ - .value = Expr.assign( - Expr.init(E.Dot, .{ - .target = Expr.initIdentifier(ast.module_ref, Loc.Empty), - .name = "__esModule", - .name_loc = Loc.Empty, - }, Loc.Empty), - Expr.init(E.Boolean, .{ .value = true }, Loc.Empty), - ), - }, Loc.Empty)); - } - for (part_stmts) |stmt| { switch (stmt.data) { else => { @@ -12493,6 +12480,20 @@ pub const LinkerContext = struct { if (c.options.output_format == .internal_bake_dev) { bun.assert(!part_range.source_index.isRuntime()); // embedded in HMR runtime + // add a marker for the client runtime to tell that this is an ES module + if (ast.exports_kind == .esm) { + stmts.inside_wrapper_prefix.append(Stmt.alloc(S.SExpr, .{ + .value = Expr.assign( + Expr.init(E.Dot, .{ + .target = Expr.initIdentifier(ast.module_ref, Loc.Empty), + .name = "__esModule", + .name_loc = Loc.Empty, + }, Loc.Empty), + Expr.init(E.Boolean, .{ .value = true }, Loc.Empty), + 
), + }, Loc.Empty)) catch bun.outOfMemory(); + } + for (parts) |part| { c.convertStmtsForChunkForBake(part_range.source_index.get(), stmts, part.stmts, allocator, &ast) catch |err| return .{ .err = err }; diff --git a/src/codegen/bake-codegen.ts b/src/codegen/bake-codegen.ts index a48fbaae4e4750..e4b07248ce0823 100644 --- a/src/codegen/bake-codegen.ts +++ b/src/codegen/bake-codegen.ts @@ -31,7 +31,22 @@ async function run() { writeIfNotChanged(join(base_dir, "generated.ts"), convertZigEnum(devServerZig)); const results = await Promise.allSettled( - ["client", "server", "error"].map(async file => { + ["client", "server", "error", "react-refresh"].map(async file => { + if (file === "react-refresh") { + let result = await Bun.build({ + entrypoints: [require.resolve("react-refresh")], + minify: true, + target: "browser", + external: ["*"], + }); + if (!result.success) throw new AggregateError(result.logs); + assert(result.outputs.length === 1, "must bundle to a single file"); + // @ts-ignore + let code = await result.outputs[0].text(); + writeIfNotChanged(join(codegenRoot, `bake.react-refresh-prebuilt.js`), code); + return; + } + const side = file === "error" ? "client" : file; let result = await Bun.build({ entrypoints: [join(base_dir, `hmr-runtime-${file}.ts`)], @@ -166,7 +181,7 @@ async function run() { console.error(err); } } else { - console.log("-> bake.client.js, bake.server.js, bake.error.js"); + console.log("-> bake.client.js, bake.server.js, bake.error.js, bake.react-refresh-prebuilt.js"); const empty_file = join(codegenRoot, "bake_empty_file"); if (!existsSync(empty_file)) writeIfNotChanged(empty_file, "this is used to fulfill a cmake dependency"); diff --git a/src/js_ast.zig b/src/js_ast.zig index f7187cbfe953e6..3e2f181141fef8 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1615,7 +1615,7 @@ pub const E = struct { pub fn hasSameFlagsAs(a: *Dot, b: *Dot) bool { return (a.optional_chain == b.optional_chain and a.is_direct_eval == b.is_direct_eval and - a.can_be_unwrapped_if_unused == b.can_be_unwrapped_if_unused and a.call_can_be_unwrapped_if_unused == b.call_can_be_unwrapped_if_unused); + a.can_be_removed_if_unused == b.can_be_removed_if_unused and a.call_can_be_unwrapped_if_unused == b.call_can_be_unwrapped_if_unused); } }; @@ -1649,7 +1649,7 @@ pub const E = struct { must_keep_due_to_with_stmt: bool = false, // If true, this identifier is known to not have a side effect (i.e. to not - // throw an exception) when referenced. If false, this identifier may or may + // throw an exception) when referenced. If false, this identifier may or // not have side effects when referenced. This is used to allow the removal // of known globals such as "Object" if they aren't used. 
can_be_removed_if_unused: bool = false, @@ -2028,12 +2028,12 @@ pub const E = struct { return error.Clobber; }, .e_object => |object| { - if (rope.next == null) { - // success - return existing; + if (rope.next != null) { + return try object.getOrPutObject(rope.next.?, allocator); } - return try object.getOrPutObject(rope.next.?, allocator); + // success + return existing; }, else => { return error.Clobber; diff --git a/src/js_parser.zig b/src/js_parser.zig index 69a7b91c021c9f..91049427e6dbbc 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -5104,136 +5104,6 @@ fn NewParser_( }; }; - /// "Fast Refresh" is React's solution for hot-module-reloading in the context of the UI framework - /// user guide: https://reactnative.dev/docs/fast-refresh (applies to react-dom and native) - /// - /// This depends on performing a couple extra transformations at bundle time, as well as - /// including the `react-refresh` NPM package, which is able to do the heavy lifting, - /// integrating with `react` and `react-dom`. - /// - /// Prior implementations: - /// [1]: https://github.com/facebook/react/blob/main/packages/react-refresh/src/ReactFreshBabelPlugin.js - /// [2]: https://github.com/swc-project/swc/blob/main/crates/swc_ecma_transforms_react/src/refresh/mod.rs - /// - /// Additional reading: - /// [3] https://github.com/facebook/react/issues/16604#issuecomment-528663101 - /// [4] https://github.com/facebook/react/blob/master/packages/react-refresh/src/__tests__/ReactFreshIntegration-test.js - /// - /// Instead of a plugin which visits the tree separately, Bun's implementation of fast refresh - /// happens in tandem with the visit pass. The responsibilities of the transform are as follows: - /// - /// 1. For all Components (which is defined as any top-level function/function variable, that is - /// named with a capital letter; see `isComponentishName`), register them to the runtime using - /// `$RefreshReg$(ComponentFunction, "Component");`. Implemented in `p.handleReactRefreshRegister` - /// HOC components are also registered, but only through a special case for `export default` - /// - /// 2. For all functions which call a Hook (a hook is an identifier matching /^use[A-Z]/): - /// a. Outside of the function, create a signature function `const _s = $RefreshSig$();` - /// b. At the start of the function, call `_s()` - /// c. Record all of the hooks called, the variables they are assigned to, and - /// arguments depending on which hook has been used. `useState` and `useReducer`, - /// for example, are special-cased. - /// d. Directly after the function, call `_s(hook, "", forceReset)` - /// - If a user-defined hook is called, the alterate form is used: - /// `_s(hook, "", forceReset, () => [useCustom1, useCustom2])` - /// - /// The upstream transforms do not declare `$RefreshReg$` or `$RefreshSig$`. A typical - /// implementation might look like this, prepending this data to the module start: - /// - /// import * as Refresh from 'react-refresh/runtime'; - /// const $RefreshReg$ = (type, id) => Refresh.register(type, "" + id); - /// const $RefreshSig$ = Refresh.createSignatureFunctionForTransform; - /// - /// Since Bun is a transpiler *and* bundler, we take a slightly different approach. Aside - /// from including the link to the refresh runtime, our notation of $RefreshReg$ is just - /// pointing at `Refresh.register`, which means when we call it, the second argument has - /// to be a string containing the filepath, not just the component name. 
- const ReactRefresh = struct { - // Set if this JSX/TSX file uses the refresh runtime. If so, - // we must insert an import statement to it. - register_used: bool = false, - signature_used: bool = false, - - /// $RefreshReg$ is called on all top-level variables that are - /// components, as well as HOCs found in the `export default` clause. - register_ref: Ref = Ref.None, - - /// $RefreshSig$ is called to create a signature function, which is - /// used by the refresh runtime to perform smart hook tracking. - create_signature_ref: Ref = Ref.None, - - /// If a comment with '@refresh reset' is seen, we will forward a - /// force refresh to the refresh runtime. This lets you reset the - /// state of hooks on an update on a per-component basis. - // TODO: this is never set - force_reset: bool = false, - - /// The last hook that was scanned. This is used when visiting - /// `.s_local`, as we must hash the variable destructure if the - /// hook's result is assigned directly to a local. - last_hook_seen: ?*E.Call = null, - - /// Every function sets up stack memory to hold data related to it's - /// hook tracking. This is a pointer to that ?HookContext, where an - /// inner null means there are no hook calls. - /// - /// The inner value is initialized when the first hook .e_call is - /// visited, where the '_s' symbol is reserved. Additional hook calls - /// append to the `hasher` and `user_hooks` as needed. - /// - /// When a function is done visiting, the stack location is checked, - /// and then it will insert `var _s = ...`, add the `_s()` call at - /// the start of the function, and then add the call to `_s(func, ...)`. - hook_ctx_storage: ?*?HookContext = null, - - pub const HookContext = struct { - hasher: std.hash.Wyhash, - signature_cb: Ref, - user_hooks: std.AutoArrayHashMapUnmanaged(Ref, Expr), - }; - - // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L42 - pub fn isComponentishName(id: []const u8) bool { - if (id.len == 0) return false; - return switch (id[0]) { - 'A'...'Z' => true, - else => false, - }; - } - - // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L408 - pub fn isHookName(id: []const u8) bool { - return id.len >= 4 and - strings.hasPrefixComptime(id, "use") and - switch (id[3]) { - 'A'...'Z' => true, - else => false, - }; - } - - pub const built_in_hooks = bun.ComptimeEnumMap(enum { - useState, - useReducer, - useEffect, - useLayoutEffect, - useMemo, - useCallback, - useRef, - useContext, - useImperativeHandle, - useDebugValue, - useId, - useDeferredValue, - useTransition, - useInsertionEffect, - useSyncExternalStore, - useFormStatus, - useFormState, - useActionState, - useOptimistic, - }); - }; - /// use this instead of checking p.source.index /// because when not bundling, p.source.index is `0` inline fn isSourceRuntime(p: *const P) bool { @@ -19390,6 +19260,10 @@ fn NewParser_( data.default_name = createDefaultName(p, stmt.loc) catch unreachable; } + if (p.options.features.react_fast_refresh) { + try p.handleReactRefreshRegister(stmts, name, data.default_name.ref.?, .default); + } + if (p.options.features.server_components.wrapsExports()) { data.value = .{ .expr = p.wrapValueForServerComponentReference(p.newExpr(E.Function{ .func = func.func }, stmt.loc), "default") }; } @@ -19539,7 +19413,7 @@ fn NewParser_( } if (p.current_scope == p.module_scope) { - try p.handleReactRefreshRegister(stmts, 
original_name, name_ref); + try p.handleReactRefreshRegister(stmts, original_name, name_ref, .named); } } @@ -19763,7 +19637,7 @@ fn NewParser_( else => break :try_register, }; const original_name = p.symbols.items[id.innerIndex()].original_name; - try p.handleReactRefreshRegister(stmts, original_name, id); + try p.handleReactRefreshRegister(stmts, original_name, id, .named); } } @@ -23292,7 +23166,7 @@ fn NewParser_( } }; - pub fn handleReactRefreshRegister(p: *P, stmts: *ListManaged(Stmt), original_name: []const u8, ref: Ref) !void { + pub fn handleReactRefreshRegister(p: *P, stmts: *ListManaged(Stmt), original_name: []const u8, ref: Ref, export_kind: enum { named, default }) !void { bun.assert(p.options.features.react_fast_refresh); bun.assert(p.current_scope == p.module_scope); @@ -23307,12 +23181,16 @@ fn NewParser_( .data = try bun.strings.concat(p.allocator, &.{ p.source.path.pretty, ":", - original_name, + switch (export_kind) { + .named => original_name, + .default => "default", + }, }), }, loc), }), }, loc) }, loc)); + p.recordUsage(ref); p.react_refresh.register_used = true; } } @@ -24050,6 +23928,136 @@ const WrapMode = enum { bun_commonjs, }; +/// "Fast Refresh" is React's solution for hot-module-reloading in the context of the UI framework +/// user guide: https://reactnative.dev/docs/fast-refresh (applies to react-dom and native) +/// +/// This depends on performing a couple extra transformations at bundle time, as well as +/// including the `react-refresh` NPM package, which is able to do the heavy lifting, +/// integrating with `react` and `react-dom`. +/// +/// Prior implementations: +/// [1]: https://github.com/facebook/react/blob/main/packages/react-refresh/src/ReactFreshBabelPlugin.js +/// [2]: https://github.com/swc-project/swc/blob/main/crates/swc_ecma_transforms_react/src/refresh/mod.rs +/// +/// Additional reading: +/// [3] https://github.com/facebook/react/issues/16604#issuecomment-528663101 +/// [4] https://github.com/facebook/react/blob/master/packages/react-refresh/src/__tests__/ReactFreshIntegration-test.js +/// +/// Instead of a plugin which visits the tree separately, Bun's implementation of fast refresh +/// happens in tandem with the visit pass. The responsibilities of the transform are as follows: +/// +/// 1. For all Components (which is defined as any top-level function/function variable, that is +/// named with a capital letter; see `isComponentishName`), register them to the runtime using +/// `$RefreshReg$(ComponentFunction, "Component");`. Implemented in `p.handleReactRefreshRegister` +/// HOC components are also registered, but only through a special case for `export default` +/// +/// 2. For all functions which call a Hook (a hook is an identifier matching /^use[A-Z]/): +/// a. Outside of the function, create a signature function `const _s = $RefreshSig$();` +/// b. At the start of the function, call `_s()` +/// c. Record all of the hooks called, the variables they are assigned to, and +/// arguments depending on which hook has been used. `useState` and `useReducer`, +/// for example, are special-cased. +/// d. Directly after the function, call `_s(hook, "", forceReset)` +/// - If a user-defined hook is called, the alterate form is used: +/// `_s(hook, "", forceReset, () => [useCustom1, useCustom2])` +/// +/// The upstream transforms do not declare `$RefreshReg$` or `$RefreshSig$`. 
A typical +/// implementation might look like this, prepending this data to the module start: +/// +/// import * as Refresh from 'react-refresh/runtime'; +/// const $RefreshReg$ = (type, id) => Refresh.register(type, "" + id); +/// const $RefreshSig$ = Refresh.createSignatureFunctionForTransform; +/// +/// Since Bun is a transpiler *and* bundler, we take a slightly different approach. Aside +/// from including the link to the refresh runtime, our notation of $RefreshReg$ is just +/// pointing at `Refresh.register`, which means when we call it, the second argument has +/// to be a string containing the filepath, not just the component name. +const ReactRefresh = struct { + // Set if this JSX/TSX file uses the refresh runtime. If so, + // we must insert an import statement to it. + register_used: bool = false, + signature_used: bool = false, + + /// $RefreshReg$ is called on all top-level variables that are + /// components, as well as HOCs found in the `export default` clause. + register_ref: Ref = Ref.None, + + /// $RefreshSig$ is called to create a signature function, which is + /// used by the refresh runtime to perform smart hook tracking. + create_signature_ref: Ref = Ref.None, + + /// If a comment with '@refresh reset' is seen, we will forward a + /// force refresh to the refresh runtime. This lets you reset the + /// state of hooks on an update on a per-component basis. + // TODO: this is never set + force_reset: bool = false, + + /// The last hook that was scanned. This is used when visiting + /// `.s_local`, as we must hash the variable destructure if the + /// hook's result is assigned directly to a local. + last_hook_seen: ?*E.Call = null, + + /// Every function sets up stack memory to hold data related to it's + /// hook tracking. This is a pointer to that ?HookContext, where an + /// inner null means there are no hook calls. + /// + /// The inner value is initialized when the first hook .e_call is + /// visited, where the '_s' symbol is reserved. Additional hook calls + /// append to the `hasher` and `user_hooks` as needed. + /// + /// When a function is done visiting, the stack location is checked, + /// and then it will insert `var _s = ...`, add the `_s()` call at + /// the start of the function, and then add the call to `_s(func, ...)`. 
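+ ///
+ /// Illustrative output shape for a function that calls hooks (simplified):
+ ///
+ ///     const _s = $RefreshSig$();
+ ///     function useCounter() {
+ ///       _s();
+ ///       const [count, setCount] = useState(0);
+ ///       return [count, setCount];
+ ///     }
+ ///     _s(useCounter, "<hash of the hooks used>");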
+ hook_ctx_storage: ?*?HookContext = null, + + pub const HookContext = struct { + hasher: std.hash.Wyhash, + signature_cb: Ref, + user_hooks: std.AutoArrayHashMapUnmanaged(Ref, Expr), + }; + + // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L42 + pub fn isComponentishName(id: []const u8) bool { + if (id.len == 0) return false; + return switch (id[0]) { + 'A'...'Z' => true, + else => false, + }; + } + + // https://github.com/facebook/react/blob/d1afcb43fd506297109c32ff462f6f659f9110ae/packages/react-refresh/src/ReactFreshBabelPlugin.js#L408 + pub fn isHookName(id: []const u8) bool { + return id.len >= 4 and + strings.hasPrefixComptime(id, "use") and + switch (id[3]) { + 'A'...'Z' => true, + else => false, + }; + } + + pub const built_in_hooks = bun.ComptimeEnumMap(enum { + useState, + useReducer, + useEffect, + useLayoutEffect, + useMemo, + useCallback, + useRef, + useContext, + useImperativeHandle, + useDebugValue, + useId, + useDeferredValue, + useTransition, + useInsertionEffect, + useSyncExternalStore, + useFormStatus, + useFormState, + useActionState, + useOptimistic, + }); +}; + pub const ConvertESMExportsForHmr = struct { last_part: *js_ast.Part, imports_seen: std.AutoArrayHashMapUnmanaged(u32, void) = .{}, @@ -24111,7 +24119,26 @@ pub const ConvertESMExportsForHmr = struct { break :stmt stmt; }, .s_export_default => |st| stmt: { - // Simple case: we can move this to the default property of the exports object + // When React Fast Refresh needs to tag the default export, the statement + // cannot be moved, since a local reference is required. + if (p.options.features.react_fast_refresh and + st.value == .stmt and st.value.stmt.data == .s_function) + fast_refresh_edge_case: { + const symbol = st.value.stmt.data.s_function.func.name orelse + break :fast_refresh_edge_case; + const name = p.symbols.items[symbol.ref.?.inner_index].original_name; + if (ReactRefresh.isComponentishName(name)) { + // Lower to a function statement, and reference the function in the export list. + try ctx.export_props.append(p.allocator, .{ + .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc), + .value = Expr.initIdentifier(symbol.ref.?, stmt.loc), + }); + break :stmt st.value.stmt; + } + // All other functions can be properly moved. + } + + // Try to move the export default expression to the end. 
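+ // Illustrative, simplified output for the fast-refresh case above:
+ //   export default function Counter() { ... }
+ // stays a plain `function Counter() { ... }` statement so it can be passed to
+ // `Refresh.register(Counter, "src/Counter.tsx:default")`, and the exports object
+ // only references it ({ default: Counter }). Other default exports are handled
+ // by the branches below.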
if (st.canBeMoved()) { try ctx.export_props.append(p.allocator, .{ .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc), @@ -24121,7 +24148,7 @@ pub const ConvertESMExportsForHmr = struct { return; } - // Otherwise, we need a temporary + // Otherwise, a new symbol is needed const temp_id = p.generateTempRef("default_export"); try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true }); try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 }); From c3b12fb60b45062370af821a212c4a7011105af5 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 31 Jan 2025 14:37:23 -0800 Subject: [PATCH 06/28] lock --- bun.lock | 4 ++-- package.json | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/bun.lock b/bun.lock index 9313f988e4c3c2..d1eb9c5535ff5c 100644 --- a/bun.lock +++ b/bun.lock @@ -10,8 +10,8 @@ "@typescript-eslint/eslint-plugin": "^7.11.0", "@typescript-eslint/parser": "^7.11.0", "@vscode/debugadapter": "^1.65.0", - "autoprefixer": "^10.4.20", - "caniuse-lite": "^1.0.30001660", + "autoprefixer": "^10.4.19", + "caniuse-lite": "^1.0.30001620", "esbuild": "^0.21.4", "eslint": "^9.4.0", "eslint-config-prettier": "^9.1.0", diff --git a/package.json b/package.json index f04be82ce0c057..e8ef684b999061 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ "prettier-plugin-organize-imports": "^4.0.0", "react": "^18.3.1", "react-dom": "^18.3.1", + "react-refresh": "^0.16.0", "source-map-js": "^1.2.0", "typescript": "^5.7.2" }, @@ -75,8 +76,5 @@ "prettier:extra": "bun run analysis:no-llvm --target prettier-extra", "prettier:diff": "bun run analysis:no-llvm --target prettier-diff", "node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests " - }, - "dependencies": { - "react-refresh": "^0.16.0" } } From 531546f8c5cd200443eded48b159f5aef92e181e Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 31 Jan 2025 18:01:54 -0800 Subject: [PATCH 07/28] async stuff, fix some export forms --- src/bake/hmr-module.ts | 68 +++++++++++++----- src/bake/hmr-runtime-client.ts | 2 +- src/bake/hmr-runtime-server.ts | 15 ++-- src/bundler/bundle_v2.zig | 29 +++++--- src/codegen/bake-codegen.ts | 2 +- src/js_parser.zig | 126 +++++++++++++++++++++++++++------ src/js_printer.zig | 3 + 7 files changed, 191 insertions(+), 54 deletions(-) diff --git a/src/bake/hmr-module.ts b/src/bake/hmr-module.ts index 4cec10a244366a..69b12b663fcd9c 100644 --- a/src/bake/hmr-module.ts +++ b/src/bake/hmr-module.ts @@ -3,6 +3,11 @@ import * as runtimeHelpers from "../runtime.bun.js"; let refreshRuntime: any; const registry = new Map(); +const asyncFunctionPrototype = Object.getPrototypeOf(async function () {}); +function isAsyncFunction(fn: Function) { + return Object.getPrototypeOf(fn) === asyncFunctionPrototype; +} + export type ModuleLoadFunction = (module: HotModule) => void; export type ExportsCallbackFunction = (new_exports: any) => void; @@ -12,9 +17,11 @@ export const enum State { Error, } +// negative = sync, positive = async export const enum LoadModuleType { - AssertPresent, - UserDynamic, + AsyncAssertPresent = 1, + AsyncUserDynamic = 2, + SyncUserDynamic = -1, } interface DepEntry { @@ -46,30 +53,30 @@ export class HotModule { } require(id: Id, onReload?: ExportsCallbackFunction) { - const mod = loadModule(id, LoadModuleType.UserDynamic); + const mod = loadModule(id, LoadModuleType.SyncUserDynamic) as HotModule; mod._deps.set(this, onReload ? 
{ _callback: onReload, _expectedImports: undefined } : undefined); return mod.exports; } - importSync(id: Id, onReload?: ExportsCallbackFunction, expectedImports?: string[]) { - const mod = loadModule(id, LoadModuleType.AssertPresent); + async importStmt(id: Id, onReload?: ExportsCallbackFunction, expectedImports?: string[]) { + const mod = await (loadModule(id, LoadModuleType.AsyncAssertPresent) as Promise); mod._deps.set(this, onReload ? { _callback: onReload, _expectedImports: expectedImports } : undefined); const { exports, __esModule } = mod; const object = __esModule ? exports : (mod._ext_exports ??= { ...exports, default: exports }); if (expectedImports && mod._state === State.Ready) { - for (const key of expectedImports) { - if (!(key in object)) { - throw new SyntaxError(`The requested module '${id}' does not provide an export named '${key}'`); - } - } + // for (const key of expectedImports) { + // if (!(key in object)) { + // throw new SyntaxError(`The requested module '${id}' does not provide an export named '${key}'`); + // } + // } } return object; } /// Equivalent to `import()` in ES modules async dynamicImport(specifier: string, opts?: ImportCallOptions) { - const mod = loadModule(specifier, LoadModuleType.UserDynamic); + const mod = await (loadModule(specifier, LoadModuleType.AsyncUserDynamic) as Promise); // insert into the map if not present mod._deps.set(this, mod._deps.get(this)); const { exports, __esModule } = mod; @@ -172,7 +179,7 @@ function isUnsupportedViteEventName(str: string) { * Load a module by ID. Use `type` to specify if the module is supposed to be * present, or is something a user is able to dynamically specify. */ -export function loadModule(key: Id, type: LoadModuleType): HotModule { +export function loadModule(key: Id, type: LoadModuleType): HotModule | Promise> { let mod = registry.get(key); if (mod) { // Preserve failures until they are re-saved. @@ -182,8 +189,12 @@ export function loadModule(key: Id, type: LoadModuleType): HotModule } mod = new HotModule(key); const load = input_graph[key]; + if (type < 0 && isAsyncFunction(load)) { + // TODO: This is possible to implement, but requires some care. + throw new Error("Cannot load ES module synchronously"); + } if (!load) { - if (type == LoadModuleType.AssertPresent) { + if (type == LoadModuleType.AsyncAssertPresent) { throw new Error( `Failed to load bundled module '${key}'. 
This is not a dynamic import, and therefore is a bug in Bun's bundler.`, ); @@ -195,7 +206,32 @@ export function loadModule(key: Id, type: LoadModuleType): HotModule } try { registry.set(key, mod); - load(mod); + const promise = load(mod); + if (promise) { + if (IS_BUN_DEVELOPMENT) { + if (type !== LoadModuleType.AsyncUserDynamic && type !== LoadModuleType.AsyncAssertPresent) { + throw new Error("Did not expect a promise from loadModule"); + } + if (!(promise instanceof Promise)) { + throw new Error("Expected a promise from loadModule"); + } + } + return promise.then( + () => { + mod._state = State.Ready; + mod._deps.forEach((entry, dep) => { + entry?._callback(mod.exports); + }); + return mod; + }, + err => { + console.error(err); + mod._cached_failure = err; + mod._state = State.Error; + throw err; + }, + ); + } mod._state = State.Ready; mod._deps.forEach((entry, dep) => { entry?._callback(mod.exports); @@ -263,7 +299,7 @@ if (side === "server") { if (side === "client") { const { refresh } = config; if (refresh) { - refreshRuntime = loadModule(refresh, LoadModuleType.AssertPresent).exports; + refreshRuntime = (await loadModule(refresh, LoadModuleType.AsyncAssertPresent)).exports; refreshRuntime.injectIntoGlobalHook(window); } @@ -277,6 +313,6 @@ if (side === "client") { registry.set(server_module.id, server_module); } -runtimeHelpers.__name(HotModule.prototype.importSync, " importSync"); +runtimeHelpers.__name(HotModule.prototype.importStmt, " importStmt"); runtimeHelpers.__name(HotModule.prototype.require, " require"); runtimeHelpers.__name(loadModule, " loadModule"); diff --git a/src/bake/hmr-runtime-client.ts b/src/bake/hmr-runtime-client.ts index 7ccfe129c57359..d8283bea1648ce 100644 --- a/src/bake/hmr-runtime-client.ts +++ b/src/bake/hmr-runtime-client.ts @@ -160,7 +160,7 @@ const ws = initWebSocket({ } try { - loadModule(config.main, LoadModuleType.AssertPresent); + await loadModule(config.main, LoadModuleType.AsyncAssertPresent); } catch (e) { onRuntimeError(e, RuntimeErrorType.fatal); } diff --git a/src/bake/hmr-runtime-server.ts b/src/bake/hmr-runtime-server.ts index b3e91159ef69a0..6b8de6f76abd2e 100644 --- a/src/bake/hmr-runtime-server.ts +++ b/src/bake/hmr-runtime-server.ts @@ -1,7 +1,7 @@ // This file is the entrypoint to the hot-module-reloading runtime. // On the server, communication is established with `server_exports`. 
import type { Bake } from "bun"; -import { loadModule, LoadModuleType, replaceModules, ssrManifest, serverManifest } from "./hmr-module"; +import { loadModule, LoadModuleType, replaceModules, ssrManifest, serverManifest, HotModule } from "./hmr-module"; if (typeof IS_BUN_DEVELOPMENT !== "boolean") { throw new Error("DCE is configured incorrectly"); @@ -35,8 +35,8 @@ server_exports = { }); } - const serverRenderer = loadModule(routerTypeMain, LoadModuleType.AssertPresent).exports - .render; + const serverRenderer = (await loadModule(routerTypeMain, LoadModuleType.AsyncAssertPresent)) + .exports.render; if (!serverRenderer) { throw new Error('Framework server entrypoint is missing a "render" export.'); @@ -45,7 +45,9 @@ server_exports = { throw new Error('Framework server entrypoint\'s "render" export is not a function.'); } - const [pageModule, ...layouts] = routeModules.map(id => loadModule(id, LoadModuleType.AssertPresent).exports); + const [pageModule, ...layouts] = await Promise.all( + routeModules.map(async id => (await loadModule(id, LoadModuleType.AsyncAssertPresent)).exports), + ); const response = await serverRenderer(req, { styles: styles, @@ -68,13 +70,14 @@ server_exports = { if (componentManifestAdd) { for (const uid of componentManifestAdd) { try { - const mod = loadModule(uid, LoadModuleType.AssertPresent); + // TODO: async + const mod = loadModule(uid, LoadModuleType.SyncUserDynamic) as HotModule; const { exports, __esModule } = mod; const exp = __esModule ? exports : (mod._ext_exports ??= { ...exports, default: exports }); const client = {}; for (const exportName of Object.keys(exp)) { - serverManifest[uid + '#' + exportName] = { + serverManifest[uid + "#" + exportName] = { id: uid, name: exportName, chunks: [], diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 3f0a71b23e3a46..b1cf2884492ce7 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -12349,13 +12349,16 @@ pub const LinkerContext = struct { try stmts.inside_wrapper_suffix.append(stmt); }, .s_import => |st| { - // hmr-runtime.ts defines `module.importSync` to be - // a synchronous import. this is different from - // require in that esm <-> cjs is handled - // automatically, instead of with transpiler-added - // annotations like '__commonJS'. + // hmr-runtime.ts defines `module.dynamicImport` to be the + // ESM `import`. this is different from `require` in that + // esm <-> cjs is handled by the runtime instead of via + // transpiler-added annotations like '__commonJS'. These + // annotations couldn't be added since the bundled file + // must not have any reference to it's imports. That way + // changing a module's type does not re-bundle its + // incremental dependencies. // - // this cannot be done in the parse step because the final + // This cannot be done in the parse step because the final // pretty path is not yet known. the other statement types // are not handled here because some of those generate // new local variables (it is too late to do that here). 
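Piecing this hunk together with `importStmt` in hmr-module.ts, an import statement in a module bundled for the dev runtime roughly becomes an awaited call on the module object. The identifiers below are illustrative and the arguments are simplified; this is a sketch of the shape, not literal bundler output:

    // source module
    import { useState } from "react";
    useState();

    // roughly what the dev-mode ("internal_bake_dev") output does instead;
    // the extra importStmt arguments and the module wrapper are omitted
    const react_ns = await module.importStmt("react");
    // named imports become property accesses so live bindings keep working
    react_ns.useState();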
@@ -12387,10 +12390,13 @@ pub const LinkerContext = struct { str.* = Expr.init(E.String, .{ .data = item.alias }, item.name.loc); } - break :call Expr.init(E.Call, .{ + const expr = Expr.init(E.Call, .{ .target = Expr.init(E.Dot, .{ .target = module_id, - .name = if (is_builtin) "importBuiltin" else "importSync", + .name = if (is_builtin) + "importBuiltin" + else + "importStmt", .name_loc = stmt.loc, }, stmt.loc), .args = js_ast.ExprNodeList.init( @@ -12419,6 +12425,10 @@ pub const LinkerContext = struct { }), ), }, stmt.loc); + break :call if (is_builtin) + expr + else + Expr.init(E.Await, .{ .value = expr }, stmt.loc); } else Expr.init(E.Object, .{}, stmt.loc); if (is_bare_import) { @@ -12471,7 +12481,7 @@ pub const LinkerContext = struct { Index.invalid; // referencing everything by array makes the code a lot more annoying :( - const ast: JSAst = c.graph.ast.get(part_range.source_index.get()); + var ast: JSAst = c.graph.ast.get(part_range.source_index.get()); // For Bun Kit, part generation is entirely special cased. // - export wrapping is already done. @@ -12492,6 +12502,7 @@ pub const LinkerContext = struct { Expr.init(E.Boolean, .{ .value = true }, Loc.Empty), ), }, Loc.Empty)) catch bun.outOfMemory(); + ast.top_level_await_keyword = .{ .loc = .{ .start = 0 }, .len = 1 }; } for (parts) |part| { diff --git a/src/codegen/bake-codegen.ts b/src/codegen/bake-codegen.ts index e4b07248ce0823..89dfa8d3db3dfe 100644 --- a/src/codegen/bake-codegen.ts +++ b/src/codegen/bake-codegen.ts @@ -135,7 +135,7 @@ async function run() { code = debug ? `((${params}) => {${code}})\n` : `((${params})=>{${code}})\n`; } else { - code = debug ? `((${names}) => {${code}})({\n` : `((${names})=>{${code}})({`; + code = debug ? `(async (${names}) => {${code}})({\n` : `(async(${names})=>{${code}})({`; } } diff --git a/src/js_parser.zig b/src/js_parser.zig index 91049427e6dbbc..495dc88a38aa6c 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -24060,10 +24060,16 @@ const ReactRefresh = struct { pub const ConvertESMExportsForHmr = struct { last_part: *js_ast.Part, - imports_seen: std.AutoArrayHashMapUnmanaged(u32, void) = .{}, + imports_seen: bun.StringArrayHashMapUnmanaged(ImportRef) = .{}, + export_star_props: std.ArrayListUnmanaged(G.Property) = .{}, export_props: std.ArrayListUnmanaged(G.Property) = .{}, stmts: std.ArrayListUnmanaged(Stmt) = .{}, + const ImportRef = struct { + /// Index into ConvertESMExportsForHmr.stmts + stmt_index: u32, + }; + fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void { const new_stmt = switch (stmt.data) { else => stmt, @@ -24209,13 +24215,21 @@ pub const ConvertESMExportsForHmr = struct { return; // do not emit a statement here }, - .s_export_from => |st| stmt: { + .s_export_from => |st| { + const namespace_ref = try ctx.deduplicatedImport( + p, + st.import_record_index, + st.namespace_ref, + st.items, + stmt.loc, + stmt.loc, + ); for (st.items) |*item| { const ref = item.name.ref.?; const symbol = &p.symbols.items[ref.innerIndex()]; if (symbol.namespace_alias == null) { symbol.namespace_alias = .{ - .namespace_ref = st.namespace_ref, + .namespace_ref = namespace_ref, .alias = item.original_name, .import_record_index = st.import_record_index, }; @@ -24231,34 +24245,90 @@ pub const ConvertESMExportsForHmr = struct { item.alias = item.original_name; item.original_name = alias; } - - const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index); - if (gop.found_existing) return; - break :stmt Stmt.alloc(S.Import, .{ - 
.import_record_index = st.import_record_index, - .is_single_line = true, - .default_name = null, - .items = st.items, - .namespace_ref = st.namespace_ref, - .star_name_loc = null, - }, stmt.loc); + return; }, - .s_export_star => { - bun.todoPanic(@src(), "hot-module-reloading instrumentation for 'export * from'", .{}); + .s_export_star => |st| { + const namespace_ref = try ctx.deduplicatedImport( + p, + st.import_record_index, + st.namespace_ref, + &.{}, + stmt.loc, + stmt.loc, + ); + try ctx.export_star_props.append(p.allocator, .{ + .kind = .spread, + .value = Expr.initIdentifier(namespace_ref, stmt.loc), + }); + return; }, // De-duplicate import statements. It is okay to disregard // named/default imports here as we always rewrite them as - // full qualified property accesses (need to so live-bindings) - .s_import => |st| stmt: { - const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index); - if (gop.found_existing) return; - break :stmt stmt; + // full qualified property accesses (needed for live-bindings) + .s_import => |st| { + _ = try ctx.deduplicatedImport(p, st.import_record_index, st.namespace_ref, st.items, st.star_name_loc, stmt.loc); + return; }, }; try ctx.stmts.append(p.allocator, new_stmt); } + /// Deduplicates imports, returning a previously used Ref if present. + fn deduplicatedImport( + ctx: *ConvertESMExportsForHmr, + p: anytype, + import_record_index: u32, + namespace_ref: Ref, + items: []js_ast.ClauseItem, + star_name_loc: ?logger.Loc, + loc: logger.Loc, + ) !Ref { + const ir = p.import_records.items[import_record_index]; + const gop = try ctx.imports_seen.getOrPut(p.allocator, ir.path.text); + if (gop.found_existing) { + const stmt = ctx.stmts.items[gop.value_ptr.stmt_index].data.s_import; + if (items.len > 0) { + if (stmt.items.len == 0) { + stmt.items = items; + } else { + stmt.items = try std.mem.concat(p.allocator, js_ast.ClauseItem, &.{ stmt.items, items }); + } + } + if (namespace_ref.isValid()) { + if (!stmt.namespace_ref.isValid()) { + stmt.namespace_ref = namespace_ref; + return namespace_ref; + } else { + // Erase this namespace ref, but since it may be used in + // existing AST trees, a link must be established. 
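+ // For example (illustrative): `import * as a from "./x"` followed by
+ // `import { y } from "./x"` collapses into the first import statement;
+ // the second namespace ref is linked to the first so that references
+ // already recorded in the AST still resolve to the surviving symbol.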
+ const symbol = &p.symbols.items[namespace_ref.innerIndex()]; + symbol.use_count_estimate = 0; + symbol.link = stmt.namespace_ref; + if (@hasField(@typeInfo(@TypeOf(p)).Pointer.child, "symbol_uses")) { + _ = p.symbol_uses.swapRemove(namespace_ref); + } + } + } + if (stmt.star_name_loc == null) if (star_name_loc) |stl| { + stmt.star_name_loc = stl; + }; + return stmt.namespace_ref; + } + + try ctx.stmts.append(p.allocator, Stmt.alloc(S.Import, .{ + .import_record_index = import_record_index, + .is_single_line = true, + .default_name = null, + .items = items, + .namespace_ref = namespace_ref, + .star_name_loc = star_name_loc, + }, loc)); + + gop.value_ptr.* = .{ .stmt_index = @intCast(ctx.stmts.items.len - 1) }; + return namespace_ref; + } + fn visitBindingToExport( ctx: *ConvertESMExportsForHmr, p: anytype, @@ -24340,6 +24410,18 @@ pub const ConvertESMExportsForHmr = struct { } pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.Part) !void { + if (ctx.export_star_props.items.len > 0) { + if (ctx.export_props.items.len == 0) { + ctx.export_props = ctx.export_star_props; + } else { + const export_star_len = ctx.export_star_props.items.len; + try ctx.export_props.ensureUnusedCapacity(p.allocator, export_star_len); + const len = ctx.export_props.items.len; + ctx.export_props.items.len += export_star_len; + bun.copy(G.Property, ctx.export_props.items[export_star_len..], ctx.export_props.items[0..len]); + @memcpy(ctx.export_props.items[0..export_star_len], ctx.export_star_props.items); + } + } if (ctx.export_props.items.len > 0) { try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{ .value = Expr.assign( @@ -24359,6 +24441,8 @@ pub const ConvertESMExportsForHmr = struct { try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = p.module_ref, .is_top_level = true }); } + // TODO: emit a marker for HMR runtime to know the non-star export fields. + // TODO: this is a tiny mess. it is honestly trying to hard to merge all parts into one for (all_parts[0 .. all_parts.len - 1]) |*part| { try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols); diff --git a/src/js_printer.zig b/src/js_printer.zig index 2f791393556cb7..6082212d4842a8 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -5972,6 +5972,9 @@ pub fn printWithWriterAndPlatform( if (opts.module_type == .internal_bake_dev) { printer.indent(); printer.printIndent(); + if (!ast.top_level_await_keyword.isEmpty()) { + printer.print("async "); + } printer.printStringLiteralUTF8(source.path.pretty, false); const func = parts[0].stmts[0].data.s_expr.value.data.e_function.func; if (!(func.body.stmts.len == 1 and func.body.stmts[0].data == .s_lazy_export)) { From 96f00a91aa8bd6609e76e1385d211397c6a25785 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 31 Jan 2025 18:43:03 -0800 Subject: [PATCH 08/28] better --- src/bake/DevServer.zig | 67 ++++++++++++++++++++++++++++++------------ src/bake/hmr-module.ts | 23 ++++++++++++--- src/js_parser.zig | 7 ++++- 3 files changed, 74 insertions(+), 23 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index f5c738f518c86a..ff52201768b667 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -2175,10 +2175,13 @@ const FileKind = enum(u2) { /// Files that failed to bundle or do not exist on disk will appear in the /// graph as "unknown". unknown, - /// Stores JavaScript code. This field is also used for HTML files, where - /// the associated JS just calls `require` to emulate having script tags. 
+ /// `code` is JavaScript code. This field is also used for HTML files, where + /// the associated JS just calls `require` to emulate the script tags. js, + /// `code` is the URL where the CSS file is to be fetched from, ex. + /// '/_bun/css/0000000000000000.css' css, + /// TODO: asset, }; @@ -2456,6 +2459,13 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (side) { .client => { + var flags: File.Flags = .{ + .failed = false, + .is_hmr_root = ctx.server_to_client_bitset.isSet(index.get()), + .is_special_framework_file = false, + .is_html_route = false, + .kind = kind, + }; if (gop.found_existing) { if (kind == .js) bun.default_allocator.free(gop.value_ptr.code()); @@ -2471,14 +2481,9 @@ pub fn IncrementalGraph(side: bake.Side) type { kv.key, ); } + flags.is_special_framework_file = gop.value_ptr.flags.is_special_framework_file; + flags.is_html_route = gop.value_ptr.flags.is_html_route; } - const flags: File.Flags = .{ - .failed = false, - .is_hmr_root = ctx.server_to_client_bitset.isSet(index.get()), - .is_special_framework_file = false, - .is_html_route = false, - .kind = kind, - }; if (kind == .css) { if (!gop.found_existing or gop.value_ptr.code_len == 0) { gop.value_ptr.* = File.init(try std.fmt.allocPrint( @@ -2590,6 +2595,12 @@ pub fn IncrementalGraph(side: bake.Side) type { } } + // `processChunkImportRecords` appends items into `quick_lookup`, + // but those entries always have .seen = true. Snapshot the length + // of original entries so that the new ones can be ignored when + // removing edges. + const quick_lookup_values_to_care_len = quick_lookup.count(); + var new_imports: EdgeIndex.Optional = .none; defer g.first_import.items[file_index.get()] = new_imports; @@ -2603,6 +2614,7 @@ pub fn IncrementalGraph(side: bake.Side) type { // the edges the first added. if (file.is_rsc and file.is_ssr) { // The non-ssr file is always first. + // TODO: // const ssr_index = ctx.scbs.getSSRIndex(bundle_graph_index.get()) orelse { // @panic("Unexpected missing server-component-boundary entry"); // }; @@ -2610,10 +2622,10 @@ pub fn IncrementalGraph(side: bake.Side) type { } } - try g.processChunkImportRecords(ctx, &quick_lookup, &new_imports, file_index, bundle_graph_index); + try g.processChunkImportRecords(ctx, temp_alloc, &quick_lookup, &new_imports, file_index, bundle_graph_index); // '.seen = false' means an import was removed and should be freed - for (quick_lookup.values()) |val| { + for (quick_lookup.values()[0..quick_lookup_values_to_care_len]) |val| { if (!val.seen) { g.owner().incremental_result.had_adjusted_edges = true; @@ -2660,6 +2672,7 @@ pub fn IncrementalGraph(side: bake.Side) type { fn processChunkImportRecords( g: *@This(), ctx: *HotUpdateContext, + temp_alloc: Allocator, quick_lookup: *TempLookup.HashTable, new_imports: *EdgeIndex.Optional, file_index: FileIndex, @@ -2667,6 +2680,11 @@ pub fn IncrementalGraph(side: bake.Side) type { ) !void { const log = bun.Output.scoped(.processChunkDependencies, false); for (ctx.import_records[index.get()].slice()) |import_record| { + // When an import record is duplicated, it gets marked unused. + // This happens in `ConvertESMExportsForHmr.deduplicatedImport` + // There is still a case where deduplication must happen. 
+ if (import_record.is_unused) continue; + if (!import_record.source_index.isRuntime()) try_index_record: { const key = import_record.path.keyForIncrementalGraph(); const imported_file_index = if (import_record.source_index.isInvalid()) @@ -2687,12 +2705,13 @@ pub fn IncrementalGraph(side: bake.Side) type { } } - if (quick_lookup.getPtr(imported_file_index)) |lookup| { + const gop = try quick_lookup.getOrPut(temp_alloc, imported_file_index); + if (gop.found_existing) { // If the edge has already been seen, it will be skipped // to ensure duplicate edges never exist. - if (lookup.seen) continue; + if (gop.value_ptr.seen) continue; + const lookup = gop.value_ptr; lookup.seen = true; - const dep = &g.edges.items[lookup.edge_index.get()]; dep.next_import = new_imports.*; new_imports.* = lookup.edge_index.toOptional(); @@ -2714,6 +2733,13 @@ pub fn IncrementalGraph(side: bake.Side) type { g.owner().incremental_result.had_adjusted_edges = true; + // To prevent duplicates, add into the quick lookup map + // the file index so that it does exist. + gop.value_ptr.* = .{ + .edge_index = edge, + .seen = true, + }; + log("attach edge={d} | id={d} {} -> id={d} {}", .{ edge.get(), file_index.get(), @@ -2872,8 +2898,6 @@ pub fn IncrementalGraph(side: bake.Side) type { } else { if (side == .server) { if (is_route) gop.value_ptr.*.is_route = true; - } else { - if (is_route) gop.value_ptr.*.flags.is_html_route = true; } } @@ -2883,13 +2907,20 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (side) { .client => { - gop.value_ptr.* = File.init("", .{ + var flags: File.Flags = .{ .failed = false, .is_hmr_root = false, .is_special_framework_file = false, .is_html_route = is_route, .kind = .unknown, - }); + }; + if (gop.found_existing) { + if (gop.value_ptr.code().len > 0) { + g.owner().allocator.free(gop.value_ptr.code()); + } + flags.is_html_route = flags.is_html_route or gop.value_ptr.flags.is_html_route; + } + gop.value_ptr.* = File.init("", flags); }, .server => { if (!gop.found_existing) { diff --git a/src/bake/hmr-module.ts b/src/bake/hmr-module.ts index 69b12b663fcd9c..df7b07a16b7f0e 100644 --- a/src/bake/hmr-module.ts +++ b/src/bake/hmr-module.ts @@ -247,12 +247,20 @@ export function loadModule(key: Id, type: LoadModuleType): HotModule export const getModule = registry.get.bind(registry); -export function replaceModule(key: Id, load: ModuleLoadFunction) { +export function replaceModule(key: Id, load: ModuleLoadFunction): Promise | void { const module = registry.get(key); if (module) { module._onDispose?.forEach(cb => cb(null)); module.exports = {}; - load(module); + const promise = load(module) as Promise | undefined; + if (promise) { + return promise.then(() => { + const { exports } = module; + for (const updater of module._deps.values()) { + updater?._callback?.(exports); + } + }); + } const { exports } = module; for (const updater of module._deps.values()) { updater?._callback?.(exports); @@ -260,18 +268,25 @@ export function replaceModule(key: Id, load: ModuleLoadFunction) { } } -export function replaceModules(modules: any) { +export async function replaceModules(modules: any) { for (const k in modules) { input_graph[k] = modules[k]; } + const promises: Promise[] = []; for (const k in modules) { try { - replaceModule(k, modules[k]); + const p = replaceModule(k, modules[k]); + if (p) { + promises.push(p); + } } catch (err) { // TODO: overlay for client console.error(err); } } + if (promises.length) { + await Promise.all(promises); + } if (side === "client" && refreshRuntime) { 
refreshRuntime.performReactRefresh(window); } diff --git a/src/js_parser.zig b/src/js_parser.zig index 495dc88a38aa6c..98c8eb58706e59 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -24284,9 +24284,14 @@ pub const ConvertESMExportsForHmr = struct { star_name_loc: ?logger.Loc, loc: logger.Loc, ) !Ref { - const ir = p.import_records.items[import_record_index]; + const ir = &p.import_records.items[import_record_index]; const gop = try ctx.imports_seen.getOrPut(p.allocator, ir.path.text); if (gop.found_existing) { + // Disable this one since an older record is getting used. It isn't + // practical to delete this import record entry since an import or + // require expression can exist. + ir.is_unused = true; + const stmt = ctx.stmts.items[gop.value_ptr.stmt_index].data.s_import; if (items.len > 0) { if (stmt.items.len == 0) { From e86e150e60ca32c14b8517993fcb0f50a00967fb Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 31 Jan 2025 19:09:22 -0800 Subject: [PATCH 09/28] a --- .vscode/settings.json | 2 +- src/bake/DevServer.zig | 4 +++- src/bun.js/api/server.zig | 13 +++++++++++-- src/js_printer.zig | 15 +++++++++------ 4 files changed, 24 insertions(+), 10 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 8d2afd7e72530f..eca14849b66fe7 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -90,7 +90,7 @@ "editor.defaultFormatter": "esbenp.prettier-vscode", }, "[jsonc]": { - "editor.defaultFormatter": "vscode.json-language-features", + "editor.defaultFormatter": "esbenp.prettier-vscode", }, // Markdown diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index ff52201768b667..a23666dad7cbcb 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -1377,8 +1377,10 @@ pub fn finalizeBundle( bv2: *bun.bundle_v2.BundleV2, result: bun.bundle_v2.DevServerOutput, ) bun.OOM!void { - // TODO: this leaks BundleV2 defer dev.startNextBundleIfPresent(); + defer { + bv2.deinit(); + } const current_bundle = &dev.current_bundle.?; dev.graph_safety_lock.lock(); diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index ea925cb5001db6..17cd8793417f16 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1174,8 +1174,17 @@ pub const ServerConfig = struct { } // When HTML bundles are provided, ensure DevServer options are ready - // The precense of these options - if (dedupe_html_bundle_map.count() > 0) { + // The precense of these options causes Bun.serve to initialize things. + // + // TODO: remove canary gate once the following things are fixed: + // - more extensive hmr reliability testing + // - asset support + // - plugin support + // - tailwind plugin verified functional + // - source maps + if ((Environment.is_canary or Environment.isDebug) and + dedupe_html_bundle_map.count() > 0) + { // TODO: this should be the dir with bunfig?? 
const root = bun.fs.FileSystem.instance.top_level_dir; var arena = std.heap.ArenaAllocator.init(bun.default_allocator); diff --git a/src/js_printer.zig b/src/js_printer.zig index 6082212d4842a8..48b8f4a617edb2 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -1856,12 +1856,17 @@ fn NewPrinter( p.printSpaceBeforeIdentifier(); // Allow it to fail at runtime, if it should - p.print("import("); - p.printImportRecordPath(record); + if (module_type != .internal_bake_dev) { + p.print("import("); + p.printImportRecordPath(record); + } else { + p.printSymbol(p.options.commonjs_module_ref); + p.print(".dynamicImport("); + const path = record.path; + p.printStringLiteralUTF8(path.pretty, false); + } if (!import_options.isMissing()) { - // since we previously stripped type, it is a breaking change to - // enable this for non-bun platforms p.printWhitespacer(ws(", ")); p.printExpr(import_options, .comma, .{}); } @@ -2356,8 +2361,6 @@ fn NewPrinter( p.printExpr(e.expr, .comma, ExprFlag.None()); if (!e.options.isMissing()) { - // since we previously stripped type, it is a breaking change to - // enable this for non-bun platforms p.printWhitespacer(ws(", ")); p.printExpr(e.options, .comma, .{}); } From 0a61a7212e32dfab3f8282e02d2f5b925b7ffa6a Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Mon, 3 Feb 2025 13:24:34 -0800 Subject: [PATCH 10/28] stuff --- src/bake/DevServer.zig | 98 +++++++++++++++++++-------- src/bun.js/api/server/StaticRoute.zig | 2 +- 2 files changed, 70 insertions(+), 30 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index a23666dad7cbcb..85284d525548c5 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -1185,7 +1185,7 @@ fn indexFailures(dev: *DevServer) !void { } } - for (dev.incremental_result.routes_affected.items) |entry| { + for (dev.incremental_result.framework_routes_affected.items) |entry| { if (dev.router.routePtr(entry.route_index).bundle.unwrap()) |index| { dev.routeBundlePtr(index).server_state = .possible_bundling_failures; } @@ -1491,6 +1491,8 @@ pub fn finalizeBundle( html.head_end_tag_index = RouteBundle.HTML.ByteOffset.init(compile_result.offsets.head_end_tag).toOptional(); html.body_end_tag_index = RouteBundle.HTML.ByteOffset.init(compile_result.offsets.body_end_tag).toOptional(); + + chunk.entry_point.entry_point_id = @intCast(route_bundle_index.get()); } var gts = try dev.initGraphTraceState(bv2.graph.allocator); @@ -1598,20 +1600,25 @@ pub fn finalizeBundle( // clear for those) if (will_hear_hot_update and current_bundle.had_reload_event and - dev.incremental_result.routes_affected.items.len > 0 and + (dev.incremental_result.framework_routes_affected.items.len + + dev.incremental_result.html_routes_affected.items.len) > 0 and dev.bundling_failures.count() == 0) { has_route_bits_set = true; // A bit-set is used to avoid duplicate entries. 
This is not a problem - // with `dev.incremental_result.routes_affected` - for (dev.incremental_result.routes_affected.items) |request| { + // with `dev.incremental_result.framework_routes_affected` + for (dev.incremental_result.framework_routes_affected.items) |request| { const route = dev.router.routePtr(request.route_index); if (route.bundle.unwrap()) |id| route_bits.set(id.get()); if (request.should_recurse_when_visiting) { markAllRouteChildren(&dev.router, 1, .{&route_bits}, request.route_index); } } + for (dev.incremental_result.html_routes_affected.items) |route_bundle_index| { + route_bits.set(route_bundle_index.get()); + route_bits_client.set(route_bundle_index.get()); + } // List 1 var it = route_bits.iterator(.{ .kind = .set }); @@ -1630,7 +1637,7 @@ pub fn finalizeBundle( if (dev.incremental_result.client_components_affected.items.len > 0) { has_route_bits_set = true; - dev.incremental_result.routes_affected.clearRetainingCapacity(); + dev.incremental_result.framework_routes_affected.clearRetainingCapacity(); gts.clear(); for (dev.incremental_result.client_components_affected.items) |index| { @@ -1639,7 +1646,7 @@ pub fn finalizeBundle( // A bit-set is used to avoid duplicate entries. This is not a problem // with `dev.incremental_result.routes_affected` - for (dev.incremental_result.routes_affected.items) |request| { + for (dev.incremental_result.framework_routes_affected.items) |request| { const route = dev.router.routePtr(request.route_index); if (route.bundle.unwrap()) |id| { route_bits.set(id.get()); @@ -1650,6 +1657,21 @@ pub fn finalizeBundle( } } + // Free old bundles + var it = route_bits_client.iterator(.{ .kind = .set }); + while (it.next()) |bundled_route_index| { + const bundle = &dev.route_bundles.items[bundled_route_index]; + if (bundle.client_bundle) |old| { + dev.allocator.free(old); + } + bundle.client_bundle = null; + } + } else if (dev.incremental_result.html_routes_affected.items.len > 0) { + // When only HTML routes were affected, there may not be any client + // components that got affected, but the bundles for these HTML routes + // are invalid now. That is why HTML routes above writes into + // `route_bits_client`. 
+ // Free old bundles var it = route_bits_client.iterator(.{ .kind = .set }); while (it.next()) |bundled_route_index| { @@ -1777,7 +1799,10 @@ pub fn finalizeBundle( } Output.prettyError("{s} in {d}ms", .{ - if (current_bundle.had_reload_event) "Reloaded" else "Bundled route", + if (current_bundle.had_reload_event) + "Reloaded" + else + "Bundled page", @divFloor(current_bundle.timer.read(), std.time.ns_per_ms), }); @@ -1785,19 +1810,19 @@ pub fn finalizeBundle( const file_name: ?[]const u8, const total_count: usize = if (current_bundle.had_reload_event) .{ null, 0 } else first_route_file_name: { - // TODO: - break :first_route_file_name .{ null, 0 }; - // const opaque_id = dev.router.routePtr( - // dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index) - // .route, - // ).file_page.unwrap() orelse - // break :first_route_file_name .{ null, 0 }; - // const server_index = fromOpaqueFileId(.server, opaque_id); - - // break :first_route_file_name .{ - // dev.relativePath(dev.server_graph.bundled_files.keys()[server_index.get()]), - // 0, - // }; + const route_bundle = dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index); + + const opaque_id = dev.router.routePtr( + + .route, + ).file_page.unwrap() orelse + break :first_route_file_name .{ null, 0 }; + const server_index = fromOpaqueFileId(.server, opaque_id); + + break :first_route_file_name .{ + dev.relativePath(dev.server_graph.bundled_files.keys()[server_index.get()]), + 0, + }; }; if (file_name) |name| { Output.prettyError(": {s}", .{name}); @@ -2644,8 +2669,8 @@ pub fn IncrementalGraph(side: bake.Side) type { // Follow this file to the route to mark it as stale. try g.traceDependencies(file_index, ctx.gts, .stop_at_boundary); } else { - // TODO: Follow this file to the HMR root (info to determine is currently not stored) - // without this, changing a client-only file will not mark the route's client bundle as stale + // Follow this file to the HTML route or HMR root to mark the client bundle as stale. 
+ try g.traceDependencies(file_index, ctx.gts, .stop_at_boundary); } } @@ -2785,19 +2810,29 @@ pub fn IncrementalGraph(side: bake.Side) type { Output.panic("Route not in lookup index: {d} {}", .{ file_index.get(), bun.fmt.quote(g.bundled_files.keys()[file_index.get()]) }); igLog("\\<- Route", .{}); - try dev.incremental_result.routes_affected.append(dev.allocator, route_index); + try dev.incremental_result.framework_routes_affected.append(dev.allocator, route_index); } if (file.is_client_component_boundary) { try dev.incremental_result.client_components_affected.append(dev.allocator, file_index); } }, .client => { + const dev = g.owner(); if (file.flags.is_hmr_root or (file.flags.kind == .css and trace_kind == .css_to_route)) { - const dev = g.owner(); const key = g.bundled_files.keys()[file_index.get()]; const index = dev.server_graph.getFileIndex(key) orelse Output.panic("Server Incremental Graph is missing component for {}", .{bun.fmt.quote(key)}); try dev.server_graph.traceDependencies(index, gts, trace_kind); + } else if (file.flags.is_html_route) { + const route_bundle_index = dev.html_route_lookup.get(file_index) orelse { + Output.panic("HTML route not in lookup index: {d} {}", .{ + file_index.get(), + bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), + }); + }; + try dev.incremental_result.html_routes_affected.append(dev.allocator, route_bundle_index); + if (trace_kind == .stop_at_boundary) + return; } }, } @@ -3291,10 +3326,13 @@ pub fn IncrementalGraph(side: bake.Side) type { const IncrementalResult = struct { /// When tracing a file's dependencies via `traceDependencies`, this is - /// populated with the hit `Route.Index`s. To know what `RouteBundle`s - /// are affected, the route graph must be traced downwards. + /// populated with the hit `Route.Index`s. To know what framework + /// `RouteBundle`s are affected, the route graph must be traced downwards. /// Tracing is used for multiple purposes. - routes_affected: ArrayListUnmanaged(RouteIndexAndRecurseFlag), + framework_routes_affected: ArrayListUnmanaged(RouteIndexAndRecurseFlag), + /// HTML routes have slight different anatomy than framework ones, and + /// get a separate list. + html_routes_affected: ArrayListUnmanaged(RouteBundle.Index), /// Set to true if any IncrementalGraph edges were added or removed. had_adjusted_edges: bool, @@ -3329,7 +3367,8 @@ const IncrementalResult = struct { delete_client_files_later: ArrayListUnmanaged(IncrementalGraph(.client).FileIndex), const empty: IncrementalResult = .{ - .routes_affected = .{}, + .framework_routes_affected = .{}, + .html_routes_affected = .{}, .had_adjusted_edges = false, .failures_removed = .{}, .failures_added = .{}, @@ -3340,7 +3379,8 @@ const IncrementalResult = struct { }; fn reset(result: *IncrementalResult) void { - result.routes_affected.clearRetainingCapacity(); + result.framework_routes_affected.clearRetainingCapacity(); + result.html_routes_affected.clearRetainingCapacity(); assert(result.failures_removed.items.len == 0); result.failures_added.clearRetainingCapacity(); result.client_components_added.clearRetainingCapacity(); diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index 83c20e834f5cf9..b81794bea7c144 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -1,7 +1,7 @@ //! A static route serving a response object's blob. 
const StaticRoute = @This(); -server: ?AnyServer = null, +`: ?AnyServer = null, status_code: u16, blob: AnyBlob, cached_blob_size: u64 = 0, From 4a50a0ba59ede754c38b48f6376908d34c8392ba Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Mon, 3 Feb 2025 13:58:55 -0800 Subject: [PATCH 11/28] merge main --- src/bake/DevServer.zig | 45 ++++++++++++++------------- src/bun.js/api/server.zig | 26 +++++++--------- src/bun.js/api/server/StaticRoute.zig | 2 +- src/bun.zig | 2 +- src/js_parser.zig | 2 +- 5 files changed, 38 insertions(+), 39 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 3fee48ddcc2541..98193a30cb02c5 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -544,11 +544,11 @@ fn scanInitialRoutes(dev: *DevServer) !void { try dev.client_graph.ensureStaleBitCapacity(true); } -pub fn attachRoutes(dev: *DevServer, server: anytype) !void { +/// Returns true if a catch-all handler was attached. +pub fn attachRoutes(dev: *DevServer, server: anytype) !bool { dev.server = bun.JSC.API.AnyServer.from(server); const app = server.app.?; - const Server = @typeInfo(@TypeOf(server)).Pointer.child; - const is_ssl = @typeInfo(@TypeOf(app)).Pointer.child.is_ssl; + const is_ssl = @typeInfo(@TypeOf(app)).pointer.child.is_ssl; app.get(client_prefix ++ "/:route", *DevServer, dev, wrapGenericRequestHandler(onJsRequest, is_ssl)); app.get(asset_prefix ++ "/:asset", *DevServer, dev, wrapGenericRequestHandler(onAssetRequest, is_ssl)); @@ -575,8 +575,9 @@ pub fn attachRoutes(dev: *DevServer, server: anytype) !void { // types. Otherwise, this can just be Bun.serve's default handler. if (dev.framework.file_system_router_types.len > 0) { app.any("/*", *DevServer, dev, wrapGenericRequestHandler(onRequest, is_ssl)); + return true; } else { - app.any("/*", *Server, server, Server.onRequest); + return false; } } @@ -656,7 +657,7 @@ inline fn wrapGenericRequestHandler( req: *Request, resp: *uws.NewApp(is_ssl).Response, ) void { - const fn_info = @typeInfo(@TypeOf(handler)).Fn; + const fn_info = @typeInfo(@TypeOf(handler)).@"fn"; assert(fn_info.params.len == 3); const uses_any_response = if (fn_info.params[2].type) |t| t == AnyResponse else false; return struct { @@ -1807,23 +1808,23 @@ pub fn finalizeBundle( }); // Compute a file name to display - const file_name: ?[]const u8, const total_count: usize = if (current_bundle.had_reload_event) - .{ null, 0 } - else first_route_file_name: { - const route_bundle = dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index); - - const opaque_id = dev.router.routePtr( - - .route, - ).file_page.unwrap() orelse - break :first_route_file_name .{ null, 0 }; - const server_index = fromOpaqueFileId(.server, opaque_id); - - break :first_route_file_name .{ - dev.relativePath(dev.server_graph.bundled_files.keys()[server_index.get()]), - 0, - }; + const file_name: ?[]const u8 = if (current_bundle.had_reload_event) + dev.relativePath( + bv2.graph.input_files.items(.source)[bv2.graph.entry_points.items[0].get()].path.text, + ) + else switch (dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index).data) { + .html => |html| dev.relativePath(html.html_bundle.html_bundle.path), + .framework => |fw| file_name: { + const route = dev.router.routePtr(fw.route_index); + const opaque_id = route.file_page.unwrap() orelse + route.file_layout.unwrap() orelse + break :file_name null; + const server_index = fromOpaqueFileId(.server, opaque_id); + const abs_path = dev.server_graph.bundled_files.keys()[server_index.get()]; + 
break :file_name dev.relativePath(abs_path);
+        },
     };
+    const total_count = bv2.graph.entry_points.items.len;
     if (file_name) |name| {
         Output.prettyError(": {s}", .{name});
         if (total_count > 1) {
@@ -2006,7 +2007,7 @@ fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void {
 
     switch (dev.server.?) {
         inline else => |s| {
-            if (@typeInfo(@TypeOf(s.app.?)).Pointer.child.Response != @typeInfo(@TypeOf(resp)).Pointer.child) {
+            if (@typeInfo(@TypeOf(s.app.?)).pointer.child.Response != @typeInfo(@TypeOf(resp)).pointer.child) {
                 unreachable; // mismatch between `is_ssl` with server and response types. optimize these checks out.
             }
             if (s.config.onRequest != .zero) {
diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig
index 2b804429fbc9c0..35c6d665bfc953 100644
--- a/src/bun.js/api/server.zig
+++ b/src/bun.js/api/server.zig
@@ -7488,25 +7488,23 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
             app.get("/src:/*", *ThisServer, this, onSrcRequest);
         }
 
+        var has_dev_catch_all = false;
         if (this.dev_server) |dev| {
-            dev.attachRoutes(this) catch bun.outOfMemory();
-        } else {
-            const @"has /*" = brk: {
-                for (this.config.static_routes.items) |route| {
-                    if (strings.eqlComptime(route.path, "/*")) {
-                        break :brk true;
-                    }
-                }
-
-                break :brk false;
-            };
+            has_dev_catch_all = dev.attachRoutes(this) catch bun.outOfMemory();
+        }
+        if (!has_dev_catch_all) {
+            const has_html_catch_all = for (this.config.static_routes.items) |route| {
+                if (strings.eqlComptime(route.path, "/*"))
+                    break true;
+            } else false;
 
-            // "/*" routes are added backwards, so if they have a static route, it will never be matched
-            // so we need to check for that first
-            if (!@"has /*") {
+            // "/*" routes are added backwards, so if they have a static route,
+            // it will never be matched so we need to check for that first
+            if (!has_html_catch_all) {
                 bun.assert(this.config.onRequest != .zero);
                 app.any("/*", *ThisServer, this, onRequest);
             } else if (this.config.onRequest != .zero) {
+                // The HTML catch-all receives GET, HEAD, and OPTIONS
                 app.post("/*", *ThisServer, this, onRequest);
                 app.put("/*", *ThisServer, this, onRequest);
                 app.patch("/*", *ThisServer, this, onRequest);
diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig
index 99b0aba8649eea..5a3fcea178ad9d 100644
--- a/src/bun.js/api/server/StaticRoute.zig
+++ b/src/bun.js/api/server/StaticRoute.zig
@@ -1,7 +1,7 @@
 //! A static route serving a response object's blob.
const StaticRoute = @This(); -`: ?AnyServer = null, +server: ?AnyServer = null, status_code: u16, blob: AnyBlob, cached_blob_size: u64 = 0, diff --git a/src/bun.zig b/src/bun.zig index 4284994883a740..f80f34bbee597b 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -4392,7 +4392,7 @@ pub fn CowSlice(T: type) type { return struct { ptr: [*]const T, flags: packed struct(usize) { - len: @Type(.{ .Int = .{ + len: @Type(.{ .int = .{ .bits = @bitSizeOf(usize) - 1, .signedness = .unsigned, } }), diff --git a/src/js_parser.zig b/src/js_parser.zig index 0072c55efb025d..b3ecfd352107e1 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -24310,7 +24310,7 @@ pub const ConvertESMExportsForHmr = struct { const symbol = &p.symbols.items[namespace_ref.innerIndex()]; symbol.use_count_estimate = 0; symbol.link = stmt.namespace_ref; - if (@hasField(@typeInfo(@TypeOf(p)).Pointer.child, "symbol_uses")) { + if (@hasField(@typeInfo(@TypeOf(p)).pointer.child, "symbol_uses")) { _ = p.symbol_uses.swapRemove(namespace_ref); } } From d7a32ef3932455772dffdc8de84da5b464e9c718 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Mon, 3 Feb 2025 15:38:53 -0800 Subject: [PATCH 12/28] edit html file -> reload --- src/bake/DevServer.zig | 163 +++++++++++++++----------- src/bake/client/reader.ts | 4 +- src/bake/client/route.ts | 6 - src/bake/client/websocket.ts | 2 +- src/bake/hmr-runtime-client.ts | 9 +- src/bake/hmr-runtime-error.ts | 27 ++--- src/bun.js/api/server.zig | 8 +- src/bun.js/api/server/HTMLBundle.zig | 10 +- src/bun.js/api/server/StaticRoute.zig | 3 +- src/bundler/bundle_v2.zig | 7 +- src/deps/uws.zig | 6 +- 11 files changed, 135 insertions(+), 110 deletions(-) delete mode 100644 src/bake/client/route.ts diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 98193a30cb02c5..44786c6e9b1a6f 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -65,8 +65,13 @@ route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.server).FileIndex, Rou /// Quickly retrieve an HTML route's index from its incremental graph index. // TODO: store this in IncrementalGraph(.client).File instead of this hash map. html_route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.client).FileIndex, RouteBundle.Index), +/// This acts as a duplicate of the lookup table in uws, but only for HTML routes +/// Used to identify what route a connected WebSocket is on, so that only +/// the active pages are notified of a hot updates. +html_router: HTMLRouter, /// CSS files are accessible via `/_bun/css/.css` /// Value is bundled code owned by `dev.allocator` +// TODO: StaticRoute css_files: AutoArrayHashMapUnmanaged(u64, []const u8), /// JS files are accessible via `/_bun/client/route..js` /// These are randomly generated to avoid possible browser caching of old assets. @@ -165,13 +170,11 @@ pub const RouteBundle = struct { /// HTMLBundle provided route html: HTML, }, - /// Used to communicate over WebSocket the pattern. The HMR client contains code - /// to match this against the URL bar to determine if a reloaded route applies. - full_pattern: bun.CowString, /// Generated lazily when the client JS is requested (HTTP GET /_bun/client/*.js), /// which is only needed when a hard-reload is performed. /// /// Freed when a client module updates. + // TODO: ?*StaticRoute client_bundle: ?[]const u8, /// Reference count of how many HmrSockets say they are on this route. 
This @@ -249,11 +252,7 @@ pub const RouteBundle = struct { loaded, }; - /// Used as the input to some functions which may already have a - /// RouteBundle.Index, but also lookup an entry or init a new one. - pub const MaybeIndex = union(enum) { - /// Already inserted. This prevents an extra loopback to lookup. - resolved: RouteBundle.Index, + pub const UnresolvedIndex = union(enum) { /// FrameworkRouter provides a fullstack server-side route framework: FrameworkRouter.Route.Index, /// HTMLBundle provides a frontend-only route, SPA-style @@ -285,11 +284,11 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .root = options.root, .vm = options.vm, .server = null, - .directory_watchers = DirectoryWatchStore.empty, + .directory_watchers = .empty, .server_fetch_function_callback = .{}, .server_register_update_callback = .{}, .generation = 0, - .graph_safety_lock = bun.DebugThreadLock.unlocked, + .graph_safety_lock = .unlocked, .dump_dir = dump_dir, .framework = options.framework, .bundler_options = options.bundler_options, @@ -297,21 +296,22 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .has_pre_crash_handler = bun.FeatureFlags.bake_debugging_features and options.dump_state_on_crash orelse bun.getRuntimeFeatureFlag("BUN_DUMP_STATE_ON_CRASH"), - .css_files = .{}, - .route_js_payloads = .{}, + .css_files = .empty, + .route_js_payloads = .empty, .frontend_only = options.frontend_only, - .client_graph = IncrementalGraph(.client).empty, - .server_graph = IncrementalGraph(.server).empty, - .incremental_result = IncrementalResult.empty, - .route_lookup = .{}, - .route_bundles = .{}, - .html_route_lookup = .{}, + .client_graph = .empty, + .server_graph = .empty, + .incremental_result = .empty, + .route_lookup = .empty, + .route_bundles = .empty, + .html_route_lookup = .empty, + .html_router = .empty, .current_bundle = null, - .current_bundle_requests = .{}, + .current_bundle_requests = .empty, .next_bundle = .{ - .route_queue = .{}, + .route_queue = .empty, .reload_event = null, - .requests = .{}, + .requests = .empty, }, .log = bun.logger.Log.init(allocator), @@ -606,7 +606,7 @@ fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { }; if (maybe_route.unwrap()) |route| { - dev.ensureRouteIsBundled(.{ .resolved = route }, .js_payload, req, resp) catch bun.outOfMemory(); + dev.ensureRouteIsBundled(route, .js_payload, req, resp) catch bun.outOfMemory(); } else { @panic("TODO: generate client bundle with no source files"); } @@ -682,13 +682,11 @@ fn onIncrementalVisualizerCorked(resp: anytype) void { fn ensureRouteIsBundled( dev: *DevServer, - maybe_index: RouteBundle.MaybeIndex, + route_bundle_index: RouteBundle.Index, kind: DeferredRequest.Data.Tag, req: *Request, resp: AnyResponse, ) bun.OOM!void { - const route_bundle_index = try dev.getOrPutRouteBundle(maybe_index); - // TODO: Zig 0.14 gets labelled continue: // - Remove the `while` // - Move the code after this switch into `.loaded =>` @@ -710,7 +708,7 @@ fn ensureRouteIsBundled( break :brk @unionInit(DeferredRequest.Data, @tagName(tag), resp); }, .server_handler => brk: { - assert(maybe_index == .framework); + assert(dev.routeBundlePtr(route_bundle_index).data == .framework); break :brk .{ .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) .save(dev.vm.global, req, resp.TCP), @@ -1705,8 +1703,6 @@ pub fn finalizeBundle( } if (route_bundle.active_viewers == 0 or !will_hear_hot_update) continue; try w.writeInt(i32, @intCast(i), .little); - try w.writeInt(u32, 
@intCast(route_bundle.full_pattern.flags.len), .little); - try w.writeAll(route_bundle.full_pattern.slice()); // If no edges were changed, then it is impossible to // change the list of CSS files. @@ -1997,7 +1993,7 @@ fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void { var params: FrameworkRouter.MatchedParams = undefined; if (dev.router.matchSlow(req.url(), ¶ms)) |route_index| { dev.ensureRouteIsBundled( - .{ .framework = route_index }, + dev.getOrPutRouteBundle(.{ .framework = route_index }) catch bun.outOfMemory(), .server_handler, req, AnyResponse.init(resp), @@ -2020,15 +2016,12 @@ fn onRequest(dev: *DevServer, req: *Request, resp: anytype) void { sendBuiltInNotFound(resp); } -pub fn respondForHTMLBundle(dev: *DevServer, html: *HTMLBundle.HTMLBundleRoute, req: *uws.Request, resp: AnyResponse) void { - dev.ensureRouteIsBundled(.{ .html = html }, .bundled_html_page, req, resp) catch bun.outOfMemory(); +pub fn respondForHTMLBundle(dev: *DevServer, html: *HTMLBundle.HTMLBundleRoute, req: *uws.Request, resp: AnyResponse) !void { + try dev.ensureRouteIsBundled(try dev.getOrPutRouteBundle(.{ .html = html }), .bundled_html_page, req, resp); } -fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.MaybeIndex) !RouteBundle.Index { +fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.UnresolvedIndex) !RouteBundle.Index { const index_location: *RouteBundle.Index.Optional = switch (route) { - // Already inserted, return. - .resolved => |idx| return idx, - .framework => |route_index| &dev.router.routePtr(route_index).bundle, .html => |html| &html.dev_server_id, }; @@ -2036,34 +2029,14 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.MaybeIndex) !RouteBun return bundle_index; } - const full_pattern = switch (route) { - .resolved => unreachable, // returned already - .framework => |index| full_pattern: { - var buf = bake.PatternBuffer.empty; - var current: *Route = dev.router.routePtr(index); - // This loop is done to avoid prepending `/` at the root - // if there is more than one component. 
- buf.prependPart(current.part); - if (current.parent.unwrap()) |first| { - current = dev.router.routePtr(first); - while (current.parent.unwrap()) |next| { - buf.prependPart(current.part); - current = dev.router.routePtr(next); - } - } - break :full_pattern try bun.CowString.initDupe(buf.slice(), dev.allocator); - }, - .html => |html_bundle| bun.CowString.initNeverFree(html_bundle.pattern), - }; - errdefer full_pattern.deinit(dev.allocator); - dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); + const bundle_index = RouteBundle.Index.init(@intCast(dev.route_bundles.items.len)); + try dev.route_bundles.ensureUnusedCapacity(dev.allocator, 1); dev.route_bundles.appendAssumeCapacity(.{ .data = switch (route) { - .resolved => unreachable, // returned already .framework => |route_index| .{ .framework = .{ .route_index = route_index, .evaluate_failure = null, @@ -2076,7 +2049,7 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.MaybeIndex) !RouteBun try dev.html_route_lookup.put( dev.allocator, incremental_graph_index, - RouteBundle.Index.init(@intCast(dev.route_bundles.items.len)), + bundle_index, ); break :brk .{ .html = .{ .html_bundle = html, @@ -2090,15 +2063,18 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.MaybeIndex) !RouteBun }, }, .server_state = .unqueued, - .full_pattern = full_pattern, .client_bundle = null, .active_viewers = 0, }); - const bundle_index = RouteBundle.Index.init(@intCast(dev.route_bundles.items.len - 1)); index_location.* = bundle_index.toOptional(); return bundle_index; } +fn registerCatchAllHtmlRoute(dev: *DevServer, html: *HTMLBundle.HTMLBundleRoute) !void { + const bundle_index = try getOrPutRouteBundle(dev, .{ .html = html }); + dev.html_router.fallback = bundle_index.toOptional(); +} + fn sendTextFile(code: []const u8, content_type: []const u8, any_resp: AnyResponse) void { switch (any_resp) { inline else => |resp| { @@ -4117,6 +4093,9 @@ pub const MessageId = enum(u8) { /// - `u32`: File index of the dependency file /// - `u32`: File index of the imported file visualizer = 'v', + /// Sent in response to `set_url`. + /// - `u32`: Route index + set_url_response = 'n', pub inline fn char(id: MessageId) u8 { return @intFromEnum(id); @@ -4189,7 +4168,7 @@ const HmrSocket = struct { /// By telling DevServer the active route, this enables receiving detailed /// `hot_update` events for when the route is updated. 
active_route: RouteBundle.Index.Optional,
-    /// Files which the client definitely has and should not be re-sent
+
     pub fn onOpen(s: *HmrSocket, ws: AnyWebSocket) void {
         _ = ws.send(&(.{MessageId.version.char()} ++ s.dev.configuration_hash_key), .binary, false, true);
     }
@@ -4242,15 +4221,16 @@ const HmrSocket = struct {
         },
         .set_url => {
             const pattern = msg[1..];
-            var params: FrameworkRouter.MatchedParams = undefined;
-            if (s.dev.router.matchSlow(pattern, &params)) |route_index| {
-                const rbi = s.dev.getOrPutRouteBundle(.{ .framework = route_index }) catch bun.outOfMemory();
-                if (s.active_route.unwrap()) |old| {
-                    if (old == rbi) return;
-                    s.dev.routeBundlePtr(old).active_viewers -= 1;
-                }
-                s.dev.routeBundlePtr(rbi).active_viewers += 1;
+            const rbi = s.dev.routeToBundleIndexSlow(pattern) orelse
+                return;
+            if (s.active_route.unwrap()) |old| {
+                if (old == rbi) return;
+                s.dev.routeBundlePtr(old).active_viewers -= 1;
             }
+            s.dev.routeBundlePtr(rbi).active_viewers += 1;
+            s.active_route = rbi.toOptional();
+            var response: [5]u8 = .{MessageId.set_url_response.char()} ++ std.mem.toBytes(rbi.get());
+            _ = ws.send(&response, .binary, false, true);
         },
         else => ws.close(),
     }
@@ -4273,6 +4253,17 @@ const HmrSocket = struct {
     }
 };
 
+pub fn routeToBundleIndexSlow(dev: *DevServer, pattern: []const u8) ?RouteBundle.Index {
+    var params: FrameworkRouter.MatchedParams = undefined;
+    if (dev.router.matchSlow(pattern, &params)) |route_index| {
+        return dev.getOrPutRouteBundle(.{ .framework = route_index }) catch bun.outOfMemory();
+    }
+    if (dev.html_router.get(pattern)) |html| {
+        return dev.getOrPutRouteBundle(.{ .html = html }) catch bun.outOfMemory();
+    }
+    return null;
+}
+
 const c = struct {
     // BakeSourceProvider.cpp
     extern fn BakeGetDefaultExportFromModule(global: *JSC.JSGlobalObject, module: JSValue) JSValue;
@@ -4892,6 +4883,40 @@ pub const EntryPointList = struct {
     }
 };
 
+/// This structure does not increment the reference count of its contents, as
+/// their lifetimes are all tied to the underlying Bun.serve instance.
+const HTMLRouter = struct {
+    map: bun.StringHashMapUnmanaged(*HTMLBundle.HTMLBundleRoute),
+    /// If a catch-all route exists, it is not stored in map, but here.
+ fallback: ?*HTMLBundle.HTMLBundleRoute, + + pub const empty: HTMLRouter = .{ + .map = .empty, + .fallback = null, + }; + + pub fn get(router: *HTMLRouter, path: []const u8) ?*HTMLBundle.HTMLBundleRoute { + return router.map.get(path) orelse router.fallback; + } + + pub fn put(router: *HTMLRouter, alloc: Allocator, path: []const u8, route: *HTMLBundle.HTMLBundleRoute) !void { + if (bun.strings.eqlComptime(path, "/*")) { + router.fallback = route; + } else { + try router.map.put(alloc, path, route); + } + } + + pub fn clear(router: *HTMLRouter) void { + router.map.clearRetainingCapacity(); + router.fallback = null; + } + + pub fn deinit(router: *HTMLRouter, alloc: Allocator) void { + router.map.deinit(alloc); + } +}; + const std = @import("std"); const Allocator = std.mem.Allocator; const Mutex = bun.Mutex; diff --git a/src/bake/client/reader.ts b/src/bake/client/reader.ts index 6da8d2ba6cb95e..66ee73ee369b24 100644 --- a/src/bake/client/reader.ts +++ b/src/bake/client/reader.ts @@ -1,10 +1,10 @@ import { td } from "../shared"; export class DataViewReader { - view: DataView; + view: DataView; cursor: number; - constructor(view: DataView, cursor: number = 0) { + constructor(view: DataView, cursor: number = 0) { this.view = view; this.cursor = cursor; } diff --git a/src/bake/client/route.ts b/src/bake/client/route.ts deleted file mode 100644 index 275555cb94ce35..00000000000000 --- a/src/bake/client/route.ts +++ /dev/null @@ -1,6 +0,0 @@ -export function routeMatch(routeId: number, routePattern: string) { - console.log(`routeMatch(${routeId}, ${routePattern})`); - // TODO: pattern parsing - // TODO: use routeId to cache the current route to avoid reparsing text we dont care about - return routePattern === location.pathname; -} diff --git a/src/bake/client/websocket.ts b/src/bake/client/websocket.ts index e913b39b5444bd..462cc9c678764f 100644 --- a/src/bake/client/websocket.ts +++ b/src/bake/client/websocket.ts @@ -45,7 +45,7 @@ interface WebSocketWrapper { } export function initWebSocket( - handlers: Record void>, + handlers: Record, ws: WebSocket) => void>, url: string = "/_bun/hmr", ): WebSocketWrapper { let firstConnection = true; diff --git a/src/bake/hmr-runtime-client.ts b/src/bake/hmr-runtime-client.ts index d8283bea1648ce..cbecaab3c477f9 100644 --- a/src/bake/hmr-runtime-client.ts +++ b/src/bake/hmr-runtime-client.ts @@ -4,7 +4,6 @@ import { loadModule, LoadModuleType, onServerSideReload, replaceModules } from " import { hasFatalError, onErrorMessage, onRuntimeError, RuntimeErrorType } from "./client/overlay"; import { Bake } from "bun"; import { DataViewReader } from "./client/reader"; -import { routeMatch } from "./client/route"; import { initWebSocket } from "./client/websocket"; import { MessageId } from "./generated"; import { editCssContent, editCssArray } from "./client/css-reloader"; @@ -16,6 +15,7 @@ if (typeof IS_BUN_DEVELOPMENT !== "boolean") { let isPerformingRouteReload = false; let shouldPerformAnotherRouteReload = false; +let currentRouteIndex: number = -1; async function performRouteReload() { console.info("[Bun] Server-side code changed, reloading!"); @@ -89,8 +89,7 @@ const ws = initWebSocket({ do { const routeId = reader.i32(); if (routeId === -1 || routeId == undefined) break; - const routePattern = reader.string32(); - if (routeMatch(routeId, routePattern)) { + if (routeId === currentRouteIndex) { isServerSideRouteUpdate = serverSideRoutesUpdated.has(routeId); const cssCount = reader.i32(); if (cssCount !== -1) { @@ -137,6 +136,10 @@ const ws = initWebSocket({ 
performRouteReload(); } }, + [MessageId.set_url_response](view) { + const reader = new DataViewReader(view, 1); + currentRouteIndex = reader.u32(); + }, [MessageId.errors]: onErrorMessage, }); diff --git a/src/bake/hmr-runtime-error.ts b/src/bake/hmr-runtime-error.ts index 433f70b8c78ef0..3058275caeb076 100644 --- a/src/bake/hmr-runtime-error.ts +++ b/src/bake/hmr-runtime-error.ts @@ -8,12 +8,11 @@ // stopped by the fact this script runs synchronously. import { decodeAndAppendError, onErrorMessage, updateErrorOverlay } from "./client/overlay"; import { DataViewReader } from "./client/reader"; -import { routeMatch } from "./client/route"; import { initWebSocket } from "./client/websocket"; import { MessageId } from "./generated"; /** Injected by DevServer */ -declare const error: Uint8Array; +declare const error: Uint8Array; { const reader = new DataViewReader(new DataView(error.buffer), 0); @@ -24,6 +23,7 @@ declare const error: Uint8Array; } let firstVersionPacket = true; +let currentRouteIndex = -1; initWebSocket({ [MessageId.version](dv) { @@ -39,22 +39,21 @@ initWebSocket({ [MessageId.errors]: onErrorMessage, - [MessageId.route_update](view) { + [MessageId.hot_update](view) { const reader = new DataViewReader(view, 1); - let routeCount = reader.u32(); - - while (routeCount > 0) { - routeCount -= 1; - const routeId = reader.u32(); - const routePattern = reader.stringWithLength(reader.u16()); - if (routeMatch(routeId, routePattern)) { + const serverSideRoutesUpdated = new Set(); + do { + const routeId = reader.i32(); + if (routeId === -1 || routeId == undefined) break; + if (routeId === currentRouteIndex) { location.reload(); break; } - } + } while (true); }, - // [MessageId.errors_cleared]() { - // location.reload(); - // }, + [MessageId.set_url_response](view) { + const reader = new DataViewReader(view, 1); + currentRouteIndex = reader.u32(); + }, }); diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 35c6d665bfc953..16045b2eaa4c30 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -383,7 +383,8 @@ pub const ServerConfig = struct { app.any(path, T, entry, handler_wrap.handler); } - pub fn applyStaticRoutes(this: *ServerConfig, comptime ssl: bool, server: AnyServer, app: *uws.NewApp(ssl)) void { + pub fn applyStaticRoutes(this: *ServerConfig, comptime ssl: bool, server: AnyServer, app: *uws.NewApp(ssl)) !void { + const dev_server = server.devServer(); for (this.static_routes.items) |*entry| { switch (entry.route) { .StaticRoute => |static_route| { @@ -391,6 +392,9 @@ pub const ServerConfig = struct { }, .HTMLBundleRoute => |html_bundle_route| { applyStaticRoute(server, ssl, app, *HTMLBundleRoute, html_bundle_route, entry.path); + if (dev_server) |dev| { + try dev.html_router.put(dev.allocator, entry.path, html_bundle_route); + } }, } } @@ -7463,7 +7467,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp ssl_enabled, AnyServer.from(this), app, - ); + ) catch bun.outOfMemory(); } if (this.config.websocket) |*websocket| { diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index ae750bc8966d13..390980e52462f1 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -69,9 +69,6 @@ pub const HTMLBundleRoute = struct { value: Value = .pending_plugins, /// Written and read by DevServer to identify if this route has been registered with the bundler. 
dev_server_id: bun.bake.DevServer.RouteBundle.Index.Optional = .none, - /// Used by DevServer - // TODO: design flaw: HTMLBundleRoute can be present at multiple paths - pattern: []const u8, pub fn memoryCost(this: *const HTMLBundleRoute) usize { var cost: usize = 0; @@ -89,7 +86,6 @@ pub const HTMLBundleRoute = struct { .ref_count = 1, .server = null, .value = .pending_plugins, - .pattern = "/", // TODO: design flaw: HTMLBundleRoute can be present at multiple paths }); } @@ -158,7 +154,7 @@ pub const HTMLBundleRoute = struct { if (server.config().development) { if (server.devServer()) |dev| { - dev.respondForHTMLBundle(this, req, resp); + dev.respondForHTMLBundle(this, req, resp) catch bun.outOfMemory(); return; } @@ -533,9 +529,9 @@ pub const HTMLBundleRoute = struct { server: ?AnyServer = null, route: *HTMLBundleRoute, - pub usingnamespace bun.NewRefCounted(@This(), __deinit, null); + pub usingnamespace bun.NewRefCounted(@This(), destroyInternal, null); - fn __deinit(this: *PendingResponse) void { + fn destroyInternal(this: *PendingResponse) void { if (this.is_response_pending) { this.resp.clearAborted(); this.resp.clearOnWritable(); diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index 5a3fcea178ad9d..ef45d940e84002 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -1,4 +1,5 @@ -//! A static route serving a response object's blob. +//! StaticRoute stores and serves a static blob. This can be created out of a JS +//! Response object, or from globally allocated bytes. const StaticRoute = @This(); server: ?AnyServer = null, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index b5d4208b07720f..c89c134490d686 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -4551,7 +4551,6 @@ pub const ParseTask = struct { transpiler.options.react_fast_refresh and loader.isJSX() and !source.path.isNodeModule(); - std.debug.print("{s} - {}\n", .{ source.path.text, opts.features.react_fast_refresh }); opts.features.server_components = if (transpiler.options.server_components) switch (target) { .browser => .client_side, @@ -10224,6 +10223,10 @@ pub const LinkerContext = struct { } }; + // HTML bundles for Bake must be globally allocated, as it must outlive + // the bundle task. 
See `DevServer.RouteBundle.HTML.bundled_html_text` + const output_allocator = if (c.dev_server != null) bun.default_allocator else worker.allocator; + var html_loader: HTMLLoader = .{ .linker = c, .source_index = chunk.entry_point.source_index, @@ -10233,7 +10236,7 @@ pub const LinkerContext = struct { .minify_whitespace = c.options.minify_whitespace, .chunk = chunk, .chunks = chunks, - .output = std.ArrayList(u8).init(worker.allocator), + .output = std.ArrayList(u8).init(output_allocator), .current_import_record_index = 0, }; diff --git a/src/deps/uws.zig b/src/deps/uws.zig index cc23f259483c25..a64dc65f52a950 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -3039,7 +3039,7 @@ pub const WebSocketBehavior = extern struct { pub fn onMessage(raw_ws: *RawWebSocket, message: [*c]const u8, length: usize, opcode: Opcode) callconv(.C) void { const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; - @call(.always_inline, Type.onMessage, .{ + @call(bun.callmod_inline, Type.onMessage, .{ this, ws, if (length > 0) message[0..length] else "", @@ -3079,7 +3079,7 @@ pub const WebSocketBehavior = extern struct { pub fn onClose(raw_ws: *RawWebSocket, code: i32, message: [*c]const u8, length: usize) callconv(.C) void { const ws = @unionInit(AnyWebSocket, active_field_name, @as(*WebSocket, @ptrCast(raw_ws))); const this = ws.as(Type).?; - @call(.always_inline, Type.onClose, .{ + @call(bun.callmod_inline, Type.onClose, .{ this, ws, code, @@ -3088,7 +3088,7 @@ pub const WebSocketBehavior = extern struct { } pub fn onUpgrade(ptr: *anyopaque, res: *uws_res, req: *Request, context: *uws_socket_context_t, id: usize) callconv(.C) void { - @call(.always_inline, Server.onWebSocketUpgrade, .{ + @call(bun.callmod_inline, Server.onWebSocketUpgrade, .{ bun.cast(*Server, ptr), @as(*NewApp(is_ssl).Response, @ptrCast(res)), req, From f50928d9cc53ebd5f3dfcf4f261270975d0c5ed7 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Mon, 3 Feb 2025 19:59:37 -0800 Subject: [PATCH 13/28] assets --- src/bake/DevServer.zig | 161 +++++++++++++++++++++++--- src/bake/hmr-module.ts | 4 +- src/bun.js/api/server.zig | 2 +- src/bun.js/api/server/StaticRoute.zig | 63 ++++++---- src/bun.js/webcore/blob.zig | 2 +- src/bun.zig | 2 + src/bundler/bundle_v2.zig | 90 +++++++++++--- src/http/mime_type.zig | 5 + src/js_parser.zig | 24 ++-- 9 files changed, 289 insertions(+), 64 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 44786c6e9b1a6f..96fc357ee56201 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -76,8 +76,8 @@ css_files: AutoArrayHashMapUnmanaged(u64, []const u8), /// JS files are accessible via `/_bun/client/route..js` /// These are randomly generated to avoid possible browser caching of old assets. route_js_payloads: AutoArrayHashMapUnmanaged(u64, RouteBundle.Index.Optional), -// /// Assets are accessible via `/_bun/asset/` -// assets: bun.StringArrayHashMapUnmanaged(u64, Asset), +/// Assets are accessible via `/_bun/asset/` +assets: Assets, /// All bundling failures are stored until a file is saved and rebuilt. /// They are stored in the wire format the HMR runtime expects so that /// serialization only happens once. 
@@ -313,7 +313,12 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .reload_event = null, .requests = .empty, }, - .log = bun.logger.Log.init(allocator), + .assets = .{ + .path_map = .empty, + .files = .empty, + .refs = .empty, + }, + .log = .init(allocator), .server_bundler = undefined, .client_bundler = undefined, @@ -352,10 +357,10 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { dev.framework.initBundler(allocator, &dev.log, .development, .server, &dev.server_bundler) catch |err| return global.throwError(err, generic_action); - dev.client_bundler.options.dev_server = dev; + dev.server_bundler.options.dev_server = dev; dev.framework.initBundler(allocator, &dev.log, .development, .client, &dev.client_bundler) catch |err| return global.throwError(err, generic_action); - dev.server_bundler.options.dev_server = dev; + dev.client_bundler.options.dev_server = dev; if (separate_ssr_graph) { dev.framework.initBundler(allocator, &dev.log, .development, .ssr, &dev.ssr_bundler) catch |err| return global.throwError(err, generic_action); @@ -612,16 +617,20 @@ fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { } } -fn onAssetRequest(dev: *DevServer, req: *Request, resp: anytype) void { - _ = dev; - _ = req; - _ = resp; - bun.todoPanic(@src(), "serve asset file", .{}); - // const route_id = req.parameter(0); - // const asset = dev.assets.get(route_id) orelse - // return req.setYield(true); - // _ = asset; // autofix - +fn onAssetRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { + const param = req.parameter(0); + const last_dot = std.mem.lastIndexOf(u8, param, ".") orelse param.len; + const hex = param[0..last_dot]; + if (hex.len != @sizeOf(u64) * 2) + return req.setYield(true); + var out: [@sizeOf(u64)]u8 = undefined; + assert((std.fmt.hexToBytes(&out, hex) catch + return req.setYield(true)).len == @sizeOf(u64)); + const hash: u64 = @bitCast(out); + const asset = dev.assets.get(hash) orelse + return req.setYield(true); + req.setYield(false); + asset.on(resp); } fn onCssRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { @@ -2957,6 +2966,26 @@ pub fn IncrementalGraph(side: bake.Side) type { return file_index; } + /// Returns the key that was inserted. + pub fn insertEmpty(g: *@This(), abs_path: []const u8) bun.OOM![]const u8 { + comptime assert(side == .client); // not implemented + g.owner().graph_safety_lock.assertLocked(); + const gop = try g.bundled_files.getOrPut(g.owner().allocator, abs_path); + if (!gop.found_existing) { + gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); + gop.value_ptr.* = File.init("", .{ + .failed = false, + .is_hmr_root = false, + .is_special_framework_file = false, + .is_html_route = false, + .kind = .unknown, + }); + try g.first_dep.append(g.owner().allocator, .none); + try g.first_import.append(g.owner().allocator, .none); + } + return gop.key_ptr.*; + } + /// Server CSS files are just used to be targets for graph traversal. /// Its content lives only on the client. pub fn insertCssFileOnServer(g: *@This(), ctx: *HotUpdateContext, index: bun.JSAst.Index, abs_path: []const u8) bun.OOM!void { @@ -4627,6 +4656,7 @@ pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []? for (events) |event| { // TODO: why does this out of bounds when you delete every file in the directory? 
if (event.index >= file_paths.len) continue; + const file_path = file_paths[event.index]; const update_count = counts[event.index] + 1; counts[event.index] = update_count; @@ -4917,6 +4947,105 @@ const HTMLRouter = struct { } }; +pub fn putOrOverwriteAsset( + dev: *DevServer, + abs_path: []const u8, + contents: []u8, + content_hash: u64, +) !void { + try dev.assets.putOrOverwrite(dev.allocator, abs_path, contents, content_hash); +} + +pub const Assets = struct { + /// Keys are absolute paths, sharing memory with the keys in IncrementalGraph(.client) + /// Values are indexes into files + path_map: bun.StringArrayHashMapUnmanaged(u32), + /// Content-addressable store. Multiple paths can point to the same content + /// hash, which is tracked by the `refs` array. One reference is held to + /// contained StaticRoute instances when they are stored. + files: AutoArrayHashMapUnmanaged(u64, *StaticRoute), + /// Indexed by the same index of `files`. The value is never `0`. + refs: ArrayListUnmanaged(u32), + + needs_reindex: bool = false, + + fn owner(assets: *Assets) *DevServer { + return @alignCast(@fieldParentPtr("assets", assets)); + } + + pub fn putOrOverwrite( + assets: *Assets, + alloc: Allocator, + /// not allocated + abs_path: []const u8, + /// allocated by bun.default_allocator, ownership given to DevServer + contents: []u8, + /// content hash of the asset + content_hash: u64, + ) !void { + assets.owner().graph_safety_lock.lock(); + defer assets.owner().graph_safety_lock.unlock(); + + const gop = try assets.path_map.getOrPut(alloc, abs_path); + if (!gop.found_existing) { + // Locate a stable pointer for the file path + const stable_abs_path = try assets.owner().client_graph.insertEmpty(abs_path); + gop.key_ptr.* = stable_abs_path; + } else { + const i = gop.value_ptr.*; + // When there is one reference to the asset, the entry can be + // replaced in-place with the new asset. 
+ if (assets.refs.items[i] == 1) { + const slice = assets.files.entries.slice(); + slice.items(.key)[i] = content_hash; + slice.items(.value)[i] = initStaticRouteFromBytes(alloc, contents, .detectFromPath(abs_path)); + comptime assert(@TypeOf(slice.items(.hash)[0]) == void); + assets.needs_reindex = true; + return; + } else { + assets.refs.items[gop.value_ptr.*] -= 1; + } + } + + try assets.reindexIfNeeded(alloc); + const file_index_gop = try assets.files.getOrPut(alloc, content_hash); + if (!file_index_gop.found_existing) { + try assets.refs.append(alloc, 1); + file_index_gop.value_ptr.* = initStaticRouteFromBytes(alloc, contents, .detectFromPath(abs_path)); + } else { + file_index_gop.value_ptr.*.ref_count += 1; + bun.default_allocator.free(contents); + } + gop.value_ptr.* = @intCast(file_index_gop.index); + } + + pub fn reindexIfNeeded(assets: *Assets, alloc: Allocator) !void { + if (assets.needs_reindex) { + try assets.files.reIndex(alloc); + assets.needs_reindex = false; + } + } + + pub fn get(assets: *Assets, content_hash: u64) ?*StaticRoute { + return assets.files.get(content_hash); + } + + pub fn deinit(assets: *Assets, alloc: Allocator) void { + assets.map.deinit(alloc); + assets.hash_lookups.deinit(alloc); + } +}; + +/// `bytes` is allocated by `allocator`, ownership moved into the Blob +fn initAnyBlobFromBytes(allocator: Allocator, bytes: []u8) JSC.WebCore.AnyBlob { + return .{ .InternalBlob = .{ .bytes = .fromOwnedSlice(allocator, bytes) } }; +} + +fn initStaticRouteFromBytes(allocator: Allocator, bytes: []u8, mime_type: MimeType) *StaticRoute { + _ = mime_type; + return .initFromBlob(initAnyBlobFromBytes(allocator, bytes)); +} + const std = @import("std"); const Allocator = std.mem.Allocator; const Mutex = bun.Mutex; @@ -4958,3 +5087,5 @@ const HTMLBundle = JSC.API.HTMLBundle; const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena; const Chunk = bun.bundle_v2.Chunk; + +const StaticRoute = bun.server.StaticRoute; diff --git a/src/bake/hmr-module.ts b/src/bake/hmr-module.ts index df7b07a16b7f0e..88156bbf9a2889 100644 --- a/src/bake/hmr-module.ts +++ b/src/bake/hmr-module.ts @@ -62,7 +62,9 @@ export class HotModule { const mod = await (loadModule(id, LoadModuleType.AsyncAssertPresent) as Promise); mod._deps.set(this, onReload ? { _callback: onReload, _expectedImports: expectedImports } : undefined); const { exports, __esModule } = mod; - const object = __esModule ? exports : (mod._ext_exports ??= { ...exports, default: exports }); + const object = __esModule + ? 
exports + : (mod._ext_exports ??= { ...(typeof exports === "object" && exports), default: exports }); if (expectedImports && mod._state === State.Ready) { // for (const key of expectedImports) { diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 16045b2eaa4c30..1d19bcda28b0d4 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -175,7 +175,7 @@ pub fn writeStatus(comptime ssl: bool, resp_ptr: ?*uws.NewApp(ssl).Response, sta } // TODO: rename to StaticBlobRoute, rename AnyStaticRoute to StaticRoute -const StaticRoute = @import("./server/StaticRoute.zig"); +pub const StaticRoute = @import("./server/StaticRoute.zig"); const HTMLBundle = JSC.API.HTMLBundle; const HTMLBundleRoute = HTMLBundle.HTMLBundleRoute; diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index ef45d940e84002..e79b08850be225 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -5,7 +5,7 @@ const StaticRoute = @This(); server: ?AnyServer = null, status_code: u16, blob: AnyBlob, -cached_blob_size: u64 = 0, +cached_blob_size: u64, has_content_disposition: bool = false, headers: Headers = .{ .allocator = bun.default_allocator, @@ -14,6 +14,22 @@ ref_count: u32 = 1, pub usingnamespace bun.NewRefCounted(@This(), deinit, null); +pub fn initFromBlob(blob: AnyBlob) *StaticRoute { + const headers = Headers.from(null, bun.default_allocator, .{ .body = &blob }) catch bun.outOfMemory(); + return StaticRoute.new(.{ + .blob = blob, + .cached_blob_size = blob.size(), + .has_content_disposition = false, + .headers = headers, + .server = null, + .status_code = 200, + }); +} + +pub const InitFromBytesOptions = struct { + mime_type: ?bun.http.MimeType = null, +}; + fn deinit(this: *StaticRoute) void { this.blob.detach(); this.headers.deinit(); @@ -46,17 +62,14 @@ pub fn fromJS(globalThis: *JSC.JSGlobalObject, argument: JSC.JSValue) bun.JSErro // Let's let them do that. response.body.value.toBlobIfPossible(); - var blob: AnyBlob = brk: { + const blob: AnyBlob = brk: { switch (response.body.value) { .Used => { return globalThis.throwInvalidArguments("Response body has already been used", .{}); }, - else => { - return globalThis.throwInvalidArguments("Body must be fully buffered before it can be used in a static route. Consider calling new Response(await response.blob()) to buffer the body.", .{}); - }, .Null, .Empty => { - break :brk AnyBlob{ + break :brk .{ .InternalBlob = JSC.WebCore.InternalBlob{ .bytes = std.ArrayList(u8).init(bun.default_allocator), }, @@ -74,6 +87,10 @@ pub fn fromJS(globalThis: *JSC.JSGlobalObject, argument: JSC.JSValue) bun.JSErro break :brk .{ .Blob = blob }; }, + + else => { + return globalThis.throwInvalidArguments("Body must be fully buffered before it can be used in a static route. Consider calling new Response(await response.blob()) to buffer the body.", .{}); + }, } }; @@ -138,12 +155,12 @@ pub fn fromJS(globalThis: *JSC.JSGlobalObject, argument: JSC.JSValue) bun.JSErro } // HEAD requests have no body. 
-pub fn onHEADRequest(this: *StaticRoute, req: *uws.Request, resp: HTTPResponse) void { +pub fn onHEADRequest(this: *StaticRoute, req: *uws.Request, resp: AnyResponse) void { req.setYield(false); this.onHEAD(resp); } -pub fn onHEAD(this: *StaticRoute, resp: HTTPResponse) void { +pub fn onHEAD(this: *StaticRoute, resp: AnyResponse) void { this.ref(); if (this.server) |server| { server.onPendingRequest(); @@ -153,18 +170,18 @@ pub fn onHEAD(this: *StaticRoute, resp: HTTPResponse) void { this.onResponseComplete(resp); } -fn renderMetadataAndEnd(this: *StaticRoute, resp: HTTPResponse) void { +fn renderMetadataAndEnd(this: *StaticRoute, resp: AnyResponse) void { this.renderMetadata(resp); resp.writeHeaderInt("Content-Length", this.cached_blob_size); resp.endWithoutBody(resp.shouldCloseConnection()); } -pub fn onRequest(this: *StaticRoute, req: *uws.Request, resp: HTTPResponse) void { +pub fn onRequest(this: *StaticRoute, req: *uws.Request, resp: AnyResponse) void { req.setYield(false); this.on(resp); } -pub fn on(this: *StaticRoute, resp: HTTPResponse) void { +pub fn on(this: *StaticRoute, resp: AnyResponse) void { this.ref(); if (this.server) |server| { server.onPendingRequest(); @@ -180,16 +197,16 @@ pub fn on(this: *StaticRoute, resp: HTTPResponse) void { this.toAsync(resp); } -fn toAsync(this: *StaticRoute, resp: HTTPResponse) void { +fn toAsync(this: *StaticRoute, resp: AnyResponse) void { resp.onAborted(*StaticRoute, onAborted, this); resp.onWritable(*StaticRoute, onWritableBytes, this); } -fn onAborted(this: *StaticRoute, resp: HTTPResponse) void { +fn onAborted(this: *StaticRoute, resp: AnyResponse) void { this.onResponseComplete(resp); } -fn onResponseComplete(this: *StaticRoute, resp: HTTPResponse) void { +fn onResponseComplete(this: *StaticRoute, resp: AnyResponse) void { resp.clearAborted(); resp.clearOnWritable(); resp.clearTimeout(); @@ -201,7 +218,7 @@ fn onResponseComplete(this: *StaticRoute, resp: HTTPResponse) void { this.deref(); } -pub fn doRenderBlob(this: *StaticRoute, resp: HTTPResponse, did_finish: *bool) void { +pub fn doRenderBlob(this: *StaticRoute, resp: AnyResponse, did_finish: *bool) void { // We are not corked // The body is small // Faster to do the memcpy than to do the two network calls @@ -214,12 +231,12 @@ pub fn doRenderBlob(this: *StaticRoute, resp: HTTPResponse, did_finish: *bool) v } } -pub fn doRenderBlobCorked(this: *StaticRoute, resp: HTTPResponse, did_finish: *bool) void { +pub fn doRenderBlobCorked(this: *StaticRoute, resp: AnyResponse, did_finish: *bool) void { this.renderMetadata(resp); this.renderBytes(resp, did_finish); } -fn onWritable(this: *StaticRoute, write_offset: u64, resp: HTTPResponse) void { +fn onWritable(this: *StaticRoute, write_offset: u64, resp: AnyResponse) void { if (this.server) |server| { resp.timeout(server.config().idleTimeout); } @@ -232,7 +249,7 @@ fn onWritable(this: *StaticRoute, write_offset: u64, resp: HTTPResponse) void { this.onResponseComplete(resp); } -fn onWritableBytes(this: *StaticRoute, write_offset: u64, resp: HTTPResponse) bool { +fn onWritableBytes(this: *StaticRoute, write_offset: u64, resp: AnyResponse) bool { const blob = this.blob; const all_bytes = blob.slice(); @@ -249,14 +266,14 @@ fn onWritableBytes(this: *StaticRoute, write_offset: u64, resp: HTTPResponse) bo return true; } -fn doWriteStatus(_: *StaticRoute, status: u16, resp: HTTPResponse) void { +fn doWriteStatus(_: *StaticRoute, status: u16, resp: AnyResponse) void { switch (resp) { .SSL => |r| writeStatus(true, r, status), .TCP => |r| 
writeStatus(false, r, status), } } -fn doWriteHeaders(this: *StaticRoute, resp: HTTPResponse) void { +fn doWriteHeaders(this: *StaticRoute, resp: AnyResponse) void { switch (resp) { inline .SSL, .TCP => |s| { const entries = this.headers.entries.slice(); @@ -271,11 +288,11 @@ fn doWriteHeaders(this: *StaticRoute, resp: HTTPResponse) void { } } -fn renderBytes(this: *StaticRoute, resp: HTTPResponse, did_finish: *bool) void { +fn renderBytes(this: *StaticRoute, resp: AnyResponse, did_finish: *bool) void { did_finish.* = this.onWritableBytes(0, resp); } -fn renderMetadata(this: *StaticRoute, resp: HTTPResponse) void { +fn renderMetadata(this: *StaticRoute, resp: AnyResponse) void { var status = this.status_code; const size = this.cached_blob_size; @@ -298,4 +315,4 @@ const Headers = JSC.WebCore.Headers; const AnyServer = JSC.API.AnyServer; const AnyBlob = JSC.WebCore.AnyBlob; const writeStatus = @import("../server.zig").writeStatus; -const HTTPResponse = uws.AnyResponse; +const AnyResponse = uws.AnyResponse; diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig index 9b29a52dce8e02..5ae160ca5008f3 100644 --- a/src/bun.js/webcore/blob.zig +++ b/src/bun.js/webcore/blob.zig @@ -2060,7 +2060,7 @@ pub const Blob = struct { .bytes = ByteStore.init(bytes, allocator), }, .allocator = allocator, - .ref_count = std.atomic.Value(u32).init(1), + .ref_count = .init(1), }); return store; } diff --git a/src/bun.zig b/src/bun.zig index f80f34bbee597b..7bcda9025b2cf8 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -4481,3 +4481,5 @@ pub fn CowSlice(T: type) type { } const Allocator = std.mem.Allocator; + +pub const server = @import("./bun.js/api/server.zig"); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index da12dd162d5adb..4025ae65dbc0e9 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -3045,8 +3045,20 @@ pub const BundleV2 = struct { result.source.contents.len else @as(usize, 0); + graph.input_files.items(.unique_key_for_additional_file)[result.source.index.get()] = result.unique_key_for_additional_file; graph.input_files.items(.content_hash_for_additional_file)[result.source.index.get()] = result.content_hash_for_additional_file; + if (result.unique_key_for_additional_file.len > 0 and result.loader.shouldCopyForBundling()) { + if (this.transpiler.options.dev_server) |dev| { + dev.putOrOverwriteAsset( + result.source.path.text, + // SAFETY: when shouldCopyForBundling is true, the + // contents are allocated by bun.default_allocator + @constCast(result.source.contents), + result.content_hash_for_additional_file, + ) catch bun.outOfMemory(); + } + } // Record which loader we used for this file graph.input_files.items(.loader)[result.source.index.get()] = result.loader; @@ -3772,6 +3784,11 @@ pub const ParseTask = struct { return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); } + const FileLoaderHash = struct { + key: []const u8, + content_hash: u64, + }; + fn getAST( log: *Logger.Log, transpiler: *Transpiler, @@ -3781,7 +3798,7 @@ pub const ParseTask = struct { source: Logger.Source, loader: Loader, unique_key_prefix: u64, - unique_key_for_additional_file: *[]const u8, + unique_key_for_additional_file: *FileLoaderHash, ) !JSAst { switch (loader) { .jsx, .tsx, .js, .ts => { @@ -3841,7 +3858,10 @@ pub const ParseTask = struct { // Implements embedded sqlite if (loader == .sqlite_embedded) { const embedded_path = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ 
bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; - unique_key_for_additional_file.* = embedded_path; + unique_key_for_additional_file.* = .{ + .key = embedded_path, + .content_hash = ContentHasher.run(source.contents), + }; break :brk embedded_path; } @@ -3918,7 +3938,10 @@ pub const ParseTask = struct { .args = BabyList(Expr).init(require_args), }, Logger.Loc{ .start = 0 }); - unique_key_for_additional_file.* = unique_key; + unique_key_for_additional_file.* = .{ + .key = unique_key, + .content_hash = ContentHasher.run(source.contents), + }; return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); }, .html => { @@ -4000,16 +4023,48 @@ pub const ParseTask = struct { ast.import_records = import_records; return ast; }, - else => {}, + // TODO: + .dataurl, .base64, .bunsh => { + return try getEmptyAST(log, transpiler, opts, allocator, source, E.String); + }, + .file, .wasm => { + bun.assert(loader.shouldCopyForBundling()); + + // Put a unique key in the AST to implement the URL loader. At the end + // of the bundle, the key is replaced with the actual URL. + const content_hash = ContentHasher.run(source.contents); + + const unique_key: []const u8 = if (transpiler.options.dev_server != null) + // With DevServer, the actual URL is added now, since it can be + // known this far ahead of time, and it means the unique key code + // does not have to perform an additional pass over files. + // + // To avoid a mutex, the actual insertion of the asset to DevServer + // is done on the bundler thread. + try std.fmt.allocPrint( + allocator, + bun.bake.DevServer.asset_prefix ++ "/{s}{s}", + .{ + &std.fmt.bytesToHex(std.mem.asBytes(&content_hash), .lower), + std.fs.path.extension(source.path.text), + }, + ) + else + try std.fmt.allocPrint( + allocator, + "{any}A{d:0>8}", + .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }, + ); + const root = Expr.init(E.String, .{ .data = unique_key }, .{ .start = 0 }); + unique_key_for_additional_file.* = .{ + .key = unique_key, + .content_hash = content_hash, + }; + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); + ast.addUrlForCss(allocator, &source, null, unique_key); + return ast; + }, } - const unique_key = std.fmt.allocPrint(allocator, "{any}A{d:0>8}", .{ bun.fmt.hexIntLower(unique_key_prefix), source.index.get() }) catch unreachable; - const root = Expr.init(E.String, E.String{ - .data = unique_key, - }, Logger.Loc{ .start = 0 }); - unique_key_for_additional_file.* = unique_key; - var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); - ast.addUrlForCss(allocator, &source, null, unique_key); - return ast; } fn getCodeForParseTaskWithoutPlugins( @@ -4596,7 +4651,10 @@ pub const ParseTask = struct { task.jsx.parse = loader.isJSX(); - var unique_key_for_additional_file: []const u8 = ""; + var unique_key_for_additional_file: FileLoaderHash = .{ + .key = "", + .content_hash = 0, + }; var ast: JSAst = if (!is_empty) try getAST(log, transpiler, opts, allocator, resolver, source, loader, task.ctx.unique_key, &unique_key_for_additional_file) else switch (opts.module_type == .esm) { @@ -4621,6 +4679,8 @@ pub const ParseTask = struct { task.side_effects = .no_side_effects__empty_ast; } + bun.debugAssert(ast.parts.len > 0); // when parts.len == 0, it is assumed to be pending/failed. empty ast has at least 1 part. 
+ step.* = .resolve; return .{ @@ -4628,13 +4688,13 @@ pub const ParseTask = struct { .source = source, .log = log.*, .use_directive = use_directive, - .unique_key_for_additional_file = unique_key_for_additional_file, + .unique_key_for_additional_file = unique_key_for_additional_file.key, .side_effects = task.side_effects, .loader = loader, // Hash the files in here so that we do it in parallel. .content_hash_for_additional_file = if (loader.shouldCopyForBundling()) - ContentHasher.run(source.contents) + unique_key_for_additional_file.content_hash else 0, }; diff --git a/src/http/mime_type.zig b/src/http/mime_type.zig index 1f284dc7ee4769..084ebfe1a7f78f 100644 --- a/src/http/mime_type.zig +++ b/src/http/mime_type.zig @@ -239,6 +239,11 @@ pub fn byExtensionNoDefault(ext: string) ?MimeType { return extensions.get(ext); } +pub fn detectFromPath(path: string) MimeType { + const ext = std.fs.path.extension(path); + return byExtension(ext); +} + // this is partially auto-generated pub const all = struct { pub const @"application/webassembly" = wasm; diff --git a/src/js_parser.zig b/src/js_parser.zig index aed717cd2e60cf..8d88beb6ccc9d6 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -1470,12 +1470,6 @@ pub const ImportScanner = struct { } } } - - // when bundling, all top-level variables become var - // TODO(@paperdave): we already do this earlier in visiting? - if (!hot_module_reloading_transformations and p.options.bundle and !st.kind.isUsing()) { - st.kind = .k_var; - } }, .s_export_default => |st| { // This is defer'd so that we still record export default for identifiers @@ -24225,6 +24219,7 @@ pub const ConvertESMExportsForHmr = struct { st.namespace_ref, st.items, stmt.loc, + null, stmt.loc, ); for (st.items) |*item| { @@ -24257,6 +24252,7 @@ pub const ConvertESMExportsForHmr = struct { st.namespace_ref, &.{}, stmt.loc, + null, stmt.loc, ); try ctx.export_star_props.append(p.allocator, .{ @@ -24269,7 +24265,15 @@ pub const ConvertESMExportsForHmr = struct { // named/default imports here as we always rewrite them as // full qualified property accesses (needed for live-bindings) .s_import => |st| { - _ = try ctx.deduplicatedImport(p, st.import_record_index, st.namespace_ref, st.items, st.star_name_loc, stmt.loc); + _ = try ctx.deduplicatedImport( + p, + st.import_record_index, + st.namespace_ref, + st.items, + st.star_name_loc, + st.default_name, + stmt.loc, + ); return; }, }; @@ -24285,6 +24289,7 @@ pub const ConvertESMExportsForHmr = struct { namespace_ref: Ref, items: []js_ast.ClauseItem, star_name_loc: ?logger.Loc, + default_name: ?js_ast.LocRef, loc: logger.Loc, ) !Ref { const ir = &p.import_records.items[import_record_index]; @@ -24321,13 +24326,16 @@ pub const ConvertESMExportsForHmr = struct { if (stmt.star_name_loc == null) if (star_name_loc) |stl| { stmt.star_name_loc = stl; }; + if (stmt.default_name == null) if (default_name) |dn| { + stmt.default_name = dn; + }; return stmt.namespace_ref; } try ctx.stmts.append(p.allocator, Stmt.alloc(S.Import, .{ .import_record_index = import_record_index, .is_single_line = true, - .default_name = null, + .default_name = default_name, .items = items, .namespace_ref = namespace_ref, .star_name_loc = star_name_loc, From 8f1e52101c5330d1c7e0fef64d948595ba58939e Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Tue, 4 Feb 2025 17:44:16 -0800 Subject: [PATCH 14/28] support plugins in DevServer --- cmake/tools/SetupBun.cmake | 1 + src/bake/DevServer.zig | 274 +++++++++++++--------- src/bun.js/api/server.zig | 331 
++++++++++++++------------- src/bun.js/api/server/HTMLBundle.zig | 278 ++++++++-------------- src/bun.js/module_loader.zig | 2 +- src/bun.zig | 2 - src/bundler/bundle_v2.zig | 7 +- src/cli/install.ps1 | 2 +- src/fs.zig | 8 +- src/resolver/resolve_path.zig | 298 ++++++++++-------------- src/watcher.zig | 2 +- 11 files changed, 557 insertions(+), 648 deletions(-) diff --git a/cmake/tools/SetupBun.cmake b/cmake/tools/SetupBun.cmake index 837248f65f74e8..3cb77ff4be9e3b 100644 --- a/cmake/tools/SetupBun.cmake +++ b/cmake/tools/SetupBun.cmake @@ -14,6 +14,7 @@ if (NOT CI) # a tempdir such as /private/tmp/bun-node-ce532901c/bun, which may cause this # CMake configuration break after tempdir is cleaned up (ex. after reboot). get_filename_component(BUN_EXECUTABLE ${BUN_EXECUTABLE} REALPATH) + set(BUN_EXECUTABLE ${BUN_EXECUTABLE} CACHE FILEPATH "Bun executable" FORCE) endif() # If this is not set, some advanced features are not checked. diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 96fc357ee56201..95c9c98418592b 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -27,10 +27,10 @@ pub const Options = struct { verbose_watcher: bool = false, }; -// The fields `client_graph`, `server_graph`, and `directory_watchers` all -// use `@fieldParentPointer` to access DevServer's state. This pattern has -// made it easier to group related fields together, but one must remember -// those structures still depend on the DevServer pointer. +// The fields `client_graph`, `server_graph`, `directory_watchers`, and `assets` +// all use `@fieldParentPointer` to access DevServer's state. This pattern has +// made it easier to group related fields together, but one must remember those +// structures still depend on the DevServer pointer. /// Used for all server-wide allocations. In debug, this shows up in /// a separate named heap. Thread-safe. @@ -115,6 +115,17 @@ ssr_bundler: Transpiler, /// Note that it is rarely correct to write messages into it. Instead, associate /// messages with the IncrementalGraph file or Route using `SerializedFailure` log: Log, +plugin_state: enum { + /// Should ask server for plugins. Once plugins are loaded, the plugin + /// pointer is written into `server_bundler.options.plugin` + unknown, + // These two states mean that `server.getOrLoadPlugins()` was called. + pending, + loaded, + /// Currently, this represents a degraded state where no bundle can + /// be correctly executed because the plugins did not load successfully. + err, +}, /// There is only ever one bundle executing at the same time, since all bundles /// inevitably share state. This bundle is asynchronous, storing its state here /// while in-flight. All allocations held by `.bv2.graph.heap`'s arena @@ -130,7 +141,7 @@ current_bundle: ?struct { had_reload_event: bool, }, /// This is not stored in `current_bundle` so that its memory can be reused when -/// there is no active bundle. After the bundle finishes, these requests will +/// there is no active bundle. After a bundle finishes, these requests will /// be continued, either calling their handler on success or sending the error /// page on failure. 
current_bundle_requests: ArrayListUnmanaged(DeferredRequest), @@ -319,6 +330,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .refs = .empty, }, .log = .init(allocator), + .plugin_state = .unknown, .server_bundler = undefined, .client_bundler = undefined, @@ -696,122 +708,134 @@ fn ensureRouteIsBundled( req: *Request, resp: AnyResponse, ) bun.OOM!void { - // TODO: Zig 0.14 gets labelled continue: - // - Remove the `while` - // - Move the code after this switch into `.loaded =>` - // - Replace `break` with `continue :sw .loaded` - // - Replace `continue` with `continue :sw ` - while (true) { - switch (dev.routeBundlePtr(route_bundle_index).server_state) { - .unqueued => { - try dev.next_bundle.requests.ensureUnusedCapacity(dev.allocator, 1); - if (dev.current_bundle != null) { - try dev.next_bundle.route_queue.ensureUnusedCapacity(dev.allocator, 1); - } - - const deferred: DeferredRequest = .{ - .route_bundle_index = route_bundle_index, - .data = switch (kind) { - inline .js_payload, .bundled_html_page => |tag| brk: { - resp.onAborted(*DeferredRequest, DeferredRequest.onAbort, undefined); // TODO: pass stable pointer. - break :brk @unionInit(DeferredRequest.Data, @tagName(tag), resp); + sw: switch (dev.routeBundlePtr(route_bundle_index).server_state) { + .unqueued => { + if (dev.current_bundle != null) { + try dev.next_bundle.route_queue.put(dev.allocator, route_bundle_index, {}); + dev.routeBundlePtr(route_bundle_index).server_state = .bundling; + try dev.deferRequest(&dev.next_bundle.requests, route_bundle_index, kind, req, resp); + } else { + // If plugins are not yet loaded, prepare them. + // In the case plugins are set to &.{}, this will not hit `.pending`. + plugin: switch (dev.plugin_state) { + .unknown => if (dev.bundler_options.plugin != null) { + // Framework-provided plugin is likely going to be phased out later + dev.plugin_state = .loaded; + } else switch (dev.server.?.getOrLoadPlugins(.{ .dev_server = dev })) { + .pending => { + dev.plugin_state = .pending; + continue :plugin .pending; }, - .server_handler => brk: { - assert(dev.routeBundlePtr(route_bundle_index).data == .framework); - break :brk .{ - .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) - .save(dev.vm.global, req, resp.TCP), - }; + .err => { + dev.plugin_state = .err; + continue :plugin .err; }, + .ready => {}, }, - }; - errdefer @compileError("cannot error since the request is already stored"); - - dev.next_bundle.requests.appendAssumeCapacity(deferred); - if (dev.current_bundle != null) { - dev.next_bundle.route_queue.putAssumeCapacity(route_bundle_index, {}); - } else { - var sfa = std.heap.stackFallback(4096, dev.allocator); - const temp_alloc = sfa.get(); - - var entry_points: EntryPointList = EntryPointList.empty; - defer entry_points.deinit(temp_alloc); - - dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_bundle_index) catch bun.outOfMemory(); + .pending => { + try dev.next_bundle.route_queue.put(dev.allocator, route_bundle_index, {}); + dev.routeBundlePtr(route_bundle_index).server_state = .bundling; + try dev.deferRequest(&dev.next_bundle.requests, route_bundle_index, kind, req, resp); + return; + }, + .err => { + // TODO: render plugin error page + resp.endWithoutBody(true); + return; + }, + .loaded => {}, + } - if (entry_points.set.count() == 0) { - if (dev.bundling_failures.count() > 0) { - dev.routeBundlePtr(route_bundle_index).server_state = .possible_bundling_failures; - } else { - 
dev.routeBundlePtr(route_bundle_index).server_state = .loaded; - } - continue; + // Prepare a bundle with just this route. + var sfa = std.heap.stackFallback(4096, dev.allocator); + const temp_alloc = sfa.get(); + + var entry_points: EntryPointList = .empty; + defer entry_points.deinit(temp_alloc); + try dev.appendRouteEntryPointsIfNotStale(&entry_points, temp_alloc, route_bundle_index); + + // If all files were already bundled (possible with layouts), + // then no entry points will be queued up here. That does + // not mean the route is ready for presentation. + if (entry_points.set.count() == 0) { + if (dev.bundling_failures.count() > 0) { + dev.routeBundlePtr(route_bundle_index).server_state = .possible_bundling_failures; + continue :sw .possible_bundling_failures; + } else { + dev.routeBundlePtr(route_bundle_index).server_state = .loaded; + continue :sw .loaded; } - - dev.startAsyncBundle( - entry_points, - false, - std.time.Timer.start() catch @panic("timers unsupported"), - ) catch bun.outOfMemory(); } - dev.routeBundlePtr(route_bundle_index).server_state = .bundling; - return; - }, - .bundling => { - bun.assert(dev.current_bundle != null); - try dev.current_bundle_requests.ensureUnusedCapacity(dev.allocator, 1); - - const deferred: DeferredRequest = .{ - .route_bundle_index = route_bundle_index, - .data = switch (kind) { - .js_payload => .{ .js_payload = resp }, - .bundled_html_page => .{ .bundled_html_page = resp }, - .server_handler => .{ - .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) - .save(dev.vm.global, req, resp.TCP), - }, - }, - }; - dev.current_bundle_requests.appendAssumeCapacity(deferred); - return; - }, - .possible_bundling_failures => { - // TODO: perform a graph trace to find just the errors that are needed - if (dev.bundling_failures.count() > 0) { - resp.corked(sendSerializedFailures, .{ - dev, - resp, - dev.bundling_failures.keys(), - .bundler, - }); - return; - } else { - dev.routeBundlePtr(route_bundle_index).server_state = .loaded; - break; - } - }, - .evaluation_failure => { + try dev.deferRequest(&dev.next_bundle.requests, route_bundle_index, kind, req, resp); + + dev.startAsyncBundle( + entry_points, + false, + std.time.Timer.start() catch @panic("timers unsupported"), + ) catch bun.outOfMemory(); + } + + dev.routeBundlePtr(route_bundle_index).server_state = .bundling; + }, + .bundling => { + bun.assert(dev.current_bundle != null); + try dev.deferRequest(&dev.current_bundle_requests, route_bundle_index, kind, req, resp); + }, + .possible_bundling_failures => { + // TODO: perform a graph trace to find just the errors that are needed + if (dev.bundling_failures.count() > 0) { resp.corked(sendSerializedFailures, .{ dev, resp, - (&(dev.routeBundlePtr(route_bundle_index).data.framework.evaluate_failure.?))[0..1], - .evaluation, + dev.bundling_failures.keys(), + .bundler, }); return; - }, - .loaded => break, - } - - // this error is here to make sure there are no accidental loop exits - @compileError("all branches above should `return`, `break` or `continue`"); + } else { + dev.routeBundlePtr(route_bundle_index).server_state = .loaded; + continue :sw .loaded; + } + }, + .evaluation_failure => { + resp.corked(sendSerializedFailures, .{ + dev, + resp, + (&(dev.routeBundlePtr(route_bundle_index).data.framework.evaluate_failure.?))[0..1], + .evaluation, + }); + }, + .loaded => switch (kind) { + .server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp), + 
.bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp), + .js_payload => dev.onJsRequestWithBundle(route_bundle_index, resp), + }, } +} - switch (kind) { - .server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp), - .bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp), - .js_payload => dev.onJsRequestWithBundle(route_bundle_index, resp), - } +fn deferRequest( + dev: *DevServer, + requests_array: *std.ArrayListUnmanaged(DeferredRequest), + route_bundle_index: RouteBundle.Index, + kind: DeferredRequest.Data.Tag, + req: *Request, + resp: AnyResponse, +) !void { + try requests_array.ensureUnusedCapacity(dev.allocator, 1); + + const deferred: DeferredRequest = .{ + .route_bundle_index = route_bundle_index, + .data = switch (kind) { + .js_payload => .{ .js_payload = resp }, + .bundled_html_page => .{ .bundled_html_page = resp }, + .server_handler => .{ + .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) + .save(dev.vm.global, req, resp.TCP), + }, + }, + }; + + requests_array.appendAssumeCapacity(deferred); } fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, rbi: RouteBundle.Index) bun.OOM!void { @@ -1094,6 +1118,18 @@ const DeferredRequest = struct { _ = resp; @panic("TODO"); } + + fn abortAndDeinit(this: *DeferredRequest) void { + switch (this.data) { + .server_handler => |*saved| { + saved.response.endWithoutBody(true); + saved.deinit(); + }, + .bundled_html_page, .js_payload => |resp| { + resp.endWithoutBody(true); + }, + } + } }; fn startAsyncBundle( @@ -1385,9 +1421,10 @@ pub fn finalizeBundle( bv2: *bun.bundle_v2.BundleV2, result: bun.bundle_v2.DevServerOutput, ) bun.OOM!void { - defer dev.startNextBundleIfPresent(); defer { bv2.deinit(); + dev.current_bundle = null; + dev.startNextBundleIfPresent(); } const current_bundle = &dev.current_bundle.?; @@ -1858,7 +1895,7 @@ pub fn finalizeBundle( fn startNextBundleIfPresent(dev: *DevServer) void { // Clear the current bundle - dev.current_bundle = null; + assert(dev.current_bundle == null); dev.log.clearAndFree(); dev.current_bundle_requests.clearRetainingCapacity(); dev.emitVisualizerMessageIfNeeded(); @@ -2877,7 +2914,7 @@ pub fn IncrementalGraph(side: bake.Side) type { } // Do not count css files as a client module - // and also do not trace its dependencies. + // and also do not trace its imports. 
                //
                // The server version of this code does not need to
                // early return, since server css files never have
@@ -5036,6 +5073,22 @@ pub const Assets = struct {
     }
 };
 
+pub fn onPluginsResolved(dev: *DevServer, plugins: ?*Plugin) !void {
+    dev.bundler_options.plugin = plugins;
+    dev.plugin_state = .loaded;
+    dev.startNextBundleIfPresent();
+}
+
+pub fn onPluginsRejected(dev: *DevServer) !void {
+    dev.plugin_state = .err;
+    for (dev.next_bundle.requests.items) |*item| {
+        item.abortAndDeinit();
+    }
+    dev.next_bundle.requests.clearRetainingCapacity();
+    dev.next_bundle.route_queue.clearRetainingCapacity();
+    // TODO: allow recovery from this state
+}
+
 /// `bytes` is allocated by `allocator`, ownership moved into the Blob
 fn initAnyBlobFromBytes(allocator: Allocator, bytes: []u8) JSC.WebCore.AnyBlob {
     return .{ .InternalBlob = .{ .bytes = .fromOwnedSlice(allocator, bytes) } };
@@ -5084,6 +5137,7 @@ const VirtualMachine = JSC.VirtualMachine;
 const JSModuleLoader = JSC.JSModuleLoader;
 const EventLoopHandle = JSC.EventLoopHandle;
 const HTMLBundle = JSC.API.HTMLBundle;
+const Plugin = JSC.API.JSBundler.Plugin;
 const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena;
 const Chunk = bun.bundle_v2.Chunk;
diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig
index 1d19bcda28b0d4..b263bfeaaac7f8 100644
--- a/src/bun.js/api/server.zig
+++ b/src/bun.js/api/server.zig
@@ -383,23 +383,6 @@ pub const ServerConfig = struct {
         app.any(path, T, entry, handler_wrap.handler);
     }
 
-    pub fn applyStaticRoutes(this: *ServerConfig, comptime ssl: bool, server: AnyServer, app: *uws.NewApp(ssl)) !void {
-        const dev_server = server.devServer();
-        for (this.static_routes.items) |*entry| {
-            switch (entry.route) {
-                .StaticRoute => |static_route| {
-                    applyStaticRoute(server, ssl, app, *StaticRoute, static_route, entry.path);
-                },
-                .HTMLBundleRoute => |html_bundle_route| {
-                    applyStaticRoute(server, ssl, app, *HTMLBundleRoute, html_bundle_route, entry.path);
-                    if (dev_server) |dev| {
-                        try dev.html_router.put(dev.allocator, entry.path, html_bundle_route);
-                    }
-                },
-            }
-        }
-    }
-
     pub fn deinit(this: *ServerConfig) void {
         this.address.deinit(bun.default_allocator);
 
@@ -5829,40 +5812,76 @@ pub const ServerWebSocket = struct {
     }
 };
 
+/// State machine to handle loading plugins asynchronously. This structure is not thread-safe.
 const ServePlugins = struct {
-    value: Value,
+    state: State,
     ref_count: u32 = 1,
 
+    /// Reference count is incremented while there are other objects that are waiting on plugin loads.
     pub usingnamespace bun.NewRefCounted(ServePlugins, deinit, null);
 
-    pub const Value = union(enum) {
+    pub const State = union(enum) {
+        unqueued: []const []const u8,
         pending: struct {
-            raw_plugins: []const []const u8,
+            /// Promise may be empty if the plugin load finishes synchronously.
+            plugin: *bun.JSC.API.JSBundler.Plugin,
             promise: JSC.JSPromise.Strong,
-            plugins: ?*bun.JSC.API.JSBundler.Plugin,
-            pending_bundled_routes: bun.ArrayList(*HTMLBundleRoute),
+            html_bundle_routes: std.ArrayListUnmanaged(*HTMLBundleRoute),
+            dev_server: ?*bun.bake.DevServer,
         },
-        result: ?*bun.JSC.API.JSBundler.Plugin,
+        loaded: *bun.JSC.API.JSBundler.Plugin,
+        /// Error information is not stored as it is already reported.
+        err,
     };
 
-    pub fn init(server: AnyServer, plugins: []const []const u8, initial_pending: *HTMLBundleRoute) *ServePlugins {
+    pub const GetOrStartLoadResult = union(enum) {
+        /// null = no plugins, used by server implementation
+        ready: ?*bun.JSC.API.JSBundler.Plugin,
+        pending,
+        err,
+    };
-
-        // TODO: call builtin which resolves and imports plugin modules
+    pub const Callback = union(enum) {
+        html_bundle_route: *HTMLBundleRoute,
+        dev_server: *bun.bake.DevServer,
+    };
-        var pending_bundled_routes = bun.ArrayList(*HTMLBundleRoute){};
-        pending_bundled_routes.append(bun.default_allocator, initial_pending) catch bun.outOfMemory();
-        const this = ServePlugins.new(.{
-            .value = .{
-                .pending = .{
-                    .plugins = null,
-                    .raw_plugins = plugins,
-                    .promise = JSC.JSPromise.Strong.init(server.globalThis()),
-                    .pending_bundled_routes = pending_bundled_routes,
-                },
+    pub fn init(plugins: []const []const u8) *ServePlugins {
+        return ServePlugins.new(.{ .state = .{ .unqueued = plugins } });
+    }
+
+    pub fn deinit(this: *ServePlugins) void {
+        switch (this.state) {
+            .unqueued => {},
+            .pending => assert(false), // should have one ref while pending!
+            .loaded => |loaded| loaded.deinit(),
+            .err => {},
+        }
+        this.destroy();
+    }
+
+    pub fn getOrStartLoad(this: *ServePlugins, global: *JSC.JSGlobalObject, cb: Callback) bun.OOM!GetOrStartLoadResult {
+        sw: switch (this.state) {
+            .unqueued => {
+                this.loadAndResolvePlugins(global);
+                continue :sw this.state; // could jump to any branch if synchronously resolved
             },
-        });
-        return this;
+            .pending => |*pending| {
+                switch (cb) {
+                    .html_bundle_route => |route| {
+                        route.ref();
+                        try pending.html_bundle_routes.append(bun.default_allocator, route);
+                    },
+                    .dev_server => |server| {
+                        assert(pending.dev_server == null or pending.dev_server == server); // one dev server per server
+                        pending.dev_server = server;
+                    },
+                }
+                return .pending;
+            },
+            .loaded => |plugins| return .{ .ready = plugins },
+            .err => return .err,
+        }
     }
 
     extern fn JSBundlerPlugin__loadAndResolvePluginsForServe(
@@ -5871,41 +5890,51 @@ const ServePlugins = struct {
         bunfig_folder: JSC.JSValue,
     ) JSValue;
 
-    pub fn loadAndResolvePlugins(this: *ServePlugins, globalThis: *JSC.JSGlobalObject, bunfig_folder: string) void {
-        bun.assert(this.value == .pending);
+    fn loadAndResolvePlugins(this: *ServePlugins, global: *JSC.JSGlobalObject) void {
+        bun.assert(this.state == .unqueued);
+        const plugin_list = this.state.unqueued;
+        const bunfig_folder = bun.path.dirname(global.bunVM().transpiler.options.bunfig_path, .auto);
+
         this.ref();
         defer this.deref();
-        const plugin = bun.JSC.API.JSBundler.Plugin.create(globalThis, .browser);
-        this.value.pending.plugins = plugin;
+        const plugin = bun.JSC.API.JSBundler.Plugin.create(global, .browser);
         var sfb = std.heap.stackFallback(@sizeOf(bun.String) * 4, bun.default_allocator);
         const alloc = sfb.get();
-        const bunstring_array = alloc.alloc(bun.String, this.value.pending.raw_plugins.len) catch bun.outOfMemory();
+        const bunstring_array = alloc.alloc(bun.String, plugin_list.len) catch bun.outOfMemory();
         defer alloc.free(bunstring_array);
-        for (this.value.pending.raw_plugins, bunstring_array) |raw_plugin, *out| {
+        for (plugin_list, bunstring_array) |raw_plugin, *out| {
             out.* = bun.String.init(raw_plugin);
         }
-        const plugins = bun.String.toJSArray(globalThis, bunstring_array);
-        const bunfig_folder_bunstr = bun.String.createUTF8ForJS(globalThis, bunfig_folder);
+        const plugin_js_array = bun.String.toJSArray(global, bunstring_array);
+        const bunfig_folder_bunstr =
bun.String.createUTF8ForJS(global, bunfig_folder); + + this.state = .{ .pending = .{ + .promise = JSC.JSPromise.Strong.init(global), + .plugin = plugin, + .html_bundle_routes = .empty, + .dev_server = null, + } }; + + global.bunVM().eventLoop().enter(); + const result = JSBundlerPlugin__loadAndResolvePluginsForServe(plugin, plugin_js_array, bunfig_folder_bunstr); + global.bunVM().eventLoop().exit(); - globalThis.bunVM().eventLoop().enter(); - const result = JSBundlerPlugin__loadAndResolvePluginsForServe(plugin, plugins, bunfig_folder_bunstr); - globalThis.bunVM().eventLoop().exit(); // handle the case where js synchronously throws an error - if (globalThis.tryTakeException()) |e| { - handleOnReject(this, globalThis, e); + if (global.tryTakeException()) |e| { + handleOnReject(this, global, e); return; } if (!result.isEmptyOrUndefinedOrNull()) { // handle the case where js returns a promise if (result.asAnyPromise()) |promise| { - switch (promise.status(globalThis.vm())) { + switch (promise.status(global.vm())) { // promise not fulfilled yet .pending => { this.ref(); - this.value.pending.promise.strong.set(globalThis, promise.asValue(globalThis)); - promise.asValue(globalThis).then(globalThis, this, onResolveImpl, onRejectImpl); + this.state.pending.promise.strong.set(global, promise.asValue(global)); + promise.asValue(global).then(global, this, onResolveImpl, onRejectImpl); return; }, .fulfilled => { @@ -5913,41 +5942,30 @@ const ServePlugins = struct { return; }, .rejected => { - // const value = promise.asValue(globalThis); - const value = promise.result(globalThis.vm()); - handleOnReject(this, globalThis, value); + const value = promise.result(global.vm()); + handleOnReject(this, global, value); return; }, } } if (result.toError()) |e| { - handleOnReject(this, globalThis, e); + handleOnReject(this, global, e); } else { handleOnResolve(this); } } } - pub fn deinit(this: *ServePlugins) void { - if (this.value == .result) { - if (this.value.result) |plugins| { - plugins.deinit(); - } - } - ServePlugins.destroy(this); - } - pub const onResolve = JSC.toJSHostFunction(onResolveImpl); pub const onReject = JSC.toJSHostFunction(onRejectImpl); pub fn onResolveImpl(_: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { ctxLog("onResolve", .{}); - const arguments = callframe.arguments_old(2); - var plugins = arguments.ptr[1].asPromisePtr(ServePlugins); + const plugins_js, const plugins_result = callframe.argumentsAsArray(2); + var plugins = plugins_js.asPromisePtr(ServePlugins); defer plugins.deref(); - const plugins_result = arguments.ptr[0]; plugins_result.ensureStillAlive(); handleOnResolve(plugins); @@ -5956,38 +5974,52 @@ const ServePlugins = struct { } pub fn handleOnResolve(this: *ServePlugins) void { - this.value.pending.promise.deinit(); - var pending_bundled_routes = this.value.pending.pending_bundled_routes; - defer pending_bundled_routes.deinit(bun.default_allocator); - this.value = .{ .result = this.value.pending.plugins }; - for (pending_bundled_routes.items) |route| { - route.onPluginsResolved(this.value.result); + bun.assert(this.state == .pending); + const pending = &this.state.pending; + const plugin = pending.plugin; + pending.promise.deinit(); + defer pending.html_bundle_routes.deinit(bun.default_allocator); + + this.state = .{ .loaded = plugin }; + + for (pending.html_bundle_routes.items) |route| { + route.onPluginsResolved(plugin) catch bun.outOfMemory(); route.deref(); } + if (pending.dev_server) |server| { + server.onPluginsResolved(plugin) catch 
bun.outOfMemory(); + } } pub fn onRejectImpl(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { ctxLog("onReject", .{}); - const arguments = callframe.arguments_old(2); - const plugins = arguments.ptr[1].asPromisePtr(ServePlugins); - handleOnReject(plugins, globalThis, arguments.ptr[0]); + const error_js, const plugin_js = callframe.argumentsAsArray(2); + const plugins = plugin_js.asPromisePtr(ServePlugins); + handleOnReject(plugins, globalThis, error_js); return JSValue.jsUndefined(); } - pub fn handleOnReject(plugins: *ServePlugins, globalThis: *JSC.JSGlobalObject, e: JSValue) void { - defer plugins.deref(); - var pending_bundled_routes = plugins.value.pending.pending_bundled_routes; - defer pending_bundled_routes.deinit(bun.default_allocator); - plugins.value.pending.promise.deinit(); - plugins.value.pending.pending_bundled_routes = .{}; - plugins.value = .err; - for (pending_bundled_routes.items) |route| { - route.onPluginsRejected(); + pub fn handleOnReject(this: *ServePlugins, global: *JSC.JSGlobalObject, err: JSValue) void { + bun.assert(this.state == .pending); + const pending = &this.state.pending; + pending.plugin.deinit(); + pending.promise.deinit(); + defer pending.html_bundle_routes.deinit(bun.default_allocator); + + this.state = .err; + + Output.errGeneric("Failed to load plugins for Bun.serve:", .{}); + global.bunVM().runErrorHandler(err, null); + + for (pending.html_bundle_routes.items) |route| { + route.onPluginsRejected() catch bun.outOfMemory(); route.deref(); } - globalThis.bunVM().runErrorHandler(e, null); + if (pending.dev_server) |server| { + server.onPluginsRejected() catch bun.outOfMemory(); + } } comptime { @@ -6050,48 +6082,16 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp pub const doRequestIP = JSC.wrapInstanceMethod(ThisServer, "requestIP", false); pub const doTimeout = JSC.wrapInstanceMethod(ThisServer, "timeout", false); - pub fn getPlugins( - this: *ThisServer, - ) PluginsResult { - if (this.plugins) |p| { - switch (p.value) { - .result => |plugins| { - return .{ .found = plugins }; - }, - .pending => return .pending, - .err => return .err, - } - } - return .pending; - } - - // rename to loadAndResolvePlugins - pub fn getPluginsAsync( - this: *ThisServer, - bundle: *HTMLBundleRoute, - raw_plugins: []const []const u8, - bunfig_folder: string, - ) void { - bun.assert(this.plugins == null or this.plugins.?.value == .pending); - if (this.plugins) |p| { - bun.assert(p.value != .err); // call .getPlugins() first - switch (p.value) { - .pending => { - bundle.ref(); - p.value.pending.pending_bundled_routes.append( - bun.default_allocator, - bundle, - ) catch unreachable; - - return; - }, - .result => {}, - .err => {}, - } - } else { - this.plugins = ServePlugins.init(AnyServer.from(this), raw_plugins, bundle); - this.plugins.?.loadAndResolvePlugins(this.globalThis, bunfig_folder); + /// Returns: + /// - .ready if no plugin has to be loaded + /// - .err if there is a cached failure. Currently, this requires restarting the entire server. + /// - .pending if `callback` was stored. It will call `onPluginsResolved` or `onPluginsRejected` later. 
+ pub fn getOrLoadPlugins(server: *ThisServer, callback: ServePlugins.Callback) ServePlugins.GetOrStartLoadResult { + if (server.plugins) |p| { + return p.getOrStartLoad(server.globalThis, callback) catch bun.outOfMemory(); } + // no plugins + return .{ .ready = null }; } pub fn doSubscriberCount(this: *ThisServer, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -7462,14 +7462,45 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp fn setRoutes(this: *ThisServer) void { const app = this.app.?; + const any_server = AnyServer.from(this); + const dev_server = this.dev_server; + + // Plugins need to be registered if any of the following are + // assigned. This is done in `setRoutes` so that reloading + // a server can initialize such state. + // - DevServer + // - HTML Bundle + var needs_plugins = dev_server != null; + var has_html_catch_all = false; + if (this.config.static_routes.items.len > 0) { - this.config.applyStaticRoutes( - ssl_enabled, - AnyServer.from(this), - app, - ) catch bun.outOfMemory(); + for (this.config.static_routes.items) |*entry| { + switch (entry.route) { + .StaticRoute => |static_route| { + ServerConfig.applyStaticRoute(any_server, ssl_enabled, app, *StaticRoute, static_route, entry.path); + }, + .HTMLBundleRoute => |html_bundle_route| { + ServerConfig.applyStaticRoute(any_server, ssl_enabled, app, *HTMLBundleRoute, html_bundle_route, entry.path); + if (dev_server) |dev| { + dev.html_router.put(dev.allocator, entry.path, html_bundle_route) catch bun.outOfMemory(); + } + needs_plugins = true; + }, + } + if (strings.eqlComptime(entry.path, "/*")) { + has_html_catch_all = true; + } + } } + // If there are plugins, initialize the ServePlugins object in + // an unqueued state. The first thing (HTML Bundle, DevServer) + // that needs plugins will cause the load to happen. + if (needs_plugins and this.plugins == null) if (this.vm.transpiler.options.serve_plugins) |serve_plugins| { + this.plugins = ServePlugins.init(serve_plugins); + }; + + // Setup user websocket routes. 
if (this.config.websocket) |*websocket| { websocket.globalObject = this.globalThis; websocket.handler.app = app; @@ -7482,7 +7513,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp ); } - if (comptime debug_mode) { + if (debug_mode) { app.get("/bun:info", *ThisServer, this, onBunInfoRequest); if (this.config.inspector) { JSC.markBinding(@src()); @@ -7493,15 +7524,11 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp } var has_dev_catch_all = false; - if (this.dev_server) |dev| { + if (dev_server) |dev| { + // DevServer adds a catch-all handler to use FrameworkRouter (full stack apps) has_dev_catch_all = dev.attachRoutes(this) catch bun.outOfMemory(); } if (!has_dev_catch_all) { - const has_html_catch_all = for (this.config.static_routes.items) |route| { - if (strings.eqlComptime(route.path, "/*")) - break true; - } else false; - // "/*" routes are added backwards, so if they have a static route, // it will never be matched so we need to check for that first if (!has_html_catch_all) { @@ -7720,21 +7747,13 @@ pub const AnyServer = union(enum) { DebugHTTPServer: *DebugHTTPServer, DebugHTTPSServer: *DebugHTTPSServer, - pub fn plugins(this: AnyServer) ?*ServePlugins { - return switch (this) { - inline else => |server| server.plugins, - }; - } - - pub fn getPlugins(this: AnyServer) PluginsResult { - return switch (this) { - inline else => |server| server.getPlugins(), - }; - } - - pub fn loadAndResolvePlugins(this: AnyServer, bundle: *HTMLBundleRoute, raw_plugins: []const []const u8, bunfig_path: []const u8) void { - return switch (this) { - inline else => |server| server.getPluginsAsync(bundle, raw_plugins, bunfig_path), + /// Returns: + /// - .ready if no plugin has to be loaded + /// - .err if there is a cached failure. Currently, this requires restarting the entire server. + /// - .pending if `callback` was stored. It will call `onPluginsResolved` or `onPluginsRejected` later. + pub fn getOrLoadPlugins(server: AnyServer, callback: ServePlugins.Callback) ServePlugins.GetOrStartLoadResult { + return switch (server) { + inline else => |s| s.getOrLoadPlugins(callback), }; } diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index b94bd90adf39fc..4a90e4c317d005 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -7,41 +7,14 @@ pub usingnamespace JSC.Codegen.JSHTMLBundle; pub usingnamespace bun.NewRefCounted(HTMLBundle, deinit, null); ref_count: u32 = 1, -globalObject: *JSGlobalObject, +global: *JSGlobalObject, path: []const u8, -// TODO: move these three options from the HTML bundle to the server. -// Downside would be that the process bunfig would be used, but upside is it -// allows DevServer to merge shared assets between HTML routes. -config: bun.JSC.API.JSBundler.Config, -plugins: union(enum) { - pending: ?[]const []const u8, - result: ?*bun.JSC.API.JSBundler.Plugin, -}, -bunfig_dir: []const u8, - -/// Initialize an HTMLBundle. -/// -/// `plugins` is array of serve plugins defined in the bunfig.toml file. They will be resolved and loaded. -/// `bunfig_path` is the path to the bunfig.toml configuration file. It used to resolve the plugins relative -/// to the bunfig.toml file. 
-pub fn init( - globalObject: *JSGlobalObject, - path: []const u8, - bunfig_path: []const u8, - plugins: ?[]const []const u8, -) !*HTMLBundle { - var config = bun.JSC.API.JSBundler.Config{}; - try config.entry_points.insert(path); - config.target = .browser; - try config.public_path.appendChar('/'); + +/// Initialize an HTMLBundle given a path. +pub fn init(global: *JSGlobalObject, path: []const u8) !*HTMLBundle { return HTMLBundle.new(.{ - .globalObject = globalObject, + .global = global, .path = try bun.default_allocator.dupe(u8, path), - .config = config, - .plugins = .{ - .pending = plugins, - }, - .bunfig_dir = bun.path.dirname(bunfig_path, .auto), }); } @@ -51,7 +24,6 @@ pub fn finalize(this: *HTMLBundle) void { pub fn deinit(this: *HTMLBundle) void { bun.default_allocator.free(this.path); - this.config.deinit(bun.default_allocator); this.destroy(); } @@ -60,21 +32,34 @@ pub fn getIndex(this: *HTMLBundle, globalObject: *JSGlobalObject) JSValue { return str.transferToJS(globalObject); } -/// Rename to `Route` +// TODO: Rename to `Route` +/// An HTMLBundle can be used across multiple server instances, an +/// HTMLBundle.Route can only be used on one server, but is also +/// reference-counted because a server can have multiple instances of the same +/// html file on multiple endpoints. pub const HTMLBundleRoute = struct { + /// Rename to `bundle` html_bundle: *HTMLBundle, - pending_responses: std.ArrayListUnmanaged(*PendingResponse) = .{}, ref_count: u32 = 1, + // TODO: attempt to remove the null case. null is only present during server + // initialization as only a ServerConfig object is present. server: ?AnyServer = null, - value: Value = .pending_plugins, - /// Written and read by DevServer to identify if this route has been registered with the bundler. + /// When using DevServer, this value is never read or written to. + state: State, + /// Written and read by DevServer to identify if this route has been + /// registered with the bundler. dev_server_id: bun.bake.DevServer.RouteBundle.Index.Optional = .none, + /// When state == .pending, incomplete responses are stored here. 
+ pending_responses: std.ArrayListUnmanaged(*PendingResponse) = .{}, + + /// One HTMLBundle.Route can be specified multiple times + pub usingnamespace bun.NewRefCounted(@This(), _deinit, null); pub fn memoryCost(this: *const HTMLBundleRoute) usize { var cost: usize = 0; cost += @sizeOf(HTMLBundleRoute); cost += this.pending_responses.items.len * @sizeOf(PendingResponse); - cost += this.value.memoryCost(); + cost += this.state.memoryCost(); return cost; } @@ -85,25 +70,21 @@ pub const HTMLBundleRoute = struct { .pending_responses = .{}, .ref_count = 1, .server = null, - .value = .pending_plugins, + .state = .pending, }); } - pub usingnamespace bun.NewRefCounted(@This(), _deinit, null); - - pub const Value = union(enum) { - pending_plugins, - pending: void, + pub const State = union(enum) { + pending, building: *bun.BundleV2.JSBundleCompletionTask, err: bun.logger.Log, html: *StaticRoute, - pub fn deinit(this: *Value) void { + pub fn deinit(this: *State) void { switch (this.*) { .err => |*log| { log.deinit(); }, - .pending_plugins => {}, .building => |completion| { completion.cancelled = true; completion.deref(); @@ -115,9 +96,8 @@ pub const HTMLBundleRoute = struct { } } - pub fn memoryCost(this: *const Value) usize { + pub fn memoryCost(this: *const State) usize { return switch (this.*) { - .pending_plugins => 0, .pending => 0, .building => 0, .err => |log| log.memoryCost(), @@ -132,7 +112,7 @@ pub const HTMLBundleRoute = struct { } this.pending_responses.deinit(bun.default_allocator); this.html_bundle.deref(); - this.value.deinit(); + this.state.deinit(); this.destroy(); } @@ -158,66 +138,27 @@ pub const HTMLBundleRoute = struct { return; } - // Simple development workflow which rebundles on every request. - if (this.value == .html) { - this.value.html.deref(); - this.value = .pending_plugins; - } else if (this.value == .err) { - this.value.err.deinit(); - this.value = .pending_plugins; + // Simpler development workflow which rebundles on every request. 
+ if (this.state == .html) { + this.state.html.deref(); + this.state = .pending; + } else if (this.state == .err) { + this.state.err.deinit(); + this.state = .pending; } } - if (this.value == .pending_plugins) out_of_pending_plugins: { - var plugins: ?*bun.JSC.API.JSBundler.Plugin = null; - switch (this.html_bundle.plugins) { - .pending => |raw_plugins| have_plugins: { - if (raw_plugins == null or raw_plugins.?.len == 0) { - break :have_plugins; - } - - switch (server.getPlugins()) { - .pending => {}, - .err => { - this.value = .{ .err = bun.logger.Log.init(bun.default_allocator) }; - break :out_of_pending_plugins; - }, - .found => |result| { - plugins = result; - break :have_plugins; - }, - } - - this.value = .pending_plugins; - break :out_of_pending_plugins; - }, - .result => |existing_plugins| { - plugins = existing_plugins; - }, - } - debug("HTMLBundleRoute(0x{x}) plugins resolved", .{@intFromPtr(this)}); - this.html_bundle.plugins = .{ .result = plugins }; - this.value = .pending; - } - - if (this.value == .pending) { - if (bun.Environment.enable_logs) - debug("onRequest: {s} - pending", .{req.url()}); - - const success = this.scheduleBundle(server); - if (!success) { - resp.endWithoutBody(true); - bun.outOfMemory(); - return; - } - } - - switch (this.value) { - .pending => unreachable, - - .building, .pending_plugins => { + state: switch (this.state) { + .pending => { + if (bun.Environment.enable_logs) + debug("onRequest: {s} - pending", .{req.url()}); + this.scheduleBundle(server) catch bun.outOfMemory(); + continue :state this.state; + }, + .building => { if (bun.Environment.enable_logs) debug("onRequest: {s} - building", .{req.url()}); + // create the PendingResponse, add it to the list var pending = PendingResponse.new(.{ .method = bun.http.Method.which(req.method()) orelse { @@ -231,37 +172,23 @@ pub const HTMLBundleRoute = struct { .ref_count = 1, }); - this.pending_responses.append(bun.default_allocator, pending) catch { - pending.deref(); - resp.endWithoutBody(true); - bun.outOfMemory(); - return; - }; + this.pending_responses.append(bun.default_allocator, pending) catch bun.outOfMemory(); this.ref(); pending.ref(); resp.onAborted(*PendingResponse, PendingResponse.onAborted, pending); req.setYield(false); - - if (this.value == .pending_plugins) { - const raw_plugins = this.html_bundle.plugins.pending.?; - const bunfig_folder = this.html_bundle.bunfig_dir; - this.ref(); - debug("HTMLBundleRoute(0x{x}) resolving plugins...", .{@intFromPtr(this)}); - server.loadAndResolvePlugins(this, raw_plugins, bunfig_folder); - } }, .err => |log| { if (bun.Environment.enable_logs) debug("onRequest: {s} - err", .{req.url()}); _ = log; // autofix - // use the code from server.zig to render the error + // TODO: use the code from DevServer.zig to render the error resp.endWithoutBody(true); }, .html => |html| { if (bun.Environment.enable_logs) debug("onRequest: {s} - html", .{req.url()}); - // we already have the html, so we can just serve it if (is_head) { html.onHEADRequest(req, resp); } else { @@ -273,98 +200,74 @@ pub const HTMLBundleRoute = struct { /// Schedule a bundle to be built. /// If success, bumps the ref count and returns true; - /// Returns false if the bundle task could not be scheduled. 
- fn scheduleBundle(this: *HTMLBundleRoute, server: AnyServer) bool { - const globalThis = server.globalThis(); - const vm = globalThis.bunVM(); - const plugins = this.html_bundle.plugins.result; + fn scheduleBundle(this: *HTMLBundleRoute, server: AnyServer) !void { + switch (server.getOrLoadPlugins(.{ .html_bundle_route = this })) { + .err => this.state = .{ .err = bun.logger.Log.init(bun.default_allocator) }, + .ready => |plugins| try onPluginsResolved(this, plugins), + .pending => {}, + } + } - var config = this.html_bundle.config; - config.entry_points = config.entry_points.clone() catch bun.outOfMemory(); - config.public_path = config.public_path.clone() catch bun.outOfMemory(); - config.define = config.define.clone() catch bun.outOfMemory(); + pub fn onPluginsResolved(this: *HTMLBundleRoute, plugins: ?*bun.JSC.API.JSBundler.Plugin) !void { + const global = this.html_bundle.global; + const server = this.server.?; + const development = server.config().development; + const vm = global.bunVM(); + + var config: JSBundler.Config = .{}; + errdefer config.deinit(bun.default_allocator); + try config.entry_points.insert(this.html_bundle.path); + try config.public_path.appendChar('/'); if (bun.CLI.Command.get().args.serve_minify_identifiers) |minify_identifiers| { config.minify.identifiers = minify_identifiers; - } else if (!server.config().development) { + } else if (!development) { config.minify.identifiers = true; } if (bun.CLI.Command.get().args.serve_minify_whitespace) |minify_whitespace| { config.minify.whitespace = minify_whitespace; - } else if (!server.config().development) { + } else if (!development) { config.minify.whitespace = true; } if (bun.CLI.Command.get().args.serve_minify_syntax) |minify_syntax| { config.minify.syntax = minify_syntax; - } else if (!server.config().development) { + } else if (!development) { config.minify.syntax = true; } - if (!server.config().development) { + if (!development) { config.define.put("process.env.NODE_ENV", "\"production\"") catch bun.outOfMemory(); config.jsx.development = false; } else { config.force_node_env = .development; config.jsx.development = true; } - config.source_map = .linked; - const completion_task = bun.BundleV2.createAndScheduleCompletionTask( + const completion_task = try bun.BundleV2.createAndScheduleCompletionTask( config, plugins, - globalThis, + global, vm.eventLoop(), bun.default_allocator, - ) catch { - return false; - }; + ); completion_task.started_at_ns = bun.getRoughTickCount().ns(); completion_task.html_build_task = this; - this.value = .{ .building = completion_task }; + this.state = .{ .building = completion_task }; // While we're building, ensure this doesn't get freed. this.ref(); - return true; - } - - pub fn onPluginsResolved(this: *HTMLBundleRoute, plugins: ?*bun.JSC.API.JSBundler.Plugin) void { - debug("HTMLBundleRoute(0x{x}) plugins resolved", .{@intFromPtr(this)}); - this.html_bundle.plugins = .{ .result = plugins }; - // TODO: is this even possible? 
- if (this.value != .pending_plugins) { - return; - } - - const server: AnyServer = this.server orelse return; - const success = this.scheduleBundle(server); - - if (!success) { - var pending = this.pending_responses; - defer pending.deinit(bun.default_allocator); - this.pending_responses = .{}; - for (pending.items) |pending_response| { - // for the list of pending responses - defer pending_response.deref(); - pending_response.resp.endWithoutBody(true); - } - } } - pub fn onPluginsRejected(this: *HTMLBundleRoute) void { + pub fn onPluginsRejected(this: *HTMLBundleRoute) !void { debug("HTMLBundleRoute(0x{x}) plugins rejected", .{@intFromPtr(this)}); - this.value = .{ .err = bun.logger.Log.init(bun.default_allocator) }; - + this.state = .{ .err = bun.logger.Log.init(bun.default_allocator) }; this.resumePendingResponses(); } pub fn onComplete(this: *HTMLBundleRoute, completion_task: *bun.BundleV2.JSBundleCompletionTask) void { - // To ensure it stays alive for the deuration of this function. - this.ref(); - defer this.deref(); - // For the build task. defer this.deref(); @@ -372,15 +275,15 @@ pub const HTMLBundleRoute = struct { .err => |err| { if (bun.Environment.enable_logs) debug("onComplete: err - {s}", .{@errorName(err)}); - this.value = .{ .err = bun.logger.Log.init(bun.default_allocator) }; - completion_task.log.cloneToWithRecycled(&this.value.err, true) catch bun.outOfMemory(); + this.state = .{ .err = bun.logger.Log.init(bun.default_allocator) }; + completion_task.log.cloneToWithRecycled(&this.state.err, true) catch bun.outOfMemory(); if (this.server) |server| { if (server.config().development) { switch (bun.Output.enable_ansi_colors_stderr) { inline else => |enable_ansi_colors| { var writer = bun.Output.errorWriterBuffered(); - this.value.err.printWithEnableAnsiColors(&writer, enable_ansi_colors) catch {}; + this.state.err.printWithEnableAnsiColors(&writer, enable_ansi_colors) catch {}; writer.context.flush() catch {}; }, } @@ -465,7 +368,7 @@ pub const HTMLBundleRoute = struct { const html_route: *StaticRoute = this_html_route orelse @panic("Internal assertion failure: HTML entry point not found in HTMLBundle."); const html_route_clone = html_route.clone(globalThis) catch bun.outOfMemory(); - this.value = .{ .html = html_route_clone }; + this.state = .{ .html = html_route_clone }; if (!(server.reloadStaticRoutes() catch bun.outOfMemory())) { // Server has shutdown, so it won't receive any new requests @@ -484,24 +387,21 @@ pub const HTMLBundleRoute = struct { defer pending.deinit(bun.default_allocator); this.pending_responses = .{}; for (pending.items) |pending_response| { - // for the list of pending responses - defer pending_response.deref(); + defer pending_response.deref(); // First ref for being in the pending items array. const resp = pending_response.resp; const method = pending_response.method; - if (!pending_response.is_response_pending) { - // request already aborted + // Aborted continue; } + // Second ref for UWS abort callback. 
+ defer pending_response.deref(); pending_response.is_response_pending = false; resp.clearAborted(); - switch (this.value) { - .pending_plugins => { - // this.onAnyRequest(req: *uws.Request, resp: HTTPResponse, is_head: bool) - }, + switch (this.state) { .html => |html| { if (method == .HEAD) { html.onHEAD(resp); @@ -510,7 +410,12 @@ pub const HTMLBundleRoute = struct { } }, .err => |log| { - _ = log; // autofix + if (this.server.?.config().development) { + _ = log; // TODO: use the code from DevServer.zig to render the error + } else { + // To protect privacy, do not show errors to end users in production. + // TODO: Show a generic error page. + } resp.writeStatus("500 Build Failed"); resp.endWithoutBody(false); }, @@ -518,13 +423,10 @@ pub const HTMLBundleRoute = struct { resp.endWithoutBody(false); }, } - - // for the HTTP response. - pending_response.deref(); } } - // Represents an in-flight response before the bundle has finished building. + /// Represents an in-flight response before the bundle has finished building. pub const PendingResponse = struct { method: bun.http.Method, resp: HTTPResponse, diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 240e64ee2c36b7..4202937199d76a 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -2091,7 +2091,7 @@ pub const ModuleLoader = struct { return error.NotSupported; } - const html_bundle = try JSC.API.HTMLBundle.init(globalObject.?, path.text, jsc_vm.transpiler.options.bunfig_path, jsc_vm.transpiler.options.serve_plugins); + const html_bundle = try JSC.API.HTMLBundle.init(globalObject.?, path.text); return ResolvedSource{ .allocator = &jsc_vm.allocator, .jsvalue_for_export = html_bundle.toJS(globalObject.?), diff --git a/src/bun.zig b/src/bun.zig index 7bcda9025b2cf8..ee3b30dba952ba 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -70,8 +70,6 @@ pub inline fn clampFloat(_self: anytype, min: @TypeOf(_self), max: @TypeOf(_self return self; } -pub const ArrayList = std.ArrayListUnmanaged; - /// We cannot use a threadlocal memory allocator for FileSystem-related things /// FileSystem is a singleton. 
pub const fs_allocator = default_allocator; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 4025ae65dbc0e9..6e4facf142c7c1 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -1637,9 +1637,8 @@ pub const BundleV2 = struct { plugins: ?*bun.JSC.API.JSBundler.Plugin, globalThis: *JSC.JSGlobalObject, event_loop: *bun.JSC.EventLoop, - allocator: std.mem.Allocator, + _: std.mem.Allocator, ) OOM!*JSBundleCompletionTask { - _ = allocator; // autofix const completion = JSBundleCompletionTask.new(.{ .config = config, .jsc_event_loop = event_loop, @@ -16734,12 +16733,12 @@ pub const CssEntryPointMeta = struct { imported_on_server: bool, }; -/// The lifetime of this structure is tied to the transpiler's arena +/// The lifetime of this structure is tied to the bundler's arena pub const DevServerInput = struct { css_entry_points: std.AutoArrayHashMapUnmanaged(Index, CssEntryPointMeta), }; -/// The lifetime of this structure is tied to the transpiler's arena +/// The lifetime of this structure is tied to the bundler's arena pub const DevServerOutput = struct { chunks: []Chunk, css_file_list: std.AutoArrayHashMapUnmanaged(Index, CssEntryPointMeta), diff --git a/src/cli/install.ps1 b/src/cli/install.ps1 index 4d3e36a5792b1d..39c49ac348818c 100644 --- a/src/cli/install.ps1 +++ b/src/cli/install.ps1 @@ -23,7 +23,7 @@ if (-not ((Get-CimInstance Win32_ComputerSystem)).SystemType -match "x64-based") # This corresponds to .win10_rs5 in build.zig $MinBuild = 17763; -$MinBuildName = "Windows 10 1809" +$MinBuildName = "Windows 10 1809 / Windows Server 2019" $WinVer = [System.Environment]::OSVersion.Version if ($WinVer.Major -lt 10 -or ($WinVer.Major -eq 10 -and $WinVer.Build -lt $MinBuild)) { diff --git a/src/fs.zig b/src/fs.zig index fa20e43a5272aa..a0b5344df55bc3 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -412,18 +412,18 @@ pub const FileSystem = struct { // } pub fn normalize(_: *@This(), str: string) string { - return @call(bun.callmod_inline, path_handler.normalizeString, .{ str, true, .auto }); + return @call(bun.callmod_inline, path_handler.normalizeString, .{ str, true, bun.path.Platform.auto }); } pub fn normalizeBuf(_: *@This(), buf: []u8, str: string) string { - return @call(bun.callmod_inline, path_handler.normalizeStringBuf, .{ str, buf, false, .auto, false }); + return @call(bun.callmod_inline, path_handler.normalizeStringBuf, .{ str, buf, false, bun.path.Platform.auto, false }); } pub fn join(_: *@This(), parts: anytype) string { return @call(bun.callmod_inline, path_handler.joinStringBuf, .{ &join_buf, parts, - .loose, + bun.path.Platform.loose, }); } @@ -431,7 +431,7 @@ pub const FileSystem = struct { return @call(bun.callmod_inline, path_handler.joinStringBuf, .{ buf, parts, - .loose, + bun.path.Platform.loose, }); } diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig index c1f9b3e9959ed8..66038e5d37e721 100644 --- a/src/resolver/resolve_path.zig +++ b/src/resolver/resolve_path.zig @@ -86,8 +86,7 @@ pub fn isParentOrEqual(parent_: []const u8, child: []const u8) ParentEqual { return .unrelated; } -pub fn getIfExistsLongestCommonPathGeneric(input: []const []const u8, comptime _platform: Platform) ?[]const u8 { - const platform = comptime _platform.resolve(); +pub fn getIfExistsLongestCommonPathGeneric(input: []const []const u8, comptime platform: Platform) ?[]const u8 { const separator = comptime platform.separator(); const isPathSeparator = comptime platform.getSeparatorFunc(); @@ -178,8 +177,7 @@ pub fn 
getIfExistsLongestCommonPathGeneric(input: []const []const u8, comptime _ // TODO: is it faster to determine longest_common_separator in the while loop // or as an extra step at the end? // only boether to check if this function appears in benchmarking -pub fn longestCommonPathGeneric(input: []const []const u8, comptime _platform: Platform) []const u8 { - const platform = comptime _platform.resolve(); +pub fn longestCommonPathGeneric(input: []const []const u8, comptime platform: Platform) []const u8 { const separator = comptime platform.separator(); const isPathSeparator = comptime platform.getSeparatorFunc(); @@ -318,9 +316,8 @@ pub fn relativeToCommonPath( normalized_to_: []const u8, buf: []u8, comptime always_copy: bool, - comptime _platform: Platform, + comptime platform: Platform, ) []const u8 { - const platform = comptime _platform.resolve(); var normalized_from = normalized_from_; var normalized_to = normalized_to_; const win_root_len = if (platform == .windows) k: { @@ -463,8 +460,7 @@ pub fn relativeToCommonPath( return out_slice; } -pub fn relativeNormalizedBuf(buf: []u8, from: []const u8, to: []const u8, comptime _platform: Platform, comptime always_copy: bool) []const u8 { - const platform = comptime _platform.resolve(); +pub fn relativeNormalizedBuf(buf: []u8, from: []const u8, to: []const u8, comptime platform: Platform, comptime always_copy: bool) []const u8 { if ((if (platform == .windows) strings.eqlCaseInsensitiveASCII(from, to, true) else @@ -480,11 +476,11 @@ pub fn relativeNormalizedBuf(buf: []u8, from: []const u8, to: []const u8, compti } pub fn relativeNormalized(from: []const u8, to: []const u8, comptime platform: Platform, comptime always_copy: bool) []const u8 { - return relativeNormalizedBuf(&relative_to_common_path_buf, from, to, comptime platform.resolve(), always_copy); + return relativeNormalizedBuf(&relative_to_common_path_buf, from, to, platform, always_copy); } pub fn dirname(str: []const u8, comptime platform: Platform) []const u8 { - switch (comptime platform.resolve()) { + switch (platform) { .loose => { const separator = lastIndexOfSeparatorLoose(str) orelse return ""; return str[0..separator]; @@ -499,7 +495,7 @@ pub fn dirname(str: []const u8, comptime platform: Platform) []const u8 { const separator = lastIndexOfSeparatorWindows(str) orelse return std.fs.path.diskDesignatorWindows(str); return str[0..separator]; }, - else => @compileError("unreachable"), + else => @compileError("not implemented"), } } @@ -531,8 +527,7 @@ pub fn relativeBufZ(buf: []u8, from: []const u8, to: []const u8) [:0]const u8 { return buf[0..rel.len :0]; } -pub fn relativePlatformBuf(buf: []u8, from: []const u8, to: []const u8, comptime _platform: Platform, comptime always_copy: bool) []const u8 { - const platform = comptime _platform.resolve(); +pub fn relativePlatformBuf(buf: []u8, from: []const u8, to: []const u8, comptime platform: Platform, comptime always_copy: bool) []const u8 { const normalized_from = if (platform.isAbsolute(from)) brk: { if (platform == .loose and bun.Environment.isWindows) { // we want to invoke the windows resolution behavior but end up with a @@ -577,11 +572,11 @@ pub fn relativePlatformBuf(buf: []u8, from: []const u8, to: []const u8, comptime } pub fn relativePlatform(from: []const u8, to: []const u8, comptime platform: Platform, comptime always_copy: bool) []const u8 { - return relativePlatformBuf(&relative_to_common_path_buf, from, to, comptime platform.resolve(), always_copy); + return relativePlatformBuf(&relative_to_common_path_buf, from, to, 
platform, always_copy); } pub fn relativeAlloc(allocator: std.mem.Allocator, from: []const u8, to: []const u8) ![]const u8 { - const result = relativePlatform(from, to, Platform.current, false); + const result = relativePlatform(from, to, .auto, false); return try allocator.dupe(u8, result); } @@ -961,20 +956,24 @@ pub fn normalizeStringGenericTZ( } pub const Platform = enum { - auto, loose, windows, posix, nt, + pub const auto: Platform = switch (bun.Environment.os) { + .windows => .windows, + .linux, .mac => .posix, + .wasm => .loose, + }; + pub fn isAbsolute(comptime platform: Platform, path: []const u8) bool { return isAbsoluteT(platform, u8, path); } pub fn isAbsoluteT(comptime platform: Platform, comptime T: type, path: []const T) bool { - if (comptime T != u8 and T != u16) @compileError("Unsupported type given to isAbsoluteT"); - return switch (comptime platform) { - .auto => (comptime platform.resolve()).isAbsoluteT(T, path), + if (T != u8 and T != u16) @compileError("Unsupported type given to isAbsoluteT"); + return switch (platform) { .posix => path.len > 0 and path[0] == '/', .nt, .windows, @@ -986,116 +985,73 @@ pub const Platform = enum { }; } - pub fn separator(comptime platform: Platform) u8 { - return comptime switch (platform) { - .auto => platform.resolve().separator(), + pub inline fn separator(comptime platform: Platform) u8 { + return switch (platform) { .loose, .posix => std.fs.path.sep_posix, .nt, .windows => std.fs.path.sep_windows, }; } - pub fn separatorString(comptime platform: Platform) []const u8 { - return comptime switch (platform) { - .auto => platform.resolve().separatorString(), + pub inline fn separatorString(comptime platform: Platform) []const u8 { + return switch (platform) { .loose, .posix => std.fs.path.sep_str_posix, .nt, .windows => std.fs.path.sep_str_windows, }; } - pub const current: Platform = switch (@import("builtin").target.os.tag) { - .windows => Platform.windows, - else => Platform.posix, - }; - - pub fn getSeparatorFunc(comptime _platform: Platform) IsSeparatorFunc { - switch (comptime _platform.resolve()) { - .auto => @compileError("unreachable"), - .loose => { - return isSepAny; - }, - .nt, .windows => { - return isSepAny; - }, - .posix => { - return isSepPosix; - }, - } + pub fn getSeparatorFunc(comptime platform: Platform) IsSeparatorFunc { + return switch (platform) { + .loose => isSepAny, + .nt, .windows => isSepAny, + .posix => isSepPosix, + }; } - pub fn getSeparatorFuncT(comptime _platform: Platform) IsSeparatorFuncT { - switch (comptime _platform.resolve()) { - .auto => @compileError("unreachable"), - .loose => { - return isSepAnyT; - }, - .nt, .windows => { - return isSepAnyT; - }, - .posix => { - return isSepPosixT; - }, - } + pub fn getSeparatorFuncT(comptime platform: Platform) IsSeparatorFuncT { + return switch (platform) { + .loose => isSepAnyT, + .nt, .windows => isSepAnyT, + .posix => isSepPosixT, + }; } - pub fn getLastSeparatorFunc(comptime _platform: Platform) LastSeparatorFunction { - switch (comptime _platform.resolve()) { - .auto => @compileError("unreachable"), - .loose => { - return lastIndexOfSeparatorLoose; - }, - .nt, .windows => { - return lastIndexOfSeparatorWindows; - }, - .posix => { - return lastIndexOfSeparatorPosix; - }, - } + pub fn getLastSeparatorFunc(comptime platform: Platform) LastSeparatorFunction { + return switch (platform) { + .loose => lastIndexOfSeparatorLoose, + .nt, .windows => lastIndexOfSeparatorWindows, + .posix => lastIndexOfSeparatorPosix, + }; } - pub fn 
getLastSeparatorFuncT(comptime _platform: Platform) LastSeparatorFunctionT { - switch (comptime _platform.resolve()) { - .auto => @compileError("unreachable"), - .loose => { - return lastIndexOfSeparatorLooseT; - }, - .nt, .windows => { - return lastIndexOfSeparatorWindowsT; - }, - .posix => { - return lastIndexOfSeparatorPosixT; - }, - } + pub fn getLastSeparatorFuncT(comptime platform: Platform) LastSeparatorFunctionT { + return switch (platform) { + .loose => lastIndexOfSeparatorLooseT, + .nt, .windows => lastIndexOfSeparatorWindowsT, + .posix => lastIndexOfSeparatorPosixT, + }; } - pub inline fn isSeparator(comptime _platform: Platform, char: u8) bool { - return isSeparatorT(_platform, u8, char); + pub inline fn isSeparator(comptime platform: Platform, char: u8) bool { + return isSeparatorT(platform, u8, char); } - pub inline fn isSeparatorT(comptime _platform: Platform, comptime T: type, char: T) bool { - switch (comptime _platform.resolve()) { - .auto => @compileError("unreachable"), - .loose => { - return isSepAnyT(T, char); - }, - .nt, .windows => { - return isSepAnyT(T, char); - }, - .posix => { - return isSepPosixT(T, char); - }, - } + pub inline fn isSeparatorT(comptime platform: Platform, comptime T: type, char: T) bool { + return switch (platform) { + .loose => isSepAnyT(T, char), + .nt, .windows => isSepAnyT(T, char), + .posix => isSepPosixT(T, char), + }; } - pub fn trailingSeparator(comptime _platform: Platform) [2]u8 { - return comptime switch (_platform) { - .auto => _platform.resolve().trailingSeparator(), + pub fn trailingSeparator(comptime platform: Platform) [2]u8 { + return switch (platform) { .nt, .windows => ".\\".*, .posix, .loose => "./".*, }; } - pub fn leadingSeparatorIndex(comptime _platform: Platform, path: anytype) ?usize { - switch (comptime _platform.resolve()) { + pub fn leadingSeparatorIndex(comptime platform: Platform, path: anytype) ?usize { + switch (platform) { .nt, .windows => { if (path.len < 1) return null; @@ -1129,66 +1085,51 @@ pub const Platform = enum { return null; } }, - else => { - return leadingSeparatorIndex(.windows, path) orelse leadingSeparatorIndex(.posix, path); - }, + .loose => return leadingSeparatorIndex(.windows, path) orelse + leadingSeparatorIndex(.posix, path), } } - - pub inline fn resolve(comptime _platform: Platform) Platform { - if (comptime _platform == .auto) { - return switch (@import("builtin").target.os.tag) { - .windows => Platform.windows, - - .freestanding, .emscripten, .other => Platform.loose, - - else => Platform.posix, - }; - } - - return _platform; - } }; -pub fn normalizeString(str: []const u8, comptime allow_above_root: bool, comptime _platform: Platform) []u8 { - return normalizeStringBuf(str, &parser_buffer, allow_above_root, _platform, false); +pub fn normalizeString(str: []const u8, comptime allow_above_root: bool, comptime platform: Platform) []u8 { + return normalizeStringBuf(str, &parser_buffer, allow_above_root, platform, false); } -pub fn normalizeStringZ(str: []const u8, comptime allow_above_root: bool, comptime _platform: Platform) [:0]u8 { - const normalized = normalizeStringBuf(str, &parser_buffer, allow_above_root, _platform, false); +pub fn normalizeStringZ(str: []const u8, comptime allow_above_root: bool, comptime platform: Platform) [:0]u8 { + const normalized = normalizeStringBuf(str, &parser_buffer, allow_above_root, platform, false); parser_buffer[normalized.len] = 0; return parser_buffer[0..normalized.len :0]; } -pub fn normalizeBuf(str: []const u8, buf: []u8, comptime _platform: 
Platform) []u8 { - return normalizeBufT(u8, str, buf, _platform); +pub fn normalizeBuf(str: []const u8, buf: []u8, comptime platform: Platform) []u8 { + return normalizeBufT(u8, str, buf, platform); } -pub fn normalizeBufZ(str: []const u8, buf: []u8, comptime _platform: Platform) [:0]u8 { - const norm = normalizeBufT(u8, str, buf, _platform); +pub fn normalizeBufZ(str: []const u8, buf: []u8, comptime platform: Platform) [:0]u8 { + const norm = normalizeBufT(u8, str, buf, platform); buf[norm.len] = 0; return buf[0..norm.len :0]; } -pub fn normalizeBufT(comptime T: type, str: []const T, buf: []T, comptime _platform: Platform) []T { +pub fn normalizeBufT(comptime T: type, str: []const T, buf: []T, comptime platform: Platform) []T { if (str.len == 0) { buf[0] = '.'; return buf[0..1]; } - const is_absolute = _platform.isAbsoluteT(T, str); + const is_absolute = platform.isAbsoluteT(T, str); - const trailing_separator = _platform.getLastSeparatorFuncT()(T, str) == str.len - 1; + const trailing_separator = platform.getLastSeparatorFuncT()(T, str) == str.len - 1; if (is_absolute and trailing_separator) - return normalizeStringBufT(T, str, buf, true, _platform, true); + return normalizeStringBufT(T, str, buf, true, platform, true); if (is_absolute and !trailing_separator) - return normalizeStringBufT(T, str, buf, true, _platform, false); + return normalizeStringBufT(T, str, buf, true, platform, false); if (!is_absolute and !trailing_separator) - return normalizeStringBufT(T, str, buf, false, _platform, false); + return normalizeStringBufT(T, str, buf, false, platform, false); - return normalizeStringBufT(T, str, buf, false, _platform, true); + return normalizeStringBufT(T, str, buf, false, platform, true); } pub fn normalizeStringBuf( @@ -1209,9 +1150,8 @@ pub fn normalizeStringBufT( comptime platform: Platform, comptime preserve_trailing_slash: bool, ) []T { - switch (comptime platform.resolve()) { - .nt, .auto => @compileError("unreachable"), - + switch (platform) { + .nt => @compileError("not implemented"), .windows => { return normalizeStringWindowsT( T, @@ -1243,18 +1183,18 @@ pub fn normalizeStringBufT( } } -pub fn normalizeStringAlloc(allocator: std.mem.Allocator, str: []const u8, comptime allow_above_root: bool, comptime _platform: Platform) ![]const u8 { - return try allocator.dupe(u8, normalizeString(str, allow_above_root, _platform)); +pub fn normalizeStringAlloc(allocator: std.mem.Allocator, str: []const u8, comptime allow_above_root: bool, comptime platform: Platform) ![]const u8 { + return try allocator.dupe(u8, normalizeString(str, allow_above_root, platform)); } -pub fn joinAbs2(_cwd: []const u8, comptime _platform: Platform, part: anytype, part2: anytype) []const u8 { +pub fn joinAbs2(_cwd: []const u8, comptime platform: Platform, part: anytype, part2: anytype) []const u8 { const parts = [_][]const u8{ part, part2 }; - const slice = joinAbsString(_cwd, &parts, _platform); + const slice = joinAbsString(_cwd, &parts, platform); return slice; } -pub fn joinAbs(cwd: []const u8, comptime _platform: Platform, part: []const u8) []const u8 { - return joinAbsString(cwd, &.{part}, _platform); +pub fn joinAbs(cwd: []const u8, comptime platform: Platform, part: []const u8) []const u8 { + return joinAbsString(cwd, &.{part}, platform); } /// Convert parts of potentially invalid file paths into a single valid filpeath @@ -1262,12 +1202,12 @@ pub fn joinAbs(cwd: []const u8, comptime _platform: Platform, part: []const u8) /// This is the equivalent of path.resolve /// /// Returned path is stored in 
a temporary buffer. It must be copied if it needs to be stored. -pub fn joinAbsString(_cwd: []const u8, parts: anytype, comptime _platform: Platform) []const u8 { +pub fn joinAbsString(_cwd: []const u8, parts: anytype, comptime platform: Platform) []const u8 { return joinAbsStringBuf( _cwd, &parser_join_input_buffer, parts, - _platform, + platform, ); } @@ -1276,48 +1216,46 @@ pub fn joinAbsString(_cwd: []const u8, parts: anytype, comptime _platform: Platf /// This is the equivalent of path.resolve /// /// Returned path is stored in a temporary buffer. It must be copied if it needs to be stored. -pub fn joinAbsStringZ(_cwd: []const u8, parts: anytype, comptime _platform: Platform) [:0]const u8 { +pub fn joinAbsStringZ(_cwd: []const u8, parts: anytype, comptime platform: Platform) [:0]const u8 { return joinAbsStringBufZ( _cwd, &parser_join_input_buffer, parts, - _platform, + platform, ); } pub threadlocal var join_buf: [4096]u8 = undefined; -pub fn join(_parts: anytype, comptime _platform: Platform) []const u8 { - return joinStringBuf(&join_buf, _parts, _platform); +pub fn join(_parts: anytype, comptime platform: Platform) []const u8 { + return joinStringBuf(&join_buf, _parts, platform); } -pub fn joinZ(_parts: anytype, comptime _platform: Platform) [:0]const u8 { - return joinZBuf(&join_buf, _parts, _platform); +pub fn joinZ(_parts: anytype, comptime platform: Platform) [:0]const u8 { + return joinZBuf(&join_buf, _parts, platform); } -pub fn joinZBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]const u8 { - const joined = joinStringBuf(buf[0 .. buf.len - 1], _parts, _platform); +pub fn joinZBuf(buf: []u8, _parts: anytype, comptime platform: Platform) [:0]const u8 { + const joined = joinStringBuf(buf[0 .. buf.len - 1], _parts, platform); assert(bun.isSliceInBuffer(joined, buf)); const start_offset = @intFromPtr(joined.ptr) - @intFromPtr(buf.ptr); buf[joined.len + start_offset] = 0; return buf[start_offset..][0..joined.len :0]; } -pub fn joinStringBuf(buf: []u8, parts: anytype, comptime _platform: Platform) []const u8 { - return joinStringBufT(u8, buf, parts, _platform); +pub fn joinStringBuf(buf: []u8, parts: anytype, comptime platform: Platform) []const u8 { + return joinStringBufT(u8, buf, parts, platform); } -pub fn joinStringBufW(buf: []u16, parts: anytype, comptime _platform: Platform) []const u16 { - return joinStringBufT(u16, buf, parts, _platform); +pub fn joinStringBufW(buf: []u16, parts: anytype, comptime platform: Platform) []const u16 { + return joinStringBufT(u16, buf, parts, platform); } -pub fn joinStringBufWZ(buf: []u16, parts: anytype, comptime _platform: Platform) [:0]const u16 { - const joined = joinStringBufT(u16, buf[0 .. buf.len - 1], parts, _platform); +pub fn joinStringBufWZ(buf: []u16, parts: anytype, comptime platform: Platform) [:0]const u16 { + const joined = joinStringBufT(u16, buf[0 .. 
buf.len - 1], parts, platform); assert(bun.isSliceInBufferT(u16, joined, buf)); const start_offset = @intFromPtr(joined.ptr) / 2 - @intFromPtr(buf.ptr) / 2; buf[joined.len + start_offset] = 0; return buf[start_offset..][0..joined.len :0]; } -pub fn joinStringBufT(comptime T: type, buf: []T, parts: anytype, comptime _platform: Platform) []const T { - const platform = comptime _platform.resolve(); - +pub fn joinStringBufT(comptime T: type, buf: []T, parts: anytype, comptime platform: Platform) []const T { var written: usize = 0; var temp_buf_: [4096]T = undefined; var temp_buf: []T = &temp_buf_; @@ -1367,26 +1305,26 @@ pub fn joinStringBufT(comptime T: type, buf: []T, parts: anytype, comptime _plat return normalizeStringNodeT(T, temp_buf[0..written], buf, platform); } -pub fn joinAbsStringBuf(cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) []const u8 { - return _joinAbsStringBuf(false, []const u8, cwd, buf, _parts, _platform); +pub fn joinAbsStringBuf(cwd: []const u8, buf: []u8, _parts: anytype, comptime platform: Platform) []const u8 { + return _joinAbsStringBuf(false, []const u8, cwd, buf, _parts, platform); } -pub fn joinAbsStringBufZ(cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]const u8 { - return _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, _platform); +pub fn joinAbsStringBufZ(cwd: []const u8, buf: []u8, _parts: anytype, comptime platform: Platform) [:0]const u8 { + return _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, platform); } -pub fn joinAbsStringBufZNT(cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]const u8 { - if ((_platform == .auto or _platform == .loose or _platform == .windows) and bun.Environment.isWindows) { +pub fn joinAbsStringBufZNT(cwd: []const u8, buf: []u8, _parts: anytype, comptime platform: Platform) [:0]const u8 { + if ((platform == .auto or platform == .loose or platform == .windows) and bun.Environment.isWindows) { return _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, .nt); } - return _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, _platform); + return _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, platform); } -pub fn joinAbsStringBufZTrailingSlash(cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]const u8 { - const out = _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, _platform); - if (out.len + 2 < buf.len and out.len > 0 and out[out.len - 1] != _platform.separator()) { - buf[out.len] = _platform.separator(); +pub fn joinAbsStringBufZTrailingSlash(cwd: []const u8, buf: []u8, _parts: anytype, comptime platform: Platform) [:0]const u8 { + const out = _joinAbsStringBuf(true, [:0]const u8, cwd, buf, _parts, platform); + if (out.len + 2 < buf.len and out.len > 0 and out[out.len - 1] != platform.separator()) { + buf[out.len] = platform.separator(); buf[out.len + 1] = 0; return buf[0 .. 
out.len + 1 :0]; } @@ -1394,15 +1332,14 @@ pub fn joinAbsStringBufZTrailingSlash(cwd: []const u8, buf: []u8, _parts: anytyp return out; } -fn _joinAbsStringBuf(comptime is_sentinel: bool, comptime ReturnType: type, _cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) ReturnType { - const platform = comptime _platform.resolve(); +fn _joinAbsStringBuf(comptime is_sentinel: bool, comptime ReturnType: type, _cwd: []const u8, buf: []u8, _parts: anytype, comptime platform: Platform) ReturnType { if (platform == .windows or (bun.Environment.os == .windows and platform == .loose)) { return _joinAbsStringBufWindows(is_sentinel, ReturnType, _cwd, buf, _parts); } - if (comptime platform.resolve() == .nt) { + if (platform == .nt) { const end_path = _joinAbsStringBufWindows(is_sentinel, ReturnType, _cwd, buf[4..], _parts); buf[0..4].* = "\\\\?\\".*; if (comptime is_sentinel) { @@ -1744,9 +1681,8 @@ pub fn normalizeStringNodeT( comptime T: type, str: []const T, buf: []T, - comptime _platform: Platform, + comptime platform: Platform, ) []const T { - const platform = comptime _platform.resolve(); if (str.len == 0) { buf[0] = '.'; return buf[0..1]; @@ -1764,7 +1700,7 @@ pub fn normalizeStringNodeT( str, buf_, true, - comptime platform.resolve().separator(), + comptime platform.separator(), comptime platform.getSeparatorFuncT(), false, ) else normalizeStringGenericT( @@ -1772,7 +1708,7 @@ pub fn normalizeStringNodeT( str, buf_, false, - comptime platform.resolve().separator(), + comptime platform.separator(), comptime platform.getSeparatorFuncT(), false, ); @@ -2065,7 +2001,7 @@ export fn ResolvePath__joinAbsStringBufCurrentPlatformBunString( globalObject.bunVM().transpiler.fs.top_level_dir, &join_buf, &.{str.slice()}, - comptime Platform.auto.resolve(), + .auto, ); return bun.String.createUTF8(out_slice); diff --git a/src/watcher.zig b/src/watcher.zig index 3f710c64ceb354..77a802d91d7c29 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -35,7 +35,7 @@ ctx: *anyopaque, onFileUpdate: *const fn (this: *anyopaque, events: []WatchEvent, changed_files: []?[:0]u8, watchlist: WatchList) void, onError: *const fn (this: *anyopaque, err: bun.sys.Error) void, -thread_lock: bun.DebugThreadLock = bun.DebugThreadLock.unlocked, +thread_lock: bun.DebugThreadLock = .unlocked, /// Initializes a watcher. Each watcher is tied to some context type, which /// recieves watch callbacks on the watcher thread. This function does not From 4bf972c53c1fd644b8c7649c07899853aa4fd86e Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Wed, 5 Feb 2025 16:30:28 -0800 Subject: [PATCH 15/28] tailwind --- packages/bun-types/bun.d.ts | 9 +- src/bake/DevServer.zig | 398 ++++++++++++++++++++-------- src/bake/hmr-module.ts | 18 +- src/bake/hmr-runtime-client.ts | 13 +- src/bake/hmr-runtime-error.ts | 3 +- src/bun.js/api/bun/dns_resolver.zig | 32 +-- src/bun.js/api/server.zig | 5 +- src/bun.zig | 2 +- src/bundler/bundle_v2.zig | 112 ++++---- src/cache.zig | 18 +- src/feature_flags.zig | 3 - src/hive_array.zig | 16 +- src/http.zig | 2 +- src/install/install.zig | 4 +- src/renamer.zig | 2 +- src/watcher.zig | 16 +- 16 files changed, 439 insertions(+), 214 deletions(-) diff --git a/packages/bun-types/bun.d.ts b/packages/bun-types/bun.d.ts index bfe9476bef6aff..8c91f2960d7600 100644 --- a/packages/bun-types/bun.d.ts +++ b/packages/bun-types/bun.d.ts @@ -5309,7 +5309,10 @@ declare module "bun" { interface PluginBuilder { /** - * Register a callback which will be invoked when bundling starts. 
+ * Register a callback which will be invoked when bundling starts. When + * using hot module reloading, this is called at the start of each + * incremental rebuild. + * * @example * ```ts * Bun.plugin({ @@ -5406,9 +5409,9 @@ declare module "bun" { * - `browser`: The plugin will be applied to browser builds * - `node`: The plugin will be applied to Node.js builds * - * If in Bun's runtime, the default target is `bun`. + * If unspecified, it is assumed that the plugin is compatible with all targets. * - * If unspecified, it is assumed that the plugin is compatible with the default target. + * This field is not read by Bun.plugin */ target?: Target; /** diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 95c9c98418592b..a349af9e0fd8c3 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -24,7 +24,6 @@ pub const Options = struct { // Debugging features dump_sources: ?[]const u8 = if (Environment.isDebug) ".bake-debug" else null, dump_state_on_crash: ?bool = null, - verbose_watcher: bool = false, }; // The fields `client_graph`, `server_graph`, `directory_watchers`, and `assets` @@ -71,7 +70,7 @@ html_route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.client).FileIndex html_router: HTMLRouter, /// CSS files are accessible via `/_bun/css/.css` /// Value is bundled code owned by `dev.allocator` -// TODO: StaticRoute +// TODO: Move to Assets css_files: AutoArrayHashMapUnmanaged(u64, []const u8), /// JS files are accessible via `/_bun/client/route..js` /// These are randomly generated to avoid possible browser caching of old assets. @@ -88,6 +87,10 @@ bundling_failures: std.ArrayHashMapUnmanaged( false, ) = .{}, frontend_only: bool, +/// The Plugin API is missing a way to attach filesystem watchers (addWatchFile) +/// This special case makes `bun-plugin-tailwind` work, which is a requirement +/// to ship initial incremental bundling support for HTML files. +has_tailwind_plugin_hack: ?bun.StringArrayHashMapUnmanaged(void) = null, // These values are handles to the functions in `hmr-runtime-server.ts`. // For type definitions, see `./bake.private.d.ts` @@ -139,12 +142,14 @@ current_bundle: ?struct { /// must be done to inform clients to reload routes. When this is false, /// all entry points do not have bundles yet. had_reload_event: bool, + /// After a bundle finishes, these requests will be continued, either + /// calling their handler on success or sending the error page on failure. + requests: DeferredRequest.List, + /// Resolution failures are grouped by incremental graph file index. + /// Unlike parse failures (`handleParseTaskFailure`), the resolution + /// failures can be created asyncronously, and out of order. + resolution_failure_entries: AutoArrayHashMapUnmanaged(SerializedFailure.Owner.Packed, bun.logger.Log), }, -/// This is not stored in `current_bundle` so that its memory can be reused when -/// there is no active bundle. After a bundle finishes, these requests will -/// be continued, either calling their handler on success or sending the error -/// page on failure. -current_bundle_requests: ArrayListUnmanaged(DeferredRequest), /// When `current_bundle` is non-null and new requests to bundle come in, /// those are temporaried here. When the current bundle is finished, it /// will immediately enqueue this. @@ -155,8 +160,9 @@ next_bundle: struct { /// for this watch event is in one of the `watch_events` reload_event: ?*HotReloadEvent, /// The list of requests that are blocked on this bundle. 
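The `has_tailwind_plugin_hack` field added above is filled in lazily when plugins are first loaded (see `ensureRouteIsBundled` further down): the `serve_plugins` list from bunfig is scanned for a name containing "tailwind", and only then are emitted CSS chunks inspected and tracked. A reduced sketch of that detection, assuming a plain `[]const []const u8` plugin list (the real code stores an empty map rather than a bool so matching CSS paths can be tracked later):

```zig
const std = @import("std");

/// Returns true when any configured serve plugin looks like Tailwind.
fn wantsTailwindHack(serve_plugins: ?[]const []const u8) bool {
    const plugins = serve_plugins orelse return false;
    for (plugins) |plugin| {
        if (std.mem.indexOf(u8, plugin, "tailwind") != null) return true;
    }
    return false;
}

test wantsTailwindHack {
    try std.testing.expect(wantsTailwindHack(&.{"bun-plugin-tailwind"}));
    try std.testing.expect(!wantsTailwindHack(&.{"some-other-plugin"}));
    try std.testing.expect(!wantsTailwindHack(null));
}
```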
- requests: ArrayListUnmanaged(DeferredRequest), + requests: DeferredRequest.List, }, +deferred_request_pool: bun.HiveArray(DeferredRequest.Node, DeferredRequest.max_preallocated).Fallback, // Debugging @@ -232,7 +238,6 @@ pub const RouteBundle = struct { /// - TODO: Any downstream file is rebundled. cached_response_body: ?[]const u8, /// Hash used for the client script tag. - // TODO: do not make this lazy client_script_uid: ScriptUid.Optional, const ByteOffset = bun.GenericIndex(u32, u8); @@ -318,11 +323,10 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .html_route_lookup = .empty, .html_router = .empty, .current_bundle = null, - .current_bundle_requests = .empty, .next_bundle = .{ .route_queue = .empty, .reload_event = null, - .requests = .empty, + .requests = .{}, }, .assets = .{ .path_map = .empty, @@ -331,6 +335,8 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { }, .log = .init(allocator), .plugin_state = .unknown, + .bundling_failures = .{}, + .deferred_request_pool = .init(allocator), .server_bundler = undefined, .client_bundler = undefined, @@ -704,10 +710,11 @@ fn onIncrementalVisualizerCorked(resp: anytype) void { fn ensureRouteIsBundled( dev: *DevServer, route_bundle_index: RouteBundle.Index, - kind: DeferredRequest.Data.Tag, + kind: DeferredRequest.Handler.Kind, req: *Request, resp: AnyResponse, ) bun.OOM!void { + assert(dev.server != null); sw: switch (dev.routeBundlePtr(route_bundle_index).server_state) { .unqueued => { if (dev.current_bundle != null) { @@ -721,16 +728,29 @@ fn ensureRouteIsBundled( .unknown => if (dev.bundler_options.plugin != null) { // Framework-provided plugin is likely going to be phased out later dev.plugin_state = .loaded; - } else switch (dev.server.?.getOrLoadPlugins(.{ .dev_server = dev })) { - .pending => { - dev.plugin_state = .pending; - continue :plugin .pending; - }, - .err => { - dev.plugin_state = .err; - continue :plugin .err; - }, - .ready => {}, + } else { + // TODO: implement a proper solution here + dev.has_tailwind_plugin_hack = if (dev.vm.transpiler.options.serve_plugins) |serve_plugins| + for (serve_plugins) |plugin| { + if (bun.strings.includes(plugin, "tailwind")) break .empty; + } else null + else + null; + + switch (dev.server.?.getOrLoadPlugins(.{ .dev_server = dev })) { + .pending => { + dev.plugin_state = .pending; + continue :plugin .pending; + }, + .err => { + dev.plugin_state = .err; + continue :plugin .err; + }, + .ready => |ready| { + dev.plugin_state = .loaded; + dev.bundler_options.plugin = ready; + }, + } }, .pending => { try dev.next_bundle.route_queue.put(dev.allocator, route_bundle_index, {}); @@ -780,7 +800,7 @@ fn ensureRouteIsBundled( }, .bundling => { bun.assert(dev.current_bundle != null); - try dev.deferRequest(&dev.current_bundle_requests, route_bundle_index, kind, req, resp); + try dev.deferRequest(&dev.current_bundle.?.requests, route_bundle_index, kind, req, resp); }, .possible_bundling_failures => { // TODO: perform a graph trace to find just the errors that are needed @@ -807,35 +827,36 @@ fn ensureRouteIsBundled( }, .loaded => switch (kind) { .server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp), - .bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp), - .js_payload => dev.onJsRequestWithBundle(route_bundle_index, resp), + .bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp, bun.http.Method.which(req.method()) orelse .POST), + .js_payload => dev.onJsRequestWithBundle(route_bundle_index, resp, 
bun.http.Method.which(req.method()) orelse .POST), }, } } fn deferRequest( dev: *DevServer, - requests_array: *std.ArrayListUnmanaged(DeferredRequest), + requests_array: *DeferredRequest.List, route_bundle_index: RouteBundle.Index, - kind: DeferredRequest.Data.Tag, + kind: DeferredRequest.Handler.Kind, req: *Request, resp: AnyResponse, ) !void { - try requests_array.ensureUnusedCapacity(dev.allocator, 1); - - const deferred: DeferredRequest = .{ + const deferred = dev.deferred_request_pool.get(); + deferred.data = .{ .route_bundle_index = route_bundle_index, - .data = switch (kind) { - .js_payload => .{ .js_payload = resp }, - .bundled_html_page => .{ .bundled_html_page = resp }, + .handler = switch (kind) { + // POST is specified for unknown methods. + .js_payload => .{ .js_payload = .{ .response = resp, .method = bun.http.Method.which(req.method()) orelse .POST } }, + .bundled_html_page => .{ .bundled_html_page = .{ .response = resp, .method = bun.http.Method.which(req.method()) orelse .POST } }, .server_handler => .{ + // TODO: SSL by moving this to AnyServer.prepareAndSaveJsRequestContext .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) .save(dev.vm.global, req, resp.TCP), }, }, }; - - requests_array.appendAssumeCapacity(deferred); + resp.onAborted(*DeferredRequest, DeferredRequest.onAbort, &deferred.data); + requests_array.prepend(deferred); } fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, rbi: RouteBundle.Index) bun.OOM!void { @@ -942,7 +963,8 @@ fn onFrameworkRequestWithBundle( ); } -fn onHtmlRequestWithBundle(dev: *DevServer, route_bundle_index: RouteBundle.Index, resp: AnyResponse) void { +fn onHtmlRequestWithBundle(dev: *DevServer, route_bundle_index: RouteBundle.Index, resp: AnyResponse, method: bun.http.Method) void { + _ = method; // TODO: staticroute const route_bundle = dev.routeBundlePtr(route_bundle_index); assert(route_bundle.data == .html); const html = &route_bundle.data.html; @@ -1058,7 +1080,8 @@ fn getJavaScriptCodeForHTMLFile( array.items; } -pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: AnyResponse) void { +pub fn onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: AnyResponse, method: bun.http.Method) void { + _ = method; // TODO: staticroute const route_bundle = dev.routeBundlePtr(bundle_index); const code = route_bundle.client_bundle orelse code: { const code = dev.generateClientBundle(route_bundle) catch bun.outOfMemory(); @@ -1098,40 +1121,78 @@ pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: anytype) void { } } +/// When requests are waiting on a bundle, the relevant request information is +/// prepared and stored in a linked list. const DeferredRequest = struct { + /// A small maximum is set because development servers are unlikely to + /// aquire much load, so allocating a ton at the start for no reason + /// is very silly. This contributes to ~6kb of the initial DevServer allocation. + const max_preallocated = 16; + + pub const List = std.SinglyLinkedList(DeferredRequest); + pub const Node = List.Node; + route_bundle_index: RouteBundle.Index, - data: Data, + handler: Handler, - const Data = union(enum) { + const Handler = union(enum) { /// For a .framework route. This says to call and render the page. server_handler: bun.JSC.API.SavedRequest, /// For a .html route. Serve the bundled HTML page. 
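A reduced sketch of the queueing shape `deferRequest` above relies on: nodes come from a small pool (the real code uses `bun.HiveArray(...).Fallback` with 16 preallocated slots), are prepended onto a `std.SinglyLinkedList`, and the whole list is later moved by value into `current_bundle` when a bundle starts. Names here are stand-ins, and a plain allocator takes the place of the hive pool:

```zig
const std = @import("std");

const DeferredRequest = struct {
    route_bundle_index: u32,
    // handler: Handler, // omitted in this sketch

    const List = std.SinglyLinkedList(DeferredRequest);
    const Node = List.Node;
};

fn example(alloc: std.mem.Allocator) !void {
    var next_bundle: DeferredRequest.List = .{};

    // Deferring a request: acquire a node (pool or allocator) and prepend it.
    const node = try alloc.create(DeferredRequest.Node);
    node.* = .{ .data = .{ .route_bundle_index = 0 } };
    next_bundle.prepend(node);

    // Starting a bundle: the list is a single head pointer, so handing the
    // queued requests to the in-flight bundle is just a value move.
    var current_bundle = next_bundle;
    next_bundle = .{};

    // Finishing a bundle: pop each node, dispatch it, and return it to the pool.
    while (current_bundle.popFirst()) |n| {
        defer alloc.destroy(n);
        std.debug.print("resuming route bundle {d}\n", .{n.data.route_bundle_index});
    }
}
```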
- bundled_html_page: AnyResponse, + bundled_html_page: ResponseAndMethod, /// Serve the JavaScript payload for this route. - js_payload: AnyResponse, - - const Tag = @typeInfo(Data).@"union".tag_type.?; + js_payload: ResponseAndMethod, + /// Do nothing and free this node. To simplify lifetimes, + /// the `DeferredRequest` is not freed upon abortion. Which + /// is okay since most requests do not abort. + aborted, + + /// Does not include `aborted` because branching on that value + /// has no meaningful purpose, so it is excluded. + const Kind = enum { + server_handler, + bundled_html_page, + js_payload, + }; }; fn onAbort(this: *DeferredRequest, resp: AnyResponse) void { - _ = this; _ = resp; - @panic("TODO"); + this.abort(); + assert(this.handler == .aborted); } - fn abortAndDeinit(this: *DeferredRequest) void { - switch (this.data) { + /// Calling this is only required if the desired handler is going to be avoided, + /// such as for bundling failures or aborting the server. + /// Does not free the underlying `DeferredRequest.Node` + fn deinit(this: *DeferredRequest) void { + switch (this.handler) { + .server_handler => |*saved| saved.deinit(), + .bundled_html_page, .js_payload, .aborted => {}, + } + } + + /// Deinitializes state by aborting the connection. + fn abort(this: *DeferredRequest) void { + switch (this.handler) { .server_handler => |*saved| { saved.response.endWithoutBody(true); saved.deinit(); }, - .bundled_html_page, .js_payload => |resp| { - resp.endWithoutBody(true); + .bundled_html_page, .js_payload => |r| { + r.response.endWithoutBody(true); }, + .aborted => return, } + this.handler = .aborted; } }; +const ResponseAndMethod = struct { + response: AnyResponse, + method: bun.http.Method, +}; + fn startAsyncBundle( dev: *DevServer, entry_points: EntryPointList, @@ -1184,14 +1245,36 @@ fn startAsyncBundle( .timer = timer, .start_data = start_data, .had_reload_event = had_reload_event, + .requests = dev.next_bundle.requests, + .resolution_failure_entries = .{}, }; - const old_current_requests = dev.current_bundle_requests; - bun.assert(old_current_requests.items.len == 0); - dev.current_bundle_requests = dev.next_bundle.requests; - dev.next_bundle.requests = old_current_requests; + dev.next_bundle.requests = .{}; } fn indexFailures(dev: *DevServer) !void { + // Since resolution failures can be asyncronous, their logs are not inserted + // until the very end. + const resolution_failures = dev.current_bundle.?.resolution_failure_entries; + if (resolution_failures.count() > 0) { + for (resolution_failures.keys(), resolution_failures.values()) |owner, *log| { + switch (owner.decode()) { + .client => |index| try dev.client_graph.insertFailure(.index, index, log, false), + .server => |index| try dev.server_graph.insertFailure(.index, index, log, true), + .none, .route => unreachable, + } + } + } + + // Theoretically, it shouldn't be possible for errors to leak into dev.log, but just in + // case that happens, they can be printed out. + if (dev.log.hasErrors()) { + if (Environment.isDebug) { + Output.debugWarn("dev.log should not be written into when using DevServer", .{}); + } + dev.log.print(Output.errorWriter()) catch {}; + } + + // After inserting failures into the IncrementalGraphs, they are traced to their routes. 
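The `Handler` union defined above gains an `aborted` member: when uWS reports an abort, the response is ended and the active tag is rewritten in place, but the node itself stays allocated until the bundle finishes, so the drain loop in `finalizeBundle` simply skips it. A minimal sketch of that tag flip, with integer stand-ins for the real payload types:

```zig
const std = @import("std");

const Handler = union(enum) {
    server_handler: u32, // stand-ins for SavedRequest / ResponseAndMethod
    bundled_html_page: u32,
    js_payload: u32,
    /// Set when the client goes away; the node is not freed, which keeps
    /// lifetimes simple for the pooled linked-list nodes.
    aborted,

    fn abort(this: *Handler) void {
        switch (this.*) {
            // The real code ends the HTTP response for live states here.
            .server_handler, .bundled_html_page, .js_payload => {},
            .aborted => return,
        }
        this.* = .aborted;
    }
};

test "aborted requests are skipped, not freed" {
    var queued = [_]Handler{ .{ .js_payload = 1 }, .{ .bundled_html_page = 2 } };
    queued[0].abort();

    var served: usize = 0;
    for (&queued) |*h| {
        switch (h.*) {
            .aborted => continue, // drain loop ignores dead connections
            else => served += 1,
        }
    }
    try std.testing.expectEqual(@as(usize, 1), served);
}
```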
var sfa_state = std.heap.stackFallback(65536, dev.allocator); const sfa = sfa_state.get(); @@ -1427,6 +1510,17 @@ pub fn finalizeBundle( dev.startNextBundleIfPresent(); } const current_bundle = &dev.current_bundle.?; + defer { + if (current_bundle.requests.first != null) { + // cannot be an assertion because in the case of error.OutOfMemory, the request list was not drained. + Output.debug("current_bundle.requests.first != null. this leaves pending requests without an error page!", .{}); + } + while (current_bundle.requests.popFirst()) |node| { + defer dev.deferred_request_pool.put(node); + const req = &node.data; + req.abort(); + } + } dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); @@ -1488,7 +1582,7 @@ pub fn finalizeBundle( chunk, result.chunks, null, - false, // TODO: sourcemaps true + false, // TODO: css sourcemaps true ); // Create an entry for this file. @@ -1497,6 +1591,16 @@ pub fn finalizeBundle( // The hack is to use `entry_point_id`, which is otherwise unused, to store an index. chunk.entry_point.entry_point_id = try dev.insertOrUpdateCssAsset(key, code.buffer); + // Track css files that look like tailwind files. + if (dev.has_tailwind_plugin_hack) |*map| { + const first_1024 = code.buffer[0..1024]; + if (std.mem.indexOf(u8, first_1024, "tailwind") != null) { + try map.put(dev.allocator, key, {}); + } else { + _ = map.swapRemove(key); + } + } + try dev.client_graph.receiveChunk(&ctx, index, "", .css, false); // If imported on server, there needs to be a server-side file entry @@ -1800,17 +1904,21 @@ pub fn finalizeBundle( if (dev.incremental_result.failures_added.items.len > 0) { dev.bundles_since_last_error = 0; - for (dev.current_bundle_requests.items) |*req| { + while (current_bundle.requests.popFirst()) |node| { + defer dev.deferred_request_pool.put(node); + const req = &node.data; + const rb = dev.routeBundlePtr(req.route_bundle_index); rb.server_state = .possible_bundling_failures; - const resp: AnyResponse = switch (req.data) { + const resp: AnyResponse = switch (req.handler) { + .aborted => continue, .server_handler => |*saved| brk: { const resp = saved.response; saved.deinit(); break :brk resp; }, - .js_payload, .bundled_html_page => |resp| resp, + .js_payload, .bundled_html_page => |ram| ram.response, }; resp.corked(sendSerializedFailures, .{ @@ -1823,7 +1931,6 @@ pub fn finalizeBundle( return; } - // TODO: improve this visual feedback if (dev.bundling_failures.count() == 0) { if (current_bundle.had_reload_event) { const clear_terminal = !debug.isVisible(); @@ -1854,7 +1961,7 @@ pub fn finalizeBundle( dev.relativePath( bv2.graph.input_files.items(.source)[bv2.graph.entry_points.items[0].get()].path.text, ) - else switch (dev.routeBundlePtr(dev.current_bundle_requests.items[0].route_bundle_index).data) { + else switch (dev.routeBundlePtr(current_bundle.requests.first.?.data.route_bundle_index).data) { .html => |html| dev.relativePath(html.html_bundle.html_bundle.path), .framework => |fw| file_name: { const route = dev.router.routePtr(fw.route_index); @@ -1881,14 +1988,18 @@ pub fn finalizeBundle( dev.graph_safety_lock.unlock(); defer dev.graph_safety_lock.lock(); - for (dev.current_bundle_requests.items) |req| { + while (current_bundle.requests.popFirst()) |node| { + defer dev.deferred_request_pool.put(node); + const req = &node.data; + const rb = dev.routeBundlePtr(req.route_bundle_index); rb.server_state = .loaded; - switch (req.data) { + switch (req.handler) { + .aborted => continue, .server_handler => |saved| 
dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response), - .bundled_html_page => |resp| dev.onHtmlRequestWithBundle(req.route_bundle_index, resp), - .js_payload => |resp| dev.onJsRequestWithBundle(req.route_bundle_index, resp), + .bundled_html_page => |ram| dev.onHtmlRequestWithBundle(req.route_bundle_index, ram.response, ram.method), + .js_payload => |ram| dev.onJsRequestWithBundle(req.route_bundle_index, ram.response, ram.method), } } } @@ -1897,11 +2008,10 @@ fn startNextBundleIfPresent(dev: *DevServer) void { // Clear the current bundle assert(dev.current_bundle == null); dev.log.clearAndFree(); - dev.current_bundle_requests.clearRetainingCapacity(); dev.emitVisualizerMessageIfNeeded(); // If there were pending requests, begin another bundle. - if (dev.next_bundle.reload_event != null or dev.next_bundle.requests.items.len > 0) { + if (dev.next_bundle.reload_event != null or dev.next_bundle.requests.first != null) { var sfb = std.heap.stackFallback(4096, bun.default_allocator); const temp_alloc = sfb.get(); var entry_points: EntryPointList = EntryPointList.empty; @@ -1948,7 +2058,7 @@ pub fn handleParseTaskFailure( dev: *DevServer, err: anyerror, graph: bake.Graph, - key: []const u8, + abs_path: []const u8, log: *const Log, ) bun.OOM!void { dev.graph_safety_lock.lock(); @@ -1961,28 +2071,51 @@ pub fn handleParseTaskFailure( // TODO: this should walk up the graph one level, and queue all of these // files for re-bundling if they aren't already in the BundleV2 graph. switch (graph) { - .server, .ssr => try dev.server_graph.onFileDeleted(key, log), - .client => try dev.client_graph.onFileDeleted(key, log), + .server, .ssr => try dev.server_graph.onFileDeleted(abs_path, log), + .client => try dev.client_graph.onFileDeleted(abs_path, log), } } else { Output.prettyErrorln("Error{s} while bundling \"{s}\":", .{ if (log.errors +| log.warnings != 1) "s" else "", - dev.relativePath(key), + dev.relativePath(abs_path), }); log.print(Output.errorWriterBuffered()) catch {}; Output.flush(); // Do not index css errors - if (!bun.strings.hasSuffixComptime(key, ".css")) { + if (!bun.strings.hasSuffixComptime(abs_path, ".css")) { switch (graph) { - .server => try dev.server_graph.insertFailure(key, log, false), - .ssr => try dev.server_graph.insertFailure(key, log, true), - .client => try dev.client_graph.insertFailure(key, log, false), + .server => try dev.server_graph.insertFailure(.abs_path, abs_path, log, false), + .ssr => try dev.server_graph.insertFailure(.abs_path, abs_path, log, true), + .client => try dev.client_graph.insertFailure(.abs_path, abs_path, log, false), } } } } +/// Return a log to write resolution failures into. 
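The function that follows hands the bundler's resolver a `*Log` to write into, keyed by the owning IncrementalGraph file, so resolution errors that arrive asynchronously and out of order are grouped per file and folded into the graphs once, in `indexFailures`. A reduced sketch of the getOrPut pattern, with a `u32` key standing in for `SerializedFailure.Owner.Packed` and an `ArrayListUnmanaged(u8)` standing in for `bun.logger.Log`:

```zig
const std = @import("std");

const Log = std.ArrayListUnmanaged(u8); // stand-in for bun.logger.Log

fn logFor(
    alloc: std.mem.Allocator,
    entries: *std.AutoArrayHashMapUnmanaged(u32, Log),
    owner: u32,
) !*Log {
    const gop = try entries.getOrPut(alloc, owner);
    if (!gop.found_existing) gop.value_ptr.* = .{}; // fresh, empty log
    return gop.value_ptr;
}

test logFor {
    const alloc = std.testing.allocator;
    var entries: std.AutoArrayHashMapUnmanaged(u32, Log) = .{};
    defer {
        for (entries.values()) |*log| log.deinit(alloc);
        entries.deinit(alloc);
    }

    // Two failures for the same file share one log; another file gets its own.
    try (try logFor(alloc, &entries, 7)).appendSlice(alloc, "cannot resolve './a'\n");
    try (try logFor(alloc, &entries, 7)).appendSlice(alloc, "cannot resolve './b'\n");
    _ = try logFor(alloc, &entries, 9);
    try std.testing.expectEqual(@as(usize, 2), entries.count());
}
```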
+pub fn getLogForResolutionFailures(dev: *DevServer, abs_path: []const u8, graph: bake.Graph) !*bun.logger.Log { + assert(dev.current_bundle != null); + const current_bundle = &dev.current_bundle.?; + + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + + const owner = switch (graph == .client) { + inline else => |is_client| @unionInit( + SerializedFailure.Owner, + if (is_client) "client" else "server", + try (if (is_client) dev.client_graph else dev.server_graph) + .insertStale(abs_path, !is_client and graph == .ssr), + ).encode(), + }; + const gop = try current_bundle.resolution_failure_entries.getOrPut(current_bundle.bv2.graph.allocator, owner); + if (!gop.found_existing) { + gop.value_ptr.* = bun.logger.Log.init(current_bundle.bv2.graph.allocator); + } + return gop.value_ptr; +} + const CacheEntry = struct { kind: FileKind, }; @@ -2121,6 +2254,7 @@ fn registerCatchAllHtmlRoute(dev: *DevServer, html: *HTMLBundle.HTMLBundleRoute) dev.html_router.fallback = bundle_index.toOptional(); } +// TODO: Delete this file in favor of StaticRoute fn sendTextFile(code: []const u8, content_type: []const u8, any_resp: AnyResponse) void { switch (any_resp) { inline else => |resp| { @@ -2133,7 +2267,7 @@ fn sendTextFile(code: []const u8, content_type: []const u8, any_resp: AnyRespons resp.writeStatus("200 OK"); resp.writeHeader("Content-Type", content_type); - resp.end(code, true); // TODO: You should never call res.end(huge buffer) + resp.end(code, true); }, } } @@ -2164,6 +2298,7 @@ fn sendSerializedFailuresInner( failures: []const SerializedFailure, kind: ErrorPageKind, ) void { + // TODO: write to Blob and serve that resp.writeStatus("500 Internal Server Error"); resp.writeHeader("Content-Type", MimeType.html.value); @@ -2179,11 +2314,11 @@ fn sendSerializedFailuresInner( \\ \\ \\ - \\ + \\ \\".len + client_prefix.len + "/route.0000000000000000.js".len; + const payload_size = bundled_html.len + + ("").len * css_ids.len + + "".len + + client_prefix.len + "/".len + + display_name.len + + "-0000000000000000.js".len; var array: std.ArrayListUnmanaged(u8) = try std.ArrayListUnmanaged(u8).initCapacity(dev.allocator, payload_size); errdefer array.deinit(dev.allocator); array.appendSliceAssumeCapacity(before_head_end); // Insert all link tags before "" - for (names) |name| { - array.appendSliceAssumeCapacity(""); + for (css_ids) |name| { + array.appendSliceAssumeCapacity(""); } array.appendSliceAssumeCapacity(before_body_end); // Insert the client script tag before "" array.appendSliceAssumeCapacity(""); array.appendSliceAssumeCapacity(after_body_end); assert(array.items.len == array.capacity); // incorrect memory allocation size - html.cached_response_body = array.items; return array.items; } @@ -1056,6 +1063,7 @@ fn getJavaScriptCodeForHTMLFile( index: bun.JSAst.Index, import_records: []bun.BabyList(bun.ImportRecord), input_file_sources: []bun.logger.Source, + loaders: []bun.options.Loader, ) bun.OOM![]const u8 { var sfa_state = std.heap.stackFallback(65536, dev.allocator); const sfa = sfa_state.get(); @@ -1067,6 +1075,7 @@ fn getJavaScriptCodeForHTMLFile( try bun.js_printer.writeJSONString(input_file_sources[index.get()].path.pretty, @TypeOf(w), w, .utf8); try w.writeAll("(m) {\n "); for (import_records[index.get()].slice()) |import| { + if (loaders[index.get()] == .css) continue; try w.writeAll(" m.dynamicImport("); try bun.js_printer.writeJSONString(import.path.pretty, @TypeOf(w), w, .utf8); try w.writeAll(");\n "); @@ -1081,14 +1090,20 @@ fn getJavaScriptCodeForHTMLFile( } pub fn 
onJsRequestWithBundle(dev: *DevServer, bundle_index: RouteBundle.Index, resp: AnyResponse, method: bun.http.Method) void { - _ = method; // TODO: staticroute const route_bundle = dev.routeBundlePtr(bundle_index); - const code = route_bundle.client_bundle orelse code: { - const code = dev.generateClientBundle(route_bundle) catch bun.outOfMemory(); - route_bundle.client_bundle = code; - break :code code; + const blob = route_bundle.client_bundle orelse generate: { + const payload = dev.generateClientBundle(route_bundle) catch bun.outOfMemory(); + errdefer dev.allocator.free(payload); + route_bundle.client_bundle = StaticRoute.initFromAnyBlob( + .fromOwnedSlice(dev.allocator, payload), + .{ + .mime_type = .javascript, + .server = dev.server orelse unreachable, + }, + ); + break :generate route_bundle.client_bundle.?; }; - sendTextFile(code, MimeType.javascript.value, resp); + blob.onWithMethod(method, resp); } pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: anytype) void { @@ -1098,27 +1113,32 @@ pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: anytype) void { return; } - const ctx = &dev.vm.rareData().editor_context; - ctx.autoDetectEditor(JSC.VirtualMachine.get().transpiler.env); - const line: ?[]const u8 = req.header("editor-line"); - const column: ?[]const u8 = req.header("editor-column"); + // TODO: better editor detection. on chloe's dev env, this opens apple terminal + vim + resp.writeStatus("501 Not Implemented"); + resp.end("TODO", false); + _ = dev; - if (ctx.editor) |editor| { - var url = req.url()[internal_prefix.len + "/src/".len ..]; - if (bun.strings.indexOfChar(url, ':')) |colon| { - url = url[0..colon]; - } - editor.open(ctx.path, url, line, column, dev.allocator) catch { - resp.writeStatus("202 No Content"); - resp.end("", false); - return; - }; - resp.writeStatus("202 No Content"); - resp.end("", false); - } else { - resp.writeStatus("500 Internal Server Error"); - resp.end("Please set your editor in bunfig.toml", false); - } + // const ctx = &dev.vm.rareData().editor_context; + // ctx.autoDetectEditor(JSC.VirtualMachine.get().transpiler.env); + // const line: ?[]const u8 = req.header("editor-line"); + // const column: ?[]const u8 = req.header("editor-column"); + + // if (ctx.editor) |editor| { + // var url = req.url()[internal_prefix.len + "/src/".len ..]; + // if (bun.strings.indexOfChar(url, ':')) |colon| { + // url = url[0..colon]; + // } + // editor.open(ctx.path, url, line, column, dev.allocator) catch { + // resp.writeStatus("202 No Content"); + // resp.end("", false); + // return; + // }; + // resp.writeStatus("202 No Content"); + // resp.end("", false); + // } else { + // resp.writeStatus("500 Internal Server Error"); + // resp.end("Please set your editor in bunfig.toml", false); + // } } /// When requests are waiting on a bundle, the relevant request information is @@ -1141,6 +1161,7 @@ const DeferredRequest = struct { /// For a .html route. Serve the bundled HTML page. bundled_html_page: ResponseAndMethod, /// Serve the JavaScript payload for this route. + /// TODO: remove this js_payload: ResponseAndMethod, /// Do nothing and free this node. To simplify lifetimes, /// the `DeferredRequest` is not freed upon abortion. Which @@ -1342,7 +1363,7 @@ fn indexFailures(dev: *DevServer) !void { /// Used to generate the entry point. Unlike incremental patches, this always /// contains all needed files for a route. 
-fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]const u8 { +fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u8 { assert(route_bundle.client_bundle == null); assert(route_bundle.server_state == .loaded); // page is unfit to load @@ -1379,10 +1400,18 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]c .html => |html| html.bundled_file, }; - return dev.client_graph.takeBundle( - .initial_response, - if (client_file) |index| dev.relativePath(dev.client_graph.bundled_files.keys()[index.get()]) else "", - ); + const client_bundle = dev.client_graph.takeJSBundle(.{ + .kind = .initial_response, + .initial_response_entry_point = if (client_file) |index| + dev.relativePath(dev.client_graph.bundled_files.keys()[index.get()]) + else + "", + }); + + const source_map = try dev.client_graph.takeSourceMap(.initial_response, sfa, dev.allocator); + dev.allocator.free(source_map); + + return client_bundle; } fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.JSValue { @@ -1407,7 +1436,11 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.J const names = dev.client_graph.current_css_files.items; const arr = JSC.JSArray.createEmpty(dev.vm.global, names.len); for (names, 0..) |item, i| { - const str = bun.String.createUTF8(item); + var buf: [asset_prefix.len + @sizeOf(u64) * 2 + "/.css".len]u8 = undefined; + const path = std.fmt.bufPrint(&buf, asset_prefix ++ "/{s}.css", .{ + &std.fmt.bytesToHex(std.mem.asBytes(&item), .lower), + }) catch unreachable; + const str = bun.String.createUTF8(path); defer str.deref(); arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global)); } @@ -1507,6 +1540,11 @@ pub fn finalizeBundle( defer { bv2.deinit(); dev.current_bundle = null; + + dev.assets.reindexIfNeeded(dev.allocator) catch { + // not fatal: the assets may be reindexed some time later. + }; + dev.startNextBundleIfPresent(); } const current_bundle = &dev.current_bundle.?; @@ -1564,10 +1602,17 @@ pub fn finalizeBundle( js_chunk.compile_results_for_chunk, ) |part_range, compile_result| { const index = part_range.source_index; + const source_map: SourceMap.Chunk = compile_result.sourceMapChunk() orelse brk: { + // The source map is `null` if empty + bun.assert(compile_result.javascript.result == .result); + bun.assert(dev.server_bundler.options.source_map != .none); + bun.assert(!part_range.source_index.isRuntime()); + break :brk .empty; + }; switch (targets[part_range.source_index.get()].bakeGraph()) { - .server => try dev.server_graph.receiveChunk(&ctx, index, compile_result.code(), .js, false), - .ssr => try dev.server_graph.receiveChunk(&ctx, index, compile_result.code(), .js, true), - .client => try dev.client_graph.receiveChunk(&ctx, index, compile_result.code(), .js, false), + .server => try dev.server_graph.receiveChunk(&ctx, index, .{ .js = compile_result.code() }, source_map, false), + .ssr => try dev.server_graph.receiveChunk(&ctx, index, .{ .js = compile_result.code() }, source_map, true), + .client => try dev.client_graph.receiveChunk(&ctx, index, .{ .js = compile_result.code() }, source_map, false), } } @@ -1582,18 +1627,30 @@ pub fn finalizeBundle( chunk, result.chunks, null, - false, // TODO: css sourcemaps true + false, ); // Create an entry for this file. 
const key = ctx.sources[index.get()].path.keyForIncrementalGraph(); + const hash = brk: { + var hash: ContentHasher.Hash = .init(0x9a4e); // arbitrary seed + hash.update(key); + hash.update(code.buffer); + break :brk hash.final(); + }; + const asset_index = (try dev.assets.replacePath( + key, + .fromOwnedSlice(dev.allocator, code.buffer), + .css, + hash, + )).index; // Later code needs to retrieve the CSS content // The hack is to use `entry_point_id`, which is otherwise unused, to store an index. - chunk.entry_point.entry_point_id = try dev.insertOrUpdateCssAsset(key, code.buffer); + chunk.entry_point.entry_point_id = asset_index; // Track css files that look like tailwind files. if (dev.has_tailwind_plugin_hack) |*map| { - const first_1024 = code.buffer[0..1024]; + const first_1024 = code.buffer[0..@min(code.buffer.len, 1024)]; if (std.mem.indexOf(u8, first_1024, "tailwind") != null) { try map.put(dev.allocator, key, {}); } else { @@ -1601,7 +1658,7 @@ pub fn finalizeBundle( } } - try dev.client_graph.receiveChunk(&ctx, index, "", .css, false); + try dev.client_graph.receiveChunk(&ctx, index, .{ .css = hash }, null, false); // If imported on server, there needs to be a server-side file entry // so that edges can be attached. When a file is only imported on @@ -1620,8 +1677,19 @@ pub fn finalizeBundle( for (result.htmlChunks()) |*chunk| { const index = bun.JSAst.Index.init(chunk.entry_point.source_index); const compile_result = chunk.compile_results_for_chunk[0].html; - const generated_js = try dev.getJavaScriptCodeForHTMLFile(index, import_records, input_file_sources); - try dev.client_graph.receiveChunk(&ctx, index, generated_js, .js, false); + const generated_js = try dev.getJavaScriptCodeForHTMLFile( + index, + import_records, + input_file_sources, + bv2.graph.input_files.items(.loader), + ); + try dev.client_graph.receiveChunk( + &ctx, + index, + .{ .js = generated_js }, + .empty, // HTML chunk does not have a source map. + false, + ); const client_index = ctx.getCachedIndex(.client, index).*; const route_bundle_index = dev.html_route_lookup.get(client_index) orelse @panic("Route for HTML file was not registered"); @@ -1629,9 +1697,10 @@ pub fn finalizeBundle( assert(route_bundle.data.html.bundled_file == client_index); const html = &route_bundle.data.html; - if (html.cached_response_body) |slice| { - dev.allocator.free(slice); - html.cached_response_body = null; + if (html.cached_response) |blob| { + blob.deref(); + html.cached_response = null; + route_bundle.invalidateClientBundle(); } if (html.bundled_html_text) |slice| { dev.allocator.free(slice); @@ -1667,7 +1736,7 @@ pub fn finalizeBundle( for (result.cssChunks(), result.css_file_list.values()) |*chunk, metadata| { const index = bun.JSAst.Index.init(chunk.entry_point.source_index); // TODO: index css deps. this must add all recursively referenced files - // as dependencies of the entry point, instead of building a large tree. + // as dependencies of the entry point, instead of building a recursive tree. _ = index; _ = metadata; } @@ -1690,7 +1759,7 @@ pub fn finalizeBundle( // Load all new chunks into the server runtime. 
 if (!dev.frontend_only and dev.server_graph.current_chunk_len > 0) {
-        const server_bundle = try dev.server_graph.takeBundle(.hmr_chunk, "");
+        const server_bundle = try dev.server_graph.takeJSBundle(.{ .kind = .hmr_chunk });
         defer dev.allocator.free(server_bundle);

         const server_modules = c.BakeLoadServerHmrPatch(@ptrCast(dev.vm.global), bun.String.createLatin1(server_bundle)) catch |err| {
@@ -1738,7 +1807,8 @@ pub fn finalizeBundle(

     // It was discovered that if a tree falls with nobody around it, it does not
     // make any sound. Let's avoid writing into `w` if no sockets are open.
-    const will_hear_hot_update = dev.numSubscribers(.hot_update) > 0;
+    const hot_update_subscribers = dev.numSubscribers(.hot_update);
+    const will_hear_hot_update = hot_update_subscribers > 0;

     // This list of routes affected excludes client code. This means changing
     // a client component won't count as a route to trigger a reload on.
@@ -1810,10 +1880,7 @@ pub fn finalizeBundle(
             var it = route_bits_client.iterator(.{ .kind = .set });
             while (it.next()) |bundled_route_index| {
                 const bundle = &dev.route_bundles.items[bundled_route_index];
-                if (bundle.client_bundle) |old| {
-                    dev.allocator.free(old);
-                }
-                bundle.client_bundle = null;
+                bundle.invalidateClientBundle();
             }
         } else if (dev.incremental_result.html_routes_affected.items.len > 0) {
             // When only HTML routes were affected, there may not be any client
@@ -1825,10 +1892,7 @@ pub fn finalizeBundle(
             var it = route_bits_client.iterator(.{ .kind = .set });
             while (it.next()) |bundled_route_index| {
                 const bundle = &dev.route_bundles.items[bundled_route_index];
-                if (bundle.client_bundle) |old| {
-                    dev.allocator.free(old);
-                }
-                bundle.client_bundle = null;
+                bundle.invalidateClientBundle();
             }
         }
@@ -1845,9 +1909,9 @@ pub fn finalizeBundle(
         if (dev.incremental_result.had_adjusted_edges) {
             switch (route_bundle.data) {
                 .framework => |*fw_bundle| fw_bundle.cached_css_file_array.clear(),
-                .html => |*html| if (html.cached_response_body) |slice| {
-                    dev.allocator.free(slice);
-                    html.cached_response_body = null;
+                .html => |*html| if (html.cached_response) |blob| {
+                    blob.deref();
+                    html.cached_response = null;
                 },
             }
         }
@@ -1858,17 +1922,13 @@ pub fn finalizeBundle(
         // change the list of CSS files.
        if (dev.incremental_result.had_adjusted_edges) {
            gts.clear();
+            dev.client_graph.current_css_files.clearRetainingCapacity();
            try dev.traceAllRouteImports(route_bundle, &gts, .{ .find_css = true });
-            const names = dev.client_graph.current_css_files.items;
-
-            try w.writeInt(i32, @intCast(names.len), .little);
-            for (names) |name| {
-                const css_prefix_slash = css_prefix ++ "/";
-                // These slices are url pathnames.
The ID can be extracted - const css_hash_len = 16; - bun.assert(name.len == (css_prefix_slash ++ ".css").len + css_hash_len); - bun.assert(bun.strings.hasPrefix(name, css_prefix_slash)); - try w.writeAll(name[css_prefix_slash.len..][0..css_hash_len]); + const css_ids = dev.client_graph.current_css_files.items; + + try w.writeInt(i32, @intCast(css_ids.len), .little); + for (css_ids) |css_id| { + try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&css_id), .lower)); } } else { try w.writeInt(i32, -1, .little); @@ -1881,19 +1941,42 @@ pub fn finalizeBundle( const css_chunks = result.cssChunks(); if (will_hear_hot_update) { if (dev.client_graph.current_chunk_len > 0 or css_chunks.len > 0) { - const css_values = dev.css_files.values(); + const asset_values = dev.assets.files.values(); try w.writeInt(u32, @intCast(css_chunks.len), .little); const sources = bv2.graph.input_files.items(.source); for (css_chunks) |chunk| { const key = sources[chunk.entry_point.source_index].path.keyForIncrementalGraph(); try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&bun.hash(key)), .lower)); - const css_data = css_values[chunk.entry_point.entry_point_id]; + const css_data = asset_values[chunk.entry_point.entry_point_id].blob.InternalBlob.bytes.items; try w.writeInt(u32, @intCast(css_data.len), .little); try w.writeAll(css_data); } - if (dev.client_graph.current_chunk_len > 0) - try dev.client_graph.takeBundleToList(.hmr_chunk, &hot_update_payload, ""); + if (dev.client_graph.current_chunk_len > 0) { + const hash = hash: { + var source_map_hash: bun.bundle_v2.ContentHasher.Hash = .init(0x4b12); // arbitrarily different seed than what .initial_response uses + const keys = dev.client_graph.bundled_files.keys(); + for (dev.client_graph.current_chunk_parts.items) |part| { + source_map_hash.update(keys[part.get()]); + source_map_hash.update(dev.client_graph.source_maps.items[part.get()].vlq_chunk.slice()); + } + break :hash source_map_hash.final(); + }; + // Insert the source map + if (try dev.assets.putOrIncrementRefCount(hash, hot_update_subscribers)) |static_route_ptr| { + const source_map = try dev.client_graph.takeSourceMap(.hmr_chunk, bv2.graph.allocator, dev.allocator); + errdefer dev.allocator.free(source_map); + static_route_ptr.* = StaticRoute.initFromAnyBlob(.fromOwnedSlice(dev.allocator, source_map), .{ + .server = dev.server.?, + .mime_type = .json, + }); + } + // Build and send the source chunk + try dev.client_graph.takeJSBundleToList(&hot_update_payload, .{ + .kind = .hmr_chunk, + .source_map_id = hash, + }); + } } else { try w.writeInt(i32, 0, .little); } @@ -2043,16 +2126,6 @@ fn startNextBundleIfPresent(dev: *DevServer) void { } } -fn insertOrUpdateCssAsset(dev: *DevServer, abs_path: []const u8, code: []const u8) !Chunk.EntryPoint.ID { - const path_hash = bun.hash(abs_path); - const gop = try dev.css_files.getOrPut(dev.allocator, path_hash); - if (gop.found_existing) { - dev.allocator.free(gop.value_ptr.*); - } - gop.value_ptr.* = code; - return @intCast(gop.index); -} - /// Note: The log is not consumed here pub fn handleParseTaskFailure( dev: *DevServer, @@ -2235,12 +2308,12 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.UnresolvedIndex) !Rou .bundled_file = incremental_graph_index, .head_end_tag_index = .none, .body_end_tag_index = .none, - .cached_response_body = null, + .cached_response = null, .bundled_html_text = null, - .client_script_uid = .none, } }; }, }, + .client_script_generation = 0, .server_state = .unqueued, .client_bundle = null, .active_viewers = 0, @@ -2254,24 
+2327,6 @@ fn registerCatchAllHtmlRoute(dev: *DevServer, html: *HTMLBundle.HTMLBundleRoute) dev.html_router.fallback = bundle_index.toOptional(); } -// TODO: Delete this file in favor of StaticRoute -fn sendTextFile(code: []const u8, content_type: []const u8, any_resp: AnyResponse) void { - switch (any_resp) { - inline else => |resp| { - if (code.len == 0) { - resp.writeStatus("202 No Content"); - resp.writeHeaderInt("Content-Length", 0); - resp.end("", true); - return; - } - - resp.writeStatus("200 OK"); - resp.writeHeader("Content-Type", content_type); - resp.end(code, true); - }, - } -} - const ErrorPageKind = enum { /// Modules failed to bundle bundler, @@ -2395,6 +2450,9 @@ pub fn IncrementalGraph(side: bake.Side) type { /// are stored so the watcher can quickly query and invalidate them. /// Key slices are owned by `default_allocator` bundled_files: bun.StringArrayHashMapUnmanaged(File), + /// Source maps are stored out-of-line to make `File` objects smaller, + /// as file information is accessed much more frequently than source maps. + source_maps: if (side == .client) ArrayListUnmanaged(PackedMap) else void, /// Track bools for files which are "stale", meaning they should be /// re-bundled before being used. Resizing this is usually deferred /// until after a bundle, since resizing the bit-set requires an @@ -2429,27 +2487,26 @@ pub fn IncrementalGraph(side: bake.Side) type { .server => []const u8, }), + /// Asset IDs, which can be printed as hex in '/_bun/asset/{hash}.css' current_css_files: switch (side) { - .client => ArrayListUnmanaged([]const u8), + .client => ArrayListUnmanaged(u64), .server => void, }, const empty: @This() = .{ - .bundled_files = .{}, - .stale_files = .{}, + .bundled_files = .empty, + .source_maps = if (side == .client) .empty, + .stale_files = .empty, + .first_dep = .empty, + .first_import = .empty, - .first_dep = .{}, - .first_import = .{}, - .edges = .{}, - .edges_free_list = .{}, + .edges = .empty, + .edges_free_list = .empty, .current_chunk_len = 0, .current_chunk_parts = .{}, - .current_css_files = switch (side) { - .client => .{}, - .server => {}, - }, + .current_css_files = if (side == .client) .empty, }; pub const File = switch (side) { @@ -2485,8 +2542,16 @@ pub fn IncrementalGraph(side: bake.Side) type { } }, .client => struct { - /// Allocated by default_allocator. Access with `.code()` - code_ptr: [*]const u8, + /// See function wrappers to safely read into this data + content: extern union { + /// Allocated by default_allocator. Access with `.jsCode()` + /// When stale, the code is "", otherwise it contains at + /// least one non-whitespace character, as empty chunks + /// contain at least a function wrapper. + js_code_ptr: [*]const u8, + /// Access with `.cssAssetId()` + css_asset_id: u64, + }, /// Separated from the pointer to reduce struct size. /// Parser does not support files >4gb anyways. 
code_len: u32, @@ -2504,27 +2569,52 @@ pub fn IncrementalGraph(side: bake.Side) type { is_special_framework_file: bool, /// If this file has a HTML RouteBundle and associated entry in `html_route_lookup` is_html_route: bool, - /// CSS and Asset files get special handling + /// CSS files get special handling kind: FileKind, unused: enum(u26) { unused } = .unused, }; comptime { - assert(@sizeOf(@This()) == @sizeOf(usize) * 2); + assert(@sizeOf(@This()) == @sizeOf(u64) * 2); assert(@alignOf(@This()) == @alignOf([*]u8)); } - fn init(code_slice: []const u8, flags: Flags) @This() { + fn initJavaScript(code_slice: []const u8, flags: Flags) @This() { + assert(flags.kind == .js); return .{ - .code_ptr = code_slice.ptr, + .content = .{ .js_code_ptr = code_slice.ptr }, .code_len = @intCast(code_slice.len), .flags = flags, }; } - fn code(file: @This()) []const u8 { - return file.code_ptr[0..file.code_len]; + fn initCSS(asset_id: u64, flags: Flags) @This() { + assert(flags.kind == .css); + return .{ + .content = .{ .css_asset_id = asset_id }, + .code_len = 0, + .flags = flags, + }; + } + + fn initUnknown(flags: Flags) @This() { + assert(flags.kind == .unknown); + return .{ + .content = .{ .css_asset_id = 0 }, + .code_len = 0, + .flags = flags, + }; + } + + fn jsCode(file: @This()) []const u8 { + assert(file.flags.kind == .js); + return file.content.js_code_ptr[0..file.code_len]; + } + + fn cssAssetId(file: @This()) u64 { + assert(file.flags.kind == .css); + return file.content.css_asset_id; } inline fn stopsDependencyTrace(_: @This()) bool { @@ -2537,6 +2627,68 @@ pub fn IncrementalGraph(side: bake.Side) type { }, }; + /// Packed source mapping data + pub const PackedMap = struct { + /// Allocated by default_allocator. Access with `.slice()` + /// This is stored to allow lazy construction of source map files. + /// Aligned to 4 bytes to reduce struct size/padding. + vlq_chunk: struct { + ptr_top: u32, + ptr_bottom: u32, + len: u32, + + pub fn init(data: []const u8) @This() { + if (@inComptime() and data.len == 0) { + return .{ .ptr_top = 0, .ptr_bottom = 0xffffffff, .len = 0 }; + } + return .{ + .ptr_top = @intCast(@intFromPtr(data.ptr) >> 32), + .ptr_bottom = @intCast(@intFromPtr(data.ptr) & 0xffffffff), + .len = @intCast(data.len), + }; + } + + pub fn slice(chunk: @This()) []const u8 { + return @as([*]const u8, @ptrFromInt( + (@as(usize, chunk.ptr_top) << 32) + chunk.ptr_bottom, + ))[0..chunk.len]; + } + }, + /// Used to track the last state of the source map chunk. This + /// is used when concatenating chunks. The generated column + /// is not tracked because it is always zero (all chunks end + /// in a newline because minification is off), and the generated + /// line is recomputed on demand + end_state: struct { + /// This field is overloaded for an empty chunk's line count. + original_line: i32, + original_column: i32, + }, + + pub const empty: PackedMap = .{ + .vlq_chunk = .init(""), + .end_state = .{ + .original_line = 0, + .original_column = 0, + }, + }; + + pub fn fromSourceMap(source_map: SourceMap.Chunk) PackedMap { + return if (source_map.buffer.list.items.len > 0) .{ + .vlq_chunk = .init(source_map.buffer.list.items), + .end_state = .{ + .original_line = source_map.end_state.original_line, + .original_column = source_map.end_state.original_column, + }, + } else .empty; + } + + comptime { + assert(@sizeOf(@This()) == @sizeOf(u32) * 5); + assert(@alignOf(@This()) == @alignOf(u32)); + } + }; + // If this data structure is not clear, see `DirectoryWatchStore.Dep` // for a simpler example. 
It is more complicated here because this // structure is two-way. @@ -2580,8 +2732,11 @@ pub fn IncrementalGraph(side: bake.Side) type { g: *@This(), ctx: *HotUpdateContext, index: bun.JSAst.Index, - code: []const u8, - kind: FileKind, + content: union(enum) { + js: []const u8, + css: u64, + }, + source_map: ?SourceMap.Chunk, is_ssr_graph: bool, ) !void { const dev = g.owner(); @@ -2591,9 +2746,9 @@ pub fn IncrementalGraph(side: bake.Side) type { const key = path.keyForIncrementalGraph(); if (Environment.allow_assert) { - switch (kind) { - .css => bun.assert(code.len == 0), - .js => if (bun.strings.isAllWhitespace(code)) { + switch (content) { + .css => {}, + .js => |js| if (bun.strings.isAllWhitespace(js)) { // Should at least contain the function wrapper bun.Output.panic("Empty chunk is impossible: {s} {s}", .{ key, @@ -2603,12 +2758,11 @@ pub fn IncrementalGraph(side: bake.Side) type { }, }); }, - else => Output.panic("unexpected file kind: .{s}", .{@tagName(kind)}), } } // Dump to filesystem if enabled - if (bun.FeatureFlags.bake_debugging_features) if (dev.dump_dir) |dump_dir| { + if (bun.FeatureFlags.bake_debugging_features and content == .js) if (dev.dump_dir) |dump_dir| { const cwd = dev.root; var a: bun.PathBuffer = undefined; var b: [bun.MAX_PATH_BYTES * 2]u8 = undefined; @@ -2619,7 +2773,7 @@ pub fn IncrementalGraph(side: bake.Side) type { dumpBundle(dump_dir, switch (side) { .client => .client, .server => if (is_ssr_graph) .ssr else .server, - }, rel_path_escaped, code, true) catch |err| { + }, rel_path_escaped, content.js, true) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); Output.warn("Could not dump bundle: {}", .{err}); }; @@ -2632,6 +2786,8 @@ pub fn IncrementalGraph(side: bake.Side) type { gop.key_ptr.* = try bun.default_allocator.dupe(u8, key); try g.first_dep.append(dev.allocator, .none); try g.first_import.append(dev.allocator, .none); + if (side == .client) + try g.source_maps.append(dev.allocator, if (source_map) |map| .fromSourceMap(map) else .empty); } if (g.stale_files.bit_length > gop.index) { @@ -2647,12 +2803,26 @@ pub fn IncrementalGraph(side: bake.Side) type { .is_hmr_root = ctx.server_to_client_bitset.isSet(index.get()), .is_special_framework_file = false, .is_html_route = false, - .kind = kind, + .kind = switch (content) { + .js => .js, + .css => .css, + }, }; if (gop.found_existing) { - if (kind == .js) - bun.default_allocator.free(gop.value_ptr.code()); + // Free the original content + switch (gop.value_ptr.flags.kind) { + .js => { + bun.default_allocator.free(gop.value_ptr.jsCode()); + bun.default_allocator.free(g.source_maps.items[file_index.get()].vlq_chunk.slice()); + g.source_maps.items[file_index.get()] = .fromSourceMap( + source_map orelse unreachable, // JS needs a source map (can be empty, but not null) + ); + }, + .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), + .unknown => {}, + } + // Free a failure if it exists if (gop.value_ptr.flags.failed) { const kv = dev.bundling_failures.fetchSwapRemoveAdapted( SerializedFailure.Owner{ .client = file_index }, @@ -2664,26 +2834,19 @@ pub fn IncrementalGraph(side: bake.Side) type { kv.key, ); } + + // Persist some flags flags.is_special_framework_file = gop.value_ptr.flags.is_special_framework_file; flags.is_html_route = gop.value_ptr.flags.is_html_route; } - if (kind == .css) { - if (!gop.found_existing or gop.value_ptr.code_len == 0) { - gop.value_ptr.* = File.init(try std.fmt.allocPrint( - dev.allocator, - css_prefix ++ "/{}.css", - 
.{std.fmt.fmtSliceHexLower(std.mem.asBytes(&bun.hash(key)))}, - ), flags); - } else { - // The key is just the file-path - gop.value_ptr.flags = flags; - } - } else { - gop.value_ptr.* = File.init(code, flags); - } - if (kind == .js) { - try g.current_chunk_parts.append(dev.allocator, file_index); - g.current_chunk_len += code.len; + switch (content) { + .css => |css| gop.value_ptr.* = .initCSS(css, flags), + .js => |js| { + gop.value_ptr.* = .initJavaScript(js, flags); + // Track JavaScript chunks for concatenation + try g.current_chunk_parts.append(dev.allocator, file_index); + g.current_chunk_len += js.len; + }, } }, .server => { @@ -2696,14 +2859,20 @@ pub fn IncrementalGraph(side: bake.Side) type { .is_route = false, .is_client_component_boundary = client_component_boundary, .failed = false, - .kind = kind, + .kind = switch (content) { + .js => .js, + .css => .css, + }, }; if (client_component_boundary) { try dev.incremental_result.client_components_added.append(dev.allocator, file_index); } } else { - gop.value_ptr.kind = kind; + gop.value_ptr.kind = switch (content) { + .js => .js, + .css => .css, + }; if (is_ssr_graph) { gop.value_ptr.is_ssr = true; @@ -2737,8 +2906,10 @@ pub fn IncrementalGraph(side: bake.Side) type { ); } } - try g.current_chunk_parts.append(dev.allocator, code); - g.current_chunk_len += code.len; + if (content == .js) { + try g.current_chunk_parts.append(dev.allocator, content.js); + g.current_chunk_len += content.js.len; + } }, } } @@ -3044,10 +3215,10 @@ pub fn IncrementalGraph(side: bake.Side) type { } }, .client => { - // assert(!g.stale_files.isSet(file_index.get())); // should not be left stale + assert(!g.stale_files.isSet(file_index.get())); // should not be left stale if (file.flags.kind == .css) { if (goal.find_css) { - try g.current_css_files.append(g.owner().allocator, file.code()); + try g.current_css_files.append(g.owner().allocator, file.cssAssetId()); } // Do not count css files as a client module @@ -3092,6 +3263,8 @@ pub fn IncrementalGraph(side: bake.Side) type { gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); try g.first_dep.append(g.owner().allocator, .none); try g.first_import.append(g.owner().allocator, .none); + if (side == .client) + try g.source_maps.append(g.owner().allocator, .empty); } else { if (side == .server) { if (is_route) gop.value_ptr.*.is_route = true; @@ -3112,12 +3285,17 @@ pub fn IncrementalGraph(side: bake.Side) type { .kind = .unknown, }; if (gop.found_existing) { - if (gop.value_ptr.code().len > 0) { - g.owner().allocator.free(gop.value_ptr.code()); + const source_map = &g.source_maps.items[file_index.get()]; + switch (gop.value_ptr.flags.kind) { + .js => g.owner().allocator.free(gop.value_ptr.jsCode()), + .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), + .unknown => {}, } + g.owner().allocator.free(source_map.vlq_chunk.slice()); + source_map.* = .empty; flags.is_html_route = flags.is_html_route or gop.value_ptr.flags.is_html_route; } - gop.value_ptr.* = File.init("", flags); + gop.value_ptr.* = File.initUnknown(flags); }, .server => { if (!gop.found_existing) { @@ -3147,7 +3325,7 @@ pub fn IncrementalGraph(side: bake.Side) type { const gop = try g.bundled_files.getOrPut(g.owner().allocator, abs_path); if (!gop.found_existing) { gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); - gop.value_ptr.* = File.init("", .{ + gop.value_ptr.* = File.initUnknown(.{ .failed = false, .is_hmr_root = false, .is_special_framework_file = false, @@ -3156,6 +3334,8 @@ pub fn 
IncrementalGraph(side: bake.Side) type { }); try g.first_dep.append(g.owner().allocator, .none); try g.first_import.append(g.owner().allocator, .none); + if (side == .client) + try g.source_maps.append(g.owner().allocator, .empty); } return gop.key_ptr.*; } @@ -3173,6 +3353,8 @@ pub fn IncrementalGraph(side: bake.Side) type { gop.key_ptr.* = try bun.default_allocator.dupe(u8, abs_path); try g.first_dep.append(g.owner().allocator, .none); try g.first_import.append(g.owner().allocator, .none); + if (side == .client) + try g.source_maps.append(g.owner().allocator, .empty); } switch (side) { @@ -3233,6 +3415,8 @@ pub fn IncrementalGraph(side: bake.Side) type { gop.key_ptr.* = try bun.default_allocator.dupe(u8, key); try g.first_dep.append(g.owner().allocator, .none); try g.first_import.append(g.owner().allocator, .none); + if (side == .client) + try g.source_maps.append(g.owner().allocator, .empty); } try g.ensureStaleBitCapacity(true); @@ -3240,13 +3424,25 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (side) { .client => { - gop.value_ptr.* = File.init("", .{ - .failed = true, + var flags: File.Flags = .{ + .failed = false, .is_hmr_root = false, .is_special_framework_file = false, .is_html_route = false, .kind = .unknown, - }); + }; + if (found_existing) { + const source_map = &g.source_maps.items[file_index.get()]; + switch (gop.value_ptr.flags.kind) { + .js => g.owner().allocator.free(gop.value_ptr.jsCode()), + .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), + .unknown => {}, + } + g.owner().allocator.free(source_map.vlq_chunk.slice()); + source_map.* = .empty; + flags.is_html_route = flags.is_html_route or gop.value_ptr.flags.is_html_route; + } + gop.value_ptr.* = File.initUnknown(flags); }, .server => { if (!gop.found_existing) { @@ -3354,31 +3550,41 @@ pub fn IncrementalGraph(side: bake.Side) type { if (side == .client) g.current_css_files.clearRetainingCapacity(); } - pub fn takeBundle( + const TakeJSBundleOptions = switch (side) { + .client => struct { + kind: ChunkKind, + initial_response_entry_point: []const u8 = "", + source_map_id: ?u64 = null, + }, + .server => struct { + kind: ChunkKind, + }, + }; + + pub fn takeJSBundle( g: *@This(), - kind: ChunkKind, - initial_response_entry_point: []const u8, - ) ![]const u8 { + options: TakeJSBundleOptions, + ) ![]u8 { var chunk = std.ArrayList(u8).init(g.owner().allocator); - try g.takeBundleToList(kind, &chunk, initial_response_entry_point); + try g.takeJSBundleToList(&chunk, options); bun.assert(chunk.items.len == chunk.capacity); return chunk.items; } - pub fn takeBundleToList( + pub fn takeJSBundleToList( g: *@This(), - kind: ChunkKind, list: *std.ArrayList(u8), - initial_response_entry_point: []const u8, + options: TakeJSBundleOptions, ) !void { + const kind = options.kind; g.owner().graph_safety_lock.assertLocked(); // initial bundle needs at least the entry point // hot updates shouldn't be emitted if there are no chunks assert(g.current_chunk_len > 0); - const runtime = switch (kind) { + const runtime: bake.HmrRuntime = switch (kind) { .initial_response => bun.bake.getHmrRuntime(side), - .hmr_chunk => "({\n", + .hmr_chunk => comptime .init("({\n"), }; // A small amount of metadata is present at the end of the chunk @@ -3392,8 +3598,10 @@ pub fn IncrementalGraph(side: bake.Side) type { const w = end_list.writer(); switch (kind) { .initial_response => { + if (side == .server) @panic("unreachable"); const fw = g.owner().framework; try w.writeAll("}, {\n main: "); + const initial_response_entry_point = 
options.initial_response_entry_point; if (initial_response_entry_point.len > 0) { try bun.js_printer.writeJSONString( g.owner().relativePath(initial_response_entry_point), @@ -3404,35 +3612,27 @@ pub fn IncrementalGraph(side: bake.Side) type { } else { try w.writeAll("null"); } - switch (side) { - .client => { - try w.writeAll(",\n version: \""); - try w.writeAll(&g.owner().configuration_hash_key); - try w.writeAll("\""); - if (fw.react_fast_refresh) |rfr| { - try w.writeAll(",\n refresh: "); - try bun.js_printer.writeJSONString( - g.owner().relativePath(rfr.import_source), - @TypeOf(w), - w, - .utf8, - ); - } - }, - .server => { - if (fw.server_components) |sc| { - if (sc.separate_ssr_graph) { - try w.writeAll(",\n separateSSRGraph: true"); - } - } - }, + try w.writeAll(",\n version: \""); + try w.writeAll(&g.owner().configuration_hash_key); + try w.writeAll("\""); + if (fw.react_fast_refresh) |rfr| { + try w.writeAll(",\n refresh: "); + try bun.js_printer.writeJSONString( + g.owner().relativePath(rfr.import_source), + @TypeOf(w), + w, + .utf8, + ); } - try w.writeAll("\n})"); - }, - .hmr_chunk => { - try w.writeAll("\n})"); }, + .hmr_chunk => {}, } + try w.writeAll("\n})"); + if (side == .client) if (options.source_map_id) |source_map_id| { + try w.writeAll("//# sourceMappingURL=" ++ asset_prefix); + try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&source_map_id), .lower)); + try w.writeAll(".js.map"); + }; break :end end_list.items; }; @@ -3440,18 +3640,18 @@ pub fn IncrementalGraph(side: bake.Side) type { const start = list.items.len; if (start == 0) - try list.ensureTotalCapacityPrecise(g.current_chunk_len + runtime.len + end.len) + try list.ensureTotalCapacityPrecise(g.current_chunk_len + runtime.code.len + end.len) else - try list.ensureUnusedCapacity(g.current_chunk_len + runtime.len + end.len); + try list.ensureUnusedCapacity(g.current_chunk_len + runtime.code.len + end.len); - list.appendSliceAssumeCapacity(runtime); + list.appendSliceAssumeCapacity(runtime.code); for (g.current_chunk_parts.items) |entry| { list.appendSliceAssumeCapacity(switch (side) { // entry is an index into files .client => brk: { if (Environment.allow_assert) bun.assert(files[entry.get()].flags.kind == .js); - break :brk files[entry.get()].code(); + break :brk files[entry.get()].jsCode(); }, // entry is the '[]const u8' itself .server => entry, @@ -3460,7 +3660,10 @@ pub fn IncrementalGraph(side: bake.Side) type { list.appendSliceAssumeCapacity(end); if (bun.FeatureFlags.bake_debugging_features) if (g.owner().dump_dir) |dump_dir| { - const rel_path_escaped = "latest_chunk.js"; + const rel_path_escaped = switch (kind) { + .initial_response => "latest_chunk.js", + .hmr_chunk => "latest_hmr.js", + }; dumpBundle(dump_dir, switch (side) { .client => .client, .server => .server, @@ -3471,6 +3674,113 @@ pub fn IncrementalGraph(side: bake.Side) type { }; } + /// Uses `arena` as a temporary allocator, returning a string owned by `gpa` + pub fn takeSourceMap(g: *@This(), kind: ChunkKind, arena: std.mem.Allocator, gpa: Allocator) ![]u8 { + if (side == .server) @compileError("not implemented"); + + const paths = g.bundled_files.keys(); + const files = g.bundled_files.values(); + const source_maps = g.source_maps.items; + + var j: StringJoiner = .{ .allocator = arena }; + + const runtime: bake.HmrRuntime = switch (kind) { + .initial_response => bun.bake.getHmrRuntime(side), + .hmr_chunk => comptime .init("({\n"), + }; + + j.pushStatic( + \\{"version":3,"sourceRoot":"/_bun/src","sources":[ + ); + + var 
source_map_strings = std.ArrayList(u8).init(arena); + defer source_map_strings.deinit(); + const smw = source_map_strings.writer(); + var needs_comma = false; + for (g.current_chunk_parts.items) |entry| { + if (source_maps[entry.get()].vlq_chunk.len == 0) + continue; + if (needs_comma) + try source_map_strings.appendSlice(","); + needs_comma = true; + try bun.js_printer.writeJSONString( + g.owner().relativePath(paths[entry.get()]), + @TypeOf(smw), + smw, + .utf8, + ); + } + j.pushStatic(source_map_strings.items); + j.pushStatic( + \\],"names":[],"mappings":" + ); + + var prev_end_state: SourceMap.SourceMapState = .{ + .generated_line = 0, + .generated_column = 0, + .source_index = 0, + .original_line = 0, + .original_column = 0, + }; + + var lines_between: i32 = runtime.line_count + 2; + + var non_empty_source_index: i32 = 0; + for (g.current_chunk_parts.items) |entry| { + const source_map = &source_maps[entry.get()]; + if (source_map.vlq_chunk.len == 0) { + if (source_map.end_state.original_line == 0) { + const count = bun.strings.countChar(files[entry.get()].jsCode(), '\n'); + source_map.end_state.original_line = @intCast(count); + } + lines_between += source_map.end_state.original_line; + continue; + } + + const start_state: SourceMap.SourceMapState = .{ + .source_index = non_empty_source_index, + .generated_line = lines_between, + .generated_column = 0, + .original_line = 0, + .original_column = 0, + }; + lines_between = 0; + + try SourceMap.appendSourceMapChunk( + &j, + arena, + prev_end_state, + start_state, + source_map.vlq_chunk.slice(), + ); + + prev_end_state = .{ + .source_index = non_empty_source_index, + .generated_line = 0, + .generated_column = 0, + .original_line = source_map.end_state.original_line, + .original_column = source_map.end_state.original_column, + }; + + non_empty_source_index += 1; + } + + const slice = try j.doneWithEnd(gpa, "\"}"); + + if (bun.FeatureFlags.bake_debugging_features) if (g.owner().dump_dir) |dump_dir| { + const rel_path_escaped = "latest_chunk.js.map"; + dumpBundle(dump_dir, switch (side) { + .client => .client, + .server => .server, + }, rel_path_escaped, slice, false) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.warn("Could not dump bundle: {}", .{err}); + }; + }; + + return slice; + } + fn disconnectAndDeleteFile(g: *@This(), file_index: FileIndex) void { bun.assert(g.bundled_files.count() > 1); // never remove all files bun.assert(g.first_dep.items[file_index.get()] == .none); // must have no dependencies @@ -4109,9 +4419,9 @@ pub const SerializedFailure = struct { }; // For debugging, it is helpful to be able to see bundles. 
-fn dumpBundle(dump_dir: std.fs.Dir, side: bake.Graph, rel_path: []const u8, chunk: []const u8, wrap: bool) !void {
+fn dumpBundle(dump_dir: std.fs.Dir, graph: bake.Graph, rel_path: []const u8, chunk: []const u8, wrap: bool) !void {
     const name = bun.path.joinAbsString("/", &.{
-        @tagName(side),
+        @tagName(graph),
         rel_path,
     }, .auto)[1..];
     var inner_dir = try dump_dir.makeOpenPath(bun.Dirname.dirname(u8, name).?, .{});
@@ -4122,13 +4432,15 @@ fn dumpBundle(dump_dir: std.fs.Dir, side: bake.Graph, rel_path: []const u8, chun

     var bufw = std.io.bufferedWriter(file.writer());

-    try bufw.writer().print("// {s} bundled for {s}\n", .{
-        bun.fmt.quote(rel_path),
-        @tagName(side),
-    });
-    try bufw.writer().print("// Bundled at {d}, Bun " ++ bun.Global.package_json_version_with_canary ++ "\n", .{
-        std.time.nanoTimestamp(),
-    });
+    if (!bun.strings.hasSuffixComptime(rel_path, ".map")) {
+        try bufw.writer().print("// {s} bundled for {s}\n", .{
+            bun.fmt.quote(rel_path),
+            @tagName(graph),
+        });
+        try bufw.writer().print("// Bundled at {d}, Bun " ++ bun.Global.package_json_version_with_canary ++ "\n", .{
+            std.time.nanoTimestamp(),
+        });
+    }

     // Wrap in an object to make it valid syntax. Regardless, these files
     // are never executable on their own as they contain only a single module.
@@ -4689,7 +5001,7 @@ pub const HotReloadEvent = struct {

 /// All code working with atomics to communicate watcher is in this struct. It
 /// attempts to recycle as much memory as possible since files are very
-/// frequently updated.
+/// frequently updated (the whole point of HMR).
 const WatcherAtomics = struct {
     const log = Output.scoped(.DevServerWatchAtomics, true);
@@ -5162,17 +5474,15 @@ const HTMLRouter = struct {
 pub fn putOrOverwriteAsset(
     dev: *DevServer,
     abs_path: []const u8,
-    contents: []u8,
+    contents: AnyBlob,
     content_hash: u64,
 ) !void {
-    try dev.assets.putOrOverwrite(dev.allocator, abs_path, contents, content_hash);
+    dev.graph_safety_lock.lock();
+    defer dev.graph_safety_lock.unlock();
+    _ = try dev.assets.replacePath(abs_path, contents, .detectFromPath(abs_path), content_hash);
 }

-// TODO: use this structure for managing:
-// - JavaScript bundles
-// - CSS bundles
-// - Source Map generations
-// (all of these assets are accessed via hash and have a canonical filepath key)
+/// Storage for hashed assets on `/_bun/asset/{hash}.ext`
 pub const Assets = struct {
     /// Keys are absolute paths, sharing memory with the keys in IncrementalGraph(.client)
     /// Values are indexes into files
@@ -5192,18 +5502,22 @@ pub const Assets = struct {
     /// When an asset is overwritten, it receives a new URL to get around browser auto-caching.
     /// The old URL is immediately invalidated.
-    pub fn putOrOverwrite(
+    pub fn replacePath(
         assets: *Assets,
-        alloc: Allocator,
         /// not allocated
         abs_path: []const u8,
-        /// allocated by bun.default_allocator, ownership given to DevServer
-        contents: []u8,
+        contents: AnyBlob,
+        mime_type: MimeType,
         /// content hash of the asset
         content_hash: u64,
-    ) !void {
-        assets.owner().graph_safety_lock.lock();
-        defer assets.owner().graph_safety_lock.unlock();
+    ) !struct { index: u30 } {
+        const alloc = assets.owner().allocator;
+        debug.log("replacePath {} {} - {s}/{s}", .{
+            bun.fmt.quote(abs_path),
+            content_hash,
+            asset_prefix,
+            &std.fmt.bytesToHex(std.mem.asBytes(&content_hash), .lower),
+        });

         const gop = try assets.path_map.getOrPut(alloc, abs_path);
         if (!gop.found_existing) {
@@ -5217,12 +5531,15 @@ pub const Assets = struct {
             if (assets.refs.items[i] == 1) {
                 const slice = assets.files.entries.slice();
                 slice.items(.key)[i] = content_hash;
-                slice.items(.value)[i] = initStaticRouteFromBytes(alloc, contents, .detectFromPath(abs_path));
+                slice.items(.value)[i] = StaticRoute.initFromAnyBlob(contents, .{
+                    .mime_type = mime_type,
+                    .server = assets.owner().server orelse unreachable,
+                });
                 comptime assert(@TypeOf(slice.items(.hash)[0]) == void);
                 assets.needs_reindex = true;
-                return;
+                return .{ .index = @intCast(i) };
             } else {
-                assets.refs.items[gop.value_ptr.*] -= 1;
+                assets.refs.items[i] -= 1;
             }
         }

@@ -5230,12 +5547,47 @@ pub const Assets = struct {
         const file_index_gop = try assets.files.getOrPut(alloc, content_hash);
         if (!file_index_gop.found_existing) {
             try assets.refs.append(alloc, 1);
-            file_index_gop.value_ptr.* = initStaticRouteFromBytes(alloc, contents, .detectFromPath(abs_path));
+            file_index_gop.value_ptr.* = StaticRoute.initFromAnyBlob(contents, .{
+                .mime_type = mime_type,
+                .server = assets.owner().server orelse unreachable,
+            });
         } else {
-            file_index_gop.value_ptr.*.ref_count += 1;
-            bun.default_allocator.free(contents);
+            assets.refs.items[file_index_gop.index] += 1;
+            var contents_mut = contents;
+            contents_mut.detach();
         }
         gop.value_ptr.* = @intCast(file_index_gop.index);
+
+        return .{ .index = @intCast(gop.value_ptr.*) };
+    }
+
+    /// Returns a pointer to the slot where the new *StaticRoute must be stored. Returns
+    /// `null` if an asset with this hash already exists; its reference count has been incremented instead.
+ pub fn putOrIncrementRefCount(assets: *Assets, content_hash: u64, ref_count: u32) !?**StaticRoute { + const file_index_gop = try assets.files.getOrPut(assets.owner().allocator, content_hash); + if (!file_index_gop.found_existing) { + try assets.refs.append(assets.owner().allocator, ref_count); + return file_index_gop.value_ptr; + } else { + assets.refs.items[file_index_gop.index] += ref_count; + return null; + } + } + + pub fn unrefByHash(assets: *Assets, content_hash: u64, dec_count: u32) void { + assert(dec_count > 0); + const index = assets.files.getIndex(content_hash) orelse + Output.panic("Asset double unref: {s}", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&content_hash))}); + assets.refs.items[index] -= dec_count; + if (assets.refs.items[index] == 0) { + defer assert(assets.files.count() == assets.refs.items.len); + assets.files.swapRemoveAt(index); + if (index == assets.refs.items.len) { + assets.refs.items.len -= 1; + } else if (index > 0) { + assets.refs.items[index] = assets.refs.pop(); + } + } } pub fn reindexIfNeeded(assets: *Assets, alloc: Allocator) !void { @@ -5271,16 +5623,6 @@ pub fn onPluginsRejected(dev: *DevServer) !void { // TODO: allow recovery from this state } -/// `bytes` is allocated by `allocator`, ownership moved into the Blob -fn initAnyBlobFromBytes(allocator: Allocator, bytes: []u8) JSC.WebCore.AnyBlob { - return .{ .InternalBlob = .{ .bytes = .fromOwnedSlice(allocator, bytes) } }; -} - -fn initStaticRouteFromBytes(allocator: Allocator, bytes: []u8, mime_type: MimeType) *StaticRoute { - _ = mime_type; - return .initFromBlob(initAnyBlobFromBytes(allocator, bytes)); -} - const std = @import("std"); const Allocator = std.mem.Allocator; const Mutex = bun.Mutex; @@ -5302,6 +5644,8 @@ const Output = bun.Output; const Transpiler = bun.transpiler.Transpiler; const BundleV2 = bun.bundle_v2.BundleV2; +const Chunk = bun.bundle_v2.Chunk; +const ContentHasher = bun.bundle_v2.ContentHasher; const Define = bun.options.Define; @@ -5313,7 +5657,6 @@ const AnyResponse = bun.uws.AnyResponse; const MimeType = bun.http.MimeType; const JSC = bun.JSC; -const Watcher = bun.Watcher; const JSValue = JSC.JSValue; const VirtualMachine = JSC.VirtualMachine; const JSModuleLoader = JSC.JSModuleLoader; @@ -5322,6 +5665,13 @@ const HTMLBundle = JSC.API.HTMLBundle; const Plugin = JSC.API.JSBundler.Plugin; const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena; -const Chunk = bun.bundle_v2.Chunk; +const Watcher = bun.Watcher; const StaticRoute = bun.server.StaticRoute; + +const AnyBlob = JSC.WebCore.AnyBlob; + +const SourceMap = bun.sourcemap; +const VLQ = SourceMap.VLQ; + +const StringJoiner = bun.StringJoiner; diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 3c0243791d6cac..a83e7e811ccf93 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -688,17 +688,34 @@ fn getOptionalString( return allocations.track(str.toUTF8(arena)); } -pub inline fn getHmrRuntime(side: Side) [:0]const u8 { +pub const HmrRuntime = struct { + code: [:0]const u8, + /// The number of lines in the HMR runtime. This is used for sourcemap + /// generation, where the first n lines are skipped. In release, these + /// are always precalculated. u31 to allow coercion to i32 & usize. 
+ line_count: u31, + + pub fn init(code: [:0]const u8) HmrRuntime { + if (@inComptime()) @setEvalBranchQuota(@intCast(code.len)); + return .{ + .code = code, + .line_count = @intCast(std.mem.count(u8, code, "\n")), + }; + } +}; + +pub fn getHmrRuntime(side: Side) callconv(bun.callconv_inline) HmrRuntime { return if (Environment.codegen_embed) switch (side) { - .client => @embedFile("bake-codegen/bake.client.js"), - .server => @embedFile("bake-codegen/bake.server.js"), + .client => comptime .init(@embedFile("bake-codegen/bake.client.js")), + .server => comptime .init(@embedFile("bake-codegen/bake.server.js")), } - else switch (side) { - .client => bun.runtimeEmbedFile(.codegen_eager, "bake.client.js"), - // server runtime is loaded once - .server => bun.runtimeEmbedFile(.codegen, "bake.server.js"), - }; + else + .init(switch (side) { + .client => bun.runtimeEmbedFile(.codegen_eager, "bake.client.js"), + // server runtime is loaded once, so it is pointless to make this eager. + .server => bun.runtimeEmbedFile(.codegen, "bake.server.js"), + }); } pub const Mode = enum { diff --git a/src/bake/client/overlay.css b/src/bake/client/overlay.css index 171d057505691b..d3b08f23b7b989 100644 --- a/src/bake/client/overlay.css +++ b/src/bake/client/overlay.css @@ -126,6 +126,10 @@ button + .message { color: var(--color); } +.message-text:last-child { + margin-bottom: 0.5rem; +} + .log-error { --color: var(--log-error); font-weight: bold; diff --git a/src/bit_set.zig b/src/bit_set.zig index f702915dfbe581..1340ebed94e3e5 100644 --- a/src/bit_set.zig +++ b/src/bit_set.zig @@ -702,6 +702,11 @@ pub const DynamicBitSetUnmanaged = struct { var empty_masks_data = [_]MaskInt{ 0, undefined }; const empty_masks_ptr = empty_masks_data[1..2]; + pub const empty: Self = .{ + .bit_length = 0, + .masks = empty_masks_ptr, + }; + /// Do not resize the bitsets! /// /// Single buffer for multiple bitsets of equal length. Does not diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index cb405911b18da8..3ddd9702e31d14 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -3558,7 +3558,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp // If we've received the complete body by the time this function is called // we can avoid streaming it and just send it all at once. if (byte_stream.has_received_last_chunk) { - this.blob.from(byte_stream.drain().listManaged(bun.default_allocator)); + this.blob = .fromArrayList(byte_stream.drain().listManaged(bun.default_allocator)); this.readable_stream_ref.deinit(); this.doRenderBlob(); return; diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index e79b08850be225..363960d7630e65 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -2,6 +2,8 @@ //! Response object, or from globally allocated bytes. const StaticRoute = @This(); +// TODO: Remove optional. StaticRoute requires a server object or else it will +// not ensure it is alive while sending a large blob. 
server: ?AnyServer = null, status_code: u16, blob: AnyBlob, @@ -14,22 +16,40 @@ ref_count: u32 = 1, pub usingnamespace bun.NewRefCounted(@This(), deinit, null); -pub fn initFromBlob(blob: AnyBlob) *StaticRoute { - const headers = Headers.from(null, bun.default_allocator, .{ .body = &blob }) catch bun.outOfMemory(); +// pub fn initFromBlob(blob: AnyBlob) *StaticRoute { +// const headers = Headers.from(null, bun.default_allocator, .{ .body = &blob }) catch bun.outOfMemory(); +// return StaticRoute.new(.{ +// .blob = blob, +// .cached_blob_size = blob.size(), +// .has_content_disposition = false, +// .headers = headers, +// .server = null, +// .status_code = 200, +// }); +// } + +pub const InitFromBytesOptions = struct { + server: AnyServer, + mime_type: ?bun.http.MimeType = null, +}; + +pub fn initFromAnyBlob(blob: AnyBlob, options: InitFromBytesOptions) *StaticRoute { + var headers = Headers.from(null, bun.default_allocator, .{ .body = &blob }) catch bun.outOfMemory(); + if (options.mime_type) |mime_type| { + if (headers.getContentType() == null) { + headers.append("Content-Type", mime_type.value) catch bun.outOfMemory(); + } + } return StaticRoute.new(.{ .blob = blob, .cached_blob_size = blob.size(), .has_content_disposition = false, .headers = headers, - .server = null, + .server = options.server, .status_code = 200, }); } -pub const InitFromBytesOptions = struct { - mime_type: ?bun.http.MimeType = null, -}; - fn deinit(this: *StaticRoute) void { this.blob.detach(); this.headers.deinit(); @@ -161,6 +181,7 @@ pub fn onHEADRequest(this: *StaticRoute, req: *uws.Request, resp: AnyResponse) v } pub fn onHEAD(this: *StaticRoute, resp: AnyResponse) void { + bun.debugAssert(this.server != null); this.ref(); if (this.server) |server| { server.onPendingRequest(); @@ -182,6 +203,7 @@ pub fn onRequest(this: *StaticRoute, req: *uws.Request, resp: AnyResponse) void } pub fn on(this: *StaticRoute, resp: AnyResponse) void { + bun.debugAssert(this.server != null); this.ref(); if (this.server) |server| { server.onPendingRequest(); @@ -305,6 +327,17 @@ fn renderMetadata(this: *StaticRoute, resp: AnyResponse) void { this.doWriteHeaders(resp); } +pub fn onWithMethod(this: *StaticRoute, method: bun.http.Method, resp: AnyResponse) void { + switch (method) { + .GET => this.on(resp), + .HEAD => this.onHEAD(resp), + else => { + this.doWriteStatus(405, resp); // Method not allowed + resp.endWithoutBody(resp.shouldCloseConnection()); + }, + } +} + const std = @import("std"); const bun = @import("root").bun; diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index f0a0fa6cc4f1d0..61e698b933de0b 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -6634,8 +6634,8 @@ pub fn toJSHostFunction(comptime Function: JSHostZigFunction) JSC.JSHostFunction if (value != .zero) { if (globalThis.hasException()) { var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis }; - bun.Output.prettyErrorln( - \\Assertion failed: Native function returned a non-zero JSValue while an exception is pending + bun.Output.err("Assertion failed", + \\Native function returned a non-zero JSValue while an exception is pending \\ \\ fn: {s} \\ value: {} @@ -6670,8 +6670,8 @@ pub fn toJSHostFunctionWithContext(comptime ContextType: type, comptime Function if (value != .zero) { if (globalThis.hasException()) { var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis }; - bun.Output.prettyErrorln( - \\Assertion failed: Native function returned a non-zero JSValue 
while an exception is pending + bun.Output.err("Assertion failed", + \\Native function returned a non-zero JSValue while an exception is pending \\ \\ fn: {s} \\ value: {} diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig index 5ae160ca5008f3..4fb86a10566d01 100644 --- a/src/bun.js/webcore/blob.zig +++ b/src/bun.js/webcore/blob.zig @@ -5739,10 +5739,17 @@ pub const Blob = struct { pub const AnyBlob = union(enum) { Blob: Blob, - // InlineBlob: InlineBlob, InternalBlob: InternalBlob, WTFStringImpl: bun.WTF.StringImpl, + pub fn fromOwnedSlice(allocator: std.mem.Allocator, bytes: []u8) AnyBlob { + return .{ .InternalBlob = .{ .bytes = .fromOwnedSlice(allocator, bytes) } }; + } + + pub fn fromArrayList(list: std.ArrayList(u8)) AnyBlob { + return .{ .InternalBlob = .{ .bytes = list } }; + } + /// Assumed that AnyBlob itself is covered by the caller. pub fn memoryCost(this: *const AnyBlob) usize { return switch (this.*) { @@ -5771,8 +5778,16 @@ pub const AnyBlob = union(enum) { pub inline fn fastSize(this: *const AnyBlob) Blob.SizeType { return switch (this.*) { .Blob => this.Blob.size, - .WTFStringImpl => @as(Blob.SizeType, @truncate(this.WTFStringImpl.byteLength())), - else => @as(Blob.SizeType, @truncate(this.slice().len)), + .WTFStringImpl => @truncate(this.WTFStringImpl.byteLength()), + .InternalBlob => @truncate(this.slice().len), + }; + } + + pub inline fn size(this: *const AnyBlob) Blob.SizeType { + return switch (this.*) { + .Blob => this.Blob.size, + .WTFStringImpl => @truncate(this.WTFStringImpl.utf8ByteLength()), + else => @truncate(this.slice().len), }; } @@ -6006,22 +6021,6 @@ pub const AnyBlob = union(enum) { } } - pub inline fn size(this: *const AnyBlob) Blob.SizeType { - return switch (this.*) { - .Blob => this.Blob.size, - .WTFStringImpl => @as(Blob.SizeType, @truncate(this.WTFStringImpl.utf8ByteLength())), - else => @as(Blob.SizeType, @truncate(this.slice().len)), - }; - } - - pub fn from(this: *AnyBlob, list: std.ArrayList(u8)) void { - this.* = .{ - .InternalBlob = InternalBlob{ - .bytes = list, - }, - }; - } - pub fn isDetached(this: *const AnyBlob) bool { return switch (this.*) { .Blob => |blob| blob.isDetached(), diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index a5bf52f203b5b7..a5a26fad87cc1f 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -3248,7 +3248,7 @@ pub const Fetch = struct { }, .result => |result| { body.detach(); - body.AnyBlob.from(std.ArrayList(u8).fromOwnedSlice(allocator, @constCast(result.slice()))); + body = .{ .AnyBlob = .fromOwnedSlice(allocator, @constCast(result.slice())) }; http_body = .{ .AnyBlob = body.AnyBlob }; }, } @@ -3483,10 +3483,17 @@ pub const Fetch = struct { } }; -// https://developer.mozilla.org/en-US/docs/Web/API/Headers +/// https://developer.mozilla.org/en-US/docs/Web/API/Headers +// TODO: move to http.zig. 
this has nothing to do with JSC or WebCore pub const Headers = struct { - pub usingnamespace http.Headers; - entries: Headers.Entries = .{}, + pub const Entry = struct { + name: Api.StringPointer, + value: Api.StringPointer, + + pub const List = bun.MultiArrayList(Entry); + }; + + entries: Entry.List = .{}, buf: std.ArrayListUnmanaged(u8) = .{}, allocator: std.mem.Allocator, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 0a2d30aafd725a..5bd1e886d0dcdb 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -2408,6 +2408,11 @@ pub const BundleV2 = struct { this.graph.heap.helpCatchMemoryIssues(); + // Compute line offset tables, used in source maps. + this.linker.computeDataForSourceMap(@as([]Index.Int, @ptrCast(js_reachable_files))); + + this.graph.heap.helpCatchMemoryIssues(); + // Generate chunks const js_part_ranges = try this.graph.allocator.alloc(PartRange, js_reachable_files.len); const parts = this.graph.ast.items(.parts); @@ -3063,7 +3068,7 @@ pub const BundleV2 = struct { result.source.path.text, // SAFETY: when shouldCopyForBundling is true, the // contents are allocated by bun.default_allocator - @constCast(result.source.contents), + .fromOwnedSlice(bun.default_allocator, @constCast(result.source.contents)), result.content_hash_for_additional_file, ) catch bun.outOfMemory(); } @@ -6008,6 +6013,7 @@ pub const LinkerContext = struct { this: *LinkerContext, reachable: []const Index.Int, ) void { + bun.assert(this.options.source_maps != .none); this.source_maps.line_offset_wait_group.init(); this.source_maps.quoted_contents_wait_group.init(); this.source_maps.line_offset_wait_group.counter = @as(u32, @truncate(reachable.len)); @@ -10432,7 +10438,7 @@ pub const LinkerContext = struct { // Save the offset to the start of the stored JavaScript j.push(compile_result.code(), bun.default_allocator); - if (compile_result.source_map_chunk()) |source_map_chunk| { + if (compile_result.sourceMapChunk()) |source_map_chunk| { if (c.options.source_maps != .none) { try compile_results_for_source_map.append(worker.allocator, CompileResultForSourceMap{ .source_map_chunk = source_map_chunk, @@ -10668,8 +10674,8 @@ pub const LinkerContext = struct { switch (c.options.output_format) { .internal_bake_dev => { const start = bun.bake.getHmrRuntime(if (c.options.target.isServerSide()) .server else .client); - j.pushStatic(start); - line_offset.advance(start); + j.pushStatic(start.code); + line_offset.advance(start.code); }, .iife => { // Bun does not do arrow function lowering. So the wrapper can be an arrow. @@ -10779,7 +10785,7 @@ pub const LinkerContext = struct { } else { j.push(compile_result.code(), bun.default_allocator); - if (compile_result.source_map_chunk()) |source_map_chunk| { + if (compile_result.sourceMapChunk()) |source_map_chunk| { if (c.options.source_maps != .none) { try compile_results_for_source_map.append(worker.allocator, CompileResultForSourceMap{ .source_map_chunk = source_map_chunk, @@ -13471,7 +13477,7 @@ pub const LinkerContext = struct { c.source_maps.quoted_contents_tasks.len = 0; } - // For dev server, only post-process CSS chunks. + // For dev server, only post-process CSS + HTML chunks. const chunks_to_do = if (is_dev_server) chunks[1..] 
else chunks; if (!is_dev_server or chunks_to_do.len > 0) { bun.assert(chunks_to_do.len > 0); @@ -15715,7 +15721,7 @@ pub const Chunk = struct { } pub const CodeResult = struct { - buffer: string, + buffer: []u8, shifts: []sourcemap.SourceMapShifts, }; @@ -16292,7 +16298,7 @@ pub const CompileResult = union(enum) { }; } - pub fn source_map_chunk(this: *const CompileResult) ?sourcemap.Chunk { + pub fn sourceMapChunk(this: *const CompileResult) ?sourcemap.Chunk { return switch (this.*) { .javascript => |r| switch (r.result) { .result => |r2| r2.source_map, @@ -16316,9 +16322,11 @@ const CompileResultForSourceMap = struct { source_index: u32, }; -const ContentHasher = struct { +pub const ContentHasher = struct { + pub const Hash = std.hash.XxHash64; + // xxhash64 outperforms Wyhash if the file is > 1KB or so - hasher: std.hash.XxHash64 = std.hash.XxHash64.init(0), + hasher: Hash = .init(0), const log = bun.Output.scoped(.ContentHasher, true); diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 07656cefcfa870..ea181e1e9eb06d 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -1931,7 +1931,7 @@ pub const Example = struct { ), ); - var header_entries: Headers.Entries = .{}; + var header_entries: Headers.Entry.List = .{}; var headers_buf: string = ""; if (env_loader.map.get("GITHUB_TOKEN") orelse env_loader.map.get("GITHUB_ACCESS_TOKEN")) |access_token| { @@ -1939,14 +1939,14 @@ pub const Example = struct { headers_buf = try std.fmt.allocPrint(ctx.allocator, "AuthorizationBearer {s}", .{access_token}); try header_entries.append( ctx.allocator, - Headers.Kv{ - .name = Api.StringPointer{ + .{ + .name = .{ .offset = 0, - .length = @as(u32, @intCast("Authorization".len)), + .length = @intCast("Authorization".len), }, - .value = Api.StringPointer{ - .offset = @as(u32, @intCast("Authorization".len)), - .length = @as(u32, @intCast(headers_buf.len - "Authorization".len)), + .value = .{ + .offset = @intCast("Authorization".len), + .length = @intCast(headers_buf.len - "Authorization".len), }, }, ); diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index dc9204a14d0cbb..8f38f9eb0f5d51 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -186,10 +186,10 @@ pub const UpgradeCommand = struct { } } - var header_entries: Headers.Entries = .{}; - const accept = Headers.Kv{ - .name = Api.StringPointer{ .offset = 0, .length = @as(u32, @intCast("Accept".len)) }, - .value = Api.StringPointer{ .offset = @as(u32, @intCast("Accept".len)), .length = @as(u32, @intCast("application/vnd.github.v3+json".len)) }, + var header_entries: Headers.Entry.List = .empty; + const accept = Headers.Entry{ + .name = .{ .offset = 0, .length = @intCast("Accept".len) }, + .value = .{ .offset = @intCast("Accept".len), .length = @intCast("application/vnd.github.v3+json".len) }, }; try header_entries.append(allocator, accept); defer if (comptime silent) header_entries.deinit(allocator); @@ -217,14 +217,14 @@ pub const UpgradeCommand = struct { headers_buf = try std.fmt.allocPrint(allocator, default_github_headers ++ "AuthorizationBearer {s}", .{access_token}); try header_entries.append( allocator, - Headers.Kv{ - .name = Api.StringPointer{ + .{ + .name = .{ .offset = accept.value.offset + accept.value.length, - .length = @as(u32, @intCast("Authorization".len)), + .length = @intCast("Authorization".len), }, - .value = Api.StringPointer{ - .offset = @as(u32, @intCast(accept.value.offset + accept.value.length + "Authorization".len)), - .length = @as(u32, 
@intCast("Bearer ".len + access_token.len)), + .value = .{ + .offset = @intCast(accept.value.offset + accept.value.length + "Authorization".len), + .length = @intCast("Bearer ".len + access_token.len), }, }, ); diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 982a2970a8abde..f8aef7da0018d9 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -1687,6 +1687,12 @@ pub const StoredTrace = struct { var frame = stored.trace(); std.debug.captureStackTrace(begin orelse @returnAddress(), &frame); stored.index = frame.index; + for (frame.instruction_addresses[0..frame.index], 0..) |addr, i| { + if (addr == 0) { + stored.index = i; + break; + } + } return stored; } diff --git a/src/http.zig b/src/http.zig index 81f083d2d84404..c6920268c0c84e 100644 --- a/src/http.zig +++ b/src/http.zig @@ -1767,7 +1767,7 @@ pub inline fn cleanup(force: bool) void { default_arena.gc(force); } -pub const Headers = @import("./http/headers.zig"); +pub const Headers = JSC.WebCore.Headers; pub const SOCKET_FLAGS: u32 = if (Environment.isLinux) SOCK.CLOEXEC | posix.MSG.NOSIGNAL @@ -2226,7 +2226,7 @@ pub const Flags = packed struct { // TODO: reduce the size of this struct // Many of these fields can be moved to a packed struct and use less space method: Method, -header_entries: Headers.Entries, +header_entries: Headers.Entry.List, header_buf: string, url: URL, connected_url: URL = URL{}, @@ -2400,8 +2400,8 @@ pub const HTTPChannelContext = struct { pub const AsyncHTTP = struct { request: ?picohttp.Request = null, response: ?picohttp.Response = null, - request_headers: Headers.Entries = Headers.Entries{}, - response_headers: Headers.Entries = Headers.Entries{}, + request_headers: Headers.Entry.List = .empty, + response_headers: Headers.Entry.List = .empty, response_buffer: *MutableString, request_body: HTTPRequestBody = .{ .bytes = "" }, allocator: std.mem.Allocator, @@ -2551,7 +2551,7 @@ pub const AsyncHTTP = struct { allocator: std.mem.Allocator, method: Method, url: URL, - headers: Headers.Entries, + headers: Headers.Entry.List, headers_buf: string, response_buffer: *MutableString, request_body: []const u8, @@ -2671,7 +2671,7 @@ pub const AsyncHTTP = struct { allocator: std.mem.Allocator, method: Method, url: URL, - headers: Headers.Entries, + headers: Headers.Entry.List, headers_buf: string, response_buffer: *MutableString, request_body: []const u8, diff --git a/src/http/header_builder.zig b/src/http/header_builder.zig index 94744fc3263e5e..247bcf1cadc132 100644 --- a/src/http/header_builder.zig +++ b/src/http/header_builder.zig @@ -1,15 +1,15 @@ const HeaderBuilder = @This(); const StringBuilder = @import("../string_builder.zig"); -const Headers = @import("./headers.zig"); +const Headers = bun.JSC.WebCore.Headers; const string = bun.string; const HTTPClient = @import("../http.zig"); const Api = @import("../api/schema.zig").Api; const std = @import("std"); const bun = @import("root").bun; -content: StringBuilder = StringBuilder{}, +content: StringBuilder = .{}, header_count: u64 = 0, -entries: Headers.Entries = Headers.Entries{}, +entries: Headers.Entry.List = .empty, pub fn count(this: *HeaderBuilder, name: string, value: string) void { this.header_count += 1; @@ -34,7 +34,7 @@ pub fn append(this: *HeaderBuilder, name: string, value: string) void { .length = @as(u32, @truncate(value.len)), }; _ = this.content.append(value); - this.entries.appendAssumeCapacity(Headers.Kv{ .name = name_ptr, .value = value_ptr }); + this.entries.appendAssumeCapacity(.{ .name = name_ptr, .value = value_ptr }); } 
pub fn appendFmt(this: *HeaderBuilder, name: string, comptime fmt: string, args: anytype) void { @@ -52,7 +52,7 @@ pub fn appendFmt(this: *HeaderBuilder, name: string, comptime fmt: string, args: .length = @as(u32, @truncate(value.len)), }; - this.entries.appendAssumeCapacity(Headers.Kv{ .name = name_ptr, .value = value_ptr }); + this.entries.appendAssumeCapacity(.{ .name = name_ptr, .value = value_ptr }); } pub fn apply(this: *HeaderBuilder, client: *HTTPClient) void { diff --git a/src/http/headers.zig b/src/http/headers.zig deleted file mode 100644 index fe1f08e98b5c8c..00000000000000 --- a/src/http/headers.zig +++ /dev/null @@ -1,8 +0,0 @@ -const Api = @import("../api/schema.zig").Api; -const std = @import("std"); -const bun = @import("root").bun; -pub const Kv = struct { - name: Api.StringPointer, - value: Api.StringPointer, -}; -pub const Entries = bun.MultiArrayList(Kv); diff --git a/src/js_printer.zig b/src/js_printer.zig index 7d93f14c9a6592..c019fd1d41c256 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -2021,6 +2021,7 @@ fn NewPrinter( switch (expr.data) { .e_missing => {}, .e_undefined => { + p.addSourceMapping(expr.loc); p.printUndefined(expr.loc, level); }, .e_super => { @@ -2561,8 +2562,8 @@ fn NewPrinter( } p.printSpaceBeforeIdentifier(); + p.addSourceMapping(expr.loc); if (e.func.flags.contains(.is_async)) { - p.addSourceMapping(expr.loc); p.print("async "); } p.print("function"); @@ -2893,7 +2894,6 @@ fn NewPrinter( // } if (!didPrint) { - // assert(p.options.module_type != .internal_bake_dev); p.printSpaceBeforeIdentifier(); p.addSourceMapping(expr.loc); p.printSymbol(e.ref); @@ -2953,6 +2953,7 @@ fn NewPrinter( if (entry.is_keyword) { p.printSpaceBeforeIdentifier(); + p.addSourceMapping(expr.loc); p.print(entry.text); p.printSpace(); } else { @@ -3627,7 +3628,6 @@ fn NewPrinter( p.prev_stmt_tag = std.meta.activeTag(stmt.data); } - p.addSourceMapping(stmt.loc); switch (stmt.data) { .s_comment => |s| { p.printIndentedComment(s.text); @@ -3635,6 +3635,7 @@ fn NewPrinter( .s_function => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); const name = s.func.name orelse Output.panic("Internal error: expected func to have a name ref\n{any}", .{s}); const nameRef = name.ref orelse Output.panic("Internal error: expected func to have a name\n{any}", .{s}); @@ -3650,9 +3651,10 @@ fn NewPrinter( if (s.func.flags.contains(.is_generator)) { p.print("*"); p.printSpace(); + } else { + p.printSpaceBeforeIdentifier(); } - p.printSpaceBeforeIdentifier(); p.addSourceMapping(name.loc); p.printSymbol(nameRef); p.printFunc(s.func); @@ -3682,6 +3684,7 @@ fn NewPrinter( p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); const nameRef = s.class.class_name.?.ref.?; if (s.is_export) { if (!rewrite_esm_to_cjs) { @@ -3712,12 +3715,14 @@ fn NewPrinter( if (p.prev_stmt_tag == .s_empty and p.options.indent.count == 0) return; p.printIndent(); + p.addSourceMapping(stmt.loc); p.print(";"); p.printNewline(); }, .s_export_default => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("export default "); switch (s.value) { @@ -3784,6 +3789,7 @@ fn NewPrinter( } p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); if (s.alias != null) p.printWhitespacer(comptime ws("export *").append(" as ")) @@ -3803,6 +3809,7 @@ fn NewPrinter( if (rewrite_esm_to_cjs) { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); switch (s.items.len) { 0 => 
{}, @@ -3863,6 +3870,7 @@ fn NewPrinter( p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("export"); p.printSpace(); @@ -3962,6 +3970,7 @@ fn NewPrinter( .s_export_from => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); const import_record = p.importRecord(s.import_record_index); @@ -4001,6 +4010,9 @@ fn NewPrinter( p.printSemicolonAfterStatement(); }, .s_local => |s| { + p.printIndent(); + p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); switch (s.kind) { .k_const => { p.printDeclStmt(s.is_export, "const", s.decls.slice()); @@ -4021,11 +4033,12 @@ fn NewPrinter( }, .s_if => |s| { p.printIndent(); - p.printIf(s); + p.printIf(s, stmt.loc); }, .s_do_while => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("do"); switch (s.body.data) { .s_block => { @@ -4053,6 +4066,7 @@ fn NewPrinter( .s_for_in => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("for"); p.printSpace(); p.print("("); @@ -4068,6 +4082,7 @@ fn NewPrinter( .s_for_of => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("for"); if (s.is_await) { p.print(" await"); @@ -4087,6 +4102,7 @@ fn NewPrinter( .s_while => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("while"); p.printSpace(); p.print("("); @@ -4097,6 +4113,7 @@ fn NewPrinter( .s_with => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("with"); p.printSpace(); p.print("("); @@ -4106,10 +4123,10 @@ fn NewPrinter( }, .s_label => |s| { if (!p.options.minify_whitespace and p.options.indent.count > 0) { - p.addSourceMapping(stmt.loc); p.printIndent(); } p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.printSymbol(s.name.ref orelse Output.panic("Internal error: expected label to have a name {any}", .{s})); p.print(":"); p.printBody(s.stmt); @@ -4117,12 +4134,14 @@ fn NewPrinter( .s_try => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("try"); p.printSpace(); p.printBlock(s.body_loc, s.body, null); if (s.catch_) |catch_| { p.printSpace(); + p.addSourceMapping(catch_.loc); p.print("catch"); if (catch_.binding) |binding| { p.printSpace(); @@ -4131,7 +4150,7 @@ fn NewPrinter( p.print(")"); } p.printSpace(); - p.printBlock(catch_.loc, catch_.body, null); + p.printBlock(catch_.body_loc, catch_.body, null); } if (s.finally) |finally| { @@ -4146,6 +4165,7 @@ fn NewPrinter( .s_for => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("for"); p.printSpace(); p.print("("); @@ -4173,6 +4193,7 @@ fn NewPrinter( .s_switch => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("switch"); p.printSpace(); p.print("("); @@ -4235,7 +4256,7 @@ fn NewPrinter( .css => { switch (p.options.css_import_behavior) { .facade => { - + p.addSourceMapping(stmt.loc); // This comment exists to let tooling authors know which files CSS originated from // To parse this, you just look for a line that starts with //@import url(" p.print("//@import url(\""); @@ -4255,6 +4276,7 @@ fn NewPrinter( }, .auto_onimportcss, .facade_onimportcss => { + p.addSourceMapping(stmt.loc); p.print("globalThis.document?.dispatchEvent(new CustomEvent(\"onimportcss\", {detail: "); p.printStringLiteralUTF8(record.path.text, false); p.print("}));\n"); @@ -4271,6 +4293,7 @@ 
fn NewPrinter( return; }, .import_path => { + p.addSourceMapping(stmt.loc); if (s.default_name) |name| { p.print("var "); p.printSymbol(name.ref.?); @@ -4292,6 +4315,8 @@ fn NewPrinter( .napi_module => { if (comptime is_bun_platform) { p.printIndent(); + p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("var "); p.printSymbol(s.namespace_ref); p.@"print = "(); @@ -4308,6 +4333,7 @@ fn NewPrinter( p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); if (comptime is_bun_platform) { switch (record.tag) { @@ -4499,6 +4525,7 @@ fn NewPrinter( .s_debugger => { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("debugger"); p.printSemicolonAfterStatement(); }, @@ -4508,12 +4535,14 @@ fn NewPrinter( p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.printStringLiteralUTF8(s.value, false); p.printSemicolonAfterStatement(); }, .s_break => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("break"); if (s.label) |label| { p.print(" "); @@ -4525,6 +4554,7 @@ fn NewPrinter( .s_continue => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("continue"); if (s.label) |label| { @@ -4536,6 +4566,7 @@ fn NewPrinter( .s_return => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("return"); if (s.value) |value| { @@ -4547,6 +4578,7 @@ fn NewPrinter( .s_throw => |s| { p.printIndent(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(stmt.loc); p.print("throw"); p.printSpace(); p.printExpr(s.value, .lowest, ExprFlag.None()); @@ -4554,7 +4586,6 @@ fn NewPrinter( }, .s_expr => |s| { if (!p.options.minify_whitespace and p.options.indent.count > 0) { - p.addSourceMapping(stmt.loc); p.printIndent(); } @@ -4787,8 +4818,9 @@ fn NewPrinter( }, } } - pub fn printIf(p: *Printer, s: *const S.If) void { + pub fn printIf(p: *Printer, s: *const S.If, loc: logger.Loc) void { p.printSpaceBeforeIdentifier(); + p.addSourceMapping(loc); p.print("if"); p.printSpace(); p.print("("); @@ -4841,6 +4873,7 @@ fn NewPrinter( if (s.no) |no_block| { p.printSemicolonIfNeeded(); p.printSpaceBeforeIdentifier(); + p.addSourceMapping(no_block.loc); p.print("else"); switch (no_block.data) { @@ -4850,7 +4883,7 @@ fn NewPrinter( p.printNewline(); }, .s_if => { - p.printIf(no_block.data.s_if); + p.printIf(no_block.data.s_if, no_block.loc); }, else => { p.printNewline(); @@ -4937,9 +4970,6 @@ fn NewPrinter( } pub fn printDeclStmt(p: *Printer, is_export: bool, comptime keyword: string, decls: []G.Decl) void { - p.printIndent(); - p.printSpaceBeforeIdentifier(); - if (!rewrite_esm_to_cjs and is_export) { p.print("export "); } diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 50c37980c19953..7efc33b91b7f38 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -1158,14 +1158,11 @@ const vlq_lookup_table: [256]VLQ = brk: { break :brk entries; }; -const vlq_max_in_bytes = 8; +/// Source map VLQ values are limited to i32 +/// Encoding min and max ints are "//////D" and "+/////D", respectively. +/// These are 7 bytes long. This makes the `VLQ` struct 8 bytes. 
+const vlq_max_in_bytes = 7; pub const VLQ = struct { - // We only need to worry about i32 - // That means the maximum VLQ-encoded value is 8 bytes - // because there are only 4 bits of number inside each VLQ value - // and it expects i32 - // therefore, it can never be more than 32 bits long - // I believe the actual number is 7 bytes long, however we can add an extra byte to be more cautious bytes: [vlq_max_in_bytes]u8, len: u4 = 0, @@ -1602,6 +1599,14 @@ pub const Chunk = struct { /// ignore empty chunks should_ignore: bool = true, + pub const empty: Chunk = .{ + .buffer = MutableString.initEmpty(bun.default_allocator), + .mappings_count = 0, + .end_state = .{}, + .final_generated_column = 0, + .should_ignore = true, + }; + pub fn printSourceMapContents( chunk: Chunk, source: Logger.Source, From 3618add31bed87517e46573ccc0685252fc711a3 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Thu, 6 Feb 2025 21:24:35 -0800 Subject: [PATCH 19/28] a --- src/bake/DevServer.zig | 25 +++++++++++++++---------- src/bake/bake.zig | 8 +++----- src/bake/macros.ts | 2 +- src/codegen/bake-codegen.ts | 13 ++++++++++++- 4 files changed, 31 insertions(+), 17 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 874f964c6ac013..18f284aae3d1e3 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -2542,6 +2542,7 @@ pub fn IncrementalGraph(side: bake.Side) type { } }, .client => struct { + /// Content depends on `flags.kind` /// See function wrappers to safely read into this data content: extern union { /// Allocated by default_allocator. Access with `.jsCode()` @@ -2551,6 +2552,8 @@ pub fn IncrementalGraph(side: bake.Side) type { js_code_ptr: [*]const u8, /// Access with `.cssAssetId()` css_asset_id: u64, + + unknown: enum(u32) { unknown = 0 }, }, /// Separated from the pointer to reduce struct size. /// Parser does not support files >4gb anyways. 
@@ -2593,7 +2596,7 @@ pub fn IncrementalGraph(side: bake.Side) type { assert(flags.kind == .css); return .{ .content = .{ .css_asset_id = asset_id }, - .code_len = 0, + .code_len = 0, // unused .flags = flags, }; } @@ -2601,8 +2604,8 @@ pub fn IncrementalGraph(side: bake.Side) type { fn initUnknown(flags: Flags) @This() { assert(flags.kind == .unknown); return .{ - .content = .{ .css_asset_id = 0 }, - .code_len = 0, + .content = .{ .unknown = .unknown }, // unused + .code_len = 0, // unused .flags = flags, }; } @@ -2818,7 +2821,7 @@ pub fn IncrementalGraph(side: bake.Side) type { source_map orelse unreachable, // JS needs a source map (can be empty, but not null) ); }, - .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), + .css => {}, // do not need to unref css as it has been replaced already .unknown => {}, } @@ -3288,6 +3291,7 @@ pub fn IncrementalGraph(side: bake.Side) type { const source_map = &g.source_maps.items[file_index.get()]; switch (gop.value_ptr.flags.kind) { .js => g.owner().allocator.free(gop.value_ptr.jsCode()), + // TODO: fix this please .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), .unknown => {}, } @@ -3435,6 +3439,7 @@ pub fn IncrementalGraph(side: bake.Side) type { const source_map = &g.source_maps.items[file_index.get()]; switch (gop.value_ptr.flags.kind) { .js => g.owner().allocator.free(gop.value_ptr.jsCode()), + // TODO: fix this please .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), .unknown => {}, } @@ -5511,6 +5516,7 @@ pub const Assets = struct { /// content hash of the asset content_hash: u64, ) !struct { index: u30 } { + defer assert(assets.files.count() == assets.refs.items.len); const alloc = assets.owner().allocator; debug.log("replacePath {} {} - {s}/{s}", .{ bun.fmt.quote(abs_path), @@ -5540,6 +5546,7 @@ pub const Assets = struct { return .{ .index = @intCast(i) }; } else { assets.refs.items[i] -= 1; + assert(assets.refs.items[i] > 0); } } @@ -5564,6 +5571,7 @@ pub const Assets = struct { /// Returns a pointer to insert the *StaticRoute. If `null` is returned, then it /// means there is already data here. 
pub fn putOrIncrementRefCount(assets: *Assets, content_hash: u64, ref_count: u32) !?**StaticRoute { + defer assert(assets.files.count() == assets.refs.items.len); const file_index_gop = try assets.files.getOrPut(assets.owner().allocator, content_hash); if (!file_index_gop.found_existing) { try assets.refs.append(assets.owner().allocator, ref_count); @@ -5575,18 +5583,14 @@ pub const Assets = struct { } pub fn unrefByHash(assets: *Assets, content_hash: u64, dec_count: u32) void { + defer assert(assets.files.count() == assets.refs.items.len); assert(dec_count > 0); const index = assets.files.getIndex(content_hash) orelse Output.panic("Asset double unref: {s}", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&content_hash))}); assets.refs.items[index] -= dec_count; if (assets.refs.items[index] == 0) { - defer assert(assets.files.count() == assets.refs.items.len); assets.files.swapRemoveAt(index); - if (index == assets.refs.items.len) { - assets.refs.items.len -= 1; - } else if (index > 0) { - assets.refs.items[index] = assets.refs.pop(); - } + _ = assets.refs.swapRemove(index); } } @@ -5598,6 +5602,7 @@ pub const Assets = struct { } pub fn get(assets: *Assets, content_hash: u64) ?*StaticRoute { + assert(assets.files.count() == assets.refs.items.len); return assets.files.get(content_hash); } diff --git a/src/bake/bake.zig b/src/bake/bake.zig index a83e7e811ccf93..10954ba7b141a0 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -232,16 +232,14 @@ pub const Framework = struct { var fw: Framework = Framework.none; if (resolveOrNull(resolver, "react-refresh/runtime")) |rfr| { - fw.react_fast_refresh = .{ - .import_source = rfr, - }; + fw.react_fast_refresh = .{ .import_source = rfr }; } else if (resolveOrNull(resolver, "react")) |_| { fw.react_fast_refresh = .{ - .import_source = "react-refresh/runtime", + .import_source = "react-refresh/runtime/index.js", }; try fw.built_in_modules.put( arena, - "react-refresh/runtime", + "react-refresh/runtime/index.js", if (Environment.codegen_embed) .{ .code = @embedFile("bake.react-refresh-prebuilt.js") } else diff --git a/src/bake/macros.ts b/src/bake/macros.ts index 62054b34b34c17..189c37e122379d 100644 --- a/src/bake/macros.ts +++ b/src/bake/macros.ts @@ -7,7 +7,7 @@ export async function css(file: string, is_development: boolean): string { { const { success, stdout, stderr } = await Bun.spawnSync({ // TODO: remove the --experimental-css flag here once CI is upgraded to a post-#16561 bun - cmd: [process.execPath, "esbuild", file, ...(is_development ? [] : ["--minify"])], + cmd: [process.execPath, "x", "esbuild", file, ...(is_development ? [] : ["--minify"])], cwd: import.meta.dir, stdio: ["ignore", "pipe", "pipe"], }); diff --git a/src/codegen/bake-codegen.ts b/src/codegen/bake-codegen.ts index 89dfa8d3db3dfe..f3a1046690032a 100644 --- a/src/codegen/bake-codegen.ts +++ b/src/codegen/bake-codegen.ts @@ -32,17 +32,27 @@ async function run() { const results = await Promise.allSettled( ["client", "server", "error", "react-refresh"].map(async file => { + // An embedded copy of react-refresh is used when the user forgets to install it. + // The library is not versioned alongside React. 
if (file === "react-refresh") { + const reactRefresh = require.resolve( + "../../node_modules/react-refresh/cjs/react-refresh-runtime.development.js", + ); let result = await Bun.build({ - entrypoints: [require.resolve("react-refresh")], + entrypoints: [reactRefresh], minify: true, target: "browser", external: ["*"], + format: "cjs", + define: { + "process.env.NODE_ENV": JSON.stringify("development"), + }, }); if (!result.success) throw new AggregateError(result.logs); assert(result.outputs.length === 1, "must bundle to a single file"); // @ts-ignore let code = await result.outputs[0].text(); + assert(code.trim().length > 0, "react-refresh-prebuilt is empty, built from " + reactRefresh); writeIfNotChanged(join(codegenRoot, `bake.react-refresh-prebuilt.js`), code); return; } @@ -152,6 +162,7 @@ async function run() { { kind: ["client"], result: results[0] }, { kind: ["server"], result: results[1] }, { kind: ["error"], result: results[2] }, + { kind: ["react-refresh"], result: results[3] }, ] .filter(x => x.result.status === "rejected") .map(x => ({ kind: x.kind, err: x.result.reason })) as Err[]; From 03e3d2f192a6d8f588f2264453c0f22a6d1f3d8c Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Thu, 6 Feb 2025 21:47:25 -0800 Subject: [PATCH 20/28] a --- src/bake/DevServer.zig | 28 +++++++++++++++++----------- src/bake/client/css-reloader.ts | 2 +- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 18f284aae3d1e3..591d0eccd2ac0a 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -1075,7 +1075,7 @@ fn getJavaScriptCodeForHTMLFile( try bun.js_printer.writeJSONString(input_file_sources[index.get()].path.pretty, @TypeOf(w), w, .utf8); try w.writeAll("(m) {\n "); for (import_records[index.get()].slice()) |import| { - if (loaders[index.get()] == .css) continue; + if (import.source_index.isValid() and loaders[import.source_index.get()] == .css) continue; try w.writeAll(" m.dynamicImport("); try bun.js_printer.writeJSONString(import.path.pretty, @TypeOf(w), w, .utf8); try w.writeAll(");\n "); @@ -1632,12 +1632,15 @@ pub fn finalizeBundle( // Create an entry for this file. 
const key = ctx.sources[index.get()].path.keyForIncrementalGraph(); - const hash = brk: { - var hash: ContentHasher.Hash = .init(0x9a4e); // arbitrary seed - hash.update(key); - hash.update(code.buffer); - break :brk hash.final(); - }; + // const hash = brk: { + // var hash: ContentHasher.Hash = .init(0x9a4e); // arbitrary seed + // hash.update(key); + // hash.update(code.buffer); + // break :brk hash.final(); + // }; + // TODO: use a hash mix with the first half being a path hash (to identify files) and + // the second half to be the content hash (to know if the file has changed) + const hash = bun.hash(key); const asset_index = (try dev.assets.replacePath( key, .fromOwnedSlice(dev.allocator, code.buffer), @@ -3291,8 +3294,7 @@ pub fn IncrementalGraph(side: bake.Side) type { const source_map = &g.source_maps.items[file_index.get()]; switch (gop.value_ptr.flags.kind) { .js => g.owner().allocator.free(gop.value_ptr.jsCode()), - // TODO: fix this please - .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), + .css => g.owner().assets.unrefByPath(gop.key_ptr.*), .unknown => {}, } g.owner().allocator.free(source_map.vlq_chunk.slice()); @@ -3439,8 +3441,7 @@ pub fn IncrementalGraph(side: bake.Side) type { const source_map = &g.source_maps.items[file_index.get()]; switch (gop.value_ptr.flags.kind) { .js => g.owner().allocator.free(gop.value_ptr.jsCode()), - // TODO: fix this please - .css => g.owner().assets.unrefByHash(gop.value_ptr.cssAssetId(), 1), + .css => g.owner().assets.unrefByPath(gop.key_ptr.*), .unknown => {}, } g.owner().allocator.free(source_map.vlq_chunk.slice()); @@ -5594,6 +5595,11 @@ pub const Assets = struct { } } + pub fn unrefByPath(assets: *Assets, path: []const u8) void { + const entry = assets.path_map.fetchSwapRemove(path) orelse return; + assets.unrefByHash(entry.value, 1); + } + pub fn reindexIfNeeded(assets: *Assets, alloc: Allocator) !void { if (assets.needs_reindex) { try assets.files.reIndex(alloc); diff --git a/src/bake/client/css-reloader.ts b/src/bake/client/css-reloader.ts index d58f2bf4a35ad3..ecad31dac47ab6 100644 --- a/src/bake/client/css-reloader.ts +++ b/src/bake/client/css-reloader.ts @@ -110,7 +110,7 @@ function maybeAddCssLink(link: HTMLLinkElement) { const pathname = new URL(link.href).pathname; if (pathname.startsWith("/_bun/css/")) { const id = pathname.slice("/_bun/css/".length).slice(0, 16); - if ( !/^[a-f0-9]{16}$/.test(id)) { + if (!/^[a-f0-9]{16}$/.test(id)) { return; } const existing = cssStore.get(id); From c08965be184c9d590b1b4d417d3b3c8fbaa13e3c Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 15:59:34 -0800 Subject: [PATCH 21/28] i think were bing chilling now? 
--- src/HTMLScanner.zig | 7 +- src/ast/base.zig | 2 +- src/bake/DevServer.zig | 487 +++++++++++++++++++++-------- src/bake/FrameworkRouter.zig | 14 +- src/bake/bake.zig | 4 +- src/bake/client/css-reloader.ts | 4 +- src/bake/hmr-runtime-client.ts | 2 +- src/bake/production.zig | 32 +- src/bun.js/api/server.zig | 12 + src/bun.js/bindings/BunProcess.cpp | 2 +- src/bun.js/bindings/BunProcess.h | 3 +- src/bun.zig | 11 +- src/bundler/bundle_v2.zig | 98 +++--- src/cli/build_command.zig | 20 +- src/fmt.zig | 10 +- src/js_ast.zig | 4 +- src/js_parser.zig | 2 +- src/js_printer.zig | 19 +- src/renamer.zig | 2 +- src/sourcemap/sourcemap.zig | 3 +- src/sys.zig | 9 + src/watcher.zig | 43 +-- src/watcher/INotifyWatcher.zig | 2 +- src/watcher/KEventWatcher.zig | 18 +- 24 files changed, 533 insertions(+), 277 deletions(-) diff --git a/src/HTMLScanner.zig b/src/HTMLScanner.zig index 8bc6160df6f87a..4a69da3574af6c 100644 --- a/src/HTMLScanner.zig +++ b/src/HTMLScanner.zig @@ -56,9 +56,8 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi const debug = bun.Output.scoped(.HTMLScanner, true); -pub fn onWriteHTML(this: *HTMLScanner, bytes: []const u8) void { - _ = this; // autofix - _ = bytes; // autofix +pub fn onWriteHTML(_: *HTMLScanner, bytes: []const u8) void { + _ = bytes; // bytes are not written in scan phase } pub fn onHTMLParseError(this: *HTMLScanner, message: []const u8) void { @@ -70,7 +69,7 @@ pub fn onHTMLParseError(this: *HTMLScanner, message: []const u8) void { } pub fn onTag(this: *HTMLScanner, _: *lol.Element, path: []const u8, url_attribute: []const u8, kind: ImportKind) void { - _ = url_attribute; // autofix + _ = url_attribute; this.createImportRecord(path, kind) catch {}; } diff --git a/src/ast/base.zig b/src/ast/base.zig index de1f8a5a3f3d20..b1cc26244d139b 100644 --- a/src/ast/base.zig +++ b/src/ast/base.zig @@ -188,7 +188,7 @@ pub const Ref = packed struct(u64) { return this.tag == .source_contents_slice; } - pub fn init(inner_index: Int, source_index: usize, is_source_contents_slice: bool) Ref { + pub fn init(inner_index: Int, source_index: u32, is_source_contents_slice: bool) Ref { return .{ .inner_index = inner_index, .source_index = @intCast(source_index), diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 591d0eccd2ac0a..1238ed31e58278 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -6,8 +6,10 @@ //! adjusting imports) must always rebundle only that one file. //! //! All work is held in-memory, using manually managed data-oriented design. +//! For questions about DevServer, please consult the delusional @paperclover pub const DevServer = @This(); pub const debug = bun.Output.Scoped(.DevServer, false); +pub const memoryLog = bun.Output.Scoped(.DevServerMemory, true); pub const igLog = bun.Output.scoped(.IncrementalGraph, false); pub const Options = struct { @@ -33,6 +35,7 @@ pub const Options = struct { /// Used for all server-wide allocations. In debug, this shows up in /// a separate named heap. Thread-safe. +// TODO: make this an "AllocationScope" (debug memory tool i've yet to write) allocator: Allocator, /// Absolute path to project root directory. For the HMR /// runtime, its module IDs are strings relative to this. @@ -43,7 +46,9 @@ root: []const u8, configuration_hash_key: [16]u8, /// The virtual machine (global object) to execute code in. vm: *VirtualMachine, -/// May be `null` if not attached to an HTTP server yet. +/// May be `null` if not attached to an HTTP server yet. 
When no server is +/// available, functions taking in requests and responses are unavailable. +/// However, a lot of testing in this mode is missing, so it may hit assertions. server: ?bun.JSC.API.AnyServer, /// Contains the tree of routes. This structure contains FileIndex router: FrameworkRouter, @@ -61,9 +66,6 @@ incremental_result: IncrementalResult, /// are populated as the routes are discovered. The route may not be bundled OR /// navigatable, such as the case where a layout's index is looked up. route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.server).FileIndex, RouteIndexAndRecurseFlag), -/// Quickly retrieve an HTML route's index from its incremental graph index. -// TODO: store this in IncrementalGraph(.client).PackedMap (since HTML doesnt get source maps) instead of this hash map. -html_route_lookup: AutoArrayHashMapUnmanaged(IncrementalGraph(.client).FileIndex, RouteBundle.Index), /// This acts as a duplicate of the lookup table in uws, but only for HTML routes /// Used to identify what route a connected WebSocket is on, so that only /// the active pages are notified of a hot updates. @@ -104,16 +106,16 @@ bundles_since_last_error: usize = 0, framework: bake.Framework, bundler_options: bake.SplitBundlerOptions, // Each logical graph gets its own bundler configuration -server_bundler: Transpiler, -client_bundler: Transpiler, -ssr_bundler: Transpiler, -/// The log used by all `server_bundler`, `client_bundler` and `ssr_bundler`. +server_transpiler: Transpiler, +client_transpiler: Transpiler, +ssr_transpiler: Transpiler, +/// The log used by all `server_transpiler`, `client_transpiler` and `ssr_transpiler`. /// Note that it is rarely correct to write messages into it. Instead, associate /// messages with the IncrementalGraph file or Route using `SerializedFailure` log: Log, plugin_state: enum { /// Should ask server for plugins. Once plugins are loaded, the plugin - /// pointer is written into `server_bundler.options.plugin` + /// pointer is written into `server_transpiler.options.plugin` unknown, // These two states mean that `server.getOrLoadPlugins()` was called. pending, @@ -137,6 +139,7 @@ current_bundle: ?struct { had_reload_event: bool, /// After a bundle finishes, these requests will be continued, either /// calling their handler on success or sending the error page on failure. + /// Owned by `deferred_request_pool` in DevServer. requests: DeferredRequest.List, /// Resolution failures are grouped by incremental graph file index. /// Unlike parse failures (`handleParseTaskFailure`), the resolution @@ -283,6 +286,30 @@ pub const RouteBundle = struct { self.client_bundle = null; } self.client_script_generation = std.crypto.random.int(u32); + switch (self.data) { + .framework => |*fw| fw.cached_client_bundle_url.clear(), + .html => |*html| if (html.cached_response) |cached_response| { + cached_response.deref(); + html.cached_response = null; + }, + } + } + + /// Does NOT count @sizeOf(RouteBundle) + pub fn memoryCost(self: *const RouteBundle) usize { + var cost: usize = 0; + if (self.client_bundle) |bundle| cost += bundle.memoryCost(); + switch (self.data) { + .framework => { + // the JSC.Strong children do not support memoryCost. 
likely not needed + // .evaluate_failure is not owned + }, + .html => |*html| { + if (html.bundled_html_text) |text| cost += text.len; + if (html.cached_response) |cached_response| cost += cached_response.memoryCost(); + }, + } + return cost; } }; @@ -328,7 +355,6 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .incremental_result = .empty, .route_lookup = .empty, .route_bundles = .empty, - .html_route_lookup = .empty, .html_router = .empty, .current_bundle = null, .next_bundle = .{ @@ -346,9 +372,9 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { .bundling_failures = .{}, .deferred_request_pool = .init(allocator), - .server_bundler = undefined, - .client_bundler = undefined, - .ssr_bundler = undefined, + .server_transpiler = undefined, + .client_transpiler = undefined, + .ssr_transpiler = undefined, .bun_watcher = undefined, .configuration_hash_key = undefined, .router = undefined, @@ -375,25 +401,25 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { dev.bun_watcher.start() catch |err| return global.throwError(err, "while initializing file watcher thread for development server"); - dev.server_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); - dev.client_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); - dev.ssr_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.server_transpiler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.client_transpiler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); + dev.ssr_transpiler.resolver.watcher = dev.bun_watcher.getResolveWatcher(); dev.watcher_atomics = WatcherAtomics.init(dev); - dev.framework.initBundler(allocator, &dev.log, .development, .server, &dev.server_bundler) catch |err| + dev.framework.initBundler(allocator, &dev.log, .development, .server, &dev.server_transpiler) catch |err| return global.throwError(err, generic_action); - dev.server_bundler.options.dev_server = dev; - dev.framework.initBundler(allocator, &dev.log, .development, .client, &dev.client_bundler) catch |err| + dev.server_transpiler.options.dev_server = dev; + dev.framework.initBundler(allocator, &dev.log, .development, .client, &dev.client_transpiler) catch |err| return global.throwError(err, generic_action); - dev.client_bundler.options.dev_server = dev; + dev.client_transpiler.options.dev_server = dev; if (separate_ssr_graph) { - dev.framework.initBundler(allocator, &dev.log, .development, .ssr, &dev.ssr_bundler) catch |err| + dev.framework.initBundler(allocator, &dev.log, .development, .ssr, &dev.ssr_transpiler) catch |err| return global.throwError(err, generic_action); - dev.ssr_bundler.options.dev_server = dev; + dev.ssr_transpiler.options.dev_server = dev; } - dev.framework = dev.framework.resolve(&dev.server_bundler.resolver, &dev.client_bundler.resolver, options.arena) catch { + dev.framework = dev.framework.resolve(&dev.server_transpiler.resolver, &dev.client_transpiler.resolver, options.arena) catch { if (dev.framework.is_built_in_react) try bake.Framework.addReactInstallCommandNote(&dev.log); return global.throwValue(dev.log.toJSAggregateError(global, bun.String.static("Framework is missing required files!"))); @@ -495,7 +521,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { for (options.framework.file_system_router_types, 0..) 
|fsr, i| { const joined_root = bun.path.joinAbs(dev.root, .auto, fsr.root); - const entry = dev.server_bundler.resolver.readDirInfoIgnoreError(joined_root) orelse + const entry = dev.server_transpiler.resolver.readDirInfoIgnoreError(joined_root) orelse continue; const server_file = try dev.server_graph.insertStaleExtra(fsr.entry_server, false, true); @@ -538,6 +564,140 @@ pub fn init(options: Options) bun.JSOOM!*DevServer { return dev; } +pub fn deinit(dev: *DevServer) void { + // TODO: Currently deinit is not implemented, as it was assumed to be alive for + // the remainder of this process' lifespan. This isn't always true. + const allocator = dev.allocator; + if (dev.has_pre_crash_handler) + bun.crash_handler.removePreCrashHandler(dev); + allocator.destroy(dev); + // if (bun.Environment.isDebug) + // bun.todoPanic(@src(), "bake.DevServer.deinit()", .{}); +} + +/// Returns an estimation for how many bytes DevServer is explicitly aware of. +/// If this number stays constant but RSS grows, then there is a memory leak. If +/// this number grows out of control, then incremental garbage collection is not +/// good enough. +/// +/// Memory measurements are important as DevServer has a long lifetime, but +/// unlike the HTTP server, it controls a lot of objects that are frequently +/// being added, removed, and changed (as the developer edits source files). It +/// is exponentially easy to mess up memory management. +pub fn memoryCost(dev: *DevServer) usize { + var cost: usize = @sizeOf(DevServer); + // See https://github.com/ziglang/zig/issues/21879 + voidFieldTypes(DevServer).* = .{ + // does not contain pointers + .allocator = {}, + .configuration_hash_key = {}, + .graph_safety_lock = {}, + .bun_watcher = {}, + .watcher_atomics = {}, + .plugin_state = {}, + .generation = {}, + .bundles_since_last_error = {}, + .emit_visualizer_events = {}, + .dump_dir = {}, + .has_pre_crash_handler = {}, + .frontend_only = {}, + .server_fetch_function_callback = {}, + .server_register_update_callback = {}, + .deferred_request_pool = {}, + + // pointers that are not considered a part of DevServer + .vm = {}, + .server = {}, + .server_transpiler = {}, + .client_transpiler = {}, + .ssr_transpiler = {}, + .log = {}, + .framework = {}, // TODO: maybe + .bundler_options = {}, // TODO: maybe + + // to be counted. 
+ .root = { + cost += dev.root.len; + }, + .router = { + cost += dev.router.memoryCost(); + }, + .route_bundles = for (dev.route_bundles.items) |*bundle| { + cost += bundle.memoryCost(); + }, + .server_graph = { + cost += dev.server_graph.memoryCost(); + }, + .client_graph = { + cost += dev.client_graph.memoryCost(); + }, + .assets = { + cost += dev.assets.memoryCost(); + }, + .incremental_result = { + cost += memoryCostArrayList(dev.incremental_result.client_components_added); + cost += memoryCostArrayList(dev.incremental_result.html_routes_affected); + cost += memoryCostArrayList(dev.incremental_result.framework_routes_affected); + cost += memoryCostArrayList(dev.incremental_result.client_components_removed); + cost += memoryCostArrayList(dev.incremental_result.failures_removed); + cost += memoryCostArrayList(dev.incremental_result.client_components_affected); + cost += memoryCostArrayList(dev.incremental_result.failures_added); + }, + .has_tailwind_plugin_hack = if (dev.has_tailwind_plugin_hack) |hack| { + cost += memoryCostArrayHashMap(hack); + }, + .directory_watchers = { + cost += memoryCostArrayList(dev.directory_watchers.dependencies); + cost += memoryCostArrayList(dev.directory_watchers.dependencies_free_list); + cost += memoryCostArrayHashMap(dev.directory_watchers.watches); + for (dev.directory_watchers.dependencies.items) |dep| { + cost += dep.specifier.len; + } + }, + .html_router = { + // std does not provide a way to measure exact allocation size of HashMapUnmanaged + cost += dev.html_router.map.capacity() * (@sizeOf(*HTMLBundle.HTMLBundleRoute) + @sizeOf([]const u8)); + // DevServer does not count the referenced HTMLBundle.HTMLBundleRoutes + }, + .bundling_failures = { + cost += memoryCostSlice(dev.bundling_failures.keys()); + for (dev.bundling_failures.keys()) |failure| { + cost += failure.data.len; + } + }, + .current_bundle = { + // All entries are owned by the bundler arena, not DevServer, except for `requests` + if (dev.current_bundle) |bundle| { + var r = bundle.requests.first; + while (r) |request| : (r = request.next) { + cost += @sizeOf(DeferredRequest.Node); + } + } + }, + .next_bundle = { + var r = dev.next_bundle.requests.first; + while (r) |request| : (r = request.next) { + cost += @sizeOf(DeferredRequest.Node); + } + cost += memoryCostArrayHashMap(dev.next_bundle.route_queue); + }, + .route_lookup = { + cost += memoryCostArrayHashMap(dev.route_lookup); + }, + }; + return cost; +} + +fn memoryCostArrayList(slice: anytype) usize { + return slice.capacity * @sizeOf(@typeInfo(@TypeOf(slice.items)).pointer.child); +} +fn memoryCostSlice(slice: anytype) usize { + return slice.len * @sizeOf(@typeInfo(@TypeOf(slice)).pointer.child); +} +fn memoryCostArrayHashMap(map: anytype) usize { + return @TypeOf(map.entries).capacityInBytes(map.entries.capacity); +} + fn initServerRuntime(dev: *DevServer) void { const runtime = bun.String.static(bun.bake.getHmrRuntime(.server).code); @@ -567,7 +727,7 @@ fn initServerRuntime(dev: *DevServer) void { fn scanInitialRoutes(dev: *DevServer) !void { try dev.router.scanAll( dev.allocator, - &dev.server_bundler.resolver, + &dev.server_transpiler.resolver, FrameworkRouter.InsertionContext.wrap(DevServer, dev), ); @@ -611,17 +771,6 @@ pub fn attachRoutes(dev: *DevServer, server: anytype) !bool { } } -pub fn deinit(dev: *DevServer) void { - // TODO: Currently deinit is not implemented, as it was assumed to be alive for - // the remainder of this process' lifespan. This isn't always true. 
- const allocator = dev.allocator; - if (dev.has_pre_crash_handler) - bun.crash_handler.removePreCrashHandler(dev); - allocator.destroy(dev); - // if (bun.Environment.isDebug) - // bun.todoPanic(@src(), "bake.DevServer.deinit()", .{}); -} - fn onJsRequest(dev: *DevServer, req: *Request, resp: AnyResponse) void { const route_id = req.parameter(0); if (!bun.strings.hasSuffixComptime(route_id, ".js")) @@ -826,7 +975,6 @@ fn ensureRouteIsBundled( .loaded => switch (kind) { .server_handler => dev.onFrameworkRequestWithBundle(route_bundle_index, .{ .stack = req }, resp), .bundled_html_page => dev.onHtmlRequestWithBundle(route_bundle_index, resp, bun.http.Method.which(req.method()) orelse .POST), - .js_payload => dev.onJsRequestWithBundle(route_bundle_index, resp, bun.http.Method.which(req.method()) orelse .POST), }, } } @@ -843,13 +991,9 @@ fn deferRequest( deferred.data = .{ .route_bundle_index = route_bundle_index, .handler = switch (kind) { - // POST is specified for unknown methods. - .js_payload => .{ .js_payload = .{ .response = resp, .method = bun.http.Method.which(req.method()) orelse .POST } }, .bundled_html_page => .{ .bundled_html_page = .{ .response = resp, .method = bun.http.Method.which(req.method()) orelse .POST } }, .server_handler => .{ - // TODO: SSL by moving this to AnyServer.prepareAndSaveJsRequestContext - .server_handler = (dev.server.?.DebugHTTPServer.prepareJsRequestContext(req, resp.TCP, null) orelse return) - .save(dev.vm.global, req, resp.TCP), + .server_handler = dev.server.?.prepareAndSaveJsRequestContext(req, resp, dev.vm.global) orelse return, }, }, }; @@ -937,22 +1081,16 @@ fn onFrameworkRequestWithBundle( }, // clientId bundle.cached_client_bundle_url.get() orelse str: { - break :str { - @panic("TODO"); - }; - // const id, const route_index: RouteBundle.Index.Optional = if (router_type.client_file != .none) - // .{ std.crypto.random.int(u64), route_bundle_index.toOptional() } - // else - // // When there is no framework-provided client code, generate - // // a JS file so that the hot-reloading code can reload the - // // page on server-side changes and show errors in-browser. - // .{ 0, .none }; - // dev.route_js_payloads.put(dev.allocator, id, route_index) catch bun.outOfMemory(); - // const str = bun.String.createFormat(client_prefix ++ "/route.{}.js", .{std.fmt.fmtSliceHexLower(std.mem.asBytes(&id))}) catch bun.outOfMemory(); - // defer str.deref(); - // const js = str.toJS(dev.vm.global); - // bundle.cached_client_bundle_url = JSC.Strong.create(js, dev.vm.global); - // break :str js; + const bundle_index: u32 = route_bundle_index.get(); + const generation: u32 = route_bundle.client_script_generation; + const str = bun.String.createFormat(client_prefix ++ "/route-{}{}.js", .{ + std.fmt.fmtSliceHexLower(std.mem.asBytes(&bundle_index)), + std.fmt.fmtSliceHexLower(std.mem.asBytes(&generation)), + }) catch bun.outOfMemory(); + defer str.deref(); + const js = str.toJS(dev.vm.global); + bundle.cached_client_bundle_url = JSC.Strong.create(js, dev.vm.global); + break :str js; }, // styles bundle.cached_css_file_array.get() orelse arr: { @@ -1160,9 +1298,6 @@ const DeferredRequest = struct { server_handler: bun.JSC.API.SavedRequest, /// For a .html route. Serve the bundled HTML page. bundled_html_page: ResponseAndMethod, - /// Serve the JavaScript payload for this route. - /// TODO: remove this - js_payload: ResponseAndMethod, /// Do nothing and free this node. To simplify lifetimes, /// the `DeferredRequest` is not freed upon abortion. 
Which /// is okay since most requests do not abort. @@ -1173,7 +1308,6 @@ const DeferredRequest = struct { const Kind = enum { server_handler, bundled_html_page, - js_payload, }; }; @@ -1189,7 +1323,7 @@ const DeferredRequest = struct { fn deinit(this: *DeferredRequest) void { switch (this.handler) { .server_handler => |*saved| saved.deinit(), - .bundled_html_page, .js_payload, .aborted => {}, + .bundled_html_page, .aborted => {}, } } @@ -1200,7 +1334,7 @@ const DeferredRequest = struct { saved.response.endWithoutBody(true); saved.deinit(); }, - .bundled_html_page, .js_payload => |r| { + .bundled_html_page => |r| { r.response.endWithoutBody(true); }, .aborted => return, @@ -1235,11 +1369,11 @@ fn startAsyncBundle( ast_memory_allocator.push(); const bv2 = try BundleV2.init( - &dev.server_bundler, + &dev.server_transpiler, .{ .framework = dev.framework, - .client_bundler = &dev.client_bundler, - .ssr_bundler = &dev.ssr_bundler, + .client_transpiler = &dev.client_transpiler, + .ssr_transpiler = &dev.ssr_transpiler, .plugins = dev.bundler_options.plugin, }, allocator, @@ -1605,7 +1739,7 @@ pub fn finalizeBundle( const source_map: SourceMap.Chunk = compile_result.sourceMapChunk() orelse brk: { // The source map is `null` if empty bun.assert(compile_result.javascript.result == .result); - bun.assert(dev.server_bundler.options.source_map != .none); + bun.assert(dev.server_transpiler.options.source_map != .none); bun.assert(!part_range.source_index.isRuntime()); break :brk .empty; }; @@ -1675,8 +1809,6 @@ pub fn finalizeBundle( } } - // TODO: consider storing something in `result.html_files.values()` as a - // means to removing the hashmap lookup in `dev.html_route_lookup` for (result.htmlChunks()) |*chunk| { const index = bun.JSAst.Index.init(chunk.entry_point.source_index); const compile_result = chunk.compile_results_for_chunk[0].html; @@ -1690,12 +1822,11 @@ pub fn finalizeBundle( &ctx, index, .{ .js = generated_js }, - .empty, // HTML chunk does not have a source map. + null, // HTML chunk does not have a source map. 
false, ); const client_index = ctx.getCachedIndex(.client, index).*; - const route_bundle_index = dev.html_route_lookup.get(client_index) orelse - @panic("Route for HTML file was not registered"); + const route_bundle_index = dev.client_graph.htmlRouteBundleIndex(client_index); const route_bundle = dev.routeBundlePtr(route_bundle_index); assert(route_bundle.data.html.bundled_file == client_index); const html = &route_bundle.data.html; @@ -1710,8 +1841,8 @@ pub fn finalizeBundle( } html.bundled_html_text = compile_result.code; - html.head_end_tag_index = RouteBundle.HTML.ByteOffset.init(compile_result.offsets.head_end_tag).toOptional(); - html.body_end_tag_index = RouteBundle.HTML.ByteOffset.init(compile_result.offsets.body_end_tag).toOptional(); + html.head_end_tag_index = .init(compile_result.offsets.head_end_tag); + html.body_end_tag_index = .init(compile_result.offsets.body_end_tag); chunk.entry_point.entry_point_id = @intCast(route_bundle_index.get()); } @@ -2004,7 +2135,7 @@ pub fn finalizeBundle( saved.deinit(); break :brk resp; }, - .js_payload, .bundled_html_page => |ram| ram.response, + .bundled_html_page => |ram| ram.response, }; resp.corked(sendSerializedFailures, .{ @@ -2026,12 +2157,25 @@ pub fn finalizeBundle( Output.enableBuffering(); } + if (Environment.isDebug and memoryLog.isVisible()) { + Output.prettyErrorln("DevServer: {}, RSS: {}", .{ + bun.fmt.size(dev.memoryCost(), .{}), + bun.fmt.size(bun.sys.selfProcessMemoryUsage() orelse 0, .{}), + }); + } + dev.bundles_since_last_error += 1; if (dev.bundles_since_last_error > 1) { Output.prettyError("[x{d}] ", .{dev.bundles_since_last_error}); } } else { dev.bundles_since_last_error = 0; + if (Environment.isDebug and memoryLog.isVisible()) { + Output.prettyErrorln("DevServer: {}, RSS: {}", .{ + bun.fmt.size(dev.memoryCost(), .{}), + bun.fmt.size(bun.sys.selfProcessMemoryUsage() orelse 0, .{}), + }); + } } Output.prettyError("{s} in {d}ms", .{ @@ -2085,7 +2229,6 @@ pub fn finalizeBundle( .aborted => continue, .server_handler => |saved| dev.onFrameworkRequestWithBundle(req.route_bundle_index, .{ .saved = saved }, saved.response), .bundled_html_page => |ram| dev.onHtmlRequestWithBundle(req.route_bundle_index, ram.response, ram.method), - .js_payload => |ram| dev.onJsRequestWithBundle(req.route_bundle_index, ram.response, ram.method), } } } @@ -2301,11 +2444,7 @@ fn getOrPutRouteBundle(dev: *DevServer, route: RouteBundle.UnresolvedIndex) !Rou } }, .html => |html| brk: { const incremental_graph_index = try dev.client_graph.insertStaleExtra(html.html_bundle.path, false, true); - try dev.html_route_lookup.put( - dev.allocator, - incremental_graph_index, - bundle_index, - ); + dev.client_graph.source_maps.items[incremental_graph_index.get()].extra.empty.html_bundle_route_index = .init(bundle_index.get()); break :brk .{ .html = .{ .html_bundle = html, .bundled_file = incremental_graph_index, @@ -2431,7 +2570,7 @@ const FileKind = enum(u2) { /// /// Upon bundle completion, both `client_graph` and `server_graph` have their /// `receiveChunk` methods called with all new chunks, counting the total length -/// needed. A call to `takeBundle` joins all of the chunks, resulting in the +/// needed. A call to `takeJSBundle` joins all of the chunks, resulting in the /// code to send to client or evaluate on the server. /// /// Then, `processChunkDependencies` is called on each chunk to update the @@ -2444,6 +2583,12 @@ const FileKind = enum(u2) { /// do not emit duplicate dependencies. 
By tracing `imports` on each file in /// the module graph recursively, the full bundle for any given route can /// be re-materialized (required when pressing Cmd+R after any client update) +/// +/// Since source mappings are all relative to their previous mapping, each +/// chunk's mappings can be stored in the graph, and very trivially built into +/// JSON source map files (`takeSourceMap`), even after hot updates. The +/// lifetime for these sourcemaps is a bit tricky and depend on the lifetime of +/// of WebSocket connections; see comments in `Assets` for more details. pub fn IncrementalGraph(side: bake.Side) type { return struct { // Unless otherwise mentioned, all data structures use DevServer's allocator. @@ -2507,7 +2652,7 @@ pub fn IncrementalGraph(side: bake.Side) type { .edges_free_list = .empty, .current_chunk_len = 0, - .current_chunk_parts = .{}, + .current_chunk_parts = .empty, .current_css_files = if (side == .client) .empty, }; @@ -2573,7 +2718,8 @@ pub fn IncrementalGraph(side: bake.Side) type { /// This is a file is an entry point to the framework. /// Changing this will always cause a full page reload. is_special_framework_file: bool, - /// If this file has a HTML RouteBundle and associated entry in `html_route_lookup` + /// If this file has a HTML RouteBundle. The bundle index is tucked away in: + /// `graph.source_maps.items[i].extra.empty.html_bundle_route_index` is_html_route: bool, /// CSS files get special handling kind: FileKind, @@ -2660,31 +2806,41 @@ pub fn IncrementalGraph(side: bake.Side) type { ))[0..chunk.len]; } }, - /// Used to track the last state of the source map chunk. This - /// is used when concatenating chunks. The generated column - /// is not tracked because it is always zero (all chunks end - /// in a newline because minification is off), and the generated - /// line is recomputed on demand - end_state: struct { - /// This field is overloaded for an empty chunk's line count. - original_line: i32, - original_column: i32, + /// This field is overloaded depending on if the source map data is + /// present or not (.len != 0). + extra: extern union { + /// Used to track the last state of the source map chunk. This + /// is used when concatenating chunks. 
The generated column + /// is not tracked because it is always zero (all chunks end + /// in a newline because minification is off), and the generated + /// line is recomputed on demand + end_state: extern struct { + original_line: i32, + original_column: i32, + }, + empty: extern struct { + line_count: bun.GenericIndex(u32, u8).Optional, + // Cannot use RouteBundle.Index because `extern union` above :( + html_bundle_route_index: bun.GenericIndex(u32, u8).Optional, + }, }, pub const empty: PackedMap = .{ .vlq_chunk = .init(""), - .end_state = .{ - .original_line = 0, - .original_column = 0, - }, + .extra = .{ .empty = .{ + .line_count = .none, + .html_bundle_route_index = .none, + } }, }; pub fn fromSourceMap(source_map: SourceMap.Chunk) PackedMap { return if (source_map.buffer.list.items.len > 0) .{ .vlq_chunk = .init(source_map.buffer.list.items), - .end_state = .{ - .original_line = source_map.end_state.original_line, - .original_column = source_map.end_state.original_column, + .extra = .{ + .end_state = .{ + .original_line = source_map.end_state.original_line, + .original_column = source_map.end_state.original_column, + }, }, } else .empty; } @@ -2717,6 +2873,29 @@ pub fn IncrementalGraph(side: bake.Side) type { /// An index into `edges` const EdgeIndex = bun.GenericIndex(u32, Edge); + pub fn deinit() void { + @panic("TODO"); + } + + /// Does NOT count @sizeOf(@This()) + pub fn memoryCost(g: *@This()) usize { + var cost: usize = 0; + cost += memoryCostArrayHashMap(g.bundled_files); + cost += g.stale_files.bytes().len; + cost += memoryCostArrayList(g.first_dep); + cost += memoryCostArrayList(g.first_import); + cost += memoryCostArrayList(g.edges); + cost += memoryCostArrayList(g.edges_free_list); + cost += memoryCostArrayList(g.current_chunk_parts); + + if (side == .client) { + cost += memoryCostArrayList(g.source_maps); + cost += memoryCostArrayList(g.current_css_files); + } + + return cost; + } + fn getFileIndex(g: *@This(), path: []const u8) ?FileIndex { return if (g.bundled_files.getIndex(path)) |i| FileIndex.init(@intCast(i)) else null; } @@ -2726,6 +2905,14 @@ pub fn IncrementalGraph(side: bake.Side) type { return g.bundled_files.values()[index.get()]; } + pub fn htmlRouteBundleIndex(g: *@This(), index: FileIndex) RouteBundle.Index { + if (Environment.allow_assert) { + assert(g.bundled_files.values()[index.get()].flags.is_html_route); + } + return .init(@intCast((g.source_maps.items[index.get()].extra.empty.html_bundle_route_index.unwrap() orelse + @panic("Internal assertion failure: HTML bundle not registered correctly")).get())); + } + /// Tracks a bundled code chunk for cross-bundle chunks, /// ensuring it has an entry in `bundled_files`. 
/// @@ -2820,9 +3007,7 @@ pub fn IncrementalGraph(side: bake.Side) type { .js => { bun.default_allocator.free(gop.value_ptr.jsCode()); bun.default_allocator.free(g.source_maps.items[file_index.get()].vlq_chunk.slice()); - g.source_maps.items[file_index.get()] = .fromSourceMap( - source_map orelse unreachable, // JS needs a source map (can be empty, but not null) - ); + if (source_map) |map| g.source_maps.items[file_index.get()] = .fromSourceMap(map); }, .css => {}, // do not need to unref css as it has been replaced already .unknown => {}, @@ -3161,12 +3346,7 @@ pub fn IncrementalGraph(side: bake.Side) type { Output.panic("Server Incremental Graph is missing component for {}", .{bun.fmt.quote(key)}); try dev.server_graph.traceDependencies(index, gts, trace_kind); } else if (file.flags.is_html_route) { - const route_bundle_index = dev.html_route_lookup.get(file_index) orelse { - Output.panic("HTML route not in lookup index: {d} {}", .{ - file_index.get(), - bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), - }); - }; + const route_bundle_index = dev.client_graph.htmlRouteBundleIndex(file_index); try dev.incremental_result.html_routes_affected.append(dev.allocator, route_bundle_index); if (trace_kind == .stop_at_boundary) return; @@ -3297,9 +3477,14 @@ pub fn IncrementalGraph(side: bake.Side) type { .css => g.owner().assets.unrefByPath(gop.key_ptr.*), .unknown => {}, } - g.owner().allocator.free(source_map.vlq_chunk.slice()); - source_map.* = .empty; + if (source_map.vlq_chunk.len > 0) { + g.owner().allocator.free(source_map.vlq_chunk.slice()); + source_map.* = .empty; + } flags.is_html_route = flags.is_html_route or gop.value_ptr.flags.is_html_route; + flags.failed = gop.value_ptr.flags.failed; + flags.is_special_framework_file = gop.value_ptr.flags.is_special_framework_file; + flags.is_hmr_root = gop.value_ptr.flags.is_hmr_root; } gop.value_ptr.* = File.initUnknown(flags); }, @@ -3431,7 +3616,7 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (side) { .client => { var flags: File.Flags = .{ - .failed = false, + .failed = true, .is_hmr_root = false, .is_special_framework_file = false, .is_html_route = false, @@ -3444,9 +3629,13 @@ pub fn IncrementalGraph(side: bake.Side) type { .css => g.owner().assets.unrefByPath(gop.key_ptr.*), .unknown => {}, } - g.owner().allocator.free(source_map.vlq_chunk.slice()); - source_map.* = .empty; - flags.is_html_route = flags.is_html_route or gop.value_ptr.flags.is_html_route; + if (source_map.vlq_chunk.len > 0) { + g.owner().allocator.free(source_map.vlq_chunk.slice()); + source_map.* = .empty; + } + flags.is_html_route = gop.value_ptr.flags.is_html_route; + flags.is_special_framework_file = gop.value_ptr.flags.is_special_framework_file; + flags.is_hmr_root = gop.value_ptr.flags.is_hmr_root; } gop.value_ptr.* = File.initUnknown(flags); }, @@ -3729,23 +3918,26 @@ pub fn IncrementalGraph(side: bake.Side) type { .original_column = 0, }; - var lines_between: i32 = runtime.line_count + 2; + // +2 because the magic fairy in my dreams said it would align the source maps. 
+ var lines_between: u32 = runtime.line_count + 2; var non_empty_source_index: i32 = 0; for (g.current_chunk_parts.items) |entry| { const source_map = &source_maps[entry.get()]; if (source_map.vlq_chunk.len == 0) { - if (source_map.end_state.original_line == 0) { - const count = bun.strings.countChar(files[entry.get()].jsCode(), '\n'); - source_map.end_state.original_line = @intCast(count); + if (source_map.extra.empty.line_count.unwrap()) |line_count| { + lines_between += line_count.get(); + } else { + const count: u32 = @intCast(bun.strings.countChar(files[entry.get()].jsCode(), '\n')); + source_map.extra.empty.line_count = .init(count); + lines_between += count; } - lines_between += source_map.end_state.original_line; continue; } const start_state: SourceMap.SourceMapState = .{ .source_index = non_empty_source_index, - .generated_line = lines_between, + .generated_line = @intCast(lines_between), .generated_column = 0, .original_line = 0, .original_column = 0, @@ -3764,8 +3956,8 @@ pub fn IncrementalGraph(side: bake.Side) type { .source_index = non_empty_source_index, .generated_line = 0, .generated_column = 0, - .original_line = source_map.end_state.original_line, - .original_column = source_map.end_state.original_column, + .original_line = source_map.extra.end_state.original_line, + .original_column = source_map.extra.end_state.original_column, }; non_empty_source_index += 1; @@ -3788,7 +3980,6 @@ pub fn IncrementalGraph(side: bake.Side) type { } fn disconnectAndDeleteFile(g: *@This(), file_index: FileIndex) void { - bun.assert(g.bundled_files.count() > 1); // never remove all files bun.assert(g.first_dep.items[file_index.get()] == .none); // must have no dependencies // Disconnect all imports @@ -3813,9 +4004,10 @@ pub fn IncrementalGraph(side: bake.Side) type { g.owner().allocator.free(keys[file_index.get()]); keys[file_index.get()] = ""; // cannot be `undefined` as it may be read by hashmap logic - // TODO: it is infeasible to swapRemove a file since FrameworkRouter - // contains file indices to the server graph. Instead, `file_index` - // should go in a free-list for use by new files. + // TODO: it is infeasible to swapRemove a file since + // FrameworkRouter, SerializedFailure, and more structures contains + // file indices to the server graph. Instead, `file_index` should + // go in a free-list for use by new files. } fn newEdge(g: *@This(), edge: Edge) !EdgeIndex { @@ -4059,7 +4251,7 @@ const DirectoryWatchStore = struct { errdefer store.watches.swapRemoveAt(gop.index); // Try to use an existing open directory handle - const cache_fd = if (dev.server_bundler.resolver.readDirInfo(dir_name_to_watch) catch null) |cache| fd: { + const cache_fd = if (dev.server_transpiler.resolver.readDirInfo(dir_name_to_watch) catch null) |cache| fd: { const fd = cache.getFileDescriptor(); break :fd if (fd == .zero) null else fd; } else null; @@ -5204,7 +5396,7 @@ pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []? }, .directory => { // bust the directory cache since this directory has changed - _ = dev.server_bundler.resolver.bustDirCache(bun.strings.withoutTrailingSlashWindowsPath(file_path)); + _ = dev.server_transpiler.resolver.bustDirCache(bun.strings.withoutTrailingSlashWindowsPath(file_path)); // if a directory watch exists for resolution // failures, check those now. @@ -5218,7 +5410,7 @@ pub fn onFileUpdate(dev: *DevServer, events: []Watcher.Event, changed_files: []? 
while (it) |index| { const dep = &dev.directory_watchers.dependencies.items[index.get()]; it = dep.next.unwrap(); - if ((dev.server_bundler.resolver.resolve( + if ((dev.server_transpiler.resolver.resolve( bun.path.dirname(dep.source_file_path, .auto), dep.specifier, .stmt, @@ -5613,8 +5805,19 @@ pub const Assets = struct { } pub fn deinit(assets: *Assets, alloc: Allocator) void { - assets.map.deinit(alloc); - assets.hash_lookups.deinit(alloc); + assets.path_map.deinit(alloc); + for (assets.files.values()) |blob| blob.deref(); + assets.files.deinit(alloc); + assets.refs.deinit(alloc); + } + + pub fn memoryCost(assets: *Assets) usize { + var cost: usize = 0; + cost += memoryCostArrayHashMap(assets.path_map); + for (assets.files.values()) |blob| cost += blob.memoryCost(); + cost += memoryCostArrayHashMap(assets.files); + cost += memoryCostArrayList(assets.refs); + return cost; } }; @@ -5634,6 +5837,24 @@ pub fn onPluginsRejected(dev: *DevServer) !void { // TODO: allow recovery from this state } +/// userland implementation of https://github.com/ziglang/zig/issues/21879 +fn voidFieldTypes(comptime T: type) *brk: { + const fields = @typeInfo(T).@"struct".fields; + var new_fields = fields[0..fields.len].*; + for (&new_fields) |*field| { + field.type = void; + field.default_value_ptr = null; + } + break :brk @Type(.{ .@"struct" = .{ + .layout = .auto, + .fields = &new_fields, + .decls = &.{}, + .is_tuple = false, + } }); +} { + return undefined; +} + const std = @import("std"); const Allocator = std.mem.Allocator; const Mutex = bun.Mutex; diff --git a/src/bake/FrameworkRouter.zig b/src/bake/FrameworkRouter.zig index 3182e8ca7c594a..f8f4a3bfb06913 100644 --- a/src/bake/FrameworkRouter.zig +++ b/src/bake/FrameworkRouter.zig @@ -133,9 +133,19 @@ pub fn initEmpty(root: []const u8, types: []Type, allocator: Allocator) !Framewo pub fn deinit(fr: *FrameworkRouter, allocator: Allocator) void { fr.routes.deinit(allocator); + fr.static_routes.deinit(allocator); + fr.dynamic_routes.deinit(allocator); allocator.free(fr.types); } +pub fn memoryCost(fr: *FrameworkRouter) usize { + var cost: usize = @sizeOf(FrameworkRouter); + cost += fr.routes.capacity * @sizeOf(Route); + cost += StaticRouteMap.DataList.capacityInBytes(fr.static_routes.entries.capacity); + cost += DynamicRouteMap.DataList.capacityInBytes(fr.dynamic_routes.entries.capacity); + return cost; +} + pub fn scanAll(fr: *FrameworkRouter, allocator: Allocator, r: *Resolver, ctx: anytype) !void { for (fr.types, 0..) |ty, i| { _ = ty; @@ -691,7 +701,7 @@ pub fn insert( .type = ty, .parent = route_index.toOptional(), .first_child = .none, - .prev_sibling = Route.Index.Optional.init(next), + .prev_sibling = .init(next), .next_sibling = .none, }); @@ -709,7 +719,7 @@ pub fn insert( .type = ty, .parent = new_route_index.toOptional(), .first_child = .none, - .prev_sibling = Route.Index.Optional.init(next), + .prev_sibling = .init(next), .next_sibling = .none, }); fr.routePtr(new_route_index).first_child = newer_route_index.toOptional(); diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 10954ba7b141a0..1010dd9610a2bd 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -690,8 +690,8 @@ pub const HmrRuntime = struct { code: [:0]const u8, /// The number of lines in the HMR runtime. This is used for sourcemap /// generation, where the first n lines are skipped. In release, these - /// are always precalculated. u31 to allow coercion to i32 & usize. - line_count: u31, + /// are always precalculated. 
+ line_count: u32, pub fn init(code: [:0]const u8) HmrRuntime { if (@inComptime()) @setEvalBranchQuota(@intCast(code.len)); diff --git a/src/bake/client/css-reloader.ts b/src/bake/client/css-reloader.ts index ecad31dac47ab6..7fb19976b11305 100644 --- a/src/bake/client/css-reloader.ts +++ b/src/bake/client/css-reloader.ts @@ -108,8 +108,8 @@ const headObserver = new MutationObserver(list => { function maybeAddCssLink(link: HTMLLinkElement) { const pathname = new URL(link.href).pathname; - if (pathname.startsWith("/_bun/css/")) { - const id = pathname.slice("/_bun/css/".length).slice(0, 16); + if (pathname.startsWith("/_bun/asset/")) { + const id = pathname.slice("/_bun/asset/".length).slice(0, 16); if (!/^[a-f0-9]{16}$/.test(id)) { return; } diff --git a/src/bake/hmr-runtime-client.ts b/src/bake/hmr-runtime-client.ts index 0b4b6766401c2b..fb19a5146b4b17 100644 --- a/src/bake/hmr-runtime-client.ts +++ b/src/bake/hmr-runtime-client.ts @@ -68,7 +68,7 @@ const ws = initWebSocket({ return; } - ws.send("she"); // IncomingMessageId.subscribe with hot_update and route_update + ws.send("she"); // IncomingMessageId.subscribe with hot_update and errors ws.send("n" + location.pathname); // IncomingMessageId.set_url }, [MessageId.hot_update](view) { diff --git a/src/bake/production.zig b/src/bake/production.zig index b7be6033dca1cf..a974926683020d 100644 --- a/src/bake/production.zig +++ b/src/bake/production.zig @@ -171,17 +171,17 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa try loader.map.put("NODE_ENV", "production"); bun.DotEnv.instance = loader; - var client_bundler: bun.transpiler.Transpiler = undefined; - var server_bundler: bun.transpiler.Transpiler = undefined; - var ssr_bundler: bun.transpiler.Transpiler = undefined; - try framework.initBundler(allocator, vm.log, .production_static, .server, &server_bundler); - try framework.initBundler(allocator, vm.log, .production_static, .client, &client_bundler); + var client_transpiler: bun.transpiler.Transpiler = undefined; + var server_transpiler: bun.transpiler.Transpiler = undefined; + var ssr_transpiler: bun.transpiler.Transpiler = undefined; + try framework.initBundler(allocator, vm.log, .production_static, .server, &server_transpiler); + try framework.initBundler(allocator, vm.log, .production_static, .client, &client_transpiler); if (separate_ssr_graph) { - try framework.initBundler(allocator, vm.log, .production_static, .ssr, &ssr_bundler); + try framework.initBundler(allocator, vm.log, .production_static, .ssr, &ssr_transpiler); } if (ctx.bundler_options.bake_debug_disable_minify) { - for ([_]*bun.transpiler.Transpiler{ &client_bundler, &server_bundler, &ssr_bundler }) |transpiler| { + for ([_]*bun.transpiler.Transpiler{ &client_transpiler, &server_transpiler, &ssr_transpiler }) |transpiler| { transpiler.options.minify_syntax = false; transpiler.options.minify_identifiers = false; transpiler.options.minify_whitespace = false; @@ -192,14 +192,14 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa } // these share pointers right now, so setting NODE_ENV == production on one should affect all - bun.assert(server_bundler.env == client_bundler.env); + bun.assert(server_transpiler.env == client_transpiler.env); - framework.* = framework.resolve(&server_bundler.resolver, &client_bundler.resolver, allocator) catch { + framework.* = framework.resolve(&server_transpiler.resolver, &client_transpiler.resolver, allocator) catch { if (framework.is_built_in_react) - try 
bake.Framework.addReactInstallCommandNote(server_bundler.log); + try bake.Framework.addReactInstallCommandNote(server_transpiler.log); Output.errGeneric("Failed to resolve all imports required by the framework", .{}); Output.flush(); - server_bundler.log.print(Output.errorWriter()) catch {}; + server_transpiler.log.print(Output.errorWriter()) catch {}; bun.Global.crash(); }; @@ -222,7 +222,7 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa for (options.framework.file_system_router_types) |fsr| { const joined_root = bun.path.joinAbs(cwd, .auto, fsr.root); - const entry = server_bundler.resolver.readDirInfoIgnoreError(joined_root) orelse + const entry = server_transpiler.resolver.readDirInfoIgnoreError(joined_root) orelse continue; try router_types.append(allocator, .{ .abs_root = bun.strings.withoutTrailingSlashWindowsPath(entry.abs_path), @@ -244,17 +244,17 @@ pub fn buildWithVm(ctx: bun.CLI.Command.Context, cwd: []const u8, vm: *VirtualMa var router = try FrameworkRouter.initEmpty(cwd, router_types.items, allocator); try router.scanAll( allocator, - &server_bundler.resolver, + &server_transpiler.resolver, FrameworkRouter.InsertionContext.wrap(EntryPointMap, &entry_points), ); const bundled_outputs_list = try bun.BundleV2.generateFromBakeProductionCLI( entry_points, - &server_bundler, + &server_transpiler, .{ .framework = framework.*, - .client_bundler = &client_bundler, - .ssr_bundler = if (separate_ssr_graph) &ssr_bundler else &server_bundler, + .client_transpiler = &client_transpiler, + .ssr_transpiler = if (separate_ssr_graph) &ssr_transpiler else &server_transpiler, .plugins = options.bundler_options.plugin, }, allocator, diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 3ddd9702e31d14..66ec1fbcf64638 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -7832,6 +7832,18 @@ pub const AnyServer = union(enum) { }; } + pub fn prepareAndSaveJsRequestContext( + server: AnyServer, + req: *uws.Request, + resp: uws.AnyResponse, + global: *JSC.JSGlobalObject, + ) ?SavedRequest { + return switch (server) { + inline .HTTPServer, .DebugHTTPServer => |s| (s.prepareJsRequestContext(req, resp.TCP, null) orelse return null).save(global, req, resp.TCP), + inline .HTTPSServer, .DebugHTTPSServer => |s| (s.prepareJsRequestContext(req, resp.SSL, null) orelse return null).save(global, req, resp.SSL), + }; + } + pub fn numSubscribers(this: AnyServer, topic: []const u8) u32 { return switch (this) { inline else => |server| server.app.?.numSubscribers(topic), diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index a3063b71420077..3581aa64928fc9 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -2770,7 +2770,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionCpuUsage, (JSC::JSGlobalObject * global RELEASE_AND_RETURN(throwScope, JSC::JSValue::encode(result)); } -int getRSS(size_t* rss) +extern "C" int getRSS(size_t* rss) { #if defined(__APPLE__) mach_msg_type_number_t count; diff --git a/src/bun.js/bindings/BunProcess.h b/src/bun.js/bindings/BunProcess.h index 3fbbfd01429691..8475dbd994463d 100644 --- a/src/bun.js/bindings/BunProcess.h +++ b/src/bun.js/bindings/BunProcess.h @@ -12,8 +12,7 @@ class GlobalObject; namespace Bun { -// TODO: find a better place for this -int getRSS(size_t* rss); +extern "C" int getRSS(size_t* rss); using namespace JSC; diff --git a/src/bun.zig b/src/bun.zig index 9307bce5bc4089..cebfe1cc172073 100644 --- a/src/bun.zig +++ b/src/bun.zig 
@@ -4050,8 +4050,15 @@ pub fn GenericIndex(backing_int: type, uid: anytype) type { none = std.math.maxInt(backing_int), _, - pub inline fn init(maybe: ?Index) Optional { - return if (maybe) |i| i.toOptional() else .none; + /// Signatures: + /// - `init(maybe: ?Index) Optional` + /// - `init(maybe: ?backing_int) Optional` + pub inline fn init(maybe: anytype) Optional { + comptime var t = @typeInfo(@TypeOf(maybe)); + if (t == .optional) t = @typeInfo(t.optional.child); + if (t == .int or t == .comptime_int) + return if (@as(?backing_int, maybe)) |i| Index.init(i).toOptional() else .none; + return if (@as(?Index, maybe)) |i| i.toOptional() else .none; } pub inline fn unwrap(oi: Optional) ?Index { diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 5bd1e886d0dcdb..548daa3d6233e3 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -377,9 +377,9 @@ pub const BundleV2 = struct { transpiler: *Transpiler, /// When Server Component is enabled, this is used for the client bundles /// and `transpiler` is used for the server bundles. - client_bundler: *Transpiler, + client_transpiler: *Transpiler, /// See bake.Framework.ServerComponents.separate_ssr_graph - ssr_bundler: *Transpiler, + ssr_transpiler: *Transpiler, /// When Bun Bake is used, the resolved framework is passed here framework: ?bake.Framework, graph: Graph, @@ -412,8 +412,8 @@ pub const BundleV2 = struct { const BakeOptions = struct { framework: bake.Framework, - client_bundler: *Transpiler, - ssr_bundler: *Transpiler, + client_transpiler: *Transpiler, + ssr_transpiler: *Transpiler, plugins: ?*JSC.API.JSBundler.Plugin, }; @@ -448,8 +448,8 @@ pub const BundleV2 = struct { this.transpiler else switch (target) { else => this.transpiler, - .browser => this.client_bundler, - .bake_server_components_ssr => this.ssr_bundler, + .browser => this.client_transpiler, + .bake_server_components_ssr => this.ssr_transpiler, }; } @@ -937,7 +937,7 @@ pub const BundleV2 = struct { this.graph.estimated_file_loader_count += 1; } - batch.push(ThreadPoolLib.Batch.from(&task.task)); + batch.push(.from(&task.task)); } try this.graph.entry_points.append(this.graph.allocator, source_index); @@ -962,8 +962,8 @@ pub const BundleV2 = struct { this.* = .{ .transpiler = transpiler, - .client_bundler = transpiler, - .ssr_bundler = transpiler, + .client_transpiler = transpiler, + .ssr_transpiler = transpiler, .framework = null, .graph = .{ .pool = undefined, @@ -985,15 +985,15 @@ pub const BundleV2 = struct { .thread_lock = bun.DebugThreadLock.initLocked(), }; if (bake_options) |bo| { - this.client_bundler = bo.client_bundler; - this.ssr_bundler = bo.ssr_bundler; + this.client_transpiler = bo.client_transpiler; + this.ssr_transpiler = bo.ssr_transpiler; this.framework = bo.framework; this.linker.framework = &this.framework.?; this.plugins = bo.plugins; if (transpiler.options.server_components) { - bun.assert(this.client_bundler.options.server_components); + bun.assert(this.client_transpiler.options.server_components); if (bo.framework.server_components.?.separate_ssr_graph) - bun.assert(this.ssr_bundler.options.server_components); + bun.assert(this.ssr_transpiler.options.server_components); } } this.linker.graph.allocator = this.graph.heap.allocator(); @@ -1108,7 +1108,7 @@ pub const BundleV2 = struct { runtime_parse_task.tree_shaking = true; runtime_parse_task.loader = .js; this.incrementScanCounter(); - batch.push(ThreadPoolLib.Batch.from(&runtime_parse_task.task)); + batch.push(.from(&runtime_parse_task.task)); } // Bake reserves 
two source indexes at the start of the file list, but @@ -1512,12 +1512,12 @@ pub const BundleV2 = struct { pub fn generateFromBakeProductionCLI( entry_points: bake.production.EntryPointMap, - server_bundler: *Transpiler, + server_transpiler: *Transpiler, kit_options: BakeOptions, allocator: std.mem.Allocator, event_loop: EventLoop, ) !std.ArrayList(options.OutputFile) { - var this = try BundleV2.init(server_bundler, kit_options, allocator, event_loop, false, null, null); + var this = try BundleV2.init(server_transpiler, kit_options, allocator, event_loop, false, null, null); this.unique_key = generateUniqueKey(); if (this.transpiler.log.hasErrors()) { @@ -2757,7 +2757,7 @@ pub const BundleV2 = struct { } break :brk .{ - this.ssr_bundler, + this.ssr_transpiler, .ssr, .bake_server_components_ssr, }; @@ -4592,7 +4592,7 @@ pub const ParseTask = struct { // set the target to the client when bundling client-side files (task.known_target == .browser)) { - transpiler = this.ctx.client_bundler; + transpiler = this.ctx.client_transpiler; resolver = &transpiler.resolver; bun.assert(transpiler.options.target == .browser); } @@ -5379,8 +5379,8 @@ const LinkerGraph = struct { const source_symbols = &this.symbols.symbols_for_source.slice()[source_index]; var ref = Ref.init( - @as(Ref.Int, @truncate(source_symbols.len)), - @as(Ref.Int, @truncate(source_index)), + @truncate(source_symbols.len), + @truncate(source_index), false, ); ref.tag = .symbol; @@ -5761,7 +5761,7 @@ const LinkerGraph = struct { /// a Source.Index to its output path inb reakOutputIntoPieces entry_point_chunk_index: u32 = std.math.maxInt(u32), - line_offset_table: bun.sourcemap.LineOffsetTable.List = .{}, + line_offset_table: bun.sourcemap.LineOffsetTable.List = .empty, quoted_source_contents: string = "", pub fn isEntryPoint(this: *const File) bool { @@ -6034,8 +6034,8 @@ pub const LinkerContext = struct { .source_index = source_index, .thread_task = .{ .callback = &SourceMapData.Task.runQuotedSourceContents }, }; - batch.push(ThreadPoolLib.Batch.from(&line_offset.thread_task)); - second_batch.push(ThreadPoolLib.Batch.from("ed.thread_task)); + batch.push(.from(&line_offset.thread_task)); + second_batch.push(.from("ed.thread_task)); } // line offsets block sooner and are faster to compute, so we should schedule those first @@ -8937,7 +8937,8 @@ pub const LinkerContext = struct { ambiguous, }; }; - pub fn source_(c: *LinkerContext, index: anytype) *const Logger.Source { + + pub fn getSource(c: *LinkerContext, index: usize) *const Logger.Source { return &c.parse_graph.input_files.items(.source)[index]; } @@ -10548,7 +10549,7 @@ pub const LinkerContext = struct { worker.allocator, c.resolver.opts.target, ast.toAST(), - c.source_(chunk.entry_point.source_index), + c.getSource(chunk.entry_point.source_index), print_options, cross_chunk_import_records.slice(), &[_]Part{ @@ -10561,7 +10562,7 @@ pub const LinkerContext = struct { worker.allocator, c.resolver.opts.target, ast.toAST(), - c.source_(chunk.entry_point.source_index), + c.getSource(chunk.entry_point.source_index), print_options, &.{}, &[_]Part{ @@ -11675,7 +11676,7 @@ pub const LinkerContext = struct { allocator, c.resolver.opts.target, ast.toAST(), - c.source_(source_index), + c.getSource(source_index), print_options, ast.import_records.slice(), &[_]Part{ @@ -13182,7 +13183,7 @@ pub const LinkerContext = struct { source_index: Index, ) js_printer.PrintResult { const parts_to_print = &[_]Part{ - Part{ .stmts = out_stmts }, + .{ .stmts = out_stmts }, }; const print_options = 
js_printer.Options{ @@ -13207,13 +13208,14 @@ pub const LinkerContext = struct { .has_run_symbol_renamer = true, .allocator = allocator, + .source_map_allocator = writer.buffer.allocator, .to_esm_ref = to_esm_ref, .to_commonjs_ref = to_commonjs_ref, .require_ref = switch (c.options.output_format) { - .cjs => null, + .cjs => null, // use unbounded global else => runtime_require_ref, }, - .require_or_import_meta_for_source_callback = js_printer.RequireOrImportMeta.Callback.init( + .require_or_import_meta_for_source_callback = .init( LinkerContext, requireOrImportMetaForSource, c, @@ -13238,7 +13240,7 @@ pub const LinkerContext = struct { &printer, ast.target, ast.toAST(), - c.source_(source_index.get()), + c.getSource(source_index.get()), print_options, ast.import_records.slice(), parts_to_print, @@ -13315,13 +13317,7 @@ pub const LinkerContext = struct { // Per CSS chunk: // Remove duplicate rules across files. This must be done in serial, not // in parallel, and must be done from the last rule to the first rule. - if (brk: { - // TODO: Have count of chunks with css on linker context? - for (chunks) |*chunk| { - if (chunk.content == .css) break :brk true; - } - break :brk false; - }) { + if (c.parse_graph.css_file_count > 0) { var wait_group = try c.allocator.create(sync.WaitGroup); wait_group.init(); defer { @@ -13352,13 +13348,17 @@ pub const LinkerContext = struct { .linker = c, .wg = wait_group, }; - batch.push(ThreadPoolLib.Batch.from(&tasks[i].task)); + batch.push(.from(&tasks[i].task)); i += 1; } } wait_group.counter = @as(u32, @truncate(total_count)); c.parse_graph.pool.pool.schedule(batch); wait_group.wait(); + } else if (Environment.isDebug) { + for (chunks) |*chunk| { + bun.assert(chunk.content != .css); + } } } @@ -13423,13 +13423,13 @@ pub const LinkerContext = struct { remaining_part_ranges[0] = .{ .part_range = part_range, - .i = @truncate(i), + .i = @intCast(i), .task = .{ .callback = &generateCompileResultForJSChunk, }, .ctx = chunk_ctx, }; - batch.push(ThreadPoolLib.Batch.from(&remaining_part_ranges[0].task)); + batch.push(.from(&remaining_part_ranges[0].task)); remaining_part_ranges = remaining_part_ranges[1..]; } @@ -13438,13 +13438,13 @@ pub const LinkerContext = struct { for (0..chunk.content.css.imports_in_chunk_in_order.len) |i| { remaining_part_ranges[0] = .{ .part_range = .{}, - .i = @as(u32, @truncate(i)), - .task = ThreadPoolLib.Task{ + .i = @intCast(i), + .task = .{ .callback = &generateCompileResultForCssChunk, }, .ctx = chunk_ctx, }; - batch.push(ThreadPoolLib.Batch.from(&remaining_part_ranges[0].task)); + batch.push(.from(&remaining_part_ranges[0].task)); remaining_part_ranges = remaining_part_ranges[1..]; } @@ -13453,13 +13453,13 @@ pub const LinkerContext = struct { remaining_part_ranges[0] = .{ .part_range = .{}, .i = 0, - .task = ThreadPoolLib.Task{ + .task = .{ .callback = &generateCompileResultForHtmlChunk, }, .ctx = chunk_ctx, }; - batch.push(ThreadPoolLib.Batch.from(&remaining_part_ranges[0].task)); + batch.push(.from(&remaining_part_ranges[0].task)); remaining_part_ranges = remaining_part_ranges[1..]; }, } @@ -14675,7 +14675,7 @@ pub const LinkerContext = struct { // Warn about importing from a file that is known to not have any exports if (status == .cjs_without_exports) { - const source = c.source_(tracker.source_index.get()); + const source = c.getSource(tracker.source_index.get()); c.log.addRangeWarningFmt( source, source.rangeOfIdentifier(named_import.alias_loc.?), @@ -14726,9 +14726,9 @@ pub const LinkerContext = struct { // Report mismatched 
imports and exports const symbol = c.graph.symbols.get(tracker.import_ref).?; const named_import: js_ast.NamedImport = named_imports[prev_source_index].get(tracker.import_ref).?; - const source = c.source_(prev_source_index); + const source = c.getSource(prev_source_index); - const next_source = c.source_(next_tracker.source_index.get()); + const next_source = c.getSource(next_tracker.source_index.get()); const r = source.rangeOfIdentifier(named_import.alias_loc.?); // Report mismatched imports and exports diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index 0b371a67232890..37e2feb1b27dde 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -235,18 +235,18 @@ pub const BuildCommand = struct { .unspecified => {}, } - var client_bundler: transpiler.Transpiler = undefined; + var client_transpiler: transpiler.Transpiler = undefined; if (this_transpiler.options.server_components) { - client_bundler = try transpiler.Transpiler.init(allocator, log, ctx.args, null); - client_bundler.options = this_transpiler.options; - client_bundler.options.target = .browser; - client_bundler.options.server_components = true; - client_bundler.options.conditions = try this_transpiler.options.conditions.clone(); + client_transpiler = try transpiler.Transpiler.init(allocator, log, ctx.args, null); + client_transpiler.options = this_transpiler.options; + client_transpiler.options.target = .browser; + client_transpiler.options.server_components = true; + client_transpiler.options.conditions = try this_transpiler.options.conditions.clone(); try this_transpiler.options.conditions.appendSlice(&.{"react-server"}); this_transpiler.options.react_fast_refresh = false; this_transpiler.options.minify_syntax = true; - client_bundler.options.minify_syntax = true; - client_bundler.options.define = try options.Define.init( + client_transpiler.options.minify_syntax = true; + client_transpiler.options.define = try options.Define.init( allocator, if (ctx.args.define) |user_defines| try options.Define.Data.fromInput(try options.stringHashMapFromArrays( @@ -262,10 +262,10 @@ pub const BuildCommand = struct { ); try bun.bake.addImportMetaDefines(allocator, this_transpiler.options.define, .development, .server); - try bun.bake.addImportMetaDefines(allocator, client_bundler.options.define, .development, .client); + try bun.bake.addImportMetaDefines(allocator, client_transpiler.options.define, .development, .client); this_transpiler.resolver.opts = this_transpiler.options; - client_bundler.resolver.opts = client_bundler.options; + client_transpiler.resolver.opts = client_transpiler.options; } // var env_loader = this_transpiler.env; diff --git a/src/fmt.zig b/src/fmt.zig index 179c29a26ef0ff..2720f0b6f9e062 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -1491,12 +1491,12 @@ pub const SizeFormatter = struct { } }; -pub fn size(value: anytype, opts: SizeFormatter.Options) SizeFormatter { +pub fn size(bytes: anytype, opts: SizeFormatter.Options) SizeFormatter { return .{ - .value = switch (@TypeOf(value)) { - f64, f32, f128 => @intFromFloat(value), - i64, isize => @intCast(value), - else => value, + .value = switch (@TypeOf(bytes)) { + f64, f32, f128 => @intFromFloat(bytes), + i64, isize => @intCast(bytes), + else => bytes, }, .opts = opts, }; diff --git a/src/js_ast.zig b/src/js_ast.zig index 22341b119d7528..38105e00907294 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1301,8 +1301,8 @@ pub const Symbol = struct { .{ symbol.original_name, @tagName(symbol.kind), if (symbol.hasLink()) symbol.link 
else Ref{ - .source_index = @as(Ref.Int, @truncate(i)), - .inner_index = @as(Ref.Int, @truncate(inner_index)), + .source_index = @truncate(i), + .inner_index = @truncate(inner_index), .tag = .symbol, }, }, diff --git a/src/js_parser.zig b/src/js_parser.zig index 8d88beb6ccc9d6..d9da97b9699a52 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -9263,7 +9263,7 @@ fn NewParser_( } pub fn newSymbol(p: *P, kind: Symbol.Kind, identifier: string) !Ref { - const inner_index = @as(Ref.Int, @truncate(p.symbols.items.len)); + const inner_index: Ref.Int = @truncate(p.symbols.items.len); try p.symbols.append(Symbol{ .kind = kind, .original_name = identifier, diff --git a/src/js_printer.zig b/src/js_printer.zig index c019fd1d41c256..a9fcaa86406492 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -450,6 +450,7 @@ pub const Options = struct { module_hash: u32 = 0, source_path: ?fs.Path = null, allocator: std.mem.Allocator = default_allocator, + source_map_allocator: ?std.mem.Allocator = null, source_map_handler: ?SourceMapHandler = null, source_map_builder: ?*bun.sourcemap.Chunk.Builder = null, css_import_behavior: Api.CssInJsBehavior = Api.CssInJsBehavior.facade, @@ -5670,8 +5671,8 @@ pub fn getSourceMapBuilder( return undefined; return .{ - .source_map = SourceMap.Chunk.Builder.SourceMapper.init( - opts.allocator, + .source_map = .init( + opts.source_map_allocator orelse opts.allocator, is_bun_platform and generate_source_map == .lazy, ), .cover_lines_without_mappings = true, @@ -5679,15 +5680,14 @@ pub fn getSourceMapBuilder( .prepend_count = is_bun_platform and generate_source_map == .lazy, .line_offset_tables = opts.line_offset_tables orelse brk: { if (generate_source_map == .lazy) break :brk SourceMap.LineOffsetTable.generate( - opts.allocator, + opts.source_map_allocator orelse opts.allocator, source.contents, @as( i32, @intCast(tree.approximate_newline_count), ), ); - - break :brk SourceMap.LineOffsetTable.List{}; + break :brk .empty; }, }; } @@ -5932,7 +5932,7 @@ pub fn print( pub fn printWithWriter( comptime Writer: type, - _writer: Writer, + writer: Writer, target: options.Target, ast: Ast, source: *const logger.Source, @@ -5945,7 +5945,7 @@ pub fn printWithWriter( return switch (target.isBun()) { inline else => |is_bun| printWithWriterAndPlatform( Writer, - _writer, + writer, is_bun, ast, source, @@ -5961,7 +5961,7 @@ pub fn printWithWriter( /// The real one pub fn printWithWriterAndPlatform( comptime Writer: type, - _writer: Writer, + writer: Writer, comptime is_bun_platform: bool, ast: Ast, source: *const logger.Source, @@ -5984,7 +5984,6 @@ pub fn printWithWriterAndPlatform( false, generate_source_maps, ); - const writer = _writer; var printer = PrinterType.init( writer, import_records, @@ -5997,7 +5996,7 @@ pub fn printWithWriterAndPlatform( defer printer.binary_expression_stack.clearAndFree(); defer printer.temporary_bindings.deinit(bun.default_allocator); - defer _writer.* = printer.writer.*; + defer writer.* = printer.writer.*; defer { imported_module_ids_list = printer.imported_module_ids; } diff --git a/src/renamer.zig b/src/renamer.zig index 5b7a0d1981b276..ac4c38cda3f03b 100644 --- a/src/renamer.zig +++ b/src/renamer.zig @@ -597,7 +597,7 @@ pub const NumberRenamer = struct { std.sort.pdq(u32, sorted.items, {}, std.sort.asc(u32)); for (sorted.items) |inner_index| { - r.assignName(s, Ref.init(@as(Ref.Int, @intCast(inner_index)), source_index, false)); + r.assignName(s, Ref.init(@intCast(inner_index), source_index, false)); } } diff --git a/src/sourcemap/sourcemap.zig 
b/src/sourcemap/sourcemap.zig index 7efc33b91b7f38..bca26ab6acdcfa 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -1665,13 +1665,14 @@ pub const Chunk = struct { return output; } + // TODO: remove the indirection by having generic functions for SourceMapFormat and NewBuilder. Source maps are always VLQ pub fn SourceMapFormat(comptime Type: type) type { return struct { ctx: Type, const Format = @This(); pub fn init(allocator: std.mem.Allocator, prepend_count: bool) Format { - return Format{ .ctx = Type.init(allocator, prepend_count) }; + return .{ .ctx = Type.init(allocator, prepend_count) }; } pub inline fn appendLineSeparator(this: *Format) anyerror!void { diff --git a/src/sys.zig b/src/sys.zig index b90fbbbe2095c7..1062fcffbcd297 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -4541,3 +4541,12 @@ pub const coreutils_error_map = brk: { break :brk map; }; + +extern fn getRSS(rss: *usize) c_int; +pub fn selfProcessMemoryUsage() ?usize { + var rss: usize = undefined; + if (getRSS(&rss) != 0) { + return null; + } + return rss; +} diff --git a/src/watcher.zig b/src/watcher.zig index 1e881e19f85356..ae0be0426810be 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -1,17 +1,11 @@ //! Bun's cross-platform filesystem watcher. Runs on its own thread. const Watcher = @This(); -pub const max_count = 128; - -pub const Event = WatchEvent; -pub const Item = WatchItem; -pub const ItemList = WatchList; -pub const WatchList = std.MultiArrayList(WatchItem); -pub const HashType = u32; -const no_watch_item: WatchItemIndex = std.math.maxInt(WatchItemIndex); +const DebugLogScope = bun.Output.Scoped(.watcher, false); +const log = DebugLogScope.log; // Consumer-facing -watch_events: [128]WatchEvent, -changed_filepaths: [128]?[:0]u8, +watch_events: [max_count]WatchEvent, +changed_filepaths: [max_count]?[:0]u8, /// The platform-specific implementation of the watcher platform: Platform, @@ -37,6 +31,15 @@ onError: *const fn (this: *anyopaque, err: bun.sys.Error) void, thread_lock: bun.DebugThreadLock = bun.DebugThreadLock.unlocked, +pub const max_count = 128; + +pub const Event = WatchEvent; +pub const Item = WatchItem; +pub const ItemList = WatchList; +pub const WatchList = std.MultiArrayList(WatchItem); +pub const HashType = u32; +const no_watch_item: WatchItemIndex = std.math.maxInt(WatchItemIndex); + /// Initializes a watcher. Each watcher is tied to some context type, which /// recieves watch callbacks on the watcher thread. This function does not /// actually start the watcher thread. 
@@ -68,7 +71,7 @@ pub fn init(comptime T: type, ctx: *T, fs: *bun.fs.FileSystem, allocator: std.me const watcher = try allocator.create(Watcher); errdefer allocator.destroy(watcher); - watcher.* = Watcher{ + watcher.* = .{ .fs = fs, .allocator = allocator, .watched_count = 0, @@ -80,7 +83,7 @@ pub fn init(comptime T: type, ctx: *T, fs: *bun.fs.FileSystem, allocator: std.me .onError = &wrapped.onErrorWrapped, .platform = .{}, .watch_events = undefined, - .changed_filepaths = [_]?[:0]u8{null} ** 128, + .changed_filepaths = [_]?[:0]u8{null} ** max_count, }; try Platform.init(&watcher.platform, fs.top_level_dir); @@ -118,10 +121,6 @@ pub fn getHash(filepath: string) HashType { pub const WatchItemIndex = u16; pub const max_eviction_count = 8096; - -const DebugLogScope = bun.Output.Scoped(.watcher, false); -const log = DebugLogScope.log; - const WindowsWatcher = @import("./watcher/WindowsWatcher.zig"); // TODO: some platform-specific behavior is implemented in // this file instead of the platform-specific file. @@ -536,11 +535,13 @@ pub fn appendFileMaybeLock( } if (DebugLogScope.isVisible()) { - if (strings.indexOf(file_path, this.cwd)) |i| { - log("Added ./{s} to watch list.", .{file_path[i + this.cwd.len ..]}); - } else { - log("Added {s} to watch list.", .{file_path}); - } + const cwd_len_with_slash = if (this.cwd[this.cwd.len - 1] == '/') this.cwd.len else this.cwd.len + 1; + log("Added {s} to watch list.", .{ + if (file_path.len > cwd_len_with_slash and bun.strings.startsWith(file_path, this.cwd)) + file_path[cwd_len_with_slash..] + else + file_path, + }); } return .{ .result = {} }; diff --git a/src/watcher/INotifyWatcher.zig b/src/watcher/INotifyWatcher.zig index d86694d6338bf3..653841431bf7dd 100644 --- a/src/watcher/INotifyWatcher.zig +++ b/src/watcher/INotifyWatcher.zig @@ -1,7 +1,7 @@ //! Bun's filesystem watcher implementation for linux using inotify //! https://man7.org/linux/man-pages/man7/inotify.7.html const INotifyWatcher = @This(); -const log = Output.scoped(.inotify, false); +const log = Output.scoped(.watcher, false); // inotify events are variable-sized, so a byte buffer is used (also needed // since communication is done via the `read` syscall). 
what is notable about diff --git a/src/watcher/KEventWatcher.zig b/src/watcher/KEventWatcher.zig index e1037db01a3506..b6d6181450e5ca 100644 --- a/src/watcher/KEventWatcher.zig +++ b/src/watcher/KEventWatcher.zig @@ -1,4 +1,5 @@ const KEventWatcher = @This(); +const log = Output.scoped(.watcher, false); pub const EventListIndex = u32; const KEvent = std.c.Kevent; @@ -48,32 +49,29 @@ pub fn watchLoopCycle(this: *Watcher) bun.JSC.Maybe(void) { var count = std.posix.system.kevent( this.platform.fd.cast(), - @as([*]KEvent, changelist), + changelist, 0, - @as([*]KEvent, changelist), + changelist, 128, - - null, + null, // timeout ); // Give the events more time to coalesce if (count < 128 / 2) { const remain = 128 - count; - var timespec = std.posix.timespec{ .sec = 0, .nsec = 100_000 }; const extra = std.posix.system.kevent( this.platform.fd.cast(), - @as([*]KEvent, changelist[@as(usize, @intCast(count))..].ptr), + changelist[@intCast(count)..].ptr, 0, - @as([*]KEvent, changelist[@as(usize, @intCast(count))..].ptr), + changelist[@intCast(count)..].ptr, remain, - - ×pec, + &.{ .sec = 0, .nsec = 100_000 }, // 0.0001 seconds ); count += extra; } - var changes = changelist[0..@as(usize, @intCast(@max(0, count)))]; + var changes = changelist[0..@intCast(@max(0, count))]; var watchevents = this.watch_events[0..changes.len]; var out_len: usize = 0; if (changes.len > 0) { From cfc14987a7fd1a666778658dd3fb21d5c60b3230 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 16:04:39 -0800 Subject: [PATCH 22/28] fix filename? --- src/{watcher.zig => Watcher.zig} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/{watcher.zig => Watcher.zig} (100%) diff --git a/src/watcher.zig b/src/Watcher.zig similarity index 100% rename from src/watcher.zig rename to src/Watcher.zig From daf7a1b6b743cf76f98ddba51ce818ba090600c5 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 18:47:02 -0800 Subject: [PATCH 23/28] a --- build.zig | 2 +- bun.lock | 65 +++--- cmake/targets/BuildBun.cmake | 28 +++ package.json | 1 - src/api/schema.zig | 2 +- src/bake/DevServer.zig | 301 ++++++++++++++++++++------ src/bake/bake.d.ts | 4 +- src/bake/bake.private.d.ts | 3 + src/bake/bake.zig | 9 +- src/bake/client/overlay.css | 8 +- src/bake/client/overlay.ts | 35 +-- src/bake/hmr-runtime-error.ts | 19 -- src/bake/incremental_visualizer.html | 14 +- src/bake/macros.ts | 9 +- src/bun.js/api/server.zig | 8 +- src/bun.js/api/server/HTMLBundle.zig | 3 +- src/bun.js/api/server/StaticRoute.zig | 12 - src/bun.js/module_loader.zig | 2 +- src/bundler/bundle_v2.zig | 13 +- src/codegen/bake-codegen.ts | 30 +-- src/http.zig | 2 +- src/js_parser.zig | 2 +- src/node-fallbacks/bun.lock | 45 ++-- src/node-fallbacks/package.json | 4 +- src/resolver/resolver.zig | 7 + 25 files changed, 388 insertions(+), 240 deletions(-) diff --git a/build.zig b/build.zig index 7e6841918820a0..ce8624f7e5306c 100644 --- a/build.zig +++ b/build.zig @@ -558,10 +558,10 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void { .{ .file = "bake.client.js", .import = "bake-codegen/bake.client.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bake.error.js", .import = "bake-codegen/bake.error.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bake.server.js", .import = "bake-codegen/bake.server.js", .enable = opts.shouldEmbedCode() }, - .{ .file = "bake.react-refresh-prebuilt.js", .import = "bake-codegen/bake.react-refresh-prebuilt.js", .enable = opts.shouldEmbedCode() }, .{ .file = "bun-error/index.js", .enable = 
opts.shouldEmbedCode() }, .{ .file = "bun-error/bun-error.css", .enable = opts.shouldEmbedCode() }, .{ .file = "fallback-decoder.js", .enable = opts.shouldEmbedCode() }, + .{ .file = "node-fallbacks/react-refresh.js", .enable = opts.shouldEmbedCode() }, .{ .file = "node-fallbacks/assert.js", .enable = opts.shouldEmbedCode() }, .{ .file = "node-fallbacks/buffer.js", .enable = opts.shouldEmbedCode() }, .{ .file = "node-fallbacks/console.js", .enable = opts.shouldEmbedCode() }, diff --git a/bun.lock b/bun.lock index d1eb9c5535ff5c..81b4fa1253a77d 100644 --- a/bun.lock +++ b/bun.lock @@ -21,7 +21,6 @@ "prettier-plugin-organize-imports": "^4.0.0", "react": "^18.3.1", "react-dom": "^18.3.1", - "react-refresh": "^0.16.0", "source-map-js": "^1.2.0", "typescript": "^5.7.2", }, @@ -46,21 +45,21 @@ "packages": { "@biomejs/biome": ["@biomejs/biome@1.8.3", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.8.3", "@biomejs/cli-darwin-x64": "1.8.3", "@biomejs/cli-linux-arm64": "1.8.3", "@biomejs/cli-linux-arm64-musl": "1.8.3", "@biomejs/cli-linux-x64": "1.8.3", "@biomejs/cli-linux-x64-musl": "1.8.3", "@biomejs/cli-win32-arm64": "1.8.3", "@biomejs/cli-win32-x64": "1.8.3" }, "bin": { "biome": "bin/biome" } }, "sha512-/uUV3MV+vyAczO+vKrPdOW0Iaet7UnJMU4bNMinggGJTAnBPjCoLEYcyYtYHNnUNYlv4xZMH6hVIQCAozq8d5w=="], - "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.8.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-9DYOjclFpKrH/m1Oz75SSExR8VKvNSSsLnVIqdnKexj6NwmiMlKk94Wa1kZEdv6MCOHGHgyyoV57Cw8WzL5n3A=="], + "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.8.3", "", { "os":"darwin", "cpu":"arm64" }, "sha512-9DYOjclFpKrH/m1Oz75SSExR8VKvNSSsLnVIqdnKexj6NwmiMlKk94Wa1kZEdv6MCOHGHgyyoV57Cw8WzL5n3A=="], - "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.8.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-UeW44L/AtbmOF7KXLCoM+9PSgPo0IDcyEUfIoOXYeANaNXXf9mLUwV1GeF2OWjyic5zj6CnAJ9uzk2LT3v/wAw=="], + "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.8.3", "", { "os":"darwin", "cpu":"x64" }, "sha512-UeW44L/AtbmOF7KXLCoM+9PSgPo0IDcyEUfIoOXYeANaNXXf9mLUwV1GeF2OWjyic5zj6CnAJ9uzk2LT3v/wAw=="], - "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.8.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-fed2ji8s+I/m8upWpTJGanqiJ0rnlHOK3DdxsyVLZQ8ClY6qLuPc9uehCREBifRJLl/iJyQpHIRufLDeotsPtw=="], + "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.8.3", "", { "os":"linux", "cpu":"arm64" }, "sha512-fed2ji8s+I/m8upWpTJGanqiJ0rnlHOK3DdxsyVLZQ8ClY6qLuPc9uehCREBifRJLl/iJyQpHIRufLDeotsPtw=="], - "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.8.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-9yjUfOFN7wrYsXt/T/gEWfvVxKlnh3yBpnScw98IF+oOeCYb5/b/+K7YNqKROV2i1DlMjg9g/EcN9wvj+NkMuQ=="], + "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.8.3", "", { "os":"linux", "cpu":"arm64" }, "sha512-9yjUfOFN7wrYsXt/T/gEWfvVxKlnh3yBpnScw98IF+oOeCYb5/b/+K7YNqKROV2i1DlMjg9g/EcN9wvj+NkMuQ=="], - "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.8.3", "", { "os": "linux", "cpu": "x64" }, "sha512-I8G2QmuE1teISyT8ie1HXsjFRz9L1m5n83U1O6m30Kw+kPMPSKjag6QGUn+sXT8V+XWIZxFFBoTDEDZW2KPDDw=="], + "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.8.3", "", { "os":"linux", "cpu":"x64" }, "sha512-I8G2QmuE1teISyT8ie1HXsjFRz9L1m5n83U1O6m30Kw+kPMPSKjag6QGUn+sXT8V+XWIZxFFBoTDEDZW2KPDDw=="], - "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.8.3", "", { "os": "linux", "cpu": "x64" }, 
"sha512-UHrGJX7PrKMKzPGoEsooKC9jXJMa28TUSMjcIlbDnIO4EAavCoVmNQaIuUSH0Ls2mpGMwUIf+aZJv657zfWWjA=="], + "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.8.3", "", { "os":"linux", "cpu":"x64" }, "sha512-UHrGJX7PrKMKzPGoEsooKC9jXJMa28TUSMjcIlbDnIO4EAavCoVmNQaIuUSH0Ls2mpGMwUIf+aZJv657zfWWjA=="], - "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.8.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-J+Hu9WvrBevfy06eU1Na0lpc7uR9tibm9maHynLIoAjLZpQU3IW+OKHUtyL8p6/3pT2Ju5t5emReeIS2SAxhkQ=="], + "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.8.3", "", { "os":"win32", "cpu":"arm64" }, "sha512-J+Hu9WvrBevfy06eU1Na0lpc7uR9tibm9maHynLIoAjLZpQU3IW+OKHUtyL8p6/3pT2Ju5t5emReeIS2SAxhkQ=="], - "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.8.3", "", { "os": "win32", "cpu": "x64" }, "sha512-/PJ59vA1pnQeKahemaQf4Nyj7IKUvGQSc3Ze1uIGi+Wvr1xF7rGobSrAAG01T/gUDG21vkDsZYM03NAmPiVkqg=="], + "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.8.3", "", { "os":"win32", "cpu":"x64" }, "sha512-/PJ59vA1pnQeKahemaQf4Nyj7IKUvGQSc3Ze1uIGi+Wvr1xF7rGobSrAAG01T/gUDG21vkDsZYM03NAmPiVkqg=="], "@definitelytyped/dts-critic": ["@definitelytyped/dts-critic@0.0.191", "", { "dependencies": { "@definitelytyped/header-parser": "0.0.190", "command-exists": "^1.2.9", "semver": "^7.5.4", "tmp": "^0.2.1", "typescript": "^5.2.2", "yargs": "^17.7.2" } }, "sha512-j5HK3pQYiQwSXRLJzyhXJ6KxdzLl4gXXhz3ysCtLnRQkj+zsEfloDkEZ3x2bZMWS0OsKLXmR91JeQ2/c9DFEjg=="], @@ -76,51 +75,51 @@ "@es-joy/jsdoccomment": ["@es-joy/jsdoccomment@0.39.4", "", { "dependencies": { "comment-parser": "1.3.1", "esquery": "^1.5.0", "jsdoc-type-pratt-parser": "~4.0.0" } }, "sha512-Jvw915fjqQct445+yron7Dufix9A+m9j1fCJYlCo1FWlRvTxa3pjJelxdSTdaLWcTwRU6vbL+NYjO4YuNIS5Qg=="], - "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="], + "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os":"aix", "cpu":"ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="], - "@esbuild/android-arm": ["@esbuild/android-arm@0.21.5", "", { "os": "android", "cpu": "arm" }, "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg=="], + "@esbuild/android-arm": ["@esbuild/android-arm@0.21.5", "", { "os":"android", "cpu":"arm" }, "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg=="], - "@esbuild/android-arm64": ["@esbuild/android-arm64@0.21.5", "", { "os": "android", "cpu": "arm64" }, "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A=="], + "@esbuild/android-arm64": ["@esbuild/android-arm64@0.21.5", "", { "os":"android", "cpu":"arm64" }, "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A=="], - "@esbuild/android-x64": ["@esbuild/android-x64@0.21.5", "", { "os": "android", "cpu": "x64" }, "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA=="], + "@esbuild/android-x64": ["@esbuild/android-x64@0.21.5", "", { "os":"android", "cpu":"x64" }, "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA=="], - "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.21.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ=="], + "@esbuild/darwin-arm64": 
["@esbuild/darwin-arm64@0.21.5", "", { "os":"darwin", "cpu":"arm64" }, "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ=="], - "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.21.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw=="], + "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.21.5", "", { "os":"darwin", "cpu":"x64" }, "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw=="], - "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.21.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g=="], + "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.21.5", "", { "os":"freebsd", "cpu":"arm64" }, "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g=="], - "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.21.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ=="], + "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.21.5", "", { "os":"freebsd", "cpu":"x64" }, "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ=="], - "@esbuild/linux-arm": ["@esbuild/linux-arm@0.21.5", "", { "os": "linux", "cpu": "arm" }, "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA=="], + "@esbuild/linux-arm": ["@esbuild/linux-arm@0.21.5", "", { "os":"linux", "cpu":"arm" }, "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA=="], - "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.21.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q=="], + "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.21.5", "", { "os":"linux", "cpu":"arm64" }, "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q=="], - "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.21.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg=="], + "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.21.5", "", { "os":"linux", "cpu":"ia32" }, "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg=="], - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg=="], + "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.21.5", "", { "os":"linux", "cpu":"none" }, "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg=="], - "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg=="], + "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.21.5", "", { "os":"linux", "cpu":"none" }, "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg=="], - "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.21.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w=="], + "@esbuild/linux-ppc64": 
["@esbuild/linux-ppc64@0.21.5", "", { "os":"linux", "cpu":"ppc64" }, "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w=="], - "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.21.5", "", { "os": "linux", "cpu": "none" }, "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA=="], + "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.21.5", "", { "os":"linux", "cpu":"none" }, "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA=="], - "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.21.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A=="], + "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.21.5", "", { "os":"linux", "cpu":"s390x" }, "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A=="], - "@esbuild/linux-x64": ["@esbuild/linux-x64@0.21.5", "", { "os": "linux", "cpu": "x64" }, "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ=="], + "@esbuild/linux-x64": ["@esbuild/linux-x64@0.21.5", "", { "os":"linux", "cpu":"x64" }, "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ=="], - "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.21.5", "", { "os": "none", "cpu": "x64" }, "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg=="], + "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.21.5", "", { "os":"none", "cpu":"x64" }, "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg=="], - "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.21.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow=="], + "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.21.5", "", { "os":"openbsd", "cpu":"x64" }, "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow=="], - "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.21.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg=="], + "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.21.5", "", { "os":"sunos", "cpu":"x64" }, "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg=="], - "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.21.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A=="], + "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.21.5", "", { "os":"win32", "cpu":"arm64" }, "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A=="], - "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.21.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA=="], + "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.21.5", "", { "os":"win32", "cpu":"ia32" }, "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA=="], - "@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os": "win32", "cpu": "x64" }, "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="], + "@esbuild/win32-x64": ["@esbuild/win32-x64@0.21.5", "", { "os":"win32", "cpu":"x64" }, 
"sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw=="], "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.0", "", { "dependencies": { "eslint-visitor-keys": "^3.3.0" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA=="], @@ -708,8 +707,6 @@ "react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="], - "react-refresh": ["react-refresh@0.16.0", "", {}, "sha512-FPvF2XxTSikpJxcr+bHut2H4gJ17+18Uy20D5/F+SKzFap62R3cM5wH6b8WN3LyGSYeQilLEcJcR1fjBSI2S1A=="], - "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], "regexp.prototype.flags": ["regexp.prototype.flags@1.5.2", "", { "dependencies": { "call-bind": "^1.0.6", "define-properties": "^1.2.1", "es-errors": "^1.3.0", "set-function-name": "^2.0.1" } }, "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw=="], diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 3eaf1b566d2fdd..57cf51326a47dc 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -179,6 +179,33 @@ register_command( ${BUN_NODE_FALLBACKS_OUTPUTS} ) +# An embedded copy of react-refresh is used when the user forgets to install it. +# The library is not versioned alongside React. +set(BUN_REACT_REFRESH_OUTPUT ${BUN_NODE_FALLBACKS_OUTPUT}/react-refresh.js) +register_command( + TARGET + bun-node-fallbacks-react-refresh + COMMENT + "Building node-fallbacks/react-refresh.js" + CWD + ${BUN_NODE_FALLBACKS_SOURCE} + COMMAND + ${BUN_EXECUTABLE} build + ${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js + --outfile=${BUN_REACT_REFRESH_OUTPUT} + --target=browser + --format=cjs + --minify + --define:process.env.NODE_ENV=\"'development'\" + SOURCES + ${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js + ${BUN_NODE_FALLBACKS_SOURCE}/package.json + ${BUN_NODE_FALLBACKS_SOURCE}/bun.lock + ${BUN_NODE_FALLBACKS_NODE_MODULES} + OUTPUTS + ${BUN_REACT_REFRESH_OUTPUT} +) + set(BUN_ERROR_CODE_SCRIPT ${CWD}/src/codegen/generate-node-errors.ts) set(BUN_ERROR_CODE_SOURCES @@ -510,6 +537,7 @@ set(BUN_ZIG_GENERATED_SOURCES ${BUN_FALLBACK_DECODER_OUTPUT} ${BUN_RUNTIME_JS_OUTPUT} ${BUN_NODE_FALLBACKS_OUTPUTS} + ${BUN_REACT_REFRESH_OUTPUT} ${BUN_ERROR_CODE_OUTPUTS} ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS} ${BUN_JAVASCRIPT_OUTPUTS} diff --git a/package.json b/package.json index 17b81230147f27..e064be915a6d10 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,6 @@ "prettier-plugin-organize-imports": "^4.0.0", "react": "^18.3.1", "react-dom": "^18.3.1", - "react-refresh": "^0.16.0", "source-map-js": "^1.2.0", "typescript": "^5.7.2" }, diff --git a/src/api/schema.zig b/src/api/schema.zig index 163a1a76131aeb..f7a36f2df42f2e 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -1678,7 +1678,7 @@ pub const Api = struct { no_summary: ?bool = null, /// disable_hmr - disable_hmr: ?bool = null, + disable_hmr: bool = false, /// port port: ?u16 = null, diff --git 
a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 1238ed31e58278..980dfd7c1c397a 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -143,7 +143,7 @@ current_bundle: ?struct { requests: DeferredRequest.List, /// Resolution failures are grouped by incremental graph file index. /// Unlike parse failures (`handleParseTaskFailure`), the resolution - /// failures can be created asyncronously, and out of order. + /// failures can be created asynchronously, and out of order. resolution_failure_entries: AutoArrayHashMapUnmanaged(SerializedFailure.Owner.Packed, bun.logger.Log), }, /// When `current_bundle` is non-null and new requests to bundle come in, @@ -197,7 +197,7 @@ pub const RouteBundle = struct { client_bundle: ?*StaticRoute, /// If the client tries to load a script with the wrong generation, it will - /// recieve a bundle that instantly reloads the page, implying a bundle + /// receive a bundle that instantly reloads the page, implying a bundle /// change has occurred while fetching the script. client_script_generation: u32, @@ -223,7 +223,7 @@ pub const RouteBundle = struct { /// Contain the list of serialized failures. Hashmap allows for /// efficient lookup and removal of failing files. - /// When state == .evaluation_failure, this is popualted with that error. + /// When state == .evaluation_failure, this is populated with that error. evaluate_failure: ?SerializedFailure, }; @@ -568,8 +568,108 @@ pub fn deinit(dev: *DevServer) void { // TODO: Currently deinit is not implemented, as it was assumed to be alive for // the remainder of this process' lifespan. This isn't always true. const allocator = dev.allocator; - if (dev.has_pre_crash_handler) - bun.crash_handler.removePreCrashHandler(dev); + + // _ = VoidFieldTypes(DevServer){ + // // has no action taken + // .allocator = {}, + // .configuration_hash_key = {}, + // .graph_safety_lock = {}, + // .bun_watcher = {}, + // .watcher_atomics = {}, + // .plugin_state = {}, + // .generation = {}, + // .bundles_since_last_error = {}, + // .emit_visualizer_events = {}, + // .dump_dir = {}, + // .frontend_only = {}, + // .server_fetch_function_callback = {}, + // .server_register_update_callback = {}, + // .deferred_request_pool = {}, + + // .has_pre_crash_handler = if (dev.has_pre_crash_handler) + // bun.crash_handler.removePreCrashHandler(dev), + + // // pointers that are not considered a part of DevServer + // .vm = {}, + // .server = {}, + // .server_transpiler = {}, + // .client_transpiler = {}, + // .ssr_transpiler = {}, + // .log = {}, + // .framework = {}, // TODO: maybe + // .bundler_options = {}, // TODO: maybe + + // // to be counted. 
+ // .root = { + // cost += dev.root.len; + // }, + // .router = { + // cost += dev.router.memoryCost(); + // }, + // .route_bundles = for (dev.route_bundles.items) |*bundle| { + // cost += bundle.memoryCost(); + // }, + // .server_graph = { + // cost += dev.server_graph.memoryCost(); + // }, + // .client_graph = { + // cost += dev.client_graph.memoryCost(); + // }, + // .assets = { + // cost += dev.assets.memoryCost(); + // }, + // .incremental_result = { + // cost += memoryCostArrayList(dev.incremental_result.client_components_added); + // cost += memoryCostArrayList(dev.incremental_result.html_routes_affected); + // cost += memoryCostArrayList(dev.incremental_result.framework_routes_affected); + // cost += memoryCostArrayList(dev.incremental_result.client_components_removed); + // cost += memoryCostArrayList(dev.incremental_result.failures_removed); + // cost += memoryCostArrayList(dev.incremental_result.client_components_affected); + // cost += memoryCostArrayList(dev.incremental_result.failures_added); + // }, + // .has_tailwind_plugin_hack = if (dev.has_tailwind_plugin_hack) |hack| { + // cost += memoryCostArrayHashMap(hack); + // }, + // .directory_watchers = { + // cost += memoryCostArrayList(dev.directory_watchers.dependencies); + // cost += memoryCostArrayList(dev.directory_watchers.dependencies_free_list); + // cost += memoryCostArrayHashMap(dev.directory_watchers.watches); + // for (dev.directory_watchers.dependencies.items) |dep| { + // cost += dep.specifier.len; + // } + // }, + // .html_router = { + // // std does not provide a way to measure exact allocation size of HashMapUnmanaged + // cost += dev.html_router.map.capacity() * (@sizeOf(*HTMLBundle.HTMLBundleRoute) + @sizeOf([]const u8)); + // // DevServer does not count the referenced HTMLBundle.HTMLBundleRoutes + // }, + // .bundling_failures = { + // cost += memoryCostSlice(dev.bundling_failures.keys()); + // for (dev.bundling_failures.keys()) |failure| { + // cost += failure.data.len; + // } + // }, + // .current_bundle = { + // // All entries are owned by the bundler arena, not DevServer, except for `requests` + // if (dev.current_bundle) |bundle| { + // var r = bundle.requests.first; + // while (r) |request| : (r = request.next) { + // cost += @sizeOf(DeferredRequest.Node); + // } + // } + // }, + // .next_bundle = { + // var r = dev.next_bundle.requests.first; + // while (r) |request| : (r = request.next) { + // cost += @sizeOf(DeferredRequest.Node); + // } + // cost += memoryCostArrayHashMap(dev.next_bundle.route_queue); + // }, + // .route_lookup = { + // cost += memoryCostArrayHashMap(dev.route_lookup); + // }, + // }; + allocator.destroy(dev); // if (bun.Environment.isDebug) // bun.todoPanic(@src(), "bake.DevServer.deinit()", .{}); @@ -587,7 +687,7 @@ pub fn deinit(dev: *DevServer) void { pub fn memoryCost(dev: *DevServer) usize { var cost: usize = @sizeOf(DevServer); // See https://github.com/ziglang/zig/issues/21879 - voidFieldTypes(DevServer).* = .{ + _ = VoidFieldTypes(DevServer){ // does not contain pointers .allocator = {}, .configuration_hash_key = {}, @@ -950,19 +1050,17 @@ fn ensureRouteIsBundled( try dev.deferRequest(&dev.current_bundle.?.requests, route_bundle_index, kind, req, resp); }, .possible_bundling_failures => { - // TODO: perform a graph trace to find just the errors that are needed if (dev.bundling_failures.count() > 0) { - resp.corked(sendSerializedFailures, .{ - dev, - resp, - dev.bundling_failures.keys(), - .bundler, - }); - return; - } else { - 
dev.routeBundlePtr(route_bundle_index).server_state = .loaded; - continue :sw .loaded; + // Trace the graph to see if there are any failures that are + // reachable by this route. + switch (try checkRouteFailures(dev, route_bundle_index, resp)) { + .stop => return, + .ok => {}, // Errors were cleared or not in the way. + } } + + dev.routeBundlePtr(route_bundle_index).server_state = .loaded; + continue :sw .loaded; }, .evaluation_failure => { resp.corked(sendSerializedFailures, .{ @@ -1001,6 +1099,29 @@ fn deferRequest( requests_array.prepend(deferred); } +fn checkRouteFailures(dev: *DevServer, route_bundle_index: RouteBundle.Index, resp: anytype) !enum { stop, ok } { + var sfa_state = std.heap.stackFallback(65536, dev.allocator); + const sfa = sfa_state.get(); + var gts = try dev.initGraphTraceState(sfa); + defer gts.deinit(sfa); + defer dev.incremental_result.failures_added.clearRetainingCapacity(); + dev.graph_safety_lock.lock(); + defer dev.graph_safety_lock.unlock(); + try dev.traceAllRouteImports(dev.routeBundlePtr(route_bundle_index), >s, .find_errors); + if (dev.incremental_result.failures_added.items.len > 0) { + resp.corked(sendSerializedFailures, .{ + dev, + resp, + dev.incremental_result.failures_added.items, + .bundler, + }); + return .stop; + } else { + // Failures are unreachable by this route, so it is OK to load. + return .ok; + } +} + fn appendRouteEntryPointsIfNotStale(dev: *DevServer, entry_points: *EntryPointList, alloc: Allocator, rbi: RouteBundle.Index) bun.OOM!void { const server_file_names = dev.server_graph.bundled_files.keys(); const client_file_names = dev.client_graph.bundled_files.keys(); @@ -1161,7 +1282,7 @@ fn generateHTMLPayload(dev: *DevServer, route_bundle_index: RouteBundle.Index, r defer gts.deinit(sfa); // Run tracing dev.client_graph.reset(); - try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); + try dev.traceAllRouteImports(route_bundle, >s, .find_css); const css_ids = dev.client_graph.current_css_files.items; @@ -1283,7 +1404,7 @@ pub fn onSrcRequest(dev: *DevServer, req: *uws.Request, resp: anytype) void { /// prepared and stored in a linked list. const DeferredRequest = struct { /// A small maximum is set because development servers are unlikely to - /// aquire much load, so allocating a ton at the start for no reason + /// acquire much load, so allocating a ton at the start for no reason /// is very silly. This contributes to ~6kb of the initial DevServer allocation. const max_preallocated = 16; @@ -1360,6 +1481,9 @@ fn startAsyncBundle( dev.incremental_result.reset(); + // Ref server to keep it from closing. + if (dev.server) |server| server.onPendingRequest(); + var heap = try ThreadlocalArena.init(); errdefer heap.deinit(); const allocator = heap.allocator(); @@ -1407,15 +1531,17 @@ fn startAsyncBundle( } fn indexFailures(dev: *DevServer) !void { - // Since resolution failures can be asyncronous, their logs are not inserted + // Since resolution failures can be asynchronous, their logs are not inserted // until the very end. 
const resolution_failures = dev.current_bundle.?.resolution_failure_entries; if (resolution_failures.count() > 0) { for (resolution_failures.keys(), resolution_failures.values()) |owner, *log| { - switch (owner.decode()) { - .client => |index| try dev.client_graph.insertFailure(.index, index, log, false), - .server => |index| try dev.server_graph.insertFailure(.index, index, log, true), - .none, .route => unreachable, + if (log.hasErrors()) { + switch (owner.decode()) { + .client => |index| try dev.client_graph.insertFailure(.index, index, log, false), + .server => |index| try dev.server_graph.insertFailure(.index, index, log, true), + .none, .route => unreachable, + } } } } @@ -1475,6 +1601,10 @@ fn indexFailures(dev: *DevServer) !void { dev.markAllRouteChildrenFailed(entry.route_index); } + for (dev.incremental_result.html_routes_affected.items) |index| { + dev.routeBundlePtr(index).server_state = .possible_bundling_failures; + } + dev.publish(.errors, payload.items, .binary); } else if (dev.incremental_result.failures_removed.items.len > 0) { var payload = try std.ArrayList(u8).initCapacity(sfa, @sizeOf(MessageId) + @sizeOf(u32) + dev.incremental_result.failures_removed.items.len * @sizeOf(u32)); @@ -1512,17 +1642,15 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u // Run tracing dev.client_graph.reset(); - try dev.traceAllRouteImports(route_bundle, >s, .{ .find_client_modules = true }); + try dev.traceAllRouteImports(route_bundle, >s, .find_client_modules); + var react_fast_refresh_id: []const u8 = ""; if (dev.framework.react_fast_refresh) |rfr| brk: { const rfr_index = dev.client_graph.getFileIndex(rfr.import_source) orelse break :brk; if (!dev.client_graph.stale_files.isSet(rfr_index.get())) { - try dev.client_graph.traceImports( - rfr_index, - >s, - .{ .find_client_modules = true }, - ); + try dev.client_graph.traceImports(rfr_index, >s, .find_client_modules); + react_fast_refresh_id = dev.relativePath(rfr.import_source); } } @@ -1534,12 +1662,34 @@ fn generateClientBundle(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM![]u .html => |html| html.bundled_file, }; + const hash = hash: { + var source_map_hash: bun.bundle_v2.ContentHasher.Hash = .init(0x4b10); // arbitrarily different seed than what .initial_response uses + const keys = dev.client_graph.bundled_files.keys(); + for (dev.client_graph.current_chunk_parts.items) |part| { + source_map_hash.update(keys[part.get()]); + source_map_hash.update(dev.client_graph.source_maps.items[part.get()].vlq_chunk.slice()); + } + break :hash source_map_hash.final(); + }; + // Insert the source map + if (try dev.assets.putOrIncrementRefCount(hash, 1)) |static_route_ptr| { + // TODO: this asset is never unreferenced + const source_map = try dev.client_graph.takeSourceMap(.initial_response, sfa, dev.allocator); + errdefer dev.allocator.free(source_map); + static_route_ptr.* = StaticRoute.initFromAnyBlob(.fromOwnedSlice(dev.allocator, source_map), .{ + .server = dev.server.?, + .mime_type = .json, + }); + } + const client_bundle = dev.client_graph.takeJSBundle(.{ .kind = .initial_response, .initial_response_entry_point = if (client_file) |index| dev.relativePath(dev.client_graph.bundled_files.keys()[index.get()]) else "", + .react_refresh_entry_point = react_fast_refresh_id, + .source_map_id = hash, }); const source_map = try dev.client_graph.takeSourceMap(.initial_response, sfa, dev.allocator); @@ -1565,7 +1715,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.J // Run 
tracing dev.client_graph.reset(); - try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); + try dev.traceAllRouteImports(route_bundle, >s, .find_css); const names = dev.client_graph.current_css_files.items; const arr = JSC.JSArray.createEmpty(dev.vm.global, names.len); @@ -1581,14 +1731,14 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.OOM!JSC.J return arr; } -fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, gts: *GraphTraceState, goal: TraceImportGoal) !void { +fn traceAllRouteImports(dev: *DevServer, route_bundle: *RouteBundle, gts: *GraphTraceState, comptime goal: TraceImportGoal) !void { switch (route_bundle.data) { .framework => |fw| { var route = dev.router.routePtr(fw.route_index); const router_type = dev.router.typePtr(route.type); // Both framework entry points are considered - try dev.server_graph.traceImports(fromOpaqueFileId(.server, router_type.server_file), gts, .{ .find_css = true }); + try dev.server_graph.traceImports(fromOpaqueFileId(.server, router_type.server_file), gts, .find_css); if (router_type.client_file.unwrap()) |id| { try dev.client_graph.traceImports(fromOpaqueFileId(.client, id), gts, goal); } @@ -1680,6 +1830,9 @@ pub fn finalizeBundle( }; dev.startNextBundleIfPresent(); + + // Unref the ref added in `startAsyncBundle` + if (dev.server) |server| server.onStaticRequestComplete(); } const current_bundle = &dev.current_bundle.?; defer { @@ -2057,7 +2210,7 @@ pub fn finalizeBundle( if (dev.incremental_result.had_adjusted_edges) { gts.clear(); dev.client_graph.current_css_files.clearRetainingCapacity(); - try dev.traceAllRouteImports(route_bundle, >s, .{ .find_css = true }); + try dev.traceAllRouteImports(route_bundle, >s, .find_css); const css_ids = dev.client_graph.current_css_files.items; try w.writeInt(i32, @intCast(css_ids.len), .little); @@ -2283,6 +2436,13 @@ pub fn handleParseTaskFailure( dev.graph_safety_lock.lock(); defer dev.graph_safety_lock.unlock(); + debug.log("handleParseTaskFailure({}, .{s}, {}, {d} messages)", .{ + err, + @tagName(graph), + bun.fmt.quote(abs_path), + log.msgs.items.len, + }); + if (err == error.FileNotFound) { // Special-case files being deleted. Note that if a // file never existed, resolution would fail first. @@ -2920,7 +3080,7 @@ pub fn IncrementalGraph(side: bake.Side) type { /// /// For server, the code is temporarily kept in the /// `current_chunk_parts` array, where it must live until - /// takeBundle is called. Then it can be freed. + /// takeJSBundle is called. Then it can be freed. 
pub fn receiveChunk( g: *@This(), ctx: *HotUpdateContext, @@ -3301,13 +3461,13 @@ pub fn IncrementalGraph(side: bake.Side) type { } } - const TraceDependencyKind = enum { + const TraceDependencyGoal = enum { stop_at_boundary, no_stop, css_to_route, }; - fn traceDependencies(g: *@This(), file_index: FileIndex, gts: *GraphTraceState, trace_kind: TraceDependencyKind) !void { + fn traceDependencies(g: *@This(), file_index: FileIndex, gts: *GraphTraceState, comptime goal: TraceDependencyGoal) !void { g.owner().graph_safety_lock.assertLocked(); if (Environment.enable_logs) { @@ -3340,15 +3500,15 @@ pub fn IncrementalGraph(side: bake.Side) type { }, .client => { const dev = g.owner(); - if (file.flags.is_hmr_root or (file.flags.kind == .css and trace_kind == .css_to_route)) { + if (file.flags.is_hmr_root or (file.flags.kind == .css and goal == .css_to_route)) { const key = g.bundled_files.keys()[file_index.get()]; const index = dev.server_graph.getFileIndex(key) orelse Output.panic("Server Incremental Graph is missing component for {}", .{bun.fmt.quote(key)}); - try dev.server_graph.traceDependencies(index, gts, trace_kind); + try dev.server_graph.traceDependencies(index, gts, goal); } else if (file.flags.is_html_route) { const route_bundle_index = dev.client_graph.htmlRouteBundleIndex(file_index); try dev.incremental_result.html_routes_affected.append(dev.allocator, route_bundle_index); - if (trace_kind == .stop_at_boundary) + if (goal == .stop_at_boundary) return; } }, @@ -3357,7 +3517,7 @@ pub fn IncrementalGraph(side: bake.Side) type { // Certain files do not propagate updates to dependencies. // This is how updating a client component doesn't cause // a server-side reload. - if (trace_kind == .stop_at_boundary) { + if (goal == .stop_at_boundary) { if (file.stopsDependencyTrace()) { igLog("\\<- this file stops propagation", .{}); return; @@ -3369,16 +3529,17 @@ pub fn IncrementalGraph(side: bake.Side) type { while (it) |dep_index| { const edge = g.edges.items[dep_index.get()]; it = edge.next_dependency.unwrap(); - try g.traceDependencies(edge.dependency, gts, trace_kind); + try g.traceDependencies(edge.dependency, gts, goal); } } - fn traceImports(g: *@This(), file_index: FileIndex, gts: *GraphTraceState, goal: TraceImportGoal) !void { + fn traceImports(g: *@This(), file_index: FileIndex, gts: *GraphTraceState, comptime goal: TraceImportGoal) !void { g.owner().graph_safety_lock.assertLocked(); if (Environment.enable_logs) { - igLog("traceImports(.{s}, {}{s})", .{ + igLog("traceImports(.{s}, .{s}, {}{s})", .{ @tagName(side), + @tagName(goal), bun.fmt.quote(g.bundled_files.keys()[file_index.get()]), if (gts.bits(side).isSet(file_index.get())) " [already visited]" else "", }); @@ -3399,11 +3560,19 @@ pub fn IncrementalGraph(side: bake.Side) type { Output.panic("Client Incremental Graph is missing component for {}", .{bun.fmt.quote(key)}); try dev.client_graph.traceImports(index, gts, goal); } + if (goal == .find_errors and file.failed) { + const fail = g.owner().bundling_failures.getKeyAdapted( + SerializedFailure.Owner{ .server = file_index }, + SerializedFailure.ArrayHashAdapter{}, + ) orelse + @panic("Failed to get bundling failure"); + try g.owner().incremental_result.failures_added.append(g.owner().allocator, fail); + } }, .client => { assert(!g.stale_files.isSet(file_index.get())); // should not be left stale if (file.flags.kind == .css) { - if (goal.find_css) { + if (goal == .find_css) { try g.current_css_files.append(g.owner().allocator, file.cssAssetId()); } @@ -3416,10 +3585,19 @@ pub 
fn IncrementalGraph(side: bake.Side) type { return; } - if (goal.find_client_modules) { + if (goal == .find_client_modules) { try g.current_chunk_parts.append(g.owner().allocator, file_index); g.current_chunk_len += file.code_len; } + + if (goal == .find_errors and file.flags.failed) { + const fail = g.owner().bundling_failures.getKeyAdapted( + SerializedFailure.Owner{ .client = file_index }, + SerializedFailure.ArrayHashAdapter{}, + ) orelse + @panic("Failed to get bundling failure"); + try g.owner().incremental_result.failures_added.append(g.owner().allocator, fail); + } }, } @@ -3688,6 +3866,7 @@ pub fn IncrementalGraph(side: bake.Side) type { if (g.first_dep.items[index.get()] == .none) { g.disconnectAndDeleteFile(index); } else { + // TODO: This is incorrect, delete it! // Keep the file so others may refer to it, but mark as failed. try g.insertFailure(.abs_path, abs_path, log, false); } @@ -3748,8 +3927,9 @@ pub fn IncrementalGraph(side: bake.Side) type { const TakeJSBundleOptions = switch (side) { .client => struct { kind: ChunkKind, - initial_response_entry_point: []const u8 = "", source_map_id: ?u64 = null, + initial_response_entry_point: []const u8 = "", + react_refresh_entry_point: []const u8 = "", }, .server => struct { kind: ChunkKind, @@ -3794,7 +3974,6 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (kind) { .initial_response => { if (side == .server) @panic("unreachable"); - const fw = g.owner().framework; try w.writeAll("}, {\n main: "); const initial_response_entry_point = options.initial_response_entry_point; if (initial_response_entry_point.len > 0) { @@ -3810,10 +3989,10 @@ pub fn IncrementalGraph(side: bake.Side) type { try w.writeAll(",\n version: \""); try w.writeAll(&g.owner().configuration_hash_key); try w.writeAll("\""); - if (fw.react_fast_refresh) |rfr| { + if (options.react_refresh_entry_point.len > 0) { try w.writeAll(",\n refresh: "); try bun.js_printer.writeJSONString( - g.owner().relativePath(rfr.import_source), + g.owner().relativePath(options.react_refresh_entry_point), @TypeOf(w), w, .utf8, @@ -3824,9 +4003,9 @@ pub fn IncrementalGraph(side: bake.Side) type { } try w.writeAll("\n})"); if (side == .client) if (options.source_map_id) |source_map_id| { - try w.writeAll("//# sourceMappingURL=" ++ asset_prefix); + try w.writeAll("\n//# sourceMappingURL=" ++ asset_prefix); try w.writeAll(&std.fmt.bytesToHex(std.mem.asBytes(&source_map_id), .lower)); - try w.writeAll(".js.map"); + try w.writeAll(".js.map\n"); }; break :end end_list.items; }; @@ -4081,6 +4260,8 @@ const IncrementalResult = struct { /// /// Populated from within the bundler via `handleParseTaskFailure`, as well /// as at the start of `indexFailures`. + /// + /// This is also populated when calling `traceImports` with `find_errors` failures_added: ArrayListUnmanaged(SerializedFailure), const empty: IncrementalResult = .{ @@ -4130,10 +4311,10 @@ const GraphTraceState = struct { } }; -const TraceImportGoal = struct { - // gts: *GraphTraceState, - find_css: bool = false, - find_client_modules: bool = false, +const TraceImportGoal = enum { + find_css, + find_client_modules, + find_errors, }; fn initGraphTraceState(dev: *const DevServer, sfa: Allocator) !GraphTraceState { @@ -5698,7 +5879,7 @@ pub const Assets = struct { return @alignCast(@fieldParentPtr("assets", assets)); } - /// When an asset is overwritten, it recieves a new URL to get around browser auto-caching. + // / When an asset is overwritten, it receives a new URL to get around browser auto-caching. 
/// The old URL is immediately invalidated. pub fn replacePath( assets: *Assets, @@ -5838,21 +6019,19 @@ pub fn onPluginsRejected(dev: *DevServer) !void { } /// userland implementation of https://github.com/ziglang/zig/issues/21879 -fn voidFieldTypes(comptime T: type) *brk: { +fn VoidFieldTypes(comptime T: type) type { const fields = @typeInfo(T).@"struct".fields; var new_fields = fields[0..fields.len].*; for (&new_fields) |*field| { field.type = void; field.default_value_ptr = null; } - break :brk @Type(.{ .@"struct" = .{ + return @Type(.{ .@"struct" = .{ .layout = .auto, .fields = &new_fields, .decls = &.{}, .is_tuple = false, } }); -} { - return undefined; } const std = @import("std"); diff --git a/src/bake/bake.d.ts b/src/bake/bake.d.ts index fc7bde89123d34..af40b0cd7fb65b 100644 --- a/src/bake/bake.d.ts +++ b/src/bake/bake.d.ts @@ -604,6 +604,4 @@ declare module "bun:bake/client" { } /** Available during development */ -declare module "bun:bake/dev" { - -}; +declare module "bun:bake/dev" {} diff --git a/src/bake/bake.private.d.ts b/src/bake/bake.private.d.ts index 204a68f151349d..3b0952223410c1 100644 --- a/src/bake/bake.private.d.ts +++ b/src/bake/bake.private.d.ts @@ -43,6 +43,9 @@ declare const side: "client" | "server"; */ declare const IS_BUN_DEVELOPMENT: any; +/** If this is the fallback error page */ +declare const IS_ERROR_RUNTIME: boolean; + declare var __bun_f: any; // The following interfaces have been transcribed manually. diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 1010dd9610a2bd..52a3dde42a6354 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -241,9 +241,9 @@ pub const Framework = struct { arena, "react-refresh/runtime/index.js", if (Environment.codegen_embed) - .{ .code = @embedFile("bake.react-refresh-prebuilt.js") } + .{ .code = @embedFile("node-fallbacks/react-refresh-prebuilt.js") } else - .{ .code = bun.runtimeEmbedFile(.codegen, "bake.react-refresh-prebuilt.js") }, + .{ .code = bun.runtimeEmbedFile(.codegen, "node-fallbacks/react-refresh-prebuilt.js") }, ); } @@ -694,7 +694,6 @@ pub const HmrRuntime = struct { line_count: u32, pub fn init(code: [:0]const u8) HmrRuntime { - if (@inComptime()) @setEvalBranchQuota(@intCast(code.len)); return .{ .code = code, .line_count = @intCast(std.mem.count(u8, code, "\n")), @@ -705,8 +704,8 @@ pub const HmrRuntime = struct { pub fn getHmrRuntime(side: Side) callconv(bun.callconv_inline) HmrRuntime { return if (Environment.codegen_embed) switch (side) { - .client => comptime .init(@embedFile("bake-codegen/bake.client.js")), - .server => comptime .init(@embedFile("bake-codegen/bake.server.js")), + .client => .init(@embedFile("bake-codegen/bake.client.js")), + .server => .init(@embedFile("bake-codegen/bake.server.js")), } else .init(switch (side) { diff --git a/src/bake/client/overlay.css b/src/bake/client/overlay.css index d3b08f23b7b989..0413054e07b34b 100644 --- a/src/bake/client/overlay.css +++ b/src/bake/client/overlay.css @@ -95,10 +95,10 @@ pre { font-weight: bold; padding: 0.5rem 1rem; text-align: left; - cursor: pointer; + /* cursor: pointer; */ } -.file-name:hover, +/* .file-name:hover, .file-name:focus-visible { background-color: var(--item-bg-hover); } @@ -111,7 +111,7 @@ pre { .file-name:hover::after, .file-name:focus-visible { content: " (click to open in editor)"; -} +} */ .message { margin: 1rem; @@ -127,7 +127,7 @@ button + .message { } .message-text:last-child { - margin-bottom: 0.5rem; + margin-bottom: 1rem; } .log-error { diff --git a/src/bake/client/overlay.ts b/src/bake/client/overlay.ts 
index 34c5bb4a0b1d72..b860a672e021f2 100644 --- a/src/bake/client/overlay.ts +++ b/src/bake/client/overlay.ts @@ -113,7 +113,7 @@ function setModalVisible(visible: boolean) { } /** Handler for `MessageId.errors` websocket packet */ -export function onErrorMessage(view: DataView) { +export function onErrorMessage(view: DataView) { const reader = new DataViewReader(view, 1); const removedCount = reader.u32(); @@ -164,7 +164,11 @@ export function updateErrorOverlay() { console.log(errors, updatedErrorOwners); if (errors.size === 0) { - setModalVisible(false); + if (IS_ERROR_RUNTIME) { + location.reload(); + } else { + setModalVisible(false); + } return; } @@ -190,20 +194,21 @@ export function updateErrorOverlay() { let title; let btn; const root = elem("div", { class: "message-group" }, [ - (btn = elem("button", { class: "file-name" }, [(title = textNode())])), + // (btn = elem("button", { class: "file-name" }, [(title = textNode())])), + elem("div", { class: "file-name" }, [(title = textNode())]), ]); - btn.addEventListener("click", () => { - const firstLocation = errors.get(owner)?.messages[0]?.location; - if (!firstLocation) return; - let fileName = title.textContent.replace(/^\//, ""); - fetch("/_bun/src/" + fileName, { - headers: { - "Open-In-Editor": "1", - "Editor-Line": firstLocation.line.toString(), - "Editor-Column": firstLocation.column.toString(), - }, - }); - }); + // btn.addEventListener("click", () => { + // const firstLocation = errors.get(owner)?.messages[0]?.location; + // if (!firstLocation) return; + // let fileName = title.textContent.replace(/^\//, ""); + // fetch("/_bun/src/" + fileName, { + // headers: { + // "Open-In-Editor": "1", + // "Editor-Line": firstLocation.line.toString(), + // "Editor-Column": firstLocation.column.toString(), + // }, + // }); + // }); dom = { root, title, messages: [] }; // TODO: sorted insert? domErrorList.appendChild(root); diff --git a/src/bake/hmr-runtime-error.ts b/src/bake/hmr-runtime-error.ts index 338c15f1772291..7fb4d98df92a1a 100644 --- a/src/bake/hmr-runtime-error.ts +++ b/src/bake/hmr-runtime-error.ts @@ -23,7 +23,6 @@ declare const error: Uint8Array; } let firstVersionPacket = true; -let currentRouteIndex = -1; const ws = initWebSocket({ [MessageId.version](dv) { @@ -39,22 +38,4 @@ const ws = initWebSocket({ }, [MessageId.errors]: onErrorMessage, - - [MessageId.hot_update](view) { - const reader = new DataViewReader(view, 1); - const serverSideRoutesUpdated = new Set(); - do { - const routeId = reader.i32(); - if (routeId === -1 || routeId == undefined) break; - if (routeId === currentRouteIndex) { - location.reload(); - break; - } - } while (true); - }, - - [MessageId.set_url_response](view) { - const reader = new DataViewReader(view, 1); - currentRouteIndex = reader.u32(); - }, }); diff --git a/src/bake/incremental_visualizer.html b/src/bake/incremental_visualizer.html index 0b00a295dafe29..31af95998c1576 100644 --- a/src/bake/incremental_visualizer.html +++ b/src/bake/incremental_visualizer.html @@ -67,31 +67,31 @@

IncrementalGraph Visualization

[hunk body garbled: element tags are missing; the recoverable content is seven legend-style entries (Stale, Client, HTML, Route, SSR, Server, SSR + Server), each with one line removed and one line added]
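For reference, a minimal sketch of the likely shape of one such entry; the tag names, class names, and structure are assumptions for illustration, and only the visible labels above come from the hunk itself:

    <!-- hypothetical markup, not taken from the patch -->
    <div class="legend-item">
      <span class="swatch client"></span> <!-- presumably the per-entry line this hunk replaces -->
      Client
    </div>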
diff --git a/src/bake/macros.ts b/src/bake/macros.ts index 189c37e122379d..7d57cdb09bcbb2 100644 --- a/src/bake/macros.ts +++ b/src/bake/macros.ts @@ -5,14 +5,7 @@ import { resolve } from "node:path"; export async function css(file: string, is_development: boolean): string { // TODO: CSS does not process the error modal correctly. { - const { success, stdout, stderr } = await Bun.spawnSync({ - // TODO: remove the --experimental-css flag here once CI is upgraded to a post-#16561 bun - cmd: [process.execPath, "x", "esbuild", file, ...(is_development ? [] : ["--minify"])], - cwd: import.meta.dir, - stdio: ["ignore", "pipe", "pipe"], - }); - if (!success) throw new Error(stderr.toString("utf-8")); - return stdout.toString("utf-8"); + return readFileSync(resolve(import.meta.dir, file), "utf-8"); } // const { success, stdout, stderr } = await Bun.spawnSync({ diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 66ec1fbcf64638..844a12be688e80 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1161,15 +1161,11 @@ pub const ServerConfig = struct { } // When HTML bundles are provided, ensure DevServer options are ready - // The precense of these options causes Bun.serve to initialize things. + // The presence of these options causes Bun.serve to initialize things. // // TODO: remove canary gate once the following things are fixed: // - more extensive hmr reliability testing - // - asset support - // - plugin support - // - tailwind plugin verified functional - // - source maps - if ((Environment.is_canary or Environment.isDebug) and + if (!bun.CLI.Command.get().args.disable_hmr and dedupe_html_bundle_map.count() > 0) { // TODO: this should be the dir with bunfig?? diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index 4a90e4c317d005..b0e72ec2a086a8 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -182,8 +182,7 @@ pub const HTMLBundleRoute = struct { .err => |log| { if (bun.Environment.enable_logs) debug("onRequest: {s} - err", .{req.url()}); - _ = log; // autofix - // TODO: use the code from DevServer.zig to render the error + _ = log; // TODO: use the code from DevServer.zig to render the error resp.endWithoutBody(true); }, .html => |html| { diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index 363960d7630e65..6fe85623793e90 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -16,18 +16,6 @@ ref_count: u32 = 1, pub usingnamespace bun.NewRefCounted(@This(), deinit, null); -// pub fn initFromBlob(blob: AnyBlob) *StaticRoute { -// const headers = Headers.from(null, bun.default_allocator, .{ .body = &blob }) catch bun.outOfMemory(); -// return StaticRoute.new(.{ -// .blob = blob, -// .cached_blob_size = blob.size(), -// .has_content_disposition = false, -// .headers = headers, -// .server = null, -// .status_code = 200, -// }); -// } - pub const InitFromBytesOptions = struct { server: AnyServer, mime_type: ?bun.http.MimeType = null, diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 4202937199d76a..1d3b0b8b033046 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -212,7 +212,7 @@ pub const RuntimeTranspilerStore = struct { }; } - // Thsi is run at the top of the event loop on the JS thread. + // This is run at the top of the event loop on the JS thread. 
pub fn drain(this: *RuntimeTranspilerStore) void { var batch = this.queue.popBatch(); var iter = batch.iterator(); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 548daa3d6233e3..c3a752ed22e685 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -2779,6 +2779,7 @@ pub const BundleV2 = struct { if (err == error.ModuleNotFound) { if (this.bun_watcher != null) { if (!had_busted_dir_cache) { + bun.Output.scoped(.watcher, false)("busting dir cache {s} -> {s}", .{ source.path.text, import_record.path.text }); // Only re-query if we previously had something cached. if (transpiler.resolver.bustDirCacheFromSpecifier( source.path.text, @@ -9767,8 +9768,7 @@ pub const LinkerContext = struct { const css: *const bun.css.BundlerStyleSheet = &chunk.content.css.asts[imports_in_chunk_index]; switch (css_import.kind) { - .layers => |layers| { - _ = layers; // autofix + .layers => { const printer_options = bun.css.PrinterOptions{ // TODO: make this more configurable .minify = c.options.minify_whitespace, @@ -9947,8 +9947,6 @@ pub const LinkerContext = struct { } fn prepareCssAstsForChunkImpl(c: *LinkerContext, chunk: *Chunk, allocator: std.mem.Allocator) void { - const import_records: []const BabyList(ImportRecord) = c.graph.ast.items(.import_records); - _ = import_records; // autofix const asts: []const ?*bun.css.BundlerStyleSheet = c.graph.ast.items(.css); // Prepare CSS asts @@ -10105,13 +10103,10 @@ pub const LinkerContext = struct { ast: *bun.css.BundlerStyleSheet, temp_allocator: std.mem.Allocator, conditions: *const BabyList(bun.css.ImportConditions), - condition_import_records: *const BabyList(ImportRecord), + _: *const BabyList(ImportRecord), ) void { - _ = condition_import_records; // autofix var dummy_import_records = bun.BabyList(bun.ImportRecord){}; - defer { - bun.debugAssert(dummy_import_records.len == 0); - } + defer bun.debugAssert(dummy_import_records.len == 0); var i: usize = conditions.len; while (i > 0) { diff --git a/src/codegen/bake-codegen.ts b/src/codegen/bake-codegen.ts index f3a1046690032a..1d3b5ff91b8506 100644 --- a/src/codegen/bake-codegen.ts +++ b/src/codegen/bake-codegen.ts @@ -31,37 +31,13 @@ async function run() { writeIfNotChanged(join(base_dir, "generated.ts"), convertZigEnum(devServerZig)); const results = await Promise.allSettled( - ["client", "server", "error", "react-refresh"].map(async file => { - // An embedded copy of react-refresh is used when the user forgets to install it. - // The library is not versioned alongside React. - if (file === "react-refresh") { - const reactRefresh = require.resolve( - "../../node_modules/react-refresh/cjs/react-refresh-runtime.development.js", - ); - let result = await Bun.build({ - entrypoints: [reactRefresh], - minify: true, - target: "browser", - external: ["*"], - format: "cjs", - define: { - "process.env.NODE_ENV": JSON.stringify("development"), - }, - }); - if (!result.success) throw new AggregateError(result.logs); - assert(result.outputs.length === 1, "must bundle to a single file"); - // @ts-ignore - let code = await result.outputs[0].text(); - assert(code.trim().length > 0, "react-refresh-prebuilt is empty, built from " + reactRefresh); - writeIfNotChanged(join(codegenRoot, `bake.react-refresh-prebuilt.js`), code); - return; - } - + ["client", "server", "error"].map(async file => { const side = file === "error" ? 
"client" : file; let result = await Bun.build({ entrypoints: [join(base_dir, `hmr-runtime-${file}.ts`)], define: { side: JSON.stringify(side), + IS_ERROR_RUNTIME: String(file === "error"), IS_BUN_DEVELOPMENT: String(!!debug), }, minify: { @@ -162,9 +138,9 @@ async function run() { { kind: ["client"], result: results[0] }, { kind: ["server"], result: results[1] }, { kind: ["error"], result: results[2] }, - { kind: ["react-refresh"], result: results[3] }, ] .filter(x => x.result.status === "rejected") + // @ts-ignore .map(x => ({ kind: x.kind, err: x.result.reason })) as Err[]; if (failed.length > 0) { const flattened_errors: Err[] = []; diff --git a/src/http.zig b/src/http.zig index c6920268c0c84e..6eb64576ac4b91 100644 --- a/src/http.zig +++ b/src/http.zig @@ -410,7 +410,7 @@ const ProxyTunnel = struct { }; const pending = encoded_data[@intCast(written)..]; if (pending.len > 0) { - // lets flush when we are trully writable + // lets flush when we are truly writable proxy.write_buffer.write(pending) catch bun.outOfMemory(); } } diff --git a/src/js_parser.zig b/src/js_parser.zig index d9da97b9699a52..b7a62f08644345 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -3273,7 +3273,7 @@ pub const Parser = struct { // The lexer location won't be totally accurate, but it's kind of helpful. try p.log.addError(p.source, p.lexer.loc(), "Maximum call stack size exceeded"); - // Return a SyntaxError so that we reuse existing code for handling erorrs. + // Return a SyntaxError so that we reuse existing code for handling errors. return error.SyntaxError; } diff --git a/src/node-fallbacks/bun.lock b/src/node-fallbacks/bun.lock index 26187aaf6f5de7..1730d5da2dc3be 100644 --- a/src/node-fallbacks/bun.lock +++ b/src/node-fallbacks/bun.lock @@ -19,6 +19,7 @@ "process": "^0.11.10", "punycode": "^2.1.1", "querystring-es3": "^1.0.0-0", + "react-refresh": "^0.16.0", "readable-stream": "^4.1.0", "stream-http": "^3.2.0", "string_decoder": "^1.3.0", @@ -31,7 +32,7 @@ }, }, "packages": { - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="], + "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os":"linux", "cpu":"none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="], "abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="], @@ -95,45 +96,45 @@ "esbuild": ["esbuild@0.14.54", "", { "dependencies": { "@esbuild/linux-loong64": "0.14.54", "esbuild-android-64": "0.14.54", "esbuild-android-arm64": "0.14.54", "esbuild-darwin-64": "0.14.54", "esbuild-darwin-arm64": "0.14.54", "esbuild-freebsd-64": "0.14.54", "esbuild-freebsd-arm64": "0.14.54", "esbuild-linux-32": "0.14.54", "esbuild-linux-64": "0.14.54", "esbuild-linux-arm": "0.14.54", "esbuild-linux-arm64": "0.14.54", "esbuild-linux-mips64le": "0.14.54", "esbuild-linux-ppc64le": "0.14.54", "esbuild-linux-riscv64": "0.14.54", "esbuild-linux-s390x": "0.14.54", "esbuild-netbsd-64": "0.14.54", "esbuild-openbsd-64": "0.14.54", "esbuild-sunos-64": "0.14.54", "esbuild-windows-32": "0.14.54", "esbuild-windows-64": "0.14.54", "esbuild-windows-arm64": "0.14.54" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA=="], - 
"esbuild-android-64": ["esbuild-android-64@0.14.54", "", { "os": "android", "cpu": "x64" }, "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ=="], + "esbuild-android-64": ["esbuild-android-64@0.14.54", "", { "os":"android", "cpu":"x64" }, "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ=="], - "esbuild-android-arm64": ["esbuild-android-arm64@0.14.54", "", { "os": "android", "cpu": "arm64" }, "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg=="], + "esbuild-android-arm64": ["esbuild-android-arm64@0.14.54", "", { "os":"android", "cpu":"arm64" }, "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg=="], - "esbuild-darwin-64": ["esbuild-darwin-64@0.14.54", "", { "os": "darwin", "cpu": "x64" }, "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug=="], + "esbuild-darwin-64": ["esbuild-darwin-64@0.14.54", "", { "os":"darwin", "cpu":"x64" }, "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug=="], - "esbuild-darwin-arm64": ["esbuild-darwin-arm64@0.14.54", "", { "os": "darwin", "cpu": "arm64" }, "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw=="], + "esbuild-darwin-arm64": ["esbuild-darwin-arm64@0.14.54", "", { "os":"darwin", "cpu":"arm64" }, "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw=="], - "esbuild-freebsd-64": ["esbuild-freebsd-64@0.14.54", "", { "os": "freebsd", "cpu": "x64" }, "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg=="], + "esbuild-freebsd-64": ["esbuild-freebsd-64@0.14.54", "", { "os":"freebsd", "cpu":"x64" }, "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg=="], - "esbuild-freebsd-arm64": ["esbuild-freebsd-arm64@0.14.54", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q=="], + "esbuild-freebsd-arm64": ["esbuild-freebsd-arm64@0.14.54", "", { "os":"freebsd", "cpu":"arm64" }, "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q=="], - "esbuild-linux-32": ["esbuild-linux-32@0.14.54", "", { "os": "linux", "cpu": "ia32" }, "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw=="], + "esbuild-linux-32": ["esbuild-linux-32@0.14.54", "", { "os":"linux", "cpu":"ia32" }, "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw=="], - "esbuild-linux-64": ["esbuild-linux-64@0.14.54", "", { "os": "linux", "cpu": "x64" }, "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg=="], + "esbuild-linux-64": ["esbuild-linux-64@0.14.54", "", { "os":"linux", "cpu":"x64" }, "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg=="], - "esbuild-linux-arm": ["esbuild-linux-arm@0.14.54", "", { "os": "linux", "cpu": "arm" }, "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw=="], + "esbuild-linux-arm": ["esbuild-linux-arm@0.14.54", "", { "os":"linux", "cpu":"arm" }, "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw=="], - "esbuild-linux-arm64": ["esbuild-linux-arm64@0.14.54", "", { "os": 
"linux", "cpu": "arm64" }, "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig=="], + "esbuild-linux-arm64": ["esbuild-linux-arm64@0.14.54", "", { "os":"linux", "cpu":"arm64" }, "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig=="], - "esbuild-linux-mips64le": ["esbuild-linux-mips64le@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw=="], + "esbuild-linux-mips64le": ["esbuild-linux-mips64le@0.14.54", "", { "os":"linux", "cpu":"none" }, "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw=="], - "esbuild-linux-ppc64le": ["esbuild-linux-ppc64le@0.14.54", "", { "os": "linux", "cpu": "ppc64" }, "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ=="], + "esbuild-linux-ppc64le": ["esbuild-linux-ppc64le@0.14.54", "", { "os":"linux", "cpu":"ppc64" }, "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ=="], - "esbuild-linux-riscv64": ["esbuild-linux-riscv64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg=="], + "esbuild-linux-riscv64": ["esbuild-linux-riscv64@0.14.54", "", { "os":"linux", "cpu":"none" }, "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg=="], - "esbuild-linux-s390x": ["esbuild-linux-s390x@0.14.54", "", { "os": "linux", "cpu": "s390x" }, "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA=="], + "esbuild-linux-s390x": ["esbuild-linux-s390x@0.14.54", "", { "os":"linux", "cpu":"s390x" }, "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA=="], - "esbuild-netbsd-64": ["esbuild-netbsd-64@0.14.54", "", { "os": "none", "cpu": "x64" }, "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w=="], + "esbuild-netbsd-64": ["esbuild-netbsd-64@0.14.54", "", { "os":"none", "cpu":"x64" }, "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w=="], - "esbuild-openbsd-64": ["esbuild-openbsd-64@0.14.54", "", { "os": "openbsd", "cpu": "x64" }, "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw=="], + "esbuild-openbsd-64": ["esbuild-openbsd-64@0.14.54", "", { "os":"openbsd", "cpu":"x64" }, "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw=="], - "esbuild-sunos-64": ["esbuild-sunos-64@0.14.54", "", { "os": "sunos", "cpu": "x64" }, "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw=="], + "esbuild-sunos-64": ["esbuild-sunos-64@0.14.54", "", { "os":"sunos", "cpu":"x64" }, "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw=="], - "esbuild-windows-32": ["esbuild-windows-32@0.14.54", "", { "os": "win32", "cpu": "ia32" }, "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w=="], + "esbuild-windows-32": ["esbuild-windows-32@0.14.54", "", { "os":"win32", "cpu":"ia32" }, "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w=="], - "esbuild-windows-64": ["esbuild-windows-64@0.14.54", "", { "os": "win32", "cpu": "x64" }, 
"sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ=="], + "esbuild-windows-64": ["esbuild-windows-64@0.14.54", "", { "os":"win32", "cpu":"x64" }, "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ=="], - "esbuild-windows-arm64": ["esbuild-windows-arm64@0.14.54", "", { "os": "win32", "cpu": "arm64" }, "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg=="], + "esbuild-windows-arm64": ["esbuild-windows-arm64@0.14.54", "", { "os":"win32", "cpu":"arm64" }, "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg=="], "event-target-shim": ["event-target-shim@5.0.1", "", {}, "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="], @@ -215,6 +216,8 @@ "randomfill": ["randomfill@1.0.4", "", { "dependencies": { "randombytes": "^2.0.5", "safe-buffer": "^5.1.0" } }, "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw=="], + "react-refresh": ["react-refresh@0.16.0", "", {}, "sha512-FPvF2XxTSikpJxcr+bHut2H4gJ17+18Uy20D5/F+SKzFap62R3cM5wH6b8WN3LyGSYeQilLEcJcR1fjBSI2S1A=="], + "readable-stream": ["readable-stream@4.3.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10" } }, "sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ=="], "ripemd160": ["ripemd160@2.0.2", "", { "dependencies": { "hash-base": "^3.0.0", "inherits": "^2.0.1" } }, "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA=="], diff --git a/src/node-fallbacks/package.json b/src/node-fallbacks/package.json index 4c54b4c438df99..99dedbb47bab27 100644 --- a/src/node-fallbacks/package.json +++ b/src/node-fallbacks/package.json @@ -6,7 +6,8 @@ "scripts": { "test": "echo \"Error: no test specified\" && exit 1", "build-gen": "bash -c 'esbuild --bundle *.js --outdir=bun --format=esm --platform=browser --external:buffer --external:stream --external:util --external:util/ --external:assert'", - "build": "bash -c 'esbuild --bundle *.js --outdir=out --format=esm --minify --platform=browser'" + "build": "bash -c 'esbuild --bundle *.js --outdir=out --format=esm --minify --platform=browser'", + "build-react-refresh": "NODE_ENV=development bun build --target=browser --external=* --format=cjs --outfile=out/react-refresh.js ./node_modules/react-refresh/cjs/react-refresh-runtime.development.js --define=process.env.NODE_ENV=development --minify" }, "author": "", "license": "ISC", @@ -26,6 +27,7 @@ "process": "^0.11.10", "punycode": "^2.1.1", "querystring-es3": "^1.0.0-0", + "react-refresh": "^0.16.0", "readable-stream": "^4.1.0", "stream-http": "^3.2.0", "string_decoder": "^1.3.0", diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index b714d33ace181c..b8ce712bac62c7 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -1629,6 +1629,13 @@ pub const Resolver = struct { /// bust both the named file and a parent directory, because `./hello` can resolve /// to `./hello.js` or `./hello/index.js` pub fn bustDirCacheFromSpecifier(r: *ThisResolver, import_source: []const u8, specifier: []const u8) bool { + if (std.fs.path.isAbsolute(specifier)) { + const dir = bun.path.dirname(specifier, .auto); + const a = r.bustDirCache(dir); + const b = r.bustDirCache(specifier); + return a or b; + } + if (!(bun.strings.startsWith(specifier, "./") 
or bun.strings.startsWith(specifier, "../"))) return false; if (!std.fs.path.isAbsolute(import_source)) return false; From 3dddfa8d13c6e77ade301108cbdad23c0e266252 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 18:51:54 -0800 Subject: [PATCH 24/28] wallaa --- cmake/targets/BuildBun.cmake | 1 - src/bake/bake.zig | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 57cf51326a47dc..5ded96fa618577 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -198,7 +198,6 @@ register_command( --minify --define:process.env.NODE_ENV=\"'development'\" SOURCES - ${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js ${BUN_NODE_FALLBACKS_SOURCE}/package.json ${BUN_NODE_FALLBACKS_SOURCE}/bun.lock ${BUN_NODE_FALLBACKS_NODE_MODULES} diff --git a/src/bake/bake.zig b/src/bake/bake.zig index 52a3dde42a6354..ea0a548d3a1547 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -241,9 +241,9 @@ pub const Framework = struct { arena, "react-refresh/runtime/index.js", if (Environment.codegen_embed) - .{ .code = @embedFile("node-fallbacks/react-refresh-prebuilt.js") } + .{ .code = @embedFile("node-fallbacks/react-refresh.js") } else - .{ .code = bun.runtimeEmbedFile(.codegen, "node-fallbacks/react-refresh-prebuilt.js") }, + .{ .code = bun.runtimeEmbedFile(.codegen, "node-fallbacks/react-refresh.js") }, ); } From 3592a671053d58c4c024cdb4fe079ed652cf5bff Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 19:54:49 -0800 Subject: [PATCH 25/28] a --- src/bake/DevServer.zig | 3 +++ src/bake/bake.zig | 1 - src/bun.js/api/server.zig | 7 +------ src/bun.js/api/server/HTMLBundle.zig | 1 + src/bundler/bundle_v2.zig | 5 +++-- src/cli.zig | 4 ++++ src/codegen/bake-codegen.ts | 2 +- 7 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 980dfd7c1c397a..8189226571878b 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -12,6 +12,9 @@ pub const debug = bun.Output.Scoped(.DevServer, false); pub const memoryLog = bun.Output.Scoped(.DevServerMemory, true); pub const igLog = bun.Output.scoped(.IncrementalGraph, false); +/// --no-hmr sets this to false +pub var enabled = true; + pub const Options = struct { /// Arena must live until DevServer.deinit() arena: Allocator, diff --git a/src/bake/bake.zig b/src/bake/bake.zig index ea0a548d3a1547..40885f3422fac7 100644 --- a/src/bake/bake.zig +++ b/src/bake/bake.zig @@ -2,7 +2,6 @@ //! combines `Bun.build` and `Bun.serve`, providing a hot-reloading development //! server, server components, and other integrations. Instead of taking the //! role as a framework, Bake is tool for frameworks to build on top of. - pub const production = @import("./production.zig"); pub const DevServer = @import("./DevServer.zig"); pub const FrameworkRouter = @import("./FrameworkRouter.zig"); diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 844a12be688e80..c500a5f6dd5596 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1162,12 +1162,7 @@ pub const ServerConfig = struct { // When HTML bundles are provided, ensure DevServer options are ready // The presence of these options causes Bun.serve to initialize things. 
- // - // TODO: remove canary gate once the following things are fixed: - // - more extensive hmr reliability testing - if (!bun.CLI.Command.get().args.disable_hmr and - dedupe_html_bundle_map.count() > 0) - { + if (bun.bake.DevServer.enabled and dedupe_html_bundle_map.count() > 0) { // TODO: this should be the dir with bunfig?? const root = bun.fs.FileSystem.instance.top_level_dir; var arena = std.heap.ArenaAllocator.init(bun.default_allocator); diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index b0e72ec2a086a8..6aaa5c7e4a24a8 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -217,6 +217,7 @@ pub const HTMLBundleRoute = struct { errdefer config.deinit(bun.default_allocator); try config.entry_points.insert(this.html_bundle.path); try config.public_path.appendChar('/'); + config.target = .browser; if (bun.CLI.Command.get().args.serve_minify_identifiers) |minify_identifiers| { config.minify.identifiers = minify_identifiers; diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index c3a752ed22e685..3da0e7ef6b1396 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -4591,7 +4591,8 @@ pub const ParseTask = struct { task.known_target != .bake_server_components_ssr and this.ctx.framework.?.server_components.?.separate_ssr_graph) or // set the target to the client when bundling client-side files - (task.known_target == .browser)) + ((transpiler.options.server_components or transpiler.options.dev_server != null) and + task.known_target == .browser)) { transpiler = this.ctx.client_transpiler; resolver = &transpiler.resolver; @@ -4695,7 +4696,7 @@ pub const ParseTask = struct { task.side_effects = .no_side_effects__empty_ast; } - bun.debugAssert(ast.parts.len > 0); // when parts.len == 0, it is assumed to be pending/failed. empty ast has at least 1 part. + // bun.debugAssert(ast.parts.len > 0); // when parts.len == 0, it is assumed to be pending/failed. empty ast has at least 1 part. 
step.* = .resolve; diff --git a/src/cli.zig b/src/cli.zig index f3be4c2b9a68c2..f8b42edb10fb40 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -819,6 +819,10 @@ pub const Arguments = struct { if (args.flag("--zero-fill-buffers")) { Bun__Node__ZeroFillBuffers = true; } + + if (args.flag("--no-hmr")) { + bun.bake.DevServer.enabled = false; + } } if (opts.port != null and opts.origin == null) { diff --git a/src/codegen/bake-codegen.ts b/src/codegen/bake-codegen.ts index 1d3b5ff91b8506..21d1aa607e00ae 100644 --- a/src/codegen/bake-codegen.ts +++ b/src/codegen/bake-codegen.ts @@ -168,7 +168,7 @@ async function run() { console.error(err); } } else { - console.log("-> bake.client.js, bake.server.js, bake.error.js, bake.react-refresh-prebuilt.js"); + console.log("-> bake.client.js, bake.server.js, bake.error.js"); const empty_file = join(codegenRoot, "bake_empty_file"); if (!existsSync(empty_file)) writeIfNotChanged(empty_file, "this is used to fulfill a cmake dependency"); From 9b23cd63c4b59618c681e68cef344e579d5ba4f2 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 20:23:56 -0800 Subject: [PATCH 26/28] a --- src/bake/BakeGlobalObject.cpp | 17 +++++++++-------- test/js/bun/http/bun-serve-html-entry.test.ts | 16 ++++++++-------- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/src/bake/BakeGlobalObject.cpp b/src/bake/BakeGlobalObject.cpp index efa663385904d3..abcaf83f6e03a0 100644 --- a/src/bake/BakeGlobalObject.cpp +++ b/src/bake/BakeGlobalObject.cpp @@ -58,17 +58,18 @@ JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal, auto& vm = JSC::getVM(global); auto scope = DECLARE_THROW_SCOPE(vm); - ASSERT(referrer.isString()); - WTF::String refererString = jsCast(referrer)->getString(global); + if (auto string = jsDynamicCast(referrer)) { + WTF::String refererString = string->getString(global); - WTF::String keyString = key.toWTFString(global); - RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); - - if (refererString.startsWith("bake:/"_s) || (refererString == "."_s && keyString.startsWith("bake:/"_s))) { - BunString result = BakeProdResolve(global, Bun::toString(referrer.getString(global)), Bun::toString(keyString)); + WTF::String keyString = key.toWTFString(global); RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); - return JSC::Identifier::fromString(vm, result.toWTFString(BunString::ZeroCopy)); + if (refererString.startsWith("bake:/"_s) || (refererString == "."_s && keyString.startsWith("bake:/"_s))) { + BunString result = BakeProdResolve(global, Bun::toString(referrer.getString(global)), Bun::toString(keyString)); + RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier); + + return JSC::Identifier::fromString(vm, result.toWTFString(BunString::ZeroCopy)); + } } // Use Zig::GlobalObject's function diff --git a/test/js/bun/http/bun-serve-html-entry.test.ts b/test/js/bun/http/bun-serve-html-entry.test.ts index 250629671db154..b31c9e88fbd988 100644 --- a/test/js/bun/http/bun-serve-html-entry.test.ts +++ b/test/js/bun/http/bun-serve-html-entry.test.ts @@ -95,7 +95,7 @@ test("bun ./index.html", async () => { cmd: [bunExe(), "index.html", "--port=0"], env: { ...bunEnv, - NODE_ENV: undefined, + NODE_ENV: "production", }, cwd: dir, stdout: "pipe", @@ -127,7 +127,7 @@ test("bun ./index.html", async () => { expect(cssResponse.headers.get("content-type")).toContain("text/css"); const css = await cssResponse.text(); expect(css).toContain(".container"); - expect(css).toContain("max-width: 800px"); + 
expect(css).toContain("max-width:800px"); } // Get and verify the bundled JS @@ -213,7 +213,7 @@ test("bun ./index.html ./about.html", async () => { cmd: [bunExe(), "index.html", "about.html", "--port=0"], env: { ...bunEnv, - NODE_ENV: undefined, + NODE_ENV: "production", }, cwd: dir, stdout: "pipe", @@ -258,7 +258,7 @@ test("bun ./index.html ./about.html", async () => { expect(cssResponse.status).toBe(200); const css = await cssResponse.text(); expect(css).toContain(".container"); - expect(css).toContain("max-width: 800px"); + expect(css).toContain("max-width:800px"); } // Verify both JS bundles work @@ -408,7 +408,7 @@ test("bun *.html", async () => { cmd: [bunExe(), "*.html", "--port=0"], env: { ...bunEnv, - NODE_ENV: undefined, + NODE_ENV: "production", }, cwd: dir, stdout: "pipe", @@ -450,9 +450,9 @@ test("bun *.html", async () => { const cssResponse = await fetch(new URL(cssMatches[0]!, serverUrl).href); expect(cssResponse.status).toBe(200); const css = await cssResponse.text(); - expect(css).toContain("nav {"); - expect(css).toContain(".container {"); - expect(css).toContain("form {"); + expect(css).toContain("nav{"); + expect(css).toContain(".container{"); + expect(css).toContain("form{"); // Verify each page has its own JS functionality const jsMatches = responses.map(html => html.match(/src="(\/chunk-[a-z0-9]+\.js)"/)?.[1]!); From 9e922462bfedc62b82ce55d65da4d51d164fbb8c Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 7 Feb 2025 21:35:31 -0800 Subject: [PATCH 27/28] wa --- .gitignore | 1 + cmake/targets/BuildBun.cmake | 1 - src/bake/DevServer.zig | 4 +-- src/bun.js/api/server.zig | 6 ++--- src/bun.js/api/server/HTMLBundle.zig | 10 ++++---- test/js/bun/http/bun-serve-html.test.ts | 34 +++++++++++++------------ 6 files changed, 29 insertions(+), 27 deletions(-) diff --git a/.gitignore b/.gitignore index b791b8bc3a047f..e8a8c92992968e 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ .vscode/clang* .vscode/cpp* .zig-cache +.bake-debug *.a *.bc *.big diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 5ded96fa618577..2e7df7b43aff08 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -594,7 +594,6 @@ register_command( -Dcanary=${CANARY_REVISION} -Dcodegen_path=${CODEGEN_PATH} -Dcodegen_embed=$,true,false> - -Denable_asan=$,true,false> --prominent-compile-errors ${ZIG_FLAGS_BUN} ARTIFACTS diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 8189226571878b..25475232e96945 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -1291,7 +1291,7 @@ fn generateHTMLPayload(dev: *DevServer, route_bundle_index: RouteBundle.Index, r const payload_size = bundled_html.len + ("").len * css_ids.len + - "".len + + "".len + client_prefix.len + "/".len + display_name.len + "-0000000000000000.js".len; @@ -1307,7 +1307,7 @@ fn generateHTMLPayload(dev: *DevServer, route_bundle_index: RouteBundle.Index, r } array.appendSliceAssumeCapacity(before_body_end); // Insert the client script tag before "" - array.appendSliceAssumeCapacity("