@b9g/platform 0.1.8 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/config.d.ts +21 -0
- package/src/config.js +43 -0
- package/src/index.d.ts +1 -1
- package/src/index.js +2 -0
- package/src/runtime.js +19 -19
- package/src/single-threaded.d.ts +4 -2
- package/src/single-threaded.js +15 -13
- package/src/worker-pool.d.ts +5 -5
- package/src/worker-pool.js +12 -13
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@b9g/platform",
-  "version": "0.1.8",
+  "version": "0.1.10",
   "description": "ServiceWorker-first universal deployment platform. Write ServiceWorker apps once, deploy anywhere (Node/Bun/Cloudflare). Registry-based multi-app orchestration.",
   "keywords": [
     "serviceworker",
@@ -17,7 +17,7 @@
   "dependencies": {
     "@b9g/async-context": "^0.1.1",
     "@b9g/cache": "^0.1.4",
-    "@b9g/filesystem": "^0.1.
+    "@b9g/filesystem": "^0.1.6",
     "@logtape/logtape": "^1.2.0"
   },
   "devDependencies": {
package/src/config.d.ts
CHANGED

@@ -56,10 +56,19 @@ export interface BucketConfig {
     region?: string | number;
     endpoint?: string | number;
 }
+/** Log level for filtering */
+export type LogLevel = "debug" | "info" | "warning" | "error";
+export interface LoggingConfig {
+    /** Default log level. Defaults to "error" */
+    level?: LogLevel;
+    /** Per-category log levels (overrides default) */
+    categories?: Record<string, LogLevel>;
+}
 export interface ShovelConfig {
     port?: number | string;
     host?: string;
     workers?: number | string;
+    logging?: LoggingConfig;
     caches?: Record<string, CacheConfig>;
     buckets?: Record<string, BucketConfig>;
 }
@@ -67,6 +76,7 @@ export interface ProcessedShovelConfig {
     port: number;
     host: string;
     workers: number;
+    logging: Required<LoggingConfig>;
     caches: Record<string, CacheConfig>;
     buckets: Record<string, BucketConfig>;
 }
@@ -76,6 +86,17 @@ export interface ProcessedShovelConfig {
  * @param cwd - Current working directory (must be provided by runtime adapter)
  */
 export declare function loadConfig(cwd: string): ProcessedShovelConfig;
+/**
+ * Configure LogTape logging based on Shovel config.
+ * Call this in both main thread and workers.
+ *
+ * @param loggingConfig - The logging configuration from ProcessedShovelConfig.logging
+ * @param options - Additional options
+ * @param options.reset - Whether to reset existing LogTape config (default: true)
+ */
+export declare function configureLogging(loggingConfig: Required<LoggingConfig>, options?: {
+    reset?: boolean;
+}): Promise<void>;
 /**
  * Get cache config for a specific cache name (with pattern matching)
  */
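The new LoggingConfig surface means a Shovel config can now declare a default log level plus per-category overrides, and ProcessedShovelConfig always carries a fully resolved logging block. A minimal sketch of a config object using it (the object itself is hypothetical; only the field names and LogLevel values come from the typings above):

    import type { ShovelConfig } from "@b9g/platform";

    // Hypothetical config; field shapes are from config.d.ts.
    const config: ShovelConfig = {
      port: 3000,
      workers: 4,
      logging: {
        level: "warning",                // default for every category (falls back to "error" when omitted)
        categories: { worker: "debug" }, // per-category override
      },
    };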
package/src/config.js
CHANGED

@@ -3,6 +3,7 @@
 import { readFileSync } from "fs";
 import { resolve } from "path";
 import { Cache } from "@b9g/cache";
+import { configure, getConsoleSink } from "@logtape/logtape";
 function getEnv() {
   if (typeof import.meta !== "undefined" && import.meta.env) {
     return import.meta.env;
@@ -395,11 +396,52 @@ function loadConfig(cwd) {
     port: typeof processed.port === "number" ? processed.port : 3e3,
     host: processed.host || "localhost",
     workers: typeof processed.workers === "number" ? processed.workers : 1,
+    logging: {
+      level: processed.logging?.level || "error",
+      categories: processed.logging?.categories || {}
+    },
     caches: processed.caches || {},
     buckets: processed.buckets || {}
   };
   return config;
 }
+var SHOVEL_CATEGORIES = [
+  "cli",
+  "watcher",
+  "worker",
+  "single-threaded",
+  "assets",
+  "platform-node",
+  "platform-bun",
+  "platform-cloudflare",
+  "cache",
+  "cache-redis",
+  "router"
+];
+async function configureLogging(loggingConfig, options = {}) {
+  const { level, categories } = loggingConfig;
+  const reset = options.reset !== false;
+  const loggers = SHOVEL_CATEGORIES.map((category) => {
+    const categoryLevel = categories[category] || level;
+    return {
+      category: [category],
+      level: categoryLevel,
+      sinks: ["console"]
+    };
+  });
+  loggers.push({
+    category: ["logtape", "meta"],
+    level: "warning",
+    sinks: []
+  });
+  await configure({
+    reset,
+    sinks: {
+      console: getConsoleSink()
+    },
+    loggers
+  });
+}
 function getCacheConfig(config, name) {
   return matchPattern(name, config.caches) || {};
 }
@@ -506,6 +548,7 @@ For redis: npm install @b9g/cache-redis`
   };
 }
 export {
+  configureLogging,
   createBucketFactory,
   createCacheFactory,
   getBucketConfig,
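Together with loadConfig(), which now fills in logging defaults (level "error", no category overrides), configureLogging() registers a console sink for every built-in Shovel category and quiets LogTape's own meta logger. A rough startup sketch (process.cwd() and top-level await are assumptions about the host environment; both functions are re-exported from the package root per index.d.ts below):

    import { loadConfig, configureLogging } from "@b9g/platform";

    const config = loadConfig(process.cwd()); // ProcessedShovelConfig with logging defaults applied
    await configureLogging(config.logging);   // one console logger per Shovel category
    // Per the typings, call this in the main thread and again in each worker.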
package/src/index.d.ts
CHANGED

@@ -192,4 +192,4 @@ export { SingleThreadedRuntime, type SingleThreadedRuntimeOptions, } from "./sin
 export { ShovelServiceWorkerRegistration, ShovelGlobalScope, FetchEvent, InstallEvent, ActivateEvent, ExtendableEvent, } from "./runtime.js";
 export { RequestCookieStore, type CookieListItem, type CookieInit, type CookieStoreGetOptions, type CookieStoreDeleteOptions, type CookieSameSite, type CookieList, parseCookieHeader, serializeCookie, parseSetCookieHeader, } from "./cookie-store.js";
 export { CustomBucketStorage } from "@b9g/filesystem";
-export { loadConfig, getCacheConfig, getBucketConfig, parseConfigExpr, processConfigValue, matchPattern, createBucketFactory, createCacheFactory, type ShovelConfig, type CacheConfig, type BucketConfig, type BucketFactoryOptions, type CacheFactoryOptions, type ProcessedShovelConfig, } from "./config.js";
+export { loadConfig, configureLogging, getCacheConfig, getBucketConfig, parseConfigExpr, processConfigValue, matchPattern, createBucketFactory, createCacheFactory, type ShovelConfig, type CacheConfig, type BucketConfig, type LoggingConfig, type LogLevel, type BucketFactoryOptions, type CacheFactoryOptions, type ProcessedShovelConfig, } from "./config.js";
package/src/index.js
CHANGED

@@ -27,6 +27,7 @@ import {
 import { CustomBucketStorage } from "@b9g/filesystem";
 import {
   loadConfig,
+  configureLogging,
   getCacheConfig,
   getBucketConfig,
   parseConfigExpr,
@@ -237,6 +238,7 @@ export {
   ShovelGlobalScope,
   ShovelServiceWorkerRegistration,
   SingleThreadedRuntime,
+  configureLogging,
   createBucketFactory,
   createCacheFactory,
   createPlatform,
package/src/runtime.js
CHANGED

@@ -6,6 +6,7 @@ import { CustomBucketStorage } from "@b9g/filesystem";
 import { CustomCacheStorage } from "@b9g/cache";
 import { createBucketFactory, createCacheFactory } from "./config.js";
 import { getLogger } from "@logtape/logtape";
+import { configureLogging } from "./config.js";
 if (import.meta.env && !import.meta.env.MODE && import.meta.env.NODE_ENV) {
   import.meta.env.MODE = import.meta.env.NODE_ENV;
 }
@@ -817,7 +818,7 @@ var scope = null;
 var _workerSelf = null;
 var currentApp = null;
 var serviceWorkerReady = false;
-var
+var loadedEntrypoint = null;
 var caches;
 var buckets;
 async function handleFetchEvent(request) {
@@ -832,27 +833,23 @@ async function handleFetchEvent(request) {
     return response;
   } catch (error) {
     logger.error("[Worker] ServiceWorker request failed", { error });
+    console.error("[Worker] ServiceWorker request failed:", error);
     const response = new Response("ServiceWorker request failed", {
       status: 500
     });
     return response;
   }
 }
-async function loadServiceWorker(
+async function loadServiceWorker(entrypoint) {
   try {
     logger.debug("loadServiceWorker called", {
-
-
+      entrypoint,
+      loadedEntrypoint
     });
-    if (!entrypoint) {
-      throw new Error(
-        "ServiceWorker entrypoint must be provided via loadServiceWorker() call"
-      );
-    }
     logger.info("[Worker] Loading from", { entrypoint });
-    if (
+    if (loadedEntrypoint !== null && loadedEntrypoint !== entrypoint) {
       logger.info(
-        `[Worker] Hot reload detected: ${
+        `[Worker] Hot reload detected: ${loadedEntrypoint} -> ${entrypoint}`
       );
       logger.info("[Worker] Creating completely fresh ServiceWorker context");
       registration = new ShovelServiceWorkerRegistration();
@@ -869,14 +866,14 @@ async function loadServiceWorker(version, entrypoint) {
       currentApp = null;
       serviceWorkerReady = false;
     }
-    if (
-    logger.info("[Worker] ServiceWorker already loaded for
-
+    if (loadedEntrypoint === entrypoint) {
+      logger.info("[Worker] ServiceWorker already loaded for entrypoint", {
+        entrypoint
       });
       return;
     }
-    const appModule = await import(
-
+    const appModule = await import(entrypoint);
+    loadedEntrypoint = entrypoint;
     currentApp = appModule;
     if (!registration) {
       throw new Error("ServiceWorker runtime not initialized");
@@ -885,7 +882,7 @@ async function loadServiceWorker(version, entrypoint) {
     await registration.activate();
     serviceWorkerReady = true;
     logger.info(
-      `[Worker] ServiceWorker loaded and activated
+      `[Worker] ServiceWorker loaded and activated from ${entrypoint}`
     );
   } catch (error) {
     const errorMessage = error instanceof Error ? error.message : String(error);
@@ -903,6 +900,9 @@ var workerId = Math.random().toString(36).substring(2, 8);
 var sendMessage;
 async function initializeRuntime(config, baseDir) {
   try {
+    if (config?.logging) {
+      await configureLogging(config.logging);
+    }
     logger.info(`[Worker-${workerId}] Initializing runtime with config`, {
       config,
       baseDir
@@ -932,8 +932,8 @@ async function handleMessage(message) {
     sendMessage({ type: "initialized" });
   } else if (message.type === "load") {
     const loadMsg = message;
-    await loadServiceWorker(loadMsg.
-    sendMessage({ type: "ready",
+    await loadServiceWorker(loadMsg.entrypoint);
+    sendMessage({ type: "ready", entrypoint: loadMsg.entrypoint });
   } else if (message.type === "request") {
     const reqMsg = message;
     const request = new Request(reqMsg.request.url, {
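The worker runtime now keys everything on the entrypoint path instead of a version string: loading the same entrypoint twice is a no-op, and a different path tears the registration down and re-imports. Because dynamic import() caches modules by URL, a reload only takes effect when the build emits a new content-hashed filename, which is what the hashed entrypoint provides. A schematic of the worker-side behavior (loadServiceWorker is internal to runtime.js, and the paths below are made up):

    // inside a worker, driven by successive "load" messages
    await loadServiceWorker("/dist/app.1a2b3c.js"); // first load: import, install, activate
    await loadServiceWorker("/dist/app.1a2b3c.js"); // same entrypoint: already loaded, returns early
    await loadServiceWorker("/dist/app.9f8e7d.js"); // new hash: fresh registration and a fresh import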
package/src/single-threaded.d.ts
CHANGED

@@ -31,12 +31,14 @@ export declare class SingleThreadedRuntime {
     init(): Promise<void>;
     /**
      * Load and run a ServiceWorker entrypoint
+     * @param entrypoint - Path to the new entrypoint (hashed filename for cache busting)
      */
-    reloadWorkers(
+    reloadWorkers(entrypoint: string): Promise<void>;
     /**
      * Load a ServiceWorker entrypoint for the first time
+     * @param entrypoint - Path to the entrypoint file (content-hashed filename)
      */
-    loadEntrypoint(entrypoint: string
+    loadEntrypoint(entrypoint: string): Promise<void>;
     /**
      * Handle an HTTP request
      * This is the key method - direct call, no postMessage!
package/src/single-threaded.js
CHANGED

@@ -5,6 +5,7 @@ import { ShovelGlobalScope, ShovelServiceWorkerRegistration } from "./runtime.js
 import { CustomBucketStorage } from "@b9g/filesystem";
 import { CustomCacheStorage } from "@b9g/cache";
 import {
+  configureLogging,
   createBucketFactory,
   createCacheFactory
 } from "./config.js";
@@ -34,37 +35,38 @@ var SingleThreadedRuntime = class {
    * Initialize the runtime (install scope as globalThis.self)
    */
   async init() {
+    if (this.#config?.logging) {
+      await configureLogging(this.#config.logging);
+    }
     this.#scope.install();
     logger.info("SingleThreadedRuntime initialized - scope installed");
   }
   /**
    * Load and run a ServiceWorker entrypoint
+   * @param entrypoint - Path to the new entrypoint (hashed filename for cache busting)
    */
-  async reloadWorkers(
-    if (!this.#entrypoint) {
-      throw new Error("No entrypoint set - call loadEntrypoint first");
-    }
+  async reloadWorkers(entrypoint) {
     logger.info("Reloading ServiceWorker", {
-
-
+      oldEntrypoint: this.#entrypoint,
+      newEntrypoint: entrypoint
     });
-
+    this.#entrypoint = entrypoint;
     this.#registration._serviceWorker._setState("parsed");
     this.#ready = false;
-    await import(
+    await import(entrypoint);
     await this.#registration.install();
     await this.#registration.activate();
     this.#ready = true;
-    logger.info("ServiceWorker loaded and activated", {
+    logger.info("ServiceWorker loaded and activated", { entrypoint });
   }
   /**
    * Load a ServiceWorker entrypoint for the first time
+   * @param entrypoint - Path to the entrypoint file (content-hashed filename)
   */
-  async loadEntrypoint(entrypoint
+  async loadEntrypoint(entrypoint) {
     this.#entrypoint = entrypoint;
-    logger.info("Loading ServiceWorker entrypoint", { entrypoint
-
-    await import(importPath);
+    logger.info("Loading ServiceWorker entrypoint", { entrypoint });
+    await import(entrypoint);
     await this.#registration.install();
     await this.#registration.activate();
     this.#ready = true;
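The single-threaded runtime follows the same entrypoint-driven flow without workers or postMessage. A usage sketch (constructor options, the request URL, and the entrypoint paths are assumptions; init, loadEntrypoint, and reloadWorkers come from single-threaded.d.ts, and handleRequest is declared there but its signature is cut off in this diff):

    import { SingleThreadedRuntime } from "@b9g/platform";

    const runtime = new SingleThreadedRuntime(/* options not shown in this diff */);
    await runtime.init();                                // configures logging when config.logging is set, installs the scope
    await runtime.loadEntrypoint("/dist/app.1a2b3c.js"); // first import + install + activate
    const res = await runtime.handleRequest(new Request("http://localhost:3000/"));
    // After a rebuild, pass the new content-hashed path; reloadWorkers no longer
    // requires that loadEntrypoint was called first.
    await runtime.reloadWorkers("/dist/app.9f8e7d.js");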
package/src/worker-pool.d.ts
CHANGED

@@ -38,12 +38,11 @@ export interface WorkerResponse extends WorkerMessage {
 }
 export interface WorkerLoadMessage extends WorkerMessage {
     type: "load";
-
-    entrypoint?: string;
+    entrypoint: string;
 }
 export interface WorkerReadyMessage extends WorkerMessage {
     type: "ready" | "worker-ready";
-
+    entrypoint?: string;
 }
 export interface WorkerErrorMessage extends WorkerMessage {
     type: "error";
@@ -75,9 +74,10 @@ export declare class ServiceWorkerPool {
      */
     handleRequest(request: Request): Promise<Response>;
     /**
-     * Reload ServiceWorker with new
+     * Reload ServiceWorker with new entrypoint (hot reload)
+     * The entrypoint path contains a content hash for cache busting
      */
-    reloadWorkers(
+    reloadWorkers(entrypoint: string): Promise<void>;
     /**
      * Graceful shutdown of all workers
      */
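These interfaces define the pool/worker contract: the pool posts a WorkerLoadMessage carrying the (now required) entrypoint and waits for a "ready" reply echoing it back, which is what handleMessage in runtime.js sends. A sketch of the exchange (the worker object and the path are made up; the message shapes come from the interfaces above):

    // pool -> worker
    worker.postMessage({ type: "load", entrypoint: "/dist/app.9f8e7d.js" });

    // worker -> pool once import + install + activate succeed:
    //   { type: "ready", entrypoint: "/dist/app.9f8e7d.js" }
    worker.addEventListener("message", (event) => {
      const message = event.data;
      if (message.type === "ready" && message.entrypoint === "/dist/app.9f8e7d.js") {
        // this worker is now serving the new entrypoint
      }
    });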
package/src/worker-pool.js
CHANGED

@@ -251,7 +251,7 @@ var ServiceWorkerPool = class {
   }
   #handleReady(message) {
     if (message.type === "ready") {
-      logger.info("ServiceWorker ready", {
+      logger.info("ServiceWorker ready", { entrypoint: message.entrypoint });
     } else if (message.type === "worker-ready") {
       logger.info("Worker initialized", {});
     }
@@ -303,10 +303,12 @@ var ServiceWorkerPool = class {
     }
   }
   /**
-   * Reload ServiceWorker with new
+   * Reload ServiceWorker with new entrypoint (hot reload)
+   * The entrypoint path contains a content hash for cache busting
   */
-  async reloadWorkers(
-    logger.info("Reloading ServiceWorker", {
+  async reloadWorkers(entrypoint) {
+    logger.info("Reloading ServiceWorker", { entrypoint });
+    this.#appEntrypoint = entrypoint;
     const loadPromises = this.#workers.map((worker) => {
       return new Promise((resolve, reject) => {
         let timeoutId;
@@ -319,7 +321,7 @@ var ServiceWorkerPool = class {
         };
         const handleReady = (event) => {
           const message = event.data || event;
-          if (message.type === "ready" && message.
+          if (message.type === "ready" && message.entrypoint === entrypoint) {
             cleanup();
             resolve();
           } else if (message.type === "error") {
@@ -340,30 +342,27 @@ var ServiceWorkerPool = class {
           cleanup();
           reject(
             new Error(
-              `Worker failed to load ServiceWorker within 30000ms (
+              `Worker failed to load ServiceWorker within 30000ms (entrypoint ${entrypoint})`
            )
          );
        }, 3e4);
        logger.info("Sending load message", {
-
-          entrypoint: this.#appEntrypoint
+          entrypoint
        });
        worker.addEventListener("message", handleReady);
        worker.addEventListener("error", handleError);
        const loadMessage = {
          type: "load",
-
-          entrypoint: this.#appEntrypoint
+          entrypoint
        };
        logger.debug("[WorkerPool] Sending load message", {
-          entrypoint
-          version
+          entrypoint
        });
        worker.postMessage(loadMessage);
      });
    });
    await Promise.all(loadPromises);
-    logger.info("All workers reloaded", {
+    logger.info("All workers reloaded", { entrypoint });
  }
  /**
   * Graceful shutdown of all workers
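At the pool level, hot reload is now a single call: reloadWorkers(entrypoint) records the new path, broadcasts the load message to every worker, and resolves once each worker reports ready (or rejects after the 30000 ms timeout). A sketch of how a build watcher might drive it (pool construction and the export path of ServiceWorkerPool are not shown in this diff, so they are assumed):

    // after the bundler emits a new content-hashed entrypoint
    await pool.reloadWorkers("/dist/app.9f8e7d.js");
    // subsequent requests are served by workers running the new ServiceWorker
    const response = await pool.handleRequest(new Request("http://localhost:3000/"));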