@b9g/platform 0.1.11 → 0.1.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/package.json +7 -31
- package/src/index.d.ts +216 -11
- package/src/index.js +488 -103
- package/src/runtime.d.ts +155 -7
- package/src/runtime.js +393 -245
- package/src/shovel-config.d.ts +10 -0
- package/src/worker.d.ts +39 -0
- package/src/worker.js +285 -0
- package/src/config.d.ts +0 -172
- package/src/config.js +0 -641
- package/src/cookie-store.d.ts +0 -80
- package/src/cookie-store.js +0 -233
- package/src/single-threaded.d.ts +0 -59
- package/src/single-threaded.js +0 -114
- package/src/worker-pool.d.ts +0 -93
- package/src/worker-pool.js +0 -390
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/**
 * Type declarations for the shovel:config virtual module.
 * This module is resolved by esbuild at build time.
 */
declare module "shovel:config" {
  import type {ShovelConfig} from "./runtime.js";

  // Injected at build time; shape is defined by ShovelConfig in runtime.js.
  export const config: ShovelConfig;
  export type {ShovelConfig};
}
|
package/src/worker.d.ts
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
 * Worker Entry Point for MultiThreadedRuntime
 *
 * This is the explicit entry point for worker threads spawned by MultiThreadedRuntime.
 * It sets up message handling and initializes the ServiceWorker runtime.
 *
 * This file is loaded directly as a Worker script - no detection needed.
 *
 * BOOTSTRAP ORDER:
 * 1. Create placeholder caches/directories with deferred factories
 * 2. Create and install ServiceWorkerGlobals (provides `self`, `addEventListener`, etc.)
 * 3. Set up message handlers using `self.addEventListener`
 * 4. Wait for "init" message to configure factories with real config
 * 5. Wait for "load" message to load and activate ServiceWorker
 */
import { Cache } from "@b9g/cache";
import { type ShovelConfig } from "./runtime.js";
export interface DirectoryFactoryOptions {
    /** Base directory for path resolution (entrypoint directory) - REQUIRED */
    baseDir: string;
    /** Shovel configuration for overrides */
    config?: ShovelConfig;
}
/**
 * Creates a directory factory function for CustomDirectoryStorage.
 */
export declare function createDirectoryFactory(options: DirectoryFactoryOptions): (name: string) => Promise<FileSystemDirectoryHandle>;
export interface CacheFactoryOptions {
    /** Shovel configuration for cache settings */
    config?: ShovelConfig;
    /** Default provider when not specified in config. Defaults to "memory". */
    defaultProvider?: string;
    /** If true, use PostMessageCache for memory caches (for workers) */
    usePostMessage?: boolean;
}
/**
 * Creates a cache factory function for CustomCacheStorage.
 */
export declare function createCacheFactory(options?: CacheFactoryOptions): (name: string) => Promise<Cache>;
|
package/src/worker.js
ADDED
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
/// <reference types="./worker.d.ts" />
|
|
2
|
+
import "../chunk-P57PW2II.js";
|
|
3
|
+
|
|
4
|
+
// src/worker.ts
|
|
5
|
+
import { resolve } from "path";
|
|
6
|
+
import { getLogger } from "@logtape/logtape";
|
|
7
|
+
import { CustomDirectoryStorage } from "@b9g/filesystem";
|
|
8
|
+
import { CustomCacheStorage } from "@b9g/cache";
|
|
9
|
+
import { handleCacheResponse, PostMessageCache } from "@b9g/cache/postmessage";
|
|
10
|
+
import {
|
|
11
|
+
ServiceWorkerGlobals,
|
|
12
|
+
ShovelServiceWorkerRegistration,
|
|
13
|
+
CustomLoggerStorage,
|
|
14
|
+
configureLogging
|
|
15
|
+
} from "./runtime.js";
|
|
16
|
+
/**
 * Resolves a value from a pattern map by name.
 *
 * Lookup order:
 * 1. Exact key match (e.g. "sessions").
 * 2. Wildcard patterns containing "*" (e.g. "api-*"), where "*" matches any
 *    run of characters and every other character matches itself literally.
 *
 * @param name - The cache/directory name to look up.
 * @param patterns - Pattern-to-value map, or undefined when unconfigured.
 * @returns The matched value, or undefined when nothing matches.
 */
function matchPattern(name, patterns) {
  if (!patterns)
    return void 0;
  if (patterns[name])
    return patterns[name];
  for (const [pattern, value] of Object.entries(patterns)) {
    if (pattern.includes("*")) {
      // Escape regex metacharacters so literal ".", "+", "(", etc. in
      // patterns match themselves and only "*" acts as a wildcard. (The
      // previous implementation left them unescaped, so a pattern like
      // "api.v*" also matched names such as "apiXv1".)
      const source = pattern
        .replace(/[.+?^${}()|[\]\\]/g, "\\$&")
        .replace(/\*/g, ".*");
      const regex = new RegExp("^" + source + "$");
      if (regex.test(name))
        return value;
    }
  }
  return void 0;
}
|
|
30
|
+
/**
 * Looks up the cache settings that apply to a named cache, falling back to
 * an empty options object when no pattern in `config.caches` matches.
 */
function getCacheConfig(config, name) {
  const matched = matchPattern(name, config.caches);
  return matched || {};
}
|
|
33
|
+
/**
 * Looks up the directory settings that apply to a named directory, falling
 * back to an empty options object when no pattern in `config.directories`
 * matches.
 */
function getDirectoryConfig(config, name) {
  const matched = matchPattern(name, config.directories);
  return matched || {};
}
|
|
36
|
+
// Default on-disk locations for well-known directory names, derived from the
// entrypoint's directory: "static" is a sibling of the entrypoint directory,
// "server" is the entrypoint directory itself.
var WELL_KNOWN_DIRECTORY_PATHS = {
  static: (baseDir) => resolve(baseDir, "../static"),
  server: (baseDir) => baseDir
};
// Maps short directory provider names to the modules that implement them.
// Any other provider string is treated as a module specifier and imported
// directly (see createDirectoryFactory).
var BUILTIN_DIRECTORY_PROVIDERS = {
  node: "@b9g/filesystem/node.js",
  memory: "@b9g/filesystem/memory.js",
  s3: "@b9g/filesystem-s3"
};
|
|
45
|
+
/**
 * Creates the async factory used by CustomDirectoryStorage to materialize a
 * directory handle for a given name.
 *
 * Backing-path resolution order: explicit `path` in config, then a
 * well-known default ("static"/"server"), then a sibling directory of
 * `baseDir` named after the requested directory.
 *
 * Provider resolution: "node" and "memory" are loaded from @b9g/filesystem;
 * any other provider string is imported as a module specifier and its
 * exported directory class is instantiated.
 */
function createDirectoryFactory(options) {
  const { baseDir, config } = options;
  return async (name) => {
    const dirConfig = config ? getDirectoryConfig(config, name) : {};
    let dirPath;
    if (dirConfig.path) {
      dirPath = String(dirConfig.path);
    } else if (WELL_KNOWN_DIRECTORY_PATHS[name]) {
      dirPath = WELL_KNOWN_DIRECTORY_PATHS[name](baseDir);
    } else {
      // Fallback: a sibling of the entrypoint directory named after `name`.
      dirPath = resolve(baseDir, `../${name}`);
    }
    const provider = String(dirConfig.provider || "node");
    const modulePath = BUILTIN_DIRECTORY_PROVIDERS[provider] || provider;
    if (modulePath === "@b9g/filesystem/node.js") {
      const { NodeDirectory } = await import("@b9g/filesystem/node.js");
      return new NodeDirectory(dirPath);
    }
    if (modulePath === "@b9g/filesystem/memory.js") {
      // NOTE(review): the memory provider ignores dirPath and keys the
      // directory by name only — confirm that is intended.
      const { MemoryDirectory } = await import("@b9g/filesystem/memory.js");
      return new MemoryDirectory(name);
    }
    // Third-party provider: accept a default export, a conventional named
    // export, or any exported function whose name contains "Directory".
    const module = await import(modulePath);
    const DirectoryClass = module.default || module.S3Directory || module.Directory || Object.values(module).find(
      (v) => typeof v === "function" && v.name?.includes("Directory")
    );
    if (!DirectoryClass) {
      throw new Error(
        `Directory module "${modulePath}" does not export a valid directory class.`
      );
    }
    // Strip keys consumed above; forward the rest as constructor options.
    const { provider: _, path: __, ...dirOptions } = dirConfig;
    return new DirectoryClass(name, { path: dirPath, ...dirOptions });
  };
}
|
|
80
|
+
// Maps short cache provider names to the modules that implement them.
// "cloudflare" is handled specially inside createCacheFactory; any other
// provider string is imported directly as a module specifier.
var BUILTIN_CACHE_PROVIDERS = {
  memory: "@b9g/cache/memory.js",
  redis: "@b9g/cache-redis"
};
|
|
84
|
+
/**
 * Creates the async factory used by CustomCacheStorage to open a cache by
 * name. Provider resolution mirrors the directory factory: built-in
 * providers are resolved by short name; anything else is treated as a
 * module specifier and imported dynamically.
 */
function createCacheFactory(options = {}) {
  const { config, defaultProvider = "memory", usePostMessage = false } = options;
  return async (name) => {
    const settings = config ? getCacheConfig(config, name) : {};
    const providerName = String(settings.provider || defaultProvider);
    // Cloudflare delegates straight to the platform-native caches API.
    if (providerName === "cloudflare") {
      const nativeCaches = globalThis.__cloudflareCaches ?? globalThis.caches;
      if (!nativeCaches) {
        throw new Error(
          "Cloudflare cache provider requires native caches API."
        );
      }
      return nativeCaches.open(name);
    }
    // In workers, memory caches are proxied to the main thread over
    // postMessage so all threads observe shared state.
    if (providerName === "memory" && usePostMessage) {
      return new PostMessageCache(name);
    }
    const { provider: _, ...constructorOptions } = settings;
    const modulePath = BUILTIN_CACHE_PROVIDERS[providerName] || providerName;
    const imported = await import(modulePath);
    const CacheClass =
      imported.default ||
      imported.RedisCache ||
      imported.MemoryCache ||
      imported.Cache ||
      Object.values(imported).find(
        (v) => typeof v === "function" && v.name?.includes("Cache")
      );
    if (!CacheClass) {
      throw new Error(
        `Cache module "${modulePath}" does not export a valid cache class.`
      );
    }
    return new CacheClass(name, constructorOptions);
  };
}
|
|
115
|
+
// Short random ID used to tag this worker's log lines.
var workerId = Math.random().toString(36).substring(2, 8);
// Deferred factory wiring: caches/directories can be opened immediately,
// but the underlying factories are only resolved once the "init" message
// delivers the real config (see initializeRuntime below).
var resolveCacheFactory;
var resolveDirectoryFactory;
var cacheFactoryPromise = new Promise((resolve2) => {
  resolveCacheFactory = resolve2;
});
var directoryFactoryPromise = new Promise((resolve2) => {
  resolveDirectoryFactory = resolve2;
});
// Storage facades handed to the ServiceWorker globals. Each open() call
// blocks until the corresponding factory promise has been resolved.
var caches = new CustomCacheStorage(async (name) => {
  const factory = await cacheFactoryPromise;
  return factory(name);
});
var directories = new CustomDirectoryStorage(async (name) => {
  const factory = await directoryFactoryPromise;
  return factory(name);
});
var loggers = new CustomLoggerStorage(
  (...categories) => getLogger(categories)
);
// Install ServiceWorker-style globals (self, addEventListener, caches, ...)
// before the app entrypoint is ever imported, so it sees a ServiceWorker-like
// environment at module-evaluation time.
var registration = new ShovelServiceWorkerRegistration();
var scope = new ServiceWorkerGlobals({
  registration,
  caches,
  directories,
  loggers
});
scope.install();
var logger = getLogger(["platform"]);
// Assigned near the bottom of this module; used by all message handlers.
var sendMessage;
// Becomes true only after install + activate complete in loadServiceWorker.
var serviceWorkerReady = false;
// Tracks the last loaded entrypoint so hot reloads can be detected.
var loadedEntrypoint = null;
|
|
147
|
+
/**
 * Dispatches an incoming Request to the ServiceWorker registration, mapping
 * any failure to a plain 500 response so the main thread always receives an
 * answer. Throws if called before the "load" phase has completed.
 */
async function handleFetchEvent(request) {
  if (!serviceWorkerReady) {
    throw new Error("ServiceWorker not ready");
  }
  try {
    const response = await registration.handleRequest(request);
    return response;
  } catch (error) {
    // Surface the failure through both the configured logger and the raw
    // console, then degrade gracefully.
    logger.error("[Worker] ServiceWorker request failed: {error}", { error });
    console.error("[Worker] ServiceWorker request failed:", error);
    return new Response("ServiceWorker request failed", { status: 500 });
  }
}
|
|
159
|
+
/**
 * Imports the application entrypoint and drives the ServiceWorker lifecycle
 * (install, then activate). On success the worker is marked ready to serve
 * requests; on failure it is marked not ready and the error is rethrown so
 * the message handler can report it to the main thread.
 *
 * When called again with a different entrypoint (hot reload), a fresh
 * registration and globals scope are installed first so the new module
 * registers its listeners into a clean context.
 */
async function loadServiceWorker(entrypoint) {
  try {
    logger.debug("loadServiceWorker called", { entrypoint, loadedEntrypoint });
    logger.info("[Worker] Loading from", { entrypoint });
    if (loadedEntrypoint !== null && loadedEntrypoint !== entrypoint) {
      logger.info(
        `[Worker] Hot reload detected: ${loadedEntrypoint} -> ${entrypoint}`
      );
      logger.info("[Worker] Creating completely fresh ServiceWorker context");
      registration = new ShovelServiceWorkerRegistration();
      scope = new ServiceWorkerGlobals({
        registration,
        caches,
        directories,
        loggers
      });
      scope.install();
    }
    // NOTE(review): loadedEntrypoint is recorded before the import; if the
    // import throws, a retry with the same path skips the fresh-context
    // branch above — confirm that is intended.
    loadedEntrypoint = entrypoint;
    // Dynamic import of the application's ServiceWorker module. Its
    // top-level code runs here, registering install/activate/fetch listeners
    // against the globals installed by scope.install().
    const app = await import(entrypoint);
    logger.debug("[Worker] ServiceWorker module loaded", {
      exports: Object.keys(app)
    });
    logger.info("[Worker] Running install event");
    await registration.install();
    logger.info("[Worker] Running activate event");
    await registration.activate();
    serviceWorkerReady = true;
    logger.info("[Worker] ServiceWorker ready", { entrypoint });
  } catch (error) {
    logger.error("[Worker] Failed to load ServiceWorker: {error}", {
      error,
      entrypoint
    });
    serviceWorkerReady = false;
    throw error;
  }
}
|
|
197
|
+
/**
 * Handles the "init" message: configures logging from the received config,
 * then resolves the deferred cache and directory factories so any pending
 * caches.open()/directories.open() calls can complete.
 *
 * @param config - ShovelConfig sent by the main thread (may be undefined).
 * @param baseDir - Entrypoint directory; base for directory path resolution.
 */
async function initializeRuntime(config, baseDir) {
  try {
    if (config?.logging) {
      // Reset so worker logging matches the main thread's configuration.
      await configureLogging(config.logging, { reset: true });
    }
    logger.info(`[Worker-${workerId}] Initializing runtime`, { config, baseDir });
    logger.info(`[Worker-${workerId}] Configuring cache factory`);
    // Workers use postMessage-backed memory caches so threads share state.
    resolveCacheFactory(createCacheFactory({ config, usePostMessage: true }));
    logger.info(`[Worker-${workerId}] Configuring directory factory`);
    resolveDirectoryFactory(createDirectoryFactory({ baseDir, config }));
    logger.info(`[Worker-${workerId}] Runtime initialized successfully`);
  } catch (error) {
    logger.error(`[Worker-${workerId}] Failed to initialize runtime: {error}`, {
      error
    });
    throw error;
  }
}
|
|
215
|
+
/**
 * Dispatches control messages from the main thread:
 * - "init": configure the runtime, reply with "initialized".
 * - "load": import and activate the ServiceWorker, reply with "ready".
 * - "request": reconstruct a Request, run it through the ServiceWorker, and
 *   reply with a serialized "response" (body transferred as an ArrayBuffer).
 * Failures are reported back as an "error" message instead of throwing.
 * Unrecognized message types are silently ignored.
 */
async function handleMessage(message) {
  try {
    logger.info(`[Worker-${workerId}] Received message`, { type: message.type });
    if (message.type === "init") {
      const initMsg = message;
      await initializeRuntime(initMsg.config, initMsg.baseDir);
      logger.info(`[Worker-${workerId}] Sending initialized message`);
      sendMessage({ type: "initialized" });
    } else if (message.type === "load") {
      const loadMsg = message;
      await loadServiceWorker(loadMsg.entrypoint);
      sendMessage({ type: "ready", entrypoint: loadMsg.entrypoint });
    } else if (message.type === "request") {
      const reqMsg = message;
      // Rebuild a real Request from the structured-clone-safe payload.
      const request = new Request(reqMsg.request.url, {
        method: reqMsg.request.method,
        headers: reqMsg.request.headers,
        body: reqMsg.request.body
      });
      const response = await handleFetchEvent(request);
      const body = await response.arrayBuffer();
      const headers = Object.fromEntries(response.headers.entries());
      // Ensure some content type so the main thread never relays an
      // untyped body.
      if (!headers["Content-Type"] && !headers["content-type"]) {
        headers["Content-Type"] = "text/plain; charset=utf-8";
      }
      const responseMsg = {
        type: "response",
        response: {
          status: response.status,
          statusText: response.statusText,
          headers,
          body
        },
        requestID: reqMsg.requestID
      };
      // Transfer (not copy) the body buffer back to the main thread.
      sendMessage(responseMsg, [body]);
    }
  } catch (error) {
    // requestID may be undefined for non-"request" messages; presumably the
    // main thread handles both cases — verify against the caller.
    const errorMsg = {
      type: "error",
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : void 0,
      requestID: message.requestID
    };
    sendMessage(errorMsg);
  }
}
|
|
262
|
+
// Route incoming messages: cache-protocol replies go to the PostMessageCache
// machinery; everything else goes to the control-message handler.
self.addEventListener("message", (event) => {
  const msg = event.data;
  if (msg?.type === "cache:response" || msg?.type === "cache:error") {
    logger.debug(`[Worker-${workerId}] Forwarding cache message`, {
      type: msg.type,
      requestID: msg.requestID
    });
    handleCacheResponse(msg);
    return;
  }
  // Fire-and-forget: handleMessage reports its own errors via sendMessage.
  void handleMessage(event.data);
});
// Post back to the spawning thread; pass a transfer list only when one is
// provided so large response bodies are moved rather than copied.
sendMessage = (message, transfer) => {
  if (transfer && transfer.length > 0) {
    postMessage(message, transfer);
  } else {
    postMessage(message);
  }
};
// Signal that the worker script finished booting and can accept "init".
sendMessage({ type: "worker-ready" });
export {
  createCacheFactory,
  createDirectoryFactory
};
|
package/src/config.d.ts
DELETED
|
@@ -1,172 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Configuration expression parser
|
|
3
|
-
*
|
|
4
|
-
* Embeddable JavaScript-like expressions for JSON config:
|
|
5
|
-
* - ALL_CAPS = env var reference (e.g., NODE_ENV, PORT)
|
|
6
|
-
* - Everything else = string literal (kebab-case, URLs, camelCase, PascalCase)
|
|
7
|
-
* - Quoted strings = explicit strings (escape hatch)
|
|
8
|
-
* - JavaScript keywords: true, false, null, undefined
|
|
9
|
-
* - Operators: ||, &&, ===, !==, ==, !=, ? :, !
|
|
10
|
-
* - No eval - uses recursive descent parser
|
|
11
|
-
*
|
|
12
|
-
* Examples:
|
|
13
|
-
* "PORT || 3000"
|
|
14
|
-
* "NODE_ENV === production ? redis : memory"
|
|
15
|
-
* "REDIS_URL || redis://localhost:6379"
|
|
16
|
-
* "S3_BUCKET || my-bucket-name"
|
|
17
|
-
* "BASE_PATH || ./uploads"
|
|
18
|
-
*/
|
|
19
|
-
import { Cache } from "@b9g/cache";
|
|
20
|
-
/**
|
|
21
|
-
* Parse a configuration expression with the DSL
|
|
22
|
-
*/
|
|
23
|
-
export declare function parseConfigExpr(expr: string, env?: Record<string, string | undefined>, options?: {
|
|
24
|
-
strict?: boolean;
|
|
25
|
-
}): any;
|
|
26
|
-
/**
|
|
27
|
-
* Process a config value (handles nested objects/arrays)
|
|
28
|
-
*/
|
|
29
|
-
export declare function processConfigValue(value: any, env?: Record<string, string | undefined>, options?: {
|
|
30
|
-
strict?: boolean;
|
|
31
|
-
}): any;
|
|
32
|
-
/**
|
|
33
|
-
* Match a name against config patterns
|
|
34
|
-
*
|
|
35
|
-
* Priority:
|
|
36
|
-
* 1. Exact match: "sessions" matches "sessions"
|
|
37
|
-
* 2. Prefix patterns: "api-*" matches "api-v1", "api-v2" (longest first)
|
|
38
|
-
* 3. Catch-all: "*" matches everything
|
|
39
|
-
*
|
|
40
|
-
* Examples:
|
|
41
|
-
* matchPattern("sessions", {"sessions": {...}, "*": {...}}) → sessions config
|
|
42
|
-
* matchPattern("api-v1", {"api-*": {...}, "*": {...}}) → api-* config
|
|
43
|
-
* matchPattern("random", {"*": {...}}) → * config
|
|
44
|
-
*/
|
|
45
|
-
export declare function matchPattern<T>(name: string, config: Record<string, T>): T | undefined;
|
|
46
|
-
export interface CacheConfig {
|
|
47
|
-
provider?: string | number;
|
|
48
|
-
url?: string | number;
|
|
49
|
-
maxEntries?: string | number;
|
|
50
|
-
TTL?: string | number;
|
|
51
|
-
}
|
|
52
|
-
export interface BucketConfig {
|
|
53
|
-
provider?: string | number;
|
|
54
|
-
path?: string | number;
|
|
55
|
-
bucket?: string | number;
|
|
56
|
-
region?: string | number;
|
|
57
|
-
endpoint?: string | number;
|
|
58
|
-
}
|
|
59
|
-
/** Log level for filtering */
|
|
60
|
-
export type LogLevel = "debug" | "info" | "warning" | "error";
|
|
61
|
-
/** Sink configuration */
|
|
62
|
-
export interface SinkConfig {
|
|
63
|
-
provider: string;
|
|
64
|
-
/** Provider-specific options (path, maxSize, etc.) */
|
|
65
|
-
[key: string]: any;
|
|
66
|
-
}
|
|
67
|
-
/** Per-category logging configuration */
|
|
68
|
-
export interface CategoryLoggingConfig {
|
|
69
|
-
level?: LogLevel;
|
|
70
|
-
sinks?: SinkConfig[];
|
|
71
|
-
}
|
|
72
|
-
export interface LoggingConfig {
|
|
73
|
-
/** Default log level. Defaults to "info" */
|
|
74
|
-
level?: LogLevel;
|
|
75
|
-
/** Default sinks. Defaults to console */
|
|
76
|
-
sinks?: SinkConfig[];
|
|
77
|
-
/** Per-category config (inherits from top-level, can override level and/or sinks) */
|
|
78
|
-
categories?: Record<string, CategoryLoggingConfig>;
|
|
79
|
-
}
|
|
80
|
-
export interface ShovelConfig {
|
|
81
|
-
platform?: string;
|
|
82
|
-
port?: number | string;
|
|
83
|
-
host?: string;
|
|
84
|
-
workers?: number | string;
|
|
85
|
-
logging?: LoggingConfig;
|
|
86
|
-
caches?: Record<string, CacheConfig>;
|
|
87
|
-
buckets?: Record<string, BucketConfig>;
|
|
88
|
-
}
|
|
89
|
-
/** Processed logging config with all defaults applied */
|
|
90
|
-
export interface ProcessedLoggingConfig {
|
|
91
|
-
level: LogLevel;
|
|
92
|
-
sinks: SinkConfig[];
|
|
93
|
-
categories: Record<string, CategoryLoggingConfig>;
|
|
94
|
-
}
|
|
95
|
-
export interface ProcessedShovelConfig {
|
|
96
|
-
platform?: string;
|
|
97
|
-
port: number;
|
|
98
|
-
host: string;
|
|
99
|
-
workers: number;
|
|
100
|
-
logging: ProcessedLoggingConfig;
|
|
101
|
-
caches: Record<string, CacheConfig>;
|
|
102
|
-
buckets: Record<string, BucketConfig>;
|
|
103
|
-
}
|
|
104
|
-
/**
|
|
105
|
-
* Load Shovel configuration from shovel.json or package.json
|
|
106
|
-
* Priority: shovel.json > package.json "shovel" field > defaults
|
|
107
|
-
* @param cwd - Current working directory (must be provided by runtime adapter)
|
|
108
|
-
*/
|
|
109
|
-
export declare function loadConfig(cwd: string): ProcessedShovelConfig;
|
|
110
|
-
/**
|
|
111
|
-
* Configure LogTape logging based on Shovel config.
|
|
112
|
-
* Call this in both main thread and workers.
|
|
113
|
-
*
|
|
114
|
-
* @param loggingConfig - The logging configuration from ProcessedShovelConfig.logging
|
|
115
|
-
* @param options - Additional options
|
|
116
|
-
* @param options.reset - Whether to reset existing LogTape config (default: true)
|
|
117
|
-
* @param options.cwd - Working directory for resolving relative paths
|
|
118
|
-
*/
|
|
119
|
-
export declare function configureLogging(loggingConfig: ProcessedLoggingConfig, options?: {
|
|
120
|
-
reset?: boolean;
|
|
121
|
-
cwd?: string;
|
|
122
|
-
}): Promise<void>;
|
|
123
|
-
/**
|
|
124
|
-
* Get cache config for a specific cache name (with pattern matching)
|
|
125
|
-
*/
|
|
126
|
-
export declare function getCacheConfig(config: ProcessedShovelConfig, name: string): CacheConfig;
|
|
127
|
-
/**
|
|
128
|
-
* Get bucket config for a specific bucket name (with pattern matching)
|
|
129
|
-
*/
|
|
130
|
-
export declare function getBucketConfig(config: ProcessedShovelConfig, name: string): BucketConfig;
|
|
131
|
-
export interface BucketFactoryOptions {
|
|
132
|
-
/** Base directory for path resolution (entrypoint directory) - REQUIRED */
|
|
133
|
-
baseDir: string;
|
|
134
|
-
/** Shovel configuration for overrides */
|
|
135
|
-
config?: ProcessedShovelConfig;
|
|
136
|
-
}
|
|
137
|
-
/**
|
|
138
|
-
* Creates a bucket factory function for CustomBucketStorage.
|
|
139
|
-
* Lazily imports bucket implementations.
|
|
140
|
-
*
|
|
141
|
-
* Provider resolution:
|
|
142
|
-
* 1. "node" -> built-in NodeBucket (default)
|
|
143
|
-
* 2. "memory" -> built-in MemoryBucket
|
|
144
|
-
* 3. "s3" -> @b9g/filesystem-s3 (blessed module)
|
|
145
|
-
* 4. Any other string -> treated as a module name (e.g., "my-custom-bucket")
|
|
146
|
-
*
|
|
147
|
-
* Custom bucket modules must export a class that:
|
|
148
|
-
* - Implements FileSystemDirectoryHandle
|
|
149
|
-
* - Has constructor(name: string, options?: object)
|
|
150
|
-
*/
|
|
151
|
-
export declare function createBucketFactory(options: BucketFactoryOptions): (name: string) => Promise<FileSystemDirectoryHandle>;
|
|
152
|
-
export interface CacheFactoryOptions {
|
|
153
|
-
/** Shovel configuration for cache settings */
|
|
154
|
-
config?: ProcessedShovelConfig;
|
|
155
|
-
/** Default provider when not specified in config. Defaults to "memory". */
|
|
156
|
-
defaultProvider?: string;
|
|
157
|
-
}
|
|
158
|
-
/**
|
|
159
|
-
* Creates a cache factory function for CustomCacheStorage.
|
|
160
|
-
* Lazily imports cache implementations.
|
|
161
|
-
*
|
|
162
|
-
* Provider resolution:
|
|
163
|
-
* 1. "memory" -> built-in MemoryCache
|
|
164
|
-
* 2. "redis" -> @b9g/cache-redis (blessed module)
|
|
165
|
-
* 3. "cloudflare" -> native Cloudflare caches.open(name)
|
|
166
|
-
* 4. Any other string -> treated as a module name (e.g., "my-custom-cache")
|
|
167
|
-
*
|
|
168
|
-
* Custom cache modules must export a class that:
|
|
169
|
-
* - Extends Cache (from @b9g/cache)
|
|
170
|
-
* - Has constructor(name: string, options?: object)
|
|
171
|
-
*/
|
|
172
|
-
export declare function createCacheFactory(options?: CacheFactoryOptions): (name: string) => Promise<Cache>;
|