@zenbujs/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +11 -0
- package/dist/advice-config-CjgkEf2E.mjs +135 -0
- package/dist/advice-config-Cy133IQP.mjs +2 -0
- package/dist/advice-runtime.d.mts +35 -0
- package/dist/advice-runtime.mjs +131 -0
- package/dist/advice.d.mts +36 -0
- package/dist/advice.mjs +2 -0
- package/dist/base-window-BUt8pwbw.mjs +94 -0
- package/dist/base-window-DEIAk618.mjs +2 -0
- package/dist/build-config-pbv0w4oN.mjs +17 -0
- package/dist/build-electron-B4Gd0Gi4.mjs +516 -0
- package/dist/build-source-_q1n1zTV.mjs +162 -0
- package/dist/chunk-Dm34NbLt.mjs +6 -0
- package/dist/cli/bin.d.mts +1 -0
- package/dist/cli/bin.mjs +88 -0
- package/dist/cli/build.d.mts +53 -0
- package/dist/cli/build.mjs +48 -0
- package/dist/cli-BLbQQIVB.mjs +8054 -0
- package/dist/config-CdVrW85P.mjs +59 -0
- package/dist/config-LK73dJmO.mjs +2 -0
- package/dist/db-ByKPbnP6.mjs +2 -0
- package/dist/db-DhuAJrye.mjs +531 -0
- package/dist/db.d.mts +16 -0
- package/dist/db.mjs +16 -0
- package/dist/dev-BuqklM0k.mjs +85 -0
- package/dist/env-bootstrap-BtVME-CU.d.mts +16 -0
- package/dist/env-bootstrap-rj7I-59x.mjs +53 -0
- package/dist/env-bootstrap.d.mts +2 -0
- package/dist/env-bootstrap.mjs +2 -0
- package/dist/http-IBcLzbYu.mjs +2 -0
- package/dist/index-Bhlbyrn7.d.mts +63 -0
- package/dist/index-CPZ5d6Hl.d.mts +442 -0
- package/dist/index-FtE8MXJ_.d.mts +1 -0
- package/dist/index.d.mts +6 -0
- package/dist/index.mjs +5 -0
- package/dist/launcher.mjs +173 -0
- package/dist/link-6roQ7Cn6.mjs +580 -0
- package/dist/loaders/zenbu.d.mts +22 -0
- package/dist/loaders/zenbu.mjs +267 -0
- package/dist/log-CyKv8hQg.mjs +20 -0
- package/dist/mirror-sync-CodOnwkD.mjs +332 -0
- package/dist/monorepo-CmGPHsVm.mjs +119 -0
- package/dist/node-D4M19_mV.mjs +5 -0
- package/dist/node-loader.d.mts +17 -0
- package/dist/node-loader.mjs +33 -0
- package/dist/pause-DvAUNmKn.mjs +52 -0
- package/dist/publish-source-BVgB62Zj.mjs +131 -0
- package/dist/react.d.mts +76 -0
- package/dist/react.mjs +291 -0
- package/dist/registry-Dh_e7HU1.d.mts +61 -0
- package/dist/registry.d.mts +2 -0
- package/dist/registry.mjs +1 -0
- package/dist/reloader-BCkLjDhS.mjs +2 -0
- package/dist/reloader-lLAJ3lqg.mjs +164 -0
- package/dist/renderer-host-Bg8QdeeH.mjs +1508 -0
- package/dist/renderer-host-DpvBPTHJ.mjs +2 -0
- package/dist/rpc-BwwQK6hD.mjs +71 -0
- package/dist/rpc-CqitnyR4.mjs +2 -0
- package/dist/rpc.d.mts +2 -0
- package/dist/rpc.mjs +2 -0
- package/dist/runtime-CjqDr8Yf.d.mts +109 -0
- package/dist/runtime-DUFKDIe4.mjs +409 -0
- package/dist/runtime.d.mts +2 -0
- package/dist/runtime.mjs +2 -0
- package/dist/schema-CIg4GzHQ.mjs +100 -0
- package/dist/schema-DMoSkwUx.d.mts +62 -0
- package/dist/schema-dGK6qkfR.mjs +28 -0
- package/dist/schema.d.mts +2 -0
- package/dist/schema.mjs +2 -0
- package/dist/server-BXwZEQ-n.mjs +66 -0
- package/dist/server-DjrZUbbu.mjs +2 -0
- package/dist/services/default.d.mts +11 -0
- package/dist/services/default.mjs +22 -0
- package/dist/services/index.d.mts +276 -0
- package/dist/services/index.mjs +7 -0
- package/dist/setup-gate-BeD6WS6d.mjs +110 -0
- package/dist/setup-gate-BqOzm7zp.d.mts +4 -0
- package/dist/setup-gate.d.mts +2 -0
- package/dist/setup-gate.mjs +2 -0
- package/dist/src-pELM4_iH.mjs +376 -0
- package/dist/trace-DCB7qFzT.mjs +10 -0
- package/dist/transform-DJH3vN4b.mjs +84041 -0
- package/dist/transport-BMSzG2-F.mjs +1045 -0
- package/dist/view-registry-BualWgAf.mjs +2 -0
- package/dist/vite-plugins-Bh3SCOw-.mjs +331 -0
- package/dist/vite.d.mts +68 -0
- package/dist/vite.mjs +2 -0
- package/dist/window-CM2a9Kyc.mjs +2 -0
- package/dist/window-CmmpCVX6.mjs +156 -0
- package/dist/write-9dRFczGJ.mjs +1248 -0
- package/migrations/0000_migration.ts +34 -0
- package/migrations/meta/0000_snapshot.json +18 -0
- package/migrations/meta/_journal.json +10 -0
- package/package.json +124 -0
|
@@ -0,0 +1,1508 @@
|
|
|
1
|
+
import { i as runtime, t as Service } from "./runtime-DUFKDIe4.mjs";
|
|
2
|
+
import { o as getZodDefault } from "./schema-CIg4GzHQ.mjs";
|
|
3
|
+
import { t as schema } from "./schema-dGK6qkfR.mjs";
|
|
4
|
+
import { t as createLogger } from "./log-CyKv8hQg.mjs";
|
|
5
|
+
import { t as ServerService } from "./server-BXwZEQ-n.mjs";
|
|
6
|
+
import { n as DB_CONFIG_JSON, r as INTERNAL_DIR, t as ReloaderService } from "./reloader-lLAJ3lqg.mjs";
|
|
7
|
+
import { a as createBlob, c as makeErrorAck, g as layer$1, h as writeJsonFile, i as cleanupStaleTmpFiles, l as paths, m as validateSession, n as makeRootCache, o as createCollection, p as sendAck, r as broadcastDbUpdate, s as makeAck, t as handleWrite, u as readCollectionItemRange, v as FileSystem } from "./write-9dRFczGJ.mjs";
|
|
8
|
+
import { t as traceKyju } from "./trace-DCB7qFzT.mjs";
|
|
9
|
+
import { a as createClient, i as dbStringify, n as createRouter, o as createEffectClient, r as dbParse, s as createReplica } from "./transport-BMSzG2-F.mjs";
|
|
10
|
+
import fs from "node:fs";
|
|
11
|
+
import os from "node:os";
|
|
12
|
+
import path from "node:path";
|
|
13
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
14
|
+
import http from "node:http";
|
|
15
|
+
import fsp from "node:fs/promises";
|
|
16
|
+
import * as Effect from "effect/Effect";
|
|
17
|
+
import * as Ref from "effect/Ref";
|
|
18
|
+
import { nanoid } from "nanoid";
|
|
19
|
+
//#region ../../node_modules/.pnpm/@effect+platform-node@0.104.1_@effect+cluster@0.56.4_@effect+platform@0.94.5_effect@3.2_4dd4c94e7ca0ae112861b6965a5da5f4/node_modules/@effect/platform-node/dist/esm/NodeFileSystem.js
/**
 * Node.js-backed FileSystem layer, re-exported under a local name so the
 * rest of this bundle can provide it to Effect programs (see `createDb`).
 * @since 1.0.0
 * @category layer
 */
const layer = layer$1;
//#endregion
|
|
29
|
+
//#region ../kyju/src/v2/db/handlers/connect.ts
|
|
30
|
+
// Handles a "connect" request from a replica. Gated on the DB latch so no
// client is admitted until initialization and plugins have finished
// (the latch is opened at the end of createDbEffect).
const handleConnect = (ctx, event, latch) => latch.whenOpen(Effect.gen(function* () {
	const msg = event.message;
	const { replicaId } = msg;
	// Only protocol version 0 is supported; anything else gets a typed error ack.
	if (msg.version !== 0) {
		ctx.dbSend({
			kind: "db-update",
			replicaId,
			message: makeErrorAck({
				requestId: msg.requestId,
				_tag: "VersionMismatchError",
				message: `Expected version 0, got ${msg.version}`
			})
		});
		return;
	}
	const sessionId = nanoid();
	// Snapshot of the current root is returned so the client starts with state.
	const root = yield* ctx.rootCache.read();
	const session = {
		sessionId,
		replicaId,
		subscriptions: /* @__PURE__ */ new Set(),
		// All outbound traffic for this session is routed through dbSend,
		// tagged with the owning replica id.
		send: (event) => ctx.dbSend({
			...event,
			replicaId
		})
	};
	// Copy-on-write update keeps the Ref modification pure.
	yield* Ref.update(ctx.sessionsRef, (sessions) => {
		const next = new Map(sessions);
		next.set(sessionId, session);
		return next;
	});
	sendAck({
		session,
		ack: makeAck({
			requestId: msg.requestId,
			sessionId,
			data: { root }
		})
	});
}));
|
|
70
|
+
//#endregion
|
|
71
|
+
//#region ../kyju/src/v2/db/handlers/disconnect.ts
|
|
72
|
+
// Handles a "disconnect" request: validates the session, removes it from the
// session map, and acks. Fails the effect with "INVALID_SESSION" when the
// session id is unknown (after reporting a typed error ack to the sender).
const handleDisconnect = (ctx, event) => Effect.gen(function* () {
	const msg = event.message;
	const session = (yield* Ref.get(ctx.sessionsRef)).get(msg.sessionId);
	if (!session) {
		// No session means no session.send to route through — reply via dbSend.
		ctx.dbSend({
			kind: "db-update",
			replicaId: event.replicaId,
			message: makeErrorAck({
				requestId: msg.requestId,
				sessionId: msg.sessionId,
				_tag: "InvalidSessionError",
				message: "Invalid session"
			})
		});
		return yield* Effect.fail("INVALID_SESSION");
	}
	// Copy-on-write removal keeps the Ref update pure.
	yield* Ref.update(ctx.sessionsRef, (s) => {
		const next = new Map(s);
		next.delete(msg.sessionId);
		return next;
	});
	sendAck({
		session,
		ack: makeAck({
			requestId: msg.requestId,
			sessionId: msg.sessionId
		})
	});
});
|
|
101
|
+
//#endregion
|
|
102
|
+
//#region ../kyju/src/v2/db/handlers/subscribe.ts
|
|
103
|
+
// Handles "subscribe-collection": lazily creates the collection on first
// subscribe, records the subscription on the session, and acks with the
// collection's items. The whole body runs under the collection mutex so it
// cannot interleave with concurrent collection writes.
const handleSubscribe = (ctx, event) => Effect.gen(function* () {
	const session = yield* validateSession(ctx, event.sessionId, event.requestId, event.replicaId);
	yield* ctx.collectionMutex.withPermits(1)(Effect.gen(function* () {
		const collectionDir = paths.collection({
			config: ctx.config,
			collectionId: event.collectionId
		});
		// Subscribing to a collection that doesn't exist yet creates it.
		if (!(yield* ctx.fs.exists(collectionDir))) yield* createCollection({
			fs: ctx.fs,
			config: ctx.config,
			collectionId: event.collectionId
		});
		session.subscriptions.add(event.collectionId);
		// No range arguments given — presumably returns the full collection;
		// confirm against readCollectionItemRange's defaults.
		const { items, totalCount } = yield* readCollectionItemRange({
			fs: ctx.fs,
			config: ctx.config,
			collectionId: event.collectionId
		});
		sendAck({
			session,
			ack: makeAck({
				requestId: event.requestId,
				sessionId: event.sessionId,
				data: {
					items,
					totalCount
				}
			})
		});
	}));
});
|
|
134
|
+
//#endregion
|
|
135
|
+
//#region ../kyju/src/v2/db/handlers/unsubscribe.ts
|
|
136
|
+
// Handles "unsubscribe-collection": drops the subscription from the session's
// set and acknowledges the request. Unknown sessions fail inside
// validateSession before any state is touched.
const handleUnsubscribe = (ctx, event) => Effect.gen(function* () {
	const activeSession = yield* validateSession(ctx, event.sessionId, event.requestId, event.replicaId);
	activeSession.subscriptions.delete(event.collectionId);
	const ack = makeAck({
		requestId: event.requestId,
		sessionId: event.sessionId
	});
	sendAck({ session: activeSession, ack });
});
|
|
147
|
+
//#endregion
|
|
148
|
+
//#region ../kyju/src/v2/db/handlers/read.ts
|
|
149
|
+
// Handles "read" requests: ranged reads over a collection, or whole-blob
// reads. Each branch is serialized by its own mutex so reads don't interleave
// with concurrent writes to the same storage area.
const handleRead = (ctx, event) => Effect.gen(function* () {
	const session = yield* validateSession(ctx, event.sessionId, event.requestId, event.replicaId);
	const readOp = event.op;
	switch (readOp.type) {
		case "collection.fetch-range":
			yield* ctx.collectionMutex.withPermits(1)(Effect.gen(function* () {
				const collectionDir = paths.collection({
					config: ctx.config,
					collectionId: readOp.collectionId
				});
				// Unlike subscribe, a read does NOT lazily create the collection —
				// a missing one is a NotFoundError.
				if (!(yield* ctx.fs.exists(collectionDir))) {
					sendAck({
						session,
						ack: makeErrorAck({
							requestId: event.requestId,
							sessionId: event.sessionId,
							_tag: "NotFoundError",
							message: `Collection ${readOp.collectionId} not found`
						})
					});
					return;
				}
				const { items, totalCount } = yield* readCollectionItemRange({
					fs: ctx.fs,
					config: ctx.config,
					collectionId: readOp.collectionId,
					start: readOp.range.start,
					end: readOp.range.end
				});
				sendAck({
					session,
					ack: makeAck({
						requestId: event.requestId,
						sessionId: event.sessionId,
						data: {
							items,
							totalCount
						}
					})
				});
			}));
			return;
		case "blob.read":
			yield* ctx.blobMutex.withPermits(1)(Effect.gen(function* () {
				if (!(yield* ctx.fs.exists(paths.blob({
					config: ctx.config,
					blobId: readOp.blobId
				})))) {
					sendAck({
						session,
						ack: makeErrorAck({
							requestId: event.requestId,
							sessionId: event.sessionId,
							_tag: "NotFoundError",
							message: `Blob ${readOp.blobId} not found`
						})
					});
					return;
				}
				// Blobs are returned whole; there is no range support on this path.
				const data = yield* ctx.fs.readFile(paths.blobData({
					config: ctx.config,
					blobId: readOp.blobId
				}));
				sendAck({
					session,
					ack: makeAck({
						requestId: event.requestId,
						sessionId: event.sessionId,
						data: { data }
					})
				});
			}));
			return;
	}
});
|
|
224
|
+
//#endregion
|
|
225
|
+
//#region ../kyju/src/v2/db/handlers/plugins.ts
|
|
226
|
+
// Runs `onBeforeStart` plugin hooks against an in-process replica, before the
// DB is opened to external clients. The replica is force-connected (no
// handshake) and returned so the caller can build public clients from it.
const runPlugins = (ctx, postMessageEffect, plugins) => Effect.gen(function* () {
	const sessionId = nanoid();
	const root = yield* ctx.rootCache.read();
	const replica = createReplica({
		send: (event) => {
			// Fire-and-forget: replica traffic loops back into the DB's own
			// message handler (errors are swallowed inside postMessageEffect).
			Effect.runPromise(postMessageEffect(event));
		},
		maxPageSizeBytes: ctx.config.maxPageSize
	});
	const session = {
		sessionId,
		replicaId: replica.replicaId,
		subscriptions: /* @__PURE__ */ new Set(),
		send: (event) => {
			replica.postMessage(event);
		}
	};
	// Register the local session alongside any future external ones.
	yield* Ref.update(ctx.sessionsRef, (s) => {
		const next = new Map(s);
		next.set(sessionId, session);
		return next;
	});
	// Skip the normal connect protocol — state is forced directly since we
	// already hold the root snapshot.
	replica._forceState({
		kind: "connected",
		sessionId,
		root,
		collections: [],
		blobs: []
	});
	const client = createClient(replica);
	// Hooks run sequentially, in plugin order; each is traced by name.
	for (const plugin of plugins) {
		if (!plugin.onBeforeStart) continue;
		yield* traceKyju("kyju:db.plugin.onBeforeStart", Effect.promise(() => plugin.onBeforeStart({
			client,
			pluginPath: ["_plugins", plugin.name]
		})), { plugin: plugin.name });
	}
	return replica;
});
|
|
265
|
+
//#endregion
|
|
266
|
+
//#region ../kyju/src/v2/migrations.ts
|
|
267
|
+
/**
|
|
268
|
+
* Returns true if `current` is "structurally equivalent" to `oldDefault`,
|
|
269
|
+
* tolerating extra keys on objects that `oldDefault` didn't know about.
|
|
270
|
+
*
|
|
271
|
+
* Why this exists: when an alter op carries a `default` change, apply()
|
|
272
|
+
* uses this to decide whether the current DB value is "still the stale
|
|
273
|
+
* default" and therefore safe to overwrite with the new one. Strict
|
|
274
|
+
* equality would skip the overwrite whenever an intermediate migration
|
|
275
|
+
* (via a custom `migrate()`) enriched entries with new schema fields —
|
|
276
|
+
* which is common. That bug left users stuck on old defaults indefinitely.
|
|
277
|
+
*
|
|
278
|
+
* For objects: every key in `oldDefault` must be present and match in
|
|
279
|
+
* `current`; `current` is allowed to have *additional* keys (enrichment
|
|
280
|
+
* artifacts). For arrays: same length + elementwise subset match. For
|
|
281
|
+
* primitives: strict equality.
|
|
282
|
+
*/
|
|
283
|
+
function isDefaultSubsetMatch(current, oldDefault) {
	// Primitives (and null/undefined): plain strict equality.
	if (oldDefault === null || oldDefault === undefined || typeof oldDefault !== "object") {
		return current === oldDefault;
	}
	// Arrays: same length plus elementwise subset match.
	if (Array.isArray(oldDefault)) {
		return Array.isArray(current)
			&& current.length === oldDefault.length
			&& oldDefault.every((element, index) => isDefaultSubsetMatch(current[index], element));
	}
	// Objects: `current` must be a plain object containing every key of
	// `oldDefault` with a matching value; extra keys on `current` are
	// tolerated (enrichment artifacts from custom migrations).
	if (current === null || typeof current !== "object" || Array.isArray(current)) {
		return false;
	}
	return Object.keys(oldDefault).every(
		(key) => key in current && isDefaultSubsetMatch(current[key], oldDefault[key])
	);
}
|
|
297
|
+
// Applies declarative migration operations to a shallow copy of `data`.
// "add" is idempotent (existing keys are never clobbered); "alter" only
// replaces the value when it still matches the stale default (see
// isDefaultSubsetMatch). The input object is never mutated.
function applyOperations(data, ops) {
	const out = { ...data };
	for (const op of ops) {
		if (op.op === "remove") {
			delete out[op.key];
			continue;
		}
		if (op.op === "alter") {
			const change = op.changes.default;
			// Overwrite only if the current value is still the old default.
			if (change && isDefaultSubsetMatch(out[op.key], change.from)) {
				out[op.key] = change.to;
			}
			continue;
		}
		// "add": skip when the key already exists (idempotent re-runs).
		if (op.op !== "add" || op.key in out) continue;
		if (op.kind === "collection") {
			out[op.key] = {
				collectionId: nanoid(),
				debugName: op.debugName ?? op.key
			};
		} else if (op.kind === "blob") {
			out[op.key] = {
				blobId: nanoid(),
				debugName: op.debugName ?? op.key
			};
		} else {
			out[op.key] = op.hasDefault ? op.default : undefined;
		}
	}
	return out;
}
|
|
328
|
+
//#endregion
|
|
329
|
+
//#region ../kyju/src/v2/core-plugins/migration.ts
|
|
330
|
+
// Whole-DB migration plugin: runs every pending migration in order, tracking
// a single version counter stored under `_plugins.kyjuMigrator.version`.
const migrationPlugin = (migrations) => ({
	name: "kyjuMigrator",
	onBeforeStart: async ({ client, pluginPath }) => {
		// Target version is simply the number of declared migrations.
		const targetVersion = migrations.length;
		const pluginState = (client.readRoot() ?? {})?._plugins?.kyjuMigrator ?? {};
		// Tolerate malformed/missing state by treating it as version 0.
		const currentVersion = typeof pluginState === "object" && pluginState !== null ? pluginState.version ?? 0 : 0;
		if (currentVersion >= targetVersion) return;
		for (let v = currentVersion; v < targetVersion; v++) {
			const migration = migrations[v];
			const ops = migration.operations ?? [];
			// 1) Drop removed collections/blobs before rewriting the root.
			const removeOps = ops.filter((o) => o.op === "remove" && (o.kind === "collection" || o.kind === "blob"));
			for (const op of removeOps) await client[op.key]?.delete?.();
			const currentRoot = client.readRoot();
			const apply = (data) => applyOperations(data, ops);
			let newRoot;
			// A custom migrate() gets full control and receives `apply` so it can
			// run the declarative ops whenever it chooses.
			if (migration.migrate) newRoot = migration.migrate(currentRoot, { apply });
			else newRoot = apply(currentRoot);
			await client.update(() => newRoot);
			// 2) Materialize added collections/blobs after the root is rewritten.
			const addCollectionOps = ops.filter((o) => o.op === "add" && o.kind === "collection");
			for (const op of addCollectionOps) await client[op.key]?.create?.();
			const addBlobOps = ops.filter((o) => o.op === "add" && o.kind === "blob");
			for (const op of addBlobOps) await client[op.key]?.create?.(new Uint8Array(0));
			if (migration.afterMigrate) await migration.afterMigrate({ client });
			// 3) Persist the new version so a crash mid-sequence resumes from here
			//    rather than re-running completed steps.
			await client.update((r) => {
				let target = r;
				for (const segment of pluginPath) {
					if (!target[segment]) target[segment] = {};
					target = target[segment];
				}
				target.version = v + 1;
			});
		}
	}
});
|
|
364
|
+
// Per-section migration plugin: like migrationPlugin, but every named section
// migrates independently with its own version counter stored under
// `_plugins.sectionMigrator.<name>`. Section data lives under
// `root.plugin.<name>`, and section-scoped accessors under `client.plugin`.
const sectionMigrationPlugin = (sections) => ({
	name: "sectionMigrator",
	onBeforeStart: async ({ client, pluginPath }) => {
		for (const section of sections) {
			const { name, migrations } = section;
			const targetVersion = migrations.length;
			const pluginState = (client.readRoot() ?? {})?._plugins?.sectionMigrator?.[name] ?? {};
			// Tolerate malformed/missing state by treating it as version 0.
			const currentVersion = typeof pluginState === "object" && pluginState !== null ? pluginState.version ?? 0 : 0;
			if (currentVersion >= targetVersion) continue;
			for (let v = currentVersion; v < targetVersion; v++) {
				const migration = migrations[v];
				const ops = migration.operations ?? [];
				// 1) Drop removed collections/blobs before rewriting section data.
				const removeOps = ops.filter((o) => o.op === "remove" && (o.kind === "collection" || o.kind === "blob"));
				for (const op of removeOps) await client.plugin?.[name]?.[op.key]?.delete?.();
				const sectionData = client.readRoot()?.plugin?.[name] ?? {};
				const apply = (data) => applyOperations(data, ops);
				let newSectionData;
				// Custom migrate() transforms the section's data itself, receiving
				// `apply` to run the declarative ops when it wants them.
				if (migration.migrate) newSectionData = migration.migrate(sectionData, { apply });
				else newSectionData = apply(sectionData);
				await client.update((r) => {
					if (!r.plugin) r.plugin = {};
					r.plugin[name] = newSectionData;
				});
				// 2) Materialize added collections/blobs after the data update.
				const addCollectionOps = ops.filter((o) => o.op === "add" && o.kind === "collection");
				for (const op of addCollectionOps) await client.plugin?.[name]?.[op.key]?.create?.();
				const addBlobOps = ops.filter((o) => o.op === "add" && o.kind === "blob");
				for (const op of addBlobOps) await client.plugin?.[name]?.[op.key]?.create?.(new Uint8Array(0));
				if (migration.afterMigrate) {
					// afterMigrate receives the section-scoped client, not the root one.
					const sectionClient = client.plugin?.[name];
					await migration.afterMigrate({ client: sectionClient });
				}
				// 3) Persist the per-section version so this step never re-runs.
				await client.update((r) => {
					let target = r;
					for (const segment of [...pluginPath, name]) {
						if (!target[segment]) target[segment] = {};
						target = target[segment];
					}
					target.version = v + 1;
				});
			}
		}
	}
});
|
|
407
|
+
//#endregion
|
|
408
|
+
//#region ../kyju/src/v2/db/db.ts
|
|
409
|
+
// Thrown when a second process tries to open a DB directory that is already
// held by a live writer (see acquireLock).
var DbLockedError = class extends Error {
	constructor(dbPath, holder) {
		const lockFile = path.join(dbPath, ".lock");
		super(`Database at ${dbPath} is locked by pid ${holder.pid} on host ${holder.hostname} (started ${holder.startedAt}).\nkyju refuses to open a second concurrent writer to avoid clobbering its flushes.\nQuit the running process and try again, or remove ${lockFile} if you're sure no process is using this DB.`);
		this.name = "DbLockedError";
	}
};
|
|
415
|
+
/** Path of the advisory lock file inside a DB directory. */
function lockPath(dbPath) {
	const LOCK_FILE_NAME = ".lock";
	return path.join(dbPath, LOCK_FILE_NAME);
}
|
|
418
|
+
/**
 * Best-effort liveness check for a local pid.
 * Signal 0 performs existence/permission checks without delivering a signal.
 * ESRCH means "no such process"; any other failure (e.g. EPERM) means the
 * process exists but we can't signal it — treated as alive.
 */
function isPidAlive(pid) {
	let alive = true;
	try {
		process.kill(pid, 0);
	} catch (err) {
		alive = err.code !== "ESRCH";
	}
	return alive;
}
|
|
426
|
+
/**
 * Reads and parses the lock file for a DB directory.
 * Returns the parsed holder record, or null when the file is missing or
 * cannot be parsed — both are treated as "not locked".
 */
async function readLock(dbPath) {
	try {
		const contents = await fs.promises.readFile(lockPath(dbPath), "utf8");
		return JSON.parse(contents);
	} catch {
		return null;
	}
}
|
|
434
|
+
/**
 * Acquires the single-writer advisory lock for `dbPath`.
 * Re-acquisition by the same pid on the same host is allowed; a dead
 * same-host holder is silently stolen; a live same-host holder or ANY
 * cross-host holder (whose liveness can't be checked) throws DbLockedError.
 * Returns the random nonce written into the lock file, which release paths
 * use to make sure they only delete their own lock.
 */
async function acquireLock(dbPath) {
	await fs.promises.mkdir(dbPath, { recursive: true });
	const existing = await readLock(dbPath);
	if (existing) {
		const sameHost = existing.hostname === os.hostname();
		if (!(sameHost && existing.pid === process.pid)) {
			// Cross-host locks always block; same-host locks block only while
			// the holder pid is still alive.
			if (!sameHost || isPidAlive(existing.pid)) throw new DbLockedError(dbPath, existing);
		}
	}
	const nonce = nanoid();
	const payload = {
		pid: process.pid,
		hostname: os.hostname(),
		startedAt: (/* @__PURE__ */ new Date()).toISOString(),
		nonce
	};
	// NOTE(review): read-then-write is not atomic — two processes racing here
	// could both believe they acquired the lock. Presumably acceptable for an
	// advisory lock; confirm whether an atomic `wx` create is wanted upstream.
	await fs.promises.writeFile(lockPath(dbPath), JSON.stringify(payload));
	return nonce;
}
|
|
453
|
+
/**
 * Releases the advisory lock, but only if it is still ours: the on-disk
 * nonce must match `ourNonce`, otherwise a newer owner's lock is left alone.
 */
async function releaseLock(dbPath, ourNonce) {
	const holder = await readLock(dbPath);
	if (!holder || holder.nonce !== ourNonce) return;
	try {
		await fs.promises.unlink(lockPath(dbPath));
	} catch {
		// Already gone — nothing to release.
	}
}
|
|
460
|
+
/**
|
|
461
|
+
* Sync release for the `process.on("exit")` path. Async callbacks don't
|
|
462
|
+
* run during process exit (the event loop is already torn down), so
|
|
463
|
+
* we have no choice but to use sync fs here. Mirrors `releaseLock`'s
|
|
464
|
+
* nonce check exactly.
|
|
465
|
+
*/
|
|
466
|
+
function releaseLockOnExit(dbPath, ourNonce) {
	let holder;
	try {
		holder = JSON.parse(fs.readFileSync(lockPath(dbPath), "utf8"));
	} catch {
		// Missing or unreadable lock file — nothing to release.
		return;
	}
	// Same nonce guard as releaseLock: only delete a lock we still own.
	if (!holder || holder.nonce !== ourNonce) return;
	try {
		fs.unlinkSync(lockPath(dbPath));
	} catch {}
}
|
|
478
|
+
// On-disk layout names for a kyju DB directory. Spread as the base of the
// runtime config in createDbEffect (path/tmpDir/size limits are added there).
const DEFAULT_CONFIG = {
	rootName: "root",
	collectionsDirName: "collections",
	collectionIndexName: "index",
	pagesDirName: "pages",
	pageIndexName: "index",
	pageDataName: "data",
	blobsDirName: "blobs",
	blobIndexName: "index",
	blobDataName: "data"
};
|
|
489
|
+
// Builds the initial root object for a schema: collection refs get an empty
// placeholder id (filled in by finalizeAndWriteRoot), blobs are created
// eagerly with empty data, and plain fields take their declared defaults.
// Written as a plain generator so callers can `yield*` it inside Effect.gen.
const buildSchemaRoot = function* (fs, config, schema) {
	const root = {};
	for (const [key, entry] of Object.entries(schema.shape)) {
		// Entries may be wrapped ({ schema, ... }) or bare schema objects.
		const fs_obj = entry && typeof entry === "object" && "schema" in entry ? entry.schema : entry;
		if (fs_obj.__kyjuCollectionRef) {
			const dn = fs_obj._debugName;
			// Empty collectionId marks "pending": finalizeAndWriteRoot assigns
			// the real id and creates the collection on disk.
			root[key] = {
				collectionId: "",
				debugName: typeof dn === "string" ? dn : key
			};
		} else if (fs_obj.type === "blob" || entry?.type === "blob") {
			const blobId = nanoid();
			root[key] = {
				blobId,
				debugName: fs_obj.debugName ?? entry?.debugName ?? key
			};
			// Blobs are materialized immediately with empty content.
			yield* createBlob({
				fs,
				config,
				blobId,
				data: new Uint8Array(0)
			});
		} else if (entry._hasDefault) root[key] = entry._defaultValue;
		else {
			// Fall back to a zod-declared default, if the schema has one.
			const def = getZodDefault(entry);
			if (def.hasDefault) root[key] = def.value;
		}
	}
	return root;
};
|
|
519
|
+
// Walks the root recursively, assigns real ids to pending collection refs
// (those with an empty collectionId — see buildSchemaRoot), creates the
// backing collections on disk, then persists the finalized root file.
const finalizeAndWriteRoot = function* (fs, config, root) {
	const pendingCollections = [];
	// Pure recursive rewrite: returns a copy, collecting the new ids as it goes.
	const initNestedCollections = (obj) => {
		if (obj == null || typeof obj !== "object") return obj;
		if (Array.isArray(obj)) return obj.map(initNestedCollections);
		// A pending ref looks like { collectionId: "", debugName: string }.
		if (typeof obj.collectionId === "string" && typeof obj.debugName === "string" && !obj.collectionId) {
			const collectionId = nanoid();
			pendingCollections.push({ collectionId });
			return {
				...obj,
				collectionId
			};
		}
		const result = {};
		for (const [k, v] of Object.entries(obj)) result[k] = initNestedCollections(v);
		return result;
	};
	const finalRoot = initNestedCollections(root);
	for (const { collectionId } of pendingCollections) yield* createCollection({
		fs,
		config,
		collectionId
	});
	yield* writeJsonFile({
		fs,
		config,
		path: paths.root({ config }),
		data: finalRoot
	});
};
|
|
549
|
+
// Creates the DB directory and initial root from `schema`, unless a root
// file already exists — i.e. idempotent across restarts.
const initializeDbIfNeeded = (fs, config, schema) => Effect.gen(function* () {
	const rootPath = paths.root({ config });
	if (yield* fs.exists(rootPath)) return;
	yield* fs.makeDirectory(config.dbPath, { recursive: true });
	yield* finalizeAndWriteRoot(fs, config, yield* buildSchemaRoot(fs, config, schema));
});
|
|
555
|
+
// Sectioned variant of initializeDbIfNeeded: each section's schema root is
// built independently and nested under `plugin.<sectionName>` in the final
// root. Also idempotent — skipped entirely when the root file already exists.
const initializeSectionedDbIfNeeded = (fs, config, sections) => Effect.gen(function* () {
	const rootPath = paths.root({ config });
	if (yield* fs.exists(rootPath)) return;
	yield* fs.makeDirectory(config.dbPath, { recursive: true });
	const sectionsData = {};
	for (const section of sections) sectionsData[section.name] = yield* buildSchemaRoot(fs, config, section.schema);
	yield* finalizeAndWriteRoot(fs, config, { plugin: sectionsData });
});
|
|
563
|
+
// Main DB constructor: acquires the single-writer lock, initializes on-disk
// state, runs migration + user plugins, then returns the public DB handle
// (postMessage / reconnectClients / flush / close / client / effectClient).
const createDbEffect = (userConfig) => Effect.gen(function* () {
	const fs = yield* FileSystem;
	const config = {
		...DEFAULT_CONFIG,
		dbPath: userConfig.path,
		tmpDir: path.join(userConfig.path, ".tmp"),
		maxPageSize: userConfig.maxPageSize ?? 1024 * 1024,
		checkReferences: userConfig.checkReferences ?? false
	};
	// Refuse a second concurrent writer; DbLockedError surfaces via `catch`.
	const lockNonce = yield* Effect.tryPromise({
		try: () => acquireLock(config.dbPath),
		catch: (e) => e
	});
	yield* fs.makeDirectory(config.tmpDir, { recursive: true });
	yield* traceKyju("kyju:db.init.dir-setup", Effect.gen(function* () {
		if (userConfig.sections) {
			// Duplicate section names would silently collide under root.plugin —
			// fail fast instead.
			const names = userConfig.sections.map((s) => s.name);
			const dupes = names.filter((n, i) => names.indexOf(n) !== i);
			if (dupes.length > 0) throw new Error(`Duplicate section names: ${[...new Set(dupes)].join(", ")}`);
			yield* initializeSectionedDbIfNeeded(fs, config, userConfig.sections);
		} else if (userConfig.schema) yield* initializeDbIfNeeded(fs, config, userConfig.schema);
	}));
	// Leftover tmp files from a crashed previous run are swept best-effort.
	yield* traceKyju("kyju:db.init.tmp-cleanup", cleanupStaleTmpFiles(fs, config.tmpDir).pipe(Effect.catchAll((err) => {
		console.error("[kyju:db] tmp sweep failed (non-fatal):", err);
		return Effect.void;
	})));
	const sessionsRef = yield* Ref.make(/* @__PURE__ */ new Map());
	// One-permit semaphores act as mutexes over each storage area.
	const rootMutex = yield* Effect.makeSemaphore(1);
	const collectionMutex = yield* Effect.makeSemaphore(1);
	const blobMutex = yield* Effect.makeSemaphore(1);
	// Closed latch gates external connects until plugins finish (handleConnect).
	const latch = yield* Effect.makeLatch(false);
	const rootCache = yield* traceKyju("kyju:db.init.make-cache", makeRootCache(fs, config, rootMutex));
	const ctx = {
		fs,
		config,
		sessionsRef,
		rootMutex,
		collectionMutex,
		blobMutex,
		dbSend: userConfig.send,
		rootCache
	};
	// Routes every inbound protocol message to its handler. Errors are logged
	// and swallowed so one bad message cannot take the DB down.
	const postMessageEffect = (event) => Effect.gen(function* () {
		switch (event.kind) {
			case "connect": return yield* handleConnect(ctx, event, latch);
			case "disconnect": return yield* handleDisconnect(ctx, event);
			case "subscribe-collection": return yield* handleSubscribe(ctx, event);
			case "unsubscribe-collection": return yield* handleUnsubscribe(ctx, event);
			case "write": return yield* handleWrite(ctx, event);
			case "read": return yield* handleRead(ctx, event);
		}
	}).pipe(Effect.catchAll((err) => {
		console.error("[kyju:db] unhandled error in postMessage handler:", err);
		return Effect.void;
	}));
	// Tells every connected session to re-handshake.
	const reconnectClientsEffect = Effect.gen(function* () {
		broadcastDbUpdate({
			sessions: yield* Ref.get(sessionsRef),
			message: { type: "reconnect" }
		});
	});
	// The migration plugin always runs first, ahead of user plugins.
	let plugins;
	if (userConfig.sections) plugins = [sectionMigrationPlugin(userConfig.sections), ...userConfig.plugins ?? []];
	else plugins = [migrationPlugin(userConfig.migrations ?? []), ...userConfig.plugins ?? []];
	const localReplica = yield* traceKyju("kyju:db.init.run-plugins", runPlugins(ctx, postMessageEffect, plugins));
	// Initialization done: admit external clients and flush migrated state.
	yield* latch.open;
	yield* traceKyju("kyju:db.init.final-flush", rootCache.flush());
	const client = createClient(localReplica);
	const effectClient = createEffectClient(localReplica);
	let closed = false;
	// Exit hook must release the lock synchronously — async callbacks don't
	// run during process exit (see releaseLockOnExit).
	const onProcessExit = () => {
		if (closed) return;
		closed = true;
		releaseLockOnExit(config.dbPath, lockNonce);
	};
	process.once("exit", onProcessExit);
	const close = async () => {
		if (closed) return;
		closed = true;
		// Flush failures are logged but do not block lock release.
		try {
			await Effect.runPromise(rootCache.flush());
		} catch (err) {
			console.error("[kyju:db.close] final flush failed:", err);
		}
		process.off("exit", onProcessExit);
		await releaseLock(config.dbPath, lockNonce);
	};
	return {
		postMessage: (event) => Effect.runPromise(postMessageEffect(event)),
		reconnectClients: () => Effect.runPromise(reconnectClientsEffect),
		flush: () => Effect.runPromise(rootCache.flush()),
		close,
		client,
		effectClient
	};
});
|
|
659
|
+
/**
 * Promise-facing entry point for opening a DB: builds the Effect program
 * from `createDbEffect`, supplies the default runtime `layer`, and runs it.
 */
const createDb = async (config) => {
  const program = createDbEffect(config).pipe(Effect.provide(layer));
  return Effect.runPromise(program);
};
|
|
662
|
+
//#endregion
|
|
663
|
+
//#region ../kyju/src/cli/loader.ts
|
|
664
|
+
/** Zero-pad the decimal rendering of `n` on the left to at least `width` chars. */
function pad(n, width = 4) {
  const text = `${n}`;
  return text.padStart(width, "0");
}
|
|
667
|
+
/**
 * Read the kyju migration journal at `<dir>/meta/_journal.json`.
 * A missing journal degrades to an empty v1 journal instead of throwing;
 * a present-but-invalid JSON file still throws via JSON.parse.
 */
function readJournalFromDir(dir) {
  const journalPath = path.join(dir, "meta", "_journal.json");
  if (fs.existsSync(journalPath)) {
    return JSON.parse(fs.readFileSync(journalPath, "utf-8"));
  }
  return { version: "1", entries: [] };
}
|
|
675
|
+
/**
 * Load migrations from a kyju output directory: read the journal, then
 * dynamically import each `<idx>_<tag>.{ts,js,mjs}` file in journal order
 * (first extension found wins). Throws when a journal entry has no file
 * on disk. Returns `[]` for an empty journal.
 */
async function loadMigrationsFromDir(dir) {
  const absDir = path.resolve(dir);
  const { entries } = readJournalFromDir(absDir);
  if (entries.length === 0) return [];
  const extensions = [".ts", ".js", ".mjs"];
  const migrations = [];
  for (const entry of entries) {
    const stem = `${pad(entry.idx)}_${entry.tag}`;
    const filePath = extensions
      .map((ext) => path.join(absDir, stem + ext))
      .find((candidate) => fs.existsSync(candidate)) ?? null;
    if (filePath === null) {
      throw new Error(`Migration file not found for entry ${entry.idx} (${entry.tag}) in ${absDir}`);
    }
    const mod = await import(pathToFileURL(filePath).href);
    migrations.push(mod.default ?? mod);
  }
  return migrations;
}
|
|
706
|
+
//#endregion
|
|
707
|
+
//#region src/shared/db-registry.ts
|
|
708
|
+
// Registry shape used when no db.json exists yet (or it is unreadable):
// no default DB selected, no known DB paths.
const DEFAULT_REGISTRY = {
	defaultDbPath: null,
	dbs: []
};
|
|
712
|
+
// Canonicalize a registry path to an absolute form so entries can be
// compared with plain `===` (see the `e.path === p` lookup in addDb).
function normalize(p) {
	return path.resolve(p);
}
|
|
715
|
+
// Ensure the internal state directory (INTERNAL_DIR) exists before the
// registry file is written; `recursive: true` makes this idempotent.
async function ensureDir() {
	await fsp.mkdir(INTERNAL_DIR, { recursive: true });
}
|
|
718
|
+
/**
 * Read and sanitize the DB registry from DB_CONFIG_JSON.
 *
 * - Unreadable or non-object JSON falls back to a copy of DEFAULT_REGISTRY.
 * - A legacy `{ dbPath }` file (no `defaultDbPath`/`dbs` keys) is upgraded
 *   to the current shape and persisted via saveRegistry before returning.
 * - Otherwise entries are validated: non-objects and entries without a
 *   string `path` are dropped, paths are normalized, and a missing numeric
 *   `lastUsedAt` becomes 0.
 */
async function loadRegistry() {
  let parsed;
  try {
    parsed = JSON.parse(await fsp.readFile(DB_CONFIG_JSON, "utf8"));
  } catch {
    return { ...DEFAULT_REGISTRY };
  }
  if (!parsed || typeof parsed !== "object") return { ...DEFAULT_REGISTRY };
  const isLegacyShape = typeof parsed.dbPath === "string" && parsed.defaultDbPath === void 0 && parsed.dbs === void 0;
  if (isLegacyShape) {
    const upgradedPath = normalize(parsed.dbPath);
    const upgraded = {
      defaultDbPath: upgradedPath,
      dbs: [{ path: upgradedPath, lastUsedAt: Date.now() }]
    };
    await saveRegistry(upgraded);
    return upgraded;
  }
  const defaultDbPath = typeof parsed.defaultDbPath === "string" ? normalize(parsed.defaultDbPath) : null;
  const rawDbs = Array.isArray(parsed.dbs) ? parsed.dbs : [];
  const dbs = rawDbs
    .filter((e) => !!e && typeof e === "object" && typeof e.path === "string")
    .map((e) => ({
      path: normalize(e.path),
      lastUsedAt: typeof e.lastUsedAt === "number" ? e.lastUsedAt : 0
    }));
  return { defaultDbPath, dbs };
}
|
|
748
|
+
// Persist the registry as pretty-printed JSON; the internal directory is
// created first so a fresh machine can write without a prior setup step.
async function saveRegistry(reg) {
	await ensureDir();
	await fsp.writeFile(DB_CONFIG_JSON, JSON.stringify(reg, null, 2));
}
|
|
752
|
+
/**
 * Record `absPath` in the registry (creating the directory if needed) and
 * stamp its `lastUsedAt`. An existing entry is updated in place; a new path
 * is appended. Returns the saved registry object.
 */
async function addDb(absPath) {
  const dbPath = normalize(absPath);
  await fsp.mkdir(dbPath, { recursive: true });
  const reg = await loadRegistry();
  const stamp = Date.now();
  const entry = reg.dbs.find((e) => e.path === dbPath);
  if (entry) {
    entry.lastUsedAt = stamp;
  } else {
    reg.dbs.push({ path: dbPath, lastUsedAt: stamp });
  }
  await saveRegistry(reg);
  return reg;
}
|
|
766
|
+
const FLAG_PREFIX = "--zen-db-path=";
/**
 * Resolve the active DB path. The app's `config.json` is the single source of
 * truth: every project that uses Zenbu must declare `"db": "<path>"` (relative
 * to `config.json` or absolute). The only override is `--zen-db-path=<x>` for
 * one-off dev runs (e.g. `zen --db /tmp/scratch .`). There is no global
 * fallback by design — we don't want a project's data location to depend on
 * the developer's machine state.
 *
 * The recently-used registry at `~/.zenbu/.internal/db.json` is *only* a
 * convenience index for `zen db list / pick` navigation; it does not drive
 * resolution.
 *
 * Always mkdir -p before returning so DbService can hand the path straight to
 * kyju without an extra existence check.
 *
 * @param {string[]} argv - argument vector scanned for the override flag.
 * @param {{configDb: string, configDir: string, configPath: string}} app
 * @returns {Promise<{path: string, source: "flag" | "config"}>}
 * @throws when no flag is present and the config lacks a usable "db" field.
 */
async function resolveDbPath(argv, app) {
	for (const arg of argv) if (arg.startsWith(FLAG_PREFIX)) {
		// Fix: derive the slice offset from FLAG_PREFIX itself. The previous
		// hard-coded `14` happened to equal FLAG_PREFIX.length and would have
		// silently truncated or leaked prefix chars if the flag was renamed.
		const p = normalize(arg.slice(FLAG_PREFIX.length));
		await fsp.mkdir(p, { recursive: true });
		return {
			path: p,
			source: "flag"
		};
	}
	if (!app.configDb || typeof app.configDb !== "string") throw new Error(`Zenbu config is missing the required "db" field at ${app.configPath}.\nAdd a relative or absolute path, e.g. \`{ "db": "./.zenbu/db", "plugins": [...] }\`.`);
	const resolved = normalize(path.isAbsolute(app.configDb) ? app.configDb : path.resolve(app.configDir, app.configDb));
	await fsp.mkdir(resolved, { recursive: true });
	return {
		path: resolved,
		source: "config"
	};
}
|
|
799
|
+
//#endregion
|
|
800
|
+
//#region src/services/http.ts
|
|
801
|
+
const log$3 = createLogger("http");
/**
 * HTTP/WebSocket front door. Responsibilities visible in this class:
 *  - dispatch requests whose URL matches a registered prefix handler;
 *  - proxy everything else to the "app" reloader entry (503 when the
 *    reloader is not ready, 502 when the upstream request errors);
 *  - forward the `vite-hmr` WebSocket upgrade to the same reloader;
 *  - assign ids to incoming WebSocket connections and fan out
 *    connect/disconnect events to subscribers.
 */
var HttpService = class extends Service {
	static key = "http";
	static deps = {
		server: ServerService,
		reloader: ReloaderService
	};
	// Subscribers for WS lifecycle events; reset at the top of evaluate().
	connectedCallbacks = [];
	disconnectedCallbacks = [];
	// connection id -> live WebSocket, so late subscribers can see existing conns.
	activeConnections = /* @__PURE__ */ new Map();
	// URL prefix -> request handler, consulted before the proxy fallback.
	requestHandlers = /* @__PURE__ */ new Map();
	get port() {
		return this.ctx.server.port;
	}
	get authToken() {
		return this.ctx.server.authToken;
	}
	/** Route requests whose URL starts with `prefix`; returns an unsubscriber. */
	addRequestHandler(prefix, handler) {
		this.requestHandlers.set(prefix, handler);
		return () => {
			this.requestHandlers.delete(prefix);
		};
	}
	/** Subscribe to new WS connections; returns an unsubscriber. */
	onConnected(cb) {
		this.connectedCallbacks.push(cb);
		return () => {
			this.connectedCallbacks = this.connectedCallbacks.filter((f) => f !== cb);
		};
	}
	/** Subscribe to WS disconnects; returns an unsubscriber. */
	onDisconnected(cb) {
		this.disconnectedCallbacks.push(cb);
		return () => {
			this.disconnectedCallbacks = this.disconnectedCallbacks.filter((f) => f !== cb);
		};
	}
	evaluate() {
		// Drop stale subscribers from a previous evaluation cycle.
		this.connectedCallbacks = [];
		this.disconnectedCallbacks = [];
		const { server } = this.ctx;
		// NOTE(review): leftover timing probe — its return value is unused;
		// looks safe to delete.
		performance.now();
		this.setup("proxy", () => {
			// Keep-alive agent so repeated proxied requests reuse upstream sockets.
			const viteAgent = new http.Agent({
				keepAlive: true,
				maxSockets: 20
			});
			const handler = (req, res) => {
				const url = req.url ?? "/";
				// Prefix handlers win over the proxy; first matching prefix takes it.
				for (const [prefix, routeHandler] of this.requestHandlers) if (url.startsWith(prefix)) {
					routeHandler(req, res);
					return;
				}
				const coreEntry = this.ctx.reloader.get("app");
				if (!coreEntry) {
					res.writeHead(503);
					res.end("Vite server not ready");
					return;
				}
				// Strip a trailing slash so `new URL(url, base)` keeps the request path.
				const reloaderUrl = coreEntry.url.replace(/\/$/, "");
				const target = new URL(url, reloaderUrl);
				const proxyHeaders = {
					...req.headers,
					host: target.host
				};
				const proxyReq = http.request(target, {
					method: req.method,
					headers: proxyHeaders,
					agent: viteAgent
				}, (proxyRes) => {
					res.writeHead(proxyRes.statusCode ?? 502, proxyRes.headers);
					proxyRes.pipe(res);
				});
				proxyReq.on("error", () => {
					res.writeHead(502);
					res.end("Bad Gateway");
				});
				req.pipe(proxyReq);
			};
			server.server.on("request", handler);
			return () => {
				server.server.off("request", handler);
				viteAgent.destroy();
			};
		});
		this.setup("vite-hmr-proxy", () => {
			return server.addUpgradeHandler((req, socket, head) => {
				// Only claim upgrades negotiating the vite-hmr subprotocol;
				// returning false lets other upgrade handlers try.
				const protocols = req.headers["sec-websocket-protocol"];
				if (!protocols || !protocols.includes("vite-hmr")) return false;
				const coreEntry = this.ctx.reloader.get("app");
				if (!coreEntry) {
					socket.destroy();
					return true;
				}
				const target = new URL(coreEntry.url);
				const proxyReq = http.request({
					hostname: target.hostname,
					port: target.port,
					path: req.url,
					method: req.method,
					headers: {
						...req.headers,
						host: target.host
					}
				});
				proxyReq.on("upgrade", (_res, proxySocket, proxyHead) => {
					// Replay the upstream 101 handshake verbatim, then splice the sockets
					// together so frames flow both ways.
					socket.write(`HTTP/1.1 101 Switching Protocols\r\n` + Object.entries(_res.headers).map(([k, v]) => `${k}: ${v}`).join("\r\n") + "\r\n\r\n");
					if (proxyHead.length) socket.write(proxyHead);
					proxySocket.pipe(socket).pipe(proxySocket);
				});
				proxyReq.on("error", () => socket.destroy());
				proxyReq.end(head);
				return true;
			});
		});
		this.setup("ws-dispatch", () => {
			// `req` is accepted but unused — kept for the ws "connection" signature.
			const onConnection = (ws, req) => {
				const id = nanoid();
				this.activeConnections.set(id, ws);
				for (const cb of this.connectedCallbacks) cb(id, ws);
				ws.on("close", () => {
					this.activeConnections.delete(id);
					for (const cb of this.disconnectedCallbacks) cb(id);
				});
			};
			server.wss.on("connection", onConnection);
			return () => {
				server.wss.off("connection", onConnection);
			};
		});
		log$3.verbose(`service ready on port ${this.port}`);
	}
};
runtime.register(HttpService, import.meta);
|
|
933
|
+
//#endregion
|
|
934
|
+
//#region src/services/db.ts
|
|
935
|
+
// Logger shared by the DB bootstrap helpers and DbService below.
const log$2 = createLogger("db");
|
|
936
|
+
/**
 * Walk upward from this file's directory (starting two levels up) until a
 * package.json whose `name` is "@zenbujs/core" is found; if the filesystem
 * root is reached without a match, fall back to the starting directory.
 * Same trick as `loaders/zenbu.ts` and `vite-plugins.ts`; worth deduping
 * into a shared helper once we have a fourth caller.
 */
async function findCorePackageRoot() {
  const here = path.dirname(fileURLToPath(import.meta.url));
  const start = path.resolve(here, "..", "..");
  for (let dir = start; dir !== path.dirname(dir); dir = path.dirname(dir)) {
    try {
      const manifest = JSON.parse(await fsp.readFile(path.join(dir, "package.json"), "utf8"));
      if (manifest.name === "@zenbujs/core") return dir;
    } catch {
      // No package.json here (or unparsable) — keep climbing.
    }
  }
  return start;
}
|
|
953
|
+
/**
 * Build the core DB section from this package's `schema` plus any migrations
 * shipped under `<core>/migrations/`. When that directory is missing (e.g.
 * before the first `npm run db:generate` in the core package) the section
 * degrades to `migrations: []` instead of failing.
 */
async function buildCoreSection() {
  const root = await findCorePackageRoot();
  const migrationsDir = path.join(root, "migrations");
  let migrations = [];
  try {
    await fsp.access(migrationsDir);
    migrations = await loadMigrationsFromDir(migrationsDir);
  } catch {
    // Directory absent — ship the section without migrations.
  }
  return {
    name: "core",
    schema,
    migrations
  };
}
|
|
978
|
+
/**
 * Resolve a manifest module specifier to a concrete path on disk.
 * A specifier that already has an extension is probed verbatim; an
 * extensionless one is probed as `<p>`, then `<p>.{ts,js,mjs}`, then
 * `<p>/index.{ts,js,mjs}` — first existing file or directory wins.
 * Throws when nothing matches.
 */
async function resolveManifestModulePath(baseDir, specifier) {
  const base = path.resolve(baseDir, specifier);
  let candidates;
  if (path.extname(base)) {
    candidates = [base];
  } else {
    const exts = [".ts", ".js", ".mjs"];
    candidates = [base, ...exts.map((ext) => base + ext), ...exts.map((ext) => path.join(base, `index${ext}`))];
  }
  for (const candidate of candidates) {
    try {
      const stat = await fsp.stat(candidate);
      if (stat.isFile() || stat.isDirectory()) return candidate;
    } catch {
      // Candidate does not exist — try the next one.
    }
  }
  throw new Error(`Could not resolve module entry for "${specifier}" from "${baseDir}"`);
}
|
|
995
|
+
// Dynamically import a module by absolute file path.
// NOTE(review): despite the name there is no cache-busting query param, so
// repeated calls return the runtime's cached module instance — confirm
// whether "fresh" semantics are actually required by callers.
async function importFreshModule(modulePath) {
	return import(pathToFileURL(modulePath).href);
}
|
|
998
|
+
/**
 * Locate the user config file: the ZENBU_CONFIG_PATH env var wins; otherwise
 * prefer `~/.zenbu/config.jsonc` when it exists, falling back to
 * `~/.zenbu/config.json` (which is not checked for existence).
 */
async function resolveConfigPath$1() {
  const override = process.env.ZENBU_CONFIG_PATH;
  if (override) return override;
  const zenbuDir = path.join(os.homedir(), ".zenbu");
  const jsonc = path.join(zenbuDir, "config.jsonc");
  try {
    await fsp.access(jsonc);
    return jsonc;
  } catch {
    return path.join(zenbuDir, "config.json");
  }
}
|
|
1008
|
+
/**
 * Read and validate the app config at `configPath`.
 * Read/parse failures and non-object payloads throw with the path in the
 * message. `db` defaults to "" when absent or non-string; `plugins` keeps
 * only its string entries.
 */
async function loadAppConfig(configPath) {
  let parsed;
  try {
    const text = await fsp.readFile(configPath, "utf8");
    parsed = parseJsonc$2(text);
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    throw new Error(`Failed to read Zenbu config at ${configPath}: ${reason}`);
  }
  if (!parsed || typeof parsed !== "object") {
    throw new Error(`Zenbu config at ${configPath} is not a JSON object`);
  }
  const db = typeof parsed.db === "string" ? parsed.db : "";
  const plugins = Array.isArray(parsed.plugins) ? parsed.plugins.filter((p) => typeof p === "string") : [];
  return { db, plugins };
}
|
|
1023
|
+
/**
 * Minimal JSONC parser: strips line and block comments (string literals are
 * copied verbatim, so comment markers inside strings survive), removes
 * trailing commas before `]` or `}`, then defers to JSON.parse.
 */
function parseJsonc$2(str) {
  let out = "";
  let pos = 0;
  while (pos < str.length) {
    const ch = str[pos];
    if (ch === "\"") {
      // Copy the whole string literal, honoring backslash escapes.
      let end = pos + 1;
      while (end < str.length) {
        if (str[end] === "\\") {
          end += 2;
        } else if (str[end] === "\"") {
          end++;
          break;
        } else {
          end++;
        }
      }
      out += str.slice(pos, end);
      pos = end;
    } else if (ch === "/" && str[pos + 1] === "/") {
      pos += 2;
      while (pos < str.length && str[pos] !== "\n") pos++;
    } else if (ch === "/" && str[pos + 1] === "*") {
      pos += 2;
      while (pos < str.length && !(str[pos] === "*" && str[pos + 1] === "/")) pos++;
      pos += 2;
    } else {
      out += ch;
      pos++;
    }
  }
  return JSON.parse(out.replace(/,\s*([\]}])/g, "$1"));
}
|
|
1048
|
+
/**
 * Discover plugin DB sections listed in the user config, in parallel.
 * Each plugin task reads its manifest JSON, resolves and imports the schema
 * module and (optionally) its migrations, and yields
 * `{ name, schema, migrations }` — or null on any failure, which is logged
 * rather than thrown. Per-plugin timings are collected and dumped at
 * verbose level for startup profiling.
 */
async function discoverSections(configPath) {
	const resolvedConfigPath = configPath ?? await resolveConfigPath$1();
	let config = { plugins: [] };
	try {
		config = parseJsonc$2(await fsp.readFile(resolvedConfigPath, "utf8"));
	} catch (error) {
		log$2.error(`failed to read plugin config at ${resolvedConfigPath}: ${error instanceof Error ? error.message : String(error)}`);
		return [];
	}
	// Timing rows pushed from each task's `finally`, success or failure.
	const perPluginTimings = [];
	const tasks = config.plugins.map(async (manifestPath) => {
		const pluginStart = Date.now();
		let manifestMs = 0;
		let resolveSchemaMs = 0;
		let importSchemaMs = 0;
		let resolveMigrationsMs = 0;
		let importMigrationsMs = 0;
		// Fallback display name (parent dir) until the manifest's name is read.
		let pluginName = path.basename(path.dirname(manifestPath));
		try {
			const t0 = Date.now();
			const raw = await fsp.readFile(manifestPath, "utf8");
			manifestMs = Date.now() - t0;
			const manifest = JSON.parse(raw);
			pluginName = manifest.name ?? pluginName;
			if (!manifest.name || !manifest.schema) {
				log$2.error(`skipping manifest without name/schema: ${manifestPath}`);
				return null;
			}
			const baseDir = path.dirname(path.resolve(manifestPath));
			// Schema and migrations are loaded concurrently (joined below).
			// Both chains mutate the timing locals declared above.
			const schemaChain = (async () => {
				const s0 = Date.now();
				const schemaPath = await resolveManifestModulePath(baseDir, manifest.schema);
				resolveSchemaMs = Date.now() - s0;
				const s1 = Date.now();
				const schemaModule = await importFreshModule(schemaPath);
				importSchemaMs = Date.now() - s1;
				return {
					schemaPath,
					schemaModule
				};
			})();
			const migrationsChain = manifest.migrations ? (async () => {
				try {
					const m0 = Date.now();
					const migrationsPath = await resolveManifestModulePath(baseDir, manifest.migrations);
					resolveMigrationsMs = Date.now() - m0;
					const m1 = Date.now();
					const stat = await fsp.stat(migrationsPath);
					let migrations;
					// A directory is a kyju output dir; a file is a module exporting
					// `migrations` (or a default export).
					if (stat.isDirectory()) migrations = await loadMigrationsFromDir(migrationsPath);
					else {
						const migModule = await importFreshModule(migrationsPath);
						migrations = migModule.migrations ?? migModule.default ?? [];
					}
					importMigrationsMs = Date.now() - m1;
					return {
						migrations,
						failed: false
					};
				} catch (error) {
					log$2.error(`failed to load migrations from ${manifestPath}: ${error instanceof Error ? error.message : String(error)}`);
					return {
						migrations: [],
						failed: true
					};
				}
			})() : Promise.resolve({
				migrations: [],
				failed: false
			});
			const [schemaResult, migrationsResult] = await Promise.all([schemaChain, migrationsChain]);
			// A migrations failure disqualifies the whole section.
			if (migrationsResult.failed) return null;
			const schema = schemaResult.schemaModule.schema ?? schemaResult.schemaModule.default;
			if (!schema?.shape) {
				log$2.error(`schema module did not export a valid schema: ${schemaResult.schemaPath}`);
				return null;
			}
			return {
				name: manifest.name,
				schema,
				migrations: migrationsResult.migrations
			};
		} catch (error) {
			log$2.error(`failed to load section from ${manifestPath}: ${error instanceof Error ? error.message : String(error)}`);
			return null;
		} finally {
			perPluginTimings.push({
				name: pluginName,
				manifestMs,
				resolveSchemaMs,
				importSchemaMs,
				resolveMigrationsMs,
				importMigrationsMs,
				totalMs: Date.now() - pluginStart
			});
		}
	});
	const sections = (await Promise.all(tasks)).filter((s) => s !== null);
	// Verbose timing table, slowest plugin first.
	const sorted = [...perPluginTimings].sort((a, b) => b.totalMs - a.totalMs);
	const sum = (k) => perPluginTimings.reduce((acc, p) => acc + p[k], 0);
	log$2.verbose("per-plugin breakdown (ms, parallel):");
	log$2.verbose(`  ${"plugin".padEnd(28)} ${"total".padStart(6)} ${"man".padStart(5)} ${"resS".padStart(5)} ${"impS".padStart(6)} ${"resM".padStart(5)} ${"impM".padStart(6)}`);
	for (const p of sorted) log$2.verbose(`  ${p.name.padEnd(28)} ${String(p.totalMs).padStart(6)} ${String(p.manifestMs).padStart(5)} ${String(p.resolveSchemaMs).padStart(5)} ${String(p.importSchemaMs).padStart(6)} ${String(p.resolveMigrationsMs).padStart(5)} ${String(p.importMigrationsMs).padStart(6)}`);
	log$2.verbose(`  ${"SUM(cpu)".padEnd(28)} ${String(sum("totalMs")).padStart(6)} ${String(sum("manifestMs")).padStart(5)} ${String(sum("resolveSchemaMs")).padStart(5)} ${String(sum("importSchemaMs")).padStart(6)} ${String(sum("resolveMigrationsMs")).padStart(5)} ${String(sum("importMigrationsMs")).padStart(6)}`);
	log$2.verbose(`  (wall time: look at db.discover-sections span — should be ~max(totalMs) not SUM)`);
	return sections;
}
|
|
1155
|
+
/**
 * Owns the application DB: assembles sections (core + discovered plugins),
 * (re)creates the DB when the section set or DB path changes across
 * evaluations, and bridges the DB event router onto the HTTP service's
 * WebSocket connections.
 */
var DbService = class extends Service {
	static key = "db";
	static deps = { http: HttpService };
	// DB handle from createDb(); null until the first evaluate().
	db = null;
	// Router fanning DB events out to per-connection transports.
	dbRouter = null;
	// JSON of (section name, migration count) pairs; change triggers recreate.
	sectionsHash = "";
	_dbPath = null;
	/**
	 * Resolved DB path. Throws if accessed before `evaluate()` has run — the
	 * service contract guarantees deps are evaluated before dependents, so any
	 * access from a dependent service or RPC handler is safe.
	 */
	get dbPath() {
		if (this._dbPath === null) throw new Error("DbService.dbPath accessed before evaluate()");
		return this._dbPath;
	}
	// NOTE: both getters read this.db directly — accessing them before
	// evaluate() has run (db === null) throws a TypeError.
	get client() {
		return this.db.client;
	}
	get effectClient() {
		return this.db.effectClient;
	}
	/**
	 * Drain kyju's lagged-persistence queue. Safe to call anytime; idempotent
	 * when nothing is pending. Used by service teardown (effect cleanup) so
	 * shutdown / hot-reload don't lose in-memory writes.
	 */
	async flush() {
		if (!this.db) return;
		try {
			await this.db.flush();
		} catch (err) {
			log$2.error("flush failed:", err);
		}
	}
	/**
	 * Flush + release the kyju cross-process lock at `<dbPath>/.lock`.
	 * Called on service teardown so a subsequent process can open the DB
	 * without seeing a stale lock. Idempotent.
	 */
	async close() {
		if (!this.db) return;
		try {
			await this.db.close();
		} catch (err) {
			log$2.error("close failed:", err);
		}
	}
	async evaluate() {
		const configPath = await resolveConfigPath$1();
		const configDir = path.dirname(configPath);
		const appConfig = await loadAppConfig(configPath);
		// Core section, plugin sections, and the DB path resolve in parallel.
		const [coreSec, pluginSections, resolved] = await Promise.all([
			this.trace("build-core-section", () => buildCoreSection()),
			this.trace("discover-sections", () => discoverSections(configPath)),
			resolveDbPath(process.argv, {
				configDb: appConfig.db,
				configDir,
				configPath
			})
		]);
		const sections = [coreSec, ...pluginSections];
		const sectionsHash = JSON.stringify(sections.map((s) => ({
			name: s.name,
			v: s.migrations.length
		})));
		const dbPath = resolved.path;
		// (Re)create the DB on first run, or when sections or path changed.
		if (!this.db || this.sectionsHash !== sectionsHash || this._dbPath !== dbPath) {
			if (this.db) try {
				await this.db.close();
			} catch (err) {
				log$2.error("close of previous db failed:", err);
			}
			this.dbRouter = createRouter();
			this.db = await this.trace("create-db", () => createDb({
				sections,
				path: dbPath,
				send: (event) => this.dbRouter.send(event)
			}));
			this.sectionsHash = sectionsHash;
			this._dbPath = dbPath;
			// Best-effort registry bump; failures are logged, never fatal.
			addDb(dbPath).catch((err) => {
				log$2.error("failed to bump registry lastUsedAt:", err);
			});
			log$2.verbose(`ready at ${dbPath} (source: ${resolved.source}, sections: ${sections.map((s) => `${s.name}@v${s.migrations.length}`).join(", ")})`);
		}
		const { http } = this.ctx;
		// connection id -> DB transport, torn down on disconnect or cleanup.
		const wsDbConnections = /* @__PURE__ */ new Map();
		this.setup("kyju-close-on-cleanup", () => async () => {
			await this.close();
		});
		this.setup("ws-transport", () => {
			const onConnected = (id, ws) => {
				const dbConn = this.dbRouter.connection({
					send: (event) => {
						// Drop events for sockets that are no longer open.
						if (ws.readyState === ws.OPEN) ws.send(dbStringify({
							ch: "db",
							data: event
						}));
					},
					postMessage: this.db.postMessage
				});
				wsDbConnections.set(id, dbConn);
				ws.on("message", async (raw) => {
					const msg = dbParse(String(raw));
					// Only "db"-channel messages are routed to this transport.
					if (msg.ch === "db") await dbConn.receive(msg.data);
				});
			};
			const onDisconnected = (id) => {
				const conn = wsDbConnections.get(id);
				if (conn) {
					conn.close();
					wsDbConnections.delete(id);
				}
			};
			const unsubConnected = http.onConnected(onConnected);
			const unsubDisconnected = http.onDisconnected(onDisconnected);
			// Attach transports for sockets that connected before this setup ran.
			for (const [id, ws] of http.activeConnections) onConnected(id, ws);
			return () => {
				unsubConnected();
				unsubDisconnected();
				for (const conn of wsDbConnections.values()) conn.close();
				wsDbConnections.clear();
			};
		});
	}
};
|
|
1282
|
+
// Register DbService with the service runtime.
runtime.register(DbService, import.meta);
|
|
1283
|
+
//#endregion
|
|
1284
|
+
//#region src/services/view-registry.ts
|
|
1285
|
+
const log$1 = createLogger("view-registry");
/**
 * Tracks named "views" (scope -> url/port entries), backed by the reloader
 * service. A view either owns its reloader (`register`) or aliases another
 * reloader under a path prefix (`registerAlias`). The current snapshot,
 * merged with manifest-declared icons, is mirrored into the DB under
 * `plugin.core.lastKnownViewRegistry`.
 */
var ViewRegistryService = class extends Service {
	static key = "view-registry";
	static deps = {
		reloader: ReloaderService,
		db: DbService
	};
	// scope -> view entry ({ scope, url, port, ownsServer, meta }).
	views = /* @__PURE__ */ new Map();
	// scope -> svg string, read from plugin manifests' `icons` maps.
	manifestIcons = /* @__PURE__ */ new Map();
	/**
	 * Create (or return an existing) view that owns its own reloader.
	 * Idempotent per scope; the DB snapshot is awaited before returning.
	 */
	async register(scope, root, configFile, meta) {
		log$1.verbose(`register("${scope}", root="${root}", config="${configFile}")`);
		const existing = this.views.get(scope);
		if (existing) {
			log$1.verbose(`"${scope}" already exists at ${existing.url}`);
			return existing;
		}
		log$1.verbose(`creating reloader for "${scope}"...`);
		const reloaderEntry = await this.ctx.reloader.create(scope, root, configFile);
		log$1.verbose(`reloader created: ${reloaderEntry.url} (port ${reloaderEntry.port})`);
		const entry = {
			scope,
			url: reloaderEntry.url,
			port: reloaderEntry.port,
			ownsServer: true,
			meta
		};
		this.views.set(scope, entry);
		await this.syncToDb();
		log$1.verbose(`"${scope}" registered at ${entry.url}`);
		return entry;
	}
	/**
	 * Register a view that piggybacks on an existing reloader under a path
	 * prefix; throws if the target reloader is unknown.
	 */
	registerAlias(scope, reloaderId, pathPrefix, meta) {
		const existing = this.views.get(scope);
		if (existing) return existing;
		const reloaderEntry = this.ctx.reloader.get(reloaderId);
		if (!reloaderEntry) throw new Error(`Reloader "${reloaderId}" not found for alias "${scope}"`);
		const entry = {
			scope,
			url: `${reloaderEntry.url}${pathPrefix}`,
			port: reloaderEntry.port,
			ownsServer: false,
			meta
		};
		this.views.set(scope, entry);
		// NOTE(review): fire-and-forget (unlike register/unregister, which
		// await) — a synchronous throw inside syncToDb would surface as an
		// unhandled rejection; confirm intentional.
		this.syncToDb();
		return entry;
	}
	/** Remove a view; tears down its reloader only when the view owns it. */
	async unregister(scope) {
		const entry = this.views.get(scope);
		if (!entry) return;
		if (entry.ownsServer) await this.ctx.reloader.remove(scope);
		this.views.delete(scope);
		await this.syncToDb();
	}
	get(scope) {
		return this.views.get(scope);
	}
	evaluate() {
		// NOTE(review): errors from icon loading are swallowed without logging.
		this.loadManifestIcons().catch((err) => {});
		// Fire-and-forget initial snapshot; syncToDb swallows its own failures.
		this.syncToDb();
		this.setup("view-registry-cleanup", () => {
			return async () => {
				for (const [scope, entry] of this.views) if (entry.ownsServer) await this.ctx.reloader.remove(scope);
				this.views.clear();
				await this.syncToDb();
			};
		});
	}
	/**
	 * Rebuild the scope -> icon map from the `icons` object of every plugin
	 * manifest listed in the user config. All read/parse failures are
	 * tolerated silently: a broken manifest just contributes no icons.
	 */
	async loadManifestIcons() {
		this.manifestIcons.clear();
		try {
			const configPath = await resolveConfigPath();
			let raw;
			try {
				raw = await fsp.readFile(configPath, "utf8");
			} catch {
				return;
			}
			const config = parseJsonc$1(raw);
			for (const manifestPath of config.plugins ?? []) try {
				const manifestRaw = await fsp.readFile(manifestPath, "utf8");
				const icons = JSON.parse(manifestRaw).icons ?? {};
				for (const [scope, svg] of Object.entries(icons)) this.manifestIcons.set(scope, svg);
			} catch {}
		} catch {}
	}
	/**
	 * Mirror the current view snapshot into the DB at
	 * `plugin.core.lastKnownViewRegistry`. Update failures are swallowed.
	 */
	async syncToDb() {
		const client = this.ctx.db.effectClient;
		const snapshot = [...this.views.values()].map((e) => ({
			scope: e.scope,
			url: e.url,
			port: e.port,
			icon: this.manifestIcons.get(e.scope),
			meta: e.meta
		}));
		await Effect.runPromise(client.update((root) => {
			root.plugin.core.lastKnownViewRegistry = snapshot;
		})).catch((err) => {});
	}
};
|
|
1385
|
+
async function resolveConfigPath() {
	// Explicit override always wins.
	const override = process.env.ZENBU_CONFIG_PATH;
	if (override) return override;
	// Prefer ~/.zenbu/config.jsonc when it exists; otherwise fall back to
	// ~/.zenbu/config.json (which may itself be absent — callers handle
	// read errors).
	const zenbuDir = path.join(os.homedir(), ".zenbu");
	const jsoncPath = path.join(zenbuDir, "config.jsonc");
	try {
		await fsp.access(jsoncPath);
		return jsoncPath;
	} catch {
		return path.join(zenbuDir, "config.json");
	}
}
|
|
1395
|
+
function parseJsonc$1(str) {
	// Convert JSONC (JSON with // and /* */ comments and trailing commas) to
	// strict JSON, then parse. String literals are copied verbatim so comment
	// markers and commas inside strings are never misinterpreted.
	//
	// Bug fix: trailing commas are now removed during the scan, only outside
	// string literals. The old post-pass regex /,\s*([\]}])/g also matched
	// inside strings, corrupting values like {"a": ",}"}.
	let result = "";
	// Index in `result` of the most recent structural comma (outside any
	// string) followed only by whitespace/comments so far; -1 when none.
	let pendingComma = -1;
	let i = 0;
	while (i < str.length) {
		if (str[i] === "\"") {
			// Copy the entire string literal, honoring backslash escapes.
			let j = i + 1;
			while (j < str.length) {
				if (str[j] === "\\") j += 2;
				else if (str[j] === "\"") {
					j++;
					break;
				} else j++;
			}
			result += str.slice(i, j);
			i = j;
			pendingComma = -1;
		} else if (str[i] === "/" && str[i + 1] === "/") {
			// Line comment: skip to end of line.
			i += 2;
			while (i < str.length && str[i] !== "\n") i++;
		} else if (str[i] === "/" && str[i + 1] === "*") {
			// Block comment: skip past the closing */.
			i += 2;
			while (i < str.length && !(str[i] === "*" && str[i + 1] === "/")) i++;
			i += 2;
		} else {
			const ch = str[i];
			if (ch === ",") pendingComma = result.length;
			else if (ch === "]" || ch === "}") {
				// Closer right after a dangling comma: drop the comma.
				if (pendingComma !== -1) result = result.slice(0, pendingComma) + result.slice(pendingComma + 1);
				pendingComma = -1;
			} else if (!/\s/.test(ch)) pendingComma = -1;
			result += ch;
			i++;
		}
	}
	return JSON.parse(result);
}
|
|
1420
|
+
// Register the view-registry service with the runtime service container.
runtime.register(ViewRegistryService, import.meta);
//#endregion
//#region src/services/renderer-host.ts
// Namespaced logger for the renderer-host service defined below.
const log = createLogger("renderer-host");
|
|
1424
|
+
async function pathExists(filePath) {
	// True when the path is accessible on disk; any access error maps to false.
	return fsp.access(filePath).then(() => true, () => false);
}
|
|
1432
|
+
function parseJsonc(str) {
	// Convert JSONC (JSON with // and /* */ comments and trailing commas) to
	// strict JSON, then parse. String literals are copied verbatim so comment
	// markers and commas inside strings are never misinterpreted.
	//
	// Bug fix: trailing commas are now removed during the scan, only outside
	// string literals. The old post-pass regex /,\s*([\]}])/g also matched
	// inside strings, corrupting values like {"a": ",}"}.
	let result = "";
	// Index in `result` of the most recent structural comma (outside any
	// string) followed only by whitespace/comments so far; -1 when none.
	let pendingComma = -1;
	let i = 0;
	while (i < str.length) {
		if (str[i] === "\"") {
			// Copy the entire string literal, honoring backslash escapes.
			let j = i + 1;
			while (j < str.length) {
				if (str[j] === "\\") j += 2;
				else if (str[j] === "\"") {
					j++;
					break;
				} else j++;
			}
			result += str.slice(i, j);
			i = j;
			pendingComma = -1;
		} else if (str[i] === "/" && str[i + 1] === "/") {
			// Line comment: skip to end of line.
			i += 2;
			while (i < str.length && str[i] !== "\n") i++;
		} else if (str[i] === "/" && str[i + 1] === "*") {
			// Block comment: skip past the closing */.
			i += 2;
			while (i < str.length && !(str[i] === "*" && str[i + 1] === "/")) i++;
			i += 2;
		} else {
			const ch = str[i];
			if (ch === ",") pendingComma = result.length;
			else if (ch === "]" || ch === "}") {
				// Closer right after a dangling comma: drop the comma.
				if (pendingComma !== -1) result = result.slice(0, pendingComma) + result.slice(pendingComma + 1);
				pendingComma = -1;
			} else if (!/\s/.test(ch)) pendingComma = -1;
			result += ch;
			i++;
		}
	}
	return JSON.parse(result);
}
|
|
1457
|
+
async function resolveRendererRoot() {
	// Locate the renderer entry point: first from any plugin manifest that
	// declares uiEntrypoint, otherwise from the conventional src/renderer
	// directory next to the config file. Throws when neither exists.
	const configPath = process.env.ZENBU_CONFIG_PATH;
	if (!configPath) throw new Error("ZENBU_CONFIG_PATH is required to resolve the app renderer");
	const configDir = path.dirname(configPath);
	const config = parseJsonc(await fsp.readFile(configPath, "utf8"));
	for (const manifestRel of config.plugins ?? []) {
		// Relative manifest paths are interpreted against the config directory.
		const resolvedManifest = path.isAbsolute(manifestRel) ? manifestRel : path.resolve(configDir, manifestRel);
		try {
			const manifest = JSON.parse(await fsp.readFile(resolvedManifest, "utf8"));
			if (!manifest.uiEntrypoint) continue;
			const projectDir = path.dirname(resolvedManifest);
			const rendererDir = path.resolve(projectDir, manifest.uiEntrypoint);
			if (!await pathExists(rendererDir)) continue;
			// Use the plugin's own vite config when it ships one.
			const viteConfig = path.join(projectDir, "vite.config.ts");
			const configFile = await pathExists(viteConfig) ? viteConfig : false;
			return {
				rendererRoot: rendererDir,
				configFile
			};
		} catch {}
	}
	// Fallback: conventional layout relative to the config directory.
	const rendererDir = path.resolve(configDir, "src", "renderer");
	if (await pathExists(rendererDir)) {
		const viteConfig = path.join(configDir, "vite.config.ts");
		return {
			rendererRoot: rendererDir,
			configFile: await pathExists(viteConfig) ? viteConfig : false
		};
	}
	throw new Error(`No renderer entrypoint found. Add uiEntrypoint to the app plugin manifest or create ${rendererDir}.`);
}
|
|
1486
|
+
var RendererHostService = class extends Service {
	// Hosts the app renderer dev server and publishes it as the "app" view.
	static key = "renderer-host";
	static deps = {
		reloader: ReloaderService,
		viewRegistry: ViewRegistryService
	};
	// Filled in by evaluate() once the reloader has the renderer running.
	url = "";
	port = 0;
	async evaluate() {
		// Start (or attach to) the renderer dev server for the "app" scope.
		const resolved = await resolveRendererRoot();
		const server = await this.ctx.reloader.create("app", resolved.rendererRoot, resolved.configFile);
		this.url = server.url;
		this.port = server.port;
		// Expose the renderer in the view registry under the "app" alias.
		this.ctx.viewRegistry.registerAlias("app", "app", "", {
			kind: "app",
			label: "App"
		});
		log.verbose(`ready at ${this.url}`);
	}
};
|
|
1506
|
+
// Register the renderer-host service with the runtime service container.
runtime.register(RendererHostService, import.meta);
//#endregion
// Chunk exports; single-letter aliases are assigned by the bundler.
export { resolveManifestModulePath as a, discoverSections as i, ViewRegistryService as n, HttpService as o, DbService as r, RendererHostService as t };
|