@zenbujs/core 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94) hide show
  1. package/LICENSE +11 -0
  2. package/dist/advice-config-CjgkEf2E.mjs +135 -0
  3. package/dist/advice-config-Cy133IQP.mjs +2 -0
  4. package/dist/advice-runtime.d.mts +35 -0
  5. package/dist/advice-runtime.mjs +131 -0
  6. package/dist/advice.d.mts +36 -0
  7. package/dist/advice.mjs +2 -0
  8. package/dist/base-window-BUt8pwbw.mjs +94 -0
  9. package/dist/base-window-DEIAk618.mjs +2 -0
  10. package/dist/build-config-pbv0w4oN.mjs +17 -0
  11. package/dist/build-electron-B4Gd0Gi4.mjs +516 -0
  12. package/dist/build-source-_q1n1zTV.mjs +162 -0
  13. package/dist/chunk-Dm34NbLt.mjs +6 -0
  14. package/dist/cli/bin.d.mts +1 -0
  15. package/dist/cli/bin.mjs +88 -0
  16. package/dist/cli/build.d.mts +53 -0
  17. package/dist/cli/build.mjs +48 -0
  18. package/dist/cli-BLbQQIVB.mjs +8054 -0
  19. package/dist/config-CdVrW85P.mjs +59 -0
  20. package/dist/config-LK73dJmO.mjs +2 -0
  21. package/dist/db-ByKPbnP6.mjs +2 -0
  22. package/dist/db-DhuAJrye.mjs +531 -0
  23. package/dist/db.d.mts +16 -0
  24. package/dist/db.mjs +16 -0
  25. package/dist/dev-BuqklM0k.mjs +85 -0
  26. package/dist/env-bootstrap-BtVME-CU.d.mts +16 -0
  27. package/dist/env-bootstrap-rj7I-59x.mjs +53 -0
  28. package/dist/env-bootstrap.d.mts +2 -0
  29. package/dist/env-bootstrap.mjs +2 -0
  30. package/dist/http-IBcLzbYu.mjs +2 -0
  31. package/dist/index-Bhlbyrn7.d.mts +63 -0
  32. package/dist/index-CPZ5d6Hl.d.mts +442 -0
  33. package/dist/index-FtE8MXJ_.d.mts +1 -0
  34. package/dist/index.d.mts +6 -0
  35. package/dist/index.mjs +5 -0
  36. package/dist/launcher.mjs +173 -0
  37. package/dist/link-6roQ7Cn6.mjs +580 -0
  38. package/dist/loaders/zenbu.d.mts +22 -0
  39. package/dist/loaders/zenbu.mjs +267 -0
  40. package/dist/log-CyKv8hQg.mjs +20 -0
  41. package/dist/mirror-sync-CodOnwkD.mjs +332 -0
  42. package/dist/monorepo-CmGPHsVm.mjs +119 -0
  43. package/dist/node-D4M19_mV.mjs +5 -0
  44. package/dist/node-loader.d.mts +17 -0
  45. package/dist/node-loader.mjs +33 -0
  46. package/dist/pause-DvAUNmKn.mjs +52 -0
  47. package/dist/publish-source-BVgB62Zj.mjs +131 -0
  48. package/dist/react.d.mts +76 -0
  49. package/dist/react.mjs +291 -0
  50. package/dist/registry-Dh_e7HU1.d.mts +61 -0
  51. package/dist/registry.d.mts +2 -0
  52. package/dist/registry.mjs +1 -0
  53. package/dist/reloader-BCkLjDhS.mjs +2 -0
  54. package/dist/reloader-lLAJ3lqg.mjs +164 -0
  55. package/dist/renderer-host-Bg8QdeeH.mjs +1508 -0
  56. package/dist/renderer-host-DpvBPTHJ.mjs +2 -0
  57. package/dist/rpc-BwwQK6hD.mjs +71 -0
  58. package/dist/rpc-CqitnyR4.mjs +2 -0
  59. package/dist/rpc.d.mts +2 -0
  60. package/dist/rpc.mjs +2 -0
  61. package/dist/runtime-CjqDr8Yf.d.mts +109 -0
  62. package/dist/runtime-DUFKDIe4.mjs +409 -0
  63. package/dist/runtime.d.mts +2 -0
  64. package/dist/runtime.mjs +2 -0
  65. package/dist/schema-CIg4GzHQ.mjs +100 -0
  66. package/dist/schema-DMoSkwUx.d.mts +62 -0
  67. package/dist/schema-dGK6qkfR.mjs +28 -0
  68. package/dist/schema.d.mts +2 -0
  69. package/dist/schema.mjs +2 -0
  70. package/dist/server-BXwZEQ-n.mjs +66 -0
  71. package/dist/server-DjrZUbbu.mjs +2 -0
  72. package/dist/services/default.d.mts +11 -0
  73. package/dist/services/default.mjs +22 -0
  74. package/dist/services/index.d.mts +276 -0
  75. package/dist/services/index.mjs +7 -0
  76. package/dist/setup-gate-BeD6WS6d.mjs +110 -0
  77. package/dist/setup-gate-BqOzm7zp.d.mts +4 -0
  78. package/dist/setup-gate.d.mts +2 -0
  79. package/dist/setup-gate.mjs +2 -0
  80. package/dist/src-pELM4_iH.mjs +376 -0
  81. package/dist/trace-DCB7qFzT.mjs +10 -0
  82. package/dist/transform-DJH3vN4b.mjs +84041 -0
  83. package/dist/transport-BMSzG2-F.mjs +1045 -0
  84. package/dist/view-registry-BualWgAf.mjs +2 -0
  85. package/dist/vite-plugins-Bh3SCOw-.mjs +331 -0
  86. package/dist/vite.d.mts +68 -0
  87. package/dist/vite.mjs +2 -0
  88. package/dist/window-CM2a9Kyc.mjs +2 -0
  89. package/dist/window-CmmpCVX6.mjs +156 -0
  90. package/dist/write-9dRFczGJ.mjs +1248 -0
  91. package/migrations/0000_migration.ts +34 -0
  92. package/migrations/meta/0000_snapshot.json +18 -0
  93. package/migrations/meta/_journal.json +10 -0
  94. package/package.json +124 -0
@@ -0,0 +1,1248 @@
1
+ import { n as traceKyjuSync, t as traceKyju } from "./trace-DCB7qFzT.mjs";
2
+ import * as NFS from "node:fs";
3
+ import * as OS from "node:os";
4
+ import * as Path from "node:path";
5
+ import path from "node:path";
6
+ import * as Crypto from "node:crypto";
7
+ import * as Effect from "effect/Effect";
8
+ import * as Ref from "effect/Ref";
9
+ import * as Brand from "effect/Brand";
10
+ import * as Context from "effect/Context";
11
+ import { GenericTag } from "effect/Context";
12
+ import * as Data from "effect/Data";
13
+ import * as Channel from "effect/Channel";
14
+ import * as Chunk from "effect/Chunk";
15
+ import { identity, pipe } from "effect/Function";
16
+ import * as Layer from "effect/Layer";
17
+ import * as Option from "effect/Option";
18
+ import * as Sink from "effect/Sink";
19
+ import * as Stream from "effect/Stream";
20
+ import "effect/Predicate";
21
+ import * as Schema from "effect/Schema";
22
+ import { nanoid } from "nanoid";
23
//#region ../../node_modules/.pnpm/@effect+platform@0.94.5_effect@3.21.1/node_modules/@effect/platform/dist/esm/Error.js
// Vendored @effect/platform error model (bundled dist output — keep in sync
// with upstream rather than editing locally).
/**
 * @since 1.0.0
 * @category type id
 */
const TypeId = /* @__PURE__ */ Symbol.for("@effect/platform/Error");
/**
 * @since 1.0.0
 * @category Models
 */
// Closed set of platform modules an error can be attributed to.
const Module = /* @__PURE__ */ Schema.Literal("Clipboard", "Command", "FileSystem", "KeyValueStore", "Path", "Stream", "Terminal");
/**
 * @since 1.0.0
 * @category Models
 */
// Error for invalid caller-supplied arguments (e.g. a bad encoding name).
var BadArgument = class extends Schema.TaggedError("@effect/platform/Error/BadArgument")("BadArgument", {
  module: Module,
  method: Schema.String,
  description: /* @__PURE__ */ Schema.optional(Schema.String),
  cause: /* @__PURE__ */ Schema.optional(Schema.Defect)
}) {
  /**
   * @since 1.0.0
   */
  [TypeId] = TypeId;
  /**
   * @since 1.0.0
   */
  // Renders as "Module.method" with an optional ": description" suffix.
  get message() {
    return `${this.module}.${this.method}${this.description ? `: ${this.description}` : ""}`;
  }
};
/**
 * @since 1.0.0
 * @category Model
 */
// errno-style failure categories; mapped from Node err.code in handleErrnoException.
const SystemErrorReason = /* @__PURE__ */ Schema.Literal("AlreadyExists", "BadResource", "Busy", "InvalidData", "NotFound", "PermissionDenied", "TimedOut", "UnexpectedEof", "Unknown", "WouldBlock", "WriteZero");
/**
 * @since 1.0.0
 * @category models
 */
// Error for OS-level failures, carrying the syscall and path/fd involved.
var SystemError = class extends Schema.TaggedError("@effect/platform/Error/SystemError")("SystemError", {
  reason: SystemErrorReason,
  module: Module,
  method: Schema.String,
  description: /* @__PURE__ */ Schema.optional(Schema.String),
  syscall: /* @__PURE__ */ Schema.optional(Schema.String),
  pathOrDescriptor: /* @__PURE__ */ Schema.optional(/* @__PURE__ */ Schema.Union(Schema.String, Schema.Number)),
  cause: /* @__PURE__ */ Schema.optional(Schema.Defect)
}) {
  /**
   * @since 1.0.0
   */
  [TypeId] = TypeId;
  /**
   * @since 1.0.0
   */
  // Renders as "Reason: Module.method (path): description", omitting absent parts.
  get message() {
    return `${this.reason}: ${this.module}.${this.method}${this.pathOrDescriptor !== void 0 ? ` (${this.pathOrDescriptor})` : ""}${this.description ? `: ${this.description}` : ""}`;
  }
};
//#endregion
85
//#region ../../node_modules/.pnpm/@effect+platform@0.94.5_effect@3.21.1/node_modules/@effect/platform/dist/esm/internal/fileSystem.js
/** @internal */
const tag = /* @__PURE__ */ GenericTag("@effect/platform/FileSystem");
/** @internal */
// Normalizes a size to bigint — FileSystem sizes/offsets are bigints throughout.
const Size$1 = (bytes) => typeof bytes === "bigint" ? bytes : BigInt(bytes);
const bigint1024 = /* @__PURE__ */ BigInt(1024);
// NOTE(review): the product below is computed and discarded — bundler residue
// of tree-shaken KiB/MiB/... constants from upstream. It has no runtime effect.
bigint1024 * bigint1024 * bigint1024 * bigint1024 * bigint1024;
/** @internal */
// Wraps a low-level FileSystem implementation, deriving the convenience
// operations (exists, readFileString, stream, sink, writeFileString) from
// the primitives supplied by `impl`.
const make$1 = (impl) => {
  return tag.of({
    ...impl,
    // exists = access succeeded; a NotFound SystemError maps to false,
    // any other failure is re-raised.
    exists: (path) => pipe(impl.access(path), Effect.as(true), Effect.catchTag("SystemError", (e) => e.reason === "NotFound" ? Effect.succeed(false) : Effect.fail(e))),
    // Decodes raw bytes with TextDecoder; an unknown encoding becomes BadArgument.
    readFileString: (path, encoding) => Effect.tryMap(impl.readFile(path), {
      try: (_) => new TextDecoder(encoding).decode(_),
      catch: (cause) => new BadArgument({
        module: "FileSystem",
        method: "readFileString",
        description: "invalid encoding",
        cause
      })
    }),
    // Opens for read, optionally seeks to options.offset, then streams chunks.
    stream: (path, options) => pipe(impl.open(path, { flag: "r" }), options?.offset ? Effect.tap((file) => file.seek(options.offset, "start")) : identity, Effect.map((file) => stream(file, options)), Stream.unwrapScoped),
    // Sink that writes every incoming chunk to a file opened with flag "w".
    sink: (path, options) => pipe(impl.open(path, {
      flag: "w",
      ...options
    }), Effect.map((file) => Sink.forEach((_) => file.writeAll(_))), Sink.unwrapScoped),
    // Encodes via TextEncoder then delegates to writeFile; encoding failures
    // surface as BadArgument.
    writeFileString: (path, data, options) => Effect.flatMap(Effect.try({
      try: () => new TextEncoder().encode(data),
      catch: (cause) => new BadArgument({
        module: "FileSystem",
        method: "writeFileString",
        description: "could not encode string",
        cause
      })
    }), (_) => impl.writeFile(path, _, options))
  });
};
/** @internal */
// Reads a file as a chunked Stream: repeatedly allocates up to `chunkSize`
// bytes until EOF (readAlloc returns None) or `bytesToRead` is reached.
const stream = (file, { bufferSize = 16, bytesToRead: bytesToRead_, chunkSize: chunkSize_ = Size$1(64 * 1024) } = {}) => {
  const bytesToRead = bytesToRead_ !== void 0 ? Size$1(bytesToRead_) : void 0;
  const chunkSize = Size$1(chunkSize_);
  function loop(totalBytesRead) {
    if (bytesToRead !== void 0 && bytesToRead <= totalBytesRead) return Channel.void;
    // Clamp the final read so we never exceed bytesToRead.
    const toRead = bytesToRead !== void 0 && bytesToRead - totalBytesRead < chunkSize ? bytesToRead - totalBytesRead : chunkSize;
    return Channel.flatMap(file.readAlloc(toRead), Option.match({
      onNone: () => Channel.void,
      onSome: (buf) => Channel.flatMap(Channel.write(Chunk.of(buf)), (_) => loop(totalBytesRead + BigInt(buf.length)))
    }));
  }
  return Stream.bufferChunks(Stream.fromChannel(loop(BigInt(0))), { capacity: bufferSize });
};
//#endregion
137
//#region ../../node_modules/.pnpm/@effect+platform@0.94.5_effect@3.21.1/node_modules/@effect/platform/dist/esm/FileSystem.js
/**
 * @since 1.0.0
 */
// Public re-exports over the internal module above.
/**
 * @since 1.0.0
 * @category sizes
 */
const Size = Size$1;
/**
 * @since 1.0.0
 * @category tag
 */
const FileSystem = tag;
/**
 * @since 1.0.0
 * @category constructor
 */
const make = make$1;
/**
 * @since 1.0.0
 * @category type id
 */
const FileTypeId = /* @__PURE__ */ Symbol.for("@effect/platform/FileSystem/File");
/**
 * @since 1.0.0
 * @category constructor
 */
// Brands a raw numeric fd as a FileDescriptor.
const FileDescriptor = /* @__PURE__ */ Brand.nominal();
/**
 * @since 1.0.0
 * @category constructor
 */
const WatchEventCreate = /* @__PURE__ */ Data.tagged("Create");
/**
 * @since 1.0.0
 * @category constructor
 */
const WatchEventUpdate = /* @__PURE__ */ Data.tagged("Update");
/**
 * @since 1.0.0
 * @category constructor
 */
const WatchEventRemove = /* @__PURE__ */ Data.tagged("Remove");
/**
 * @since 1.0.0
 * @category file watcher
 */
// Optional service: when present, `watch` delegates to it instead of NFS.watch.
var WatchBackend = class extends Context.Tag("@effect/platform/FileSystem/WatchBackend")() {};
//#endregion
187
//#region ../../node_modules/.pnpm/@effect+platform@0.94.5_effect@3.21.1/node_modules/@effect/platform/dist/esm/internal/effectify.js
/** @internal */
// Lifts a Node-style callback API into an Effect-returning function.
// `onError` maps the callback's error argument; `onSyncError` maps a
// synchronous throw — without it a sync throw becomes a defect (Effect.die).
const effectify$1 = (fn, onError, onSyncError) => (...args) => Effect.async((resume) => {
  try {
    fn(...args, (err, result) => {
      if (err) resume(Effect.fail(onError ? onError(err, args) : err));
      else resume(Effect.succeed(result));
    });
  } catch (err) {
    resume(onSyncError ? Effect.fail(onSyncError(err, args)) : Effect.die(err));
  }
});
//#endregion
//#region ../../node_modules/.pnpm/@effect+platform@0.94.5_effect@3.21.1/node_modules/@effect/platform/dist/esm/Effectify.js
/**
 * @since 1.0.0
 */
const effectify = effectify$1;
//#endregion
206
//#region ../../node_modules/.pnpm/@effect+platform-node-shared@0.57.1_@effect+cluster@0.56.4_@effect+platform@0.94.5_effe_8874b4c5b98109adb0b32dd122fa88a9/node_modules/@effect/platform-node-shared/dist/esm/internal/error.js
/** @internal */
// Curried adapter: maps a Node ErrnoException (plus the first call argument,
// treated as the path) into a SystemError with a normalized `reason`.
// Unlisted err.code values fall through as "Unknown".
const handleErrnoException = (module, method) => (err, [path]) => {
  let reason = "Unknown";
  switch (err.code) {
    case "ENOENT":
      reason = "NotFound";
      break;
    case "EACCES":
      reason = "PermissionDenied";
      break;
    case "EEXIST":
      reason = "AlreadyExists";
      break;
    case "EISDIR":
      reason = "BadResource";
      break;
    case "ENOTDIR":
      reason = "BadResource";
      break;
    case "EBUSY":
      reason = "Busy";
      break;
    case "ELOOP":
      reason = "BadResource";
      break;
  }
  return new SystemError({
    reason,
    module,
    method,
    pathOrDescriptor: path,
    syscall: err.syscall,
    description: err.message,
    cause: err
  });
};
//#endregion
244
//#region ../../node_modules/.pnpm/@effect+platform-node-shared@0.57.1_@effect+cluster@0.56.4_@effect+platform@0.94.5_effe_8874b4c5b98109adb0b32dd122fa88a9/node_modules/@effect/platform-node-shared/dist/esm/internal/fileSystem.js
// Node implementations of the FileSystem primitives. Each wraps the callback
// fs API via `effectify`, mapping errno failures to SystemError and
// synchronous throws to BadArgument.
const handleBadArgument = (method) => (cause) => new BadArgument({
  module: "FileSystem",
  method,
  cause
});
// access: builds the mode mask from options (readable -> R_OK, writable -> W_OK).
const access = /* @__PURE__ */ (() => {
  const nodeAccess = /* @__PURE__ */ effectify(NFS.access, /* @__PURE__ */ handleErrnoException("FileSystem", "access"), /* @__PURE__ */ handleBadArgument("access"));
  return (path, options) => {
    let mode = NFS.constants.F_OK;
    if (options?.readable) mode |= NFS.constants.R_OK;
    if (options?.writable) mode |= NFS.constants.W_OK;
    return nodeAccess(path, mode);
  };
})();
// copy: always recursive; overwrite/preserveTimestamps default to false.
const copy = /* @__PURE__ */ (() => {
  const nodeCp = /* @__PURE__ */ effectify(NFS.cp, /* @__PURE__ */ handleErrnoException("FileSystem", "copy"), /* @__PURE__ */ handleBadArgument("copy"));
  return (fromPath, toPath, options) => nodeCp(fromPath, toPath, {
    force: options?.overwrite ?? false,
    preserveTimestamps: options?.preserveTimestamps ?? false,
    recursive: true
  });
})();
const copyFile = /* @__PURE__ */ (() => {
  const nodeCopyFile = /* @__PURE__ */ effectify(NFS.copyFile, /* @__PURE__ */ handleErrnoException("FileSystem", "copyFile"), /* @__PURE__ */ handleBadArgument("copyFile"));
  return (fromPath, toPath) => nodeCopyFile(fromPath, toPath);
})();
const chmod = /* @__PURE__ */ (() => {
  const nodeChmod = /* @__PURE__ */ effectify(NFS.chmod, /* @__PURE__ */ handleErrnoException("FileSystem", "chmod"), /* @__PURE__ */ handleBadArgument("chmod"));
  return (path, mode) => nodeChmod(path, mode);
})();
const chown = /* @__PURE__ */ (() => {
  const nodeChown = /* @__PURE__ */ effectify(NFS.chown, /* @__PURE__ */ handleErrnoException("FileSystem", "chown"), /* @__PURE__ */ handleBadArgument("chown"));
  return (path, uid, gid) => nodeChown(path, uid, gid);
})();
const link = /* @__PURE__ */ (() => {
  const nodeLink = /* @__PURE__ */ effectify(NFS.link, /* @__PURE__ */ handleErrnoException("FileSystem", "link"), /* @__PURE__ */ handleBadArgument("link"));
  return (existingPath, newPath) => nodeLink(existingPath, newPath);
})();
const makeDirectory = /* @__PURE__ */ (() => {
  const nodeMkdir = /* @__PURE__ */ effectify(NFS.mkdir, /* @__PURE__ */ handleErrnoException("FileSystem", "makeDirectory"), /* @__PURE__ */ handleBadArgument("makeDirectory"));
  return (path, options) => nodeMkdir(path, {
    recursive: options?.recursive ?? false,
    mode: options?.mode
  });
})();
// Factory so scoped/unscoped variants report their own method name in errors.
const makeTempDirectoryFactory = (method) => {
  const nodeMkdtemp = effectify(NFS.mkdtemp, handleErrnoException("FileSystem", method), handleBadArgument(method));
  return (options) => Effect.suspend(() => {
    const prefix = options?.prefix ?? "";
    // Path.join(dir, ".") normalizes the caller-supplied directory.
    const directory = typeof options?.directory === "string" ? Path.join(options.directory, ".") : OS.tmpdir();
    // mkdtemp requires a trailing separator when there is no prefix.
    return nodeMkdtemp(prefix ? Path.join(directory, prefix) : directory + "/");
  });
};
const makeTempDirectory = /* @__PURE__ */ makeTempDirectoryFactory("makeTempDirectory");
const removeFactory = (method) => {
  const nodeRm = effectify(NFS.rm, handleErrnoException("FileSystem", method), handleBadArgument(method));
  return (path, options) => nodeRm(path, {
    recursive: options?.recursive ?? false,
    force: options?.force ?? false
  });
};
const remove = /* @__PURE__ */ removeFactory("remove");
// Scoped variant: directory is force-removed (orDie) when the scope closes.
const makeTempDirectoryScoped = /* @__PURE__ */ (() => {
  const makeDirectory = /* @__PURE__ */ makeTempDirectoryFactory("makeTempDirectoryScoped");
  const removeDirectory = /* @__PURE__ */ removeFactory("makeTempDirectoryScoped");
  return (options) => Effect.acquireRelease(makeDirectory(options), (directory) => Effect.orDie(removeDirectory(directory, { recursive: true })));
})();
// open: acquireRelease guarantees the fd is closed when the scope ends;
// append mode is detected from an "a"-prefixed flag.
const openFactory = (method) => {
  const nodeOpen = effectify(NFS.open, handleErrnoException("FileSystem", method), handleBadArgument(method));
  const nodeClose = effectify(NFS.close, handleErrnoException("FileSystem", method), handleBadArgument(method));
  return (path, options) => pipe(Effect.acquireRelease(nodeOpen(path, options?.flag ?? "r", options?.mode), (fd) => Effect.orDie(nodeClose(fd))), Effect.map((fd) => makeFile(FileDescriptor(fd), options?.flag?.startsWith("a") ?? false)));
};
317
const open = /* @__PURE__ */ openFactory("open");
// File handle implementation over a raw fd. All operations are serialized
// through a 1-permit semaphore; `position` is a bigint cursor advanced by
// reads and (in non-append mode) writes.
const makeFile = /* @__PURE__ */ (() => {
  const nodeReadFactory = (method) => effectify(NFS.read, handleErrnoException("FileSystem", method), handleBadArgument(method));
  const nodeRead = /* @__PURE__ */ nodeReadFactory("read");
  const nodeReadAlloc = /* @__PURE__ */ nodeReadFactory("readAlloc");
  const nodeStat = /* @__PURE__ */ effectify(NFS.fstat, /* @__PURE__ */ handleErrnoException("FileSystem", "stat"), /* @__PURE__ */ handleBadArgument("stat"));
  const nodeTruncate = /* @__PURE__ */ effectify(NFS.ftruncate, /* @__PURE__ */ handleErrnoException("FileSystem", "truncate"), /* @__PURE__ */ handleBadArgument("truncate"));
  const nodeSync = /* @__PURE__ */ effectify(NFS.fsync, /* @__PURE__ */ handleErrnoException("FileSystem", "sync"), /* @__PURE__ */ handleBadArgument("sync"));
  const nodeWriteFactory = (method) => effectify(NFS.write, handleErrnoException("FileSystem", method), handleBadArgument(method));
  const nodeWrite = /* @__PURE__ */ nodeWriteFactory("write");
  const nodeWriteAll = /* @__PURE__ */ nodeWriteFactory("writeAll");
  class FileImpl {
    fd;
    append;
    [FileTypeId];
    // One permit => read/seek/truncate/write never interleave on this handle.
    semaphore = /* @__PURE__ */ Effect.unsafeMakeSemaphore(1);
    position = 0n;
    constructor(fd, append) {
      this.fd = fd;
      this.append = append;
      this[FileTypeId] = FileTypeId;
    }
    get stat() {
      return Effect.map(nodeStat(this.fd), makeFileInfo);
    }
    get sync() {
      return nodeSync(this.fd);
    }
    // Moves the cursor; "start" is absolute, "current" is relative.
    // Returns the new position.
    seek(offset, from) {
      const offsetSize = Size(offset);
      return this.semaphore.withPermits(1)(Effect.sync(() => {
        if (from === "start") this.position = offsetSize;
        else if (from === "current") this.position = this.position + offsetSize;
        return this.position;
      }));
    }
    // Reads into the caller's buffer at the current position; returns
    // bytes read as a bigint and advances the cursor.
    read(buffer) {
      return this.semaphore.withPermits(1)(Effect.map(Effect.suspend(() => nodeRead(this.fd, {
        buffer,
        position: this.position
      })), (bytesRead) => {
        const sizeRead = Size(bytesRead);
        this.position = this.position + sizeRead;
        return sizeRead;
      }));
    }
    // Allocates and reads up to `size` bytes. None signals EOF; a short
    // read is copied into a right-sized buffer so no slack is exposed.
    readAlloc(size) {
      const sizeNumber = Number(size);
      return this.semaphore.withPermits(1)(Effect.flatMap(Effect.sync(() => Buffer.allocUnsafeSlow(sizeNumber)), (buffer) => Effect.map(nodeReadAlloc(this.fd, {
        buffer,
        position: this.position
      }), (bytesRead) => {
        if (bytesRead === 0) return Option.none();
        this.position = this.position + BigInt(bytesRead);
        if (bytesRead === sizeNumber) return Option.some(buffer);
        const dst = Buffer.allocUnsafeSlow(bytesRead);
        buffer.copy(dst, 0, 0, bytesRead);
        return Option.some(dst);
      })));
    }
    // Truncates to `length` (or 0); clamps the cursor back inside the file
    // unless in append mode (where the cursor is not used for writes).
    truncate(length) {
      return this.semaphore.withPermits(1)(Effect.map(nodeTruncate(this.fd, length ? Number(length) : void 0), () => {
        if (!this.append) {
          const len = BigInt(length ?? 0);
          if (this.position > len) this.position = len;
        }
      }));
    }
    // Single write at the cursor (or at EOF in append mode); returns bytes
    // written and advances the cursor in non-append mode.
    write(buffer) {
      return this.semaphore.withPermits(1)(Effect.map(Effect.suspend(() => nodeWrite(this.fd, buffer, void 0, void 0, this.append ? void 0 : Number(this.position))), (bytesWritten) => {
        const sizeWritten = Size(bytesWritten);
        if (!this.append) this.position = this.position + sizeWritten;
        return sizeWritten;
      }));
    }
    // Recursive drain: keeps writing the remaining subarray until the whole
    // buffer is flushed; a 0-byte write is surfaced as a WriteZero error.
    writeAllChunk(buffer) {
      return Effect.flatMap(Effect.suspend(() => nodeWriteAll(this.fd, buffer, void 0, void 0, this.append ? void 0 : Number(this.position))), (bytesWritten) => {
        if (bytesWritten === 0) return Effect.fail(new SystemError({
          module: "FileSystem",
          method: "writeAll",
          reason: "WriteZero",
          pathOrDescriptor: this.fd,
          description: "write returned 0 bytes written"
        }));
        if (!this.append) this.position = this.position + BigInt(bytesWritten);
        return bytesWritten < buffer.length ? this.writeAllChunk(buffer.subarray(bytesWritten)) : Effect.void;
      });
    }
    // Holds the permit for the entire multi-write drain, not per chunk.
    writeAll(buffer) {
      return this.semaphore.withPermits(1)(this.writeAllChunk(buffer));
    }
  }
  return (fd, append) => new FileImpl(fd, append);
})();
411
// Temp file = fresh temp directory + 6 random hex bytes (+ optional suffix),
// touched by a scoped open with flag "w+".
const makeTempFileFactory = (method) => {
  const makeDirectory = makeTempDirectoryFactory(method);
  const open = openFactory(method);
  const randomHexString = (bytes) => Effect.sync(() => Crypto.randomBytes(bytes).toString("hex"));
  return (options) => pipe(Effect.zip(makeDirectory(options), randomHexString(6)), Effect.map(([directory, random]) => Path.join(directory, random + (options?.suffix ?? ""))), Effect.tap((path) => Effect.scoped(open(path, { flag: "w+" }))));
};
const makeTempFile = /* @__PURE__ */ makeTempFileFactory("makeTempFile");
// Scoped variant: removes the containing temp directory on scope close.
const makeTempFileScoped = /* @__PURE__ */ (() => {
  const makeFile = /* @__PURE__ */ makeTempFileFactory("makeTempFileScoped");
  const removeDirectory = /* @__PURE__ */ removeFactory("makeTempFileScoped");
  return (options) => Effect.acquireRelease(makeFile(options), (file) => Effect.orDie(removeDirectory(Path.dirname(file), { recursive: true })));
})();
const readDirectory = (path, options) => Effect.tryPromise({
  try: () => NFS.promises.readdir(path, options),
  catch: (err) => handleErrnoException("FileSystem", "readDirectory")(err, [path])
});
// Interruption-aware: Effect.async's signal aborts the in-flight Node read.
const readFile = (path) => Effect.async((resume, signal) => {
  try {
    NFS.readFile(path, { signal }, (err, data) => {
      if (err) resume(Effect.fail(handleErrnoException("FileSystem", "readFile")(err, [path])));
      else resume(Effect.succeed(data));
    });
  } catch (err) {
    resume(Effect.fail(handleBadArgument("readFile")(err)));
  }
});
const readLink = /* @__PURE__ */ (() => {
  const nodeReadLink = /* @__PURE__ */ effectify(NFS.readlink, /* @__PURE__ */ handleErrnoException("FileSystem", "readLink"), /* @__PURE__ */ handleBadArgument("readLink"));
  return (path) => nodeReadLink(path);
})();
const realPath = /* @__PURE__ */ (() => {
  const nodeRealPath = /* @__PURE__ */ effectify(NFS.realpath, /* @__PURE__ */ handleErrnoException("FileSystem", "realPath"), /* @__PURE__ */ handleBadArgument("realPath"));
  return (path) => nodeRealPath(path);
})();
const rename = /* @__PURE__ */ (() => {
  const nodeRename = /* @__PURE__ */ effectify(NFS.rename, /* @__PURE__ */ handleErrnoException("FileSystem", "rename"), /* @__PURE__ */ handleBadArgument("rename"));
  return (oldPath, newPath) => nodeRename(oldPath, newPath);
})();
// Converts Node Stats into the platform FileInfo shape (Options for fields
// that may be absent, bigint sizes).
const makeFileInfo = (stat) => ({
  type: stat.isFile() ? "File" : stat.isDirectory() ? "Directory" : stat.isSymbolicLink() ? "SymbolicLink" : stat.isBlockDevice() ? "BlockDevice" : stat.isCharacterDevice() ? "CharacterDevice" : stat.isFIFO() ? "FIFO" : stat.isSocket() ? "Socket" : "Unknown",
  mtime: Option.fromNullable(stat.mtime),
  atime: Option.fromNullable(stat.atime),
  birthtime: Option.fromNullable(stat.birthtime),
  dev: stat.dev,
  rdev: Option.fromNullable(stat.rdev),
  ino: Option.fromNullable(stat.ino),
  mode: stat.mode,
  nlink: Option.fromNullable(stat.nlink),
  uid: Option.fromNullable(stat.uid),
  gid: Option.fromNullable(stat.gid),
  size: Size(stat.size),
  blksize: Option.map(Option.fromNullable(stat.blksize), Size),
  blocks: Option.fromNullable(stat.blocks)
});
const stat = /* @__PURE__ */ (() => {
  const nodeStat = /* @__PURE__ */ effectify(NFS.stat, /* @__PURE__ */ handleErrnoException("FileSystem", "stat"), /* @__PURE__ */ handleBadArgument("stat"));
  return (path) => Effect.map(nodeStat(path), makeFileInfo);
})();
const symlink = /* @__PURE__ */ (() => {
  const nodeSymlink = /* @__PURE__ */ effectify(NFS.symlink, /* @__PURE__ */ handleErrnoException("FileSystem", "symlink"), /* @__PURE__ */ handleBadArgument("symlink"));
  return (target, path) => nodeSymlink(target, path);
})();
const truncate = /* @__PURE__ */ (() => {
  const nodeTruncate = /* @__PURE__ */ effectify(NFS.truncate, /* @__PURE__ */ handleErrnoException("FileSystem", "truncate"), /* @__PURE__ */ handleBadArgument("truncate"));
  return (path, length) => nodeTruncate(path, length !== void 0 ? Number(length) : void 0);
})();
const utimes = /* @__PURE__ */ (() => {
  // NOTE(review): errors here report method "utime" (singular) while the
  // operation is `utimes` — inconsistent with the other wrappers; upstream text.
  const nodeUtimes = /* @__PURE__ */ effectify(NFS.utimes, /* @__PURE__ */ handleErrnoException("FileSystem", "utime"), /* @__PURE__ */ handleBadArgument("utime"));
  return (path, atime, mtime) => nodeUtimes(path, atime, mtime);
})();
481
// fs.watch-backed event stream. "rename" events are disambiguated by a stat:
// success => Create, NotFound => Remove; "change" => Update. Watcher errors
// fail the stream; watcher close ends it. The watcher is released when the
// surrounding scope closes.
const watchNode = (path, options) => Stream.asyncScoped((emit) => Effect.acquireRelease(Effect.sync(() => {
  const watcher = NFS.watch(path, { recursive: options?.recursive }, (event, path) => {
    if (!path) return;
    switch (event) {
      case "rename":
        emit.fromEffect(Effect.matchEffect(stat(path), {
          onSuccess: (_) => Effect.succeed(WatchEventCreate({ path })),
          onFailure: (err) => err._tag === "SystemError" && err.reason === "NotFound" ? Effect.succeed(WatchEventRemove({ path })) : Effect.fail(err)
        }));
        return;
      case "change":
        emit.single(WatchEventUpdate({ path }));
        return;
    }
  });
  watcher.on("error", (error) => {
    emit.fail(new SystemError({
      module: "FileSystem",
      reason: "Unknown",
      method: "watch",
      pathOrDescriptor: path,
      cause: error
    }));
  });
  watcher.on("close", () => {
    emit.end();
  });
  return watcher;
}), (watcher) => Effect.sync(() => watcher.close())));
// Stats the path first (so a missing path fails eagerly), then delegates to
// the optional WatchBackend service or falls back to watchNode.
const watch = (backend, path, options) => stat(path).pipe(Effect.map((stat) => backend.pipe(Option.flatMap((_) => _.register(path, stat, options)), Option.getOrElse(() => watchNode(path, options)))), Stream.unwrap);
// Interruption-aware write, mirroring readFile's signal wiring.
const writeFile = (path, data, options) => Effect.async((resume, signal) => {
  try {
    NFS.writeFile(path, data, {
      signal,
      flag: options?.flag,
      mode: options?.mode
    }, (err) => {
      if (err) resume(Effect.fail(handleErrnoException("FileSystem", "writeFile")(err, [path])));
      else resume(Effect.void);
    });
  } catch (err) {
    resume(Effect.fail(handleBadArgument("writeFile")(err)));
  }
});
525
// Assembles the full FileSystem service from the primitives above, reading
// the optional WatchBackend from the environment for `watch`.
const makeFileSystem = /* @__PURE__ */ Effect.map(/* @__PURE__ */ Effect.serviceOption(WatchBackend), (backend) => make({
  access,
  chmod,
  chown,
  copy,
  copyFile,
  link,
  makeDirectory,
  makeTempDirectory,
  makeTempDirectoryScoped,
  makeTempFile,
  makeTempFileScoped,
  open,
  readDirectory,
  readFile,
  readLink,
  realPath,
  remove,
  rename,
  stat,
  symlink,
  truncate,
  utimes,
  watch(path, options) {
    return watch(backend, path, options);
  },
  writeFile
}));
//#endregion
//#region ../../node_modules/.pnpm/@effect+platform-node-shared@0.57.1_@effect+cluster@0.56.4_@effect+platform@0.94.5_effe_8874b4c5b98109adb0b32dd122fa88a9/node_modules/@effect/platform-node-shared/dist/esm/NodeFileSystem.js
/**
 * @since 1.0.0
 */
/**
 * @since 1.0.0
 * @category layer
 */
// Layer providing the Node-backed FileSystem service.
const layer = /* @__PURE__ */ Layer.effect(FileSystem, makeFileSystem);
//#endregion
564
//#region ../kyju/src/v2/db/helpers.ts
/**
 * On-disk layout of the database. Every helper resolves a location under
 * `config.dbPath` from the ids it is given; index files are JSON, page data
 * is JSONL, blob data is raw.
 */
const paths = {
  root: ({ config }) =>
    path.join(config.dbPath, `${config.rootName}.json`),
  collection: ({ config, collectionId }) =>
    path.join(config.dbPath, config.collectionsDirName, collectionId),
  collectionIndex: ({ config, collectionId }) =>
    path.join(config.dbPath, config.collectionsDirName, collectionId, `${config.collectionIndexName}.json`),
  page: ({ config, collectionId, pageId }) =>
    path.join(config.dbPath, config.collectionsDirName, collectionId, config.pagesDirName, pageId),
  pageIndex: ({ config, collectionId, pageId }) =>
    path.join(config.dbPath, config.collectionsDirName, collectionId, config.pagesDirName, pageId, `${config.pageIndexName}.json`),
  pageData: ({ config, collectionId, pageId }) =>
    path.join(config.dbPath, config.collectionsDirName, collectionId, config.pagesDirName, pageId, `${config.pageDataName}.jsonl`),
  blob: ({ config, blobId }) =>
    path.join(config.dbPath, config.blobsDirName, blobId),
  blobIndex: ({ config, blobId }) =>
    path.join(config.dbPath, config.blobsDirName, blobId, `${config.blobIndexName}.json`),
  blobData: ({ config, blobId }) =>
    path.join(config.dbPath, config.blobsDirName, blobId, config.blobDataName)
};
576
/**
 * Builds an ack message for a completed request. Extra fields from `data`
 * are merged in last, so they can extend (or override) the base fields.
 */
const makeAck = ({ requestId, sessionId, data }) => {
  const base = { type: "ack", requestId, sessionId };
  return { ...base, ...data };
};
582
/**
 * Builds a failed-request ack carrying a tagged error payload. A missing
 * sessionId is normalized to the empty string.
 */
const makeErrorAck = ({ requestId, sessionId, _tag, message }) => {
  const error = { _tag, message };
  return {
    type: "ack",
    requestId,
    sessionId: sessionId ?? "",
    error
  };
};
591
/**
 * Sends a replicated-write message for `op` to every session except the
 * originating one (`excludeSessionId`). A fresh message object is built per
 * recipient so receivers never share state.
 */
const broadcastWrite = ({ sessions, excludeSessionId, op }) => {
  for (const session of sessions.values()) {
    if (session.sessionId !== excludeSessionId) {
      session.send({ kind: "replicated-write", op });
    }
  }
};
600
/**
 * Fans a db-update message out to every connected session, with no
 * exclusions. Each recipient gets its own message object.
 */
const broadcastDbUpdate = ({ sessions, message }) => {
  for (const session of sessions.values()) {
    session.send({ kind: "db-update", message });
  }
};
606
/**
 * Sends a replicated-write for `op` to sessions that are subscribed to
 * `collectionId`, skipping the originating session.
 */
const broadcastCollectionWrite = ({ sessions, excludeSessionId, collectionId, op }) => {
  for (const session of sessions.values()) {
    const skip =
      session.sessionId === excludeSessionId ||
      !session.subscriptions.has(collectionId);
    if (!skip) {
      session.send({ kind: "replicated-write", op });
    }
  }
};
616
/** Deliver an ack to a single session, wrapped in the db-update envelope. */
const sendAck = ({ session, ack }) => {
	const envelope = {
		kind: "db-update",
		message: ack
	};
	session.send(envelope);
};
622
/**
 * Set `value` at `path` (an array of string segments) inside `root`,
 * creating intermediate containers as needed. A segment of all digits
 * selects/creates an array; anything else selects/creates an object.
 *
 * Mutates `root` in place when it is already an object or array and
 * returns it; when `root` is a primitive or null a fresh container is
 * created and returned instead. An empty path returns `value` itself.
 *
 * (Rewrite note: the previous version repeated an identical
 * Array.isArray branch twice and re-spelled the digit regex four
 * times — this dedupes both with no behavior change. When `root` is
 * already an array, `head` is coerced with Number() regardless of the
 * regex, exactly as before.)
 */
const setAtPath = ({ root, path: pathSegments, value }) => {
	if (pathSegments.length === 0) return value;
	const isIndexSegment = (segment) => /^\d+$/.test(segment ?? "");
	const [head, ...rest] = pathSegments;
	// Primitives and null cannot hold children: replace with a container
	// whose kind matches the current segment. (Arrays already satisfy the
	// typeof check, so they are left untouched here.)
	if (typeof root !== "object" || root === null) root = isIndexSegment(head) ? [] : {};
	// Child to recurse into: the existing slot if present, otherwise a new
	// container whose kind is chosen by the NEXT segment.
	const childOf = (existing) => existing ?? (isIndexSegment(rest[0]) ? [] : {});
	if (Array.isArray(root)) {
		const index = Number(head);
		root[index] = rest.length === 0 ? value : setAtPath({
			root: childOf(root[index]),
			path: rest,
			value
		});
		return root;
	}
	root[head] = rest.length === 0 ? value : setAtPath({
		root: childOf(root[head]),
		path: rest,
		value
	});
	return root;
};
672
/**
 * Create a collection on disk: one page directory holding the seed
 * items as JSONL, plus the collection index (active page, page count,
 * item count) and the page's own index (id + order 0). Index files and
 * the data file are written concurrently via Effect.all.
 */
const createCollection = ({ fs, config, collectionId, data }) => Effect.gen(function* () {
	const pageId = nanoid();
	const items = data ?? [];
	const pageDir = paths.page({
		config,
		collectionId,
		pageId
	});
	yield* fs.makeDirectory(pageDir, { recursive: true });
	// JSONL: one JSON document per line, trailing newline only when non-empty.
	const lines = items.map((item) => JSON.stringify(item));
	const jsonlContent = items.length > 0 ? lines.join("\n") + "\n" : "";
	const collectionIndex = {
		activePageId: pageId,
		totalPages: 1,
		totalCount: items.length
	};
	const pageIndex = {
		pageId,
		order: 0
	};
	yield* Effect.all([
		writeJsonFile({
			fs,
			config,
			path: paths.collectionIndex({ config, collectionId }),
			data: collectionIndex
		}),
		writeJsonFile({
			fs,
			config,
			path: paths.pageIndex({ config, collectionId, pageId }),
			data: pageIndex
		}),
		fs.writeFileString(paths.pageData({ config, collectionId, pageId }), jsonlContent)
	]);
});
715
/**
 * Create a blob on disk: write the raw bytes into the blob directory,
 * then stat the file and record its size in the blob index JSON.
 */
const createBlob = ({ fs, config, blobId, data }) => Effect.gen(function* () {
	const blobDir = paths.blob({ config, blobId });
	yield* fs.makeDirectory(blobDir, { recursive: true });
	const dataPath = paths.blobData({ config, blobId });
	yield* fs.writeFile(dataPath, data);
	// Size is taken from the filesystem rather than `data` so the index
	// reflects what actually landed on disk.
	const stats = yield* fs.stat(dataPath);
	const index = {
		blobId,
		fileSize: Number(stats.size)
	};
	yield* writeJsonFile({
		fs,
		config,
		path: paths.blobIndex({ config, blobId }),
		data: index
	});
});
739
/** Read a file as UTF-8 text and parse it as a single JSON document. */
const readJsonFile = ({ fs, path: filePath }) =>
	Effect.map(fs.readFileString(filePath), (raw) => JSON.parse(raw));
743
/**
 * Atomic JSON write via stage-then-rename.
 *
 * The serialized JSON is first written to `config.tmpDir` under a
 * unique name (`<basename>-<nanoid(8)>`), then `rename(2)`'d over the
 * final path. Because rename is atomic within a filesystem, readers
 * always see either the old contents or the complete new contents —
 * never a torn write. If the process dies between the stage write and
 * the rename, the orphaned tmp file is left behind for
 * `cleanupStaleTmpFiles` to sweep at boot.
 *
 * Keeping every staged file in one flat, known directory (instead of
 * `<file>.tmp-<id>` sentinels scattered next to each destination) makes
 * recovery a single readdir of `<dbPath>/.tmp/` — O(pending tmp files)
 * rather than an O(db-size) tree walk.
 *
 * Precondition: `config.tmpDir` exists (createDb makes it at init) and
 * is on the same filesystem as `filePath` (always true while both live
 * under `config.dbPath`).
 */
const writeJsonFile = ({ fs, config, path: filePath, data }) => Effect.gen(function* () {
	const stagedPath = path.join(config.tmpDir, `${path.basename(filePath)}-${nanoid(8)}`);
	yield* fs.writeFileString(stagedPath, JSON.stringify(data));
	yield* fs.rename(stagedPath, filePath);
});
769
/**
 * Sweep orphan tmp files out of `config.tmpDir`. Run at startup before
 * any writer is active: at that point every file still in tmpDir is an
 * orphan from a prior process (successful writes were rename'd away,
 * failed ones never reached their destinations).
 *
 * Cost is O(pending tmp files), not O(db size): one flat directory,
 * no recursion, no sentinel matching. A missing tmpDir or an unreadable
 * directory is treated as "nothing to clean"; individual removal
 * failures are logged and skipped rather than aborting the sweep.
 */
const cleanupStaleTmpFiles = (fs, tmpDir) => Effect.gen(function* () {
	const dirExists = yield* fs.exists(tmpDir);
	if (!dirExists) return;
	const entries = yield* fs.readDirectory(tmpDir).pipe(Effect.catchAll(() => Effect.succeed([])));
	for (const entry of entries) {
		const full = path.join(tmpDir, entry);
		const removal = fs.remove(full).pipe(Effect.catchAll((err) => {
			console.error(`[kyju:db] failed to remove stale tmp ${full}:`, err);
			return Effect.void;
		}));
		yield* removal;
	}
});
790
/**
 * Read a JSONL file: one JSON document per line, returned as an array.
 * An empty/whitespace-only file yields []. Blank lines anywhere in the
 * file are skipped — previously a stray blank line mid-file (e.g. a
 * doubled newline from an interrupted append) made `JSON.parse("")`
 * throw even though every real record was intact.
 */
const readJsonlFile = ({ fs, path: filePath }) => Effect.gen(function* () {
	const content = yield* fs.readFileString(filePath);
	const trimmed = content.trim();
	if (trimmed === "") return [];
	return trimmed
		.split("\n")
		.filter((line) => line.trim() !== "")
		.map((line) => JSON.parse(line));
});
795
/**
 * Read items from a collection by global index range [start, end).
 *
 * NOTE(review): despite pages being the storage unit, this reads EVERY
 * page of the collection (in page-index order) and slices the
 * concatenated result — it cannot skip pages outside the range because
 * page indexes only record `order`, not item counts. Cost is
 * O(collection size) per call. Returns the sliced items plus
 * totalCount; callers get a flat item array, pages stay a storage
 * concern.
 */
const readCollectionItemRange = ({ fs, config, collectionId, start, end }) => Effect.gen(function* () {
	const collectionDir = paths.collection({
		config,
		collectionId
	});
	const pagesDir = path.join(collectionDir, config.pagesDirName);
	// Every directory entry under pages/ is treated as a pageId.
	const pageDirs = yield* fs.readDirectory(pagesDir);
	// Load each page's index concurrently to learn its `order`, then sort
	// so items concatenate in write order.
	const sorted = (yield* Effect.all(pageDirs.map((pageId) => Effect.gen(function* () {
		return {
			id: pageId,
			order: JSON.parse(yield* fs.readFileString(paths.pageIndex({
				config,
				collectionId,
				pageId
			}))).order
		};
	})), { concurrency: "unbounded" })).sort((a, b) => a.order - b.order);
	const allItems = [];
	for (const entry of sorted) {
		const pageItems = yield* readJsonlFile({
			fs,
			path: paths.pageData({
				config,
				collectionId,
				pageId: entry.id
			})
		});
		allItems.push(...pageItems);
	}
	const totalCount = allItems.length;
	// Defaults: start -> 0, end -> totalCount. Bounds are clamped, so
	// out-of-range requests produce an empty/partial slice, never a throw.
	const resolvedStart = start ?? 0;
	const resolvedEnd = end ?? totalCount;
	return {
		items: allItems.slice(Math.max(0, resolvedStart), Math.min(totalCount, resolvedEnd)),
		totalCount
	};
});
839
/**
 * Look up `sessionId` in the session map. On a hit, succeed with the
 * session. On a miss, push an InvalidSessionError ack out through
 * ctx.dbSend (tagged with replicaId) and fail with "INVALID_SESSION"
 * so the calling handler short-circuits.
 */
const validateSession = (ctx, sessionId, requestId, replicaId) => Effect.gen(function* () {
	const sessions = yield* Ref.get(ctx.sessionsRef);
	const session = sessions.get(sessionId);
	if (session) return session;
	const ack = makeErrorAck({
		requestId,
		sessionId,
		_tag: "InvalidSessionError",
		message: "Invalid session"
	});
	ctx.dbSend({
		kind: "db-update",
		replicaId,
		message: ack
	});
	return yield* Effect.fail("INVALID_SESSION");
});
856
+ //#endregion
857
+ //#region ../kyju/src/v2/db/root-cache.ts
858
/**
 * Write-behind cache for the root JSON document.
 *
 * Reads are served from the in-memory `cached` value. `set` replaces it
 * synchronously and schedules one coalesced disk flush on the next
 * setImmediate tick; many `set`s in a tick produce a single write of
 * the latest value (the runner loop re-checks `pending` after each
 * flush). `flush` forces any scheduled/pending work to run and awaits
 * the in-flight runner before resolving.
 *
 * Flush failures are logged and swallowed, leaving the cache ahead of
 * disk. NOTE(review): a crash after a failed flush therefore loses the
 * latest root — confirm that is acceptable.
 */
const makeRootCache = (fs, config, rootMutex) => Effect.gen(function* () {
	// Seed the cache from disk once at construction.
	let cached = yield* readJsonFile({
		fs,
		path: paths.root({ config })
	});
	let scheduledTimer = null; // pending setImmediate handle, if any
	let runner = null; // in-flight flush-loop promise, if any
	let pending = false; // true while `cached` is newer than disk
	// One traced write of the current `cached`, serialized under the root
	// mutex; errors are logged and converted to success so the loop continues.
	const doFlush = () => Effect.runPromise(rootMutex.withPermits(1)(traceKyju("kyju:db.root.flush", writeJsonFile({
		fs,
		config,
		path: paths.root({ config }),
		data: cached
	}))).pipe(Effect.catchAll((err) => {
		console.error("[kyju:rootCache] flush failed:", err);
		return Effect.void;
	})));
	// Start the drain loop unless one is already running. The loop keeps
	// flushing while `set` marks `pending` during an in-progress write.
	const startRunner = () => {
		scheduledTimer = null;
		if (runner) return;
		runner = (async () => {
			while (pending) {
				pending = false;
				await doFlush();
			}
			runner = null;
		})();
	};
	return {
		read: () => Effect.sync(() => cached),
		set: (root) => Effect.sync(() => {
			cached = root;
			pending = true;
			// Already flushing or already scheduled: the existing pass will
			// pick this change up via `pending`.
			if (runner || scheduledTimer) return;
			scheduledTimer = setImmediate(startRunner);
		}),
		flush: () => Effect.gen(function* () {
			if (scheduledTimer) {
				// Cancel the deferred start and run the flush loop immediately.
				const t = scheduledTimer;
				scheduledTimer = null;
				yield* Effect.sync(() => clearImmediate(t));
				startRunner();
			} else if (pending && !runner) startRunner();
			if (runner) yield* Effect.promise(() => runner);
		})
	};
});
905
+ //#endregion
906
+ //#region ../kyju/src/v2/db/handlers/write.ts
907
/** Traced entry point for write ops: wraps handleWriteImpl in a `kyju:db.handleWrite` span tagged with the op type. */
const handleWrite = (ctx, event) => traceKyju(`kyju:db.handleWrite`, handleWriteImpl(ctx, event), { op: event.op.type });
908
/**
 * Dispatch a validated write event to the per-op handler. Common shape of
 * every case: perform the mutation (under the relevant mutex where one
 * exists), ack the originating session, then broadcast the op to peers.
 * A failed session lookup short-circuits the whole handler (the error
 * ack is sent inside validateSession).
 */
const handleWriteImpl = (ctx, event) => Effect.gen(function* () {
	const session = yield* validateSession(ctx, event.sessionId, event.requestId, event.replicaId);
	switch (event.op.type) {
		// Set a value at a path inside the cached root document.
		case "root.set": {
			const typedOp = event.op; // narrowed view of the op for this case
			// Mutate the root under the root mutex so concurrent root.set ops
			// don't interleave their read-modify-write.
			yield* traceKyju("kyju:db.root.mutex+cache", ctx.rootMutex.withPermits(1)(Effect.gen(function* () {
				const root = yield* ctx.rootCache.read();
				const updated = traceKyjuSync("kyju:db.root.setAtPath", () => setAtPath({
					root,
					path: typedOp.path,
					value: typedOp.value
				}));
				yield* ctx.rootCache.set(updated);
			})));
			// Ack first, then broadcast to the other sessions.
			sendAck({
				session,
				ack: makeAck({
					requestId: event.requestId,
					sessionId: event.sessionId
				})
			});
			const sessions = yield* Ref.get(ctx.sessionsRef);
			traceKyjuSync("kyju:db.root.broadcast", () => broadcastWrite({
				sessions,
				excludeSessionId: event.sessionId,
				op: event.op
			}));
			return;
		}
		// Create a new collection (optionally seeded with items).
		// NOTE(review): unlike concat/delete, this case takes no
		// collectionMutex — confirm concurrent creates are safe.
		case "collection.create":
			yield* createCollection({
				fs: ctx.fs,
				config: ctx.config,
				collectionId: event.op.collectionId,
				data: event.op.data
			});
			sendAck({
				session,
				ack: makeAck({
					requestId: event.requestId,
					sessionId: event.sessionId
				})
			});
			// Only sessions subscribed to this collection hear about it.
			broadcastCollectionWrite({
				sessions: yield* Ref.get(ctx.sessionsRef),
				excludeSessionId: event.sessionId,
				collectionId: event.op.collectionId,
				op: event.op
			});
			return;
		// Append items to a collection's active page, rolling over to a new
		// page once the active page reaches maxPageSize bytes.
		case "collection.concat": {
			const typedOp = event.op;
			yield* ctx.collectionMutex.withPermits(1)(Effect.gen(function* () {
				// Empty appends are acked as a no-op (no broadcast).
				if (typedOp.data.length === 0) {
					sendAck({
						session,
						ack: makeAck({
							requestId: event.requestId,
							sessionId: event.sessionId
						})
					});
					return;
				}
				const collectionDir = paths.collection({
					config: ctx.config,
					collectionId: typedOp.collectionId
				});
				// Implicitly create the collection on first append.
				if (!(yield* ctx.fs.exists(collectionDir))) yield* createCollection({
					fs: ctx.fs,
					config: ctx.config,
					collectionId: typedOp.collectionId
				});
				const collectionIndexPath = paths.collectionIndex({
					config: ctx.config,
					collectionId: typedOp.collectionId
				});
				const collectionIndex = JSON.parse(yield* ctx.fs.readFileString(collectionIndexPath));
				let activePageId = collectionIndex.activePageId;
				const activeDataPath = paths.pageData({
					config: ctx.config,
					collectionId: typedOp.collectionId,
					pageId: activePageId
				});
				// Roll-over decision is by on-disk byte size of the active page,
				// measured BEFORE this append.
				const stats = yield* ctx.fs.stat(activeDataPath);
				const currentSize = Number(stats.size);
				const jsonlContent = typedOp.data.map((item) => JSON.stringify(item)).join("\n") + "\n";
				if (currentSize >= ctx.config.maxPageSize) {
					// Active page is full: start a fresh page holding this batch.
					const newPageId = nanoid();
					yield* ctx.fs.makeDirectory(paths.page({
						config: ctx.config,
						collectionId: typedOp.collectionId,
						pageId: newPageId
					}), { recursive: true });
					const pIndex = {
						pageId: newPageId,
						order: collectionIndex.totalPages
					};
					yield* Effect.all([writeJsonFile({
						fs: ctx.fs,
						config: ctx.config,
						path: paths.pageIndex({
							config: ctx.config,
							collectionId: typedOp.collectionId,
							pageId: newPageId
						}),
						data: pIndex
					}), ctx.fs.writeFileString(paths.pageData({
						config: ctx.config,
						collectionId: typedOp.collectionId,
						pageId: newPageId
					}), jsonlContent)]);
					collectionIndex.activePageId = newPageId;
					collectionIndex.totalPages += 1;
					activePageId = newPageId;
				} else yield* Effect.scoped(Effect.gen(function* () {
					// Append in place; the scope closes the opened file handle.
					yield* (yield* ctx.fs.open(activeDataPath, { flag: "a" })).write(new TextEncoder().encode(jsonlContent));
				}));
				// Record where this batch starts in the global item ordering,
				// then persist the updated collection index.
				const startIndex = collectionIndex.totalCount;
				collectionIndex.totalCount += typedOp.data.length;
				yield* writeJsonFile({
					fs: ctx.fs,
					config: ctx.config,
					path: collectionIndexPath,
					data: collectionIndex
				});
				const newStats = yield* ctx.fs.stat(paths.pageData({
					config: ctx.config,
					collectionId: typedOp.collectionId,
					pageId: activePageId
				}));
				// NOTE(review): result discarded — this stat appears to be a
				// leftover (dead code) from a removed size broadcast.
				Number(newStats.size);
				sendAck({
					session,
					ack: makeAck({
						requestId: event.requestId,
						sessionId: event.sessionId
					})
				});
				// Peers receive the op plus authoritative placement info so
				// replicas can position the batch without re-reading.
				broadcastCollectionWrite({
					sessions: yield* Ref.get(ctx.sessionsRef),
					excludeSessionId: event.sessionId,
					collectionId: typedOp.collectionId,
					op: {
						...typedOp,
						authority: {
							startIndex,
							totalCount: collectionIndex.totalCount
						}
					}
				});
			}));
			return;
		}
		// Delete a whole collection directory (guarded by reference check).
		case "collection.delete": {
			const typedOp = event.op;
			yield* ctx.collectionMutex.withPermits(1)(Effect.gen(function* () {
				const collectionDir = paths.collection({
					config: ctx.config,
					collectionId: typedOp.collectionId
				});
				if (!(yield* ctx.fs.exists(collectionDir))) {
					sendAck({
						session,
						ack: makeErrorAck({
							requestId: event.requestId,
							sessionId: event.sessionId,
							_tag: "NotFoundError",
							message: `Collection ${typedOp.collectionId} not found`
						})
					});
					return;
				}
				if (ctx.config.checkReferences) {
					// Reference check is a coarse substring scan over the
					// serialized root — it can false-positive if the id happens
					// to appear inside unrelated data.
					const root = yield* ctx.rootCache.read();
					if (JSON.stringify(root).includes(typedOp.collectionId)) {
						sendAck({
							session,
							ack: makeErrorAck({
								requestId: event.requestId,
								sessionId: event.sessionId,
								_tag: "ReferenceExistsError",
								message: `Collection ${typedOp.collectionId} still has references`
							})
						});
						return;
					}
				}
				yield* ctx.fs.remove(collectionDir, { recursive: true });
				sendAck({
					session,
					ack: makeAck({
						requestId: event.requestId,
						sessionId: event.sessionId
					})
				});
				const sessions = yield* Ref.get(ctx.sessionsRef);
				broadcastCollectionWrite({
					sessions,
					excludeSessionId: event.sessionId,
					collectionId: typedOp.collectionId,
					op: event.op
				});
				// Drop the deleted collection from every session's subscriptions
				// AFTER broadcasting, so subscribers still hear the delete.
				for (const [, s] of sessions) s.subscriptions.delete(typedOp.collectionId);
			}));
			return;
		}
		// Create a blob. NOTE(review): no blobMutex here, unlike set/delete.
		case "blob.create":
			yield* createBlob({
				fs: ctx.fs,
				config: ctx.config,
				blobId: event.op.blobId,
				data: event.op.data
			});
			sendAck({
				session,
				ack: makeAck({
					requestId: event.requestId,
					sessionId: event.sessionId
				})
			});
			broadcastWrite({
				sessions: yield* Ref.get(ctx.sessionsRef),
				excludeSessionId: event.sessionId,
				op: event.op
			});
			return;
		// Replace an existing blob's bytes and refresh its index metadata.
		case "blob.set": {
			const typedOp = event.op;
			yield* ctx.blobMutex.withPermits(1)(Effect.gen(function* () {
				if (!(yield* ctx.fs.exists(paths.blob({
					config: ctx.config,
					blobId: typedOp.blobId
				})))) {
					sendAck({
						session,
						ack: makeErrorAck({
							requestId: event.requestId,
							sessionId: event.sessionId,
							_tag: "NotFoundError",
							message: `Blob ${typedOp.blobId} not found`
						})
					});
					return;
				}
				const dataPath = paths.blobData({
					config: ctx.config,
					blobId: typedOp.blobId
				});
				yield* ctx.fs.writeFile(dataPath, typedOp.data);
				// Size comes from disk, matching createBlob's behavior.
				const blobStats = yield* ctx.fs.stat(dataPath);
				const bIdx = {
					blobId: typedOp.blobId,
					fileSize: Number(blobStats.size)
				};
				yield* writeJsonFile({
					fs: ctx.fs,
					config: ctx.config,
					path: paths.blobIndex({
						config: ctx.config,
						blobId: typedOp.blobId
					}),
					data: bIdx
				});
				sendAck({
					session,
					ack: makeAck({
						requestId: event.requestId,
						sessionId: event.sessionId
					})
				});
				const sessions = yield* Ref.get(ctx.sessionsRef);
				// Two broadcasts: the replicated op to peers, then a metadata
				// update (new file size) to ALL sessions including the writer.
				broadcastWrite({
					sessions,
					excludeSessionId: event.sessionId,
					op: typedOp
				});
				broadcastDbUpdate({
					sessions,
					message: {
						type: "blob.metadataUpdate",
						blobId: typedOp.blobId,
						fileSize: Number(blobStats.size)
					}
				});
			}));
			return;
		}
		// Delete a blob directory (guarded by the same substring-based
		// reference check as collection.delete).
		case "blob.delete": {
			const typedOp = event.op;
			yield* ctx.blobMutex.withPermits(1)(Effect.gen(function* () {
				const blobDir = paths.blob({
					config: ctx.config,
					blobId: typedOp.blobId
				});
				if (!(yield* ctx.fs.exists(blobDir))) {
					sendAck({
						session,
						ack: makeErrorAck({
							requestId: event.requestId,
							sessionId: event.sessionId,
							_tag: "NotFoundError",
							message: `Blob ${typedOp.blobId} not found`
						})
					});
					return;
				}
				if (ctx.config.checkReferences) {
					const root = yield* ctx.rootCache.read();
					if (JSON.stringify(root).includes(typedOp.blobId)) {
						sendAck({
							session,
							ack: makeErrorAck({
								requestId: event.requestId,
								sessionId: event.sessionId,
								_tag: "ReferenceExistsError",
								message: `Blob ${typedOp.blobId} still has references`
							})
						});
						return;
					}
				}
				yield* ctx.fs.remove(blobDir, { recursive: true });
				sendAck({
					session,
					ack: makeAck({
						requestId: event.requestId,
						sessionId: event.sessionId
					})
				});
				broadcastWrite({
					sessions: yield* Ref.get(ctx.sessionsRef),
					excludeSessionId: event.sessionId,
					op: event.op
				});
			}));
			return;
		}
	}
});
1247
+ //#endregion
1248
+ export { handleErrnoException as _, createBlob as a, makeErrorAck as c, readJsonFile as d, readJsonlFile as f, layer as g, writeJsonFile as h, cleanupStaleTmpFiles as i, paths as l, validateSession as m, makeRootCache as n, createCollection as o, sendAck as p, broadcastDbUpdate as r, makeAck as s, handleWrite as t, readCollectionItemRange as u, FileSystem as v, BadArgument as y };