@f0rbit/corpus 0.1.4 → 0.1.6

This diff compares the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (62)
  1. package/dist/backend/cloudflare.d.ts +1 -1
  2. package/dist/backend/cloudflare.d.ts.map +1 -1
  3. package/dist/backend/cloudflare.js +168 -58
  4. package/dist/backend/file.d.ts +1 -1
  5. package/dist/backend/file.d.ts.map +1 -1
  6. package/dist/backend/file.js +65 -67
  7. package/dist/backend/layered.d.ts.map +1 -1
  8. package/dist/backend/layered.js +67 -19
  9. package/dist/backend/memory.d.ts +2 -1
  10. package/dist/backend/memory.d.ts.map +1 -1
  11. package/dist/backend/memory.js +39 -53
  12. package/dist/corpus.d.ts +11 -0
  13. package/dist/corpus.d.ts.map +1 -1
  14. package/dist/corpus.js +52 -0
  15. package/dist/index.d.ts +2 -1
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/index.js +1 -0
  18. package/dist/observations/client.d.ts +12 -0
  19. package/dist/observations/client.d.ts.map +1 -0
  20. package/dist/observations/client.js +115 -0
  21. package/dist/observations/index.d.ts +12 -0
  22. package/dist/observations/index.d.ts.map +1 -0
  23. package/dist/observations/index.js +11 -0
  24. package/dist/observations/schema.d.ts +267 -0
  25. package/dist/observations/schema.d.ts.map +1 -0
  26. package/dist/observations/schema.js +55 -0
  27. package/dist/observations/storage.d.ts +75 -0
  28. package/dist/observations/storage.d.ts.map +1 -0
  29. package/dist/observations/storage.js +137 -0
  30. package/dist/observations/types.d.ts +219 -0
  31. package/dist/observations/types.d.ts.map +1 -0
  32. package/dist/observations/types.js +40 -0
  33. package/dist/observations/utils.d.ts +183 -0
  34. package/dist/observations/utils.d.ts.map +1 -0
  35. package/dist/observations/utils.js +272 -0
  36. package/dist/sst.d.ts +1 -1
  37. package/dist/sst.d.ts.map +1 -1
  38. package/dist/sst.js +20 -0
  39. package/dist/types.d.ts +61 -0
  40. package/dist/types.d.ts.map +1 -1
  41. package/dist/utils.d.ts +38 -1
  42. package/dist/utils.d.ts.map +1 -1
  43. package/dist/utils.js +84 -0
  44. package/package.json +1 -1
  45. package/dist/codec.d.ts +0 -9
  46. package/dist/codec.d.ts.map +0 -1
  47. package/dist/codec.js +0 -21
  48. package/dist/codecs.d.ts +0 -8
  49. package/dist/codecs.d.ts.map +0 -1
  50. package/dist/codecs.js +0 -6
  51. package/dist/core.d.ts +0 -9
  52. package/dist/core.d.ts.map +0 -1
  53. package/dist/core.js +0 -7
  54. package/dist/hash.d.ts +0 -2
  55. package/dist/hash.d.ts.map +0 -1
  56. package/dist/hash.js +0 -5
  57. package/dist/store.d.ts +0 -3
  58. package/dist/store.d.ts.map +0 -1
  59. package/dist/store.js +0 -125
  60. package/dist/version.d.ts +0 -7
  61. package/dist/version.d.ts.map +0 -1
  62. package/dist/version.js +0 -31
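
The bulk of this release is the new observations module (entries 18–35 above): each backend now builds an observations storage adapter and returns an `observations` client alongside `metadata` and `data`, while the old codec/core/hash/store/version modules are removed (entries 45–62). The sketch below summarizes the storage contract as inferred from the compiled JavaScript in this diff; it is not the package's published typings, and names such as `ObservationRow`, `ObservationQuery`, and the `Result` shape are assumptions.

    // Shapes inferred from the compiled output below (hypothetical typings).
    // The package's real Result type and timestamp representation are assumptions.
    type Result<T, E> = { ok: true; value: T } | { ok: false; error: E };
    type StorageError = { kind: "storage_error"; cause: unknown; operation: string };

    type ObservationRow = {
      id: string;
      type: string;
      source_store_id: string;
      source_version: string;
      source_path?: string;
      created_at: number; // epoch millis assumed
      observed_at: number;
    };

    type ObservationQuery = {
      type?: string | string[];   // exact match, or inArray() for lists
      source_store_id?: string;
      source_version?: string;
      source_prefix?: string;     // LIKE `${prefix}%` on source_version
      created_after?: number;
      created_before?: number;
      observed_after?: number;
      observed_before?: number;
      limit?: number;
    };

    // Contract each backend implements and hands to create_observations_client.
    type ObservationsStorage = {
      put_row(row: ObservationRow): Promise<Result<ObservationRow, StorageError>>;
      get_row(id: string): Promise<Result<ObservationRow | null, StorageError>>;
      query_rows(opts?: ObservationQuery): AsyncIterable<ObservationRow>;
      delete_row(id: string): Promise<Result<boolean, StorageError>>;
      delete_by_source(store_id: string, version: string, path?: string): Promise<Result<number, StorageError>>;
    };
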
@@ -2,7 +2,7 @@
  * @module Backends
  * @description Cloudflare Workers storage backend using D1 and R2.
  */
- import type { Backend, EventHandler } from '../types';
+ import type { Backend, EventHandler } from "../types";
  type D1Database = {
      prepare: (sql: string) => unknown;
  };
@@ -1 +1 @@
- {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../../backend/cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAI3I,KAAK,UAAU,GAAG;IAAE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAA;CAAE,CAAA;AACvD,KAAK,QAAQ,GAAG;IACd,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;QAAC,WAAW,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAA;KAAE,GAAG,IAAI,CAAC,CAAA;IACnH,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IAClF,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IACtC,IAAI,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAC,CAAA;CACvD,CAAA;AAED,MAAM,MAAM,uBAAuB,GAAG;IACpC,EAAE,EAAE,UAAU,CAAA;IACd,EAAE,EAAE,QAAQ,CAAA;IACZ,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CA8PlF"}
+ {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../../backend/cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,OAAO,EAAiE,YAAY,EAAE,MAAM,UAAU,CAAC;AAMrH,KAAK,UAAU,GAAG;IAAE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAA;CAAE,CAAC;AACxD,KAAK,QAAQ,GAAG;IACf,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;QAAC,WAAW,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAA;KAAE,GAAG,IAAI,CAAC,CAAC;IACpH,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACnF,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACvC,IAAI,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAC,CAAC;CACvD,CAAC;AAEF,MAAM,MAAM,uBAAuB,GAAG;IACrC,EAAE,EAAE,UAAU,CAAC;IACf,EAAE,EAAE,QAAQ,CAAC;IACb,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AA6IF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CAmOlF"}
@@ -2,10 +2,138 @@
  * @module Backends
  * @description Cloudflare Workers storage backend using D1 and R2.
  */
- import { eq, and, desc, lt, gt, like, sql } from 'drizzle-orm';
- import { drizzle } from 'drizzle-orm/d1';
- import { ok, err } from '../types';
- import { corpus_snapshots } from '../schema';
+ import { eq, and, desc, lt, gt, like, sql, inArray } from "drizzle-orm";
+ import { drizzle } from "drizzle-orm/d1";
+ import { create_emitter, parse_snapshot_meta } from "../utils";
+ import { ok, err } from "../types";
+ import { corpus_snapshots } from "../schema";
+ import { corpus_observations, create_observations_client } from "../observations";
+ function create_cloudflare_storage(db) {
+     return {
+         async put_row(row) {
+             try {
+                 await db.insert(corpus_observations).values(row);
+                 return ok(row);
+             }
+             catch (cause) {
+                 return err({
+                     kind: "storage_error",
+                     cause: cause,
+                     operation: "observations.put"
+                 });
+             }
+         },
+         async get_row(id) {
+             try {
+                 const rows = await db
+                     .select()
+                     .from(corpus_observations)
+                     .where(eq(corpus_observations.id, id))
+                     .limit(1);
+                 return ok(rows[0] ?? null);
+             }
+             catch (cause) {
+                 return err({
+                     kind: "storage_error",
+                     cause: cause,
+                     operation: "observations.get"
+                 });
+             }
+         },
+         async *query_rows(opts = {}) {
+             const conditions = [];
+             if (opts.type) {
+                 if (Array.isArray(opts.type)) {
+                     conditions.push(inArray(corpus_observations.type, opts.type));
+                 }
+                 else {
+                     conditions.push(eq(corpus_observations.type, opts.type));
+                 }
+             }
+             if (opts.source_store_id) {
+                 conditions.push(eq(corpus_observations.source_store_id, opts.source_store_id));
+             }
+             if (opts.source_version) {
+                 conditions.push(eq(corpus_observations.source_version, opts.source_version));
+             }
+             if (opts.source_prefix) {
+                 conditions.push(like(corpus_observations.source_version, `${opts.source_prefix}%`));
+             }
+             if (opts.created_after) {
+                 conditions.push(gt(corpus_observations.created_at, opts.created_after));
+             }
+             if (opts.created_before) {
+                 conditions.push(lt(corpus_observations.created_at, opts.created_before));
+             }
+             if (opts.observed_after) {
+                 conditions.push(gt(corpus_observations.observed_at, opts.observed_after));
+             }
+             if (opts.observed_before) {
+                 conditions.push(lt(corpus_observations.observed_at, opts.observed_before));
+             }
+             let query = db
+                 .select()
+                 .from(corpus_observations)
+                 .where(conditions.length > 0 ? and(...conditions) : undefined)
+                 .orderBy(desc(corpus_observations.created_at));
+             if (opts.limit) {
+                 query = query.limit(opts.limit);
+             }
+             const rows = await query;
+             for (const row of rows) {
+                 yield row;
+             }
+         },
+         async delete_row(id) {
+             try {
+                 const existing = await db
+                     .select()
+                     .from(corpus_observations)
+                     .where(eq(corpus_observations.id, id))
+                     .limit(1);
+                 if (existing.length === 0) {
+                     return ok(false);
+                 }
+                 await db.delete(corpus_observations).where(eq(corpus_observations.id, id));
+                 return ok(true);
+             }
+             catch (cause) {
+                 return err({
+                     kind: "storage_error",
+                     cause: cause,
+                     operation: "observations.delete"
+                 });
+             }
+         },
+         async delete_by_source(store_id, version, path) {
+             try {
+                 const conditions = [
+                     eq(corpus_observations.source_store_id, store_id),
+                     eq(corpus_observations.source_version, version)
+                 ];
+                 if (path !== undefined) {
+                     conditions.push(eq(corpus_observations.source_path, path));
+                 }
+                 const toDelete = await db
+                     .select()
+                     .from(corpus_observations)
+                     .where(and(...conditions));
+                 const count = toDelete.length;
+                 if (count > 0) {
+                     await db.delete(corpus_observations).where(and(...conditions));
+                 }
+                 return ok(count);
+             }
+             catch (cause) {
+                 return err({
+                     kind: "storage_error",
+                     cause: cause,
+                     operation: "observations.delete_by_source"
+                 });
+             }
+         }
+     };
+ }
  /**
   * Creates a Cloudflare Workers storage backend using D1 and R2.
   * @category Backends
@@ -45,22 +173,9 @@ import { corpus_snapshots } from '../schema';
  export function create_cloudflare_backend(config) {
      const db = drizzle(config.d1);
      const { r2, on_event } = config;
-     function emit(event) {
-         on_event?.(event);
-     }
-     function row_to_meta(row) {
-         return {
-             store_id: row.store_id,
-             version: row.version,
-             parents: JSON.parse(row.parents),
-             created_at: new Date(row.created_at),
-             invoked_at: row.invoked_at ? new Date(row.invoked_at) : undefined,
-             content_hash: row.content_hash,
-             content_type: row.content_type,
-             size_bytes: row.size_bytes,
-             data_key: row.data_key,
-             tags: row.tags ? JSON.parse(row.tags) : undefined,
-         };
+     const emit = create_emitter(on_event);
+     function snapshot_row_to_meta(row) {
+         return parse_snapshot_meta(row);
      }
      const metadata = {
          async get(store_id, version) {
@@ -71,15 +186,15 @@ export function create_cloudflare_backend(config) {
                    .where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.version, version)))
                    .limit(1);
                const row = rows[0];
-             emit({ type: 'meta_get', store_id, version, found: !!row });
+             emit({ type: "meta_get", store_id, version, found: !!row });
                if (!row) {
-                 return err({ kind: 'not_found', store_id, version });
+                 return err({ kind: "not_found", store_id, version });
                }
-             return ok(row_to_meta(row));
+             return ok(snapshot_row_to_meta(row));
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'metadata.get' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "metadata.get" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
@@ -112,26 +227,24 @@ export function create_cloudflare_backend(config) {
                        tags: meta.tags ? JSON.stringify(meta.tags) : null,
                    },
                });
-             emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+             emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
                return ok(undefined);
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'metadata.put' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "metadata.put" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
        async delete(store_id, version) {
            try {
-             await db
-                 .delete(corpus_snapshots)
-                 .where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.version, version)));
-             emit({ type: 'meta_delete', store_id, version });
+             await db.delete(corpus_snapshots).where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.version, version)));
+             emit({ type: "meta_delete", store_id, version });
                return ok(undefined);
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'metadata.delete' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "metadata.delete" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
@@ -154,32 +267,27 @@ export function create_cloudflare_backend(config) {
            const rows = await query;
            let count = 0;
            for (const row of rows) {
-             const meta = row_to_meta(row);
-             if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t))) {
+             const meta = snapshot_row_to_meta(row);
+             if (opts?.tags?.length && !opts.tags.every(t => meta.tags?.includes(t))) {
                    continue;
                }
                yield meta;
                count++;
            }
-         emit({ type: 'meta_list', store_id, count });
+         emit({ type: "meta_list", store_id, count });
        },
        async get_latest(store_id) {
            try {
-             const rows = await db
-                 .select()
-                 .from(corpus_snapshots)
-                 .where(eq(corpus_snapshots.store_id, store_id))
-                 .orderBy(desc(corpus_snapshots.created_at))
-                 .limit(1);
+             const rows = await db.select().from(corpus_snapshots).where(eq(corpus_snapshots.store_id, store_id)).orderBy(desc(corpus_snapshots.created_at)).limit(1);
                const row = rows[0];
                if (!row) {
-                 return err({ kind: 'not_found', store_id, version: 'latest' });
+                 return err({ kind: "not_found", store_id, version: "latest" });
                }
-             return ok(row_to_meta(row));
+             return ok(snapshot_row_to_meta(row));
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'metadata.get_latest' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "metadata.get_latest" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
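
Note the behavioral change in the hunk above: 0.1.4 kept a snapshot that matched any of the requested tags (`some`), while 0.1.6 keeps only snapshots that match every requested tag (`every`). A hypothetical illustration:

    // Hypothetical tag values illustrating the stricter 0.1.6 filter.
    const requested = ["draft", "reviewed"];
    const snapshot_tags = ["draft"];
    requested.some(t => snapshot_tags.includes(t));  // 0.1.4: true  -> listed
    requested.every(t => snapshot_tags.includes(t)); // 0.1.6: false -> skipped
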
@@ -193,7 +301,7 @@ export function create_cloudflare_backend(config) {
              AND json_extract(value, '$.version') = ${parent_version}
          )`);
            for (const row of rows) {
-             yield row_to_meta(row);
+             yield snapshot_row_to_meta(row);
            }
        },
        async find_by_hash(store_id, content_hash) {
@@ -204,7 +312,7 @@ export function create_cloudflare_backend(config) {
                .where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.content_hash, content_hash)))
                .limit(1);
            const row = rows[0];
-         return row ? row_to_meta(row) : null;
+         return row ? snapshot_row_to_meta(row) : null;
        }
        catch {
            return null;
@@ -215,9 +323,9 @@ export function create_cloudflare_backend(config) {
        async get(data_key) {
            try {
                const object = await r2.get(data_key);
-             emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found: !!object });
+             emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found: !!object });
                if (!object) {
-                 return err({ kind: 'not_found', store_id: data_key, version: '' });
+                 return err({ kind: "not_found", store_id: data_key, version: "" });
                }
                return ok({
                    stream: () => object.body,
@@ -225,8 +333,8 @@ export function create_cloudflare_backend(config) {
                });
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'data.get' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "data.get" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
@@ -236,8 +344,8 @@ export function create_cloudflare_backend(config) {
                return ok(undefined);
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'data.put' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "data.put" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
@@ -247,8 +355,8 @@ export function create_cloudflare_backend(config) {
                return ok(undefined);
            }
            catch (cause) {
-             const error = { kind: 'storage_error', cause: cause, operation: 'data.delete' };
-             emit({ type: 'error', error });
+             const error = { kind: "storage_error", cause: cause, operation: "data.delete" };
+             emit({ type: "error", error });
                return err(error);
            }
        },
@@ -262,5 +370,7 @@ export function create_cloudflare_backend(config) {
            }
        },
    };
-     return { metadata, data, on_event };
+     const storage = create_cloudflare_storage(db);
+     const observations = create_observations_client(storage, metadata);
+     return { metadata, data, observations, on_event };
  }
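
With the storage adapter wired in, `create_cloudflare_backend` now returns `observations` next to `metadata` and `data`. A minimal Worker sketch, assuming the root export and using placeholder binding names (`DB`, `BUCKET`):

    // Hypothetical wiring sketch; import path and binding names are assumptions
    // based on the dist/ layout in this diff, not documented API.
    import { create_cloudflare_backend } from "@f0rbit/corpus";

    type Env = { DB: any; BUCKET: any }; // D1 and R2 bindings

    export default {
      async fetch(_req: Request, env: Env): Promise<Response> {
        const { metadata, data, observations } = create_cloudflare_backend({
          d1: env.DB,
          r2: env.BUCKET,
          on_event: (event) => console.log(event.type), // optional event hook
        });
        void metadata;
        void data;
        // New in 0.1.6: `observations` sits alongside `metadata` and `data`.
        return new Response(observations ? "observations wired" : "missing");
      },
    };
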
@@ -2,7 +2,7 @@
  * @module Backends
  * @description File-system storage backend for local persistence.
  */
- import type { Backend, EventHandler } from '../types';
+ import type { Backend, EventHandler } from "../types";
  export type FileBackendConfig = {
      base_path: string;
      on_event?: EventHandler;
@@ -1 +1 @@
- {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAK3I,MAAM,MAAM,iBAAiB,GAAG;IAC9B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAqMtE"}
+ {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAiE,YAAY,EAAE,MAAM,UAAU,CAAC;AAQrH,MAAM,MAAM,iBAAiB,GAAG;IAC/B,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CA6MtE"}
@@ -2,9 +2,11 @@
  * @module Backends
  * @description File-system storage backend for local persistence.
  */
- import { ok, err } from '../types';
- import { mkdir, readdir } from 'node:fs/promises';
- import { join, dirname } from 'node:path';
+ import { create_observations_client, create_observations_storage } from "../observations";
+ import { ok, err } from "../types";
+ import { to_bytes, create_emitter, filter_snapshots, parse_snapshot_meta } from "../utils";
+ import { mkdir, readdir } from "node:fs/promises";
+ import { join, dirname } from "node:path";
  /**
   * Creates a file-system storage backend for local persistence.
   * @category Backends
@@ -38,29 +40,22 @@ import { join, dirname } from 'node:path';
   */
  export function create_file_backend(config) {
      const { base_path, on_event } = config;
-     function emit(event) {
-         on_event?.(event);
-     }
+     const emit = create_emitter(on_event);
      function meta_path(store_id) {
-         return join(base_path, store_id, '_meta.json');
+         return join(base_path, store_id, "_meta.json");
      }
      function data_path(data_key) {
-         return join(base_path, '_data', `${data_key.replace(/\//g, '_')}.bin`);
+         return join(base_path, "_data", `${data_key.replace(/\//g, "_")}.bin`);
      }
      async function read_store_meta(store_id) {
          const path = meta_path(store_id);
          const file = Bun.file(path);
-         if (!await file.exists())
+         if (!(await file.exists()))
              return new Map();
          try {
              const content = await file.text();
-             const entries = JSON.parse(content, (key, value) => {
-                 if (key === 'created_at' || key === 'invoked_at') {
-                     return value ? new Date(value) : value;
-                 }
-                 return value;
-             });
-             return new Map(entries);
+             const entries = JSON.parse(content);
+             return new Map(entries.map(([key, raw]) => [key, parse_snapshot_meta(raw)]));
          }
          catch {
              return new Map();
@@ -76,9 +71,9 @@ export function create_file_backend(config) {
        async get(store_id, version) {
            const store_meta = await read_store_meta(store_id);
            const meta = store_meta.get(version);
-         emit({ type: 'meta_get', store_id, version, found: !!meta });
+         emit({ type: "meta_get", store_id, version, found: !!meta });
            if (!meta) {
-             return err({ kind: 'not_found', store_id, version });
+             return err({ kind: "not_found", store_id, version });
            }
            return ok(meta);
        },
@@ -86,36 +81,25 @@ export function create_file_backend(config) {
            const store_meta = await read_store_meta(meta.store_id);
            store_meta.set(meta.version, meta);
            await write_store_meta(meta.store_id, store_meta);
-         emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+         emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
            return ok(undefined);
        },
        async delete(store_id, version) {
            const store_meta = await read_store_meta(store_id);
            store_meta.delete(version);
            await write_store_meta(store_id, store_meta);
-         emit({ type: 'meta_delete', store_id, version });
+         emit({ type: "meta_delete", store_id, version });
            return ok(undefined);
        },
        async *list(store_id, opts) {
            const store_meta = await read_store_meta(store_id);
-         const matches = Array.from(store_meta.values())
-             .filter(meta => {
-                 if (opts?.before && meta.created_at >= opts.before)
-                     return false;
-                 if (opts?.after && meta.created_at <= opts.after)
-                     return false;
-                 if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t)))
-                     return false;
-                 return true;
-             })
-             .sort((a, b) => b.created_at.getTime() - a.created_at.getTime());
-         const limit = opts?.limit ?? Infinity;
+         const filtered = filter_snapshots(Array.from(store_meta.values()), opts);
            let count = 0;
-         for (const meta of matches.slice(0, limit)) {
+         for (const meta of filtered) {
                yield meta;
                count++;
            }
-         emit({ type: 'meta_list', store_id, count });
+         emit({ type: "meta_list", store_id, count });
        },
        async get_latest(store_id) {
            const store_meta = await read_store_meta(store_id);
@@ -126,7 +110,7 @@ export function create_file_backend(config) {
                }
            }
            if (!latest) {
-             return err({ kind: 'not_found', store_id, version: 'latest' });
+             return err({ kind: "not_found", store_id, version: "latest" });
            }
            return ok(latest);
        },
@@ -134,7 +118,7 @@ export function create_file_backend(config) {
            try {
                const entries = await readdir(base_path, { withFileTypes: true });
                for (const entry of entries) {
-                 if (!entry.isDirectory() || entry.name.startsWith('_'))
+                 if (!entry.isDirectory() || entry.name.startsWith("_"))
                        continue;
                    const store_meta = await read_store_meta(entry.name);
                    for (const meta of store_meta.values()) {
@@ -144,8 +128,7 @@ export function create_file_backend(config) {
                    }
                }
            }
-         catch {
-         }
+         catch { }
        },
        async find_by_hash(store_id, content_hash) {
            const store_meta = await read_store_meta(store_id);
@@ -162,9 +145,9 @@ export function create_file_backend(config) {
            const path = data_path(data_key);
            const file = Bun.file(path);
            const found = await file.exists();
-         emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found });
+         emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found });
            if (!found) {
-             return err({ kind: 'not_found', store_id: data_key, version: '' });
+             return err({ kind: "not_found", store_id: data_key, version: "" });
            }
            return ok({
                stream: () => file.stream(),
@@ -175,25 +158,12 @@ export function create_file_backend(config) {
            const path = data_path(data_key);
            await mkdir(dirname(path), { recursive: true });
            try {
-             if (input instanceof Uint8Array) {
-                 await Bun.write(path, input);
-             }
-             else {
-                 const chunks = [];
-                 const reader = input.getReader();
-                 while (true) {
-                     const { done, value } = await reader.read();
-                     if (done)
-                         break;
-                     chunks.push(value);
-                 }
-                 const bytes = concat_bytes(chunks);
-                 await Bun.write(path, bytes);
-             }
+             const bytes = await to_bytes(input);
+             await Bun.write(path, bytes);
                return ok(undefined);
            }
            catch (cause) {
-             return err({ kind: 'storage_error', cause: cause, operation: 'put' });
+             return err({ kind: "storage_error", cause: cause, operation: "put" });
            }
        },
        async delete(data_key) {
@@ -206,7 +176,7 @@ export function create_file_backend(config) {
                return ok(undefined);
            }
            catch (cause) {
-             return err({ kind: 'storage_error', cause: cause, operation: 'delete' });
+             return err({ kind: "storage_error", cause: cause, operation: "delete" });
            }
        },
        async exists(data_key) {
@@ -215,15 +185,43 @@ export function create_file_backend(config) {
            return file.exists();
        },
    };
-     return { metadata, data, on_event };
- }
- function concat_bytes(chunks) {
-     const total = chunks.reduce((sum, c) => sum + c.length, 0);
-     const result = new Uint8Array(total);
-     let offset = 0;
-     for (const chunk of chunks) {
-         result.set(chunk, offset);
-         offset += chunk.length;
+     const file_path = join(base_path, "_observations.json");
+     async function read_observations() {
+         const file = Bun.file(file_path);
+         if (!(await file.exists()))
+             return [];
+         try {
+             return await file.json();
+         }
+         catch {
+             return [];
+         }
      }
-     return result;
+     async function write_observations(rows) {
+         await Bun.write(file_path, JSON.stringify(rows, null, 2));
+     }
+     const storage = create_observations_storage({
+         get_all: read_observations,
+         set_all: write_observations,
+         get_one: async (id) => {
+             const rows = await read_observations();
+             return rows.find(r => r.id === id) ?? null;
+         },
+         add_one: async (row) => {
+             const rows = await read_observations();
+             rows.push(row);
+             await write_observations(rows);
+         },
+         remove_one: async (id) => {
+             const rows = await read_observations();
+             const idx = rows.findIndex(r => r.id === id);
+             if (idx === -1)
+                 return false;
+             rows.splice(idx, 1);
+             await write_observations(rows);
+             return true;
+         }
+     });
+     const observations = create_observations_client(storage, metadata);
+     return { metadata, data, observations, on_event };
  }
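
The file backend takes a simpler route: observation rows persist as one pretty-printed JSON array at `<base_path>/_observations.json`, re-read and rewritten in full on every `add_one`/`remove_one`. That is fine for local development, but each mutation is linear in the number of rows and not safe under concurrent writers. A minimal usage sketch, assuming the root export:

    // Minimal usage sketch; import path assumed from the dist/ layout.
    import { create_file_backend } from "@f0rbit/corpus";

    const backend = create_file_backend({ base_path: "./corpus-data" });
    // Snapshot metadata lives under ./corpus-data/<store_id>/_meta.json;
    // observation rows land in ./corpus-data/_observations.json as one array.
    const { observations } = backend;
    void observations;
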
@@ -1 +1 @@
- {"version":3,"file":"layered.d.ts","sourceRoot":"","sources":["../../backend/layered.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAA6E,MAAM,UAAU,CAAA;AAGlH,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,OAAO,EAAE,CAAA;IACf,KAAK,EAAE,OAAO,EAAE,CAAA;IAChB,aAAa,CAAC,EAAE,OAAO,GAAG,OAAO,CAAA;CAClC,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,wBAAgB,sBAAsB,CAAC,OAAO,EAAE,qBAAqB,GAAG,OAAO,CA4I9E"}
+ {"version":3,"file":"layered.d.ts","sourceRoot":"","sources":["../../backend/layered.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAiG,MAAM,UAAU,CAAA;AAItI,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,OAAO,EAAE,CAAA;IACf,KAAK,EAAE,OAAO,EAAE,CAAA;IAChB,aAAa,CAAC,EAAE,OAAO,GAAG,OAAO,CAAA;CAClC,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,wBAAgB,sBAAsB,CAAC,OAAO,EAAE,qBAAqB,GAAG,OAAO,CAkJ9E"}