@f0rbit/corpus 0.1.4 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/backend/cloudflare.d.ts CHANGED
@@ -2,7 +2,7 @@
  * @module Backends
  * @description Cloudflare Workers storage backend using D1 and R2.
  */
- import type { Backend, EventHandler } from '../types';
+ import type { Backend, EventHandler } from "../types";
  type D1Database = {
  prepare: (sql: string) => unknown;
  };
package/dist/backend/cloudflare.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../../backend/cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAI3I,KAAK,UAAU,GAAG;IAAE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAA;CAAE,CAAA;AACvD,KAAK,QAAQ,GAAG;IACd,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;QAAC,WAAW,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAA;KAAE,GAAG,IAAI,CAAC,CAAA;IACnH,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IAClF,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IACtC,IAAI,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAC,CAAA;CACvD,CAAA;AAED,MAAM,MAAM,uBAAuB,GAAG;IACpC,EAAE,EAAE,UAAU,CAAA;IACd,EAAE,EAAE,QAAQ,CAAA;IACZ,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CA8PlF"}
+ {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../../backend/cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAC;AAI5I,KAAK,UAAU,GAAG;IAAE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAA;CAAE,CAAC;AACxD,KAAK,QAAQ,GAAG;IACf,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;QAAC,WAAW,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAA;KAAE,GAAG,IAAI,CAAC,CAAC;IACpH,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACnF,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACvC,IAAI,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAC,CAAC;CACvD,CAAC;AAEF,MAAM,MAAM,uBAAuB,GAAG;IACrC,EAAE,EAAE,UAAU,CAAC;IACf,EAAE,EAAE,QAAQ,CAAC;IACb,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CA8OlF"}
package/dist/backend/cloudflare.js CHANGED
@@ -2,10 +2,10 @@
  * @module Backends
  * @description Cloudflare Workers storage backend using D1 and R2.
  */
- import { eq, and, desc, lt, gt, like, sql } from 'drizzle-orm';
- import { drizzle } from 'drizzle-orm/d1';
- import { ok, err } from '../types';
- import { corpus_snapshots } from '../schema';
+ import { eq, and, desc, lt, gt, like, sql } from "drizzle-orm";
+ import { drizzle } from "drizzle-orm/d1";
+ import { ok, err } from "../types";
+ import { corpus_snapshots } from "../schema";
  /**
  * Creates a Cloudflare Workers storage backend using D1 and R2.
  * @category Backends
@@ -71,15 +71,15 @@ export function create_cloudflare_backend(config) {
  .where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.version, version)))
  .limit(1);
  const row = rows[0];
- emit({ type: 'meta_get', store_id, version, found: !!row });
+ emit({ type: "meta_get", store_id, version, found: !!row });
  if (!row) {
- return err({ kind: 'not_found', store_id, version });
+ return err({ kind: "not_found", store_id, version });
  }
  return ok(row_to_meta(row));
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'metadata.get' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "metadata.get" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
@@ -112,26 +112,24 @@ export function create_cloudflare_backend(config) {
  tags: meta.tags ? JSON.stringify(meta.tags) : null,
  },
  });
- emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+ emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
  return ok(undefined);
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'metadata.put' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "metadata.put" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
  async delete(store_id, version) {
  try {
- await db
- .delete(corpus_snapshots)
- .where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.version, version)));
- emit({ type: 'meta_delete', store_id, version });
+ await db.delete(corpus_snapshots).where(and(eq(corpus_snapshots.store_id, store_id), eq(corpus_snapshots.version, version)));
+ emit({ type: "meta_delete", store_id, version });
  return ok(undefined);
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'metadata.delete' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "metadata.delete" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
@@ -155,31 +153,26 @@ export function create_cloudflare_backend(config) {
  let count = 0;
  for (const row of rows) {
  const meta = row_to_meta(row);
- if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t))) {
+ if (opts?.tags?.length && !opts.tags.every(t => meta.tags?.includes(t))) {
  continue;
  }
  yield meta;
  count++;
  }
- emit({ type: 'meta_list', store_id, count });
+ emit({ type: "meta_list", store_id, count });
  },
  async get_latest(store_id) {
  try {
- const rows = await db
- .select()
- .from(corpus_snapshots)
- .where(eq(corpus_snapshots.store_id, store_id))
- .orderBy(desc(corpus_snapshots.created_at))
- .limit(1);
+ const rows = await db.select().from(corpus_snapshots).where(eq(corpus_snapshots.store_id, store_id)).orderBy(desc(corpus_snapshots.created_at)).limit(1);
  const row = rows[0];
  if (!row) {
- return err({ kind: 'not_found', store_id, version: 'latest' });
+ return err({ kind: "not_found", store_id, version: "latest" });
  }
  return ok(row_to_meta(row));
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'metadata.get_latest' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "metadata.get_latest" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
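
Note: the some → every switch in list() above is the behavioral change in this release, and it is applied identically in the file and memory backends below. Tag filtering moves from any-match to all-match semantics. A minimal TypeScript sketch of the difference (the metadata shape here is illustrative, not the library's full snapshot type):

const meta = { tags: ["nightly", "prod"] };
const requested = ["nightly", "staging"];
// 0.1.4 behavior: keep a snapshot if ANY requested tag is present
requested.some(t => meta.tags?.includes(t));  // true ("nightly" matches)
// 0.1.5 behavior: keep a snapshot only if EVERY requested tag is present
requested.every(t => meta.tags?.includes(t)); // false ("staging" is missing)

In practice, list({ tags: [...] }) now narrows results as tags are added instead of widening them.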
@@ -215,9 +208,9 @@ export function create_cloudflare_backend(config) {
  async get(data_key) {
  try {
  const object = await r2.get(data_key);
- emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found: !!object });
+ emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found: !!object });
  if (!object) {
- return err({ kind: 'not_found', store_id: data_key, version: '' });
+ return err({ kind: "not_found", store_id: data_key, version: "" });
  }
  return ok({
  stream: () => object.body,
@@ -225,8 +218,8 @@ export function create_cloudflare_backend(config) {
  });
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'data.get' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "data.get" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
@@ -236,8 +229,8 @@ export function create_cloudflare_backend(config) {
  return ok(undefined);
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'data.put' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "data.put" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
@@ -247,8 +240,8 @@ export function create_cloudflare_backend(config) {
  return ok(undefined);
  }
  catch (cause) {
- const error = { kind: 'storage_error', cause: cause, operation: 'data.delete' };
- emit({ type: 'error', error });
+ const error = { kind: "storage_error", cause: cause, operation: "data.delete" };
+ emit({ type: "error", error });
  return err(error);
  }
  },
package/dist/backend/file.d.ts CHANGED
@@ -2,7 +2,7 @@
  * @module Backends
  * @description File-system storage backend for local persistence.
  */
- import type { Backend, EventHandler } from '../types';
+ import type { Backend, EventHandler } from "../types";
  export type FileBackendConfig = {
  base_path: string;
  on_event?: EventHandler;
package/dist/backend/file.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAK3I,MAAM,MAAM,iBAAiB,GAAG;IAC9B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAqMtE"}
+ {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAC;AAK5I,MAAM,MAAM,iBAAiB,GAAG;IAC/B,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAkMtE"}
package/dist/backend/file.js CHANGED
@@ -2,9 +2,9 @@
  * @module Backends
  * @description File-system storage backend for local persistence.
  */
- import { ok, err } from '../types';
- import { mkdir, readdir } from 'node:fs/promises';
- import { join, dirname } from 'node:path';
+ import { ok, err } from "../types";
+ import { mkdir, readdir } from "node:fs/promises";
+ import { join, dirname } from "node:path";
  /**
  * Creates a file-system storage backend for local persistence.
  * @category Backends
@@ -42,20 +42,20 @@ export function create_file_backend(config) {
  on_event?.(event);
  }
  function meta_path(store_id) {
- return join(base_path, store_id, '_meta.json');
+ return join(base_path, store_id, "_meta.json");
  }
  function data_path(data_key) {
- return join(base_path, '_data', `${data_key.replace(/\//g, '_')}.bin`);
+ return join(base_path, "_data", `${data_key.replace(/\//g, "_")}.bin`);
  }
  async function read_store_meta(store_id) {
  const path = meta_path(store_id);
  const file = Bun.file(path);
- if (!await file.exists())
+ if (!(await file.exists()))
  return new Map();
  try {
  const content = await file.text();
  const entries = JSON.parse(content, (key, value) => {
- if (key === 'created_at' || key === 'invoked_at') {
+ if (key === "created_at" || key === "invoked_at") {
  return value ? new Date(value) : value;
  }
  return value;
@@ -76,9 +76,9 @@ export function create_file_backend(config) {
  async get(store_id, version) {
  const store_meta = await read_store_meta(store_id);
  const meta = store_meta.get(version);
- emit({ type: 'meta_get', store_id, version, found: !!meta });
+ emit({ type: "meta_get", store_id, version, found: !!meta });
  if (!meta) {
- return err({ kind: 'not_found', store_id, version });
+ return err({ kind: "not_found", store_id, version });
  }
  return ok(meta);
  },
@@ -86,14 +86,14 @@ export function create_file_backend(config) {
  const store_meta = await read_store_meta(meta.store_id);
  store_meta.set(meta.version, meta);
  await write_store_meta(meta.store_id, store_meta);
- emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+ emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
  return ok(undefined);
  },
  async delete(store_id, version) {
  const store_meta = await read_store_meta(store_id);
  store_meta.delete(version);
  await write_store_meta(store_id, store_meta);
- emit({ type: 'meta_delete', store_id, version });
+ emit({ type: "meta_delete", store_id, version });
  return ok(undefined);
  },
  async *list(store_id, opts) {
@@ -104,7 +104,7 @@ export function create_file_backend(config) {
  return false;
  if (opts?.after && meta.created_at <= opts.after)
  return false;
- if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t)))
+ if (opts?.tags?.length && !opts.tags.every(t => meta.tags?.includes(t)))
  return false;
  return true;
  })
@@ -115,7 +115,7 @@ export function create_file_backend(config) {
  yield meta;
  count++;
  }
- emit({ type: 'meta_list', store_id, count });
+ emit({ type: "meta_list", store_id, count });
  },
  async get_latest(store_id) {
  const store_meta = await read_store_meta(store_id);
@@ -126,7 +126,7 @@ export function create_file_backend(config) {
  }
  }
  if (!latest) {
- return err({ kind: 'not_found', store_id, version: 'latest' });
+ return err({ kind: "not_found", store_id, version: "latest" });
  }
  return ok(latest);
  },
@@ -134,7 +134,7 @@ export function create_file_backend(config) {
  try {
  const entries = await readdir(base_path, { withFileTypes: true });
  for (const entry of entries) {
- if (!entry.isDirectory() || entry.name.startsWith('_'))
+ if (!entry.isDirectory() || entry.name.startsWith("_"))
  continue;
  const store_meta = await read_store_meta(entry.name);
  for (const meta of store_meta.values()) {
@@ -144,8 +144,7 @@ export function create_file_backend(config) {
  }
  }
  }
- catch {
- }
+ catch { }
  },
  async find_by_hash(store_id, content_hash) {
  const store_meta = await read_store_meta(store_id);
@@ -162,9 +161,9 @@ export function create_file_backend(config) {
  const path = data_path(data_key);
  const file = Bun.file(path);
  const found = await file.exists();
- emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found });
+ emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found });
  if (!found) {
- return err({ kind: 'not_found', store_id: data_key, version: '' });
+ return err({ kind: "not_found", store_id: data_key, version: "" });
  }
  return ok({
  stream: () => file.stream(),
@@ -193,7 +192,7 @@ export function create_file_backend(config) {
  return ok(undefined);
  }
  catch (cause) {
- return err({ kind: 'storage_error', cause: cause, operation: 'put' });
+ return err({ kind: "storage_error", cause: cause, operation: "put" });
  }
  },
  async delete(data_key) {
@@ -206,7 +205,7 @@ export function create_file_backend(config) {
  return ok(undefined);
  }
  catch (cause) {
- return err({ kind: 'storage_error', cause: cause, operation: 'delete' });
+ return err({ kind: "storage_error", cause: cause, operation: "delete" });
  }
  },
  async exists(data_key) {
package/dist/backend/memory.d.ts CHANGED
@@ -2,7 +2,7 @@
  * @module Backends
  * @description In-memory storage backend for testing and development.
  */
- import type { Backend, EventHandler } from '../types';
+ import type { Backend, EventHandler } from "../types";
  export type MemoryBackendOptions = {
  on_event?: EventHandler;
  };
package/dist/backend/memory.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAG3I,MAAM,MAAM,oBAAoB,GAAG;IACjC,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAiJ7E"}
+ {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAC;AAG5I,MAAM,MAAM,oBAAoB,GAAG;IAClC,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAgJ7E"}
package/dist/backend/memory.js CHANGED
@@ -2,7 +2,7 @@
  * @module Backends
  * @description In-memory storage backend for testing and development.
  */
- import { ok, err } from '../types';
+ import { ok, err } from "../types";
  /**
  * Creates an in-memory storage backend.
  * @category Backends
@@ -42,20 +42,20 @@ export function create_memory_backend(options) {
  const metadata = {
  async get(store_id, version) {
  const meta = meta_store.get(make_meta_key(store_id, version));
- emit({ type: 'meta_get', store_id, version, found: !!meta });
+ emit({ type: "meta_get", store_id, version, found: !!meta });
  if (!meta) {
- return err({ kind: 'not_found', store_id, version });
+ return err({ kind: "not_found", store_id, version });
  }
  return ok(meta);
  },
  async put(meta) {
  meta_store.set(make_meta_key(meta.store_id, meta.version), meta);
- emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+ emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
  return ok(undefined);
  },
  async delete(store_id, version) {
  meta_store.delete(make_meta_key(store_id, version));
- emit({ type: 'meta_delete', store_id, version });
+ emit({ type: "meta_delete", store_id, version });
  return ok(undefined);
  },
  async *list(store_id, opts) {
@@ -68,7 +68,7 @@ export function create_memory_backend(options) {
  continue;
  if (opts?.after && meta.created_at <= opts.after)
  continue;
- if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t)))
+ if (opts?.tags?.length && !opts.tags.every(t => meta.tags?.includes(t)))
  continue;
  matches.push(meta);
  }
@@ -79,7 +79,7 @@ export function create_memory_backend(options) {
  yield match;
  count++;
  }
- emit({ type: 'meta_list', store_id, count });
+ emit({ type: "meta_list", store_id, count });
  },
  async get_latest(store_id) {
  let latest = null;
@@ -92,7 +92,7 @@ export function create_memory_backend(options) {
  }
  }
  if (!latest) {
- return err({ kind: 'not_found', store_id, version: 'latest' });
+ return err({ kind: "not_found", store_id, version: "latest" });
  }
  return ok(latest);
  },
@@ -116,16 +116,16 @@ export function create_memory_backend(options) {
  const data = {
  async get(data_key) {
  const bytes = data_store.get(data_key);
- emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found: !!bytes });
+ emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found: !!bytes });
  if (!bytes) {
- return err({ kind: 'not_found', store_id: data_key, version: '' });
+ return err({ kind: "not_found", store_id: data_key, version: "" });
  }
  return ok({
  stream: () => new ReadableStream({
  start(controller) {
  controller.enqueue(bytes);
  controller.close();
- }
+ },
  }),
  bytes: async () => bytes,
  });
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@f0rbit/corpus",
- "version": "0.1.4",
+ "version": "0.1.5",
  "description": "A functional snapshotting library for TypeScript with versioned data storage, lineage tracking, and multiple backend support",
  "module": "dist/index.js",
  "main": "dist/index.js",
package/dist/codec.d.ts DELETED
@@ -1,9 +0,0 @@
- import type { Codec } from "./types";
- type ZodLike<T> = {
- parse: (data: unknown) => T;
- };
- export declare function json_codec<T>(schema: ZodLike<T>): Codec<T>;
- export declare function text_codec(): Codec<string>;
- export declare function binary_codec(): Codec<Uint8Array>;
- export {};
- //# sourceMappingURL=codec.d.ts.map
package/dist/codec.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"codec.d.ts","sourceRoot":"","sources":["../codec.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAGrC,KAAK,OAAO,CAAC,CAAC,IAAI;IAAE,KAAK,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,CAAC,CAAA;CAAE,CAAC;AAElD,wBAAgB,UAAU,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAM1D;AAED,wBAAgB,UAAU,IAAI,KAAK,CAAC,MAAM,CAAC,CAM1C;AAED,wBAAgB,YAAY,IAAI,KAAK,CAAC,UAAU,CAAC,CAMhD"}
package/dist/codec.js DELETED
@@ -1,21 +0,0 @@
- export function json_codec(schema) {
- return {
- content_type: "application/json",
- encode: (value) => new TextEncoder().encode(JSON.stringify(value)),
- decode: (bytes) => schema.parse(JSON.parse(new TextDecoder().decode(bytes))),
- };
- }
- export function text_codec() {
- return {
- content_type: "text/plain",
- encode: (value) => new TextEncoder().encode(value),
- decode: (bytes) => new TextDecoder().decode(bytes),
- };
- }
- export function binary_codec() {
- return {
- content_type: "application/octet-stream",
- encode: (value) => value,
- decode: (bytes) => bytes,
- };
- }
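
The removed codec.js above also documents the Codec shape the library expects ({ content_type, encode, decode }). A custom codec under that same contract might look like the following TypeScript sketch (a hypothetical example, not part of the package):

// hypothetical CSV codec conforming to the shape shown in the deleted codec.js
const csv_codec = {
  content_type: "text/csv",
  // rows of string cells → comma/newline-joined bytes
  encode: (rows: string[][]) => new TextEncoder().encode(rows.map(r => r.join(",")).join("\n")),
  // bytes → rows of string cells (no quoting/escaping handled in this sketch)
  decode: (bytes: Uint8Array) => new TextDecoder().decode(bytes).split("\n").map(line => line.split(",")),
};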
package/dist/hash.d.ts DELETED
@@ -1,2 +0,0 @@
- export declare function compute_hash(data: Uint8Array): Promise<string>;
- //# sourceMappingURL=hash.d.ts.map
package/dist/hash.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../hash.ts"],"names":[],"mappings":"AAAA,wBAAsB,YAAY,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAIpE"}
package/dist/hash.js DELETED
@@ -1,5 +0,0 @@
- export async function compute_hash(data) {
- const hash_buffer = await crypto.subtle.digest('SHA-256', data);
- const hash_array = new Uint8Array(hash_buffer);
- return Array.from(hash_array).map(b => b.toString(16).padStart(2, '0')).join('');
- }
package/dist/store.d.ts DELETED
@@ -1,3 +0,0 @@
- import type { Backend, Store, StoreDefinition } from './types';
- export declare function create_store<T>(backend: Backend, definition: StoreDefinition<string, T>): Store<T>;
- //# sourceMappingURL=store.d.ts.map
package/dist/store.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"store.d.ts","sourceRoot":"","sources":["../store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,eAAe,EAA8C,MAAM,SAAS,CAAA;AAK1G,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CA+IlG"}
package/dist/store.js DELETED
@@ -1,125 +0,0 @@
- import { ok, err } from './types';
- import { compute_hash } from './hash';
- import { generate_version } from './version';
- export function create_store(backend, definition) {
- const { id, codec } = definition;
- function emit(event) {
- backend.on_event?.(event);
- }
- function make_data_key(store_id, content_hash) {
- return `${store_id}/${content_hash}`;
- }
- return {
- id,
- codec,
- async put(data, opts) {
- const version = generate_version();
- let bytes;
- try {
- bytes = codec.encode(data);
- }
- catch (cause) {
- const error = { kind: 'encode_error', cause: cause };
- emit({ type: 'error', error });
- return err(error);
- }
- const content_hash = await compute_hash(bytes);
- // deduplication: reuse existing data_key if content already exists
- const existing = await backend.metadata.find_by_hash(id, content_hash);
- const deduplicated = existing !== null;
- const data_key = deduplicated ? existing.data_key : make_data_key(id, content_hash);
- if (!deduplicated) {
- const data_result = await backend.data.put(data_key, bytes);
- if (!data_result.ok) {
- emit({ type: 'error', error: data_result.error });
- return data_result;
- }
- }
- emit({ type: 'data_put', store_id: id, version, size_bytes: bytes.length, deduplicated });
- const meta = {
- store_id: id,
- version,
- parents: opts?.parents ?? [],
- created_at: new Date(),
- invoked_at: opts?.invoked_at,
- content_hash,
- content_type: codec.content_type,
- size_bytes: bytes.length,
- data_key,
- tags: opts?.tags,
- };
- const meta_result = await backend.metadata.put(meta);
- if (!meta_result.ok) {
- emit({ type: 'error', error: meta_result.error });
- return meta_result;
- }
- emit({ type: 'snapshot_put', store_id: id, version, content_hash, deduplicated });
- return ok(meta);
- },
- async get(version) {
- const meta_result = await backend.metadata.get(id, version);
- if (!meta_result.ok) {
- emit({ type: 'snapshot_get', store_id: id, version, found: false });
- return meta_result;
- }
- const meta = meta_result.value;
- const data_result = await backend.data.get(meta.data_key);
- if (!data_result.ok) {
- emit({ type: 'error', error: data_result.error });
- return data_result;
- }
- const bytes = await data_result.value.bytes();
- let data;
- try {
- data = codec.decode(bytes);
- }
- catch (cause) {
- const error = { kind: 'decode_error', cause: cause };
- emit({ type: 'error', error });
- return err(error);
- }
- emit({ type: 'snapshot_get', store_id: id, version, found: true });
- return ok({ meta, data });
- },
- async get_latest() {
- const meta_result = await backend.metadata.get_latest(id);
- if (!meta_result.ok) {
- return meta_result;
- }
- const meta = meta_result.value;
- const data_result = await backend.data.get(meta.data_key);
- if (!data_result.ok) {
- return data_result;
- }
- const bytes = await data_result.value.bytes();
- let data;
- try {
- data = codec.decode(bytes);
- }
- catch (cause) {
- const error = { kind: 'decode_error', cause: cause };
- emit({ type: 'error', error });
- return err(error);
- }
- return ok({ meta, data });
- },
- async get_meta(version) {
- return backend.metadata.get(id, version);
- },
- list(opts) {
- return backend.metadata.list(id, opts);
- },
- async delete(version) {
- const meta_result = await backend.metadata.get(id, version);
- if (!meta_result.ok) {
- return meta_result;
- }
- const delete_meta_result = await backend.metadata.delete(id, version);
- if (!delete_meta_result.ok) {
- return delete_meta_result;
- }
- emit({ type: 'meta_delete', store_id: id, version });
- return ok(undefined);
- },
- };
- }
package/dist/version.d.ts DELETED
@@ -1,7 +0,0 @@
- /**
- * Generates a unique, sortable version string.
- * Format: base64url-encoded timestamp with optional sequence suffix for same-millisecond calls.
- * Versions sort lexicographically in chronological order.
- */
- export declare function generate_version(): string;
- //# sourceMappingURL=version.d.ts.map
package/dist/version.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"version.d.ts","sourceRoot":"","sources":["../version.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,gBAAgB,IAAI,MAAM,CA0BzC"}
package/dist/version.js DELETED
@@ -1,31 +0,0 @@
- let last_timestamp = 0;
- let sequence = 0;
- /**
- * Generates a unique, sortable version string.
- * Format: base64url-encoded timestamp with optional sequence suffix for same-millisecond calls.
- * Versions sort lexicographically in chronological order.
- */
- export function generate_version() {
- const now = Date.now();
- if (now === last_timestamp) {
- sequence++;
- }
- else {
- last_timestamp = now;
- sequence = 0;
- }
- // base64url encode the timestamp (no padding, url-safe)
- const timestamp_bytes = new Uint8Array(8);
- const view = new DataView(timestamp_bytes.buffer);
- view.setBigUint64(0, BigInt(now), false); // big-endian for lexicographic sorting
- // trim leading zeros for compactness
- let start = 0;
- while (start < 7 && timestamp_bytes[start] === 0)
- start++;
- const trimmed = timestamp_bytes.slice(start);
- const base64 = btoa(String.fromCharCode(...trimmed))
- .replace(/\+/g, '-')
- .replace(/\//g, '_')
- .replace(/=/g, '');
- return sequence > 0 ? `${base64}.${sequence}` : base64;
- }
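
For reference, the deleted generate_version() encodes Date.now() as trimmed big-endian bytes in base64url. A hand-computed trace of the code above, assuming Date.now() returns exactly 1700000000000 (values shown for illustration only):

// 1700000000000 as 8 big-endian bytes: 00 00 01 8b cf e5 68 00
// leading zero bytes trimmed:          01 8b cf e5 68 00
// base64url of those 6 bytes:          "AYvP5WgA"
const first = generate_version();  // "AYvP5WgA"
const second = generate_version(); // "AYvP5WgA.1" if called within the same millisecond
// the unsuffixed string is a prefix of its suffixed sibling, so it sorts first,
// preserving creation order as the JSDoc describes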