@f0rbit/corpus 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/backend/cloudflare.d.ts +41 -1
  2. package/dist/backend/cloudflare.d.ts.map +1 -1
  3. package/dist/backend/cloudflare.js +69 -36
  4. package/dist/backend/file.d.ts +36 -1
  5. package/dist/backend/file.d.ts.map +1 -1
  6. package/dist/backend/file.js +55 -21
  7. package/dist/backend/layered.d.ts +38 -0
  8. package/dist/backend/layered.d.ts.map +1 -1
  9. package/dist/backend/layered.js +38 -0
  10. package/dist/backend/memory.d.ts +31 -1
  11. package/dist/backend/memory.d.ts.map +1 -1
  12. package/dist/backend/memory.js +41 -11
  13. package/dist/backends.d.ts +11 -0
  14. package/dist/backends.d.ts.map +1 -0
  15. package/dist/backends.js +9 -0
  16. package/dist/cloudflare.d.ts +2 -5
  17. package/dist/cloudflare.d.ts.map +1 -1
  18. package/dist/cloudflare.js +2 -5
  19. package/dist/codecs.d.ts +8 -0
  20. package/dist/codecs.d.ts.map +1 -0
  21. package/dist/codecs.js +6 -0
  22. package/dist/core.d.ts +9 -0
  23. package/dist/core.d.ts.map +1 -0
  24. package/dist/core.js +7 -0
  25. package/dist/corpus.d.ts +68 -1
  26. package/dist/corpus.d.ts.map +1 -1
  27. package/dist/corpus.js +194 -1
  28. package/dist/index.d.ts +3 -6
  29. package/dist/index.d.ts.map +1 -1
  30. package/dist/index.js +2 -5
  31. package/dist/schema.d.ts +27 -0
  32. package/dist/schema.d.ts.map +1 -1
  33. package/dist/schema.js +27 -0
  34. package/dist/sst.d.ts +38 -0
  35. package/dist/sst.d.ts.map +1 -1
  36. package/dist/sst.js +38 -0
  37. package/dist/types.d.ts +229 -1
  38. package/dist/types.d.ts.map +1 -1
  39. package/dist/types.js +91 -2
  40. package/dist/utils.d.ts +133 -0
  41. package/dist/utils.d.ts.map +1 -0
  42. package/dist/utils.js +174 -0
  43. package/package.json +5 -2
  44. package/dist/codec.d.ts +0 -9
  45. package/dist/codec.d.ts.map +0 -1
  46. package/dist/codec.js +0 -21
  47. package/dist/hash.d.ts +0 -2
  48. package/dist/hash.d.ts.map +0 -1
  49. package/dist/hash.js +0 -5
  50. package/dist/store.d.ts +0 -3
  51. package/dist/store.d.ts.map +0 -1
  52. package/dist/store.js +0 -125
  53. package/dist/version.d.ts +0 -7
  54. package/dist/version.d.ts.map +0 -1
  55. package/dist/version.js +0 -31
package/dist/backend/memory.js CHANGED
@@ -1,4 +1,34 @@
- import { ok, err } from '../types';
+ /**
+ * @module Backends
+ * @description In-memory storage backend for testing and development.
+ */
+ import { ok, err } from "../types";
+ /**
+ * Creates an in-memory storage backend.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Ideal for testing, development, and ephemeral storage scenarios.
+ * All data is lost when the process ends.
+ *
+ * @param options - Optional configuration with `on_event` handler for observability
+ * @returns A Backend instance using in-memory storage
+ *
+ * @example
+ * ```ts
+ * // Basic usage for testing
+ * const backend = create_memory_backend()
+ * const corpus = create_corpus()
+ * .with_backend(backend)
+ * .with_store(define_store('test', text_codec()))
+ * .build()
+ *
+ * // With event logging
+ * const backend = create_memory_backend({
+ * on_event: (e) => console.log(`[${e.type}]`, e)
+ * })
+ * ```
+ */
  export function create_memory_backend(options) {
  const meta_store = new Map();
  const data_store = new Map();
@@ -12,20 +42,20 @@ export function create_memory_backend(options) {
  const metadata = {
  async get(store_id, version) {
  const meta = meta_store.get(make_meta_key(store_id, version));
- emit({ type: 'meta_get', store_id, version, found: !!meta });
+ emit({ type: "meta_get", store_id, version, found: !!meta });
  if (!meta) {
- return err({ kind: 'not_found', store_id, version });
+ return err({ kind: "not_found", store_id, version });
  }
  return ok(meta);
  },
  async put(meta) {
  meta_store.set(make_meta_key(meta.store_id, meta.version), meta);
- emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+ emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
  return ok(undefined);
  },
  async delete(store_id, version) {
  meta_store.delete(make_meta_key(store_id, version));
- emit({ type: 'meta_delete', store_id, version });
+ emit({ type: "meta_delete", store_id, version });
  return ok(undefined);
  },
  async *list(store_id, opts) {
@@ -38,7 +68,7 @@ export function create_memory_backend(options) {
  continue;
  if (opts?.after && meta.created_at <= opts.after)
  continue;
- if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t)))
+ if (opts?.tags?.length && !opts.tags.every(t => meta.tags?.includes(t)))
  continue;
  matches.push(meta);
  }
@@ -49,7 +79,7 @@ export function create_memory_backend(options) {
  yield match;
  count++;
  }
- emit({ type: 'meta_list', store_id, count });
+ emit({ type: "meta_list", store_id, count });
  },
  async get_latest(store_id) {
  let latest = null;
@@ -62,7 +92,7 @@ export function create_memory_backend(options) {
  }
  }
  if (!latest) {
- return err({ kind: 'not_found', store_id, version: 'latest' });
+ return err({ kind: "not_found", store_id, version: "latest" });
  }
  return ok(latest);
  },
@@ -86,16 +116,16 @@ export function create_memory_backend(options) {
  const data = {
  async get(data_key) {
  const bytes = data_store.get(data_key);
- emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found: !!bytes });
+ emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found: !!bytes });
  if (!bytes) {
- return err({ kind: 'not_found', store_id: data_key, version: '' });
+ return err({ kind: "not_found", store_id: data_key, version: "" });
  }
  return ok({
  stream: () => new ReadableStream({
  start(controller) {
  controller.enqueue(bytes);
  controller.close();
- }
+ },
  }),
  bytes: async () => bytes,
  });
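One behavioural change in this file is easy to miss: `list()` tag filtering switches from `Array.prototype.some` to `Array.prototype.every`, so a snapshot must now carry every requested tag rather than any one of them. A minimal sketch of the new semantics (the root import path and the store/tag names are illustrative, not taken from this diff):

```ts
import { create_corpus, create_memory_backend, define_store, text_codec } from '@f0rbit/corpus'

const corpus = create_corpus()
  .with_backend(create_memory_backend())
  .with_store(define_store('notes', text_codec()))
  .build()

await corpus.stores.notes.put('work in progress', { tags: ['draft'] })
await corpus.stores.notes.put('final copy', { tags: ['draft', 'published'] })

// 0.1.3 returned snapshots matching ANY requested tag; 0.1.5 requires ALL of them.
for await (const meta of corpus.stores.notes.list({ tags: ['draft', 'published'] })) {
  console.log(meta.version) // only the second snapshot matches under the new rule
}
```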
package/dist/backends.d.ts ADDED
@@ -0,0 +1,11 @@
+ /**
+ * Storage backend implementations for different environments.
+ * @module Backends
+ * @packageDocumentation
+ */
+ export { create_memory_backend, type MemoryBackendOptions } from './backend/memory';
+ export { create_file_backend, type FileBackendConfig } from './backend/file';
+ export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
+ export { create_layered_backend, type LayeredBackendOptions } from './backend/layered';
+ export type { Backend, MetadataClient, DataClient, DataHandle, EventHandler, CorpusEvent } from './types';
+ //# sourceMappingURL=backends.d.ts.map
package/dist/backends.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"backends.d.ts","sourceRoot":"","sources":["../backends.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AACtF,YAAY,EAAE,OAAO,EAAE,cAAc,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA"}
package/dist/backends.js ADDED
@@ -0,0 +1,9 @@
+ /**
+ * Storage backend implementations for different environments.
+ * @module Backends
+ * @packageDocumentation
+ */
+ export { create_memory_backend } from './backend/memory';
+ export { create_file_backend } from './backend/file';
+ export { create_cloudflare_backend } from './backend/cloudflare';
+ export { create_layered_backend } from './backend/layered';
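This new entry point groups the backend factories behind a single import. A rough usage sketch, assuming the updated package.json (not shown in this diff) publishes it as a `./backends` subpath export:

```ts
// Assumption: '@f0rbit/corpus/backends' is how this entry point is exposed.
import { create_memory_backend, type Backend, type CorpusEvent } from '@f0rbit/corpus/backends'

const backend: Backend = create_memory_backend({
  // on_event gives per-operation observability (meta_get, data_put, ...)
  on_event: (e: CorpusEvent) => console.log(`[${e.type}]`, e),
})
```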
package/dist/cloudflare.d.ts CHANGED
@@ -2,14 +2,11 @@
  * Cloudflare Workers compatible exports
  * This entry point excludes the file backend which uses Node.js APIs
  */
- export { create_corpus } from './corpus';
- export { create_store } from './store';
+ export { create_corpus, create_store } from './corpus';
  export { create_memory_backend, type MemoryBackendOptions } from './backend/memory';
  export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
- export { json_codec, text_codec, binary_codec } from './codec';
+ export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
  export { corpus_snapshots, type CorpusSnapshotRow, type CorpusSnapshotInsert } from './schema';
- export { compute_hash } from './hash';
- export { generate_version } from './version';
  export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
  export { ok, err, define_store } from './types';
  //# sourceMappingURL=cloudflare.d.ts.map
package/dist/cloudflare.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAA;AACxC,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAEtC,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE9D,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAA;AACrC,OAAO,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAA;AAE5C,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA"}
+ {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA"}
package/dist/cloudflare.js CHANGED
@@ -2,12 +2,9 @@
  * Cloudflare Workers compatible exports
  * This entry point excludes the file backend which uses Node.js APIs
  */
- export { create_corpus } from './corpus';
- export { create_store } from './store';
+ export { create_corpus, create_store } from './corpus';
  export { create_memory_backend } from './backend/memory';
  export { create_cloudflare_backend } from './backend/cloudflare';
- export { json_codec, text_codec, binary_codec } from './codec';
+ export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
  export { corpus_snapshots } from './schema';
- export { compute_hash } from './hash';
- export { generate_version } from './version';
  export { ok, err, define_store } from './types';
package/dist/codecs.d.ts ADDED
@@ -0,0 +1,8 @@
+ /**
+ * Codec implementations for serializing and deserializing data.
+ * @module Codecs
+ * @packageDocumentation
+ */
+ export { json_codec, text_codec, binary_codec } from './utils';
+ export type { Codec, ContentType } from './types';
+ //# sourceMappingURL=codecs.d.ts.map
package/dist/codecs.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"codecs.d.ts","sourceRoot":"","sources":["../codecs.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAC9D,YAAY,EAAE,KAAK,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA"}
package/dist/codecs.js ADDED
@@ -0,0 +1,6 @@
+ /**
+ * Codec implementations for serializing and deserializing data.
+ * @module Codecs
+ * @packageDocumentation
+ */
+ export { json_codec, text_codec, binary_codec } from './utils';
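Of the three codecs re-exported here, `binary_codec` is the only one not exercised by the examples embedded elsewhere in this release. A hedged sketch of how it plugs into a store (root import path assumed, and `binary_codec()` assumed to take no arguments):

```ts
import { create_corpus, create_memory_backend, define_store, binary_codec } from '@f0rbit/corpus'

// A store of raw bytes; the codec passes Uint8Array payloads through untouched.
const blobs = define_store('blobs', binary_codec())

const corpus = create_corpus()
  .with_backend(create_memory_backend())
  .with_store(blobs)
  .build()

await corpus.stores.blobs.put(new Uint8Array([0xde, 0xad, 0xbe, 0xef]))
```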
package/dist/core.d.ts ADDED
@@ -0,0 +1,9 @@
+ /**
+ * Core corpus functionality for creating and managing versioned data stores.
+ * @module Core
+ * @packageDocumentation
+ */
+ export { create_corpus, create_store } from './corpus';
+ export { define_store, ok, err } from './types';
+ export type { Corpus, CorpusBuilder, Store, StoreDefinition, Result, CorpusError, PutOpts, } from './types';
+ //# sourceMappingURL=core.d.ts.map
package/dist/core.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"core.d.ts","sourceRoot":"","sources":["../core.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,EAAE,EAAE,GAAG,EAAE,MAAM,SAAS,CAAA;AAC/C,YAAY,EACV,MAAM,EACN,aAAa,EACb,KAAK,EACL,eAAe,EACf,MAAM,EACN,WAAW,EACX,OAAO,GACR,MAAM,SAAS,CAAA"}
package/dist/core.js ADDED
@@ -0,0 +1,7 @@
+ /**
+ * Core corpus functionality for creating and managing versioned data stores.
+ * @module Core
+ * @packageDocumentation
+ */
+ export { create_corpus, create_store } from './corpus';
+ export { define_store, ok, err } from './types';
package/dist/corpus.d.ts CHANGED
@@ -1,3 +1,70 @@
- import type { CorpusBuilder } from './types';
+ /**
+ * @module Core
+ * @description Core corpus and store creation functions.
+ */
+ import type { Backend, CorpusBuilder, StoreDefinition, Store } from './types';
+ /**
+ * Creates a typed Store instance bound to a Backend.
+ * @category Core
+ * @group Builders
+ *
+ * Each store manages versioned snapshots of data with automatic deduplication:
+ * when the same content is stored twice, only one copy of the data is kept
+ * (identified by content hash), though separate metadata entries are created.
+ *
+ * Stores are typically created via `create_corpus().with_store()` rather than
+ * directly, which provides type-safe access through `corpus.stores.<id>`.
+ *
+ * @param backend - The storage backend for persistence
+ * @param definition - Store configuration including id and codec
+ * @returns A Store instance for the specified type
+ *
+ * @example
+ * ```ts
+ * const backend = create_memory_backend()
+ * const users = define_store('users', json_codec(UserSchema))
+ * const store = create_store(backend, users)
+ *
+ * // Store a snapshot
+ * const result = await store.put({ name: 'Alice', email: 'alice@example.com' })
+ * if (result.ok) {
+ * console.log('Stored version:', result.value.version)
+ * }
+ *
+ * // Storing identical content reuses the same data_key (deduplication)
+ * const result2 = await store.put({ name: 'Alice', email: 'alice@example.com' })
+ * // result.value.data_key === result2.value.data_key (same content hash)
+ * ```
+ */
+ export declare function create_store<T>(backend: Backend, definition: StoreDefinition<string, T>): Store<T>;
+ /**
+ * Creates a new Corpus instance using the builder pattern.
+ *
+ * A Corpus is a collection of typed stores backed by a storage backend.
+ * Use the builder chain to configure: `with_backend()` → `with_store()` → `build()`.
+ *
+ * @category Core
+ * @group Builders
+ * @returns A CorpusBuilder to configure and build the Corpus
+ *
+ * @example
+ * ```ts
+ * import { z } from 'zod'
+ *
+ * const UserSchema = z.object({ name: z.string(), email: z.string() })
+ * const users = define_store('users', json_codec(UserSchema))
+ * const notes = define_store('notes', text_codec())
+ *
+ * const corpus = create_corpus()
+ * .with_backend(create_memory_backend())
+ * .with_store(users)
+ * .with_store(notes)
+ * .build()
+ *
+ * // Type-safe access to stores
+ * await corpus.stores.users.put({ name: 'Alice', email: 'alice@example.com' })
+ * await corpus.stores.notes.put('Hello, world!')
+ * ```
+ */
  export declare function create_corpus(): CorpusBuilder<{}>;
  //# sourceMappingURL=corpus.d.ts.map
package/dist/corpus.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAmB,aAAa,EAA0B,MAAM,SAAS,CAAA;AAGrF,wBAAgB,aAAa,IAAI,aAAa,CAAC,EAAE,CAAC,CAoCjD"}
+ {"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAU,aAAa,EAAE,eAAe,EAAE,KAAK,EAAqD,MAAM,SAAS,CAAA;AAIxI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAmJlG;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAgB,aAAa,IAAI,aAAa,CAAC,EAAE,CAAC,CAoCjD"}
package/dist/corpus.js CHANGED
@@ -1,4 +1,197 @@
- import { create_store } from './store';
+ /**
+ * @module Core
+ * @description Core corpus and store creation functions.
+ */
+ import { ok, err } from './types';
+ import { compute_hash, generate_version } from './utils';
+ /**
+ * Creates a typed Store instance bound to a Backend.
+ * @category Core
+ * @group Builders
+ *
+ * Each store manages versioned snapshots of data with automatic deduplication:
+ * when the same content is stored twice, only one copy of the data is kept
+ * (identified by content hash), though separate metadata entries are created.
+ *
+ * Stores are typically created via `create_corpus().with_store()` rather than
+ * directly, which provides type-safe access through `corpus.stores.<id>`.
+ *
+ * @param backend - The storage backend for persistence
+ * @param definition - Store configuration including id and codec
+ * @returns A Store instance for the specified type
+ *
+ * @example
+ * ```ts
+ * const backend = create_memory_backend()
+ * const users = define_store('users', json_codec(UserSchema))
+ * const store = create_store(backend, users)
+ *
+ * // Store a snapshot
+ * const result = await store.put({ name: 'Alice', email: 'alice@example.com' })
+ * if (result.ok) {
+ * console.log('Stored version:', result.value.version)
+ * }
+ *
+ * // Storing identical content reuses the same data_key (deduplication)
+ * const result2 = await store.put({ name: 'Alice', email: 'alice@example.com' })
+ * // result.value.data_key === result2.value.data_key (same content hash)
+ * ```
+ */
+ export function create_store(backend, definition) {
+ const { id, codec, data_key_fn } = definition;
+ function emit(event) {
+ backend.on_event?.(event);
+ }
+ function make_data_key(ctx) {
+ if (data_key_fn) {
+ return data_key_fn(ctx);
+ }
+ return `${ctx.store_id}/${ctx.content_hash}`;
+ }
+ return {
+ id,
+ codec,
+ async put(data, opts) {
+ const version = generate_version();
+ let bytes;
+ try {
+ bytes = codec.encode(data);
+ }
+ catch (cause) {
+ const error = { kind: 'encode_error', cause: cause };
+ emit({ type: 'error', error });
+ return err(error);
+ }
+ const content_hash = await compute_hash(bytes);
+ const key_ctx = { store_id: id, version, content_hash, tags: opts?.tags };
+ // deduplication: reuse existing data_key if content already exists
+ const existing = await backend.metadata.find_by_hash(id, content_hash);
+ const deduplicated = existing !== null;
+ const data_key = deduplicated ? existing.data_key : make_data_key(key_ctx);
+ if (!deduplicated) {
+ const data_result = await backend.data.put(data_key, bytes);
+ if (!data_result.ok) {
+ emit({ type: 'error', error: data_result.error });
+ return data_result;
+ }
+ }
+ emit({ type: 'data_put', store_id: id, version, size_bytes: bytes.length, deduplicated });
+ const meta = {
+ store_id: id,
+ version,
+ parents: opts?.parents ?? [],
+ created_at: new Date(),
+ invoked_at: opts?.invoked_at,
+ content_hash,
+ content_type: codec.content_type,
+ size_bytes: bytes.length,
+ data_key,
+ tags: opts?.tags,
+ };
+ const meta_result = await backend.metadata.put(meta);
+ if (!meta_result.ok) {
+ emit({ type: 'error', error: meta_result.error });
+ return meta_result;
+ }
+ emit({ type: 'snapshot_put', store_id: id, version, content_hash, deduplicated });
+ return ok(meta);
+ },
+ async get(version) {
+ const meta_result = await backend.metadata.get(id, version);
+ if (!meta_result.ok) {
+ emit({ type: 'snapshot_get', store_id: id, version, found: false });
+ return meta_result;
+ }
+ const meta = meta_result.value;
+ const data_result = await backend.data.get(meta.data_key);
+ if (!data_result.ok) {
+ emit({ type: 'error', error: data_result.error });
+ return data_result;
+ }
+ const bytes = await data_result.value.bytes();
+ let data;
+ try {
+ data = codec.decode(bytes);
+ }
+ catch (cause) {
+ const error = { kind: 'decode_error', cause: cause };
+ emit({ type: 'error', error });
+ return err(error);
+ }
+ emit({ type: 'snapshot_get', store_id: id, version, found: true });
+ return ok({ meta, data });
+ },
+ async get_latest() {
+ const meta_result = await backend.metadata.get_latest(id);
+ if (!meta_result.ok) {
+ return meta_result;
+ }
+ const meta = meta_result.value;
+ const data_result = await backend.data.get(meta.data_key);
+ if (!data_result.ok) {
+ return data_result;
+ }
+ const bytes = await data_result.value.bytes();
+ let data;
+ try {
+ data = codec.decode(bytes);
+ }
+ catch (cause) {
+ const error = { kind: 'decode_error', cause: cause };
+ emit({ type: 'error', error });
+ return err(error);
+ }
+ return ok({ meta, data });
+ },
+ async get_meta(version) {
+ return backend.metadata.get(id, version);
+ },
+ list(opts) {
+ return backend.metadata.list(id, opts);
+ },
+ async delete(version) {
+ const meta_result = await backend.metadata.get(id, version);
+ if (!meta_result.ok) {
+ return meta_result;
+ }
+ const delete_meta_result = await backend.metadata.delete(id, version);
+ if (!delete_meta_result.ok) {
+ return delete_meta_result;
+ }
+ emit({ type: 'meta_delete', store_id: id, version });
+ return ok(undefined);
+ },
+ };
+ }
+ /**
+ * Creates a new Corpus instance using the builder pattern.
+ *
+ * A Corpus is a collection of typed stores backed by a storage backend.
+ * Use the builder chain to configure: `with_backend()` → `with_store()` → `build()`.
+ *
+ * @category Core
+ * @group Builders
+ * @returns A CorpusBuilder to configure and build the Corpus
+ *
+ * @example
+ * ```ts
+ * import { z } from 'zod'
+ *
+ * const UserSchema = z.object({ name: z.string(), email: z.string() })
+ * const users = define_store('users', json_codec(UserSchema))
+ * const notes = define_store('notes', text_codec())
+ *
+ * const corpus = create_corpus()
+ * .with_backend(create_memory_backend())
+ * .with_store(users)
+ * .with_store(notes)
+ * .build()
+ *
+ * // Type-safe access to stores
+ * await corpus.stores.users.put({ name: 'Alice', email: 'alice@example.com' })
+ * await corpus.stores.notes.put('Hello, world!')
+ * ```
+ */
  export function create_corpus() {
  let backend = null;
  const definitions = [];
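Both `put` and `get` in the rewritten store return `Result` values rather than throwing; the error kinds visible in this hunk are `encode_error`, `decode_error`, and `not_found`. A small handling sketch (store name and version string are placeholders):

```ts
// `corpus` as built in the earlier memory-backend sketch.
const result = await corpus.stores.notes.get('some-version-id')
if (!result.ok) {
  switch (result.error.kind) {
    case 'not_found':
      console.warn('no snapshot with that version')
      break
    case 'decode_error':
      console.error('stored bytes could not be decoded', result.error.cause)
      break
    default:
      console.error('backend error', result.error)
  }
} else {
  console.log(result.value.meta.version, result.value.data)
}
```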
package/dist/index.d.ts CHANGED
@@ -1,14 +1,11 @@
- export { create_corpus } from './corpus';
- export { create_store } from './store';
+ export { create_corpus, create_store } from './corpus';
  export { create_memory_backend, type MemoryBackendOptions } from './backend/memory';
  export { create_file_backend, type FileBackendConfig } from './backend/file';
  export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
  export { create_layered_backend, type LayeredBackendOptions } from './backend/layered';
- export { json_codec, text_codec, binary_codec } from './codec';
+ export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
  export { corpus_snapshots, type CorpusSnapshotRow, type CorpusSnapshotInsert } from './schema';
- export { compute_hash } from './hash';
- export { generate_version } from './version';
- export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
+ export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, DefineStoreOpts, DataKeyContext, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
  export { ok, err, define_store } from './types';
  export { createCorpusInfra, CORPUS_MIGRATION_SQL, type CorpusInfra, type CorpusInfraConfig } from './sst';
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAA;AACxC,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAEtC,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AAEtF,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE9D,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAA;AACrC,OAAO,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAA;AAE5C,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE/C,OAAO,EAAE,iBAAiB,EAAE,oBAAoB,EAAE,KAAK,WAAW,EAAE,KAAK,iBAAiB,EAAE,MAAM,OAAO,CAAA"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AAEtF,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,eAAe,EACf,cAAc,EACd,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE/C,OAAO,EAAE,iBAAiB,EAAE,oBAAoB,EAAE,KAAK,WAAW,EAAE,KAAK,iBAAiB,EAAE,MAAM,OAAO,CAAA"}
package/dist/index.js CHANGED
@@ -1,12 +1,9 @@
- export { create_corpus } from './corpus';
- export { create_store } from './store';
+ export { create_corpus, create_store } from './corpus';
  export { create_memory_backend } from './backend/memory';
  export { create_file_backend } from './backend/file';
  export { create_cloudflare_backend } from './backend/cloudflare';
  export { create_layered_backend } from './backend/layered';
- export { json_codec, text_codec, binary_codec } from './codec';
+ export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
  export { corpus_snapshots } from './schema';
- export { compute_hash } from './hash';
- export { generate_version } from './version';
  export { ok, err, define_store } from './types';
  export { createCorpusInfra, CORPUS_MIGRATION_SQL } from './sst';
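The hashing and versioning helpers that previously lived in dedicated `hash`/`version` modules are now re-exported from `utils` through the package root. A quick sketch of the two helpers as `store.put()` uses them internally:

```ts
import { compute_hash, generate_version } from '@f0rbit/corpus'

const bytes = new TextEncoder().encode('hello corpus')
const hash = await compute_hash(bytes)   // content hash, the basis of data_key deduplication
const version = generate_version()       // fresh version id for a new snapshot
console.log({ hash, version })
```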
package/dist/schema.d.ts CHANGED
@@ -1,3 +1,30 @@
+ /**
+ * @module Schema
+ * @description Database schema definitions for Drizzle ORM.
+ */
+ /**
+ * Drizzle ORM schema for the corpus_snapshots table.
+ *
+ * Used by the Cloudflare backend with D1 (SQLite). Defines the table structure
+ * for storing snapshot metadata.
+ *
+ * Columns:
+ * - `store_id` + `version` - Composite primary key
+ * - `parents` - JSON array of parent references
+ * - `created_at` / `invoked_at` - ISO 8601 timestamps
+ * - `content_hash` - SHA-256 hash for deduplication
+ * - `data_key` - Key to retrieve binary data from R2
+ * - `tags` - Optional JSON array of tags
+ *
+ * @example
+ * ```ts
+ * import { drizzle } from 'drizzle-orm/d1'
+ * import { corpus_snapshots } from 'corpus/schema'
+ *
+ * const db = drizzle(env.D1)
+ * const rows = await db.select().from(corpus_snapshots).limit(10)
+ * ```
+ */
  export declare const corpus_snapshots: import("drizzle-orm/sqlite-core").SQLiteTableWithColumns<{
  name: "corpus_snapshots";
  schema: undefined;
package/dist/schema.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../schema.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAgB1B,CAAA;AAEH,MAAM,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,CAAC,YAAY,CAAA;AACpE,MAAM,MAAM,oBAAoB,GAAG,OAAO,gBAAgB,CAAC,YAAY,CAAA"}
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../schema.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH;;;;;;;;;;;;;;;;;;;;;;GAsBG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAgB1B,CAAA;AAEH,MAAM,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,CAAC,YAAY,CAAA;AACpE,MAAM,MAAM,oBAAoB,GAAG,OAAO,gBAAgB,CAAC,YAAY,CAAA"}
package/dist/schema.js CHANGED
@@ -1,4 +1,31 @@
+ /**
+ * @module Schema
+ * @description Database schema definitions for Drizzle ORM.
+ */
  import { sqliteTable, text, integer, primaryKey, index } from 'drizzle-orm/sqlite-core';
+ /**
+ * Drizzle ORM schema for the corpus_snapshots table.
+ *
+ * Used by the Cloudflare backend with D1 (SQLite). Defines the table structure
+ * for storing snapshot metadata.
+ *
+ * Columns:
+ * - `store_id` + `version` - Composite primary key
+ * - `parents` - JSON array of parent references
+ * - `created_at` / `invoked_at` - ISO 8601 timestamps
+ * - `content_hash` - SHA-256 hash for deduplication
+ * - `data_key` - Key to retrieve binary data from R2
+ * - `tags` - Optional JSON array of tags
+ *
+ * @example
+ * ```ts
+ * import { drizzle } from 'drizzle-orm/d1'
+ * import { corpus_snapshots } from 'corpus/schema'
+ *
+ * const db = drizzle(env.D1)
+ * const rows = await db.select().from(corpus_snapshots).limit(10)
+ * ```
+ */
  export const corpus_snapshots = sqliteTable('corpus_snapshots', {
  store_id: text('store_id').notNull(),
  version: text('version').notNull(),
package/dist/sst.d.ts CHANGED
@@ -13,6 +13,44 @@ export type CorpusInfra = {
  database_name: string;
  bucket_name: string;
  };
+ /**
+ * SST infrastructure helper for creating Corpus resources.
+ *
+ * Generates resource names for D1 database and R2 bucket based on a prefix.
+ * Returns objects compatible with SST resource definitions.
+ *
+ * @param name - Base name prefix for resources
+ * @param config - Optional overrides for resource names
+ * @returns Resource definitions with database and bucket names
+ *
+ * @example
+ * ```ts
+ * // In sst.config.ts
+ * const corpus = createCorpusInfra('myapp')
+ *
+ * const db = new sst.cloudflare.D1(corpus.database.name)
+ * const bucket = new sst.cloudflare.R2(corpus.bucket.name)
+ *
+ * // Resource names: 'myappDb', 'myappBucket'
+ * ```
+ */
  export declare function createCorpusInfra(name: string, config?: Partial<CorpusInfraConfig>): CorpusInfra;
+ /**
+ * SQL migration script to create required D1 tables for the Cloudflare backend.
+ *
+ * Must be executed on the D1 database before using `create_cloudflare_backend()`.
+ * Creates the `corpus_snapshots` table and required indexes.
+ *
+ * Safe to run multiple times (uses IF NOT EXISTS).
+ *
+ * @example
+ * ```ts
+ * // Run migration via wrangler
+ * // wrangler d1 execute <database-name> --command "$(cat migration.sql)"
+ *
+ * // Or programmatically in a Worker
+ * await env.CORPUS_DB.exec(CORPUS_MIGRATION_SQL)
+ * ```
+ */
  export declare const CORPUS_MIGRATION_SQL = "\nCREATE TABLE IF NOT EXISTS corpus_snapshots (\n store_id TEXT NOT NULL,\n version TEXT NOT NULL,\n parents TEXT NOT NULL,\n created_at TEXT NOT NULL,\n invoked_at TEXT,\n content_hash TEXT NOT NULL,\n content_type TEXT NOT NULL,\n size_bytes INTEGER NOT NULL,\n data_key TEXT NOT NULL,\n tags TEXT,\n PRIMARY KEY (store_id, version)\n);\n\nCREATE INDEX IF NOT EXISTS idx_store_created ON corpus_snapshots(store_id, created_at);\nCREATE INDEX IF NOT EXISTS idx_content_hash ON corpus_snapshots(store_id, content_hash);\nCREATE INDEX IF NOT EXISTS idx_data_key ON corpus_snapshots(data_key);\n";
  //# sourceMappingURL=sst.d.ts.map