@f0rbit/corpus 0.1.3 → 0.1.4
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- package/dist/backend/cloudflare.d.ts +40 -0
- package/dist/backend/cloudflare.d.ts.map +1 -1
- package/dist/backend/cloudflare.js +40 -0
- package/dist/backend/file.d.ts +35 -0
- package/dist/backend/file.d.ts.map +1 -1
- package/dist/backend/file.js +35 -0
- package/dist/backend/layered.d.ts +38 -0
- package/dist/backend/layered.d.ts.map +1 -1
- package/dist/backend/layered.js +38 -0
- package/dist/backend/memory.d.ts +30 -0
- package/dist/backend/memory.d.ts.map +1 -1
- package/dist/backend/memory.js +30 -0
- package/dist/backends.d.ts +11 -0
- package/dist/backends.d.ts.map +1 -0
- package/dist/backends.js +9 -0
- package/dist/cloudflare.d.ts +2 -5
- package/dist/cloudflare.d.ts.map +1 -1
- package/dist/cloudflare.js +2 -5
- package/dist/codecs.d.ts +8 -0
- package/dist/codecs.d.ts.map +1 -0
- package/dist/codecs.js +6 -0
- package/dist/core.d.ts +9 -0
- package/dist/core.d.ts.map +1 -0
- package/dist/core.js +7 -0
- package/dist/corpus.d.ts +68 -1
- package/dist/corpus.d.ts.map +1 -1
- package/dist/corpus.js +194 -1
- package/dist/index.d.ts +3 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -5
- package/dist/schema.d.ts +27 -0
- package/dist/schema.d.ts.map +1 -1
- package/dist/schema.js +27 -0
- package/dist/sst.d.ts +38 -0
- package/dist/sst.d.ts.map +1 -1
- package/dist/sst.js +38 -0
- package/dist/types.d.ts +229 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js +91 -2
- package/dist/utils.d.ts +133 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +174 -0
- package/package.json +5 -2
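
The headline change in 0.1.4 is the set of new grouped entry modules listed above (`dist/backends`, `dist/codecs`, `dist/core`), plus consolidated exports in the Cloudflare entry point, where `create_store` now comes from `./corpus` and `compute_hash`/`generate_version` from `./utils`; the detailed hunks follow below. As a reviewer's orientation, here is a minimal sketch of how a consumer might exercise the API these files re-export. It is a sketch only: the `@f0rbit/corpus/core`, `/backends`, and `/codecs` subpath specifiers are assumptions (the `package.json` change is listed above but its hunk is not shown), and `TaskSchema` is a hypothetical schema used purely for illustration.

```ts
// Hedged sketch: assumes the updated package.json maps dist/core, dist/backends,
// and dist/codecs to the subpaths below; fall back to the package root if it does not.
import { z } from 'zod'
import { create_corpus, define_store } from '@f0rbit/corpus/core'       // assumed subpath
import { create_memory_backend, create_file_backend, create_layered_backend } from '@f0rbit/corpus/backends' // assumed subpath
import { json_codec } from '@f0rbit/corpus/codecs'                      // assumed subpath

// Hypothetical store schema, for illustration only.
const TaskSchema = z.object({ title: z.string(), done: z.boolean() })

// Layered backend as documented in backend/layered: memory is tried first for
// reads, and writes fan out to both memory and disk.
const cache = create_memory_backend()
const disk = create_file_backend({ base_path: './data/corpus' })
const backend = create_layered_backend({ read: [cache, disk], write: [cache, disk] })

const corpus = create_corpus()
  .with_backend(backend)
  .with_store(define_store('tasks', json_codec(TaskSchema)))
  .build()

// put() returns a Result; identical content is deduplicated by content hash.
const result = await corpus.stores.tasks.put({ title: 'review the 0.1.4 diff', done: false })
if (result.ok) console.log('stored version', result.value.version)
```

If those subpaths are not exposed, the same names were already importable from the root entry in 0.1.3; the root `index` files also changed in this release (`index.d.ts` +3/−6), though those hunks are not included in this excerpt.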
package/dist/backend/cloudflare.d.ts
CHANGED
@@ -1,3 +1,7 @@
+/**
+ * @module Backends
+ * @description Cloudflare Workers storage backend using D1 and R2.
+ */
 import type { Backend, EventHandler } from '../types';
 type D1Database = {
     prepare: (sql: string) => unknown;
@@ -18,6 +22,42 @@ export type CloudflareBackendConfig = {
     r2: R2Bucket;
     on_event?: EventHandler;
 };
+/**
+ * Creates a Cloudflare Workers storage backend using D1 and R2.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Uses D1 (SQLite) for metadata storage and R2 (object storage) for binary data.
+ * Requires running `CORPUS_MIGRATION_SQL` on the D1 database before first use.
+ *
+ * This backend is designed for production use in Cloudflare Workers environments,
+ * providing durable, globally distributed storage.
+ *
+ * @param config - Configuration with `d1` (D1 database), `r2` (R2 bucket), and optional `on_event` handler
+ * @returns A Backend instance using Cloudflare D1 + R2
+ *
+ * @example
+ * ```ts
+ * // In a Cloudflare Worker
+ * export default {
+ *   async fetch(request: Request, env: Env) {
+ *     const backend = create_cloudflare_backend({
+ *       d1: env.CORPUS_DB,
+ *       r2: env.CORPUS_BUCKET
+ *     })
+ *
+ *     const corpus = create_corpus()
+ *       .with_backend(backend)
+ *       .with_store(define_store('cache', json_codec(CacheSchema)))
+ *       .build()
+ *
+ *     // Use corpus...
+ *   }
+ * }
+ * ```
+ *
+ * @see CORPUS_MIGRATION_SQL for required database setup
+ */
 export declare function create_cloudflare_backend(config: CloudflareBackendConfig): Backend;
 export {};
 //# sourceMappingURL=cloudflare.d.ts.map

package/dist/backend/cloudflare.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../../backend/cloudflare.ts"],"names":[],"mappings":"
+{"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../../backend/cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAI3I,KAAK,UAAU,GAAG;IAAE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAA;CAAE,CAAA;AACvD,KAAK,QAAQ,GAAG;IACd,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;QAAC,WAAW,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,CAAA;KAAE,GAAG,IAAI,CAAC,CAAA;IACnH,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,GAAG,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IAClF,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IACtC,IAAI,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,GAAG,IAAI,CAAC,CAAA;CACvD,CAAA;AAED,MAAM,MAAM,uBAAuB,GAAG;IACpC,EAAE,EAAE,UAAU,CAAA;IACd,EAAE,EAAE,QAAQ,CAAA;IACZ,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAgB,yBAAyB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CA8PlF"}

package/dist/backend/cloudflare.js
CHANGED
@@ -1,7 +1,47 @@
+/**
+ * @module Backends
+ * @description Cloudflare Workers storage backend using D1 and R2.
+ */
 import { eq, and, desc, lt, gt, like, sql } from 'drizzle-orm';
 import { drizzle } from 'drizzle-orm/d1';
 import { ok, err } from '../types';
 import { corpus_snapshots } from '../schema';
+/**
+ * Creates a Cloudflare Workers storage backend using D1 and R2.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Uses D1 (SQLite) for metadata storage and R2 (object storage) for binary data.
+ * Requires running `CORPUS_MIGRATION_SQL` on the D1 database before first use.
+ *
+ * This backend is designed for production use in Cloudflare Workers environments,
+ * providing durable, globally distributed storage.
+ *
+ * @param config - Configuration with `d1` (D1 database), `r2` (R2 bucket), and optional `on_event` handler
+ * @returns A Backend instance using Cloudflare D1 + R2
+ *
+ * @example
+ * ```ts
+ * // In a Cloudflare Worker
+ * export default {
+ *   async fetch(request: Request, env: Env) {
+ *     const backend = create_cloudflare_backend({
+ *       d1: env.CORPUS_DB,
+ *       r2: env.CORPUS_BUCKET
+ *     })
+ *
+ *     const corpus = create_corpus()
+ *       .with_backend(backend)
+ *       .with_store(define_store('cache', json_codec(CacheSchema)))
+ *       .build()
+ *
+ *     // Use corpus...
+ *   }
+ * }
+ * ```
+ *
+ * @see CORPUS_MIGRATION_SQL for required database setup
+ */
 export function create_cloudflare_backend(config) {
     const db = drizzle(config.d1);
     const { r2, on_event } = config;

package/dist/backend/file.d.ts
CHANGED
@@ -1,7 +1,42 @@
+/**
+ * @module Backends
+ * @description File-system storage backend for local persistence.
+ */
 import type { Backend, EventHandler } from '../types';
 export type FileBackendConfig = {
     base_path: string;
     on_event?: EventHandler;
 };
+/**
+ * Creates a file-system storage backend for local persistence.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Uses Bun's file APIs for efficient I/O. Metadata is stored as JSON files
+ * per store, and data is stored as binary files in a shared `_data` directory.
+ *
+ * Directory structure:
+ * ```
+ * base_path/
+ *   <store_id>/_meta.json         # Metadata for each store
+ *   _data/<store_id>_<hash>.bin   # Binary data files
+ * ```
+ *
+ * @param config - Configuration with `base_path` (root directory) and optional `on_event` handler
+ * @returns A Backend instance using file-system storage
+ *
+ * @example
+ * ```ts
+ * const backend = create_file_backend({
+ *   base_path: './data/corpus',
+ *   on_event: (e) => console.log(e.type)
+ * })
+ *
+ * const corpus = create_corpus()
+ *   .with_backend(backend)
+ *   .with_store(define_store('documents', json_codec(DocSchema)))
+ *   .build()
+ * ```
+ */
 export declare function create_file_backend(config: FileBackendConfig): Backend;
 //# sourceMappingURL=file.d.ts.map

package/dist/backend/file.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAK3I,MAAM,MAAM,iBAAiB,GAAG;IAC9B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAqMtE"}
+{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAK3I,MAAM,MAAM,iBAAiB,GAAG;IAC9B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAqMtE"}

package/dist/backend/file.js
CHANGED
@@ -1,6 +1,41 @@
+/**
+ * @module Backends
+ * @description File-system storage backend for local persistence.
+ */
 import { ok, err } from '../types';
 import { mkdir, readdir } from 'node:fs/promises';
 import { join, dirname } from 'node:path';
+/**
+ * Creates a file-system storage backend for local persistence.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Uses Bun's file APIs for efficient I/O. Metadata is stored as JSON files
+ * per store, and data is stored as binary files in a shared `_data` directory.
+ *
+ * Directory structure:
+ * ```
+ * base_path/
+ *   <store_id>/_meta.json         # Metadata for each store
+ *   _data/<store_id>_<hash>.bin   # Binary data files
+ * ```
+ *
+ * @param config - Configuration with `base_path` (root directory) and optional `on_event` handler
+ * @returns A Backend instance using file-system storage
+ *
+ * @example
+ * ```ts
+ * const backend = create_file_backend({
+ *   base_path: './data/corpus',
+ *   on_event: (e) => console.log(e.type)
+ * })
+ *
+ * const corpus = create_corpus()
+ *   .with_backend(backend)
+ *   .with_store(define_store('documents', json_codec(DocSchema)))
+ *   .build()
+ * ```
+ */
 export function create_file_backend(config) {
     const { base_path, on_event } = config;
     function emit(event) {

package/dist/backend/layered.d.ts
CHANGED
@@ -1,8 +1,46 @@
+/**
+ * @module Backends
+ * @description Layered backend for caching and replication strategies.
+ */
 import type { Backend } from '../types';
 export type LayeredBackendOptions = {
     read: Backend[];
     write: Backend[];
     list_strategy?: 'merge' | 'first';
 };
+/**
+ * Creates a layered backend that combines multiple backends with read/write separation.
+ * @category Backends
+ * @group Composite Backends
+ *
+ * Read operations use fallback: tries each read backend in order until one succeeds.
+ * Write operations use fanout: writes to all write backends (fails if any fail).
+ *
+ * Common use cases:
+ * - **Caching**: Memory backend first for reads, file backend for persistence
+ * - **Replication**: Write to multiple backends for redundancy
+ * - **Migration**: Read from old + new backends, write only to new
+ *
+ * @param options - Configuration with `read` backends (tried in order), `write` backends (all receive writes), and optional `list_strategy` ('merge' or 'first')
+ * @returns A Backend that delegates to the configured backends
+ *
+ * @example
+ * ```ts
+ * // Caching layer: memory cache with file persistence
+ * const cache = create_memory_backend()
+ * const storage = create_file_backend({ base_path: './data' })
+ *
+ * const backend = create_layered_backend({
+ *   read: [cache, storage],   // Try cache first, fall back to disk
+ *   write: [cache, storage],  // Write to both
+ * })
+ *
+ * // Migration: read from old and new, write only to new
+ * const backend = create_layered_backend({
+ *   read: [newBackend, oldBackend],
+ *   write: [newBackend],
+ * })
+ * ```
+ */
 export declare function create_layered_backend(options: LayeredBackendOptions): Backend;
 //# sourceMappingURL=layered.d.ts.map

package/dist/backend/layered.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"layered.d.ts","sourceRoot":"","sources":["../../backend/layered.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAA6E,MAAM,UAAU,CAAA;AAGlH,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,OAAO,EAAE,CAAA;IACf,KAAK,EAAE,OAAO,EAAE,CAAA;IAChB,aAAa,CAAC,EAAE,OAAO,GAAG,OAAO,CAAA;CAClC,CAAA;AAED,wBAAgB,sBAAsB,CAAC,OAAO,EAAE,qBAAqB,GAAG,OAAO,CA4I9E"}
+{"version":3,"file":"layered.d.ts","sourceRoot":"","sources":["../../backend/layered.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAA6E,MAAM,UAAU,CAAA;AAGlH,MAAM,MAAM,qBAAqB,GAAG;IAClC,IAAI,EAAE,OAAO,EAAE,CAAA;IACf,KAAK,EAAE,OAAO,EAAE,CAAA;IAChB,aAAa,CAAC,EAAE,OAAO,GAAG,OAAO,CAAA;CAClC,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,wBAAgB,sBAAsB,CAAC,OAAO,EAAE,qBAAqB,GAAG,OAAO,CA4I9E"}

package/dist/backend/layered.js
CHANGED
@@ -1,4 +1,42 @@
+/**
+ * @module Backends
+ * @description Layered backend for caching and replication strategies.
+ */
 import { ok, err } from '../types';
+/**
+ * Creates a layered backend that combines multiple backends with read/write separation.
+ * @category Backends
+ * @group Composite Backends
+ *
+ * Read operations use fallback: tries each read backend in order until one succeeds.
+ * Write operations use fanout: writes to all write backends (fails if any fail).
+ *
+ * Common use cases:
+ * - **Caching**: Memory backend first for reads, file backend for persistence
+ * - **Replication**: Write to multiple backends for redundancy
+ * - **Migration**: Read from old + new backends, write only to new
+ *
+ * @param options - Configuration with `read` backends (tried in order), `write` backends (all receive writes), and optional `list_strategy` ('merge' or 'first')
+ * @returns A Backend that delegates to the configured backends
+ *
+ * @example
+ * ```ts
+ * // Caching layer: memory cache with file persistence
+ * const cache = create_memory_backend()
+ * const storage = create_file_backend({ base_path: './data' })
+ *
+ * const backend = create_layered_backend({
+ *   read: [cache, storage],   // Try cache first, fall back to disk
+ *   write: [cache, storage],  // Write to both
+ * })
+ *
+ * // Migration: read from old and new, write only to new
+ * const backend = create_layered_backend({
+ *   read: [newBackend, oldBackend],
+ *   write: [newBackend],
+ * })
+ * ```
+ */
 export function create_layered_backend(options) {
     const { read, write, list_strategy = 'merge' } = options;
     const metadata = {

package/dist/backend/memory.d.ts
CHANGED
@@ -1,6 +1,36 @@
+/**
+ * @module Backends
+ * @description In-memory storage backend for testing and development.
+ */
 import type { Backend, EventHandler } from '../types';
 export type MemoryBackendOptions = {
     on_event?: EventHandler;
 };
+/**
+ * Creates an in-memory storage backend.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Ideal for testing, development, and ephemeral storage scenarios.
+ * All data is lost when the process ends.
+ *
+ * @param options - Optional configuration with `on_event` handler for observability
+ * @returns A Backend instance using in-memory storage
+ *
+ * @example
+ * ```ts
+ * // Basic usage for testing
+ * const backend = create_memory_backend()
+ * const corpus = create_corpus()
+ *   .with_backend(backend)
+ *   .with_store(define_store('test', text_codec()))
+ *   .build()
+ *
+ * // With event logging
+ * const backend = create_memory_backend({
+ *   on_event: (e) => console.log(`[${e.type}]`, e)
+ * })
+ * ```
+ */
 export declare function create_memory_backend(options?: MemoryBackendOptions): Backend;
 //# sourceMappingURL=memory.d.ts.map

package/dist/backend/memory.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAG3I,MAAM,MAAM,oBAAoB,GAAG;IACjC,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAiJ7E"}
+{"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAG3I,MAAM,MAAM,oBAAoB,GAAG;IACjC,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAiJ7E"}

package/dist/backend/memory.js
CHANGED
@@ -1,4 +1,34 @@
+/**
+ * @module Backends
+ * @description In-memory storage backend for testing and development.
+ */
 import { ok, err } from '../types';
+/**
+ * Creates an in-memory storage backend.
+ * @category Backends
+ * @group Storage Backends
+ *
+ * Ideal for testing, development, and ephemeral storage scenarios.
+ * All data is lost when the process ends.
+ *
+ * @param options - Optional configuration with `on_event` handler for observability
+ * @returns A Backend instance using in-memory storage
+ *
+ * @example
+ * ```ts
+ * // Basic usage for testing
+ * const backend = create_memory_backend()
+ * const corpus = create_corpus()
+ *   .with_backend(backend)
+ *   .with_store(define_store('test', text_codec()))
+ *   .build()
+ *
+ * // With event logging
+ * const backend = create_memory_backend({
+ *   on_event: (e) => console.log(`[${e.type}]`, e)
+ * })
+ * ```
+ */
 export function create_memory_backend(options) {
     const meta_store = new Map();
     const data_store = new Map();

package/dist/backends.d.ts
ADDED
@@ -0,0 +1,11 @@
+/**
+ * Storage backend implementations for different environments.
+ * @module Backends
+ * @packageDocumentation
+ */
+export { create_memory_backend, type MemoryBackendOptions } from './backend/memory';
+export { create_file_backend, type FileBackendConfig } from './backend/file';
+export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
+export { create_layered_backend, type LayeredBackendOptions } from './backend/layered';
+export type { Backend, MetadataClient, DataClient, DataHandle, EventHandler, CorpusEvent } from './types';
+//# sourceMappingURL=backends.d.ts.map

package/dist/backends.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"backends.d.ts","sourceRoot":"","sources":["../backends.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AACtF,YAAY,EAAE,OAAO,EAAE,cAAc,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA"}

package/dist/backends.js
ADDED
@@ -0,0 +1,9 @@
+/**
+ * Storage backend implementations for different environments.
+ * @module Backends
+ * @packageDocumentation
+ */
+export { create_memory_backend } from './backend/memory';
+export { create_file_backend } from './backend/file';
+export { create_cloudflare_backend } from './backend/cloudflare';
+export { create_layered_backend } from './backend/layered';

package/dist/cloudflare.d.ts
CHANGED
@@ -2,14 +2,11 @@
  * Cloudflare Workers compatible exports
  * This entry point excludes the file backend which uses Node.js APIs
  */
-export { create_corpus } from './corpus';
-export { create_store } from './store';
+export { create_corpus, create_store } from './corpus';
 export { create_memory_backend, type MemoryBackendOptions } from './backend/memory';
 export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
-export { json_codec, text_codec, binary_codec } from './
+export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
 export { corpus_snapshots, type CorpusSnapshotRow, type CorpusSnapshotInsert } from './schema';
-export { compute_hash } from './hash';
-export { generate_version } from './version';
 export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
 export { ok, err, define_store } from './types';
 //# sourceMappingURL=cloudflare.d.ts.map

package/dist/cloudflare.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,aAAa,EAAE,
+{"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA"}

package/dist/cloudflare.js
CHANGED
@@ -2,12 +2,9 @@
  * Cloudflare Workers compatible exports
  * This entry point excludes the file backend which uses Node.js APIs
  */
-export { create_corpus } from './corpus';
-export { create_store } from './store';
+export { create_corpus, create_store } from './corpus';
 export { create_memory_backend } from './backend/memory';
 export { create_cloudflare_backend } from './backend/cloudflare';
-export { json_codec, text_codec, binary_codec } from './
+export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
 export { corpus_snapshots } from './schema';
-export { compute_hash } from './hash';
-export { generate_version } from './version';
 export { ok, err, define_store } from './types';

package/dist/codecs.d.ts
ADDED
@@ -0,0 +1,8 @@
+/**
+ * Codec implementations for serializing and deserializing data.
+ * @module Codecs
+ * @packageDocumentation
+ */
+export { json_codec, text_codec, binary_codec } from './utils';
+export type { Codec, ContentType } from './types';
+//# sourceMappingURL=codecs.d.ts.map

package/dist/codecs.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"codecs.d.ts","sourceRoot":"","sources":["../codecs.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAC9D,YAAY,EAAE,KAAK,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA"}

package/dist/codecs.js
ADDED

package/dist/core.d.ts
ADDED
@@ -0,0 +1,9 @@
+/**
+ * Core corpus functionality for creating and managing versioned data stores.
+ * @module Core
+ * @packageDocumentation
+ */
+export { create_corpus, create_store } from './corpus';
+export { define_store, ok, err } from './types';
+export type { Corpus, CorpusBuilder, Store, StoreDefinition, Result, CorpusError, PutOpts, } from './types';
+//# sourceMappingURL=core.d.ts.map

package/dist/core.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"core.d.ts","sourceRoot":"","sources":["../core.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AACtD,OAAO,EAAE,YAAY,EAAE,EAAE,EAAE,GAAG,EAAE,MAAM,SAAS,CAAA;AAC/C,YAAY,EACV,MAAM,EACN,aAAa,EACb,KAAK,EACL,eAAe,EACf,MAAM,EACN,WAAW,EACX,OAAO,GACR,MAAM,SAAS,CAAA"}

package/dist/core.js
ADDED

package/dist/corpus.d.ts
CHANGED
@@ -1,3 +1,70 @@
-
+/**
+ * @module Core
+ * @description Core corpus and store creation functions.
+ */
+import type { Backend, CorpusBuilder, StoreDefinition, Store } from './types';
+/**
+ * Creates a typed Store instance bound to a Backend.
+ * @category Core
+ * @group Builders
+ *
+ * Each store manages versioned snapshots of data with automatic deduplication:
+ * when the same content is stored twice, only one copy of the data is kept
+ * (identified by content hash), though separate metadata entries are created.
+ *
+ * Stores are typically created via `create_corpus().with_store()` rather than
+ * directly, which provides type-safe access through `corpus.stores.<id>`.
+ *
+ * @param backend - The storage backend for persistence
+ * @param definition - Store configuration including id and codec
+ * @returns A Store instance for the specified type
+ *
+ * @example
+ * ```ts
+ * const backend = create_memory_backend()
+ * const users = define_store('users', json_codec(UserSchema))
+ * const store = create_store(backend, users)
+ *
+ * // Store a snapshot
+ * const result = await store.put({ name: 'Alice', email: 'alice@example.com' })
+ * if (result.ok) {
+ *   console.log('Stored version:', result.value.version)
+ * }
+ *
+ * // Storing identical content reuses the same data_key (deduplication)
+ * const result2 = await store.put({ name: 'Alice', email: 'alice@example.com' })
+ * // result.value.data_key === result2.value.data_key (same content hash)
+ * ```
+ */
+export declare function create_store<T>(backend: Backend, definition: StoreDefinition<string, T>): Store<T>;
+/**
+ * Creates a new Corpus instance using the builder pattern.
+ *
+ * A Corpus is a collection of typed stores backed by a storage backend.
+ * Use the builder chain to configure: `with_backend()` → `with_store()` → `build()`.
+ *
+ * @category Core
+ * @group Builders
+ * @returns A CorpusBuilder to configure and build the Corpus
+ *
+ * @example
+ * ```ts
+ * import { z } from 'zod'
+ *
+ * const UserSchema = z.object({ name: z.string(), email: z.string() })
+ * const users = define_store('users', json_codec(UserSchema))
+ * const notes = define_store('notes', text_codec())
+ *
+ * const corpus = create_corpus()
+ *   .with_backend(create_memory_backend())
+ *   .with_store(users)
+ *   .with_store(notes)
+ *   .build()
+ *
+ * // Type-safe access to stores
+ * await corpus.stores.users.put({ name: 'Alice', email: 'alice@example.com' })
+ * await corpus.stores.notes.put('Hello, world!')
+ * ```
+ */
 export declare function create_corpus(): CorpusBuilder<{}>;
 //# sourceMappingURL=corpus.d.ts.map

package/dist/corpus.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,
+{"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAU,aAAa,EAAE,eAAe,EAAE,KAAK,EAAqD,MAAM,SAAS,CAAA;AAIxI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAmJlG;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAgB,aAAa,IAAI,aAAa,CAAC,EAAE,CAAC,CAoCjD"}
|