@f0rbit/corpus 0.1.0

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/codec.js ADDED
@@ -0,0 +1,21 @@
+ export function json_codec(schema) {
+     return {
+         content_type: "application/json",
+         encode: (value) => new TextEncoder().encode(JSON.stringify(value)),
+         decode: (bytes) => schema.parse(JSON.parse(new TextDecoder().decode(bytes))),
+     };
+ }
+ export function text_codec() {
+     return {
+         content_type: "text/plain",
+         encode: (value) => new TextEncoder().encode(value),
+         decode: (bytes) => new TextDecoder().decode(bytes),
+     };
+ }
+ export function binary_codec() {
+     return {
+         content_type: "application/octet-stream",
+         encode: (value) => value,
+         decode: (bytes) => bytes,
+     };
+ }
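For reference, a minimal usage sketch of these codecs. json_codec only needs a schema object exposing a .parse() method; the Zod schema below is an illustrative assumption, not something the package ships.

import { z } from 'zod';
import { json_codec, text_codec } from '@f0rbit/corpus';

// hypothetical schema for illustration; any object with a .parse() method works
const note_schema = z.object({ title: z.string(), body: z.string() });

const notes = json_codec(note_schema);
const bytes = notes.encode({ title: 'hi', body: 'first note' }); // Uint8Array of UTF-8 JSON
const note = notes.decode(bytes);                                // re-validated via schema.parse

const plain = text_codec();
plain.decode(plain.encode('hello')); // round-trips strings as UTF-8
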
package/dist/corpus.d.ts ADDED
@@ -0,0 +1,3 @@
+ import type { CorpusBuilder } from './types';
+ export declare function create_corpus(): CorpusBuilder<{}>;
+ //# sourceMappingURL=corpus.d.ts.map
package/dist/corpus.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAmB,aAAa,EAA0B,MAAM,SAAS,CAAA;AAGrF,wBAAgB,aAAa,IAAI,aAAa,CAAC,EAAE,CAAC,CAoCjD"}
package/dist/corpus.js ADDED
@@ -0,0 +1,31 @@
+ import { create_store } from './store';
+ export function create_corpus() {
+     let backend = null;
+     const definitions = [];
+     const builder = {
+         with_backend(b) {
+             backend = b;
+             return builder;
+         },
+         with_store(definition) {
+             definitions.push(definition);
+             return builder;
+         },
+         build() {
+             if (!backend) {
+                 throw new Error('Backend is required. Call with_backend() first.');
+             }
+             const b = backend;
+             const stores = {};
+             for (const def of definitions) {
+                 stores[def.id] = create_store(b, def);
+             }
+             return {
+                 stores,
+                 metadata: b.metadata,
+                 data: b.data,
+             };
+         },
+     };
+     return builder;
+ }
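A minimal sketch of the builder flow above, assuming the memory backend can be constructed without options and that a store definition only needs an id and a codec (the fields create_store actually reads):

import { create_corpus, create_memory_backend, json_codec } from '@f0rbit/corpus';
import { z } from 'zod';

// hypothetical schema and store id for illustration
const note_schema = z.object({ title: z.string(), body: z.string() });

const corpus = create_corpus()
    .with_backend(create_memory_backend())                        // assumption: zero-arg construction
    .with_store({ id: 'notes', codec: json_codec(note_schema) })
    .build();                                                     // throws if with_backend() was skipped

const notes = corpus.stores.notes; // stores are keyed by definition id
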
package/dist/hash.d.ts ADDED
@@ -0,0 +1,2 @@
+ export declare function compute_hash(data: Uint8Array): Promise<string>;
+ //# sourceMappingURL=hash.d.ts.map
package/dist/hash.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../hash.ts"],"names":[],"mappings":"AAAA,wBAAsB,YAAY,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAIpE"}
package/dist/hash.js ADDED
@@ -0,0 +1,5 @@
+ export async function compute_hash(data) {
+     const hash_buffer = await crypto.subtle.digest('SHA-256', data);
+     const hash_array = new Uint8Array(hash_buffer);
+     return Array.from(hash_array).map(b => b.toString(16).padStart(2, '0')).join('');
+ }
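compute_hash relies on the Web Crypto API (crypto.subtle), so it runs unchanged in browsers, Cloudflare Workers, and modern Node.js. A quick sketch:

import { compute_hash } from '@f0rbit/corpus';

const bytes = new TextEncoder().encode('hello corpus');
const digest = await compute_hash(bytes); // 64-character lowercase hex SHA-256 digest
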
package/dist/index.d.ts ADDED
@@ -0,0 +1,13 @@
+ export { create_corpus } from './corpus';
+ export { create_store } from './store';
+ export { create_memory_backend, type MemoryBackendOptions } from './backend/memory';
+ export { create_file_backend, type FileBackendConfig } from './backend/file';
+ export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
+ export { json_codec, text_codec, binary_codec } from './codec';
+ export { corpus_snapshots, type CorpusSnapshotRow, type CorpusSnapshotInsert } from './schema';
+ export { compute_hash } from './hash';
+ export { generate_version } from './version';
+ export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
+ export { ok, err, define_store } from './types';
+ export { createCorpusInfra, CORPUS_MIGRATION_SQL, type CorpusInfra, type CorpusInfraConfig } from './sst';
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAA;AACxC,OAAO,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAEtC,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE9D,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAA;AACrC,OAAO,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAA;AAE5C,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE/C,OAAO,EAAE,iBAAiB,EAAE,oBAAoB,EAAE,KAAK,WAAW,EAAE,KAAK,iBAAiB,EAAE,MAAM,OAAO,CAAA"}
package/dist/index.js ADDED
@@ -0,0 +1,11 @@
+ export { create_corpus } from './corpus';
+ export { create_store } from './store';
+ export { create_memory_backend } from './backend/memory';
+ export { create_file_backend } from './backend/file';
+ export { create_cloudflare_backend } from './backend/cloudflare';
+ export { json_codec, text_codec, binary_codec } from './codec';
+ export { corpus_snapshots } from './schema';
+ export { compute_hash } from './hash';
+ export { generate_version } from './version';
+ export { ok, err, define_store } from './types';
+ export { createCorpusInfra, CORPUS_MIGRATION_SQL } from './sst';
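Everything above is re-exported from the package root. The ok and err helpers carry the Result convention the stores rely on; the sketch below infers the { ok, value } / { ok, error } shape from how store.js consumes it, so treat the exact typing as an assumption:

import { ok, err } from '@f0rbit/corpus';

// success results expose .value, failures expose .error (shape inferred from store.js)
const success = ok({ answer: 42 });
if (success.ok) console.log(success.value);

const failure = err({ kind: 'decode_error', cause: new Error('bad bytes') });
if (!failure.ok) console.error(failure.error.kind);
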
package/dist/schema.d.ts ADDED
@@ -0,0 +1,198 @@
+ export declare const corpus_snapshots: import("drizzle-orm/sqlite-core").SQLiteTableWithColumns<{
+     name: "corpus_snapshots";
+     schema: undefined;
+     columns: {
+         store_id: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "store_id";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         version: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "version";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         parents: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "parents";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         created_at: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "created_at";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         invoked_at: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "invoked_at";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: false;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         content_hash: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "content_hash";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         content_type: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "content_type";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         size_bytes: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "size_bytes";
+             tableName: "corpus_snapshots";
+             dataType: "number";
+             columnType: "SQLiteInteger";
+             data: number;
+             driverParam: number;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: undefined;
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {}>;
+         data_key: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "data_key";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: true;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+         tags: import("drizzle-orm/sqlite-core").SQLiteColumn<{
+             name: "tags";
+             tableName: "corpus_snapshots";
+             dataType: "string";
+             columnType: "SQLiteText";
+             data: string;
+             driverParam: string;
+             notNull: false;
+             hasDefault: false;
+             isPrimaryKey: false;
+             isAutoincrement: false;
+             hasRuntimeDefault: false;
+             enumValues: [string, ...string[]];
+             baseColumn: never;
+             identity: undefined;
+             generated: undefined;
+         }, {}, {
+             length: number | undefined;
+         }>;
+     };
+     dialect: "sqlite";
+ }>;
+ export type CorpusSnapshotRow = typeof corpus_snapshots.$inferSelect;
+ export type CorpusSnapshotInsert = typeof corpus_snapshots.$inferInsert;
+ //# sourceMappingURL=schema.d.ts.map
package/dist/schema.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../schema.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAgB1B,CAAA;AAEH,MAAM,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,CAAC,YAAY,CAAA;AACpE,MAAM,MAAM,oBAAoB,GAAG,OAAO,gBAAgB,CAAC,YAAY,CAAA"}
package/dist/schema.js ADDED
@@ -0,0 +1,18 @@
+ import { sqliteTable, text, integer, primaryKey, index } from 'drizzle-orm/sqlite-core';
+ export const corpus_snapshots = sqliteTable('corpus_snapshots', {
+     store_id: text('store_id').notNull(),
+     version: text('version').notNull(),
+     parents: text('parents').notNull(),
+     created_at: text('created_at').notNull(),
+     invoked_at: text('invoked_at'),
+     content_hash: text('content_hash').notNull(),
+     content_type: text('content_type').notNull(),
+     size_bytes: integer('size_bytes').notNull(),
+     data_key: text('data_key').notNull(),
+     tags: text('tags'),
+ }, (table) => ({
+     pk: primaryKey({ columns: [table.store_id, table.version] }),
+     created_idx: index('idx_store_created').on(table.store_id, table.created_at),
+     hash_idx: index('idx_content_hash').on(table.store_id, table.content_hash),
+     data_key_idx: index('idx_data_key').on(table.data_key),
+ }));
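This table is what the SQLite backends write into. For ad-hoc inspection outside the package's own backends, a sketch using drizzle with better-sqlite3 (the driver choice, database file name, and store id are assumptions for illustration):

import Database from 'better-sqlite3';
import { drizzle } from 'drizzle-orm/better-sqlite3';
import { desc, eq } from 'drizzle-orm';
import { corpus_snapshots, CORPUS_MIGRATION_SQL } from '@f0rbit/corpus';

const sqlite = new Database('corpus.db');
sqlite.exec(CORPUS_MIGRATION_SQL); // create the table and indexes if they do not exist
const db = drizzle(sqlite);

// ten most recent snapshots for one store, newest first
const rows = db
    .select()
    .from(corpus_snapshots)
    .where(eq(corpus_snapshots.store_id, 'notes'))
    .orderBy(desc(corpus_snapshots.created_at))
    .limit(10)
    .all();
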
package/dist/sst.d.ts ADDED
@@ -0,0 +1,18 @@
+ export type CorpusInfraConfig = {
+     name: string;
+     bucket_name?: string;
+     database_name?: string;
+ };
+ export type CorpusInfra = {
+     database: {
+         name: string;
+     };
+     bucket: {
+         name: string;
+     };
+     database_name: string;
+     bucket_name: string;
+ };
+ export declare function createCorpusInfra(name: string, config?: Partial<CorpusInfraConfig>): CorpusInfra;
+ export declare const CORPUS_MIGRATION_SQL = "\nCREATE TABLE IF NOT EXISTS corpus_snapshots (\n store_id TEXT NOT NULL,\n version TEXT NOT NULL,\n parents TEXT NOT NULL,\n created_at TEXT NOT NULL,\n invoked_at TEXT,\n content_hash TEXT NOT NULL,\n content_type TEXT NOT NULL,\n size_bytes INTEGER NOT NULL,\n data_key TEXT NOT NULL,\n tags TEXT,\n PRIMARY KEY (store_id, version)\n);\n\nCREATE INDEX IF NOT EXISTS idx_store_created ON corpus_snapshots(store_id, created_at);\nCREATE INDEX IF NOT EXISTS idx_content_hash ON corpus_snapshots(store_id, content_hash);\nCREATE INDEX IF NOT EXISTS idx_data_key ON corpus_snapshots(data_key);\n";
+ //# sourceMappingURL=sst.d.ts.map
package/dist/sst.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"sst.d.ts","sourceRoot":"","sources":["../sst.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,MAAM,CAAA;IACZ,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,MAAM,MAAM,WAAW,GAAG;IACxB,QAAQ,EAAE;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAA;IAC1B,MAAM,EAAE;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE,CAAA;IACxB,aAAa,EAAE,MAAM,CAAA;IACrB,WAAW,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,wBAAgB,iBAAiB,CAC/B,IAAI,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,OAAO,CAAC,iBAAiB,CAAC,GAClC,WAAW,CAUb;AAED,eAAO,MAAM,oBAAoB,imBAkBhC,CAAA"}
package/dist/sst.js ADDED
@@ -0,0 +1,29 @@
+ export function createCorpusInfra(name, config) {
+     const database_name = config?.database_name ?? `${name}Db`;
+     const bucket_name = config?.bucket_name ?? `${name}Bucket`;
+     return {
+         database: { name: database_name },
+         bucket: { name: bucket_name },
+         database_name,
+         bucket_name,
+     };
+ }
+ export const CORPUS_MIGRATION_SQL = `
+ CREATE TABLE IF NOT EXISTS corpus_snapshots (
+   store_id TEXT NOT NULL,
+   version TEXT NOT NULL,
+   parents TEXT NOT NULL,
+   created_at TEXT NOT NULL,
+   invoked_at TEXT,
+   content_hash TEXT NOT NULL,
+   content_type TEXT NOT NULL,
+   size_bytes INTEGER NOT NULL,
+   data_key TEXT NOT NULL,
+   tags TEXT,
+   PRIMARY KEY (store_id, version)
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_store_created ON corpus_snapshots(store_id, created_at);
+ CREATE INDEX IF NOT EXISTS idx_content_hash ON corpus_snapshots(store_id, content_hash);
+ CREATE INDEX IF NOT EXISTS idx_data_key ON corpus_snapshots(data_key);
+ `;
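createCorpusInfra only derives resource names and returns them; wiring those names into an actual SST stack is left to the caller. A sketch of the intended usage:

import { createCorpusInfra, CORPUS_MIGRATION_SQL } from '@f0rbit/corpus';

const infra = createCorpusInfra('Corpus');
console.log(infra.database_name); // "CorpusDb"
console.log(infra.bucket_name);   // "CorpusBucket"

// override one of the defaults
const custom = createCorpusInfra('Corpus', { database_name: 'CorpusMetadata' });

// CORPUS_MIGRATION_SQL contains the CREATE TABLE / CREATE INDEX statements for
// corpus_snapshots; run it once against the target SQLite / D1 database.
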
package/dist/store.d.ts ADDED
@@ -0,0 +1,3 @@
+ import type { Backend, Store, StoreDefinition } from './types';
+ export declare function create_store<T>(backend: Backend, definition: StoreDefinition<string, T>): Store<T>;
+ //# sourceMappingURL=store.d.ts.map
package/dist/store.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"store.d.ts","sourceRoot":"","sources":["../store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,eAAe,EAA8C,MAAM,SAAS,CAAA;AAK1G,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CA+IlG"}
package/dist/store.js ADDED
@@ -0,0 +1,125 @@
+ import { ok, err } from './types';
+ import { compute_hash } from './hash';
+ import { generate_version } from './version';
+ export function create_store(backend, definition) {
+     const { id, codec } = definition;
+     function emit(event) {
+         backend.on_event?.(event);
+     }
+     function make_data_key(store_id, content_hash) {
+         return `${store_id}/${content_hash}`;
+     }
+     return {
+         id,
+         codec,
+         async put(data, opts) {
+             const version = generate_version();
+             let bytes;
+             try {
+                 bytes = codec.encode(data);
+             }
+             catch (cause) {
+                 const error = { kind: 'encode_error', cause: cause };
+                 emit({ type: 'error', error });
+                 return err(error);
+             }
+             const content_hash = await compute_hash(bytes);
+             // deduplication: reuse existing data_key if content already exists
+             const existing = await backend.metadata.find_by_hash(id, content_hash);
+             const deduplicated = existing !== null;
+             const data_key = deduplicated ? existing.data_key : make_data_key(id, content_hash);
+             if (!deduplicated) {
+                 const data_result = await backend.data.put(data_key, bytes);
+                 if (!data_result.ok) {
+                     emit({ type: 'error', error: data_result.error });
+                     return data_result;
+                 }
+             }
+             emit({ type: 'data_put', store_id: id, version, size_bytes: bytes.length, deduplicated });
+             const meta = {
+                 store_id: id,
+                 version,
+                 parents: opts?.parents ?? [],
+                 created_at: new Date(),
+                 invoked_at: opts?.invoked_at,
+                 content_hash,
+                 content_type: codec.content_type,
+                 size_bytes: bytes.length,
+                 data_key,
+                 tags: opts?.tags,
+             };
+             const meta_result = await backend.metadata.put(meta);
+             if (!meta_result.ok) {
+                 emit({ type: 'error', error: meta_result.error });
+                 return meta_result;
+             }
+             emit({ type: 'snapshot_put', store_id: id, version, content_hash, deduplicated });
+             return ok(meta);
+         },
+         async get(version) {
+             const meta_result = await backend.metadata.get(id, version);
+             if (!meta_result.ok) {
+                 emit({ type: 'snapshot_get', store_id: id, version, found: false });
+                 return meta_result;
+             }
+             const meta = meta_result.value;
+             const data_result = await backend.data.get(meta.data_key);
+             if (!data_result.ok) {
+                 emit({ type: 'error', error: data_result.error });
+                 return data_result;
+             }
+             const bytes = await data_result.value.bytes();
+             let data;
+             try {
+                 data = codec.decode(bytes);
+             }
+             catch (cause) {
+                 const error = { kind: 'decode_error', cause: cause };
+                 emit({ type: 'error', error });
+                 return err(error);
+             }
+             emit({ type: 'snapshot_get', store_id: id, version, found: true });
+             return ok({ meta, data });
+         },
+         async get_latest() {
+             const meta_result = await backend.metadata.get_latest(id);
+             if (!meta_result.ok) {
+                 return meta_result;
+             }
+             const meta = meta_result.value;
+             const data_result = await backend.data.get(meta.data_key);
+             if (!data_result.ok) {
+                 return data_result;
+             }
+             const bytes = await data_result.value.bytes();
+             let data;
+             try {
+                 data = codec.decode(bytes);
+             }
+             catch (cause) {
+                 const error = { kind: 'decode_error', cause: cause };
+                 emit({ type: 'error', error });
+                 return err(error);
+             }
+             return ok({ meta, data });
+         },
+         async get_meta(version) {
+             return backend.metadata.get(id, version);
+         },
+         list(opts) {
+             return backend.metadata.list(id, opts);
+         },
+         async delete(version) {
+             const meta_result = await backend.metadata.get(id, version);
+             if (!meta_result.ok) {
+                 return meta_result;
+             }
+             const delete_meta_result = await backend.metadata.delete(id, version);
+             if (!delete_meta_result.ok) {
+                 return delete_meta_result;
+             }
+             emit({ type: 'meta_delete', store_id: id, version });
+             return ok(undefined);
+         },
+     };
+ }
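Finally, a sketch of driving a store built by create_store directly: put encodes, hashes, deduplicates by content, and records metadata; get loads the bytes back through the codec. The zero-argument memory backend, the Zod schema, and the minimal { id, codec } definition are assumptions.

import { create_store, create_memory_backend, json_codec } from '@f0rbit/corpus';
import { z } from 'zod';

const note_schema = z.object({ title: z.string(), body: z.string() });
const backend = create_memory_backend(); // assumption: usable without options
const notes = create_store(backend, { id: 'notes', codec: json_codec(note_schema) });

const put_result = await notes.put({ title: 'hi', body: 'first note' });
if (!put_result.ok) throw new Error(put_result.error.kind);

// identical content hashes to the same data_key, so this second put reuses the
// stored bytes while still recording a new snapshot version
await notes.put({ title: 'hi', body: 'first note' });

const got = await notes.get(put_result.value.version);
if (got.ok) console.log(got.value.data.title); // 'hi', validated by the schema on decode
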