@f0rbit/corpus 0.1.9 → 0.2.0

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions listed above.
package/dist/backend/base.d.ts ADDED
@@ -0,0 +1,23 @@
1
+ /**
2
+ * @module Backend Base
3
+ * @description Base abstraction layer for backend implementations.
4
+ */
5
+ import type { MetadataClient, DataClient, SnapshotMeta, CorpusEvent } from "../types";
6
+ export type MetadataStorage = {
7
+ get: (store_id: string, version: string) => Promise<SnapshotMeta | null>;
8
+ put: (meta: SnapshotMeta) => Promise<void>;
9
+ delete: (store_id: string, version: string) => Promise<void>;
10
+ list: (store_id: string) => AsyncIterable<SnapshotMeta>;
11
+ find_by_hash: (store_id: string, hash: string) => Promise<SnapshotMeta | null>;
12
+ };
13
+ export type DataStorage = {
14
+ get: (data_key: string) => Promise<Uint8Array | null>;
15
+ put: (data_key: string, data: Uint8Array) => Promise<void>;
16
+ delete: (data_key: string) => Promise<void>;
17
+ exists: (data_key: string) => Promise<boolean>;
18
+ };
19
+ type Emit = (event: CorpusEvent) => void;
20
+ export declare function create_metadata_client(storage: MetadataStorage, emit: Emit): MetadataClient;
21
+ export declare function create_data_client(storage: DataStorage, emit: Emit): DataClient;
22
+ export {};
23
+ //# sourceMappingURL=base.d.ts.map
package/dist/backend/base.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../backend/base.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EACV,cAAc,EACd,UAAU,EACV,YAAY,EAGZ,WAAW,EAGZ,MAAM,UAAU,CAAC;AAIlB,MAAM,MAAM,eAAe,GAAG;IAC5B,GAAG,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,KAAK,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC,CAAC;IACzE,GAAG,EAAE,CAAC,IAAI,EAAE,YAAY,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3C,MAAM,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC7D,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,aAAa,CAAC,YAAY,CAAC,CAAC;IACxD,YAAY,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC,CAAC;CAChF,CAAC;AAEF,MAAM,MAAM,WAAW,GAAG;IACxB,GAAG,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC;IACtD,GAAG,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3D,MAAM,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5C,MAAM,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;CAChD,CAAC;AAEF,KAAK,IAAI,GAAG,CAAC,KAAK,EAAE,WAAW,KAAK,IAAI,CAAC;AAEzC,wBAAgB,sBAAsB,CACpC,OAAO,EAAE,eAAe,EACxB,IAAI,EAAE,IAAI,GACT,cAAc,CAoEhB;AAED,wBAAgB,kBAAkB,CAAC,OAAO,EAAE,WAAW,EAAE,IAAI,EAAE,IAAI,GAAG,UAAU,CA0C/E"}
package/dist/backend/base.js ADDED
@@ -0,0 +1,100 @@
1
+ /**
2
+ * @module Backend Base
3
+ * @description Base abstraction layer for backend implementations.
4
+ */
5
+ import { ok, err } from "../types";
6
+ import { to_bytes, filter_snapshots } from "../utils";
7
+ export function create_metadata_client(storage, emit) {
8
+ return {
9
+ async get(store_id, version) {
10
+ const meta = await storage.get(store_id, version);
11
+ emit({ type: "meta_get", store_id, version, found: !!meta });
12
+ if (!meta) {
13
+ return err({ kind: "not_found", store_id, version });
14
+ }
15
+ return ok(meta);
16
+ },
17
+ async put(meta) {
18
+ await storage.put(meta);
19
+ emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
20
+ return ok(undefined);
21
+ },
22
+ async delete(store_id, version) {
23
+ await storage.delete(store_id, version);
24
+ emit({ type: "meta_delete", store_id, version });
25
+ return ok(undefined);
26
+ },
27
+ async *list(store_id, opts) {
28
+ const all = [];
29
+ for await (const meta of storage.list(store_id)) {
30
+ all.push(meta);
31
+ }
32
+ const filtered = filter_snapshots(all, opts);
33
+ let count = 0;
34
+ for (const meta of filtered) {
35
+ yield meta;
36
+ count++;
37
+ }
38
+ emit({ type: "meta_list", store_id, count });
39
+ },
40
+ async get_latest(store_id) {
41
+ let latest = null;
42
+ for await (const meta of storage.list(store_id)) {
43
+ if (!latest || meta.created_at > latest.created_at) {
44
+ latest = meta;
45
+ }
46
+ }
47
+ if (!latest) {
48
+ return err({ kind: "not_found", store_id, version: "latest" });
49
+ }
50
+ return ok(latest);
51
+ },
52
+ async *get_children(parent_store_id, parent_version) {
53
+ for await (const meta of storage.list("")) {
54
+ const is_child = meta.parents.some((p) => p.store_id === parent_store_id && p.version === parent_version);
55
+ if (is_child)
56
+ yield meta;
57
+ }
58
+ },
59
+ async find_by_hash(store_id, content_hash) {
60
+ return storage.find_by_hash(store_id, content_hash);
61
+ },
62
+ };
63
+ }
64
+ export function create_data_client(storage, emit) {
65
+ return {
66
+ async get(data_key) {
67
+ const bytes = await storage.get(data_key);
68
+ emit({
69
+ type: "data_get",
70
+ store_id: data_key.split("/")[0] ?? data_key,
71
+ version: data_key,
72
+ found: !!bytes,
73
+ });
74
+ if (!bytes) {
75
+ return err({ kind: "not_found", store_id: data_key, version: "" });
76
+ }
77
+ return ok({
78
+ stream: () => new ReadableStream({
79
+ start(controller) {
80
+ controller.enqueue(bytes);
81
+ controller.close();
82
+ },
83
+ }),
84
+ bytes: async () => bytes,
85
+ });
86
+ },
87
+ async put(data_key, input) {
88
+ const bytes = await to_bytes(input);
89
+ await storage.put(data_key, bytes);
90
+ return ok(undefined);
91
+ },
92
+ async delete(data_key) {
93
+ await storage.delete(data_key);
94
+ return ok(undefined);
95
+ },
96
+ async exists(data_key) {
97
+ return storage.exists(data_key);
98
+ },
99
+ };
100
+ }
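The new base module pulls the Result-wrapping and event-emitting client logic out of the individual backends, so a backend now only has to supply the small `MetadataStorage` / `DataStorage` adapters declared above. The sketch below wires a Map-backed `DataStorage` through `create_data_client`; it assumes these exports are reachable via the package's backends entry point (the exact import specifier may differ) and uses a made-up `articles/v1` data key.

```ts
// Sketch only: import specifier assumed; the types and factory come from the new backend/base module.
import { create_data_client, type DataStorage, type CorpusEvent } from "@f0rbit/corpus/backends";

// Minimal DataStorage adapter over a Map, mirroring what the memory backend now provides.
const blobs = new Map<string, Uint8Array>();
const storage: DataStorage = {
  get: async (data_key) => blobs.get(data_key) ?? null,
  put: async (data_key, data) => { blobs.set(data_key, data); },
  delete: async (data_key) => { blobs.delete(data_key); },
  exists: async (data_key) => blobs.has(data_key),
};

// The shared client emits CorpusEvents (e.g. data_get), so observability stays identical across backends.
const emit = (event: CorpusEvent) => console.log(event.type);
const data = create_data_client(storage, emit);

await data.put("articles/v1", new TextEncoder().encode("hello"));
console.log(await data.exists("articles/v1")); // true
const result = await data.get("articles/v1");  // ok(DataHandle) or err({ kind: "not_found", ... })
```

A `MetadataStorage` adapter follows the same pattern (`get`/`put`/`delete`/`list`/`find_by_hash`); the file and memory backend changes below are exactly this split applied to the existing backends.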
package/dist/backend/file.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAiE,YAAY,EAAE,MAAM,UAAU,CAAC;AAQrH,MAAM,MAAM,iBAAiB,GAAG;IAC/B,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CA6MtE"}
1
+ {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../backend/file.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAgB,YAAY,EAAE,MAAM,UAAU,CAAC;AASpE,MAAM,MAAM,iBAAiB,GAAG;IAC/B,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE,iBAAiB,GAAG,OAAO,CAiKtE"}
package/dist/backend/file.js CHANGED
@@ -3,10 +3,10 @@
3
3
  * @description File-system storage backend for local persistence.
4
4
  */
5
5
  import { create_observations_client, create_observations_storage } from "../observations";
6
- import { ok, err } from "../types";
7
- import { to_bytes, create_emitter, filter_snapshots, parse_snapshot_meta } from "../utils";
6
+ import { create_emitter, parse_snapshot_meta } from "../utils";
8
7
  import { mkdir, readdir } from "node:fs/promises";
9
8
  import { join, dirname } from "node:path";
9
+ import { create_metadata_client, create_data_client } from "./base";
10
10
  /**
11
11
  * Creates a file-system storage backend for local persistence.
12
12
  * @category Backends
@@ -67,68 +67,47 @@ export function create_file_backend(config) {
67
67
  const entries = Array.from(meta_map.entries());
68
68
  await Bun.write(path, JSON.stringify(entries));
69
69
  }
70
- const metadata = {
70
+ async function* list_all_stores() {
71
+ try {
72
+ const entries = await readdir(base_path, { withFileTypes: true });
73
+ for (const entry of entries) {
74
+ if (entry.isDirectory() && !entry.name.startsWith("_")) {
75
+ yield entry.name;
76
+ }
77
+ }
78
+ }
79
+ catch { }
80
+ }
81
+ const metadata_storage = {
71
82
  async get(store_id, version) {
72
83
  const store_meta = await read_store_meta(store_id);
73
- const meta = store_meta.get(version);
74
- emit({ type: "meta_get", store_id, version, found: !!meta });
75
- if (!meta) {
76
- return err({ kind: "not_found", store_id, version });
77
- }
78
- return ok(meta);
84
+ return store_meta.get(version) ?? null;
79
85
  },
80
86
  async put(meta) {
81
87
  const store_meta = await read_store_meta(meta.store_id);
82
88
  store_meta.set(meta.version, meta);
83
89
  await write_store_meta(meta.store_id, store_meta);
84
- emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
85
- return ok(undefined);
86
90
  },
87
91
  async delete(store_id, version) {
88
92
  const store_meta = await read_store_meta(store_id);
89
93
  store_meta.delete(version);
90
94
  await write_store_meta(store_id, store_meta);
91
- emit({ type: "meta_delete", store_id, version });
92
- return ok(undefined);
93
95
  },
94
- async *list(store_id, opts) {
95
- const store_meta = await read_store_meta(store_id);
96
- const filtered = filter_snapshots(Array.from(store_meta.values()), opts);
97
- let count = 0;
98
- for (const meta of filtered) {
99
- yield meta;
100
- count++;
101
- }
102
- emit({ type: "meta_list", store_id, count });
103
- },
104
- async get_latest(store_id) {
105
- const store_meta = await read_store_meta(store_id);
106
- let latest = null;
107
- for (const meta of store_meta.values()) {
108
- if (!latest || meta.created_at > latest.created_at) {
109
- latest = meta;
96
+ async *list(store_id) {
97
+ if (store_id) {
98
+ const store_meta = await read_store_meta(store_id);
99
+ for (const meta of store_meta.values()) {
100
+ yield meta;
110
101
  }
111
102
  }
112
- if (!latest) {
113
- return err({ kind: "not_found", store_id, version: "latest" });
114
- }
115
- return ok(latest);
116
- },
117
- async *get_children(parent_store_id, parent_version) {
118
- try {
119
- const entries = await readdir(base_path, { withFileTypes: true });
120
- for (const entry of entries) {
121
- if (!entry.isDirectory() || entry.name.startsWith("_"))
122
- continue;
123
- const store_meta = await read_store_meta(entry.name);
103
+ else {
104
+ for await (const sid of list_all_stores()) {
105
+ const store_meta = await read_store_meta(sid);
124
106
  for (const meta of store_meta.values()) {
125
- const is_child = meta.parents.some(p => p.store_id === parent_store_id && p.version === parent_version);
126
- if (is_child)
127
- yield meta;
107
+ yield meta;
128
108
  }
129
109
  }
130
110
  }
131
- catch { }
132
111
  },
133
112
  async find_by_hash(store_id, content_hash) {
134
113
  const store_meta = await read_store_meta(store_id);
@@ -140,43 +119,24 @@ export function create_file_backend(config) {
140
119
  return null;
141
120
  },
142
121
  };
143
- const data = {
122
+ const data_storage = {
144
123
  async get(data_key) {
145
124
  const path = data_path(data_key);
146
125
  const file = Bun.file(path);
147
- const found = await file.exists();
148
- emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found });
149
- if (!found) {
150
- return err({ kind: "not_found", store_id: data_key, version: "" });
151
- }
152
- return ok({
153
- stream: () => file.stream(),
154
- bytes: async () => new Uint8Array(await file.arrayBuffer()),
155
- });
126
+ if (!(await file.exists()))
127
+ return null;
128
+ return new Uint8Array(await file.arrayBuffer());
156
129
  },
157
- async put(data_key, input) {
130
+ async put(data_key, data) {
158
131
  const path = data_path(data_key);
159
132
  await mkdir(dirname(path), { recursive: true });
160
- try {
161
- const bytes = await to_bytes(input);
162
- await Bun.write(path, bytes);
163
- return ok(undefined);
164
- }
165
- catch (cause) {
166
- return err({ kind: "storage_error", cause: cause, operation: "put" });
167
- }
133
+ await Bun.write(path, data);
168
134
  },
169
135
  async delete(data_key) {
170
136
  const path = data_path(data_key);
171
- try {
172
- const file = Bun.file(path);
173
- if (await file.exists()) {
174
- await file.delete();
175
- }
176
- return ok(undefined);
177
- }
178
- catch (cause) {
179
- return err({ kind: "storage_error", cause: cause, operation: "delete" });
137
+ const file = Bun.file(path);
138
+ if (await file.exists()) {
139
+ await file.delete();
180
140
  }
181
141
  },
182
142
  async exists(data_key) {
@@ -185,6 +145,8 @@ export function create_file_backend(config) {
185
145
  return file.exists();
186
146
  },
187
147
  };
148
+ const metadata = create_metadata_client(metadata_storage, emit);
149
+ const data = create_data_client(data_storage, emit);
188
150
  const file_path = join(base_path, "_observations.json");
189
151
  async function read_observations() {
190
152
  const file = Bun.file(file_path);
@@ -205,7 +167,7 @@ export function create_file_backend(config) {
205
167
  set_all: write_observations,
206
168
  get_one: async (id) => {
207
169
  const rows = await read_observations();
208
- return rows.find(r => r.id === id) ?? null;
170
+ return rows.find((r) => r.id === id) ?? null;
209
171
  },
210
172
  add_one: async (row) => {
211
173
  const rows = await read_observations();
@@ -214,13 +176,13 @@ export function create_file_backend(config) {
214
176
  },
215
177
  remove_one: async (id) => {
216
178
  const rows = await read_observations();
217
- const idx = rows.findIndex(r => r.id === id);
179
+ const idx = rows.findIndex((r) => r.id === id);
218
180
  if (idx === -1)
219
181
  return false;
220
182
  rows.splice(idx, 1);
221
183
  await write_observations(rows);
222
184
  return true;
223
- }
185
+ },
224
186
  });
225
187
  const observations = create_observations_client(storage, metadata);
226
188
  return { metadata, data, observations, on_event };
package/dist/backend/memory.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAiE,MAAM,UAAU,CAAC;AAKvG,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAE7C,MAAM,MAAM,oBAAoB,GAAG;IAClC,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CA4I7E"}
1
+ {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAgB,MAAM,UAAU,CAAC;AAItD,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAI7C,MAAM,MAAM,oBAAoB,GAAG;IAClC,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAoF7E"}
package/dist/backend/memory.js CHANGED
@@ -3,8 +3,8 @@
3
3
  * @description In-memory storage backend for testing and development.
4
4
  */
5
5
  import { create_observations_client, create_observations_storage } from "../observations";
6
- import { ok, err } from "../types";
7
- import { to_bytes, create_emitter, filter_snapshots } from "../utils";
6
+ import { create_emitter } from "../utils";
7
+ import { create_metadata_client, create_data_client } from "./base";
8
8
  /**
9
9
  * Creates an in-memory storage backend.
10
10
  * @category Backends
@@ -40,61 +40,22 @@ export function create_memory_backend(options) {
40
40
  function make_meta_key(store_id, version) {
41
41
  return `${store_id}:${version}`;
42
42
  }
43
- const metadata = {
43
+ const metadata_storage = {
44
44
  async get(store_id, version) {
45
- const meta = meta_store.get(make_meta_key(store_id, version));
46
- emit({ type: "meta_get", store_id, version, found: !!meta });
47
- if (!meta) {
48
- return err({ kind: "not_found", store_id, version });
49
- }
50
- return ok(meta);
45
+ return meta_store.get(make_meta_key(store_id, version)) ?? null;
51
46
  },
52
47
  async put(meta) {
53
48
  meta_store.set(make_meta_key(meta.store_id, meta.version), meta);
54
- emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
55
- return ok(undefined);
56
49
  },
57
50
  async delete(store_id, version) {
58
51
  meta_store.delete(make_meta_key(store_id, version));
59
- emit({ type: "meta_delete", store_id, version });
60
- return ok(undefined);
61
- },
62
- async *list(store_id, opts) {
63
- const prefix = `${store_id}:`;
64
- const store_metas = [];
65
- for (const [key, meta] of meta_store) {
66
- if (key.startsWith(prefix)) {
67
- store_metas.push(meta);
68
- }
69
- }
70
- const filtered = filter_snapshots(store_metas, opts);
71
- let count = 0;
72
- for (const meta of filtered) {
73
- yield meta;
74
- count++;
75
- }
76
- emit({ type: "meta_list", store_id, count });
77
52
  },
78
- async get_latest(store_id) {
79
- let latest = null;
80
- const prefix = `${store_id}:`;
53
+ async *list(store_id) {
54
+ const prefix = store_id ? `${store_id}:` : "";
81
55
  for (const [key, meta] of meta_store) {
82
- if (!key.startsWith(prefix))
83
- continue;
84
- if (!latest || meta.created_at > latest.created_at) {
85
- latest = meta;
86
- }
87
- }
88
- if (!latest) {
89
- return err({ kind: "not_found", store_id, version: "latest" });
90
- }
91
- return ok(latest);
92
- },
93
- async *get_children(parent_store_id, parent_version) {
94
- for (const meta of meta_store.values()) {
95
- const is_child = meta.parents.some(p => p.store_id === parent_store_id && p.version === parent_version);
96
- if (is_child)
56
+ if (!prefix || key.startsWith(prefix)) {
97
57
  yield meta;
58
+ }
98
59
  }
99
60
  },
100
61
  async find_by_hash(store_id, content_hash) {
@@ -107,36 +68,22 @@ export function create_memory_backend(options) {
107
68
  return null;
108
69
  },
109
70
  };
110
- const data = {
71
+ const data_storage = {
111
72
  async get(data_key) {
112
- const bytes = data_store.get(data_key);
113
- emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found: !!bytes });
114
- if (!bytes) {
115
- return err({ kind: "not_found", store_id: data_key, version: "" });
116
- }
117
- return ok({
118
- stream: () => new ReadableStream({
119
- start(controller) {
120
- controller.enqueue(bytes);
121
- controller.close();
122
- },
123
- }),
124
- bytes: async () => bytes,
125
- });
73
+ return data_store.get(data_key) ?? null;
126
74
  },
127
- async put(data_key, input) {
128
- const bytes = await to_bytes(input);
129
- data_store.set(data_key, bytes);
130
- return ok(undefined);
75
+ async put(data_key, data) {
76
+ data_store.set(data_key, data);
131
77
  },
132
78
  async delete(data_key) {
133
79
  data_store.delete(data_key);
134
- return ok(undefined);
135
80
  },
136
81
  async exists(data_key) {
137
82
  return data_store.has(data_key);
138
83
  },
139
84
  };
85
+ const metadata = create_metadata_client(metadata_storage, emit);
86
+ const data = create_data_client(data_storage, emit);
140
87
  const storage = create_observations_storage({
141
88
  get_all: async () => Array.from(observation_store.values()),
142
89
  set_all: async (rows) => {
@@ -145,12 +92,14 @@ export function create_memory_backend(options) {
145
92
  observation_store.set(row.id, row);
146
93
  },
147
94
  get_one: async (id) => observation_store.get(id) ?? null,
148
- add_one: async (row) => { observation_store.set(row.id, row); },
95
+ add_one: async (row) => {
96
+ observation_store.set(row.id, row);
97
+ },
149
98
  remove_one: async (id) => {
150
99
  const had = observation_store.has(id);
151
100
  observation_store.delete(id);
152
101
  return had;
153
- }
102
+ },
154
103
  });
155
104
  const observations = create_observations_client(storage, metadata);
156
105
  return { metadata, data, observations, on_event };
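Since both backends are now composed from the shared base clients, the externally visible behaviour (Result-wrapped gets, `data_get` / `meta_put` events delivered through `on_event`) is the same regardless of storage. A minimal round trip against the in-memory backend, assuming the same backends entry point and a made-up `notes/v1` key:

```ts
import { create_memory_backend } from "@f0rbit/corpus/backends"; // import path assumed

const backend = create_memory_backend();

// Raw bytes go through the shared data client; the "store_id/version" key shape
// matches how the base client derives store_id for its data_get events.
await backend.data.put("notes/v1", new TextEncoder().encode('{"title":"hello"}'));

console.log(await backend.data.exists("notes/v1")); // true
const res = await backend.data.get("notes/v1");     // ok(DataHandle) from the shared base client
```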
package/dist/backends.d.ts CHANGED
@@ -7,5 +7,6 @@ export { create_memory_backend, type MemoryBackendOptions } from './backend/memo
7
7
  export { create_file_backend, type FileBackendConfig } from './backend/file';
8
8
  export { create_cloudflare_backend, type CloudflareBackendConfig } from './backend/cloudflare';
9
9
  export { create_layered_backend, type LayeredBackendOptions } from './backend/layered';
10
+ export { create_metadata_client, create_data_client, type MetadataStorage, type DataStorage } from './backend/base';
10
11
  export type { Backend, MetadataClient, DataClient, DataHandle, EventHandler, CorpusEvent } from './types';
11
12
  //# sourceMappingURL=backends.d.ts.map
package/dist/backends.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"backends.d.ts","sourceRoot":"","sources":["../backends.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AACtF,YAAY,EAAE,OAAO,EAAE,cAAc,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA"}
1
+ {"version":3,"file":"backends.d.ts","sourceRoot":"","sources":["../backends.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AACtF,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,KAAK,eAAe,EAAE,KAAK,WAAW,EAAE,MAAM,gBAAgB,CAAA;AACnH,YAAY,EAAE,OAAO,EAAE,cAAc,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA"}
package/dist/backends.js CHANGED
@@ -7,3 +7,4 @@ export { create_memory_backend } from './backend/memory';
7
7
  export { create_file_backend } from './backend/file';
8
8
  export { create_cloudflare_backend } from './backend/cloudflare';
9
9
  export { create_layered_backend } from './backend/layered';
10
+ export { create_metadata_client, create_data_client } from './backend/base';
package/dist/cloudflare.d.ts CHANGED
@@ -9,4 +9,5 @@ export { json_codec, text_codec, binary_codec, compute_hash, generate_version }
9
9
  export { corpus_snapshots, type CorpusSnapshotRow, type CorpusSnapshotInsert } from './schema';
10
10
  export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
11
11
  export { ok, err, define_store } from './types';
12
+ export * from './observations';
12
13
  //# sourceMappingURL=cloudflare.d.ts.map
package/dist/cloudflare.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA"}
1
+ {"version":3,"file":"cloudflare.d.ts","sourceRoot":"","sources":["../cloudflare.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAE9F,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE/C,cAAc,gBAAgB,CAAA"}
package/dist/cloudflare.js CHANGED
@@ -8,3 +8,4 @@ export { create_cloudflare_backend } from './backend/cloudflare';
8
8
  export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
9
9
  export { corpus_snapshots } from './schema';
10
10
  export { ok, err, define_store } from './types';
11
+ export * from './observations';
package/dist/concurrency.d.ts ADDED
@@ -0,0 +1,78 @@
1
+ /**
2
+ * @module Concurrency
3
+ * @description Utilities for controlling concurrent async operations.
4
+ */
5
+ /**
6
+ * Semaphore for controlling concurrent operations.
7
+ *
8
+ * Limits the number of concurrent async operations by requiring
9
+ * callers to acquire a permit before proceeding. When all permits
10
+ * are taken, subsequent acquires wait until a permit is released.
11
+ *
12
+ * @example
13
+ * ```ts
14
+ * const semaphore = new Semaphore(3) // Allow 3 concurrent operations
15
+ *
16
+ * async function rateLimitedFetch(url: string) {
17
+ * await semaphore.acquire()
18
+ * try {
19
+ * return await fetch(url)
20
+ * } finally {
21
+ * semaphore.release()
22
+ * }
23
+ * }
24
+ *
25
+ * // Only 3 fetches will run concurrently
26
+ * await Promise.all(urls.map(rateLimitedFetch))
27
+ * ```
28
+ */
29
+ export declare class Semaphore {
30
+ private permits;
31
+ private waiting;
32
+ constructor(permits: number);
33
+ /**
34
+ * Acquire a permit. Resolves immediately if available,
35
+ * otherwise waits until a permit is released.
36
+ */
37
+ acquire(): Promise<void>;
38
+ /**
39
+ * Release a permit, allowing the next waiting operation to proceed.
40
+ */
41
+ release(): void;
42
+ }
43
+ /**
44
+ * Map over array with controlled concurrency.
45
+ *
46
+ * Unlike Promise.all which starts all operations at once, this limits
47
+ * concurrent operations. Results are returned in the same order as inputs.
48
+ *
49
+ * @param items - Array of items to process
50
+ * @param mapper - Async function to apply to each item
51
+ * @param concurrency - Maximum number of concurrent operations
52
+ * @returns Array of results in the same order as inputs
53
+ *
54
+ * @example
55
+ * ```ts
56
+ * // Process 100 items, but only 5 at a time
57
+ * const results = await parallel_map(
58
+ * urls,
59
+ * async (url, index) => {
60
+ * console.log(`Fetching ${index + 1}/${urls.length}`)
61
+ * return fetch(url).then(r => r.json())
62
+ * },
63
+ * 5
64
+ * )
65
+ * ```
66
+ *
67
+ * @example
68
+ * ```ts
69
+ * // Use with AI APIs that have rate limits
70
+ * const summaries = await parallel_map(
71
+ * documents,
72
+ * doc => summarize(doc),
73
+ * 3 // Only 3 concurrent API calls
74
+ * )
75
+ * ```
76
+ */
77
+ export declare const parallel_map: <T, R>(items: T[], mapper: (item: T, index: number) => Promise<R>, concurrency: number) => Promise<R[]>;
78
+ //# sourceMappingURL=concurrency.d.ts.map
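The JSDoc above already demonstrates `Semaphore` and `parallel_map` on their own; as a sketch of how the new concurrency helpers can pair with the backend clients, here is a bounded-concurrency bulk write (the import path, the prepared `payloads` array, and the `backend` value are all assumptions):

```ts
import { parallel_map } from "@f0rbit/corpus"; // import path assumed; the helper lives in dist/concurrency

declare const payloads: Array<[string, Uint8Array]>; // hypothetical (data_key, bytes) pairs
declare const backend: { data: { put: (data_key: string, bytes: Uint8Array) => Promise<unknown> } };

// At most 4 puts in flight at once; results come back in input order.
const results = await parallel_map(
  payloads,
  ([data_key, bytes]) => backend.data.put(data_key, bytes),
  4,
);
```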
package/dist/concurrency.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"concurrency.d.ts","sourceRoot":"","sources":["../concurrency.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,qBAAa,SAAS;IACrB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,OAAO,CAAyB;gBAE5B,OAAO,EAAE,MAAM;IAI3B;;;OAGG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAU9B;;OAEG;IACH,OAAO,IAAI,IAAI;CAQf;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,eAAO,MAAM,YAAY,GAAU,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,EAAE,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,EAAE,aAAa,MAAM,KAAG,OAAO,CAAC,CAAC,EAAE,CAgBrI,CAAC"}