@f0rbit/corpus 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/dist/backend/cloudflare.d.ts +1 -1
  2. package/dist/backend/cloudflare.d.ts.map +1 -1
  3. package/dist/backend/cloudflare.js +168 -58
  4. package/dist/backend/file.d.ts +1 -1
  5. package/dist/backend/file.d.ts.map +1 -1
  6. package/dist/backend/file.js +65 -67
  7. package/dist/backend/layered.d.ts.map +1 -1
  8. package/dist/backend/layered.js +67 -19
  9. package/dist/backend/memory.d.ts +2 -1
  10. package/dist/backend/memory.d.ts.map +1 -1
  11. package/dist/backend/memory.js +39 -53
  12. package/dist/corpus.d.ts +11 -0
  13. package/dist/corpus.d.ts.map +1 -1
  14. package/dist/corpus.js +52 -0
  15. package/dist/index.d.ts +2 -1
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/index.js +1 -0
  18. package/dist/observations/client.d.ts +12 -0
  19. package/dist/observations/client.d.ts.map +1 -0
  20. package/dist/observations/client.js +115 -0
  21. package/dist/observations/index.d.ts +12 -0
  22. package/dist/observations/index.d.ts.map +1 -0
  23. package/dist/observations/index.js +11 -0
  24. package/dist/observations/schema.d.ts +267 -0
  25. package/dist/observations/schema.d.ts.map +1 -0
  26. package/dist/observations/schema.js +55 -0
  27. package/dist/observations/storage.d.ts +75 -0
  28. package/dist/observations/storage.d.ts.map +1 -0
  29. package/dist/observations/storage.js +137 -0
  30. package/dist/observations/types.d.ts +219 -0
  31. package/dist/observations/types.d.ts.map +1 -0
  32. package/dist/observations/types.js +40 -0
  33. package/dist/observations/utils.d.ts +183 -0
  34. package/dist/observations/utils.d.ts.map +1 -0
  35. package/dist/observations/utils.js +272 -0
  36. package/dist/sst.d.ts +1 -1
  37. package/dist/sst.d.ts.map +1 -1
  38. package/dist/sst.js +20 -0
  39. package/dist/types.d.ts +61 -0
  40. package/dist/types.d.ts.map +1 -1
  41. package/dist/utils.d.ts +38 -1
  42. package/dist/utils.d.ts.map +1 -1
  43. package/dist/utils.js +84 -0
  44. package/package.json +1 -1
  45. package/dist/codec.d.ts +0 -9
  46. package/dist/codec.d.ts.map +0 -1
  47. package/dist/codec.js +0 -21
  48. package/dist/codecs.d.ts +0 -8
  49. package/dist/codecs.d.ts.map +0 -1
  50. package/dist/codecs.js +0 -6
  51. package/dist/core.d.ts +0 -9
  52. package/dist/core.d.ts.map +0 -1
  53. package/dist/core.js +0 -7
  54. package/dist/hash.d.ts +0 -2
  55. package/dist/hash.d.ts.map +0 -1
  56. package/dist/hash.js +0 -5
  57. package/dist/store.d.ts +0 -3
  58. package/dist/store.d.ts.map +0 -1
  59. package/dist/store.js +0 -125
  60. package/dist/version.d.ts +0 -7
  61. package/dist/version.d.ts.map +0 -1
  62. package/dist/version.js +0 -31
package/dist/backend/layered.js CHANGED
@@ -3,6 +3,7 @@
  * @description Layered backend for caching and replication strategies.
  */
  import { ok, err } from '../types';
+ import { to_bytes } from '../utils';
  /**
  * Creates a layered backend that combines multiple backends with read/write separation.
  * @category Backends
@@ -169,25 +170,72 @@ export function create_layered_backend(options) {
  return false;
  },
  };
- return { metadata, data };
+ const observations = createLayeredObservationsClient(read, write);
+ return {
+ metadata,
+ data,
+ ...(observations ? { observations } : {}),
+ };
  }
- async function to_bytes(data) {
- if (data instanceof Uint8Array)
- return data;
- const chunks = [];
- const reader = data.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done)
- break;
- chunks.push(value);
- }
- const total = chunks.reduce((sum, c) => sum + c.length, 0);
- const result = new Uint8Array(total);
- let offset = 0;
- for (const chunk of chunks) {
- result.set(chunk, offset);
- offset += chunk.length;
+ function createLayeredObservationsClient(readLayers, writeLayers) {
+ const readLayer = readLayers.find(l => l.observations);
+ const writeLayersWithObs = writeLayers.filter(l => l.observations);
+ if (!readLayer?.observations && writeLayersWithObs.length === 0) {
+ return undefined;
  }
- return result;
+ const primary = readLayer?.observations;
+ return {
+ async put(type, opts) {
+ if (writeLayersWithObs.length === 0) {
+ return err({ kind: 'invalid_config', message: 'No write layers support observations' });
+ }
+ let result;
+ for (const layer of writeLayersWithObs) {
+ result = await layer.observations.put(type, opts);
+ if (!result.ok)
+ return result;
+ }
+ return result;
+ },
+ async get(id) {
+ if (!primary) {
+ return err({ kind: 'observation_not_found', id });
+ }
+ return primary.get(id);
+ },
+ async *query(opts) {
+ if (!primary)
+ return;
+ yield* primary.query(opts);
+ },
+ async *query_meta(opts) {
+ if (!primary)
+ return;
+ yield* primary.query_meta(opts);
+ },
+ async delete(id) {
+ if (writeLayersWithObs.length === 0) {
+ return err({ kind: 'observation_not_found', id });
+ }
+ let result;
+ for (const layer of writeLayersWithObs) {
+ result = await layer.observations.delete(id);
+ }
+ return result;
+ },
+ async delete_by_source(source) {
+ let total = 0;
+ for (const layer of writeLayersWithObs) {
+ const result = await layer.observations.delete_by_source(source);
+ if (result.ok)
+ total += result.value;
+ }
+ return ok(total);
+ },
+ async is_stale(pointer) {
+ if (!primary)
+ return false;
+ return primary.is_stale(pointer);
+ },
+ };
  }
package/dist/backend/memory.d.ts CHANGED
@@ -2,7 +2,8 @@
  * @module Backends
  * @description In-memory storage backend for testing and development.
  */
- import type { Backend, EventHandler } from '../types';
+ import type { Backend } from "../types";
+ import type { EventHandler } from "../types";
  export type MemoryBackendOptions = {
  on_event?: EventHandler;
  };
package/dist/backend/memory.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAwF,YAAY,EAAE,MAAM,UAAU,CAAA;AAG3I,MAAM,MAAM,oBAAoB,GAAG;IACjC,QAAQ,CAAC,EAAE,YAAY,CAAA;CACxB,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CAiJ7E"}
+ {"version":3,"file":"memory.d.ts","sourceRoot":"","sources":["../../backend/memory.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAiE,MAAM,UAAU,CAAC;AAKvG,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAE7C,MAAM,MAAM,oBAAoB,GAAG;IAClC,QAAQ,CAAC,EAAE,YAAY,CAAC;CACxB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,CAAC,EAAE,oBAAoB,GAAG,OAAO,CA4I7E"}
package/dist/backend/memory.js CHANGED
@@ -2,7 +2,9 @@
  * @module Backends
  * @description In-memory storage backend for testing and development.
  */
- import { ok, err } from '../types';
+ import { create_observations_client, create_observations_storage } from "../observations";
+ import { ok, err } from "../types";
+ import { to_bytes, create_emitter, filter_snapshots } from "../utils";
  /**
  * Creates an in-memory storage backend.
  * @category Backends
@@ -32,54 +34,46 @@ import { ok, err } from '../types';
  export function create_memory_backend(options) {
  const meta_store = new Map();
  const data_store = new Map();
+ const observation_store = new Map();
  const on_event = options?.on_event;
- function emit(event) {
- on_event?.(event);
- }
+ const emit = create_emitter(on_event);
  function make_meta_key(store_id, version) {
  return `${store_id}:${version}`;
  }
  const metadata = {
  async get(store_id, version) {
  const meta = meta_store.get(make_meta_key(store_id, version));
- emit({ type: 'meta_get', store_id, version, found: !!meta });
+ emit({ type: "meta_get", store_id, version, found: !!meta });
  if (!meta) {
- return err({ kind: 'not_found', store_id, version });
+ return err({ kind: "not_found", store_id, version });
  }
  return ok(meta);
  },
  async put(meta) {
  meta_store.set(make_meta_key(meta.store_id, meta.version), meta);
- emit({ type: 'meta_put', store_id: meta.store_id, version: meta.version });
+ emit({ type: "meta_put", store_id: meta.store_id, version: meta.version });
  return ok(undefined);
  },
  async delete(store_id, version) {
  meta_store.delete(make_meta_key(store_id, version));
- emit({ type: 'meta_delete', store_id, version });
+ emit({ type: "meta_delete", store_id, version });
  return ok(undefined);
  },
  async *list(store_id, opts) {
  const prefix = `${store_id}:`;
- const matches = [];
+ const store_metas = [];
  for (const [key, meta] of meta_store) {
- if (!key.startsWith(prefix))
- continue;
- if (opts?.before && meta.created_at >= opts.before)
- continue;
- if (opts?.after && meta.created_at <= opts.after)
- continue;
- if (opts?.tags?.length && !opts.tags.some(t => meta.tags?.includes(t)))
- continue;
- matches.push(meta);
+ if (key.startsWith(prefix)) {
+ store_metas.push(meta);
+ }
  }
- matches.sort((a, b) => b.created_at.getTime() - a.created_at.getTime());
- const limit = opts?.limit ?? Infinity;
+ const filtered = filter_snapshots(store_metas, opts);
  let count = 0;
- for (const match of matches.slice(0, limit)) {
- yield match;
+ for (const meta of filtered) {
+ yield meta;
  count++;
  }
- emit({ type: 'meta_list', store_id, count });
+ emit({ type: "meta_list", store_id, count });
  },
  async get_latest(store_id) {
  let latest = null;
@@ -92,7 +86,7 @@ export function create_memory_backend(options) {
  }
  }
  if (!latest) {
- return err({ kind: 'not_found', store_id, version: 'latest' });
+ return err({ kind: "not_found", store_id, version: "latest" });
  }
  return ok(latest);
  },
@@ -116,36 +110,22 @@ export function create_memory_backend(options) {
  const data = {
  async get(data_key) {
  const bytes = data_store.get(data_key);
- emit({ type: 'data_get', store_id: data_key.split('/')[0] ?? data_key, version: data_key, found: !!bytes });
+ emit({ type: "data_get", store_id: data_key.split("/")[0] ?? data_key, version: data_key, found: !!bytes });
  if (!bytes) {
- return err({ kind: 'not_found', store_id: data_key, version: '' });
+ return err({ kind: "not_found", store_id: data_key, version: "" });
  }
  return ok({
  stream: () => new ReadableStream({
  start(controller) {
  controller.enqueue(bytes);
  controller.close();
- }
+ },
  }),
  bytes: async () => bytes,
  });
  },
  async put(data_key, input) {
- let bytes;
- if (input instanceof Uint8Array) {
- bytes = input;
- }
- else {
- const chunks = [];
- const reader = input.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done)
- break;
- chunks.push(value);
- }
- bytes = concat_bytes(chunks);
- }
+ const bytes = await to_bytes(input);
  data_store.set(data_key, bytes);
  return ok(undefined);
  },
@@ -157,15 +137,21 @@ export function create_memory_backend(options) {
  return data_store.has(data_key);
  },
  };
- return { metadata, data, on_event };
- }
- function concat_bytes(chunks) {
- const total = chunks.reduce((sum, c) => sum + c.length, 0);
- const result = new Uint8Array(total);
- let offset = 0;
- for (const chunk of chunks) {
- result.set(chunk, offset);
- offset += chunk.length;
- }
- return result;
+ const storage = create_observations_storage({
+ get_all: async () => Array.from(observation_store.values()),
+ set_all: async (rows) => {
+ observation_store.clear();
+ for (const row of rows)
+ observation_store.set(row.id, row);
+ },
+ get_one: async (id) => observation_store.get(id) ?? null,
+ add_one: async (row) => { observation_store.set(row.id, row); },
+ remove_one: async (id) => {
+ const had = observation_store.has(id);
+ observation_store.delete(id);
+ return had;
+ }
+ });
+ const observations = create_observations_client(storage, metadata);
+ return { metadata, data, observations, on_event };
  }
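Note: the rewritten memory backend above shows the intended pattern for adding observations support to any backend — wrap a simple row store in `create_observations_storage` and hand it to `create_observations_client`. A minimal sketch of the same wiring outside the memory backend, assuming the adapter contract seen in this diff (`get_all`/`set_all`/`get_one`/`add_one`/`remove_one`); the row typing is left loose (`any`) since the real `ObservationRow` shape lives in `observations/schema`.

```ts
import { create_observations_storage, create_observations_client } from "@f0rbit/corpus";
import type { MetadataClient } from "@f0rbit/corpus";

// Hypothetical adapter: any id-keyed row store can back observations.
export function make_map_observations(metadata: MetadataClient) {
  const rows = new Map<string, any>();
  const storage = create_observations_storage({
    get_all: async () => Array.from(rows.values()),
    set_all: async (next: any[]) => {
      rows.clear();
      for (const row of next) rows.set(row.id, row);
    },
    get_one: async (id: string) => rows.get(id) ?? null,
    add_one: async (row: any) => { rows.set(row.id, row); },
    remove_one: async (id: string) => {
      const had = rows.has(id);
      rows.delete(id);
      return had;
    },
  });
  // The client adds schema validation, id/timestamp generation, and
  // staleness checks (via metadata.get_latest) on top of the raw adapter.
  return create_observations_client(storage, metadata);
}
```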
package/dist/corpus.d.ts CHANGED
@@ -64,6 +64,17 @@ export declare function create_store<T>(backend: Backend, definition: StoreDefin
  * // Type-safe access to stores
  * await corpus.stores.users.put({ name: 'Alice', email: 'alice@example.com' })
  * await corpus.stores.notes.put('Hello, world!')
+ *
+ * // With observations
+ * const corpus_with_obs = create_corpus()
+ * .with_backend(create_memory_backend())
+ * .with_store(users)
+ * .with_observations([EntityType, SentimentType])
+ * .build()
+ *
+ * // Pointer utilities
+ * const pointer = corpus_with_obs.create_pointer('users', 'v123', '$.name')
+ * const value = await corpus_with_obs.resolve_pointer(pointer)
  * ```
  */
  export declare function create_corpus(): CorpusBuilder<{}>;
package/dist/corpus.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAU,aAAa,EAAE,eAAe,EAAE,KAAK,EAAqD,MAAM,SAAS,CAAA;AAIxI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAmJlG;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAgB,aAAa,IAAI,aAAa,CAAC,EAAE,CAAC,CAoCjD"}
+ {"version":3,"file":"corpus.d.ts","sourceRoot":"","sources":["../corpus.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAU,aAAa,EAAE,eAAe,EAAE,KAAK,EAAyE,MAAM,SAAS,CAAA;AAM5J;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAmJlG;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAuCG;AACH,wBAAgB,aAAa,IAAI,aAAa,CAAC,EAAE,CAAC,CAiFjD"}
package/dist/corpus.js CHANGED
@@ -4,6 +4,7 @@
  */
  import { ok, err } from './types';
  import { compute_hash, generate_version } from './utils';
+ import { create_pointer, resolve_path, apply_span } from './observations/utils';
  /**
  * Creates a typed Store instance bound to a Backend.
  * @category Core
@@ -190,11 +191,23 @@ export function create_store(backend, definition) {
  * // Type-safe access to stores
  * await corpus.stores.users.put({ name: 'Alice', email: 'alice@example.com' })
  * await corpus.stores.notes.put('Hello, world!')
+ *
+ * // With observations
+ * const corpus_with_obs = create_corpus()
+ * .with_backend(create_memory_backend())
+ * .with_store(users)
+ * .with_observations([EntityType, SentimentType])
+ * .build()
+ *
+ * // Pointer utilities
+ * const pointer = corpus_with_obs.create_pointer('users', 'v123', '$.name')
+ * const value = await corpus_with_obs.resolve_pointer(pointer)
  * ```
  */
  export function create_corpus() {
  let backend = null;
  const definitions = [];
+ let observation_types = [];
  const builder = {
  with_backend(b) {
  backend = b;
@@ -204,6 +217,10 @@ export function create_corpus() {
  definitions.push(definition);
  return builder;
  },
+ with_observations(types) {
+ observation_types = types;
+ return builder;
+ },
  build() {
  if (!backend) {
  throw new Error('Backend is required. Call with_backend() first.');
@@ -213,10 +230,45 @@ export function create_corpus() {
  for (const def of definitions) {
  stores[def.id] = create_store(b, def);
  }
+ const observations_client = observation_types.length > 0 && 'observations' in b
+ ? b.observations
+ : undefined;
+ async function resolve_pointer_impl(pointer) {
+ const store = stores[pointer.store_id];
+ if (!store) {
+ return err({ kind: 'not_found', store_id: pointer.store_id, version: pointer.version });
+ }
+ const snapshot_result = await store.get(pointer.version);
+ if (!snapshot_result.ok)
+ return snapshot_result;
+ let value = snapshot_result.value.data;
+ if (pointer.path) {
+ const path_result = resolve_path(value, pointer.path);
+ if (!path_result.ok)
+ return path_result;
+ value = path_result.value;
+ }
+ if (pointer.span && typeof value === 'string') {
+ const span_result = apply_span(value, pointer.span);
+ if (!span_result.ok)
+ return span_result;
+ value = span_result.value;
+ }
+ return ok(value);
+ }
+ async function is_superseded_impl(pointer) {
+ if (!observations_client?.is_stale)
+ return false;
+ return observations_client.is_stale(pointer);
+ }
  return {
  stores,
  metadata: b.metadata,
  data: b.data,
+ observations: observations_client,
+ create_pointer,
+ resolve_pointer: resolve_pointer_impl,
+ is_superseded: is_superseded_impl,
  };
  },
  };
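Note: the builder changes above add `with_observations()` plus pointer helpers on the built corpus. A usage sketch mirroring the JSDoc example in this diff — `users`, `EntityType`, and `SentimentType` are placeholders, and the exact `define_store` options and observation-type shape are assumptions:

```ts
import { create_corpus, create_memory_backend, define_store } from "@f0rbit/corpus";

// Assumed minimal store definition; real options include a codec/schema.
const users = define_store({ id: "users" } as any);

// Stand-in observation types: the client expects a name plus a zod-style schema.
declare const EntityType: { name: string; schema: { safeParse(v: unknown): any } };
declare const SentimentType: typeof EntityType;

const corpus = create_corpus()
  .with_backend(create_memory_backend())
  .with_store(users)
  .with_observations([EntityType, SentimentType])
  .build();

// New in 0.1.6: pointers into snapshot data.
const pointer = corpus.create_pointer("users", "v123", "$.name");
const value = await corpus.resolve_pointer(pointer); // resolves path/span against the snapshot
const stale = await corpus.is_superseded(pointer);   // true once a newer version exists
console.log(value, stale);
```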
package/dist/index.d.ts CHANGED
@@ -5,7 +5,8 @@ export { create_cloudflare_backend, type CloudflareBackendConfig } from './backe
  export { create_layered_backend, type LayeredBackendOptions } from './backend/layered';
  export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
  export { corpus_snapshots, type CorpusSnapshotRow, type CorpusSnapshotInsert } from './schema';
- export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, DefineStoreOpts, DataKeyContext, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, } from './types';
+ export type { ContentType, ParentRef, SnapshotMeta, Snapshot, DataHandle, MetadataClient, DataClient, ListOpts, Backend, Codec, Store, StoreDefinition, DefineStoreOpts, DataKeyContext, PutOpts, CorpusBuilder, Corpus, CorpusError, Result, CorpusEvent, EventHandler, ObservationsClient, } from './types';
  export { ok, err, define_store } from './types';
+ export * from './observations';
  export { createCorpusInfra, CORPUS_MIGRATION_SQL, type CorpusInfra, type CorpusInfraConfig } from './sst';
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AAEtF,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,eAAe,EACf,cAAc,EACd,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,GACb,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE/C,OAAO,EAAE,iBAAiB,EAAE,oBAAoB,EAAE,KAAK,WAAW,EAAE,KAAK,iBAAiB,EAAE,MAAM,OAAO,CAAA"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,UAAU,CAAA;AAEtD,OAAO,EAAE,qBAAqB,EAAE,KAAK,oBAAoB,EAAE,MAAM,kBAAkB,CAAA;AACnF,OAAO,EAAE,mBAAmB,EAAE,KAAK,iBAAiB,EAAE,MAAM,gBAAgB,CAAA;AAC5E,OAAO,EAAE,yBAAyB,EAAE,KAAK,uBAAuB,EAAE,MAAM,sBAAsB,CAAA;AAC9F,OAAO,EAAE,sBAAsB,EAAE,KAAK,qBAAqB,EAAE,MAAM,mBAAmB,CAAA;AAEtF,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,YAAY,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAA;AAE9F,OAAO,EAAE,gBAAgB,EAAE,KAAK,iBAAiB,EAAE,KAAK,oBAAoB,EAAE,MAAM,UAAU,CAAA;AAE9F,YAAY,EACV,WAAW,EACX,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,UAAU,EACV,cAAc,EACd,UAAU,EACV,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,eAAe,EACf,eAAe,EACf,cAAc,EACd,OAAO,EACP,aAAa,EACb,MAAM,EACN,WAAW,EACX,MAAM,EACN,WAAW,EACX,YAAY,EACZ,kBAAkB,GACnB,MAAM,SAAS,CAAA;AAEhB,OAAO,EAAE,EAAE,EAAE,GAAG,EAAE,YAAY,EAAE,MAAM,SAAS,CAAA;AAE/C,cAAc,gBAAgB,CAAA;AAE9B,OAAO,EAAE,iBAAiB,EAAE,oBAAoB,EAAE,KAAK,WAAW,EAAE,KAAK,iBAAiB,EAAE,MAAM,OAAO,CAAA"}
package/dist/index.js CHANGED
@@ -6,4 +6,5 @@ export { create_layered_backend } from './backend/layered';
  export { json_codec, text_codec, binary_codec, compute_hash, generate_version } from './utils';
  export { corpus_snapshots } from './schema';
  export { ok, err, define_store } from './types';
+ export * from './observations';
  export { createCorpusInfra, CORPUS_MIGRATION_SQL } from './sst';
package/dist/observations/client.d.ts ADDED
@@ -0,0 +1,12 @@
+ /**
+ * @module ObservationsClient
+ * @description Centralized business logic for observations, built on storage adapters.
+ */
+ import type { MetadataClient, ObservationsClient } from '../types';
+ import type { ObservationsStorage } from './storage';
+ /**
+ * Creates an ObservationsClient from a storage adapter.
+ * All business logic (validation, staleness, etc.) is centralized here.
+ */
+ export declare function create_observations_client(storage: ObservationsStorage, metadata: MetadataClient): ObservationsClient;
+ //# sourceMappingURL=client.d.ts.map
package/dist/observations/client.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../observations/client.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAuB,cAAc,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAA;AAEvF,OAAO,KAAK,EAAE,mBAAmB,EAAoB,MAAM,WAAW,CAAA;AAuBtE;;;GAGG;AACH,wBAAgB,0BAA0B,CACxC,OAAO,EAAE,mBAAmB,EAC5B,QAAQ,EAAE,cAAc,GACvB,kBAAkB,CAqGpB"}
package/dist/observations/client.js ADDED
@@ -0,0 +1,115 @@
+ /**
+ * @module ObservationsClient
+ * @description Centralized business logic for observations, built on storage adapters.
+ */
+ import { row_to_observation, row_to_meta, create_observation_row } from './storage';
+ import { generate_observation_id } from './utils';
+ import { ok, err } from '../types';
+ /**
+ * Convert client query opts to storage query opts.
+ * Handles Date -> ISO string conversion.
+ */
+ function to_storage_opts(opts) {
+ return {
+ type: opts.type,
+ source_store_id: opts.source_store,
+ source_version: opts.source_version,
+ source_prefix: opts.source_prefix,
+ created_after: opts.created_after?.toISOString(),
+ created_before: opts.created_before?.toISOString(),
+ observed_after: opts.after?.toISOString(),
+ observed_before: opts.before?.toISOString(),
+ limit: opts.limit
+ };
+ }
+ /**
+ * Creates an ObservationsClient from a storage adapter.
+ * All business logic (validation, staleness, etc.) is centralized here.
+ */
+ export function create_observations_client(storage, metadata) {
+ async function get_latest_version(store_id) {
+ const result = await metadata.get_latest(store_id);
+ return result.ok ? result.value.version : null;
+ }
+ return {
+ async put(type, opts) {
+ const validation = type.schema.safeParse(opts.content);
+ if (!validation.success) {
+ return err({
+ kind: 'validation_error',
+ cause: validation.error,
+ message: validation.error.message
+ });
+ }
+ const id = generate_observation_id();
+ const row = create_observation_row(id, type.name, opts.source, validation.data, {
+ confidence: opts.confidence,
+ observed_at: opts.observed_at,
+ derived_from: opts.derived_from
+ });
+ const result = await storage.put_row(row);
+ if (!result.ok)
+ return result;
+ const observation = {
+ id,
+ type: type.name,
+ source: opts.source,
+ content: validation.data,
+ ...(opts.confidence !== undefined && { confidence: opts.confidence }),
+ ...(opts.observed_at && { observed_at: opts.observed_at }),
+ created_at: new Date(row.created_at),
+ ...(opts.derived_from && { derived_from: opts.derived_from })
+ };
+ return ok(observation);
+ },
+ async get(id) {
+ const result = await storage.get_row(id);
+ if (!result.ok)
+ return result;
+ if (!result.value) {
+ return err({ kind: 'observation_not_found', id });
+ }
+ return ok(row_to_observation(result.value));
+ },
+ async *query(opts = {}) {
+ const storageOpts = to_storage_opts(opts);
+ for await (const row of storage.query_rows(storageOpts)) {
+ if (!opts.include_stale) {
+ const latest = await get_latest_version(row.source_store_id);
+ if (latest && row.source_version !== latest)
+ continue;
+ }
+ yield row_to_observation(row);
+ }
+ },
+ async *query_meta(opts = {}) {
+ const storageOpts = to_storage_opts(opts);
+ for await (const row of storage.query_rows(storageOpts)) {
+ if (!opts.include_stale) {
+ const latest = await get_latest_version(row.source_store_id);
+ if (latest && row.source_version !== latest)
+ continue;
+ }
+ yield row_to_meta(row);
+ }
+ },
+ async delete(id) {
+ const result = await storage.delete_row(id);
+ if (!result.ok)
+ return result;
+ if (!result.value) {
+ return err({ kind: 'observation_not_found', id });
+ }
+ return ok(undefined);
+ },
+ async delete_by_source(source) {
+ return storage.delete_by_source(source.store_id, source.version, source.path);
+ },
+ async is_stale(pointer) {
+ const latest = await get_latest_version(pointer.store_id);
+ if (!latest)
+ return false;
+ return pointer.version !== latest;
+ }
+ };
+ }
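Note: backends surface this client as `backend.observations`, so it can also be exercised directly. A hedged sketch of the put/query/staleness flow implemented above — the observation type literal and the `source`/query option shapes are inferred from this file and may not match the published typings exactly:

```ts
import { create_memory_backend } from "@f0rbit/corpus";

// Stand-in observation type: the client only needs a name plus a
// zod-style schema whose safeParse() it calls before persisting.
const Sentiment = {
  name: "sentiment",
  schema: { safeParse: (v: unknown) => ({ success: true as const, data: v as { label: string } }) },
};

const backend = create_memory_backend();
const obs = backend.observations!;

// put() validates content, generates an id and created_at, then stores a row.
const saved = await obs.put(Sentiment as any, {
  source: { store_id: "notes", version: "v1" }, // assumed source shape
  content: { label: "positive" },
  confidence: 0.9,
});
if (saved.ok) console.log("stored observation", saved.value.id);

// query() drops rows whose source version is no longer the latest snapshot
// unless include_stale is set; is_stale() does the same check for one pointer.
for await (const o of obs.query({ include_stale: true })) {
  console.log(o.type, o.content);
}
```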
package/dist/observations/index.d.ts ADDED
@@ -0,0 +1,12 @@
+ /**
+ * @module Observations
+ * @description Re-exports for the observations feature.
+ */
+ export * from './types';
+ export type { ObservationRow, ObservationInsert } from './schema';
+ export { corpus_observations } from './schema';
+ export type { ObservationsStorage, StorageQueryOpts, ObservationsCRUD } from './storage';
+ export { row_to_observation, row_to_meta, create_observation_row, filter_observation_rows, create_observations_storage } from './storage';
+ export { create_observations_client } from './client';
+ export * from './utils';
+ //# sourceMappingURL=index.d.ts.map
package/dist/observations/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../observations/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,cAAc,SAAS,CAAA;AACvB,YAAY,EAAE,cAAc,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAA;AACjE,OAAO,EAAE,mBAAmB,EAAE,MAAM,UAAU,CAAA;AAC9C,YAAY,EAAE,mBAAmB,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAA;AAGxF,OAAO,EAAE,kBAAkB,EAAE,WAAW,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,2BAA2B,EAAE,MAAM,WAAW,CAAA;AACzI,OAAO,EAAE,0BAA0B,EAAE,MAAM,UAAU,CAAA;AACrD,cAAc,SAAS,CAAA"}
package/dist/observations/index.js ADDED
@@ -0,0 +1,11 @@
+ /**
+ * @module Observations
+ * @description Re-exports for the observations feature.
+ */
+ // Types
+ export * from './types';
+ export { corpus_observations } from './schema';
+ // Functions
+ export { row_to_observation, row_to_meta, create_observation_row, filter_observation_rows, create_observations_storage } from './storage';
+ export { create_observations_client } from './client';
+ export * from './utils';