@byearlybird/starling 0.9.3 → 0.11.0

This diff shows the published contents of two package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
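At a glance: 0.11.0 removes the single-collection `Store` class (together with the `crdt/*` and `clock` regions) and replaces it with a schema-driven `createDatabase` API: named collections validated by Standard Schema, copy-on-write transactions, and dependency-tracked reactive queries. A minimal before/after sketch, inferred from the diff below; the plugin and schema values (`somePlugin`, `todoSchema`) are illustrative placeholders, not exports of this package:

// 0.9.3: one Store per document type
const store = await new Store().use(somePlugin).init();
const id = store.add({ text: "Buy milk", completed: false });
store.update(id, { completed: true });
store.del(id);

// 0.11.0: one database with named, schema-validated collections
const db = await createDatabase({
  name: "my-app",
  schema: { todos: { schema: todoSchema, getId: (todo) => todo.id } },
})
  .use(somePlugin)
  .init();
db.todos.add({ id: "1", text: "Buy milk", completed: false });
db.todos.update("1", { completed: true });
db.todos.remove("1");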
package/dist/index.js CHANGED
@@ -1,637 +1,462 @@
- //#region src/crdt/eventstamp.ts
- function generateNonce() {
-   return Math.random().toString(16).slice(2, 6).padStart(4, "0");
- }
- function encodeEventstamp(timestampMs, counter, nonce) {
-   return `${new Date(timestampMs).toISOString()}|${counter.toString(16).padStart(4, "0")}|${nonce}`;
- }
- /**
-  * Validates whether a string is a properly formatted eventstamp.
-  * Expected format: YYYY-MM-DDTHH:mm:ss.SSSZ|HHHH+|HHHH
-  * where HHHH+ represents 4 or more hex characters for the counter,
-  * and HHHH represents exactly 4 hex characters for the nonce.
-  */
- function isValidEventstamp(stamp) {
-   return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\|[0-9a-f]{4,}\|[0-9a-f]{4}$/.test(stamp);
- }
- function decodeEventstamp(eventstamp) {
-   if (!isValidEventstamp(eventstamp)) throw new Error(`Invalid eventstamp format: "${eventstamp}". Expected format: YYYY-MM-DDTHH:mm:ss.SSSZ|HHHH+|HHHH`);
-   const parts = eventstamp.split("|");
-   const isoString = parts[0];
-   const hexCounter = parts[1];
-   const nonce = parts[2];
+ import { c as makeResource, i as mapToDocument, l as mergeResources, o as mergeDocuments, s as deleteResource, u as createClock } from "./core-DI0FfUjX.js";
+
+ //#region src/database/emitter.ts
+ function createEmitter() {
+   const handlers = /* @__PURE__ */ new Map();
    return {
-     timestampMs: new Date(isoString).getTime(),
-     counter: parseInt(hexCounter, 16),
-     nonce
+     on(type, handler) {
+       let set = handlers.get(type);
+       if (!set) {
+         set = /* @__PURE__ */ new Set();
+         handlers.set(type, set);
+       }
+       set.add(handler);
+       return () => {
+         set?.delete(handler);
+         if (!set?.size) handlers.delete(type);
+       };
+     },
+     emit(type, payload) {
+       const set = handlers.get(type);
+       if (!set) return;
+       for (const handler of Array.from(set)) handler(payload);
+     },
+     clear() {
+       handlers.clear();
+     }
    };
  }
- const MIN_EVENTSTAMP = encodeEventstamp(0, 0, "0000");
 
  //#endregion
- //#region src/crdt/utils.ts
- function isObject(value) {
-   return !!(value != null && typeof value === "object" && !Array.isArray(value) && Object.getPrototypeOf(value) === Object.prototype);
- }
- function isEncodedValue(value) {
-   return !!(typeof value === "object" && value !== null && "~value" in value && "~eventstamp" in value);
- }
-
- //#endregion
- //#region src/crdt/value.ts
- function encodeValue(value, eventstamp) {
-   return {
-     "~value": value,
-     "~eventstamp": eventstamp
-   };
- }
- function decodeValue(value) {
-   return value["~value"];
- }
- function mergeValues(into, from) {
-   return into["~eventstamp"] > from["~eventstamp"] ? [into, into["~eventstamp"]] : [from, from["~eventstamp"]];
+ //#region src/database/standard-schema.ts
+ function standardValidate(schema, input) {
+   const result = schema["~standard"].validate(input);
+   if (result instanceof Promise) throw new TypeError("Schema validation must be synchronous");
+   if (result.issues) throw new Error(JSON.stringify(result.issues, null, 2));
+   return result.value;
  }
 
  //#endregion
- //#region src/crdt/record.ts
- function processRecord(source, process) {
-   const result = {};
-   const step = (input, output) => {
-     for (const key in input) {
-       if (!Object.hasOwn(input, key)) continue;
-       const value = input[key];
-       if (isEncodedValue(value)) output[key] = process(value);
-       else if (isObject(value)) {
-         output[key] = {};
-         step(value, output[key]);
-       }
-     }
-   };
-   step(source, result);
-   return result;
- }
- function encodeRecord(obj, eventstamp) {
-   const result = {};
-   const step = (input, output) => {
-     for (const key in input) {
-       if (!Object.hasOwn(input, key)) continue;
-       const value = input[key];
-       if (isObject(value)) {
-         output[key] = {};
-         step(value, output[key]);
-       } else output[key] = encodeValue(value, eventstamp);
-     }
+ //#region src/database/collection.ts
+ /**
+  * Symbols for internal collection methods used by transactions.
+  * These are not part of the public Collection type.
+  */
+ const CollectionInternals = {
+   getPendingMutations: Symbol("getPendingMutations"),
+   emitMutations: Symbol("emitMutations"),
+   replaceData: Symbol("replaceData"),
+   data: Symbol("data")
+ };
+ function createCollection(name, schema, getId, getEventstamp, initialData, options) {
+   const autoFlush = options?.autoFlush ?? true;
+   const data = initialData ?? /* @__PURE__ */ new Map();
+   const emitter = createEmitter();
+   const pendingMutations = {
+     added: [],
+     updated: [],
+     removed: []
    };
-   step(obj, result);
-   return result;
- }
- function decodeRecord(obj) {
-   const result = {};
-   const step = (input, output) => {
-     for (const key in input) {
-       if (!Object.hasOwn(input, key)) continue;
-       const value = input[key];
-       if (isEncodedValue(value)) output[key] = decodeValue(value);
-       else if (isObject(value)) {
-         output[key] = {};
-         step(value, output[key]);
-       }
+   const flushMutations = () => {
+     if (pendingMutations.added.length > 0 || pendingMutations.updated.length > 0 || pendingMutations.removed.length > 0) {
+       emitter.emit("mutation", {
+         added: [...pendingMutations.added],
+         updated: [...pendingMutations.updated],
+         removed: [...pendingMutations.removed]
+       });
+       pendingMutations.added = [];
+       pendingMutations.updated = [];
+       pendingMutations.removed = [];
      }
    };
-   step(obj, result);
-   return result;
- }
- function mergeRecords(into, from) {
-   const result = {};
-   let greatestEventstamp = MIN_EVENTSTAMP;
-   const step = (v1, v2, output) => {
-     for (const key in v1) {
-       if (!Object.hasOwn(v1, key)) continue;
-       const value1 = v1[key];
-       const value2 = v2[key];
-       if (isEncodedValue(value1) && isEncodedValue(value2)) {
-         const [win, eventstamp] = mergeValues(value1, value2);
-         output[key] = win;
-         if (eventstamp > greatestEventstamp) greatestEventstamp = eventstamp;
-       } else if (isEncodedValue(value1)) {
-         output[key] = value1;
-         const eventstamp = value1["~eventstamp"];
-         if (eventstamp > greatestEventstamp) greatestEventstamp = eventstamp;
-       } else if (isObject(value1) && isObject(value2)) {
-         output[key] = {};
-         step(value1, value2, output[key]);
-       } else if (value1) output[key] = value1;
-     }
-     for (const key in v2) {
-       if (!Object.hasOwn(v2, key) || Object.hasOwn(output, key)) continue;
-       const value = v2[key];
-       if (value !== void 0) {
-         output[key] = value;
-         if (isEncodedValue(value)) {
-           const eventstamp = value["~eventstamp"];
-           if (eventstamp > greatestEventstamp) greatestEventstamp = eventstamp;
+   return {
+     get(id, opts = {}) {
+       const resource = data.get(id);
+       if (!resource) return null;
+       if (!opts.includeDeleted && resource.meta.deletedAt) return null;
+       return resource.attributes;
+     },
+     getAll(opts = {}) {
+       const resources = Array.from(data.values());
+       if (opts.includeDeleted) return resources.map((resource) => resource.attributes);
+       else return resources.filter((resource) => !resource.meta.deletedAt).map((resource) => resource.attributes);
+     },
+     find(filter, opts) {
+       const results = [];
+       for (const [, resource] of data.entries()) {
+         if (resource.meta.deletedAt) continue;
+         const attributes = resource.attributes;
+         if (filter(attributes)) {
+           const value = opts?.map ? opts.map(attributes) : attributes;
+           results.push(value);
          }
        }
+       if (opts?.sort) results.sort(opts.sort);
+       return results;
+     },
+     add(item) {
+       const validated = standardValidate(schema, item);
+       const id = getId(validated);
+       if (data.has(id)) throw new DuplicateIdError(id);
+       const resource = makeResource(name, id, validated, getEventstamp());
+       data.set(id, resource);
+       pendingMutations.added.push({
+         id,
+         item: validated
+       });
+       if (autoFlush) flushMutations();
+       return validated;
+     },
+     update(id, updates) {
+       const existing = data.get(id);
+       if (!existing) throw new IdNotFoundError(id);
+       const before = existing.attributes;
+       const merged = mergeResources(existing, makeResource(name, id, updates, getEventstamp()));
+       standardValidate(schema, merged.attributes);
+       data.set(id, merged);
+       pendingMutations.updated.push({
+         id,
+         before,
+         after: merged.attributes
+       });
+       if (autoFlush) flushMutations();
+     },
+     remove(id) {
+       const existing = data.get(id);
+       if (!existing) throw new IdNotFoundError(id);
+       const item = existing.attributes;
+       const removed = deleteResource(existing, getEventstamp());
+       data.set(id, removed);
+       pendingMutations.removed.push({
+         id,
+         item
+       });
+       if (autoFlush) flushMutations();
+     },
+     merge(document) {
+       const beforeState = /* @__PURE__ */ new Map();
+       for (const [id, resource] of data.entries()) beforeState.set(id, resource.attributes);
+       const result = mergeDocuments(mapToDocument(data, getEventstamp()), document);
+       data.clear();
+       for (const resource of result.document.data) data.set(resource.id, resource);
+       for (const [id, resource] of result.changes.added) {
+         standardValidate(schema, resource.attributes);
+         pendingMutations.added.push({
+           id,
+           item: resource.attributes
+         });
+       }
+       for (const [id, resource] of result.changes.updated) {
+         standardValidate(schema, resource.attributes);
+         const before = beforeState.get(id);
+         pendingMutations.updated.push({
+           id,
+           before,
+           after: resource.attributes
+         });
+       }
+       for (const id of result.changes.deleted) {
+         const before = beforeState.get(id);
+         pendingMutations.removed.push({
+           id,
+           item: before
+         });
+       }
+       if (autoFlush) flushMutations();
+     },
+     toDocument() {
+       return mapToDocument(data, getEventstamp());
+     },
+     on(event, handler) {
+       return emitter.on(event, handler);
+     },
+     [CollectionInternals.data]() {
+       return new Map(data);
+     },
+     [CollectionInternals.getPendingMutations]() {
+       return {
+         added: [...pendingMutations.added],
+         updated: [...pendingMutations.updated],
+         removed: [...pendingMutations.removed]
+       };
+     },
+     [CollectionInternals.emitMutations](mutations) {
+       if (mutations.added.length > 0 || mutations.updated.length > 0 || mutations.removed.length > 0) emitter.emit("mutation", mutations);
+     },
+     [CollectionInternals.replaceData](newData) {
+       data.clear();
+       for (const [id, resource] of newData.entries()) data.set(id, resource);
      }
    };
-   step(into, from, result);
-   return [result, greatestEventstamp];
  }
+ var IdNotFoundError = class extends Error {
+   constructor(id) {
+     super(`Resource with id ${id} not found`);
+     this.name = "IdNotFoundError";
+   }
+ };
+ var DuplicateIdError = class extends Error {
+   constructor(id) {
+     super(`Resource with id ${id} already exists`);
+     this.name = "DuplicateIdError";
+   }
+ };
 
  //#endregion
- //#region src/crdt/document.ts
- function encodeDoc(id, obj, eventstamp, deletedAt = null) {
-   return {
-     "~id": id,
-     "~data": isObject(obj) ? encodeRecord(obj, eventstamp) : encodeValue(obj, eventstamp),
-     "~deletedAt": deletedAt
+ //#region src/database/query.ts
+ /**
+  * Execute a reactive query with automatic re-computation on mutations.
+  *
+  * @param db - Database instance to query
+  * @param callback - Query callback receiving read-only collection handles
+  * @returns QueryHandle with result, subscribe, and dispose methods
+  */
+ function executeQuery(db, callback) {
+   const accessedCollections = /* @__PURE__ */ new Set();
+   const subscribers = /* @__PURE__ */ new Set();
+   let currentResult;
+   const createTrackingHandles = () => {
+     const handles = {};
+     for (const name of db.collectionKeys()) {
+       const collection = db[name];
+       handles[name] = createTrackingHandle(name, collection, accessedCollections);
+     }
+     return handles;
    };
- }
- function decodeDoc(doc) {
-   return {
-     "~id": doc["~id"],
-     "~data": isEncodedValue(doc["~data"]) ? decodeValue(doc["~data"]) : decodeRecord(doc["~data"]),
-     "~deletedAt": doc["~deletedAt"]
+   const runQuery = () => {
+     return callback(createTrackingHandles());
    };
- }
- function mergeDocs(into, from) {
-   const intoIsValue = isEncodedValue(into["~data"]);
-   const fromIsValue = isEncodedValue(from["~data"]);
-   if (intoIsValue !== fromIsValue) throw new Error("Merge error: Incompatible types");
-   const [mergedData, dataEventstamp] = intoIsValue && fromIsValue ? mergeValues(into["~data"], from["~data"]) : mergeRecords(into["~data"], from["~data"]);
-   const mergedDeletedAt = into["~deletedAt"] && from["~deletedAt"] ? into["~deletedAt"] > from["~deletedAt"] ? into["~deletedAt"] : from["~deletedAt"] : into["~deletedAt"] || from["~deletedAt"] || null;
-   let greatestEventstamp = dataEventstamp;
-   if (mergedDeletedAt && mergedDeletedAt > greatestEventstamp) greatestEventstamp = mergedDeletedAt;
-   return [{
-     "~id": into["~id"],
-     "~data": mergedData,
-     "~deletedAt": mergedDeletedAt
-   }, greatestEventstamp];
- }
- function deleteDoc(doc, eventstamp) {
+   currentResult = runQuery();
+   const unsubscribeMutation = db.on("mutation", (event) => {
+     if (accessedCollections.has(event.collection)) {
+       currentResult = runQuery();
+       for (const subscriber of subscribers) subscriber(currentResult);
+     }
+   });
+   let disposed = false;
    return {
-     "~id": doc["~id"],
-     "~data": doc["~data"],
-     "~deletedAt": eventstamp
+     get result() {
+       return currentResult;
+     },
+     subscribe(callback$1) {
+       if (disposed) throw new Error("Cannot subscribe to a disposed query");
+       subscribers.add(callback$1);
+       return () => {
+         subscribers.delete(callback$1);
+       };
+     },
+     dispose() {
+       if (disposed) return;
+       disposed = true;
+       unsubscribeMutation();
+       subscribers.clear();
+       accessedCollections.clear();
+     }
    };
  }
  /**
-  * Transform all values in a document using a provided function.
-  *
-  * Useful for custom serialization in plugin hooks (encryption, compression, etc.)
-  *
-  * @param doc - Document to transform
-  * @param process - Function to apply to each leaf value
-  * @returns New document with transformed values
-  *
-  * @example
-  * ```ts
-  * // Encrypt all values before persisting
-  * const encrypted = processDocument(doc, (value) => ({
-  *   ...value,
-  *   "~value": encrypt(value["~value"])
-  * }));
-  * ```
+  * Create a read-only collection handle that tracks access.
   */
- function processDocument(doc, process) {
-   const processedData = isEncodedValue(doc["~data"]) ? process(doc["~data"]) : processRecord(doc["~data"], process);
+ function createTrackingHandle(name, collection, accessedCollections) {
+   const trackAccess = () => {
+     accessedCollections.add(name);
+   };
    return {
-     "~id": doc["~id"],
-     "~data": processedData,
-     "~deletedAt": doc["~deletedAt"]
+     get(id, opts) {
+       trackAccess();
+       return collection.get(id, opts);
+     },
+     getAll(opts) {
+       trackAccess();
+       return collection.getAll(opts);
+     },
+     find(filter, opts) {
+       trackAccess();
+       return collection.find(filter, opts);
+     }
    };
  }
 
  //#endregion
- //#region src/crdt/collection.ts
+ //#region src/database/transaction.ts
  /**
-  * Merges two collections using field-level Last-Write-Wins semantics.
-  *
-  * The merge operation:
-  * 1. Forwards the clock to the newest eventstamp from either collection
-  * 2. Merges each document pair using field-level LWW (via mergeDocs)
-  * 3. Tracks what changed for hook notifications (added/updated/deleted)
-  *
-  * Deletion is final: once a document is deleted, updates to it are merged into
-  * the document's data but don't restore visibility. Only new documents or
-  * transitions into the deleted state are tracked.
+  * Execute a transaction with snapshot isolation and copy-on-write optimization.
   *
-  * @param into - The base collection to merge into
-  * @param from - The source collection to merge from
-  * @returns Merged collection and categorized changes
+  * @param configs - Collection configurations for creating new instances
+  * @param collections - Active collection instances (mutable reference)
+  * @param getEventstamp - Function to generate eventstamps
+  * @param callback - Transaction callback with tx context
+  * @returns The return value from the callback
   *
-  * @example
-  * ```typescript
-  * const into = {
-  *   "~docs": [{ "~id": "doc1", "~data": {...}, "~deletedAt": null }],
-  *   "~eventstamp": "2025-01-01T00:00:00.000Z|0001|a1b2"
-  * };
-  *
-  * const from = {
-  *   "~docs": [
-  *     { "~id": "doc1", "~data": {...}, "~deletedAt": null }, // updated
-  *     { "~id": "doc2", "~data": {...}, "~deletedAt": null } // new
-  *   ],
-  *   "~eventstamp": "2025-01-01T00:05:00.000Z|0001|c3d4"
-  * };
-  *
-  * const result = mergeCollections(into, from);
-  * // result.collection.~eventstamp === "2025-01-01T00:05:00.000Z|0001|c3d4"
-  * // result.changes.added has "doc2"
-  * // result.changes.updated has "doc1"
-  * ```
+  * @remarks
+  * - Collections are cloned lazily on first access (read or write)
+  * - Provides snapshot isolation: tx sees consistent data from first access
+  * - Explicit rollback via tx.rollback() or implicit on exception
+  * - Only modified collections are committed back
   */
- function mergeCollections(into, from) {
-   const intoDocsById = /* @__PURE__ */ new Map();
-   for (const doc of into["~docs"]) intoDocsById.set(doc["~id"], doc);
-   const added = /* @__PURE__ */ new Map();
-   const updated = /* @__PURE__ */ new Map();
-   const deleted = /* @__PURE__ */ new Set();
-   const mergedDocsById = new Map(intoDocsById);
-   for (const fromDoc of from["~docs"]) {
-     const id = fromDoc["~id"];
-     const intoDoc = intoDocsById.get(id);
-     if (!intoDoc) {
-       mergedDocsById.set(id, fromDoc);
-       if (!fromDoc["~deletedAt"]) added.set(id, fromDoc);
-     } else {
-       if (intoDoc === fromDoc) continue;
-       const [mergedDoc] = mergeDocs(intoDoc, fromDoc);
-       mergedDocsById.set(id, mergedDoc);
-       const wasDeleted = intoDoc["~deletedAt"] !== null;
-       const isDeleted = mergedDoc["~deletedAt"] !== null;
-       if (!wasDeleted && isDeleted) deleted.add(id);
-       else if (!isDeleted) updated.set(id, mergedDoc);
-     }
+ function executeTransaction(configs, collections, getEventstamp, callback) {
+   const clonedCollections = /* @__PURE__ */ new Map();
+   const txHandles = {};
+   for (const name of Object.keys(collections)) {
+     const originalCollection = collections[name];
+     const config = configs[name];
+     const getClonedCollection = () => {
+       if (!clonedCollections.has(name)) {
+         const cloned = createCollection(name, config.schema, config.getId, getEventstamp, originalCollection[CollectionInternals.data](), { autoFlush: false });
+         clonedCollections.set(name, cloned);
+       }
+       return clonedCollections.get(name);
+     };
+     txHandles[name] = createLazyTransactionHandle(originalCollection, getClonedCollection);
    }
-   const newestEventstamp = into["~eventstamp"] >= from["~eventstamp"] ? into["~eventstamp"] : from["~eventstamp"];
-   return {
-     collection: {
-       "~docs": Array.from(mergedDocsById.values()),
-       "~eventstamp": newestEventstamp
-     },
-     changes: {
-       added,
-       updated,
-       deleted
+   let shouldRollback = false;
+   const tx = {
+     ...txHandles,
+     rollback() {
+       shouldRollback = true;
      }
    };
+   let result;
+   result = callback(tx);
+   if (!shouldRollback) for (const [name, clonedCollection] of clonedCollections.entries()) {
+     const originalCollection = collections[name];
+     const pendingMutations = clonedCollection[CollectionInternals.getPendingMutations]();
+     originalCollection[CollectionInternals.replaceData](clonedCollection[CollectionInternals.data]());
+     originalCollection[CollectionInternals.emitMutations](pendingMutations);
+   }
+   return result;
  }
-
- //#endregion
- //#region src/clock.ts
  /**
-  * A Hybrid Logical Clock that generates monotonically increasing eventstamps.
-  * Combines wall-clock time with a counter for handling clock stalls and a
-  * random nonce for tie-breaking.
+  * Create a transaction handle that lazily clones on first access (copy-on-write).
+  *
+  * @param originalCollection - The base collection (not modified)
+  * @param getClonedCollection - Lazy cloner (invoked on first access)
+  * @returns A collection handle with snapshot isolation
   *
-  * The clock automatically increments the counter when the wall clock doesn't
-  * advance, ensuring eventstamps are always unique and monotonic.
+  * @remarks
+  * First read or write triggers cloning, providing snapshot isolation.
+  * All subsequent operations use the cloned collection.
+  * Excluded methods:
+  * - on(): events are only emitted after the transaction commits
+  * - toDocument(): serialization should happen outside transactions
   */
- var Clock = class {
-   #counter = 0;
-   #lastMs = Date.now();
-   #lastNonce = generateNonce();
-   /** Generates a new eventstamp, advancing the clock */
-   now() {
-     const wallMs = Date.now();
-     if (wallMs > this.#lastMs) {
-       this.#lastMs = wallMs;
-       this.#counter = 0;
-       this.#lastNonce = generateNonce();
-     } else {
-       this.#counter++;
-       this.#lastNonce = generateNonce();
-     }
-     return encodeEventstamp(this.#lastMs, this.#counter, this.#lastNonce);
-   }
-   /** Returns the most recent eventstamp without advancing the clock */
-   latest() {
-     return encodeEventstamp(this.#lastMs, this.#counter, this.#lastNonce);
-   }
-   /** Fast-forwards the clock to match a newer remote eventstamp */
-   forward(eventstamp) {
-     if (eventstamp > this.latest()) {
-       const newer = decodeEventstamp(eventstamp);
-       this.#lastMs = newer.timestampMs;
-       this.#counter = newer.counter;
-       this.#lastNonce = newer.nonce;
+ function createLazyTransactionHandle(_originalCollection, getClonedCollection) {
+   let cloned = null;
+   const ensureCloned = () => {
+     if (!cloned) cloned = getClonedCollection();
+     return cloned;
+   };
+   return {
+     get(id, opts) {
+       return ensureCloned().get(id, opts);
+     },
+     getAll(opts) {
+       return ensureCloned().getAll(opts);
+     },
+     find(filter, opts) {
+       return ensureCloned().find(filter, opts);
+     },
+     add(item) {
+       return ensureCloned().add(item);
+     },
+     update(id, updates) {
+       ensureCloned().update(id, updates);
+     },
+     remove(id) {
+       ensureCloned().remove(id);
+     },
+     merge(document) {
+       ensureCloned().merge(document);
      }
-   }
- };
+   };
+ }
 
  //#endregion
- //#region src/store.ts
+ //#region src/database/db.ts
  /**
-  * Lightweight local-first data store with built-in sync and reactive queries.
-  *
-  * Stores plain JavaScript objects with automatic field-level conflict resolution
-  * using Last-Write-Wins semantics powered by hybrid logical clocks.
-  *
-  * @template T - The type of documents stored in this collection
+  * Create a typed database instance with collection access.
+  * @param config - Database configuration
+  * @param config.name - Database name used for persistence and routing
+  * @param config.schema - Collection schema definitions
+  * @param config.version - Optional database version, defaults to 1
+  * @returns A database instance with typed collection properties
   *
   * @example
-  * ```ts
-  * const store = await new Store<{ text: string; completed: boolean }>()
-  *   .use(unstoragePlugin('todos', storage))
+  * ```typescript
+  * const db = await createDatabase({
+  *   name: "my-app",
+  *   schema: {
+  *     tasks: { schema: taskSchema, getId: (task) => task.id },
+  *   },
+  * })
+  *   .use(idbPlugin())
   *   .init();
   *
-  * // Add, update, delete
-  * const id = store.add({ text: 'Buy milk', completed: false });
-  * store.update(id, { completed: true });
-  * store.del(id);
-  *
-  * // Reactive queries
-  * const activeTodos = store.query({ where: (todo) => !todo.completed });
-  * activeTodos.onChange(() => console.log('Todos changed!'));
+  * const task = db.tasks.add({ title: 'Learn Starling' });
   * ```
   */
- var Store = class {
-   #readMap = /* @__PURE__ */ new Map();
-   #clock = new Clock();
-   #getId;
-   #onInitHandlers = [];
-   #onDisposeHandlers = [];
-   #onAddHandlers = [];
-   #onUpdateHandlers = [];
-   #onDeleteHandlers = [];
-   #queries = /* @__PURE__ */ new Set();
-   constructor(config = {}) {
-     this.#getId = config.getId ?? (() => crypto.randomUUID());
-   }
-   /**
-    * Get a document by ID.
-    * @returns The document, or null if not found or deleted
-    */
-   get(key) {
-     return this.#decodeActive(this.#readMap.get(key) ?? null);
-   }
-   /**
-    * Iterate over all non-deleted documents as [id, document] tuples.
-    */
-   entries() {
-     const self = this;
-     function* iterator() {
-       for (const [key, doc] of self.#readMap.entries()) {
-         const data = self.#decodeActive(doc);
-         if (data !== null) yield [key, data];
-       }
-     }
-     return iterator();
-   }
-   /**
-    * Get the complete store state as a Collection for persistence or sync.
-    * @returns Collection containing all documents and the latest eventstamp
-    */
-   collection() {
-     return {
-       "~docs": Array.from(this.#readMap.values()),
-       "~eventstamp": this.#clock.latest()
-     };
-   }
-   /**
-    * Merge a collection from storage or another replica using field-level LWW.
-    * @param collection - Collection from storage or another store instance
-    */
-   merge(collection) {
-     const result = mergeCollections(this.collection(), collection);
-     this.#clock.forward(result.collection["~eventstamp"]);
-     this.#readMap = new Map(result.collection["~docs"].map((doc) => [doc["~id"], doc]));
-     const addEntries = Array.from(result.changes.added.entries()).map(([key, doc]) => [key, decodeDoc(doc)["~data"]]);
-     const updateEntries = Array.from(result.changes.updated.entries()).map(([key, doc]) => [key, decodeDoc(doc)["~data"]]);
-     const deleteKeys = Array.from(result.changes.deleted);
-     if (addEntries.length > 0 || updateEntries.length > 0 || deleteKeys.length > 0) this.#emitMutations(addEntries, updateEntries, deleteKeys);
-   }
-   /**
-    * Run multiple operations in a transaction with rollback support.
-    *
-    * @param callback - Function receiving a transaction context
-    * @param opts - Optional config. Use `silent: true` to skip plugin hooks.
-    * @returns The callback's return value
-    *
-    * @example
-    * ```ts
-    * const id = store.begin((tx) => {
-    *   const newId = tx.add({ text: 'Buy milk' });
-    *   tx.update(newId, { priority: 'high' });
-    *   return newId; // Return value becomes begin()'s return value
-    * });
-    * ```
-    */
-   begin(callback, opts) {
-     const silent = opts?.silent ?? false;
-     const addEntries = [];
-     const updateEntries = [];
-     const deleteKeys = [];
-     const staging = new Map(this.#readMap);
-     let rolledBack = false;
-     const result = callback({
-       add: (value, options) => {
-         const key = options?.withId ?? this.#getId();
-         staging.set(key, this.#encodeValue(key, value));
-         addEntries.push([key, value]);
-         return key;
-       },
-       update: (key, value) => {
-         const doc = encodeDoc(key, value, this.#clock.now());
-         const prev = staging.get(key);
-         const mergedDoc = prev ? mergeDocs(prev, doc)[0] : doc;
-         staging.set(key, mergedDoc);
-         const merged = this.#decodeActive(mergedDoc);
-         if (merged !== null) updateEntries.push([key, merged]);
-       },
-       merge: (doc) => {
-         const existing = staging.get(doc["~id"]);
-         const mergedDoc = existing ? mergeDocs(existing, doc)[0] : doc;
-         staging.set(doc["~id"], mergedDoc);
-         const decoded = this.#decodeActive(mergedDoc);
-         const isNew = !this.#readMap.has(doc["~id"]);
-         if (mergedDoc["~deletedAt"]) deleteKeys.push(doc["~id"]);
-         else if (decoded !== null) if (isNew) addEntries.push([doc["~id"], decoded]);
-         else updateEntries.push([doc["~id"], decoded]);
-       },
-       del: (key) => {
-         const currentDoc = staging.get(key);
-         if (!currentDoc) return;
-         staging.set(key, deleteDoc(currentDoc, this.#clock.now()));
-         deleteKeys.push(key);
-       },
-       get: (key) => this.#decodeActive(staging.get(key) ?? null),
-       rollback: () => {
-         rolledBack = true;
-       }
+ function createDatabase(config) {
+   const { name, schema, version = 1 } = config;
+   const clock = createClock();
+   const getEventstamp = () => clock.now();
+   const collections = makeCollections(schema, getEventstamp);
+   const publicCollections = collections;
+   const dbEmitter = createEmitter();
+   for (const collectionName of Object.keys(collections)) collections[collectionName].on("mutation", (mutations) => {
+     if (mutations.added.length > 0 || mutations.updated.length > 0 || mutations.removed.length > 0) dbEmitter.emit("mutation", {
+       collection: collectionName,
+       added: mutations.added,
+       updated: mutations.updated,
+       removed: mutations.removed
      });
-     if (!rolledBack) {
-       this.#readMap = staging;
-       if (!silent) this.#emitMutations(addEntries, updateEntries, deleteKeys);
-     }
-     return result;
-   }
-   /**
-    * Add a document to the store.
-    * @returns The document's ID (generated or provided via options)
-    */
-   add(value, options) {
-     return this.begin((tx) => tx.add(value, options));
-   }
-   /**
-    * Update a document with a partial value.
-    *
-    * Uses field-level merge - only specified fields are updated.
-    */
-   update(key, value) {
-     this.begin((tx) => tx.update(key, value));
-   }
-   /**
-    * Soft-delete a document.
-    *
-    * Deleted docs remain in snapshots for sync purposes but are
-    * excluded from queries and reads.
-    */
-   del(key) {
-     this.begin((tx) => tx.del(key));
-   }
-   /**
-    * Register a plugin for persistence, analytics, etc.
-    * @returns This store instance for chaining
-    */
-   use(plugin) {
-     this.#onInitHandlers.push(plugin.onInit);
-     this.#onDisposeHandlers.push(plugin.onDispose);
-     if (plugin.onAdd) this.#onAddHandlers.push(plugin.onAdd);
-     if (plugin.onUpdate) this.#onUpdateHandlers.push(plugin.onUpdate);
-     if (plugin.onDelete) this.#onDeleteHandlers.push(plugin.onDelete);
-     return this;
-   }
-   /**
-    * Initialize the store and run plugin onInit hooks.
-    *
-    * Must be called before using the store. Runs plugin setup (hydrate
-    * snapshots, start pollers, etc.) and hydrates existing queries.
-    *
-    * @returns This store instance for chaining
-    */
-   async init() {
-     for (const hook of this.#onInitHandlers) await hook(this);
-     for (const query of this.#queries) this.#hydrateQuery(query);
-     return this;
-   }
-   /**
-    * Dispose the store and run plugin cleanup.
-    *
-    * Flushes pending operations, clears queries, and runs plugin teardown.
-    * Call when shutting down to avoid memory leaks.
-    */
-   async dispose() {
-     for (let i = this.#onDisposeHandlers.length - 1; i >= 0; i--) await this.#onDisposeHandlers[i]?.();
-     for (const query of this.#queries) {
-       query.callbacks.clear();
-       query.results.clear();
-     }
-     this.#queries.clear();
-     this.#onInitHandlers = [];
-     this.#onDisposeHandlers = [];
-     this.#onAddHandlers = [];
-     this.#onUpdateHandlers = [];
-     this.#onDeleteHandlers = [];
-   }
-   /**
-    * Create a reactive query that auto-updates when matching docs change.
-    *
-    * @example
-    * ```ts
-    * const active = store.query({ where: (todo) => !todo.completed });
-    * active.results(); // [[id, todo], ...]
-    * active.onChange(() => console.log('Updated!'));
-    * active.dispose(); // Clean up when done
-    * ```
-    */
-   query(config) {
-     const query = {
-       where: config.where,
-       select: config.select,
-       order: config.order,
-       results: /* @__PURE__ */ new Map(),
-       callbacks: /* @__PURE__ */ new Set()
-     };
-     this.#queries.add(query);
-     this.#hydrateQuery(query);
-     return {
-       results: () => {
-         if (query.order) return Array.from(query.results).sort(([, a], [, b]) => query.order(a, b));
-         return Array.from(query.results);
-       },
-       onChange: (callback) => {
-         query.callbacks.add(callback);
-         return () => {
-           query.callbacks.delete(callback);
-         };
-       },
-       dispose: () => {
-         this.#queries.delete(query);
-         query.callbacks.clear();
-         query.results.clear();
-       }
-     };
-   }
-   #encodeValue(key, value) {
-     return encodeDoc(key, value, this.#clock.now());
-   }
-   #decodeActive(doc) {
-     if (!doc || doc["~deletedAt"]) return null;
-     return decodeDoc(doc)["~data"];
-   }
-   #emitMutations(addEntries, updateEntries, deleteKeys) {
-     this.#notifyQueries(addEntries, updateEntries, deleteKeys);
-     if (addEntries.length > 0) for (const handler of this.#onAddHandlers) handler(addEntries);
-     if (updateEntries.length > 0) for (const handler of this.#onUpdateHandlers) handler(updateEntries);
-     if (deleteKeys.length > 0) for (const handler of this.#onDeleteHandlers) handler(deleteKeys);
-   }
-   #notifyQueries(addEntries, updateEntries, deleteKeys) {
-     if (this.#queries.size === 0) return;
-     const dirtyQueries = /* @__PURE__ */ new Set();
-     if (addEntries.length > 0) {
-       for (const [key, value] of addEntries) for (const query of this.#queries) if (query.where(value)) {
-         const selected = this.#selectValue(query, value);
-         query.results.set(key, selected);
-         dirtyQueries.add(query);
-       }
-     }
-     if (updateEntries.length > 0) for (const [key, value] of updateEntries) for (const query of this.#queries) {
-       const matches = query.where(value);
-       const inResults = query.results.has(key);
-       if (matches && !inResults) {
-         const selected = this.#selectValue(query, value);
-         query.results.set(key, selected);
-         dirtyQueries.add(query);
-       } else if (!matches && inResults) {
-         query.results.delete(key);
-         dirtyQueries.add(query);
-       } else if (matches && inResults) {
-         const selected = this.#selectValue(query, value);
-         query.results.set(key, selected);
-         dirtyQueries.add(query);
+   });
+   const plugins = [];
+   const db = {
+     ...publicCollections,
+     name,
+     version,
+     begin(callback) {
+       return executeTransaction(schema, collections, getEventstamp, callback);
+     },
+     query(callback) {
+       return executeQuery(db, callback);
+     },
+     toDocuments() {
+       const documents = {};
+       for (const dbName of Object.keys(collections)) documents[dbName] = collections[dbName].toDocument();
+       return documents;
+     },
+     on(event, handler) {
+       return dbEmitter.on(event, handler);
+     },
+     use(plugin) {
+       plugins.push(plugin);
+       return db;
+     },
+     async init() {
+       for (const plugin of plugins) if (plugin.handlers.init) await plugin.handlers.init(db);
+       return db;
+     },
+     async dispose() {
+       for (let i = plugins.length - 1; i >= 0; i--) {
+         const plugin = plugins[i];
+         if (plugin?.handlers.dispose) await plugin.handlers.dispose(db);
        }
+     },
+     collectionKeys() {
+       return Object.keys(collections);
      }
-     if (deleteKeys.length > 0) {
-       for (const key of deleteKeys) for (const query of this.#queries) if (query.results.delete(key)) dirtyQueries.add(query);
-     }
-     if (dirtyQueries.size > 0) this.#runQueryCallbacks(dirtyQueries);
-   }
-   #runQueryCallbacks(dirtyQueries) {
-     for (const query of dirtyQueries) for (const callback of query.callbacks) callback();
-   }
-   #hydrateQuery(query) {
-     query.results.clear();
-     for (const [key, value] of this.entries()) if (query.where(value)) {
-       const selected = this.#selectValue(query, value);
-       query.results.set(key, selected);
-     }
-   }
-   #selectValue(query, value) {
-     return query.select ? query.select(value) : value;
+   };
+   return db;
+ }
+ function makeCollections(configs, getEventstamp) {
+   const collections = {};
+   for (const name of Object.keys(configs)) {
+     const config = configs[name];
+     collections[name] = createCollection(name, config.schema, config.getId, getEventstamp);
    }
- };
+   return collections;
+ }
 
  //#endregion
- export { Store, mergeCollections, processDocument };
+ export { CollectionInternals, DuplicateIdError, IdNotFoundError, createDatabase };
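
For orientation, the new transaction and query entry points compose as follows. A minimal sketch against a `db` created as in the `createDatabase` JSDoc example above; the `todos` collection and its fields are illustrative:

// Transactions: collections are cloned lazily on first access (copy-on-write),
// and nothing is committed or emitted until the callback returns without rollback.
const created = db.begin((tx) => {
  const todo = tx.todos.add({ id: "t1", text: "Write docs", completed: false });
  tx.todos.update("t1", { completed: true });
  if (tx.todos.get("t1") === null) tx.rollback(); // reads see the transaction's snapshot
  return todo; // becomes begin()'s return value
});

// Reactive queries: the callback runs once, then re-runs whenever a collection
// it touched emits a mutation event.
const open = db.query(({ todos }) => todos.find((todo) => !todo.completed));
console.log(open.result);
const unsubscribe = open.subscribe((results) => console.log(results));
unsubscribe();
open.dispose(); // detaches the query from the database's mutation events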
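The plugin contract changed as well: hooks now live under a `handlers` object (`init`, `dispose`), and change observation goes through the database's `mutation` events rather than the old `onAdd`/`onUpdate`/`onDelete` handler arrays. A hedged sketch of a persistence plugin; `loadDocuments` and `saveDocuments` are hypothetical storage helpers, not part of this package:

const persistencePlugin = {
  handlers: {
    async init(db) {
      const saved = await loadDocuments(db.name); // hypothetical read
      if (saved) for (const key of db.collectionKeys()) {
        if (saved[key]) db[key].merge(saved[key]); // field-level LWW merge of persisted documents
      }
      db.on("mutation", () => saveDocuments(db.name, db.toDocuments())); // hypothetical write
    },
    async dispose(db) {
      await saveDocuments(db.name, db.toDocuments());
    },
  },
};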