@byearlybird/starling 0.9.3 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core-DI0FfUjX.js +423 -0
- package/dist/core.d.ts +2 -0
- package/dist/core.js +3 -0
- package/dist/db-qQgPYE41.d.ts +199 -0
- package/dist/index-D7bXWDg6.d.ts +270 -0
- package/dist/index.d.ts +3 -2
- package/dist/index.js +405 -580
- package/dist/plugin-http.d.ts +139 -0
- package/dist/plugin-http.js +191 -0
- package/dist/plugin-idb.d.ts +59 -0
- package/dist/plugin-idb.js +169 -0
- package/package.json +21 -13
- package/dist/plugins/unstorage/plugin.d.ts +0 -54
- package/dist/plugins/unstorage/plugin.js +0 -104
- package/dist/store-bS1Nb57l.d.ts +0 -365
|
@@ -0,0 +1,423 @@
|
|
|
1
|
+
//#region src/core/clock/errors.ts
|
|
2
|
+
// Error raised when a string fails eventstamp format validation.
var InvalidEventstampError = class extends Error {
	constructor(stamp) {
		super(`Invalid eventstamp: "${stamp}"`);
		this.name = "InvalidEventstampError";
	}
};
|
|
8
|
+
|
|
9
|
+
//#endregion
|
|
10
|
+
//#region src/core/clock/eventstamp.ts
|
|
11
|
+
/**
 * Produce a 4-character lowercase hex nonce used to break eventstamp ties.
 * Math.random() is not cryptographically secure; the nonce only
 * disambiguates events sharing the same millisecond and counter.
 */
function generateNonce() {
	const hexFraction = Math.random().toString(16);
	return hexFraction.slice(2, 6).padStart(4, "0");
}
|
|
14
|
+
/**
 * Encode a (timestampMs, counter, nonce) triple into the canonical
 * eventstamp string: ISO-8601 millisecond timestamp, a 4+ hex digit
 * counter, and the nonce, joined by "|".
 */
function encodeEventstamp(timestampMs, counter, nonce) {
	const iso = new Date(timestampMs).toISOString();
	const hexCounter = counter.toString(16).padStart(4, "0");
	return [iso, hexCounter, nonce].join("|");
}
|
|
17
|
+
/**
 * Canonical eventstamp shape: ISO-8601 UTC millisecond timestamp, a
 * counter of at least 4 lowercase hex digits, and exactly 4 lowercase hex
 * digits of nonce, separated by "|".
 */
const EVENTSTAMP_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\|[0-9a-f]{4,}\|[0-9a-f]{4}$/;
/**
 * Validates whether a string is a properly formatted eventstamp.
 * Expected format: YYYY-MM-DDTHH:mm:ss.SSSZ|HHHH+|HHHH
 * where HHHH+ represents 4 or more hex characters for the counter,
 * and HHHH represents exactly 4 hex characters for the nonce.
 * Note: this is a purely syntactic check; it does not verify that the
 * date components form a real calendar date.
 */
function isValidEventstamp(candidate) {
	return EVENTSTAMP_REGEX.test(candidate);
}
|
|
27
|
+
/**
 * Decode an eventstamp string into its component parts.
 * @param eventstamp - Eventstamp string in the canonical format
 * @returns { timestampMs, counter, nonce }
 * @throws InvalidEventstampError when the string fails format validation,
 *   or when it encodes an impossible calendar date (e.g. month 13) that
 *   the format regex alone cannot reject.
 */
function decodeEventstamp(eventstamp) {
	if (!isValidEventstamp(eventstamp)) throw new InvalidEventstampError(eventstamp);
	const [isoString, hexCounter, nonce] = eventstamp.split("|");
	const timestampMs = new Date(isoString).getTime();
	// The regex only checks digit counts, so "2025-13-99T00:00:00.000Z|…"
	// passes validation yet parses to an invalid Date. Reject it here
	// instead of silently returning timestampMs === NaN.
	if (Number.isNaN(timestampMs)) throw new InvalidEventstampError(eventstamp);
	return {
		timestampMs,
		counter: parseInt(hexCounter, 16),
		nonce
	};
}
|
|
39
|
+
/** Smallest possible eventstamp: Unix epoch, zero counter, zero nonce. */
const MIN_EVENTSTAMP = encodeEventstamp(0, 0, "0000");
/**
 * Find the maximum eventstamp from an array of eventstamps.
 * Eventstamps compare as plain strings (the encoding is lexicographically
 * ordered). Returns MIN_EVENTSTAMP if the array is empty.
 * @param eventstamps - Array of eventstamp strings
 * @returns The maximum eventstamp
 */
function maxEventstamp(eventstamps) {
	if (eventstamps.length === 0) return MIN_EVENTSTAMP;
	let max = eventstamps[0];
	for (let i = 1; i < eventstamps.length; i++) {
		if (eventstamps[i] > max) max = eventstamps[i];
	}
	return max;
}
|
|
50
|
+
|
|
51
|
+
//#endregion
|
|
52
|
+
//#region src/core/clock/clock.ts
|
|
53
|
+
/**
 * Create a new Clock instance.
 *
 * A hybrid-logical clock: wall-clock milliseconds plus a counter that is
 * bumped whenever the wall clock has not advanced (or went backwards),
 * plus a fresh random nonce per tick to break ties between peers.
 * @param initialState - Optional initial state for the clock
 *   ({ counter, lastMs, lastNonce }); missing fields default to
 *   counter 0, the current time, and a fresh nonce.
 */
function createClock(initialState) {
	let counter = initialState?.counter ?? 0;
	let lastMs = initialState?.lastMs ?? Date.now();
	let lastNonce = initialState?.lastNonce ?? generateNonce();
	// Issue a new eventstamp; monotonic for this instance even if the
	// wall clock stalls or regresses.
	const now = () => {
		const wallMs = Date.now();
		if (wallMs > lastMs) {
			// Wall clock advanced: adopt it and reset the logical counter.
			lastMs = wallMs;
			counter = 0;
			lastNonce = generateNonce();
		} else {
			// Wall clock stalled/regressed: bump the counter instead, keeping
			// the previously observed (larger) lastMs.
			counter++;
			lastNonce = generateNonce();
		}
		return encodeEventstamp(lastMs, counter, lastNonce);
	};
	// Re-encode the last issued state without advancing the clock.
	const latest = () => encodeEventstamp(lastMs, counter, lastNonce);
	// Fast-forward to a remote eventstamp if it is strictly ahead of us;
	// older or equal stamps are ignored.
	const forward = (eventstamp) => {
		if (!isValidEventstamp(eventstamp)) throw new InvalidEventstampError(eventstamp);
		if (eventstamp > latest()) {
			const newer = decodeEventstamp(eventstamp);
			lastMs = newer.timestampMs;
			counter = newer.counter;
			lastNonce = newer.nonce;
		}
	};
	return {
		now,
		latest,
		forward
	};
}
|
|
89
|
+
/**
 * Create a Clock from an eventstamp string.
 * @param eventstamp - Eventstamp string to decode and initialize clock from
 * @throws InvalidEventstampError (an Error subclass) if eventstamp is invalid
 */
function createClockFromEventstamp(eventstamp) {
	// decodeEventstamp validates and throws InvalidEventstampError itself;
	// the previous explicit check duplicated that validation and threw a
	// plain Error, inconsistent with the rest of the clock module. Callers
	// catching Error still work since InvalidEventstampError extends it.
	const decoded = decodeEventstamp(eventstamp);
	return createClock({
		counter: decoded.counter,
		lastMs: decoded.timestampMs,
		lastNonce: decoded.nonce
	});
}
|
|
103
|
+
|
|
104
|
+
//#endregion
|
|
105
|
+
//#region src/core/document/resource.ts
|
|
106
|
+
/**
 * True only for plain objects (prototype is exactly Object.prototype).
 * Rejects null/undefined, arrays, class instances, and objects created
 * with Object.create(null).
 */
function isObject(value) {
	if (value == null || typeof value !== "object") return false;
	if (Array.isArray(value)) return false;
	return Object.getPrototypeOf(value) === Object.prototype;
}
|
|
109
|
+
/**
 * Get a value from a nested object using a dot-separated path.
 * Returns undefined as soon as any intermediate value is null/undefined.
 * @internal
 */
function getValueAtPath(obj, path) {
	let cursor = obj;
	for (const segment of path.split(".")) {
		if (cursor == null) return void 0;
		cursor = cursor[segment];
	}
	return cursor;
}
|
|
122
|
+
/**
 * Set a value in a nested object using a dot-separated path.
 * Creates intermediate objects as needed, replacing any non-object
 * intermediate values along the way.
 * NOTE(review): path segments are written verbatim — a path containing
 * "__proto__" could pollute Object.prototype if paths ever come from
 * untrusted input; confirm callers only pass trusted field paths.
 * @internal
 */
function setValueAtPath(obj, path, value) {
	const segments = path.split(".");
	const lastIndex = segments.length - 1;
	let cursor = obj;
	for (let i = 0; i < lastIndex; i++) {
		const key = segments[i];
		if (!cursor[key] || typeof cursor[key] !== "object") cursor[key] = {};
		cursor = cursor[key];
	}
	cursor[segments[lastIndex]] = value;
}
|
|
136
|
+
/**
 * Compute the latest eventstamp for a resource from its field eventstamps
 * and deletedAt marker: the maximum of all field stamps, the optional
 * fallback (MIN_EVENTSTAMP when omitted), and deletedAt when present.
 * Used internally and exported for testing/validation.
 * @internal
 */
function computeResourceLatest(eventstamps, deletedAt, fallback) {
	let latest = fallback ?? MIN_EVENTSTAMP;
	for (const stamp of Object.values(eventstamps)) {
		if (stamp > latest) latest = stamp;
	}
	return deletedAt && deletedAt > latest ? deletedAt : latest;
}
|
|
147
|
+
/**
 * Build a ResourceObject from plain attributes, stamping every leaf field
 * with the supplied eventstamp. Nested plain objects are flattened into
 * dot-separated field paths; any other value (arrays included) counts as
 * a single leaf.
 */
function makeResource(type, id, obj, eventstamp, deletedAt = null) {
	const eventstamps = {};
	// Walk own enumerable keys, recording an eventstamp per leaf path.
	const stampLeaves = (node, prefix) => {
		for (const [key, value] of Object.entries(node)) {
			const fieldPath = prefix === "" ? key : `${prefix}.${key}`;
			if (isObject(value)) stampLeaves(value, fieldPath);
			else eventstamps[fieldPath] = eventstamp;
		}
	};
	stampLeaves(obj, "");
	return {
		type,
		id,
		attributes: obj,
		meta: {
			eventstamps,
			latest: computeResourceLatest(eventstamps, deletedAt, eventstamp),
			deletedAt
		}
	};
}
|
|
170
|
+
/**
 * Merge two ResourceObjects field-by-field using Last-Write-Wins.
 *
 * For every field path present in either resource, the value with the
 * strictly greater eventstamp wins; on an exact tie `from` wins (the
 * comparison is `stamp1 > stamp2`), so merging is biased toward the
 * incoming resource. The merged deletedAt is the newer of the two markers
 * (or whichever exists), and `latest` covers both the surviving field
 * stamps and the deletion marker. The result keeps `into`'s type and id.
 */
function mergeResources(into, from) {
	const resultAttributes = {};
	const resultEventstamps = {};
	// Union of field paths touched by either side.
	const allPaths = new Set([...Object.keys(into.meta.eventstamps), ...Object.keys(from.meta.eventstamps)]);
	for (const path of allPaths) {
		const stamp1 = into.meta.eventstamps[path];
		const stamp2 = from.meta.eventstamps[path];
		// Both sides carry the field: strictly newer stamp wins, ties go to `from`.
		if (stamp1 && stamp2) if (stamp1 > stamp2) {
			setValueAtPath(resultAttributes, path, getValueAtPath(into.attributes, path));
			resultEventstamps[path] = stamp1;
		} else {
			setValueAtPath(resultAttributes, path, getValueAtPath(from.attributes, path));
			resultEventstamps[path] = stamp2;
		}
		// Only one side carries the field: copy it through unchanged.
		else if (stamp1) {
			setValueAtPath(resultAttributes, path, getValueAtPath(into.attributes, path));
			resultEventstamps[path] = stamp1;
		} else {
			setValueAtPath(resultAttributes, path, getValueAtPath(from.attributes, path));
			resultEventstamps[path] = stamp2;
		}
	}
	// Latest over the merged field stamps, seeded with the max of both metas.
	const dataLatest = computeResourceLatest(resultEventstamps, null, into.meta.latest > from.meta.latest ? into.meta.latest : from.meta.latest);
	// Deletion markers merge LWW as well: the newer marker survives.
	const mergedDeletedAt = into.meta.deletedAt && from.meta.deletedAt ? into.meta.deletedAt > from.meta.deletedAt ? into.meta.deletedAt : from.meta.deletedAt : into.meta.deletedAt || from.meta.deletedAt || null;
	const finalLatest = mergedDeletedAt && mergedDeletedAt > dataLatest ? mergedDeletedAt : dataLatest;
	return {
		type: into.type,
		id: into.id,
		attributes: resultAttributes,
		meta: {
			eventstamps: resultEventstamps,
			latest: finalLatest,
			deletedAt: mergedDeletedAt
		}
	};
}
|
|
206
|
+
/**
 * Mark a resource as deleted at the given eventstamp.
 * Attributes and per-field eventstamps are preserved (so later merges can
 * still resolve field conflicts); only deletedAt and latest change.
 */
function deleteResource(resource, eventstamp) {
	const { meta } = resource;
	// If the resource was already deleted, recompute the data-only latest
	// so the earlier deletion stamp does not inflate it.
	let dataLatest;
	if (meta.deletedAt) dataLatest = computeResourceLatest(meta.eventstamps, null);
	else dataLatest = meta.latest;
	return {
		type: resource.type,
		id: resource.id,
		attributes: resource.attributes,
		meta: {
			eventstamps: meta.eventstamps,
			latest: dataLatest > eventstamp ? dataLatest : eventstamp,
			deletedAt: eventstamp
		}
	};
}
|
|
220
|
+
|
|
221
|
+
//#endregion
|
|
222
|
+
//#region src/core/document/document.ts
|
|
223
|
+
/**
 * Merges two JSON:API documents using field-level Last-Write-Wins semantics.
 *
 * The merge operation:
 * 1. Forwards the clock to the newest eventstamp from either document
 * 2. Merges each resource pair using field-level LWW (via mergeResources)
 * 3. Tracks what changed for hook notifications (added/updated/deleted)
 *
 * Deletion is final: once a resource is deleted, updates to it are merged into
 * the resource's attributes but don't restore visibility. Only new resources or
 * transitions into the deleted state are tracked.
 *
 * @param into - The base document to merge into
 * @param from - The source document to merge from
 * @returns Merged document and categorized changes
 *
 * @example
 * ```typescript
 * const into = {
 *   jsonapi: { version: "1.1" },
 *   meta: { latest: "2025-01-01T00:00:00.000Z|0001|a1b2" },
 *   data: [{ type: "items", id: "doc1", attributes: {...}, meta: { deletedAt: null, latest: "..." } }]
 * };
 *
 * const from = {
 *   jsonapi: { version: "1.1" },
 *   meta: { latest: "2025-01-01T00:05:00.000Z|0001|c3d4" },
 *   data: [
 *     { type: "items", id: "doc1", attributes: {...}, meta: { deletedAt: null, latest: "..." } }, // updated
 *     { type: "items", id: "doc2", attributes: {...}, meta: { deletedAt: null, latest: "..." } } // new
 *   ]
 * };
 *
 * const result = mergeDocuments(into, from);
 * // result.document.meta.latest === "2025-01-01T00:05:00.000Z|0001|c3d4"
 * // result.changes.added has "doc2"
 * // result.changes.updated has "doc1"
 * ```
 */
function mergeDocuments(into, from) {
	const intoDocsById = /* @__PURE__ */ new Map();
	for (const doc of into.data) intoDocsById.set(doc.id, doc);
	const added = /* @__PURE__ */ new Map();
	const updated = /* @__PURE__ */ new Map();
	const deleted = /* @__PURE__ */ new Set();
	const mergedDocsById = new Map(intoDocsById);
	// Start from the newer of the two document-level clocks.
	let newestEventstamp = into.meta.latest >= from.meta.latest ? into.meta.latest : from.meta.latest;
	for (const fromDoc of from.data) {
		const id = fromDoc.id;
		const intoDoc = intoDocsById.get(id);
		if (!intoDoc) {
			// Unknown resource: adopt it wholesale. Incoming tombstones are
			// stored but not reported as "added".
			mergedDocsById.set(id, fromDoc);
			if (!fromDoc.meta.deletedAt) added.set(id, fromDoc);
			if (fromDoc.meta.latest > newestEventstamp) newestEventstamp = fromDoc.meta.latest;
		} else {
			// Same object reference: nothing to merge (e.g. self-merge).
			if (intoDoc === fromDoc) continue;
			const mergedDoc = mergeResources(intoDoc, fromDoc);
			mergedDocsById.set(id, mergedDoc);
			if (mergedDoc.meta.latest > newestEventstamp) newestEventstamp = mergedDoc.meta.latest;
			const wasDeleted = intoDoc.meta.deletedAt !== null;
			const isDeleted = mergedDoc.meta.deletedAt !== null;
			// Only the transition INTO the deleted state is reported; merges
			// into an already-deleted resource stay silent.
			if (!wasDeleted && isDeleted) deleted.add(id);
			else if (!isDeleted) {
				// Report an update only when the merge actually advanced the
				// resource's latest eventstamp.
				if (intoDoc.meta.latest !== mergedDoc.meta.latest) updated.set(id, mergedDoc);
			}
		}
	}
	return {
		document: {
			jsonapi: { version: "1.1" },
			meta: { latest: newestEventstamp },
			data: Array.from(mergedDocsById.values())
		},
		changes: {
			added,
			updated,
			deleted
		}
	};
}
|
|
303
|
+
/**
 * Creates an empty JSON:API document with the given eventstamp.
 * Useful for initializing new stores or testing.
 *
 * @param eventstamp - Initial clock value for this document
 * @returns Empty document
 *
 * @example
 * ```typescript
 * const empty = makeDocument("2025-01-01T00:00:00.000Z|0000|0000");
 * ```
 */
function makeDocument(eventstamp) {
	const emptyDocument = {
		jsonapi: { version: "1.1" },
		meta: { latest: eventstamp },
		data: []
	};
	return emptyDocument;
}
|
|
322
|
+
|
|
323
|
+
//#endregion
|
|
324
|
+
//#region src/core/document/utils.ts
|
|
325
|
+
/**
 * Convert a JsonDocument's data array into a Map keyed by resource ID.
 * If the document contains duplicate ids, the later entry wins.
 * @param document - JsonDocument containing resource data
 * @returns Map of resource ID to ResourceObject
 */
function documentToMap(document) {
	const byId = new Map();
	for (const resource of document.data) byId.set(resource.id, resource);
	return byId;
}
|
|
333
|
+
/**
 * Convert a Map of resources into a JsonDocument.
 * The document-level `latest` is the max over every resource's latest
 * eventstamp, optionally including the fallback.
 * @param resources - Map of resource ID to ResourceObject
 * @param fallbackEventstamp - Eventstamp to include when computing the max (optional)
 * @returns JsonDocument representation of the resources
 */
function mapToDocument(resources, fallbackEventstamp) {
	const data = [...resources.values()];
	const stamps = data.map((resource) => resource.meta.latest);
	if (fallbackEventstamp) stamps.push(fallbackEventstamp);
	return {
		jsonapi: { version: "1.1" },
		meta: { latest: maxEventstamp(stamps) },
		data
	};
}
|
|
349
|
+
|
|
350
|
+
//#endregion
|
|
351
|
+
//#region src/core/resource-map/resource-map.ts
|
|
352
|
+
/**
 * A ResourceMap container for storing and managing ResourceObjects.
 *
 * This factory function creates a ResourceMap with state-based replication
 * and automatic convergence via Last-Write-Wins conflict resolution.
 * It stores complete resource snapshots with encoded metadata, including deletion markers.
 *
 * ResourceMap does NOT filter based on deletion status—it stores and returns
 * all ResourceObjects including deleted ones. The Store class is responsible
 * for filtering what's visible to users.
 *
 * Note: the map passed as `initialMap` is used directly (not copied), so
 * set/delete mutate it in place until the first merge() replaces it.
 *
 * @example
 * ```typescript
 * const resourceMap = createMap("todos");
 * resourceMap.set("id1", { name: "Alice" });
 * const resource = resourceMap.get("id1"); // ResourceObject with metadata
 * ```
 */
function createMap(resourceType, initialMap = /* @__PURE__ */ new Map(), eventstamp) {
	let internalMap = initialMap;
	const clock = createClock();
	// Seed the clock so new writes sort after the snapshot we loaded from.
	if (eventstamp) clock.forward(eventstamp);
	return {
		has(id) {
			return internalMap.has(id);
		},
		get(id) {
			// Returns the raw ResourceObject, including tombstones.
			return internalMap.get(id);
		},
		entries() {
			return internalMap.entries();
		},
		set(id, object) {
			// Stamp the incoming attributes, then LWW-merge with any
			// existing resource (preserving newer fields and tombstones).
			const encoded = makeResource(resourceType, id, object, clock.now());
			const current = internalMap.get(id);
			if (current) {
				const merged = mergeResources(current, encoded);
				internalMap.set(id, merged);
			} else internalMap.set(id, encoded);
		},
		delete(id) {
			// Soft delete: mark with a deletion eventstamp; unknown ids are a no-op.
			const current = internalMap.get(id);
			if (current) {
				const doc = deleteResource(current, clock.now());
				internalMap.set(id, doc);
			}
		},
		cloneMap() {
			// Shallow copy: the ResourceObjects themselves are shared.
			return new Map(internalMap);
		},
		toDocument() {
			return mapToDocument(internalMap, clock.latest());
		},
		merge(document) {
			// Merge a remote snapshot, advance the clock to the merged
			// latest, and swap in the merged state wholesale.
			const result = mergeDocuments(mapToDocument(internalMap, clock.latest()), document);
			clock.forward(result.document.meta.latest);
			internalMap = documentToMap(result.document);
			return result;
		}
	};
}
|
|
413
|
+
/**
 * Create a ResourceMap from a JsonDocument snapshot.
 * @param type - Resource type identifier used when the document has no resources
 * @param document - JsonDocument containing resource data
 */
function createMapFromDocument(type, document) {
	// Prefer the type recorded on the snapshot's first resource; fall back
	// to the caller-supplied type for empty documents.
	const resourceType = document.data[0]?.type ?? type;
	return createMap(resourceType, documentToMap(document), document.meta.latest);
}
|
|
421
|
+
|
|
422
|
+
//#endregion
|
|
423
|
+
export { makeDocument as a, makeResource as c, createClockFromEventstamp as d, MIN_EVENTSTAMP as f, InvalidEventstampError as h, mapToDocument as i, mergeResources as l, maxEventstamp as m, createMapFromDocument as n, mergeDocuments as o, isValidEventstamp as p, documentToMap as r, deleteResource as s, createMap as t, createClock as u };
|
package/dist/core.d.ts
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import { _ as maxEventstamp, a as AnyObject, b as createClock, c as MergeDocumentsResult, d as ResourceObject, f as deleteResource, g as isValidEventstamp, h as MIN_EVENTSTAMP, i as mapToDocument, l as makeDocument, m as mergeResources, n as createMapFromDocument, o as DocumentChanges, p as makeResource, r as documentToMap, s as JsonDocument, t as createMap, u as mergeDocuments, v as InvalidEventstampError, x as createClockFromEventstamp, y as Clock } from "./index-D7bXWDg6.js";
|
|
2
|
+
export { AnyObject, Clock, DocumentChanges, InvalidEventstampError, JsonDocument, MIN_EVENTSTAMP, MergeDocumentsResult, ResourceObject, createClock, createClockFromEventstamp, createMap, createMapFromDocument, deleteResource, documentToMap, isValidEventstamp, makeDocument, makeResource, mapToDocument, maxEventstamp, mergeDocuments, mergeResources };
|
package/dist/core.js
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
import { a as makeDocument, c as makeResource, d as createClockFromEventstamp, f as MIN_EVENTSTAMP, h as InvalidEventstampError, i as mapToDocument, l as mergeResources, m as maxEventstamp, n as createMapFromDocument, o as mergeDocuments, p as isValidEventstamp, r as documentToMap, s as deleteResource, t as createMap, u as createClock } from "./core-DI0FfUjX.js";
|
|
2
|
+
|
|
3
|
+
export { InvalidEventstampError, MIN_EVENTSTAMP, createClock, createClockFromEventstamp, createMap, createMapFromDocument, deleteResource, documentToMap, isValidEventstamp, makeDocument, makeResource, mapToDocument, maxEventstamp, mergeDocuments, mergeResources };
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
import { a as AnyObject, s as JsonDocument } from "./index-D7bXWDg6.js";
|
|
2
|
+
|
|
3
|
+
//#region src/database/standard-schema.d.ts
|
|
4
|
+
/**
 * The Standard Schema interface.
 * NOTE(review): this appears to be a vendored copy of the Standard Schema
 * v1 spec interfaces — keep it in sync with the upstream spec if updated.
 */
interface StandardSchemaV1<Input = unknown, Output = Input> {
	/** The Standard Schema properties. */
	readonly "~standard": StandardSchemaV1.Props<Input, Output>;
}
declare namespace StandardSchemaV1 {
	/** The Standard Schema properties interface. */
	interface Props<Input = unknown, Output = Input> {
		/** The version number of the standard. */
		readonly version: 1;
		/** The vendor name of the schema library. */
		readonly vendor: string;
		/** Validates unknown input values. May be synchronous or async. */
		readonly validate: (value: unknown) => Result<Output> | Promise<Result<Output>>;
		/** Inferred types associated with the schema. */
		readonly types?: Types<Input, Output> | undefined;
	}
	/** The result interface of the validate function. */
	type Result<Output> = SuccessResult<Output> | FailureResult;
	/** The result interface if validation succeeds. */
	interface SuccessResult<Output> {
		/** The typed output value. */
		readonly value: Output;
		/** The non-existent issues (discriminates success from failure). */
		readonly issues?: undefined;
	}
	/** The result interface if validation fails. */
	interface FailureResult {
		/** The issues of failed validation. */
		readonly issues: ReadonlyArray<Issue>;
	}
	/** The issue interface of the failure output. */
	interface Issue {
		/** The error message of the issue. */
		readonly message: string;
		/** The path of the issue, if any. */
		readonly path?: ReadonlyArray<PropertyKey | PathSegment> | undefined;
	}
	/** The path segment interface of the issue. */
	interface PathSegment {
		/** The key representing a path segment. */
		readonly key: PropertyKey;
	}
	/** The Standard Schema types interface. */
	interface Types<Input = unknown, Output = Input> {
		/** The input type of the schema. */
		readonly input: Input;
		/** The output type of the schema. */
		readonly output: Output;
	}
	/** Infers the input type of a Standard Schema. */
	type InferInput<Schema extends StandardSchemaV1> = NonNullable<Schema["~standard"]["types"]>["input"];
	/** Infers the output type of a Standard Schema. */
	type InferOutput<Schema extends StandardSchemaV1> = NonNullable<Schema["~standard"]["types"]>["output"];
}
|
|
59
|
+
//#endregion
|
|
60
|
+
//#region src/database/types.d.ts
|
|
61
|
+
/** A Standard Schema whose input type is constrained to plain objects. */
type AnyObjectSchema<T extends AnyObject = AnyObject> = StandardSchemaV1<T>;
/** Mapping of collection name to its schema, used to type a database. */
type SchemasMap = Record<string, AnyObjectSchema>;
|
|
63
|
+
//#endregion
|
|
64
|
+
//#region src/database/collection.d.ts
|
|
65
|
+
/**
 * Symbols for internal collection methods used by transactions.
 * These are not part of the public Collection type.
 */
declare const CollectionInternals: {
	readonly getPendingMutations: symbol;
	readonly emitMutations: symbol;
	readonly replaceData: symbol;
	readonly data: symbol;
};
/** Shorthand for extracting the data type from a schema */
type InferData<T extends AnyObjectSchema> = StandardSchemaV1.InferOutput<T>;
/** Categorized item-level changes produced by collection writes. */
type MutationBatch<T> = {
	added: Array<{
		id: string;
		item: T;
	}>;
	updated: Array<{
		id: string;
		before: T;
		after: T;
	}>;
	removed: Array<{
		id: string;
		item: T;
	}>;
};
/** Payload delivered to a collection's "mutation" subscribers. */
type CollectionMutationEvent<T> = MutationBatch<T>;
/** Public API of a typed collection. */
type Collection<T extends AnyObjectSchema> = {
	get(id: string, opts?: {
		includeDeleted?: boolean;
	}): InferData<T> | null;
	getAll(opts?: {
		includeDeleted?: boolean;
	}): InferData<T>[];
	find<U = InferData<T>>(filter: (item: InferData<T>) => boolean, opts?: {
		map?: (item: InferData<T>) => U;
		sort?: (a: U, b: U) => number;
	}): U[];
	add(item: StandardSchemaV1.InferInput<T>): InferData<T>;
	update(id: string, updates: Partial<StandardSchemaV1.InferInput<T>>): void;
	remove(id: string): void;
	merge(document: JsonDocument<InferData<T>>): void;
	toDocument(): JsonDocument<InferData<T>>;
	on(event: "mutation", handler: (payload: CollectionMutationEvent<InferData<T>>) => void): () => void;
};
/** Error whose constructor receives the id that could not be found. */
declare class IdNotFoundError extends Error {
	constructor(id: string);
}
/** Error whose constructor receives the id that already exists. */
declare class DuplicateIdError extends Error {
	constructor(id: string);
}
|
|
117
|
+
//#endregion
|
|
118
|
+
//#region src/database/query.d.ts
|
|
119
|
+
/** Read-only collection handle for queries (get/getAll/find only). */
type QueryCollectionHandle<T extends AnyObjectSchema> = Pick<Collection<T>, "get" | "getAll" | "find">;
/** Query context: every collection exposed as a read-only handle. */
type QueryContext<Schemas extends SchemasMap> = { [K in keyof Schemas]: QueryCollectionHandle<Schemas[K]> };
/** Handle returned by db.query() for managing the reactive query */
type QueryHandle<R> = {
	/** Current query result */
	readonly result: R;
	/** Subscribe to result changes. Returns an unsubscribe function. */
	subscribe(callback: (result: R) => void): () => void;
	/** Stop tracking and clean up subscriptions */
	dispose(): void;
};
|
|
132
|
+
//#endregion
|
|
133
|
+
//#region src/database/transaction.d.ts
|
|
134
|
+
/** Transaction-safe collection handle that excludes event subscription and serialization */
type TransactionCollectionHandle<T extends AnyObjectSchema> = Omit<Collection<T>, "on" | "toDocument">;
/** All transaction-safe collection handles, keyed by collection name. */
type TransactionCollectionHandles<Schemas extends SchemasMap> = { [K in keyof Schemas]: TransactionCollectionHandle<Schemas[K]> };
/** Context passed to db.begin() callbacks: collection handles plus rollback(). */
type TransactionContext<Schemas extends SchemasMap> = TransactionCollectionHandles<Schemas> & {
	rollback(): void;
};
|
|
140
|
+
//#endregion
|
|
141
|
+
//#region src/database/db.d.ts
|
|
142
|
+
/** Typed collection accessors, keyed by collection name. */
type Collections<Schemas extends SchemasMap> = { [K in keyof Schemas]: Collection<Schemas[K]> };
/** Per-collection configuration, keyed by collection name. */
type CollectionConfigMap<Schemas extends SchemasMap> = { [K in keyof Schemas]: CollectionConfig<Schemas[K]> };
/** A mutation batch tagged with the collection it occurred in. */
type MutationEnvelope<Schemas extends SchemasMap> = { [K in keyof Schemas]: {
	collection: K;
} & MutationBatch<StandardSchemaV1.InferOutput<Schemas[K]>> }[keyof Schemas];
/** Payload for database-level "mutation" subscriptions. */
type DatabaseMutationEvent<Schemas extends SchemasMap> = MutationEnvelope<Schemas>;
/** Schema plus id-extraction function for a single collection. */
type CollectionConfig<T extends AnyObjectSchema> = {
	schema: T;
	getId: (item: StandardSchemaV1.InferOutput<T>) => string;
};
/** Lifecycle hooks a plugin can register with a database. */
type DatabasePlugin<Schemas extends SchemasMap> = {
	handlers: {
		init?: (db: Database<Schemas>) => Promise<unknown> | unknown;
		dispose?: (db: Database<Schemas>) => Promise<unknown> | unknown;
	};
};
/** Top-level configuration accepted by createDatabase(). */
type DbConfig<Schemas extends SchemasMap> = {
	name: string;
	schema: CollectionConfigMap<Schemas>;
	version?: number;
};
/** A database instance: collection accessors plus lifecycle, transaction, and query methods. */
type Database<Schemas extends SchemasMap> = Collections<Schemas> & {
	name: string;
	version: number;
	begin<R>(callback: (tx: TransactionContext<Schemas>) => R): R;
	query<R>(callback: (ctx: QueryContext<Schemas>) => R): QueryHandle<R>;
	toDocuments(): { [K in keyof Schemas]: JsonDocument<StandardSchemaV1.InferOutput<Schemas[K]>> };
	on(event: "mutation", handler: (payload: DatabaseMutationEvent<Schemas>) => unknown): () => void;
	use(plugin: DatabasePlugin<Schemas>): Database<Schemas>;
	init(): Promise<Database<Schemas>>;
	dispose(): Promise<void>;
	collectionKeys(): (keyof Schemas)[];
};
/**
 * Create a typed database instance with collection access.
 * @param config - Database configuration
 * @param config.name - Database name used for persistence and routing
 * @param config.schema - Collection schema definitions
 * @param config.version - Optional database version, defaults to 1
 * @returns A database instance with typed collection properties
 *
 * @example
 * ```typescript
 * const db = await createDatabase({
 *   name: "my-app",
 *   schema: {
 *     tasks: { schema: taskSchema, getId: (task) => task.id },
 *   },
 * })
 *   .use(idbPlugin())
 *   .init();
 *
 * const task = db.tasks.add({ title: 'Learn Starling' });
 * ```
 */
declare function createDatabase<Schemas extends SchemasMap>(config: DbConfig<Schemas>): Database<Schemas>;
|
|
198
|
+
//#endregion
|
|
199
|
+
export { createDatabase as a, QueryCollectionHandle as c, Collection as d, CollectionInternals as f, StandardSchemaV1 as g, SchemasMap as h, DbConfig as i, QueryContext as l, IdNotFoundError as m, Database as n, TransactionCollectionHandle as o, DuplicateIdError as p, DatabasePlugin as r, TransactionContext as s, CollectionConfig as t, QueryHandle as u };
|