@byearlybird/starling 0.9.3 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/core-DI0FfUjX.js +423 -0
- package/dist/core.d.ts +2 -0
- package/dist/core.js +3 -0
- package/dist/db-qQgPYE41.d.ts +199 -0
- package/dist/index-D7bXWDg6.d.ts +270 -0
- package/dist/index.d.ts +3 -2
- package/dist/index.js +405 -580
- package/dist/plugin-http.d.ts +139 -0
- package/dist/plugin-http.js +191 -0
- package/dist/plugin-idb.d.ts +59 -0
- package/dist/plugin-idb.js +169 -0
- package/package.json +21 -13
- package/dist/plugins/unstorage/plugin.d.ts +0 -54
- package/dist/plugins/unstorage/plugin.js +0 -104
- package/dist/store-bS1Nb57l.d.ts +0 -365
package/dist/index.js
CHANGED
@@ -1,637 +1,462 @@
-
-
-
-
-
- return `${new Date(timestampMs).toISOString()}|${counter.toString(16).padStart(4, "0")}|${nonce}`;
- }
- /**
- * Validates whether a string is a properly formatted eventstamp.
- * Expected format: YYYY-MM-DDTHH:mm:ss.SSSZ|HHHH+|HHHH
- * where HHHH+ represents 4 or more hex characters for the counter,
- * and HHHH represents exactly 4 hex characters for the nonce.
- */
- function isValidEventstamp(stamp) {
- return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\|[0-9a-f]{4,}\|[0-9a-f]{4}$/.test(stamp);
- }
- function decodeEventstamp(eventstamp) {
- if (!isValidEventstamp(eventstamp)) throw new Error(`Invalid eventstamp format: "${eventstamp}". Expected format: YYYY-MM-DDTHH:mm:ss.SSSZ|HHHH+|HHHH`);
- const parts = eventstamp.split("|");
- const isoString = parts[0];
- const hexCounter = parts[1];
- const nonce = parts[2];
+ import { c as makeResource, i as mapToDocument, l as mergeResources, o as mergeDocuments, s as deleteResource, u as createClock } from "./core-DI0FfUjX.js";
+
+ //#region src/database/emitter.ts
+ function createEmitter() {
+ const handlers = /* @__PURE__ */ new Map();
  return {
-
-
-
+ on(type, handler) {
+ let set = handlers.get(type);
+ if (!set) {
+ set = /* @__PURE__ */ new Set();
+ handlers.set(type, set);
+ }
+ set.add(handler);
+ return () => {
+ set?.delete(handler);
+ if (!set?.size) handlers.delete(type);
+ };
+ },
+ emit(type, payload) {
+ const set = handlers.get(type);
+ if (!set) return;
+ for (const handler of Array.from(set)) handler(payload);
+ },
+ clear() {
+ handlers.clear();
+ }
  };
  }
- const MIN_EVENTSTAMP = encodeEventstamp(0, 0, "0000");

  //#endregion
- //#region src/
- function
-
-
-
- return
- }
-
- //#endregion
- //#region src/crdt/value.ts
- function encodeValue(value, eventstamp) {
- return {
- "~value": value,
- "~eventstamp": eventstamp
- };
- }
- function decodeValue(value) {
- return value["~value"];
- }
- function mergeValues(into, from) {
- return into["~eventstamp"] > from["~eventstamp"] ? [into, into["~eventstamp"]] : [from, from["~eventstamp"]];
+ //#region src/database/standard-schema.ts
+ function standardValidate(schema, input) {
+ const result = schema["~standard"].validate(input);
+ if (result instanceof Promise) throw new TypeError("Schema validation must be synchronous");
+ if (result.issues) throw new Error(JSON.stringify(result.issues, null, 2));
+ return result.value;
  }

  //#endregion
- //#region src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- const step = (input, output) => {
- for (const key in input) {
- if (!Object.hasOwn(input, key)) continue;
- const value = input[key];
- if (isObject(value)) {
- output[key] = {};
- step(value, output[key]);
- } else output[key] = encodeValue(value, eventstamp);
- }
+ //#region src/database/collection.ts
+ /**
+ * Symbols for internal collection methods used by transactions.
+ * These are not part of the public Collection type.
+ */
+ const CollectionInternals = {
+ getPendingMutations: Symbol("getPendingMutations"),
+ emitMutations: Symbol("emitMutations"),
+ replaceData: Symbol("replaceData"),
+ data: Symbol("data")
+ };
+ function createCollection(name, schema, getId, getEventstamp, initialData, options) {
+ const autoFlush = options?.autoFlush ?? true;
+ const data = initialData ?? /* @__PURE__ */ new Map();
+ const emitter = createEmitter();
+ const pendingMutations = {
+ added: [],
+ updated: [],
+ removed: []
  };
-
-
-
-
-
-
-
-
-
-
- else if (isObject(value)) {
- output[key] = {};
- step(value, output[key]);
- }
+ const flushMutations = () => {
+ if (pendingMutations.added.length > 0 || pendingMutations.updated.length > 0 || pendingMutations.removed.length > 0) {
+ emitter.emit("mutation", {
+ added: [...pendingMutations.added],
+ updated: [...pendingMutations.updated],
+ removed: [...pendingMutations.removed]
+ });
+ pendingMutations.added = [];
+ pendingMutations.updated = [];
+ pendingMutations.removed = [];
  }
  };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- output[key] = {};
- step(value1, value2, output[key]);
- } else if (value1) output[key] = value1;
- }
- for (const key in v2) {
- if (!Object.hasOwn(v2, key) || Object.hasOwn(output, key)) continue;
- const value = v2[key];
- if (value !== void 0) {
- output[key] = value;
- if (isEncodedValue(value)) {
- const eventstamp = value["~eventstamp"];
- if (eventstamp > greatestEventstamp) greatestEventstamp = eventstamp;
+ return {
+ get(id, opts = {}) {
+ const resource = data.get(id);
+ if (!resource) return null;
+ if (!opts.includeDeleted && resource.meta.deletedAt) return null;
+ return resource.attributes;
+ },
+ getAll(opts = {}) {
+ const resources = Array.from(data.values());
+ if (opts.includeDeleted) return resources.map((resource) => resource.attributes);
+ else return resources.filter((resource) => !resource.meta.deletedAt).map((resource) => resource.attributes);
+ },
+ find(filter, opts) {
+ const results = [];
+ for (const [, resource] of data.entries()) {
+ if (resource.meta.deletedAt) continue;
+ const attributes = resource.attributes;
+ if (filter(attributes)) {
+ const value = opts?.map ? opts.map(attributes) : attributes;
+ results.push(value);
  }
  }
+ if (opts?.sort) results.sort(opts.sort);
+ return results;
+ },
+ add(item) {
+ const validated = standardValidate(schema, item);
+ const id = getId(validated);
+ if (data.has(id)) throw new DuplicateIdError(id);
+ const resource = makeResource(name, id, validated, getEventstamp());
+ data.set(id, resource);
+ pendingMutations.added.push({
+ id,
+ item: validated
+ });
+ if (autoFlush) flushMutations();
+ return validated;
+ },
+ update(id, updates) {
+ const existing = data.get(id);
+ if (!existing) throw new IdNotFoundError(id);
+ const before = existing.attributes;
+ const merged = mergeResources(existing, makeResource(name, id, updates, getEventstamp()));
+ standardValidate(schema, merged.attributes);
+ data.set(id, merged);
+ pendingMutations.updated.push({
+ id,
+ before,
+ after: merged.attributes
+ });
+ if (autoFlush) flushMutations();
+ },
+ remove(id) {
+ const existing = data.get(id);
+ if (!existing) throw new IdNotFoundError(id);
+ const item = existing.attributes;
+ const removed = deleteResource(existing, getEventstamp());
+ data.set(id, removed);
+ pendingMutations.removed.push({
+ id,
+ item
+ });
+ if (autoFlush) flushMutations();
+ },
+ merge(document) {
+ const beforeState = /* @__PURE__ */ new Map();
+ for (const [id, resource] of data.entries()) beforeState.set(id, resource.attributes);
+ const result = mergeDocuments(mapToDocument(data, getEventstamp()), document);
+ data.clear();
+ for (const resource of result.document.data) data.set(resource.id, resource);
+ for (const [id, resource] of result.changes.added) {
+ standardValidate(schema, resource.attributes);
+ pendingMutations.added.push({
+ id,
+ item: resource.attributes
+ });
+ }
+ for (const [id, resource] of result.changes.updated) {
+ standardValidate(schema, resource.attributes);
+ const before = beforeState.get(id);
+ pendingMutations.updated.push({
+ id,
+ before,
+ after: resource.attributes
+ });
+ }
+ for (const id of result.changes.deleted) {
+ const before = beforeState.get(id);
+ pendingMutations.removed.push({
+ id,
+ item: before
+ });
+ }
+ if (autoFlush) flushMutations();
+ },
+ toDocument() {
+ return mapToDocument(data, getEventstamp());
+ },
+ on(event, handler) {
+ return emitter.on(event, handler);
+ },
+ [CollectionInternals.data]() {
+ return new Map(data);
+ },
+ [CollectionInternals.getPendingMutations]() {
+ return {
+ added: [...pendingMutations.added],
+ updated: [...pendingMutations.updated],
+ removed: [...pendingMutations.removed]
+ };
+ },
+ [CollectionInternals.emitMutations](mutations) {
+ if (mutations.added.length > 0 || mutations.updated.length > 0 || mutations.removed.length > 0) emitter.emit("mutation", mutations);
+ },
+ [CollectionInternals.replaceData](newData) {
+ data.clear();
+ for (const [id, resource] of newData.entries()) data.set(id, resource);
  }
  };
- step(into, from, result);
- return [result, greatestEventstamp];
  }
+ var IdNotFoundError = class extends Error {
+ constructor(id) {
+ super(`Resource with id ${id} not found`);
+ this.name = "IdNotFoundError";
+ }
+ };
+ var DuplicateIdError = class extends Error {
+ constructor(id) {
+ super(`Resource with id ${id} already exists`);
+ this.name = "DuplicateIdError";
+ }
+ };

  //#endregion
- //#region src/
-
-
-
-
-
+ //#region src/database/query.ts
+ /**
+ * Execute a reactive query with automatic re-computation on mutations.
+ *
+ * @param db - Database instance to query
+ * @param callback - Query callback receiving read-only collection handles
+ * @returns QueryHandle with result, subscribe, and dispose methods
+ */
+ function executeQuery(db, callback) {
+ const accessedCollections = /* @__PURE__ */ new Set();
+ const subscribers = /* @__PURE__ */ new Set();
+ let currentResult;
+ const createTrackingHandles = () => {
+ const handles = {};
+ for (const name of db.collectionKeys()) {
+ const collection = db[name];
+ handles[name] = createTrackingHandle(name, collection, accessedCollections);
+ }
+ return handles;
  };
-
-
- return {
- "~id": doc["~id"],
- "~data": isEncodedValue(doc["~data"]) ? decodeValue(doc["~data"]) : decodeRecord(doc["~data"]),
- "~deletedAt": doc["~deletedAt"]
+ const runQuery = () => {
+ return callback(createTrackingHandles());
  };
-
-
-
-
-
-
-
- let
- if (mergedDeletedAt && mergedDeletedAt > greatestEventstamp) greatestEventstamp = mergedDeletedAt;
- return [{
- "~id": into["~id"],
- "~data": mergedData,
- "~deletedAt": mergedDeletedAt
- }, greatestEventstamp];
- }
- function deleteDoc(doc, eventstamp) {
+ currentResult = runQuery();
+ const unsubscribeMutation = db.on("mutation", (event) => {
+ if (accessedCollections.has(event.collection)) {
+ currentResult = runQuery();
+ for (const subscriber of subscribers) subscriber(currentResult);
+ }
+ });
+ let disposed = false;
  return {
-
-
-
+ get result() {
+ return currentResult;
+ },
+ subscribe(callback$1) {
+ if (disposed) throw new Error("Cannot subscribe to a disposed query");
+ subscribers.add(callback$1);
+ return () => {
+ subscribers.delete(callback$1);
+ };
+ },
+ dispose() {
+ if (disposed) return;
+ disposed = true;
+ unsubscribeMutation();
+ subscribers.clear();
+ accessedCollections.clear();
+ }
  };
  }
  /**
- *
- *
- * Useful for custom serialization in plugin hooks (encryption, compression, etc.)
- *
- * @param doc - Document to transform
- * @param process - Function to apply to each leaf value
- * @returns New document with transformed values
- *
- * @example
- * ```ts
- * // Encrypt all values before persisting
- * const encrypted = processDocument(doc, (value) => ({
- * ...value,
- * "~value": encrypt(value["~value"])
- * }));
- * ```
+ * Create a read-only collection handle that tracks access.
  */
- function
- const
+ function createTrackingHandle(name, collection, accessedCollections) {
+ const trackAccess = () => {
+ accessedCollections.add(name);
+ };
  return {
-
-
-
+ get(id, opts) {
+ trackAccess();
+ return collection.get(id, opts);
+ },
+ getAll(opts) {
+ trackAccess();
+ return collection.getAll(opts);
+ },
+ find(filter, opts) {
+ trackAccess();
+ return collection.find(filter, opts);
+ }
  };
  }

  //#endregion
- //#region src/
+ //#region src/database/transaction.ts
  /**
- *
- *
- * The merge operation:
- * 1. Forwards the clock to the newest eventstamp from either collection
- * 2. Merges each document pair using field-level LWW (via mergeDocs)
- * 3. Tracks what changed for hook notifications (added/updated/deleted)
- *
- * Deletion is final: once a document is deleted, updates to it are merged into
- * the document's data but don't restore visibility. Only new documents or
- * transitions into the deleted state are tracked.
+ * Execute a transaction with snapshot isolation and copy-on-write optimization.
  *
- * @param
- * @param
- * @
+ * @param configs - Collection configurations for creating new instances
+ * @param collections - Active collection instances (mutable reference)
+ * @param getEventstamp - Function to generate eventstamps
+ * @param callback - Transaction callback with tx context
+ * @returns The return value from the callback
  *
- * @
- *
- *
- *
- *
- * };
- *
- * const from = {
- * "~docs": [
- * { "~id": "doc1", "~data": {...}, "~deletedAt": null }, // updated
- * { "~id": "doc2", "~data": {...}, "~deletedAt": null } // new
- * ],
- * "~eventstamp": "2025-01-01T00:05:00.000Z|0001|c3d4"
- * };
- *
- * const result = mergeCollections(into, from);
- * // result.collection.~eventstamp === "2025-01-01T00:05:00.000Z|0001|c3d4"
- * // result.changes.added has "doc2"
- * // result.changes.updated has "doc1"
- * ```
+ * @remarks
+ * - Collections are cloned lazily on first access (read or write)
+ * - Provides snapshot isolation: tx sees consistent data from first access
+ * - Explicit rollback via tx.rollback() or implicit on exception
+ * - Only modified collections are committed back
  */
- function
- const
-
- const
-
-
-
-
-
-
-
-
-
-
- if (intoDoc === fromDoc) continue;
- const [mergedDoc] = mergeDocs(intoDoc, fromDoc);
- mergedDocsById.set(id, mergedDoc);
- const wasDeleted = intoDoc["~deletedAt"] !== null;
- const isDeleted = mergedDoc["~deletedAt"] !== null;
- if (!wasDeleted && isDeleted) deleted.add(id);
- else if (!isDeleted) updated.set(id, mergedDoc);
- }
+ function executeTransaction(configs, collections, getEventstamp, callback) {
+ const clonedCollections = /* @__PURE__ */ new Map();
+ const txHandles = {};
+ for (const name of Object.keys(collections)) {
+ const originalCollection = collections[name];
+ const config = configs[name];
+ const getClonedCollection = () => {
+ if (!clonedCollections.has(name)) {
+ const cloned = createCollection(name, config.schema, config.getId, getEventstamp, originalCollection[CollectionInternals.data](), { autoFlush: false });
+ clonedCollections.set(name, cloned);
+ }
+ return clonedCollections.get(name);
+ };
+ txHandles[name] = createLazyTransactionHandle(originalCollection, getClonedCollection);
  }
-
-
-
-
-
- },
- changes: {
- added,
- updated,
- deleted
+ let shouldRollback = false;
+ const tx = {
+ ...txHandles,
+ rollback() {
+ shouldRollback = true;
  }
  };
+ let result;
+ result = callback(tx);
+ if (!shouldRollback) for (const [name, clonedCollection] of clonedCollections.entries()) {
+ const originalCollection = collections[name];
+ const pendingMutations = clonedCollection[CollectionInternals.getPendingMutations]();
+ originalCollection[CollectionInternals.replaceData](clonedCollection[CollectionInternals.data]());
+ originalCollection[CollectionInternals.emitMutations](pendingMutations);
+ }
+ return result;
  }
-
- //#endregion
- //#region src/clock.ts
  /**
- *
- *
- *
+ * Create a transaction handle that lazily clones on first access (copy-on-write).
+ *
+ * @param originalCollection - The base collection (not modified)
+ * @param getClonedCollection - Lazy cloner (invoked on first access)
+ * @returns A collection handle with snapshot isolation
  *
- *
- *
+ * @remarks
+ * First read or write triggers cloning, providing snapshot isolation.
+ * All subsequent operations use the cloned collection.
+ * Excluded methods:
+ * - on(): events are only emitted after the transaction commits
+ * - toDocument(): serialization should happen outside transactions
  */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- this.#lastNonce = newer.nonce;
+ function createLazyTransactionHandle(_originalCollection, getClonedCollection) {
+ let cloned = null;
+ const ensureCloned = () => {
+ if (!cloned) cloned = getClonedCollection();
+ return cloned;
+ };
+ return {
+ get(id, opts) {
+ return ensureCloned().get(id, opts);
+ },
+ getAll(opts) {
+ return ensureCloned().getAll(opts);
+ },
+ find(filter, opts) {
+ return ensureCloned().find(filter, opts);
+ },
+ add(item) {
+ return ensureCloned().add(item);
+ },
+ update(id, updates) {
+ ensureCloned().update(id, updates);
+ },
+ remove(id) {
+ ensureCloned().remove(id);
+ },
+ merge(document) {
+ ensureCloned().merge(document);
  }
- }
- }
+ };
+ }

  //#endregion
- //#region src/
+ //#region src/database/db.ts
  /**
- *
- *
- *
- *
- *
- * @
+ * Create a typed database instance with collection access.
+ * @param config - Database configuration
+ * @param config.name - Database name used for persistence and routing
+ * @param config.schema - Collection schema definitions
+ * @param config.version - Optional database version, defaults to 1
+ * @returns A database instance with typed collection properties
  *
  * @example
- * ```
- * const
- *
+ * ```typescript
+ * const db = await createDatabase({
+ * name: "my-app",
+ * schema: {
+ * tasks: { schema: taskSchema, getId: (task) => task.id },
+ * },
+ * })
+ * .use(idbPlugin())
  * .init();
  *
- *
- * const id = store.add({ text: 'Buy milk', completed: false });
- * store.update(id, { completed: true });
- * store.del(id);
- *
- * // Reactive queries
- * const activeTodos = store.query({ where: (todo) => !todo.completed });
- * activeTodos.onChange(() => console.log('Todos changed!'));
+ * const task = db.tasks.add({ title: 'Learn Starling' });
  * ```
  */
-
-
-
-
-
-
-
-
-
-
-
-
-
- /**
- * Get a document by ID.
- * @returns The document, or null if not found or deleted
- */
- get(key) {
- return this.#decodeActive(this.#readMap.get(key) ?? null);
- }
- /**
- * Iterate over all non-deleted documents as [id, document] tuples.
- */
- entries() {
- const self = this;
- function* iterator() {
- for (const [key, doc] of self.#readMap.entries()) {
- const data = self.#decodeActive(doc);
- if (data !== null) yield [key, data];
- }
- }
- return iterator();
- }
- /**
- * Get the complete store state as a Collection for persistence or sync.
- * @returns Collection containing all documents and the latest eventstamp
- */
- collection() {
- return {
- "~docs": Array.from(this.#readMap.values()),
- "~eventstamp": this.#clock.latest()
- };
- }
- /**
- * Merge a collection from storage or another replica using field-level LWW.
- * @param collection - Collection from storage or another store instance
- */
- merge(collection) {
- const result = mergeCollections(this.collection(), collection);
- this.#clock.forward(result.collection["~eventstamp"]);
- this.#readMap = new Map(result.collection["~docs"].map((doc) => [doc["~id"], doc]));
- const addEntries = Array.from(result.changes.added.entries()).map(([key, doc]) => [key, decodeDoc(doc)["~data"]]);
- const updateEntries = Array.from(result.changes.updated.entries()).map(([key, doc]) => [key, decodeDoc(doc)["~data"]]);
- const deleteKeys = Array.from(result.changes.deleted);
- if (addEntries.length > 0 || updateEntries.length > 0 || deleteKeys.length > 0) this.#emitMutations(addEntries, updateEntries, deleteKeys);
- }
- /**
- * Run multiple operations in a transaction with rollback support.
- *
- * @param callback - Function receiving a transaction context
- * @param opts - Optional config. Use `silent: true` to skip plugin hooks.
- * @returns The callback's return value
- *
- * @example
- * ```ts
- * const id = store.begin((tx) => {
- * const newId = tx.add({ text: 'Buy milk' });
- * tx.update(newId, { priority: 'high' });
- * return newId; // Return value becomes begin()'s return value
- * });
- * ```
- */
- begin(callback, opts) {
- const silent = opts?.silent ?? false;
- const addEntries = [];
- const updateEntries = [];
- const deleteKeys = [];
- const staging = new Map(this.#readMap);
- let rolledBack = false;
- const result = callback({
- add: (value, options) => {
- const key = options?.withId ?? this.#getId();
- staging.set(key, this.#encodeValue(key, value));
- addEntries.push([key, value]);
- return key;
- },
- update: (key, value) => {
- const doc = encodeDoc(key, value, this.#clock.now());
- const prev = staging.get(key);
- const mergedDoc = prev ? mergeDocs(prev, doc)[0] : doc;
- staging.set(key, mergedDoc);
- const merged = this.#decodeActive(mergedDoc);
- if (merged !== null) updateEntries.push([key, merged]);
- },
- merge: (doc) => {
- const existing = staging.get(doc["~id"]);
- const mergedDoc = existing ? mergeDocs(existing, doc)[0] : doc;
- staging.set(doc["~id"], mergedDoc);
- const decoded = this.#decodeActive(mergedDoc);
- const isNew = !this.#readMap.has(doc["~id"]);
- if (mergedDoc["~deletedAt"]) deleteKeys.push(doc["~id"]);
- else if (decoded !== null) if (isNew) addEntries.push([doc["~id"], decoded]);
- else updateEntries.push([doc["~id"], decoded]);
- },
- del: (key) => {
- const currentDoc = staging.get(key);
- if (!currentDoc) return;
- staging.set(key, deleteDoc(currentDoc, this.#clock.now()));
- deleteKeys.push(key);
- },
- get: (key) => this.#decodeActive(staging.get(key) ?? null),
- rollback: () => {
- rolledBack = true;
- }
+ function createDatabase(config) {
+ const { name, schema, version = 1 } = config;
+ const clock = createClock();
+ const getEventstamp = () => clock.now();
+ const collections = makeCollections(schema, getEventstamp);
+ const publicCollections = collections;
+ const dbEmitter = createEmitter();
+ for (const collectionName of Object.keys(collections)) collections[collectionName].on("mutation", (mutations) => {
+ if (mutations.added.length > 0 || mutations.updated.length > 0 || mutations.removed.length > 0) dbEmitter.emit("mutation", {
+ collection: collectionName,
+ added: mutations.added,
+ updated: mutations.updated,
+ removed: mutations.removed
  });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- * @returns This store instance for chaining
- */
- use(plugin) {
- this.#onInitHandlers.push(plugin.onInit);
- this.#onDisposeHandlers.push(plugin.onDispose);
- if (plugin.onAdd) this.#onAddHandlers.push(plugin.onAdd);
- if (plugin.onUpdate) this.#onUpdateHandlers.push(plugin.onUpdate);
- if (plugin.onDelete) this.#onDeleteHandlers.push(plugin.onDelete);
- return this;
- }
- /**
- * Initialize the store and run plugin onInit hooks.
- *
- * Must be called before using the store. Runs plugin setup (hydrate
- * snapshots, start pollers, etc.) and hydrates existing queries.
- *
- * @returns This store instance for chaining
- */
- async init() {
- for (const hook of this.#onInitHandlers) await hook(this);
- for (const query of this.#queries) this.#hydrateQuery(query);
- return this;
- }
- /**
- * Dispose the store and run plugin cleanup.
- *
- * Flushes pending operations, clears queries, and runs plugin teardown.
- * Call when shutting down to avoid memory leaks.
- */
- async dispose() {
- for (let i = this.#onDisposeHandlers.length - 1; i >= 0; i--) await this.#onDisposeHandlers[i]?.();
- for (const query of this.#queries) {
- query.callbacks.clear();
- query.results.clear();
- }
- this.#queries.clear();
- this.#onInitHandlers = [];
- this.#onDisposeHandlers = [];
- this.#onAddHandlers = [];
- this.#onUpdateHandlers = [];
- this.#onDeleteHandlers = [];
- }
- /**
- * Create a reactive query that auto-updates when matching docs change.
- *
- * @example
- * ```ts
- * const active = store.query({ where: (todo) => !todo.completed });
- * active.results(); // [[id, todo], ...]
- * active.onChange(() => console.log('Updated!'));
- * active.dispose(); // Clean up when done
- * ```
- */
- query(config) {
- const query = {
- where: config.where,
- select: config.select,
- order: config.order,
- results: /* @__PURE__ */ new Map(),
- callbacks: /* @__PURE__ */ new Set()
- };
- this.#queries.add(query);
- this.#hydrateQuery(query);
- return {
- results: () => {
- if (query.order) return Array.from(query.results).sort(([, a], [, b]) => query.order(a, b));
- return Array.from(query.results);
- },
- onChange: (callback) => {
- query.callbacks.add(callback);
- return () => {
- query.callbacks.delete(callback);
- };
- },
- dispose: () => {
- this.#queries.delete(query);
- query.callbacks.clear();
- query.results.clear();
- }
- };
- }
- #encodeValue(key, value) {
- return encodeDoc(key, value, this.#clock.now());
- }
- #decodeActive(doc) {
- if (!doc || doc["~deletedAt"]) return null;
- return decodeDoc(doc)["~data"];
- }
- #emitMutations(addEntries, updateEntries, deleteKeys) {
- this.#notifyQueries(addEntries, updateEntries, deleteKeys);
- if (addEntries.length > 0) for (const handler of this.#onAddHandlers) handler(addEntries);
- if (updateEntries.length > 0) for (const handler of this.#onUpdateHandlers) handler(updateEntries);
- if (deleteKeys.length > 0) for (const handler of this.#onDeleteHandlers) handler(deleteKeys);
- }
- #notifyQueries(addEntries, updateEntries, deleteKeys) {
- if (this.#queries.size === 0) return;
- const dirtyQueries = /* @__PURE__ */ new Set();
- if (addEntries.length > 0) {
- for (const [key, value] of addEntries) for (const query of this.#queries) if (query.where(value)) {
- const selected = this.#selectValue(query, value);
- query.results.set(key, selected);
- dirtyQueries.add(query);
- }
- }
- if (updateEntries.length > 0) for (const [key, value] of updateEntries) for (const query of this.#queries) {
- const matches = query.where(value);
- const inResults = query.results.has(key);
- if (matches && !inResults) {
- const selected = this.#selectValue(query, value);
- query.results.set(key, selected);
- dirtyQueries.add(query);
- } else if (!matches && inResults) {
- query.results.delete(key);
- dirtyQueries.add(query);
- } else if (matches && inResults) {
- const selected = this.#selectValue(query, value);
- query.results.set(key, selected);
- dirtyQueries.add(query);
+ });
+ const plugins = [];
+ const db = {
+ ...publicCollections,
+ name,
+ version,
+ begin(callback) {
+ return executeTransaction(schema, collections, getEventstamp, callback);
+ },
+ query(callback) {
+ return executeQuery(db, callback);
+ },
+ toDocuments() {
+ const documents = {};
+ for (const dbName of Object.keys(collections)) documents[dbName] = collections[dbName].toDocument();
+ return documents;
+ },
+ on(event, handler) {
+ return dbEmitter.on(event, handler);
+ },
+ use(plugin) {
+ plugins.push(plugin);
+ return db;
+ },
+ async init() {
+ for (const plugin of plugins) if (plugin.handlers.init) await plugin.handlers.init(db);
+ return db;
+ },
+ async dispose() {
+ for (let i = plugins.length - 1; i >= 0; i--) {
+ const plugin = plugins[i];
+ if (plugin?.handlers.dispose) await plugin.handlers.dispose(db);
  }
+ },
+ collectionKeys() {
+ return Object.keys(collections);
  }
-
-
-
-
- }
-
-
-
- #hydrateQuery(query) {
- query.results.clear();
- for (const [key, value] of this.entries()) if (query.where(value)) {
- const selected = this.#selectValue(query, value);
- query.results.set(key, selected);
- }
- }
- #selectValue(query, value) {
- return query.select ? query.select(value) : value;
+ };
+ return db;
+ }
+ function makeCollections(configs, getEventstamp) {
+ const collections = {};
+ for (const name of Object.keys(configs)) {
+ const config = configs[name];
+ collections[name] = createCollection(name, config.schema, config.getId, getEventstamp);
  }
-
+ return collections;
+ }

  //#endregion
- export {
+ export { CollectionInternals, DuplicateIdError, IdNotFoundError, createDatabase };