@brightchain/db 0.20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +81 -0
- package/package.json +18 -0
- package/src/__tests__/helpers/mockBlockStore.d.ts +113 -0
- package/src/__tests__/helpers/mockBlockStore.js +380 -0
- package/src/__tests__/helpers/mockBlockStore.js.map +1 -0
- package/src/index.d.ts +31 -0
- package/src/index.js +78 -0
- package/src/index.js.map +1 -0
- package/src/lib/aggregation.d.ts +18 -0
- package/src/lib/aggregation.js +407 -0
- package/src/lib/aggregation.js.map +1 -0
- package/src/lib/cblIndex.d.ts +268 -0
- package/src/lib/cblIndex.js +856 -0
- package/src/lib/cblIndex.js.map +1 -0
- package/src/lib/collection.d.ts +305 -0
- package/src/lib/collection.js +991 -0
- package/src/lib/collection.js.map +1 -0
- package/src/lib/cursor.d.ts +8 -0
- package/src/lib/cursor.js +13 -0
- package/src/lib/cursor.js.map +1 -0
- package/src/lib/database.d.ts +158 -0
- package/src/lib/database.js +332 -0
- package/src/lib/database.js.map +1 -0
- package/src/lib/errors.d.ts +85 -0
- package/src/lib/errors.js +103 -0
- package/src/lib/errors.js.map +1 -0
- package/src/lib/expressMiddleware.d.ts +57 -0
- package/src/lib/expressMiddleware.js +488 -0
- package/src/lib/expressMiddleware.js.map +1 -0
- package/src/lib/headRegistry.d.ts +60 -0
- package/src/lib/headRegistry.js +216 -0
- package/src/lib/headRegistry.js.map +1 -0
- package/src/lib/indexing.d.ts +7 -0
- package/src/lib/indexing.js +14 -0
- package/src/lib/indexing.js.map +1 -0
- package/src/lib/model.d.ts +162 -0
- package/src/lib/model.js +260 -0
- package/src/lib/model.js.map +1 -0
- package/src/lib/pooledStoreAdapter.d.ts +44 -0
- package/src/lib/pooledStoreAdapter.js +109 -0
- package/src/lib/pooledStoreAdapter.js.map +1 -0
- package/src/lib/queryEngine.d.ts +48 -0
- package/src/lib/queryEngine.js +461 -0
- package/src/lib/queryEngine.js.map +1 -0
- package/src/lib/schemaValidation.d.ts +80 -0
- package/src/lib/schemaValidation.js +353 -0
- package/src/lib/schemaValidation.js.map +1 -0
- package/src/lib/transaction.d.ts +7 -0
- package/src/lib/transaction.js +12 -0
- package/src/lib/transaction.js.map +1 -0
- package/src/lib/types.d.ts +360 -0
- package/src/lib/types.js +6 -0
- package/src/lib/types.js.map +1 -0
- package/src/lib/updateEngine.d.ts +7 -0
- package/src/lib/updateEngine.js +13 -0
- package/src/lib/updateEngine.js.map +1 -0
|
@@ -0,0 +1,991 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Collection – the main API surface for interacting with a document collection.
|
|
4
|
+
*
|
|
5
|
+
* Provides a MongoDB-compatible interface including:
|
|
6
|
+
* insertOne, insertMany, findOne, find, updateOne, updateMany,
|
|
7
|
+
* deleteOne, deleteMany, replaceOne, countDocuments, distinct,
|
|
8
|
+
* aggregate, createIndex, dropIndex, watch
|
|
9
|
+
*/
|
|
10
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
11
|
+
exports.HeadRegistry = exports.Collection = void 0;
|
|
12
|
+
exports.calculateBlockId = calculateBlockId;
|
|
13
|
+
const sha3_1 = require("@noble/hashes/sha3");
|
|
14
|
+
const crypto_1 = require("crypto");
|
|
15
|
+
const aggregation_1 = require("./aggregation");
|
|
16
|
+
const cursor_1 = require("./cursor");
|
|
17
|
+
const errors_1 = require("./errors");
|
|
18
|
+
const indexing_1 = require("./indexing");
|
|
19
|
+
const queryEngine_1 = require("./queryEngine");
|
|
20
|
+
const schemaValidation_1 = require("./schemaValidation");
|
|
21
|
+
const updateEngine_1 = require("./updateEngine");
|
|
22
|
+
/**
 * Calculate a content-addressable block ID from data.
 * Uses SHA3-512, the same algorithm as BrightChain's ChecksumService,
 * but without requiring the global service provider to be initialised.
 *
 * @param data - Raw payload as a Buffer or Uint8Array.
 * @returns Lowercase hex digest of the SHA3-512 hash.
 */
function calculateBlockId(data) {
    // Normalise Buffer input to a plain Uint8Array before hashing.
    const input = Buffer.isBuffer(data) ? new Uint8Array(data) : data;
    const digest = (0, sha3_1.sha3_512)(input);
    return Buffer.from(digest).toString('hex');
}
|
|
32
|
+
/**
|
|
33
|
+
* A single document collection backed by a BrightChain block store.
|
|
34
|
+
*/
|
|
35
|
+
class Collection {
|
|
36
|
+
/**
 * @param name - Collection name (unique within the database).
 * @param store - Content-addressed block store used for persistence.
 * @param dbName - Owning database name (namespaces the head pointer).
 * @param headRegistry - Registry mapping (db, collection) → head block id.
 * @param options - Optional writeConcern / readPreference overrides.
 */
constructor(name, store, dbName, headRegistry, options) {
    this.name = name;
    this.store = store;
    this.dbName = dbName;
    this.headRegistry = headRegistry;
    /** In-memory document index: logical _id → block checksum */
    this.docIndex = new Map();
    /** In-memory document cache for fast reads */
    this.docCache = new Map();
    /** Index manager */
    this.indexManager = new indexing_1.IndexManager();
    /** Change listeners */
    this.changeListeners = new Set();
    /** Whether initial index has been loaded */
    this.loaded = false;
    /** Write concern for this collection (defaults to w:1) */
    this.writeConcern = { w: 1 };
    /** Read preference for this collection */
    this.readPreference = 'primary';
    /** TTL index timers */
    this.ttlTimers = new Map();
    /** Text index config: field name → weight */
    this.textIndexFields = {};
    if (options?.writeConcern)
        this.writeConcern = options.writeConcern;
    if (options?.readPreference)
        this.readPreference = options.readPreference;
}
|
|
64
|
+
/** Set the collection resolver for cross-collection operations */
|
|
65
|
+
setCollectionResolver(resolver) {
|
|
66
|
+
this.collectionResolver = resolver;
|
|
67
|
+
}
|
|
68
|
+
// ═══════════════════════════════════════════════════════
|
|
69
|
+
// Index loading / persistence
|
|
70
|
+
// ═══════════════════════════════════════════════════════
|
|
71
|
+
async ensureLoaded() {
|
|
72
|
+
if (this.loaded)
|
|
73
|
+
return;
|
|
74
|
+
if (!this.loading) {
|
|
75
|
+
this.loading = this.loadFromStore().finally(() => {
|
|
76
|
+
this.loading = undefined;
|
|
77
|
+
});
|
|
78
|
+
}
|
|
79
|
+
return this.loading;
|
|
80
|
+
}
|
|
81
|
+
/**
 * Load collection state from the current head block (if any):
 * restores the logical _id → block-checksum mappings and rebuilds
 * secondary indexes from the stored documents. Always marks the
 * collection as loaded, even when no head exists.
 */
async loadFromStore() {
    const headBlockId = this.headRegistry.getHead(this.dbName, this.name);
    if (headBlockId) {
        try {
            const handle = this.store.get(headBlockId);
            const blockData = handle.fullData;
            const meta = JSON.parse(Buffer.from(blockData).toString('utf8'));
            // Restore document index
            if (meta.mappings) {
                for (const [logicalId, blockId] of Object.entries(meta.mappings)) {
                    this.docIndex.set(logicalId, blockId);
                }
            }
            // Restore index metadata
            if (meta.indexes) {
                // Load all docs first for index rebuild
                const docs = await this.loadAllDocs();
                this.indexManager.restoreFromJSON(meta.indexes, docs);
            }
        }
        catch {
            // Head block not found – start fresh
            // NOTE(review): this also swallows JSON parse errors and any
            // other store failure, not just a missing block — confirm intended.
        }
    }
    this.loaded = true;
}
|
|
107
|
+
async loadAllDocs() {
|
|
108
|
+
const docs = [];
|
|
109
|
+
for (const [logicalId, blockId] of this.docIndex.entries()) {
|
|
110
|
+
const doc = await this.readDocFromStore(blockId);
|
|
111
|
+
if (doc) {
|
|
112
|
+
this.docCache.set(logicalId, doc);
|
|
113
|
+
docs.push(doc);
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
return docs;
|
|
117
|
+
}
|
|
118
|
+
async persistMeta() {
|
|
119
|
+
const meta = {
|
|
120
|
+
mappings: Object.fromEntries(this.docIndex.entries()),
|
|
121
|
+
indexes: this.indexManager.toJSON(),
|
|
122
|
+
};
|
|
123
|
+
const payload = Buffer.from(JSON.stringify(meta), 'utf8');
|
|
124
|
+
const blockId = calculateBlockId(payload);
|
|
125
|
+
const exists = await this.store.has(blockId);
|
|
126
|
+
if (!exists) {
|
|
127
|
+
await this.store.put(blockId, payload);
|
|
128
|
+
}
|
|
129
|
+
await this.headRegistry.setHead(this.dbName, this.name, blockId);
|
|
130
|
+
}
|
|
131
|
+
// ═══════════════════════════════════════════════════════
|
|
132
|
+
// Low-level document I/O
|
|
133
|
+
// ═══════════════════════════════════════════════════════
|
|
134
|
+
async readDocFromStore(blockId) {
|
|
135
|
+
try {
|
|
136
|
+
const handle = this.store.get(blockId);
|
|
137
|
+
const blockData = handle.fullData;
|
|
138
|
+
return JSON.parse(Buffer.from(blockData).toString('utf8'));
|
|
139
|
+
}
|
|
140
|
+
catch {
|
|
141
|
+
return null;
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
async readDoc(logicalId) {
|
|
145
|
+
// Check cache first
|
|
146
|
+
if (this.docCache.has(logicalId)) {
|
|
147
|
+
return this.docCache.get(logicalId) ?? null;
|
|
148
|
+
}
|
|
149
|
+
const blockId = this.docIndex.get(logicalId);
|
|
150
|
+
if (!blockId)
|
|
151
|
+
return null;
|
|
152
|
+
const doc = await this.readDocFromStore(blockId);
|
|
153
|
+
if (doc) {
|
|
154
|
+
this.docCache.set(logicalId, doc);
|
|
155
|
+
}
|
|
156
|
+
return doc;
|
|
157
|
+
}
|
|
158
|
+
/**
 * Persist a document as a new content-addressed block and update the
 * logical mapping, read cache, and secondary indexes.
 *
 * Copy-on-write: blocks are immutable and deduplicated by content hash.
 * If index maintenance throws (e.g. a duplicate-key violation), the
 * in-memory mapping/cache/indexes are rolled back before rethrowing.
 *
 * @param doc - Document to persist (an _id is generated if absent).
 * @param logicalId - Explicit logical id to write under (used on update).
 * @returns The document actually written, including its _id.
 */
async writeDoc(doc, logicalId) {
    // Prefer an explicit id, then the doc's own, then a fresh dashless UUID.
    const id = logicalId ?? doc._id ?? (0, crypto_1.randomUUID)().replace(/-/g, '');
    const docWithId = { ...doc, _id: id };
    const payload = Buffer.from(JSON.stringify(docWithId), 'utf8');
    const blockId = calculateBlockId(payload);
    const exists = await this.store.has(blockId);
    if (!exists) {
        await this.store.put(blockId, payload);
    }
    // Remove old index entries if updating
    const oldDoc = this.docCache.get(id);
    if (oldDoc) {
        this.indexManager.removeDocument(oldDoc);
    }
    this.docIndex.set(id, blockId);
    this.docCache.set(id, docWithId);
    // Add to indexes (may throw DuplicateKeyError)
    try {
        this.indexManager.addDocument(docWithId);
    }
    catch (err) {
        // Roll back the write
        if (oldDoc) {
            // Recompute the previous block id from the cached old document
            // so the mapping points back at the prior version.
            const oldPayload = Buffer.from(JSON.stringify(oldDoc), 'utf8');
            const oldBlockId = calculateBlockId(oldPayload);
            this.docIndex.set(id, oldBlockId);
            this.docCache.set(id, oldDoc);
            this.indexManager.addDocument(oldDoc);
        }
        else {
            this.docIndex.delete(id);
            this.docCache.delete(id);
        }
        throw err;
    }
    await this.persistMeta();
    return docWithId;
}
|
|
196
|
+
/**
 * Remove a document from the collection index.
 *
 * Copy-on-write: blocks in the store are never deleted. This method only
 * removes the logical mapping so the document is no longer reachable.
 * The underlying block remains in the store as an orphan and could be
 * reclaimed by a separate garbage-collection process if desired.
 *
 * @param logicalId - The document's logical _id.
 * @returns true if a mapping existed and was removed, false otherwise.
 */
async removeDoc(logicalId) {
    const doc = this.docCache.get(logicalId);
    if (doc) {
        this.indexManager.removeDocument(doc);
    }
    // NOTE(review): index entries are removed above before the mapping
    // check below; if an id were cached without a docIndex entry the
    // secondary indexes would lose entries while this returns false —
    // confirm that state is unreachable.
    const blockId = this.docIndex.get(logicalId);
    if (!blockId)
        return false;
    // Copy-on-write: do NOT call this.store.delete(blockId).
    // Blocks are immutable; we only remove the mapping.
    this.docIndex.delete(logicalId);
    this.docCache.delete(logicalId);
    await this.persistMeta();
    return true;
}
|
|
219
|
+
// ═══════════════════════════════════════════════════════
|
|
220
|
+
// CRUD operations
|
|
221
|
+
// ═══════════════════════════════════════════════════════
|
|
222
|
+
/**
|
|
223
|
+
* Insert a single document.
|
|
224
|
+
*/
|
|
225
|
+
async insertOne(doc, options) {
|
|
226
|
+
await this.ensureLoaded();
|
|
227
|
+
if (options?.session && options.session.inTransaction) {
|
|
228
|
+
const session = options.session;
|
|
229
|
+
const id = doc._id ?? (0, crypto_1.randomUUID)().replace(/-/g, '');
|
|
230
|
+
const docWithId = { ...doc, _id: id };
|
|
231
|
+
const validated = this.validateBeforeWrite(docWithId);
|
|
232
|
+
session.addOp({ type: 'insert', collection: this.name, doc: validated });
|
|
233
|
+
return { acknowledged: true, insertedId: id };
|
|
234
|
+
}
|
|
235
|
+
const validated = this.validateBeforeWrite(doc);
|
|
236
|
+
const written = await this.writeDoc(validated);
|
|
237
|
+
this.emitChange('insert', written);
|
|
238
|
+
return { acknowledged: true, insertedId: written._id };
|
|
239
|
+
}
|
|
240
|
+
/**
|
|
241
|
+
* Insert multiple documents.
|
|
242
|
+
*/
|
|
243
|
+
async insertMany(docs, options) {
|
|
244
|
+
await this.ensureLoaded();
|
|
245
|
+
const insertedIds = {};
|
|
246
|
+
for (let i = 0; i < docs.length; i++) {
|
|
247
|
+
const result = await this.insertOne(docs[i], options);
|
|
248
|
+
insertedIds[i] = result.insertedId;
|
|
249
|
+
}
|
|
250
|
+
return {
|
|
251
|
+
acknowledged: true,
|
|
252
|
+
insertedCount: docs.length,
|
|
253
|
+
insertedIds,
|
|
254
|
+
};
|
|
255
|
+
}
|
|
256
|
+
/**
|
|
257
|
+
* Find a single document matching the filter.
|
|
258
|
+
*/
|
|
259
|
+
async findOne(filter = {}, options) {
|
|
260
|
+
await this.ensureLoaded();
|
|
261
|
+
this.configureTextSearch();
|
|
262
|
+
// Try index lookup
|
|
263
|
+
const candidates = this.indexManager.findCandidates(filter);
|
|
264
|
+
if (candidates) {
|
|
265
|
+
for (const id of candidates) {
|
|
266
|
+
const doc = await this.readDoc(id);
|
|
267
|
+
if (doc && (0, queryEngine_1.matchesFilter)(doc, filter)) {
|
|
268
|
+
return options?.projection
|
|
269
|
+
? (0, queryEngine_1.applyProjection)(doc, options.projection)
|
|
270
|
+
: doc;
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
return null;
|
|
274
|
+
}
|
|
275
|
+
// Full scan
|
|
276
|
+
for (const id of this.docIndex.keys()) {
|
|
277
|
+
const doc = await this.readDoc(id);
|
|
278
|
+
if (doc && (0, queryEngine_1.matchesFilter)(doc, filter)) {
|
|
279
|
+
return options?.projection
|
|
280
|
+
? (0, queryEngine_1.applyProjection)(doc, options.projection)
|
|
281
|
+
: doc;
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
return null;
|
|
285
|
+
}
|
|
286
|
+
/**
|
|
287
|
+
* Find documents matching the filter. Returns a cursor for chaining.
|
|
288
|
+
*/
|
|
289
|
+
find(filter = {}, options) {
|
|
290
|
+
const cursor = new cursor_1.Cursor(async () => {
|
|
291
|
+
await this.ensureLoaded();
|
|
292
|
+
this.configureTextSearch();
|
|
293
|
+
// Try index lookup
|
|
294
|
+
const candidates = this.indexManager.findCandidates(filter);
|
|
295
|
+
const idsToScan = candidates ?? new Set(this.docIndex.keys());
|
|
296
|
+
const results = [];
|
|
297
|
+
for (const id of idsToScan) {
|
|
298
|
+
const doc = await this.readDoc(id);
|
|
299
|
+
if (doc && (0, queryEngine_1.matchesFilter)(doc, filter)) {
|
|
300
|
+
results.push(options?.projection
|
|
301
|
+
? (0, queryEngine_1.applyProjection)(doc, options.projection)
|
|
302
|
+
: doc);
|
|
303
|
+
}
|
|
304
|
+
}
|
|
305
|
+
return results;
|
|
306
|
+
});
|
|
307
|
+
if (options?.sort)
|
|
308
|
+
cursor.sort(options.sort);
|
|
309
|
+
if (options?.skip)
|
|
310
|
+
cursor.skip(options.skip);
|
|
311
|
+
if (options?.limit)
|
|
312
|
+
cursor.limit(options.limit);
|
|
313
|
+
return cursor;
|
|
314
|
+
}
|
|
315
|
+
/**
|
|
316
|
+
* Find a document by its _id.
|
|
317
|
+
*/
|
|
318
|
+
async findById(id) {
|
|
319
|
+
await this.ensureLoaded();
|
|
320
|
+
return this.readDoc(id);
|
|
321
|
+
}
|
|
322
|
+
/**
|
|
323
|
+
* Update a single document matching the filter.
|
|
324
|
+
*/
|
|
325
|
+
async updateOne(filter, update, options) {
|
|
326
|
+
await this.ensureLoaded();
|
|
327
|
+
const doc = await this.findOne(filter);
|
|
328
|
+
if (!doc && options?.upsert) {
|
|
329
|
+
const baseDoc = {};
|
|
330
|
+
// Apply filter as initial fields (for exact matches)
|
|
331
|
+
for (const [key, value] of Object.entries(filter)) {
|
|
332
|
+
if (!key.startsWith('$') && typeof value !== 'object') {
|
|
333
|
+
baseDoc[key] = value;
|
|
334
|
+
}
|
|
335
|
+
}
|
|
336
|
+
const updated = (0, updateEngine_1.applyUpdate)(baseDoc, update);
|
|
337
|
+
const result = await this.insertOne(updated, options);
|
|
338
|
+
return {
|
|
339
|
+
acknowledged: true,
|
|
340
|
+
matchedCount: 0,
|
|
341
|
+
modifiedCount: 0,
|
|
342
|
+
upsertedCount: 1,
|
|
343
|
+
upsertedId: result.insertedId,
|
|
344
|
+
};
|
|
345
|
+
}
|
|
346
|
+
if (!doc) {
|
|
347
|
+
return {
|
|
348
|
+
acknowledged: true,
|
|
349
|
+
matchedCount: 0,
|
|
350
|
+
modifiedCount: 0,
|
|
351
|
+
upsertedCount: 0,
|
|
352
|
+
};
|
|
353
|
+
}
|
|
354
|
+
if (options?.session && options.session.inTransaction) {
|
|
355
|
+
const session = options.session;
|
|
356
|
+
const updated = (0, updateEngine_1.applyUpdate)(doc, update);
|
|
357
|
+
const validated = this.validateBeforeWrite(updated, true);
|
|
358
|
+
session.addOp({
|
|
359
|
+
type: 'update',
|
|
360
|
+
collection: this.name,
|
|
361
|
+
docId: doc._id,
|
|
362
|
+
before: doc,
|
|
363
|
+
after: validated,
|
|
364
|
+
});
|
|
365
|
+
return {
|
|
366
|
+
acknowledged: true,
|
|
367
|
+
matchedCount: 1,
|
|
368
|
+
modifiedCount: 1,
|
|
369
|
+
upsertedCount: 0,
|
|
370
|
+
};
|
|
371
|
+
}
|
|
372
|
+
const updated = (0, updateEngine_1.applyUpdate)(doc, update);
|
|
373
|
+
const validated = this.validateBeforeWrite(updated, true);
|
|
374
|
+
await this.writeDoc(validated, doc._id);
|
|
375
|
+
this.emitChange('update', updated, {
|
|
376
|
+
updatedFields: (0, updateEngine_1.isOperatorUpdate)(update)
|
|
377
|
+
? update['$set']
|
|
378
|
+
: undefined,
|
|
379
|
+
});
|
|
380
|
+
return {
|
|
381
|
+
acknowledged: true,
|
|
382
|
+
matchedCount: 1,
|
|
383
|
+
modifiedCount: 1,
|
|
384
|
+
upsertedCount: 0,
|
|
385
|
+
};
|
|
386
|
+
}
|
|
387
|
+
/**
 * Update all documents matching the filter.
 *
 * Inside a transaction each change is recorded on the session instead
 * of being written immediately; otherwise each document is rewritten
 * and an 'update' change event is emitted per document.
 *
 * NOTE(review): unlike updateOne, this path does not call
 * validateBeforeWrite(), so schema validation is bypassed for bulk
 * updates — confirm intended.
 *
 * @returns matched/modified counts (upserts are not supported here).
 */
async updateMany(filter, update, options) {
    await this.ensureLoaded();
    const docs = await this.find(filter).toArray();
    let modified = 0;
    for (const doc of docs) {
        const updated = (0, updateEngine_1.applyUpdate)(doc, update);
        if (options?.session && options.session.inTransaction) {
            options.session.addOp({
                type: 'update',
                collection: this.name,
                docId: doc._id,
                before: doc,
                after: updated,
            });
        }
        else {
            await this.writeDoc(updated, doc._id);
            this.emitChange('update', updated);
        }
        // Counted even if the update produced an identical document.
        modified++;
    }
    return {
        acknowledged: true,
        matchedCount: docs.length,
        modifiedCount: modified,
        upsertedCount: 0,
    };
}
|
|
418
|
+
/**
|
|
419
|
+
* Delete a single document matching the filter.
|
|
420
|
+
*/
|
|
421
|
+
async deleteOne(filter, options) {
|
|
422
|
+
await this.ensureLoaded();
|
|
423
|
+
const doc = await this.findOne(filter);
|
|
424
|
+
if (!doc)
|
|
425
|
+
return { acknowledged: true, deletedCount: 0 };
|
|
426
|
+
if (options?.session && options.session.inTransaction) {
|
|
427
|
+
options.session.addOp({
|
|
428
|
+
type: 'delete',
|
|
429
|
+
collection: this.name,
|
|
430
|
+
docId: doc._id,
|
|
431
|
+
doc,
|
|
432
|
+
});
|
|
433
|
+
return { acknowledged: true, deletedCount: 1 };
|
|
434
|
+
}
|
|
435
|
+
const removed = await this.removeDoc(doc._id);
|
|
436
|
+
if (removed)
|
|
437
|
+
this.emitChange('delete', doc);
|
|
438
|
+
return { acknowledged: true, deletedCount: removed ? 1 : 0 };
|
|
439
|
+
}
|
|
440
|
+
/**
|
|
441
|
+
* Delete all documents matching the filter.
|
|
442
|
+
*/
|
|
443
|
+
async deleteMany(filter, options) {
|
|
444
|
+
await this.ensureLoaded();
|
|
445
|
+
const docs = await this.find(filter).toArray();
|
|
446
|
+
let deleted = 0;
|
|
447
|
+
for (const doc of docs) {
|
|
448
|
+
if (options?.session && options.session.inTransaction) {
|
|
449
|
+
options.session.addOp({
|
|
450
|
+
type: 'delete',
|
|
451
|
+
collection: this.name,
|
|
452
|
+
docId: doc._id,
|
|
453
|
+
doc,
|
|
454
|
+
});
|
|
455
|
+
deleted++;
|
|
456
|
+
}
|
|
457
|
+
else {
|
|
458
|
+
const removed = await this.removeDoc(doc._id);
|
|
459
|
+
if (removed) {
|
|
460
|
+
this.emitChange('delete', doc);
|
|
461
|
+
deleted++;
|
|
462
|
+
}
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
return { acknowledged: true, deletedCount: deleted };
|
|
466
|
+
}
|
|
467
|
+
/**
|
|
468
|
+
* Replace a single document matching the filter.
|
|
469
|
+
*/
|
|
470
|
+
async replaceOne(filter, replacement, options) {
|
|
471
|
+
await this.ensureLoaded();
|
|
472
|
+
const doc = await this.findOne(filter);
|
|
473
|
+
if (!doc && options?.upsert) {
|
|
474
|
+
const result = await this.insertOne(replacement, options);
|
|
475
|
+
return {
|
|
476
|
+
acknowledged: true,
|
|
477
|
+
matchedCount: 0,
|
|
478
|
+
modifiedCount: 0,
|
|
479
|
+
upsertedCount: 1,
|
|
480
|
+
upsertedId: result.insertedId,
|
|
481
|
+
};
|
|
482
|
+
}
|
|
483
|
+
if (!doc) {
|
|
484
|
+
return {
|
|
485
|
+
acknowledged: true,
|
|
486
|
+
matchedCount: 0,
|
|
487
|
+
modifiedCount: 0,
|
|
488
|
+
upsertedCount: 0,
|
|
489
|
+
};
|
|
490
|
+
}
|
|
491
|
+
const replacementWithId = { ...replacement, _id: doc._id };
|
|
492
|
+
await this.writeDoc(replacementWithId, doc._id);
|
|
493
|
+
this.emitChange('replace', replacementWithId);
|
|
494
|
+
return {
|
|
495
|
+
acknowledged: true,
|
|
496
|
+
matchedCount: 1,
|
|
497
|
+
modifiedCount: 1,
|
|
498
|
+
upsertedCount: 0,
|
|
499
|
+
};
|
|
500
|
+
}
|
|
501
|
+
/**
|
|
502
|
+
* Count documents matching the filter.
|
|
503
|
+
*/
|
|
504
|
+
async countDocuments(filter = {}) {
|
|
505
|
+
const docs = await this.find(filter).toArray();
|
|
506
|
+
return docs.length;
|
|
507
|
+
}
|
|
508
|
+
/**
|
|
509
|
+
* Estimated document count (fast, from index size).
|
|
510
|
+
*/
|
|
511
|
+
async estimatedDocumentCount() {
|
|
512
|
+
await this.ensureLoaded();
|
|
513
|
+
return this.docIndex.size;
|
|
514
|
+
}
|
|
515
|
+
/**
|
|
516
|
+
* Get distinct values for a field.
|
|
517
|
+
*/
|
|
518
|
+
async distinct(field, filter = {}) {
|
|
519
|
+
const docs = await this.find(filter).toArray();
|
|
520
|
+
const values = new Set();
|
|
521
|
+
const result = [];
|
|
522
|
+
for (const doc of docs) {
|
|
523
|
+
const val = doc[field];
|
|
524
|
+
const key = JSON.stringify(val);
|
|
525
|
+
if (!values.has(key)) {
|
|
526
|
+
values.add(key);
|
|
527
|
+
result.push(val);
|
|
528
|
+
}
|
|
529
|
+
}
|
|
530
|
+
return result;
|
|
531
|
+
}
|
|
532
|
+
/**
|
|
533
|
+
* Run an aggregation pipeline.
|
|
534
|
+
*/
|
|
535
|
+
async aggregate(pipeline) {
|
|
536
|
+
await this.ensureLoaded();
|
|
537
|
+
const allDocs = await this.find().toArray();
|
|
538
|
+
const lookupResolver = this.collectionResolver
|
|
539
|
+
? async (collName) => {
|
|
540
|
+
const coll = this.collectionResolver(collName);
|
|
541
|
+
return coll.find().toArray();
|
|
542
|
+
}
|
|
543
|
+
: undefined;
|
|
544
|
+
return (0, aggregation_1.runAggregation)(allDocs, pipeline, lookupResolver);
|
|
545
|
+
}
|
|
546
|
+
// ═══════════════════════════════════════════════════════
|
|
547
|
+
// Schema validation
|
|
548
|
+
// ═══════════════════════════════════════════════════════
|
|
549
|
+
/**
|
|
550
|
+
* Set a schema for this collection. Documents will be validated on
|
|
551
|
+
* insert and (if level is 'strict') on update.
|
|
552
|
+
*/
|
|
553
|
+
setSchema(schema) {
|
|
554
|
+
this.schema = schema;
|
|
555
|
+
}
|
|
556
|
+
/**
|
|
557
|
+
* Get the current schema (if any).
|
|
558
|
+
*/
|
|
559
|
+
getSchema() {
|
|
560
|
+
return this.schema;
|
|
561
|
+
}
|
|
562
|
+
/**
|
|
563
|
+
* Remove validation schema.
|
|
564
|
+
*/
|
|
565
|
+
removeSchema() {
|
|
566
|
+
this.schema = undefined;
|
|
567
|
+
}
|
|
568
|
+
/**
|
|
569
|
+
* Validate a document without inserting it.
|
|
570
|
+
* @returns array of validation errors (empty if valid)
|
|
571
|
+
*/
|
|
572
|
+
validateDoc(doc) {
|
|
573
|
+
if (!this.schema)
|
|
574
|
+
return [];
|
|
575
|
+
try {
|
|
576
|
+
(0, schemaValidation_1.validateDocument)(doc, this.schema, this.name);
|
|
577
|
+
return [];
|
|
578
|
+
}
|
|
579
|
+
catch (err) {
|
|
580
|
+
if (err instanceof errors_1.ValidationError)
|
|
581
|
+
return err.validationErrors;
|
|
582
|
+
throw err;
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
/**
|
|
586
|
+
* Internal: validate and apply defaults before write.
|
|
587
|
+
*/
|
|
588
|
+
validateBeforeWrite(doc, isUpdate = false) {
|
|
589
|
+
if (!this.schema)
|
|
590
|
+
return doc;
|
|
591
|
+
if (isUpdate && this.schema.validationLevel === 'moderate')
|
|
592
|
+
return doc;
|
|
593
|
+
const withDefaults = (0, schemaValidation_1.applyDefaults)(doc, this.schema);
|
|
594
|
+
(0, schemaValidation_1.validateDocument)(withDefaults, this.schema, this.name);
|
|
595
|
+
return withDefaults;
|
|
596
|
+
}
|
|
597
|
+
// ═══════════════════════════════════════════════════════
|
|
598
|
+
// Text search index
|
|
599
|
+
// ═══════════════════════════════════════════════════════
|
|
600
|
+
/**
|
|
601
|
+
* Create a text index for full-text search.
|
|
602
|
+
*
|
|
603
|
+
* @param options - The fields to index and their weights
|
|
604
|
+
* @returns The index name
|
|
605
|
+
*
|
|
606
|
+
* @example
|
|
607
|
+
* ```typescript
|
|
608
|
+
* await coll.createTextIndex({ fields: { title: 10, body: 1 } });
|
|
609
|
+
* const results = await coll.find({ $text: { $search: 'hello world' } }).toArray();
|
|
610
|
+
* ```
|
|
611
|
+
*/
|
|
612
|
+
createTextIndex(options) {
|
|
613
|
+
this.textIndexFields = { ...options.fields };
|
|
614
|
+
return options.name ?? 'text_index';
|
|
615
|
+
}
|
|
616
|
+
/**
|
|
617
|
+
* Drop the text index.
|
|
618
|
+
*/
|
|
619
|
+
dropTextIndex() {
|
|
620
|
+
this.textIndexFields = {};
|
|
621
|
+
}
|
|
622
|
+
/**
|
|
623
|
+
* Check if a text index exists.
|
|
624
|
+
*/
|
|
625
|
+
hasTextIndex() {
|
|
626
|
+
return Object.keys(this.textIndexFields).length > 0;
|
|
627
|
+
}
|
|
628
|
+
// ═══════════════════════════════════════════════════════
|
|
629
|
+
// TTL index
|
|
630
|
+
// ═══════════════════════════════════════════════════════
|
|
631
|
+
/**
|
|
632
|
+
* Create a TTL index that automatically removes documents whose
|
|
633
|
+
* date field is older than `expireAfterSeconds`.
|
|
634
|
+
*
|
|
635
|
+
* TTL expiry is checked periodically (every 60 seconds by default).
|
|
636
|
+
*/
|
|
637
|
+
async createTTLIndex(field, expireAfterSeconds, intervalMs = 60000) {
|
|
638
|
+
const name = await this.createIndex({ [field]: 1 }, { expireAfterSeconds, name: `ttl_${field}` });
|
|
639
|
+
// Set up periodic sweep
|
|
640
|
+
const timer = setInterval(async () => {
|
|
641
|
+
await this.sweepTTL(field, expireAfterSeconds);
|
|
642
|
+
}, intervalMs);
|
|
643
|
+
// Allow timer to not prevent process exit
|
|
644
|
+
if (timer.unref)
|
|
645
|
+
timer.unref();
|
|
646
|
+
this.ttlTimers.set(name, timer);
|
|
647
|
+
return name;
|
|
648
|
+
}
|
|
649
|
+
/**
|
|
650
|
+
* Stop a TTL index timer (does not drop the index itself).
|
|
651
|
+
*/
|
|
652
|
+
stopTTL(indexName) {
|
|
653
|
+
const timer = this.ttlTimers.get(indexName);
|
|
654
|
+
if (timer) {
|
|
655
|
+
clearInterval(timer);
|
|
656
|
+
this.ttlTimers.delete(indexName);
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
/**
|
|
660
|
+
* Run a single TTL sweep: delete documents where `field` is older
|
|
661
|
+
* than `expireAfterSeconds` from now.
|
|
662
|
+
*/
|
|
663
|
+
async sweepTTL(field, expireAfterSeconds) {
|
|
664
|
+
await this.ensureLoaded();
|
|
665
|
+
const cutoff = new Date(Date.now() - expireAfterSeconds * 1000);
|
|
666
|
+
const expired = [];
|
|
667
|
+
for (const id of this.docIndex.keys()) {
|
|
668
|
+
const doc = await this.readDoc(id);
|
|
669
|
+
if (!doc)
|
|
670
|
+
continue;
|
|
671
|
+
const value = doc[field];
|
|
672
|
+
let dateValue = null;
|
|
673
|
+
if (value instanceof Date) {
|
|
674
|
+
dateValue = value;
|
|
675
|
+
}
|
|
676
|
+
else if (typeof value === 'string' || typeof value === 'number') {
|
|
677
|
+
const parsed = new Date(value);
|
|
678
|
+
if (!isNaN(parsed.getTime()))
|
|
679
|
+
dateValue = parsed;
|
|
680
|
+
}
|
|
681
|
+
if (dateValue && dateValue <= cutoff) {
|
|
682
|
+
expired.push(id);
|
|
683
|
+
}
|
|
684
|
+
}
|
|
685
|
+
for (const id of expired) {
|
|
686
|
+
const doc = await this.readDoc(id);
|
|
687
|
+
if (doc) {
|
|
688
|
+
await this.removeDoc(id);
|
|
689
|
+
this.emitChange('delete', doc);
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
return expired.length;
|
|
693
|
+
}
|
|
694
|
+
// ═══════════════════════════════════════════════════════
|
|
695
|
+
// Bulk write
|
|
696
|
+
// ═══════════════════════════════════════════════════════
|
|
697
|
+
/**
 * Execute multiple write operations in a single call.
 *
 * Operations run sequentially. In ordered mode (default) the first
 * failure aborts with a BulkWriteError; in unordered mode all
 * operations are attempted and failures are collected and thrown at
 * the end.
 *
 * @param operations - Array of write operations
 * @param options - Bulk write options (ordered = stop on first error)
 * @returns Aggregated result of all operations
 * @throws BulkWriteError if any operations fail and ordered is true
 *
 * @example
 * ```typescript
 * await coll.bulkWrite([
 *   { insertOne: { document: { name: 'Alice' } } },
 *   { updateOne: { filter: { name: 'Bob' }, update: { $set: { age: 30 } } } },
 *   { deleteOne: { filter: { name: 'Charlie' } } },
 * ]);
 * ```
 */
async bulkWrite(operations, options) {
    await this.ensureLoaded();
    const ordered = options?.ordered ?? true;
    const result = {
        acknowledged: true,
        insertedCount: 0,
        matchedCount: 0,
        modifiedCount: 0,
        deletedCount: 0,
        upsertedCount: 0,
        insertedIds: {},
        upsertedIds: {},
    };
    const errors = [];
    for (let i = 0; i < operations.length; i++) {
        try {
            // Dispatch on the single operation key present on each entry.
            const op = operations[i];
            if ('insertOne' in op) {
                const r = await this.insertOne(op.insertOne.document, options);
                result.insertedCount++;
                result.insertedIds[i] = r.insertedId;
            }
            else if ('updateOne' in op) {
                const r = await this.updateOne(op.updateOne.filter, op.updateOne.update, { ...options, upsert: op.updateOne.upsert });
                result.matchedCount += r.matchedCount;
                result.modifiedCount += r.modifiedCount;
                result.upsertedCount += r.upsertedCount;
                if (r.upsertedId)
                    result.upsertedIds[i] = r.upsertedId;
            }
            else if ('updateMany' in op) {
                const r = await this.updateMany(op.updateMany.filter, op.updateMany.update, options);
                result.matchedCount += r.matchedCount;
                result.modifiedCount += r.modifiedCount;
            }
            else if ('deleteOne' in op) {
                const r = await this.deleteOne(op.deleteOne.filter, options);
                result.deletedCount += r.deletedCount;
            }
            else if ('deleteMany' in op) {
                const r = await this.deleteMany(op.deleteMany.filter, options);
                result.deletedCount += r.deletedCount;
            }
            else if ('replaceOne' in op) {
                const r = await this.replaceOne(op.replaceOne.filter, op.replaceOne.replacement, { ...options, upsert: op.replaceOne.upsert });
                result.matchedCount += r.matchedCount;
                result.modifiedCount += r.modifiedCount;
                result.upsertedCount += r.upsertedCount;
                if (r.upsertedId)
                    result.upsertedIds[i] = r.upsertedId;
            }
        }
        catch (err) {
            const error = {
                index: i,
                // Preserve an error's own numeric code when present.
                code: err instanceof Error && 'code' in err
                    ? err.code
                    : 500,
                message: String(err),
            };
            errors.push(error);
            if (ordered) {
                // Ordered mode: abort at the first failing operation.
                throw new errors_1.BulkWriteError(errors, i);
            }
        }
    }
    if (errors.length > 0) {
        // Unordered mode: report all failures after attempting everything.
        throw new errors_1.BulkWriteError(errors, operations.length - errors.length);
    }
    return result;
}
|
|
785
|
+
// ═══════════════════════════════════════════════════════
|
|
786
|
+
// Write concern / Read preference
|
|
787
|
+
// ═══════════════════════════════════════════════════════
|
|
788
|
+
/**
|
|
789
|
+
* Get the current write concern for this collection.
|
|
790
|
+
*/
|
|
791
|
+
getWriteConcern() {
|
|
792
|
+
return { ...this.writeConcern };
|
|
793
|
+
}
|
|
794
|
+
/**
|
|
795
|
+
* Set the write concern for this collection.
|
|
796
|
+
*/
|
|
797
|
+
setWriteConcern(wc) {
|
|
798
|
+
this.writeConcern = { ...wc };
|
|
799
|
+
}
|
|
800
|
+
/**
|
|
801
|
+
* Get the current read preference for this collection.
|
|
802
|
+
*/
|
|
803
|
+
getReadPreference() {
|
|
804
|
+
return this.readPreference;
|
|
805
|
+
}
|
|
806
|
+
/**
|
|
807
|
+
* Set the read preference for this collection.
|
|
808
|
+
*/
|
|
809
|
+
setReadPreference(rp) {
|
|
810
|
+
this.readPreference = rp;
|
|
811
|
+
}
|
|
812
|
+
// ═══════════════════════════════════════════════════════
|
|
813
|
+
// Index operations
|
|
814
|
+
// ═══════════════════════════════════════════════════════
|
|
815
|
+
/**
|
|
816
|
+
* Create an index on this collection.
|
|
817
|
+
*/
|
|
818
|
+
async createIndex(spec, options) {
|
|
819
|
+
await this.ensureLoaded();
|
|
820
|
+
const name = this.indexManager.createIndex(spec, options);
|
|
821
|
+
// Build index from existing docs
|
|
822
|
+
for (const id of this.docIndex.keys()) {
|
|
823
|
+
const doc = await this.readDoc(id);
|
|
824
|
+
if (doc) {
|
|
825
|
+
this.indexManager.getIndex(name)?.addDocument(doc);
|
|
826
|
+
}
|
|
827
|
+
}
|
|
828
|
+
await this.persistMeta();
|
|
829
|
+
return name;
|
|
830
|
+
}
|
|
831
|
+
/**
|
|
832
|
+
* Drop an index by name.
|
|
833
|
+
*/
|
|
834
|
+
async dropIndex(name) {
|
|
835
|
+
await this.ensureLoaded();
|
|
836
|
+
this.indexManager.dropIndex(name);
|
|
837
|
+
await this.persistMeta();
|
|
838
|
+
}
|
|
839
|
+
/**
|
|
840
|
+
* List all indexes on this collection.
|
|
841
|
+
*/
|
|
842
|
+
listIndexes() {
|
|
843
|
+
return this.indexManager.listIndexes();
|
|
844
|
+
}
|
|
845
|
+
// ═══════════════════════════════════════════════════════
|
|
846
|
+
// Change stream
|
|
847
|
+
// ═══════════════════════════════════════════════════════
|
|
848
|
+
/**
|
|
849
|
+
* Watch for changes on this collection.
|
|
850
|
+
*/
|
|
851
|
+
watch(listener) {
|
|
852
|
+
this.changeListeners.add(listener);
|
|
853
|
+
return () => {
|
|
854
|
+
this.changeListeners.delete(listener);
|
|
855
|
+
};
|
|
856
|
+
}
|
|
857
|
+
emitChange(operationType, doc, updateDescription) {
|
|
858
|
+
if (this.changeListeners.size === 0)
|
|
859
|
+
return;
|
|
860
|
+
const event = {
|
|
861
|
+
operationType,
|
|
862
|
+
documentKey: { _id: doc._id },
|
|
863
|
+
fullDocument: operationType !== 'delete' ? doc : undefined,
|
|
864
|
+
updateDescription: operationType === 'update' ? updateDescription : undefined,
|
|
865
|
+
ns: { db: this.dbName, coll: this.name },
|
|
866
|
+
timestamp: new Date(),
|
|
867
|
+
};
|
|
868
|
+
for (const listener of this.changeListeners) {
|
|
869
|
+
try {
|
|
870
|
+
listener(event);
|
|
871
|
+
}
|
|
872
|
+
catch {
|
|
873
|
+
// Don't let listener errors break the operation
|
|
874
|
+
}
|
|
875
|
+
}
|
|
876
|
+
}
|
|
877
|
+
// ═══════════════════════════════════════════════════════
|
|
878
|
+
// Transaction helpers (called by the session on commit)
|
|
879
|
+
// ═══════════════════════════════════════════════════════
|
|
880
|
+
/** Apply a transaction insert */
|
|
881
|
+
async _txInsert(doc) {
|
|
882
|
+
await this.writeDoc(doc, doc._id);
|
|
883
|
+
this.emitChange('insert', doc);
|
|
884
|
+
}
|
|
885
|
+
/** Apply a transaction update – writes a new block (copy-on-write). */
|
|
886
|
+
async _txUpdate(docId, after) {
|
|
887
|
+
await this.writeDoc(after, docId);
|
|
888
|
+
this.emitChange('update', after);
|
|
889
|
+
}
|
|
890
|
+
/** Apply a transaction delete – removes mapping only (copy-on-write). */
|
|
891
|
+
async _txDelete(docId, doc) {
|
|
892
|
+
await this.removeDoc(docId);
|
|
893
|
+
this.emitChange('delete', doc);
|
|
894
|
+
}
|
|
895
|
+
/**
|
|
896
|
+
* Rollback a transaction insert.
|
|
897
|
+
* Copy-on-write: just remove the mapping; the block remains in the store.
|
|
898
|
+
*/
|
|
899
|
+
async _txRollbackInsert(docId) {
|
|
900
|
+
await this.removeDoc(docId);
|
|
901
|
+
}
|
|
902
|
+
/**
|
|
903
|
+
* Rollback a transaction update.
|
|
904
|
+
* Copy-on-write: restore the old mapping. The old block still exists in
|
|
905
|
+
* the store (blocks are never deleted), so writeDoc will detect it via
|
|
906
|
+
* store.has() and simply re-point the index.
|
|
907
|
+
*/
|
|
908
|
+
async _txRollbackUpdate(docId, before) {
|
|
909
|
+
await this.writeDoc(before, docId);
|
|
910
|
+
}
|
|
911
|
+
/**
|
|
912
|
+
* Rollback a transaction delete.
|
|
913
|
+
* Copy-on-write: re-insert the mapping. The block still exists in the
|
|
914
|
+
* store since blocks are never deleted.
|
|
915
|
+
*/
|
|
916
|
+
async _txRollbackDelete(doc) {
|
|
917
|
+
await this.writeDoc(doc, doc._id);
|
|
918
|
+
}
|
|
919
|
+
/**
|
|
920
|
+
* Drop the entire collection – remove all document mappings and indexes.
|
|
921
|
+
*
|
|
922
|
+
* Copy-on-write: blocks in the store are never deleted. Only the
|
|
923
|
+
* collection's internal index and cache are cleared.
|
|
924
|
+
*/
|
|
925
|
+
async drop() {
|
|
926
|
+
await this.ensureLoaded();
|
|
927
|
+
// Stop all TTL timers
|
|
928
|
+
for (const timer of this.ttlTimers.values()) {
|
|
929
|
+
clearInterval(timer);
|
|
930
|
+
}
|
|
931
|
+
this.ttlTimers.clear();
|
|
932
|
+
// Remove all index entries
|
|
933
|
+
for (const id of this.docIndex.keys()) {
|
|
934
|
+
const doc = this.docCache.get(id);
|
|
935
|
+
if (doc)
|
|
936
|
+
this.indexManager.removeDocument(doc);
|
|
937
|
+
}
|
|
938
|
+
this.docIndex.clear();
|
|
939
|
+
this.docCache.clear();
|
|
940
|
+
await this.headRegistry.removeHead(this.dbName, this.name);
|
|
941
|
+
}
|
|
942
|
+
/** Configure the query engine's text search fields from our text index */
|
|
943
|
+
configureTextSearch() {
|
|
944
|
+
const fields = Object.keys(this.textIndexFields);
|
|
945
|
+
(0, queryEngine_1.setTextSearchFields)(fields);
|
|
946
|
+
}
|
|
947
|
+
}
|
|
948
|
+
exports.Collection = Collection;
|
|
949
|
+
// ═══════════════════════════════════════════════════════
// Head registry – tracks latest metadata block per collection
// ═══════════════════════════════════════════════════════
/**
 * In-memory registry mapping a `db:collection` pair to the block id of
 * that collection's latest metadata block.
 */
class HeadRegistry {
    constructor() {
        this.heads = new Map();
    }
    /** Process-wide shared registry, created lazily on first access. */
    static getInstance() {
        HeadRegistry.instance ??= new HeadRegistry();
        return HeadRegistry.instance;
    }
    /** Create a new independent registry (for testing) */
    static createIsolated() {
        return new HeadRegistry();
    }
    /**
     * Build the composite map key for a db/collection pair.
     * NOTE(review): a dbName containing ':' could collide with another
     * pair — confirm db names are constrained upstream.
     */
    makeKey(dbName, collectionName) {
        return `${dbName}:${collectionName}`;
    }
    /** Look up the head block id, or undefined if none is recorded. */
    getHead(dbName, collectionName) {
        const key = this.makeKey(dbName, collectionName);
        return this.heads.get(key);
    }
    /** Record (or replace) the head block id for a collection. */
    setHead(dbName, collectionName, blockId) {
        const key = this.makeKey(dbName, collectionName);
        this.heads.set(key, blockId);
    }
    /** Forget the head pointer for a single collection. */
    removeHead(dbName, collectionName) {
        const key = this.makeKey(dbName, collectionName);
        this.heads.delete(key);
    }
    /** Forget every head pointer. */
    clear() {
        this.heads.clear();
    }
    /**
     * Load head pointers from the persistence layer.
     * For the in-memory implementation this is a no-op.
     * Exists for API compatibility with IHeadRegistry / persistent registries.
     */
    async load() {
        // No-op — in-memory heads are always current.
    }
}
|
|
990
|
+
exports.HeadRegistry = HeadRegistry;
|
|
991
|
+
//# sourceMappingURL=collection.js.map
|