@push.rocks/smartmongo 2.0.12 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. package/dist_ts/00_commitinfo_data.js +2 -2
  2. package/dist_ts/congodb/congodb.plugins.d.ts +10 -0
  3. package/dist_ts/congodb/congodb.plugins.js +14 -0
  4. package/dist_ts/congodb/engine/AggregationEngine.d.ts +66 -0
  5. package/dist_ts/congodb/engine/AggregationEngine.js +189 -0
  6. package/dist_ts/congodb/engine/IndexEngine.d.ts +77 -0
  7. package/dist_ts/congodb/engine/IndexEngine.js +376 -0
  8. package/dist_ts/congodb/engine/QueryEngine.d.ts +54 -0
  9. package/dist_ts/congodb/engine/QueryEngine.js +271 -0
  10. package/dist_ts/congodb/engine/TransactionEngine.d.ts +85 -0
  11. package/dist_ts/congodb/engine/TransactionEngine.js +287 -0
  12. package/dist_ts/congodb/engine/UpdateEngine.d.ts +47 -0
  13. package/dist_ts/congodb/engine/UpdateEngine.js +461 -0
  14. package/dist_ts/congodb/errors/CongoErrors.d.ts +100 -0
  15. package/dist_ts/congodb/errors/CongoErrors.js +155 -0
  16. package/dist_ts/congodb/index.d.ts +19 -0
  17. package/dist_ts/congodb/index.js +26 -0
  18. package/dist_ts/congodb/server/CommandRouter.d.ts +51 -0
  19. package/dist_ts/congodb/server/CommandRouter.js +132 -0
  20. package/dist_ts/congodb/server/CongoServer.d.ts +95 -0
  21. package/dist_ts/congodb/server/CongoServer.js +227 -0
  22. package/dist_ts/congodb/server/WireProtocol.d.ts +117 -0
  23. package/dist_ts/congodb/server/WireProtocol.js +298 -0
  24. package/dist_ts/congodb/server/handlers/AdminHandler.d.ts +100 -0
  25. package/dist_ts/congodb/server/handlers/AdminHandler.js +568 -0
  26. package/dist_ts/congodb/server/handlers/AggregateHandler.d.ts +31 -0
  27. package/dist_ts/congodb/server/handlers/AggregateHandler.js +277 -0
  28. package/dist_ts/congodb/server/handlers/DeleteHandler.d.ts +8 -0
  29. package/dist_ts/congodb/server/handlers/DeleteHandler.js +83 -0
  30. package/dist_ts/congodb/server/handlers/FindHandler.d.ts +31 -0
  31. package/dist_ts/congodb/server/handlers/FindHandler.js +261 -0
  32. package/dist_ts/congodb/server/handlers/HelloHandler.d.ts +11 -0
  33. package/dist_ts/congodb/server/handlers/HelloHandler.js +62 -0
  34. package/dist_ts/congodb/server/handlers/IndexHandler.d.ts +20 -0
  35. package/dist_ts/congodb/server/handlers/IndexHandler.js +183 -0
  36. package/dist_ts/congodb/server/handlers/InsertHandler.d.ts +8 -0
  37. package/dist_ts/congodb/server/handlers/InsertHandler.js +76 -0
  38. package/dist_ts/congodb/server/handlers/UpdateHandler.d.ts +24 -0
  39. package/dist_ts/congodb/server/handlers/UpdateHandler.js +270 -0
  40. package/dist_ts/congodb/server/handlers/index.d.ts +8 -0
  41. package/dist_ts/congodb/server/handlers/index.js +10 -0
  42. package/dist_ts/congodb/server/index.d.ts +6 -0
  43. package/dist_ts/congodb/server/index.js +7 -0
  44. package/dist_ts/congodb/storage/FileStorageAdapter.d.ts +61 -0
  45. package/dist_ts/congodb/storage/FileStorageAdapter.js +396 -0
  46. package/dist_ts/congodb/storage/IStorageAdapter.d.ts +140 -0
  47. package/dist_ts/congodb/storage/IStorageAdapter.js +2 -0
  48. package/dist_ts/congodb/storage/MemoryStorageAdapter.d.ts +66 -0
  49. package/dist_ts/congodb/storage/MemoryStorageAdapter.js +367 -0
  50. package/dist_ts/congodb/storage/OpLog.d.ts +93 -0
  51. package/dist_ts/congodb/storage/OpLog.js +221 -0
  52. package/dist_ts/congodb/types/interfaces.d.ts +363 -0
  53. package/dist_ts/congodb/types/interfaces.js +2 -0
  54. package/dist_ts/index.d.ts +1 -0
  55. package/dist_ts/index.js +8 -6
  56. package/dist_ts/smartmongo.plugins.d.ts +1 -1
  57. package/dist_ts/smartmongo.plugins.js +2 -2
  58. package/npmextra.json +17 -7
  59. package/package.json +20 -12
  60. package/readme.hints.md +89 -1
  61. package/ts/00_commitinfo_data.ts +1 -1
  62. package/ts/congodb/congodb.plugins.ts +17 -0
  63. package/ts/congodb/engine/AggregationEngine.ts +283 -0
  64. package/ts/congodb/engine/IndexEngine.ts +479 -0
  65. package/ts/congodb/engine/QueryEngine.ts +301 -0
  66. package/ts/congodb/engine/TransactionEngine.ts +351 -0
  67. package/ts/congodb/engine/UpdateEngine.ts +506 -0
  68. package/ts/congodb/errors/CongoErrors.ts +181 -0
  69. package/ts/congodb/index.ts +37 -0
  70. package/ts/congodb/server/CommandRouter.ts +180 -0
  71. package/ts/congodb/server/CongoServer.ts +298 -0
  72. package/ts/congodb/server/WireProtocol.ts +416 -0
  73. package/ts/congodb/server/handlers/AdminHandler.ts +614 -0
  74. package/ts/congodb/server/handlers/AggregateHandler.ts +342 -0
  75. package/ts/congodb/server/handlers/DeleteHandler.ts +100 -0
  76. package/ts/congodb/server/handlers/FindHandler.ts +301 -0
  77. package/ts/congodb/server/handlers/HelloHandler.ts +78 -0
  78. package/ts/congodb/server/handlers/IndexHandler.ts +207 -0
  79. package/ts/congodb/server/handlers/InsertHandler.ts +91 -0
  80. package/ts/congodb/server/handlers/UpdateHandler.ts +315 -0
  81. package/ts/congodb/server/handlers/index.ts +10 -0
  82. package/ts/congodb/server/index.ts +10 -0
  83. package/ts/congodb/storage/FileStorageAdapter.ts +479 -0
  84. package/ts/congodb/storage/IStorageAdapter.ts +202 -0
  85. package/ts/congodb/storage/MemoryStorageAdapter.ts +443 -0
  86. package/ts/congodb/storage/OpLog.ts +282 -0
  87. package/ts/congodb/types/interfaces.ts +433 -0
  88. package/ts/index.ts +3 -0
  89. package/ts/smartmongo.plugins.ts +1 -1
@@ -0,0 +1,342 @@
1
+ import * as plugins from '../../congodb.plugins.js';
2
+ import type { ICommandHandler, IHandlerContext, ICursorState } from '../CommandRouter.js';
3
+ import { AggregationEngine } from '../../engine/AggregationEngine.js';
4
+
5
+ /**
6
+ * AggregateHandler - Handles aggregate command
7
+ */
8
+ export class AggregateHandler implements ICommandHandler {
9
+ private cursors: Map<bigint, ICursorState>;
10
+ private nextCursorId: () => bigint;
11
+
12
+ constructor(
13
+ cursors: Map<bigint, ICursorState>,
14
+ nextCursorId: () => bigint
15
+ ) {
16
+ this.cursors = cursors;
17
+ this.nextCursorId = nextCursorId;
18
+ }
19
+
20
+ async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
21
+ const { storage, database, command } = context;
22
+
23
+ const collection = command.aggregate;
24
+ const pipeline = command.pipeline || [];
25
+ const cursor = command.cursor || {};
26
+ const batchSize = cursor.batchSize || 101;
27
+
28
+ // Validate
29
+ if (typeof collection !== 'string' && collection !== 1) {
30
+ return {
31
+ ok: 0,
32
+ errmsg: 'aggregate command requires a collection name or 1',
33
+ code: 2,
34
+ codeName: 'BadValue',
35
+ };
36
+ }
37
+
38
+ if (!Array.isArray(pipeline)) {
39
+ return {
40
+ ok: 0,
41
+ errmsg: 'pipeline must be an array',
42
+ code: 2,
43
+ codeName: 'BadValue',
44
+ };
45
+ }
46
+
47
+ try {
48
+ // Get source documents
49
+ let documents: plugins.bson.Document[] = [];
50
+
51
+ if (collection === 1 || collection === '1') {
52
+ // Database-level aggregation (e.g., $listLocalSessions)
53
+ documents = [];
54
+ } else {
55
+ // Collection-level aggregation
56
+ const exists = await storage.collectionExists(database, collection);
57
+ if (exists) {
58
+ documents = await storage.findAll(database, collection);
59
+ }
60
+ }
61
+
62
+ // Handle $lookup and $graphLookup stages that reference other collections
63
+ const processedPipeline = await this.preprocessPipeline(
64
+ storage,
65
+ database,
66
+ pipeline,
67
+ documents
68
+ );
69
+
70
+ // Run aggregation
71
+ let results: plugins.bson.Document[];
72
+
73
+ // Check for special stages that we handle manually
74
+ if (this.hasSpecialStages(pipeline)) {
75
+ results = await this.executeWithSpecialStages(
76
+ storage,
77
+ database,
78
+ documents,
79
+ pipeline
80
+ );
81
+ } else {
82
+ results = AggregationEngine.aggregate(documents as any, processedPipeline);
83
+ }
84
+
85
+ // Handle $out and $merge stages
86
+ const lastStage = pipeline[pipeline.length - 1];
87
+ if (lastStage && lastStage.$out) {
88
+ await this.handleOut(storage, database, results, lastStage.$out);
89
+ return { ok: 1, cursor: { id: plugins.bson.Long.fromNumber(0), ns: `${database}.${collection}`, firstBatch: [] } };
90
+ }
91
+
92
+ if (lastStage && lastStage.$merge) {
93
+ await this.handleMerge(storage, database, results, lastStage.$merge);
94
+ return { ok: 1, cursor: { id: plugins.bson.Long.fromNumber(0), ns: `${database}.${collection}`, firstBatch: [] } };
95
+ }
96
+
97
+ // Build cursor response
98
+ const effectiveBatchSize = Math.min(batchSize, results.length);
99
+ const firstBatch = results.slice(0, effectiveBatchSize);
100
+ const remaining = results.slice(effectiveBatchSize);
101
+
102
+ let cursorId = BigInt(0);
103
+ if (remaining.length > 0) {
104
+ cursorId = this.nextCursorId();
105
+ this.cursors.set(cursorId, {
106
+ id: cursorId,
107
+ database,
108
+ collection: typeof collection === 'string' ? collection : '$cmd.aggregate',
109
+ documents: remaining,
110
+ position: 0,
111
+ batchSize,
112
+ createdAt: new Date(),
113
+ });
114
+ }
115
+
116
+ return {
117
+ ok: 1,
118
+ cursor: {
119
+ id: plugins.bson.Long.fromBigInt(cursorId),
120
+ ns: `${database}.${typeof collection === 'string' ? collection : '$cmd.aggregate'}`,
121
+ firstBatch,
122
+ },
123
+ };
124
+ } catch (error: any) {
125
+ return {
126
+ ok: 0,
127
+ errmsg: error.message || 'Aggregation failed',
128
+ code: 1,
129
+ codeName: 'InternalError',
130
+ };
131
+ }
132
+ }
133
+
134
+ /**
135
+ * Preprocess pipeline to handle cross-collection lookups
136
+ */
137
+ private async preprocessPipeline(
138
+ storage: any,
139
+ database: string,
140
+ pipeline: plugins.bson.Document[],
141
+ documents: plugins.bson.Document[]
142
+ ): Promise<plugins.bson.Document[]> {
143
+ // For now, return the pipeline as-is
144
+ // Cross-collection lookups are handled in executeWithSpecialStages
145
+ return pipeline;
146
+ }
147
+
148
+ /**
149
+ * Check if pipeline has stages that need special handling
150
+ */
151
+ private hasSpecialStages(pipeline: plugins.bson.Document[]): boolean {
152
+ return pipeline.some(stage =>
153
+ stage.$lookup ||
154
+ stage.$graphLookup ||
155
+ stage.$unionWith
156
+ );
157
+ }
158
+
159
+ /**
160
+ * Execute pipeline with special stage handling
161
+ */
162
+ private async executeWithSpecialStages(
163
+ storage: any,
164
+ database: string,
165
+ documents: plugins.bson.Document[],
166
+ pipeline: plugins.bson.Document[]
167
+ ): Promise<plugins.bson.Document[]> {
168
+ let results: plugins.bson.Document[] = [...documents];
169
+
170
+ for (const stage of pipeline) {
171
+ if (stage.$lookup) {
172
+ const lookupSpec = stage.$lookup;
173
+ const fromCollection = lookupSpec.from;
174
+
175
+ // Get foreign collection documents
176
+ const foreignExists = await storage.collectionExists(database, fromCollection);
177
+ const foreignDocs = foreignExists
178
+ ? await storage.findAll(database, fromCollection)
179
+ : [];
180
+
181
+ results = AggregationEngine.executeLookup(results as any, lookupSpec, foreignDocs);
182
+ } else if (stage.$graphLookup) {
183
+ const graphLookupSpec = stage.$graphLookup;
184
+ const fromCollection = graphLookupSpec.from;
185
+
186
+ const foreignExists = await storage.collectionExists(database, fromCollection);
187
+ const foreignDocs = foreignExists
188
+ ? await storage.findAll(database, fromCollection)
189
+ : [];
190
+
191
+ results = AggregationEngine.executeGraphLookup(results as any, graphLookupSpec, foreignDocs);
192
+ } else if (stage.$unionWith) {
193
+ let unionSpec = stage.$unionWith;
194
+ let unionColl: string;
195
+ let unionPipeline: plugins.bson.Document[] | undefined;
196
+
197
+ if (typeof unionSpec === 'string') {
198
+ unionColl = unionSpec;
199
+ } else {
200
+ unionColl = unionSpec.coll;
201
+ unionPipeline = unionSpec.pipeline;
202
+ }
203
+
204
+ const unionExists = await storage.collectionExists(database, unionColl);
205
+ const unionDocs = unionExists
206
+ ? await storage.findAll(database, unionColl)
207
+ : [];
208
+
209
+ results = AggregationEngine.executeUnionWith(results as any, unionDocs, unionPipeline);
210
+ } else if (stage.$facet) {
211
+ // Execute each facet pipeline separately
212
+ const facetResults: plugins.bson.Document = {};
213
+
214
+ for (const [facetName, facetPipeline] of Object.entries(stage.$facet)) {
215
+ const facetDocs = await this.executeWithSpecialStages(
216
+ storage,
217
+ database,
218
+ results,
219
+ facetPipeline as plugins.bson.Document[]
220
+ );
221
+ facetResults[facetName] = facetDocs;
222
+ }
223
+
224
+ results = [facetResults];
225
+ } else {
226
+ // Regular stage - pass to mingo
227
+ results = AggregationEngine.aggregate(results as any, [stage]);
228
+ }
229
+ }
230
+
231
+ return results;
232
+ }
233
+
234
+ /**
235
+ * Handle $out stage - write results to a collection
236
+ */
237
+ private async handleOut(
238
+ storage: any,
239
+ database: string,
240
+ results: plugins.bson.Document[],
241
+ outSpec: string | { db?: string; coll: string }
242
+ ): Promise<void> {
243
+ let targetDb = database;
244
+ let targetColl: string;
245
+
246
+ if (typeof outSpec === 'string') {
247
+ targetColl = outSpec;
248
+ } else {
249
+ targetDb = outSpec.db || database;
250
+ targetColl = outSpec.coll;
251
+ }
252
+
253
+ // Drop existing collection
254
+ await storage.dropCollection(targetDb, targetColl);
255
+
256
+ // Create new collection and insert results
257
+ await storage.createCollection(targetDb, targetColl);
258
+
259
+ for (const doc of results) {
260
+ if (!doc._id) {
261
+ doc._id = new plugins.bson.ObjectId();
262
+ }
263
+ await storage.insertOne(targetDb, targetColl, doc);
264
+ }
265
+ }
266
+
267
+ /**
268
+ * Handle $merge stage - merge results into a collection
269
+ */
270
+ private async handleMerge(
271
+ storage: any,
272
+ database: string,
273
+ results: plugins.bson.Document[],
274
+ mergeSpec: any
275
+ ): Promise<void> {
276
+ let targetDb = database;
277
+ let targetColl: string;
278
+
279
+ if (typeof mergeSpec === 'string') {
280
+ targetColl = mergeSpec;
281
+ } else if (typeof mergeSpec.into === 'string') {
282
+ targetColl = mergeSpec.into;
283
+ } else {
284
+ targetDb = mergeSpec.into.db || database;
285
+ targetColl = mergeSpec.into.coll;
286
+ }
287
+
288
+ const on = mergeSpec.on || '_id';
289
+ const whenMatched = mergeSpec.whenMatched || 'merge';
290
+ const whenNotMatched = mergeSpec.whenNotMatched || 'insert';
291
+
292
+ // Ensure target collection exists
293
+ await storage.createCollection(targetDb, targetColl);
294
+
295
+ for (const doc of results) {
296
+ // Find matching document
297
+ const existingDocs = await storage.findAll(targetDb, targetColl);
298
+ const onFields = Array.isArray(on) ? on : [on];
299
+
300
+ let matchingDoc = null;
301
+ for (const existing of existingDocs) {
302
+ let matches = true;
303
+ for (const field of onFields) {
304
+ if (JSON.stringify(existing[field]) !== JSON.stringify(doc[field])) {
305
+ matches = false;
306
+ break;
307
+ }
308
+ }
309
+ if (matches) {
310
+ matchingDoc = existing;
311
+ break;
312
+ }
313
+ }
314
+
315
+ if (matchingDoc) {
316
+ // Handle whenMatched
317
+ if (whenMatched === 'replace') {
318
+ await storage.updateById(targetDb, targetColl, matchingDoc._id, doc);
319
+ } else if (whenMatched === 'keepExisting') {
320
+ // Do nothing
321
+ } else if (whenMatched === 'merge') {
322
+ const merged = { ...matchingDoc, ...doc };
323
+ await storage.updateById(targetDb, targetColl, matchingDoc._id, merged);
324
+ } else if (whenMatched === 'fail') {
325
+ throw new Error('Document matched but whenMatched is fail');
326
+ }
327
+ } else {
328
+ // Handle whenNotMatched
329
+ if (whenNotMatched === 'insert') {
330
+ if (!doc._id) {
331
+ doc._id = new plugins.bson.ObjectId();
332
+ }
333
+ await storage.insertOne(targetDb, targetColl, doc);
334
+ } else if (whenNotMatched === 'discard') {
335
+ // Do nothing
336
+ } else if (whenNotMatched === 'fail') {
337
+ throw new Error('Document not matched but whenNotMatched is fail');
338
+ }
339
+ }
340
+ }
341
+ }
342
+ }
@@ -0,0 +1,100 @@
1
+ import * as plugins from '../../congodb.plugins.js';
2
+ import type { ICommandHandler, IHandlerContext } from '../CommandRouter.js';
3
+ import { QueryEngine } from '../../engine/QueryEngine.js';
4
+
5
/**
 * DeleteHandler - Handles delete commands
 */
export class DeleteHandler implements ICommandHandler {
  /**
   * Execute a `delete` command: for each delete spec, filter the target
   * collection with QueryEngine and remove the matching documents — all
   * of them when `limit` is 0, otherwise just the first match.
   *
   * @param context - storage adapter, target database, the raw command,
   *                  and optional OP_MSG document sequences
   * @returns `{ ok: 1, n }` with the total deleted count; per-spec
   *          failures are reported via `writeErrors` while `ok` stays 1
   *          (MongoDB write-command semantics)
   */
  async handle(context: IHandlerContext): Promise<plugins.bson.Document> {
    const { storage, database, command, documentSequences } = context;

    const collection = command.delete;
    if (typeof collection !== 'string') {
      return {
        ok: 0,
        errmsg: 'delete command requires a collection name',
        code: 2,
        codeName: 'BadValue',
      };
    }

    // Get deletes from command or document sequences
    let deletes: plugins.bson.Document[] = command.deletes || [];

    // Check for OP_MSG document sequences (a kind-1 'deletes' section
    // takes precedence over the inline command body)
    if (documentSequences && documentSequences.has('deletes')) {
      deletes = documentSequences.get('deletes')!;
    }

    if (!Array.isArray(deletes) || deletes.length === 0) {
      return {
        ok: 0,
        errmsg: 'delete command requires deletes array',
        code: 2,
        codeName: 'BadValue',
      };
    }

    // ordered defaults to true: stop at the first failing delete spec
    const ordered = command.ordered !== false;
    const writeErrors: plugins.bson.Document[] = [];
    let totalDeleted = 0;

    // Check if collection exists
    const exists = await storage.collectionExists(database, collection);
    if (!exists) {
      // Collection doesn't exist, return success with 0 deleted
      return { ok: 1, n: 0 };
    }

    for (let i = 0; i < deletes.length; i++) {
      const deleteSpec = deletes[i];
      // Wire format uses 'q'; 'filter' accepted as a lenient fallback
      const filter = deleteSpec.q || deleteSpec.filter || {};
      const limit = deleteSpec.limit;

      // limit: 0 means delete all matching, limit: 1 means delete one
      const deleteAll = limit === 0;

      try {
        // Get all documents (re-read per spec so earlier deletions in
        // this command are reflected)
        const documents = await storage.findAll(database, collection);

        // Apply filter
        const matchingDocs = QueryEngine.filter(documents, filter);

        if (matchingDocs.length === 0) {
          continue;
        }

        // Determine which documents to delete
        const docsToDelete = deleteAll ? matchingDocs : matchingDocs.slice(0, 1);

        // Delete the documents
        const idsToDelete = docsToDelete.map(doc => doc._id);
        const deleted = await storage.deleteByIds(database, collection, idsToDelete);
        totalDeleted += deleted;
      } catch (error: any) {
        writeErrors.push({
          index: i,
          code: error.code || 1,
          errmsg: error.message || 'Delete failed',
        });

        if (ordered) {
          break;
        }
      }
    }

    const response: plugins.bson.Document = {
      ok: 1,
      n: totalDeleted,
    };

    if (writeErrors.length > 0) {
      response.writeErrors = writeErrors;
    }

    return response;
  }
}