@zodmon/core 0.8.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1124 -113
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1619 -163
- package/dist/index.d.ts +1619 -163
- package/dist/index.js +1084 -97
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -21,29 +21,52 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
21
21
|
var index_exports = {};
|
|
22
22
|
__export(index_exports, {
|
|
23
23
|
$: () => $,
|
|
24
|
+
$addToSet: () => $addToSet,
|
|
24
25
|
$and: () => $and,
|
|
26
|
+
$avg: () => $avg,
|
|
27
|
+
$count: () => $count,
|
|
25
28
|
$eq: () => $eq,
|
|
26
29
|
$exists: () => $exists,
|
|
30
|
+
$first: () => $first,
|
|
27
31
|
$gt: () => $gt,
|
|
28
32
|
$gte: () => $gte,
|
|
29
33
|
$in: () => $in,
|
|
34
|
+
$last: () => $last,
|
|
30
35
|
$lt: () => $lt,
|
|
31
36
|
$lte: () => $lte,
|
|
37
|
+
$max: () => $max,
|
|
38
|
+
$min: () => $min,
|
|
32
39
|
$ne: () => $ne,
|
|
33
40
|
$nin: () => $nin,
|
|
34
41
|
$nor: () => $nor,
|
|
35
42
|
$not: () => $not,
|
|
36
43
|
$or: () => $or,
|
|
44
|
+
$push: () => $push,
|
|
37
45
|
$regex: () => $regex,
|
|
46
|
+
$sum: () => $sum,
|
|
47
|
+
AggregatePipeline: () => AggregatePipeline,
|
|
38
48
|
CollectionHandle: () => CollectionHandle,
|
|
39
49
|
Database: () => Database,
|
|
40
50
|
IndexBuilder: () => IndexBuilder,
|
|
41
51
|
TypedFindCursor: () => TypedFindCursor,
|
|
52
|
+
ZodmonAuthError: () => ZodmonAuthError,
|
|
53
|
+
ZodmonBulkWriteError: () => ZodmonBulkWriteError,
|
|
54
|
+
ZodmonDocValidationError: () => ZodmonDocValidationError,
|
|
55
|
+
ZodmonDuplicateKeyError: () => ZodmonDuplicateKeyError,
|
|
56
|
+
ZodmonError: () => ZodmonError,
|
|
57
|
+
ZodmonIndexError: () => ZodmonIndexError,
|
|
58
|
+
ZodmonNetworkError: () => ZodmonNetworkError,
|
|
42
59
|
ZodmonNotFoundError: () => ZodmonNotFoundError,
|
|
60
|
+
ZodmonQueryError: () => ZodmonQueryError,
|
|
61
|
+
ZodmonTimeoutError: () => ZodmonTimeoutError,
|
|
43
62
|
ZodmonValidationError: () => ZodmonValidationError,
|
|
63
|
+
ZodmonWriteConflictError: () => ZodmonWriteConflictError,
|
|
64
|
+
aggregate: () => aggregate,
|
|
44
65
|
checkUnindexedFields: () => checkUnindexedFields,
|
|
45
66
|
collection: () => collection,
|
|
67
|
+
createAccumulatorBuilder: () => createAccumulatorBuilder,
|
|
46
68
|
createClient: () => createClient,
|
|
69
|
+
createExpressionBuilder: () => createExpressionBuilder,
|
|
47
70
|
deleteMany: () => deleteMany,
|
|
48
71
|
deleteOne: () => deleteOne,
|
|
49
72
|
extractComparableOptions: () => extractComparableOptions,
|
|
@@ -69,12 +92,913 @@ __export(index_exports, {
|
|
|
69
92
|
toCompoundIndexSpec: () => toCompoundIndexSpec,
|
|
70
93
|
toFieldIndexSpec: () => toFieldIndexSpec,
|
|
71
94
|
updateMany: () => updateMany,
|
|
72
|
-
updateOne: () => updateOne
|
|
95
|
+
updateOne: () => updateOne,
|
|
96
|
+
wrapMongoError: () => wrapMongoError
|
|
73
97
|
});
|
|
74
98
|
module.exports = __toCommonJS(index_exports);
|
|
75
99
|
|
|
100
|
+
// src/aggregate/expressions.ts
// Standalone accumulator factories. Each returns a tagged { __accum, expr }
// wrapper; the `field` argument is passed through verbatim (callers supply
// "$field" paths or literals themselves, unlike the builder variants).
var makeAccum = (op) => (field) => ({ __accum: true, expr: { [op]: field } });
var $count = () => ({ __accum: true, expr: { $sum: 1 } });
var $sum = makeAccum("$sum");
var $avg = makeAccum("$avg");
var $min = makeAccum("$min");
var $max = makeAccum("$max");
var $first = makeAccum("$first");
var $last = makeAccum("$last");
var $push = makeAccum("$push");
var $addToSet = makeAccum("$addToSet");
|
|
137
|
+
/**
 * Build the accumulator helper object handed to `groupBy()` callbacks.
 * Each method returns the same tagged { __accum, expr } shape the
 * standalone $sum/$avg/etc. factories produce, but takes bare field
 * names and prefixes them with "$" automatically.
 */
function createAccumulatorBuilder() {
  // Wrap a raw $group accumulator expression in the tagged shape.
  const tag = (expr) => ({ __accum: true, expr });
  const ref = (field) => `$${field}`;
  return {
    count: () => tag({ $sum: 1 }),
    // sum() accepts either a numeric literal or a field name.
    sum: (field) => tag({ $sum: typeof field === "number" ? field : ref(field) }),
    avg: (field) => tag({ $avg: ref(field) }),
    min: (field) => tag({ $min: ref(field) }),
    max: (field) => tag({ $max: ref(field) }),
    first: (field) => tag({ $first: ref(field) }),
    last: (field) => tag({ $last: ref(field) }),
    push: (field) => tag({ $push: ref(field) }),
    addToSet: (field) => tag({ $addToSet: ref(field) })
    // biome-ignore lint/suspicious/noExplicitAny: Runtime implementation uses string field names and returns plain objects — TypeScript cannot verify that the runtime Accumulator objects match the generic AccumulatorBuilder<T> return types. Safe because type resolution happens at compile time via AccumulatorBuilder<T>, and runtime values are identical to what the standalone $min/$max/etc. produce.
  };
}
|
|
154
|
+
/**
 * Build the expression helper object handed to `addFields()` callbacks.
 * Every method returns a tagged { __expr, value } wrapper around a raw
 * MongoDB aggregation expression; addFields() unwraps the `value` later.
 */
function createExpressionBuilder() {
  // Field reference: "age" -> "$age".
  const ref = (field) => `$${field}`;
  // Numbers pass through as literals; strings are treated as field names.
  const numOrRef = (v) => (typeof v === "number" ? v : `$${v}`);
  // Tag a raw expression so addFields() can recognize and unwrap it.
  const wrap = (value) => ({ __expr: true, value });
  // concat() heuristic: identifier-looking strings become field refs,
  // anything else (spaces, punctuation, …) stays a literal.
  const IDENT = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
  return {
    // Arithmetic
    add: (field, value) => wrap({ $add: [ref(field), numOrRef(value)] }),
    subtract: (field, value) => wrap({ $subtract: [ref(field), numOrRef(value)] }),
    multiply: (field, value) => wrap({ $multiply: [ref(field), numOrRef(value)] }),
    divide: (field, value) => wrap({ $divide: [ref(field), numOrRef(value)] }),
    mod: (field, value) => wrap({ $mod: [ref(field), numOrRef(value)] }),
    abs: (field) => wrap({ $abs: ref(field) }),
    ceil: (field) => wrap({ $ceil: ref(field) }),
    floor: (field) => wrap({ $floor: ref(field) }),
    round: (field, place = 0) => wrap({ $round: [ref(field), place] }),
    // String
    concat: (...parts) => wrap({ $concat: parts.map((p) => (IDENT.test(p) ? ref(p) : p)) }),
    toLower: (field) => wrap({ $toLower: ref(field) }),
    toUpper: (field) => wrap({ $toUpper: ref(field) }),
    trim: (field) => wrap({ $trim: { input: ref(field) } }),
    substr: (field, start, length) => wrap({ $substrBytes: [ref(field), start, length] }),
    // Comparison — the comparison value is used verbatim (no field-ref coercion).
    eq: (field, value) => wrap({ $eq: [ref(field), value] }),
    gt: (field, value) => wrap({ $gt: [ref(field), value] }),
    gte: (field, value) => wrap({ $gte: [ref(field), value] }),
    lt: (field, value) => wrap({ $lt: [ref(field), value] }),
    lte: (field, value) => wrap({ $lte: [ref(field), value] }),
    ne: (field, value) => wrap({ $ne: [ref(field), value] }),
    // Date
    year: (field) => wrap({ $year: ref(field) }),
    month: (field) => wrap({ $month: ref(field) }),
    dayOfMonth: (field) => wrap({ $dayOfMonth: ref(field) }),
    // Array
    size: (field) => wrap({ $size: ref(field) }),
    // Conditional — `condition` is an already-wrapped expression; unwrap it.
    cond: (condition, thenValue, elseValue) => wrap({ $cond: [condition.value, thenValue, elseValue] }),
    ifNull: (field, fallback) => wrap({ $ifNull: [ref(field), fallback] })
    // biome-ignore lint/suspicious/noExplicitAny: Runtime implementation uses string field names — TypeScript cannot verify generic ExpressionBuilder<T> return types match. Safe because type resolution happens at compile time.
  };
}
|
|
200
|
+
|
|
201
|
+
// src/errors/wrap.ts
|
|
202
|
+
var import_mongodb = require("mongodb");
|
|
203
|
+
|
|
204
|
+
// src/errors/base.ts
/**
 * Base class for all Zodmon errors.
 * Carries the name of the collection the failing operation targeted and,
 * when available, the underlying error via `cause`.
 */
var ZodmonError = class extends Error {
  name = "ZodmonError";
  /** The MongoDB collection name associated with this error. */
  collection;
  /** The underlying error that caused this error, if any. */
  cause;
  constructor(message, collectionName, options) {
    super(message);
    this.collection = collectionName;
    // Only attach `cause` when one was actually supplied.
    const underlying = options?.cause;
    if (underlying) {
      this.cause = underlying;
    }
  }
};
|
|
219
|
+
|
|
220
|
+
// src/errors/auth.ts
/**
 * Raised for MongoDB authentication/authorization failures
 * (server error codes 13 and 18).
 */
var ZodmonAuthError = class extends ZodmonError {
  name = "ZodmonAuthError";
  /** The MongoDB error code (13 or 18). */
  code;
  constructor(collection2, code, cause) {
    // Code 18 → credential failure; anything else here (13) → not authorized.
    const message = code === 18
      ? `Authentication failed for "${collection2}": check connection credentials`
      : `Not authorized to perform this operation on "${collection2}"`;
    super(message, collection2, { cause });
    this.code = code;
  }
};
|
|
231
|
+
|
|
232
|
+
// src/errors/bulk-write.ts
/**
 * Raised when a bulk write partially or fully fails.
 * Summarizes the successful counts from the driver's partial result and
 * normalizes each individual write error into { index, code, message }.
 */
var ZodmonBulkWriteError = class extends ZodmonError {
  name = "ZodmonBulkWriteError";
  /** Number of documents successfully inserted. */
  insertedCount;
  /** Number of documents matched by update filters. */
  matchedCount;
  /** Number of documents actually modified. */
  modifiedCount;
  /** Number of documents deleted. */
  deletedCount;
  /** Individual write errors with their operation index, code, and message. */
  writeErrors;
  constructor(collection2, cause, totalOps) {
    const bulkErr = cause;
    const result = bulkErr["result"] ?? {};
    // Normalize the driver's write errors; the message may live under
    // either "errmsg" or "message" depending on the server/driver version.
    const writeErrors = (bulkErr["writeErrors"] ?? []).map((raw) => ({
      index: raw["index"] ?? 0,
      code: raw["code"] ?? 0,
      message: raw["errmsg"] ?? raw["message"] ?? "unknown error"
    }));
    const failedMsg = totalOps !== undefined
      ? `${writeErrors.length} of ${totalOps} operations failed`
      : `${writeErrors.length} operations failed`;
    super(`Bulk write failed on "${collection2}": ${failedMsg}`, collection2, { cause });
    const countOf = (key) => result[key] ?? 0;
    this.insertedCount = countOf("insertedCount");
    this.matchedCount = countOf("matchedCount");
    this.modifiedCount = countOf("modifiedCount");
    this.deletedCount = countOf("deletedCount");
    this.writeErrors = writeErrors;
  }
};
|
|
263
|
+
|
|
264
|
+
// src/errors/doc-validation.ts
/**
 * Raised when the server rejects a document against a collection's
 * JSON-schema validator (server error code 121).
 */
var ZodmonDocValidationError = class extends ZodmonError {
  name = "ZodmonDocValidationError";
  /** Server-provided validation failure details. */
  errInfo;
  constructor(collection2, errInfo, cause) {
    const message = `Server-side document validation failed for "${collection2}": ${cause.message}`;
    super(message, collection2, { cause });
    this.errInfo = errInfo;
  }
};
|
|
278
|
+
|
|
279
|
+
// src/errors/duplicate-key.ts
// Fallback parsers used when the server omits structured keyPattern/keyValue.
var INDEX_REGEX = /index:\s+(\S+)/;
var DUP_KEY_FIELD_REGEX = /dup key:\s*\{\s*(\w+):/;
/**
 * Raised when a write violates a unique index.
 * Prefers the server's structured `keyPattern`/`keyValue` fields and falls
 * back to scraping the error message text when they are absent.
 */
var ZodmonDuplicateKeyError = class extends ZodmonError {
  name = "ZodmonDuplicateKeyError";
  /** The first field that caused the duplicate key violation. */
  field;
  /** The duplicate value, or `undefined` if it could not be extracted. */
  value;
  /** The name of the index that was violated. */
  index;
  /** The key pattern of the violated index (e.g. `{ email: 1 }`). */
  keyPattern;
  /** The key values that caused the violation. */
  keyValue;
  constructor(collection2, cause) {
    const serverErr = cause;
    const kp = serverErr["keyPattern"];
    const kv = serverErr["keyValue"];
    let field = "unknown";
    let value;
    let keyPattern = {};
    let keyValue = {};
    if (kp && kv) {
      // Structured path: the server told us exactly which key was violated.
      field = Object.keys(kp)[0] ?? "unknown";
      value = kv[field];
      keyPattern = kp;
      keyValue = kv;
    } else {
      // Fallback path: scrape the field name out of the message text.
      field = cause.message.match(DUP_KEY_FIELD_REGEX)?.[1] ?? "unknown";
      if (field !== "unknown") {
        keyPattern = { [field]: 1 };
      }
    }
    const index2 = cause.message.match(INDEX_REGEX)?.[1] ?? "unknown";
    const valueStr = typeof value === "string" ? `"${value}"` : String(value);
    super(
      `Duplicate key in "${collection2}": ${field} = ${valueStr} (index: ${index2})`,
      collection2,
      { cause }
    );
    this.field = field;
    this.value = value;
    this.index = index2;
    this.keyPattern = keyPattern;
    this.keyValue = keyValue;
  }
};
|
|
330
|
+
|
|
331
|
+
// src/errors/index-error.ts
/**
 * Raised for index creation/conflict failures
 * (server error codes 67, 85, and 86).
 */
var ZodmonIndexError = class extends ZodmonError {
  name = "ZodmonIndexError";
  /** The MongoDB error code (67, 85, or 86). */
  code;
  constructor(collection2, code, errmsg, cause) {
    let prefix;
    if (code === 67) {
      prefix = "Cannot create index";
    } else if (code === 85) {
      prefix = "Index options conflict";
    } else {
      prefix = "Index key specs conflict";
    }
    super(`${prefix} on "${collection2}": ${errmsg}`, collection2, { cause });
    this.code = code;
  }
};
|
|
342
|
+
|
|
343
|
+
// src/errors/network.ts
/** Raised when the driver reports a network-level failure. */
var ZodmonNetworkError = class extends ZodmonError {
  name = "ZodmonNetworkError";
  constructor(collection2, cause) {
    const message = `Network error on "${collection2}": ${cause.message}`;
    super(message, collection2, { cause });
  }
};
|
|
350
|
+
|
|
351
|
+
// src/errors/query.ts
/**
 * Raised for malformed or resource-exceeding queries
 * (server error codes 2, 9, and 292).
 */
var ZodmonQueryError = class extends ZodmonError {
  name = "ZodmonQueryError";
  /** The MongoDB error code (2, 9, or 292). */
  code;
  constructor(collection2, code, errmsg, cause) {
    let message;
    if (code === 292) {
      message = `Query exceeded memory limit on "${collection2}": enable allowDiskUse for large sorts or aggregations`;
    } else if (code === 9) {
      message = `Failed to parse query on "${collection2}": ${errmsg}`;
    } else {
      message = `Bad value in query on "${collection2}": ${errmsg}`;
    }
    super(message, collection2, { cause });
    this.code = code;
  }
};
|
|
362
|
+
|
|
363
|
+
// src/errors/timeout.ts
/**
 * Raised when an operation exceeds the server-side time limit
 * (server error codes 50 and 262).
 */
var ZodmonTimeoutError = class extends ZodmonError {
  name = "ZodmonTimeoutError";
  /** The MongoDB error code (50 or 262). */
  code;
  constructor(collection2, code, cause) {
    const message = `Operation timed out on "${collection2}": exceeded server time limit`;
    super(message, collection2, { cause });
    this.code = code;
  }
};
|
|
375
|
+
|
|
376
|
+
// src/errors/write-conflict.ts
/**
 * Raised on transactional write conflicts (server error code 112);
 * the caller is expected to retry the transaction.
 */
var ZodmonWriteConflictError = class extends ZodmonError {
  name = "ZodmonWriteConflictError";
  constructor(collection2, cause) {
    const message = `Write conflict in "${collection2}": another operation modified this document concurrently \u2014 retry the transaction`;
    super(message, collection2, { cause });
  }
};
|
|
387
|
+
|
|
388
|
+
// src/errors/wrap.ts
/**
 * Translate any thrown value into the appropriate Zodmon error and rethrow.
 * Always throws — it never returns normally.
 *
 * Dispatch order: already-wrapped ZodmonError → driver bulk-write /
 * network classes → server error codes → generic Error → non-Error value.
 */
function wrapMongoError(err, collection2) {
  // Already one of ours: rethrow untouched.
  if (err instanceof ZodmonError) {
    throw err;
  }
  if (err instanceof import_mongodb.MongoBulkWriteError) {
    throw new ZodmonBulkWriteError(collection2, err);
  }
  if (err instanceof import_mongodb.MongoNetworkError) {
    throw new ZodmonNetworkError(collection2, err);
  }
  if (err instanceof import_mongodb.MongoServerError) {
    const code = err.code;
    if (code === 11000 || code === 11001) {
      throw new ZodmonDuplicateKeyError(collection2, err);
    }
    if (code === 112) {
      throw new ZodmonWriteConflictError(collection2, err);
    }
    if (code === 50 || code === 262) {
      throw new ZodmonTimeoutError(collection2, code, err);
    }
    if (code === 13 || code === 18) {
      throw new ZodmonAuthError(collection2, code, err);
    }
    if (code === 67 || code === 85 || code === 86) {
      throw new ZodmonIndexError(collection2, code, err.message, err);
    }
    if (code === 2 || code === 9 || code === 292) {
      throw new ZodmonQueryError(collection2, code, err.message, err);
    }
    if (code === 121) {
      throw new ZodmonDocValidationError(collection2, err.errInfo, err);
    }
    // Any other server error: wrap generically, preserving the original.
    throw new ZodmonError(`MongoDB error on "${collection2}": ${err.message}`, collection2, {
      cause: err
    });
  }
  if (err instanceof Error) {
    throw new ZodmonError(`Unexpected error on "${collection2}": ${err.message}`, collection2, {
      cause: err
    });
  }
  // Non-Error throwables (strings, objects, …).
  throw new ZodmonError(`Unexpected error on "${collection2}": ${String(err)}`, collection2);
}
|
|
439
|
+
|
|
440
|
+
// src/schema/ref.ts
|
|
441
|
+
var import_zod = require("zod");
|
|
442
|
+
// Module-private registry mapping a Zod schema object to its ref metadata.
var refMetadata = /* @__PURE__ */ new WeakMap();
/**
 * Look up ref metadata previously attached to a schema via `.ref()`.
 * Returns `undefined` for primitives, `null`, and unregistered schemas.
 */
function getRefMetadata(schema) {
  // WeakMap keys must be objects; reject null and primitives up front.
  const isObjectKey = typeof schema === "object" && schema !== null;
  return isObjectKey ? refMetadata.get(schema) : void 0;
}
|
|
447
|
+
// Sentinel symbol marking a prototype that has already been patched.
var REF_GUARD = /* @__PURE__ */ Symbol.for("zodmon_ref");
/**
 * Install the `.ref(collection)` extension on zod's ZodType prototype.
 * Idempotent: a non-configurable guard symbol prevents double-patching.
 */
function installRefExtension() {
  const proto = import_zod.z.ZodType.prototype;
  if (REF_GUARD in proto) {
    return;
  }
  Object.defineProperty(proto, "ref", {
    // Record the target collection for this schema and return it for chaining.
    value: function (collection2) {
      refMetadata.set(this, { collection: collection2 });
      return this;
    },
    enumerable: true,
    configurable: true,
    writable: true
  });
  // Descriptor defaults (enumerable/configurable/writable all false) make
  // the guard hidden and immovable.
  Object.defineProperty(proto, REF_GUARD, { value: true });
}
|
|
467
|
+
|
|
468
|
+
// src/aggregate/pipeline.ts
|
|
469
|
+
var AggregatePipeline = class _AggregatePipeline {
|
|
470
|
+
definition;
|
|
471
|
+
nativeCollection;
|
|
472
|
+
stages;
|
|
473
|
+
constructor(definition, nativeCollection, stages) {
|
|
474
|
+
this.definition = definition;
|
|
475
|
+
this.nativeCollection = nativeCollection;
|
|
476
|
+
this.stages = stages;
|
|
477
|
+
}
|
|
478
|
+
/**
|
|
479
|
+
* Append an arbitrary aggregation stage to the pipeline (escape hatch).
|
|
480
|
+
*
|
|
481
|
+
* Returns a new pipeline instance with the stage appended — the
|
|
482
|
+
* original pipeline is not modified.
|
|
483
|
+
*
|
|
484
|
+
* Optionally accepts a type parameter `TNew` to change the output
|
|
485
|
+
* type when the stage transforms the document shape.
|
|
486
|
+
*
|
|
487
|
+
* @typeParam TNew - The output type after this stage. Defaults to the current output type.
|
|
488
|
+
* @param stage - A raw MongoDB aggregation stage document (e.g. `{ $match: { ... } }`).
|
|
489
|
+
* @returns A new pipeline with the stage appended.
|
|
490
|
+
*
|
|
491
|
+
* @example
|
|
492
|
+
* ```ts
|
|
493
|
+
* const admins = aggregate(users)
|
|
494
|
+
* .raw({ $match: { role: 'admin' } })
|
|
495
|
+
* .toArray()
|
|
496
|
+
* ```
|
|
497
|
+
*
|
|
498
|
+
* @example
|
|
499
|
+
* ```ts
|
|
500
|
+
* // Change output type with a $project stage
|
|
501
|
+
* const names = aggregate(users)
|
|
502
|
+
* .raw<{ name: string }>({ $project: { name: 1, _id: 0 } })
|
|
503
|
+
* .toArray()
|
|
504
|
+
* ```
|
|
505
|
+
*/
|
|
506
|
+
raw(stage) {
|
|
507
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
508
|
+
...this.stages,
|
|
509
|
+
stage
|
|
510
|
+
]);
|
|
511
|
+
}
|
|
512
|
+
/**
|
|
513
|
+
* Execute the pipeline and return all results as an array.
|
|
514
|
+
*
|
|
515
|
+
* @returns A promise resolving to the array of output documents.
|
|
516
|
+
*
|
|
517
|
+
* @example
|
|
518
|
+
* ```ts
|
|
519
|
+
* const results = await aggregate(users)
|
|
520
|
+
* .raw({ $match: { age: { $gte: 18 } } })
|
|
521
|
+
* .toArray()
|
|
522
|
+
* ```
|
|
523
|
+
*/
|
|
524
|
+
async toArray() {
|
|
525
|
+
try {
|
|
526
|
+
const cursor = this.nativeCollection.aggregate(this.stages);
|
|
527
|
+
return await cursor.toArray();
|
|
528
|
+
} catch (err) {
|
|
529
|
+
wrapMongoError(err, this.definition.name);
|
|
530
|
+
}
|
|
531
|
+
}
|
|
532
|
+
/**
|
|
533
|
+
* Stream pipeline results one document at a time via `for await...of`.
|
|
534
|
+
*
|
|
535
|
+
* @returns An async generator yielding output documents.
|
|
536
|
+
*
|
|
537
|
+
* @example
|
|
538
|
+
* ```ts
|
|
539
|
+
* for await (const user of aggregate(users).raw({ $match: { role: 'admin' } })) {
|
|
540
|
+
* console.log(user.name)
|
|
541
|
+
* }
|
|
542
|
+
* ```
|
|
543
|
+
*/
|
|
544
|
+
async *[Symbol.asyncIterator]() {
|
|
545
|
+
try {
|
|
546
|
+
const cursor = this.nativeCollection.aggregate(this.stages);
|
|
547
|
+
for await (const doc of cursor) {
|
|
548
|
+
yield doc;
|
|
549
|
+
}
|
|
550
|
+
} catch (err) {
|
|
551
|
+
wrapMongoError(err, this.definition.name);
|
|
552
|
+
}
|
|
553
|
+
}
|
|
554
|
+
/**
|
|
555
|
+
* Return the query execution plan without running the pipeline.
|
|
556
|
+
*
|
|
557
|
+
* Useful for debugging and understanding how MongoDB will process
|
|
558
|
+
* the pipeline stages.
|
|
559
|
+
*
|
|
560
|
+
* @returns A promise resolving to the explain output document.
|
|
561
|
+
*
|
|
562
|
+
* @example
|
|
563
|
+
* ```ts
|
|
564
|
+
* const plan = await aggregate(users)
|
|
565
|
+
* .raw({ $match: { role: 'admin' } })
|
|
566
|
+
* .explain()
|
|
567
|
+
* console.log(plan)
|
|
568
|
+
* ```
|
|
569
|
+
*/
|
|
570
|
+
async explain() {
|
|
571
|
+
try {
|
|
572
|
+
const cursor = this.nativeCollection.aggregate(this.stages);
|
|
573
|
+
return await cursor.explain();
|
|
574
|
+
} catch (err) {
|
|
575
|
+
wrapMongoError(err, this.definition.name);
|
|
576
|
+
}
|
|
577
|
+
}
|
|
578
|
+
// ── Shape-preserving stages ──────────────────────────────────────
|
|
579
|
+
/**
|
|
580
|
+
* Filter documents using a type-safe match expression.
|
|
581
|
+
*
|
|
582
|
+
* Appends a `$match` stage to the pipeline. The filter is constrained
|
|
583
|
+
* to the current output type, so only valid fields and operators are accepted.
|
|
584
|
+
*
|
|
585
|
+
* Supports two forms of type narrowing:
|
|
586
|
+
*
|
|
587
|
+
* **Tier 1 — Explicit type parameter:**
|
|
588
|
+
* ```ts
|
|
589
|
+
* .match<{ role: 'engineer' | 'designer' }>({ role: { $in: ['engineer', 'designer'] } })
|
|
590
|
+
* // role narrows to 'engineer' | 'designer'
|
|
591
|
+
* ```
|
|
592
|
+
*
|
|
593
|
+
* **Tier 2 — Automatic inference from filter literals:**
|
|
594
|
+
* ```ts
|
|
595
|
+
* .match({ role: 'engineer' }) // role narrows to 'engineer'
|
|
596
|
+
* .match({ role: { $ne: 'intern' } }) // role narrows to Exclude<Role, 'intern'>
|
|
597
|
+
* .match({ role: { $in: ['engineer', 'designer'] as const } }) // needs as const
|
|
598
|
+
* ```
|
|
599
|
+
*
|
|
600
|
+
* When no type parameter is provided and the filter doesn't contain
|
|
601
|
+
* inferrable literals, the output type is unchanged (backward compatible).
|
|
602
|
+
*
|
|
603
|
+
* @typeParam TNarrow - Optional object mapping field names to narrowed types. Must be a subtype of the corresponding fields in TOutput.
|
|
604
|
+
* @typeParam F - Inferred from the filter argument. Do not provide explicitly.
|
|
605
|
+
* @param filter - A type-safe filter for the current output type.
|
|
606
|
+
* @returns A new pipeline with the `$match` stage appended and output type narrowed.
|
|
607
|
+
*
|
|
608
|
+
* @example
|
|
609
|
+
* ```ts
|
|
610
|
+
* // Explicit narrowing
|
|
611
|
+
* const filtered = await users.aggregate()
|
|
612
|
+
* .match<{ role: 'engineer' }>({ role: 'engineer' })
|
|
613
|
+
* .toArray()
|
|
614
|
+
* // filtered[0].role → 'engineer'
|
|
615
|
+
*
|
|
616
|
+
* // Automatic narrowing with $in (requires as const)
|
|
617
|
+
* const subset = await users.aggregate()
|
|
618
|
+
* .match({ role: { $in: ['engineer', 'designer'] as const } })
|
|
619
|
+
* .toArray()
|
|
620
|
+
* // subset[0].role → 'engineer' | 'designer'
|
|
621
|
+
* ```
|
|
622
|
+
*/
|
|
623
|
+
match(filter) {
|
|
624
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
625
|
+
...this.stages,
|
|
626
|
+
{ $match: filter }
|
|
627
|
+
]);
|
|
628
|
+
return pipeline;
|
|
629
|
+
}
|
|
630
|
+
/**
|
|
631
|
+
* Sort documents by one or more fields.
|
|
632
|
+
*
|
|
633
|
+
* Appends a `$sort` stage. Keys are constrained to `keyof TOutput & string`
|
|
634
|
+
* and values must be `1` (ascending) or `-1` (descending).
|
|
635
|
+
*
|
|
636
|
+
* @param spec - A sort specification mapping field names to sort direction.
|
|
637
|
+
* @returns A new pipeline with the `$sort` stage appended.
|
|
638
|
+
*
|
|
639
|
+
* @example
|
|
640
|
+
* ```ts
|
|
641
|
+
* const sorted = await aggregate(users)
|
|
642
|
+
* .sort({ age: -1, name: 1 })
|
|
643
|
+
* .toArray()
|
|
644
|
+
* ```
|
|
645
|
+
*/
|
|
646
|
+
sort(spec) {
|
|
647
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
648
|
+
...this.stages,
|
|
649
|
+
{ $sort: spec }
|
|
650
|
+
]);
|
|
651
|
+
}
|
|
652
|
+
/**
|
|
653
|
+
* Skip a number of documents in the pipeline.
|
|
654
|
+
*
|
|
655
|
+
* Appends a `$skip` stage. Commonly used with {@link limit} for pagination.
|
|
656
|
+
*
|
|
657
|
+
* @param n - The number of documents to skip.
|
|
658
|
+
* @returns A new pipeline with the `$skip` stage appended.
|
|
659
|
+
*
|
|
660
|
+
* @example
|
|
661
|
+
* ```ts
|
|
662
|
+
* // Page 2 (10 items per page)
|
|
663
|
+
* const page2 = await aggregate(users)
|
|
664
|
+
* .sort({ name: 1 })
|
|
665
|
+
* .skip(10)
|
|
666
|
+
* .limit(10)
|
|
667
|
+
* .toArray()
|
|
668
|
+
* ```
|
|
669
|
+
*/
|
|
670
|
+
skip(n) {
|
|
671
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
672
|
+
...this.stages,
|
|
673
|
+
{ $skip: n }
|
|
674
|
+
]);
|
|
675
|
+
}
|
|
676
|
+
/**
|
|
677
|
+
* Limit the number of documents passing through the pipeline.
|
|
678
|
+
*
|
|
679
|
+
* Appends a `$limit` stage. Commonly used with {@link skip} for pagination,
|
|
680
|
+
* or after {@link sort} to get top/bottom N results.
|
|
681
|
+
*
|
|
682
|
+
* @param n - The maximum number of documents to pass through.
|
|
683
|
+
* @returns A new pipeline with the `$limit` stage appended.
|
|
684
|
+
*
|
|
685
|
+
* @example
|
|
686
|
+
* ```ts
|
|
687
|
+
* const top5 = await aggregate(users)
|
|
688
|
+
* .sort({ score: -1 })
|
|
689
|
+
* .limit(5)
|
|
690
|
+
* .toArray()
|
|
691
|
+
* ```
|
|
692
|
+
*/
|
|
693
|
+
limit(n) {
|
|
694
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
695
|
+
...this.stages,
|
|
696
|
+
{ $limit: n }
|
|
697
|
+
]);
|
|
698
|
+
}
|
|
699
|
+
// ── Shape-transforming projection stages ─────────────────────────
|
|
700
|
+
/**
|
|
701
|
+
* Include only specified fields in the output.
|
|
702
|
+
*
|
|
703
|
+
* Appends a `$project` stage with inclusion (`1`) for each key.
|
|
704
|
+
* The `_id` field is always included. The output type narrows to
|
|
705
|
+
* `Pick<TOutput, K | '_id'>`.
|
|
706
|
+
*
|
|
707
|
+
* @param spec - An object mapping field names to `1` for inclusion.
|
|
708
|
+
* @returns A new pipeline with the `$project` stage appended.
|
|
709
|
+
*
|
|
710
|
+
* @example
|
|
711
|
+
* ```ts
|
|
712
|
+
* const namesOnly = await aggregate(users)
|
|
713
|
+
* .project({ name: 1 })
|
|
714
|
+
* .toArray()
|
|
715
|
+
* // [{ _id: ..., name: 'Ada' }, ...]
|
|
716
|
+
* ```
|
|
717
|
+
*/
|
|
718
|
+
project(spec) {
|
|
719
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
720
|
+
...this.stages,
|
|
721
|
+
{ $project: spec }
|
|
722
|
+
]);
|
|
723
|
+
return pipeline;
|
|
724
|
+
}
|
|
725
|
+
/**
|
|
726
|
+
* Variadic shorthand for {@link project} — pick fields to include.
|
|
727
|
+
*
|
|
728
|
+
* Generates a `$project` stage that includes only the listed fields
|
|
729
|
+
* (plus `_id`). Equivalent to `.project({ field1: 1, field2: 1 })`.
|
|
730
|
+
*
|
|
731
|
+
* @param fields - Field names to include in the output.
|
|
732
|
+
* @returns A new pipeline with the `$project` stage appended.
|
|
733
|
+
*
|
|
734
|
+
* @example
|
|
735
|
+
* ```ts
|
|
736
|
+
* const namesAndRoles = await aggregate(users)
|
|
737
|
+
* .pick('name', 'role')
|
|
738
|
+
* .toArray()
|
|
739
|
+
* ```
|
|
740
|
+
*/
|
|
741
|
+
pick(...fields) {
|
|
742
|
+
const spec = Object.fromEntries(fields.map((f) => [f, 1]));
|
|
743
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
744
|
+
...this.stages,
|
|
745
|
+
{ $project: spec }
|
|
746
|
+
]);
|
|
747
|
+
return pipeline;
|
|
748
|
+
}
|
|
749
|
+
/**
|
|
750
|
+
* Exclude specified fields from the output.
|
|
751
|
+
*
|
|
752
|
+
* Appends a `$project` stage with exclusion (`0`) for each key.
|
|
753
|
+
* All other fields pass through. The output type becomes `Omit<TOutput, K>`.
|
|
754
|
+
*
|
|
755
|
+
* @param fields - Field names to exclude from the output.
|
|
756
|
+
* @returns A new pipeline with the `$project` stage appended.
|
|
757
|
+
*
|
|
758
|
+
* @example
|
|
759
|
+
* ```ts
|
|
760
|
+
* const noAge = await aggregate(users)
|
|
761
|
+
* .omit('age')
|
|
762
|
+
* .toArray()
|
|
763
|
+
* ```
|
|
764
|
+
*/
|
|
765
|
+
omit(...fields) {
|
|
766
|
+
const spec = Object.fromEntries(fields.map((f) => [f, 0]));
|
|
767
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
768
|
+
...this.stages,
|
|
769
|
+
{ $project: spec }
|
|
770
|
+
]);
|
|
771
|
+
return pipeline;
|
|
772
|
+
}
|
|
773
|
+
groupBy(field, accumulators) {
|
|
774
|
+
const resolved = typeof accumulators === "function" ? accumulators(createAccumulatorBuilder()) : accumulators;
|
|
775
|
+
const _id = Array.isArray(field) ? Object.fromEntries(field.map((f) => [f, `$${f}`])) : `$${field}`;
|
|
776
|
+
const accumExprs = Object.fromEntries(
|
|
777
|
+
Object.entries(resolved).map(([key, acc]) => [key, acc.expr])
|
|
778
|
+
);
|
|
779
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
780
|
+
...this.stages,
|
|
781
|
+
{ $group: { _id, ...accumExprs } }
|
|
782
|
+
]);
|
|
783
|
+
return pipeline;
|
|
784
|
+
}
|
|
785
|
+
// Implementation
|
|
786
|
+
addFields(fields) {
|
|
787
|
+
const resolved = typeof fields === "function" ? fields(createExpressionBuilder()) : fields;
|
|
788
|
+
const stage = Object.fromEntries(
|
|
789
|
+
Object.entries(resolved).map(([k, v]) => [
|
|
790
|
+
k,
|
|
791
|
+
v && typeof v === "object" && "__expr" in v ? v.value : v
|
|
792
|
+
])
|
|
793
|
+
);
|
|
794
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
795
|
+
...this.stages,
|
|
796
|
+
{ $addFields: stage }
|
|
797
|
+
]);
|
|
798
|
+
return pipeline;
|
|
799
|
+
}
|
|
800
|
+
// ── unwind stage ─────────────────────────────────────────────────
|
|
801
|
+
/**
|
|
802
|
+
* Deconstruct an array field, outputting one document per array element.
|
|
803
|
+
*
|
|
804
|
+
* Appends an `$unwind` stage. The unwound field's type changes from
|
|
805
|
+
* `T[]` to `T` in the output type. Documents with empty or missing
|
|
806
|
+
* arrays are dropped unless `preserveEmpty` is `true`.
|
|
807
|
+
*
|
|
808
|
+
* @param field - The name of the array field to unwind.
|
|
809
|
+
* @param options - Optional settings for the unwind stage.
|
|
810
|
+
* @param options.preserveEmpty - If `true`, documents with null, missing, or empty arrays are preserved.
|
|
811
|
+
* @returns A new pipeline with the `$unwind` stage appended.
|
|
812
|
+
*
|
|
813
|
+
* @example
|
|
814
|
+
* ```ts
|
|
815
|
+
* const flat = await aggregate(orders)
|
|
816
|
+
* .unwind('items')
|
|
817
|
+
* .toArray()
|
|
818
|
+
* // Each result has a single `items` value instead of an array
|
|
819
|
+
* ```
|
|
820
|
+
*/
|
|
821
|
+
unwind(field, options) {
|
|
822
|
+
const stage = options?.preserveEmpty ? { $unwind: { path: `$${field}`, preserveNullAndEmptyArrays: true } } : { $unwind: `$${field}` };
|
|
823
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
824
|
+
...this.stages,
|
|
825
|
+
stage
|
|
826
|
+
]);
|
|
827
|
+
return pipeline;
|
|
828
|
+
}
|
|
829
|
+
lookup(fieldOrFrom, options) {
|
|
830
|
+
const stages = [...this.stages];
|
|
831
|
+
if (typeof fieldOrFrom === "object") {
|
|
832
|
+
const foreignName = fieldOrFrom.name;
|
|
833
|
+
const foreignField = options?.on;
|
|
834
|
+
if (!foreignField) {
|
|
835
|
+
throw new Error(
|
|
836
|
+
`[zodmon] lookup: reverse lookup on '${foreignName}' requires an 'on' option specifying which field on the foreign collection references this collection.`
|
|
837
|
+
);
|
|
838
|
+
}
|
|
839
|
+
const asField = options?.as ?? foreignName;
|
|
840
|
+
stages.push({
|
|
841
|
+
$lookup: {
|
|
842
|
+
from: foreignName,
|
|
843
|
+
localField: "_id",
|
|
844
|
+
foreignField,
|
|
845
|
+
as: asField
|
|
846
|
+
}
|
|
847
|
+
});
|
|
848
|
+
if (options?.unwind) {
|
|
849
|
+
stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
|
|
850
|
+
}
|
|
851
|
+
} else {
|
|
852
|
+
const shape = this.definition.shape;
|
|
853
|
+
const fieldSchema = shape[fieldOrFrom];
|
|
854
|
+
const ref = getRefMetadata(fieldSchema);
|
|
855
|
+
if (!ref) {
|
|
856
|
+
throw new Error(
|
|
857
|
+
`[zodmon] lookup: field '${fieldOrFrom}' has no .ref() metadata. Use .lookup(CollectionDef, { on: foreignKey }) for reverse lookups, or add .ref(TargetCollection) to the field schema.`
|
|
858
|
+
);
|
|
859
|
+
}
|
|
860
|
+
const targetName = ref.collection.name;
|
|
861
|
+
const asField = options?.as ?? targetName;
|
|
862
|
+
stages.push({
|
|
863
|
+
$lookup: {
|
|
864
|
+
from: targetName,
|
|
865
|
+
localField: fieldOrFrom,
|
|
866
|
+
foreignField: "_id",
|
|
867
|
+
as: asField
|
|
868
|
+
}
|
|
869
|
+
});
|
|
870
|
+
if (options?.unwind) {
|
|
871
|
+
stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
|
|
872
|
+
}
|
|
873
|
+
}
|
|
874
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, stages);
|
|
875
|
+
return pipeline;
|
|
876
|
+
}
|
|
877
|
+
// ── Convenience shortcuts ────────────────────────────────────────
|
|
878
|
+
/**
|
|
879
|
+
* Count documents per group, sorted by count descending.
|
|
880
|
+
*
|
|
881
|
+
* Shorthand for `.groupBy(field, { count: $count() }).sort({ count: -1 })`.
|
|
882
|
+
*
|
|
883
|
+
* @param field - The field to group and count by.
|
|
884
|
+
* @returns A new pipeline producing `{ _id: TOutput[K], count: number }` results.
|
|
885
|
+
*
|
|
886
|
+
* @example
|
|
887
|
+
* ```ts
|
|
888
|
+
* const roleCounts = await aggregate(users)
|
|
889
|
+
* .countBy('role')
|
|
890
|
+
* .toArray()
|
|
891
|
+
* // [{ _id: 'user', count: 3 }, { _id: 'admin', count: 2 }]
|
|
892
|
+
* ```
|
|
893
|
+
*/
|
|
894
|
+
countBy(field) {
|
|
895
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
896
|
+
...this.stages,
|
|
897
|
+
{ $group: { _id: `$${field}`, count: { $sum: 1 } } },
|
|
898
|
+
{ $sort: { count: -1 } }
|
|
899
|
+
]);
|
|
900
|
+
return pipeline;
|
|
901
|
+
}
|
|
902
|
+
/**
|
|
903
|
+
* Sum a numeric field per group, sorted by total descending.
|
|
904
|
+
*
|
|
905
|
+
* Shorthand for `.groupBy(field, { total: $sum('$sumField') }).sort({ total: -1 })`.
|
|
906
|
+
*
|
|
907
|
+
* @param field - The field to group by.
|
|
908
|
+
* @param sumField - The numeric field to sum.
|
|
909
|
+
* @returns A new pipeline producing `{ _id: TOutput[K], total: number }` results.
|
|
910
|
+
*
|
|
911
|
+
* @example
|
|
912
|
+
* ```ts
|
|
913
|
+
* const revenueByCategory = await aggregate(orders)
|
|
914
|
+
* .sumBy('category', 'amount')
|
|
915
|
+
* .toArray()
|
|
916
|
+
* // [{ _id: 'electronics', total: 5000 }, ...]
|
|
917
|
+
* ```
|
|
918
|
+
*/
|
|
919
|
+
sumBy(field, sumField) {
|
|
920
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
921
|
+
...this.stages,
|
|
922
|
+
{ $group: { _id: `$${field}`, total: { $sum: `$${sumField}` } } },
|
|
923
|
+
{ $sort: { total: -1 } }
|
|
924
|
+
]);
|
|
925
|
+
return pipeline;
|
|
926
|
+
}
|
|
927
|
+
/**
|
|
928
|
+
* Sort by a single field with a friendly direction name.
|
|
929
|
+
*
|
|
930
|
+
* Shorthand for `.sort({ [field]: direction === 'desc' ? -1 : 1 })`.
|
|
931
|
+
*
|
|
932
|
+
* @param field - The field to sort by.
|
|
933
|
+
* @param direction - Sort direction: `'asc'` (default) or `'desc'`.
|
|
934
|
+
* @returns A new pipeline with the `$sort` stage appended.
|
|
935
|
+
*
|
|
936
|
+
* @example
|
|
937
|
+
* ```ts
|
|
938
|
+
* const youngest = await aggregate(users)
|
|
939
|
+
* .sortBy('age')
|
|
940
|
+
* .toArray()
|
|
941
|
+
* ```
|
|
942
|
+
*/
|
|
943
|
+
sortBy(field, direction = "asc") {
|
|
944
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
945
|
+
...this.stages,
|
|
946
|
+
{ $sort: { [field]: direction === "desc" ? -1 : 1 } }
|
|
947
|
+
]);
|
|
948
|
+
}
|
|
949
|
+
/**
|
|
950
|
+
* Return the top N documents sorted by a field descending.
|
|
951
|
+
*
|
|
952
|
+
* Shorthand for `.sort({ [by]: -1 }).limit(n)`.
|
|
953
|
+
*
|
|
954
|
+
* @param n - The number of documents to return.
|
|
955
|
+
* @param options - An object with a `by` field specifying the sort key.
|
|
956
|
+
* @returns A new pipeline with `$sort` and `$limit` stages appended.
|
|
957
|
+
*
|
|
958
|
+
* @example
|
|
959
|
+
* ```ts
|
|
960
|
+
* const top3 = await aggregate(users)
|
|
961
|
+
* .top(3, { by: 'score' })
|
|
962
|
+
* .toArray()
|
|
963
|
+
* ```
|
|
964
|
+
*/
|
|
965
|
+
top(n, options) {
|
|
966
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
967
|
+
...this.stages,
|
|
968
|
+
{ $sort: { [options.by]: -1 } },
|
|
969
|
+
{ $limit: n }
|
|
970
|
+
]);
|
|
971
|
+
}
|
|
972
|
+
/**
|
|
973
|
+
* Return the bottom N documents sorted by a field ascending.
|
|
974
|
+
*
|
|
975
|
+
* Shorthand for `.sort({ [by]: 1 }).limit(n)`.
|
|
976
|
+
*
|
|
977
|
+
* @param n - The number of documents to return.
|
|
978
|
+
* @param options - An object with a `by` field specifying the sort key.
|
|
979
|
+
* @returns A new pipeline with `$sort` and `$limit` stages appended.
|
|
980
|
+
*
|
|
981
|
+
* @example
|
|
982
|
+
* ```ts
|
|
983
|
+
* const bottom3 = await aggregate(users)
|
|
984
|
+
* .bottom(3, { by: 'score' })
|
|
985
|
+
* .toArray()
|
|
986
|
+
* ```
|
|
987
|
+
*/
|
|
988
|
+
bottom(n, options) {
|
|
989
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
990
|
+
...this.stages,
|
|
991
|
+
{ $sort: { [options.by]: 1 } },
|
|
992
|
+
{ $limit: n }
|
|
993
|
+
]);
|
|
994
|
+
}
|
|
995
|
+
};
|
|
996
|
+
function aggregate(handle) {
|
|
997
|
+
return new AggregatePipeline(handle.definition, handle.native, []);
|
|
998
|
+
}
|
|
999
|
+
|
|
76
1000
|
// src/client/client.ts
|
|
77
|
-
var
|
|
1001
|
+
var import_mongodb3 = require("mongodb");
|
|
78
1002
|
|
|
79
1003
|
// src/indexes/spec.ts
|
|
80
1004
|
function toFieldIndexSpec(def) {
|
|
@@ -146,7 +1070,7 @@ async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrpha
|
|
|
146
1070
|
const serialized = serializeIndexKey(spec.key);
|
|
147
1071
|
const existing = existingByKey.get(serialized);
|
|
148
1072
|
if (!existing) {
|
|
149
|
-
if (!dryRun) await native
|
|
1073
|
+
if (!dryRun) await safeCreateIndex(native, spec.key, spec.options);
|
|
150
1074
|
acc.created.push(resolveSpecName(spec));
|
|
151
1075
|
return;
|
|
152
1076
|
}
|
|
@@ -159,8 +1083,8 @@ async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrpha
|
|
|
159
1083
|
}
|
|
160
1084
|
if (dropOrphaned) {
|
|
161
1085
|
if (!dryRun) {
|
|
162
|
-
await native
|
|
163
|
-
await native
|
|
1086
|
+
await safeDropIndex(native, existingName);
|
|
1087
|
+
await safeCreateIndex(native, spec.key, spec.options);
|
|
164
1088
|
}
|
|
165
1089
|
acc.dropped.push(existingName);
|
|
166
1090
|
acc.created.push(resolveSpecName(spec));
|
|
@@ -173,6 +1097,20 @@ async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrpha
|
|
|
173
1097
|
desired: spec.options
|
|
174
1098
|
});
|
|
175
1099
|
}
|
|
1100
|
+
async function safeCreateIndex(native, key, options) {
|
|
1101
|
+
try {
|
|
1102
|
+
await native.createIndex(key, options);
|
|
1103
|
+
} catch (err) {
|
|
1104
|
+
wrapMongoError(err, native.collectionName);
|
|
1105
|
+
}
|
|
1106
|
+
}
|
|
1107
|
+
async function safeDropIndex(native, name) {
|
|
1108
|
+
try {
|
|
1109
|
+
await native.dropIndex(name);
|
|
1110
|
+
} catch (err) {
|
|
1111
|
+
wrapMongoError(err, native.collectionName);
|
|
1112
|
+
}
|
|
1113
|
+
}
|
|
176
1114
|
async function processOrphanedIndexes(existingIndexes, desiredKeys, matchedKeys, native, dryRun, dropOrphaned, dropped) {
|
|
177
1115
|
for (const idx of existingIndexes) {
|
|
178
1116
|
const rawName = idx["name"];
|
|
@@ -181,7 +1119,7 @@ async function processOrphanedIndexes(existingIndexes, desiredKeys, matchedKeys,
|
|
|
181
1119
|
const serialized = serializeIndexKey(idx["key"]);
|
|
182
1120
|
if (matchedKeys.has(serialized) || desiredKeys.has(serialized)) continue;
|
|
183
1121
|
if (dropOrphaned) {
|
|
184
|
-
if (!dryRun) await native
|
|
1122
|
+
if (!dryRun) await safeDropIndex(native, name);
|
|
185
1123
|
dropped.push(name);
|
|
186
1124
|
}
|
|
187
1125
|
}
|
|
@@ -193,7 +1131,7 @@ async function listIndexesSafe(native) {
|
|
|
193
1131
|
if (err instanceof Error && err.message.includes("ns does not exist")) {
|
|
194
1132
|
return [];
|
|
195
1133
|
}
|
|
196
|
-
|
|
1134
|
+
wrapMongoError(err, native.collectionName);
|
|
197
1135
|
}
|
|
198
1136
|
}
|
|
199
1137
|
async function syncIndexes(handle, options) {
|
|
@@ -239,39 +1177,52 @@ async function syncIndexes(handle, options) {
|
|
|
239
1177
|
}
|
|
240
1178
|
|
|
241
1179
|
// src/crud/delete.ts
|
|
242
|
-
var
|
|
1180
|
+
var import_zod2 = require("zod");
|
|
243
1181
|
|
|
244
1182
|
// src/errors/validation.ts
|
|
245
|
-
var ZodmonValidationError = class extends
|
|
1183
|
+
var ZodmonValidationError = class extends ZodmonError {
|
|
246
1184
|
name = "ZodmonValidationError";
|
|
247
|
-
/** The MongoDB collection name where the validation failed. */
|
|
248
|
-
collection;
|
|
249
1185
|
/** The original Zod validation error with detailed issue information. */
|
|
250
1186
|
zodError;
|
|
251
|
-
|
|
1187
|
+
/** The document that failed validation. */
|
|
1188
|
+
document;
|
|
1189
|
+
constructor(collection2, zodError, document) {
|
|
252
1190
|
const fields = zodError.issues.map((issue) => {
|
|
253
1191
|
const path = issue.path.join(".") || "(root)";
|
|
254
1192
|
return `${path} (${issue.message})`;
|
|
255
1193
|
}).join(", ");
|
|
256
|
-
super(`Validation failed for "${collection2}": ${fields}
|
|
257
|
-
this.collection = collection2;
|
|
1194
|
+
super(`Validation failed for "${collection2}": ${fields}`, collection2, { cause: zodError });
|
|
258
1195
|
this.zodError = zodError;
|
|
1196
|
+
this.document = document;
|
|
259
1197
|
}
|
|
260
1198
|
};
|
|
261
1199
|
|
|
262
1200
|
// src/crud/delete.ts
|
|
263
1201
|
async function deleteOne(handle, filter) {
|
|
264
|
-
|
|
1202
|
+
try {
|
|
1203
|
+
return await handle.native.deleteOne(filter);
|
|
1204
|
+
} catch (err) {
|
|
1205
|
+
wrapMongoError(err, handle.definition.name);
|
|
1206
|
+
}
|
|
265
1207
|
}
|
|
266
1208
|
async function deleteMany(handle, filter) {
|
|
267
|
-
|
|
1209
|
+
try {
|
|
1210
|
+
return await handle.native.deleteMany(filter);
|
|
1211
|
+
} catch (err) {
|
|
1212
|
+
wrapMongoError(err, handle.definition.name);
|
|
1213
|
+
}
|
|
268
1214
|
}
|
|
269
1215
|
async function findOneAndDelete(handle, filter, options) {
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
1216
|
+
let result;
|
|
1217
|
+
try {
|
|
1218
|
+
result = await handle.native.findOneAndDelete(
|
|
1219
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1220
|
+
filter,
|
|
1221
|
+
{ includeResultMetadata: false }
|
|
1222
|
+
);
|
|
1223
|
+
} catch (err) {
|
|
1224
|
+
wrapMongoError(err, handle.definition.name);
|
|
1225
|
+
}
|
|
275
1226
|
if (!result) return null;
|
|
276
1227
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
277
1228
|
if (mode === false || mode === "passthrough") {
|
|
@@ -280,24 +1231,24 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
280
1231
|
try {
|
|
281
1232
|
return handle.definition.schema.parse(result);
|
|
282
1233
|
} catch (err) {
|
|
283
|
-
if (err instanceof
|
|
284
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1234
|
+
if (err instanceof import_zod2.z.ZodError) {
|
|
1235
|
+
throw new ZodmonValidationError(handle.definition.name, err, result);
|
|
285
1236
|
}
|
|
286
1237
|
throw err;
|
|
287
1238
|
}
|
|
288
1239
|
}
|
|
289
1240
|
|
|
290
1241
|
// src/crud/find.ts
|
|
291
|
-
var
|
|
1242
|
+
var import_zod4 = require("zod");
|
|
292
1243
|
|
|
293
1244
|
// src/errors/not-found.ts
|
|
294
|
-
var ZodmonNotFoundError = class extends
|
|
1245
|
+
var ZodmonNotFoundError = class extends ZodmonError {
|
|
295
1246
|
name = "ZodmonNotFoundError";
|
|
296
|
-
/** The
|
|
297
|
-
|
|
298
|
-
constructor(collection2) {
|
|
299
|
-
super(`Document not found in "${collection2}"
|
|
300
|
-
this.
|
|
1247
|
+
/** The filter that produced no results. */
|
|
1248
|
+
filter;
|
|
1249
|
+
constructor(collection2, filter) {
|
|
1250
|
+
super(`Document not found in "${collection2}"`, collection2);
|
|
1251
|
+
this.filter = filter;
|
|
301
1252
|
}
|
|
302
1253
|
};
|
|
303
1254
|
|
|
@@ -325,18 +1276,18 @@ function checkUnindexedFields(definition, filter) {
|
|
|
325
1276
|
}
|
|
326
1277
|
|
|
327
1278
|
// src/query/cursor.ts
|
|
328
|
-
var
|
|
1279
|
+
var import_zod3 = require("zod");
|
|
329
1280
|
|
|
330
1281
|
// src/crud/paginate.ts
|
|
331
|
-
var
|
|
1282
|
+
var import_mongodb2 = require("mongodb");
|
|
332
1283
|
function serializeValue(value) {
|
|
333
|
-
if (value instanceof
|
|
1284
|
+
if (value instanceof import_mongodb2.ObjectId) return { $oid: value.toHexString() };
|
|
334
1285
|
if (value instanceof Date) return { $date: value.getTime() };
|
|
335
1286
|
return value;
|
|
336
1287
|
}
|
|
337
1288
|
function deserializeValue(value) {
|
|
338
1289
|
if (value != null && typeof value === "object") {
|
|
339
|
-
if ("$oid" in value) return new
|
|
1290
|
+
if ("$oid" in value) return new import_mongodb2.ObjectId(value.$oid);
|
|
340
1291
|
if ("$date" in value) return new Date(value.$date);
|
|
341
1292
|
}
|
|
342
1293
|
return value;
|
|
@@ -498,10 +1449,17 @@ var TypedFindCursor = class {
|
|
|
498
1449
|
}
|
|
499
1450
|
/** @internal Offset pagination implementation. */
|
|
500
1451
|
async offsetPaginate(_sortKeys, sort, opts) {
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
1452
|
+
let total;
|
|
1453
|
+
let raw2;
|
|
1454
|
+
try {
|
|
1455
|
+
;
|
|
1456
|
+
[total, raw2] = await Promise.all([
|
|
1457
|
+
this.nativeCollection.countDocuments(this.filter),
|
|
1458
|
+
this.nativeCollection.find(this.filter).sort(sort).skip((opts.page - 1) * opts.perPage).limit(opts.perPage).toArray()
|
|
1459
|
+
]);
|
|
1460
|
+
} catch (err) {
|
|
1461
|
+
wrapMongoError(err, this.collectionName);
|
|
1462
|
+
}
|
|
505
1463
|
const docs = raw2.map((doc) => this.validateDoc(doc));
|
|
506
1464
|
const totalPages = Math.ceil(total / opts.perPage);
|
|
507
1465
|
return {
|
|
@@ -525,7 +1483,12 @@ var TypedFindCursor = class {
|
|
|
525
1483
|
combinedFilter = this.filter && Object.keys(this.filter).length > 0 ? { $and: [this.filter, cursorFilter] } : cursorFilter;
|
|
526
1484
|
}
|
|
527
1485
|
const effectiveSort = isBackward ? Object.fromEntries(sortKeys2.map(([f, d]) => [f, d === 1 ? -1 : 1])) : sort;
|
|
528
|
-
|
|
1486
|
+
let raw2;
|
|
1487
|
+
try {
|
|
1488
|
+
raw2 = await this.nativeCollection.find(combinedFilter).sort(effectiveSort).limit(opts.limit + 1).toArray();
|
|
1489
|
+
} catch (err) {
|
|
1490
|
+
wrapMongoError(err, this.collectionName);
|
|
1491
|
+
}
|
|
529
1492
|
const hasMore = raw2.length > opts.limit;
|
|
530
1493
|
if (hasMore) raw2.pop();
|
|
531
1494
|
if (isBackward) raw2.reverse();
|
|
@@ -553,7 +1516,12 @@ var TypedFindCursor = class {
|
|
|
553
1516
|
* ```
|
|
554
1517
|
*/
|
|
555
1518
|
async toArray() {
|
|
556
|
-
|
|
1519
|
+
let raw2;
|
|
1520
|
+
try {
|
|
1521
|
+
raw2 = await this.cursor.toArray();
|
|
1522
|
+
} catch (err) {
|
|
1523
|
+
wrapMongoError(err, this.collectionName);
|
|
1524
|
+
}
|
|
557
1525
|
return raw2.map((doc) => this.validateDoc(doc));
|
|
558
1526
|
}
|
|
559
1527
|
/**
|
|
@@ -573,8 +1541,12 @@ var TypedFindCursor = class {
|
|
|
573
1541
|
* ```
|
|
574
1542
|
*/
|
|
575
1543
|
async *[Symbol.asyncIterator]() {
|
|
576
|
-
|
|
577
|
-
|
|
1544
|
+
try {
|
|
1545
|
+
for await (const doc of this.cursor) {
|
|
1546
|
+
yield this.validateDoc(doc);
|
|
1547
|
+
}
|
|
1548
|
+
} catch (err) {
|
|
1549
|
+
wrapMongoError(err, this.collectionName);
|
|
578
1550
|
}
|
|
579
1551
|
}
|
|
580
1552
|
/** @internal Validate a single raw document against the schema. */
|
|
@@ -585,8 +1557,8 @@ var TypedFindCursor = class {
|
|
|
585
1557
|
try {
|
|
586
1558
|
return this.schema.parse(raw2);
|
|
587
1559
|
} catch (err) {
|
|
588
|
-
if (err instanceof
|
|
589
|
-
throw new ZodmonValidationError(this.collectionName, err);
|
|
1560
|
+
if (err instanceof import_zod3.z.ZodError) {
|
|
1561
|
+
throw new ZodmonValidationError(this.collectionName, err, raw2);
|
|
590
1562
|
}
|
|
591
1563
|
throw err;
|
|
592
1564
|
}
|
|
@@ -597,7 +1569,12 @@ var TypedFindCursor = class {
|
|
|
597
1569
|
async function findOne(handle, filter, options) {
|
|
598
1570
|
checkUnindexedFields(handle.definition, filter);
|
|
599
1571
|
const findOptions = options?.project ? { projection: options.project } : void 0;
|
|
600
|
-
|
|
1572
|
+
let raw2;
|
|
1573
|
+
try {
|
|
1574
|
+
raw2 = await handle.native.findOne(filter, findOptions);
|
|
1575
|
+
} catch (err) {
|
|
1576
|
+
wrapMongoError(err, handle.definition.name);
|
|
1577
|
+
}
|
|
601
1578
|
if (!raw2) return null;
|
|
602
1579
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
603
1580
|
if (mode === false || mode === "passthrough") {
|
|
@@ -606,8 +1583,8 @@ async function findOne(handle, filter, options) {
|
|
|
606
1583
|
try {
|
|
607
1584
|
return handle.definition.schema.parse(raw2);
|
|
608
1585
|
} catch (err) {
|
|
609
|
-
if (err instanceof
|
|
610
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1586
|
+
if (err instanceof import_zod4.z.ZodError) {
|
|
1587
|
+
throw new ZodmonValidationError(handle.definition.name, err, raw2);
|
|
611
1588
|
}
|
|
612
1589
|
throw err;
|
|
613
1590
|
}
|
|
@@ -615,7 +1592,7 @@ async function findOne(handle, filter, options) {
|
|
|
615
1592
|
async function findOneOrThrow(handle, filter, options) {
|
|
616
1593
|
const doc = await findOne(handle, filter, options);
|
|
617
1594
|
if (!doc) {
|
|
618
|
-
throw new ZodmonNotFoundError(handle.definition.name);
|
|
1595
|
+
throw new ZodmonNotFoundError(handle.definition.name, filter);
|
|
619
1596
|
}
|
|
620
1597
|
return doc;
|
|
621
1598
|
}
|
|
@@ -628,18 +1605,22 @@ function find(handle, filter, options) {
|
|
|
628
1605
|
}
|
|
629
1606
|
|
|
630
1607
|
// src/crud/insert.ts
|
|
631
|
-
var
|
|
1608
|
+
var import_zod5 = require("zod");
|
|
632
1609
|
async function insertOne(handle, doc) {
|
|
633
1610
|
let parsed;
|
|
634
1611
|
try {
|
|
635
1612
|
parsed = handle.definition.schema.parse(doc);
|
|
636
1613
|
} catch (err) {
|
|
637
|
-
if (err instanceof
|
|
638
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1614
|
+
if (err instanceof import_zod5.z.ZodError) {
|
|
1615
|
+
throw new ZodmonValidationError(handle.definition.name, err, doc);
|
|
639
1616
|
}
|
|
640
1617
|
throw err;
|
|
641
1618
|
}
|
|
642
|
-
|
|
1619
|
+
try {
|
|
1620
|
+
await handle.native.insertOne(parsed);
|
|
1621
|
+
} catch (err) {
|
|
1622
|
+
wrapMongoError(err, handle.definition.name);
|
|
1623
|
+
}
|
|
643
1624
|
return parsed;
|
|
644
1625
|
}
|
|
645
1626
|
async function insertMany(handle, docs) {
|
|
@@ -649,23 +1630,35 @@ async function insertMany(handle, docs) {
|
|
|
649
1630
|
try {
|
|
650
1631
|
parsed.push(handle.definition.schema.parse(doc));
|
|
651
1632
|
} catch (err) {
|
|
652
|
-
if (err instanceof
|
|
653
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1633
|
+
if (err instanceof import_zod5.z.ZodError) {
|
|
1634
|
+
throw new ZodmonValidationError(handle.definition.name, err, doc);
|
|
654
1635
|
}
|
|
655
1636
|
throw err;
|
|
656
1637
|
}
|
|
657
1638
|
}
|
|
658
|
-
|
|
1639
|
+
try {
|
|
1640
|
+
await handle.native.insertMany(parsed);
|
|
1641
|
+
} catch (err) {
|
|
1642
|
+
wrapMongoError(err, handle.definition.name);
|
|
1643
|
+
}
|
|
659
1644
|
return parsed;
|
|
660
1645
|
}
|
|
661
1646
|
|
|
662
1647
|
// src/crud/update.ts
|
|
663
|
-
var
|
|
1648
|
+
var import_zod6 = require("zod");
|
|
664
1649
|
async function updateOne(handle, filter, update, options) {
|
|
665
|
-
|
|
1650
|
+
try {
|
|
1651
|
+
return await handle.native.updateOne(filter, update, options);
|
|
1652
|
+
} catch (err) {
|
|
1653
|
+
wrapMongoError(err, handle.definition.name);
|
|
1654
|
+
}
|
|
666
1655
|
}
|
|
667
1656
|
async function updateMany(handle, filter, update, options) {
|
|
668
|
-
|
|
1657
|
+
try {
|
|
1658
|
+
return await handle.native.updateMany(filter, update, options);
|
|
1659
|
+
} catch (err) {
|
|
1660
|
+
wrapMongoError(err, handle.definition.name);
|
|
1661
|
+
}
|
|
669
1662
|
}
|
|
670
1663
|
async function findOneAndUpdate(handle, filter, update, options) {
|
|
671
1664
|
const driverOptions = {
|
|
@@ -675,14 +1668,19 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
675
1668
|
if (options?.upsert !== void 0) {
|
|
676
1669
|
driverOptions["upsert"] = options.upsert;
|
|
677
1670
|
}
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
1671
|
+
let result;
|
|
1672
|
+
try {
|
|
1673
|
+
result = await handle.native.findOneAndUpdate(
|
|
1674
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1675
|
+
filter,
|
|
1676
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedUpdateFilter intersection type is not directly assignable to MongoDB's UpdateFilter
|
|
1677
|
+
update,
|
|
1678
|
+
// biome-ignore lint/suspicious/noExplicitAny: dynamic options object is not assignable to driver's FindOneAndUpdateOptions under exactOptionalPropertyTypes
|
|
1679
|
+
driverOptions
|
|
1680
|
+
);
|
|
1681
|
+
} catch (err) {
|
|
1682
|
+
wrapMongoError(err, handle.definition.name);
|
|
1683
|
+
}
|
|
686
1684
|
if (!result) return null;
|
|
687
1685
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
688
1686
|
if (mode === false || mode === "passthrough") {
|
|
@@ -691,8 +1689,8 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
691
1689
|
try {
|
|
692
1690
|
return handle.definition.schema.parse(result);
|
|
693
1691
|
} catch (err) {
|
|
694
|
-
if (err instanceof
|
|
695
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1692
|
+
if (err instanceof import_zod6.z.ZodError) {
|
|
1693
|
+
throw new ZodmonValidationError(handle.definition.name, err, result);
|
|
696
1694
|
}
|
|
697
1695
|
throw err;
|
|
698
1696
|
}
|
|
@@ -982,6 +1980,27 @@ var CollectionHandle = class {
|
|
|
982
1980
|
async syncIndexes(options) {
|
|
983
1981
|
return await syncIndexes(this, options);
|
|
984
1982
|
}
|
|
1983
|
+
/**
|
|
1984
|
+
* Start a type-safe aggregation pipeline on this collection.
|
|
1985
|
+
*
|
|
1986
|
+
* Returns a fluent pipeline builder that tracks the output document
|
|
1987
|
+
* shape through each stage. The pipeline is lazy — no query executes
|
|
1988
|
+
* until a terminal method (`toArray`, `for await`, `explain`) is called.
|
|
1989
|
+
*
|
|
1990
|
+
* @returns A new pipeline builder starting with this collection's document type.
|
|
1991
|
+
*
|
|
1992
|
+
* @example
|
|
1993
|
+
* ```ts
|
|
1994
|
+
* const users = db.use(Users)
|
|
1995
|
+
* const result = await users.aggregate()
|
|
1996
|
+
* .match({ role: 'admin' })
|
|
1997
|
+
* .groupBy('role', { count: $count() })
|
|
1998
|
+
* .toArray()
|
|
1999
|
+
* ```
|
|
2000
|
+
*/
|
|
2001
|
+
aggregate() {
|
|
2002
|
+
return aggregate(this);
|
|
2003
|
+
}
|
|
985
2004
|
};
|
|
986
2005
|
|
|
987
2006
|
// src/client/client.ts
|
|
@@ -991,7 +2010,7 @@ var Database = class {
|
|
|
991
2010
|
/** Registered collection definitions, keyed by name. Used by syncIndexes(). */
|
|
992
2011
|
_collections = /* @__PURE__ */ new Map();
|
|
993
2012
|
constructor(uri, dbName, options) {
|
|
994
|
-
this._client = new
|
|
2013
|
+
this._client = new import_mongodb3.MongoClient(uri, options);
|
|
995
2014
|
this._db = this._client.db(dbName);
|
|
996
2015
|
}
|
|
997
2016
|
/**
|
|
@@ -1007,9 +2026,7 @@ var Database = class {
|
|
|
1007
2026
|
*/
|
|
1008
2027
|
use(def) {
|
|
1009
2028
|
this._collections.set(def.name, def);
|
|
1010
|
-
const native = this._db.collection(
|
|
1011
|
-
def.name
|
|
1012
|
-
);
|
|
2029
|
+
const native = this._db.collection(def.name);
|
|
1013
2030
|
return new CollectionHandle(
|
|
1014
2031
|
def,
|
|
1015
2032
|
native
|
|
@@ -1082,41 +2099,11 @@ function createClient(uri, dbNameOrOptions, maybeOptions) {
|
|
|
1082
2099
|
}
|
|
1083
2100
|
|
|
1084
2101
|
// src/collection/collection.ts
|
|
1085
|
-
var
|
|
2102
|
+
var import_mongodb5 = require("mongodb");
|
|
1086
2103
|
var import_zod9 = require("zod");
|
|
1087
2104
|
|
|
1088
2105
|
// src/schema/extensions.ts
|
|
1089
2106
|
var import_zod7 = require("zod");
|
|
1090
|
-
|
|
1091
|
-
// src/schema/ref.ts
|
|
1092
|
-
var import_zod6 = require("zod");
|
|
1093
|
-
var refMetadata = /* @__PURE__ */ new WeakMap();
|
|
1094
|
-
function getRefMetadata(schema) {
|
|
1095
|
-
if (typeof schema !== "object" || schema === null) return void 0;
|
|
1096
|
-
return refMetadata.get(schema);
|
|
1097
|
-
}
|
|
1098
|
-
var REF_GUARD = /* @__PURE__ */ Symbol.for("zodmon_ref");
|
|
1099
|
-
function installRefExtension() {
|
|
1100
|
-
const proto = import_zod6.z.ZodType.prototype;
|
|
1101
|
-
if (REF_GUARD in proto) return;
|
|
1102
|
-
Object.defineProperty(proto, "ref", {
|
|
1103
|
-
value(collection2) {
|
|
1104
|
-
refMetadata.set(this, { collection: collection2 });
|
|
1105
|
-
return this;
|
|
1106
|
-
},
|
|
1107
|
-
enumerable: true,
|
|
1108
|
-
configurable: true,
|
|
1109
|
-
writable: true
|
|
1110
|
-
});
|
|
1111
|
-
Object.defineProperty(proto, REF_GUARD, {
|
|
1112
|
-
value: true,
|
|
1113
|
-
enumerable: false,
|
|
1114
|
-
configurable: false,
|
|
1115
|
-
writable: false
|
|
1116
|
-
});
|
|
1117
|
-
}
|
|
1118
|
-
|
|
1119
|
-
// src/schema/extensions.ts
|
|
1120
2107
|
var indexMetadata = /* @__PURE__ */ new WeakMap();
|
|
1121
2108
|
function getIndexMetadata(schema) {
|
|
1122
2109
|
if (typeof schema !== "object" || schema === null) return void 0;
|
|
@@ -1222,14 +2209,14 @@ function installExtensions() {
|
|
|
1222
2209
|
installExtensions();
|
|
1223
2210
|
|
|
1224
2211
|
// src/schema/object-id.ts
|
|
1225
|
-
var
|
|
2212
|
+
var import_mongodb4 = require("mongodb");
|
|
1226
2213
|
var import_zod8 = require("zod");
|
|
1227
2214
|
var OBJECT_ID_HEX = /^[a-f\d]{24}$/i;
|
|
1228
2215
|
function objectId() {
|
|
1229
2216
|
return import_zod8.z.custom((val) => {
|
|
1230
|
-
if (val instanceof
|
|
2217
|
+
if (val instanceof import_mongodb4.ObjectId) return true;
|
|
1231
2218
|
return typeof val === "string" && OBJECT_ID_HEX.test(val);
|
|
1232
|
-
}, "Invalid ObjectId").transform((val) => val instanceof
|
|
2219
|
+
}, "Invalid ObjectId").transform((val) => val instanceof import_mongodb4.ObjectId ? val : import_mongodb4.ObjectId.createFromHexString(val));
|
|
1233
2220
|
}
|
|
1234
2221
|
|
|
1235
2222
|
// src/collection/collection.ts
|
|
@@ -1244,7 +2231,7 @@ function extractFieldIndexes(shape) {
|
|
|
1244
2231
|
return result;
|
|
1245
2232
|
}
|
|
1246
2233
|
function collection(name, shape, options) {
|
|
1247
|
-
const resolvedShape = "_id" in shape ? shape : { _id: objectId().default(() => new
|
|
2234
|
+
const resolvedShape = "_id" in shape ? shape : { _id: objectId().default(() => new import_mongodb5.ObjectId()), ...shape };
|
|
1248
2235
|
const schema = import_zod9.z.object(resolvedShape);
|
|
1249
2236
|
const fieldIndexes = extractFieldIndexes(shape);
|
|
1250
2237
|
const { indexes: compoundIndexes, validation, ...rest } = options ?? {};
|
|
@@ -1315,14 +2302,14 @@ function index(fields) {
|
|
|
1315
2302
|
}
|
|
1316
2303
|
|
|
1317
2304
|
// src/helpers/oid.ts
|
|
1318
|
-
var
|
|
2305
|
+
var import_mongodb6 = require("mongodb");
|
|
1319
2306
|
function oid(value) {
|
|
1320
|
-
if (value === void 0) return new
|
|
1321
|
-
if (value instanceof
|
|
1322
|
-
return
|
|
2307
|
+
if (value === void 0) return new import_mongodb6.ObjectId();
|
|
2308
|
+
if (value instanceof import_mongodb6.ObjectId) return value;
|
|
2309
|
+
return import_mongodb6.ObjectId.createFromHexString(value);
|
|
1323
2310
|
}
|
|
1324
2311
|
function isOid(value) {
|
|
1325
|
-
return value instanceof
|
|
2312
|
+
return value instanceof import_mongodb6.ObjectId;
|
|
1326
2313
|
}
|
|
1327
2314
|
|
|
1328
2315
|
// src/query/operators.ts
|
|
@@ -1367,29 +2354,52 @@ var $ = {
|
|
|
1367
2354
|
// Annotate the CommonJS export names for ESM import in node:
|
|
1368
2355
|
0 && (module.exports = {
|
|
1369
2356
|
$,
|
|
2357
|
+
$addToSet,
|
|
1370
2358
|
$and,
|
|
2359
|
+
$avg,
|
|
2360
|
+
$count,
|
|
1371
2361
|
$eq,
|
|
1372
2362
|
$exists,
|
|
2363
|
+
$first,
|
|
1373
2364
|
$gt,
|
|
1374
2365
|
$gte,
|
|
1375
2366
|
$in,
|
|
2367
|
+
$last,
|
|
1376
2368
|
$lt,
|
|
1377
2369
|
$lte,
|
|
2370
|
+
$max,
|
|
2371
|
+
$min,
|
|
1378
2372
|
$ne,
|
|
1379
2373
|
$nin,
|
|
1380
2374
|
$nor,
|
|
1381
2375
|
$not,
|
|
1382
2376
|
$or,
|
|
2377
|
+
$push,
|
|
1383
2378
|
$regex,
|
|
2379
|
+
$sum,
|
|
2380
|
+
AggregatePipeline,
|
|
1384
2381
|
CollectionHandle,
|
|
1385
2382
|
Database,
|
|
1386
2383
|
IndexBuilder,
|
|
1387
2384
|
TypedFindCursor,
|
|
2385
|
+
ZodmonAuthError,
|
|
2386
|
+
ZodmonBulkWriteError,
|
|
2387
|
+
ZodmonDocValidationError,
|
|
2388
|
+
ZodmonDuplicateKeyError,
|
|
2389
|
+
ZodmonError,
|
|
2390
|
+
ZodmonIndexError,
|
|
2391
|
+
ZodmonNetworkError,
|
|
1388
2392
|
ZodmonNotFoundError,
|
|
2393
|
+
ZodmonQueryError,
|
|
2394
|
+
ZodmonTimeoutError,
|
|
1389
2395
|
ZodmonValidationError,
|
|
2396
|
+
ZodmonWriteConflictError,
|
|
2397
|
+
aggregate,
|
|
1390
2398
|
checkUnindexedFields,
|
|
1391
2399
|
collection,
|
|
2400
|
+
createAccumulatorBuilder,
|
|
1392
2401
|
createClient,
|
|
2402
|
+
createExpressionBuilder,
|
|
1393
2403
|
deleteMany,
|
|
1394
2404
|
deleteOne,
|
|
1395
2405
|
extractComparableOptions,
|
|
@@ -1415,6 +2425,7 @@ var $ = {
|
|
|
1415
2425
|
toCompoundIndexSpec,
|
|
1416
2426
|
toFieldIndexSpec,
|
|
1417
2427
|
updateMany,
|
|
1418
|
-
updateOne
|
|
2428
|
+
updateOne,
|
|
2429
|
+
wrapMongoError
|
|
1419
2430
|
});
|
|
1420
2431
|
//# sourceMappingURL=index.cjs.map
|