@zodmon/core 0.8.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1124 -113
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1619 -163
- package/dist/index.d.ts +1619 -163
- package/dist/index.js +1084 -97
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -1,3 +1,903 @@
|
|
|
1
|
+
// src/aggregate/expressions.ts
// Standalone accumulator factories. Each returns a marker object
// (`__accum: true`) wrapping the raw MongoDB accumulator expression,
// exactly as consumed by the $group stage builder.
var $count = () => ({ __accum: true, expr: { $sum: 1 } });
var $sum = (field) => ({ __accum: true, expr: { $sum: field } });
var $avg = (field) => ({ __accum: true, expr: { $avg: field } });
var $min = (field) => ({ __accum: true, expr: { $min: field } });
var $max = (field) => ({ __accum: true, expr: { $max: field } });
var $first = (field) => ({ __accum: true, expr: { $first: field } });
var $last = (field) => ({ __accum: true, expr: { $last: field } });
var $push = (field) => ({ __accum: true, expr: { $push: field } });
var $addToSet = (field) => ({ __accum: true, expr: { $addToSet: field } });
|
|
38
|
+
/**
 * Build the accumulator helper object handed to `groupBy` callbacks.
 *
 * Field names are plain strings and are prefixed with `$` to become
 * field-path expressions; `sum` additionally accepts a raw number,
 * which is passed through untouched (e.g. `sum(1)` counts documents).
 *
 * @returns An object of accumulator factories, each producing a
 *   `{ __accum: true, expr: {...} }` marker identical to the
 *   standalone `$min`/`$max`/etc. helpers.
 */
function createAccumulatorBuilder() {
  const mark = (expr) => ({ __accum: true, expr });
  const path = (field) => `$${field}`;
  return {
    count: () => mark({ $sum: 1 }),
    sum: (field) => mark({ $sum: typeof field === "number" ? field : path(field) }),
    avg: (field) => mark({ $avg: path(field) }),
    min: (field) => mark({ $min: path(field) }),
    max: (field) => mark({ $max: path(field) }),
    first: (field) => mark({ $first: path(field) }),
    last: (field) => mark({ $last: path(field) }),
    push: (field) => mark({ $push: path(field) }),
    addToSet: (field) => mark({ $addToSet: path(field) })
    // Runtime values are plain objects; generic typing happens at compile time.
  };
}
|
|
55
|
+
/**
 * Build the expression helper object handed to `addFields` callbacks.
 *
 * Each helper returns a `{ __expr: true, value }` marker wrapping a raw
 * MongoDB aggregation expression. Field names become `$`-prefixed paths;
 * scalar operands to arithmetic helpers pass through as literals when
 * they are numbers.
 *
 * Note: `concat` treats any part that looks like a bare identifier as a
 * field reference; other strings (containing spaces, punctuation, …) are
 * kept as literals.
 *
 * @returns An object of expression factories grouped by category
 *   (arithmetic, string, comparison, date, array, conditional).
 */
function createExpressionBuilder() {
  const path = (field) => `$${field}`;
  const numberOrPath = (v) => (typeof v === "number" ? v : `$${v}`);
  const wrap = (value) => ({ __expr: true, value });
  const IDENT = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
  return {
    // Arithmetic
    add: (field, value) => wrap({ $add: [path(field), numberOrPath(value)] }),
    subtract: (field, value) => wrap({ $subtract: [path(field), numberOrPath(value)] }),
    multiply: (field, value) => wrap({ $multiply: [path(field), numberOrPath(value)] }),
    divide: (field, value) => wrap({ $divide: [path(field), numberOrPath(value)] }),
    mod: (field, value) => wrap({ $mod: [path(field), numberOrPath(value)] }),
    abs: (field) => wrap({ $abs: path(field) }),
    ceil: (field) => wrap({ $ceil: path(field) }),
    floor: (field) => wrap({ $floor: path(field) }),
    round: (field, place = 0) => wrap({ $round: [path(field), place] }),
    // String
    concat: (...parts) => {
      // Bare identifiers are treated as field paths, everything else as literals.
      const resolved = parts.map((p) => (IDENT.test(p) ? path(p) : p));
      return wrap({ $concat: resolved });
    },
    toLower: (field) => wrap({ $toLower: path(field) }),
    toUpper: (field) => wrap({ $toUpper: path(field) }),
    trim: (field) => wrap({ $trim: { input: path(field) } }),
    // Uses $substrBytes: offsets/lengths are byte-based, not code-point-based.
    substr: (field, start, length) => wrap({ $substrBytes: [path(field), start, length] }),
    // Comparison
    eq: (field, value) => wrap({ $eq: [path(field), value] }),
    gt: (field, value) => wrap({ $gt: [path(field), value] }),
    gte: (field, value) => wrap({ $gte: [path(field), value] }),
    lt: (field, value) => wrap({ $lt: [path(field), value] }),
    lte: (field, value) => wrap({ $lte: [path(field), value] }),
    ne: (field, value) => wrap({ $ne: [path(field), value] }),
    // Date
    year: (field) => wrap({ $year: path(field) }),
    month: (field) => wrap({ $month: path(field) }),
    dayOfMonth: (field) => wrap({ $dayOfMonth: path(field) }),
    // Array
    size: (field) => wrap({ $size: path(field) }),
    // Conditional
    cond: (condition, thenValue, elseValue) => wrap({ $cond: [condition.value, thenValue, elseValue] }),
    ifNull: (field, fallback) => wrap({ $ifNull: [path(field), fallback] })
    // Runtime values are plain objects; generic typing happens at compile time.
  };
}
|
|
101
|
+
|
|
102
|
+
// src/errors/wrap.ts
|
|
103
|
+
import { MongoBulkWriteError, MongoNetworkError, MongoServerError } from "mongodb";
|
|
104
|
+
|
|
105
|
+
// src/errors/base.ts
|
|
106
|
+
// src/errors/base.ts
/**
 * Base class for all zodmon errors.
 *
 * Carries the collection name the operation targeted and, when provided,
 * the underlying error as `cause`.
 */
var ZodmonError = class extends Error {
  name = "ZodmonError";
  /** The MongoDB collection name associated with this error. */
  collection;
  /** The underlying error that caused this error, if any. */
  cause;
  constructor(message, collection2, options) {
    super(message);
    this.collection = collection2;
    const underlying = options?.cause;
    if (underlying) {
      this.cause = underlying;
    }
  }
};
|
|
120
|
+
|
|
121
|
+
// src/errors/auth.ts
|
|
122
|
+
/**
 * Authentication / authorization failure (MongoDB codes 13 and 18).
 * Code 18 is a credentials failure; code 13 is an authorization failure.
 */
var ZodmonAuthError = class extends ZodmonError {
  name = "ZodmonAuthError";
  /** The MongoDB error code (13 or 18). */
  code;
  constructor(collection2, code, cause) {
    super(
      code === 18
        ? `Authentication failed for "${collection2}": check connection credentials`
        : `Not authorized to perform this operation on "${collection2}"`,
      collection2,
      { cause }
    );
    this.code = code;
  }
};
|
|
132
|
+
|
|
133
|
+
// src/errors/bulk-write.ts
|
|
134
|
+
// src/errors/bulk-write.ts
/**
 * Wraps a driver bulk-write failure, extracting per-operation write
 * errors and the partial-success counters from the driver's result.
 * All counters default to 0 when the driver result is absent.
 */
var ZodmonBulkWriteError = class extends ZodmonError {
  name = "ZodmonBulkWriteError";
  /** Number of documents successfully inserted. */
  insertedCount;
  /** Number of documents matched by update filters. */
  matchedCount;
  /** Number of documents actually modified. */
  modifiedCount;
  /** Number of documents deleted. */
  deletedCount;
  /** Individual write errors with their operation index, code, and message. */
  writeErrors;
  constructor(collection2, cause, totalOps) {
    const bulkErr = cause;
    const result = bulkErr["result"] ?? {};
    // Normalize the driver's raw write errors into {index, code, message}.
    const writeErrors = (bulkErr["writeErrors"] ?? []).map((raw) => ({
      index: raw["index"] ?? 0,
      code: raw["code"] ?? 0,
      message: raw["errmsg"] ?? raw["message"] ?? "unknown error"
    }));
    const failedMsg =
      totalOps === void 0
        ? `${writeErrors.length} operations failed`
        : `${writeErrors.length} of ${totalOps} operations failed`;
    super(`Bulk write failed on "${collection2}": ${failedMsg}`, collection2, { cause });
    this.insertedCount = result["insertedCount"] ?? 0;
    this.matchedCount = result["matchedCount"] ?? 0;
    this.modifiedCount = result["modifiedCount"] ?? 0;
    this.deletedCount = result["deletedCount"] ?? 0;
    this.writeErrors = writeErrors;
  }
};
|
|
164
|
+
|
|
165
|
+
// src/errors/doc-validation.ts
|
|
166
|
+
// src/errors/doc-validation.ts
/**
 * Server-side `$jsonSchema` / validator rejection (MongoDB code 121).
 * Exposes the server's `errInfo` document with failure details.
 */
var ZodmonDocValidationError = class extends ZodmonError {
  name = "ZodmonDocValidationError";
  /** Server-provided validation failure details. */
  errInfo;
  constructor(collection2, errInfo, cause) {
    const message = `Server-side document validation failed for "${collection2}": ${cause.message}`;
    super(message, collection2, { cause });
    this.errInfo = errInfo;
  }
};
|
|
179
|
+
|
|
180
|
+
// src/errors/duplicate-key.ts
|
|
181
|
+
// src/errors/duplicate-key.ts
// Fallback parsers for servers that omit keyPattern/keyValue from the error.
var INDEX_REGEX = /index:\s+(\S+)/;
var DUP_KEY_FIELD_REGEX = /dup key:\s*\{\s*(\w+):/;
/**
 * Unique-index violation (MongoDB codes 11000/11001).
 *
 * Prefers the structured `keyPattern`/`keyValue` fields on the server
 * error; falls back to parsing the error message when they are absent.
 */
var ZodmonDuplicateKeyError = class extends ZodmonError {
  name = "ZodmonDuplicateKeyError";
  /** The first field that caused the duplicate key violation. */
  field;
  /** The duplicate value, or `undefined` if it could not be extracted. */
  value;
  /** The name of the index that was violated. */
  index;
  /** The key pattern of the violated index (e.g. `{ email: 1 }`). */
  keyPattern;
  /** The key values that caused the violation. */
  keyValue;
  constructor(collection2, cause) {
    const serverErr = cause;
    const kp = serverErr["keyPattern"];
    const kv = serverErr["keyValue"];
    let field = "unknown";
    let value;
    let keyPattern = {};
    let keyValue = {};
    if (kp && kv) {
      // Structured path: take the first key of the violated index.
      field = Object.keys(kp)[0] ?? "unknown";
      value = kv[field];
      keyPattern = kp;
      keyValue = kv;
    } else {
      // Fallback: scrape the field name out of the server message.
      field = cause.message.match(DUP_KEY_FIELD_REGEX)?.[1] ?? "unknown";
      if (field !== "unknown") {
        keyPattern = { [field]: 1 };
      }
    }
    const index2 = cause.message.match(INDEX_REGEX)?.[1] ?? "unknown";
    const valueStr = typeof value === "string" ? `"${value}"` : String(value);
    super(
      `Duplicate key in "${collection2}": ${field} = ${valueStr} (index: ${index2})`,
      collection2,
      { cause }
    );
    this.field = field;
    this.value = value;
    this.index = index2;
    this.keyPattern = keyPattern;
    this.keyValue = keyValue;
  }
};
|
|
231
|
+
|
|
232
|
+
// src/errors/index-error.ts
|
|
233
|
+
/**
 * Index creation/conflict error (MongoDB codes 67, 85, 86).
 */
var ZodmonIndexError = class extends ZodmonError {
  name = "ZodmonIndexError";
  /** The MongoDB error code (67, 85, or 86). */
  code;
  constructor(collection2, code, errmsg, cause) {
    let prefix;
    if (code === 67) {
      prefix = "Cannot create index";
    } else if (code === 85) {
      prefix = "Index options conflict";
    } else {
      prefix = "Index key specs conflict";
    }
    super(`${prefix} on "${collection2}": ${errmsg}`, collection2, { cause });
    this.code = code;
  }
};
|
|
243
|
+
|
|
244
|
+
// src/errors/network.ts
|
|
245
|
+
// src/errors/network.ts
/**
 * Connectivity failure while talking to the server.
 */
var ZodmonNetworkError = class extends ZodmonError {
  name = "ZodmonNetworkError";
  constructor(collection2, cause) {
    const message = `Network error on "${collection2}": ${cause.message}`;
    super(message, collection2, { cause });
  }
};
|
|
251
|
+
|
|
252
|
+
// src/errors/query.ts
|
|
253
|
+
// src/errors/query.ts
/**
 * Query-shape error (MongoDB codes 2, 9, 292).
 */
var ZodmonQueryError = class extends ZodmonError {
  name = "ZodmonQueryError";
  /** The MongoDB error code (2, 9, or 292). */
  code;
  constructor(collection2, code, errmsg, cause) {
    let message;
    if (code === 292) {
      message = `Query exceeded memory limit on "${collection2}": enable allowDiskUse for large sorts or aggregations`;
    } else if (code === 9) {
      message = `Failed to parse query on "${collection2}": ${errmsg}`;
    } else {
      message = `Bad value in query on "${collection2}": ${errmsg}`;
    }
    super(message, collection2, { cause });
    this.code = code;
  }
};
|
|
263
|
+
|
|
264
|
+
// src/errors/timeout.ts
|
|
265
|
+
// src/errors/timeout.ts
/**
 * Server-side time-limit exceeded (MongoDB codes 50 and 262).
 */
var ZodmonTimeoutError = class extends ZodmonError {
  name = "ZodmonTimeoutError";
  /** The MongoDB error code (50 or 262). */
  code;
  constructor(collection2, code, cause) {
    const message = `Operation timed out on "${collection2}": exceeded server time limit`;
    super(message, collection2, { cause });
    this.code = code;
  }
};
|
|
276
|
+
|
|
277
|
+
// src/errors/write-conflict.ts
|
|
278
|
+
// src/errors/write-conflict.ts
/**
 * Transactional write conflict (MongoDB code 112) — the operation raced
 * with another write and the caller should retry the transaction.
 */
var ZodmonWriteConflictError = class extends ZodmonError {
  name = "ZodmonWriteConflictError";
  constructor(collection2, cause) {
    const message = `Write conflict in "${collection2}": another operation modified this document concurrently \u2014 retry the transaction`;
    super(message, collection2, { cause });
  }
};
|
|
288
|
+
|
|
289
|
+
// src/errors/wrap.ts
|
|
290
|
+
/**
 * Convert a caught value into the matching typed zodmon error and throw it.
 *
 * Always throws — never returns. Intended to be called from a `catch`
 * block so callers get a message-enriched, typed error.
 *
 * Dispatch order matters: an existing ZodmonError is rethrown unchanged;
 * bulk-write errors are checked before the generic server-error branch
 * so per-operation details are preserved; server errors are mapped by
 * error code; anything else becomes a generic ZodmonError.
 *
 * @param err - The caught value (usually a mongodb driver error).
 * @param collection2 - Collection name used in the produced messages.
 * @throws {ZodmonError} Always; the concrete subclass depends on `err`.
 */
function wrapMongoError(err, collection2) {
  if (err instanceof ZodmonError) {
    throw err;
  }
  if (err instanceof MongoBulkWriteError) {
    throw new ZodmonBulkWriteError(collection2, err);
  }
  if (err instanceof MongoNetworkError) {
    throw new ZodmonNetworkError(collection2, err);
  }
  if (err instanceof MongoServerError) {
    switch (err.code) {
      case 11000: // duplicate key (previously written as the opaque `11e3`)
      case 11001:
        throw new ZodmonDuplicateKeyError(collection2, err);
      case 112:
        throw new ZodmonWriteConflictError(collection2, err);
      case 50:
      case 262:
        throw new ZodmonTimeoutError(collection2, err.code, err);
      case 13:
      case 18:
        throw new ZodmonAuthError(collection2, err.code, err);
      case 67:
      case 85:
      case 86:
        throw new ZodmonIndexError(collection2, err.code, err.message, err);
      case 2:
      case 9:
      case 292:
        throw new ZodmonQueryError(collection2, err.code, err.message, err);
      case 121:
        throw new ZodmonDocValidationError(
          collection2,
          err.errInfo,
          err
        );
      default:
        throw new ZodmonError(`MongoDB error on "${collection2}": ${err.message}`, collection2, {
          cause: err
        });
    }
  }
  if (err instanceof Error) {
    throw new ZodmonError(`Unexpected error on "${collection2}": ${err.message}`, collection2, {
      cause: err
    });
  }
  throw new ZodmonError(`Unexpected error on "${collection2}": ${String(err)}`, collection2);
}
|
|
340
|
+
|
|
341
|
+
// src/schema/ref.ts
|
|
342
|
+
import { z } from "zod";
|
|
343
|
+
// Module-private registry mapping a Zod schema object to its ref metadata
// ({ collection }). A WeakMap so entries die with the schema objects.
var refMetadata = /* @__PURE__ */ new WeakMap();
/**
 * Look up `.ref()` metadata previously attached to a schema.
 *
 * @param schema - Candidate schema; any non-object value yields `undefined`.
 * @returns The stored `{ collection }` record, or `undefined`.
 */
function getRefMetadata(schema) {
  const isObject = typeof schema === "object" && schema !== null;
  return isObject ? refMetadata.get(schema) : void 0;
}
|
|
348
|
+
// Guard symbol stamped on the patched prototype so the extension is
// installed at most once (Symbol.for so multiple bundle copies share it).
var REF_GUARD = /* @__PURE__ */ Symbol.for("zodmon_ref");
/**
 * Patch `z.ZodType.prototype` with a chainable `.ref(collection)` method
 * that records `{ collection }` metadata for the schema in the module's
 * WeakMap registry and returns the schema for chaining.
 *
 * Idempotent: if the guard symbol is already present, does nothing.
 */
function installRefExtension() {
  const proto = z.ZodType.prototype;
  if (REF_GUARD in proto) return;
  Object.defineProperty(proto, "ref", {
    value(collection2) {
      refMetadata.set(this, { collection: collection2 });
      return this;
    },
    // NOTE(review): `enumerable: true` is unusual for a prototype method —
    // it makes `ref` show up in for...in over schemas; confirm intentional.
    enumerable: true,
    configurable: true,
    writable: true
  });
  // Non-configurable, non-writable marker so the guard cannot be removed.
  Object.defineProperty(proto, REF_GUARD, {
    value: true,
    enumerable: false,
    configurable: false,
    writable: false
  });
}
|
|
368
|
+
|
|
369
|
+
// src/aggregate/pipeline.ts
|
|
370
|
+
// Immutable, typed builder over a MongoDB aggregation pipeline.
// Every stage method returns a NEW pipeline with the stage appended;
// execution happens only in toArray()/explain()/async iteration.
var AggregatePipeline = class _AggregatePipeline {
  // Collection definition (provides `.name` for error messages and
  // `.shape` for ref-based lookups).
  definition;
  // Underlying mongodb driver Collection the pipeline runs against.
  nativeCollection;
  // Accumulated stage documents, in order; treated as immutable.
  stages;
  constructor(definition, nativeCollection, stages) {
    this.definition = definition;
    this.nativeCollection = nativeCollection;
    this.stages = stages;
  }
|
|
379
|
+
/**
|
|
380
|
+
* Append an arbitrary aggregation stage to the pipeline (escape hatch).
|
|
381
|
+
*
|
|
382
|
+
* Returns a new pipeline instance with the stage appended — the
|
|
383
|
+
* original pipeline is not modified.
|
|
384
|
+
*
|
|
385
|
+
* Optionally accepts a type parameter `TNew` to change the output
|
|
386
|
+
* type when the stage transforms the document shape.
|
|
387
|
+
*
|
|
388
|
+
* @typeParam TNew - The output type after this stage. Defaults to the current output type.
|
|
389
|
+
* @param stage - A raw MongoDB aggregation stage document (e.g. `{ $match: { ... } }`).
|
|
390
|
+
* @returns A new pipeline with the stage appended.
|
|
391
|
+
*
|
|
392
|
+
* @example
|
|
393
|
+
* ```ts
|
|
394
|
+
* const admins = aggregate(users)
|
|
395
|
+
* .raw({ $match: { role: 'admin' } })
|
|
396
|
+
* .toArray()
|
|
397
|
+
* ```
|
|
398
|
+
*
|
|
399
|
+
* @example
|
|
400
|
+
* ```ts
|
|
401
|
+
* // Change output type with a $project stage
|
|
402
|
+
* const names = aggregate(users)
|
|
403
|
+
* .raw<{ name: string }>({ $project: { name: 1, _id: 0 } })
|
|
404
|
+
* .toArray()
|
|
405
|
+
* ```
|
|
406
|
+
*/
|
|
407
|
+
raw(stage) {
|
|
408
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
409
|
+
...this.stages,
|
|
410
|
+
stage
|
|
411
|
+
]);
|
|
412
|
+
}
|
|
413
|
+
/**
|
|
414
|
+
* Execute the pipeline and return all results as an array.
|
|
415
|
+
*
|
|
416
|
+
* @returns A promise resolving to the array of output documents.
|
|
417
|
+
*
|
|
418
|
+
* @example
|
|
419
|
+
* ```ts
|
|
420
|
+
* const results = await aggregate(users)
|
|
421
|
+
* .raw({ $match: { age: { $gte: 18 } } })
|
|
422
|
+
* .toArray()
|
|
423
|
+
* ```
|
|
424
|
+
*/
|
|
425
|
+
async toArray() {
|
|
426
|
+
try {
|
|
427
|
+
const cursor = this.nativeCollection.aggregate(this.stages);
|
|
428
|
+
return await cursor.toArray();
|
|
429
|
+
} catch (err) {
|
|
430
|
+
wrapMongoError(err, this.definition.name);
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
/**
|
|
434
|
+
* Stream pipeline results one document at a time via `for await...of`.
|
|
435
|
+
*
|
|
436
|
+
* @returns An async generator yielding output documents.
|
|
437
|
+
*
|
|
438
|
+
* @example
|
|
439
|
+
* ```ts
|
|
440
|
+
* for await (const user of aggregate(users).raw({ $match: { role: 'admin' } })) {
|
|
441
|
+
* console.log(user.name)
|
|
442
|
+
* }
|
|
443
|
+
* ```
|
|
444
|
+
*/
|
|
445
|
+
async *[Symbol.asyncIterator]() {
|
|
446
|
+
try {
|
|
447
|
+
const cursor = this.nativeCollection.aggregate(this.stages);
|
|
448
|
+
for await (const doc of cursor) {
|
|
449
|
+
yield doc;
|
|
450
|
+
}
|
|
451
|
+
} catch (err) {
|
|
452
|
+
wrapMongoError(err, this.definition.name);
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
/**
|
|
456
|
+
* Return the query execution plan without running the pipeline.
|
|
457
|
+
*
|
|
458
|
+
* Useful for debugging and understanding how MongoDB will process
|
|
459
|
+
* the pipeline stages.
|
|
460
|
+
*
|
|
461
|
+
* @returns A promise resolving to the explain output document.
|
|
462
|
+
*
|
|
463
|
+
* @example
|
|
464
|
+
* ```ts
|
|
465
|
+
* const plan = await aggregate(users)
|
|
466
|
+
* .raw({ $match: { role: 'admin' } })
|
|
467
|
+
* .explain()
|
|
468
|
+
* console.log(plan)
|
|
469
|
+
* ```
|
|
470
|
+
*/
|
|
471
|
+
async explain() {
|
|
472
|
+
try {
|
|
473
|
+
const cursor = this.nativeCollection.aggregate(this.stages);
|
|
474
|
+
return await cursor.explain();
|
|
475
|
+
} catch (err) {
|
|
476
|
+
wrapMongoError(err, this.definition.name);
|
|
477
|
+
}
|
|
478
|
+
}
|
|
479
|
+
// ── Shape-preserving stages ──────────────────────────────────────
|
|
480
|
+
/**
|
|
481
|
+
* Filter documents using a type-safe match expression.
|
|
482
|
+
*
|
|
483
|
+
* Appends a `$match` stage to the pipeline. The filter is constrained
|
|
484
|
+
* to the current output type, so only valid fields and operators are accepted.
|
|
485
|
+
*
|
|
486
|
+
* Supports two forms of type narrowing:
|
|
487
|
+
*
|
|
488
|
+
* **Tier 1 — Explicit type parameter:**
|
|
489
|
+
* ```ts
|
|
490
|
+
* .match<{ role: 'engineer' | 'designer' }>({ role: { $in: ['engineer', 'designer'] } })
|
|
491
|
+
* // role narrows to 'engineer' | 'designer'
|
|
492
|
+
* ```
|
|
493
|
+
*
|
|
494
|
+
* **Tier 2 — Automatic inference from filter literals:**
|
|
495
|
+
* ```ts
|
|
496
|
+
* .match({ role: 'engineer' }) // role narrows to 'engineer'
|
|
497
|
+
* .match({ role: { $ne: 'intern' } }) // role narrows to Exclude<Role, 'intern'>
|
|
498
|
+
* .match({ role: { $in: ['engineer', 'designer'] as const } }) // needs as const
|
|
499
|
+
* ```
|
|
500
|
+
*
|
|
501
|
+
* When no type parameter is provided and the filter doesn't contain
|
|
502
|
+
* inferrable literals, the output type is unchanged (backward compatible).
|
|
503
|
+
*
|
|
504
|
+
* @typeParam TNarrow - Optional object mapping field names to narrowed types. Must be a subtype of the corresponding fields in TOutput.
|
|
505
|
+
* @typeParam F - Inferred from the filter argument. Do not provide explicitly.
|
|
506
|
+
* @param filter - A type-safe filter for the current output type.
|
|
507
|
+
* @returns A new pipeline with the `$match` stage appended and output type narrowed.
|
|
508
|
+
*
|
|
509
|
+
* @example
|
|
510
|
+
* ```ts
|
|
511
|
+
* // Explicit narrowing
|
|
512
|
+
* const filtered = await users.aggregate()
|
|
513
|
+
* .match<{ role: 'engineer' }>({ role: 'engineer' })
|
|
514
|
+
* .toArray()
|
|
515
|
+
* // filtered[0].role → 'engineer'
|
|
516
|
+
*
|
|
517
|
+
* // Automatic narrowing with $in (requires as const)
|
|
518
|
+
* const subset = await users.aggregate()
|
|
519
|
+
* .match({ role: { $in: ['engineer', 'designer'] as const } })
|
|
520
|
+
* .toArray()
|
|
521
|
+
* // subset[0].role → 'engineer' | 'designer'
|
|
522
|
+
* ```
|
|
523
|
+
*/
|
|
524
|
+
match(filter) {
|
|
525
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
526
|
+
...this.stages,
|
|
527
|
+
{ $match: filter }
|
|
528
|
+
]);
|
|
529
|
+
return pipeline;
|
|
530
|
+
}
|
|
531
|
+
/**
|
|
532
|
+
* Sort documents by one or more fields.
|
|
533
|
+
*
|
|
534
|
+
* Appends a `$sort` stage. Keys are constrained to `keyof TOutput & string`
|
|
535
|
+
* and values must be `1` (ascending) or `-1` (descending).
|
|
536
|
+
*
|
|
537
|
+
* @param spec - A sort specification mapping field names to sort direction.
|
|
538
|
+
* @returns A new pipeline with the `$sort` stage appended.
|
|
539
|
+
*
|
|
540
|
+
* @example
|
|
541
|
+
* ```ts
|
|
542
|
+
* const sorted = await aggregate(users)
|
|
543
|
+
* .sort({ age: -1, name: 1 })
|
|
544
|
+
* .toArray()
|
|
545
|
+
* ```
|
|
546
|
+
*/
|
|
547
|
+
sort(spec) {
|
|
548
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
549
|
+
...this.stages,
|
|
550
|
+
{ $sort: spec }
|
|
551
|
+
]);
|
|
552
|
+
}
|
|
553
|
+
/**
|
|
554
|
+
* Skip a number of documents in the pipeline.
|
|
555
|
+
*
|
|
556
|
+
* Appends a `$skip` stage. Commonly used with {@link limit} for pagination.
|
|
557
|
+
*
|
|
558
|
+
* @param n - The number of documents to skip.
|
|
559
|
+
* @returns A new pipeline with the `$skip` stage appended.
|
|
560
|
+
*
|
|
561
|
+
* @example
|
|
562
|
+
* ```ts
|
|
563
|
+
* // Page 2 (10 items per page)
|
|
564
|
+
* const page2 = await aggregate(users)
|
|
565
|
+
* .sort({ name: 1 })
|
|
566
|
+
* .skip(10)
|
|
567
|
+
* .limit(10)
|
|
568
|
+
* .toArray()
|
|
569
|
+
* ```
|
|
570
|
+
*/
|
|
571
|
+
skip(n) {
|
|
572
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
573
|
+
...this.stages,
|
|
574
|
+
{ $skip: n }
|
|
575
|
+
]);
|
|
576
|
+
}
|
|
577
|
+
/**
|
|
578
|
+
* Limit the number of documents passing through the pipeline.
|
|
579
|
+
*
|
|
580
|
+
* Appends a `$limit` stage. Commonly used with {@link skip} for pagination,
|
|
581
|
+
* or after {@link sort} to get top/bottom N results.
|
|
582
|
+
*
|
|
583
|
+
* @param n - The maximum number of documents to pass through.
|
|
584
|
+
* @returns A new pipeline with the `$limit` stage appended.
|
|
585
|
+
*
|
|
586
|
+
* @example
|
|
587
|
+
* ```ts
|
|
588
|
+
* const top5 = await aggregate(users)
|
|
589
|
+
* .sort({ score: -1 })
|
|
590
|
+
* .limit(5)
|
|
591
|
+
* .toArray()
|
|
592
|
+
* ```
|
|
593
|
+
*/
|
|
594
|
+
limit(n) {
|
|
595
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
596
|
+
...this.stages,
|
|
597
|
+
{ $limit: n }
|
|
598
|
+
]);
|
|
599
|
+
}
|
|
600
|
+
// ── Shape-transforming projection stages ─────────────────────────
|
|
601
|
+
/**
|
|
602
|
+
* Include only specified fields in the output.
|
|
603
|
+
*
|
|
604
|
+
* Appends a `$project` stage with inclusion (`1`) for each key.
|
|
605
|
+
* The `_id` field is always included. The output type narrows to
|
|
606
|
+
* `Pick<TOutput, K | '_id'>`.
|
|
607
|
+
*
|
|
608
|
+
* @param spec - An object mapping field names to `1` for inclusion.
|
|
609
|
+
* @returns A new pipeline with the `$project` stage appended.
|
|
610
|
+
*
|
|
611
|
+
* @example
|
|
612
|
+
* ```ts
|
|
613
|
+
* const namesOnly = await aggregate(users)
|
|
614
|
+
* .project({ name: 1 })
|
|
615
|
+
* .toArray()
|
|
616
|
+
* // [{ _id: ..., name: 'Ada' }, ...]
|
|
617
|
+
* ```
|
|
618
|
+
*/
|
|
619
|
+
project(spec) {
|
|
620
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
621
|
+
...this.stages,
|
|
622
|
+
{ $project: spec }
|
|
623
|
+
]);
|
|
624
|
+
return pipeline;
|
|
625
|
+
}
|
|
626
|
+
/**
|
|
627
|
+
* Variadic shorthand for {@link project} — pick fields to include.
|
|
628
|
+
*
|
|
629
|
+
* Generates a `$project` stage that includes only the listed fields
|
|
630
|
+
* (plus `_id`). Equivalent to `.project({ field1: 1, field2: 1 })`.
|
|
631
|
+
*
|
|
632
|
+
* @param fields - Field names to include in the output.
|
|
633
|
+
* @returns A new pipeline with the `$project` stage appended.
|
|
634
|
+
*
|
|
635
|
+
* @example
|
|
636
|
+
* ```ts
|
|
637
|
+
* const namesAndRoles = await aggregate(users)
|
|
638
|
+
* .pick('name', 'role')
|
|
639
|
+
* .toArray()
|
|
640
|
+
* ```
|
|
641
|
+
*/
|
|
642
|
+
pick(...fields) {
|
|
643
|
+
const spec = Object.fromEntries(fields.map((f) => [f, 1]));
|
|
644
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
645
|
+
...this.stages,
|
|
646
|
+
{ $project: spec }
|
|
647
|
+
]);
|
|
648
|
+
return pipeline;
|
|
649
|
+
}
|
|
650
|
+
/**
|
|
651
|
+
* Exclude specified fields from the output.
|
|
652
|
+
*
|
|
653
|
+
* Appends a `$project` stage with exclusion (`0`) for each key.
|
|
654
|
+
* All other fields pass through. The output type becomes `Omit<TOutput, K>`.
|
|
655
|
+
*
|
|
656
|
+
* @param fields - Field names to exclude from the output.
|
|
657
|
+
* @returns A new pipeline with the `$project` stage appended.
|
|
658
|
+
*
|
|
659
|
+
* @example
|
|
660
|
+
* ```ts
|
|
661
|
+
* const noAge = await aggregate(users)
|
|
662
|
+
* .omit('age')
|
|
663
|
+
* .toArray()
|
|
664
|
+
* ```
|
|
665
|
+
*/
|
|
666
|
+
omit(...fields) {
|
|
667
|
+
const spec = Object.fromEntries(fields.map((f) => [f, 0]));
|
|
668
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
669
|
+
...this.stages,
|
|
670
|
+
{ $project: spec }
|
|
671
|
+
]);
|
|
672
|
+
return pipeline;
|
|
673
|
+
}
|
|
674
|
+
groupBy(field, accumulators) {
|
|
675
|
+
const resolved = typeof accumulators === "function" ? accumulators(createAccumulatorBuilder()) : accumulators;
|
|
676
|
+
const _id = Array.isArray(field) ? Object.fromEntries(field.map((f) => [f, `$${f}`])) : `$${field}`;
|
|
677
|
+
const accumExprs = Object.fromEntries(
|
|
678
|
+
Object.entries(resolved).map(([key, acc]) => [key, acc.expr])
|
|
679
|
+
);
|
|
680
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
681
|
+
...this.stages,
|
|
682
|
+
{ $group: { _id, ...accumExprs } }
|
|
683
|
+
]);
|
|
684
|
+
return pipeline;
|
|
685
|
+
}
|
|
686
|
+
// Implementation
|
|
687
|
+
addFields(fields) {
|
|
688
|
+
const resolved = typeof fields === "function" ? fields(createExpressionBuilder()) : fields;
|
|
689
|
+
const stage = Object.fromEntries(
|
|
690
|
+
Object.entries(resolved).map(([k, v]) => [
|
|
691
|
+
k,
|
|
692
|
+
v && typeof v === "object" && "__expr" in v ? v.value : v
|
|
693
|
+
])
|
|
694
|
+
);
|
|
695
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
696
|
+
...this.stages,
|
|
697
|
+
{ $addFields: stage }
|
|
698
|
+
]);
|
|
699
|
+
return pipeline;
|
|
700
|
+
}
|
|
701
|
+
// ── unwind stage ─────────────────────────────────────────────────
|
|
702
|
+
/**
|
|
703
|
+
* Deconstruct an array field, outputting one document per array element.
|
|
704
|
+
*
|
|
705
|
+
* Appends an `$unwind` stage. The unwound field's type changes from
|
|
706
|
+
* `T[]` to `T` in the output type. Documents with empty or missing
|
|
707
|
+
* arrays are dropped unless `preserveEmpty` is `true`.
|
|
708
|
+
*
|
|
709
|
+
* @param field - The name of the array field to unwind.
|
|
710
|
+
* @param options - Optional settings for the unwind stage.
|
|
711
|
+
* @param options.preserveEmpty - If `true`, documents with null, missing, or empty arrays are preserved.
|
|
712
|
+
* @returns A new pipeline with the `$unwind` stage appended.
|
|
713
|
+
*
|
|
714
|
+
* @example
|
|
715
|
+
* ```ts
|
|
716
|
+
* const flat = await aggregate(orders)
|
|
717
|
+
* .unwind('items')
|
|
718
|
+
* .toArray()
|
|
719
|
+
* // Each result has a single `items` value instead of an array
|
|
720
|
+
* ```
|
|
721
|
+
*/
|
|
722
|
+
unwind(field, options) {
|
|
723
|
+
const stage = options?.preserveEmpty ? { $unwind: { path: `$${field}`, preserveNullAndEmptyArrays: true } } : { $unwind: `$${field}` };
|
|
724
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
725
|
+
...this.stages,
|
|
726
|
+
stage
|
|
727
|
+
]);
|
|
728
|
+
return pipeline;
|
|
729
|
+
}
|
|
730
|
+
/**
 * Append a `$lookup` stage (optionally followed by `$unwind`).
 *
 * Two call forms:
 * - `lookup(CollectionDef, { on, as?, unwind? })` — reverse lookup: joins
 *   documents from the foreign collection whose `on` field references
 *   this collection's `_id`. `on` is required.
 * - `lookup(fieldName, { as?, unwind? })` — forward lookup: the local
 *   field must carry `.ref()` metadata naming the target collection;
 *   joins on local field → foreign `_id`.
 *
 * In both forms `as` defaults to the foreign collection's name, and
 * `unwind: true` appends a preserving `$unwind` of the joined array.
 *
 * @throws {Error} If the reverse form lacks `on`, or the forward form's
 *   field has no `.ref()` metadata.
 * @returns A new pipeline with the stage(s) appended.
 */
lookup(fieldOrFrom, options) {
  const stages = [...this.stages];
  if (typeof fieldOrFrom === "object") {
    // Reverse lookup: first argument is a collection definition.
    const foreignName = fieldOrFrom.name;
    const foreignField = options?.on;
    if (!foreignField) {
      throw new Error(
        `[zodmon] lookup: reverse lookup on '${foreignName}' requires an 'on' option specifying which field on the foreign collection references this collection.`
      );
    }
    const asField = options?.as ?? foreignName;
    stages.push({
      $lookup: {
        from: foreignName,
        localField: "_id",
        foreignField,
        as: asField
      }
    });
    if (options?.unwind) {
      stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
    }
  } else {
    // Forward lookup: resolve the target collection from .ref() metadata
    // attached to the local field's schema.
    const shape = this.definition.shape;
    const fieldSchema = shape[fieldOrFrom];
    const ref = getRefMetadata(fieldSchema);
    if (!ref) {
      throw new Error(
        `[zodmon] lookup: field '${fieldOrFrom}' has no .ref() metadata. Use .lookup(CollectionDef, { on: foreignKey }) for reverse lookups, or add .ref(TargetCollection) to the field schema.`
      );
    }
    const targetName = ref.collection.name;
    const asField = options?.as ?? targetName;
    stages.push({
      $lookup: {
        from: targetName,
        localField: fieldOrFrom,
        foreignField: "_id",
        as: asField
      }
    });
    if (options?.unwind) {
      stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
    }
  }
  const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, stages);
  return pipeline;
}
|
|
778
|
+
// ── Convenience shortcuts ────────────────────────────────────────
|
|
779
|
+
/**
|
|
780
|
+
* Count documents per group, sorted by count descending.
|
|
781
|
+
*
|
|
782
|
+
* Shorthand for `.groupBy(field, { count: $count() }).sort({ count: -1 })`.
|
|
783
|
+
*
|
|
784
|
+
* @param field - The field to group and count by.
|
|
785
|
+
* @returns A new pipeline producing `{ _id: TOutput[K], count: number }` results.
|
|
786
|
+
*
|
|
787
|
+
* @example
|
|
788
|
+
* ```ts
|
|
789
|
+
* const roleCounts = await aggregate(users)
|
|
790
|
+
* .countBy('role')
|
|
791
|
+
* .toArray()
|
|
792
|
+
* // [{ _id: 'user', count: 3 }, { _id: 'admin', count: 2 }]
|
|
793
|
+
* ```
|
|
794
|
+
*/
|
|
795
|
+
countBy(field) {
|
|
796
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
797
|
+
...this.stages,
|
|
798
|
+
{ $group: { _id: `$${field}`, count: { $sum: 1 } } },
|
|
799
|
+
{ $sort: { count: -1 } }
|
|
800
|
+
]);
|
|
801
|
+
return pipeline;
|
|
802
|
+
}
|
|
803
|
+
/**
|
|
804
|
+
* Sum a numeric field per group, sorted by total descending.
|
|
805
|
+
*
|
|
806
|
+
* Shorthand for `.groupBy(field, { total: $sum('$sumField') }).sort({ total: -1 })`.
|
|
807
|
+
*
|
|
808
|
+
* @param field - The field to group by.
|
|
809
|
+
* @param sumField - The numeric field to sum.
|
|
810
|
+
* @returns A new pipeline producing `{ _id: TOutput[K], total: number }` results.
|
|
811
|
+
*
|
|
812
|
+
* @example
|
|
813
|
+
* ```ts
|
|
814
|
+
* const revenueByCategory = await aggregate(orders)
|
|
815
|
+
* .sumBy('category', 'amount')
|
|
816
|
+
* .toArray()
|
|
817
|
+
* // [{ _id: 'electronics', total: 5000 }, ...]
|
|
818
|
+
* ```
|
|
819
|
+
*/
|
|
820
|
+
sumBy(field, sumField) {
|
|
821
|
+
const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
822
|
+
...this.stages,
|
|
823
|
+
{ $group: { _id: `$${field}`, total: { $sum: `$${sumField}` } } },
|
|
824
|
+
{ $sort: { total: -1 } }
|
|
825
|
+
]);
|
|
826
|
+
return pipeline;
|
|
827
|
+
}
|
|
828
|
+
/**
|
|
829
|
+
* Sort by a single field with a friendly direction name.
|
|
830
|
+
*
|
|
831
|
+
* Shorthand for `.sort({ [field]: direction === 'desc' ? -1 : 1 })`.
|
|
832
|
+
*
|
|
833
|
+
* @param field - The field to sort by.
|
|
834
|
+
* @param direction - Sort direction: `'asc'` (default) or `'desc'`.
|
|
835
|
+
* @returns A new pipeline with the `$sort` stage appended.
|
|
836
|
+
*
|
|
837
|
+
* @example
|
|
838
|
+
* ```ts
|
|
839
|
+
* const youngest = await aggregate(users)
|
|
840
|
+
* .sortBy('age')
|
|
841
|
+
* .toArray()
|
|
842
|
+
* ```
|
|
843
|
+
*/
|
|
844
|
+
sortBy(field, direction = "asc") {
|
|
845
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
846
|
+
...this.stages,
|
|
847
|
+
{ $sort: { [field]: direction === "desc" ? -1 : 1 } }
|
|
848
|
+
]);
|
|
849
|
+
}
|
|
850
|
+
/**
|
|
851
|
+
* Return the top N documents sorted by a field descending.
|
|
852
|
+
*
|
|
853
|
+
* Shorthand for `.sort({ [by]: -1 }).limit(n)`.
|
|
854
|
+
*
|
|
855
|
+
* @param n - The number of documents to return.
|
|
856
|
+
* @param options - An object with a `by` field specifying the sort key.
|
|
857
|
+
* @returns A new pipeline with `$sort` and `$limit` stages appended.
|
|
858
|
+
*
|
|
859
|
+
* @example
|
|
860
|
+
* ```ts
|
|
861
|
+
* const top3 = await aggregate(users)
|
|
862
|
+
* .top(3, { by: 'score' })
|
|
863
|
+
* .toArray()
|
|
864
|
+
* ```
|
|
865
|
+
*/
|
|
866
|
+
top(n, options) {
|
|
867
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
868
|
+
...this.stages,
|
|
869
|
+
{ $sort: { [options.by]: -1 } },
|
|
870
|
+
{ $limit: n }
|
|
871
|
+
]);
|
|
872
|
+
}
|
|
873
|
+
/**
|
|
874
|
+
* Return the bottom N documents sorted by a field ascending.
|
|
875
|
+
*
|
|
876
|
+
* Shorthand for `.sort({ [by]: 1 }).limit(n)`.
|
|
877
|
+
*
|
|
878
|
+
* @param n - The number of documents to return.
|
|
879
|
+
* @param options - An object with a `by` field specifying the sort key.
|
|
880
|
+
* @returns A new pipeline with `$sort` and `$limit` stages appended.
|
|
881
|
+
*
|
|
882
|
+
* @example
|
|
883
|
+
* ```ts
|
|
884
|
+
* const bottom3 = await aggregate(users)
|
|
885
|
+
* .bottom(3, { by: 'score' })
|
|
886
|
+
* .toArray()
|
|
887
|
+
* ```
|
|
888
|
+
*/
|
|
889
|
+
bottom(n, options) {
|
|
890
|
+
return new _AggregatePipeline(this.definition, this.nativeCollection, [
|
|
891
|
+
...this.stages,
|
|
892
|
+
{ $sort: { [options.by]: 1 } },
|
|
893
|
+
{ $limit: n }
|
|
894
|
+
]);
|
|
895
|
+
}
|
|
896
|
+
};
|
|
897
|
+
function aggregate(handle) {
|
|
898
|
+
return new AggregatePipeline(handle.definition, handle.native, []);
|
|
899
|
+
}
|
|
900
|
+
|
|
1
901
|
// src/client/client.ts
|
|
2
902
|
import { MongoClient } from "mongodb";
|
|
3
903
|
|
|
@@ -71,7 +971,7 @@ async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrpha
|
|
|
71
971
|
const serialized = serializeIndexKey(spec.key);
|
|
72
972
|
const existing = existingByKey.get(serialized);
|
|
73
973
|
if (!existing) {
|
|
74
|
-
if (!dryRun) await native
|
|
974
|
+
if (!dryRun) await safeCreateIndex(native, spec.key, spec.options);
|
|
75
975
|
acc.created.push(resolveSpecName(spec));
|
|
76
976
|
return;
|
|
77
977
|
}
|
|
@@ -84,8 +984,8 @@ async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrpha
|
|
|
84
984
|
}
|
|
85
985
|
if (dropOrphaned) {
|
|
86
986
|
if (!dryRun) {
|
|
87
|
-
await native
|
|
88
|
-
await native
|
|
987
|
+
await safeDropIndex(native, existingName);
|
|
988
|
+
await safeCreateIndex(native, spec.key, spec.options);
|
|
89
989
|
}
|
|
90
990
|
acc.dropped.push(existingName);
|
|
91
991
|
acc.created.push(resolveSpecName(spec));
|
|
@@ -98,6 +998,20 @@ async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrpha
|
|
|
98
998
|
desired: spec.options
|
|
99
999
|
});
|
|
100
1000
|
}
|
|
1001
|
+
async function safeCreateIndex(native, key, options) {
|
|
1002
|
+
try {
|
|
1003
|
+
await native.createIndex(key, options);
|
|
1004
|
+
} catch (err) {
|
|
1005
|
+
wrapMongoError(err, native.collectionName);
|
|
1006
|
+
}
|
|
1007
|
+
}
|
|
1008
|
+
async function safeDropIndex(native, name) {
|
|
1009
|
+
try {
|
|
1010
|
+
await native.dropIndex(name);
|
|
1011
|
+
} catch (err) {
|
|
1012
|
+
wrapMongoError(err, native.collectionName);
|
|
1013
|
+
}
|
|
1014
|
+
}
|
|
101
1015
|
async function processOrphanedIndexes(existingIndexes, desiredKeys, matchedKeys, native, dryRun, dropOrphaned, dropped) {
|
|
102
1016
|
for (const idx of existingIndexes) {
|
|
103
1017
|
const rawName = idx["name"];
|
|
@@ -106,7 +1020,7 @@ async function processOrphanedIndexes(existingIndexes, desiredKeys, matchedKeys,
|
|
|
106
1020
|
const serialized = serializeIndexKey(idx["key"]);
|
|
107
1021
|
if (matchedKeys.has(serialized) || desiredKeys.has(serialized)) continue;
|
|
108
1022
|
if (dropOrphaned) {
|
|
109
|
-
if (!dryRun) await native
|
|
1023
|
+
if (!dryRun) await safeDropIndex(native, name);
|
|
110
1024
|
dropped.push(name);
|
|
111
1025
|
}
|
|
112
1026
|
}
|
|
@@ -118,7 +1032,7 @@ async function listIndexesSafe(native) {
|
|
|
118
1032
|
if (err instanceof Error && err.message.includes("ns does not exist")) {
|
|
119
1033
|
return [];
|
|
120
1034
|
}
|
|
121
|
-
|
|
1035
|
+
wrapMongoError(err, native.collectionName);
|
|
122
1036
|
}
|
|
123
1037
|
}
|
|
124
1038
|
async function syncIndexes(handle, options) {
|
|
@@ -164,39 +1078,52 @@ async function syncIndexes(handle, options) {
|
|
|
164
1078
|
}
|
|
165
1079
|
|
|
166
1080
|
// src/crud/delete.ts
|
|
167
|
-
import { z } from "zod";
|
|
1081
|
+
import { z as z2 } from "zod";
|
|
168
1082
|
|
|
169
1083
|
// src/errors/validation.ts
|
|
170
|
-
var ZodmonValidationError = class extends
|
|
1084
|
+
var ZodmonValidationError = class extends ZodmonError {
|
|
171
1085
|
name = "ZodmonValidationError";
|
|
172
|
-
/** The MongoDB collection name where the validation failed. */
|
|
173
|
-
collection;
|
|
174
1086
|
/** The original Zod validation error with detailed issue information. */
|
|
175
1087
|
zodError;
|
|
176
|
-
|
|
1088
|
+
/** The document that failed validation. */
|
|
1089
|
+
document;
|
|
1090
|
+
constructor(collection2, zodError, document) {
|
|
177
1091
|
const fields = zodError.issues.map((issue) => {
|
|
178
1092
|
const path = issue.path.join(".") || "(root)";
|
|
179
1093
|
return `${path} (${issue.message})`;
|
|
180
1094
|
}).join(", ");
|
|
181
|
-
super(`Validation failed for "${collection2}": ${fields}
|
|
182
|
-
this.collection = collection2;
|
|
1095
|
+
super(`Validation failed for "${collection2}": ${fields}`, collection2, { cause: zodError });
|
|
183
1096
|
this.zodError = zodError;
|
|
1097
|
+
this.document = document;
|
|
184
1098
|
}
|
|
185
1099
|
};
|
|
186
1100
|
|
|
187
1101
|
// src/crud/delete.ts
|
|
188
1102
|
async function deleteOne(handle, filter) {
|
|
189
|
-
|
|
1103
|
+
try {
|
|
1104
|
+
return await handle.native.deleteOne(filter);
|
|
1105
|
+
} catch (err) {
|
|
1106
|
+
wrapMongoError(err, handle.definition.name);
|
|
1107
|
+
}
|
|
190
1108
|
}
|
|
191
1109
|
async function deleteMany(handle, filter) {
|
|
192
|
-
|
|
1110
|
+
try {
|
|
1111
|
+
return await handle.native.deleteMany(filter);
|
|
1112
|
+
} catch (err) {
|
|
1113
|
+
wrapMongoError(err, handle.definition.name);
|
|
1114
|
+
}
|
|
193
1115
|
}
|
|
194
1116
|
async function findOneAndDelete(handle, filter, options) {
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
1117
|
+
let result;
|
|
1118
|
+
try {
|
|
1119
|
+
result = await handle.native.findOneAndDelete(
|
|
1120
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1121
|
+
filter,
|
|
1122
|
+
{ includeResultMetadata: false }
|
|
1123
|
+
);
|
|
1124
|
+
} catch (err) {
|
|
1125
|
+
wrapMongoError(err, handle.definition.name);
|
|
1126
|
+
}
|
|
200
1127
|
if (!result) return null;
|
|
201
1128
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
202
1129
|
if (mode === false || mode === "passthrough") {
|
|
@@ -205,24 +1132,24 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
205
1132
|
try {
|
|
206
1133
|
return handle.definition.schema.parse(result);
|
|
207
1134
|
} catch (err) {
|
|
208
|
-
if (err instanceof
|
|
209
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1135
|
+
if (err instanceof z2.ZodError) {
|
|
1136
|
+
throw new ZodmonValidationError(handle.definition.name, err, result);
|
|
210
1137
|
}
|
|
211
1138
|
throw err;
|
|
212
1139
|
}
|
|
213
1140
|
}
|
|
214
1141
|
|
|
215
1142
|
// src/crud/find.ts
|
|
216
|
-
import { z as
|
|
1143
|
+
import { z as z4 } from "zod";
|
|
217
1144
|
|
|
218
1145
|
// src/errors/not-found.ts
|
|
219
|
-
var ZodmonNotFoundError = class extends
|
|
1146
|
+
var ZodmonNotFoundError = class extends ZodmonError {
|
|
220
1147
|
name = "ZodmonNotFoundError";
|
|
221
|
-
/** The
|
|
222
|
-
|
|
223
|
-
constructor(collection2) {
|
|
224
|
-
super(`Document not found in "${collection2}"
|
|
225
|
-
this.
|
|
1148
|
+
/** The filter that produced no results. */
|
|
1149
|
+
filter;
|
|
1150
|
+
constructor(collection2, filter) {
|
|
1151
|
+
super(`Document not found in "${collection2}"`, collection2);
|
|
1152
|
+
this.filter = filter;
|
|
226
1153
|
}
|
|
227
1154
|
};
|
|
228
1155
|
|
|
@@ -250,7 +1177,7 @@ function checkUnindexedFields(definition, filter) {
|
|
|
250
1177
|
}
|
|
251
1178
|
|
|
252
1179
|
// src/query/cursor.ts
|
|
253
|
-
import { z as
|
|
1180
|
+
import { z as z3 } from "zod";
|
|
254
1181
|
|
|
255
1182
|
// src/crud/paginate.ts
|
|
256
1183
|
import { ObjectId } from "mongodb";
|
|
@@ -423,10 +1350,17 @@ var TypedFindCursor = class {
|
|
|
423
1350
|
}
|
|
424
1351
|
/** @internal Offset pagination implementation. */
|
|
425
1352
|
async offsetPaginate(_sortKeys, sort, opts) {
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
1353
|
+
let total;
|
|
1354
|
+
let raw2;
|
|
1355
|
+
try {
|
|
1356
|
+
;
|
|
1357
|
+
[total, raw2] = await Promise.all([
|
|
1358
|
+
this.nativeCollection.countDocuments(this.filter),
|
|
1359
|
+
this.nativeCollection.find(this.filter).sort(sort).skip((opts.page - 1) * opts.perPage).limit(opts.perPage).toArray()
|
|
1360
|
+
]);
|
|
1361
|
+
} catch (err) {
|
|
1362
|
+
wrapMongoError(err, this.collectionName);
|
|
1363
|
+
}
|
|
430
1364
|
const docs = raw2.map((doc) => this.validateDoc(doc));
|
|
431
1365
|
const totalPages = Math.ceil(total / opts.perPage);
|
|
432
1366
|
return {
|
|
@@ -450,7 +1384,12 @@ var TypedFindCursor = class {
|
|
|
450
1384
|
combinedFilter = this.filter && Object.keys(this.filter).length > 0 ? { $and: [this.filter, cursorFilter] } : cursorFilter;
|
|
451
1385
|
}
|
|
452
1386
|
const effectiveSort = isBackward ? Object.fromEntries(sortKeys2.map(([f, d]) => [f, d === 1 ? -1 : 1])) : sort;
|
|
453
|
-
|
|
1387
|
+
let raw2;
|
|
1388
|
+
try {
|
|
1389
|
+
raw2 = await this.nativeCollection.find(combinedFilter).sort(effectiveSort).limit(opts.limit + 1).toArray();
|
|
1390
|
+
} catch (err) {
|
|
1391
|
+
wrapMongoError(err, this.collectionName);
|
|
1392
|
+
}
|
|
454
1393
|
const hasMore = raw2.length > opts.limit;
|
|
455
1394
|
if (hasMore) raw2.pop();
|
|
456
1395
|
if (isBackward) raw2.reverse();
|
|
@@ -478,7 +1417,12 @@ var TypedFindCursor = class {
|
|
|
478
1417
|
* ```
|
|
479
1418
|
*/
|
|
480
1419
|
async toArray() {
|
|
481
|
-
|
|
1420
|
+
let raw2;
|
|
1421
|
+
try {
|
|
1422
|
+
raw2 = await this.cursor.toArray();
|
|
1423
|
+
} catch (err) {
|
|
1424
|
+
wrapMongoError(err, this.collectionName);
|
|
1425
|
+
}
|
|
482
1426
|
return raw2.map((doc) => this.validateDoc(doc));
|
|
483
1427
|
}
|
|
484
1428
|
/**
|
|
@@ -498,8 +1442,12 @@ var TypedFindCursor = class {
|
|
|
498
1442
|
* ```
|
|
499
1443
|
*/
|
|
500
1444
|
async *[Symbol.asyncIterator]() {
|
|
501
|
-
|
|
502
|
-
|
|
1445
|
+
try {
|
|
1446
|
+
for await (const doc of this.cursor) {
|
|
1447
|
+
yield this.validateDoc(doc);
|
|
1448
|
+
}
|
|
1449
|
+
} catch (err) {
|
|
1450
|
+
wrapMongoError(err, this.collectionName);
|
|
503
1451
|
}
|
|
504
1452
|
}
|
|
505
1453
|
/** @internal Validate a single raw document against the schema. */
|
|
@@ -510,8 +1458,8 @@ var TypedFindCursor = class {
|
|
|
510
1458
|
try {
|
|
511
1459
|
return this.schema.parse(raw2);
|
|
512
1460
|
} catch (err) {
|
|
513
|
-
if (err instanceof
|
|
514
|
-
throw new ZodmonValidationError(this.collectionName, err);
|
|
1461
|
+
if (err instanceof z3.ZodError) {
|
|
1462
|
+
throw new ZodmonValidationError(this.collectionName, err, raw2);
|
|
515
1463
|
}
|
|
516
1464
|
throw err;
|
|
517
1465
|
}
|
|
@@ -522,7 +1470,12 @@ var TypedFindCursor = class {
|
|
|
522
1470
|
async function findOne(handle, filter, options) {
|
|
523
1471
|
checkUnindexedFields(handle.definition, filter);
|
|
524
1472
|
const findOptions = options?.project ? { projection: options.project } : void 0;
|
|
525
|
-
|
|
1473
|
+
let raw2;
|
|
1474
|
+
try {
|
|
1475
|
+
raw2 = await handle.native.findOne(filter, findOptions);
|
|
1476
|
+
} catch (err) {
|
|
1477
|
+
wrapMongoError(err, handle.definition.name);
|
|
1478
|
+
}
|
|
526
1479
|
if (!raw2) return null;
|
|
527
1480
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
528
1481
|
if (mode === false || mode === "passthrough") {
|
|
@@ -531,8 +1484,8 @@ async function findOne(handle, filter, options) {
|
|
|
531
1484
|
try {
|
|
532
1485
|
return handle.definition.schema.parse(raw2);
|
|
533
1486
|
} catch (err) {
|
|
534
|
-
if (err instanceof
|
|
535
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1487
|
+
if (err instanceof z4.ZodError) {
|
|
1488
|
+
throw new ZodmonValidationError(handle.definition.name, err, raw2);
|
|
536
1489
|
}
|
|
537
1490
|
throw err;
|
|
538
1491
|
}
|
|
@@ -540,7 +1493,7 @@ async function findOne(handle, filter, options) {
|
|
|
540
1493
|
async function findOneOrThrow(handle, filter, options) {
|
|
541
1494
|
const doc = await findOne(handle, filter, options);
|
|
542
1495
|
if (!doc) {
|
|
543
|
-
throw new ZodmonNotFoundError(handle.definition.name);
|
|
1496
|
+
throw new ZodmonNotFoundError(handle.definition.name, filter);
|
|
544
1497
|
}
|
|
545
1498
|
return doc;
|
|
546
1499
|
}
|
|
@@ -553,18 +1506,22 @@ function find(handle, filter, options) {
|
|
|
553
1506
|
}
|
|
554
1507
|
|
|
555
1508
|
// src/crud/insert.ts
|
|
556
|
-
import { z as
|
|
1509
|
+
import { z as z5 } from "zod";
|
|
557
1510
|
async function insertOne(handle, doc) {
|
|
558
1511
|
let parsed;
|
|
559
1512
|
try {
|
|
560
1513
|
parsed = handle.definition.schema.parse(doc);
|
|
561
1514
|
} catch (err) {
|
|
562
|
-
if (err instanceof
|
|
563
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1515
|
+
if (err instanceof z5.ZodError) {
|
|
1516
|
+
throw new ZodmonValidationError(handle.definition.name, err, doc);
|
|
564
1517
|
}
|
|
565
1518
|
throw err;
|
|
566
1519
|
}
|
|
567
|
-
|
|
1520
|
+
try {
|
|
1521
|
+
await handle.native.insertOne(parsed);
|
|
1522
|
+
} catch (err) {
|
|
1523
|
+
wrapMongoError(err, handle.definition.name);
|
|
1524
|
+
}
|
|
568
1525
|
return parsed;
|
|
569
1526
|
}
|
|
570
1527
|
async function insertMany(handle, docs) {
|
|
@@ -574,23 +1531,35 @@ async function insertMany(handle, docs) {
|
|
|
574
1531
|
try {
|
|
575
1532
|
parsed.push(handle.definition.schema.parse(doc));
|
|
576
1533
|
} catch (err) {
|
|
577
|
-
if (err instanceof
|
|
578
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1534
|
+
if (err instanceof z5.ZodError) {
|
|
1535
|
+
throw new ZodmonValidationError(handle.definition.name, err, doc);
|
|
579
1536
|
}
|
|
580
1537
|
throw err;
|
|
581
1538
|
}
|
|
582
1539
|
}
|
|
583
|
-
|
|
1540
|
+
try {
|
|
1541
|
+
await handle.native.insertMany(parsed);
|
|
1542
|
+
} catch (err) {
|
|
1543
|
+
wrapMongoError(err, handle.definition.name);
|
|
1544
|
+
}
|
|
584
1545
|
return parsed;
|
|
585
1546
|
}
|
|
586
1547
|
|
|
587
1548
|
// src/crud/update.ts
|
|
588
|
-
import { z as
|
|
1549
|
+
import { z as z6 } from "zod";
|
|
589
1550
|
async function updateOne(handle, filter, update, options) {
|
|
590
|
-
|
|
1551
|
+
try {
|
|
1552
|
+
return await handle.native.updateOne(filter, update, options);
|
|
1553
|
+
} catch (err) {
|
|
1554
|
+
wrapMongoError(err, handle.definition.name);
|
|
1555
|
+
}
|
|
591
1556
|
}
|
|
592
1557
|
async function updateMany(handle, filter, update, options) {
|
|
593
|
-
|
|
1558
|
+
try {
|
|
1559
|
+
return await handle.native.updateMany(filter, update, options);
|
|
1560
|
+
} catch (err) {
|
|
1561
|
+
wrapMongoError(err, handle.definition.name);
|
|
1562
|
+
}
|
|
594
1563
|
}
|
|
595
1564
|
async function findOneAndUpdate(handle, filter, update, options) {
|
|
596
1565
|
const driverOptions = {
|
|
@@ -600,14 +1569,19 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
600
1569
|
if (options?.upsert !== void 0) {
|
|
601
1570
|
driverOptions["upsert"] = options.upsert;
|
|
602
1571
|
}
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
1572
|
+
let result;
|
|
1573
|
+
try {
|
|
1574
|
+
result = await handle.native.findOneAndUpdate(
|
|
1575
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1576
|
+
filter,
|
|
1577
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedUpdateFilter intersection type is not directly assignable to MongoDB's UpdateFilter
|
|
1578
|
+
update,
|
|
1579
|
+
// biome-ignore lint/suspicious/noExplicitAny: dynamic options object is not assignable to driver's FindOneAndUpdateOptions under exactOptionalPropertyTypes
|
|
1580
|
+
driverOptions
|
|
1581
|
+
);
|
|
1582
|
+
} catch (err) {
|
|
1583
|
+
wrapMongoError(err, handle.definition.name);
|
|
1584
|
+
}
|
|
611
1585
|
if (!result) return null;
|
|
612
1586
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
613
1587
|
if (mode === false || mode === "passthrough") {
|
|
@@ -616,8 +1590,8 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
616
1590
|
try {
|
|
617
1591
|
return handle.definition.schema.parse(result);
|
|
618
1592
|
} catch (err) {
|
|
619
|
-
if (err instanceof
|
|
620
|
-
throw new ZodmonValidationError(handle.definition.name, err);
|
|
1593
|
+
if (err instanceof z6.ZodError) {
|
|
1594
|
+
throw new ZodmonValidationError(handle.definition.name, err, result);
|
|
621
1595
|
}
|
|
622
1596
|
throw err;
|
|
623
1597
|
}
|
|
@@ -907,6 +1881,27 @@ var CollectionHandle = class {
|
|
|
907
1881
|
async syncIndexes(options) {
|
|
908
1882
|
return await syncIndexes(this, options);
|
|
909
1883
|
}
|
|
1884
|
+
/**
|
|
1885
|
+
* Start a type-safe aggregation pipeline on this collection.
|
|
1886
|
+
*
|
|
1887
|
+
* Returns a fluent pipeline builder that tracks the output document
|
|
1888
|
+
* shape through each stage. The pipeline is lazy — no query executes
|
|
1889
|
+
* until a terminal method (`toArray`, `for await`, `explain`) is called.
|
|
1890
|
+
*
|
|
1891
|
+
* @returns A new pipeline builder starting with this collection's document type.
|
|
1892
|
+
*
|
|
1893
|
+
* @example
|
|
1894
|
+
* ```ts
|
|
1895
|
+
* const users = db.use(Users)
|
|
1896
|
+
* const result = await users.aggregate()
|
|
1897
|
+
* .match({ role: 'admin' })
|
|
1898
|
+
* .groupBy('role', { count: $count() })
|
|
1899
|
+
* .toArray()
|
|
1900
|
+
* ```
|
|
1901
|
+
*/
|
|
1902
|
+
aggregate() {
|
|
1903
|
+
return aggregate(this);
|
|
1904
|
+
}
|
|
910
1905
|
};
|
|
911
1906
|
|
|
912
1907
|
// src/client/client.ts
|
|
@@ -932,9 +1927,7 @@ var Database = class {
|
|
|
932
1927
|
*/
|
|
933
1928
|
use(def) {
|
|
934
1929
|
this._collections.set(def.name, def);
|
|
935
|
-
const native = this._db.collection(
|
|
936
|
-
def.name
|
|
937
|
-
);
|
|
1930
|
+
const native = this._db.collection(def.name);
|
|
938
1931
|
return new CollectionHandle(
|
|
939
1932
|
def,
|
|
940
1933
|
native
|
|
@@ -1012,36 +2005,6 @@ import { z as z9 } from "zod";
|
|
|
1012
2005
|
|
|
1013
2006
|
// src/schema/extensions.ts
|
|
1014
2007
|
import { z as z7 } from "zod";
|
|
1015
|
-
|
|
1016
|
-
// src/schema/ref.ts
|
|
1017
|
-
import { z as z6 } from "zod";
|
|
1018
|
-
var refMetadata = /* @__PURE__ */ new WeakMap();
|
|
1019
|
-
function getRefMetadata(schema) {
|
|
1020
|
-
if (typeof schema !== "object" || schema === null) return void 0;
|
|
1021
|
-
return refMetadata.get(schema);
|
|
1022
|
-
}
|
|
1023
|
-
var REF_GUARD = /* @__PURE__ */ Symbol.for("zodmon_ref");
|
|
1024
|
-
function installRefExtension() {
|
|
1025
|
-
const proto = z6.ZodType.prototype;
|
|
1026
|
-
if (REF_GUARD in proto) return;
|
|
1027
|
-
Object.defineProperty(proto, "ref", {
|
|
1028
|
-
value(collection2) {
|
|
1029
|
-
refMetadata.set(this, { collection: collection2 });
|
|
1030
|
-
return this;
|
|
1031
|
-
},
|
|
1032
|
-
enumerable: true,
|
|
1033
|
-
configurable: true,
|
|
1034
|
-
writable: true
|
|
1035
|
-
});
|
|
1036
|
-
Object.defineProperty(proto, REF_GUARD, {
|
|
1037
|
-
value: true,
|
|
1038
|
-
enumerable: false,
|
|
1039
|
-
configurable: false,
|
|
1040
|
-
writable: false
|
|
1041
|
-
});
|
|
1042
|
-
}
|
|
1043
|
-
|
|
1044
|
-
// src/schema/extensions.ts
|
|
1045
2008
|
var indexMetadata = /* @__PURE__ */ new WeakMap();
|
|
1046
2009
|
function getIndexMetadata(schema) {
|
|
1047
2010
|
if (typeof schema !== "object" || schema === null) return void 0;
|
|
@@ -1291,29 +2254,52 @@ var $ = {
|
|
|
1291
2254
|
};
|
|
1292
2255
|
export {
|
|
1293
2256
|
$,
|
|
2257
|
+
$addToSet,
|
|
1294
2258
|
$and,
|
|
2259
|
+
$avg,
|
|
2260
|
+
$count,
|
|
1295
2261
|
$eq,
|
|
1296
2262
|
$exists,
|
|
2263
|
+
$first,
|
|
1297
2264
|
$gt,
|
|
1298
2265
|
$gte,
|
|
1299
2266
|
$in,
|
|
2267
|
+
$last,
|
|
1300
2268
|
$lt,
|
|
1301
2269
|
$lte,
|
|
2270
|
+
$max,
|
|
2271
|
+
$min,
|
|
1302
2272
|
$ne,
|
|
1303
2273
|
$nin,
|
|
1304
2274
|
$nor,
|
|
1305
2275
|
$not,
|
|
1306
2276
|
$or,
|
|
2277
|
+
$push,
|
|
1307
2278
|
$regex,
|
|
2279
|
+
$sum,
|
|
2280
|
+
AggregatePipeline,
|
|
1308
2281
|
CollectionHandle,
|
|
1309
2282
|
Database,
|
|
1310
2283
|
IndexBuilder,
|
|
1311
2284
|
TypedFindCursor,
|
|
2285
|
+
ZodmonAuthError,
|
|
2286
|
+
ZodmonBulkWriteError,
|
|
2287
|
+
ZodmonDocValidationError,
|
|
2288
|
+
ZodmonDuplicateKeyError,
|
|
2289
|
+
ZodmonError,
|
|
2290
|
+
ZodmonIndexError,
|
|
2291
|
+
ZodmonNetworkError,
|
|
1312
2292
|
ZodmonNotFoundError,
|
|
2293
|
+
ZodmonQueryError,
|
|
2294
|
+
ZodmonTimeoutError,
|
|
1313
2295
|
ZodmonValidationError,
|
|
2296
|
+
ZodmonWriteConflictError,
|
|
2297
|
+
aggregate,
|
|
1314
2298
|
checkUnindexedFields,
|
|
1315
2299
|
collection,
|
|
2300
|
+
createAccumulatorBuilder,
|
|
1316
2301
|
createClient,
|
|
2302
|
+
createExpressionBuilder,
|
|
1317
2303
|
deleteMany,
|
|
1318
2304
|
deleteOne,
|
|
1319
2305
|
extractComparableOptions,
|
|
@@ -1339,6 +2325,7 @@ export {
|
|
|
1339
2325
|
toCompoundIndexSpec,
|
|
1340
2326
|
toFieldIndexSpec,
|
|
1341
2327
|
updateMany,
|
|
1342
|
-
updateOne
|
|
2328
|
+
updateOne,
|
|
2329
|
+
wrapMongoError
|
|
1343
2330
|
};
|
|
1344
2331
|
//# sourceMappingURL=index.js.map
|