@zodmon/core 0.7.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1020 -60
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +2412 -841
- package/dist/index.d.ts +2412 -841
- package/dist/index.js +1000 -60
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -1,8 +1,819 @@
|
|
|
1
|
+
// src/aggregate/expressions.ts
// Standalone accumulator helpers for use inside groupBy(). Each returns a
// tagged object: `__accum` marks the value as an accumulator, and `expr`
// holds the raw MongoDB accumulator expression. The field argument is
// passed through verbatim (callers supply the "$field" path themselves).
var makeAccum = (op) => (field) => ({ __accum: true, expr: { [op]: field } });
var $count = () => ({ __accum: true, expr: { $sum: 1 } });
var $sum = makeAccum("$sum");
var $avg = makeAccum("$avg");
var $min = makeAccum("$min");
var $max = makeAccum("$max");
var $first = makeAccum("$first");
var $last = makeAccum("$last");
var $push = makeAccum("$push");
var $addToSet = makeAccum("$addToSet");
|
|
38
|
+
/**
 * Build the accumulator helper object handed to groupBy() callbacks.
 *
 * Unlike the standalone $sum/$min/... helpers, these methods take a bare
 * field name and add the "$" path prefix themselves.
 *
 * @returns An object of accumulator factory methods (count, sum, avg, ...).
 */
function createAccumulatorBuilder() {
  // "$"-prefix a field name so the accumulator reads it as a field path.
  const path = (field) => `$${field}`;
  const accum = (expr) => ({ __accum: true, expr });
  const fieldAccum = (op) => (field) => accum({ [op]: path(field) });
  return {
    count: () => accum({ $sum: 1 }),
    // sum() accepts either a numeric literal (e.g. sum(1)) or a field name.
    sum: (field) => accum({ $sum: typeof field === "number" ? field : path(field) }),
    avg: fieldAccum("$avg"),
    min: fieldAccum("$min"),
    max: fieldAccum("$max"),
    first: fieldAccum("$first"),
    last: fieldAccum("$last"),
    push: fieldAccum("$push"),
    addToSet: fieldAccum("$addToSet")
    // biome-ignore lint/suspicious/noExplicitAny: Runtime implementation uses string field names and returns plain objects — TypeScript cannot verify that the runtime Accumulator objects match the generic AccumulatorBuilder<T> return types. Safe because type resolution happens at compile time via AccumulatorBuilder<T>, and runtime values are identical to what the standalone $min/$max/etc. produce.
  };
}
|
|
55
|
+
/**
 * Build the expression helper object handed to addFields() callbacks.
 *
 * Every method returns a tagged wrapper `{ __expr, value }`; addFields()
 * unwraps `value` into the $addFields stage. Field-name arguments are
 * "$"-prefixed into field paths; comparison right-hand sides are treated
 * as literal values.
 *
 * @returns An object of expression factory methods (add, concat, eq, ...).
 */
function createExpressionBuilder() {
  // Tag a raw aggregation expression in the marker shape addFields() unwraps.
  const wrap = (value) => ({ __expr: true, value });
  // "$"-prefix a field name so MongoDB reads it as a field path.
  const ref = (field) => `$${field}`;
  // Numbers pass through as literals; strings are treated as field paths.
  const operand = (v) => (typeof v === "number" ? v : `$${v}`);
  const binary = (op) => (field, value) => wrap({ [op]: [ref(field), operand(value)] });
  const unary = (op) => (field) => wrap({ [op]: ref(field) });
  return {
    // Arithmetic
    add: binary("$add"),
    subtract: binary("$subtract"),
    multiply: binary("$multiply"),
    divide: binary("$divide"),
    mod: binary("$mod"),
    abs: unary("$abs"),
    ceil: unary("$ceil"),
    floor: unary("$floor"),
    round: (field, place = 0) => wrap({ $round: [ref(field), place] }),
    // String — concat treats identifier-shaped parts as field refs and
    // anything else (spaces, punctuation, ...) as literal text.
    concat: (...parts) => wrap({
      $concat: parts.map((p) => (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(p) ? ref(p) : p))
    }),
    toLower: unary("$toLower"),
    toUpper: unary("$toUpper"),
    trim: (field) => wrap({ $trim: { input: ref(field) } }),
    substr: (field, start, length) => wrap({ $substrBytes: [ref(field), start, length] }),
    // Comparison — the right-hand side is a literal value, not a field path.
    eq: (field, value) => wrap({ $eq: [ref(field), value] }),
    gt: (field, value) => wrap({ $gt: [ref(field), value] }),
    gte: (field, value) => wrap({ $gte: [ref(field), value] }),
    lt: (field, value) => wrap({ $lt: [ref(field), value] }),
    lte: (field, value) => wrap({ $lte: [ref(field), value] }),
    ne: (field, value) => wrap({ $ne: [ref(field), value] }),
    // Date
    year: unary("$year"),
    month: unary("$month"),
    dayOfMonth: unary("$dayOfMonth"),
    // Array
    size: unary("$size"),
    // Conditional — `condition` is another builder expression; its raw
    // value is embedded, while then/else are used verbatim.
    cond: (condition, thenValue, elseValue) => wrap({ $cond: [condition.value, thenValue, elseValue] }),
    ifNull: (field, fallback) => wrap({ $ifNull: [ref(field), fallback] })
    // biome-ignore lint/suspicious/noExplicitAny: Runtime implementation uses string field names — TypeScript cannot verify generic ExpressionBuilder<T> return types match. Safe because type resolution happens at compile time.
  };
}
|
|
101
|
+
|
|
102
|
+
// src/schema/ref.ts
|
|
103
|
+
import { z } from "zod";
|
|
104
|
+
// Per-schema .ref() metadata, keyed weakly so schema objects can still be
// garbage collected after use.
var refMetadata = /* @__PURE__ */ new WeakMap();
/**
 * Look up the .ref() metadata previously attached to a Zod schema.
 *
 * @param schema - Candidate schema (any value is tolerated).
 * @returns The stored metadata, or undefined for non-objects / null /
 *          schemas with no recorded ref.
 */
function getRefMetadata(schema) {
  const isObject = schema !== null && typeof schema === "object";
  return isObject ? refMetadata.get(schema) : undefined;
}
|
|
109
|
+
// Guard marker: set on the patched prototype so the extension is installed
// at most once per process. Symbol.for() keeps the key stable even when
// duplicate copies of this module are loaded.
var REF_GUARD = /* @__PURE__ */ Symbol.for("zodmon_ref");
// Patch zod's ZodType.prototype with a chainable .ref(collection) method
// that records which collection a field points at (read back later by
// getRefMetadata, e.g. from the pipeline's lookup()).
function installRefExtension() {
  const proto = z.ZodType.prototype;
  // Already installed (possibly by another copy of this module) — no-op.
  if (REF_GUARD in proto) return;
  Object.defineProperty(proto, "ref", {
    value(collection2) {
      // Record the target collection for this schema, then return the
      // schema itself so .ref() chains like other zod methods.
      refMetadata.set(this, { collection: collection2 });
      return this;
    },
    enumerable: true,
    configurable: true,
    writable: true
  });
  // Non-configurable, non-writable marker prevents double installation.
  Object.defineProperty(proto, REF_GUARD, {
    value: true,
    enumerable: false,
    configurable: false,
    writable: false
  });
}
|
|
129
|
+
|
|
130
|
+
// src/aggregate/pipeline.ts
|
|
131
|
+
// Immutable, type-aware builder around MongoDB's aggregate(). Every stage
// method returns a NEW pipeline with the stage appended; execution is
// deferred until toArray()/explain()/async iteration.
var AggregatePipeline = class _AggregatePipeline {
  definition;        // collection definition (provides .shape for ref lookups)
  nativeCollection;  // underlying MongoDB driver collection
  stages;            // accumulated raw aggregation stage documents
  /**
   * @param definition - The zodmon collection definition this pipeline reads from.
   * @param nativeCollection - The native driver collection used to execute.
   * @param stages - The raw aggregation stages accumulated so far.
   */
  constructor(definition, nativeCollection, stages) {
    this.definition = definition;
    this.nativeCollection = nativeCollection;
    this.stages = stages;
  }
  /**
   * Append an arbitrary aggregation stage to the pipeline (escape hatch).
   *
   * Returns a new pipeline instance with the stage appended — the
   * original pipeline is not modified.
   *
   * Optionally accepts a type parameter `TNew` to change the output
   * type when the stage transforms the document shape.
   *
   * @typeParam TNew - The output type after this stage. Defaults to the current output type.
   * @param stage - A raw MongoDB aggregation stage document (e.g. `{ $match: { ... } }`).
   * @returns A new pipeline with the stage appended.
   *
   * @example
   * ```ts
   * const admins = aggregate(users)
   *   .raw({ $match: { role: 'admin' } })
   *   .toArray()
   * ```
   *
   * @example
   * ```ts
   * // Change output type with a $project stage
   * const names = aggregate(users)
   *   .raw<{ name: string }>({ $project: { name: 1, _id: 0 } })
   *   .toArray()
   * ```
   */
  raw(stage) {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      stage
    ]);
  }
  /**
   * Execute the pipeline and return all results as an array.
   *
   * @returns A promise resolving to the array of output documents.
   *
   * @example
   * ```ts
   * const results = await aggregate(users)
   *   .raw({ $match: { age: { $gte: 18 } } })
   *   .toArray()
   * ```
   */
  async toArray() {
    const cursor = this.nativeCollection.aggregate(this.stages);
    return await cursor.toArray();
  }
  /**
   * Stream pipeline results one document at a time via `for await...of`.
   *
   * @returns An async generator yielding output documents.
   *
   * @example
   * ```ts
   * for await (const user of aggregate(users).raw({ $match: { role: 'admin' } })) {
   *   console.log(user.name)
   * }
   * ```
   */
  async *[Symbol.asyncIterator]() {
    const cursor = this.nativeCollection.aggregate(this.stages);
    for await (const doc of cursor) {
      yield doc;
    }
  }
  /**
   * Return the query execution plan without running the pipeline.
   *
   * Useful for debugging and understanding how MongoDB will process
   * the pipeline stages.
   *
   * @returns A promise resolving to the explain output document.
   *
   * @example
   * ```ts
   * const plan = await aggregate(users)
   *   .raw({ $match: { role: 'admin' } })
   *   .explain()
   * console.log(plan)
   * ```
   */
  async explain() {
    const cursor = this.nativeCollection.aggregate(this.stages);
    return await cursor.explain();
  }
  // ── Shape-preserving stages ──────────────────────────────────────
  /**
   * Filter documents using a type-safe match expression.
   *
   * Appends a `$match` stage to the pipeline. The filter is constrained
   * to the current output type, so only valid fields and operators are accepted.
   *
   * Supports two forms of type narrowing:
   *
   * **Tier 1 — Explicit type parameter:**
   * ```ts
   * .match<{ role: 'engineer' | 'designer' }>({ role: { $in: ['engineer', 'designer'] } })
   * // role narrows to 'engineer' | 'designer'
   * ```
   *
   * **Tier 2 — Automatic inference from filter literals:**
   * ```ts
   * .match({ role: 'engineer' })         // role narrows to 'engineer'
   * .match({ role: { $ne: 'intern' } })  // role narrows to Exclude<Role, 'intern'>
   * .match({ role: { $in: ['engineer', 'designer'] as const } }) // needs as const
   * ```
   *
   * When no type parameter is provided and the filter doesn't contain
   * inferrable literals, the output type is unchanged (backward compatible).
   *
   * @typeParam TNarrow - Optional object mapping field names to narrowed types. Must be a subtype of the corresponding fields in TOutput.
   * @typeParam F - Inferred from the filter argument. Do not provide explicitly.
   * @param filter - A type-safe filter for the current output type.
   * @returns A new pipeline with the `$match` stage appended and output type narrowed.
   *
   * @example
   * ```ts
   * // Explicit narrowing
   * const filtered = await users.aggregate()
   *   .match<{ role: 'engineer' }>({ role: 'engineer' })
   *   .toArray()
   * // filtered[0].role → 'engineer'
   *
   * // Automatic narrowing with $in (requires as const)
   * const subset = await users.aggregate()
   *   .match({ role: { $in: ['engineer', 'designer'] as const } })
   *   .toArray()
   * // subset[0].role → 'engineer' | 'designer'
   * ```
   */
  match(filter) {
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $match: filter }
    ]);
    return pipeline;
  }
  /**
   * Sort documents by one or more fields.
   *
   * Appends a `$sort` stage. Keys are constrained to `keyof TOutput & string`
   * and values must be `1` (ascending) or `-1` (descending).
   *
   * @param spec - A sort specification mapping field names to sort direction.
   * @returns A new pipeline with the `$sort` stage appended.
   *
   * @example
   * ```ts
   * const sorted = await aggregate(users)
   *   .sort({ age: -1, name: 1 })
   *   .toArray()
   * ```
   */
  sort(spec) {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $sort: spec }
    ]);
  }
  /**
   * Skip a number of documents in the pipeline.
   *
   * Appends a `$skip` stage. Commonly used with {@link limit} for pagination.
   *
   * @param n - The number of documents to skip.
   * @returns A new pipeline with the `$skip` stage appended.
   *
   * @example
   * ```ts
   * // Page 2 (10 items per page)
   * const page2 = await aggregate(users)
   *   .sort({ name: 1 })
   *   .skip(10)
   *   .limit(10)
   *   .toArray()
   * ```
   */
  skip(n) {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $skip: n }
    ]);
  }
  /**
   * Limit the number of documents passing through the pipeline.
   *
   * Appends a `$limit` stage. Commonly used with {@link skip} for pagination,
   * or after {@link sort} to get top/bottom N results.
   *
   * @param n - The maximum number of documents to pass through.
   * @returns A new pipeline with the `$limit` stage appended.
   *
   * @example
   * ```ts
   * const top5 = await aggregate(users)
   *   .sort({ score: -1 })
   *   .limit(5)
   *   .toArray()
   * ```
   */
  limit(n) {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $limit: n }
    ]);
  }
  // ── Shape-transforming projection stages ─────────────────────────
  /**
   * Include only specified fields in the output.
   *
   * Appends a `$project` stage with inclusion (`1`) for each key.
   * The `_id` field is always included. The output type narrows to
   * `Pick<TOutput, K | '_id'>`.
   *
   * @param spec - An object mapping field names to `1` for inclusion.
   * @returns A new pipeline with the `$project` stage appended.
   *
   * @example
   * ```ts
   * const namesOnly = await aggregate(users)
   *   .project({ name: 1 })
   *   .toArray()
   * // [{ _id: ..., name: 'Ada' }, ...]
   * ```
   */
  project(spec) {
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $project: spec }
    ]);
    return pipeline;
  }
  /**
   * Variadic shorthand for {@link project} — pick fields to include.
   *
   * Generates a `$project` stage that includes only the listed fields
   * (plus `_id`). Equivalent to `.project({ field1: 1, field2: 1 })`.
   *
   * @param fields - Field names to include in the output.
   * @returns A new pipeline with the `$project` stage appended.
   *
   * @example
   * ```ts
   * const namesAndRoles = await aggregate(users)
   *   .pick('name', 'role')
   *   .toArray()
   * ```
   */
  pick(...fields) {
    const spec = Object.fromEntries(fields.map((f) => [f, 1]));
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $project: spec }
    ]);
    return pipeline;
  }
  /**
   * Exclude specified fields from the output.
   *
   * Appends a `$project` stage with exclusion (`0`) for each key.
   * All other fields pass through. The output type becomes `Omit<TOutput, K>`.
   *
   * @param fields - Field names to exclude from the output.
   * @returns A new pipeline with the `$project` stage appended.
   *
   * @example
   * ```ts
   * const noAge = await aggregate(users)
   *   .omit('age')
   *   .toArray()
   * ```
   */
  omit(...fields) {
    const spec = Object.fromEntries(fields.map((f) => [f, 0]));
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $project: spec }
    ]);
    return pipeline;
  }
  /**
   * Group documents and compute accumulators per group ($group stage).
   *
   * `field` may be a single field name (group `_id` becomes `"$field"`) or
   * an array of names (group `_id` becomes an object of field refs).
   * `accumulators` is either a plain map of Accumulator objects (from the
   * standalone $sum/$min/... helpers) or a callback receiving the
   * accumulator builder; each entry's raw `expr` is spliced into the stage.
   *
   * @param field - Field name, or array of field names, to group by.
   * @param accumulators - Accumulator map, or builder callback returning one.
   * @returns A new pipeline with the `$group` stage appended.
   */
  groupBy(field, accumulators) {
    const resolved = typeof accumulators === "function" ? accumulators(createAccumulatorBuilder()) : accumulators;
    const _id = Array.isArray(field) ? Object.fromEntries(field.map((f) => [f, `$${f}`])) : `$${field}`;
    const accumExprs = Object.fromEntries(
      Object.entries(resolved).map(([key, acc]) => [key, acc.expr])
    );
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $group: { _id, ...accumExprs } }
    ]);
    return pipeline;
  }
  /**
   * Add computed fields to each document ($addFields stage).
   *
   * `fields` is either a plain object of values/expressions or a callback
   * receiving the expression builder. Builder-produced wrappers
   * (`{ __expr, value }`) are unwrapped to their raw expression; any other
   * value is used verbatim.
   *
   * @param fields - Field map, or builder callback returning one.
   * @returns A new pipeline with the `$addFields` stage appended.
   */
  addFields(fields) {
    const resolved = typeof fields === "function" ? fields(createExpressionBuilder()) : fields;
    const stage = Object.fromEntries(
      Object.entries(resolved).map(([k, v]) => [
        k,
        v && typeof v === "object" && "__expr" in v ? v.value : v
      ])
    );
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $addFields: stage }
    ]);
    return pipeline;
  }
  // ── unwind stage ─────────────────────────────────────────────────
  /**
   * Deconstruct an array field, outputting one document per array element.
   *
   * Appends an `$unwind` stage. The unwound field's type changes from
   * `T[]` to `T` in the output type. Documents with empty or missing
   * arrays are dropped unless `preserveEmpty` is `true`.
   *
   * @param field - The name of the array field to unwind.
   * @param options - Optional settings for the unwind stage.
   * @param options.preserveEmpty - If `true`, documents with null, missing, or empty arrays are preserved.
   * @returns A new pipeline with the `$unwind` stage appended.
   *
   * @example
   * ```ts
   * const flat = await aggregate(orders)
   *   .unwind('items')
   *   .toArray()
   * // Each result has a single `items` value instead of an array
   * ```
   */
  unwind(field, options) {
    const stage = options?.preserveEmpty ? { $unwind: { path: `$${field}`, preserveNullAndEmptyArrays: true } } : { $unwind: `$${field}` };
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      stage
    ]);
    return pipeline;
  }
  /**
   * Join documents from another collection ($lookup stage).
   *
   * Two call forms:
   * - Forward: `lookup('authorId')` — the field must carry `.ref()` metadata;
   *   joins `localField: authorId` to the target collection's `_id`.
   * - Reverse: `lookup(CollectionDef, { on: 'foreignKey' })` — joins this
   *   collection's `_id` to `foreignKey` on the given collection; `on` is
   *   required in this form.
   *
   * In both forms `options.as` overrides the output field name (default:
   * the target collection's name), and `options.unwind` appends a
   * null-preserving `$unwind` of the joined field.
   *
   * @param fieldOrFrom - Local ref field name, or a foreign collection definition.
   * @param options - `on` (reverse form), `as`, and `unwind` settings.
   * @returns A new pipeline with the `$lookup` (and optional `$unwind`) appended.
   * @throws Error if the reverse form lacks `on`, or the forward-form field has no `.ref()` metadata.
   */
  lookup(fieldOrFrom, options) {
    const stages = [...this.stages];
    if (typeof fieldOrFrom === "object") {
      // Reverse lookup: caller passed a collection definition object.
      const foreignName = fieldOrFrom.name;
      const foreignField = options?.on;
      if (!foreignField) {
        throw new Error(
          `[zodmon] lookup: reverse lookup on '${foreignName}' requires an 'on' option specifying which field on the foreign collection references this collection.`
        );
      }
      const asField = options?.as ?? foreignName;
      stages.push({
        $lookup: {
          from: foreignName,
          localField: "_id",
          foreignField,
          as: asField
        }
      });
      if (options?.unwind) {
        stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
      }
    } else {
      // Forward lookup: resolve the target collection from .ref() metadata
      // attached to this field's schema.
      const shape = this.definition.shape;
      const fieldSchema = shape[fieldOrFrom];
      const ref = getRefMetadata(fieldSchema);
      if (!ref) {
        throw new Error(
          `[zodmon] lookup: field '${fieldOrFrom}' has no .ref() metadata. Use .lookup(CollectionDef, { on: foreignKey }) for reverse lookups, or add .ref(TargetCollection) to the field schema.`
        );
      }
      const targetName = ref.collection.name;
      const asField = options?.as ?? targetName;
      stages.push({
        $lookup: {
          from: targetName,
          localField: fieldOrFrom,
          foreignField: "_id",
          as: asField
        }
      });
      if (options?.unwind) {
        stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
      }
    }
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, stages);
    return pipeline;
  }
  // ── Convenience shortcuts ────────────────────────────────────────
  /**
   * Count documents per group, sorted by count descending.
   *
   * Shorthand for `.groupBy(field, { count: $count() }).sort({ count: -1 })`.
   *
   * @param field - The field to group and count by.
   * @returns A new pipeline producing `{ _id: TOutput[K], count: number }` results.
   *
   * @example
   * ```ts
   * const roleCounts = await aggregate(users)
   *   .countBy('role')
   *   .toArray()
   * // [{ _id: 'user', count: 3 }, { _id: 'admin', count: 2 }]
   * ```
   */
  countBy(field) {
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $group: { _id: `$${field}`, count: { $sum: 1 } } },
      { $sort: { count: -1 } }
    ]);
    return pipeline;
  }
  /**
   * Sum a numeric field per group, sorted by total descending.
   *
   * Shorthand for `.groupBy(field, { total: $sum('$sumField') }).sort({ total: -1 })`.
   *
   * @param field - The field to group by.
   * @param sumField - The numeric field to sum.
   * @returns A new pipeline producing `{ _id: TOutput[K], total: number }` results.
   *
   * @example
   * ```ts
   * const revenueByCategory = await aggregate(orders)
   *   .sumBy('category', 'amount')
   *   .toArray()
   * // [{ _id: 'electronics', total: 5000 }, ...]
   * ```
   */
  sumBy(field, sumField) {
    const pipeline = new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $group: { _id: `$${field}`, total: { $sum: `$${sumField}` } } },
      { $sort: { total: -1 } }
    ]);
    return pipeline;
  }
  /**
   * Sort by a single field with a friendly direction name.
   *
   * Shorthand for `.sort({ [field]: direction === 'desc' ? -1 : 1 })`.
   *
   * @param field - The field to sort by.
   * @param direction - Sort direction: `'asc'` (default) or `'desc'`.
   * @returns A new pipeline with the `$sort` stage appended.
   *
   * @example
   * ```ts
   * const youngest = await aggregate(users)
   *   .sortBy('age')
   *   .toArray()
   * ```
   */
  sortBy(field, direction = "asc") {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $sort: { [field]: direction === "desc" ? -1 : 1 } }
    ]);
  }
  /**
   * Return the top N documents sorted by a field descending.
   *
   * Shorthand for `.sort({ [by]: -1 }).limit(n)`.
   *
   * @param n - The number of documents to return.
   * @param options - An object with a `by` field specifying the sort key.
   * @returns A new pipeline with `$sort` and `$limit` stages appended.
   *
   * @example
   * ```ts
   * const top3 = await aggregate(users)
   *   .top(3, { by: 'score' })
   *   .toArray()
   * ```
   */
  top(n, options) {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $sort: { [options.by]: -1 } },
      { $limit: n }
    ]);
  }
  /**
   * Return the bottom N documents sorted by a field ascending.
   *
   * Shorthand for `.sort({ [by]: 1 }).limit(n)`.
   *
   * @param n - The number of documents to return.
   * @param options - An object with a `by` field specifying the sort key.
   * @returns A new pipeline with `$sort` and `$limit` stages appended.
   *
   * @example
   * ```ts
   * const bottom3 = await aggregate(users)
   *   .bottom(3, { by: 'score' })
   *   .toArray()
   * ```
   */
  bottom(n, options) {
    return new _AggregatePipeline(this.definition, this.nativeCollection, [
      ...this.stages,
      { $sort: { [options.by]: 1 } },
      { $limit: n }
    ]);
  }
};
|
|
646
|
+
/**
 * Start a new, empty aggregation pipeline for a collection handle.
 *
 * @param handle - A collection handle exposing `definition` and `native`.
 * @returns An AggregatePipeline with no stages.
 */
function aggregate(handle) {
  const { definition, native } = handle;
  return new AggregatePipeline(definition, native, []);
}
|
|
649
|
+
|
|
1
650
|
// src/client/client.ts
|
|
2
651
|
import { MongoClient } from "mongodb";
|
|
3
652
|
|
|
653
|
+
// src/indexes/spec.ts
|
|
654
|
+
/**
 * Convert a single-field index definition into the { key, options } pair
 * expected by Collection.createIndex().
 *
 * Direction precedence: a text index wins over descending; default ascending.
 *
 * @param def - Field index definition ({ field, text?, descending?, unique?, sparse?, expireAfter?, partial? }).
 * @returns `{ key, options }` ready for createIndex().
 */
function toFieldIndexSpec(def) {
  let direction = 1;
  if (def.text) direction = "text";
  else if (def.descending) direction = -1;
  const options = {};
  if (def.unique) options["unique"] = true;
  if (def.sparse) options["sparse"] = true;
  // expireAfter: 0 is meaningful (expire immediately), so compare against undefined.
  if (def.expireAfter !== undefined) options["expireAfterSeconds"] = def.expireAfter;
  if (def.partial) options["partialFilterExpression"] = def.partial;
  return { key: { [def.field]: direction }, options };
}
|
|
664
|
+
/**
 * Convert a compound index definition ({ fields, options? }) into the
 * { key, options } pair expected by Collection.createIndex().
 *
 * @param def - Compound index definition; `fields` maps field names to directions.
 * @returns `{ key, options }` ready for createIndex().
 */
function toCompoundIndexSpec(def) {
  const opts = def.options ?? {};
  const options = {};
  if (opts.unique) options["unique"] = true;
  if (opts.sparse) options["sparse"] = true;
  if (opts.name) options["name"] = opts.name;
  if (opts.partial) options["partialFilterExpression"] = opts.partial;
  // Shallow-copy so later mutations of the definition don't leak into the spec.
  return { key: { ...def.fields }, options };
}
|
|
673
|
+
/**
 * Canonical string form of an index key document, used to match desired
 * specs against existing indexes by key shape
 * (e.g. `{ a: 1, b: -1 }` → `"a:1,b:-1"`).
 *
 * Note: relies on object key insertion order, which is significant for
 * compound indexes.
 */
function serializeIndexKey(key) {
  const parts = [];
  for (const [field, dir] of Object.entries(key)) {
    parts.push(`${field}:${dir}`);
  }
  return parts.join(",");
}
|
|
676
|
+
|
|
677
|
+
// src/indexes/sync.ts
|
|
678
|
+
// Index options that participate in drift detection. Anything else on an
// existing-index info document (name, v, background, ...) is ignored when
// comparing existing vs desired.
var COMPARABLE_OPTION_KEYS = [
  "unique",
  "sparse",
  "expireAfterSeconds",
  "partialFilterExpression"
];
/**
 * Project an existing-index info document down to just the comparable
 * option keys that are actually set (undefined entries are dropped).
 *
 * @param info - An index info document as returned by listIndexes().
 * @returns A plain object containing only the set comparable options.
 */
function extractComparableOptions(info) {
  return Object.fromEntries(
    COMPARABLE_OPTION_KEYS
      .filter((key) => info[key] !== undefined)
      .map((key) => [key, info[key]])
  );
}
|
|
693
|
+
/**
 * Derive MongoDB's default index name from a key document
 * (e.g. `{ a: 1, b: -1 }` → `"a_1_b_-1"`).
 */
function generateIndexName(key) {
  const segments = [];
  for (const [field, dir] of Object.entries(key)) {
    segments.push(`${field}_${dir}`);
  }
  return segments.join("_");
}
|
|
696
|
+
/**
 * Return a shallow copy of `obj` whose keys were inserted in sorted order,
 * so JSON.stringify yields a canonical form suitable for comparison.
 */
function sortKeys(obj) {
  return Object.fromEntries(
    Object.keys(obj).sort().map((key) => [key, obj[key]])
  );
}
|
|
703
|
+
/**
 * Name for a desired index spec: the explicit `options.name` if it is a
 * string, otherwise MongoDB's default name derived from the key.
 */
function resolveSpecName(spec) {
  const explicit = spec.options["name"];
  if (typeof explicit === "string") return explicit;
  return generateIndexName(spec.key);
}
|
|
707
|
+
/**
 * Name of an existing index: its stored "name" when present, otherwise
 * the default name MongoDB would derive from the key.
 */
function resolveExistingName(info, key) {
  const stored = info["name"];
  return typeof stored === "string" ? stored : generateIndexName(key);
}
|
|
712
|
+
/**
 * Compare two index option bags for equality, ignoring "name" and key
 * order. A JSON round-trip over sorted keys gives a canonical form;
 * acceptable here because comparable options are plain JSON-safe values.
 */
function optionsMatch(a, b) {
  const canonical = (opts) => JSON.stringify(sortKeys(stripName(opts)));
  return canonical(a) === canonical(b);
}
|
|
715
|
+
/**
 * Shallow copy of `obj` without its "name" property — index names never
 * affect whether two index definitions are equivalent.
 */
function stripName(obj) {
  const copy = { ...obj };
  delete copy.name;
  return copy;
}
|
|
719
|
+
/**
 * Reconcile one desired index spec against the collection's existing
 * indexes, mutating `acc` (created/dropped/skipped/stale/matchedKeys)
 * with the outcome. With `dryRun` no driver calls are made but the
 * accumulator is still filled in as if they had been.
 */
async function processDesiredSpec(spec, existingByKey, native, dryRun, dropOrphaned, acc) {
  const serialized = serializeIndexKey(spec.key);
  const existing = existingByKey.get(serialized);
  if (!existing) {
    // No index with this key shape yet: create it (unless dry-run) and record.
    if (!dryRun) await native.createIndex(spec.key, spec.options);
    acc.created.push(resolveSpecName(spec));
    return;
  }
  // Key shape matched an existing index — record so the orphan pass skips it.
  acc.matchedKeys.add(serialized);
  const existingName = resolveExistingName(existing, spec.key);
  const existingOpts = extractComparableOptions(existing);
  if (optionsMatch(existingOpts, spec.options)) {
    // Identical key and options: nothing to do.
    acc.skipped.push(existingName);
    return;
  }
  // Same key but drifted options: either rebuild in place (drop then
  // recreate) or just report it as stale for the caller to act on.
  if (dropOrphaned) {
    if (!dryRun) {
      await native.dropIndex(existingName);
      await native.createIndex(spec.key, spec.options);
    }
    acc.dropped.push(existingName);
    acc.created.push(resolveSpecName(spec));
    return;
  }
  acc.stale.push({
    name: existingName,
    key: spec.key,
    existing: existingOpts,
    desired: spec.options
  });
}
|
|
750
|
+
/**
 * Drop (or, on dry-run, merely record) indexes that exist on the
 * collection but are not in the desired set. The built-in "_id_" index is
 * never touched, and nothing happens unless `dropOrphaned` is set.
 * Mutates `dropped` in place.
 */
async function processOrphanedIndexes(existingIndexes, desiredKeys, matchedKeys, native, dryRun, dropOrphaned, dropped) {
  for (const idx of existingIndexes) {
    const rawName = idx["name"];
    const name = typeof rawName === "string" ? rawName : "";
    if (name === "_id_") continue;
    const serialized = serializeIndexKey(idx["key"]);
    const isDesired = matchedKeys.has(serialized) || desiredKeys.has(serialized);
    if (isDesired || !dropOrphaned) continue;
    if (!dryRun) await native.dropIndex(name);
    dropped.push(name);
  }
}
|
|
763
|
+
/**
 * List a collection's indexes, treating a missing collection as empty.
 *
 * MongoDB raises "ns does not exist" when listing indexes on a collection
 * that has never been created; that case is mapped to `[]`. Every other
 * error is rethrown unchanged.
 *
 * @param native - Native MongoDB collection.
 * @returns Array of raw index documents (possibly empty).
 */
async function listIndexesSafe(native) {
  try {
    return await native.listIndexes().toArray();
  } catch (err) {
    const isMissingNamespace = err instanceof Error && err.message.includes("ns does not exist");
    if (isMissingNamespace) return [];
    throw err;
  }
}
|
|
773
|
+
/**
 * Synchronize the indexes declared on a collection handle with MongoDB.
 *
 * Desired specs are derived from field-level indexes and compound indexes
 * in the collection definition. Each spec is reconciled against the
 * indexes MongoDB currently reports, then leftover (orphaned) indexes are
 * handled according to the options.
 *
 * @param handle - Collection handle exposing `native` and `definition`.
 * @param options - Optional `{ dryRun, dropOrphaned }`, both default false.
 * @returns `{ created, dropped, skipped, stale }` summary of the sync.
 */
async function syncIndexes(handle, options) {
  const { dryRun = false, dropOrphaned = false } = options ?? {};
  const { native, definition } = handle;

  // Desired indexes come from both field-level and compound declarations.
  const desiredSpecs = definition.fieldIndexes
    .map(toFieldIndexSpec)
    .concat(definition.compoundIndexes.map(toCompoundIndexSpec));

  // Snapshot what MongoDB currently has, keyed by serialized index key.
  const existingIndexes = await listIndexesSafe(native);
  const existingByKey = new Map(
    existingIndexes.map((indexDoc) => [serializeIndexKey(indexDoc["key"]), indexDoc])
  );

  const acc = {
    created: [],
    dropped: [],
    skipped: [],
    stale: [],
    matchedKeys: new Set()
  };

  // Reconcile each desired spec sequentially — index DDL should not race.
  for (const spec of desiredSpecs) {
    await processDesiredSpec(spec, existingByKey, native, dryRun, dropOrphaned, acc);
  }

  // Anything in MongoDB that is neither matched nor desired is orphaned.
  const desiredKeys = new Set(desiredSpecs.map((spec) => serializeIndexKey(spec.key)));
  await processOrphanedIndexes(
    existingIndexes,
    desiredKeys,
    acc.matchedKeys,
    native,
    dryRun,
    dropOrphaned,
    acc.dropped
  );

  const { created, dropped, skipped, stale } = acc;
  return { created, dropped, skipped, stale };
}
|
|
814
|
+
|
|
4
815
|
// src/crud/delete.ts
|
|
5
|
-
import { z } from "zod";
|
|
816
|
+
import { z as z2 } from "zod";
|
|
6
817
|
|
|
7
818
|
// src/errors/validation.ts
|
|
8
819
|
var ZodmonValidationError = class extends Error {
|
|
@@ -43,7 +854,7 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
43
854
|
try {
|
|
44
855
|
return handle.definition.schema.parse(result);
|
|
45
856
|
} catch (err) {
|
|
46
|
-
if (err instanceof
|
|
857
|
+
if (err instanceof z2.ZodError) {
|
|
47
858
|
throw new ZodmonValidationError(handle.definition.name, err);
|
|
48
859
|
}
|
|
49
860
|
throw err;
|
|
@@ -51,7 +862,7 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
51
862
|
}
|
|
52
863
|
|
|
53
864
|
// src/crud/find.ts
|
|
54
|
-
import { z as
|
|
865
|
+
import { z as z4 } from "zod";
|
|
55
866
|
|
|
56
867
|
// src/errors/not-found.ts
|
|
57
868
|
var ZodmonNotFoundError = class extends Error {
|
|
@@ -64,8 +875,31 @@ var ZodmonNotFoundError = class extends Error {
|
|
|
64
875
|
}
|
|
65
876
|
};
|
|
66
877
|
|
|
878
|
+
// src/indexes/warn.ts
|
|
879
|
+
// Top-level filter operators that do not name a document field directly,
// so they are never flagged as unindexed.
var SKIP_OPERATORS = /* @__PURE__ */ new Set(["$or", "$and", "$nor", "$text", "$where", "$expr", "$comment"]);
/**
 * Warn (via console.warn) about top-level filter fields that no declared
 * index covers.
 *
 * Only active when the collection was defined with
 * `warnUnindexedQueries: true`. `_id` and logical operators are always
 * treated as covered. A compound index only covers its leading field
 * (index-prefix rule).
 *
 * @param definition - Collection definition (name, options, index lists).
 * @param filter - The query filter being executed.
 */
function checkUnindexedFields(definition, filter) {
  if (definition.options.warnUnindexedQueries !== true) return;

  // Collect every field that some index can serve.
  const indexedFields = new Set(definition.fieldIndexes.map((fi) => fi.field));
  for (const compound of definition.compoundIndexes) {
    const [leadingField] = Object.keys(compound.fields);
    if (leadingField !== void 0) indexedFields.add(leadingField);
  }

  for (const field of Object.keys(filter)) {
    if (field === "_id" || SKIP_OPERATORS.has(field)) continue;
    if (!indexedFields.has(field)) {
      console.warn(`[zodmon] warn: query on '${definition.name}' uses unindexed field '${field}'`);
    }
  }
}
|
|
900
|
+
|
|
67
901
|
// src/query/cursor.ts
|
|
68
|
-
import { z as
|
|
902
|
+
import { z as z3 } from "zod";
|
|
69
903
|
|
|
70
904
|
// src/crud/paginate.ts
|
|
71
905
|
import { ObjectId } from "mongodb";
|
|
@@ -81,8 +915,8 @@ function deserializeValue(value) {
|
|
|
81
915
|
}
|
|
82
916
|
return value;
|
|
83
917
|
}
|
|
84
|
-
function encodeCursor(doc,
|
|
85
|
-
const values =
|
|
918
|
+
function encodeCursor(doc, sortKeys2, direction) {
|
|
919
|
+
const values = sortKeys2.map(([field]) => serializeValue(doc[field]));
|
|
86
920
|
return btoa(JSON.stringify([direction, ...values]));
|
|
87
921
|
}
|
|
88
922
|
function decodeCursor(cursor) {
|
|
@@ -101,14 +935,14 @@ function decodeCursor(cursor) {
|
|
|
101
935
|
}
|
|
102
936
|
return { direction, values: rawValues.map(deserializeValue) };
|
|
103
937
|
}
|
|
104
|
-
function buildCursorFilter(
|
|
938
|
+
function buildCursorFilter(sortKeys2, values, isBackward) {
|
|
105
939
|
const clauses = [];
|
|
106
|
-
for (let i = 0; i <
|
|
940
|
+
for (let i = 0; i < sortKeys2.length; i++) {
|
|
107
941
|
const clause = {};
|
|
108
942
|
for (let j = 0; j < i; j++) {
|
|
109
|
-
clause[
|
|
943
|
+
clause[sortKeys2[j][0]] = values[j];
|
|
110
944
|
}
|
|
111
|
-
const [field, direction] =
|
|
945
|
+
const [field, direction] = sortKeys2[i];
|
|
112
946
|
const isAsc = direction === 1;
|
|
113
947
|
const op = isAsc !== isBackward ? "$gt" : "$lt";
|
|
114
948
|
if (values[i] === null) {
|
|
@@ -204,14 +1038,37 @@ var TypedFindCursor = class {
|
|
|
204
1038
|
this.cursor.limit(n);
|
|
205
1039
|
return this;
|
|
206
1040
|
}
|
|
1041
|
+
/**
|
|
1042
|
+
* Force the query optimizer to use the specified index.
|
|
1043
|
+
*
|
|
1044
|
+
* Only accepts index names that were declared via `.name()` in the
|
|
1045
|
+
* collection definition. If no named indexes exist, any string is accepted.
|
|
1046
|
+
*
|
|
1047
|
+
* @param indexName - The name of a declared compound index.
|
|
1048
|
+
* @returns `this` for chaining.
|
|
1049
|
+
*
|
|
1050
|
+
* @example
|
|
1051
|
+
* ```ts
|
|
1052
|
+
* const Users = collection('users', { email: z.string(), role: z.string() }, {
|
|
1053
|
+
* indexes: [index({ email: 1, role: -1 }).name('email_role_idx')],
|
|
1054
|
+
* })
|
|
1055
|
+
* const admins = await users.find({ role: 'admin' })
|
|
1056
|
+
* .hint('email_role_idx')
|
|
1057
|
+
* .toArray()
|
|
1058
|
+
* ```
|
|
1059
|
+
*/
|
|
1060
|
+
hint(indexName) {
|
|
1061
|
+
this.cursor.hint(indexName);
|
|
1062
|
+
return this;
|
|
1063
|
+
}
|
|
207
1064
|
async paginate(opts) {
|
|
208
1065
|
const sortRecord = this.sortSpec ? this.sortSpec : null;
|
|
209
|
-
const
|
|
210
|
-
const sort = Object.fromEntries(
|
|
1066
|
+
const sortKeys2 = resolveSortKeys(sortRecord);
|
|
1067
|
+
const sort = Object.fromEntries(sortKeys2);
|
|
211
1068
|
if ("page" in opts) {
|
|
212
|
-
return await this.offsetPaginate(
|
|
1069
|
+
return await this.offsetPaginate(sortKeys2, sort, opts);
|
|
213
1070
|
}
|
|
214
|
-
return await this.cursorPaginate(
|
|
1071
|
+
return await this.cursorPaginate(sortKeys2, sort, opts);
|
|
215
1072
|
}
|
|
216
1073
|
/** @internal Offset pagination implementation. */
|
|
217
1074
|
async offsetPaginate(_sortKeys, sort, opts) {
|
|
@@ -232,16 +1089,16 @@ var TypedFindCursor = class {
|
|
|
232
1089
|
};
|
|
233
1090
|
}
|
|
234
1091
|
/** @internal Cursor pagination implementation. */
|
|
235
|
-
async cursorPaginate(
|
|
1092
|
+
async cursorPaginate(sortKeys2, sort, opts) {
|
|
236
1093
|
let isBackward = false;
|
|
237
1094
|
let combinedFilter = this.filter;
|
|
238
1095
|
if (opts.cursor) {
|
|
239
1096
|
const decoded = decodeCursor(opts.cursor);
|
|
240
1097
|
isBackward = decoded.direction === "b";
|
|
241
|
-
const cursorFilter = buildCursorFilter(
|
|
1098
|
+
const cursorFilter = buildCursorFilter(sortKeys2, decoded.values, isBackward);
|
|
242
1099
|
combinedFilter = this.filter && Object.keys(this.filter).length > 0 ? { $and: [this.filter, cursorFilter] } : cursorFilter;
|
|
243
1100
|
}
|
|
244
|
-
const effectiveSort = isBackward ? Object.fromEntries(
|
|
1101
|
+
const effectiveSort = isBackward ? Object.fromEntries(sortKeys2.map(([f, d]) => [f, d === 1 ? -1 : 1])) : sort;
|
|
245
1102
|
const raw2 = await this.nativeCollection.find(combinedFilter).sort(effectiveSort).limit(opts.limit + 1).toArray();
|
|
246
1103
|
const hasMore = raw2.length > opts.limit;
|
|
247
1104
|
if (hasMore) raw2.pop();
|
|
@@ -251,8 +1108,8 @@ var TypedFindCursor = class {
|
|
|
251
1108
|
docs,
|
|
252
1109
|
hasNext: isBackward ? true : hasMore,
|
|
253
1110
|
hasPrev: isBackward ? hasMore : opts.cursor != null,
|
|
254
|
-
startCursor: docs.length > 0 ? encodeCursor(docs[0],
|
|
255
|
-
endCursor: docs.length > 0 ? encodeCursor(docs[docs.length - 1],
|
|
1111
|
+
startCursor: docs.length > 0 ? encodeCursor(docs[0], sortKeys2, "b") : null,
|
|
1112
|
+
endCursor: docs.length > 0 ? encodeCursor(docs[docs.length - 1], sortKeys2, "f") : null
|
|
256
1113
|
};
|
|
257
1114
|
}
|
|
258
1115
|
/**
|
|
@@ -302,7 +1159,7 @@ var TypedFindCursor = class {
|
|
|
302
1159
|
try {
|
|
303
1160
|
return this.schema.parse(raw2);
|
|
304
1161
|
} catch (err) {
|
|
305
|
-
if (err instanceof
|
|
1162
|
+
if (err instanceof z3.ZodError) {
|
|
306
1163
|
throw new ZodmonValidationError(this.collectionName, err);
|
|
307
1164
|
}
|
|
308
1165
|
throw err;
|
|
@@ -312,6 +1169,7 @@ var TypedFindCursor = class {
|
|
|
312
1169
|
|
|
313
1170
|
// src/crud/find.ts
|
|
314
1171
|
async function findOne(handle, filter, options) {
|
|
1172
|
+
checkUnindexedFields(handle.definition, filter);
|
|
315
1173
|
const findOptions = options?.project ? { projection: options.project } : void 0;
|
|
316
1174
|
const raw2 = await handle.native.findOne(filter, findOptions);
|
|
317
1175
|
if (!raw2) return null;
|
|
@@ -322,7 +1180,7 @@ async function findOne(handle, filter, options) {
|
|
|
322
1180
|
try {
|
|
323
1181
|
return handle.definition.schema.parse(raw2);
|
|
324
1182
|
} catch (err) {
|
|
325
|
-
if (err instanceof
|
|
1183
|
+
if (err instanceof z4.ZodError) {
|
|
326
1184
|
throw new ZodmonValidationError(handle.definition.name, err);
|
|
327
1185
|
}
|
|
328
1186
|
throw err;
|
|
@@ -336,6 +1194,7 @@ async function findOneOrThrow(handle, filter, options) {
|
|
|
336
1194
|
return doc;
|
|
337
1195
|
}
|
|
338
1196
|
function find(handle, filter, options) {
|
|
1197
|
+
checkUnindexedFields(handle.definition, filter);
|
|
339
1198
|
const raw2 = handle.native.find(filter);
|
|
340
1199
|
const cursor = raw2;
|
|
341
1200
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
@@ -343,13 +1202,13 @@ function find(handle, filter, options) {
|
|
|
343
1202
|
}
|
|
344
1203
|
|
|
345
1204
|
// src/crud/insert.ts
|
|
346
|
-
import { z as
|
|
1205
|
+
import { z as z5 } from "zod";
|
|
347
1206
|
async function insertOne(handle, doc) {
|
|
348
1207
|
let parsed;
|
|
349
1208
|
try {
|
|
350
1209
|
parsed = handle.definition.schema.parse(doc);
|
|
351
1210
|
} catch (err) {
|
|
352
|
-
if (err instanceof
|
|
1211
|
+
if (err instanceof z5.ZodError) {
|
|
353
1212
|
throw new ZodmonValidationError(handle.definition.name, err);
|
|
354
1213
|
}
|
|
355
1214
|
throw err;
|
|
@@ -364,7 +1223,7 @@ async function insertMany(handle, docs) {
|
|
|
364
1223
|
try {
|
|
365
1224
|
parsed.push(handle.definition.schema.parse(doc));
|
|
366
1225
|
} catch (err) {
|
|
367
|
-
if (err instanceof
|
|
1226
|
+
if (err instanceof z5.ZodError) {
|
|
368
1227
|
throw new ZodmonValidationError(handle.definition.name, err);
|
|
369
1228
|
}
|
|
370
1229
|
throw err;
|
|
@@ -375,7 +1234,7 @@ async function insertMany(handle, docs) {
|
|
|
375
1234
|
}
|
|
376
1235
|
|
|
377
1236
|
// src/crud/update.ts
|
|
378
|
-
import { z as
|
|
1237
|
+
import { z as z6 } from "zod";
|
|
379
1238
|
async function updateOne(handle, filter, update, options) {
|
|
380
1239
|
return await handle.native.updateOne(filter, update, options);
|
|
381
1240
|
}
|
|
@@ -406,7 +1265,7 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
406
1265
|
try {
|
|
407
1266
|
return handle.definition.schema.parse(result);
|
|
408
1267
|
} catch (err) {
|
|
409
|
-
if (err instanceof
|
|
1268
|
+
if (err instanceof z6.ZodError) {
|
|
410
1269
|
throw new ZodmonValidationError(handle.definition.name, err);
|
|
411
1270
|
}
|
|
412
1271
|
throw err;
|
|
@@ -667,6 +1526,57 @@ var CollectionHandle = class {
|
|
|
667
1526
|
async findOneAndDelete(filter, options) {
|
|
668
1527
|
return await findOneAndDelete(this, filter, options);
|
|
669
1528
|
}
|
|
1529
|
+
/**
|
|
1530
|
+
* Synchronize the indexes declared in this collection's schema with MongoDB.
|
|
1531
|
+
*
|
|
1532
|
+
* Compares the desired indexes (from field-level `.index()` / `.unique()` /
|
|
1533
|
+
* `.text()` / `.expireAfter()` and compound `indexes` in collection options)
|
|
1534
|
+
* with the indexes that currently exist in MongoDB, then creates, drops, or
|
|
1535
|
+
* reports differences depending on the options.
|
|
1536
|
+
*
|
|
1537
|
+
* @param options - Optional sync behavior (dryRun, dropOrphaned).
|
|
1538
|
+
* @returns A summary of created, dropped, skipped, and stale indexes.
|
|
1539
|
+
*
|
|
1540
|
+
* @example
|
|
1541
|
+
* ```ts
|
|
1542
|
+
* const users = db.use(Users)
|
|
1543
|
+
* const result = await users.syncIndexes()
|
|
1544
|
+
* console.log('Created:', result.created)
|
|
1545
|
+
* console.log('Stale:', result.stale.map(s => s.name))
|
|
1546
|
+
* ```
|
|
1547
|
+
*
|
|
1548
|
+
* @example
|
|
1549
|
+
* ```ts
|
|
1550
|
+
* // Dry run to preview changes without modifying the database
|
|
1551
|
+
* const diff = await users.syncIndexes({ dryRun: true })
|
|
1552
|
+
* console.log('Would create:', diff.created)
|
|
1553
|
+
* console.log('Would drop:', diff.dropped)
|
|
1554
|
+
* ```
|
|
1555
|
+
*/
|
|
1556
|
+
async syncIndexes(options) {
|
|
1557
|
+
return await syncIndexes(this, options);
|
|
1558
|
+
}
|
|
1559
|
+
/**
|
|
1560
|
+
* Start a type-safe aggregation pipeline on this collection.
|
|
1561
|
+
*
|
|
1562
|
+
* Returns a fluent pipeline builder that tracks the output document
|
|
1563
|
+
* shape through each stage. The pipeline is lazy — no query executes
|
|
1564
|
+
* until a terminal method (`toArray`, `for await`, `explain`) is called.
|
|
1565
|
+
*
|
|
1566
|
+
* @returns A new pipeline builder starting with this collection's document type.
|
|
1567
|
+
*
|
|
1568
|
+
* @example
|
|
1569
|
+
* ```ts
|
|
1570
|
+
* const users = db.use(Users)
|
|
1571
|
+
* const result = await users.aggregate()
|
|
1572
|
+
* .match({ role: 'admin' })
|
|
1573
|
+
* .groupBy('role', { count: $count() })
|
|
1574
|
+
* .toArray()
|
|
1575
|
+
* ```
|
|
1576
|
+
*/
|
|
1577
|
+
aggregate() {
|
|
1578
|
+
return aggregate(this);
|
|
1579
|
+
}
|
|
670
1580
|
};
|
|
671
1581
|
|
|
672
1582
|
// src/client/client.ts
|
|
@@ -699,12 +1609,33 @@ var Database = class {
|
|
|
699
1609
|
);
|
|
700
1610
|
}
|
|
701
1611
|
/**
|
|
702
|
-
* Synchronize indexes
|
|
1612
|
+
* Synchronize indexes for all registered collections with MongoDB.
|
|
1613
|
+
*
|
|
1614
|
+
* Iterates every collection registered via {@link use} and calls
|
|
1615
|
+
* {@link syncIndexes} on each one. Returns a record keyed by collection
|
|
1616
|
+
* name with the sync result for each.
|
|
1617
|
+
*
|
|
1618
|
+
* @param options - Optional sync behavior (dryRun, dropOrphaned).
|
|
1619
|
+
* @returns A record mapping collection names to their sync results.
|
|
703
1620
|
*
|
|
704
|
-
*
|
|
1621
|
+
* @example
|
|
1622
|
+
* ```ts
|
|
1623
|
+
* const db = createClient('mongodb://localhost:27017', 'myapp')
|
|
1624
|
+
* db.use(Users)
|
|
1625
|
+
* db.use(Posts)
|
|
1626
|
+
* const results = await db.syncIndexes()
|
|
1627
|
+
* console.log(results['users'].created) // ['email_1']
|
|
1628
|
+
* console.log(results['posts'].created) // ['title_1']
|
|
1629
|
+
* ```
|
|
705
1630
|
*/
|
|
706
|
-
syncIndexes() {
|
|
707
|
-
|
|
1631
|
+
async syncIndexes(options) {
|
|
1632
|
+
const results = {};
|
|
1633
|
+
for (const [name, def] of this._collections) {
|
|
1634
|
+
const native = this._db.collection(name);
|
|
1635
|
+
const handle = new CollectionHandle(def, native);
|
|
1636
|
+
results[name] = await syncIndexes(handle, options);
|
|
1637
|
+
}
|
|
1638
|
+
return results;
|
|
708
1639
|
}
|
|
709
1640
|
/**
|
|
710
1641
|
* Execute a function within a MongoDB transaction with auto-commit/rollback.
|
|
@@ -749,36 +1680,6 @@ import { z as z9 } from "zod";
|
|
|
749
1680
|
|
|
750
1681
|
// src/schema/extensions.ts
|
|
751
1682
|
import { z as z7 } from "zod";
|
|
752
|
-
|
|
753
|
-
// src/schema/ref.ts
|
|
754
|
-
import { z as z6 } from "zod";
|
|
755
|
-
var refMetadata = /* @__PURE__ */ new WeakMap();
|
|
756
|
-
function getRefMetadata(schema) {
|
|
757
|
-
if (typeof schema !== "object" || schema === null) return void 0;
|
|
758
|
-
return refMetadata.get(schema);
|
|
759
|
-
}
|
|
760
|
-
var REF_GUARD = /* @__PURE__ */ Symbol.for("zodmon_ref");
|
|
761
|
-
function installRefExtension() {
|
|
762
|
-
const proto = z6.ZodType.prototype;
|
|
763
|
-
if (REF_GUARD in proto) return;
|
|
764
|
-
Object.defineProperty(proto, "ref", {
|
|
765
|
-
value(collection2) {
|
|
766
|
-
refMetadata.set(this, { collection: collection2 });
|
|
767
|
-
return this;
|
|
768
|
-
},
|
|
769
|
-
enumerable: true,
|
|
770
|
-
configurable: true,
|
|
771
|
-
writable: true
|
|
772
|
-
});
|
|
773
|
-
Object.defineProperty(proto, REF_GUARD, {
|
|
774
|
-
value: true,
|
|
775
|
-
enumerable: false,
|
|
776
|
-
configurable: false,
|
|
777
|
-
writable: false
|
|
778
|
-
});
|
|
779
|
-
}
|
|
780
|
-
|
|
781
|
-
// src/schema/extensions.ts
|
|
782
1683
|
var indexMetadata = /* @__PURE__ */ new WeakMap();
|
|
783
1684
|
function getIndexMetadata(schema) {
|
|
784
1685
|
if (typeof schema !== "object" || schema === null) return void 0;
|
|
@@ -919,6 +1820,8 @@ function collection(name, shape, options) {
|
|
|
919
1820
|
schema,
|
|
920
1821
|
shape,
|
|
921
1822
|
fieldIndexes,
|
|
1823
|
+
// Safe cast: compoundIndexes is TIndexes at runtime (or an empty array when
|
|
1824
|
+
// no options provided). The spread into [...TIndexes] preserves the tuple type.
|
|
922
1825
|
compoundIndexes: compoundIndexes ?? [],
|
|
923
1826
|
options: {
|
|
924
1827
|
validation: validation ?? "strict",
|
|
@@ -943,12 +1846,29 @@ var IndexBuilder = class _IndexBuilder {
|
|
|
943
1846
|
options
|
|
944
1847
|
});
|
|
945
1848
|
}
|
|
1849
|
+
// Safe cast: _clone returns IndexBuilder<TKeys> but `this` may carry an
|
|
1850
|
+
// intersection from .name(). The cast is safe because _clone preserves all fields.
|
|
946
1851
|
unique() {
|
|
947
1852
|
return this._clone({ ...this.options, unique: true });
|
|
948
1853
|
}
|
|
1854
|
+
// Safe cast: same reasoning as unique().
|
|
949
1855
|
sparse() {
|
|
950
1856
|
return this._clone({ ...this.options, sparse: true });
|
|
951
1857
|
}
|
|
1858
|
+
/**
|
|
1859
|
+
* Set a custom name for this index, preserving the literal type.
|
|
1860
|
+
*
|
|
1861
|
+
* The returned builder carries the literal name type via an intersection,
|
|
1862
|
+
* enabling type-safe `.hint()` on cursors that only accepts declared names.
|
|
1863
|
+
*
|
|
1864
|
+
* @param name - The index name.
|
|
1865
|
+
* @returns A new IndexBuilder with the name recorded at the type level.
|
|
1866
|
+
*
|
|
1867
|
+
* @example
|
|
1868
|
+
* ```ts
|
|
1869
|
+
* index({ email: 1, role: -1 }).name('email_role_idx')
|
|
1870
|
+
* ```
|
|
1871
|
+
*/
|
|
952
1872
|
name(name) {
|
|
953
1873
|
return this._clone({ ...this.options, name });
|
|
954
1874
|
}
|
|
@@ -1009,30 +1929,45 @@ var $ = {
|
|
|
1009
1929
|
};
|
|
1010
1930
|
export {
|
|
1011
1931
|
$,
|
|
1932
|
+
$addToSet,
|
|
1012
1933
|
$and,
|
|
1934
|
+
$avg,
|
|
1935
|
+
$count,
|
|
1013
1936
|
$eq,
|
|
1014
1937
|
$exists,
|
|
1938
|
+
$first,
|
|
1015
1939
|
$gt,
|
|
1016
1940
|
$gte,
|
|
1017
1941
|
$in,
|
|
1942
|
+
$last,
|
|
1018
1943
|
$lt,
|
|
1019
1944
|
$lte,
|
|
1945
|
+
$max,
|
|
1946
|
+
$min,
|
|
1020
1947
|
$ne,
|
|
1021
1948
|
$nin,
|
|
1022
1949
|
$nor,
|
|
1023
1950
|
$not,
|
|
1024
1951
|
$or,
|
|
1952
|
+
$push,
|
|
1025
1953
|
$regex,
|
|
1954
|
+
$sum,
|
|
1955
|
+
AggregatePipeline,
|
|
1026
1956
|
CollectionHandle,
|
|
1027
1957
|
Database,
|
|
1028
1958
|
IndexBuilder,
|
|
1029
1959
|
TypedFindCursor,
|
|
1030
1960
|
ZodmonNotFoundError,
|
|
1031
1961
|
ZodmonValidationError,
|
|
1962
|
+
aggregate,
|
|
1963
|
+
checkUnindexedFields,
|
|
1032
1964
|
collection,
|
|
1965
|
+
createAccumulatorBuilder,
|
|
1033
1966
|
createClient,
|
|
1967
|
+
createExpressionBuilder,
|
|
1034
1968
|
deleteMany,
|
|
1035
1969
|
deleteOne,
|
|
1970
|
+
extractComparableOptions,
|
|
1036
1971
|
extractDbName,
|
|
1037
1972
|
extractFieldIndexes,
|
|
1038
1973
|
find,
|
|
@@ -1040,6 +1975,7 @@ export {
|
|
|
1040
1975
|
findOneAndDelete,
|
|
1041
1976
|
findOneAndUpdate,
|
|
1042
1977
|
findOneOrThrow,
|
|
1978
|
+
generateIndexName,
|
|
1043
1979
|
getIndexMetadata,
|
|
1044
1980
|
getRefMetadata,
|
|
1045
1981
|
index,
|
|
@@ -1049,6 +1985,10 @@ export {
|
|
|
1049
1985
|
objectId,
|
|
1050
1986
|
oid,
|
|
1051
1987
|
raw,
|
|
1988
|
+
serializeIndexKey,
|
|
1989
|
+
syncIndexes,
|
|
1990
|
+
toCompoundIndexSpec,
|
|
1991
|
+
toFieldIndexSpec,
|
|
1052
1992
|
updateMany,
|
|
1053
1993
|
updateOne
|
|
1054
1994
|
};
|