@zodmon/core 0.10.0 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +991 -234
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1914 -344
- package/dist/index.d.ts +1914 -344
- package/dist/index.js +980 -234
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -11,30 +11,24 @@ var $avg = (field) => ({
|
|
|
11
11
|
__accum: true,
|
|
12
12
|
expr: { $avg: field }
|
|
13
13
|
});
|
|
14
|
-
|
|
15
|
-
__accum: true,
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
expr: { $
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
__accum: true,
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
expr: { $push: field }
|
|
33
|
-
});
|
|
34
|
-
var $addToSet = (field) => ({
|
|
35
|
-
__accum: true,
|
|
36
|
-
expr: { $addToSet: field }
|
|
37
|
-
});
|
|
14
|
+
function $min(field) {
|
|
15
|
+
return { __accum: true, expr: { $min: field } };
|
|
16
|
+
}
|
|
17
|
+
function $max(field) {
|
|
18
|
+
return { __accum: true, expr: { $max: field } };
|
|
19
|
+
}
|
|
20
|
+
function $first(field) {
|
|
21
|
+
return { __accum: true, expr: { $first: field } };
|
|
22
|
+
}
|
|
23
|
+
function $last(field) {
|
|
24
|
+
return { __accum: true, expr: { $last: field } };
|
|
25
|
+
}
|
|
26
|
+
function $push(field) {
|
|
27
|
+
return { __accum: true, expr: { $push: field } };
|
|
28
|
+
}
|
|
29
|
+
function $addToSet(field) {
|
|
30
|
+
return { __accum: true, expr: { $addToSet: field } };
|
|
31
|
+
}
|
|
38
32
|
function createAccumulatorBuilder() {
|
|
39
33
|
return {
|
|
40
34
|
count: () => ({ __accum: true, expr: { $sum: 1 } }),
|
|
@@ -52,50 +46,84 @@ function createAccumulatorBuilder() {
|
|
|
52
46
|
// biome-ignore lint/suspicious/noExplicitAny: Runtime implementation uses string field names and returns plain objects — TypeScript cannot verify that the runtime Accumulator objects match the generic AccumulatorBuilder<T> return types. Safe because type resolution happens at compile time via AccumulatorBuilder<T>, and runtime values are identical to what the standalone $min/$max/etc. produce.
|
|
53
47
|
};
|
|
54
48
|
}
|
|
49
|
+
var isExpr = (v) => typeof v === "object" && v !== null && v.__expr === true;
|
|
55
50
|
function createExpressionBuilder() {
|
|
56
|
-
const
|
|
57
|
-
|
|
51
|
+
const resolveArg = (arg) => {
|
|
52
|
+
if (typeof arg === "number") return arg;
|
|
53
|
+
if (isExpr(arg)) return arg.value;
|
|
54
|
+
return `$${arg}`;
|
|
55
|
+
};
|
|
56
|
+
const resolveExprVal = (v) => isExpr(v) ? v.value : v;
|
|
58
57
|
const expr = (value) => ({ __expr: true, value });
|
|
59
58
|
return {
|
|
60
59
|
// Arithmetic
|
|
61
|
-
add: (
|
|
62
|
-
subtract: (
|
|
63
|
-
multiply: (
|
|
64
|
-
divide: (
|
|
65
|
-
mod: (
|
|
66
|
-
abs: (field) => expr({ $abs:
|
|
67
|
-
ceil: (field) => expr({ $ceil:
|
|
68
|
-
floor: (field) => expr({ $floor:
|
|
69
|
-
round: (field, place = 0) => expr({ $round: [
|
|
60
|
+
add: (a, b) => expr({ $add: [resolveArg(a), resolveArg(b)] }),
|
|
61
|
+
subtract: (a, b) => expr({ $subtract: [resolveArg(a), resolveArg(b)] }),
|
|
62
|
+
multiply: (a, b) => expr({ $multiply: [resolveArg(a), resolveArg(b)] }),
|
|
63
|
+
divide: (a, b) => expr({ $divide: [resolveArg(a), resolveArg(b)] }),
|
|
64
|
+
mod: (a, b) => expr({ $mod: [resolveArg(a), resolveArg(b)] }),
|
|
65
|
+
abs: (field) => expr({ $abs: resolveArg(field) }),
|
|
66
|
+
ceil: (field) => expr({ $ceil: resolveArg(field) }),
|
|
67
|
+
floor: (field) => expr({ $floor: resolveArg(field) }),
|
|
68
|
+
round: (field, place = 0) => expr({ $round: [resolveArg(field), place] }),
|
|
70
69
|
// String
|
|
71
70
|
concat: (...parts) => {
|
|
72
71
|
const resolved = parts.map((p) => {
|
|
73
|
-
if (
|
|
72
|
+
if (isExpr(p)) return p.value;
|
|
73
|
+
if (/^[a-zA-Z_][a-zA-Z0-9_.]*$/.test(p)) return `$${p}`;
|
|
74
74
|
return p;
|
|
75
75
|
});
|
|
76
76
|
return expr({ $concat: resolved });
|
|
77
77
|
},
|
|
78
|
-
toLower: (field) => expr({ $toLower:
|
|
79
|
-
toUpper: (field) => expr({ $toUpper:
|
|
80
|
-
trim: (field) => expr({ $trim: { input:
|
|
81
|
-
substr: (field, start, length) => expr({ $substrBytes: [
|
|
82
|
-
// Comparison
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
78
|
+
toLower: (field) => expr({ $toLower: resolveArg(field) }),
|
|
79
|
+
toUpper: (field) => expr({ $toUpper: resolveArg(field) }),
|
|
80
|
+
trim: (field) => expr({ $trim: { input: resolveArg(field) } }),
|
|
81
|
+
substr: (field, start, length) => expr({ $substrBytes: [resolveArg(field), start, length] }),
|
|
82
|
+
// Comparison — single runtime implementation handles both overloads:
|
|
83
|
+
// field path → resolveArg('name') → '$name'
|
|
84
|
+
// expression → resolveArg(expr.sub(...)) → { $subtract: [...] }
|
|
85
|
+
eq: (field, value) => expr({ $eq: [resolveArg(field), resolveExprVal(value)] }),
|
|
86
|
+
gt: (field, value) => expr({ $gt: [resolveArg(field), resolveExprVal(value)] }),
|
|
87
|
+
gte: (field, value) => expr({ $gte: [resolveArg(field), resolveExprVal(value)] }),
|
|
88
|
+
lt: (field, value) => expr({ $lt: [resolveArg(field), resolveExprVal(value)] }),
|
|
89
|
+
lte: (field, value) => expr({ $lte: [resolveArg(field), resolveExprVal(value)] }),
|
|
90
|
+
ne: (field, value) => expr({ $ne: [resolveArg(field), resolveExprVal(value)] }),
|
|
89
91
|
// Date
|
|
90
|
-
year: (field) => expr({ $year:
|
|
91
|
-
month: (field) => expr({ $month:
|
|
92
|
-
dayOfMonth: (field) => expr({ $dayOfMonth:
|
|
92
|
+
year: (field) => expr({ $year: resolveArg(field) }),
|
|
93
|
+
month: (field) => expr({ $month: resolveArg(field) }),
|
|
94
|
+
dayOfMonth: (field) => expr({ $dayOfMonth: resolveArg(field) }),
|
|
93
95
|
// Array
|
|
94
|
-
size: (field) => expr({ $size:
|
|
96
|
+
size: (field) => expr({ $size: resolveArg(field) }),
|
|
95
97
|
// Conditional
|
|
96
|
-
cond: (condition, thenValue, elseValue) => expr({
|
|
97
|
-
|
|
98
|
-
|
|
98
|
+
cond: (condition, thenValue, elseValue) => expr({
|
|
99
|
+
$cond: [condition.value, resolveExprVal(thenValue), resolveExprVal(elseValue)]
|
|
100
|
+
}),
|
|
101
|
+
ifNull: (field, fallback) => expr({ $ifNull: [resolveArg(field), fallback] }),
|
|
102
|
+
// Date (extended)
|
|
103
|
+
dayOfWeek: (field) => expr({ $dayOfWeek: resolveArg(field) }),
|
|
104
|
+
dateToString: (field, format) => expr({ $dateToString: { format, date: resolveArg(field) } }),
|
|
105
|
+
// $$NOW is a MongoDB system variable string — not a Document, but valid anywhere
|
|
106
|
+
// an aggregation expression is expected. Cast is safe; the MongoDB driver accepts it.
|
|
107
|
+
now: () => ({ __expr: true, value: "$$NOW" }),
|
|
108
|
+
// String conversion
|
|
109
|
+
toString: (field) => expr({ $toString: resolveArg(field) }),
|
|
110
|
+
// Array (extended)
|
|
111
|
+
inArray: (value, array) => expr({ $in: [resolveArg(value), Array.isArray(array) ? array : resolveArg(array)] }),
|
|
112
|
+
arrayElemAt: (field, index2) => expr({ $arrayElemAt: [resolveArg(field), index2] }),
|
|
113
|
+
// Conditional (extended)
|
|
114
|
+
switch: (branches, fallback) => expr({
|
|
115
|
+
$switch: {
|
|
116
|
+
branches: branches.map((b) => ({
|
|
117
|
+
case: b.case.value,
|
|
118
|
+
// biome-ignore lint/suspicious/noThenProperty: MongoDB $switch branch object requires a `then` key
|
|
119
|
+
then: resolveExprVal(b.then)
|
|
120
|
+
})),
|
|
121
|
+
default: resolveExprVal(fallback)
|
|
122
|
+
}
|
|
123
|
+
}),
|
|
124
|
+
// Field reference
|
|
125
|
+
field: (name) => ({ __expr: true, value: `$${name}` })
|
|
126
|
+
// biome-ignore lint/suspicious/noExplicitAny: Runtime implementation uses resolveArg/resolveExprVal — TypeScript cannot verify generic ExpressionBuilder<T> return types match. Safe because type resolution happens at compile time via ExpressionBuilder<T>.
|
|
99
127
|
};
|
|
100
128
|
}
|
|
101
129
|
|
|
@@ -110,9 +138,9 @@ var ZodmonError = class extends Error {
|
|
|
110
138
|
/** The underlying error that caused this error, if any. */
|
|
111
139
|
cause;
|
|
112
140
|
constructor(message, collection2, options) {
|
|
113
|
-
super(message);
|
|
141
|
+
super(message, options?.cause !== void 0 ? { cause: options.cause } : void 0);
|
|
114
142
|
this.collection = collection2;
|
|
115
|
-
if (options?.cause) {
|
|
143
|
+
if (options?.cause !== void 0) {
|
|
116
144
|
this.cause = options.cause;
|
|
117
145
|
}
|
|
118
146
|
}
|
|
@@ -371,10 +399,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
371
399
|
definition;
|
|
372
400
|
nativeCollection;
|
|
373
401
|
stages;
|
|
374
|
-
|
|
402
|
+
session;
|
|
403
|
+
constructor(definition, nativeCollection, stages, session) {
|
|
375
404
|
this.definition = definition;
|
|
376
405
|
this.nativeCollection = nativeCollection;
|
|
377
406
|
this.stages = stages;
|
|
407
|
+
this.session = session;
|
|
378
408
|
}
|
|
379
409
|
/**
|
|
380
410
|
* Append an arbitrary aggregation stage to the pipeline (escape hatch).
|
|
@@ -405,10 +435,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
405
435
|
* ```
|
|
406
436
|
*/
|
|
407
437
|
raw(stage) {
|
|
408
|
-
return new _AggregatePipeline(
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
438
|
+
return new _AggregatePipeline(
|
|
439
|
+
this.definition,
|
|
440
|
+
this.nativeCollection,
|
|
441
|
+
[...this.stages, stage],
|
|
442
|
+
this.session
|
|
443
|
+
);
|
|
412
444
|
}
|
|
413
445
|
/**
|
|
414
446
|
* Execute the pipeline and return all results as an array.
|
|
@@ -424,7 +456,10 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
424
456
|
*/
|
|
425
457
|
async toArray() {
|
|
426
458
|
try {
|
|
427
|
-
const cursor = this.nativeCollection.aggregate(
|
|
459
|
+
const cursor = this.nativeCollection.aggregate(
|
|
460
|
+
this.stages,
|
|
461
|
+
this.session ? { session: this.session } : {}
|
|
462
|
+
);
|
|
428
463
|
return await cursor.toArray();
|
|
429
464
|
} catch (err) {
|
|
430
465
|
wrapMongoError(err, this.definition.name);
|
|
@@ -444,7 +479,10 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
444
479
|
*/
|
|
445
480
|
async *[Symbol.asyncIterator]() {
|
|
446
481
|
try {
|
|
447
|
-
const cursor = this.nativeCollection.aggregate(
|
|
482
|
+
const cursor = this.nativeCollection.aggregate(
|
|
483
|
+
this.stages,
|
|
484
|
+
this.session ? { session: this.session } : {}
|
|
485
|
+
);
|
|
448
486
|
for await (const doc of cursor) {
|
|
449
487
|
yield doc;
|
|
450
488
|
}
|
|
@@ -470,7 +508,10 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
470
508
|
*/
|
|
471
509
|
async explain() {
|
|
472
510
|
try {
|
|
473
|
-
const cursor = this.nativeCollection.aggregate(
|
|
511
|
+
const cursor = this.nativeCollection.aggregate(
|
|
512
|
+
this.stages,
|
|
513
|
+
this.session ? { session: this.session } : {}
|
|
514
|
+
);
|
|
474
515
|
return await cursor.explain();
|
|
475
516
|
} catch (err) {
|
|
476
517
|
wrapMongoError(err, this.definition.name);
|
|
@@ -520,12 +561,30 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
520
561
|
* .toArray()
|
|
521
562
|
* // subset[0].role → 'engineer' | 'designer'
|
|
522
563
|
* ```
|
|
564
|
+
*
|
|
565
|
+
* @example
|
|
566
|
+
* ```ts
|
|
567
|
+
* // Field-vs-field comparison via $expr callback
|
|
568
|
+
* const overRefunded = await orders.aggregate()
|
|
569
|
+
* .match(
|
|
570
|
+
* { status: 'completed' },
|
|
571
|
+
* (expr) => expr.gt('totalAmount', expr.field('refundedAmount')),
|
|
572
|
+
* )
|
|
573
|
+
* .toArray()
|
|
574
|
+
* ```
|
|
523
575
|
*/
|
|
524
|
-
match(filter) {
|
|
525
|
-
const
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
576
|
+
match(filter, exprCb) {
|
|
577
|
+
const stage = { ...filter };
|
|
578
|
+
if (exprCb) {
|
|
579
|
+
const built = exprCb(createExpressionBuilder());
|
|
580
|
+
stage["$expr"] = built.value;
|
|
581
|
+
}
|
|
582
|
+
const pipeline = new _AggregatePipeline(
|
|
583
|
+
this.definition,
|
|
584
|
+
this.nativeCollection,
|
|
585
|
+
[...this.stages, { $match: stage }],
|
|
586
|
+
this.session
|
|
587
|
+
);
|
|
529
588
|
return pipeline;
|
|
530
589
|
}
|
|
531
590
|
/**
|
|
@@ -545,10 +604,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
545
604
|
* ```
|
|
546
605
|
*/
|
|
547
606
|
sort(spec) {
|
|
548
|
-
return new _AggregatePipeline(
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
607
|
+
return new _AggregatePipeline(
|
|
608
|
+
this.definition,
|
|
609
|
+
this.nativeCollection,
|
|
610
|
+
[...this.stages, { $sort: spec }],
|
|
611
|
+
this.session
|
|
612
|
+
);
|
|
552
613
|
}
|
|
553
614
|
/**
|
|
554
615
|
* Skip a number of documents in the pipeline.
|
|
@@ -569,10 +630,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
569
630
|
* ```
|
|
570
631
|
*/
|
|
571
632
|
skip(n) {
|
|
572
|
-
return new _AggregatePipeline(
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
633
|
+
return new _AggregatePipeline(
|
|
634
|
+
this.definition,
|
|
635
|
+
this.nativeCollection,
|
|
636
|
+
[...this.stages, { $skip: n }],
|
|
637
|
+
this.session
|
|
638
|
+
);
|
|
576
639
|
}
|
|
577
640
|
/**
|
|
578
641
|
* Limit the number of documents passing through the pipeline.
|
|
@@ -592,10 +655,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
592
655
|
* ```
|
|
593
656
|
*/
|
|
594
657
|
limit(n) {
|
|
595
|
-
return new _AggregatePipeline(
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
658
|
+
return new _AggregatePipeline(
|
|
659
|
+
this.definition,
|
|
660
|
+
this.nativeCollection,
|
|
661
|
+
[...this.stages, { $limit: n }],
|
|
662
|
+
this.session
|
|
663
|
+
);
|
|
599
664
|
}
|
|
600
665
|
// ── Shape-transforming projection stages ─────────────────────────
|
|
601
666
|
/**
|
|
@@ -617,10 +682,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
617
682
|
* ```
|
|
618
683
|
*/
|
|
619
684
|
project(spec) {
|
|
620
|
-
const pipeline = new _AggregatePipeline(
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
685
|
+
const pipeline = new _AggregatePipeline(
|
|
686
|
+
this.definition,
|
|
687
|
+
this.nativeCollection,
|
|
688
|
+
[...this.stages, { $project: spec }],
|
|
689
|
+
this.session
|
|
690
|
+
);
|
|
624
691
|
return pipeline;
|
|
625
692
|
}
|
|
626
693
|
/**
|
|
@@ -641,10 +708,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
641
708
|
*/
|
|
642
709
|
pick(...fields) {
|
|
643
710
|
const spec = Object.fromEntries(fields.map((f) => [f, 1]));
|
|
644
|
-
const pipeline = new _AggregatePipeline(
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
711
|
+
const pipeline = new _AggregatePipeline(
|
|
712
|
+
this.definition,
|
|
713
|
+
this.nativeCollection,
|
|
714
|
+
[...this.stages, { $project: spec }],
|
|
715
|
+
this.session
|
|
716
|
+
);
|
|
648
717
|
return pipeline;
|
|
649
718
|
}
|
|
650
719
|
/**
|
|
@@ -665,22 +734,41 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
665
734
|
*/
|
|
666
735
|
omit(...fields) {
|
|
667
736
|
const spec = Object.fromEntries(fields.map((f) => [f, 0]));
|
|
668
|
-
const pipeline = new _AggregatePipeline(
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
737
|
+
const pipeline = new _AggregatePipeline(
|
|
738
|
+
this.definition,
|
|
739
|
+
this.nativeCollection,
|
|
740
|
+
[...this.stages, { $project: spec }],
|
|
741
|
+
this.session
|
|
742
|
+
);
|
|
672
743
|
return pipeline;
|
|
673
744
|
}
|
|
674
745
|
groupBy(field, accumulators) {
|
|
675
746
|
const resolved = typeof accumulators === "function" ? accumulators(createAccumulatorBuilder()) : accumulators;
|
|
676
|
-
|
|
747
|
+
let _id;
|
|
748
|
+
if (field === null) {
|
|
749
|
+
_id = null;
|
|
750
|
+
} else if (Array.isArray(field)) {
|
|
751
|
+
const entries = field.map((f) => [f.replaceAll(".", "_"), `$${f}`]);
|
|
752
|
+
const keys = entries.map(([k]) => k);
|
|
753
|
+
const dupes = keys.filter((k, i) => keys.indexOf(k) !== i);
|
|
754
|
+
if (dupes.length > 0) {
|
|
755
|
+
throw new Error(
|
|
756
|
+
`Compound groupBy key collision: ${dupes.join(", ")}. Two or more fields produce the same _id key after dot-to-underscore conversion. Use raw() with explicit aliases instead.`
|
|
757
|
+
);
|
|
758
|
+
}
|
|
759
|
+
_id = Object.fromEntries(entries);
|
|
760
|
+
} else {
|
|
761
|
+
_id = `$${field}`;
|
|
762
|
+
}
|
|
677
763
|
const accumExprs = Object.fromEntries(
|
|
678
764
|
Object.entries(resolved).map(([key, acc]) => [key, acc.expr])
|
|
679
765
|
);
|
|
680
|
-
const pipeline = new _AggregatePipeline(
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
766
|
+
const pipeline = new _AggregatePipeline(
|
|
767
|
+
this.definition,
|
|
768
|
+
this.nativeCollection,
|
|
769
|
+
[...this.stages, { $group: { _id, ...accumExprs } }],
|
|
770
|
+
this.session
|
|
771
|
+
);
|
|
684
772
|
return pipeline;
|
|
685
773
|
}
|
|
686
774
|
// Implementation
|
|
@@ -692,10 +780,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
692
780
|
v && typeof v === "object" && "__expr" in v ? v.value : v
|
|
693
781
|
])
|
|
694
782
|
);
|
|
695
|
-
const pipeline = new _AggregatePipeline(
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
783
|
+
const pipeline = new _AggregatePipeline(
|
|
784
|
+
this.definition,
|
|
785
|
+
this.nativeCollection,
|
|
786
|
+
[...this.stages, { $addFields: stage }],
|
|
787
|
+
this.session
|
|
788
|
+
);
|
|
699
789
|
return pipeline;
|
|
700
790
|
}
|
|
701
791
|
// ── unwind stage ─────────────────────────────────────────────────
|
|
@@ -721,10 +811,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
721
811
|
*/
|
|
722
812
|
unwind(field, options) {
|
|
723
813
|
const stage = options?.preserveEmpty ? { $unwind: { path: `$${field}`, preserveNullAndEmptyArrays: true } } : { $unwind: `$${field}` };
|
|
724
|
-
const pipeline = new _AggregatePipeline(
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
814
|
+
const pipeline = new _AggregatePipeline(
|
|
815
|
+
this.definition,
|
|
816
|
+
this.nativeCollection,
|
|
817
|
+
[...this.stages, stage],
|
|
818
|
+
this.session
|
|
819
|
+
);
|
|
728
820
|
return pipeline;
|
|
729
821
|
}
|
|
730
822
|
lookup(fieldOrFrom, options) {
|
|
@@ -772,7 +864,63 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
772
864
|
stages.push({ $unwind: { path: `$${asField}`, preserveNullAndEmptyArrays: true } });
|
|
773
865
|
}
|
|
774
866
|
}
|
|
775
|
-
const pipeline = new _AggregatePipeline(
|
|
867
|
+
const pipeline = new _AggregatePipeline(
|
|
868
|
+
this.definition,
|
|
869
|
+
this.nativeCollection,
|
|
870
|
+
stages,
|
|
871
|
+
this.session
|
|
872
|
+
);
|
|
873
|
+
return pipeline;
|
|
874
|
+
}
|
|
875
|
+
// ── facet stage ──────────────────────────────────────────────────
|
|
876
|
+
/**
|
|
877
|
+
* Run multiple sub-pipelines on the same input documents in parallel.
|
|
878
|
+
*
|
|
879
|
+
* Each key in `spec` maps to a callback that receives a fresh `SubPipeline`
|
|
880
|
+
* starting from `TOutput`. The callback chains stages and returns the terminal
|
|
881
|
+
* pipeline. Zodmon extracts the accumulated stages at runtime to build the
|
|
882
|
+
* `$facet` document. The output type is fully inferred — no annotation needed.
|
|
883
|
+
*
|
|
884
|
+
* Sub-pipelines support all stage methods including `.raw()` for operators not
|
|
885
|
+
* yet first-class. Execution methods (`toArray`, `explain`) are not available
|
|
886
|
+
* inside branches.
|
|
887
|
+
*
|
|
888
|
+
* @param spec - An object mapping branch names to sub-pipeline builder callbacks.
|
|
889
|
+
* @returns A new pipeline whose output is one document with each branch name mapped to an array of results.
|
|
890
|
+
*
|
|
891
|
+
* @example
|
|
892
|
+
* ```ts
|
|
893
|
+
* const [report] = await aggregate(orders)
|
|
894
|
+
* .facet({
|
|
895
|
+
* byCategory: (sub) => sub
|
|
896
|
+
* .groupBy('category', acc => ({ count: acc.count() }))
|
|
897
|
+
* .sort({ count: -1 }),
|
|
898
|
+
* totals: (sub) => sub
|
|
899
|
+
* .groupBy(null, acc => ({ grandTotal: acc.sum('amount') })),
|
|
900
|
+
* })
|
|
901
|
+
* .toArray()
|
|
902
|
+
* // report.byCategory → { _id: 'electronics' | 'books' | 'clothing'; count: number }[]
|
|
903
|
+
* // report.totals → { _id: null; grandTotal: number }[]
|
|
904
|
+
* ```
|
|
905
|
+
*/
|
|
906
|
+
facet(spec) {
|
|
907
|
+
const branches = {};
|
|
908
|
+
for (const [key, cb] of Object.entries(spec)) {
|
|
909
|
+
const sub = new _AggregatePipeline(
|
|
910
|
+
this.definition,
|
|
911
|
+
this.nativeCollection,
|
|
912
|
+
[],
|
|
913
|
+
this.session
|
|
914
|
+
// biome-ignore lint/suspicious/noExplicitAny: sub must be cast to `any` so the concrete `AggregatePipeline<TDef, TOutput>` is accepted where `SubPipeline<TDef, TOutput>` (which lacks execution methods) is expected — safe at runtime because the pipeline instance always has the right shape
|
|
915
|
+
);
|
|
916
|
+
branches[key] = cb(sub).getStages();
|
|
917
|
+
}
|
|
918
|
+
const pipeline = new _AggregatePipeline(
|
|
919
|
+
this.definition,
|
|
920
|
+
this.nativeCollection,
|
|
921
|
+
[...this.stages, { $facet: branches }],
|
|
922
|
+
this.session
|
|
923
|
+
);
|
|
776
924
|
return pipeline;
|
|
777
925
|
}
|
|
778
926
|
// ── Convenience shortcuts ────────────────────────────────────────
|
|
@@ -793,11 +941,16 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
793
941
|
* ```
|
|
794
942
|
*/
|
|
795
943
|
countBy(field) {
|
|
796
|
-
const pipeline = new _AggregatePipeline(
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
944
|
+
const pipeline = new _AggregatePipeline(
|
|
945
|
+
this.definition,
|
|
946
|
+
this.nativeCollection,
|
|
947
|
+
[
|
|
948
|
+
...this.stages,
|
|
949
|
+
{ $group: { _id: `$${field}`, count: { $sum: 1 } } },
|
|
950
|
+
{ $sort: { count: -1 } }
|
|
951
|
+
],
|
|
952
|
+
this.session
|
|
953
|
+
);
|
|
801
954
|
return pipeline;
|
|
802
955
|
}
|
|
803
956
|
/**
|
|
@@ -818,11 +971,16 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
818
971
|
* ```
|
|
819
972
|
*/
|
|
820
973
|
sumBy(field, sumField) {
|
|
821
|
-
const pipeline = new _AggregatePipeline(
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
974
|
+
const pipeline = new _AggregatePipeline(
|
|
975
|
+
this.definition,
|
|
976
|
+
this.nativeCollection,
|
|
977
|
+
[
|
|
978
|
+
...this.stages,
|
|
979
|
+
{ $group: { _id: `$${field}`, total: { $sum: `$${sumField}` } } },
|
|
980
|
+
{ $sort: { total: -1 } }
|
|
981
|
+
],
|
|
982
|
+
this.session
|
|
983
|
+
);
|
|
826
984
|
return pipeline;
|
|
827
985
|
}
|
|
828
986
|
/**
|
|
@@ -842,10 +1000,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
842
1000
|
* ```
|
|
843
1001
|
*/
|
|
844
1002
|
sortBy(field, direction = "asc") {
|
|
845
|
-
return new _AggregatePipeline(
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
1003
|
+
return new _AggregatePipeline(
|
|
1004
|
+
this.definition,
|
|
1005
|
+
this.nativeCollection,
|
|
1006
|
+
[...this.stages, { $sort: { [field]: direction === "desc" ? -1 : 1 } }],
|
|
1007
|
+
this.session
|
|
1008
|
+
);
|
|
849
1009
|
}
|
|
850
1010
|
/**
|
|
851
1011
|
* Return the top N documents sorted by a field descending.
|
|
@@ -864,11 +1024,12 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
864
1024
|
* ```
|
|
865
1025
|
*/
|
|
866
1026
|
top(n, options) {
|
|
867
|
-
return new _AggregatePipeline(
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
{ $limit: n }
|
|
871
|
-
|
|
1027
|
+
return new _AggregatePipeline(
|
|
1028
|
+
this.definition,
|
|
1029
|
+
this.nativeCollection,
|
|
1030
|
+
[...this.stages, { $sort: { [options.by]: -1 } }, { $limit: n }],
|
|
1031
|
+
this.session
|
|
1032
|
+
);
|
|
872
1033
|
}
|
|
873
1034
|
/**
|
|
874
1035
|
* Return the bottom N documents sorted by a field ascending.
|
|
@@ -887,15 +1048,25 @@ var AggregatePipeline = class _AggregatePipeline {
|
|
|
887
1048
|
* ```
|
|
888
1049
|
*/
|
|
889
1050
|
bottom(n, options) {
|
|
890
|
-
return new _AggregatePipeline(
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
{ $limit: n }
|
|
894
|
-
|
|
1051
|
+
return new _AggregatePipeline(
|
|
1052
|
+
this.definition,
|
|
1053
|
+
this.nativeCollection,
|
|
1054
|
+
[...this.stages, { $sort: { [options.by]: 1 } }, { $limit: n }],
|
|
1055
|
+
this.session
|
|
1056
|
+
);
|
|
1057
|
+
}
|
|
1058
|
+
/** @internal Used by facet() to extract branch stages. Not part of the public API. */
|
|
1059
|
+
getStages() {
|
|
1060
|
+
return this.stages;
|
|
895
1061
|
}
|
|
896
1062
|
};
|
|
897
1063
|
function aggregate(handle) {
|
|
898
|
-
return new AggregatePipeline(
|
|
1064
|
+
return new AggregatePipeline(
|
|
1065
|
+
handle.definition,
|
|
1066
|
+
handle.native,
|
|
1067
|
+
[],
|
|
1068
|
+
handle.session
|
|
1069
|
+
);
|
|
899
1070
|
}
|
|
900
1071
|
|
|
901
1072
|
// src/client/client.ts
|
|
@@ -1077,6 +1248,36 @@ async function syncIndexes(handle, options) {
|
|
|
1077
1248
|
};
|
|
1078
1249
|
}
|
|
1079
1250
|
|
|
1251
|
+
// src/transaction/transaction.ts
|
|
1252
|
+
var TransactionContext = class {
|
|
1253
|
+
/** @internal */
|
|
1254
|
+
session;
|
|
1255
|
+
/** @internal */
|
|
1256
|
+
constructor(session) {
|
|
1257
|
+
this.session = session;
|
|
1258
|
+
}
|
|
1259
|
+
/**
|
|
1260
|
+
* Bind a collection handle to this transaction's session.
|
|
1261
|
+
*
|
|
1262
|
+
* Returns a cloned handle whose CRUD operations automatically include
|
|
1263
|
+
* the transaction session. The original handle is not modified.
|
|
1264
|
+
*
|
|
1265
|
+
* @param handle - An existing collection handle from `db.use()`.
|
|
1266
|
+
* @returns A new handle bound to the transaction session.
|
|
1267
|
+
*
|
|
1268
|
+
* @example
|
|
1269
|
+
* ```ts
|
|
1270
|
+
* await db.transaction(async (tx) => {
|
|
1271
|
+
* const txUsers = tx.use(users)
|
|
1272
|
+
* await txUsers.insertOne({ name: 'Ada' })
|
|
1273
|
+
* })
|
|
1274
|
+
* ```
|
|
1275
|
+
*/
|
|
1276
|
+
use(handle) {
|
|
1277
|
+
return handle.withSession(this.session);
|
|
1278
|
+
}
|
|
1279
|
+
};
|
|
1280
|
+
|
|
1080
1281
|
// src/crud/delete.ts
|
|
1081
1282
|
import { z as z2 } from "zod";
|
|
1082
1283
|
|
|
@@ -1101,14 +1302,22 @@ var ZodmonValidationError = class extends ZodmonError {
|
|
|
1101
1302
|
// src/crud/delete.ts
|
|
1102
1303
|
async function deleteOne(handle, filter) {
|
|
1103
1304
|
try {
|
|
1104
|
-
return await handle.native.deleteOne(
|
|
1305
|
+
return await handle.native.deleteOne(
|
|
1306
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1307
|
+
filter,
|
|
1308
|
+
handle.session ? { session: handle.session } : {}
|
|
1309
|
+
);
|
|
1105
1310
|
} catch (err) {
|
|
1106
1311
|
wrapMongoError(err, handle.definition.name);
|
|
1107
1312
|
}
|
|
1108
1313
|
}
|
|
1109
1314
|
async function deleteMany(handle, filter) {
|
|
1110
1315
|
try {
|
|
1111
|
-
return await handle.native.deleteMany(
|
|
1316
|
+
return await handle.native.deleteMany(
|
|
1317
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1318
|
+
filter,
|
|
1319
|
+
handle.session ? { session: handle.session } : {}
|
|
1320
|
+
);
|
|
1112
1321
|
} catch (err) {
|
|
1113
1322
|
wrapMongoError(err, handle.definition.name);
|
|
1114
1323
|
}
|
|
@@ -1119,7 +1328,7 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
1119
1328
|
result = await handle.native.findOneAndDelete(
|
|
1120
1329
|
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
1121
1330
|
filter,
|
|
1122
|
-
{ includeResultMetadata: false }
|
|
1331
|
+
handle.session ? { includeResultMetadata: false, session: handle.session } : { includeResultMetadata: false }
|
|
1123
1332
|
);
|
|
1124
1333
|
} catch (err) {
|
|
1125
1334
|
wrapMongoError(err, handle.definition.name);
|
|
@@ -1130,7 +1339,8 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
1130
1339
|
return result;
|
|
1131
1340
|
}
|
|
1132
1341
|
try {
|
|
1133
|
-
|
|
1342
|
+
const schema = mode === "strict" ? handle.definition.strictSchema : handle.definition.schema;
|
|
1343
|
+
return schema.parse(result);
|
|
1134
1344
|
} catch (err) {
|
|
1135
1345
|
if (err instanceof z2.ZodError) {
|
|
1136
1346
|
throw new ZodmonValidationError(handle.definition.name, err, result);
|
|
@@ -1140,7 +1350,7 @@ async function findOneAndDelete(handle, filter, options) {
|
|
|
1140
1350
|
}
|
|
1141
1351
|
|
|
1142
1352
|
// src/crud/find.ts
|
|
1143
|
-
import { z as
|
|
1353
|
+
import { z as z5 } from "zod";
|
|
1144
1354
|
|
|
1145
1355
|
// src/errors/not-found.ts
|
|
1146
1356
|
var ZodmonNotFoundError = class extends ZodmonError {
|
|
@@ -1177,7 +1387,7 @@ function checkUnindexedFields(definition, filter) {
|
|
|
1177
1387
|
}
|
|
1178
1388
|
|
|
1179
1389
|
// src/query/cursor.ts
|
|
1180
|
-
import { z as
|
|
1390
|
+
import { z as z4 } from "zod";
|
|
1181
1391
|
|
|
1182
1392
|
// src/crud/paginate.ts
|
|
1183
1393
|
import { ObjectId } from "mongodb";
|
|
@@ -1240,11 +1450,313 @@ function resolveSortKeys(sortSpec) {
|
|
|
1240
1450
|
return entries;
|
|
1241
1451
|
}
|
|
1242
1452
|
|
|
1453
|
+
// src/populate/builder.ts
|
|
1454
|
+
var PopulateRefBuilder = class {
|
|
1455
|
+
/**
|
|
1456
|
+
* Declare a projection to apply when fetching the referenced documents.
|
|
1457
|
+
*
|
|
1458
|
+
* Supported: inclusion (`{ name: 1 }`), exclusion (`{ email: 0 }`), or
|
|
1459
|
+
* `_id` suppression (`{ name: 1, _id: 0 }`).
|
|
1460
|
+
*
|
|
1461
|
+
* @param projection - MongoDB-style inclusion or exclusion projection.
|
|
1462
|
+
* @returns A config object carrying the projection type for compile-time narrowing.
|
|
1463
|
+
*
|
|
1464
|
+
* @example
|
|
1465
|
+
* ```ts
|
|
1466
|
+
* (b) => b.project({ name: 1, email: 1 })
|
|
1467
|
+
* (b) => b.project({ password: 0 })
|
|
1468
|
+
* ```
|
|
1469
|
+
*/
|
|
1470
|
+
project(projection) {
|
|
1471
|
+
return { projection };
|
|
1472
|
+
}
|
|
1473
|
+
};
|
|
1474
|
+
|
|
1475
|
+
// src/populate/execute.ts
|
|
1476
|
+
import { z as z3 } from "zod";
|
|
1477
|
+
function unwrapRefSchema(schema) {
|
|
1478
|
+
const def = schema._zod.def;
|
|
1479
|
+
if (def && typeof def === "object") {
|
|
1480
|
+
if ("innerType" in def && def.innerType instanceof z3.ZodType) {
|
|
1481
|
+
return unwrapRefSchema(def.innerType);
|
|
1482
|
+
}
|
|
1483
|
+
if ("element" in def && def.element instanceof z3.ZodType) {
|
|
1484
|
+
return unwrapRefSchema(def.element);
|
|
1485
|
+
}
|
|
1486
|
+
}
|
|
1487
|
+
return schema;
|
|
1488
|
+
}
|
|
1489
|
+
function resolveRefField(shape, fieldName, collectionName) {
|
|
1490
|
+
const fieldSchema = shape[fieldName];
|
|
1491
|
+
if (!fieldSchema) {
|
|
1492
|
+
throw new Error(
|
|
1493
|
+
`[zodmon] populate: field '${fieldName}' does not exist on collection '${collectionName}'.`
|
|
1494
|
+
);
|
|
1495
|
+
}
|
|
1496
|
+
const isArray = fieldSchema instanceof z3.ZodArray;
|
|
1497
|
+
const inner = unwrapRefSchema(fieldSchema);
|
|
1498
|
+
const ref = getRefMetadata(inner);
|
|
1499
|
+
if (!ref) {
|
|
1500
|
+
throw new Error(
|
|
1501
|
+
`[zodmon] populate: field '${fieldName}' has no .ref() metadata. Only fields declared with .ref(Collection) can be populated.`
|
|
1502
|
+
);
|
|
1503
|
+
}
|
|
1504
|
+
return { isArray, ref };
|
|
1505
|
+
}
|
|
1506
|
+
/**
 * Build a populate step descriptor for `path`.
 *
 * A plain path (`'authorId'`) resolves against the root collection shape.
 * A dotted path (`'author.teamId'`) resolves its leaf against the target
 * collection of a previously registered step whose alias matches the parent
 * segment; the parent must already have been populated.
 *
 * @param definition - The root collection definition (shape + name).
 * @param previousSteps - Steps registered so far (searched for the parent).
 * @param path - Field path to populate, at most one level of nesting.
 * @param as - Output alias for the populated value.
 * @param projection - Optional projection to apply when fetching targets.
 * @returns A step object consumed by `executePopulate`.
 * @throws {Error} When the parent path was not populated first, or the field
 *   cannot be resolved (missing / no ref metadata).
 */
function resolvePopulateStep(definition, previousSteps, path, as, projection) {
  const dotIndex = path.indexOf(".");
  let leafField;
  let parentOutputPath;
  let shape;
  let collectionName;
  if (dotIndex === -1) {
    // Top-level field: resolve directly on the root collection.
    leafField = path;
    parentOutputPath = void 0;
    shape = definition.shape;
    collectionName = definition.name;
  } else {
    // Nested field: the parent segment must match an earlier step's alias.
    parentOutputPath = path.slice(0, dotIndex);
    leafField = path.slice(dotIndex + 1);
    const parentStep = previousSteps.find((s) => s.as === parentOutputPath);
    if (!parentStep) {
      throw new Error(
        `[zodmon] populate: parent '${parentOutputPath}' has not been populated. Populate '${parentOutputPath}' before populating '${path}'.`
      );
    }
    shape = parentStep.targetCollection.shape;
    collectionName = parentStep.targetCollection.name;
  }
  const { isArray, ref } = resolveRefField(shape, leafField, collectionName);
  const step = {
    originalPath: path,
    leafField,
    as,
    parentOutputPath,
    targetCollection: ref.collection,
    isArray
  };
  if (projection !== void 0) {
    step.projection = projection;
  }
  return step;
}
|
|
1541
|
+
/**
 * Normalize a traversal value into a list of plain objects.
 *
 * `null`/`undefined` and non-object scalars yield `[]`; a single object
 * yields a one-element list; an array yields its non-null object members.
 *
 * @param value - Any value read from a document field.
 * @returns An array of objects to continue traversal into.
 */
function expandValue(value) {
  if (value == null) return [];
  if (Array.isArray(value)) {
    return value.filter((item) => item != null && typeof item === "object");
  }
  return typeof value === "object" ? [value] : [];
}
|
|
1557
|
+
/**
 * Collect every object reachable from `doc` along the dotted `path`.
 *
 * Each path segment fans out through arrays (via `expandValue`), so a path
 * crossing an array of subdocuments yields all of them.
 *
 * @param doc - Root document to traverse.
 * @param path - Dot-separated field path.
 * @returns All objects found at the end of the path.
 */
function getNestedTargets(doc, path) {
  return path.split(".").reduce(
    (targets, part) => targets.flatMap((target) => expandValue(target[part])),
    [doc]
  );
}
|
|
1565
|
+
/**
 * Append `value` to `idValues` unless its string form was already seen.
 *
 * Uniqueness is keyed on `String(value)` so ObjectId-like values compare by
 * their string representation.
 *
 * @param value - Candidate id value.
 * @param idSet - Set of string keys already collected (mutated).
 * @param idValues - Ordered list of unique id values (mutated).
 */
function addUniqueId(value, idSet, idValues) {
  const key = String(value);
  if (idSet.has(key)) return;
  idSet.add(key);
  idValues.push(value);
}
|
|
1572
|
+
/**
 * Gather the unique reference ids stored under `leafField` across `targets`.
 *
 * Both scalar and array-valued fields are supported; `null`/`undefined`
 * values are skipped. Uniqueness is keyed on `String(id)` while the original
 * values (e.g. ObjectIds) are preserved in first-seen order.
 *
 * @param targets - Documents (or subdocuments) holding the reference field.
 * @param leafField - Name of the field containing the id(s).
 * @returns Unique id values, in encounter order.
 */
function collectIds(targets, leafField) {
  const seenKeys = new Set();
  const ids = [];
  // Inlined dedupe: push only ids whose string form is new.
  const pushUnique = (id) => {
    const key = String(id);
    if (!seenKeys.has(key)) {
      seenKeys.add(key);
      ids.push(id);
    }
  };
  for (const target of targets) {
    const value = target[leafField];
    if (value == null) continue;
    if (Array.isArray(value)) {
      for (const id of value) pushUnique(id);
    } else {
      pushUnique(value);
    }
  }
  return ids;
}
|
|
1588
|
+
/**
 * Replace stored id(s) with the fetched documents from `map`.
 *
 * `null`/`undefined` pass through unchanged. Arrays map each id to its
 * document, dropping ids with no match; a scalar id resolves to its document
 * or `null` when absent. Lookups are keyed on `String(id)`.
 *
 * @param value - The raw reference value (id, array of ids, or nullish).
 * @param map - Fetched documents keyed by `String(doc._id)`.
 * @returns The populated document(s), or the original nullish value.
 */
function resolvePopulatedValue(value, map) {
  if (value == null) return value;
  if (!Array.isArray(value)) {
    return map.get(String(value)) ?? null;
  }
  const docs = [];
  for (const id of value) {
    const doc = map.get(String(id));
    if (doc != null) docs.push(doc);
  }
  return docs;
}
|
|
1595
|
+
/**
 * Write the populated value for a step back onto each target document.
 *
 * The resolved document(s) are stored under `step.as`; when the alias differs
 * from the source field, the original id field is removed so the output
 * carries only the populated form. Mutates `targets` in place.
 *
 * @param targets - Documents to mutate.
 * @param step - The populate step (provides `leafField` and `as`).
 * @param map - Fetched documents keyed by `String(doc._id)`.
 */
function mergePopulated(targets, step, map) {
  const { leafField, as } = step;
  for (const target of targets) {
    const populated = resolvePopulatedValue(target[leafField], map);
    if (as !== leafField) {
      delete target[leafField];
    }
    target[as] = populated;
  }
}
|
|
1605
|
+
/**
 * Apply populate steps to `documents` in order, mutating them in place.
 *
 * Each step batches all referenced ids into a single `$in` query against the
 * target collection (no N+1), builds an id → document map, and merges the
 * results back onto the owning documents. Steps whose id set is empty are
 * skipped without touching the database.
 *
 * @param documents - Root documents to populate (mutated).
 * @param steps - Ordered populate steps.
 * @param getCollection - Resolver from collection name to a native collection.
 * @returns The same `documents` array, populated.
 */
async function executePopulate(documents, steps, getCollection) {
  for (const step of steps) {
    // Nested steps operate on the subdocuments produced by their parent step.
    let targets = documents;
    if (step.parentOutputPath) {
      targets = documents.flatMap((doc) => getNestedTargets(doc, step.parentOutputPath));
    }
    const ids = collectIds(targets, step.leafField);
    if (ids.length === 0) continue;
    const options = step.projection !== void 0 ? { projection: step.projection } : {};
    const fetched = await getCollection(step.targetCollection.name)
      .find({ _id: { $in: ids } }, options)
      .toArray();
    const byId = new Map(fetched.map((doc) => [String(doc._id), doc]));
    mergePopulated(targets, step, byId);
  }
  return documents;
}
|
|
1621
|
+
|
|
1622
|
+
// src/populate/cursor.ts
|
|
1623
|
+
var PopulateCursor = class _PopulateCursor {
  cursor;
  definition;
  steps;
  nativeCollection;
  /** @internal */
  constructor(cursor, definition, steps, nativeCollection) {
    this.cursor = cursor;
    this.definition = definition;
    this.steps = steps;
    this.nativeCollection = nativeCollection;
  }
  // Implementation -- TypeScript cannot narrow overloaded generics in the
  // implementation body, so param types are widened and the return is cast.
  populate(field, asOrConfigure) {
    // Default alias: last segment of a dotted path, or the field itself.
    const defaultAlias = field.includes(".") ? field.split(".").pop() ?? field : field;
    let alias = defaultAlias;
    let projection;
    if (typeof asOrConfigure === "function") {
      projection = asOrConfigure(new PopulateRefBuilder()).projection;
    } else {
      alias = asOrConfigure ?? defaultAlias;
      projection = void 0;
    }
    const step = resolvePopulateStep(this.definition, this.steps, field, alias, projection);
    // Immutable chaining: a new cursor carrying the extended step list.
    return new _PopulateCursor(this.cursor, this.definition, [...this.steps, step], this.nativeCollection);
  }
  /**
   * Execute the query and return all matching documents as a populated array.
   *
   * Fetches all documents from the underlying cursor, then applies populate
   * steps in order using batch `$in` queries (no N+1 problem).
   *
   * @returns Array of populated documents.
   *
   * @example
   * ```ts
   * const posts = await db.use(Posts)
   *   .find({})
   *   .populate('authorId', 'author')
   *   .toArray()
   * ```
   */
  async toArray() {
    const docs = await this.cursor.toArray();
    if (this.steps.length === 0) return docs;
    return await executePopulate(
      docs,
      this.steps,
      (name) => this.nativeCollection.db.collection(name)
    );
  }
  /**
   * Async iterator for streaming populated documents.
   *
   * Fetches all documents first (populate requires the full batch for
   * efficient `$in` queries), then yields results one at a time.
   *
   * @yields Populated documents one at a time.
   *
   * @example
   * ```ts
   * for await (const post of db.use(Posts).find({}).populate('authorId', 'author')) {
   *   console.log(post.author.name)
   * }
   * ```
   */
  async *[Symbol.asyncIterator]() {
    yield* await this.toArray();
  }
};
|
|
1700
|
+
/**
 * Wrap a typed find cursor in a {@link PopulateCursor}, borrowing the
 * cursor's native collection for later lookups.
 *
 * @param cursor - The underlying typed find cursor.
 * @param definition - The root collection definition.
 * @param steps - Initial populate steps (usually empty).
 * @returns A populate-capable cursor.
 */
function createPopulateCursor(cursor, definition, steps) {
  return new PopulateCursor(cursor, definition, steps, cursor.nativeCollection);
}
|
|
1704
|
+
|
|
1705
|
+
// src/query/projection.ts
|
|
1706
|
+
/**
 * True when a projection entry marks a field for inclusion (`1` or `true`).
 *
 * @param value - A single projection value.
 * @returns Whether the value means "include".
 */
function isIncludeValue(value) {
  return value === true || value === 1;
}
|
|
1709
|
+
/**
 * True when a projection entry marks a field for exclusion (`0` or `false`).
 *
 * @param value - A single projection value.
 * @returns Whether the value means "exclude".
 */
function isExcludeValue(value) {
  return value === false || value === 0;
}
|
|
1712
|
+
/**
 * Classify a projection as inclusion-style.
 *
 * A projection is inclusive when any non-`_id` key has an include value
 * (`1`/`true`); `_id` alone never decides the mode, matching MongoDB's rule
 * that `_id` may be excluded inside an inclusion projection.
 *
 * @param projection - MongoDB-style projection document.
 * @returns `true` for inclusion projections, `false` otherwise.
 */
function isInclusionProjection(projection) {
  return Object.entries(projection).some(
    ([key, value]) => key !== "_id" && (value === 1 || value === true)
  );
}
|
|
1720
|
+
/**
 * Build a Zod `pick` mask from an inclusion projection.
 *
 * `_id` is kept unless explicitly excluded (`0`/`false`) and it exists on the
 * schema. Every other key with an include value (`1`/`true`) that exists on
 * the schema is picked; unknown keys are silently ignored.
 *
 * @param projection - Inclusion-style projection document.
 * @param schemaKeys - Set of field names present on the schema.
 * @returns A `{ field: true }` mask for `schema.pick()`.
 */
function buildPickMask(projection, schemaKeys) {
  const mask = {};
  const idValue = projection._id;
  const idExcluded = idValue === 0 || idValue === false;
  if (!idExcluded && schemaKeys.has("_id")) {
    mask._id = true;
  }
  for (const [key, value] of Object.entries(projection)) {
    if (key === "_id") continue;
    if ((value === 1 || value === true) && schemaKeys.has(key)) {
      mask[key] = true;
    }
  }
  return mask;
}
|
|
1735
|
+
/**
 * Build a Zod `omit` mask from an exclusion projection.
 *
 * Keys with an exclude value (`0`/`false`) that exist on the schema are
 * omitted; unknown keys and include values are ignored.
 *
 * @param projection - Exclusion-style projection document.
 * @param schemaKeys - Set of field names present on the schema.
 * @returns A `{ field: true }` mask for `schema.omit()`.
 */
function buildOmitMask(projection, schemaKeys) {
  const mask = {};
  for (const [key, value] of Object.entries(projection)) {
    if ((value === 0 || value === false) && schemaKeys.has(key)) {
      mask[key] = true;
    }
  }
  return mask;
}
|
|
1745
|
+
/**
 * Derive a narrowed Zod schema matching a MongoDB projection.
 *
 * Inclusion projections produce `schema.pick(...)`; everything else is
 * treated as an exclusion projection and produces `schema.omit(...)`.
 *
 * @param schema - The full collection Zod object schema.
 * @param projection - MongoDB-style projection document.
 * @returns A schema validating only the projected fields.
 */
function deriveProjectedSchema(schema, projection) {
  const schemaKeys = new Set(Object.keys(schema.shape));
  return isInclusionProjection(projection)
    ? schema.pick(buildPickMask(projection, schemaKeys))
    : schema.omit(buildOmitMask(projection, schemaKeys));
}
|
|
1752
|
+
|
|
1243
1753
|
// src/query/cursor.ts
|
|
1244
1754
|
var TypedFindCursor = class {
|
|
1245
1755
|
/** @internal */
|
|
1246
1756
|
cursor;
|
|
1247
1757
|
/** @internal */
|
|
1758
|
+
definition;
|
|
1759
|
+
/** @internal */
|
|
1248
1760
|
schema;
|
|
1249
1761
|
/** @internal */
|
|
1250
1762
|
collectionName;
|
|
@@ -1256,16 +1768,23 @@ var TypedFindCursor = class {
|
|
|
1256
1768
|
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter is not assignable to MongoDB's Filter; stored opaquely for paginate
|
|
1257
1769
|
filter;
|
|
1258
1770
|
/** @internal */
|
|
1771
|
+
session;
|
|
1772
|
+
/** @internal */
|
|
1259
1773
|
sortSpec;
|
|
1260
1774
|
/** @internal */
|
|
1261
|
-
|
|
1775
|
+
projectedSchema;
|
|
1776
|
+
/** @internal */
|
|
1777
|
+
constructor(cursor, definition, mode, nativeCollection, filter, session) {
|
|
1262
1778
|
this.cursor = cursor;
|
|
1779
|
+
this.definition = definition;
|
|
1263
1780
|
this.schema = definition.schema;
|
|
1264
1781
|
this.collectionName = definition.name;
|
|
1265
1782
|
this.mode = mode;
|
|
1266
1783
|
this.nativeCollection = nativeCollection;
|
|
1267
1784
|
this.filter = filter;
|
|
1785
|
+
this.session = session;
|
|
1268
1786
|
this.sortSpec = null;
|
|
1787
|
+
this.projectedSchema = null;
|
|
1269
1788
|
}
|
|
1270
1789
|
/**
|
|
1271
1790
|
* Set the sort order for the query.
|
|
@@ -1339,6 +1858,48 @@ var TypedFindCursor = class {
|
|
|
1339
1858
|
this.cursor.hint(indexName);
|
|
1340
1859
|
return this;
|
|
1341
1860
|
}
|
|
1861
|
+
/**
|
|
1862
|
+
* Apply a projection to narrow the returned fields.
|
|
1863
|
+
*
|
|
1864
|
+
* Inclusion projections (`{ name: 1 }`) return only the specified fields
|
|
1865
|
+
* plus `_id` (unless `_id: 0`). Exclusion projections (`{ email: 0 }`)
|
|
1866
|
+
* return all fields except those excluded.
|
|
1867
|
+
*
|
|
1868
|
+
* The cursor's output type is narrowed at compile time. A derived Zod
|
|
1869
|
+
* schema is built for runtime validation of the projected fields.
|
|
1870
|
+
*
|
|
1871
|
+
* Projects from the original document type, not from a previous projection.
|
|
1872
|
+
* Calling `.project()` twice overrides the previous projection.
|
|
1873
|
+
*
|
|
1874
|
+
* @param spec - Type-safe projection document.
|
|
1875
|
+
* @returns A new cursor with the narrowed output type.
|
|
1876
|
+
*
|
|
1877
|
+
* @example
|
|
1878
|
+
* ```ts
|
|
1879
|
+
* const names = await find(users, {})
|
|
1880
|
+
* .project({ name: 1 })
|
|
1881
|
+
* .sort({ name: 1 })
|
|
1882
|
+
* .toArray()
|
|
1883
|
+
* // names[0].name ✓
|
|
1884
|
+
* // names[0].email TS error
|
|
1885
|
+
* ```
|
|
1886
|
+
*/
|
|
1887
|
+
project(spec) {
|
|
1888
|
+
this.cursor.project(spec);
|
|
1889
|
+
this.projectedSchema = deriveProjectedSchema(
|
|
1890
|
+
this.schema,
|
|
1891
|
+
spec
|
|
1892
|
+
);
|
|
1893
|
+
return this;
|
|
1894
|
+
}
|
|
1895
|
+
// Implementation — creates a PopulateCursor and delegates the first populate call.
|
|
1896
|
+
// No circular runtime dependency: populate/cursor.ts imports TypedFindCursor as a
|
|
1897
|
+
// *type* only (erased at runtime), so the runtime import flows one way:
|
|
1898
|
+
// query/cursor.ts → populate/cursor.ts.
|
|
1899
|
+
populate(field, asOrConfigure) {
|
|
1900
|
+
const popCursor = createPopulateCursor(this, this.definition, []);
|
|
1901
|
+
return popCursor.populate(field, asOrConfigure);
|
|
1902
|
+
}
|
|
1342
1903
|
async paginate(opts) {
|
|
1343
1904
|
const sortRecord = this.sortSpec ? this.sortSpec : null;
|
|
1344
1905
|
const sortKeys2 = resolveSortKeys(sortRecord);
|
|
@@ -1355,8 +1916,11 @@ var TypedFindCursor = class {
|
|
|
1355
1916
|
try {
|
|
1356
1917
|
;
|
|
1357
1918
|
[total, raw2] = await Promise.all([
|
|
1358
|
-
this.nativeCollection.countDocuments(
|
|
1359
|
-
|
|
1919
|
+
this.nativeCollection.countDocuments(
|
|
1920
|
+
this.filter,
|
|
1921
|
+
this.session ? { session: this.session } : {}
|
|
1922
|
+
),
|
|
1923
|
+
this.nativeCollection.find(this.filter, this.session ? { session: this.session } : void 0).sort(sort).skip((opts.page - 1) * opts.perPage).limit(opts.perPage).toArray()
|
|
1360
1924
|
]);
|
|
1361
1925
|
} catch (err) {
|
|
1362
1926
|
wrapMongoError(err, this.collectionName);
|
|
@@ -1386,7 +1950,7 @@ var TypedFindCursor = class {
|
|
|
1386
1950
|
const effectiveSort = isBackward ? Object.fromEntries(sortKeys2.map(([f, d]) => [f, d === 1 ? -1 : 1])) : sort;
|
|
1387
1951
|
let raw2;
|
|
1388
1952
|
try {
|
|
1389
|
-
raw2 = await this.nativeCollection.find(combinedFilter).sort(effectiveSort).limit(opts.limit + 1).toArray();
|
|
1953
|
+
raw2 = await this.nativeCollection.find(combinedFilter, this.session ? { session: this.session } : void 0).sort(effectiveSort).limit(opts.limit + 1).toArray();
|
|
1390
1954
|
} catch (err) {
|
|
1391
1955
|
wrapMongoError(err, this.collectionName);
|
|
1392
1956
|
}
|
|
@@ -1455,10 +2019,11 @@ var TypedFindCursor = class {
|
|
|
1455
2019
|
if (this.mode === false || this.mode === "passthrough") {
|
|
1456
2020
|
return raw2;
|
|
1457
2021
|
}
|
|
2022
|
+
const schema = this.projectedSchema ?? (this.mode === "strict" ? this.definition.strictSchema : this.schema);
|
|
1458
2023
|
try {
|
|
1459
|
-
return
|
|
2024
|
+
return schema.parse(raw2);
|
|
1460
2025
|
} catch (err) {
|
|
1461
|
-
if (err instanceof
|
|
2026
|
+
if (err instanceof z4.ZodError) {
|
|
1462
2027
|
throw new ZodmonValidationError(this.collectionName, err, raw2);
|
|
1463
2028
|
}
|
|
1464
2029
|
throw err;
|
|
@@ -1469,10 +2034,15 @@ var TypedFindCursor = class {
|
|
|
1469
2034
|
// src/crud/find.ts
|
|
1470
2035
|
async function findOne(handle, filter, options) {
|
|
1471
2036
|
checkUnindexedFields(handle.definition, filter);
|
|
1472
|
-
const
|
|
2037
|
+
const project = options && "project" in options ? options.project : void 0;
|
|
2038
|
+
const findOptions = project ? { projection: project } : void 0;
|
|
1473
2039
|
let raw2;
|
|
1474
2040
|
try {
|
|
1475
|
-
raw2 = await handle.native.findOne(
|
|
2041
|
+
raw2 = await handle.native.findOne(
|
|
2042
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
2043
|
+
filter,
|
|
2044
|
+
handle.session ? { ...findOptions, session: handle.session } : findOptions
|
|
2045
|
+
);
|
|
1476
2046
|
} catch (err) {
|
|
1477
2047
|
wrapMongoError(err, handle.definition.name);
|
|
1478
2048
|
}
|
|
@@ -1481,10 +2051,14 @@ async function findOne(handle, filter, options) {
|
|
|
1481
2051
|
if (mode === false || mode === "passthrough") {
|
|
1482
2052
|
return raw2;
|
|
1483
2053
|
}
|
|
2054
|
+
const schema = project ? deriveProjectedSchema(
|
|
2055
|
+
handle.definition.schema,
|
|
2056
|
+
project
|
|
2057
|
+
) : mode === "strict" ? handle.definition.strictSchema : handle.definition.schema;
|
|
1484
2058
|
try {
|
|
1485
|
-
return
|
|
2059
|
+
return schema.parse(raw2);
|
|
1486
2060
|
} catch (err) {
|
|
1487
|
-
if (err instanceof
|
|
2061
|
+
if (err instanceof z5.ZodError) {
|
|
1488
2062
|
throw new ZodmonValidationError(handle.definition.name, err, raw2);
|
|
1489
2063
|
}
|
|
1490
2064
|
throw err;
|
|
@@ -1499,26 +2073,42 @@ async function findOneOrThrow(handle, filter, options) {
|
|
|
1499
2073
|
}
|
|
1500
2074
|
function find(handle, filter, options) {
|
|
1501
2075
|
checkUnindexedFields(handle.definition, filter);
|
|
1502
|
-
const raw2 = handle.native.find(
|
|
2076
|
+
const raw2 = handle.native.find(
|
|
2077
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
2078
|
+
filter,
|
|
2079
|
+
handle.session ? { session: handle.session } : void 0
|
|
2080
|
+
);
|
|
1503
2081
|
const cursor = raw2;
|
|
1504
2082
|
const mode = options?.validate !== void 0 ? options.validate : handle.definition.options.validation;
|
|
1505
|
-
|
|
2083
|
+
const typedCursor = new TypedFindCursor(
|
|
2084
|
+
cursor,
|
|
2085
|
+
handle.definition,
|
|
2086
|
+
mode,
|
|
2087
|
+
handle.native,
|
|
2088
|
+
filter,
|
|
2089
|
+
handle.session
|
|
2090
|
+
);
|
|
2091
|
+
const project = options && "project" in options ? options.project : void 0;
|
|
2092
|
+
if (project) {
|
|
2093
|
+
return typedCursor.project(project);
|
|
2094
|
+
}
|
|
2095
|
+
return typedCursor;
|
|
1506
2096
|
}
|
|
1507
2097
|
|
|
1508
2098
|
// src/crud/insert.ts
|
|
1509
|
-
import { z as
|
|
2099
|
+
import { z as z6 } from "zod";
|
|
1510
2100
|
async function insertOne(handle, doc) {
|
|
1511
2101
|
let parsed;
|
|
1512
2102
|
try {
|
|
1513
2103
|
parsed = handle.definition.schema.parse(doc);
|
|
1514
2104
|
} catch (err) {
|
|
1515
|
-
if (err instanceof
|
|
2105
|
+
if (err instanceof z6.ZodError) {
|
|
1516
2106
|
throw new ZodmonValidationError(handle.definition.name, err, doc);
|
|
1517
2107
|
}
|
|
1518
2108
|
throw err;
|
|
1519
2109
|
}
|
|
1520
2110
|
try {
|
|
1521
|
-
await handle.native.insertOne(parsed);
|
|
2111
|
+
await handle.native.insertOne(parsed, handle.session ? { session: handle.session } : {});
|
|
1522
2112
|
} catch (err) {
|
|
1523
2113
|
wrapMongoError(err, handle.definition.name);
|
|
1524
2114
|
}
|
|
@@ -1531,14 +2121,14 @@ async function insertMany(handle, docs) {
|
|
|
1531
2121
|
try {
|
|
1532
2122
|
parsed.push(handle.definition.schema.parse(doc));
|
|
1533
2123
|
} catch (err) {
|
|
1534
|
-
if (err instanceof
|
|
2124
|
+
if (err instanceof z6.ZodError) {
|
|
1535
2125
|
throw new ZodmonValidationError(handle.definition.name, err, doc);
|
|
1536
2126
|
}
|
|
1537
2127
|
throw err;
|
|
1538
2128
|
}
|
|
1539
2129
|
}
|
|
1540
2130
|
try {
|
|
1541
|
-
await handle.native.insertMany(parsed);
|
|
2131
|
+
await handle.native.insertMany(parsed, handle.session ? { session: handle.session } : {});
|
|
1542
2132
|
} catch (err) {
|
|
1543
2133
|
wrapMongoError(err, handle.definition.name);
|
|
1544
2134
|
}
|
|
@@ -1546,17 +2136,29 @@ async function insertMany(handle, docs) {
|
|
|
1546
2136
|
}
|
|
1547
2137
|
|
|
1548
2138
|
// src/crud/update.ts
|
|
1549
|
-
import { z as
|
|
2139
|
+
import { z as z7 } from "zod";
|
|
1550
2140
|
async function updateOne(handle, filter, update, options) {
|
|
1551
2141
|
try {
|
|
1552
|
-
return await handle.native.updateOne(
|
|
2142
|
+
return await handle.native.updateOne(
|
|
2143
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
2144
|
+
filter,
|
|
2145
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedUpdateFilter intersection type is not directly assignable to MongoDB's UpdateFilter
|
|
2146
|
+
update,
|
|
2147
|
+
handle.session ? { ...options, session: handle.session } : options
|
|
2148
|
+
);
|
|
1553
2149
|
} catch (err) {
|
|
1554
2150
|
wrapMongoError(err, handle.definition.name);
|
|
1555
2151
|
}
|
|
1556
2152
|
}
|
|
1557
2153
|
async function updateMany(handle, filter, update, options) {
|
|
1558
2154
|
try {
|
|
1559
|
-
return await handle.native.updateMany(
|
|
2155
|
+
return await handle.native.updateMany(
|
|
2156
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedFilter intersection type is not directly assignable to MongoDB's Filter
|
|
2157
|
+
filter,
|
|
2158
|
+
// biome-ignore lint/suspicious/noExplicitAny: TypedUpdateFilter intersection type is not directly assignable to MongoDB's UpdateFilter
|
|
2159
|
+
update,
|
|
2160
|
+
handle.session ? { ...options, session: handle.session } : options
|
|
2161
|
+
);
|
|
1560
2162
|
} catch (err) {
|
|
1561
2163
|
wrapMongoError(err, handle.definition.name);
|
|
1562
2164
|
}
|
|
@@ -1569,6 +2171,9 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
1569
2171
|
if (options?.upsert !== void 0) {
|
|
1570
2172
|
driverOptions["upsert"] = options.upsert;
|
|
1571
2173
|
}
|
|
2174
|
+
if (handle.session) {
|
|
2175
|
+
driverOptions["session"] = handle.session;
|
|
2176
|
+
}
|
|
1572
2177
|
let result;
|
|
1573
2178
|
try {
|
|
1574
2179
|
result = await handle.native.findOneAndUpdate(
|
|
@@ -1588,24 +2193,165 @@ async function findOneAndUpdate(handle, filter, update, options) {
|
|
|
1588
2193
|
return result;
|
|
1589
2194
|
}
|
|
1590
2195
|
try {
|
|
1591
|
-
|
|
2196
|
+
const schema = mode === "strict" ? handle.definition.strictSchema : handle.definition.schema;
|
|
2197
|
+
return schema.parse(result);
|
|
1592
2198
|
} catch (err) {
|
|
1593
|
-
if (err instanceof
|
|
2199
|
+
if (err instanceof z7.ZodError) {
|
|
1594
2200
|
throw new ZodmonValidationError(handle.definition.name, err, result);
|
|
1595
2201
|
}
|
|
1596
2202
|
throw err;
|
|
1597
2203
|
}
|
|
1598
2204
|
}
|
|
1599
2205
|
|
|
2206
|
+
// src/populate/query.ts
|
|
2207
|
+
var PopulateOneQuery = class _PopulateOneQuery {
  handle;
  filter;
  options;
  steps;
  constructor(handle, filter, options, steps = []) {
    this.handle = handle;
    this.filter = filter;
    this.options = options;
    this.steps = steps;
  }
  // Implementation -- TypeScript cannot narrow overloaded generics in the
  // implementation body, so param types are widened and the return is cast.
  populate(field, asOrConfigure) {
    // Default alias: last segment of a dotted path, or the field itself.
    const defaultAlias = field.includes(".") ? field.split(".").pop() ?? field : field;
    let alias = defaultAlias;
    let projection;
    if (typeof asOrConfigure === "function") {
      projection = asOrConfigure(new PopulateRefBuilder()).projection;
    } else {
      alias = asOrConfigure ?? defaultAlias;
      projection = void 0;
    }
    const step = resolvePopulateStep(this.handle.definition, this.steps, field, alias, projection);
    // Immutable chaining: a new query carrying the extended step list.
    return new _PopulateOneQuery(this.handle, this.filter, this.options, [...this.steps, step]);
  }
  /**
   * Attach fulfillment and rejection handlers to the query promise.
   *
   * Executes the base findOne query and applies populate steps if any.
   * Returns `null` when no document matches the filter.
   */
  // biome-ignore lint/suspicious/noThenProperty: PromiseLike requires a then method
  then(onfulfilled, onrejected) {
    return this.execute().then(onfulfilled, onrejected);
  }
  /** Run findOne, then populate the single result (if found). */
  async execute() {
    const doc = await findOne(this.handle, this.filter, this.options);
    if (!doc) return null;
    if (this.steps.length === 0) return doc;
    const populated = await executePopulate(
      [doc],
      this.steps,
      (name) => this.handle.native.db.collection(name)
    );
    return populated[0] ?? null;
  }
};
|
|
2258
|
+
var PopulateOneOrThrowQuery = class _PopulateOneOrThrowQuery {
  handle;
  filter;
  options;
  steps;
  constructor(handle, filter, options, steps = []) {
    this.handle = handle;
    this.filter = filter;
    this.options = options;
    this.steps = steps;
  }
  // Implementation -- see PopulateOneQuery for reasoning on casts.
  populate(field, asOrConfigure) {
    // Default alias: last segment of a dotted path, or the field itself.
    const defaultAlias = field.includes(".") ? field.split(".").pop() ?? field : field;
    let alias = defaultAlias;
    let projection;
    if (typeof asOrConfigure === "function") {
      projection = asOrConfigure(new PopulateRefBuilder()).projection;
    } else {
      alias = asOrConfigure ?? defaultAlias;
      projection = void 0;
    }
    const step = resolvePopulateStep(this.handle.definition, this.steps, field, alias, projection);
    // Immutable chaining: a new query carrying the extended step list.
    return new _PopulateOneOrThrowQuery(this.handle, this.filter, this.options, [...this.steps, step]);
  }
  /**
   * Attach fulfillment and rejection handlers to the query promise.
   *
   * Executes the base findOneOrThrow query and applies populate steps if any.
   * Throws {@link ZodmonNotFoundError} when no document matches.
   */
  // biome-ignore lint/suspicious/noThenProperty: PromiseLike requires a then method
  then(onfulfilled, onrejected) {
    return this.execute().then(onfulfilled, onrejected);
  }
  /** Run findOne, throw when absent, then populate the single result. */
  async execute() {
    const doc = await findOne(this.handle, this.filter, this.options);
    if (!doc) {
      throw new ZodmonNotFoundError(this.handle.definition.name, this.filter);
    }
    if (this.steps.length === 0) return doc;
    const populated = await executePopulate(
      [doc],
      this.steps,
      (name) => this.handle.native.db.collection(name)
    );
    const result = populated[0];
    if (!result) {
      throw new ZodmonNotFoundError(this.handle.definition.name, this.filter);
    }
    return result;
  }
};
|
|
2314
|
+
|
|
1600
2315
|
// src/client/handle.ts
|
|
1601
|
-
var CollectionHandle = class {
|
|
2316
|
+
var CollectionHandle = class _CollectionHandle {
|
|
1602
2317
|
/** The collection definition containing schema, name, and index metadata. */
|
|
1603
2318
|
definition;
|
|
1604
2319
|
/** The underlying MongoDB driver collection, typed to the inferred document type. */
|
|
1605
2320
|
native;
|
|
1606
|
-
|
|
2321
|
+
/**
|
|
2322
|
+
* The MongoDB client session bound to this handle, if any.
|
|
2323
|
+
*
|
|
2324
|
+
* When set, all CRUD and aggregation operations performed through this
|
|
2325
|
+
* handle will include the session in their options, enabling transactional
|
|
2326
|
+
* reads and writes. Undefined when no session is bound.
|
|
2327
|
+
*/
|
|
2328
|
+
session;
|
|
2329
|
+
constructor(definition, native, session) {
|
|
1607
2330
|
this.definition = definition;
|
|
1608
2331
|
this.native = native;
|
|
2332
|
+
this.session = session;
|
|
2333
|
+
}
|
|
2334
|
+
/**
|
|
2335
|
+
* Create a new handle bound to the given MongoDB client session.
|
|
2336
|
+
*
|
|
2337
|
+
* Returns a new {@link CollectionHandle} that shares the same collection
|
|
2338
|
+
* definition and native driver collection, but passes `session` to every
|
|
2339
|
+
* CRUD and aggregation operation. The original handle is not modified.
|
|
2340
|
+
*
|
|
2341
|
+
* @param session - The MongoDB `ClientSession` to bind.
|
|
2342
|
+
* @returns A new handle with the session attached.
|
|
2343
|
+
*
|
|
2344
|
+
* @example
|
|
2345
|
+
* ```ts
|
|
2346
|
+
* const users = db.use(Users)
|
|
2347
|
+
* await db.client.withSession(async (session) => {
|
|
2348
|
+
* const bound = users.withSession(session)
|
|
2349
|
+
* await bound.insertOne({ name: 'Ada' }) // uses session
|
|
2350
|
+
* })
|
|
2351
|
+
* ```
|
|
2352
|
+
*/
|
|
2353
|
+
withSession(session) {
|
|
2354
|
+
return new _CollectionHandle(this.definition, this.native, session);
|
|
1609
2355
|
}
|
|
1610
2356
|
/**
|
|
1611
2357
|
* Insert a single document into the collection.
|
|
@@ -1651,70 +2397,18 @@ var CollectionHandle = class {
|
|
|
1651
2397
|
async insertMany(docs) {
|
|
1652
2398
|
return await insertMany(this, docs);
|
|
1653
2399
|
}
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
|
|
1659
|
-
* back to the collection-level default (which defaults to `'strict'`).
|
|
1660
|
-
*
|
|
1661
|
-
* @param filter - Type-safe filter to match documents.
|
|
1662
|
-
* @param options - Optional projection and validation overrides.
|
|
1663
|
-
* @returns The matched document, or `null` if no document matches.
|
|
1664
|
-
* @throws {ZodmonValidationError} When the fetched document fails schema validation in strict mode.
|
|
1665
|
-
*
|
|
1666
|
-
* @example
|
|
1667
|
-
* ```ts
|
|
1668
|
-
* const users = db.use(Users)
|
|
1669
|
-
* const user = await users.findOne({ name: 'Ada' })
|
|
1670
|
-
* if (user) console.log(user.role)
|
|
1671
|
-
* ```
|
|
1672
|
-
*/
|
|
1673
|
-
async findOne(filter, options) {
|
|
1674
|
-
return await findOne(this, filter, options);
|
|
2400
|
+
findOne(filter, options) {
|
|
2401
|
+
if (options && "project" in options) {
|
|
2402
|
+
return findOne(this, filter, options);
|
|
2403
|
+
}
|
|
2404
|
+
return new PopulateOneQuery(this, filter, options);
|
|
1675
2405
|
}
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1679
|
-
|
|
1680
|
-
|
|
1681
|
-
*
|
|
1682
|
-
* @param filter - Type-safe filter to match documents.
|
|
1683
|
-
* @param options - Optional projection and validation overrides.
|
|
1684
|
-
* @returns The matched document (never null).
|
|
1685
|
-
* @throws {ZodmonNotFoundError} When no document matches the filter.
|
|
1686
|
-
* @throws {ZodmonValidationError} When the fetched document fails schema validation in strict mode.
|
|
1687
|
-
*
|
|
1688
|
-
* @example
|
|
1689
|
-
* ```ts
|
|
1690
|
-
* const users = db.use(Users)
|
|
1691
|
-
* const user = await users.findOneOrThrow({ name: 'Ada' })
|
|
1692
|
-
* console.log(user.role) // guaranteed non-null
|
|
1693
|
-
* ```
|
|
1694
|
-
*/
|
|
1695
|
-
async findOneOrThrow(filter, options) {
|
|
1696
|
-
return await findOneOrThrow(this, filter, options);
|
|
2406
|
+
findOneOrThrow(filter, options) {
|
|
2407
|
+
if (options && "project" in options) {
|
|
2408
|
+
return findOneOrThrow(this, filter, options);
|
|
2409
|
+
}
|
|
2410
|
+
return new PopulateOneOrThrowQuery(this, filter, options);
|
|
1697
2411
|
}
|
|
1698
|
-
/**
|
|
1699
|
-
* Find all documents matching the filter, returning a chainable typed cursor.
|
|
1700
|
-
*
|
|
1701
|
-
* The cursor is lazy — no query is executed until a terminal method
|
|
1702
|
-
* (`toArray`, `for await`) is called. Use `sort`, `skip`, and `limit`
|
|
1703
|
-
* to shape the query before executing.
|
|
1704
|
-
*
|
|
1705
|
-
* @param filter - Type-safe filter to match documents.
|
|
1706
|
-
* @param options - Optional validation overrides.
|
|
1707
|
-
* @returns A typed cursor for chaining query modifiers.
|
|
1708
|
-
*
|
|
1709
|
-
* @example
|
|
1710
|
-
* ```ts
|
|
1711
|
-
* const users = db.use(Users)
|
|
1712
|
-
* const admins = await users.find({ role: 'admin' })
|
|
1713
|
-
* .sort({ name: 1 })
|
|
1714
|
-
* .limit(10)
|
|
1715
|
-
* .toArray()
|
|
1716
|
-
* ```
|
|
1717
|
-
*/
|
|
1718
2412
|
find(filter, options) {
|
|
1719
2413
|
return find(this, filter, options);
|
|
1720
2414
|
}
|
|
@@ -1963,12 +2657,51 @@ var Database = class {
|
|
|
1963
2657
|
return results;
|
|
1964
2658
|
}
|
|
1965
2659
|
/**
|
|
1966
|
-
* Execute a function within a MongoDB transaction
|
|
2660
|
+
* Execute a function within a MongoDB transaction.
|
|
2661
|
+
*
|
|
2662
|
+
* Starts a client session and runs the callback inside
|
|
2663
|
+
* `session.withTransaction()`. The driver handles commit on success,
|
|
2664
|
+
* abort on error, and automatic retries for transient transaction errors.
|
|
2665
|
+
*
|
|
2666
|
+
* The return value of `fn` is forwarded as the return value of this method.
|
|
1967
2667
|
*
|
|
1968
|
-
*
|
|
2668
|
+
* @param fn - Async callback receiving a {@link TransactionContext}.
|
|
2669
|
+
* @returns The value returned by `fn`.
|
|
2670
|
+
*
|
|
2671
|
+
* @example
|
|
2672
|
+
* ```ts
|
|
2673
|
+
* const user = await db.transaction(async (tx) => {
|
|
2674
|
+
* const txUsers = tx.use(users)
|
|
2675
|
+
* return await txUsers.insertOne({ name: 'Ada' })
|
|
2676
|
+
* })
|
|
2677
|
+
* ```
|
|
2678
|
+
*
|
|
2679
|
+
* @example
|
|
2680
|
+
* ```ts
|
|
2681
|
+
* // Rollback on error
|
|
2682
|
+
* try {
|
|
2683
|
+
* await db.transaction(async (tx) => {
|
|
2684
|
+
* const txUsers = tx.use(users)
|
|
2685
|
+
* await txUsers.insertOne({ name: 'Ada' })
|
|
2686
|
+
* throw new Error('abort!')
|
|
2687
|
+
* })
|
|
2688
|
+
* } catch (err) {
|
|
2689
|
+
* // insert was rolled back, err is the original error
|
|
2690
|
+
* }
|
|
2691
|
+
* ```
|
|
1969
2692
|
*/
|
|
1970
|
-
transaction(
|
|
1971
|
-
|
|
2693
|
+
async transaction(fn) {
|
|
2694
|
+
const session = this._client.startSession();
|
|
2695
|
+
try {
|
|
2696
|
+
let result;
|
|
2697
|
+
await session.withTransaction(async () => {
|
|
2698
|
+
const tx = new TransactionContext(session);
|
|
2699
|
+
result = await fn(tx);
|
|
2700
|
+
});
|
|
2701
|
+
return result;
|
|
2702
|
+
} finally {
|
|
2703
|
+
await session.endSession();
|
|
2704
|
+
}
|
|
1972
2705
|
}
|
|
1973
2706
|
/**
|
|
1974
2707
|
* Close the underlying `MongoClient` connection. Safe to call even if
|
|
@@ -2001,10 +2734,10 @@ function createClient(uri, dbNameOrOptions, maybeOptions) {
|
|
|
2001
2734
|
|
|
2002
2735
|
// src/collection/collection.ts
|
|
2003
2736
|
import { ObjectId as ObjectId3 } from "mongodb";
|
|
2004
|
-
import { z as
|
|
2737
|
+
import { z as z10 } from "zod";
|
|
2005
2738
|
|
|
2006
2739
|
// src/schema/extensions.ts
|
|
2007
|
-
import { z as
|
|
2740
|
+
import { z as z8 } from "zod";
|
|
2008
2741
|
var indexMetadata = /* @__PURE__ */ new WeakMap();
|
|
2009
2742
|
function getIndexMetadata(schema) {
|
|
2010
2743
|
if (typeof schema !== "object" || schema === null) return void 0;
|
|
@@ -2012,7 +2745,7 @@ function getIndexMetadata(schema) {
|
|
|
2012
2745
|
}
|
|
2013
2746
|
var GUARD = /* @__PURE__ */ Symbol.for("zodmon_extensions");
|
|
2014
2747
|
function installExtensions() {
|
|
2015
|
-
const proto =
|
|
2748
|
+
const proto = z8.ZodType.prototype;
|
|
2016
2749
|
if (GUARD in proto) return;
|
|
2017
2750
|
Object.defineProperty(proto, "index", {
|
|
2018
2751
|
/**
|
|
@@ -2111,10 +2844,10 @@ installExtensions();
|
|
|
2111
2844
|
|
|
2112
2845
|
// src/schema/object-id.ts
|
|
2113
2846
|
import { ObjectId as ObjectId2 } from "mongodb";
|
|
2114
|
-
import { z as
|
|
2847
|
+
import { z as z9 } from "zod";
|
|
2115
2848
|
var OBJECT_ID_HEX = /^[a-f\d]{24}$/i;
|
|
2116
2849
|
function objectId() {
|
|
2117
|
-
return
|
|
2850
|
+
return z9.custom((val) => {
|
|
2118
2851
|
if (val instanceof ObjectId2) return true;
|
|
2119
2852
|
return typeof val === "string" && OBJECT_ID_HEX.test(val);
|
|
2120
2853
|
}, "Invalid ObjectId").transform((val) => val instanceof ObjectId2 ? val : ObjectId2.createFromHexString(val));
|
|
@@ -2133,7 +2866,8 @@ function extractFieldIndexes(shape) {
|
|
|
2133
2866
|
}
|
|
2134
2867
|
function collection(name, shape, options) {
|
|
2135
2868
|
const resolvedShape = "_id" in shape ? shape : { _id: objectId().default(() => new ObjectId3()), ...shape };
|
|
2136
|
-
const schema =
|
|
2869
|
+
const schema = z10.object(resolvedShape);
|
|
2870
|
+
const strictSchema = schema.strict();
|
|
2137
2871
|
const fieldIndexes = extractFieldIndexes(shape);
|
|
2138
2872
|
const { indexes: compoundIndexes, validation, ...rest } = options ?? {};
|
|
2139
2873
|
return {
|
|
@@ -2143,6 +2877,7 @@ function collection(name, shape, options) {
|
|
|
2143
2877
|
// not assignable to ZodObject<ResolvedShape<TShape>>. The cast is safe because
|
|
2144
2878
|
// the runtime shape is correct — only the readonly modifier differs.
|
|
2145
2879
|
schema,
|
|
2880
|
+
strictSchema,
|
|
2146
2881
|
shape,
|
|
2147
2882
|
fieldIndexes,
|
|
2148
2883
|
// Safe cast: compoundIndexes is TIndexes at runtime (or an empty array when
|
|
@@ -2281,6 +3016,11 @@ export {
|
|
|
2281
3016
|
CollectionHandle,
|
|
2282
3017
|
Database,
|
|
2283
3018
|
IndexBuilder,
|
|
3019
|
+
PopulateCursor,
|
|
3020
|
+
PopulateOneOrThrowQuery,
|
|
3021
|
+
PopulateOneQuery,
|
|
3022
|
+
PopulateRefBuilder,
|
|
3023
|
+
TransactionContext,
|
|
2284
3024
|
TypedFindCursor,
|
|
2285
3025
|
ZodmonAuthError,
|
|
2286
3026
|
ZodmonBulkWriteError,
|
|
@@ -2300,8 +3040,11 @@ export {
|
|
|
2300
3040
|
createAccumulatorBuilder,
|
|
2301
3041
|
createClient,
|
|
2302
3042
|
createExpressionBuilder,
|
|
3043
|
+
createPopulateCursor,
|
|
2303
3044
|
deleteMany,
|
|
2304
3045
|
deleteOne,
|
|
3046
|
+
deriveProjectedSchema,
|
|
3047
|
+
executePopulate,
|
|
2305
3048
|
extractComparableOptions,
|
|
2306
3049
|
extractDbName,
|
|
2307
3050
|
extractFieldIndexes,
|
|
@@ -2316,14 +3059,17 @@ export {
|
|
|
2316
3059
|
index,
|
|
2317
3060
|
insertMany,
|
|
2318
3061
|
insertOne,
|
|
3062
|
+
isInclusionProjection,
|
|
2319
3063
|
isOid,
|
|
2320
3064
|
objectId,
|
|
2321
3065
|
oid,
|
|
2322
3066
|
raw,
|
|
3067
|
+
resolvePopulateStep,
|
|
2323
3068
|
serializeIndexKey,
|
|
2324
3069
|
syncIndexes,
|
|
2325
3070
|
toCompoundIndexSpec,
|
|
2326
3071
|
toFieldIndexSpec,
|
|
3072
|
+
unwrapRefSchema,
|
|
2327
3073
|
updateMany,
|
|
2328
3074
|
updateOne,
|
|
2329
3075
|
wrapMongoError
|