@classytic/mongokit 3.3.2 → 3.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +137 -7
- package/dist/PaginationEngine-nY04eGUM.mjs +290 -0
- package/dist/actions/index.d.mts +2 -9
- package/dist/actions/index.mjs +3 -5
- package/dist/ai/index.d.mts +1 -1
- package/dist/ai/index.mjs +3 -3
- package/dist/chunk-CfYAbeIz.mjs +13 -0
- package/dist/{limits-s1-d8rWb.mjs → cursor-CHToazHy.mjs} +122 -171
- package/dist/{logger-D8ily-PP.mjs → error-Bpbi_NKo.mjs} +34 -22
- package/dist/{cache-keys-CzFwVnLy.mjs → field-selection-reyDRzXf.mjs} +110 -112
- package/dist/{aggregate-BkOG9qwr.d.mts → index-BuoZIZ15.d.mts} +132 -129
- package/dist/index.d.mts +549 -543
- package/dist/index.mjs +33 -101
- package/dist/{mongooseToJsonSchema-D_i2Am_O.mjs → mongooseToJsonSchema-B6Qyl8BK.mjs} +13 -12
- package/dist/{mongooseToJsonSchema-B6O2ED3n.d.mts → mongooseToJsonSchema-RX9YfJLu.d.mts} +24 -17
- package/dist/pagination/PaginationEngine.d.mts +1 -1
- package/dist/pagination/PaginationEngine.mjs +2 -209
- package/dist/plugins/index.d.mts +1 -2
- package/dist/plugins/index.mjs +2 -3
- package/dist/sort-C-BJEWUZ.mjs +57 -0
- package/dist/{types-pVY0w1Pp.d.mts → types-COINbsdL.d.mts} +57 -27
- package/dist/{aggregate-BClp040M.mjs → update-DGKMmBgG.mjs} +575 -565
- package/dist/utils/index.d.mts +2 -2
- package/dist/utils/index.mjs +4 -5
- package/dist/{custom-id.plugin-BJ3FSnzt.d.mts → validation-chain.plugin-BNoaKDOm.d.mts} +832 -832
- package/dist/{custom-id.plugin-FInXDsUX.mjs → validation-chain.plugin-da3fOo8A.mjs} +2410 -2246
- package/package.json +11 -6
- package/dist/chunk-DQk6qfdC.mjs +0 -18
package/README.md
CHANGED
|
@@ -17,7 +17,7 @@
|
|
|
17
17
|
- **Search governance** - Text index guard (throws `400` if no index), allowlisted sort/filter fields, ReDoS protection
|
|
18
18
|
- **Vector search** - MongoDB Atlas `$vectorSearch` with auto-embedding and multimodal support
|
|
19
19
|
- **TypeScript first** - Full type safety with discriminated unions
|
|
20
|
-
- **
|
|
20
|
+
- **1090+ passing tests** - Battle-tested and production-ready
|
|
21
21
|
|
|
22
22
|
## Installation
|
|
23
23
|
|
|
@@ -25,7 +25,7 @@
|
|
|
25
25
|
npm install @classytic/mongokit mongoose
|
|
26
26
|
```
|
|
27
27
|
|
|
28
|
-
>
|
|
28
|
+
> Requires Mongoose `^9.0.0` | Node.js `>=22`
|
|
29
29
|
|
|
30
30
|
## Quick Start
|
|
31
31
|
|
|
@@ -210,7 +210,7 @@ const repo = new Repository(UserModel, [
|
|
|
210
210
|
| `cascadePlugin(opts)` | Auto-delete related documents |
|
|
211
211
|
| `methodRegistryPlugin()` | Dynamic method registration (required by plugins below) |
|
|
212
212
|
| `mongoOperationsPlugin()` | Adds `increment`, `pushToArray`, `upsert`, etc. |
|
|
213
|
-
| `batchOperationsPlugin()` | Adds `updateMany`, `deleteMany`
|
|
213
|
+
| `batchOperationsPlugin()` | Adds `updateMany`, `deleteMany`, `bulkWrite` |
|
|
214
214
|
| `aggregateHelpersPlugin()` | Adds `groupBy`, `sum`, `average`, etc. |
|
|
215
215
|
| `subdocumentPlugin()` | Manage subdocument arrays |
|
|
216
216
|
| `multiTenantPlugin(opts)` | Auto-inject tenant isolation on all operations |
|
|
@@ -223,14 +223,82 @@ const repo = new Repository(UserModel, [
|
|
|
223
223
|
|
|
224
224
|
```javascript
|
|
225
225
|
const repo = new Repository(UserModel, [
|
|
226
|
+
methodRegistryPlugin(),
|
|
227
|
+
batchOperationsPlugin(),
|
|
226
228
|
softDeletePlugin({ deletedField: "deletedAt" }),
|
|
227
229
|
]);
|
|
228
230
|
|
|
229
|
-
await repo.delete(id); // Marks as deleted
|
|
231
|
+
await repo.delete(id); // Marks as deleted (sets deletedAt)
|
|
230
232
|
await repo.getAll(); // Excludes deleted
|
|
231
233
|
await repo.getAll({ includeDeleted: true }); // Includes deleted
|
|
234
|
+
|
|
235
|
+
// Batch operations respect soft-delete automatically
|
|
236
|
+
await repo.deleteMany({ status: "draft" }); // Soft-deletes matching docs
|
|
237
|
+
await repo.updateMany({ status: "active" }, { $set: { featured: true } }); // Skips soft-deleted
|
|
238
|
+
```
|
|
239
|
+
|
|
240
|
+
### Populate via URL (Array Refs + Field Selection)
|
|
241
|
+
|
|
242
|
+
Populate arrays of ObjectIds with field selection, filtering, and sorting — all from URL query params:
|
|
243
|
+
|
|
244
|
+
```bash
|
|
245
|
+
# Populate all products in an order
|
|
246
|
+
GET /orders?populate=products
|
|
247
|
+
|
|
248
|
+
# Only name and price from each product
|
|
249
|
+
GET /orders?populate[products][select]=name,price
|
|
250
|
+
|
|
251
|
+
# Exclude fields
|
|
252
|
+
GET /orders?populate[products][select]=-internalNotes,-cost
|
|
253
|
+
|
|
254
|
+
# Filter: only active products
|
|
255
|
+
GET /orders?populate[products][match][status]=active
|
|
256
|
+
|
|
257
|
+
# Limit + sort populated items
|
|
258
|
+
GET /orders?populate[products][limit]=5&populate[products][sort]=-price
|
|
259
|
+
|
|
260
|
+
# Combined
|
|
261
|
+
GET /orders?populate[products][select]=name,price&populate[products][match][status]=active&populate[products][limit]=10
|
|
232
262
|
```
|
|
233
263
|
|
|
264
|
+
```typescript
|
|
265
|
+
// Express route — 3 lines
|
|
266
|
+
const parsed = parser.parse(req.query);
|
|
267
|
+
const result = await orderRepo.getAll(
|
|
268
|
+
{ filters: parsed.filters, sort: parsed.sort, limit: parsed.limit },
|
|
269
|
+
{ populateOptions: parsed.populateOptions, populate: parsed.populate },
|
|
270
|
+
);
|
|
271
|
+
```
|
|
272
|
+
|
|
273
|
+
### Lookup Joins via URL (No Refs Needed)
|
|
274
|
+
|
|
275
|
+
Join collections by any field (slug, code, SKU) using `$lookup` — no `ref` in schema required. Faster than `populate` for non-ref joins.
|
|
276
|
+
|
|
277
|
+
```bash
|
|
278
|
+
# Join products with categories by slug
|
|
279
|
+
GET /products?lookup[category][from]=categories&lookup[category][localField]=categorySlug&lookup[category][foreignField]=slug&lookup[category][single]=true
|
|
280
|
+
|
|
281
|
+
# With field selection on joined collection (only bring name + slug)
|
|
282
|
+
GET /products?lookup[category][...same]&lookup[category][select]=name,slug
|
|
283
|
+
|
|
284
|
+
# Combined with filter + sort + root select
|
|
285
|
+
GET /products?status=active&sort=-price&select=name,price,category&lookup[category][...same]&lookup[category][select]=name
|
|
286
|
+
```
|
|
287
|
+
|
|
288
|
+
```typescript
|
|
289
|
+
// Express route — getAll auto-routes to $lookup when lookups are present
|
|
290
|
+
const parsed = parser.parse(req.query);
|
|
291
|
+
const result = await repo.getAll({
|
|
292
|
+
filters: parsed.filters,
|
|
293
|
+
sort: parsed.sort,
|
|
294
|
+
lookups: parsed.lookups, // auto-routes to lookupPopulate
|
|
295
|
+
select: parsed.select,
|
|
296
|
+
limit: parsed.limit,
|
|
297
|
+
});
|
|
298
|
+
```
|
|
299
|
+
|
|
300
|
+
> **Populate vs Lookup:** Use `populate` for `ref` fields (ObjectId arrays). Use `lookup` for joining by any field (slugs, codes, SKUs) — it runs a server-side `$lookup` aggregation, which is faster than client-side population for non-ref joins.
|
|
301
|
+
|
|
234
302
|
### Caching
|
|
235
303
|
|
|
236
304
|
```javascript
|
|
@@ -854,7 +922,46 @@ repo.on("error:create", ({ context, error }) => {
|
|
|
854
922
|
});
|
|
855
923
|
```
|
|
856
924
|
|
|
857
|
-
**Events:** `before:*`, `after:*`, `error:*` for `create`, `createMany`, `update`, `delete`, `getById`, `getByQuery`, `getAll`, `aggregatePaginate`
|
|
925
|
+
**Events:** `before:*`, `after:*`, `error:*` for `create`, `createMany`, `update`, `delete`, `deleteMany`, `updateMany`, `getById`, `getByQuery`, `getAll`, `aggregatePaginate`
|
|
926
|
+
|
|
927
|
+
### Microservice Integration (Kafka / RabbitMQ / Redis Pub-Sub)
|
|
928
|
+
|
|
929
|
+
Use `after:*` hooks to publish events to message brokers — zero additional libraries needed:
|
|
930
|
+
|
|
931
|
+
```typescript
|
|
932
|
+
import { HOOK_PRIORITY } from "@classytic/mongokit";
|
|
933
|
+
|
|
934
|
+
// Publish to Kafka after every create
|
|
935
|
+
repo.on("after:create", async ({ context, result }) => {
|
|
936
|
+
await kafka.publish("orders.created", {
|
|
937
|
+
operation: context.operation,
|
|
938
|
+
model: context.model,
|
|
939
|
+
document: result,
|
|
940
|
+
userId: context.user?._id,
|
|
941
|
+
tenantId: context.organizationId,
|
|
942
|
+
timestamp: Date.now(),
|
|
943
|
+
});
|
|
944
|
+
}, { priority: HOOK_PRIORITY.OBSERVABILITY });
|
|
945
|
+
|
|
946
|
+
// Redis Pub-Sub on updates
|
|
947
|
+
repo.on("after:update", async ({ context, result }) => {
|
|
948
|
+
await redis.publish("order:updated", JSON.stringify({
|
|
949
|
+
id: result._id,
|
|
950
|
+
changes: context.data,
|
|
951
|
+
}));
|
|
952
|
+
}, { priority: HOOK_PRIORITY.OBSERVABILITY });
|
|
953
|
+
|
|
954
|
+
// RabbitMQ on deletes (including soft-deletes)
|
|
955
|
+
repo.on("after:delete", async ({ context, result }) => {
|
|
956
|
+
await rabbitMQ.sendToQueue("order.deleted", {
|
|
957
|
+
id: result.id,
|
|
958
|
+
soft: result.soft,
|
|
959
|
+
tenantId: context.organizationId,
|
|
960
|
+
});
|
|
961
|
+
}, { priority: HOOK_PRIORITY.OBSERVABILITY });
|
|
962
|
+
```
|
|
963
|
+
|
|
964
|
+
**Hook priority order:** `POLICY (100)` → `CACHE (200)` → `OBSERVABILITY (300)` → `DEFAULT (500)`. Event publishing at `OBSERVABILITY` ensures it runs after policy enforcement and cache invalidation.
|
|
858
965
|
|
|
859
966
|
## Building REST APIs
|
|
860
967
|
|
|
@@ -952,7 +1059,7 @@ GET /posts?populate[author][populate][department][select]=name # Nested
|
|
|
952
1059
|
|
|
953
1060
|
**Security features:**
|
|
954
1061
|
|
|
955
|
-
- Blocks `$where`, `$function`, `$accumulator
|
|
1062
|
+
- Blocks `$where`, `$function`, `$accumulator` operators (`$expr` allowed for `$lookup` correlation)
|
|
956
1063
|
- ReDoS protection for regex patterns
|
|
957
1064
|
- Max filter depth enforcement
|
|
958
1065
|
- Collection allowlists for lookups
|
|
@@ -1240,6 +1347,29 @@ const userRepo = createRepository(UserModel, [timestampPlugin()], {
|
|
|
1240
1347
|
});
|
|
1241
1348
|
```
|
|
1242
1349
|
|
|
1350
|
+
## Error Handling
|
|
1351
|
+
|
|
1352
|
+
MongoKit translates MongoDB and Mongoose errors into HTTP-compatible errors with proper status codes:
|
|
1353
|
+
|
|
1354
|
+
| Error Type | Status | Example |
|
|
1355
|
+
|---|---|---|
|
|
1356
|
+
| Duplicate key (E11000) | **409** | `Duplicate value for email (email: "dup@test.com")` |
|
|
1357
|
+
| Validation error | **400** | `Validation Error: name is required` |
|
|
1358
|
+
| Cast error | **400** | `Invalid _id: not-a-valid-id` |
|
|
1359
|
+
| Document not found | **404** | `Document not found` |
|
|
1360
|
+
| Other errors | **500** | `Internal Server Error` |
|
|
1361
|
+
|
|
1362
|
+
```typescript
|
|
1363
|
+
import { parseDuplicateKeyError } from "@classytic/mongokit";
|
|
1364
|
+
|
|
1365
|
+
// Use in custom error handlers
|
|
1366
|
+
const dupErr = parseDuplicateKeyError(error);
|
|
1367
|
+
if (dupErr) {
|
|
1368
|
+
// dupErr.status === 409
|
|
1369
|
+
// dupErr.message includes field name and value
|
|
1370
|
+
}
|
|
1371
|
+
```
|
|
1372
|
+
|
|
1243
1373
|
## No Breaking Changes
|
|
1244
1374
|
|
|
1245
1375
|
Extending Repository works exactly the same with Mongoose 8 and 9. The package:
|
|
@@ -1247,7 +1377,7 @@ Extending Repository works exactly the same with Mongoose 8 and 9. The package:
|
|
|
1247
1377
|
- Uses its own event system (not Mongoose middleware)
|
|
1248
1378
|
- Defines its own `FilterQuery` type (unaffected by Mongoose 9 rename)
|
|
1249
1379
|
- Properly gates update pipelines (safe for Mongoose 9's stricter defaults)
|
|
1250
|
-
- All
|
|
1380
|
+
- All 1090+ tests pass on Mongoose 9
|
|
1251
1381
|
|
|
1252
1382
|
## License
|
|
1253
1383
|
|
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
import { a as warn, t as createError } from "./error-Bpbi_NKo.mjs";
|
|
2
|
+
import { n as encodeCursor, r as resolveCursorFilter } from "./cursor-CHToazHy.mjs";
|
|
3
|
+
import { r as validateKeysetSort, t as getPrimaryField } from "./sort-C-BJEWUZ.mjs";
|
|
4
|
+
//#region src/pagination/utils/limits.ts
|
|
5
|
+
/**
|
|
6
|
+
* Validates and sanitizes limit value
|
|
7
|
+
* Parses strings to numbers and prevents NaN bugs
|
|
8
|
+
*
|
|
9
|
+
* @param limit - Requested limit
|
|
10
|
+
* @param config - Pagination configuration
|
|
11
|
+
* @returns Sanitized limit between 1 and maxLimit
|
|
12
|
+
*/
|
|
13
|
+
/**
 * Normalizes a requested page-size value.
 *
 * Accepts numbers or numeric strings; anything non-finite or below 1
 * falls back to the configured default. Valid values are floored to an
 * integer and capped at the configured maximum.
 *
 * @param limit - Raw requested limit (number or numeric string)
 * @param config - Pagination configuration ({ defaultLimit, maxLimit })
 * @returns Integer limit in the range [1, maxLimit]
 */
function validateLimit(limit, config) {
	const requested = Number(limit);
	const fallback = config.defaultLimit || 10;
	if (!Number.isFinite(requested) || requested < 1) return fallback;
	const ceiling = config.maxLimit || 100;
	return Math.min(Math.floor(requested), ceiling);
}
|
|
18
|
+
/**
|
|
19
|
+
* Validates and sanitizes page number
|
|
20
|
+
* Parses strings to numbers and prevents NaN bugs
|
|
21
|
+
*
|
|
22
|
+
* @param page - Requested page (1-indexed)
|
|
23
|
+
* @param config - Pagination configuration
|
|
24
|
+
* @returns Sanitized page number >= 1
|
|
25
|
+
* @throws Error if page exceeds maxPage
|
|
26
|
+
*/
|
|
27
|
+
/**
 * Normalizes a requested page number (1-indexed).
 *
 * Accepts numbers or numeric strings; anything non-finite or below 1
 * becomes page 1. Valid values are floored to an integer.
 *
 * @param page - Raw requested page (number or numeric string)
 * @param config - Pagination configuration ({ maxPage })
 * @returns Integer page number >= 1
 * @throws Error when the sanitized page exceeds the configured maxPage
 */
function validatePage(page, config) {
	const requested = Number(page);
	if (!Number.isFinite(requested) || requested < 1) return 1;
	const pageNumber = Math.floor(requested);
	const ceiling = config.maxPage || 1e4;
	if (pageNumber > ceiling) throw new Error(`Page ${pageNumber} exceeds maximum ${ceiling}`);
	return pageNumber;
}
|
|
34
|
+
/**
|
|
35
|
+
* Checks if page number should trigger deep pagination warning
|
|
36
|
+
*
|
|
37
|
+
* @param page - Current page number
|
|
38
|
+
* @param threshold - Warning threshold
|
|
39
|
+
* @returns True if warning should be shown
|
|
40
|
+
*/
|
|
41
|
+
/**
 * Decides whether a page number is deep enough to warrant a
 * deep-pagination performance warning.
 *
 * @param page - Current page number
 * @param threshold - Page number above which to warn
 * @returns True when page is strictly greater than threshold
 */
function shouldWarnDeepPagination(page, threshold) {
	const isDeep = page > threshold;
	return isDeep;
}
|
|
44
|
+
/**
|
|
45
|
+
* Calculates number of documents to skip for offset pagination
|
|
46
|
+
*
|
|
47
|
+
* @param page - Page number (1-indexed)
|
|
48
|
+
* @param limit - Documents per page
|
|
49
|
+
* @returns Number of documents to skip
|
|
50
|
+
*/
|
|
51
|
+
/**
 * Converts a 1-indexed page number into a document offset for
 * skip/limit pagination.
 *
 * @param page - Page number (1-indexed)
 * @param limit - Documents per page
 * @returns Number of documents to skip
 */
function calculateSkip(page, limit) {
	const zeroIndexedPage = page - 1;
	return zeroIndexedPage * limit;
}
|
|
54
|
+
/**
|
|
55
|
+
* Calculates total number of pages
|
|
56
|
+
*
|
|
57
|
+
* @param total - Total document count
|
|
58
|
+
* @param limit - Documents per page
|
|
59
|
+
* @returns Total number of pages
|
|
60
|
+
*/
|
|
61
|
+
/**
 * Computes how many pages a result set spans, rounding up so a
 * partial final page still counts.
 *
 * @param total - Total document count
 * @param limit - Documents per page
 * @returns Total number of pages
 */
function calculateTotalPages(total, limit) {
	const pages = total / limit;
	return Math.ceil(pages);
}
|
|
64
|
+
//#endregion
|
|
65
|
+
//#region src/pagination/PaginationEngine.ts
|
|
66
|
+
/**
 * Guarantees that a field-selection expression keeps the fields needed
 * to build a keyset cursor (every sort key plus `_id`).
 *
 * Pure-exclusion selections (all entries prefixed with `-`, or an
 * object projection with no `1` values) are returned untouched, since
 * the cursor fields are not excluded by them. Inclusion selections get
 * the cursor fields merged in.
 *
 * @param select - String, array, or object projection (or falsy)
 * @param sort - Normalized sort spec whose keys must survive selection
 * @returns Selection of the same shape with cursor fields ensured
 */
function ensureKeysetSelectIncludesCursorFields(select, sort) {
	if (!select) return select;
	const mustKeep = new Set([...Object.keys(sort), "_id"]);
	const isPureExclusion = (fields) => fields.length > 0 && fields.every((field) => field.startsWith("-"));
	if (typeof select === "string") {
		// Split on commas and/or whitespace, dropping empty tokens.
		const tokens = select.split(/[,\s]+/).map((token) => token.trim()).filter(Boolean);
		if (isPureExclusion(tokens)) return select;
		const combined = new Set(tokens);
		mustKeep.forEach((field) => combined.add(field));
		return [...combined].join(" ");
	}
	if (Array.isArray(select)) {
		const tokens = select.map((token) => token.trim()).filter(Boolean);
		if (isPureExclusion(tokens)) return select;
		const combined = new Set(tokens);
		mustKeep.forEach((field) => combined.add(field));
		return [...combined];
	}
	// Object projection: only an inclusion projection (some value === 1)
	// needs the cursor fields added; exclusion projections pass through.
	const projection = { ...select };
	const hasInclusion = Object.values(projection).some((value) => value === 1);
	if (!hasInclusion) return select;
	mustKeep.forEach((field) => {
		projection[field] = 1;
	});
	return projection;
}
|
|
88
|
+
/**
|
|
89
|
+
* Production-grade pagination engine for MongoDB
|
|
90
|
+
* Supports offset, keyset (cursor), and aggregate pagination
|
|
91
|
+
*/
|
|
92
|
+
/**
 * Production-grade pagination engine for MongoDB.
 * Supports offset (skip/limit), keyset (cursor), and aggregate pagination.
 */
var PaginationEngine = class {
	Model;
	config;
	/**
	 * Create a new pagination engine.
	 *
	 * @param Model - Mongoose model to paginate
	 * @param config - Pagination configuration overrides
	 */
	constructor(Model, config = {}) {
		this.Model = Model;
		this.config = {
			defaultLimit: config.defaultLimit || 10,
			maxLimit: config.maxLimit || 100,
			maxPage: config.maxPage || 1e4,
			deepPageThreshold: config.deepPageThreshold || 100,
			cursorVersion: config.cursorVersion || 1,
			useEstimatedCount: config.useEstimatedCount || false
		};
	}
	/**
	 * Offset-based pagination using skip/limit.
	 * Best for small datasets and random page access; O(n) for deep pages.
	 *
	 * @param options - Pagination options
	 * @returns Pagination result with total count
	 *
	 * @example
	 * const result = await engine.paginate({
	 *   filters: { status: 'active' },
	 *   sort: { createdAt: -1 },
	 *   page: 1,
	 *   limit: 20
	 * });
	 * console.log(result.docs, result.total, result.hasNext);
	 */
	async paginate(options = {}) {
		const {
			filters = {},
			sort = { _id: -1 },
			page = 1,
			limit = this.config.defaultLimit,
			select,
			populate = [],
			lean = true,
			session,
			hint,
			maxTimeMS,
			countStrategy = "exact",
			readPreference,
			collation
		} = options;
		const safePage = validatePage(page, this.config);
		const safeLimit = validateLimit(limit, this.config);
		const skip = calculateSkip(safePage, safeLimit);
		// With countStrategy "none" we over-fetch one doc to detect hasNext.
		const fetchLimit = countStrategy === "none" ? safeLimit + 1 : safeLimit;
		let findQuery = this.Model.find(filters);
		if (select) findQuery = findQuery.select(select);
		if (populate && (Array.isArray(populate) ? populate.length : populate)) findQuery = findQuery.populate(populate);
		findQuery = findQuery.sort(sort).skip(skip).limit(fetchLimit).lean(lean);
		if (collation) findQuery = findQuery.collation(collation);
		if (session) findQuery = findQuery.session(session);
		if (hint) findQuery = findQuery.hint(hint);
		if (maxTimeMS) findQuery = findQuery.maxTimeMS(maxTimeMS);
		if (readPreference) findQuery = findQuery.read(readPreference);
		const hasFilters = Object.keys(filters).length > 0;
		const useEstimated = this.config.useEstimatedCount && !hasFilters;
		let countPromise;
		if ((countStrategy === "estimated" || useEstimated) && !hasFilters) {
			// estimatedDocumentCount is metadata-based and ignores filters,
			// so it is only used when no filters are present.
			countPromise = this.Model.estimatedDocumentCount();
		} else if (countStrategy === "none") {
			countPromise = Promise.resolve(0);
		} else {
			const countQuery = this.Model.countDocuments(filters).session(session ?? null);
			if (hint) countQuery.hint(hint);
			if (maxTimeMS) countQuery.maxTimeMS(maxTimeMS);
			if (readPreference) countQuery.read(readPreference);
			countPromise = countQuery.exec();
		}
		// Run the page fetch and the count concurrently.
		const [docs, total] = await Promise.all([findQuery.exec(), countPromise]);
		const totalPages = countStrategy === "none" ? 0 : calculateTotalPages(total, safeLimit);
		let hasNext;
		if (countStrategy === "none") {
			hasNext = docs.length > safeLimit;
			if (hasNext) docs.pop();
		} else {
			hasNext = safePage < totalPages;
		}
		const warning = shouldWarnDeepPagination(safePage, this.config.deepPageThreshold)
			? `Deep pagination (page ${safePage}). Consider getAll({ after, sort, limit }) for better performance.`
			: void 0;
		return {
			method: "offset",
			docs,
			page: safePage,
			limit: safeLimit,
			total,
			pages: totalPages,
			hasNext,
			hasPrev: safePage > 1,
			...warning && { warning }
		};
	}
	/**
	 * Keyset (cursor-based) pagination for high-performance streaming.
	 * Best for large datasets, infinite scroll, real-time feeds;
	 * O(1) regardless of position.
	 *
	 * @param options - Pagination options (sort is required)
	 * @returns Pagination result with next cursor
	 *
	 * @example
	 * // First page
	 * const page1 = await engine.stream({ sort: { createdAt: -1 }, limit: 20 });
	 * // Next page using cursor
	 * const page2 = await engine.stream({ sort: { createdAt: -1 }, after: page1.next, limit: 20 });
	 */
	async stream(options) {
		const {
			filters = {},
			sort,
			after,
			limit = this.config.defaultLimit,
			select,
			populate = [],
			lean = true,
			session,
			hint,
			maxTimeMS,
			readPreference,
			collation
		} = options;
		if (!sort) throw createError(400, "sort is required for keyset pagination");
		const safeLimit = validateLimit(limit, this.config);
		const normalizedSort = validateKeysetSort(sort);
		const filterKeys = Object.keys(filters).filter((k) => !k.startsWith("$"));
		const sortFields = Object.keys(normalizedSort);
		if (filterKeys.length > 0 && sortFields.length > 0) {
			// Advisory only: keyset + filters needs a compound index for O(1).
			const indexFields = [...filterKeys.map((f) => `${f}: 1`), ...sortFields.map((f) => `${f}: ${normalizedSort[f]}`)];
			warn(`[mongokit] Keyset pagination with filters [${filterKeys.join(", ")}] and sort [${sortFields.join(", ")}] requires a compound index for O(1) performance. Ensure index exists: { ${indexFields.join(", ")} }`);
		}
		let criteria = { ...filters };
		if (after) criteria = resolveCursorFilter(after, normalizedSort, this.config.cursorVersion, criteria);
		// Sort keys and _id must survive projection so the next cursor can be built.
		const effectiveSelect = ensureKeysetSelectIncludesCursorFields(select, normalizedSort);
		let cursorQuery = this.Model.find(criteria);
		if (effectiveSelect) cursorQuery = cursorQuery.select(effectiveSelect);
		if (populate && (Array.isArray(populate) ? populate.length : populate)) cursorQuery = cursorQuery.populate(populate);
		// Over-fetch by one to detect whether more pages exist.
		cursorQuery = cursorQuery.sort(normalizedSort).limit(safeLimit + 1).lean(lean);
		if (collation) cursorQuery = cursorQuery.collation(collation);
		if (session) cursorQuery = cursorQuery.session(session);
		if (hint) cursorQuery = cursorQuery.hint(hint);
		if (maxTimeMS) cursorQuery = cursorQuery.maxTimeMS(maxTimeMS);
		if (readPreference) cursorQuery = cursorQuery.read(readPreference);
		const docs = await cursorQuery.exec();
		const hasMore = docs.length > safeLimit;
		if (hasMore) docs.pop();
		const primaryField = getPrimaryField(normalizedSort);
		return {
			method: "keyset",
			docs,
			limit: safeLimit,
			hasMore,
			next: hasMore && docs.length > 0 ? encodeCursor(docs[docs.length - 1], primaryField, normalizedSort, this.config.cursorVersion) : null
		};
	}
	/**
	 * Aggregate pipeline with pagination.
	 * Best for complex queries requiring aggregation stages; uses $facet
	 * to combine results and count in a single query.
	 *
	 * @param options - Aggregation options
	 * @returns Pagination result with total count
	 *
	 * @example
	 * const result = await engine.aggregatePaginate({
	 *   pipeline: [
	 *     { $match: { status: 'active' } },
	 *     { $group: { _id: '$category', count: { $sum: 1 } } },
	 *     { $sort: { count: -1 } }
	 *   ],
	 *   page: 1,
	 *   limit: 20
	 * });
	 */
	async aggregatePaginate(options = {}) {
		const {
			pipeline = [],
			page = 1,
			limit = this.config.defaultLimit,
			session,
			hint,
			maxTimeMS,
			countStrategy = "exact",
			readPreference
		} = options;
		const safePage = validatePage(page, this.config);
		const safeLimit = validateLimit(limit, this.config);
		const skip = calculateSkip(safePage, safeLimit);
		// With countStrategy "none" we over-fetch one doc to detect hasNext.
		const fetchLimit = countStrategy === "none" ? safeLimit + 1 : safeLimit;
		const facetStages = { docs: [{ $skip: skip }, { $limit: fetchLimit }] };
		if (countStrategy !== "none") facetStages.total = [{ $count: "count" }];
		const aggregation = this.Model.aggregate([...pipeline, { $facet: facetStages }]);
		if (session) aggregation.session(session);
		if (hint) aggregation.hint(hint);
		if (maxTimeMS) aggregation.option({ maxTimeMS });
		if (readPreference) aggregation.read(readPreference);
		const [result] = await aggregation.exec();
		const docs = result.docs;
		const total = result.total?.[0]?.count || 0;
		const totalPages = countStrategy === "none" ? 0 : calculateTotalPages(total, safeLimit);
		let hasNext;
		if (countStrategy === "none") {
			hasNext = docs.length > safeLimit;
			if (hasNext) docs.pop();
		} else {
			hasNext = safePage < totalPages;
		}
		const warning = shouldWarnDeepPagination(safePage, this.config.deepPageThreshold)
			? `Deep pagination in aggregate (page ${safePage}). Uses $skip internally.`
			: void 0;
		return {
			method: "aggregate",
			docs,
			page: safePage,
			limit: safeLimit,
			total,
			pages: totalPages,
			hasNext,
			hasPrev: safePage > 1,
			...warning && { warning }
		};
	}
};
|
|
289
|
+
//#endregion
|
|
290
|
+
export { PaginationEngine as t };
|
package/dist/actions/index.d.mts
CHANGED
|
@@ -1,9 +1,2 @@
|
|
|
1
|
-
import "../
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
//#region src/actions/index.d.ts
|
|
5
|
-
declare namespace index_d_exports {
|
|
6
|
-
export { aggregate_d_exports as aggregate, create_d_exports as create, delete_d_exports as deleteActions, read_d_exports as read, update_d_exports as update };
|
|
7
|
-
}
|
|
8
|
-
//#endregion
|
|
9
|
-
export { aggregate_d_exports as aggregate, create_d_exports as create, delete_d_exports as deleteActions, read_d_exports as read, index_d_exports as t, update_d_exports as update };
|
|
1
|
+
import { a as create_d_exports, i as delete_d_exports, n as update_d_exports, o as aggregate_d_exports, r as read_d_exports } from "../index-BuoZIZ15.mjs";
|
|
2
|
+
export { aggregate_d_exports as aggregate, create_d_exports as create, delete_d_exports as deleteActions, read_d_exports as read, update_d_exports as update };
|
package/dist/actions/index.mjs
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
|
-
import { t as __exportAll } from "../chunk-
|
|
2
|
-
import {
|
|
3
|
-
|
|
1
|
+
import { t as __exportAll } from "../chunk-CfYAbeIz.mjs";
|
|
2
|
+
import { c as read_exports, h as aggregate_exports, n as update_exports, p as create_exports, u as delete_exports } from "../update-DGKMmBgG.mjs";
|
|
4
3
|
//#region src/actions/index.ts
|
|
5
4
|
var actions_exports = /* @__PURE__ */ __exportAll({
|
|
6
5
|
aggregate: () => aggregate_exports,
|
|
@@ -9,6 +8,5 @@ var actions_exports = /* @__PURE__ */ __exportAll({
|
|
|
9
8
|
read: () => read_exports,
|
|
10
9
|
update: () => update_exports
|
|
11
10
|
});
|
|
12
|
-
|
|
13
11
|
//#endregion
|
|
14
|
-
export { aggregate_exports as aggregate, create_exports as create, delete_exports as deleteActions, read_exports as read, actions_exports as t, update_exports as update };
|
|
12
|
+
export { aggregate_exports as aggregate, create_exports as create, delete_exports as deleteActions, read_exports as read, actions_exports as t, update_exports as update };
|
package/dist/ai/index.d.mts
CHANGED
package/dist/ai/index.mjs
CHANGED
|
@@ -175,9 +175,10 @@ function vectorPlugin(options) {
|
|
|
175
175
|
});
|
|
176
176
|
repo.on("before:update", async (context) => {
|
|
177
177
|
if (!context.data) return;
|
|
178
|
+
const contextData = context.data;
|
|
178
179
|
const fieldsToEmbed = fields.filter((field) => {
|
|
179
180
|
const allFields = [...field.sourceFields ?? [], ...field.mediaFields ?? []];
|
|
180
|
-
return allFields.length > 0 && allFields.some((f) => f in
|
|
181
|
+
return allFields.length > 0 && contextData && allFields.some((f) => f in contextData);
|
|
181
182
|
});
|
|
182
183
|
if (!fieldsToEmbed.length) return;
|
|
183
184
|
const existing = await repo.Model.findById(context.id).lean().session(context.session ?? null);
|
|
@@ -198,6 +199,5 @@ function vectorPlugin(options) {
|
|
|
198
199
|
}
|
|
199
200
|
};
|
|
200
201
|
}
|
|
201
|
-
|
|
202
202
|
//#endregion
|
|
203
|
-
export { buildVectorSearchPipeline, vectorPlugin };
|
|
203
|
+
export { buildVectorSearchPipeline, vectorPlugin };
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
//#region \0rolldown/runtime.js
|
|
2
|
+
var __defProp = Object.defineProperty;
// Builds a module-namespace-like object from a map of getter thunks.
// Each property becomes a lazy enumerable getter; unless `no_symbols`
// is set, the result is tagged as "Module" via Symbol.toStringTag.
var __exportAll = (all, no_symbols) => {
	const target = {};
	for (const key in all) {
		__defProp(target, key, {
			get: all[key],
			enumerable: true
		});
	}
	if (!no_symbols) __defProp(target, Symbol.toStringTag, { value: "Module" });
	return target;
};
|
|
12
|
+
//#endregion
|
|
13
|
+
export { __exportAll as t };
|