@classytic/mongokit 3.1.0 → 3.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,299 @@
1
+ import 'mongoose';
2
+
3
// src/utils/memory-cache.ts
/**
 * Create an in-memory TTL cache with a bounded number of entries.
 *
 * Entries expire lazily (checked on every get/set) and the oldest entry is
 * evicted FIFO once `maxEntries` is reached. The API is async so the cache
 * can be swapped for a network-backed store with the same interface.
 *
 * @param {number} [maxEntries=1000] - Maximum number of entries to keep.
 * @returns {{get: Function, set: Function, del: Function, clear: Function}}
 */
function createMemoryCache(maxEntries = 1e3) {
  const cache = /* @__PURE__ */ new Map();
  // Drop every expired entry; runs on each get/set (lazy expiry).
  function cleanup() {
    const now = Date.now();
    for (const [key, entry] of cache) {
      if (entry.expiresAt < now) {
        cache.delete(key);
      }
    }
  }
  // When at capacity, evict the first-inserted (oldest) key.
  // Map iteration order is insertion order, so keys().next() is the oldest.
  function evictOldest() {
    if (cache.size >= maxEntries) {
      const firstKey = cache.keys().next().value;
      if (firstKey) cache.delete(firstKey);
    }
  }
  return {
    /** Return the cached value, or null when the key is absent or expired. */
    async get(key) {
      cleanup();
      const entry = cache.get(key);
      if (!entry) return null;
      if (entry.expiresAt < Date.now()) {
        cache.delete(key);
        return null;
      }
      return entry.value;
    },
    /** Store a value under `key` with a TTL given in seconds. */
    async set(key, value, ttl) {
      cleanup();
      evictOldest();
      cache.set(key, {
        value,
        expiresAt: Date.now() + ttl * 1e3
      });
    },
    /** Remove a single key. */
    async del(key) {
      cache.delete(key);
    },
    /**
     * Remove all keys, or only those matching a glob-style pattern
     * (`*` = any run of characters, `?` = exactly one character).
     */
    async clear(pattern) {
      if (!pattern) {
        cache.clear();
        return;
      }
      // FIX: escape regex metacharacters in the pattern before translating
      // the glob wildcards. Previously a literal "." (or "(", "+", ...) was
      // passed through as regex syntax, so clear("user.1") also deleted
      // keys like "userX1".
      const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&");
      const regex = new RegExp(
        "^" + escaped.replace(/\*/g, ".*").replace(/\?/g, ".") + "$"
      );
      for (const key of cache.keys()) {
        if (regex.test(key)) {
          cache.delete(key);
        }
      }
    }
  };
}
58
/**
 * Derive the full set of CRUD JSON schemas (create body, update body, route
 * params, list query) from a mongoose schema.
 *
 * @param {object} mongooseSchema - Mongoose schema instance.
 * @param {object} [options] - Field rules / omit lists / overrides.
 * @returns {{createBody: object, updateBody: object, params: object, listQuery: object}}
 */
function buildCrudSchemasFromMongooseSchema(mongooseSchema, options = {}) {
  const createBody = buildJsonSchemaFromPaths(mongooseSchema, options);
  const updateBody = buildJsonSchemaForUpdate(createBody, options);
  // Route params: a single Mongo ObjectId path parameter.
  const params = {
    type: "object",
    properties: { id: { type: "string", pattern: "^[0-9a-fA-F]{24}$" } },
    required: ["id"]
  };
  const listQuery = buildJsonSchemaForQuery(mongooseSchema?.obj || {}, options);
  return { createBody, updateBody, params, listQuery };
}
70
/**
 * Derive CRUD JSON schemas from a mongoose model (delegates to the
 * schema-based builder after validating the model).
 *
 * @throws {Error} When the argument is not a mongoose model.
 */
function buildCrudSchemasFromModel(mongooseModel, options = {}) {
  const schema = mongooseModel ? mongooseModel.schema : void 0;
  if (!schema) {
    throw new Error("Invalid mongoose model");
  }
  return buildCrudSchemasFromMongooseSchema(schema, options);
}
76
/**
 * Collect field names that must never change after creation: every field
 * whose rule is `immutable` or `immutableAfterCreate`, plus any field in
 * `options.update.omitFields` (deduplicated, rule-order first).
 *
 * @returns {string[]} Immutable field names.
 */
function getImmutableFields(options = {}) {
  const collected = new Set();
  for (const [field, rules] of Object.entries(options?.fieldRules || {})) {
    if (rules.immutable || rules.immutableAfterCreate) {
      collected.add(field);
    }
  }
  for (const field of options?.update?.omitFields || []) {
    collected.add(field);
  }
  return [...collected];
}
89
/**
 * Collect field names whose rule marks them `systemManaged` (values written
 * by the system, never accepted from clients).
 *
 * @returns {string[]} System-managed field names, in rule order.
 */
function getSystemManagedFields(options = {}) {
  return Object.entries(options?.fieldRules || {})
    .filter(([, rules]) => rules.systemManaged)
    .map(([field]) => field);
}
99
/**
 * True when a field may be written in an update: it is neither immutable
 * nor system-managed under the given options.
 */
function isFieldUpdateAllowed(fieldName, options = {}) {
  const blocked = [
    ...getImmutableFields(options),
    ...getSystemManagedFields(options)
  ];
  return !blocked.includes(fieldName);
}
104
/**
 * Check an update payload against the field rules.
 *
 * @param {object} [body] - Proposed update document.
 * @param {object} [options] - Field rules / omit lists.
 * @returns {{valid: boolean, violations: Array<{field: string, reason: string}>}}
 *   `valid` is true only when no field is immutable or system-managed.
 */
function validateUpdateBody(body = {}, options = {}) {
  const immutableFields = getImmutableFields(options);
  const systemManagedFields = getSystemManagedFields(options);
  const violations = [];
  for (const field of Object.keys(body)) {
    if (immutableFields.includes(field)) {
      violations.push({ field, reason: "Field is immutable" });
    } else if (systemManagedFields.includes(field)) {
      violations.push({ field, reason: "Field is system-managed" });
    }
  }
  return { valid: violations.length === 0, violations };
}
120
/**
 * Build the "create" JSON schema from a mongoose schema's flattened paths.
 *
 * Processing order matters and is load-bearing:
 *   1. group dotted paths by root field and convert each to JSON schema,
 *   2. omit timestamps, `options.create.omitFields` and systemManaged fields,
 *   3. apply requiredOverrides, then optionalOverrides, then rule `optional`,
 *   4. apply per-field schemaOverrides,
 *   5. optionally forbid additional properties.
 *
 * @param {object} mongooseSchema - Mongoose schema (its `paths` map is read).
 * @param {object} options - create/fieldRules/strictAdditionalProperties knobs.
 * @returns {object} JSON schema for the create body.
 */
function buildJsonSchemaFromPaths(mongooseSchema, options) {
  const properties = {};
  const required = [];
  const paths = mongooseSchema.paths;
  // Group dotted paths ("address.city") under their root field ("address").
  const rootFields = /* @__PURE__ */ new Map();
  for (const [path, schemaType] of Object.entries(paths)) {
    // Internal mongoose paths are never client-writable.
    if (path === "_id" || path === "__v") continue;
    const parts = path.split(".");
    const rootField = parts[0];
    if (!rootFields.has(rootField)) {
      rootFields.set(rootField, []);
    }
    rootFields.get(rootField).push({ path, schemaType });
  }
  for (const [rootField, fieldPaths] of rootFields.entries()) {
    if (fieldPaths.length === 1 && fieldPaths[0].path === rootField) {
      // Simple scalar/array field at the top level.
      const schemaType = fieldPaths[0].schemaType;
      properties[rootField] = schemaTypeToJsonSchema(schemaType);
      if (schemaType.isRequired) {
        required.push(rootField);
      }
    } else {
      // One or more dotted sub-paths: build a nested object schema.
      const nestedSchema = buildNestedJsonSchema(fieldPaths, rootField);
      properties[rootField] = nestedSchema.schema;
      if (nestedSchema.required) {
        required.push(rootField);
      }
    }
  }
  const schema = { type: "object", properties };
  if (required.length) schema.required = required;
  // Fields clients may never supply on create: timestamps plus configured
  // omissions and anything marked systemManaged.
  const fieldsToOmit = /* @__PURE__ */ new Set(["createdAt", "updatedAt", "__v"]);
  (options?.create?.omitFields || []).forEach((f) => fieldsToOmit.add(f));
  const fieldRules = options?.fieldRules || {};
  Object.entries(fieldRules).forEach(([field, rules]) => {
    if (rules.systemManaged) {
      fieldsToOmit.add(field);
    }
  });
  fieldsToOmit.forEach((field) => {
    if (schema.properties?.[field]) {
      delete schema.properties[field];
    }
    if (schema.required) {
      schema.required = schema.required.filter((k) => k !== field);
    }
  });
  // Required/optional overrides: required first, then optional, so an
  // optionalOverride wins when both name the same field.
  const reqOv = options?.create?.requiredOverrides || {};
  const optOv = options?.create?.optionalOverrides || {};
  schema.required = schema.required || [];
  for (const [k, v] of Object.entries(reqOv)) {
    if (v && !schema.required.includes(k)) schema.required.push(k);
  }
  for (const [k, v] of Object.entries(optOv)) {
    if (v && schema.required) schema.required = schema.required.filter((x) => x !== k);
  }
  // fieldRules.optional is applied last and also demotes required fields.
  Object.entries(fieldRules).forEach(([field, rules]) => {
    if (rules.optional && schema.required) {
      schema.required = schema.required.filter((x) => x !== field);
    }
  });
  // Whole-property schema replacements (only for fields that survived omission).
  const schemaOverrides = options?.create?.schemaOverrides || {};
  for (const [k, override] of Object.entries(schemaOverrides)) {
    if (schema.properties?.[k]) {
      schema.properties[k] = override;
    }
  }
  if (options?.strictAdditionalProperties === true) {
    schema.additionalProperties = false;
  }
  return schema;
}
192
/**
 * Build an object JSON schema for a root field from its dotted sub-paths.
 *
 * Paths exactly one level deep become direct properties; deeper paths are
 * kept as dotted keys inside a one-level nested object (matching how the
 * paths were flattened).
 *
 * @param {Array<{path: string, schemaType: object}>} fieldPaths
 * @param {string} rootField - Common prefix of every path.
 * @returns {{schema: object, required: boolean}} Schema plus a flag telling
 *   the caller whether any sub-field was required.
 */
function buildNestedJsonSchema(fieldPaths, rootField) {
  const properties = {};
  const required = [];
  let anyRequired = false;
  for (const { path, schemaType } of fieldPaths) {
    const relativePath = path.substring(rootField.length + 1);
    const [head, ...rest] = relativePath.split(".");
    if (rest.length === 0) {
      properties[head] = schemaTypeToJsonSchema(schemaType);
      if (schemaType.isRequired) {
        required.push(head);
        anyRequired = true;
      }
    } else {
      // Deeper nesting: keep the remainder as a dotted key one level down.
      if (!properties[head]) {
        properties[head] = { type: "object", properties: {} };
      }
      const container = properties[head];
      if (!container.properties) container.properties = {};
      container.properties[rest.join(".")] = schemaTypeToJsonSchema(schemaType);
    }
  }
  const schema = { type: "object", properties };
  if (required.length) schema.required = required;
  return { schema, required: anyRequired };
}
220
/**
 * Convert a single mongoose SchemaType into an equivalent JSON-schema
 * fragment. Unknown instances fall back to an open object.
 *
 * @param {object} schemaType - Mongoose SchemaType (reads `instance`, `options`).
 * @returns {object} JSON-schema fragment.
 */
function schemaTypeToJsonSchema(schemaType) {
  const opts = schemaType.options || {};
  switch (schemaType.instance) {
    case "String": {
      const out = { type: "string" };
      if (typeof opts.minlength === "number") out.minLength = opts.minlength;
      if (typeof opts.maxlength === "number") out.maxLength = opts.maxlength;
      if (opts.match instanceof RegExp) out.pattern = opts.match.source;
      if (opts.enum && Array.isArray(opts.enum)) out.enum = opts.enum;
      return out;
    }
    case "Number": {
      const out = { type: "number" };
      if (typeof opts.min === "number") out.minimum = opts.min;
      if (typeof opts.max === "number") out.maximum = opts.max;
      return out;
    }
    case "Boolean":
      return { type: "boolean" };
    case "Date":
      // Dates travel as ISO-8601 strings over JSON.
      return { type: "string", format: "date-time" };
    case "ObjectId":
    case "ObjectID":
      return { type: "string", pattern: "^[0-9a-fA-F]{24}$" };
    case "Array":
      // Item type is not inspected; default to strings.
      return { type: "array", items: { type: "string" } };
    default:
      return { type: "object", additionalProperties: true };
  }
}
251
/**
 * Derive the "update" JSON schema from the create schema: deep-clone it,
 * drop the `required` list (updates are partial), remove immutable and
 * explicitly omitted fields, and apply strictness options.
 *
 * @param {object} createJson - Create-body JSON schema (not mutated).
 * @param {object} options - update.omitFields / fieldRules / flags.
 * @returns {object} JSON schema for the update body.
 */
function buildJsonSchemaForUpdate(createJson, options) {
  // JSON round-trip is a safe deep clone here: schemas are plain JSON data.
  const clone = JSON.parse(JSON.stringify(createJson));
  delete clone.required;
  const omitted = new Set(options?.update?.omitFields || []);
  for (const [field, rules] of Object.entries(options?.fieldRules || {})) {
    if (rules.immutable || rules.immutableAfterCreate) {
      omitted.add(field);
    }
  }
  for (const field of omitted) {
    if (clone.properties?.[field]) {
      delete clone.properties[field];
    }
  }
  if (options?.strictAdditionalProperties === true) {
    clone.additionalProperties = false;
  }
  if (options?.update?.requireAtLeastOne === true) {
    // Reject empty update bodies.
    clone.minProperties = 1;
  }
  return clone;
}
275
/**
 * Build the list-endpoint query-string JSON schema: standard pagination
 * parameters (all strings, as query values arrive) plus any configured
 * filterable fields. `_tree` is accepted for interface compatibility but
 * not inspected.
 *
 * @returns {object} JSON schema for the list query string.
 */
function buildJsonSchemaForQuery(_tree, options) {
  const schema = {
    type: "object",
    properties: {
      page: { type: "string" },
      limit: { type: "string" },
      sort: { type: "string" },
      populate: { type: "string" },
      search: { type: "string" },
      select: { type: "string" },
      lean: { type: "string" },
      includeDeleted: { type: "string" }
    },
    additionalProperties: true
  };
  const filterable = options?.query?.filterableFields || {};
  for (const [field, def] of Object.entries(filterable)) {
    // A value that already looks like a JSON schema is used verbatim;
    // anything else (e.g. `true`) becomes a plain string parameter.
    const isSchema = def && typeof def === "object" && "type" in def;
    schema.properties[field] = isSchema ? def : { type: "string" };
  }
  return schema;
}
298
+
299
+ export { buildCrudSchemasFromModel, buildCrudSchemasFromMongooseSchema, createMemoryCache, getImmutableFields, getSystemManagedFields, isFieldUpdateAllowed, validateUpdateBody };
@@ -0,0 +1,361 @@
1
+ import { createError } from './chunk-VJXDGP3C.js';
2
+ import mongoose from 'mongoose';
3
+
4
/**
 * Encode a keyset-pagination cursor from the last document of a page.
 * The payload records the primary sort value and `_id` (with their types,
 * so they can be rehydrated), the sort spec, and a cursor format version;
 * it is serialized as base64(JSON).
 *
 * @param {object} doc - Last document of the current page.
 * @param {string} primaryField - Primary sort field name.
 * @param {object} sort - Normalized sort spec.
 * @param {number} [version=1] - Cursor format version.
 * @returns {string} Opaque base64 cursor token.
 */
function encodeCursor(doc, primaryField, sort, version = 1) {
  const primary = doc[primaryField];
  const id = doc._id;
  const payload = {
    v: serializeValue(primary),
    t: getValueType(primary),
    id: serializeValue(id),
    idType: getValueType(id),
    sort,
    ver: version
  };
  return Buffer.from(JSON.stringify(payload)).toString("base64");
}
17
/**
 * Decode a cursor token produced by `encodeCursor`.
 * Any failure (bad base64, bad JSON, unrehydratable value) is reported as a
 * single generic error so malformed client tokens never leak internals.
 *
 * @param {string} token - Opaque base64 cursor.
 * @returns {{value: *, id: *, sort: object, version: number}}
 * @throws {Error} "Invalid cursor token" on any decode failure.
 */
function decodeCursor(token) {
  try {
    const payload = JSON.parse(Buffer.from(token, "base64").toString("utf-8"));
    return {
      value: rehydrateValue(payload.v, payload.t),
      id: rehydrateValue(payload.id, payload.idType),
      sort: payload.sort,
      version: payload.ver
    };
  } catch {
    throw new Error("Invalid cursor token");
  }
}
31
/**
 * Ensure the sort embedded in a cursor matches the current query's sort.
 * Compared via JSON serialization, so key order matters — both sides are
 * expected to be normalized (tie-breaker `_id` last).
 *
 * @throws {Error} When the two sort specs differ.
 */
function validateCursorSort(cursorSort, currentSort) {
  if (JSON.stringify(cursorSort) !== JSON.stringify(currentSort)) {
    throw new Error("Cursor sort does not match current query sort");
  }
}
38
/**
 * Ensure a cursor was produced with the expected cursor-format version,
 * rejecting stale cursors after a format change.
 *
 * @throws {Error} When versions differ.
 */
function validateCursorVersion(cursorVersion, expectedVersion) {
  if (cursorVersion === expectedVersion) return;
  throw new Error(`Cursor version ${cursorVersion} does not match expected version ${expectedVersion}`);
}
43
/**
 * Make a cursor value JSON-safe: Dates become ISO strings, ObjectIds become
 * hex strings, everything else passes through unchanged. (The Date check
 * deliberately comes first — it needs no mongoose runtime.)
 */
function serializeValue(value) {
  if (value instanceof Date) {
    return value.toISOString();
  }
  if (value instanceof mongoose.Types.ObjectId) {
    return value.toString();
  }
  return value;
}
48
/**
 * Tag a cursor value with a type name so `rehydrateValue` can restore it
 * after the JSON round-trip. Unrecognized values are tagged "unknown" and
 * pass through untouched.
 *
 * @returns {"date"|"objectid"|"boolean"|"number"|"string"|"unknown"}
 */
function getValueType(value) {
  if (value instanceof Date) return "date";
  if (value instanceof mongoose.Types.ObjectId) return "objectid";
  const primitive = typeof value;
  if (primitive === "boolean" || primitive === "number" || primitive === "string") {
    return primitive;
  }
  return "unknown";
}
56
/**
 * Restore a cursor value from its JSON-safe form using the type tag
 * recorded by `getValueType`. Unknown tags return the value as-is.
 */
function rehydrateValue(serialized, type) {
  if (type === "date") return new Date(serialized);
  if (type === "objectid") return new mongoose.Types.ObjectId(serialized);
  if (type === "boolean") return Boolean(serialized);
  if (type === "number") return Number(serialized);
  return serialized;
}
70
+
71
// src/pagination/utils/sort.ts
/**
 * Canonicalize a sort spec so the `_id` tie-breaker is always the LAST key.
 * Cursor sorts are compared by JSON serialization, so a stable key order is
 * required for cursors to round-trip.
 *
 * @param {object} sort - Sort spec, e.g. `{ createdAt: -1, _id: -1 }`.
 * @returns {object} Same entries, `_id` moved to the end (if present).
 */
function normalizeSort(sort) {
  const canonical = {};
  for (const key of Object.keys(sort)) {
    if (key !== "_id") canonical[key] = sort[key];
  }
  if (sort._id !== void 0) {
    canonical._id = sort._id;
  }
  return canonical;
}
82
/**
 * Validate a sort spec for keyset pagination and return its normalized form.
 *
 * Accepted shapes:
 *   - `{ field: dir }`        → `_id` tie-breaker appended, same direction
 *   - `{ _id: dir }`          → used as-is
 *   - `{ field: d, _id: d }`  → directions must match
 *
 * @throws {Error} For any other shape or mismatched directions.
 */
function validateKeysetSort(sort) {
  const keys = Object.keys(sort);
  if (keys.length === 1) {
    const [field] = keys;
    if (field !== "_id") {
      // Append the tie-breaker in the same direction as the primary field.
      const direction = sort[field];
      return normalizeSort({ [field]: direction, _id: direction });
    }
    return normalizeSort(sort);
  }
  if (keys.length === 2) {
    if (!keys.includes("_id")) {
      throw new Error("Keyset pagination requires _id as tie-breaker");
    }
    const primaryField = keys.find((k) => k !== "_id");
    if (sort[primaryField] !== sort._id) {
      throw new Error("_id direction must match primary field direction");
    }
    return normalizeSort(sort);
  }
  throw new Error("Keyset pagination only supports single field + _id");
}
106
/**
 * Return the primary (non-`_id`) sort field, or `_id` when sorting by
 * `_id` alone.
 */
function getPrimaryField(sort) {
  for (const key of Object.keys(sort)) {
    if (key !== "_id") return key;
  }
  return "_id";
}
110
+
111
// src/pagination/utils/filter.ts
/**
 * Extend base filters with the keyset predicate for "documents after the
 * cursor": primary field strictly past the cursor value, OR equal on the
 * primary field with `_id` past the cursor id (tie-break).
 *
 * NOTE(review): the `$or` key is written by spread, so a pre-existing `$or`
 * in `baseFilters` would be overwritten — presumably callers never pass one;
 * verify at call sites.
 *
 * @returns {object} Mongo filter including the keyset `$or` clause.
 */
function buildKeysetFilter(baseFilters, sort, cursorValue, cursorId) {
  const primary = Object.keys(sort).find((key) => key !== "_id") || "_id";
  const comparator = sort[primary] === 1 ? "$gt" : "$lt";
  const keysetClause = {
    $or: [
      { [primary]: { [comparator]: cursorValue } },
      {
        [primary]: cursorValue,
        _id: { [comparator]: cursorId }
      }
    ]
  };
  return { ...baseFilters, ...keysetClause };
}
127
+
128
// src/pagination/utils/limits.ts
/**
 * Coerce a requested page size into a safe integer: non-numeric or < 1
 * falls back to the configured default (10), otherwise floor and cap at the
 * configured maximum (100).
 */
function validateLimit(limit, config) {
  const requested = Number(limit);
  if (!Number.isFinite(requested) || requested < 1) {
    return config.defaultLimit || 10;
  }
  const floored = Math.floor(requested);
  const ceiling = config.maxLimit || 100;
  return floored < ceiling ? floored : ceiling;
}
136
/**
 * Coerce a requested page number into a safe integer: non-numeric or < 1
 * becomes 1; values beyond the configured maximum (10000) throw rather than
 * allowing unbounded deep skips.
 *
 * @throws {Error} When the page exceeds `config.maxPage`.
 */
function validatePage(page, config) {
  const requested = Number(page);
  if (!Number.isFinite(requested) || requested < 1) {
    return 1;
  }
  const sanitized = Math.floor(requested);
  const ceiling = config.maxPage || 1e4;
  if (sanitized > ceiling) {
    throw new Error(`Page ${sanitized} exceeds maximum ${ceiling}`);
  }
  return sanitized;
}
147
/** True when the requested page is past the deep-pagination warning threshold. */
function shouldWarnDeepPagination(page, threshold) {
  return threshold < page;
}
150
/** Number of documents to skip for a 1-based page at the given page size. */
function calculateSkip(page, limit) {
  return limit * (page - 1);
}
153
/** Total page count for `total` documents at `limit` per page (0 when empty). */
function calculateTotalPages(total, limit) {
  const exact = total / limit;
  return Math.ceil(exact);
}
156
+
157
// src/pagination/PaginationEngine.ts
/**
 * Pagination engine wrapping a mongoose model with three strategies:
 * offset (`paginate`), keyset/cursor (`stream`) and aggregation with $facet
 * (`aggregatePaginate`).
 */
var PaginationEngine = class {
  Model;
  config;
  /**
   * Create a new pagination engine
   *
   * @param Model - Mongoose model to paginate
   * @param config - Pagination configuration
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  constructor(Model, config = {}) {
    this.Model = Model;
    // Fill every knob with a default so later code can read config blindly.
    this.config = {
      defaultLimit: config.defaultLimit || 10,
      maxLimit: config.maxLimit || 100,
      maxPage: config.maxPage || 1e4,
      deepPageThreshold: config.deepPageThreshold || 100,
      cursorVersion: config.cursorVersion || 1,
      useEstimatedCount: config.useEstimatedCount || false
    };
  }
  /**
   * Offset-based pagination using skip/limit
   * Best for small datasets and when users need random page access
   * O(n) performance - slower for deep pages
   *
   * @param options - Pagination options
   * @returns Pagination result with total count
   *
   * @example
   * const result = await engine.paginate({
   *   filters: { status: 'active' },
   *   sort: { createdAt: -1 },
   *   page: 1,
   *   limit: 20
   * });
   * console.log(result.docs, result.total, result.hasNext);
   */
  async paginate(options = {}) {
    const {
      filters = {},
      sort = { _id: -1 },
      page = 1,
      limit = this.config.defaultLimit,
      select,
      populate = [],
      lean = true,
      session
    } = options;
    const sanitizedPage = validatePage(page, this.config);
    const sanitizedLimit = validateLimit(limit, this.config);
    const skip = calculateSkip(sanitizedPage, sanitizedLimit);
    let query = this.Model.find(filters);
    if (select) query = query.select(select);
    // Skip .populate() entirely for an empty array / falsy value.
    if (populate && (Array.isArray(populate) ? populate.length : populate)) {
      query = query.populate(populate);
    }
    query = query.sort(sort).skip(skip).limit(sanitizedLimit).lean(lean);
    if (session) query = query.session(session);
    // estimatedDocumentCount is collection-wide, so it is only valid when
    // there are no filters; otherwise do an exact countDocuments.
    const hasFilters = Object.keys(filters).length > 0;
    const useEstimated = this.config.useEstimatedCount && !hasFilters;
    // Docs and count run in parallel.
    const [docs, total] = await Promise.all([
      query.exec(),
      useEstimated ? this.Model.estimatedDocumentCount() : this.Model.countDocuments(filters).session(session ?? null)
    ]);
    const totalPages = calculateTotalPages(total, sanitizedLimit);
    const warning = shouldWarnDeepPagination(sanitizedPage, this.config.deepPageThreshold) ? `Deep pagination (page ${sanitizedPage}). Consider getAll({ after, sort, limit }) for better performance.` : void 0;
    return {
      method: "offset",
      docs,
      page: sanitizedPage,
      limit: sanitizedLimit,
      total,
      pages: totalPages,
      hasNext: sanitizedPage < totalPages,
      hasPrev: sanitizedPage > 1,
      // Only attach `warning` when one was produced.
      ...warning && { warning }
    };
  }
  /**
   * Keyset (cursor-based) pagination for high-performance streaming
   * Best for large datasets, infinite scroll, real-time feeds
   * O(1) performance - consistent speed regardless of position
   *
   * @param options - Pagination options (sort is required)
   * @returns Pagination result with next cursor
   *
   * @example
   * // First page
   * const page1 = await engine.stream({
   *   sort: { createdAt: -1 },
   *   limit: 20
   * });
   *
   * // Next page using cursor
   * const page2 = await engine.stream({
   *   sort: { createdAt: -1 },
   *   after: page1.next,
   *   limit: 20
   * });
   */
  async stream(options) {
    const {
      filters = {},
      sort,
      after,
      limit = this.config.defaultLimit,
      select,
      populate = [],
      lean = true,
      session
    } = options;
    if (!sort) {
      throw createError(400, "sort is required for keyset pagination");
    }
    const sanitizedLimit = validateLimit(limit, this.config);
    // Normalize/validate the sort (single field + matching _id tie-breaker).
    const normalizedSort = validateKeysetSort(sort);
    let query = { ...filters };
    if (after) {
      // Resume after the cursor: check version and sort compatibility,
      // then add the keyset $or predicate to the filter.
      const cursor = decodeCursor(after);
      validateCursorVersion(cursor.version, this.config.cursorVersion);
      validateCursorSort(cursor.sort, normalizedSort);
      query = buildKeysetFilter(query, normalizedSort, cursor.value, cursor.id);
    }
    let mongoQuery = this.Model.find(query);
    if (select) mongoQuery = mongoQuery.select(select);
    if (populate && (Array.isArray(populate) ? populate.length : populate)) {
      mongoQuery = mongoQuery.populate(populate);
    }
    // Fetch one extra document to detect whether another page exists.
    mongoQuery = mongoQuery.sort(normalizedSort).limit(sanitizedLimit + 1).lean(lean);
    if (session) mongoQuery = mongoQuery.session(session);
    const docs = await mongoQuery.exec();
    const hasMore = docs.length > sanitizedLimit;
    // Drop the sentinel document before returning the page.
    if (hasMore) docs.pop();
    const primaryField = getPrimaryField(normalizedSort);
    // Encode the next cursor from the last returned document (null on last page).
    const nextCursor = hasMore && docs.length > 0 ? encodeCursor(docs[docs.length - 1], primaryField, normalizedSort, this.config.cursorVersion) : null;
    return {
      method: "keyset",
      docs,
      limit: sanitizedLimit,
      hasMore,
      next: nextCursor
    };
  }
  /**
   * Aggregate pipeline with pagination
   * Best for complex queries requiring aggregation stages
   * Uses $facet to combine results and count in single query
   *
   * @param options - Aggregation options
   * @returns Pagination result with total count
   *
   * @example
   * const result = await engine.aggregatePaginate({
   *   pipeline: [
   *     { $match: { status: 'active' } },
   *     { $group: { _id: '$category', count: { $sum: 1 } } },
   *     { $sort: { count: -1 } }
   *   ],
   *   page: 1,
   *   limit: 20
   * });
   */
  async aggregatePaginate(options = {}) {
    const {
      pipeline = [],
      page = 1,
      limit = this.config.defaultLimit,
      session
    } = options;
    const sanitizedPage = validatePage(page, this.config);
    const sanitizedLimit = validateLimit(limit, this.config);
    const skip = calculateSkip(sanitizedPage, sanitizedLimit);
    // $facet runs the page slice and the count over the same upstream
    // pipeline in a single server round-trip.
    const facetPipeline = [
      ...pipeline,
      {
        $facet: {
          docs: [{ $skip: skip }, { $limit: sanitizedLimit }],
          total: [{ $count: "count" }]
        }
      }
    ];
    const aggregation = this.Model.aggregate(facetPipeline);
    if (session) aggregation.session(session);
    const [result] = await aggregation.exec();
    const docs = result.docs;
    // $count emits no document for an empty result set; default to 0.
    const total = result.total[0]?.count || 0;
    const totalPages = calculateTotalPages(total, sanitizedLimit);
    const warning = shouldWarnDeepPagination(sanitizedPage, this.config.deepPageThreshold) ? `Deep pagination in aggregate (page ${sanitizedPage}). Uses $skip internally.` : void 0;
    return {
      method: "aggregate",
      docs,
      page: sanitizedPage,
      limit: sanitizedLimit,
      total,
      pages: totalPages,
      hasNext: sanitizedPage < totalPages,
      hasPrev: sanitizedPage > 1,
      ...warning && { warning }
    };
  }
};
360
+
361
+ export { PaginationEngine };