@classytic/mongokit 3.2.0 → 3.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +470 -193
- package/dist/actions/index.d.mts +9 -0
- package/dist/actions/index.mjs +15 -0
- package/dist/aggregate-BAi4Do-X.mjs +767 -0
- package/dist/aggregate-CCHI7F51.d.mts +269 -0
- package/dist/ai/index.d.mts +125 -0
- package/dist/ai/index.mjs +203 -0
- package/dist/cache-keys-C8Z9B5sw.mjs +204 -0
- package/dist/chunk-DQk6qfdC.mjs +18 -0
- package/dist/create-BuO6xt0v.mjs +55 -0
- package/dist/custom-id.plugin-B_zIs6gE.mjs +1818 -0
- package/dist/custom-id.plugin-BzZI4gnE.d.mts +893 -0
- package/dist/index.d.mts +1012 -0
- package/dist/index.mjs +1906 -0
- package/dist/limits-DsNeCx4D.mjs +299 -0
- package/dist/logger-D8ily-PP.mjs +51 -0
- package/dist/mongooseToJsonSchema-COdDEkIJ.mjs +317 -0
- package/dist/{mongooseToJsonSchema-CaRF_bCN.d.ts → mongooseToJsonSchema-Wbvjfwkn.d.mts} +16 -89
- package/dist/pagination/PaginationEngine.d.mts +93 -0
- package/dist/pagination/PaginationEngine.mjs +196 -0
- package/dist/plugins/index.d.mts +3 -0
- package/dist/plugins/index.mjs +3 -0
- package/dist/types-D-gploPr.d.mts +1241 -0
- package/dist/utils/{index.d.ts → index.d.mts} +14 -21
- package/dist/utils/index.mjs +5 -0
- package/package.json +21 -21
- package/dist/actions/index.d.ts +0 -3
- package/dist/actions/index.js +0 -5
- package/dist/ai/index.d.ts +0 -175
- package/dist/ai/index.js +0 -206
- package/dist/chunks/chunk-2ZN65ZOP.js +0 -93
- package/dist/chunks/chunk-44KXLGPO.js +0 -388
- package/dist/chunks/chunk-DEVXDBRL.js +0 -1226
- package/dist/chunks/chunk-I7CWNAJB.js +0 -46
- package/dist/chunks/chunk-JWUAVZ3L.js +0 -8
- package/dist/chunks/chunk-UE2IEXZJ.js +0 -306
- package/dist/chunks/chunk-URLJFIR7.js +0 -22
- package/dist/chunks/chunk-VWKIKZYF.js +0 -737
- package/dist/chunks/chunk-WSFCRVEQ.js +0 -7
- package/dist/index-BDn5fSTE.d.ts +0 -516
- package/dist/index.d.ts +0 -1422
- package/dist/index.js +0 -1893
- package/dist/pagination/PaginationEngine.d.ts +0 -117
- package/dist/pagination/PaginationEngine.js +0 -3
- package/dist/plugins/index.d.ts +0 -922
- package/dist/plugins/index.js +0 -6
- package/dist/types-Jni1KgkP.d.ts +0 -780
- package/dist/utils/index.js +0 -5
|
@@ -0,0 +1,299 @@
|
|
|
1
|
+
import mongoose from "mongoose";
|
|
2
|
+
|
|
3
|
+
//#region src/pagination/utils/cursor.ts
|
|
4
|
+
/**
|
|
5
|
+
* Cursor Utilities
|
|
6
|
+
*
|
|
7
|
+
* Encoding and decoding of cursor tokens for keyset pagination.
|
|
8
|
+
* Cursors are base64-encoded JSON containing position data and metadata.
|
|
9
|
+
*/
|
|
10
|
+
/**
 * Builds an opaque base64 cursor token capturing a document's position
 * in a keyset-paginated result set.
 *
 * @param doc - Document to take the cursor position from
 * @param primaryField - Name of the primary sort field
 * @param sort - Normalized sort specification to embed in the token
 * @param version - Cursor format version for forward compatibility (default 1)
 * @returns Base64-encoded JSON cursor token
 */
function encodeCursor(doc, primaryField, sort, version = 1) {
  const primary = doc[primaryField];
  const docId = doc._id;
  // Values are serialized alongside their type tags so decodeCursor can
  // rebuild Dates/ObjectIds on the other side.
  const json = JSON.stringify({
    v: serializeValue(primary),
    t: getValueType(primary),
    id: serializeValue(docId),
    idType: getValueType(docId),
    sort,
    ver: version
  });
  return Buffer.from(json).toString("base64");
}
|
|
32
|
+
/**
 * Parses and validates a base64 cursor token produced by encodeCursor.
 *
 * @param token - Base64-encoded cursor token
 * @returns Object with rehydrated `value` and `id`, plus `sort` and `version`
 * @throws Error when the token is not valid base64/JSON, is structurally
 *   malformed, carries an unknown type tag, or its values cannot be rebuilt
 */
function decodeCursor(token) {
  let raw;
  try {
    raw = Buffer.from(token, "base64").toString("utf-8");
  } catch {
    throw new Error("Invalid cursor token: not valid base64");
  }
  let payload;
  try {
    payload = JSON.parse(raw);
  } catch {
    throw new Error("Invalid cursor token: not valid JSON");
  }
  // Structural validation: must be an object carrying all four value slots,
  // an object-valued sort, and a numeric version.
  const wellFormed =
    payload &&
    typeof payload === "object" &&
    ["v", "t", "id", "idType"].every((k) => k in payload) &&
    payload.sort &&
    typeof payload.sort === "object" &&
    typeof payload.ver === "number";
  if (!wellFormed) throw new Error("Invalid cursor token: malformed payload structure");
  const VALID_TYPES = new Set([
    "date",
    "objectid",
    "boolean",
    "number",
    "string",
    "null",
    "unknown"
  ]);
  if (!VALID_TYPES.has(payload.t) || !VALID_TYPES.has(payload.idType)) throw new Error("Invalid cursor token: unrecognized value type");
  try {
    return {
      value: rehydrateValue(payload.v, payload.t),
      id: rehydrateValue(payload.id, payload.idType),
      sort: payload.sort,
      version: payload.ver
    };
  } catch {
    throw new Error("Invalid cursor token: failed to rehydrate values");
  }
}
|
|
74
|
+
/**
 * Ensures the sort embedded in a cursor matches the current query's sort.
 * Compared structurally via JSON serialization (key order matters, which is
 * why sorts are normalized before encoding).
 *
 * @param cursorSort - Sort specification taken from the cursor
 * @param currentSort - Sort specification of the current query
 * @throws Error if the two sorts differ
 */
function validateCursorSort(cursorSort, currentSort) {
  const fromCursor = JSON.stringify(cursorSort);
  const fromQuery = JSON.stringify(currentSort);
  if (fromCursor !== fromQuery) throw new Error("Cursor sort does not match current query sort");
}
|
|
84
|
+
/**
 * Ensures a cursor's format version matches the version this configuration
 * expects, so stale tokens from older deployments are rejected.
 *
 * @param cursorVersion - Version carried by the cursor
 * @param expectedVersion - Version configured for this paginator
 * @throws Error if the versions differ
 */
function validateCursorVersion(cursorVersion, expectedVersion) {
  if (cursorVersion === expectedVersion) return;
  throw new Error(`Cursor version ${cursorVersion} does not match expected version ${expectedVersion}`);
}
|
|
94
|
+
/**
 * Converts a cursor value into a JSON-safe representation:
 * Dates become ISO strings, ObjectIds become hex strings,
 * null/undefined collapse to null, everything else passes through.
 */
function serializeValue(value) {
  if (value == null) return null;
  if (value instanceof Date) return value.toISOString();
  if (value instanceof mongoose.Types.ObjectId) return value.toString();
  return value;
}
|
|
103
|
+
/**
 * Classifies a cursor value into one of the type tags understood by
 * rehydrateValue: null, date, objectid, boolean, number, string, unknown.
 */
function getValueType(value) {
  if (value == null) return "null";
  if (value instanceof Date) return "date";
  if (value instanceof mongoose.Types.ObjectId) return "objectid";
  const primitive = typeof value;
  if (primitive === "boolean" || primitive === "number" || primitive === "string") return primitive;
  return "unknown";
}
|
|
115
|
+
/**
 * Restores a serialized cursor value to its original runtime type based on
 * the type tag recorded by getValueType. Unknown tags pass through as-is.
 */
function rehydrateValue(serialized, type) {
  if (serialized === null || type === "null") return null;
  if (type === "date") return new Date(serialized);
  if (type === "objectid") return new mongoose.Types.ObjectId(serialized);
  if (type === "boolean") return Boolean(serialized);
  if (type === "number") return Number(serialized);
  return serialized;
}
|
|
128
|
+
|
|
129
|
+
//#endregion
|
|
130
|
+
//#region src/pagination/utils/sort.ts
|
|
131
|
+
/**
 * Produces a sort object with a stable key order: every non-_id field in
 * its original relative order first, then _id last (not alphabetical).
 * Stable ordering matters because cursors compare sorts via JSON.stringify.
 *
 * @param sort - Sort specification
 * @returns New sort object with deterministic key order
 */
function normalizeSort(sort) {
  const ordered = {};
  for (const [field, direction] of Object.entries(sort)) {
    if (field !== "_id") ordered[field] = direction;
  }
  if (sort._id !== undefined) ordered._id = sort._id;
  return ordered;
}
|
|
146
|
+
/**
 * Validates a sort specification for keyset pagination and returns it in
 * normalized form. A lone non-_id field gets an _id tie-breaker appended in
 * the same direction; a two-field sort must contain _id with a direction
 * matching the primary field.
 *
 * @param sort - Sort specification
 * @returns Validated, normalized sort
 * @throws Error if the sort cannot be used for keyset pagination
 */
function validateKeysetSort(sort) {
  const keys = Object.keys(sort);
  if (keys.length === 1) {
    const [field] = keys;
    if (field === "_id") return normalizeSort(sort);
    // Auto-append the _id tie-breaker, matching the primary direction.
    return normalizeSort({ [field]: sort[field], _id: sort[field] });
  }
  if (keys.length === 2) {
    if (!keys.includes("_id")) throw new Error("Keyset pagination requires _id as tie-breaker");
    const primary = keys.find((k) => k !== "_id");
    if (sort[primary] !== sort._id) throw new Error("_id direction must match primary field direction");
    return normalizeSort(sort);
  }
  throw new Error("Keyset pagination only supports single field + _id");
}
|
|
173
|
+
/**
 * Returns the primary sort field: the first key that is not _id,
 * falling back to "_id" for an _id-only (or empty) sort.
 *
 * @param sort - Sort specification
 * @returns Primary field name
 */
function getPrimaryField(sort) {
  for (const key of Object.keys(sort)) {
    if (key !== "_id") return key;
  }
  return "_id";
}
|
|
182
|
+
|
|
183
|
+
//#endregion
|
|
184
|
+
//#region src/pagination/utils/filter.ts
|
|
185
|
+
/**
 * Builds the MongoDB filter that resumes a keyset-paginated query after a
 * given cursor position. The general case is a compound $or: strictly past
 * the primary value, or equal primary value with _id strictly past the
 * cursor _id. A null cursor value gets special handling because MongoDB
 * sorts nulls first ascending / last descending.
 *
 * @param baseFilters - Existing query filters (spread into the result)
 * @param sort - Normalized sort specification
 * @param cursorValue - Primary field value from the cursor
 * @param cursorId - _id value from the cursor
 * @returns MongoDB filter object with the keyset condition applied
 *
 * @example
 * buildKeysetFilter({ status: 'active' }, { createdAt: -1, _id: -1 }, d, oid)
 * // => { status: 'active',
 * //      $or: [{ createdAt: { $lt: d } },
 * //            { createdAt: d, _id: { $lt: oid } }] }
 */
function buildKeysetFilter(baseFilters, sort, cursorValue, cursorId) {
  const nonIdKeys = Object.keys(sort).filter((k) => k !== "_id");
  const primaryField = nonIdKeys[0] || "_id";
  const ascending = sort[primaryField] === 1;
  const cmp = ascending ? "$gt" : "$lt";
  if (cursorValue == null) {
    if (ascending) {
      // Nulls sort first ascending: finish remaining nulls by _id,
      // then everything non-null.
      return {
        ...baseFilters,
        $or: [
          { [primaryField]: null, _id: { $gt: cursorId } },
          { [primaryField]: { $ne: null } }
        ]
      };
    }
    // Nulls sort last descending: only nulls with a smaller _id remain.
    return {
      ...baseFilters,
      [primaryField]: null,
      _id: { $lt: cursorId }
    };
  }
  return {
    ...baseFilters,
    $or: [
      { [primaryField]: { [cmp]: cursorValue } },
      { [primaryField]: cursorValue, _id: { [cmp]: cursorId } }
    ]
  };
}
|
|
235
|
+
|
|
236
|
+
//#endregion
|
|
237
|
+
//#region src/pagination/utils/limits.ts
|
|
238
|
+
/**
 * Sanitizes a requested page-size limit. Strings are parsed; anything
 * non-finite or below 1 falls back to the configured default; valid values
 * are floored and capped at the configured maximum.
 *
 * @param limit - Requested limit (number or numeric string)
 * @param config - Pagination configuration (defaultLimit, maxLimit)
 * @returns Integer limit between 1 and maxLimit
 */
function validateLimit(limit, config) {
  const requested = Number(limit);
  if (!Number.isFinite(requested) || requested < 1) {
    return config.defaultLimit || 10;
  }
  const ceiling = config.maxLimit || 100;
  return Math.min(Math.floor(requested), ceiling);
}
|
|
251
|
+
/**
 * Sanitizes a requested page number. Strings are parsed; anything
 * non-finite or below 1 becomes page 1; values above the configured
 * ceiling are rejected rather than clamped.
 *
 * @param page - Requested page, 1-indexed (number or numeric string)
 * @param config - Pagination configuration (maxPage, default 10000)
 * @returns Integer page number >= 1
 * @throws Error if the page exceeds maxPage
 */
function validatePage(page, config) {
  const requested = Number(page);
  if (!Number.isFinite(requested) || requested < 1) return 1;
  const wanted = Math.floor(requested);
  const ceiling = config.maxPage || 1e4;
  if (wanted > ceiling) throw new Error(`Page ${wanted} exceeds maximum ${ceiling}`);
  return wanted;
}
|
|
267
|
+
/**
 * Decides whether the current page is deep enough to warrant a
 * deep-pagination performance warning.
 *
 * @param page - Current page number
 * @param threshold - Page count beyond which the warning fires
 * @returns True when page is strictly beyond the threshold
 */
function shouldWarnDeepPagination(page, threshold) {
  return threshold < page;
}
|
|
277
|
+
/**
 * Computes the document offset for offset-based pagination.
 *
 * @param page - Page number (1-indexed)
 * @param limit - Documents per page
 * @returns Number of documents to skip before the requested page
 */
function calculateSkip(page, limit) {
  const pagesBefore = page - 1;
  return pagesBefore * limit;
}
|
|
287
|
+
/**
 * Computes how many pages a result set spans (last partial page counts
 * as a full page).
 *
 * @param total - Total matching document count
 * @param limit - Documents per page
 * @returns Total page count
 */
function calculateTotalPages(total, limit) {
  const exact = total / limit;
  return Math.ceil(exact);
}
|
|
297
|
+
|
|
298
|
+
//#endregion
|
|
299
|
+
export { validatePage as a, validateKeysetSort as c, validateCursorSort as d, validateCursorVersion as f, validateLimit as i, decodeCursor as l, calculateTotalPages as n, buildKeysetFilter as o, shouldWarnDeepPagination as r, getPrimaryField as s, calculateSkip as t, encodeCursor as u };
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
//#region src/utils/error.ts
|
|
2
|
+
/**
 * Creates an Error carrying an HTTP status code on its `status` property,
 * for HTTP-framework error middleware to pick up.
 *
 * @param status - HTTP status code
 * @param message - Error message
 * @returns Error instance with a `status` property
 *
 * @example
 * throw createError(404, 'Document not found');
 * throw createError(403, 'Access denied');
 */
function createError(status, message) {
  return Object.assign(new Error(message), { status });
}
|
|
19
|
+
|
|
20
|
+
//#endregion
|
|
21
|
+
//#region src/utils/logger.ts
|
|
22
|
+
// Shared no-op sink used to silence a log channel.
const noop = () => {};
// Active logger implementation; swapped/merged by configureLogger().
// Defaults: warnings go to console.warn (bound so `this` stays console),
// debug output is suppressed until a debug handler is configured.
let current = {
  warn: console.warn.bind(console),
  debug: noop
};
|
|
27
|
+
/**
 * Configure the internal logger.
 * Pass `false` to silence all output; otherwise the supplied handlers are
 * merged over the current ones (channels not provided keep their handlers).
 */
function configureLogger(config) {
  if (config === false) {
    current = { warn: noop, debug: noop };
    return;
  }
  current = { ...current, ...config };
}
|
|
41
|
+
/**
 * Emit a warning — security blocks, config issues, performance hints.
 * Delegates through the mutable `current` logger so configureLogger()
 * swaps take effect immediately for all callers.
 */
function warn(message, ...args) {
  current.warn(message, ...args);
}
|
|
45
|
+
/**
 * Emit debug info — only visible when a debug handler has been configured
 * (the default handler is a no-op). Delegates through the mutable
 * `current` logger so configureLogger() swaps take effect immediately.
 */
function debug(message, ...args) {
  current.debug(message, ...args);
}
|
|
49
|
+
|
|
50
|
+
//#endregion
|
|
51
|
+
export { createError as i, debug as n, warn as r, configureLogger as t };
|
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
import mongoose, { Schema } from "mongoose";
|
|
2
|
+
|
|
3
|
+
//#region src/utils/memory-cache.ts
|
|
4
|
+
/**
 * Creates an async in-memory cache adapter.
 *
 * Features:
 * - Per-entry TTL expiration (lazy, checked on read plus periodic sweeps)
 * - LRU-style eviction once the entry cap is reached (reads refresh recency)
 * - Pattern-based clearing with simple glob wildcards (* and ?)
 *
 * @param maxEntries - Maximum cache entries before the oldest are evicted (default 1000)
 * @returns Adapter exposing async get/set/del/clear
 */
function createMemoryCache(maxEntries = 1e3) {
  const store = new Map();
  const SWEEP_INTERVAL_MS = 6e4;
  let lastSweep = Date.now();
  // Drop expired entries, at most once per sweep interval.
  const sweepExpired = () => {
    const now = Date.now();
    if (now - lastSweep < SWEEP_INTERVAL_MS) return;
    lastSweep = now;
    for (const [key, entry] of store) {
      if (entry.expiresAt < now) store.delete(key);
    }
  };
  // Remove least-recently-used entries (Map iteration order) until under the cap.
  const dropOldest = () => {
    while (store.size >= maxEntries) {
      const oldest = store.keys().next().value;
      if (!oldest) break;
      store.delete(oldest);
    }
  };
  return {
    async get(key) {
      const entry = store.get(key);
      if (!entry) return null;
      if (entry.expiresAt < Date.now()) {
        store.delete(key);
        return null;
      }
      // Delete + re-insert moves the key to the back of iteration order,
      // marking it most recently used.
      store.delete(key);
      store.set(key, entry);
      return entry.value;
    },
    async set(key, value, ttl) {
      store.delete(key);
      if (store.size >= maxEntries) {
        sweepExpired();
        dropOldest();
      }
      store.set(key, {
        value,
        expiresAt: Date.now() + ttl * 1e3
      });
    },
    async del(key) {
      store.delete(key);
    },
    async clear(pattern) {
      if (!pattern) {
        store.clear();
        return;
      }
      // Escape regex metacharacters, then translate glob wildcards.
      const source = pattern
        .replace(/[.+^${}()|[\]\\]/g, "\\$&")
        .replace(/\*/g, ".*")
        .replace(/\?/g, ".");
      const matcher = new RegExp(`^${source}$`);
      for (const key of [...store.keys()]) {
        if (matcher.test(key)) store.delete(key);
      }
    }
  };
}
|
|
68
|
+
|
|
69
|
+
//#endregion
|
|
70
|
+
//#region src/utils/mongooseToJsonSchema.ts
|
|
71
|
+
/**
 * Derives the full set of CRUD JSON schemas (create/update bodies, id
 * params, list query) from a Mongoose schema.
 *
 * @param mongooseSchema - Source Mongoose schema
 * @param options - Field rules and per-operation overrides
 * @returns { createBody, updateBody, params, listQuery } JSON schemas
 */
function buildCrudSchemasFromMongooseSchema(mongooseSchema, options = {}) {
  const createBody = buildJsonSchemaFromPaths(mongooseSchema, options);
  // :id route params are always a 24-hex-char ObjectId string.
  const params = {
    type: "object",
    properties: {
      id: {
        type: "string",
        pattern: "^[0-9a-fA-F]{24}$"
      }
    },
    required: ["id"]
  };
  return {
    createBody,
    updateBody: buildJsonSchemaForUpdate(createBody, options),
    params,
    listQuery: buildJsonSchemaForQuery(mongooseSchema?.obj || {}, options)
  };
}
|
|
90
|
+
/**
 * Derives CRUD JSON schemas from a Mongoose model by delegating to its
 * underlying schema.
 *
 * @param mongooseModel - Source Mongoose model
 * @param options - Field rules and per-operation overrides
 * @throws Error if the argument is not a model with a schema
 */
function buildCrudSchemasFromModel(mongooseModel, options = {}) {
  if (!mongooseModel?.schema) throw new Error("Invalid mongoose model");
  return buildCrudSchemasFromMongooseSchema(mongooseModel.schema, options);
}
|
|
97
|
+
/**
 * Collects field names that may never be updated: fields whose rules mark
 * them immutable (or immutable after create), plus any update omitFields.
 * Order is preserved; duplicates are not added.
 *
 * @param options - Schema options with fieldRules / update.omitFields
 * @returns Array of immutable field names
 */
function getImmutableFields(options = {}) {
  const immutable = [];
  for (const [field, rules] of Object.entries(options?.fieldRules || {})) {
    if (rules.immutable || rules.immutableAfterCreate) immutable.push(field);
  }
  for (const field of options?.update?.omitFields || []) {
    if (!immutable.includes(field)) immutable.push(field);
  }
  return immutable;
}
|
|
111
|
+
/**
 * Collects field names whose rules mark them system-managed, i.e. set by
 * the system and never accepted from user input.
 *
 * @param options - Schema options with fieldRules
 * @returns Array of system-managed field names
 */
function getSystemManagedFields(options = {}) {
  const managed = [];
  for (const [field, rules] of Object.entries(options?.fieldRules || {})) {
    if (rules.systemManaged) managed.push(field);
  }
  return managed;
}
|
|
122
|
+
/**
 * Checks whether a field may appear in an update body: it must be neither
 * immutable nor system-managed.
 *
 * @param fieldName - Field to check
 * @param options - Schema options with fieldRules / update.omitFields
 * @returns True when the field is updatable
 */
function isFieldUpdateAllowed(fieldName, options = {}) {
  if (getImmutableFields(options).includes(fieldName)) return false;
  return !getSystemManagedFields(options).includes(fieldName);
}
|
|
130
|
+
/**
 * Checks an update body against the configured field rules, reporting one
 * violation per offending key (immutable takes precedence over
 * system-managed when a field is both).
 *
 * @param body - Proposed update payload
 * @param options - Schema options with fieldRules / update.omitFields
 * @returns { valid, violations } where violations is [{ field, reason }]
 */
function validateUpdateBody(body = {}, options = {}) {
  const immutable = new Set(getImmutableFields(options));
  const systemManaged = new Set(getSystemManagedFields(options));
  const violations = [];
  for (const field of Object.keys(body)) {
    if (immutable.has(field)) {
      violations.push({
        field,
        reason: "Field is immutable"
      });
    } else if (systemManaged.has(field)) {
      violations.push({
        field,
        reason: "Field is system-managed"
      });
    }
  }
  return {
    valid: violations.length === 0,
    violations
  };
}
|
|
152
|
+
/**
 * Build a JSON Schema for create bodies from Mongoose `schema.paths`
 * (which carries accurate type information, unlike `schema.obj`).
 *
 * Pipeline: group paths by root field, convert each to JSON Schema (nested
 * dot-paths become nested objects), then apply options in order:
 * default omissions (createdAt/updatedAt/__v) + create.omitFields +
 * systemManaged rules; create.requiredOverrides / optionalOverrides;
 * fieldRules.optional; create.schemaOverrides; strictAdditionalProperties.
 *
 * @param mongooseSchema - Mongoose schema whose `paths` are read
 * @param options - Field rules and create-operation overrides
 * @returns JSON Schema object for the create body
 */
function buildJsonSchemaFromPaths(mongooseSchema, options) {
  const properties = {};
  const required = [];
  const paths = mongooseSchema.paths;
  // Group dot-notation paths ("profile.name") under their root field.
  const rootFields = /* @__PURE__ */ new Map();
  for (const [path, schemaType] of Object.entries(paths)) {
    if (path === "_id" || path === "__v") continue;
    const rootField = path.split(".")[0];
    if (!rootFields.has(rootField)) rootFields.set(rootField, []);
    rootFields.get(rootField).push({
      path,
      schemaType
    });
  }
  // A root with a single path equal to itself is a flat field; anything
  // else is a nested object built from its dot-paths.
  for (const [rootField, fieldPaths] of rootFields.entries()) if (fieldPaths.length === 1 && fieldPaths[0].path === rootField) {
    const schemaType = fieldPaths[0].schemaType;
    properties[rootField] = schemaTypeToJsonSchema(schemaType);
    if (schemaType.isRequired) required.push(rootField);
  } else {
    const nestedSchema = buildNestedJsonSchema(fieldPaths, rootField);
    properties[rootField] = nestedSchema.schema;
    if (nestedSchema.required) required.push(rootField);
  }
  const schema = {
    type: "object",
    properties
  };
  if (required.length) schema.required = required;
  // Timestamps and the version key are always system-managed on create.
  const fieldsToOmit = new Set([
    "createdAt",
    "updatedAt",
    "__v"
  ]);
  (options?.create?.omitFields || []).forEach((f) => fieldsToOmit.add(f));
  const fieldRules = options?.fieldRules || {};
  Object.entries(fieldRules).forEach(([field, rules]) => {
    if (rules.systemManaged) fieldsToOmit.add(field);
  });
  // Omitted fields are removed from both properties and required.
  fieldsToOmit.forEach((field) => {
    if (schema.properties?.[field]) delete schema.properties[field];
    if (schema.required) schema.required = schema.required.filter((k) => k !== field);
  });
  const reqOv = options?.create?.requiredOverrides || {};
  const optOv = options?.create?.optionalOverrides || {};
  schema.required = schema.required || [];
  for (const [k, v] of Object.entries(reqOv)) if (v && !schema.required.includes(k)) schema.required.push(k);
  for (const [k, v] of Object.entries(optOv)) if (v && schema.required) schema.required = schema.required.filter((x) => x !== k);
  // fieldRules.optional also demotes a field from required.
  Object.entries(fieldRules).forEach(([field, rules]) => {
    if (rules.optional && schema.required) schema.required = schema.required.filter((x) => x !== field);
  });
  // schemaOverrides replace an existing property schema wholesale.
  const schemaOverrides = options?.create?.schemaOverrides || {};
  for (const [k, override] of Object.entries(schemaOverrides)) if (schema.properties?.[k]) schema.properties[k] = override;
  if (options?.strictAdditionalProperties === true) schema.additionalProperties = false;
  return schema;
}
|
|
210
|
+
/**
 * Builds a nested JSON Schema object for a root field from its
 * dot-notation Mongoose paths. One level of nesting gets a real property;
 * deeper levels are kept as dot-keyed properties of the first-level object
 * (NOTE(review): depth > 2 is flattened to "rest.of.path" keys — confirm
 * this matches the callers' expectations).
 *
 * @param fieldPaths - [{ path, schemaType }] entries sharing a root field
 * @param rootField - Root field name the paths hang off
 * @returns { schema, required } — required is true when any direct child
 *   path is required
 */
function buildNestedJsonSchema(fieldPaths, rootField) {
  const properties = {};
  const required = [];
  for (const { path, schemaType } of fieldPaths) {
    const segments = path.slice(rootField.length + 1).split(".");
    if (segments.length === 1) {
      const leaf = segments[0];
      properties[leaf] = schemaTypeToJsonSchema(schemaType);
      if (schemaType.isRequired) required.push(leaf);
    } else {
      const [head, ...tail] = segments;
      if (!properties[head]) {
        properties[head] = {
          type: "object",
          properties: {}
        };
      }
      const container = properties[head];
      if (!container.properties) container.properties = {};
      container.properties[tail.join(".")] = schemaTypeToJsonSchema(schemaType);
    }
  }
  const schema = {
    type: "object",
    properties
  };
  if (required.length) schema.required = required;
  return {
    schema,
    required: required.length > 0
  };
}
|
|
247
|
+
/**
 * Maps a Mongoose SchemaType to an equivalent JSON Schema fragment,
 * carrying over the validators expressible in JSON Schema (length/range
 * bounds, regex pattern, enum). Dates become date-time strings, ObjectIds
 * become 24-hex-char strings, arrays default to string items, and any
 * other instance falls back to an open object.
 */
function schemaTypeToJsonSchema(schemaType) {
  const options = schemaType.options || {};
  switch (schemaType.instance) {
    case "String": {
      const out = { type: "string" };
      if (typeof options.minlength === "number") out.minLength = options.minlength;
      if (typeof options.maxlength === "number") out.maxLength = options.maxlength;
      if (options.match instanceof RegExp) out.pattern = options.match.source;
      if (options.enum && Array.isArray(options.enum)) out.enum = options.enum;
      return out;
    }
    case "Number": {
      const out = { type: "number" };
      if (typeof options.min === "number") out.minimum = options.min;
      if (typeof options.max === "number") out.maximum = options.max;
      return out;
    }
    case "Boolean":
      return { type: "boolean" };
    case "Date":
      return {
        type: "string",
        format: "date-time"
      };
    case "ObjectId":
    case "ObjectID":
      return {
        type: "string",
        pattern: "^[0-9a-fA-F]{24}$"
      };
    case "Array":
      return {
        type: "array",
        items: { type: "string" }
      };
    default:
      return {
        type: "object",
        additionalProperties: true
      };
  }
}
|
|
280
|
+
/**
 * Derives the update-body JSON Schema from the create-body schema:
 * deep-copies it (the input is never mutated), drops all `required`
 * constraints, removes update-omitted and immutable fields, and applies
 * strictness / minProperties options.
 *
 * @param createJson - JSON Schema produced for the create body
 * @param options - Field rules and update-operation overrides
 * @returns JSON Schema object for the update body
 */
function buildJsonSchemaForUpdate(createJson, options) {
  // JSON round-trip gives an independent deep copy of a plain JSON schema.
  const clone = JSON.parse(JSON.stringify(createJson));
  delete clone.required;
  const omit = new Set(options?.update?.omitFields || []);
  for (const [field, rules] of Object.entries(options?.fieldRules || {})) {
    if (rules.immutable || rules.immutableAfterCreate) omit.add(field);
  }
  for (const field of omit) {
    if (clone.properties?.[field]) delete clone.properties[field];
  }
  if (options?.strictAdditionalProperties === true) clone.additionalProperties = false;
  if (options?.update?.requireAtLeastOne === true) clone.minProperties = 1;
  return clone;
}
|
|
296
|
+
/**
 * Builds the list-endpoint query-string JSON Schema: the standard
 * pagination parameters (all strings, since query values arrive as
 * strings) plus any configured filterable fields. A filterable entry that
 * is itself a schema object (has `type`) is used verbatim; any other
 * truthy marker defaults to a string property.
 *
 * @param _tree - Unused (kept for interface compatibility)
 * @param options - Options with query.filterableFields
 * @returns JSON Schema object for the list query string
 */
function buildJsonSchemaForQuery(_tree, options) {
  const properties = {
    page: { type: "string" },
    limit: { type: "string" },
    sort: { type: "string" },
    populate: { type: "string" },
    search: { type: "string" },
    select: { type: "string" },
    lean: { type: "string" },
    includeDeleted: { type: "string" }
  };
  const querySchema = {
    type: "object",
    properties,
    additionalProperties: true
  };
  for (const [field, spec] of Object.entries(options?.query?.filterableFields || {})) {
    const isSchemaObject = spec && typeof spec === "object" && "type" in spec;
    properties[field] = isSchemaObject ? spec : { type: "string" };
  }
  return querySchema;
}
|
|
315
|
+
|
|
316
|
+
//#endregion
|
|
317
|
+
export { isFieldUpdateAllowed as a, getSystemManagedFields as i, buildCrudSchemasFromMongooseSchema as n, validateUpdateBody as o, getImmutableFields as r, createMemoryCache as s, buildCrudSchemasFromModel as t };
|