@airoom/nextmin-node 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/apiRouter.d.ts +6 -20
- package/dist/api/apiRouter.js +86 -1476
- package/dist/api/router/ctx.d.ts +25 -0
- package/dist/api/router/ctx.js +2 -0
- package/dist/api/router/mountCrudRoutes.d.ts +2 -0
- package/dist/api/router/mountCrudRoutes.js +754 -0
- package/dist/api/router/mountFindRoutes.d.ts +2 -0
- package/dist/api/router/mountFindRoutes.js +205 -0
- package/dist/api/router/setupAuthRoutes.d.ts +2 -0
- package/dist/api/router/setupAuthRoutes.js +247 -0
- package/dist/api/router/setupFileRoutes.d.ts +2 -0
- package/dist/api/router/setupFileRoutes.js +85 -0
- package/dist/api/router/utils.d.ts +63 -0
- package/dist/api/router/utils.js +247 -0
- package/dist/database/MongoAdapter.d.ts +1 -1
- package/dist/database/MongoAdapter.js +21 -32
- package/dist/schemas/Roles.json +7 -2
- package/dist/utils/DefaultDataInitializer.js +3 -0
- package/dist/utils/SchemaLoader.js +28 -7
- package/package.json +1 -1
package/dist/api/router/utils.js  ADDED

@@ -0,0 +1,247 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.splitCSV = exports.isPlainObject = void 0;
+exports.normalizeAttrType = normalizeAttrType;
+exports.toIdString = toIdString;
+exports.splitFilterForExtended = splitFilterForExtended;
+exports.splitSortForExtended = splitSortForExtended;
+exports.sortInMemory = sortInMemory;
+exports.matchDoc = matchDoc;
+exports.buildPredicateForField = buildPredicateForField;
+exports.parseSort = parseSort;
+exports.parseQuery = parseQuery;
+exports.extractIds = extractIds;
+exports.refInfoFromAttr = refInfoFromAttr;
+const isPlainObject = (v) => !!v && typeof v === "object" && !Array.isArray(v);
+exports.isPlainObject = isPlainObject;
+const splitCSV = (raw) => raw.split(",").map(s => s.trim()).filter(Boolean);
+exports.splitCSV = splitCSV;
+function normalizeAttrType(attr) {
+    const a = Array.isArray(attr) ? attr?.[0] : attr;
+    let t = a?.type ?? a;
+    if (typeof t === "function" && t.name)
+        t = t.name;
+    if (t && typeof t === "object" && "name" in t)
+        t = t.name;
+    if (typeof t === "string")
+        t = t.toLowerCase();
+    if (t === "bool")
+        t = "boolean";
+    if (t === "objectid" || t === "oid" || t === "ref")
+        t = "objectid";
+    return String(t || "");
+}
+function toIdString(v) {
+    if (!v)
+        return null;
+    if (typeof v === "string")
+        return v;
+    if (typeof v === "number")
+        return String(v);
+    if (typeof v === "object") {
+        if (typeof v.id === "string")
+            return v.id;
+        if (v._id && typeof v._id.toString === "function")
+            return v._id.toString();
+        if (typeof v._id === "string")
+            return v._id;
+    }
+    return null;
+}
+function splitFilterForExtended(filter, baseKeys) {
+    const walk = (node) => {
+        if (!(0, exports.isPlainObject)(node))
+            return { child: node, base: {} };
+        const outChild = {};
+        const outBase = {};
+        for (const [k, v] of Object.entries(node)) {
+            if (k === "$and" || k === "$or" || k === "$nor") {
+                if (!Array.isArray(v))
+                    continue;
+                const childArr = [];
+                const baseArr = [];
+                for (const sub of v) {
+                    const split = walk(sub);
+                    if (Object.keys(split.child).length)
+                        childArr.push(split.child);
+                    if (Object.keys(split.base).length)
+                        baseArr.push(split.base);
+                }
+                if (childArr.length)
+                    outChild[k] = childArr;
+                if (baseArr.length)
+                    outBase[k] = baseArr;
+                continue;
+            }
+            if (baseKeys.has(k))
+                outBase[k] = v;
+            else
+                outChild[k] = v;
+        }
+        return { child: outChild, base: outBase };
+    };
+    return walk(filter || {});
+}
+function splitSortForExtended(sort, baseKeys) {
+    const child = {};
+    const base = {};
+    for (const [k, dir] of Object.entries(sort || {})) {
+        if (baseKeys.has(k))
+            base[k] = dir;
+        else
+            child[k] = dir;
+    }
+    return { child, base };
+}
+function sortInMemory(rows, sort) {
+    const keys = Object.keys(sort || {});
+    if (!keys.length)
+        return rows;
+    return [...rows].sort((a, b) => {
+        for (const k of keys) {
+            const dir = sort[k];
+            const av = a?.[k];
+            const bv = b?.[k];
+            const ax = av instanceof Date ? +av : (av ?? "");
+            const bx = bv instanceof Date ? +bv : (bv ?? "");
+            if (ax > bx)
+                return dir;
+            if (ax < bx)
+                return -dir;
+        }
+        return 0;
+    });
+}
+function matchDoc(doc, filter) {
+    const evalNode = (node) => {
+        for (const [k, v] of Object.entries(node)) {
+            if (k === "$and" && Array.isArray(v))
+                return v.every((n) => evalNode(n));
+            if (k === "$or" && Array.isArray(v))
+                return v.some((n) => evalNode(n));
+            if (k === "$nor" && Array.isArray(v))
+                return !v.some((n) => evalNode(n));
+            const dv = doc[k];
+            if ((0, exports.isPlainObject)(v)) {
+                if ("$in" in v && !v.$in.includes(dv))
+                    return false;
+                if ("$gte" in v && !(dv >= v.$gte))
+                    return false;
+                if ("$lte" in v && !(dv <= v.$lte))
+                    return false;
+                if ("$regex" in v) {
+                    const re = new RegExp(v.$regex, v.$options || "");
+                    if (!re.test(String(dv ?? "")))
+                        return false;
+                }
+            }
+            else {
+                if (dv !== v)
+                    return false;
+            }
+        }
+        return true;
+    };
+    return evalNode(filter || {});
+}
+function buildPredicateForField(field, attr, raw) {
+    const isArray = Array.isArray(attr);
+    const base = isArray ? attr[0] : attr;
+    const attrType = normalizeAttrType(base);
+    const tokens = (0, exports.splitCSV)(raw);
+    switch (attrType) {
+        case "string":
+            return isArray ? { [field]: { $in: tokens.length ? tokens : [raw] } } : { [field]: { $regex: raw, $options: "i" } };
+        case "number": {
+            const nums = tokens.map(Number).filter((n) => !Number.isNaN(n));
+            return isArray ? (nums.length ? { [field]: { $in: nums } } : null) : (nums.length ? { [field]: nums[0] } : null);
+        }
+        case "boolean": {
+            const toBool = (t) => /^(true|1|yes)$/i.test(t) ? true : /^(false|0|no)$/i.test(t) ? false : null;
+            if (isArray) {
+                const bools = tokens.map(toBool).filter((v) => v !== null);
+                return bools.length ? { [field]: { $in: bools } } : null;
+            }
+            const b = toBool(raw);
+            return b === null ? null : { [field]: b };
+        }
+        case "objectid":
+            return isArray || tokens.length > 1 ? { [field]: { $in: tokens } } : (raw ? { [field]: raw } : null);
+        case "date": {
+            const toDate = (t) => {
+                const d = new Date(t);
+                return Number.isNaN(+d) ? null : d;
+            };
+            if (isArray) {
+                const ds = tokens.map(toDate).filter((d) => !!d);
+                return ds.length ? { [field]: { $in: ds } } : null;
+            }
+            const d = toDate(raw);
+            return d ? { [field]: d } : null;
+        }
+        default:
+            return isArray ? { [field]: { $in: tokens.length ? tokens : [raw] } } : { [field]: raw };
+    }
+}
+function parseSort(expr) {
+    if (!expr)
+        return;
+    const out = {};
+    for (const raw of expr.split(",").map((s) => s.trim()).filter(Boolean)) {
+        if (raw.startsWith("-"))
+            out[raw.slice(1)] = -1;
+        else if (raw.startsWith("+"))
+            out[raw.slice(1)] = 1;
+        else
+            out[raw] = 1;
+    }
+    return Object.keys(out).length ? out : undefined;
+}
+function parseQuery(req) {
+    const limit = Math.min(parseInt(String(req.query.limit ?? "12"), 10) || 12, 100);
+    const page = Math.max(parseInt(String(req.query.page ?? "1"), 10) || 1, 1);
+    const skip = (page - 1) * limit;
+    const fields = String(req.query.fields ?? "")
+        .split(",")
+        .map((s) => s.trim())
+        .filter(Boolean);
+    const projection = fields.length ? Object.fromEntries(fields.map((f) => [f, 1])) : undefined;
+    const sort = parseSort(String(req.query.sort ?? "-createdAt"));
+    return { limit, page, skip, projection, sort };
+}
+function extractIds(val) {
+    if (val == null)
+        return [];
+    const arr = Array.isArray(val) ? val : [val];
+    const toId = (v) => {
+        if (!v)
+            return null;
+        if (typeof v === "string")
+            return v;
+        if (typeof v === "number")
+            return String(v);
+        if (typeof v === "object") {
+            if (typeof v.id === "string")
+                return v.id;
+            if (v._id && typeof v._id === "string")
+                return v._id;
+            if (v._id && typeof v._id.toString === "function")
+                return v._id.toString();
+        }
+        return null;
+    };
+    return arr.map(toId).filter((s) => !!s);
+}
+function refInfoFromAttr(attr) {
+    if (!attr)
+        return null;
+    if (Array.isArray(attr) && attr[0] && attr[0].ref) {
+        return { ref: String(attr[0].ref), isArray: true };
+    }
+    const a = Array.isArray(attr) ? attr?.[0] : attr;
+    const t = (typeof a?.type === "string" ? a.type : String(a?.type || "")).toLowerCase();
+    if (a?.ref && (t === "objectid" || t === "ref")) {
+        return { ref: String(a.ref), isArray: false };
+    }
+    return null;
+}
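For orientation only, a minimal usage sketch of a few of the helpers above; the require path and sample values are illustrative assumptions, not shipped code.

    // Hypothetical usage of the new router utilities (deep-import path assumed):
    const { parseSort, buildPredicateForField } = require("@airoom/nextmin-node/dist/api/router/utils");

    parseSort("-createdAt,+name");
    // => { createdAt: -1, name: 1 }

    buildPredicateForField("title", { type: "String" }, "foo");
    // => { title: { $regex: "foo", $options: "i" } }   (scalar string => case-insensitive regex)

    buildPredicateForField("tags", [{ type: "String" }], "a, b");
    // => { tags: { $in: ["a", "b"] } }                  (array attribute => $in over CSV tokens)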
package/dist/database/MongoAdapter.d.ts  CHANGED

@@ -48,5 +48,5 @@ export declare class MongoAdapter implements DatabaseAdapter {
      * Only touches indexes named with the prefix below (won't touch user indexes).
      */
     private managedIndexName;
-    syncIndexes(modelName: string,
+    syncIndexes(modelName: string, spec: FieldIndexSpec): Promise<void>;
 }
package/dist/database/MongoAdapter.js  CHANGED

@@ -378,44 +378,33 @@ class MongoAdapter {
     managedIndexName(field) {
         return `nextmin_idx_${field}`;
     }
-    async syncIndexes(modelName,
+    async syncIndexes(modelName, spec) {
         const col = this.getNativeCollectionByModelName(modelName);
-
-
-        const
-
-
-
-
-
-
-
-
-
-        // create / recreate if needed
-        for (const [field, dir] of desiredEntries) {
-            const name = this.managedIndexName(field);
-            const have = managed.get(name);
-            const wantKey = { [field]: dir };
-            if (!have || JSON.stringify(have.key) !== JSON.stringify(wantKey)) {
-                if (have) {
-                    try {
-                        await col.dropIndex(name);
-                    }
-                    catch { }
-                }
-                await col.createIndex(wantKey, { name, background: true });
-            }
-        }
-        // drop stale managed indexes
-        for (const name of managed.keys()) {
-            if (!desiredNames.has(name)) {
+        const existing = await col.indexes(); // [{ name, key, unique, sparse, ... }]
+        // desired => only from 'spec'
+        const desired = Object.entries(spec).map(([field, dir]) => ({
+            name: `${field}_${dir === 1 ? 'asc' : 'desc'}`,
+            key: { [field]: dir },
+        }));
+        const desiredNames = new Set(desired.map((d) => d.name));
+        // drop indexes not in desired (except _id_)
+        for (const idx of existing) {
+            if (idx.name === '_id_')
+                continue;
+            if (!desiredNames.has(idx.name)) {
                 try {
-                    await col.dropIndex(name);
+                    await col.dropIndex(idx.name);
                 }
                 catch { }
             }
         }
+        // create missing
+        const existingNames = new Set((await col.indexes()).map((i) => i.name));
+        for (const d of desired) {
+            if (!existingNames.has(d.name)) {
+                await col.createIndex(d.key, { name: d.name });
+            }
+        }
     }
 }
 exports.MongoAdapter = MongoAdapter;
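Illustrative only: with the rewritten syncIndexes above, a spec such as { email: 1, createdAt: -1 } produces indexes named "email_asc" and "createdAt_desc" and drops any other index on the collection except "_id_". The adapter instance and model name below are hypothetical.

    // "adapter" is assumed to be a connected MongoAdapter; "User" is a made-up model name.
    await adapter.syncIndexes("User", { email: 1, createdAt: -1 });
    // resulting indexes on the collection: _id_, email_asc, createdAt_desc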
package/dist/schemas/Roles.json  CHANGED

@@ -8,6 +8,11 @@
       "enum": ["system", "default", "user"],
       "default": "user",
       "required": true
+    },
+    "isPublic": {
+      "type": "boolean",
+      "default": false,
+      "index": true
     }
   },
 
@@ -20,8 +25,8 @@
 
   "access": {
     "public": {
-      "create":
-      "read":
+      "create": false,
+      "read": true,
       "update": false,
       "delete": false
     },
package/dist/utils/DefaultDataInitializer.js  CHANGED

@@ -12,16 +12,19 @@ const DEFAULT_ROLES = [
         name: 'superadmin',
         description: 'Super administrator with all privileges',
         type: 'system',
+        isPublic: false,
     },
     {
         name: 'admin',
         description: 'Administrator with elevated privileges',
         type: 'system',
+        isPublic: false,
     },
     {
         name: 'user',
         description: 'Regular user with limited privileges',
         type: 'system',
+        isPublic: true,
     },
 ];
 // Keep your env override as requested
package/dist/utils/SchemaLoader.js  CHANGED

@@ -108,7 +108,7 @@ class SchemaLoader {
         try {
             this.loadSchemas();
             Logger_1.default.info('Schemas hot-reloaded successfully!');
-            this.emitter.emit('schemasChanged', this.
+            this.emitter.emit('schemasChanged', this.getSchemas());
         }
         catch (err) {
             Logger_1.default.error(`Error during schema hot reload for ${filePath}:`, err);
@@ -129,23 +129,38 @@ class SchemaLoader {
             const baseSchema = schemas[baseName];
             if (baseSchema) {
                 // Merge attributes and methods for READ surface
-
+                const mergedAttrs = {
                     ...baseSchema.attributes,
                     ...schema.attributes,
                 };
+                for (const key of Object.keys(baseSchema.attributes || {})) {
+                    if (key === 'baseId')
+                        continue;
+                    if (Object.prototype.hasOwnProperty.call(mergedAttrs, key)) {
+                        const val = mergedAttrs[key];
+                        if (Array.isArray(val) && val[0] && typeof val[0] === 'object') {
+                            const { unique: _u, index: _i, ...rest } = val[0];
+                            mergedAttrs[key] = [{ ...rest }];
+                        }
+                        else if (val && typeof val === 'object') {
+                            const { unique: _u, index: _i, ...rest } = val;
+                            mergedAttrs[key] = { ...rest };
+                        }
+                    }
+                }
+                schema.attributes = mergedAttrs;
                 schema.allowedMethods = {
                     ...baseSchema.allowedMethods,
                     ...schema.allowedMethods,
                 };
                 // Inject hidden link field to base (used for storage join)
+                // Ensure hidden link field
                 const linkField = 'baseId';
                 if (!schema.attributes[linkField]) {
                     schema.attributes[linkField] = {
                         type: 'ObjectId',
                         ref: baseName,
                         private: true,
-                        // Not required on create; server auto-fills for extended creates.
-                        // Presence is enforced for existing docs on update.
                     };
                 }
             }
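A rough sketch of what the merge above does for an extended schema; the schema names and fields are made up for illustration:

    // Base "Users" attribute (hypothetical):    email: { type: 'String', unique: true, index: true }
    // Child "Customers" extends "Users" adding: loyaltyTier: { type: 'String' }
    // Merged child attributes after loading:
    // {
    //     email: { type: 'String' },                                 // inherited; unique/index stripped on the child
    //     loyaltyTier: { type: 'String' },
    //     baseId: { type: 'ObjectId', ref: 'Users', private: true }  // injected hidden link field
    // }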
@@ -325,18 +340,24 @@ class SchemaLoader {
         const plan = {};
         for (const [name, s] of Object.entries(this.schemas)) {
             const spec = {};
-
+            const baseName = s?.extends;
+            let baseKeys = null;
+            if (baseName && this.schemas[baseName]) {
+                baseKeys = new Set(Object.keys(this.schemas[baseName].attributes || {}));
+            }
             const attrs = s.attributes || {};
             for (const [field, rawAttr] of Object.entries(attrs)) {
+                // Skip base attributes for child collections (except link 'baseId')
+                if (baseKeys && field !== 'baseId' && baseKeys.has(field))
+                    continue;
                 const attr = Array.isArray(rawAttr) ? rawAttr[0] : rawAttr;
                 if (!attr || typeof attr !== 'object')
                     continue;
-                // allow `index` on any attribute
                 const dir = this.coerceIndexDir(attr.index);
                 if (dir)
                     spec[field] = dir;
             }
-            //
+            // Always add timestamps (desc)
             spec.createdAt = spec.createdAt ?? -1;
             spec.updatedAt = spec.updatedAt ?? -1;
             plan[name] = spec;
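Illustrative only, assuming coerceIndexDir maps index: true to 1:

    // With Roles declaring "isPublic": { "index": true } (see Roles.json above),
    // the plan entry for Roles would be roughly:
    //   { isPublic: 1, createdAt: -1, updatedAt: -1 }
    // For a schema that extends a base, inherited base fields are skipped, so only the
    // child's own indexed fields plus the createdAt/updatedAt defaults appear in its spec.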