@airoom/nextmin-node 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,247 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.splitCSV = exports.isPlainObject = void 0;
+ exports.normalizeAttrType = normalizeAttrType;
+ exports.toIdString = toIdString;
+ exports.splitFilterForExtended = splitFilterForExtended;
+ exports.splitSortForExtended = splitSortForExtended;
+ exports.sortInMemory = sortInMemory;
+ exports.matchDoc = matchDoc;
+ exports.buildPredicateForField = buildPredicateForField;
+ exports.parseSort = parseSort;
+ exports.parseQuery = parseQuery;
+ exports.extractIds = extractIds;
+ exports.refInfoFromAttr = refInfoFromAttr;
+ const isPlainObject = (v) => !!v && typeof v === "object" && !Array.isArray(v);
+ exports.isPlainObject = isPlainObject;
+ const splitCSV = (raw) => raw.split(",").map(s => s.trim()).filter(Boolean);
+ exports.splitCSV = splitCSV;
+ function normalizeAttrType(attr) {
+     const a = Array.isArray(attr) ? attr?.[0] : attr;
+     let t = a?.type ?? a;
+     if (typeof t === "function" && t.name)
+         t = t.name;
+     if (t && typeof t === "object" && "name" in t)
+         t = t.name;
+     if (typeof t === "string")
+         t = t.toLowerCase();
+     if (t === "bool")
+         t = "boolean";
+     if (t === "objectid" || t === "oid" || t === "ref")
+         t = "objectid";
+     return String(t || "");
+ }
+ function toIdString(v) {
+     if (!v)
+         return null;
+     if (typeof v === "string")
+         return v;
+     if (typeof v === "number")
+         return String(v);
+     if (typeof v === "object") {
+         if (typeof v.id === "string")
+             return v.id;
+         if (v._id && typeof v._id.toString === "function")
+             return v._id.toString();
+         if (typeof v._id === "string")
+             return v._id;
+     }
+     return null;
+ }
+ function splitFilterForExtended(filter, baseKeys) {
+     const walk = (node) => {
+         if (!(0, exports.isPlainObject)(node))
+             return { child: node, base: {} };
+         const outChild = {};
+         const outBase = {};
+         for (const [k, v] of Object.entries(node)) {
+             if (k === "$and" || k === "$or" || k === "$nor") {
+                 if (!Array.isArray(v))
+                     continue;
+                 const childArr = [];
+                 const baseArr = [];
+                 for (const sub of v) {
+                     const split = walk(sub);
+                     if (Object.keys(split.child).length)
+                         childArr.push(split.child);
+                     if (Object.keys(split.base).length)
+                         baseArr.push(split.base);
+                 }
+                 if (childArr.length)
+                     outChild[k] = childArr;
+                 if (baseArr.length)
+                     outBase[k] = baseArr;
+                 continue;
+             }
+             if (baseKeys.has(k))
+                 outBase[k] = v;
+             else
+                 outChild[k] = v;
+         }
+         return { child: outChild, base: outBase };
+     };
+     return walk(filter || {});
+ }
+ function splitSortForExtended(sort, baseKeys) {
+     const child = {};
+     const base = {};
+     for (const [k, dir] of Object.entries(sort || {})) {
+         if (baseKeys.has(k))
+             base[k] = dir;
+         else
+             child[k] = dir;
+     }
+     return { child, base };
+ }
+ function sortInMemory(rows, sort) {
+     const keys = Object.keys(sort || {});
+     if (!keys.length)
+         return rows;
+     return [...rows].sort((a, b) => {
+         for (const k of keys) {
+             const dir = sort[k];
+             const av = a?.[k];
+             const bv = b?.[k];
+             const ax = av instanceof Date ? +av : (av ?? "");
+             const bx = bv instanceof Date ? +bv : (bv ?? "");
+             if (ax > bx)
+                 return dir;
+             if (ax < bx)
+                 return -dir;
+         }
+         return 0;
+     });
+ }
+ function matchDoc(doc, filter) {
+     const evalNode = (node) => {
+         for (const [k, v] of Object.entries(node)) {
+             if (k === "$and" && Array.isArray(v))
+                 return v.every((n) => evalNode(n));
+             if (k === "$or" && Array.isArray(v))
+                 return v.some((n) => evalNode(n));
+             if (k === "$nor" && Array.isArray(v))
+                 return !v.some((n) => evalNode(n));
+             const dv = doc[k];
+             if ((0, exports.isPlainObject)(v)) {
+                 if ("$in" in v && !v.$in.includes(dv))
+                     return false;
+                 if ("$gte" in v && !(dv >= v.$gte))
+                     return false;
+                 if ("$lte" in v && !(dv <= v.$lte))
+                     return false;
+                 if ("$regex" in v) {
+                     const re = new RegExp(v.$regex, v.$options || "");
+                     if (!re.test(String(dv ?? "")))
+                         return false;
+                 }
+             }
+             else {
+                 if (dv !== v)
+                     return false;
+             }
+         }
+         return true;
+     };
+     return evalNode(filter || {});
+ }
+ function buildPredicateForField(field, attr, raw) {
+     const isArray = Array.isArray(attr);
+     const base = isArray ? attr[0] : attr;
+     const attrType = normalizeAttrType(base);
+     const tokens = (0, exports.splitCSV)(raw);
+     switch (attrType) {
+         case "string":
+             return isArray ? { [field]: { $in: tokens.length ? tokens : [raw] } } : { [field]: { $regex: raw, $options: "i" } };
+         case "number": {
+             const nums = tokens.map(Number).filter((n) => !Number.isNaN(n));
+             return isArray ? (nums.length ? { [field]: { $in: nums } } : null) : (nums.length ? { [field]: nums[0] } : null);
+         }
+         case "boolean": {
+             const toBool = (t) => /^(true|1|yes)$/i.test(t) ? true : /^(false|0|no)$/i.test(t) ? false : null;
+             if (isArray) {
+                 const bools = tokens.map(toBool).filter((v) => v !== null);
+                 return bools.length ? { [field]: { $in: bools } } : null;
+             }
+             const b = toBool(raw);
+             return b === null ? null : { [field]: b };
+         }
+         case "objectid":
+             return isArray || tokens.length > 1 ? { [field]: { $in: tokens } } : (raw ? { [field]: raw } : null);
+         case "date": {
+             const toDate = (t) => {
+                 const d = new Date(t);
+                 return Number.isNaN(+d) ? null : d;
+             };
+             if (isArray) {
+                 const ds = tokens.map(toDate).filter((d) => !!d);
+                 return ds.length ? { [field]: { $in: ds } } : null;
+             }
+             const d = toDate(raw);
+             return d ? { [field]: d } : null;
+         }
+         default:
+             return isArray ? { [field]: { $in: tokens.length ? tokens : [raw] } } : { [field]: raw };
+     }
+ }
+ function parseSort(expr) {
+     if (!expr)
+         return;
+     const out = {};
+     for (const raw of expr.split(",").map((s) => s.trim()).filter(Boolean)) {
+         if (raw.startsWith("-"))
+             out[raw.slice(1)] = -1;
+         else if (raw.startsWith("+"))
+             out[raw.slice(1)] = 1;
+         else
+             out[raw] = 1;
+     }
+     return Object.keys(out).length ? out : undefined;
+ }
+ function parseQuery(req) {
+     const limit = Math.min(parseInt(String(req.query.limit ?? "12"), 10) || 12, 100);
+     const page = Math.max(parseInt(String(req.query.page ?? "1"), 10) || 1, 1);
+     const skip = (page - 1) * limit;
+     const fields = String(req.query.fields ?? "")
+         .split(",")
+         .map((s) => s.trim())
+         .filter(Boolean);
+     const projection = fields.length ? Object.fromEntries(fields.map((f) => [f, 1])) : undefined;
+     const sort = parseSort(String(req.query.sort ?? "-createdAt"));
+     return { limit, page, skip, projection, sort };
+ }
+ function extractIds(val) {
+     if (val == null)
+         return [];
+     const arr = Array.isArray(val) ? val : [val];
+     const toId = (v) => {
+         if (!v)
+             return null;
+         if (typeof v === "string")
+             return v;
+         if (typeof v === "number")
+             return String(v);
+         if (typeof v === "object") {
+             if (typeof v.id === "string")
+                 return v.id;
+             if (v._id && typeof v._id === "string")
+                 return v._id;
+             if (v._id && typeof v._id.toString === "function")
+                 return v._id.toString();
+         }
+         return null;
+     };
+     return arr.map(toId).filter((s) => !!s);
+ }
+ function refInfoFromAttr(attr) {
+     if (!attr)
+         return null;
+     if (Array.isArray(attr) && attr[0] && attr[0].ref) {
+         return { ref: String(attr[0].ref), isArray: true };
+     }
+     const a = Array.isArray(attr) ? attr?.[0] : attr;
+     const t = (typeof a?.type === "string" ? a.type : String(a?.type || "")).toLowerCase();
+     if (a?.ref && (t === "objectid" || t === "ref")) {
+         return { ref: String(a.ref), isArray: false };
+     }
+     return null;
+ }
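
For orientation, a minimal usage sketch of the helpers added above. The require path and the Express-style `req` object are illustrative assumptions; the diff does not show the module's file name.

// Sketch only: module path and request shape are assumed for illustration
const { parseQuery, buildPredicateForField, matchDoc } = require('./query-utils');

// e.g. GET /articles?limit=20&page=2&fields=name,price&sort=-createdAt
const req = { query: { limit: '20', page: '2', fields: 'name,price', sort: '-createdAt' } };
const { limit, page, skip, projection, sort } = parseQuery(req);
// => limit 20, page 2, skip 20, projection { name: 1, price: 1 }, sort { createdAt: -1 }

// Non-array number attribute: only the first parsed token is used
buildPredicateForField('price', { type: 'Number' }, '10,20'); // => { price: 10 }

// matchDoc evaluates a small subset of Mongo operators in memory
matchDoc({ price: 10 }, { price: { $gte: 5 } }); // => true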
@@ -48,5 +48,5 @@ export declare class MongoAdapter implements DatabaseAdapter {
       * Only touches indexes named with the prefix below (won't touch user indexes).
       */
      private managedIndexName;
-     syncIndexes(modelName: string, desired: FieldIndexSpec): Promise<void>;
+     syncIndexes(modelName: string, spec: FieldIndexSpec): Promise<void>;
  }
@@ -136,7 +136,19 @@ class MongoAdapter {
      /** Build a fresh Mongoose schema from our NextMin schema definition. */
      buildMongooseSchema(def) {
          const shape = {};
+         // If this schema extends a base, only include child-own attributes for storage
+         let baseKeys = null;
+         const baseName = def?.extends;
+         if (baseName) {
+             const baseSchema = this.getSchema(baseName);
+             if (baseSchema) {
+                 baseKeys = new Set(Object.keys(baseSchema.attributes || {}));
+             }
+         }
          for (const [key, attr] of Object.entries(def.attributes)) {
+             // Exclude base attributes from child storage schema; always allow link field
+             if (baseKeys && key !== 'baseId' && baseKeys.has(key))
+                 continue;
              shape[key] = this.mapAttribute(attr);
          }
          const s = new mongoose_1.Schema(shape, { timestamps: true });
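
To illustrate the storage split introduced here (the schema definitions below are assumptions, not taken from the package): when a definition `extends` a base, attributes that the base already declares are left out of the child's Mongoose shape, while the `baseId` link always passes through.

// Hypothetical definitions, for illustration only
const base = { modelName: 'Content', attributes: { title: { type: 'String' }, body: { type: 'String' } } };
const child = {
    modelName: 'Article',
    extends: 'Content',
    attributes: {
        title: { type: 'String' },                    // inherited, present on the read surface
        subtitle: { type: 'String' },                 // child-own
        baseId: { type: 'ObjectId', ref: 'Content' }, // link to the base document
    },
};
// buildMongooseSchema(child) stores only { subtitle, baseId } (plus timestamps);
// 'title' and 'body' stay on the base collection and are reached through baseId.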
@@ -366,44 +378,33 @@ class MongoAdapter {
      managedIndexName(field) {
          return `nextmin_idx_${field}`;
      }
-     async syncIndexes(modelName, desired) {
+     async syncIndexes(modelName, spec) {
          const col = this.getNativeCollectionByModelName(modelName);
-         // list existing and pick only the ones we manage
-         const existing = await col.indexes();
-         const managed = new Map();
-         for (const ix of existing) {
-             const name = String(ix.name || '');
-             if (name.startsWith('nextmin_idx_')) {
-                 managed.set(name, { key: ix.key });
-             }
-         }
-         // desired names from spec
-         const desiredEntries = Object.entries(desired);
-         const desiredNames = new Set(desiredEntries.map(([field]) => this.managedIndexName(field)));
-         // create / recreate if needed
-         for (const [field, dir] of desiredEntries) {
-             const name = this.managedIndexName(field);
-             const have = managed.get(name);
-             const wantKey = { [field]: dir };
-             if (!have || JSON.stringify(have.key) !== JSON.stringify(wantKey)) {
-                 if (have) {
-                     try {
-                         await col.dropIndex(name);
-                     }
-                     catch { }
-                 }
-                 await col.createIndex(wantKey, { name, background: true });
-             }
-         }
-         // drop stale managed indexes
-         for (const name of managed.keys()) {
-             if (!desiredNames.has(name)) {
+         const existing = await col.indexes(); // [{ name, key, unique, sparse, ... }]
+         // desired => only from 'spec'
+         const desired = Object.entries(spec).map(([field, dir]) => ({
+             name: `${field}_${dir === 1 ? 'asc' : 'desc'}`,
+             key: { [field]: dir },
+         }));
+         const desiredNames = new Set(desired.map((d) => d.name));
+         // drop indexes not in desired (except _id_)
+         for (const idx of existing) {
+             if (idx.name === '_id_')
+                 continue;
+             if (!desiredNames.has(idx.name)) {
                  try {
-                     await col.dropIndex(name);
+                     await col.dropIndex(idx.name);
                  }
                  catch { }
              }
          }
+         // create missing
+         const existingNames = new Set((await col.indexes()).map((i) => i.name));
+         for (const d of desired) {
+             if (!existingNames.has(d.name)) {
+                 await col.createIndex(d.key, { name: d.name });
+             }
+         }
      }
  }
  exports.MongoAdapter = MongoAdapter;
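
Note the changed index-sync semantics, sketched below for an assumed spec: the previous implementation only created and dropped indexes named `nextmin_idx_<field>`, whereas the new one names managed indexes `<field>_asc` / `<field>_desc` and drops every other index on the collection except `_id_`.

// Assumed field spec, for illustration only
const spec = { createdAt: -1, slug: 1 };
// After adapter.syncIndexes('Article', spec) the collection is expected to hold:
//   _id_            (never dropped)
//   createdAt_desc  ({ createdAt: -1 })
//   slug_asc        ({ slug: 1 })
// Any other index present on the collection is dropped.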
@@ -8,6 +8,11 @@
        "enum": ["system", "default", "user"],
        "default": "user",
        "required": true
+     },
+     "isPublic": {
+       "type": "boolean",
+       "default": false,
+       "index": true
      }
    },
 
@@ -20,8 +25,8 @@
 
    "access": {
      "public": {
-       "create": true,
-       "read": false,
+       "create": false,
+       "read": true,
        "update": false,
        "delete": false
      },
@@ -12,16 +12,19 @@ const DEFAULT_ROLES = [
          name: 'superadmin',
          description: 'Super administrator with all privileges',
          type: 'system',
+         isPublic: false,
      },
      {
          name: 'admin',
          description: 'Administrator with elevated privileges',
          type: 'system',
+         isPublic: false,
      },
      {
          name: 'user',
          description: 'Regular user with limited privileges',
          type: 'system',
+         isPublic: true,
      },
  ];
  // Keep your env override as requested
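
For reference, a sketch of what a seeded role is expected to look like after this change (only the listed fields come from the code above; any `_id` and timestamps are assumed to be added by the adapter):

// Seeded 'user' role, illustrative shape
const seededUserRole = {
    name: 'user',
    description: 'Regular user with limited privileges',
    type: 'system',
    isPublic: true, // new flag; indexed, and readable now that the role schema allows public read
};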
@@ -108,7 +108,7 @@ class SchemaLoader {
          try {
              this.loadSchemas();
              Logger_1.default.info('Schemas hot-reloaded successfully!');
-             this.emitter.emit('schemasChanged', this.getPublicSchemaList());
+             this.emitter.emit('schemasChanged', this.getSchemas());
          }
          catch (err) {
              Logger_1.default.error(`Error during schema hot reload for ${filePath}:`, err);
@@ -125,16 +125,44 @@ class SchemaLoader {
          }
          for (const schema of Object.values(schemas)) {
              if (schema.extends) {
-                 const baseSchema = schemas[schema.extends];
+                 const baseName = schema.extends;
+                 const baseSchema = schemas[baseName];
                  if (baseSchema) {
-                     schema.attributes = {
+                     // Merge attributes and methods for READ surface
+                     const mergedAttrs = {
                          ...baseSchema.attributes,
                          ...schema.attributes,
                      };
+                     for (const key of Object.keys(baseSchema.attributes || {})) {
+                         if (key === 'baseId')
+                             continue;
+                         if (Object.prototype.hasOwnProperty.call(mergedAttrs, key)) {
+                             const val = mergedAttrs[key];
+                             if (Array.isArray(val) && val[0] && typeof val[0] === 'object') {
+                                 const { unique: _u, index: _i, ...rest } = val[0];
+                                 mergedAttrs[key] = [{ ...rest }];
+                             }
+                             else if (val && typeof val === 'object') {
+                                 const { unique: _u, index: _i, ...rest } = val;
+                                 mergedAttrs[key] = { ...rest };
+                             }
+                         }
+                     }
+                     schema.attributes = mergedAttrs;
                      schema.allowedMethods = {
                          ...baseSchema.allowedMethods,
                          ...schema.allowedMethods,
                      };
+                     // Inject hidden link field to base (used for storage join)
+                     // Ensure hidden link field
+                     const linkField = 'baseId';
+                     if (!schema.attributes[linkField]) {
+                         schema.attributes[linkField] = {
+                             type: 'ObjectId',
+                             ref: baseName,
+                             private: true,
+                         };
+                     }
                  }
                  else {
                      throw new Error(`Base schema ${schema.extends} not found for ${schema.modelName}`);
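
Sketch of the resulting read surface for an extended schema (the input definitions are assumptions; the merge and injection behaviour is taken from the code above): inherited attribute copies lose their `unique`/`index` flags, and a hidden `baseId` link to the base model is injected when missing.

// Hypothetical inputs: Content declares { title: { type: 'String', unique: true } },
// Article extends Content and declares { subtitle: { type: 'String' } }.
// After loading, Article.attributes is expected to be:
const articleAttributes = {
    title: { type: 'String' },                                   // inherited; unique/index stripped
    subtitle: { type: 'String' },
    baseId: { type: 'ObjectId', ref: 'Content', private: true }, // injected link to the base document
};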
@@ -262,19 +290,29 @@ class SchemaLoader {
              if (Array.isArray(attr)) {
                  const elem = attr[0];
                  if (elem && typeof elem === 'object') {
-                     // Keep as an array with a single shallow-cloned descriptor, preserving flags like `private`, `sensitive`, `writeOnly`
-                     out[key] = [{ ...elem }];
+                     // If the inner descriptor is private, omit this field entirely from public schema
+                     if (elem.private) {
+                         continue;
+                     }
+                     // Keep as an array with a single shallow-cloned descriptor (without leaking private flag)
+                     const { private: _omit, ...rest } = elem;
+                     out[key] = [{ ...rest }];
                  }
                  else {
-                     // Fallback: keep as-is
+                     // Fallback: keep as-is (no private flag to check)
                      out[key] = attr;
                  }
                  continue;
              }
              // Single attribute object
              if (attr && typeof attr === 'object') {
-                 // Keep `private`, `sensitive`, `writeOnly`, etc.
-                 out[key] = { ...attr };
+                 // If marked private, omit from public schema entirely
+                 if (attr.private) {
+                     continue;
+                 }
+                 // Shallow clone and drop the private flag if present
+                 const { private: _omit, ...rest } = attr;
+                 out[key] = { ...rest };
                  continue;
              }
              // Unexpected primitives — pass through
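
A small sketch of the new public-schema filtering (the attribute map is an assumption; the filtering rules are from the code above):

// Hypothetical attribute map
const attrs = {
    passwordHash: { type: 'String', private: true }, // omitted from the public schema entirely
    tags: [{ type: 'String', private: true }],       // private array descriptor: also omitted
    name: { type: 'String', private: false },        // kept, with the private flag dropped
};
// Expected public attributes: { name: { type: 'String' } }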
@@ -302,18 +340,24 @@ class SchemaLoader {
          const plan = {};
          for (const [name, s] of Object.entries(this.schemas)) {
              const spec = {};
-             // from declared attributes
+             const baseName = s?.extends;
+             let baseKeys = null;
+             if (baseName && this.schemas[baseName]) {
+                 baseKeys = new Set(Object.keys(this.schemas[baseName].attributes || {}));
+             }
              const attrs = s.attributes || {};
              for (const [field, rawAttr] of Object.entries(attrs)) {
+                 // Skip base attributes for child collections (except link 'baseId')
+                 if (baseKeys && field !== 'baseId' && baseKeys.has(field))
+                     continue;
                  const attr = Array.isArray(rawAttr) ? rawAttr[0] : rawAttr;
                  if (!attr || typeof attr !== 'object')
                      continue;
-                 // allow `index` on any attribute
                  const dir = this.coerceIndexDir(attr.index);
                  if (dir)
                      spec[field] = dir;
              }
-             // always add timestamps (desc). Does not require attributes to exist.
+             // Always add timestamps (desc)
              spec.createdAt = spec.createdAt ?? -1;
              spec.updatedAt = spec.updatedAt ?? -1;
              plan[name] = spec;
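
Sketch of the resulting index plan for an assumed child schema, taking `coerceIndexDir(true)` to mean ascending (an assumption; that helper is not shown in this diff):

// Hypothetical: Article extends Content and declares slug: { type: 'String', index: true }
// Expected plan entry:
//   Article: { slug: 1, createdAt: -1, updatedAt: -1 }
// Inherited base attributes are skipped for the child collection (except 'baseId'),
// and descending createdAt/updatedAt indexes are always added.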
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@airoom/nextmin-node",
-   "version": "0.1.3",
-   "license": "SEE LICENSE IN LICENSE",
+   "version": "0.1.5",
+   "license": "MIT",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",
    "scripts": {