@airoom/nextmin-node 1.4.5 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +48 -5
- package/dist/api/apiRouter.d.ts +2 -0
- package/dist/api/apiRouter.js +68 -19
- package/dist/api/router/mountCrudRoutes.js +209 -221
- package/dist/api/router/mountFindRoutes.js +2 -49
- package/dist/api/router/mountSearchRoutes.js +10 -52
- package/dist/api/router/mountSearchRoutes_extended.js +7 -48
- package/dist/api/router/setupAuthRoutes.js +6 -2
- package/dist/api/router/utils.js +20 -7
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +83 -0
- package/dist/database/DatabaseAdapter.d.ts +7 -0
- package/dist/database/NMAdapter.d.ts +41 -0
- package/dist/database/NMAdapter.js +979 -0
- package/dist/database/QueryEngine.d.ts +14 -0
- package/dist/database/QueryEngine.js +215 -0
- package/dist/database/utils.d.ts +2 -0
- package/dist/database/utils.js +21 -0
- package/dist/index.d.ts +4 -1
- package/dist/index.js +11 -5
- package/dist/models/BaseModel.d.ts +16 -0
- package/dist/models/BaseModel.js +32 -4
- package/dist/policy/authorize.js +118 -43
- package/dist/schemas/Users.json +66 -30
- package/dist/services/RealtimeService.d.ts +20 -0
- package/dist/services/RealtimeService.js +93 -0
- package/dist/services/SchemaService.d.ts +3 -0
- package/dist/services/SchemaService.js +9 -5
- package/dist/utils/DefaultDataInitializer.js +10 -2
- package/dist/utils/Events.d.ts +34 -0
- package/dist/utils/Events.js +55 -0
- package/dist/utils/Logger.js +12 -10
- package/dist/utils/QueryCache.d.ts +16 -0
- package/dist/utils/QueryCache.js +106 -0
- package/dist/utils/SchemaLoader.d.ts +7 -2
- package/dist/utils/SchemaLoader.js +58 -18
- package/package.json +19 -4
- package/dist/database/InMemoryAdapter.d.ts +0 -15
- package/dist/database/InMemoryAdapter.js +0 -71
- package/dist/database/MongoAdapter.d.ts +0 -52
- package/dist/database/MongoAdapter.js +0 -410
|
@@ -0,0 +1,979 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.NMAdapter = void 0;
|
|
7
|
+
const typeorm_1 = require("typeorm");
|
|
8
|
+
const mongodb_1 = require("mongodb");
|
|
9
|
+
const Logger_1 = __importDefault(require("../utils/Logger"));
|
|
10
|
+
const utils_1 = require("../api/router/utils");
|
|
11
|
+
const utils_2 = require("./utils");
|
|
12
|
+
/**
|
|
13
|
+
* NMAdapter implements the DatabaseAdapter interface.
|
|
14
|
+
* It supports SQL databases (PostgreSQL, MySQL, MariaDB, SQLite, MSSQL) and MongoDB.
|
|
15
|
+
*/
|
|
16
|
+
class NMAdapter {
|
|
17
|
+
/**
 * Builds the adapter around a single TypeORM DataSource.
 *
 * @param {object} options - TypeORM DataSource options (type, url/host, credentials, ...).
 *   They are spread into the DataSource as-is; `entities`, `synchronize` and `logging`
 *   are overridden below.
 */
constructor(options) {
    this.options = options;
    // Repositories and entity schemas are cached per lowercased model name.
    this.repositories = new Map();
    this.entitySchemas = new Map();
    // Cumulative map of raw schema definitions, keyed lowercase (see registerSchemas).
    this.registeredSchemas = {};
    this.dataSource = new typeorm_1.DataSource({
        ...options,
        entities: [], // Will be populated dynamically
        synchronize: options.type === 'mongodb' ? false : true, // Only for development; use migrations for production in a real-world app
        logging: false,
    });
}
|
|
29
|
+
async connect() {
|
|
30
|
+
await this.dataSource.initialize();
|
|
31
|
+
Logger_1.default.info('NMAdapter', `Connected to database: ${this.dataSource.options.type}`);
|
|
32
|
+
}
|
|
33
|
+
async disconnect() {
|
|
34
|
+
await this.dataSource.destroy();
|
|
35
|
+
Logger_1.default.warn('NMAdapter', 'Disconnected from database');
|
|
36
|
+
}
|
|
37
|
+
async registerSchemas(schemas) {
|
|
38
|
+
// 1) Update cumulative schema map BEFORE building entities
|
|
39
|
+
for (const [name, def] of Object.entries(schemas)) {
|
|
40
|
+
this.registeredSchemas[name.toLowerCase()] = def;
|
|
41
|
+
}
|
|
42
|
+
// 2) Define EntitySchemas dynamically
|
|
43
|
+
for (const [name, def] of Object.entries(schemas)) {
|
|
44
|
+
const entitySchema = this.buildEntitySchema(def);
|
|
45
|
+
this.entitySchemas.set(name.toLowerCase(), entitySchema);
|
|
46
|
+
}
|
|
47
|
+
// 2) Update DataSource with new entities
|
|
48
|
+
// Note: For dynamic entity updates in TypeORM, we must clear old metadatas
|
|
49
|
+
// and rebuild them to ensure synchronize() detects all changes correctly.
|
|
50
|
+
this.dataSource.options.entities = Array.from(this.entitySchemas.values());
|
|
51
|
+
// Re-initialize metadata
|
|
52
|
+
if (this.dataSource.entityMetadatas) {
|
|
53
|
+
this.dataSource.entityMetadatas.length = 0;
|
|
54
|
+
}
|
|
55
|
+
await this.dataSource.buildMetadatas();
|
|
56
|
+
// Synchronize schema (if applicable)
|
|
57
|
+
if (this.dataSource.options.synchronize) {
|
|
58
|
+
try {
|
|
59
|
+
Logger_1.default.info('NMAdapter', `Synchronizing ${Object.keys(schemas).length} schemas...`);
|
|
60
|
+
await this.dataSource.synchronize();
|
|
61
|
+
Logger_1.default.info('NMAdapter', 'Schema synchronization completed successfully');
|
|
62
|
+
}
|
|
63
|
+
catch (err) {
|
|
64
|
+
// Ignore "table already exists" errors during dynamic sync
|
|
65
|
+
// This often happens in SQLite when metadata rebuilds don't perfectly align with existing tables
|
|
66
|
+
if (err.message?.includes('already exists')) {
|
|
67
|
+
Logger_1.default.info('NMAdapter', `Synchronize notice: Table already exists, skipping create: ${err.message}`);
|
|
68
|
+
}
|
|
69
|
+
else {
|
|
70
|
+
Logger_1.default.error('NMAdapter', `Schema synchronization failed: ${err.message}`, err);
|
|
71
|
+
throw err; // Re-throw critical errors
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
// 3) Sync Indexes
|
|
76
|
+
for (const [name, def] of Object.entries(schemas)) {
|
|
77
|
+
const indexes = {};
|
|
78
|
+
for (const [key, attr] of Object.entries(def.attributes)) {
|
|
79
|
+
if (attr.index === true) {
|
|
80
|
+
indexes[key] = 1;
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
if (Object.keys(indexes).length > 0) {
|
|
84
|
+
await this.syncIndexes(name, indexes);
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
// 4) Cache repositories
|
|
88
|
+
for (const name of Object.keys(schemas)) {
|
|
89
|
+
const lcName = name.toLowerCase();
|
|
90
|
+
const schema = this.entitySchemas.get(lcName);
|
|
91
|
+
if (schema) {
|
|
92
|
+
this.repositories.set(lcName, this.dataSource.getRepository(schema));
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
async syncIndexes(modelName, spec) {
|
|
97
|
+
const repo = this.repositories.get(modelName.toLowerCase());
|
|
98
|
+
if (!repo)
|
|
99
|
+
return;
|
|
100
|
+
if (this.dataSource.options.type === 'mongodb') {
|
|
101
|
+
const mongoRepo = repo.manager.getMongoRepository(repo.metadata.name);
|
|
102
|
+
for (const field of Object.keys(spec)) {
|
|
103
|
+
try {
|
|
104
|
+
const mongoRepo = repo.manager.getMongoRepository(repo.metadata.name);
|
|
105
|
+
// Use TypeORM's built-in createCollectionIndex if available, or native
|
|
106
|
+
await mongoRepo.createCollectionIndex({ [field]: 1 }, { background: true });
|
|
107
|
+
Logger_1.default.info('NMAdapter', `Created index on ${modelName}.${field}`);
|
|
108
|
+
}
|
|
109
|
+
catch (err) {
|
|
110
|
+
Logger_1.default.warn('NMAdapter', `Failed to create index on ${modelName}.${field}: ${err}`);
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
else {
|
|
115
|
+
// For SQL, TypeORM handles basic indexing via EntitySchema.synchronize()
|
|
116
|
+
// which we already called in registerSchemas.
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
buildEntitySchema(def) {
|
|
120
|
+
const isMongo = this.dataSource.options.type === 'mongodb';
|
|
121
|
+
const columns = {
|
|
122
|
+
id: {
|
|
123
|
+
primary: true,
|
|
124
|
+
objectId: isMongo,
|
|
125
|
+
// For MongoDB we use name: '_id' to map the physical column
|
|
126
|
+
name: isMongo ? '_id' : undefined,
|
|
127
|
+
// For MongoDB we use string/ObjectId, for SQL we use int
|
|
128
|
+
type: isMongo ? 'objectId' : 'int',
|
|
129
|
+
generated: isMongo ? true : 'increment',
|
|
130
|
+
},
|
|
131
|
+
createdAt: {
|
|
132
|
+
type: 'datetime',
|
|
133
|
+
default: () => 'CURRENT_TIMESTAMP',
|
|
134
|
+
nullable: true,
|
|
135
|
+
},
|
|
136
|
+
updatedAt: {
|
|
137
|
+
type: 'datetime',
|
|
138
|
+
default: () => 'CURRENT_TIMESTAMP',
|
|
139
|
+
nullable: true,
|
|
140
|
+
},
|
|
141
|
+
};
|
|
142
|
+
const indices = [];
|
|
143
|
+
for (const [key, attr] of Object.entries(def.attributes)) {
|
|
144
|
+
if (key === 'id' || key === '_id')
|
|
145
|
+
continue;
|
|
146
|
+
// CRITICAL: If this field is physically stored in a base collection,
|
|
147
|
+
// do NOT define a column for it in this child entity internally,
|
|
148
|
+
// UNLESS it is an OVERRIDE (not inherited) and not marked as 'safe' in the base.
|
|
149
|
+
if (def.extends && key !== 'baseId') {
|
|
150
|
+
const head = Array.isArray(attr) ? attr[0] : attr;
|
|
151
|
+
if (head?.inherited) {
|
|
152
|
+
continue; // Skip purely inherited fields; they live in the base record
|
|
153
|
+
}
|
|
154
|
+
// It's an override or child-specific field
|
|
155
|
+
const baseSchema = this.registeredSchemas[def.extends];
|
|
156
|
+
const baseAttr = baseSchema?.attributes?.[key];
|
|
157
|
+
const bHead = Array.isArray(baseAttr) ? baseAttr[0] : baseAttr;
|
|
158
|
+
if (bHead?.safe === true) {
|
|
159
|
+
continue; // Base model protects this field; skip column in child
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
const typeORMType = this.mapType(attr);
|
|
163
|
+
const isUnique = attr.unique === true;
|
|
164
|
+
const isSparse = attr.sparse === true;
|
|
165
|
+
columns[key] = {
|
|
166
|
+
type: typeORMType, // Map type appropriately (e.g., 'varchar' for strings)
|
|
167
|
+
nullable: true,
|
|
168
|
+
unique: isUnique && (!isMongo || !isSparse), // Standard unique if not sparse mongo
|
|
169
|
+
default: attr.default,
|
|
170
|
+
index: attr.index === true,
|
|
171
|
+
};
|
|
172
|
+
// For MongoDB, if unique and sparse, we use an explicit index definition
|
|
173
|
+
// since column-level uniqueness doesn't support the 'sparse' option in TypeORM's EntitySchema for Mongo.
|
|
174
|
+
if (isMongo && isUnique && isSparse) {
|
|
175
|
+
indices.push({
|
|
176
|
+
name: `IDX_${def.modelName}_${key}_UNIQUE`,
|
|
177
|
+
columns: [key],
|
|
178
|
+
unique: true,
|
|
179
|
+
sparse: true,
|
|
180
|
+
});
|
|
181
|
+
}
|
|
182
|
+
if (typeORMType === 'datetime' && attr.default === 'now') {
|
|
183
|
+
columns[key].default = () => 'CURRENT_TIMESTAMP';
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
return new typeorm_1.EntitySchema({
|
|
187
|
+
name: def.modelName,
|
|
188
|
+
tableName: def.collection || def.modelName.toLowerCase(), // Reverted to lowercase for DB compatibility
|
|
189
|
+
columns,
|
|
190
|
+
indices: indices.length > 0 ? indices : undefined,
|
|
191
|
+
});
|
|
192
|
+
}
|
|
193
|
+
mapType(attr) {
|
|
194
|
+
if (Array.isArray(attr)) {
|
|
195
|
+
return 'simple-json';
|
|
196
|
+
}
|
|
197
|
+
const type = String(attr?.type || 'string').toLowerCase();
|
|
198
|
+
switch (type) {
|
|
199
|
+
case 'string': return 'varchar';
|
|
200
|
+
case 'number': return 'float';
|
|
201
|
+
case 'boolean': return 'boolean';
|
|
202
|
+
case 'date':
|
|
203
|
+
case 'datetime': return 'datetime';
|
|
204
|
+
case 'json':
|
|
205
|
+
case 'object': return 'json';
|
|
206
|
+
case 'array': return 'simple-json';
|
|
207
|
+
case 'objectid':
|
|
208
|
+
return 'varchar';
|
|
209
|
+
default: return 'varchar';
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
getRepository(collection) {
|
|
213
|
+
const repo = this.repositories.get(collection.toLowerCase());
|
|
214
|
+
if (!repo) {
|
|
215
|
+
throw new Error(`Repository for ${collection} not found. Did you register the schema?`);
|
|
216
|
+
}
|
|
217
|
+
return repo;
|
|
218
|
+
}
|
|
219
|
+
async create(collection, data, schemaDefinition) {
|
|
220
|
+
const repo = this.getRepository(collection);
|
|
221
|
+
const coerced = this.coercePayloadForStorage(data, schemaDefinition);
|
|
222
|
+
const created = await repo.save(coerced);
|
|
223
|
+
return this.prepareResult(created);
|
|
224
|
+
}
|
|
225
|
+
coercePayloadForStorage(payload, schema) {
|
|
226
|
+
if (!payload || !schema || this.dataSource.options.type !== 'mongodb')
|
|
227
|
+
return payload;
|
|
228
|
+
const coerced = { ...payload };
|
|
229
|
+
for (const [key, attr] of Object.entries(schema.attributes)) {
|
|
230
|
+
const a = Array.isArray(attr) ? attr[0] : attr;
|
|
231
|
+
const isObjectIdField = String(a?.type || '').toLowerCase() === 'objectid';
|
|
232
|
+
// For MongoDB, we remove null values to satisfy sparse indexes
|
|
233
|
+
if (coerced[key] === null) {
|
|
234
|
+
delete coerced[key];
|
|
235
|
+
continue;
|
|
236
|
+
}
|
|
237
|
+
if (isObjectIdField && coerced[key] != null) {
|
|
238
|
+
if (Array.isArray(coerced[key])) {
|
|
239
|
+
coerced[key] = coerced[key].map((v) => this.convertToObjectId(v));
|
|
240
|
+
}
|
|
241
|
+
else {
|
|
242
|
+
coerced[key] = this.convertToObjectId(coerced[key]);
|
|
243
|
+
}
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
return coerced;
|
|
247
|
+
}
|
|
248
|
+
/**
 * Reads records matching `query`, with optional pagination, sort, projection
 * and relation hydration.
 *
 * @param {string} collection - Model name.
 * @param {object} query - App-level query; $nestedSearch operators are resolved first.
 * @param {number} [limit] - Max rows (passed to take/cursor.limit).
 * @param {number} [skip] - Offset.
 * @param {object} [schemaDefinition] - Enables projection root-resolution and hydration.
 * @param {boolean} [includePrivateFields] - NOTE(review): accepted but not referenced
 *   anywhere in this method body — presumably consumed by callers/overloads; confirm.
 * @param {object} [options] - { sort, projection, depth }.
 * @returns {Promise<object[]>} Normalized (prepareResult) and hydrated rows.
 */
async read(collection, query, limit, skip, schemaDefinition, includePrivateFields, options) {
    const repo = this.getRepository(collection);
    const isMongo = this.dataSource.options.type === 'mongodb';
    // Resolve $nestedSearch operators into concrete { $in: [...] } filters first.
    const resolvedQuery = await this.resolveNestedSearches(query);
    const where = this.transformQuery(resolvedQuery, schemaDefinition);
    const findOptions = {
        where,
        take: limit,
        skip: skip,
        order: options?.sort,
    };
    if (options?.projection) {
        // Only apply a DB-level projection when it is an inclusive ("exclusive of
        // everything else") projection, as determined by isExclusiveProjection.
        const hasRootProj = this.isExclusiveProjection(options.projection, schemaDefinition);
        if (hasRootProj) {
            const dbProjection = {};
            for (const [key, value] of Object.entries(options.projection)) {
                if (key.includes('.') && schemaDefinition) {
                    const root = key.split('.')[0];
                    const attr = schemaDefinition.attributes[root];
                    const a = Array.isArray(attr) ? attr[0] : attr;
                    const isRelation = String(a?.type || '').toLowerCase() === 'objectid';
                    if (isRelation) {
                        // For relations, we only need the root field (the ID) in the DB query.
                        // Adding the dotted key (e.g. 'districts.name') causes a path collision in Mongo
                        // because 'districts' is just a list of ObjectIds in this collection.
                        dbProjection[root] = true;
                        continue;
                    }
                }
                dbProjection[key] = !!value;
            }
            findOptions.select = dbProjection;
            if (isMongo) {
                // Mongo projections use 1/0 and key the primary column as '_id'.
                const mongoProj = {};
                for (const [k, v] of Object.entries(dbProjection)) {
                    const tk = (k === 'id') ? '_id' : k;
                    mongoProj[tk] = v ? 1 : 0;
                }
                findOptions.projection = mongoProj;
            }
        }
    }
    let results;
    if (isMongo && findOptions.projection) {
        // TypeORM's find() ignores Mongo-style projections, so drop down to a
        // native cursor when one was built above.
        const mongoRepo = repo.manager.getMongoRepository(repo.metadata.name);
        const cursor = mongoRepo.createCursor(where);
        cursor.project(findOptions.projection);
        if (limit)
            cursor.limit(limit);
        if (skip)
            cursor.skip(skip);
        if (options?.sort)
            cursor.sort(options.sort);
        results = await cursor.toArray();
    }
    else {
        results = await repo.find(findOptions);
    }
    // Normalize ids to strings, then hydrate relations / inheritance in place.
    const mappedResults = results.map(r => this.prepareResult(r));
    if (schemaDefinition) {
        await this.hydrateRelations(mappedResults, schemaDefinition, options?.depth ?? 0, options?.projection);
    }
    return mappedResults;
}
|
|
312
|
+
prepareResult(item) {
|
|
313
|
+
if (!item)
|
|
314
|
+
return item;
|
|
315
|
+
const isMongo = this.dataSource.options.type === 'mongodb';
|
|
316
|
+
if (isMongo) {
|
|
317
|
+
// For MongoDB, ensure id is set from _id consistently
|
|
318
|
+
const rawId = item._id || item.id;
|
|
319
|
+
if (rawId) {
|
|
320
|
+
item.id = this.idToString(rawId);
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
else {
|
|
324
|
+
if (item.id != null)
|
|
325
|
+
item.id = String(item.id);
|
|
326
|
+
}
|
|
327
|
+
return item;
|
|
328
|
+
}
|
|
329
|
+
idToString(val) {
|
|
330
|
+
if (val == null)
|
|
331
|
+
return '';
|
|
332
|
+
if (typeof val === 'string')
|
|
333
|
+
return val;
|
|
334
|
+
if (typeof val === 'number')
|
|
335
|
+
return String(val);
|
|
336
|
+
// Handle MongoDB ObjectId or similar objects with toHexString
|
|
337
|
+
if (typeof val.toHexString === 'function')
|
|
338
|
+
return val.toHexString();
|
|
339
|
+
if (typeof val.toString === 'function' && val.constructor.name === 'ObjectId')
|
|
340
|
+
return val.toString();
|
|
341
|
+
if (Buffer.isBuffer(val))
|
|
342
|
+
return val.toString('hex');
|
|
343
|
+
if (val.id)
|
|
344
|
+
return this.idToString(val.id);
|
|
345
|
+
if (val._id)
|
|
346
|
+
return this.idToString(val._id);
|
|
347
|
+
return String(val);
|
|
348
|
+
}
|
|
349
|
+
convertToObjectId(val) {
|
|
350
|
+
if (val == null)
|
|
351
|
+
return val;
|
|
352
|
+
// If it's an object (potentially hydrated relation), extract the ID
|
|
353
|
+
if (typeof val === 'object' && !Buffer.isBuffer(val)) {
|
|
354
|
+
if (val instanceof mongodb_1.ObjectId || val.constructor?.name === 'ObjectId')
|
|
355
|
+
return val;
|
|
356
|
+
const extractedId = val.id || val._id;
|
|
357
|
+
if (extractedId)
|
|
358
|
+
return this.convertToObjectId(extractedId);
|
|
359
|
+
}
|
|
360
|
+
try {
|
|
361
|
+
if (typeof val === 'string' && val.length === 24 && /^[0-9a-fA-F]{24}$/.test(val)) {
|
|
362
|
+
return new mongodb_1.ObjectId(val);
|
|
363
|
+
}
|
|
364
|
+
return val;
|
|
365
|
+
}
|
|
366
|
+
catch {
|
|
367
|
+
return val;
|
|
368
|
+
}
|
|
369
|
+
}
|
|
370
|
+
async hydrateRelations(results, schema, depth = 0, projection) {
|
|
371
|
+
if (!results.length || depth > 2)
|
|
372
|
+
return;
|
|
373
|
+
// 1) Handle inheritance: if schema extends another, fetch base records
|
|
374
|
+
if (schema.extends) {
|
|
375
|
+
const baseLc = schema.extends.toLowerCase();
|
|
376
|
+
const baseRepo = this.repositories.get(baseLc);
|
|
377
|
+
if (baseRepo) {
|
|
378
|
+
const baseIds = new Set();
|
|
379
|
+
for (const item of results) {
|
|
380
|
+
if (item.baseId != null) {
|
|
381
|
+
baseIds.add(String(item.baseId));
|
|
382
|
+
}
|
|
383
|
+
}
|
|
384
|
+
if (baseIds.size > 0) {
|
|
385
|
+
const baseSchema = this.registeredSchemas[baseLc];
|
|
386
|
+
const baseAttrKeys = new Set(Object.keys(baseSchema.attributes || {}));
|
|
387
|
+
const hasRootProj = this.isExclusiveProjection(projection, baseSchema);
|
|
388
|
+
let baseProjection = undefined;
|
|
389
|
+
if (hasRootProj && projection) {
|
|
390
|
+
baseProjection = { id: 1 };
|
|
391
|
+
for (const [pk, pv] of Object.entries(projection)) {
|
|
392
|
+
if (baseAttrKeys.has(pk)) {
|
|
393
|
+
baseProjection[pk] = pv;
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
}
|
|
397
|
+
const baseDocs = await this.read(schema.extends, { id: { $in: Array.from(baseIds) } }, baseIds.size, 0, baseSchema, true, { projection: baseProjection });
|
|
398
|
+
const baseMap = new Map();
|
|
399
|
+
baseDocs.forEach(b => {
|
|
400
|
+
const key = String(b.id);
|
|
401
|
+
baseMap.set(key, b);
|
|
402
|
+
});
|
|
403
|
+
for (let i = 0; i < results.length; i++) {
|
|
404
|
+
const item = results[i];
|
|
405
|
+
if (item.baseId != null) {
|
|
406
|
+
const bid = String(item.baseId);
|
|
407
|
+
const baseData = baseMap.get(bid);
|
|
408
|
+
if (baseData) {
|
|
409
|
+
// Intelligent Merge: Prefer child data but skip nulls/empty for inherited or common fields
|
|
410
|
+
const result = { ...baseData };
|
|
411
|
+
for (const k of Object.keys(item)) {
|
|
412
|
+
const val = item[k];
|
|
413
|
+
const hasValue = val != null && (!Array.isArray(val) || val.length > 0);
|
|
414
|
+
if (hasValue) {
|
|
415
|
+
result[k] = val;
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
result.exId = String(baseData.id || baseData._id || '');
|
|
419
|
+
result.id = String(item.id || item._id || '');
|
|
420
|
+
result._id = result.id;
|
|
421
|
+
if (hasRootProj && projection) {
|
|
422
|
+
for (const k of Object.keys(result)) {
|
|
423
|
+
if (k !== 'id' && k !== 'baseId' && !projection[k]) {
|
|
424
|
+
if (baseAttrKeys.has(k)) {
|
|
425
|
+
delete result[k];
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
}
|
|
430
|
+
results[i] = result;
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
}
|
|
437
|
+
// 2) Handle standard ObjectId hydration & 3) json-group in parallel
|
|
438
|
+
const hydrationPromises = [];
|
|
439
|
+
// --- Standard Relations ---
|
|
440
|
+
const relFields = Object.entries(schema.attributes).filter(([key, attr]) => {
|
|
441
|
+
if (key === 'baseId')
|
|
442
|
+
return false;
|
|
443
|
+
const a = Array.isArray(attr) ? attr[0] : attr;
|
|
444
|
+
return String(a?.type).toLowerCase() === 'objectid' && a?.ref;
|
|
445
|
+
});
|
|
446
|
+
for (const [key, attr] of relFields) {
|
|
447
|
+
hydrationPromises.push((async () => {
|
|
448
|
+
const hasRootProj = this.isExclusiveProjection(projection, schema);
|
|
449
|
+
const isTargeted = projection && (projection[key] || Object.keys(projection).some(pk => pk.startsWith(`${key}.`)));
|
|
450
|
+
if (projection && hasRootProj && !isTargeted)
|
|
451
|
+
return;
|
|
452
|
+
const isArray = Array.isArray(attr);
|
|
453
|
+
const refModelName = (isArray ? attr[0] : attr).ref;
|
|
454
|
+
if (!refModelName)
|
|
455
|
+
return;
|
|
456
|
+
const refLc = refModelName.toLowerCase();
|
|
457
|
+
const refRepo = this.repositories.get(refLc);
|
|
458
|
+
if (!refRepo)
|
|
459
|
+
return;
|
|
460
|
+
const idsToFetch = new Set();
|
|
461
|
+
for (const item of results) {
|
|
462
|
+
const val = item[key];
|
|
463
|
+
if (val == null)
|
|
464
|
+
continue;
|
|
465
|
+
if (isArray && Array.isArray(val)) {
|
|
466
|
+
val.forEach(v => { if (v != null)
|
|
467
|
+
idsToFetch.add(v); });
|
|
468
|
+
}
|
|
469
|
+
else {
|
|
470
|
+
idsToFetch.add(val);
|
|
471
|
+
}
|
|
472
|
+
}
|
|
473
|
+
if (idsToFetch.size === 0)
|
|
474
|
+
return;
|
|
475
|
+
let subProjection = undefined;
|
|
476
|
+
if (projection) {
|
|
477
|
+
const prefix = `${key}.`;
|
|
478
|
+
for (const [pk, pv] of Object.entries(projection)) {
|
|
479
|
+
if (pk.startsWith(prefix)) {
|
|
480
|
+
if (!subProjection)
|
|
481
|
+
subProjection = {};
|
|
482
|
+
subProjection[pk.slice(prefix.length)] = pv;
|
|
483
|
+
}
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
const fetchedDocs = await this.read(refModelName, { id: { $in: Array.from(idsToFetch) } }, idsToFetch.size, 0, this.registeredSchemas[refModelName], true, {
|
|
487
|
+
depth: depth + 1,
|
|
488
|
+
projection: subProjection
|
|
489
|
+
});
|
|
490
|
+
const docMap = new Map();
|
|
491
|
+
fetchedDocs.forEach(d => {
|
|
492
|
+
const sid = this.idToString(d.id);
|
|
493
|
+
if (sid)
|
|
494
|
+
docMap.set(sid, d);
|
|
495
|
+
});
|
|
496
|
+
for (const item of results) {
|
|
497
|
+
const val = item[key];
|
|
498
|
+
if (val == null)
|
|
499
|
+
continue;
|
|
500
|
+
if (Array.isArray(val)) {
|
|
501
|
+
item[key] = val.map(v => docMap.get(this.idToString(v)) || v);
|
|
502
|
+
}
|
|
503
|
+
else {
|
|
504
|
+
item[key] = docMap.get(this.idToString(val)) || val;
|
|
505
|
+
}
|
|
506
|
+
}
|
|
507
|
+
})());
|
|
508
|
+
}
|
|
509
|
+
// --- JSON groups ---
|
|
510
|
+
const jsonGroupFields = Object.entries(schema.attributes).filter(([key, attr]) => {
|
|
511
|
+
const a = Array.isArray(attr) ? attr[0] : attr;
|
|
512
|
+
return a?.format === 'json-group' && a?.attributes;
|
|
513
|
+
});
|
|
514
|
+
for (const [key, attr] of jsonGroupFields) {
|
|
515
|
+
hydrationPromises.push((async () => {
|
|
516
|
+
const a = Array.isArray(attr) ? attr[0] : attr;
|
|
517
|
+
const nestedSchema = {
|
|
518
|
+
modelName: `${schema.modelName}_${key}`,
|
|
519
|
+
attributes: a.attributes,
|
|
520
|
+
allowedMethods: {},
|
|
521
|
+
showCount: false
|
|
522
|
+
};
|
|
523
|
+
for (const item of results) {
|
|
524
|
+
let val = item[key];
|
|
525
|
+
if (!val)
|
|
526
|
+
continue;
|
|
527
|
+
if (typeof val === 'string' && val.trim()) {
|
|
528
|
+
try {
|
|
529
|
+
val = JSON.parse(val);
|
|
530
|
+
}
|
|
531
|
+
catch {
|
|
532
|
+
continue;
|
|
533
|
+
}
|
|
534
|
+
}
|
|
535
|
+
if (val && (typeof val === 'object' || Array.isArray(val))) {
|
|
536
|
+
const nestedResults = Array.isArray(val) ? val : [val];
|
|
537
|
+
await this.hydrateRelations(nestedResults, nestedSchema, depth + 1);
|
|
538
|
+
item[key] = val;
|
|
539
|
+
}
|
|
540
|
+
}
|
|
541
|
+
})());
|
|
542
|
+
}
|
|
543
|
+
await Promise.all(hydrationPromises);
|
|
544
|
+
}
|
|
545
|
+
/**
 * Updates a single record by id and returns the freshly re-read row.
 *
 * @param {string} collection - Model name.
 * @param {string|number|object} targetId - Required record id; guarded against
 *   null/''/'undefined'/'null' to prevent accidental bulk updates.
 * @param {object} data - Partial update; `null` values become $unset on MongoDB.
 * @param {object} [schemaDefinition] - Used for payload coercion and the re-read.
 * @returns {Promise<object|undefined>} The updated row (first match of the re-read).
 * @throws {Error} when targetId is missing or the repository is unknown.
 */
async update(collection, targetId, data, schemaDefinition) {
    if (targetId == null || targetId === '' || targetId === 'undefined' || targetId === 'null') {
        throw new Error(`[NMAdapter] Cannot perform update on '${collection}': targetId is required and cannot be null/undefined. This guard prevents accidental bulk updates.`);
    }
    const repo = this.repositories.get(collection.toLowerCase());
    if (!repo)
        throw new Error(`Repository for ${collection} not found.`);
    const isMongo = this.dataSource.options.type === 'mongodb';
    const resolvedTargetId = isMongo ? this.convertToObjectId(targetId) : targetId;
    const coerced = this.coercePayloadForStorage(data, schemaDefinition);
    // Strip ID fields from data to prevent primary key conflicts
    const { id: _unusedId, _id: _unusedRawId, ...updateData } = coerced;
    if (isMongo) {
        // Using explicit getMongoRepository and updateOne prevents accidental bulk updates
        // that targetBy filter {} could cause in standard TypeORM .update()
        const mongoRepo = this.dataSource.getMongoRepository(repo.metadata.name);
        const setObj = {};
        const unsetObj = {};
        // We look at the original data for nulls because coercePayloadForStorage strips them
        for (const [k, v] of Object.entries(data)) {
            if (k === 'id' || k === '_id')
                continue;
            if (v === null) {
                // Explicit null means "remove the field" (keeps sparse indexes happy).
                unsetObj[k] = "";
            }
            else {
                // Use the coerced value if it exists (for ObjectIds)
                setObj[k] = updateData[k] !== undefined ? updateData[k] : v;
            }
        }
        const updatePayload = {};
        if (Object.keys(setObj).length > 0)
            updatePayload.$set = setObj;
        if (Object.keys(unsetObj).length > 0)
            updatePayload.$unset = unsetObj;
        // Skip the write entirely when there is nothing to set or unset.
        if (Object.keys(updatePayload).length > 0) {
            await mongoRepo.updateOne({ _id: resolvedTargetId }, updatePayload);
        }
    }
    else {
        await repo.update(resolvedTargetId, updateData);
    }
    // Re-read so the caller gets the normalized, hydrated post-update row.
    const updated = await this.read(collection, { id: resolvedTargetId }, 1, 0, schemaDefinition);
    return updated[0];
}
|
|
590
|
+
/**
 * Deletes a single record by id and returns the row as it was before deletion
 * (or null when no such row existed).
 *
 * @param {string} collection - Model name.
 * @param {string|number|object} targetId - Required record id; guarded against
 *   null/''/'undefined'/'null' to prevent accidental bulk deletions.
 * @param {object} [schemaDefinition] - Used for the pre-delete read.
 * @returns {Promise<object|null>} The deleted row, or null.
 * @throws {Error} when targetId is missing or the repository is unknown.
 */
async delete(collection, targetId, schemaDefinition) {
    if (targetId == null || targetId === '' || targetId === 'undefined' || targetId === 'null') {
        throw new Error(`[NMAdapter] Cannot perform delete on '${collection}': targetId is required and cannot be null/undefined. This guard prevents accidental bulk deletions.`);
    }
    const repo = this.repositories.get(collection.toLowerCase());
    if (!repo)
        throw new Error(`Repository for ${collection} not found.`);
    const isMongo = this.dataSource.options.type === 'mongodb';
    // Use a string ID for the initial read to avoid any ObjectId conversion issues in transformQuery
    const idStr = this.idToString(targetId);
    const existing = await this.read(collection, { id: idStr }, 1, 0, schemaDefinition);
    const id = isMongo ? this.convertToObjectId(targetId) : targetId;
    if (isMongo) {
        // findOneBy + remove (rather than delete) targets exactly one document.
        const item = await repo.findOneBy({ _id: id });
        if (item)
            await repo.remove(item);
    }
    else {
        await repo.delete(id);
    }
    return existing[0] || null;
}
|
|
612
|
+
async count(collection, query, schemaDefinition) {
|
|
613
|
+
const repo = this.getRepository(collection);
|
|
614
|
+
const resolvedQuery = await this.resolveNestedSearches(query);
|
|
615
|
+
const where = this.transformQuery(resolvedQuery, schemaDefinition);
|
|
616
|
+
let total;
|
|
617
|
+
if (this.dataSource.options.type === 'mongodb') {
|
|
618
|
+
total = await repo.countBy(where);
|
|
619
|
+
}
|
|
620
|
+
else {
|
|
621
|
+
total = await repo.count({ where });
|
|
622
|
+
}
|
|
623
|
+
return total;
|
|
624
|
+
}
|
|
625
|
+
/**
 * For each parent id in `ids`, finds one related record in `collection`
 * whose `field` points at that parent.
 *
 * @param {string} collection - Model to search in.
 * @param {string} field - Foreign-key field name.
 *   NOTE(review): on the SQL path `field` is interpolated into the query
 *   builder strings below — presumably it always comes from a trusted schema
 *   definition, never from user input; confirm at the call sites.
 * @param {Array} ids - Parent ids to look up.
 * @returns {Promise<Map<string, string>>} parentId (string) -> first related id (string).
 */
async findFirstRelatedIds(collection, field, ids) {
    const repo = this.getRepository(collection);
    const map = new Map();
    if (this.dataSource.options.type === 'mongodb') {
        // Match against BOTH ObjectId and string forms, since the field may have
        // been stored either way.
        const objectIds = ids.map(id => this.convertToObjectId(id));
        const stringIds = ids.map(id => this.idToString(id));
        const allIds = Array.from(new Set([...objectIds, ...stringIds]));
        const results = await repo.manager.getMongoRepository(repo.metadata.name).aggregate([
            { $match: { [field]: { $in: allIds } } },
            { $group: { _id: `$${field}`, firstId: { $first: '$_id' } } }
        ]).toArray();
        results.forEach(r => {
            const parentId = this.idToString(r._id);
            const firstId = this.idToString(r.firstId);
            if (parentId && firstId) {
                map.set(parentId, firstId);
            }
        });
    }
    else {
        // SQL: Use GROUP BY to find at least one related record per parent ID
        const qb = repo.createQueryBuilder('entity');
        const results = await qb
            .select([`entity.${field}`, 'MIN(entity.id) as firstId'])
            .where(`entity.${field} IN (:...ids)`, { ids })
            .groupBy(`entity.${field}`)
            .getRawMany();
        results.forEach(r => {
            // Handle various raw column name formats from different SQL drivers
            const parentId = r[`entity_${field}`] || r[field] || r[`entity_${field.toLowerCase()}`];
            const relId = r.firstId || r.firstid || r.FIRSTID;
            if (parentId != null && relId != null) {
                map.set(this.idToString(parentId), this.idToString(relId));
            }
        });
    }
    return map;
}
|
|
/**
 * Centralized Nested Search Resolver.
 * Recursively scans a query object for `$nestedSearch` operators and resolves each
 * one into a concrete ID filter (`{ $in: [ids] }`, `{ $nin: [ids] }`, or a single id)
 * by running a secondary lookup against the referenced model via `this.read()`.
 *
 * @param {object} query - Mongo-style query object; non-plain-object inputs are returned unchanged.
 * @returns {Promise<object>} A shallow copy of `query` with every `$nestedSearch` replaced.
 */
async resolveNestedSearches(query) {
    if (!(0, utils_1.isPlainObject)(query))
        return query;
    // Work on a shallow copy so the caller's query object is never mutated.
    const resolved = { ...query };
    for (const [key, value] of Object.entries(resolved)) {
        // Logical operators: resolve each sub-clause in parallel.
        if ((key === '$or' || key === '$and') && Array.isArray(value)) {
            resolved[key] = await Promise.all(value.map(v => this.resolveNestedSearches(v)));
            continue;
        }
        if ((0, utils_1.isPlainObject)(value)) {
            const opObj = value;
            if (opObj.$nestedSearch) {
                // operator defaults to '$in' (include matches); '$nin' inverts the result below.
                const { ref, searchBy, show, value: searchVal, operator = '$in' } = opObj.$nestedSearch;
                // Case-insensitive lookup of the referenced model's schema.
                const refSchema = Object.values(this.registeredSchemas).find(s => s.modelName.toLowerCase() === ref.toLowerCase());
                if (!refSchema) {
                    // Unknown ref: drop the clause entirely rather than failing the whole query.
                    Logger_1.default.warn('NMAdapter', `Schema not found for nested search ref: ${ref}`);
                    delete resolved[key];
                    continue;
                }
                // 1) Collect all unique fields to search in the referenced model
                const searchFields = new Set();
                if (show)
                    searchFields.add(show);
                if (searchBy) {
                    if (Array.isArray(searchBy)) {
                        searchBy.forEach((f) => searchFields.add(f));
                    }
                    else {
                        searchFields.add(searchBy);
                    }
                }
                if (searchFields.size === 0)
                    searchFields.add('name'); // fallback
                // 2) Build conditions for secondary search
                const conditions = [];
                const isArrayVal = Array.isArray(searchVal);
                for (const field of Array.from(searchFields)) {
                    if (isArrayVal) {
                        // Array values are matched exactly via $in (no regex anchoring).
                        conditions.push({ [field]: { $in: searchVal } });
                    }
                    else {
                        // Anchor the pattern at the start unless the caller already did.
                        const anchoredValue = String(searchVal).startsWith('^') ? searchVal : `^${searchVal}`;
                        if (field.includes('.')) {
                            // Support deep nested search in relations (e.g. "districts.name")
                            const [localKey, ...rest] = field.split('.');
                            const attr = refSchema.attributes[localKey];
                            // Array-typed attributes store their definition at index 0.
                            const a = Array.isArray(attr) ? attr[0] : attr;
                            if (a?.ref) {
                                // It's another relation! Delegate via another $nestedSearch
                                // (resolved when this.read() processes the secondary query).
                                conditions.push({
                                    [localKey]: {
                                        $nestedSearch: {
                                            ref: a.ref,
                                            show: rest.join('.'),
                                            value: anchoredValue
                                        }
                                    }
                                });
                            }
                            else {
                                // Just a nested object field
                                conditions.push({ [field]: { $regex: anchoredValue, $options: 'i' } });
                            }
                        }
                        else {
                            conditions.push({ [field]: { $regex: anchoredValue, $options: 'i' } });
                        }
                    }
                }
                // 2.1) ADDITION: If it looks like a 24-char hex string (ObjectId), also search by ID directly
                if (!isArrayVal && typeof searchVal === 'string' && /^[0-9a-fA-F]{24}$/.test(searchVal)) {
                    conditions.push({ id: searchVal });
                }
                // 3) Use this.read() to find matching IDs in the referenced collection
                // NOTE(review): hard cap of 2000 related docs — matches beyond that are silently ignored.
                const foundRelDocs = await this.read(ref, { $or: conditions }, 2000, 0, refSchema, true);
                const ids = foundRelDocs.map(d => String(d.id || d._id)).filter(Boolean);
                if (operator === '$nin') {
                    // Exclusion: an empty id list yields { $nin: [] }, which matches everything.
                    resolved[key] = { $nin: ids };
                }
                else if (ids.length === 1) {
                    resolved[key] = ids[0];
                }
                else if (ids.length > 1) {
                    resolved[key] = { $in: ids };
                }
                else {
                    // Unmatchable ID to ensure result is empty for non-$nin
                    resolved[key] = { $in: [this.convertToObjectId('000000000000000000000000')] };
                }
            }
            else {
                // Plain nested object without $nestedSearch: recurse to catch deeper occurrences.
                resolved[key] = await this.resolveNestedSearches(value);
            }
        }
    }
    return resolved;
}
|
|
764
|
+
transformQuery(query, schema) {
|
|
765
|
+
if (!(0, utils_1.isPlainObject)(query))
|
|
766
|
+
return query;
|
|
767
|
+
const isMongo = this.dataSource.options.type === 'mongodb';
|
|
768
|
+
const transformed = {};
|
|
769
|
+
for (const [key, value] of Object.entries(query)) {
|
|
770
|
+
// Map both 'id' and '_id' to '_id' for MongoDB queries
|
|
771
|
+
const targetKey = (isMongo && (key === 'id' || key === '_id')) ? '_id' : key;
|
|
772
|
+
const attr = schema?.attributes?.[key];
|
|
773
|
+
const a = Array.isArray(attr) ? attr[0] : attr;
|
|
774
|
+
// id and any field with type ObjectId are treated as ID fields
|
|
775
|
+
const isObjectIdField = key === 'id' || key === '_id' || (a && String(a.type).toLowerCase() === 'objectid');
|
|
776
|
+
if ((key === '$or' || key === '$and') && Array.isArray(value)) {
|
|
777
|
+
const subQueries = value.map(v => this.transformQuery(v, schema));
|
|
778
|
+
transformed[key] = subQueries;
|
|
779
|
+
continue;
|
|
780
|
+
}
|
|
781
|
+
if ((0, utils_1.isPlainObject)(value)) {
|
|
782
|
+
const keys = Object.keys(value);
|
|
783
|
+
if (keys.some(k => k.startsWith('$'))) {
|
|
784
|
+
transformed[targetKey] = this.mapOperator(value, isMongo && isObjectIdField);
|
|
785
|
+
}
|
|
786
|
+
else {
|
|
787
|
+
transformed[targetKey] = this.transformQuery(value, schema);
|
|
788
|
+
}
|
|
789
|
+
}
|
|
790
|
+
else {
|
|
791
|
+
if (isObjectIdField) {
|
|
792
|
+
if (isMongo) {
|
|
793
|
+
if (Array.isArray(value)) {
|
|
794
|
+
transformed[targetKey] = { $in: value.map(v => this.convertToObjectId(v)) };
|
|
795
|
+
}
|
|
796
|
+
else {
|
|
797
|
+
transformed[targetKey] = this.convertToObjectId(value);
|
|
798
|
+
}
|
|
799
|
+
}
|
|
800
|
+
else {
|
|
801
|
+
// Coerce to number for SQL if it is a string numeric
|
|
802
|
+
if (Array.isArray(value)) {
|
|
803
|
+
transformed[targetKey] = (0, typeorm_1.In)(value.map(v => {
|
|
804
|
+
const n = Number(v);
|
|
805
|
+
return !isNaN(n) ? n : v;
|
|
806
|
+
}));
|
|
807
|
+
}
|
|
808
|
+
else {
|
|
809
|
+
const num = Number(value);
|
|
810
|
+
transformed[targetKey] = !isNaN(num) ? num : value;
|
|
811
|
+
}
|
|
812
|
+
}
|
|
813
|
+
}
|
|
814
|
+
else {
|
|
815
|
+
// Coerce string numeric values for SQL, but keep as-is for Mongo (integrity for string fields)
|
|
816
|
+
if (!isMongo) {
|
|
817
|
+
const num = Number(value);
|
|
818
|
+
transformed[key] = !isNaN(num) ? num : value;
|
|
819
|
+
}
|
|
820
|
+
else {
|
|
821
|
+
transformed[key] = value;
|
|
822
|
+
}
|
|
823
|
+
}
|
|
824
|
+
}
|
|
825
|
+
}
|
|
826
|
+
// Special handling for top-level $or in SQL (TypeORM expects array for OR, but our $or needs to be flattened/merged if combined with other ANDs)
|
|
827
|
+
if (!isMongo && transformed.$or && Array.isArray(transformed.$or)) {
|
|
828
|
+
const orPart = transformed.$or;
|
|
829
|
+
const otherPart = { ...transformed };
|
|
830
|
+
delete otherPart.$or;
|
|
831
|
+
if (Object.keys(otherPart).length === 0) {
|
|
832
|
+
return orPart;
|
|
833
|
+
}
|
|
834
|
+
else {
|
|
835
|
+
// Merge other filters into each OR condition: (A AND (B OR C)) -> (A AND B) OR (A AND C)
|
|
836
|
+
return orPart.map((sub) => ({ ...otherPart, ...sub }));
|
|
837
|
+
}
|
|
838
|
+
}
|
|
839
|
+
return transformed;
|
|
840
|
+
}
|
|
841
|
+
mapOperator(opObj, isObjectIdField = false) {
|
|
842
|
+
const isMongo = this.dataSource.options.type === 'mongodb';
|
|
843
|
+
if (opObj.$regex) {
|
|
844
|
+
const val = String(opObj.$regex);
|
|
845
|
+
const isCaseInsensitive = opObj.$options?.includes('i');
|
|
846
|
+
if (isMongo) {
|
|
847
|
+
return { $regex: val, $options: opObj.$options || '' };
|
|
848
|
+
}
|
|
849
|
+
let pattern = val;
|
|
850
|
+
if (pattern.startsWith('^')) {
|
|
851
|
+
pattern = pattern.slice(1);
|
|
852
|
+
}
|
|
853
|
+
else {
|
|
854
|
+
pattern = `%${pattern}`;
|
|
855
|
+
}
|
|
856
|
+
if (pattern.endsWith('$')) {
|
|
857
|
+
pattern = pattern.slice(0, -1);
|
|
858
|
+
}
|
|
859
|
+
else {
|
|
860
|
+
pattern = `${pattern}%`;
|
|
861
|
+
}
|
|
862
|
+
return isCaseInsensitive ? (0, typeorm_1.ILike)(pattern) : (0, typeorm_1.Like)(pattern);
|
|
863
|
+
}
|
|
864
|
+
if (opObj.$gt !== undefined) {
|
|
865
|
+
const val = isObjectIdField ? this.convertToObjectId(opObj.$gt) : opObj.$gt;
|
|
866
|
+
return isMongo ? { $gt: val } : (0, typeorm_1.MoreThan)(val);
|
|
867
|
+
}
|
|
868
|
+
if (opObj.$gte !== undefined) {
|
|
869
|
+
const val = isObjectIdField ? this.convertToObjectId(opObj.$gte) : opObj.$gte;
|
|
870
|
+
return isMongo ? { $gte: val } : (0, typeorm_1.MoreThanOrEqual)(val);
|
|
871
|
+
}
|
|
872
|
+
if (opObj.$lt !== undefined) {
|
|
873
|
+
const val = isObjectIdField ? this.convertToObjectId(opObj.$lt) : opObj.$lt;
|
|
874
|
+
return isMongo ? { $lt: val } : (0, typeorm_1.LessThan)(val);
|
|
875
|
+
}
|
|
876
|
+
if (opObj.$lte !== undefined) {
|
|
877
|
+
const val = isObjectIdField ? this.convertToObjectId(opObj.$lte) : opObj.$lte;
|
|
878
|
+
return isMongo ? { $lte: val } : (0, typeorm_1.LessThanOrEqual)(val);
|
|
879
|
+
}
|
|
880
|
+
if (opObj.$in !== undefined) {
|
|
881
|
+
const vals = Array.isArray(opObj.$in) ? opObj.$in : [opObj.$in];
|
|
882
|
+
const mappedVals = isObjectIdField ? vals.map((v) => this.convertToObjectId(v)) : vals;
|
|
883
|
+
return isMongo ? { $in: mappedVals } : (0, typeorm_1.In)(mappedVals);
|
|
884
|
+
}
|
|
885
|
+
if (opObj.$nin !== undefined) {
|
|
886
|
+
const vals = Array.isArray(opObj.$nin) ? opObj.$nin : [opObj.$nin];
|
|
887
|
+
const mappedVals = isObjectIdField ? vals.map((v) => this.convertToObjectId(v)) : vals;
|
|
888
|
+
return isMongo ? { $nin: mappedVals } : (0, typeorm_1.Not)((0, typeorm_1.In)(mappedVals));
|
|
889
|
+
}
|
|
890
|
+
if (opObj.$ne !== undefined) {
|
|
891
|
+
const val = isObjectIdField ? this.convertToObjectId(opObj.$ne) : opObj.$ne;
|
|
892
|
+
return isMongo ? { $ne: val } : (0, typeorm_1.Not)((0, typeorm_1.Equal)(val));
|
|
893
|
+
}
|
|
894
|
+
if (opObj.$eq !== undefined) {
|
|
895
|
+
const val = isObjectIdField ? this.convertToObjectId(opObj.$eq) : opObj.$eq;
|
|
896
|
+
return isMongo ? val : (0, typeorm_1.Equal)(val);
|
|
897
|
+
}
|
|
898
|
+
return opObj;
|
|
899
|
+
}
|
|
900
|
+
async cleanupUnusedFields(schemas) {
|
|
901
|
+
const report = {};
|
|
902
|
+
const isMongo = this.dataSource.options.type === 'mongodb';
|
|
903
|
+
if (isMongo) {
|
|
904
|
+
for (const def of Object.values(schemas)) {
|
|
905
|
+
const modelName = def.modelName;
|
|
906
|
+
const repo = this.dataSource.getMongoRepository(modelName);
|
|
907
|
+
// For MongoDB, we use the aggregation strategy to find all fields currently in the collection
|
|
908
|
+
const keysPipeline = [
|
|
909
|
+
{ $project: { data: { $objectToArray: '$$ROOT' } } },
|
|
910
|
+
{ $unwind: '$data' },
|
|
911
|
+
{ $group: { _id: null, allKeys: { $addToSet: '$data.k' } } },
|
|
912
|
+
];
|
|
913
|
+
const keysResult = await repo.aggregate(keysPipeline).toArray();
|
|
914
|
+
const allKeys = keysResult[0]?.allKeys || [];
|
|
915
|
+
const schemaKeys = new Set([
|
|
916
|
+
...Object.keys(def.attributes),
|
|
917
|
+
'_id',
|
|
918
|
+
'__v',
|
|
919
|
+
'createdAt',
|
|
920
|
+
'updatedAt',
|
|
921
|
+
'id',
|
|
922
|
+
]);
|
|
923
|
+
const unusedKeys = allKeys.filter((k) => !schemaKeys.has(k));
|
|
924
|
+
if (unusedKeys.length > 0) {
|
|
925
|
+
const unsetObj = {};
|
|
926
|
+
for (const k of unusedKeys) {
|
|
927
|
+
unsetObj[k] = '';
|
|
928
|
+
}
|
|
929
|
+
await repo.updateMany({}, { $unset: unsetObj });
|
|
930
|
+
report[modelName] = unusedKeys;
|
|
931
|
+
Logger_1.default.info('NMAdapter', `Cleaned up ${unusedKeys.length} fields in Mongo collection ${modelName}: ${unusedKeys.join(', ')}`);
|
|
932
|
+
}
|
|
933
|
+
}
|
|
934
|
+
return report;
|
|
935
|
+
}
|
|
936
|
+
// Default SQL Logic
|
|
937
|
+
const queryRunner = this.dataSource.createQueryRunner();
|
|
938
|
+
await queryRunner.connect();
|
|
939
|
+
try {
|
|
940
|
+
for (const def of Object.values(schemas)) {
|
|
941
|
+
const modelName = def.modelName;
|
|
942
|
+
const tableName = def.collection || modelName.toLowerCase();
|
|
943
|
+
const table = await queryRunner.getTable(tableName);
|
|
944
|
+
if (!table)
|
|
945
|
+
continue;
|
|
946
|
+
const schemaKeys = new Set([
|
|
947
|
+
...Object.keys(def.attributes),
|
|
948
|
+
'id',
|
|
949
|
+
'_id',
|
|
950
|
+
'createdAt',
|
|
951
|
+
'updatedAt',
|
|
952
|
+
]);
|
|
953
|
+
const columnsToDrop = table.columns
|
|
954
|
+
.filter((col) => !schemaKeys.has(col.name))
|
|
955
|
+
.map((col) => col.name);
|
|
956
|
+
if (columnsToDrop.length > 0) {
|
|
957
|
+
Logger_1.default.info('NMAdapter', `Dropping ${columnsToDrop.length} columns in ${tableName}: ${columnsToDrop.join(', ')}`);
|
|
958
|
+
for (const colName of columnsToDrop) {
|
|
959
|
+
try {
|
|
960
|
+
await queryRunner.dropColumn(tableName, colName);
|
|
961
|
+
}
|
|
962
|
+
catch (err) {
|
|
963
|
+
Logger_1.default.warn('NMAdapter', `Failed to drop column ${colName} in ${tableName}: ${err.message}`);
|
|
964
|
+
}
|
|
965
|
+
}
|
|
966
|
+
report[modelName] = columnsToDrop;
|
|
967
|
+
}
|
|
968
|
+
}
|
|
969
|
+
}
|
|
970
|
+
finally {
|
|
971
|
+
await queryRunner.release();
|
|
972
|
+
}
|
|
973
|
+
return report;
|
|
974
|
+
}
|
|
975
|
+
isExclusiveProjection(projection, schema) {
|
|
976
|
+
return (0, utils_2.isExclusiveProjection)(projection, schema);
|
|
977
|
+
}
|
|
978
|
+
}
|
|
979
|
+
exports.NMAdapter = NMAdapter;
|