@intranefr/superbackend 1.4.3 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +6 -1
- package/README.md +5 -5
- package/index.js +23 -5
- package/package.json +5 -2
- package/public/sdk/ui-components.iife.js +191 -0
- package/sdk/error-tracking/browser/package.json +4 -3
- package/sdk/error-tracking/browser/src/embed.js +29 -0
- package/sdk/ui-components/browser/src/index.js +228 -0
- package/src/controllers/admin.controller.js +139 -1
- package/src/controllers/adminHeadless.controller.js +82 -0
- package/src/controllers/adminMigration.controller.js +5 -1
- package/src/controllers/adminScripts.controller.js +229 -0
- package/src/controllers/adminTerminals.controller.js +39 -0
- package/src/controllers/adminUiComponents.controller.js +315 -0
- package/src/controllers/adminUiComponentsAi.controller.js +34 -0
- package/src/controllers/orgAdmin.controller.js +286 -0
- package/src/controllers/uiComponentsPublic.controller.js +118 -0
- package/src/middleware/auth.js +7 -0
- package/src/middleware.js +119 -0
- package/src/models/HeadlessModelDefinition.js +10 -0
- package/src/models/ScriptDefinition.js +42 -0
- package/src/models/ScriptRun.js +22 -0
- package/src/models/UiComponent.js +29 -0
- package/src/models/UiComponentProject.js +26 -0
- package/src/models/UiComponentProjectComponent.js +18 -0
- package/src/routes/admin.routes.js +2 -0
- package/src/routes/adminHeadless.routes.js +6 -0
- package/src/routes/adminScripts.routes.js +21 -0
- package/src/routes/adminTerminals.routes.js +13 -0
- package/src/routes/adminUiComponents.routes.js +29 -0
- package/src/routes/llmUi.routes.js +26 -0
- package/src/routes/orgAdmin.routes.js +5 -0
- package/src/routes/uiComponentsPublic.routes.js +9 -0
- package/src/services/consoleOverride.service.js +291 -0
- package/src/services/email.service.js +17 -1
- package/src/services/headlessExternalModels.service.js +292 -0
- package/src/services/headlessModels.service.js +26 -6
- package/src/services/scriptsRunner.service.js +259 -0
- package/src/services/terminals.service.js +152 -0
- package/src/services/terminalsWs.service.js +100 -0
- package/src/services/uiComponentsAi.service.js +312 -0
- package/src/services/uiComponentsCrypto.service.js +39 -0
- package/src/services/webhook.service.js +2 -2
- package/src/services/workflow.service.js +1 -1
- package/src/utils/encryption.js +5 -3
- package/views/admin-coolify-deploy.ejs +1 -1
- package/views/admin-dashboard-home.ejs +1 -1
- package/views/admin-dashboard.ejs +1 -1
- package/views/admin-errors.ejs +2 -2
- package/views/admin-global-settings.ejs +3 -3
- package/views/admin-headless.ejs +294 -24
- package/views/admin-json-configs.ejs +8 -1
- package/views/admin-llm.ejs +2 -2
- package/views/admin-organizations.ejs +365 -9
- package/views/admin-scripts.ejs +497 -0
- package/views/admin-seo-config.ejs +1 -1
- package/views/admin-terminals.ejs +328 -0
- package/views/admin-test.ejs +3 -3
- package/views/admin-ui-components.ejs +709 -0
- package/views/admin-users.ejs +440 -4
- package/views/admin-webhooks.ejs +1 -1
- package/views/admin-workflows.ejs +1 -1
- package/views/partials/admin-assets-script.ejs +3 -3
- package/views/partials/dashboard/nav-items.ejs +3 -0
- package/views/partials/dashboard/palette.ejs +1 -1
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
const fs = require('fs');
const path = require('path');

// Module-level state for the console-override lifecycle.
let originalConsole = null;   // snapshot of `console` taken inside init()
let logFileStream = null;     // append-mode write stream to the log file
let isActive = false;         // whether the override is currently installed
let isWriting = false;        // re-entrancy guard while writing to the file
let memoryInterval = null;    // periodic timer that trims the in-memory buffer
let logLines = [];            // in-memory copy of recently logged lines
const MAX_LOG_LINES = 2000;   // cap for logLines and the rewritten log file

// Store the truly original console methods at module load time
// (captured before anything else can patch `console`, so these are always
// safe to call without risking recursion into the override).
const TRULY_ORIGINAL_CONSOLE = {
  log: console.log,
  error: console.error,
  warn: console.warn,
  info: console.info,
  debug: console.debug
};
|
|
20
|
+
|
|
21
|
+
/**
 * Console Override Service
 * Provides dual logging to stdout and file in non-production environments.
 *
 * Lifecycle: init() patches console.log/error/warn/info/debug so every call
 * is forwarded to the truly-original console AND appended to a log file; a
 * 1-minute housekeeping interval caps both the in-memory buffer and the file
 * at MAX_LOG_LINES; restore() undoes everything.
 */
const consoleOverride = {
  /**
   * Initialize console override.
   * No-op when already active, when NODE_ENV is 'production' (unless force
   * enabled), or when CONSOLE_OVERRIDE_ENABLED === 'false'.
   *
   * Note: the actual file setup runs ~10ms later on a timer, so isActive()
   * only becomes true once that deferred setup completes.
   *
   * @param {Object} options - Configuration options
   * @param {string} options.logFile - Log file path (default: 'stdout.log')
   * @param {boolean} options.forceEnable - Force enable regardless of NODE_ENV
   */
  init(options = {}) {
    if (isActive) {
      return; // Already initialized
    }

    const nodeEnv = process.env.NODE_ENV || 'development';
    const forceEnabled = options.forceEnable || process.env.CONSOLE_OVERRIDE_ENABLED === 'true';
    const forceDisabled = process.env.CONSOLE_OVERRIDE_ENABLED === 'false';

    // Skip if production and not force enabled, or if force disabled
    if ((nodeEnv === 'production' && !forceEnabled) || forceDisabled) {
      return;
    }

    const logFile = options.logFile || process.env.CONSOLE_LOG_FILE || 'stdout.log';
    const logPath = path.resolve(process.cwd(), logFile);

    try {
      // Close any existing stream before truncating
      if (logFileStream && !logFileStream.destroyed) {
        logFileStream.end();
        logFileStream = null;
      }

      // Wait a bit for stream to fully close, then truncate.
      setTimeout(() => {
        // BUG FIX: errors thrown inside this deferred callback escaped the
        // outer try/catch (it has already returned by the time the timer
        // fires), so the callback needs its own error handling.
        try {
          // Truncate log file on initialization (start with empty file)
          if (fs.existsSync(logPath)) {
            fs.truncateSync(logPath, 0);
          }

          // Create file stream for appending with error handling
          logFileStream = fs.createWriteStream(logPath, { flags: 'a' });

          // Handle stream errors via the truly original console — it exists
          // unconditionally and can never recurse into this override.
          logFileStream.on('error', (error) => {
            TRULY_ORIGINAL_CONSOLE.error('❌ Log stream error:', error.message);
            isActive = false;
          });

          // Store original console
          originalConsole = { ...console };

          // Override console methods
          this._overrideConsoleMethods();

          // Start memory management interval (1 minute)
          this._startMemoryManagement();

          isActive = true;

          // Log initialization using original console to avoid recursion
          const initMsg = `📝 Console override initialized - logging to ${logPath}`;
          originalConsole.log(initMsg);
          this._writeToFile(initMsg);
        } catch (error) {
          TRULY_ORIGINAL_CONSOLE.error('❌ Console override failed:', error.message);
          isActive = false;
        }
      }, 10);

    } catch (error) {
      // Fallback to console-only logging
      originalConsole = originalConsole || console;
      originalConsole.error('❌ Console override failed:', error.message);
      isActive = false;
    }
  },

  /**
   * Override individual console methods so each call is mirrored to the file.
   * @private
   */
  _overrideConsoleMethods() {
    const methods = ['log', 'error', 'warn', 'info', 'debug'];

    methods.forEach(method => {
      console[method] = (...args) => {
        // Call the truly original console method
        TRULY_ORIGINAL_CONSOLE[method](...args);

        // Write to file if stream is available and not already writing
        if (logFileStream && !logFileStream.destroyed && !isWriting) {
          this._writeToFile(args);
        }
      };
    });
  },

  /**
   * Write message to file and append it to the in-memory buffer.
   * Objects are pretty-printed via JSON.stringify; other values are
   * stringified with String().
   * @param {string|Array} message - Message (or console args array) to write
   * @private
   */
  _writeToFile(message) {
    if (!logFileStream || logFileStream.destroyed) {
      return;
    }

    isWriting = true;
    try {
      const messageStr = Array.isArray(message)
        ? message.map(arg => typeof arg === 'object' ? JSON.stringify(arg, null, 2) : String(arg)).join(' ')
        : String(message);

      // Add to memory buffer
      logLines.push(messageStr);

      // Only write if stream is available and not in the middle of a rewrite
      if (logFileStream && !logFileStream.destroyed) {
        logFileStream.write(messageStr + '\n');
      }
    } catch (writeError) {
      // BUG FIX: the old guard `!isWriting` was always false at this point
      // (isWriting is set to true just above), so write errors were silently
      // swallowed. TRULY_ORIGINAL_CONSOLE cannot recurse into the override,
      // so it is safe to report unconditionally.
      TRULY_ORIGINAL_CONSOLE.error('❌ Log write error:', writeError.message);
    } finally {
      isWriting = false;
    }
  },

  /**
   * Start the memory-management interval (fires every minute).
   * @private
   */
  _startMemoryManagement() {
    // Clear any existing interval
    if (memoryInterval) {
      clearInterval(memoryInterval);
    }

    // Set up 1-minute interval to manage log lines
    memoryInterval = setInterval(() => {
      this._manageLogMemory();
    }, 60000); // 1 minute

    // Don't let the housekeeping timer alone keep the process alive.
    if (typeof memoryInterval.unref === 'function') {
      memoryInterval.unref();
    }
  },

  /**
   * Manage log memory by keeping only the last MAX_LOG_LINES lines,
   * rewriting the file when the buffer was trimmed.
   * @private
   */
  _manageLogMemory() {
    if (logLines.length > MAX_LOG_LINES) {
      // Keep only the last MAX_LOG_LINES
      const excessLines = logLines.length - MAX_LOG_LINES;
      logLines = logLines.slice(excessLines);

      // Rewrite the log file with only the recent lines
      this._rewriteLogFile();
    }
  },

  /**
   * Rewrite the log file with the current memory buffer, then reopen the
   * append stream.
   * @private
   */
  _rewriteLogFile() {
    if (!logFileStream || !logFileStream.path) {
      return;
    }

    try {
      const logPath = logFileStream.path;

      // BUG FIX: end the old stream before replacing it; the previous
      // version dropped the reference without closing, leaking one open
      // file descriptor on every rewrite.
      if (!logFileStream.destroyed) {
        logFileStream.end();
      }

      // Write recent lines to file (this truncates the file)
      const fileContent = logLines.join('\n') + '\n';
      fs.writeFileSync(logPath, fileContent, { flag: 'w' });

      // Reopen stream for appending
      logFileStream = fs.createWriteStream(logPath, { flags: 'a' });

      // Reattach error handler
      logFileStream.on('error', (error) => {
        TRULY_ORIGINAL_CONSOLE.error('❌ Log stream error:', error.message);
      });

    } catch (error) {
      TRULY_ORIGINAL_CONSOLE.error('❌ Log file rewrite error:', error.message);
    }
  },

  /**
   * Restore original console methods, stop the housekeeping interval,
   * close the file stream and reset all module state. Safe to call twice.
   */
  restore() {
    if (!isActive && !originalConsole) {
      return;
    }

    // Clear memory management interval
    if (memoryInterval) {
      clearInterval(memoryInterval);
      memoryInterval = null;
    }

    // Restore original console methods using the truly original ones
    ['log', 'error', 'warn', 'info', 'debug'].forEach(method => {
      console[method] = TRULY_ORIGINAL_CONSOLE[method];
    });

    // Close file stream
    if (logFileStream && !logFileStream.destroyed) {
      logFileStream.end();
    }

    // Reset state
    isActive = false;
    originalConsole = null;
    logFileStream = null;
    logLines = [];
    isWriting = false;
  },

  /**
   * Check if override is active.
   * @returns {boolean}
   */
  isActive() {
    return isActive;
  },

  /**
   * Get current memory lines count (for testing/debugging).
   * @returns {number}
   */
  getMemoryLinesCount() {
    return logLines.length;
  },

  /**
   * Get current log path, or null when no stream is open.
   * @returns {string|null}
   */
  getLogPath() {
    if (!logFileStream) {
      return null;
    }
    return logFileStream.path;
  }
};
|
|
275
|
+
|
|
276
|
+
// Make sure the override is removed and the log stream closed when the
// process shuts down, whether it exits normally or via SIGINT/SIGTERM.
process.on('exit', () => {
  consoleOverride.restore();
});

for (const signal of ['SIGINT', 'SIGTERM']) {
  process.on(signal, () => {
    consoleOverride.restore();
    process.exit(0);
  });
}

module.exports = consoleOverride;
|
|
@@ -74,7 +74,7 @@ const sendEmail = async ({
|
|
|
74
74
|
from ||
|
|
75
75
|
(await getSetting(
|
|
76
76
|
"EMAIL_FROM",
|
|
77
|
-
process.env.EMAIL_FROM || "
|
|
77
|
+
process.env.EMAIL_FROM || "SuperBackend <no-reply@resend.dev>",
|
|
78
78
|
));
|
|
79
79
|
const toArray = Array.isArray(to) ? to : [to];
|
|
80
80
|
|
|
@@ -338,6 +338,20 @@ const replaceTemplateVariables = (template, variables) => {
|
|
|
338
338
|
return result;
|
|
339
339
|
};
|
|
340
340
|
|
|
341
|
+
// Test helper: drop the cached settings and the memoized Resend client so
// the next send re-reads configuration.
function clearCache() {
  resendClient = null;
  settingsCache.clear();
}
|
|
346
|
+
|
|
347
|
+
// Test helper: drop the cached settings and client, then rebuild the Resend
// client from the current environment variables.
async function clearCacheAndReinitialize() {
  resendClient = null;
  settingsCache.clear();
  // Reinitialize with current env vars
  await initResend();
}
|
|
354
|
+
|
|
341
355
|
module.exports = {
|
|
342
356
|
sendEmail,
|
|
343
357
|
sendPasswordResetEmail,
|
|
@@ -348,4 +362,6 @@ module.exports = {
|
|
|
348
362
|
sendSubscriptionEmail,
|
|
349
363
|
sendWaitingListEmail,
|
|
350
364
|
replaceTemplateVariables,
|
|
365
|
+
clearCache,
|
|
366
|
+
clearCacheAndReinitialize,
|
|
351
367
|
};
|
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
const mongoose = require('mongoose');
|
|
2
|
+
|
|
3
|
+
const HeadlessModelDefinition = require('../models/HeadlessModelDefinition');
|
|
4
|
+
const { normalizeCodeIdentifier, computeSchemaHash } = require('./headlessModels.service');
|
|
5
|
+
|
|
6
|
+
/**
 * Check whether a sampled value is a BSON ObjectId.
 *
 * @param {*} value - Candidate value (anything pulled from a raw document).
 * @returns {boolean} true when the value is an ObjectId instance.
 */
function isObjectId(value) {
  if (!value) return false;
  // Duck-type check first. BUG FIX: bson <= 4 tags instances with
  // `_bsontype === 'ObjectID'`, but bson >= 5 (mongoose 7+) uses the casing
  // 'ObjectId' — the old code only matched the legacy spelling, so ObjectIds
  // produced by a different driver version were misclassified.
  if (typeof value === 'object') {
    const tag = value._bsontype;
    if (tag === 'ObjectID' || tag === 'ObjectId') return true;
  }
  if (value instanceof mongoose.Types.ObjectId) return true;
  return false;
}
|
|
12
|
+
|
|
13
|
+
/**
 * Map a sampled document value onto a coarse field-type label.
 *
 * @param {*} value - Raw value read from a sampled document.
 * @returns {string|null} One of 'objectid'|'date'|'array'|'string'|'number'|
 *   'boolean'|'object', or null for null/undefined/unclassifiable values.
 */
function detectFieldType(value) {
  if (value === null || value === undefined) return null;
  // Special cases must run before the typeof switch: ObjectId, Date and
  // Array are all `typeof === 'object'`.
  if (isObjectId(value)) return 'objectid';
  if (value instanceof Date) return 'date';
  if (Array.isArray(value)) return 'array';
  switch (typeof value) {
    case 'string':
      return 'string';
    case 'number':
      return 'number';
    case 'boolean':
      return 'boolean';
    case 'object':
      return 'object';
    default:
      return null;
  }
}
|
|
25
|
+
|
|
26
|
+
/**
 * Collapse the set of types seen for a field into a single best type.
 * Precedence for mixed samples: array > object > string (the loosest
 * representation that can still hold every observed value).
 *
 * @param {Set<string>|Iterable<string>|null} seenTypes - Type labels observed.
 * @returns {{type: string, warning: string|null}} Chosen type plus a warning
 *   when the samples were empty or heterogeneous.
 */
function pickBestType(seenTypes) {
  const types = new Set(Array.from(seenTypes || []).filter(Boolean));

  if (types.size === 0) {
    return { type: 'object', warning: 'No non-null sample values' };
  }

  const ordered = Array.from(types);
  if (types.size === 1) {
    return { type: ordered[0], warning: null };
  }

  const warning = `Mixed types: ${ordered.join('|')}`;
  if (types.has('array')) {
    return { type: 'array', warning };
  }
  if (types.has('object')) {
    return { type: 'object', warning };
  }
  return { type: 'string', warning };
}
|
|
34
|
+
|
|
35
|
+
/**
 * Guess a reference target from a foreign-key-looking field name.
 * A field ending in "id" (e.g. "userId") is matched against known external
 * model collections by trying the stem and its naive plural forms.
 *
 * @param {string} fieldName - Field name from a sampled document.
 * @param {Map<string, string>} externalModelsByCollection - Lowercased
 *   collection name -> external model codeIdentifier.
 * @returns {string|null} Matching model codeIdentifier, or null.
 */
function tryInferRefFromFieldName(fieldName, externalModelsByCollection) {
  const name = String(fieldName || '').trim();
  if (!name) return null;

  const lower = name.toLowerCase();
  // "id" by itself is the document's own key, not a reference.
  if (lower === 'id' || !lower.endsWith('id')) return null;

  // Strip the trailing "id" and probe singular plus naive plural forms.
  const stem = name.slice(0, -2);
  for (const candidate of [stem, `${stem}s`, `${stem}es`]) {
    const hit = externalModelsByCollection.get(String(candidate).toLowerCase());
    if (hit) return hit;
  }
  return null;
}
|
|
50
|
+
|
|
51
|
+
/**
 * Normalize a raw index description from the Mongo driver into the
 * `{ fields, options }` shape stored on model definitions. Driver-internal
 * keys (`key`, `v`, `ns`) are dropped from the options.
 *
 * @param {Object|null} idx - Raw index document from `collection.indexes()`.
 * @returns {{fields: Object, options: Object}|null} Normalized index, or
 *   null when the input has no usable key spec.
 */
function normalizeIndexFromMongo(idx) {
  if (!idx || typeof idx !== 'object') return null;

  const { key: fields, v, ns, ...options } = idx;
  if (!fields || typeof fields !== 'object') return null;

  return { fields, options };
}
|
|
63
|
+
|
|
64
|
+
/**
 * List collections of the connected Mongo database, sorted by name.
 *
 * @param {Object} [opts]
 * @param {string} [opts.q] - Optional case-insensitive name filter (regex).
 * @param {boolean} [opts.includeSystem] - Include `system.*` collections.
 * @returns {Promise<Array<{name: string, type: string}>>}
 * @throws {Error} code 'VALIDATION' when the Mongo connection is not ready.
 */
async function listExternalCollections({ q, includeSystem } = {}) {
  if (!mongoose.connection || !mongoose.connection.db) {
    const err = new Error('Mongo connection not ready');
    err.code = 'VALIDATION';
    throw err;
  }

  const filter = {};
  if (q) filter.name = { $regex: String(q), $options: 'i' };

  const cursor = await mongoose.connection.db.listCollections(filter, { nameOnly: true });
  const rawCollections = await cursor.toArray();

  const result = rawCollections
    .map((c) => String(c && c.name ? c.name : '').trim())
    .filter((name) => name !== '' && (includeSystem || !name.startsWith('system.')))
    .map((name) => ({ name, type: 'collection' }));

  result.sort((a, b) => a.name.localeCompare(b.name));
  return result;
}
|
|
88
|
+
|
|
89
|
+
/**
 * Infer a headless-model field schema from a sample of documents in an
 * arbitrary Mongo collection.
 *
 * Samples up to `sampleSize` documents (clamped to [1, 1000]), tallies the
 * types seen per field, collapses them via pickBestType(), attempts to turn
 * ObjectId fields into refs against known external models, and reads the
 * collection's indexes.
 *
 * @param {Object} [opts]
 * @param {string} opts.collectionName - Source collection to sample (required).
 * @param {number} [opts.sampleSize=200] - Desired sample size (clamped 1-1000).
 * @returns {Promise<{collectionName: string, fields: Array, indexes: Array,
 *   warnings: string[], stats: Object, fieldsHash: string}>}
 * @throws {Error} code 'VALIDATION' when the connection is not ready or
 *   collectionName is missing.
 */
async function inferExternalModelFromCollection({ collectionName, sampleSize = 200 } = {}) {
  if (!mongoose.connection || !mongoose.connection.db) {
    const err = new Error('Mongo connection not ready');
    err.code = 'VALIDATION';
    throw err;
  }

  const collName = String(collectionName || '').trim();
  if (!collName) {
    const err = new Error('collectionName is required');
    err.code = 'VALIDATION';
    throw err;
  }

  // Clamp the sample size to a sane range; 0/NaN fall back to 200.
  const N = Math.max(1, Math.min(Number(sampleSize) || 200, 1000));
  const coll = mongoose.connection.db.collection(collName);

  let docs;
  try {
    // Prefer a random sample; fall back to a plain limited find when the
    // server rejects the aggregation (e.g. unsupported $sample).
    docs = await coll.aggregate([{ $sample: { size: N } }]).toArray();
  } catch {
    docs = await coll.find({}).limit(N).toArray();
  }

  // Known external models, used to resolve ObjectId fields into refs.
  const externalModels = await HeadlessModelDefinition.find({ isActive: true, sourceType: 'external' })
    .select({ codeIdentifier: 1, sourceCollectionName: 1 })
    .lean();

  // Lowercased source collection name -> model codeIdentifier.
  const externalModelsByCollection = new Map();
  for (const m of externalModels || []) {
    const cn = String(m?.sourceCollectionName || '').trim();
    const code = String(m?.codeIdentifier || '').trim();
    if (cn && code) externalModelsByCollection.set(cn.toLowerCase(), code);
  }

  // Per-field accumulator: field name -> { seenTypes, objectIdCount, nonNullCount }.
  const perField = new Map();
  const warnings = [];

  for (const doc of docs || []) {
    if (!doc || typeof doc !== 'object') continue;
    for (const [k, v] of Object.entries(doc)) {
      // _id is implicit on every Mongo document; never model it as a field.
      if (!k || k === '_id') continue;
      const type = detectFieldType(v);
      let stats = perField.get(k);
      if (!stats) {
        stats = { seenTypes: new Set(), objectIdCount: 0, nonNullCount: 0 };
        perField.set(k, stats);
      }
      if (type) stats.seenTypes.add(type);
      if (type === 'objectid') stats.objectIdCount += 1;
      if (v !== null && v !== undefined) stats.nonNullCount += 1;
    }
  }

  // Collapse the per-field tallies into field definitions. All inferred
  // fields are optional and non-unique — sampling cannot prove either.
  const fields = [];
  for (const [name, stats] of perField.entries()) {
    const { type, warning } = pickBestType(stats.seenTypes);

    if (warning) warnings.push(`Field ${name}: ${warning}`);

    if (type === 'objectid') {
      // ObjectId fields become refs when the field name points at a known
      // external model; otherwise fall back to a plain string field.
      const refModelCode = tryInferRefFromFieldName(name, externalModelsByCollection);
      if (refModelCode) {
        fields.push({ name, type: 'ref', required: false, unique: false, refModelCode });
      } else {
        fields.push({ name, type: 'string', required: false, unique: false });
      }
      continue;
    }

    if (type === 'date') {
      fields.push({ name, type: 'date', required: false, unique: false });
      continue;
    }

    if (type === 'array') {
      fields.push({ name, type: 'array', required: false, unique: false });
      continue;
    }

    if (type === 'object') {
      fields.push({ name, type: 'object', required: false, unique: false });
      continue;
    }

    if (type === 'number' || type === 'boolean' || type === 'string') {
      fields.push({ name, type, required: false, unique: false });
      continue;
    }

    // Unknown/unclassifiable type: fall back to the loosest field type.
    fields.push({ name, type: 'object', required: false, unique: false });
  }

  fields.sort((a, b) => a.name.localeCompare(b.name));

  // Indexes are best-effort: failure to read them must not fail inference.
  let indexes = [];
  try {
    const idx = await coll.indexes();
    indexes = (idx || []).map(normalizeIndexFromMongo).filter(Boolean);
  } catch {
    indexes = [];
  }

  const fieldsHash = computeSchemaHash({ fields, indexes });

  return {
    collectionName: collName,
    fields,
    indexes,
    warnings,
    stats: {
      sampled: (docs || []).length,
      maxSampleSize: N,
      fields: fields.length,
    },
    fieldsHash,
  };
}
|
|
207
|
+
|
|
208
|
+
/**
 * Create or refresh a HeadlessModelDefinition backed by an existing external
 * Mongo collection.
 *
 * Re-runs schema inference on the collection; on an existing definition the
 * schema (fields/indexes/hash) is only replaced — and the version bumped —
 * when the inferred fieldsHash differs, with the previous schema kept in
 * previousFields/previousIndexes. Inference metadata is refreshed either way.
 *
 * @param {Object} [opts]
 * @param {string} opts.collectionName - Source collection (required).
 * @param {string} opts.codeIdentifier - Model code; must start with 'ext_'.
 * @param {string} [opts.displayName] - Display name (defaults to the code).
 * @param {number} [opts.sampleSize] - Forwarded to inference.
 * @returns {Promise<{created: boolean, item: Object, inference: Object}>}
 * @throws {Error} code 'VALIDATION' on missing/invalid inputs.
 */
async function createOrUpdateExternalModel({ collectionName, codeIdentifier, displayName, sampleSize } = {}) {
  const cn = String(collectionName || '').trim();
  if (!cn) {
    const err = new Error('collectionName is required');
    err.code = 'VALIDATION';
    throw err;
  }

  const code = normalizeCodeIdentifier(codeIdentifier);
  // The ext_ prefix keeps external models distinguishable from native ones.
  if (!code.startsWith('ext_')) {
    const err = new Error('External model codeIdentifier must start with ext_');
    err.code = 'VALIDATION';
    throw err;
  }

  const name = String(displayName || code).trim();
  if (!name) {
    const err = new Error('displayName is required');
    err.code = 'VALIDATION';
    throw err;
  }

  // Inference runs before any write so a failure leaves the DB untouched.
  const inferred = await inferExternalModelFromCollection({ collectionName: cn, sampleSize });

  const existing = await HeadlessModelDefinition.findOne({ codeIdentifier: code, isActive: true });
  if (!existing) {
    const doc = await HeadlessModelDefinition.create({
      codeIdentifier: code,
      displayName: name,
      description: '',
      fields: inferred.fields,
      indexes: inferred.indexes,
      fieldsHash: inferred.fieldsHash,
      version: 1,
      previousFields: [],
      previousIndexes: [],
      sourceType: 'external',
      sourceCollectionName: cn,
      isExternal: true,
      inference: {
        enabled: true,
        lastInferredAt: new Date(),
        sampleSize: Number(sampleSize) || null,
        warnings: inferred.warnings || [],
        stats: inferred.stats || null,
      },
      isActive: true,
    });

    return { created: true, item: doc.toObject(), inference: inferred };
  }

  // Update path: always refresh display name and external-source markers.
  existing.displayName = name;
  existing.sourceType = 'external';
  existing.sourceCollectionName = cn;
  existing.isExternal = true;

  // Only replace the schema (and bump the version) when it actually changed,
  // preserving the previous schema for rollback/inspection.
  const newHash = inferred.fieldsHash;
  if (newHash !== existing.fieldsHash) {
    existing.previousFields = existing.fields;
    existing.previousIndexes = existing.indexes;
    existing.fields = inferred.fields;
    existing.indexes = inferred.indexes;
    existing.fieldsHash = newHash;
    existing.version = Number(existing.version || 1) + 1;
  }

  // Inference metadata is refreshed on every run, changed schema or not.
  existing.inference = {
    enabled: true,
    lastInferredAt: new Date(),
    sampleSize: Number(sampleSize) || null,
    warnings: inferred.warnings || [],
    stats: inferred.stats || null,
  };

  await existing.save();

  return { created: false, item: existing.toObject(), inference: inferred };
}
|
|
287
|
+
|
|
288
|
+
// Public API of the headless external-models service.
module.exports = {
  listExternalCollections,
  inferExternalModelFromCollection,
  createOrUpdateExternalModel,
};
|
|
@@ -89,6 +89,18 @@ function getMongoCollectionName(codeIdentifier) {
|
|
|
89
89
|
return `${MODEL_COLLECTION_PREFIX}${code}`;
|
|
90
90
|
}
|
|
91
91
|
|
|
92
|
+
/**
 * Check whether a model definition is backed by an external collection.
 * Preserves the original's loose return: a falsy `def` is returned as-is
 * (null/undefined), otherwise a boolean.
 *
 * @param {Object|null} def - Headless model definition (may be lean/plain).
 * @returns {boolean|null|undefined}
 */
function isExternalDefinition(def) {
  if (!def) return def;
  return def.sourceType === 'external' || def.isExternal === true;
}
|
|
95
|
+
|
|
96
|
+
/**
 * Resolve the Mongo collection a definition's documents live in.
 * External definitions use their recorded source collection (when set);
 * everything else — including external defs with a blank source name —
 * falls back to the prefixed name derived from the codeIdentifier.
 *
 * @param {Object} def - Headless model definition.
 * @returns {string} Collection name.
 */
function getCollectionNameForDefinition(def) {
  if (!isExternalDefinition(def)) {
    return getMongoCollectionName(def.codeIdentifier);
  }
  const sourceName = String(def.sourceCollectionName || '').trim();
  return sourceName || getMongoCollectionName(def.codeIdentifier);
}
|
|
103
|
+
|
|
92
104
|
function buildSchemaFromDefinition(def) {
|
|
93
105
|
const schemaShape = {};
|
|
94
106
|
for (const field of def.fields || []) {
|
|
@@ -99,12 +111,14 @@ function buildSchemaFromDefinition(def) {
|
|
|
99
111
|
schemaShape[fieldName] = toMongooseField(field);
|
|
100
112
|
}
|
|
101
113
|
|
|
102
|
-
|
|
103
|
-
|
|
114
|
+
if (!isExternalDefinition(def)) {
|
|
115
|
+
schemaShape._headlessModelCode = { type: String, default: def.codeIdentifier, index: true };
|
|
116
|
+
schemaShape._headlessSchemaVersion = { type: Number, default: def.version, index: true };
|
|
117
|
+
}
|
|
104
118
|
|
|
105
119
|
const schema = new mongoose.Schema(schemaShape, {
|
|
106
120
|
timestamps: true,
|
|
107
|
-
collection:
|
|
121
|
+
collection: getCollectionNameForDefinition(def),
|
|
108
122
|
strict: false,
|
|
109
123
|
});
|
|
110
124
|
|
|
@@ -254,6 +268,8 @@ async function disableModelDefinition(codeIdentifier) {
|
|
|
254
268
|
const modelCache = new Map();
|
|
255
269
|
|
|
256
270
|
async function ensureAutoMigration(modelDef) {
|
|
271
|
+
if (isExternalDefinition(modelDef)) return;
|
|
272
|
+
|
|
257
273
|
const collectionName = getMongoCollectionName(modelDef.codeIdentifier);
|
|
258
274
|
const coll = mongoose.connection.collection(collectionName);
|
|
259
275
|
|
|
@@ -318,7 +334,8 @@ async function getDynamicModel(codeIdentifier) {
|
|
|
318
334
|
throw err;
|
|
319
335
|
}
|
|
320
336
|
|
|
321
|
-
const
|
|
337
|
+
const collectionName = getCollectionNameForDefinition(def);
|
|
338
|
+
const cacheKey = `${def.codeIdentifier}:${def.version}:${def.fieldsHash}:${collectionName}`;
|
|
322
339
|
const cached = modelCache.get(cacheKey);
|
|
323
340
|
if (cached) return cached;
|
|
324
341
|
|
|
@@ -334,8 +351,10 @@ async function getDynamicModel(codeIdentifier) {
|
|
|
334
351
|
const schema = buildSchemaFromDefinition(def);
|
|
335
352
|
const Model = mongoose.model(modelName, schema);
|
|
336
353
|
|
|
337
|
-
|
|
338
|
-
|
|
354
|
+
if (!isExternalDefinition(def)) {
|
|
355
|
+
await ensureAutoMigration(def);
|
|
356
|
+
await ensureIndexesBestEffort(Model);
|
|
357
|
+
}
|
|
339
358
|
|
|
340
359
|
modelCache.set(cacheKey, Model);
|
|
341
360
|
return Model;
|
|
@@ -346,6 +365,7 @@ module.exports = {
|
|
|
346
365
|
normalizeCodeIdentifier,
|
|
347
366
|
getMongooseModelName,
|
|
348
367
|
getMongoCollectionName,
|
|
368
|
+
getCollectionNameForDefinition,
|
|
349
369
|
computeSchemaHash,
|
|
350
370
|
listModelDefinitions,
|
|
351
371
|
getModelDefinitionByCode,
|