@triophore/falconjs 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/FalconAuthPlugin.js +473 -0
- package/LICENSE +21 -0
- package/README.md +2 -0
- package/core/auth.js +200 -0
- package/core/cache/redis_cacher.js +7 -0
- package/core/check_collection.js +9 -0
- package/core/crypto/encrypt_decrypt.js +19 -0
- package/core/errors.js +48 -0
- package/core/logger/log4js.js +89 -0
- package/core/logo.js +3 -0
- package/core/mongo/generateModelfromJsonFile.js +128 -0
- package/core/mongo/mongoSchmeFromJson.js +90 -0
- package/core/parse_num.js +8 -0
- package/core/rannum.js +33 -0
- package/core/ranstring.js +33 -0
- package/core/recursive-require-call.js +121 -0
- package/core/uitls/mongoose_to_joi.js +72 -0
- package/core/uitls/return.js +7 -0
- package/falcon.js +1644 -0
- package/falconAuthPlugin.js +17 -0
- package/falconBaseService.js +532 -0
- package/falconBaseWorker.js +540 -0
- package/index.js +4 -0
- package/out/Falcon.html +777 -0
- package/out/falcon.js.html +525 -0
- package/out/fonts/OpenSans-Bold-webfont.eot +0 -0
- package/out/fonts/OpenSans-Bold-webfont.svg +1830 -0
- package/out/fonts/OpenSans-Bold-webfont.woff +0 -0
- package/out/fonts/OpenSans-BoldItalic-webfont.eot +0 -0
- package/out/fonts/OpenSans-BoldItalic-webfont.svg +1830 -0
- package/out/fonts/OpenSans-BoldItalic-webfont.woff +0 -0
- package/out/fonts/OpenSans-Italic-webfont.eot +0 -0
- package/out/fonts/OpenSans-Italic-webfont.svg +1830 -0
- package/out/fonts/OpenSans-Italic-webfont.woff +0 -0
- package/out/fonts/OpenSans-Light-webfont.eot +0 -0
- package/out/fonts/OpenSans-Light-webfont.svg +1831 -0
- package/out/fonts/OpenSans-Light-webfont.woff +0 -0
- package/out/fonts/OpenSans-LightItalic-webfont.eot +0 -0
- package/out/fonts/OpenSans-LightItalic-webfont.svg +1835 -0
- package/out/fonts/OpenSans-LightItalic-webfont.woff +0 -0
- package/out/fonts/OpenSans-Regular-webfont.eot +0 -0
- package/out/fonts/OpenSans-Regular-webfont.svg +1831 -0
- package/out/fonts/OpenSans-Regular-webfont.woff +0 -0
- package/out/index.html +65 -0
- package/out/scripts/linenumber.js +25 -0
- package/out/scripts/prettify/Apache-License-2.0.txt +202 -0
- package/out/scripts/prettify/lang-css.js +2 -0
- package/out/scripts/prettify/prettify.js +28 -0
- package/out/styles/jsdoc-default.css +358 -0
- package/out/styles/prettify-jsdoc.css +111 -0
- package/out/styles/prettify-tomorrow.css +132 -0
- package/package.json +106 -0
- package/settings.js +1 -0
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Determines whether a collection with the given name exists in the database.
 * @param {object} db - MongoDB database handle exposing listCollections().
 * @param {string} collectionName - Exact collection name to look for.
 * @returns {Promise<boolean>} true when the collection exists, false otherwise
 *   (including when the lookup itself fails).
 */
module.exports.collectionExists = async function (db, collectionName) {
  try {
    const matches = await db.listCollections({ name: collectionName }).toArray();
    return matches.length !== 0;
  } catch (error) {
    // Best-effort check: report the failure but treat it as "not found"
    // rather than propagating the error to the caller.
    console.error("Error checking collection existence:", error);
    return false;
  }
};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
require('dotenv').config();
const config = process.env;
const crypto = require('crypto');

const ALGORITHM = 'aes-256-cbc';
const IV_LENGTH = 16;
// Legacy fixed IV: kept ONLY so ciphertexts produced by older releases
// (which always used this IV) can still be decrypted.
const LEGACY_IV = Buffer.from('0123456789abcdef');

/**
 * Returns the AES key from the environment, failing fast when it is missing
 * or not exactly 32 bytes (the key size aes-256-cbc requires).
 * @returns {Buffer} 32-byte key.
 * @throws {Error} When ENCRYPT_KEY is unset or has the wrong length.
 */
function getKey() {
  const key = config.ENCRYPT_KEY;
  if (!key || Buffer.byteLength(key) !== 32) {
    throw new Error('ENCRYPT_KEY must be set and exactly 32 bytes long for aes-256-cbc');
  }
  return Buffer.from(key);
}

// Encryption function
// SECURITY FIX: the previous implementation reused a hard-coded IV, so identical
// plaintexts always produced identical ciphertexts. A fresh random IV is now
// generated per message and prepended to the output as "ivHex:cipherHex".
module.exports.encrypt = function (text) {
  const iv = crypto.randomBytes(IV_LENGTH);
  const cipher = crypto.createCipheriv(ALGORITHM, getKey(), iv);
  const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);
  return `${iv.toString('hex')}:${encrypted.toString('hex')}`;
}

// Decryption function
// Accepts both the new "ivHex:cipherHex" format and the legacy plain-hex
// format (fixed IV), so data encrypted by older versions still decrypts.
module.exports.decrypt = function (encryptedText) {
  let iv = LEGACY_IV;
  let payload = encryptedText;
  const sep = encryptedText.indexOf(':');
  if (sep !== -1) {
    iv = Buffer.from(encryptedText.slice(0, sep), 'hex');
    payload = encryptedText.slice(sep + 1);
  }
  const decipher = crypto.createDecipheriv(ALGORITHM, getKey(), iv);
  const decrypted = Buffer.concat([decipher.update(Buffer.from(payload, 'hex')), decipher.final()]);
  return decrypted.toString();
}
|
package/core/errors.js
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Custom error classes for Falcon.js framework
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
class FalconError extends Error {
|
|
6
|
+
constructor(message, code = 'FALCON_ERROR') {
|
|
7
|
+
super(message);
|
|
8
|
+
this.name = 'FalconError';
|
|
9
|
+
this.code = code;
|
|
10
|
+
Error.captureStackTrace(this, this.constructor);
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
class ConfigurationError extends FalconError {
|
|
15
|
+
constructor(message) {
|
|
16
|
+
super(message, 'CONFIG_ERROR');
|
|
17
|
+
this.name = 'ConfigurationError';
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
class ValidationError extends FalconError {
|
|
22
|
+
constructor(message) {
|
|
23
|
+
super(message, 'VALIDATION_ERROR');
|
|
24
|
+
this.name = 'ValidationError';
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
class DatabaseError extends FalconError {
|
|
29
|
+
constructor(message) {
|
|
30
|
+
super(message, 'DATABASE_ERROR');
|
|
31
|
+
this.name = 'DatabaseError';
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
class AuthenticationError extends FalconError {
|
|
36
|
+
constructor(message) {
|
|
37
|
+
super(message, 'AUTH_ERROR');
|
|
38
|
+
this.name = 'AuthenticationError';
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
module.exports = {
|
|
43
|
+
FalconError,
|
|
44
|
+
ConfigurationError,
|
|
45
|
+
ValidationError,
|
|
46
|
+
DatabaseError,
|
|
47
|
+
AuthenticationError
|
|
48
|
+
};
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
const log4js = require("log4js");
require('dotenv').config();

const config = process.env;

/**
 * Creates and returns a configured log4js logger based on config_type
 * @param {"file"|"stdio"|"both"} config_type - Where to log
 * @returns {log4js.Logger}
 */
module.exports.getLogger = function (config_type = "both") {
  // Accept either a plain string or an options object carrying a `type` field.
  let target = "both";
  if (typeof config_type === 'string') {
    target = config_type;
  } else if (typeof config_type === 'object' && config_type !== null) {
    target = config_type.type || "both";
  }
  target = target.toLowerCase().trim();

  if (!["file", "stdio", "both"].includes(target)) {
    throw new Error(`Invalid config_type: ${target}. Use 'file', 'stdio', or 'both'.`);
  }

  // Custom layout with Unix timestamp in milliseconds
  log4js.addLayout('unixTimestampMillisLayout', () => (logEvent) => {
    const stamp = logEvent.startTime.getTime();
    const parts = [];
    for (const item of logEvent.data) {
      try {
        if (item instanceof Error) {
          parts.push(item.stack || item.message);
        } else if (typeof item === 'object' && item !== null) {
          parts.push(JSON.stringify(item));
        } else {
          parts.push(String(item));
        }
      } catch (error) {
        // JSON.stringify throws on circular structures; everything else is reported verbatim.
        parts.push(error.message.includes('circular') ? '[Circular]' : `[Error: ${error.message}]`);
      }
    }
    return `[${stamp}] [${logEvent.level.levelStr}] ${parts.join(' ')}`;
  });

  // Build appenders dynamically from the requested target.
  const appenders = {};
  const active = [];

  // File Appender ("file" or "both")
  if (target !== "stdio") {
    appenders.file = {
      type: "file",
      filename: config.LOG_FILE_NAME || "log/server.log",
      maxLogSize: config.LOG_MAX_SIZE || "5M",
      backups: parseInt(config.LOG_BACKUPS, 10) || 10,
      compress: config.LOG_COMPRESS !== "false", // default true
      layout: { type: 'unixTimestampMillisLayout' }
    };
    active.push("file");
  }

  // Console (stdio) Appender ("stdio" or "both")
  if (target !== "file") {
    appenders.console = {
      type: "stdout", // or "stderr" for errors only
      layout: { type: 'unixTimestampMillisLayout' }
    };
    active.push("console");
  }

  // Configure log4js
  log4js.configure({
    appenders,
    categories: {
      default: {
        appenders: active,
        level: config.LOG_LEVEL || "trace"
      }
    }
  });

  return log4js.getLogger();
};
|
package/core/logo.js
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
/**
 * Infers Mongoose schema type from a JavaScript value
 * @param {*} value - Sample value to inspect.
 * @returns {String} Mongoose schema type name ('String', 'Number', 'Boolean',
 *   'Date', 'Object', 'Mixed', or '[T]' for arrays).
 */
function inferType(value) {
  if (value === null || value === undefined) return 'Mixed';

  if (Array.isArray(value)) {
    // Type the array from its first element; empty arrays stay fully generic.
    if (value.length === 0) return '[Mixed]';
    return `[${inferType(value[0])}]`;
  }

  switch (typeof value) {
    case 'string':
      // ISO datetime strings (prefix match) and bare ISO dates are promoted to Date.
      if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) return 'Date';
      if (/^\d{4}-\d{2}-\d{2}$/.test(value)) return 'Date';
      return 'String';
    case 'number':
      // Mongoose uses Number for both integers and floats (the original
      // `isInteger ? 'Number' : 'Number'` ternary was a no-op).
      return 'Number';
    case 'boolean':
      return 'Boolean';
    case 'object':
      return value instanceof Date ? 'Date' : 'Object';
    default:
      // symbol, function, bigint, etc.
      return 'Mixed';
  }
}
|
|
34
|
+
|
|
35
|
+
/**
 * Recursively generates a Mongoose schema definition from a sample JSON object.
 * @param {Object} obj - Sample document to derive the schema from.
 * @param {Number} depth - Current recursion depth (capped at 10).
 * @returns {Object} Schema definition keyed like `obj`.
 */
function generateSchemaDefinition(obj, depth = 0) {
  // Hard cap on nesting to avoid runaway recursion on pathological input.
  if (depth > 10) return {};

  const definition = {};

  for (const key of Object.keys(obj)) {
    const value = obj[key];
    const inferred = inferType(value);

    if (inferred === 'Object' && typeof value === 'object' && !Array.isArray(value) && value !== null) {
      // Plain nested object: recurse into its fields.
      definition[key] = generateSchemaDefinition(value, depth + 1);
    } else if (inferred.startsWith('[')) {
      const elementType = inferred.slice(1, -1);
      if (elementType === 'Object') {
        // Array of sub-documents: derive the element schema from the first item.
        definition[key] = [generateSchemaDefinition(value[0] || {}, depth + 1)];
      } else {
        // Array of primitives.
        definition[key] = {
          type: `[${elementType}]`,
          default: []
        };
      }
    } else {
      definition[key] = { type: inferred };
      if (value === null) definition[key].default = null;
    }
  }

  return definition;
}
|
|
72
|
+
|
|
73
|
+
/**
 * Converts a schema definition (as produced by generateSchemaDefinition)
 * to a Mongoose Schema object-literal string.
 * @param {Object} schemaDef - Schema definition object.
 * @param {String} indent - Current indentation prefix for this nesting level.
 * @returns {String} Source-code fragment for the schema object literal.
 */
function schemaToString(schemaDef, indent = '  ') {
  let str = '{\n';

  // Renders `, default: ...` when the field carries an explicit default.
  const defaultPart = (field) =>
    field.default !== undefined ? `, default: ${JSON.stringify(field.default)}` : '';

  const keys = Object.keys(schemaDef);
  keys.forEach((key, index) => {
    const field = schemaDef[key];
    const isLast = index === keys.length - 1;

    if (Array.isArray(field)) {
      // BUGFIX: arrays of sub-documents are emitted by generateSchemaDefinition
      // as `[subSchema]` (a real array). Previously this fell into the
      // nested-object branch below and rendered `key: { 0: {...} }` instead of
      // `key: [{...}]`.
      str += `${indent} ${key}: [${schemaToString(field[0] || {}, indent + ' ')}\n${indent} ]`;
    } else if (typeof field === 'object' && field.type && field.type.startsWith('[')) {
      const arrayType = field.type.slice(1, -1);
      if (arrayType === 'Object') {
        // Nested array of objects expressed via a '[Object]' type tag.
        str += `${indent} ${key}: [${schemaToString(field[0] || {}, indent + ' ')}\n${indent} ]`;
      } else {
        // Array of primitives, e.g. `{ type: [String], default: [] }`.
        str += `${indent} ${key}: { type: ${field.type}${defaultPart(field)} }`;
      }
    } else if (typeof field === 'object' && !field.type && Object.keys(field).length > 0) {
      // Nested object: recurse with one extra indent level.
      str += `${indent} ${key}: ${schemaToString(field, indent + ' ')}`;
    } else if (typeof field === 'object' && field.type) {
      // Scalar field, e.g. `{ type: String }` or `{ type: Mixed, default: null }`.
      str += `${indent} ${key}: { type: ${field.type}${defaultPart(field)} }`;
    }

    str += isLast ? '\n' : ',\n';
  });

  str += `${indent}}`;
  return str;
}
|
|
108
|
+
|
|
109
|
+
/**
 * Main function to generate Mongoose model source code from a sample JSON document.
 * @param {Object} jsonData - Sample document to derive the schema from.
 * @param {String} modelName - Name for the generated model.
 * @returns {String} Complete Mongoose model code.
 */
function generateMongooseModel(jsonData, modelName = 'MyModel') {
  // Derive the schema definition from the sample, then render it as source.
  const schemaString = schemaToString(generateSchemaDefinition(jsonData));

  return `const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const ${modelName}Schema = new Schema(
  ${schemaString}
);

module.exports = mongoose.model('${modelName}', ${modelName}Schema);
`;
}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
// createMongooseSchema.js
const mongoose = require('mongoose');
const { Schema } = mongoose;

/**
 * Maps custom type → Mongoose type
 *
 * Keys are lowercase because convertField() lowercases `field.type`
 * before looking it up here; unknown keys fall back to Schema.Types.Mixed.
 */
const typeMap = {
  string: String,
  number: Number,
  boolean: Boolean,
  date: Date,
  objectid: mongoose.Types.ObjectId, // 24-hex-char document id
  mixed: Schema.Types.Mixed,         // schemaless "anything" slot
  array: Array,
  object: Object
};
|
|
18
|
+
|
|
19
|
+
/**
 * Converts a custom schema field definition to a Mongoose field definition.
 * @param {Object} field - Field spec, e.g. { type, items?, properties?, required?, default?, ... }.
 * @returns {Object} Mongoose-compatible field definition ({} when the spec has no type).
 */
function convertField(field) {
  if (!field || !field.type) return {};

  const out = {};
  const kind = field.type.toLowerCase();

  if (kind === 'array' && field.items) {
    // Array: convert the element spec; object-like elements become a sub-Schema.
    const element = convertField(field.items);
    out.type = [element.type ? new Schema(element) : element];
  } else if (kind === 'object' && field.properties) {
    // Inline object: convert each declared property recursively.
    const nested = {};
    for (const [prop, spec] of Object.entries(field.properties)) {
      nested[prop] = convertField(spec);
    }
    out.type = nested;
  } else {
    out.type = typeMap[kind] || Schema.Types.Mixed;
  }

  // Pass through standard Mongoose options (only when explicitly enabled).
  if (field.required === true) out.required = true;
  if (field.unique === true) out.unique = true;
  if (field.index === true) out.index = true;
  if (field.default !== undefined) {
    // 'now' is sugar for a Date.now default (evaluated at save time).
    out.default = field.default === 'now' ? Date.now : field.default;
  }
  if (field.trim === true) out.trim = true;
  if (field.lowercase === true) out.lowercase = true;
  if (field.uppercase === true) out.uppercase = true;
  if (field.enum) out.enum = field.enum;

  return out;
}
|
|
58
|
+
|
|
59
|
+
/**
 * Main function: JSON → Mongoose Schema
 * @param {Object} config - { name, schema, options? } describing the model.
 * @returns {{ schema: mongoose.Schema, modelName: string }}
 * @throws {Error} When `name` or `schema` is missing.
 */
function createMongooseSchemaFromJson(config) {
  if (!config || !config.name || !config.schema) {
    throw new Error('Invalid config: must have "name" and "schema"');
  }

  const { name, schema: schemaDef, options = {} } = config;

  // Convert every declared field into its Mongoose equivalent.
  const definition = {};
  for (const [fieldName, fieldDef] of Object.entries(schemaDef)) {
    definition[fieldName] = convertField(fieldDef);
  }

  // Build the schema; caller-supplied options override the timestamps default.
  const mongooseSchema = new Schema(definition, {
    timestamps: options.timestamps || false,
    ...options
  });

  return {
    modelName: name,
    schema: mongooseSchema
  };
}

// Export
module.exports = { createMongooseSchemaFromJson };
|
package/core/rannum.js
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
const crypto = require('crypto');

/**
 * Generates a cryptographically secure random numeric string.
 * @param {number} length - Number of digits to produce (positive integer).
 * @returns {string} Random string of decimal digits.
 * @throws {Error} If length is not a positive integer.
 */
module.exports.generateRandomnumericString = function (length) {
  if (typeof length !== 'number' || !Number.isInteger(length) || length <= 0) {
    throw new Error('Length must be a positive number.');
  }

  const characters = '0123456789';

  let result = '';
  for (let i = 0; i < length; i++) {
    // BUGFIX: the previous `randomBytes[i] % 10` had modulo bias — 256 is not
    // a multiple of 10, so digits 0-5 were drawn slightly more often.
    // crypto.randomInt draws uniformly (rejection sampling internally).
    result += characters.charAt(crypto.randomInt(characters.length));
  }
  return result;
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
const crypto = require('crypto');

/**
 * Generates a cryptographically secure random alphanumeric string
 * (uppercase letters and digits).
 * @param {number} length - Number of characters to produce (positive integer).
 * @returns {string} Random string drawn from [A-Z0-9].
 * @throws {Error} If length is not a positive integer.
 */
module.exports.generateRandomAlphanumericString = function (length) {
  if (typeof length !== 'number' || !Number.isInteger(length) || length <= 0) {
    throw new Error('Length must be a positive number.');
  }

  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';

  let result = '';
  for (let i = 0; i < length; i++) {
    // BUGFIX: the previous `randomBytes[i] % 36` had modulo bias — 256 is not
    // a multiple of 36, so characters A-D were drawn slightly more often.
    // crypto.randomInt draws uniformly (rejection sampling internally).
    result += characters.charAt(crypto.randomInt(characters.length));
  }
  return result;
}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
/**
 * Recursively require CJS modules and call a function with arguments.
 *
 * Walks `dir` depth-first, require()s every file matching `fileFilter`,
 * locates a callable (direct export, named export, or under `default`),
 * invokes it with `args`, and collects one result entry per file.
 * Errors at any stage (readdir, stat, require, call) are recorded per-file
 * instead of aborting the whole scan.
 *
 * @param {string} dir - Directory to scan
 * @param {string} [functionName='getValue'] - Function name to call
 * @param {any[]} [args=[]] - Arguments to pass to the function
 * @param {RegExp} [fileFilter=/\.(js|cjs)$/i] - File filter
 * @returns {Promise<Array<{file: string, value: any, error?: string}>>}
 */
async function requireAndCallRecursively(dir, functionName = 'getValue', args = [], fileFilter = /\.(js|cjs)$/i) {
  const results = [];

  // Inner worker: accumulates into `results` via closure rather than returning.
  async function scan(currentDir) {
    let files;
    try {
      files = await fs.promises.readdir(currentDir);
    } catch (err) {
      // Unreadable directory: record it once and stop descending into it.
      results.push({ file: currentDir, error: `Read dir error: ${err.message}` });
      return;
    }

    for (const file of files) {
      const fullPath = path.join(currentDir, file);
      let stat;

      try {
        stat = await fs.promises.stat(fullPath);
      } catch (err) {
        results.push({ file: fullPath, error: `Stat error: ${err.message}` });
        continue;
      }

      if (stat.isDirectory()) {
        // Skip dependency/VCS/hidden directories.
        if (file === 'node_modules' || file === '.git' || file.startsWith('.')) {
          continue;
        }
        await scan(fullPath);
      } else if (stat.isFile() && fileFilter.test(file)) {
        let moduleExport;
        try {
          // Evict any cached copy so a re-scan re-evaluates the module
          // (NOTE: module-level side effects run again on every scan).
          delete require.cache[require.resolve(fullPath)];
          moduleExport = require(fullPath);
        } catch (err) {
          results.push({ file: fullPath, error: `Require error: ${err.message}` });
          continue;
        }

        let value;
        try {
          let fn = null;

          // Resolve function in various export patterns, checked in priority
          // order: direct function export, named export, ESM-interop
          // `default` function, then `default.<functionName>`.
          if (typeof moduleExport === 'function') {
            fn = moduleExport;
          } else if (moduleExport && typeof moduleExport[functionName] === 'function') {
            fn = moduleExport[functionName];
          } else if (moduleExport?.default && typeof moduleExport.default === 'function') {
            fn = moduleExport.default;
          } else if (moduleExport?.default && typeof moduleExport.default[functionName] === 'function') {
            fn = moduleExport.default[functionName];
          }

          if (fn) {
            // Promise.resolve makes sync and async callables uniform.
            value = await Promise.resolve(fn(...args));
          } else {
            // Not an error: the module simply had nothing callable to run.
            value = { warning: 'No callable function found', export: moduleExport };
          }
        } catch (err) {
          results.push({ file: fullPath, value: null, error: `Call error: ${err.message}` });
          continue;
        }

        results.push({ file: fullPath, value });
      }
    }
  }

  await scan(path.resolve(dir));
  return results;
}
|
|
83
|
+
|
|
84
|
+
// === CLI Usage ===
// Only runs when this file is executed directly (`node script.js ...`),
// not when it is require()d as a library.
if (require.main === module) {
  const args = process.argv.slice(2);
  if (args.length === 0) {
    console.log('Usage: node script.js <directory> <functionName> [arg1] [arg2] ...');
    console.log('Example: node script.js ./modules getValue 123 "hello"');
    process.exit(1);
  }

  const targetDir = args[0];
  const funcName = args[1];
  const funcArgs = args.slice(2).map(arg => {
    // Try to parse JSON or numbers; anything unparseable stays a raw string.
    try {
      return JSON.parse(arg);
    } catch {
      return arg;
    }
  });

  requireAndCallRecursively(targetDir, funcName, funcArgs)
    .then(results => {
      // Print one success/failure line per scanned file.
      console.log(`\nScanned ${results.length} file(s):\n`);
      results.forEach(r => {
        if (r.error) {
          console.error(`Failed: ${r.file}\n → ${r.error}`);
        } else {
          const val = typeof r.value === 'object' ? JSON.stringify(r.value, null, 2) : r.value;
          console.log(`Success: ${r.file}\n → ${val}`);
        }
      });
    })
    .catch(err => {
      console.error('Fatal error:', err);
    });
}

module.exports = { requireAndCallRecursively };
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
const Joi = require('joi');
|
|
2
|
+
const mongoose = require('mongoose');
|
|
3
|
+
|
|
4
|
+
/**
 * Converts a Mongoose schema to a Joi schema.
 * @param {mongoose.Schema} schema - The Mongoose schema to convert.
 * @returns {Joi.ObjectSchema} The converted Joi schema.
 */
function mongooseToJoi(schema) {
  if (!schema) return Joi.object();

  const shape = {};

  schema.eachPath((pathName, schemaType) => {
    // Skip Mongoose-internal fields.
    if (pathName === '_id' || pathName === '__v') return;

    const options = schemaType.options || {};
    let validator;

    switch (schemaType.instance) {
      case 'String':
        validator = Joi.string();
        if (options.enum) {
          validator = validator.valid(...options.enum);
        }
        break;
      case 'Number':
        validator = Joi.number();
        break;
      case 'Boolean':
        validator = Joi.boolean();
        break;
      case 'Date':
        validator = Joi.date();
        break;
      case 'Array':
        validator = arrayPathToJoi(schemaType);
        break;
      case 'Embedded':
        // Nested object: recurse when a sub-schema is available.
        validator = schemaType.schema ? mongooseToJoi(schemaType.schema) : Joi.object();
        break;
      case 'ObjectID':
        validator = Joi.string().regex(/^[0-9a-fA-F]{24}$/, 'ObjectId');
        break;
      default:
        // Fallback for unknown types.
        validator = Joi.any();
    }

    if (options.required) {
      validator = validator.required();
    }

    shape[pathName] = validator;
  });

  return Joi.object(shape);
}

/** Builds the Joi validator for a Mongoose array path (primitives or subdocuments). */
function arrayPathToJoi(schemaType) {
  if (schemaType.caster && schemaType.caster.instance) {
    // Array of primitives.
    const element = schemaType.caster.instance;
    if (element === 'String') return Joi.array().items(Joi.string());
    if (element === 'Number') return Joi.array().items(Joi.number());
    if (element === 'Boolean') return Joi.array().items(Joi.boolean());
    return Joi.array();
  }
  if (schemaType.schema) {
    // Array of subdocuments.
    return Joi.array().items(mongooseToJoi(schemaType.schema));
  }
  return Joi.array();
}

module.exports = mongooseToJoi;
|