xypriss 2.0.0 → 2.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +171 -381
- package/dist/cjs/mods/security/src/algorithms/hash-algorithms.js +10 -0
- package/dist/cjs/mods/security/src/algorithms/hash-algorithms.js.map +1 -1
- package/dist/cjs/mods/security/src/components/cache/cacheSys.utils.js +10 -0
- package/dist/cjs/mods/security/src/components/cache/cacheSys.utils.js.map +1 -1
- package/dist/cjs/mods/security/src/components/cache/index.js +10 -2
- package/dist/cjs/mods/security/src/components/cache/index.js.map +1 -1
- package/dist/cjs/mods/security/src/components/fortified-function/index.js +9 -0
- package/dist/cjs/mods/security/src/components/fortified-function/index.js.map +1 -1
- package/dist/cjs/mods/security/src/components/fortified-function/security/security-handler.js +10 -0
- package/dist/cjs/mods/security/src/components/fortified-function/security/security-handler.js.map +1 -1
- package/dist/cjs/mods/security/src/core/crypto.js +14 -0
- package/dist/cjs/mods/security/src/core/crypto.js.map +1 -1
- package/dist/cjs/mods/security/src/index.js +10 -1
- package/dist/cjs/mods/security/src/index.js.map +1 -1
- package/dist/cjs/shared/logger/Logger.js +372 -29
- package/dist/cjs/shared/logger/Logger.js.map +1 -1
- package/dist/cjs/src/cluster/bun-cluster-manager.js +91 -1
- package/dist/cjs/src/cluster/bun-cluster-manager.js.map +1 -1
- package/dist/cjs/src/cluster/cluster-manager.js +15 -3
- package/dist/cjs/src/cluster/cluster-manager.js.map +1 -1
- package/dist/cjs/src/cluster/modules/AutoScaler.js +4 -4
- package/dist/cjs/src/cluster/modules/AutoScaler.js.map +1 -1
- package/dist/cjs/src/cluster/modules/CrossPlatformMemory.js +2 -2
- package/dist/cjs/src/cluster/modules/CrossPlatformMemory.js.map +1 -1
- package/dist/cjs/src/cluster/modules/EventLoopMonitor.js +270 -0
- package/dist/cjs/src/cluster/modules/EventLoopMonitor.js.map +1 -0
- package/dist/cjs/src/cluster/modules/GCStatsTracker.js +200 -0
- package/dist/cjs/src/cluster/modules/GCStatsTracker.js.map +1 -0
- package/dist/cjs/src/cluster/modules/HeapStatsCollector.js +111 -0
- package/dist/cjs/src/cluster/modules/HeapStatsCollector.js.map +1 -0
- package/dist/cjs/src/cluster/modules/NetworkTracker.js +162 -0
- package/dist/cjs/src/cluster/modules/NetworkTracker.js.map +1 -0
- package/dist/cjs/src/cluster/modules/ThroughputCalculator.js +186 -0
- package/dist/cjs/src/cluster/modules/ThroughputCalculator.js.map +1 -0
- package/dist/cjs/src/cluster/modules/WorkerManager.js +14 -15
- package/dist/cjs/src/cluster/modules/WorkerManager.js.map +1 -1
- package/dist/cjs/src/cluster/modules/{LoadBalancer.js → strategy/LoadBalancer.js} +1 -1
- package/dist/cjs/src/cluster/modules/strategy/LoadBalancer.js.map +1 -0
- package/dist/cjs/src/middleware/built-in/sqlInjection.js +335 -0
- package/dist/cjs/src/middleware/built-in/sqlInjection.js.map +1 -0
- package/dist/cjs/src/middleware/safe-json-middleware.js +1 -1
- package/dist/cjs/src/middleware/safe-json-middleware.js.map +1 -1
- package/dist/cjs/src/middleware/security-middleware.js +447 -332
- package/dist/cjs/src/middleware/security-middleware.js.map +1 -1
- package/dist/cjs/src/plugins/modules/index.js +9 -3
- package/dist/cjs/src/plugins/modules/index.js.map +1 -1
- package/dist/cjs/src/server/FastServer.js +41 -1
- package/dist/cjs/src/server/FastServer.js.map +1 -1
- package/dist/cjs/src/server/ServerFactory.js +62 -2
- package/dist/cjs/src/server/ServerFactory.js.map +1 -1
- package/dist/cjs/src/server/components/fastapi/ClusterManagerComponent.js +32 -6
- package/dist/cjs/src/server/components/fastapi/ClusterManagerComponent.js.map +1 -1
- package/dist/cjs/src/server/components/fastapi/WorkerPoolComponent.js +206 -0
- package/dist/cjs/src/server/components/fastapi/WorkerPoolComponent.js.map +1 -0
- package/dist/cjs/src/server/components/fastapi/console/ConsoleInterceptor.js +3 -28
- package/dist/cjs/src/server/components/fastapi/console/ConsoleInterceptor.js.map +1 -1
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/WorkerPoolManager.js +265 -0
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/WorkerPoolManager.js.map +1 -0
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/workers/Logger.js +236 -0
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/workers/cpu-tasks.js +294 -0
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/workers/enhanced-cpu-worker.js +433 -0
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/workers/io-worker.js +1615 -0
- package/dist/cjs/src/server/components/lifecycle/ServerLifecycleManager.js +143 -24
- package/dist/cjs/src/server/components/lifecycle/ServerLifecycleManager.js.map +1 -1
- package/dist/cjs/src/server/const/default.js +23 -9
- package/dist/cjs/src/server/const/default.js.map +1 -1
- package/dist/cjs/src/server/core/HttpServer.js +8 -8
- package/dist/cjs/src/server/core/HttpServer.js.map +1 -1
- package/dist/cjs/src/server/core/XyprissApp.js +284 -17
- package/dist/cjs/src/server/core/XyprissApp.js.map +1 -1
- package/dist/cjs/src/server/handlers/NotFoundHandler.js +1 -1
- package/dist/cjs/src/server/handlers/NotFoundHandler.js.map +1 -1
- package/dist/cjs/src/server/middleware/MiddlewareManager.js +57 -12
- package/dist/cjs/src/server/middleware/MiddlewareManager.js.map +1 -1
- package/dist/cjs/src/server/utils/forceClosePort.js +1 -1
- package/dist/cjs/src/server/utils/forceClosePort.js.map +1 -1
- package/dist/esm/mods/security/src/algorithms/hash-algorithms.js +10 -0
- package/dist/esm/mods/security/src/algorithms/hash-algorithms.js.map +1 -1
- package/dist/esm/mods/security/src/components/cache/cacheSys.utils.js +10 -0
- package/dist/esm/mods/security/src/components/cache/cacheSys.utils.js.map +1 -1
- package/dist/esm/mods/security/src/components/cache/index.js +10 -2
- package/dist/esm/mods/security/src/components/cache/index.js.map +1 -1
- package/dist/esm/mods/security/src/components/fortified-function/index.js +9 -0
- package/dist/esm/mods/security/src/components/fortified-function/index.js.map +1 -1
- package/dist/esm/mods/security/src/components/fortified-function/security/security-handler.js +10 -0
- package/dist/esm/mods/security/src/components/fortified-function/security/security-handler.js.map +1 -1
- package/dist/esm/mods/security/src/core/crypto.js +14 -0
- package/dist/esm/mods/security/src/core/crypto.js.map +1 -1
- package/dist/esm/mods/security/src/index.js +10 -1
- package/dist/esm/mods/security/src/index.js.map +1 -1
- package/dist/esm/shared/logger/Logger.js +372 -29
- package/dist/esm/shared/logger/Logger.js.map +1 -1
- package/dist/esm/src/cluster/bun-cluster-manager.js +91 -1
- package/dist/esm/src/cluster/bun-cluster-manager.js.map +1 -1
- package/dist/esm/src/cluster/cluster-manager.js +15 -3
- package/dist/esm/src/cluster/cluster-manager.js.map +1 -1
- package/dist/esm/src/cluster/modules/AutoScaler.js +4 -4
- package/dist/esm/src/cluster/modules/AutoScaler.js.map +1 -1
- package/dist/esm/src/cluster/modules/CrossPlatformMemory.js +2 -2
- package/dist/esm/src/cluster/modules/CrossPlatformMemory.js.map +1 -1
- package/dist/esm/src/cluster/modules/EventLoopMonitor.js +268 -0
- package/dist/esm/src/cluster/modules/EventLoopMonitor.js.map +1 -0
- package/dist/esm/src/cluster/modules/GCStatsTracker.js +198 -0
- package/dist/esm/src/cluster/modules/GCStatsTracker.js.map +1 -0
- package/dist/esm/src/cluster/modules/HeapStatsCollector.js +109 -0
- package/dist/esm/src/cluster/modules/HeapStatsCollector.js.map +1 -0
- package/dist/esm/src/cluster/modules/NetworkTracker.js +160 -0
- package/dist/esm/src/cluster/modules/NetworkTracker.js.map +1 -0
- package/dist/esm/src/cluster/modules/ThroughputCalculator.js +184 -0
- package/dist/esm/src/cluster/modules/ThroughputCalculator.js.map +1 -0
- package/dist/esm/src/cluster/modules/WorkerManager.js +14 -14
- package/dist/esm/src/cluster/modules/WorkerManager.js.map +1 -1
- package/dist/esm/src/cluster/modules/{LoadBalancer.js → strategy/LoadBalancer.js} +1 -1
- package/dist/esm/src/cluster/modules/strategy/LoadBalancer.js.map +1 -0
- package/dist/esm/src/middleware/built-in/sqlInjection.js +333 -0
- package/dist/esm/src/middleware/built-in/sqlInjection.js.map +1 -0
- package/dist/esm/src/middleware/safe-json-middleware.js +1 -1
- package/dist/esm/src/middleware/safe-json-middleware.js.map +1 -1
- package/dist/esm/src/middleware/security-middleware.js +447 -332
- package/dist/esm/src/middleware/security-middleware.js.map +1 -1
- package/dist/esm/src/plugins/modules/index.js +9 -3
- package/dist/esm/src/plugins/modules/index.js.map +1 -1
- package/dist/esm/src/server/FastServer.js +41 -1
- package/dist/esm/src/server/FastServer.js.map +1 -1
- package/dist/esm/src/server/ServerFactory.js +62 -2
- package/dist/esm/src/server/ServerFactory.js.map +1 -1
- package/dist/esm/src/server/components/fastapi/ClusterManagerComponent.js +32 -6
- package/dist/esm/src/server/components/fastapi/ClusterManagerComponent.js.map +1 -1
- package/dist/esm/src/server/components/fastapi/WorkerPoolComponent.js +204 -0
- package/dist/esm/src/server/components/fastapi/WorkerPoolComponent.js.map +1 -0
- package/dist/esm/src/server/components/fastapi/console/ConsoleInterceptor.js +2 -27
- package/dist/esm/src/server/components/fastapi/console/ConsoleInterceptor.js.map +1 -1
- package/dist/esm/src/server/components/fastapi/modules/UFRP/WorkerPoolManager.js +263 -0
- package/dist/esm/src/server/components/fastapi/modules/UFRP/WorkerPoolManager.js.map +1 -0
- package/dist/esm/src/server/components/fastapi/modules/UFRP/workers/Logger.js +236 -0
- package/dist/esm/src/server/components/fastapi/modules/UFRP/workers/cpu-tasks.js +294 -0
- package/dist/esm/src/server/components/fastapi/modules/UFRP/workers/enhanced-cpu-worker.js +433 -0
- package/dist/esm/src/server/components/fastapi/modules/UFRP/workers/io-worker.js +1615 -0
- package/dist/esm/src/server/components/lifecycle/ServerLifecycleManager.js +143 -24
- package/dist/esm/src/server/components/lifecycle/ServerLifecycleManager.js.map +1 -1
- package/dist/esm/src/server/const/default.js +23 -9
- package/dist/esm/src/server/const/default.js.map +1 -1
- package/dist/esm/src/server/core/HttpServer.js +8 -8
- package/dist/esm/src/server/core/HttpServer.js.map +1 -1
- package/dist/esm/src/server/core/XyprissApp.js +284 -17
- package/dist/esm/src/server/core/XyprissApp.js.map +1 -1
- package/dist/esm/src/server/handlers/NotFoundHandler.js +1 -1
- package/dist/esm/src/server/handlers/NotFoundHandler.js.map +1 -1
- package/dist/esm/src/server/middleware/MiddlewareManager.js +57 -12
- package/dist/esm/src/server/middleware/MiddlewareManager.js.map +1 -1
- package/dist/esm/src/server/utils/forceClosePort.js +1 -1
- package/dist/esm/src/server/utils/forceClosePort.js.map +1 -1
- package/dist/index.d.ts +675 -516
- package/package.json +9 -9
- package/dist/cjs/src/cluster/index.js +0 -361
- package/dist/cjs/src/cluster/index.js.map +0 -1
- package/dist/cjs/src/cluster/modules/ClusterFactory.js +0 -539
- package/dist/cjs/src/cluster/modules/ClusterFactory.js.map +0 -1
- package/dist/cjs/src/cluster/modules/LoadBalancer.js.map +0 -1
- package/dist/cjs/src/server/components/fastapi/UltraFastRequestProcessor.js +0 -668
- package/dist/cjs/src/server/components/fastapi/UltraFastRequestProcessor.js.map +0 -1
- package/dist/cjs/src/server/components/fastapi/middlewares/MiddlewareAPI.js +0 -347
- package/dist/cjs/src/server/components/fastapi/middlewares/MiddlewareAPI.js.map +0 -1
- package/dist/cjs/src/server/components/fastapi/middlewares/MiddlewareMethodsManager.js +0 -204
- package/dist/cjs/src/server/components/fastapi/middlewares/MiddlewareMethodsManager.js.map +0 -1
- package/dist/cjs/src/server/components/fastapi/middlewares/middlewareManager.js +0 -953
- package/dist/cjs/src/server/components/fastapi/middlewares/middlewareManager.js.map +0 -1
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/WorkerPool.js +0 -56
- package/dist/cjs/src/server/components/fastapi/modules/UFRP/WorkerPool.js.map +0 -1
- package/dist/esm/src/cluster/index.js +0 -339
- package/dist/esm/src/cluster/index.js.map +0 -1
- package/dist/esm/src/cluster/modules/ClusterFactory.js +0 -511
- package/dist/esm/src/cluster/modules/ClusterFactory.js.map +0 -1
- package/dist/esm/src/cluster/modules/LoadBalancer.js.map +0 -1
- package/dist/esm/src/server/components/fastapi/UltraFastRequestProcessor.js +0 -647
- package/dist/esm/src/server/components/fastapi/UltraFastRequestProcessor.js.map +0 -1
- package/dist/esm/src/server/components/fastapi/middlewares/MiddlewareAPI.js +0 -345
- package/dist/esm/src/server/components/fastapi/middlewares/MiddlewareAPI.js.map +0 -1
- package/dist/esm/src/server/components/fastapi/middlewares/MiddlewareMethodsManager.js +0 -202
- package/dist/esm/src/server/components/fastapi/middlewares/MiddlewareMethodsManager.js.map +0 -1
- package/dist/esm/src/server/components/fastapi/middlewares/middlewareManager.js +0 -951
- package/dist/esm/src/server/components/fastapi/middlewares/middlewareManager.js.map +0 -1
- package/dist/esm/src/server/components/fastapi/modules/UFRP/WorkerPool.js +0 -54
- package/dist/esm/src/server/components/fastapi/modules/UFRP/WorkerPool.js.map +0 -1
|
@@ -0,0 +1,1615 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* XyPrissJS - Robust I/O Worker
|
|
3
|
+
* Handles I/O-intensive tasks in a separate thread
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const { parentPort, workerData } = require("worker_threads");
|
|
7
|
+
const fs = require("fs").promises;
|
|
8
|
+
const path = require("path");
|
|
9
|
+
const crypto = require("crypto");
|
|
10
|
+
const { EventEmitter } = require("events");
|
|
11
|
+
|
|
12
|
+
// Worker configuration with defaults; anything supplied via
// workerData.config from the spawning thread overrides these values.
const WORKER_CONFIG = {
    enableLogging: true,
    maxRetries: 3,
    retryDelay: 1000, // ms; multiplied by the attempt number (linear backoff)
    timeout: 30000, // per-attempt task timeout in ms
    maxFileSize: 50 * 1024 * 1024, // 50MB
    allowedExtensions: [".json", ".txt", ".log", ".csv", ".yaml", ".yml"],
    cacheDir: path.join(process.cwd(), "cache"),
    logsDir: path.join(process.cwd(), "logs"),
    configDir: path.join(process.cwd(), "config"),
    ...workerData?.config,
};

// Shared logger from the sibling Logger module.
// NOTE(review): the previous console-based fallback was removed — if
// ./Logger fails to load, the worker now crashes at require time; confirm
// that is intended.
const externalLogger = require("./Logger");
const logger = externalLogger.logger;

// Worker event emitter for internal communication
const workerEmitter = new EventEmitter();

// In-flight tasks keyed by task id, plus the shutdown flag the message
// handler checks before accepting new work.
const taskQueue = new Map();
let isShuttingDown = false;
|
|
58
|
+
|
|
59
|
+
/**
 * Emit a log line through the shared logger under the "io-worker" context.
 * Does nothing when logging is disabled in WORKER_CONFIG.
 *
 * @param {string} level - Logger method name ("info", "warn", "error", "debug").
 * @param {string} message - Primary log message.
 * @param {...*} args - Extra values forwarded to the logger.
 */
function log(level, message, ...args) {
    if (WORKER_CONFIG.enableLogging) {
        logger[level]("io-worker", message, ...args);
    }
}
|
|
63
|
+
|
|
64
|
+
// Structured error type for I/O worker failures. Carries a machine-readable
// `code`, an optional `details` payload, and an ISO-8601 creation timestamp
// so failures can be serialized back to the main thread intact.
class IOWorkerError extends Error {
    /**
     * @param {string} message - Human-readable description.
     * @param {string} code - Stable error code (e.g. "TIMEOUT", "INVALID_INPUT").
     * @param {Object} [details={}] - Extra context for the caller.
     */
    constructor(message, code, details = {}) {
        super(message);
        Object.assign(this, {
            name: "IOWorkerError",
            code,
            details,
            timestamp: new Date().toISOString(),
        });
    }
}
|
|
74
|
+
|
|
75
|
+
/**
 * Prepare the worker for use: create the cache/logs/config directories and
 * confirm write access to the cache directory.
 *
 * @returns {Promise<boolean>} true on success.
 * @throws {IOWorkerError} code "INIT_FAILED" wrapping the underlying error.
 */
async function initializeWorker() {
    try {
        log("debug", "Initializing I/O Worker", workerData);
        log("info", "Initializing I/O Worker");

        // All three directories are created concurrently.
        const requiredDirs = [
            WORKER_CONFIG.cacheDir,
            WORKER_CONFIG.logsDir,
            WORKER_CONFIG.configDir,
        ];
        await Promise.all(requiredDirs.map((dir) => ensureDirectoryExists(dir)));

        // Verify write permissions
        await verifyWritePermissions();

        log("info", "I/O Worker initialized successfully");
        return true;
    } catch (error) {
        log("error", "Failed to initialize I/O Worker", error);
        throw new IOWorkerError("Worker initialization failed", "INIT_FAILED", {
            originalError: error.message,
        });
    }
}
|
|
100
|
+
|
|
101
|
+
/**
 * Probe the cache directory for write access by creating and immediately
 * removing a scratch file.
 *
 * @throws {IOWorkerError} code "PERMISSION_DENIED" when the probe fails.
 */
async function verifyWritePermissions() {
    const probePath = path.join(WORKER_CONFIG.cacheDir, ".write_test");
    try {
        await fs.writeFile(probePath, "test");
        await fs.unlink(probePath);
    } catch {
        throw new IOWorkerError(
            "Insufficient write permissions",
            "PERMISSION_DENIED",
            { directory: WORKER_CONFIG.cacheDir }
        );
    }
}
|
|
115
|
+
|
|
116
|
+
/**
 * Run processTask(task) with a per-attempt timeout and retry with linear
 * backoff (retryDelay * attempt). Permanent failures (see
 * isNonRetryableError) abort the retry loop immediately.
 *
 * BUG FIX: the timeout timer created for each attempt was never cleared, so
 * every completed task left a pending 30s timer alive in the worker's event
 * loop. The timer id is now captured and cleared in `finally`.
 *
 * @param {{id: *, type: string, data: *}} task - Validated task envelope.
 * @returns {Promise<*>} the handler's result.
 * @throws the last error when all attempts fail.
 */
async function processTaskWithRetry(task) {
    let lastError;

    for (let attempt = 1; attempt <= WORKER_CONFIG.maxRetries; attempt++) {
        let timeoutId;
        try {
            log("debug", `Processing task ${task.id}, attempt ${attempt}`);

            const result = await Promise.race([
                processTask(task),
                new Promise((_, reject) => {
                    timeoutId = setTimeout(
                        () => reject(new IOWorkerError("Task timeout", "TIMEOUT")),
                        WORKER_CONFIG.timeout
                    );
                }),
            ]);

            log(
                "info",
                `Task ${task.id} completed successfully on attempt ${attempt}`
            );
            return result;
        } catch (error) {
            lastError = error;
            log(
                "warn",
                `Task ${task.id} failed on attempt ${attempt}:`,
                error.message
            );

            if (
                attempt < WORKER_CONFIG.maxRetries &&
                !isNonRetryableError(error)
            ) {
                // Linear backoff before the next attempt.
                await new Promise((resolve) =>
                    setTimeout(resolve, WORKER_CONFIG.retryDelay * attempt)
                );
                continue;
            }
            break;
        } finally {
            // Release the per-attempt timeout timer (fix for timer leak).
            clearTimeout(timeoutId);
        }
    }

    throw lastError;
}
|
|
165
|
+
|
|
166
|
+
/**
 * Decide whether an error is permanent — i.e. retrying would only repeat
 * the same deterministic rejection.
 *
 * @param {Error & {code?: string}} error
 * @returns {boolean} true when the error's code marks it non-retryable.
 */
function isNonRetryableError(error) {
    const permanentCodes = new Set([
        "INVALID_INPUT",
        "PERMISSION_DENIED",
        "FILE_TOO_LARGE",
        "UNSUPPORTED_TYPE",
    ]);
    return permanentCodes.has(error.code);
}
|
|
176
|
+
|
|
177
|
+
// Wire up message handling only when actually running inside a worker
// thread (parentPort is null when this module is loaded on the main thread).
if (parentPort) {
    parentPort.on("message", async (task) => {
        // Refuse new work during shutdown; reply immediately so the sender
        // does not wait on a task that will never run.
        if (isShuttingDown) {
            parentPort.postMessage({
                success: false,
                taskId: task.id,
                error: "Worker is shutting down",
                code: "SHUTTING_DOWN",
            });
            return;
        }

        const startTime = Date.now();
        // Record the task as in-flight; removed again in `finally` below.
        taskQueue.set(task.id, { task, startTime });

        try {
            log("info", `Processing task ${task.id} of type ${task.type}`);

            // Validate the envelope before any processing (throws on bad input).
            validateTask(task);

            const result = await processTaskWithRetry(task);

            // Success reply, including wall-clock execution time.
            parentPort.postMessage({
                success: true,
                taskId: task.id,
                result: result,
                executionTime: Date.now() - startTime,
                timestamp: new Date().toISOString(),
            });
        } catch (error) {
            log("error", `Error processing task ${task.id}:`, error);

            // Failure reply; IOWorkerError instances carry code/details,
            // anything else falls back to UNKNOWN_ERROR.
            parentPort.postMessage({
                success: false,
                taskId: task.id,
                error: error.message,
                code: error.code || "UNKNOWN_ERROR",
                details: error.details || {},
                executionTime: Date.now() - startTime,
                timestamp: new Date().toISOString(),
            });
        } finally {
            // Always clear the in-flight record, success or failure.
            taskQueue.delete(task.id);
        }
    });

    parentPort.on("error", (error) => {
        log("error", "Worker parent port error:", error);
    });
}
|
|
229
|
+
|
|
230
|
+
/**
 * Validate the task envelope received from the main thread.
 * Requires an object with a truthy `id`, a recognised `type`, and a `data`
 * payload.
 *
 * BUG FIX: the payload check was `!task.data`, which also rejected
 * present-but-falsy payloads (0, "", false). Only a missing (null/undefined)
 * payload is now an error.
 *
 * @param {*} task - Raw message received on parentPort.
 * @throws {IOWorkerError} code "INVALID_INPUT" on any violation.
 */
function validateTask(task) {
    if (!task || typeof task !== "object") {
        throw new IOWorkerError("Invalid task object", "INVALID_INPUT");
    }

    if (!task.id) {
        throw new IOWorkerError("Task ID is required", "INVALID_INPUT");
    }

    if (!task.type) {
        throw new IOWorkerError("Task type is required", "INVALID_INPUT");
    }

    const validTypes = [
        "read",
        "write",
        "validate",
        "delete",
        "process",
        "batch",
    ];
    if (!validTypes.includes(task.type)) {
        throw new IOWorkerError(
            `Invalid task type: ${task.type}`,
            "INVALID_INPUT",
            { validTypes }
        );
    }

    // Accept any non-nullish payload (handlers interpret its shape).
    if (task.data == null) {
        throw new IOWorkerError("Task data is required", "INVALID_INPUT");
    }
}
|
|
264
|
+
|
|
265
|
+
/**
 * Route a validated task to its type-specific handler and return the
 * handler's result.
 *
 * @param {{type: string, data: *}} task - Validated task envelope.
 * @returns {Promise<*>} result of the matching handler.
 * @throws {IOWorkerError} code "INVALID_TASK_TYPE" for unrecognised types.
 */
async function processTask(task) {
    const { type, data } = task;

    // Dispatch table instead of a switch; Map avoids prototype-key hits.
    const handlers = new Map([
        ["read", processReadTask],
        ["write", processWriteTask],
        ["validate", processValidateTask],
        ["delete", processDeleteTask],
        ["process", processGenericTask],
        ["batch", processBatchTask],
    ]);

    if (!handlers.has(type)) {
        throw new IOWorkerError(
            `Unknown task type: ${type}`,
            "INVALID_TASK_TYPE"
        );
    }
    return await handlers.get(type)(data);
}
|
|
291
|
+
|
|
292
|
+
/**
 * Handle a "read" task: gather cache contents, configuration, request logs,
 * and any explicitly listed files, in parallel, tolerating individual
 * failures.
 *
 * BUG FIX: the results were destructured positionally as
 * `[cacheData, configData, logData, ...fileData]`, but the operations array
 * was built conditionally — skipping the cache read (includeCache === false),
 * omitting `path`, or omitting `includeLogs` shifted every later result into
 * the wrong slot (e.g. config data reported as cache data). The first three
 * slots are now always present, with resolved-null placeholders for skipped
 * operations, so positions are stable.
 *
 * @param {Object} data - Read request (method, path, query, headers,
 *   includeCache, includeLogs, files[]).
 * @returns {Promise<Object>} aggregated read results plus timing metadata.
 * @throws {IOWorkerError} code "READ_FAILED" wrapping the underlying error.
 */
async function processReadTask(data) {
    const startTime = Date.now();

    try {
        validateReadData(data);

        const operations = [];

        // Slot 0: cache (placeholder when explicitly disabled).
        operations.push(
            data.includeCache !== false
                ? readFromCache(data).catch((error) => {
                      log("warn", "Cache read failed:", error.message);
                      return null;
                  })
                : Promise.resolve(null)
        );

        // Slot 1: configuration (placeholder when no path given).
        operations.push(
            data.path
                ? readConfiguration(data.path).catch((error) => {
                      log("warn", "Config read failed:", error.message);
                      return null;
                  })
                : Promise.resolve(null)
        );

        // Slot 2: request logs (placeholder when not requested).
        operations.push(
            data.includeLogs
                ? readRequestLogs(data).catch((error) => {
                      log("warn", "Log read failed:", error.message);
                      return [];
                  })
                : Promise.resolve(null)
        );

        // Slots 3+: explicitly requested files.
        if (data.files && Array.isArray(data.files)) {
            for (const filePath of data.files) {
                operations.push(readFile(filePath));
            }
        }

        const [cacheData, configData, logData, ...fileData] =
            await Promise.allSettled(operations);

        return {
            method: data.method,
            path: data.path,
            query: data.query,
            headers: sanitizeHeaders(data.headers),
            cacheData: getSettledValue(cacheData),
            configData: getSettledValue(configData),
            logData: getSettledValue(logData),
            fileData: fileData.map(getSettledValue),
            processingTime: Date.now() - startTime,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError(
            `Read operation failed: ${error.message}`,
            "READ_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
363
|
+
|
|
364
|
+
/**
 * Check a read-task payload: `files`, when provided (truthy), must be an
 * array of string paths that each pass validateFilePath.
 *
 * @param {Object} data - Read request payload.
 * @throws {IOWorkerError} code "INVALID_INPUT" on violation.
 */
function validateReadData(data) {
    const { files } = data;
    if (!files) {
        return; // nothing to validate (mirrors the original truthy guard)
    }

    if (!Array.isArray(files)) {
        throw new IOWorkerError("Files must be an array", "INVALID_INPUT");
    }

    for (const filePath of files) {
        if (typeof filePath !== "string") {
            throw new IOWorkerError(
                "File path must be a string",
                "INVALID_INPUT"
            );
        }
        validateFilePath(filePath);
    }
}
|
|
381
|
+
|
|
382
|
+
/**
 * Handle a "write" task: fan out cache, log, config, and file writes in
 * parallel and return a per-target result map. Individual write failures are
 * captured in the result map rather than failing the whole task.
 *
 * BUG FIXES:
 *  - A rejected settlement was previously used directly as the operation
 *    object, producing a bogus `results[undefined]` entry; rejections are
 *    now recorded under "unknown" with the reason's message.
 *  - When several files were written, each one overwrote the previous
 *    entry at `results.file` (last-one-wins). `results.files` now keeps an
 *    entry per file; `results.file` is kept for backward compatibility.
 *
 * @param {Object} data - Write request (cacheData, logEntry, updateConfig,
 *   files[] of {path, content, options}).
 * @returns {Promise<Object>} sanitized request echo plus per-target results.
 * @throws {IOWorkerError} code "WRITE_FAILED" wrapping the underlying error.
 */
async function processWriteTask(data) {
    const startTime = Date.now();

    try {
        validateWriteData(data);

        const operations = [];
        const results = {};

        // Write to cache if data provided
        if (data.cacheData) {
            operations.push(
                writeToCache({ ...data, data: data.cacheData })
                    .then((result) => ({ type: "cache", result }))
                    .catch((error) => ({ type: "cache", error: error.message }))
            );
        }

        // Append a log entry unless explicitly disabled
        if (data.logEntry !== false) {
            operations.push(
                writeToLogs(data)
                    .then((result) => ({ type: "logs", result }))
                    .catch((error) => ({ type: "logs", error: error.message }))
            );
        }

        // Update configuration unless explicitly disabled
        if (data.updateConfig !== false) {
            operations.push(
                updateConfiguration(data)
                    .then((result) => ({ type: "config", result }))
                    .catch((error) => ({
                        type: "config",
                        error: error.message,
                    }))
            );
        }

        // Write custom files if specified
        if (data.files && Array.isArray(data.files)) {
            for (const fileData of data.files) {
                operations.push(
                    writeFile(fileData.path, fileData.content, fileData.options)
                        .then((result) => ({
                            type: "file",
                            path: fileData.path,
                            result,
                        }))
                        .catch((error) => ({
                            type: "file",
                            path: fileData.path,
                            error: error.message,
                        }))
                );
            }
        }

        const operationResults = await Promise.allSettled(operations);

        // Collect results; one entry per file under results.files.
        results.files = [];
        for (const settledResult of operationResults) {
            const operation =
                settledResult.status === "fulfilled"
                    ? settledResult.value
                    : {
                          type: "unknown",
                          error: String(
                              settledResult.reason?.message ??
                                  settledResult.reason
                          ),
                      };
            results[operation.type] = operation.result || {
                error: operation.error,
            };
            if (operation.type === "file") {
                results.files.push({
                    path: operation.path,
                    result: operation.result,
                    error: operation.error,
                });
            }
        }

        return {
            ...sanitizeData(data),
            results,
            timestamp: new Date().toISOString(),
            processingTime: Date.now() - startTime,
        };
    } catch (error) {
        throw new IOWorkerError(
            `Write operation failed: ${error.message}`,
            "WRITE_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
470
|
+
|
|
471
|
+
/**
 * Check a write-task payload: `files`, when provided (truthy), must be an
 * array of {path, content} objects whose paths pass validateFilePath.
 *
 * BUG FIX: the check was `!file.path || !file.content`, which rejected valid
 * falsy content such as the empty string "" or 0. Only a missing
 * (null/undefined) content is now an error; path must still be truthy.
 *
 * @param {Object} data - Write request payload.
 * @throws {IOWorkerError} code "INVALID_INPUT" on violation.
 */
function validateWriteData(data) {
    if (data.files && !Array.isArray(data.files)) {
        throw new IOWorkerError("Files must be an array", "INVALID_INPUT");
    }

    if (data.files) {
        for (const file of data.files) {
            if (!file.path || file.content == null) {
                throw new IOWorkerError(
                    "File must have path and content",
                    "INVALID_INPUT"
                );
            }
            validateFilePath(file.path);
        }
    }
}
|
|
488
|
+
|
|
489
|
+
/**
 * Handle a "validate" task: run the applicable validators (file structure,
 * permissions, optional content-against-schema, optional configuration) and
 * report an overall pass/fail plus the individual results.
 *
 * @param {Object} data - Validate request (path, content, schema,
 *   validateConfig).
 * @returns {Promise<Object>} { valid, validationResults, data, timings }.
 * @throws {IOWorkerError} code "VALIDATION_FAILED" wrapping the error.
 */
async function processValidateTask(data) {
    const startTime = Date.now();

    try {
        // Sequential awaits, matching the original evaluation order.
        const fileStructure = await validateFileStructure(data.path);
        const permissions = await validatePermissions(data.path);
        const content = data.content
            ? await validateContent(data.content, data.schema)
            : null;
        const configuration = data.validateConfig
            ? await validateConfiguration()
            : null;

        const validationResults = {
            fileStructure,
            permissions,
            content,
            configuration,
        };

        // Skipped validators (null) count as passing.
        const isValid = Object.values(validationResults).every(
            (result) => result === null || result.valid === true
        );

        return {
            valid: isValid,
            validationResults,
            data: sanitizeData(data),
            processingTime: Date.now() - startTime,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError(
            `Validation failed: ${error.message}`,
            "VALIDATION_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
526
|
+
|
|
527
|
+
/**
 * Handle a "delete" task: remove listed files, optionally clear the cache,
 * and optionally prune old logs. Each sub-operation's failure is recorded in
 * the result instead of aborting the task.
 *
 * @param {Object} data - Delete request (files[], clearCache, cachePattern,
 *   cleanLogs, logRetentionDays).
 * @returns {Promise<Object>} { deleteResults, data, timings }.
 * @throws {IOWorkerError} code "DELETE_FAILED" wrapping the underlying error.
 */
async function processDeleteTask(data) {
    const startTime = Date.now();

    try {
        validateDeleteData(data);

        const deleteResults = {
            files: [],
            cache: null,
            logs: null,
        };

        // Remove each requested file; record per-file success or failure.
        const requestedFiles = Array.isArray(data.files) ? data.files : [];
        for (const filePath of requestedFiles) {
            try {
                await deleteFile(filePath);
                deleteResults.files.push({ path: filePath, deleted: true });
            } catch (error) {
                deleteResults.files.push({
                    path: filePath,
                    deleted: false,
                    error: error.message,
                });
            }
        }

        // Clear cache if requested
        if (data.clearCache) {
            try {
                deleteResults.cache = await clearCache(data.cachePattern);
            } catch (error) {
                deleteResults.cache = { cleared: false, error: error.message };
            }
        }

        // Clean logs if requested (default retention: 30 days)
        if (data.cleanLogs) {
            try {
                deleteResults.logs = await cleanLogs(
                    data.logRetentionDays || 30
                );
            } catch (error) {
                deleteResults.logs = { cleaned: false, error: error.message };
            }
        }

        return {
            deleteResults,
            data: sanitizeData(data),
            processingTime: Date.now() - startTime,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError(
            `Delete operation failed: ${error.message}`,
            "DELETE_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
592
|
+
|
|
593
|
+
/**
 * Validate the payload of a delete task before any destructive work starts.
 *
 * @param {object} data - Delete-task payload; `files`, when present, must be
 *   an array of paths that each pass the path-security checks.
 * @throws {IOWorkerError} INVALID_INPUT when `files` is not an array, or
 *   whatever validateFilePath throws for an unsafe path.
 */
function validateDeleteData(data) {
    if (data.files && !Array.isArray(data.files)) {
        throw new IOWorkerError("Files must be an array", "INVALID_INPUT");
    }

    // Every listed path must pass the security checks (traversal, extension,
    // working-directory containment) before anything is removed.
    for (const filePath of data.files || []) {
        validateFilePath(filePath);
    }
}
|
|
604
|
+
|
|
605
|
+
/**
 * Enhanced generic task processing.
 *
 * Dispatches on `data.operation` ("compress" | "backup" | "sync" |
 * "analyze") and runs the matching helper, collecting outcomes via
 * Promise.allSettled so a rejected operation is reported rather than thrown.
 *
 * @param {object} data - Task payload with `operation` plus its inputs
 *   (`files`, `source`, `destination`/`target` as the operation requires).
 * @returns {Promise<object>} Operation results plus timing metadata.
 * @throws {IOWorkerError} INVALID_OPERATION for unknown operations,
 *   GENERIC_FAILED for any other failure.
 */
async function processGenericTask(data) {
    const startTime = Date.now();

    try {
        const operations = [];

        // Custom processing based on operation type; each case pushes a
        // pending promise (at most one with the current dispatch table).
        if (data.operation) {
            switch (data.operation) {
                case "compress":
                    operations.push(compressFiles(data.files));
                    break;
                case "backup":
                    operations.push(
                        createBackup(data.source, data.destination)
                    );
                    break;
                case "sync":
                    operations.push(syncDirectories(data.source, data.target));
                    break;
                case "analyze":
                    operations.push(analyzeFiles(data.files));
                    break;
                default:
                    throw new IOWorkerError(
                        `Unknown operation: ${data.operation}`,
                        "INVALID_OPERATION"
                    );
            }
        }

        const results = await Promise.allSettled(operations);

        return {
            operation: data.operation,
            results: results.map(getSettledValue),
            data: sanitizeData(data),
            processingTime: Date.now() - startTime,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError(
            `Generic operation failed: ${error.message}`,
            "GENERIC_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
656
|
+
|
|
657
|
+
/**
 * Process a batch of sub-tasks with bounded concurrency.
 *
 * Tasks are run in fixed-size windows of `concurrency`; each sub-task's
 * outcome is captured as a { success, ... } record so one failure never
 * aborts the batch.
 *
 * @param {object} data - Batch descriptor.
 * @param {Array<object>} data.tasks - Sub-task payloads forwarded to processTask().
 * @param {number} [data.concurrency=3] - Max tasks processed in parallel.
 * @param {string} [data.batchId] - Identifier prefixed onto generated task ids.
 * @returns {Promise<object>} Summary with per-task results and success/failure counts.
 * @throws {IOWorkerError} INVALID_INPUT when `tasks` is not an array,
 *   BATCH_FAILED for any other failure.
 */
async function processBatchTask(data) {
    const startTime = Date.now();

    try {
        if (!Array.isArray(data.tasks)) {
            throw new IOWorkerError(
                "Batch tasks must be an array",
                "INVALID_INPUT"
            );
        }

        const batchResults = [];
        const concurrency = data.concurrency || 3;

        // Process tasks in fixed-size windows of `concurrency`.
        for (let i = 0; i < data.tasks.length; i += concurrency) {
            const batch = data.tasks.slice(i, i + concurrency);
            const batchPromises = batch.map(async (task, index) => {
                try {
                    const result = await processTask({
                        id: `${data.batchId || "batch"}_${i + index}`,
                        ...task,
                    });
                    return { success: true, result, taskIndex: i + index };
                } catch (error) {
                    return {
                        success: false,
                        error: error.message,
                        code: error.code,
                        taskIndex: i + index,
                    };
                }
            });

            // The per-task handlers above never reject (errors become
            // { success: false } records), so Promise.all is safe here and
            // avoids the original's redundant allSettled + unwrap round-trip.
            batchResults.push(...(await Promise.all(batchPromises)));
        }

        const successful = batchResults.filter((r) => r.success).length;
        const failed = batchResults.length - successful;

        return {
            batchId: data.batchId,
            totalTasks: data.tasks.length,
            successful,
            failed,
            results: batchResults,
            processingTime: Date.now() - startTime,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError(
            `Batch operation failed: ${error.message}`,
            "BATCH_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
718
|
+
|
|
719
|
+
// ===== ENHANCED I/O UTILITY FUNCTIONS =====
|
|
720
|
+
|
|
721
|
+
/**
|
|
722
|
+
* Enhanced cache operations with better error handling
|
|
723
|
+
*/
|
|
724
|
+
/**
 * Look up a cached response for `data` on disk.
 *
 * Returns the parsed cache entry tagged with `cached: true`, its age and
 * key, or null on miss, expiry, or any read/parse failure — cache errors
 * are never fatal.
 *
 * @param {object} data - Request descriptor used to derive the cache key;
 *   `cacheMaxAge` (seconds), when set, turns stale entries into misses.
 * @returns {Promise<object|null>}
 */
async function readFromCache(data) {
    try {
        const cacheKey = generateCacheKey(data);
        const cachePath = path.join(WORKER_CONFIG.cacheDir, `${cacheKey}.json`);

        if (!(await fileExists(cachePath))) {
            return null;
        }

        const stats = await fs.stat(cachePath);

        // Check if cache is expired (age measured from the file's mtime).
        if (data.cacheMaxAge) {
            const ageMs = Date.now() - stats.mtime.getTime();
            if (ageMs > data.cacheMaxAge * 1000) {
                log("debug", "Cache expired for key:", cacheKey);
                return null;
            }
        }

        const cacheContent = await fs.readFile(cachePath, "utf8");
        const parsed = JSON.parse(cacheContent);

        return {
            ...parsed,
            cached: true,
            cacheAge: Date.now() - stats.mtime.getTime(),
            cacheKey,
        };
    } catch (error) {
        // Any failure is treated as a cache miss; warn so misconfiguration
        // stays visible in logs.
        log("warn", "Cache read failed:", error.message);
        return null;
    }
}
|
|
758
|
+
|
|
759
|
+
/**
 * Persist `data` as a pretty-printed JSON cache entry on disk.
 *
 * @param {object} data - Payload to cache; the cache key is derived from it.
 * @returns {Promise<object>} { success, path, size, cacheKey }
 * @throws {IOWorkerError} CACHE_WRITE_FAILED on any I/O failure.
 */
async function writeToCache(data) {
    try {
        const cacheKey = generateCacheKey(data);
        const cachePath = path.join(WORKER_CONFIG.cacheDir, `${cacheKey}.json`);

        // Entry is the payload plus write-time metadata.
        const payload = JSON.stringify(
            { ...data, cachedAt: new Date().toISOString(), cacheKey },
            null,
            2
        );
        await fs.writeFile(cachePath, payload);

        const { size } = await fs.stat(cachePath);
        return { success: true, path: cachePath, size, cacheKey };
    } catch (error) {
        log("error", "Cache write failed:", error.message);
        throw new IOWorkerError("Cache write failed", "CACHE_WRITE_FAILED", {
            originalError: error.message,
        });
    }
}
|
|
787
|
+
|
|
788
|
+
/**
|
|
789
|
+
* Enhanced configuration operations
|
|
790
|
+
*/
|
|
791
|
+
/**
 * Read the per-route configuration entry for `requestPath` from routes.json.
 * Missing file, missing key, or a parse error all yield null (non-fatal).
 *
 * @param {string} requestPath - Route key to look up.
 * @returns {Promise<object|null>}
 */
async function readConfiguration(requestPath) {
    try {
        const configPath = path.join(WORKER_CONFIG.configDir, "routes.json");

        if (!(await fileExists(configPath))) return null;

        const config = JSON.parse(await fs.readFile(configPath, "utf8"));
        return config[requestPath] || null;
    } catch (error) {
        log("warn", "Config read failed:", error.message);
        return null;
    }
}
|
|
808
|
+
|
|
809
|
+
/**
 * Record an access to `data.path` in routes.json, bumping its access count
 * and refreshing its metadata.
 *
 * @param {object} data - Request info: `path`, `method`, `headers`.
 * @returns {Promise<object>} { success, path, size, updatedPath }
 * @throws {IOWorkerError} CONFIG_UPDATE_FAILED on any I/O or parse failure.
 */
async function updateConfiguration(data) {
    try {
        const configPath = path.join(WORKER_CONFIG.configDir, "routes.json");

        // Start from the existing config when present, else an empty map.
        const config = (await fileExists(configPath))
            ? JSON.parse(await fs.readFile(configPath, "utf8"))
            : {};

        const previousCount = config[data.path]?.accessCount || 0;

        // Update configuration with enhanced metadata.
        config[data.path] = {
            lastAccessed: new Date().toISOString(),
            method: data.method,
            headers: sanitizeHeaders(data.headers),
            accessCount: previousCount + 1,
            userAgent: data.headers?.["user-agent"]?.substring(0, 100), // Truncate for security
        };

        await fs.writeFile(configPath, JSON.stringify(config, null, 2));

        const stats = await fs.stat(configPath);

        return {
            success: true,
            path: configPath,
            size: stats.size,
            updatedPath: data.path,
        };
    } catch (error) {
        log("error", "Config update failed:", error.message);
        throw new IOWorkerError(
            "Config update failed",
            "CONFIG_UPDATE_FAILED",
            { originalError: error.message }
        );
    }
}
|
|
847
|
+
|
|
848
|
+
/**
|
|
849
|
+
* Enhanced logging operations
|
|
850
|
+
*/
|
|
851
|
+
/**
 * Append a structured request entry to requests.log (one JSON object per
 * line), rotating the file once it exceeds 10MB.
 *
 * @param {object} data - Request details (`method`, `path`, `statusCode`,
 *   `responseTime`, `headers`, `ip`).
 * @returns {Promise<object>} { success, path, size } — size measured after append.
 * @throws {IOWorkerError} LOG_WRITE_FAILED on any I/O failure.
 */
async function writeToLogs(data) {
    try {
        const logPath = path.join(WORKER_CONFIG.logsDir, "requests.log");

        const logEntry = {
            timestamp: new Date().toISOString(),
            method: data.method,
            path: data.path,
            statusCode: data.statusCode,
            responseTime: data.responseTime,
            // Truncated to cap log size and limit header-abuse surface.
            userAgent: data.headers?.["user-agent"]?.substring(0, 100),
            ip: data.ip || "unknown",
        };

        const logLine = JSON.stringify(logEntry) + "\n";

        await fs.appendFile(logPath, logLine);

        // Rotate log if it gets too large (size checked after the append).
        const stats = await fs.stat(logPath);
        if (stats.size > 10 * 1024 * 1024) {
            // 10MB
            await rotateLog(logPath);
        }

        return {
            success: true,
            path: logPath,
            size: stats.size,
        };
    } catch (error) {
        log("error", "Log write failed:", error.message);
        throw new IOWorkerError("Log write failed", "LOG_WRITE_FAILED", {
            originalError: error.message,
        });
    }
}
|
|
888
|
+
|
|
889
|
+
/**
 * Read the most recent matching entries from requests.log.
 *
 * Lines that parse as JSON are filtered by `data.path` against their `path`
 * field; unparseable lines fall back to a raw substring match and are
 * returned as plain strings. Any read failure yields an empty array.
 *
 * @param {object} data - Query: `path` (optional substring filter) and
 *   `limit` (max entries, default 10, newest last).
 * @returns {Promise<Array<object|string>>}
 */
async function readRequestLogs(data) {
    try {
        const logPath = path.join(WORKER_CONFIG.logsDir, "requests.log");

        if (!(await fileExists(logPath))) {
            return [];
        }

        const logContent = await fs.readFile(logPath, "utf8");

        // Parse each line exactly once (the original parsed every line twice:
        // once while filtering and again while mapping).
        return logContent
            .split("\n")
            .filter((line) => line.trim())
            .map((line) => {
                try {
                    return { ok: true, value: JSON.parse(line), raw: line };
                } catch {
                    return { ok: false, value: null, raw: line };
                }
            })
            .filter((entry) => {
                if (!entry.ok) return entry.raw.includes(data.path || "");
                try {
                    return data.path
                        ? entry.value.path?.includes(data.path)
                        : true;
                } catch {
                    // e.g. JSON "null": property access throws — fall back to
                    // the raw substring match, matching original behavior.
                    return entry.raw.includes(data.path || "");
                }
            })
            .slice(-(data.limit || 10))
            .map((entry) => (entry.ok ? entry.value : entry.raw));
    } catch (error) {
        log("warn", "Log read failed:", error.message);
        return [];
    }
}
|
|
924
|
+
|
|
925
|
+
// ===== ADDITIONAL UTILITY FUNCTIONS =====
|
|
926
|
+
|
|
927
|
+
/**
|
|
928
|
+
* General file operations
|
|
929
|
+
*/
|
|
930
|
+
/**
 * Read a UTF-8 text file after security and size checks.
 *
 * @param {string} filePath - Path to read; must pass validateFilePath.
 * @returns {Promise<object>} { path, content, size, modified, created }
 * @throws {IOWorkerError} FILE_TOO_LARGE when the file exceeds the configured limit.
 */
async function readFile(filePath) {
    validateFilePath(filePath);

    const stats = await fs.stat(filePath);
    if (stats.size > WORKER_CONFIG.maxFileSize) {
        throw new IOWorkerError("File too large", "FILE_TOO_LARGE", {
            size: stats.size,
            maxSize: WORKER_CONFIG.maxFileSize,
        });
    }

    return {
        path: filePath,
        content: await fs.readFile(filePath, "utf8"),
        size: stats.size,
        modified: stats.mtime,
        created: stats.birthtime,
    };
}
|
|
952
|
+
|
|
953
|
+
/**
 * Write text content to a file, creating parent directories as needed.
 *
 * @param {string} filePath - Destination path; must pass validateFilePath.
 * @param {string} content - Content to write.
 * @param {object} [options] - `encoding` (default "utf8").
 * @returns {Promise<object>} { path, size, written }
 * @throws {IOWorkerError} CONTENT_TOO_LARGE when content exceeds the size limit.
 */
async function writeFile(filePath, content, options = {}) {
    validateFilePath(filePath);

    const byteLength = Buffer.byteLength(content, "utf8");
    if (byteLength > WORKER_CONFIG.maxFileSize) {
        throw new IOWorkerError("Content too large", "CONTENT_TOO_LARGE");
    }

    await ensureDirectoryExists(path.dirname(filePath));
    await fs.writeFile(filePath, content, options.encoding || "utf8");

    const { size } = await fs.stat(filePath);
    return { path: filePath, size, written: true };
}
|
|
971
|
+
|
|
972
|
+
/**
 * Delete a single file if it exists; a missing file is reported, not thrown.
 *
 * @param {string} filePath - Path to delete; must pass validateFilePath.
 * @returns {Promise<object>} { deleted, path } or { deleted: false, reason }
 */
async function deleteFile(filePath) {
    validateFilePath(filePath);

    const exists = await fileExists(filePath);
    if (!exists) {
        return { deleted: false, reason: "File does not exist" };
    }

    await fs.unlink(filePath);
    return { deleted: true, path: filePath };
}
|
|
982
|
+
|
|
983
|
+
/**
|
|
984
|
+
* Validation functions
|
|
985
|
+
*/
|
|
986
|
+
/**
 * Inspect a path and report its basic structure (type, size, permission bits).
 * A falsy path is vacuously valid; a stat failure yields { valid: false }.
 *
 * @param {string|undefined} basePath
 * @returns {Promise<object>}
 */
async function validateFileStructure(basePath) {
    if (!basePath) return { valid: true };

    try {
        const stats = await fs.stat(basePath);
        // Lower 9 mode bits rendered as octal, e.g. "755".
        const permissions = (stats.mode & 0o777).toString(8);
        return {
            valid: true,
            isDirectory: stats.isDirectory(),
            isFile: stats.isFile(),
            size: stats.size,
            permissions,
        };
    } catch (error) {
        return { valid: false, error: error.message };
    }
}
|
|
1002
|
+
|
|
1003
|
+
/**
 * Probe read/write access for a path. A falsy path is vacuously valid;
 * an unreadable (or missing) path yields { valid: false }.
 *
 * @param {string|undefined} filePath
 * @returns {Promise<{valid: boolean, readable?: boolean, writable?: boolean}>}
 */
async function validatePermissions(filePath) {
    if (!filePath) return { valid: true };

    const accessible = async (mode) => {
        try {
            await fs.access(filePath, mode);
            return true;
        } catch {
            return false;
        }
    };

    if (await accessible(fs.constants.R_OK | fs.constants.W_OK)) {
        return { valid: true, readable: true, writable: true };
    }
    if (await accessible(fs.constants.R_OK)) {
        return { valid: true, readable: true, writable: false };
    }
    return { valid: false, readable: false, writable: false };
}
|
|
1018
|
+
|
|
1019
|
+
/**
 * Validate content against a (currently minimal) schema description.
 * Only `schema.type === "json"` triggers real validation; everything else
 * is accepted as type "unknown". No schema means vacuously valid.
 *
 * @param {string} content
 * @param {object|undefined} schema - e.g. { type: "json" }
 * @returns {Promise<object>}
 */
async function validateContent(content, schema) {
    if (!schema) return { valid: true };

    try {
        switch (schema.type) {
            case "json":
                JSON.parse(content); // throws on malformed input
                return { valid: true, type: "json" };
            default:
                // Unrecognized schema types are accepted for now.
                return { valid: true, type: "unknown" };
        }
    } catch (error) {
        return { valid: false, error: error.message };
    }
}
|
|
1035
|
+
|
|
1036
|
+
/**
 * Check that routes.json exists and contains parseable JSON.
 *
 * @returns {Promise<{valid: boolean, error?: string}>}
 */
async function validateConfiguration() {
    const configPath = path.join(WORKER_CONFIG.configDir, "routes.json");

    try {
        const exists = await fileExists(configPath);
        if (!exists) {
            return { valid: false, error: "Configuration file not found" };
        }

        JSON.parse(await fs.readFile(configPath, "utf8")); // throws if malformed
        return { valid: true };
    } catch (error) {
        return { valid: false, error: error.message };
    }
}
|
|
1052
|
+
|
|
1053
|
+
/**
|
|
1054
|
+
* Advanced operations
|
|
1055
|
+
*/
|
|
1056
|
+
/**
 * Remove cache files whose names match `pattern` ("*" matches everything,
 * otherwise a substring match). Per-file failures are collected.
 *
 * @param {string} [pattern="*"]
 * @returns {Promise<{cleared: number, errors: Array, totalFiles: number}>}
 */
async function clearCache(pattern = "*") {
    const cacheDir = WORKER_CONFIG.cacheDir;
    const files = await fs.readdir(cacheDir);

    const errors = [];
    let cleared = 0;

    const matches = (name) => pattern === "*" || name.includes(pattern);

    for (const file of files.filter(matches)) {
        try {
            await fs.unlink(path.join(cacheDir, file));
            cleared += 1;
        } catch (error) {
            errors.push({ file, error: error.message });
        }
    }

    return { cleared, errors, totalFiles: files.length };
}
|
|
1076
|
+
|
|
1077
|
+
/**
 * Delete *.log files in the logs directory older than the retention window.
 * Per-file failures are collected rather than aborting the sweep.
 *
 * @param {number} [retentionDays=30]
 * @returns {Promise<{cleaned: number, errors: Array, retentionDays: number}>}
 * @throws {IOWorkerError} LOG_CLEANUP_FAILED when the directory itself
 *   cannot be listed.
 */
async function cleanLogs(retentionDays = 30) {
    const logsDir = WORKER_CONFIG.logsDir;
    const cutoffMs = Date.now() - retentionDays * 24 * 60 * 60 * 1000;

    try {
        const entries = await fs.readdir(logsDir);
        const errors = [];
        let cleaned = 0;

        for (const file of entries.filter((name) => name.endsWith(".log"))) {
            const filePath = path.join(logsDir, file);
            try {
                const stats = await fs.stat(filePath);
                if (stats.mtime.getTime() < cutoffMs) {
                    await fs.unlink(filePath);
                    cleaned += 1;
                }
            } catch (error) {
                errors.push({ file, error: error.message });
            }
        }

        return { cleaned, errors, retentionDays };
    } catch (error) {
        throw new IOWorkerError("Log cleanup failed", "LOG_CLEANUP_FAILED", {
            originalError: error.message,
        });
    }
}
|
|
1110
|
+
|
|
1111
|
+
/**
 * Rotate a log file by renaming it with an ISO-timestamp suffix
 * (":" and "." replaced with "-" for filesystem safety).
 *
 * @param {string} logPath - Log file to rotate.
 * @returns {Promise<{rotated: boolean, newPath: string}>}
 * @throws {IOWorkerError} LOG_ROTATION_FAILED on rename failure.
 */
async function rotateLog(logPath) {
    try {
        const stamp = new Date().toISOString().replace(/[:.]/g, "-");
        const rotatedPath = `${logPath}.${stamp}`;

        await fs.rename(logPath, rotatedPath);
        log("info", `Log rotated: ${rotatedPath}`);

        return { rotated: true, newPath: rotatedPath };
    } catch (error) {
        log("error", "Log rotation failed:", error.message);
        throw new IOWorkerError("Log rotation failed", "LOG_ROTATION_FAILED", {
            originalError: error.message,
        });
    }
}
|
|
1127
|
+
|
|
1128
|
+
async function compressFiles(files) {
    // TODO: file compression is not implemented yet — this placeholder gives
    // the "compress" operation a well-defined failure mode (NOT_IMPLEMENTED)
    // instead of an undefined-function crash.
    throw new IOWorkerError(
        "Compression not yet implemented",
        "NOT_IMPLEMENTED"
    );
}
|
|
1137
|
+
|
|
1138
|
+
/**
 * Copy a file or directory tree from `source` to `destination`.
 *
 * @param {string} source - File or directory to back up.
 * @param {string} destination - Target path; parents are created as needed.
 * @returns {Promise<object>} { success, source, destination, size, created }
 * @throws {IOWorkerError} BACKUP_FAILED on any validation or copy failure.
 */
async function createBackup(source, destination) {
    try {
        validateFilePath(source);
        validateFilePath(destination);

        await ensureDirectoryExists(path.dirname(destination));

        // Directories are copied recursively; plain files directly.
        const sourceStats = await fs.stat(source);
        if (sourceStats.isDirectory()) {
            await copyDirectory(source, destination);
        } else {
            await fs.copyFile(source, destination);
        }

        const destStats = await fs.stat(destination);
        return {
            success: true,
            source,
            destination,
            size: destStats.size,
            created: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError("Backup creation failed", "BACKUP_FAILED", {
            source,
            destination,
            originalError: error.message,
        });
    }
}
|
|
1169
|
+
|
|
1170
|
+
/**
 * Recursively copy a directory tree, creating the destination as needed.
 *
 * @param {string} source - Directory to copy from.
 * @param {string} destination - Directory to copy into.
 */
async function copyDirectory(source, destination) {
    await ensureDirectoryExists(destination);

    const entries = await fs.readdir(source, { withFileTypes: true });
    for (const entry of entries) {
        const from = path.join(source, entry.name);
        const to = path.join(destination, entry.name);

        if (entry.isDirectory()) {
            await copyDirectory(from, to); // recurse into subdirectories
        } else {
            await fs.copyFile(from, to);
        }
    }
}
|
|
1186
|
+
|
|
1187
|
+
/**
 * One-way sync: make `target` mirror `source`.
 *
 * Copies new files, refreshes files whose source mtime is newer, and deletes
 * target files absent from source. Per-file failures are collected in
 * `operations.errors` rather than aborting the sync.
 *
 * @param {string} source - Directory to mirror from.
 * @param {string} target - Directory to mirror into (created if missing).
 * @returns {Promise<object>} { success, source, target, operations, timestamp }
 * @throws {IOWorkerError} SYNC_FAILED on validation or listing failure.
 */
async function syncDirectories(source, target) {
    try {
        validateFilePath(source);
        validateFilePath(target);

        await ensureDirectoryExists(target);

        const sourceFiles = await getDirectoryFiles(source);
        const targetFiles = await getDirectoryFiles(target);
        // O(1) membership lookups instead of Array.includes inside the loops
        // (the original was O(n^2) over the two file lists).
        const sourceSet = new Set(sourceFiles);
        const targetSet = new Set(targetFiles);

        const operations = {
            copied: 0,
            updated: 0,
            deleted: 0,
            errors: [],
        };

        // Copy new files and refresh stale ones (source mtime newer than target).
        for (const file of sourceFiles) {
            const sourcePath = path.join(source, file);
            const targetPath = path.join(target, file);

            try {
                const sourceStats = await fs.stat(sourcePath);
                let shouldCopy = true;

                if (targetSet.has(file)) {
                    const targetStats = await fs.stat(targetPath);
                    shouldCopy = sourceStats.mtime > targetStats.mtime;
                }

                if (shouldCopy) {
                    await ensureDirectoryExists(path.dirname(targetPath));
                    await fs.copyFile(sourcePath, targetPath);

                    if (targetSet.has(file)) {
                        operations.updated++;
                    } else {
                        operations.copied++;
                    }
                }
            } catch (error) {
                operations.errors.push({
                    file,
                    operation: "copy",
                    error: error.message,
                });
            }
        }

        // Remove target files that no longer exist in source.
        for (const file of targetFiles) {
            if (sourceSet.has(file)) continue;
            try {
                await fs.unlink(path.join(target, file));
                operations.deleted++;
            } catch (error) {
                operations.errors.push({
                    file,
                    operation: "delete",
                    error: error.message,
                });
            }
        }

        return {
            success: true,
            source,
            target,
            operations,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError("Directory sync failed", "SYNC_FAILED", {
            source,
            target,
            originalError: error.message,
        });
    }
}
|
|
1268
|
+
|
|
1269
|
+
/**
 * List all files under a directory, recursively, as paths relative to it.
 * Directories themselves are not included in the result.
 *
 * @param {string} dirPath - Root directory to walk.
 * @returns {Promise<string[]>}
 */
async function getDirectoryFiles(dirPath) {
    const collected = [];

    const walk = async (current, prefix = "") => {
        const entries = await fs.readdir(current, { withFileTypes: true });
        for (const entry of entries) {
            const rel = path.join(prefix, entry.name);
            if (entry.isDirectory()) {
                await walk(path.join(current, entry.name), rel);
            } else {
                collected.push(rel);
            }
        }
    };

    await walk(dirPath);
    return collected;
}
|
|
1290
|
+
|
|
1291
|
+
/**
 * Gather aggregate statistics for a list of file paths.
 *
 * Computes totals, a per-extension histogram, and the largest, oldest and
 * newest files (by size / mtime). Per-file failures (bad path, stat error)
 * are collected in `analysis.errors` instead of aborting the run.
 *
 * @param {string[]} files - Paths to analyze; each is security-validated.
 * @returns {Promise<object>} { success, analysis, timestamp }
 * @throws {IOWorkerError} ANALYSIS_FAILED if the surrounding logic fails
 *   (e.g. `files` is not iterable).
 */
async function analyzeFiles(files) {
    try {
        const analysis = {
            totalFiles: 0,
            totalSize: 0,
            fileTypes: {},
            largestFile: null,
            oldestFile: null,
            newestFile: null,
            errors: [],
        };

        for (const filePath of files) {
            try {
                validateFilePath(filePath);

                const stats = await fs.stat(filePath);
                const ext =
                    path.extname(filePath).toLowerCase() || "no-extension";

                analysis.totalFiles++;
                analysis.totalSize += stats.size;

                // Track file types (extension histogram)
                analysis.fileTypes[ext] = (analysis.fileTypes[ext] || 0) + 1;

                // Track largest file (by byte size)
                if (
                    !analysis.largestFile ||
                    stats.size > analysis.largestFile.size
                ) {
                    analysis.largestFile = { path: filePath, size: stats.size };
                }

                // Track oldest file (by modification time)
                if (
                    !analysis.oldestFile ||
                    stats.mtime < analysis.oldestFile.mtime
                ) {
                    analysis.oldestFile = {
                        path: filePath,
                        mtime: stats.mtime,
                    };
                }

                // Track newest file (by modification time)
                if (
                    !analysis.newestFile ||
                    stats.mtime > analysis.newestFile.mtime
                ) {
                    analysis.newestFile = {
                        path: filePath,
                        mtime: stats.mtime,
                    };
                }
            } catch (error) {
                analysis.errors.push({ file: filePath, error: error.message });
            }
        }

        return {
            success: true,
            analysis,
            timestamp: new Date().toISOString(),
        };
    } catch (error) {
        throw new IOWorkerError("File analysis failed", "ANALYSIS_FAILED", {
            originalError: error.message,
        });
    }
}
|
|
1362
|
+
|
|
1363
|
+
// ===== UTILITY HELPER FUNCTIONS =====
|
|
1364
|
+
|
|
1365
|
+
/**
 * Build a deterministic, fixed-length cache key from request attributes.
 *
 * The raw key (method, path, query, auth presence, user id) is hashed with
 * SHA-256 and truncated to 32 hex chars, so no request data leaks into cache
 * filenames and key length stays bounded.
 *
 * @param {object} data - Request descriptor.
 * @returns {string} 32-character lowercase hex key.
 */
function generateCacheKey(data) {
    const method = data.method || "GET";
    const requestPath = data.path || "/";
    const query = JSON.stringify(data.query || {});
    const authFlag = data.headers?.["authorization"] ? "auth" : "noauth";
    const user = data.userId || "anonymous";

    const rawKey = [method, requestPath, query, authFlag, user].join("_");

    const digest = crypto.createHash("sha256").update(rawKey).digest("hex");
    return digest.substring(0, 32);
}
|
|
1386
|
+
|
|
1387
|
+
/**
 * Check whether a path exists (any entry type). Never throws.
 *
 * @param {string} filePath
 * @returns {Promise<boolean>}
 */
async function fileExists(filePath) {
    return fs
        .access(filePath, fs.constants.F_OK)
        .then(() => true)
        .catch(() => false);
}
|
|
1398
|
+
|
|
1399
|
+
/**
 * Create a directory (and any missing parents) with 0755 permissions.
 *
 * @param {string} dirPath - Directory to create.
 * @throws {IOWorkerError} DIR_CREATE_FAILED on any mkdir error other than EEXIST.
 */
async function ensureDirectoryExists(dirPath) {
    try {
        await fs.mkdir(dirPath, { recursive: true, mode: 0o755 });
    } catch (error) {
        // NOTE(review): with `recursive: true`, mkdir does not raise EEXIST
        // for an already-existing directory, so this guard is likely dead —
        // kept defensively; confirm against Node fs docs before removing.
        if (error.code !== "EEXIST") {
            throw new IOWorkerError(
                `Failed to create directory: ${dirPath}`,
                "DIR_CREATE_FAILED",
                {
                    originalError: error.message,
                }
            );
        }
    }
}
|
|
1417
|
+
|
|
1418
|
+
/**
 * File path validation for security.
 *
 * Rejects non-string paths, traversal sequences, disallowed extensions, and
 * any path that resolves outside the worker's working directory.
 *
 * @param {string} filePath - Path to validate.
 * @throws {IOWorkerError} INVALID_PATH or UNSUPPORTED_TYPE on rejection.
 */
function validateFilePath(filePath) {
    if (!filePath || typeof filePath !== "string") {
        throw new IOWorkerError("Invalid file path", "INVALID_PATH");
    }

    // Prevent directory traversal attacks.
    if (filePath.includes("..") || filePath.includes("~")) {
        throw new IOWorkerError("Path traversal not allowed", "INVALID_PATH");
    }

    // Check file extension if restrictions are enabled.
    if (WORKER_CONFIG.allowedExtensions.length > 0) {
        const ext = path.extname(filePath).toLowerCase();
        if (!WORKER_CONFIG.allowedExtensions.includes(ext)) {
            throw new IOWorkerError(
                `File extension not allowed: ${ext}`,
                "UNSUPPORTED_TYPE",
                {
                    allowedExtensions: WORKER_CONFIG.allowedExtensions,
                }
            );
        }
    }

    // Ensure path is within allowed boundaries. Use path.relative instead of
    // a raw startsWith prefix check: startsWith would wrongly accept sibling
    // paths such as "/app-evil/x" when the working directory is "/app".
    const resolvedPath = path.resolve(filePath);
    const workingDir = process.cwd();
    const relative = path.relative(workingDir, resolvedPath);

    if (relative.startsWith("..") || path.isAbsolute(relative)) {
        throw new IOWorkerError(
            "Path outside working directory not allowed",
            "INVALID_PATH"
        );
    }
}
|
|
1456
|
+
|
|
1457
|
+
/**
 * Return a shallow copy of `data` with well-known credential fields masked.
 * Non-object (and null) inputs are returned untouched.
 *
 * @param {*} data
 * @returns {*} Sanitized shallow copy, or the input itself if not an object.
 */
function sanitizeData(data) {
    if (!data || typeof data !== "object") return data;

    const SENSITIVE_FIELDS = ["password", "token", "apiKey", "secret", "auth"];
    const sanitized = { ...data };

    SENSITIVE_FIELDS.forEach((field) => {
        if (sanitized[field]) {
            sanitized[field] = "[REDACTED]";
        }
    });

    return sanitized;
}
|
|
1476
|
+
|
|
1477
|
+
/**
 * Return a shallow copy of HTTP headers with credential-bearing values
 * (authorization, cookie) masked. Non-object inputs pass through.
 *
 * @param {*} headers
 * @returns {*}
 */
function sanitizeHeaders(headers) {
    if (!headers || typeof headers !== "object") return headers;

    const sanitized = { ...headers };

    for (const name of ["authorization", "cookie"]) {
        if (sanitized[name]) {
            sanitized[name] = "[REDACTED]";
        }
    }

    return sanitized;
}
|
|
1496
|
+
|
|
1497
|
+
/**
 * Unwrap a Promise.allSettled result: the fulfilled value, or an
 * { error } record carrying the rejection message.
 *
 * @param {PromiseSettledResult} settledResult
 * @returns {*}
 */
function getSettledValue(settledResult) {
    return settledResult.status === "fulfilled"
        ? settledResult.value
        : { error: settledResult.reason?.message || "Unknown error" };
}
|
|
1507
|
+
|
|
1508
|
+
// ===== WORKER LIFECYCLE MANAGEMENT =====
|
|
1509
|
+
|
|
1510
|
+
/**
 * Graceful shutdown handler.
 *
 * Marks the worker as shutting down, waits up to 10 seconds for queued
 * tasks to drain (polling every 100ms), then exits the process.
 *
 * @param {string} signal - Name of the trigger (e.g. "SIGTERM"), for logging.
 */
async function gracefulShutdown(signal) {
    log("info", `Received ${signal}, initiating graceful shutdown`);
    isShuttingDown = true;

    // Wait for current tasks to complete.
    const maxWaitTime = 10000; // 10 seconds
    const startTime = Date.now();

    while (taskQueue.size > 0 && Date.now() - startTime < maxWaitTime) {
        log("info", `Waiting for ${taskQueue.size} tasks to complete`);
        await new Promise((resolve) => setTimeout(resolve, 100));
    }

    // Timed out with work still queued: exit anyway, but say so.
    if (taskQueue.size > 0) {
        log(
            "warn",
            `Forcefully shutting down with ${taskQueue.size} tasks remaining`
        );
    }

    log("info", "I/O Worker shutdown complete");
    process.exit(0);
}
|
|
1536
|
+
|
|
1537
|
+
// Enhanced signal handlers
process.on("SIGTERM", () => gracefulShutdown("SIGTERM"));
process.on("SIGINT", () => gracefulShutdown("SIGINT"));

// Handle uncaught exceptions
process.on("uncaughtException", (error) => {
  log("error", "Uncaught exception in I/O Worker:", error);

  if (parentPort) {
    parentPort.postMessage({
      type: "error",
      error: "Worker uncaught exception",
      details: error.message,
    });
  }

  // Exit non-zero after a fatal error so supervisors see the failure.
  // (The extra argument is ignored if gracefulShutdown does not accept it.)
  gracefulShutdown("UNCAUGHT_EXCEPTION", 1);
});

// Handle unhandled promise rejections
process.on("unhandledRejection", (reason) => {
  log("error", "Unhandled promise rejection in I/O Worker:", reason);

  if (parentPort) {
    parentPort.postMessage({
      type: "error",
      error: "Worker unhandled rejection",
      // Bug fix: rejection reasons need not be Error instances
      // (e.g. Promise.reject("msg")). The old `reason?.message` dropped
      // such detail entirely; stringify non-Error reasons instead.
      details:
        reason instanceof Error
          ? reason.message
          : reason != null
            ? String(reason)
            : "Unknown rejection",
    });
  }
});
|
|
1568
|
+
|
|
1569
|
+
// ===== WORKER INITIALIZATION AND READY SIGNAL =====

// Run async initialization, then tell the parent thread this worker is
// ready (echoing the effective config). Any init failure is reported to
// the parent and terminates the worker with a non-zero exit code.
(async () => {
  try {
    await initializeWorker();

    if (parentPort) {
      const { maxFileSize, allowedExtensions, timeout, maxRetries } =
        WORKER_CONFIG;
      parentPort.postMessage({
        type: "ready",
        workerType: "io",
        config: { maxFileSize, allowedExtensions, timeout, maxRetries },
        timestamp: new Date().toISOString(),
      });
    }
  } catch (error) {
    log("error", "Worker initialization failed:", error);

    parentPort?.postMessage({
      type: "error",
      error: "Worker initialization failed",
      details: error.message,
    });

    process.exit(1);
  }
})();
|
|
1603
|
+
|
|
1604
|
+
// Export internals for unit tests only. In production the worker
// communicates exclusively over parentPort messages, so nothing is
// exported there.
if (process.env.NODE_ENV === "test") {
  const testOnlyExports = {
    processTask,
    validateTask,
    generateCacheKey,
    sanitizeData,
    sanitizeHeaders,
    IOWorkerError,
  };
  module.exports = testOnlyExports;
}
|
|
1615
|
+
|