@jammysunshine/astrology-shared 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/examples/index.js +455 -0
- package/examples/serviceExamples.js +359 -0
- package/index.js +75 -0
- package/interfaces/IDataAccessService.js +70 -0
- package/interfaces/IUserContextService.js +58 -0
- package/logger/index.js +417 -0
- package/logger/test_fix.js +56 -0
- package/package.json +43 -0
- package/schemas/README.md +97 -0
- package/schemas/index.js +940 -0
- package/validation/BaseValidator.js +308 -0
- package/validation/index.js +88 -0
package/logger/index.js
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Universal Logger - Works in both Node.js (backend) and browser (frontend) environments
|
|
3
|
+
* Uses winston/pino in backend when available, console in frontend or as fallback
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
// Module-level singleton; lazily populated by getLogger() on first use.
let loggerInstance = null;
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Detect environment and create appropriate logger
|
|
10
|
+
*/
|
|
11
|
+
/**
 * Detect the runtime environment and build the most capable logger available.
 *
 * Backend (Node.js): prefer winston (searched in several candidate locations),
 * then pino, then a plain console logger. Frontend (browser): always console.
 *
 * @returns {object} A logger exposing debug/info/warn/error/log/child.
 */
function createLogger() {
  // Browser detection: `window` present or `process` absent means frontend.
  const runningInNode = typeof window === 'undefined' && typeof process !== 'undefined';
  if (!runningInNode) {
    // Frontend: console-based logger only.
    return createConsoleLogger();
  }

  // Resolve winston from one of several candidate locations, in order:
  // the normal module resolution path, a path relative to the backend
  // package, and finally the backend's node_modules under the CWD.
  const resolveWinston = () => {
    try {
      return require('winston');
    } catch (directError) {
      // Not resolvable directly; try the next location.
    }
    try {
      return require('../backend/node_modules/winston');
    } catch (relativeError) {
      // Still not found; last attempt below throws if winston is absent.
    }
    const path = require('path');
    const backendPath = path.join(process.cwd(), 'packages', 'backend', 'node_modules', 'winston');
    return require(backendPath);
  };

  try {
    return createWinstonLogger(resolveWinston());
  } catch (winstonError) {
    try {
      // Winston unavailable (or failed to initialize): try pino.
      return createPinoLogger(require('pino'));
    } catch (pinoError) {
      // Neither structured logger is available; fall back to console.
      return createConsoleLogger();
    }
  }
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Generates a timestamp string in the local timezone.
|
|
57
|
+
* Format: YYYY-MM-DD-HH-mm-ss
|
|
58
|
+
*/
|
|
59
|
+
/**
 * Build a filesystem-safe timestamp string in the machine's local timezone.
 * Format: YYYY-MM-DD-HH-mm-ss (all fields zero-padded to two digits except year).
 *
 * @returns {string} e.g. "2024-07-01-09-05-30"
 */
function getLocalTimestamp() {
  const now = new Date();
  const two = (value) => String(value).padStart(2, '0');
  const parts = [
    now.getFullYear(),
    two(now.getMonth() + 1), // getMonth() is 0-indexed
    two(now.getDate()),
    two(now.getHours()),
    two(now.getMinutes()),
    two(now.getSeconds()),
  ];
  return parts.join('-');
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Create winston-based logger for backend
|
|
67
|
+
*/
|
|
68
|
+
/**
 * Create a winston-based logger for the backend.
 *
 * Transports assembled, in order:
 *  1. Console (always) — colorized, simple format.
 *  2. Better Stack HTTP transport — only when BETTERSTACK_SOURCE_TOKEN is set.
 *  3. Per-run error/server log files under LOGS_DIR (or ./logs), with
 *     best-effort cleanup that keeps the 4 newest files of each kind.
 *
 * @param {object} winston - The resolved winston module.
 * @returns {object} Wrapper exposing debug/info/warn/error/log/child.
 *          Falls back to the console logger if winston setup throws.
 */
function createWinstonLogger(winston) {
  try {
    const transports = [
      // Console transport should always be available for debugging
      new winston.transports.Console({
        level: process.env.LOG_LEVEL || 'debug', // Use the same level as file logs for consistency
        format: winston.format.combine(
          winston.format.colorize(),
          winston.format.simple()
        )
      })
    ];

    // Add Better Stack HTTP transport if token is provided
    if (process.env.BETTERSTACK_SOURCE_TOKEN) {
      try {
        // Use HTTP transport instead of Logtail WebSocket
        const https = require('https');
        const http = require('http');

        // Minimal custom transport: POSTs each log record as JSON to the
        // Better Stack ingest endpoint.
        // NOTE(review): extends `winston.Transport` (legacy base); winston 3
        // normally uses the `winston-transport` package's Transport — confirm
        // this base class is available in the winston version deployed.
        class BetterStackHttpTransport extends winston.Transport {
          constructor(opts) {
            super(opts);
            this.endpoint = opts.endpoint || 'https://s1638078.eu-nbg-2.betterstackdata.com';
            this.token = opts.token;
          }

          log(info, callback) {
            // `dt` carries the ingest timestamp; spread of `info` last means
            // any dt/message/level fields on `info` win over the defaults.
            const payload = {
              dt: new Date().toISOString(),
              message: info.message,
              level: info.level,
              ...info
            };

            const data = JSON.stringify(payload);
            const url = new URL(this.endpoint);

            const options = {
              hostname: url.hostname,
              port: url.port || (url.protocol === 'https:' ? 443 : 80),
              path: url.pathname,
              method: 'POST',
              headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${this.token}`,
                'Content-Length': Buffer.byteLength(data)
              },
              timeout: parseInt(process.env.LOGGER_HTTP_TIMEOUT || '5000')
            };

            // `done` guards against the callback firing twice — 'end',
            // 'error' and 'timeout' can race on the same request.
            let called = false;
            const done = () => {
              if (!called) {
                called = true;
                callback();
              }
            };

            const req = (url.protocol === 'https:' ? https : http).request(options, (res) => {
              // Consume response data to prevent memory leaks and handle 'end'
              res.on('data', () => {});
              res.on('end', () => done());
            });

            req.on('error', (err) => {
              // Silent fail for logging transport, but ensure callback is called
              done();
            });

            req.on('timeout', () => {
              req.destroy();
              done();
            });

            req.write(data);
            req.end();
          }
        }

        transports.push(new BetterStackHttpTransport({
          token: process.env.BETTERSTACK_SOURCE_TOKEN.trim(),
          level: process.env.LOG_LEVEL || 'info'
        }));
        console.log('✅ Better Stack (HTTP) logging transport enabled');
      } catch (error) {
        console.log('❌ Better Stack HTTP transport failed:', error.message);
      }
    }

    // Always add file transport in all environments (not just production)
    // Note: Console transport is separate and will continue to work alongside file transports
    // Remove the environment check to ensure logging works in all environments
    try {
      const path = require('path');
      const logDir = process.env.LOGS_DIR || path.join(process.cwd(), 'logs');

      // Ensure log directory exists
      const fs = require('fs');
      if (!fs.existsSync(logDir)) {
        fs.mkdirSync(logDir, { recursive: true });
      }

      /**
       * Cleans up old log files, keeping only the most recent 'keepCount' files
       * for both 'error-' and 'server-' prefixed logs.
       * Fully asynchronous and best-effort: failures are logged, never thrown.
       */
      function cleanOldLogFiles(directory, keepCount = 4) {
        fs.readdir(directory, (err, files) => {
          if (err) {
            console.error('Error reading log directory:', err);
            return;
          }

          const errorLogs = [];
          const serverLogs = [];

          files.forEach(file => {
            const filePath = path.join(directory, file);
            // Regex to match 'error-YYYY-MM-DD-HH-mm-ss.log' or 'server-YYYY-MM-DD-HH-mm-ss.log'
            const errorMatch = file.match(/^error-(\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2})\.log$/);
            const serverMatch = file.match(/^server-(\d{4}-\d{2}-\d{2}-\d{2}-\d{2}-\d{2})\.log$/);

            if (errorMatch) {
              errorLogs.push({ file, timestamp: errorMatch[1], path: filePath });
            } else if (serverMatch) {
              serverLogs.push({ file, timestamp: serverMatch[1], path: filePath });
            }
          });

          // Sort logs by timestamp (oldest first); the timestamp format is
          // lexicographically ordered, so a string compare is chronological.
          errorLogs.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
          serverLogs.sort((a, b) => a.timestamp.localeCompare(b.timestamp));

          // Delete oldest error logs
          for (let i = 0; i < errorLogs.length - keepCount; i++) {
            fs.unlink(errorLogs[i].path, (err) => {
              if (err) console.error('Error deleting old error log file:', errorLogs[i].file, err);
            });
          }

          // Delete oldest server logs
          for (let i = 0; i < serverLogs.length - keepCount; i++) {
            fs.unlink(serverLogs[i].path, (err) => {
              if (err) console.error('Error deleting old server log file:', serverLogs[i].file, err);
            });
          }
        });
      }

      // Call the cleanup function on startup
      cleanOldLogFiles(logDir);

      // Generate a unique timestamp for each run to ensure different log files per execution
      const runTimestamp = getLocalTimestamp();

      const errorFile = path.join(logDir, `error-${runTimestamp}.log`);
      const serverFile = path.join(logDir, `server-${runTimestamp}.log`);

      transports.push(new winston.transports.File({
        filename: errorFile,
        level: 'error',
        format: winston.format.combine(
          winston.format.timestamp(),
          winston.format.errors({ stack: true }),
          winston.format.json()
        )
      }));

      transports.push(new winston.transports.File({
        filename: serverFile,
        level: process.env.LOG_LEVEL || 'info', // Changed default from 'debug' to 'info'
        format: winston.format.combine(
          winston.format.timestamp(),
          winston.format.json()
        )
      }));
    } catch (error) {
      console.error('❌ Error creating file transports:', error.message);
    }

    const logger = winston.createLogger({
      level: process.env.LOG_LEVEL || 'debug', // Changed from 'info' to 'debug' to ensure all logs are processed
      format: winston.format.combine(
        winston.format.timestamp(),
        winston.format.errors({ stack: true }),
        winston.format.json()
      ),
      defaultMeta: { service: 'astrology-shared' },
      transports: transports
    });

    // Wrap winston behind the shared (message, meta) signature used by the
    // other logger backends in this module.
    return {
      debug: (message, meta = {}) => logger.debug(message, meta),
      info: (message, meta = {}) => logger.info(message, meta),
      warn: (message, meta = {}) => logger.warn(message, meta),
      error: (message, meta = {}) => logger.error(message, meta),

      // Additional methods for compatibility
      log: (level, message, meta = {}) => logger.log(level, message, meta),
      child: (meta) => logger.child(meta)
    };
  } catch (error) {
    console.error('Failed to create winston logger, falling back to console:', error);
    return createConsoleLogger();
  }
}
|
|
275
|
+
|
|
276
|
+
/**
|
|
277
|
+
* Create pino-based logger for backend
|
|
278
|
+
*/
|
|
279
|
+
/**
 * Create a pino-based logger for the backend.
 *
 * Wraps pino's (meta, message) call convention behind the shared
 * (message, meta) signature used by the other backends in this module.
 *
 * @param {Function} pino - The resolved pino module (factory function).
 * @returns {object} Wrapper exposing debug/info/warn/error/log/child.
 *          Falls back to the console logger if pino setup throws.
 */
function createPinoLogger(pino) {
  try {
    const logger = pino({
      level: process.env.LOG_LEVEL || 'info',
      timestamp: pino.stdTimeFunctions.isoTime,
      base: { service: 'astrology-shared' }
    });

    return {
      debug: (message, meta = {}) => logger.debug(meta, message),
      info: (message, meta = {}) => logger.info(meta, message),
      warn: (message, meta = {}) => logger.warn(meta, message),
      error: (message, meta = {}) => logger.error(meta, message),

      // Additional methods for compatibility.
      // Fix: the previous implementation called `logger[level](...)` without
      // checking the level exists, so an unknown level name (e.g. "verbose")
      // threw a TypeError from inside the logging path. Unknown levels now
      // fall back to `info` instead of crashing the caller.
      log: (level, message, meta = {}) => {
        const fn = typeof logger[level] === 'function' ? logger[level] : logger.info;
        fn.call(logger, meta, message);
      },
      child: (meta) => logger.child(meta)
    };
  } catch (error) {
    console.error('Failed to create pino logger, falling back to console:', error);
    return createConsoleLogger();
  }
}
|
|
302
|
+
|
|
303
|
+
/**
|
|
304
|
+
* Create console-based logger for frontend or fallback
|
|
305
|
+
*/
|
|
306
|
+
/**
 * Create a console-based logger for the frontend, or as the universal fallback.
 *
 * Fix: the previous implementation read `process.env.LOG_LEVEL` unconditionally.
 * In a browser, `process` is undefined, so the "frontend" logger this function
 * exists to provide threw a ReferenceError on creation. The env lookup is now
 * guarded so the function is safe in both environments.
 *
 * @returns {object} Logger exposing debug/info/warn/error/log/child.
 */
function createConsoleLogger() {
  // Ordered from most to least verbose; index comparison implements filtering.
  const LOG_LEVELS = ['debug', 'info', 'warn', 'error'];

  // Guarded env read: `process` does not exist in browsers.
  const envLevel =
    typeof process !== 'undefined' && process.env && process.env.LOG_LEVEL
      ? process.env.LOG_LEVEL.toLowerCase()
      : undefined;
  const configuredLogLevel = envLevel || 'info';
  const configuredLogLevelIndex = LOG_LEVELS.indexOf(configuredLogLevel);

  // If the configured level is not found, default to 'info'
  const effectiveLogLevelIndex =
    configuredLogLevelIndex !== -1 ? configuredLogLevelIndex : LOG_LEVELS.indexOf('info');

  const getTimestamp = () => new Date().toISOString();

  // Render "[ISO timestamp] LEVEL: message {meta-json}"; meta omitted when empty.
  const formatMessage = (level, message, meta = {}) => {
    const timestamp = getTimestamp();
    const metaStr = Object.keys(meta).length > 0 ? ` ${JSON.stringify(meta)}` : '';
    return `[${timestamp}] ${level.toUpperCase()}: ${message}${metaStr}`;
  };

  return {
    debug: (message, meta = {}) => {
      if (effectiveLogLevelIndex <= LOG_LEVELS.indexOf('debug')) {
        console.debug(formatMessage('debug', message, meta));
      }
    },
    info: (message, meta = {}) => {
      if (effectiveLogLevelIndex <= LOG_LEVELS.indexOf('info')) {
        console.info(formatMessage('info', message, meta));
      }
    },
    warn: (message, meta = {}) => {
      if (effectiveLogLevelIndex <= LOG_LEVELS.indexOf('warn')) {
        console.warn(formatMessage('warn', message, meta));
      }
    },
    error: (message, meta = {}) => {
      if (effectiveLogLevelIndex <= LOG_LEVELS.indexOf('error')) {
        console.error(formatMessage('error', message, meta));
      }
    },

    // Additional methods for compatibility
    log: (level, message, meta = {}) => {
      // Unknown level names yield index -1 and are silently dropped,
      // matching the named-method filtering behavior.
      const levelIndex = LOG_LEVELS.indexOf(level);
      if (effectiveLogLevelIndex <= levelIndex) {
        const method = level in console ? level : 'log';
        console[method](formatMessage(level, message, meta));
      }
    },
    child: (meta) => {
      // Return a child logger whose calls merge `meta` into each entry's
      // metadata; per-call meta wins on key collisions.
      const parentLogger = createConsoleLogger();
      const childLogger = {
        debug: (msg, additionalMeta = {}) => parentLogger.debug(msg, { ...meta, ...additionalMeta }),
        info: (msg, additionalMeta = {}) => parentLogger.info(msg, { ...meta, ...additionalMeta }),
        warn: (msg, additionalMeta = {}) => parentLogger.warn(msg, { ...meta, ...additionalMeta }),
        error: (msg, additionalMeta = {}) => parentLogger.error(msg, { ...meta, ...additionalMeta }),
        log: (level, msg, additionalMeta = {}) => parentLogger.log(level, msg, { ...meta, ...additionalMeta }),
        child: (moreMeta) => parentLogger.child({ ...meta, ...moreMeta })
      };
      return childLogger;
    }
  };
}
|
|
370
|
+
|
|
371
|
+
/**
|
|
372
|
+
* Get the logger instance (singleton)
|
|
373
|
+
*/
|
|
374
|
+
/**
 * Return the shared singleton logger, optionally scoped to a service name.
 *
 * @param {string|null} serviceName - When given, returns a child logger
 *        tagged with { service: serviceName }; otherwise the base logger.
 * @returns {object} Logger exposing debug/info/warn/error/log/child.
 */
function getLogger(serviceName = null) {
  // For unified runner, always return a simple console logger to avoid test noise
  const isUnifiedRunner = process.argv.some((arg) => arg.includes('unified.runner'));
  if (isUnifiedRunner) {
    return createConsoleLogger();
  }

  // Lazily initialize the singleton; any construction failure degrades to console.
  if (!loggerInstance) {
    try {
      loggerInstance = createLogger();
    } catch (error) {
      console.error('Failed to create logger:', error);
      // Fallback to a minimal logger
      loggerInstance = createConsoleLogger();
    }
  }

  if (!serviceName) {
    return loggerInstance;
  }

  // Return child logger with service name; on failure, hand back the base logger.
  try {
    return loggerInstance.child({ service: serviceName });
  } catch (error) {
    console.error(`Failed to create child logger for ${serviceName}:`, error);
    return loggerInstance;
  }
}
|
|
402
|
+
|
|
403
|
+
/**
|
|
404
|
+
* Create a named logger for a specific service/component
|
|
405
|
+
*/
|
|
406
|
+
/**
 * Create a named logger for a specific service/component.
 *
 * @param {string} name - Component name recorded on every log entry.
 * @param {object} [additionalMeta] - Extra fields merged into each entry.
 * @returns {object} Child of the shared base logger.
 */
function createNamedLogger(name, additionalMeta = {}) {
  const componentMeta = { component: name, ...additionalMeta };
  return getLogger().child(componentMeta);
}
|
|
410
|
+
|
|
411
|
+
// Export the logger
|
|
412
|
+
// Public API of the logger module.
module.exports = {
  getLogger,          // singleton accessor, optional per-service child scoping
  createNamedLogger,  // per-component child logger factory
  // For convenience, export the default logger
  // NOTE(review): this calls getLogger() at require time, eagerly building
  // the logger (and, via createWinstonLogger, possibly touching the
  // filesystem) as a module-load side effect — confirm this is intended.
  default: getLogger()
};
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
// Manual verification script for the double-callback race fix in the
// Better Stack HTTP transport: proves that the `called` guard inside `done()`
// prevents winston's callback from firing twice when a response-finish and an
// error event race on the same request. Run directly with `node`.
const winston = require('winston');
const { EventEmitter } = require('events');

class BetterStackHttpTransport extends winston.Transport {
  constructor(opts) {
    // NOTE(review): instantiated below with no arguments, so `opts` is
    // undefined here — relies on winston.Transport tolerating that; confirm.
    super(opts);
  }

  log(info, callback) {
    // --- START OF FIXED LOGIC ---
    // `done` is the same safety latch used by the real transport: the first
    // invocation forwards to winston's callback, later ones are swallowed.
    let called = false;
    const done = () => {
      if (!called) {
        called = true;
        console.log('✅ Callback called safely (first time)');
        callback();
      } else {
        console.log('⚠️ Safety Lock engaged: Prevented second callback execution!');
      }
    };

    // 1. Mocking the request object
    // An EventEmitter with no-op stream methods stands in for http.request's
    // ClientRequest, so no network traffic is needed.
    const mockReq = new EventEmitter();
    mockReq.write = () => {};
    mockReq.end = () => {};
    mockReq.destroy = () => {};

    // 2. Attach the error listener (Just like the real code)
    mockReq.on('error', (err) => {
      console.log('Event: error received');
      done();
    });

    // 3. --- SIMULATE THE RACE CONDITION ---

    // Simulate Response event finishing
    console.log('Action: Triggering Response finish...');
    done();

    // Simulate Error event firing immediately after
    console.log('Action: Triggering Error event...');
    mockReq.emit('error', new Error('Simulated socket error'));

    // --- END OF SIMULATION ---
  }
}

// Execute the test
console.log('--- Logger Defect Verification Test ---');
const transport = new BetterStackHttpTransport();

transport.log({ message: 'test' }, () => {
  console.log('Status: Winston acknowledged the log.');
});

console.log('\nFinal Check: If you see "Safety Lock engaged" above, the fix is PROVEN.');
|
package/package.json
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@jammysunshine/astrology-shared",
|
|
3
|
+
"version": "1.0.1",
|
|
4
|
+
"description": "Shared utilities for astrology applications (backend and frontend)",
|
|
5
|
+
"main": "index.js",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"test": "echo \"Error: no test specified\" && exit 1"
|
|
8
|
+
},
|
|
9
|
+
"keywords": [
|
|
10
|
+
"astrology",
|
|
11
|
+
"validation",
|
|
12
|
+
"logging",
|
|
13
|
+
"shared"
|
|
14
|
+
],
|
|
15
|
+
"author": "Astrology Team",
|
|
16
|
+
"license": "MIT",
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"uuid": "^13.0.0"
|
|
19
|
+
},
|
|
20
|
+
"engines": {
|
|
21
|
+
"node": ">=18.0.0"
|
|
22
|
+
},
|
|
23
|
+
"peerDependencies": {
|
|
24
|
+
"cors": "^2.8.5",
|
|
25
|
+
"express": "^4.19.2",
|
|
26
|
+
"helmet": "^7.1.0"
|
|
27
|
+
},
|
|
28
|
+
"peerDependenciesMeta": {
|
|
29
|
+
"express": {
|
|
30
|
+
"optional": true
|
|
31
|
+
},
|
|
32
|
+
"cors": {
|
|
33
|
+
"optional": true
|
|
34
|
+
},
|
|
35
|
+
"helmet": {
|
|
36
|
+
"optional": true
|
|
37
|
+
}
|
|
38
|
+
},
|
|
39
|
+
"optionalDependencies": {
|
|
40
|
+
"pino": "^10.1.0",
|
|
41
|
+
"winston": "^3.18.3"
|
|
42
|
+
}
|
|
43
|
+
}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
# Schema Management System
|
|
2
|
+
|
|
3
|
+
This directory contains the JSON Schema definitions for the astrology services API.
|
|
4
|
+
|
|
5
|
+
## Directory Structure
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
packages/shared/schemas/
|
|
9
|
+
├── v1/ # Versioned namespace
|
|
10
|
+
│ ├── core/ # Core system schemas
|
|
11
|
+
│ │ ├── input.json # Base input schema for all services
|
|
12
|
+
│ │ ├── output.json # Base output schema for all services
|
|
13
|
+
│ │ └── service.json # Service template
|
|
14
|
+
│ ├── domains/ # Domain-specific schemas
|
|
15
|
+
│ │ └── astrology/
|
|
16
|
+
│ │ ├── components/ # Astrology-specific components
|
|
17
|
+
│ │ │ ├── planet.json
|
|
18
|
+
│ │ │ ├── house.json
|
|
19
|
+
│ │ │ ├── aspect.json
|
|
20
|
+
│ │ │ └── dasha-period.json
|
|
21
|
+
│ │ ├── services/ # Astrology services
|
|
22
|
+
│ │ │ ├── birth-chart/
|
|
23
|
+
│ │ │ │ ├── service.v1.json
|
|
24
|
+
│ │ │ │ └── data.v1.json
|
|
25
|
+
│ │ │ └── dasha/
|
|
26
|
+
│ │ │ ├── service.v1.json
|
|
27
|
+
│ │ │ └── data.v1.json
|
|
28
|
+
│ │ └── inputs/ # Astrology input variants
|
|
29
|
+
│ │ ├── vedic.v1.json
|
|
30
|
+
│ │ └── compatibility.v1.json
|
|
31
|
+
│ ├── services/ # Cross-domain services
|
|
32
|
+
│ │ ├── ai/
|
|
33
|
+
│ │ │ └── query.v1.json
|
|
34
|
+
│ │ ├── database/
|
|
35
|
+
│ │ │ └── query.v1.json
|
|
36
|
+
│ │ ├── location/
|
|
37
|
+
│ │ │ └── query.v1.json
|
|
38
|
+
│ │ ├── logging/
|
|
39
|
+
│ │ │ └── entry.v1.json
|
|
40
|
+
│ │ ├── monitoring/
|
|
41
|
+
│ │ │ └── query.v1.json
|
|
42
|
+
│ │ └── cache/
|
|
43
|
+
│ │ └── query.v1.json
|
|
44
|
+
│ └── shared/ # Cross-service components
|
|
45
|
+
│ ├── metadata.v1.json
|
|
46
|
+
│ ├── error.v1.json
|
|
47
|
+
│ ├── performance.v1.json
|
|
48
|
+
│ ├── client-context.v1.json
|
|
49
|
+
│ └── preferences.v1.json
|
|
50
|
+
├── registry/
|
|
51
|
+
│ ├── services.json # Service registry
|
|
52
|
+
│ ├── components.json # Component registry
|
|
53
|
+
│ └── compatibility.json # Migration/compatibility info
|
|
54
|
+
├── legacy/ # Deprecated schemas
|
|
55
|
+
│ ├── astrologyInputSchema.js
|
|
56
|
+
│ ├── astrologyOutputSchema.js
|
|
57
|
+
│ └── serviceSchemas.js
|
|
58
|
+
├── index.js # Main exports and validation
|
|
59
|
+
└── README.md # This file
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
## Schema Loading
|
|
63
|
+
|
|
64
|
+
Schemas are loaded using the registry system:
|
|
65
|
+
|
|
66
|
+
```javascript
|
|
67
|
+
const { loadSchema } = require('./index.js');
|
|
68
|
+
|
|
69
|
+
// Load a service schema
|
|
70
|
+
const birthChartSchema = loadSchema('astrology/birth-chart/service', 'v1');
|
|
71
|
+
|
|
72
|
+
// Load a component schema
|
|
73
|
+
const planetSchema = loadSchema('planet');
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
## Adding New Schemas
|
|
77
|
+
|
|
78
|
+
### For Services:
|
|
79
|
+
1. Create service directory: `v1/domains/{domain}/services/{service-name}/`
|
|
80
|
+
2. Add `service.v1.json` and `data.v1.json`
|
|
81
|
+
3. Update `registry/services.json`
|
|
82
|
+
|
|
83
|
+
### For Components:
|
|
84
|
+
1. Add component file to appropriate directory
|
|
85
|
+
2. Update `registry/components.json`
|
|
86
|
+
|
|
87
|
+
## Validation
|
|
88
|
+
|
|
89
|
+
Schemas are validated using AJV with support for:
|
|
90
|
+
- JSON Schema draft-07
|
|
91
|
+
- Custom formats (uuid, date-time, etc.)
|
|
92
|
+
- Recursive $ref resolution
|
|
93
|
+
- Schema composition with allOf
|
|
94
|
+
|
|
95
|
+
## Migration
|
|
96
|
+
|
|
97
|
+
Legacy Zod-based schemas are preserved in the `legacy/` directory for reference during the migration period.
|