db-backup-logging 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,408 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
// src/index.ts
// Generated export glue: the public API surface of the package. esbuild
// registers each export as a lazy getter, so the arrow functions resolve to
// the implementations defined later in this bundle.
var index_exports = {};
__export(index_exports, {
  errorMiddleware: () => errorMiddleware,
  initializeNoticePackage: () => initializeNoticePackage,
  logCustomError: () => logCustomError,
  manualBackupTrigger: () => manualBackupTrigger,
  requestLogger: () => requestLogger
});
// Publish everything through the CommonJS entry point.
module.exports = __toCommonJS(index_exports);
40
+
41
// src/utils/config.ts
// Module-level holder for the user-supplied configuration; stays null until
// initializeNoticePackage() has stored a config via setConfig().
var _config = null;

/**
 * Store a defensive shallow copy of the supplied configuration object.
 */
function setConfig(config) {
  _config = Object.assign({}, config);
}

/**
 * Return the active configuration.
 * Throws if the package has not been initialized yet.
 */
function getConfig() {
  if (_config === null) {
    throw new Error(
      "[notice-utility] Package not initialized. Call initializeNoticePackage() first."
    );
  }
  return _config;
}
54
+
55
// src/utils/db.ts
var import_mongoose = __toESM(require("mongoose"));
// collection-name → compiled model, so each schema is built only once.
var modelCache = /* @__PURE__ */ new Map();
// Dedicated connection for this package, kept separate from the host app's
// default mongoose connection.
var localConnection = null;

/**
 * Lazily open (and memoize) the package's own Mongoose connection using the
 * configured dbUri.
 */
function getDbConnection() {
  if (localConnection === null) {
    localConnection = import_mongoose.default.createConnection(getConfig().dbUri);
  }
  return localConnection;
}

/**
 * Get or create a schemaless (strict: false) model bound to `collectionName`.
 * Reuses a model already registered on the connection to avoid Mongoose's
 * OverwriteModelError; the flexible schema accepts any log document shape.
 */
function getModel(collectionName) {
  const cached = modelCache.get(collectionName);
  if (cached) {
    return cached;
  }
  const conn = getDbConnection();
  let model;
  try {
    // Throws when no model has been registered under this name yet.
    model = conn.model(collectionName);
  } catch {
    const schema = new import_mongoose.Schema(
      {},
      {
        strict: false,
        timestamps: false,
        collection: collectionName
      }
    );
    model = conn.model(collectionName, schema);
  }
  modelCache.set(collectionName, model);
  return model;
}

/**
 * Expose the configured database URI (consumed by the backup runner, which
 * hands it to mongodump).
 */
function getDbUri() {
  return getConfig().dbUri;
}
90
+
91
// src/utils/queue.ts
// In-memory buffer of pending log writes, drained in batches.
var queue = [];
var isProcessing = false;
var BATCH_SIZE = 50;
var FLUSH_INTERVAL_MS = 5e3;
var MAX_RETRIES = 3;
var flushTimer = null;

/**
 * Queue a log document for asynchronous batched insertion into
 * `collectionName`. Never throws; flush failures surface on the console only.
 */
function enqueue(collectionName, data) {
  queue.push({ collectionName, data });
  if (!flushTimer) {
    // First enqueue: start the periodic background flusher.
    flushTimer = setInterval(() => {
      flush().catch(() => {
      });
    }, FLUSH_INTERVAL_MS);
    // unref() (where available) keeps the timer from holding the process open.
    if (flushTimer && typeof flushTimer === "object" && "unref" in flushTimer) {
      flushTimer.unref();
    }
  }
  // A full batch is flushed immediately rather than waiting for the interval.
  if (queue.length >= BATCH_SIZE) {
    flush().catch(() => {
    });
  }
}

/**
 * Drain up to one batch from the queue, grouped per collection, retrying each
 * failed insert with linear backoff (100ms, 200ms). Re-invokes itself while
 * items remain; the isProcessing flag prevents concurrent drains.
 */
async function flush() {
  if (isProcessing || queue.length === 0) return;
  isProcessing = true;
  const batch = queue.splice(0, BATCH_SIZE);
  const grouped = /* @__PURE__ */ new Map();
  for (const { collectionName, data } of batch) {
    const bucket = grouped.get(collectionName);
    if (bucket) {
      bucket.push(data);
    } else {
      grouped.set(collectionName, [data]);
    }
  }
  for (const [collectionName, docs] of grouped) {
    for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
      try {
        await getModel(collectionName).insertMany(docs, { ordered: false });
        break;
      } catch (err) {
        if (attempt >= MAX_RETRIES) {
          // Give up on this group; the documents are dropped by design.
          console.error(
            `[notice-utility] Failed to write ${docs.length} logs to "${collectionName}" after ${MAX_RETRIES} retries:`,
            err instanceof Error ? err.message : err
          );
        } else {
          await new Promise((r) => setTimeout(r, 100 * attempt));
        }
      }
    }
  }
  isProcessing = false;
  if (queue.length > 0) {
    flush().catch(() => {
    });
  }
}
150
+
151
// src/exceptions/index.ts
/**
 * Persist an error (plus optional Express request context) to the configured
 * errorLogs collection. Intentionally swallows every internal failure:
 * logging must never introduce a second error.
 */
function logError(error, context) {
  try {
    const config = getConfig();
    const err = typeof error === "string" ? new Error(error) : error;
    const req = context?.req;
    const requestContext = req ? {
      method: req.method,
      url: req.originalUrl || req.url,
      headers: req.headers,
      body: req.body || null
    } : null;
    enqueue(config.tables.errorLogs, {
      errorMessage: err.message,
      stackTrace: err.stack || null,
      requestContext,
      userId: context?.userId || req?.user?.id || req?.user?._id || null,
      serviceName: config.serviceName || null,
      environment: config.environment || process.env.NODE_ENV || null,
      createdAt: new Date()
    });
  } catch {
    // Deliberately silent.
  }
}
174
/**
 * Register process-wide handlers that record uncaught exceptions and
 * unhandled promise rejections via logError(). Existing handlers installed
 * by the host application are left in place; this only appends listeners.
 *
 * NOTE(review): installing an 'uncaughtException' listener suppresses Node's
 * default behavior of crashing the process — after logging, execution
 * continues in a potentially corrupted state. Consider re-raising or calling
 * process.exit(1) once the entry is flushed; confirm the desired policy with
 * consumers before changing it.
 */
function registerExceptionHandlers() {
  process.on("uncaughtException", (error) => {
    logError(error);
  });
  process.on("unhandledRejection", (reason) => {
    // Rejections may carry non-Error values; normalize before logging.
    const error = reason instanceof Error ? reason : new Error(String(reason));
    logError(error);
  });
}
185
/**
 * Express error-handling middleware factory. The returned handler has the
 * 4-argument signature Express uses to recognize error middleware: it records
 * the error together with its originating request, then defers to the next
 * error handler in the chain.
 */
function errorMiddleware() {
  return function (err, req, res, next) {
    logError(err, { req });
    next(err);
  };
}
191
/**
 * Public helper for recording application-defined errors. Accepts an Error
 * instance or a plain message string, plus an optional context carrying
 * `requestContext` and `userId`. Never throws — any internal failure is
 * swallowed so logging cannot crash the caller.
 */
function logCustomError(error, context) {
  try {
    const { tables, serviceName, environment } = getConfig();
    const err = typeof error === "string" ? new Error(error) : error;
    const entry = {
      errorMessage: err.message,
      stackTrace: err.stack || null,
      requestContext: context?.requestContext || null,
      userId: context?.userId || null,
      serviceName: serviceName || null,
      environment: environment || process.env.NODE_ENV || null,
      createdAt: new Date()
    };
    enqueue(tables.errorLogs, entry);
  } catch {
    // Deliberately silent.
  }
}
208
+
209
// src/init.ts
/**
 * Validate the user configuration, store it, and install the global
 * exception handlers. Must be called once before any other API is used.
 *
 * Required fields: dbType, dbUri, and all three table names (requestLogs,
 * errorLogs, backupLogs). When aws.enabled is set, bucketName/region and the
 * access credentials are mandatory; when local.enabled is set, backupPath is
 * mandatory. Throws a descriptive Error on the first violation found.
 */
function initializeNoticePackage(config) {
  // Small local helper so each validation reads as a single statement.
  const fail = (message) => {
    throw new Error(message);
  };
  if (!config.dbType) {
    fail('[notice-utility] "dbType" is required in config.');
  }
  if (!config.dbUri) {
    fail('[notice-utility] "dbUri" is required in config.');
  }
  if (!config.tables) {
    fail('[notice-utility] "tables" configuration is required.');
  }
  const { requestLogs, errorLogs, backupLogs } = config.tables;
  if (!requestLogs || !errorLogs || !backupLogs) {
    fail(
      "[notice-utility] All table names (requestLogs, errorLogs, backupLogs) must be configured."
    );
  }
  if (config.aws?.enabled) {
    if (!config.aws.bucketName || !config.aws.region) {
      fail(
        '[notice-utility] AWS "bucketName" and "region" are required when AWS is enabled.'
      );
    }
    if (!config.aws.accessKeyId || !config.aws.secretAccessKey) {
      fail(
        '[notice-utility] AWS "accessKeyId" and "secretAccessKey" are required when AWS is enabled.'
      );
    }
  }
  if (config.local?.enabled && !config.local.backupPath) {
    fail(
      '[notice-utility] Local "backupPath" is required when local backup is enabled.'
    );
  }
  setConfig(config);
  registerExceptionHandlers();
  const env = config.environment || process.env.NODE_ENV || "development";
  console.log(
    `[notice-utility] Initialized successfully (db: ${config.dbType}, service: ${config.serviceName || "default"}, env: ${env})`
  );
}
250
+
251
// src/logger/index.ts
/**
 * Express middleware factory that records every request/response pair to the
 * configured requestLogs collection. The JSON response body is captured by
 * wrapping res.json(); the log entry is queued on the response 'finish'
 * event. Any logging failure is swallowed so the request pipeline is never
 * affected.
 */
function requestLogger() {
  return (req, res, next) => {
    const startedAt = Date.now();
    const originalJson = res.json.bind(res);
    let capturedBody;
    // Intercept res.json so the serialized payload can be logged later.
    res.json = function (body) {
      capturedBody = body;
      return originalJson(body);
    };
    res.on("finish", () => {
      try {
        const config = getConfig();
        const entry = {
          method: req.method,
          url: req.originalUrl || req.url,
          headers: req.headers,
          requestBody: req.body || null,
          responseStatus: res.statusCode,
          responseBody: capturedBody ?? null,
          responseTime: Date.now() - startedAt,
          userId: req.user?.id || req.user?._id || req.userId || null,
          // First hop of X-Forwarded-For, falling back to the socket address.
          ipAddress: req.headers["x-forwarded-for"]?.split(",")[0]?.trim() || req.socket?.remoteAddress || "unknown",
          serviceName: config.serviceName || null,
          environment: config.environment || process.env.NODE_ENV || null,
          createdAt: new Date()
        };
        enqueue(config.tables.requestLogs, entry);
      } catch {
        // Never let logging break the response lifecycle.
      }
    });
    next();
  };
}
286
+
287
// src/backup/index.ts
// Node built-ins used by the backup runner; `exec` is promisified so the
// mongodump/zip steps below can be awaited.
var import_child_process = require("child_process");
var import_util = require("util");
var path = __toESM(require("path"));
var fs = __toESM(require("fs"));
var execAsync = (0, import_util.promisify)(import_child_process.exec);
293
/**
 * Build a timestamped, filesystem-safe backup base name (no extension):
 * the current ISO-8601 instant with ":" and "." replaced by "-",
 * e.g. "backup-2024-05-01T12-34-56-789Z".
 */
function getBackupFileName() {
  const stamp = new Date().toISOString().replace(/[:.]/g, "-");
  return `backup-${stamp}`;
}
298
/**
 * Run a full database backup: mongodump into a temp directory, zip the dump,
 * then copy the archive to the configured local path and/or upload it to S3,
 * recording a backup-log entry for each outcome. Temp files are removed on
 * success; on any failure a "failed" entry is logged and the function
 * resolves (it never rejects).
 *
 * NOTE(review): dbUri, the temp paths, and config.local.backupPath are
 * interpolated directly into shell command strings below — a value containing
 * quotes, backticks, or $() would be executed by the shell. These values come
 * from the host app's config rather than end users, but this should use
 * execFile with an argument array instead of exec with a formatted string.
 * NOTE(review): on failure, the dump directory and archive are left behind in
 * tempDir — confirm whether cleanup should also run in the catch path.
 */
async function performBackup() {
  const config = getConfig();
  const dbUri = getDbUri();
  const fileName = getBackupFileName();
  const archiveName = `${fileName}.zip`;
  // Scratch area under the host app's working directory.
  const tempDir = path.resolve(process.cwd(), ".notice-backups-tmp");
  if (!fs.existsSync(tempDir)) {
    fs.mkdirSync(tempDir, { recursive: true });
  }
  const dumpDir = path.join(tempDir, fileName);
  const archivePath = path.join(tempDir, archiveName);
  try {
    console.log(`[notice-utility] Starting backup: ${archiveName}`);
    // Requires the `mongodump` and `zip` binaries to be on PATH.
    await execAsync(`mongodump --uri="${dbUri}" --out="${dumpDir}"`);
    await execAsync(`zip -r "${archivePath}" "${fileName}"`, { cwd: tempDir });
    const stats = fs.statSync(archivePath);
    const fileSize = stats.size;
    if (config.local?.enabled) {
      const localDir = path.resolve(process.cwd(), config.local.backupPath);
      if (!fs.existsSync(localDir)) {
        fs.mkdirSync(localDir, { recursive: true });
      }
      const localPath = path.join(localDir, archiveName);
      fs.copyFileSync(archivePath, localPath);
      console.log(`[notice-utility] Backup saved locally: ${localPath}`);
      await logBackup({
        backupFileName: archiveName,
        backupType: "full",
        location: "local",
        fileSize,
        status: "success"
      });
    }
    if (config.aws?.enabled) {
      // uploadToS3 records its own success log entry.
      await uploadToS3(archivePath, archiveName, fileSize);
    }
    // Remove the archive and raw dump once all destinations are served.
    if (fs.existsSync(archivePath)) {
      fs.unlinkSync(archivePath);
    }
    if (fs.existsSync(dumpDir)) {
      fs.rmSync(dumpDir, { recursive: true, force: true });
    }
    console.log(`[notice-utility] Backup completed: ${archiveName}`);
  } catch (err) {
    console.error("[notice-utility] Backup failed:", err instanceof Error ? err.message : err);
    // Best-effort failure record; errors from the log write are ignored.
    await logBackup({
      backupFileName: archiveName,
      backupType: "full",
      location: config.aws?.enabled ? "s3" : "local",
      fileSize: 0,
      status: "failed",
      errorMessage: err instanceof Error ? err.message : String(err)
    }).catch(() => {
    });
  }
}
354
/**
 * Upload a finished backup archive to the configured S3 bucket under the
 * "backups/" prefix, then record a success entry in the backup log.
 * The AWS SDK is imported lazily so it is only loaded when S3 is enabled.
 * Note: the archive is read fully into memory before the upload.
 */
async function uploadToS3(filePath, fileName, fileSize) {
  const config = getConfig();
  if (!config.aws) throw new Error("AWS config not provided");
  const { S3Client, PutObjectCommand } = await import("@aws-sdk/client-s3");
  const { region, accessKeyId, secretAccessKey, bucketName } = config.aws;
  const client = new S3Client({
    region,
    credentials: { accessKeyId, secretAccessKey }
  });
  await client.send(new PutObjectCommand({
    Bucket: bucketName,
    Key: `backups/${fileName}`,
    Body: fs.readFileSync(filePath),
    ContentType: "application/zip"
  }));
  console.log(`[notice-utility] Backup uploaded to S3: backups/${fileName}`);
  await logBackup({
    backupFileName: fileName,
    backupType: "full",
    location: "s3",
    fileSize,
    status: "success"
  });
}
382
/**
 * Write one entry to the configured backupLogs collection, stamping
 * createdAt. Failures are reported on the console but never propagated,
 * so bookkeeping problems cannot break the backup run itself.
 */
async function logBackup(entry) {
  try {
    const config = getConfig();
    const BackupModel = getModel(config.tables.backupLogs);
    const doc = { ...entry, createdAt: new Date() };
    await BackupModel.create(doc);
  } catch (err) {
    console.error(
      "[notice-utility] Failed to write backup log:",
      err instanceof Error ? err.message : err
    );
  }
}
397
/**
 * Public entry point for running a backup on demand; delegates directly to
 * the internal performBackup() pipeline and resolves when it settles.
 */
async function manualBackupTrigger() {
  return performBackup();
}
400
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design — the `0 &&` guard means this assignment never runs;
// Node's CJS/ESM interop statically scans it to discover the named exports)
0 && (module.exports = {
  errorMiddleware,
  initializeNoticePackage,
  logCustomError,
  manualBackupTrigger,
  requestLogger
});
//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/utils/config.ts","../src/utils/db.ts","../src/utils/queue.ts","../src/exceptions/index.ts","../src/init.ts","../src/logger/index.ts","../src/backup/index.ts"],"sourcesContent":["// ─── Public API ─────────────────────────────────────────────────────────────\n\nexport { initializeNoticePackage } from './init'\nexport { requestLogger } from './logger'\nexport { manualBackupTrigger } from './backup'\nexport { logCustomError, errorMiddleware } from './exceptions'\n\n// ─── Types ──────────────────────────────────────────────────────────────────\n\nexport type {\n NoticeConfig,\n AWSConfig,\n LocalConfig,\n NoticeTables,\n RequestLogEntry,\n ErrorLogEntry,\n BackupLogEntry,\n} from './types'\n","import type { NoticeConfig } from '../types'\n\nlet _config: NoticeConfig | null = null\n\nexport function setConfig(config: NoticeConfig): void {\n _config = { ...config }\n}\n\nexport function getConfig(): NoticeConfig {\n if (!_config) {\n throw new Error(\n '[notice-utility] Package not initialized. 
Call initializeNoticePackage() first.'\n )\n }\n return _config\n}\n","import mongoose, { Schema, Model, Connection } from 'mongoose'\nimport { getConfig } from './config'\n\nconst modelCache = new Map<string, Model<any>>()\n\nlet localConnection: Connection | null = null\n\nexport function getDbConnection(): Connection {\n if (!localConnection) {\n localConnection = mongoose.createConnection(getConfig().dbUri)\n }\n return localConnection\n}\n\n/**\n * Gets or creates a Mongoose model for the given collection name.\n * Uses a flexible mixed schema to support any log structure.\n */\nexport function getModel(collectionName: string): Model<any> {\n if (modelCache.has(collectionName)) {\n return modelCache.get(collectionName)!\n }\n\n const schema = new Schema(\n {},\n {\n strict: false,\n timestamps: false,\n collection: collectionName,\n }\n )\n\n // Use a dedicated connection for notice-utility to avoid 'npm link' duplication issues\n const conn: Connection = getDbConnection()\n\n // Avoid OverwriteModelError — check if model already exists\n let model: Model<any>\n try {\n model = conn.model(collectionName)\n } catch {\n model = conn.model(collectionName, schema)\n }\n\n modelCache.set(collectionName, model)\n return model\n}\n\n/**\n * Creates a separate Mongoose connection for backup purposes (mongodump URI).\n */\nexport function getDbUri(): string {\n return getConfig().dbUri\n}\n","import { getModel } from './db'\n\ninterface QueueItem {\n collectionName: string\n data: Record<string, unknown>\n}\n\nconst queue: QueueItem[] = []\nlet isProcessing = false\nconst BATCH_SIZE = 50\nconst FLUSH_INTERVAL_MS = 5000\nconst MAX_RETRIES = 3\n\nlet flushTimer: ReturnType<typeof setInterval> | null = null\n\n/**\n * Enqueue a log entry for async writing to the database.\n * This never throws — all errors are silently logged to console.\n */\nexport function enqueue(collectionName: string, data: Record<string, unknown>): void {\n queue.push({ collectionName, data })\n\n 
// Start the flush interval if not already running\n if (!flushTimer) {\n flushTimer = setInterval(() => {\n flush().catch(() => {})\n }, FLUSH_INTERVAL_MS)\n\n // Unref so the timer doesn't keep the process alive\n if (flushTimer && typeof flushTimer === 'object' && 'unref' in flushTimer) {\n flushTimer.unref()\n }\n }\n\n // If queue reaches batch size, flush immediately\n if (queue.length >= BATCH_SIZE) {\n flush().catch(() => {})\n }\n}\n\n/**\n * Flush all queued log entries to the database.\n */\nexport async function flush(): Promise<void> {\n if (isProcessing || queue.length === 0) return\n isProcessing = true\n\n const batch = queue.splice(0, BATCH_SIZE)\n\n // Group by collection\n const grouped = new Map<string, Record<string, unknown>[]>()\n for (const item of batch) {\n const list = grouped.get(item.collectionName) || []\n list.push(item.data)\n grouped.set(item.collectionName, list)\n }\n\n for (const [collectionName, docs] of grouped) {\n let retries = 0\n while (retries < MAX_RETRIES) {\n try {\n const Model = getModel(collectionName)\n await Model.insertMany(docs, { ordered: false })\n break\n } catch (err) {\n retries++\n if (retries >= MAX_RETRIES) {\n console.error(\n `[notice-utility] Failed to write ${docs.length} logs to \"${collectionName}\" after ${MAX_RETRIES} retries:`,\n err instanceof Error ? 
err.message : err\n )\n } else {\n // Brief delay before retry\n await new Promise((r) => setTimeout(r, 100 * retries))\n }\n }\n }\n }\n\n isProcessing = false\n\n // If there are more items in the queue, continue flushing\n if (queue.length > 0) {\n flush().catch(() => {})\n }\n}\n","import type { Request, Response, NextFunction } from 'express'\nimport { getConfig } from '../utils/config'\nimport { enqueue } from '../utils/queue'\n\n/**\n * Log an error entry to the configured errorLogs collection.\n */\nfunction logError(\n error: Error | string,\n context?: {\n req?: Request\n userId?: string\n }\n): void {\n try {\n const config = getConfig()\n const err = typeof error === 'string' ? new Error(error) : error\n\n const logEntry: Record<string, unknown> = {\n errorMessage: err.message,\n stackTrace: err.stack || null,\n requestContext: context?.req\n ? {\n method: context.req.method,\n url: context.req.originalUrl || context.req.url,\n headers: context.req.headers,\n body: context.req.body || null,\n }\n : null,\n userId:\n context?.userId ||\n (context?.req as any)?.user?.id ||\n (context?.req as any)?.user?._id ||\n null,\n serviceName: config.serviceName || null,\n environment: config.environment || process.env.NODE_ENV || null,\n createdAt: new Date(),\n }\n\n enqueue(config.tables.errorLogs, logEntry)\n } catch {\n // Silent — error logging failure must never cause further errors\n }\n}\n\n/**\n * Register global process-level exception handlers.\n * Chains with existing handlers — does NOT override them.\n */\nexport function registerExceptionHandlers(): void {\n // Store references to any existing handlers so we can chain\n const existingUncaught = process.listeners('uncaughtException').slice()\n const existingRejection = process.listeners('unhandledRejection').slice()\n\n // uncaughtException\n process.on('uncaughtException', (error: Error) => {\n logError(error)\n // Don't prevent the default crash behavior for truly unrecoverable errors\n // The 
existing handlers will still fire since we're adding, not replacing\n })\n\n // unhandledRejection\n process.on('unhandledRejection', (reason: unknown) => {\n const error = reason instanceof Error ? reason : new Error(String(reason))\n logError(error)\n })\n}\n\n/**\n * Express error-handling middleware.\n * Must be registered AFTER all routes: app.use(errorMiddleware())\n */\nexport function errorMiddleware() {\n return (err: Error, req: Request, res: Response, next: NextFunction): void => {\n logError(err, { req })\n\n // Pass to the next error handler (don't swallow the error)\n next(err)\n }\n}\n\n/**\n * Public export — manually log a custom error with optional context.\n */\nexport function logCustomError(\n error: Error | string,\n context?: {\n userId?: string\n requestContext?: {\n method?: string\n url?: string\n headers?: Record<string, string | string[] | undefined>\n body?: unknown\n }\n }\n): void {\n try {\n const config = getConfig()\n const err = typeof error === 'string' ? 
new Error(error) : error\n\n const logEntry: Record<string, unknown> = {\n errorMessage: err.message,\n stackTrace: err.stack || null,\n requestContext: context?.requestContext || null,\n userId: context?.userId || null,\n serviceName: config.serviceName || null,\n environment: config.environment || process.env.NODE_ENV || null,\n createdAt: new Date(),\n }\n\n enqueue(config.tables.errorLogs, logEntry)\n } catch {\n // Silent\n }\n}\n","import type { NoticeConfig } from './types'\nimport { setConfig } from './utils/config'\nimport { registerExceptionHandlers } from './exceptions'\n\n/**\n * Initialize the notice-utility package.\n * Must be called once during application startup, after mongoose.connect().\n *\n * @param config - Full package configuration\n */\nexport function initializeNoticePackage(config: NoticeConfig): void {\n // Validate required fields\n if (!config.dbType) {\n throw new Error('[notice-utility] \"dbType\" is required in config.')\n }\n if (!config.dbUri) {\n throw new Error('[notice-utility] \"dbUri\" is required in config.')\n }\n if (!config.tables) {\n throw new Error('[notice-utility] \"tables\" configuration is required.')\n }\n if (!config.tables.requestLogs || !config.tables.errorLogs || !config.tables.backupLogs) {\n throw new Error(\n '[notice-utility] All table names (requestLogs, errorLogs, backupLogs) must be configured.'\n )\n }\n\n // Validate AWS config if enabled\n if (config.aws?.enabled) {\n if (!config.aws.bucketName || !config.aws.region) {\n throw new Error(\n '[notice-utility] AWS \"bucketName\" and \"region\" are required when AWS is enabled.'\n )\n }\n if (!config.aws.accessKeyId || !config.aws.secretAccessKey) {\n throw new Error(\n '[notice-utility] AWS \"accessKeyId\" and \"secretAccessKey\" are required when AWS is enabled.'\n )\n }\n }\n\n // Validate local config if enabled\n if (config.local?.enabled) {\n if (!config.local.backupPath) {\n throw new Error(\n '[notice-utility] Local \"backupPath\" is required 
when local backup is enabled.'\n )\n }\n }\n\n // Store config\n setConfig(config)\n\n // Register global exception handlers\n registerExceptionHandlers()\n\n console.log(\n `[notice-utility] Initialized successfully (db: ${config.dbType}, service: ${config.serviceName || 'default'}, env: ${config.environment || process.env.NODE_ENV || 'development'})`\n )\n}\n","import type { Request, Response, NextFunction } from 'express'\nimport { getConfig } from '../utils/config'\nimport { enqueue } from '../utils/queue'\n\n/**\n * Express middleware factory for request/response logging.\n * Captures method, URL, headers, body, response, timing, user, IP.\n * All logging is non-blocking via the async queue.\n */\nexport function requestLogger() {\n return (req: Request, res: Response, next: NextFunction): void => {\n const startTime = Date.now()\n\n // Capture the original res.json to intercept the response body\n const originalJson = res.json.bind(res)\n let responseBody: unknown = undefined\n\n res.json = function (body: any) {\n responseBody = body\n return originalJson(body)\n }\n\n // Hook into the 'finish' event to log after response is sent\n res.on('finish', () => {\n try {\n const config = getConfig()\n const responseTime = Date.now() - startTime\n\n const logEntry: Record<string, unknown> = {\n method: req.method,\n url: req.originalUrl || req.url,\n headers: req.headers,\n requestBody: req.body || null,\n responseStatus: res.statusCode,\n responseBody: responseBody ?? 
null,\n responseTime,\n userId: (req as any).user?.id || (req as any).user?._id || (req as any).userId || null,\n ipAddress:\n (req.headers['x-forwarded-for'] as string)?.split(',')[0]?.trim() ||\n req.socket?.remoteAddress ||\n 'unknown',\n serviceName: config.serviceName || null,\n environment: config.environment || process.env.NODE_ENV || null,\n createdAt: new Date(),\n }\n\n enqueue(config.tables.requestLogs, logEntry)\n } catch {\n // Silent — logging failure must never affect the request\n }\n })\n\n next()\n }\n}\n","import { exec } from 'child_process'\nimport { promisify } from 'util'\nimport * as path from 'path'\nimport * as fs from 'fs'\nimport { getConfig } from '../utils/config'\nimport { getModel, getDbUri } from '../utils/db'\n\nconst execAsync = promisify(exec)\n\n/**\n * Generate a timestamp-based backup file name.\n */\nfunction getBackupFileName(): string {\n const now = new Date()\n const ts = now.toISOString().replace(/[:.]/g, '-')\n return `backup-${ts}`\n}\n\n/**\n * Perform a full MongoDB backup using mongodump.\n * Saves to local filesystem and/or uploads to S3 based on config.\n */\nasync function performBackup(): Promise<void> {\n const config = getConfig()\n const dbUri = getDbUri()\n const fileName = getBackupFileName()\n const archiveName = `${fileName}.zip`\n\n // Temp directory for backup\n const tempDir = path.resolve(process.cwd(), '.notice-backups-tmp')\n if (!fs.existsSync(tempDir)) {\n fs.mkdirSync(tempDir, { recursive: true })\n }\n\n const dumpDir = path.join(tempDir, fileName)\n const archivePath = path.join(tempDir, archiveName)\n\n try {\n // Run mongodump to a directory\n console.log(`[notice-utility] Starting backup: ${archiveName}`)\n await execAsync(`mongodump --uri=\"${dbUri}\" --out=\"${dumpDir}\"`)\n\n // Zip the directory\n await execAsync(`zip -r \"${archivePath}\" \"${fileName}\"`, { cwd: tempDir })\n\n const stats = fs.statSync(archivePath)\n const fileSize = stats.size\n\n // ── Save to local storage ──\n if 
(config.local?.enabled) {\n const localDir = path.resolve(process.cwd(), config.local.backupPath)\n if (!fs.existsSync(localDir)) {\n fs.mkdirSync(localDir, { recursive: true })\n }\n const localPath = path.join(localDir, archiveName)\n fs.copyFileSync(archivePath, localPath)\n console.log(`[notice-utility] Backup saved locally: ${localPath}`)\n\n // Log to database\n await logBackup({\n backupFileName: archiveName,\n backupType: 'full',\n location: 'local',\n fileSize,\n status: 'success',\n })\n }\n\n // ── Upload to S3 ──\n if (config.aws?.enabled) {\n await uploadToS3(archivePath, archiveName, fileSize)\n }\n\n // Cleanup temp files\n if (fs.existsSync(archivePath)) {\n fs.unlinkSync(archivePath)\n }\n if (fs.existsSync(dumpDir)) {\n fs.rmSync(dumpDir, { recursive: true, force: true })\n }\n\n console.log(`[notice-utility] Backup completed: ${archiveName}`)\n } catch (err) {\n console.error('[notice-utility] Backup failed:', err instanceof Error ? err.message : err)\n\n await logBackup({\n backupFileName: archiveName,\n backupType: 'full',\n location: config.aws?.enabled ? 's3' : 'local',\n fileSize: 0,\n status: 'failed',\n errorMessage: err instanceof Error ? 
err.message : String(err),\n }).catch(() => {})\n\n // Don't rethrow — backup failure should not crash the host app\n }\n}\n\n/**\n * Upload backup archive to AWS S3.\n */\nasync function uploadToS3(filePath: string, fileName: string, fileSize: number): Promise<void> {\n const config = getConfig()\n if (!config.aws) throw new Error('AWS config not provided')\n\n const { S3Client, PutObjectCommand } = await import('@aws-sdk/client-s3')\n\n const s3Client = new S3Client({\n region: config.aws.region,\n credentials: {\n accessKeyId: config.aws.accessKeyId,\n secretAccessKey: config.aws.secretAccessKey,\n },\n })\n\n const fileBuffer = fs.readFileSync(filePath)\n\n const command = new PutObjectCommand({\n Bucket: config.aws.bucketName,\n Key: `backups/${fileName}`,\n Body: fileBuffer,\n ContentType: 'application/zip',\n })\n\n await s3Client.send(command)\n console.log(`[notice-utility] Backup uploaded to S3: backups/${fileName}`)\n\n await logBackup({\n backupFileName: fileName,\n backupType: 'full',\n location: 's3',\n fileSize,\n status: 'success',\n })\n}\n\n/**\n * Write a backup log entry to the configured backupLogs collection.\n */\nasync function logBackup(entry: {\n backupFileName: string\n backupType: string\n location: 'local' | 's3'\n fileSize: number\n status: 'success' | 'failed'\n errorMessage?: string\n}): Promise<void> {\n try {\n const config = getConfig()\n const Model = getModel(config.tables.backupLogs)\n await Model.create({\n ...entry,\n createdAt: new Date(),\n })\n } catch (err) {\n console.error(\n '[notice-utility] Failed to write backup log:',\n err instanceof Error ? 
err.message : err\n )\n }\n}\n\n/**\n * Public export — trigger a manual database backup.\n */\nexport async function manualBackupTrigger(): Promise<void> {\n await performBackup()\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACEA,IAAI,UAA+B;AAE5B,SAAS,UAAU,QAA4B;AACpD,YAAU,EAAE,GAAG,OAAO;AACxB;AAEO,SAAS,YAA0B;AACxC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;;;ACfA,sBAAoD;AAGpD,IAAM,aAAa,oBAAI,IAAwB;AAE/C,IAAI,kBAAqC;AAElC,SAAS,kBAA8B;AAC5C,MAAI,CAAC,iBAAiB;AACpB,sBAAkB,gBAAAA,QAAS,iBAAiB,UAAU,EAAE,KAAK;AAAA,EAC/D;AACA,SAAO;AACT;AAMO,SAAS,SAAS,gBAAoC;AAC3D,MAAI,WAAW,IAAI,cAAc,GAAG;AAClC,WAAO,WAAW,IAAI,cAAc;AAAA,EACtC;AAEA,QAAM,SAAS,IAAI;AAAA,IACjB,CAAC;AAAA,IACD;AAAA,MACE,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,YAAY;AAAA,IACd;AAAA,EACF;AAGA,QAAM,OAAmB,gBAAgB;AAGzC,MAAI;AACJ,MAAI;AACF,YAAQ,KAAK,MAAM,cAAc;AAAA,EACnC,QAAQ;AACN,YAAQ,KAAK,MAAM,gBAAgB,MAAM;AAAA,EAC3C;AAEA,aAAW,IAAI,gBAAgB,KAAK;AACpC,SAAO;AACT;AAKO,SAAS,WAAmB;AACjC,SAAO,UAAU,EAAE;AACrB;;;AC7CA,IAAM,QAAqB,CAAC;AAC5B,IAAI,eAAe;AACnB,IAAM,aAAa;AACnB,IAAM,oBAAoB;AAC1B,IAAM,cAAc;AAEpB,IAAI,aAAoD;AAMjD,SAAS,QAAQ,gBAAwB,MAAqC;AACnF,QAAM,KAAK,EAAE,gBAAgB,KAAK,CAAC;AAGnC,MAAI,CAAC,YAAY;AACf,iBAAa,YAAY,MAAM;AAC7B,YAAM,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IACxB,GAAG,iBAAiB;AAGpB,QAAI,cAAc,OAAO,eAAe,YAAY,WAAW,YAAY;AACzE,iBAAW,MAAM;AAAA,IACnB;AAAA,EACF;AAGA,MAAI,MAAM,UAAU,YAAY;AAC9B,UAAM,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EACxB;AACF;AAKA,eAAsB,QAAuB;AAC3C,MAAI,gBAAgB,MAAM,WAAW,EAAG;AACxC,iBAAe;AAEf,QAAM,QAAQ,MAAM,OAAO,GAAG,UAAU;AAGxC,QAAM,UAAU,oBAAI,IAAuC;AAC3D,aAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,QAAQ,IAAI,KAAK,cAAc,KAAK,CAAC;AAClD,SAAK,KAAK,KAAK,IAAI;AACnB,YAAQ,IAAI,KAAK,gBAAgB,IAAI;AAAA,EACvC;AAEA,aAAW,CAAC,gBAAgB,IAAI,KAAK,SAAS;AAC5C,QAAI,UAAU;AACd,WAAO,UAAU,aAAa;AAC5B,UAAI;AACF,cAAMC,SAAQ,SAAS,cAAc;AACrC,cAAMA,OAAM,WAAW,MAAM,EAAE,SAAS,MAAM,CAAC;AAC/C;AAAA,MACF,SAAS,KAAK;AACZ;AACA,YAAI,WAAW,aAAa;AAC1B,kBAAQ;AAAA,YACN,oCAAoC,KAAK,MAAM,aAAa,cAAc,WAAW,WA
AW;AAAA,YAChG,eAAe,QAAQ,IAAI,UAAU;AAAA,UACvC;AAAA,QACF,OAAO;AAEL,gBAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,iBAAe;AAGf,MAAI,MAAM,SAAS,GAAG;AACpB,UAAM,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EACxB;AACF;;;AC9EA,SAAS,SACP,OACA,SAIM;AACN,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,UAAM,MAAM,OAAO,UAAU,WAAW,IAAI,MAAM,KAAK,IAAI;AAE3D,UAAM,WAAoC;AAAA,MACxC,cAAc,IAAI;AAAA,MAClB,YAAY,IAAI,SAAS;AAAA,MACzB,gBAAgB,SAAS,MACrB;AAAA,QACE,QAAQ,QAAQ,IAAI;AAAA,QACpB,KAAK,QAAQ,IAAI,eAAe,QAAQ,IAAI;AAAA,QAC5C,SAAS,QAAQ,IAAI;AAAA,QACrB,MAAM,QAAQ,IAAI,QAAQ;AAAA,MAC5B,IACA;AAAA,MACJ,QACE,SAAS,UACR,SAAS,KAAa,MAAM,MAC5B,SAAS,KAAa,MAAM,OAC7B;AAAA,MACF,aAAa,OAAO,eAAe;AAAA,MACnC,aAAa,OAAO,eAAe,QAAQ,IAAI,YAAY;AAAA,MAC3D,WAAW,oBAAI,KAAK;AAAA,IACtB;AAEA,YAAQ,OAAO,OAAO,WAAW,QAAQ;AAAA,EAC3C,QAAQ;AAAA,EAER;AACF;AAMO,SAAS,4BAAkC;AAEhD,QAAM,mBAAmB,QAAQ,UAAU,mBAAmB,EAAE,MAAM;AACtE,QAAM,oBAAoB,QAAQ,UAAU,oBAAoB,EAAE,MAAM;AAGxE,UAAQ,GAAG,qBAAqB,CAAC,UAAiB;AAChD,aAAS,KAAK;AAAA,EAGhB,CAAC;AAGD,UAAQ,GAAG,sBAAsB,CAAC,WAAoB;AACpD,UAAM,QAAQ,kBAAkB,QAAQ,SAAS,IAAI,MAAM,OAAO,MAAM,CAAC;AACzE,aAAS,KAAK;AAAA,EAChB,CAAC;AACH;AAMO,SAAS,kBAAkB;AAChC,SAAO,CAAC,KAAY,KAAc,KAAe,SAA6B;AAC5E,aAAS,KAAK,EAAE,IAAI,CAAC;AAGrB,SAAK,GAAG;AAAA,EACV;AACF;AAKO,SAAS,eACd,OACA,SASM;AACN,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,UAAM,MAAM,OAAO,UAAU,WAAW,IAAI,MAAM,KAAK,IAAI;AAE3D,UAAM,WAAoC;AAAA,MACxC,cAAc,IAAI;AAAA,MAClB,YAAY,IAAI,SAAS;AAAA,MACzB,gBAAgB,SAAS,kBAAkB;AAAA,MAC3C,QAAQ,SAAS,UAAU;AAAA,MAC3B,aAAa,OAAO,eAAe;AAAA,MACnC,aAAa,OAAO,eAAe,QAAQ,IAAI,YAAY;AAAA,MAC3D,WAAW,oBAAI,KAAK;AAAA,IACtB;AAEA,YAAQ,OAAO,OAAO,WAAW,QAAQ;AAAA,EAC3C,QAAQ;AAAA,EAER;AACF;;;ACxGO,SAAS,wBAAwB,QAA4B;AAElE,MAAI,CAAC,OAAO,QAAQ;AAClB,UAAM,IAAI,MAAM,kDAAkD;AAAA,EACpE;AACA,MAAI,CAAC,OAAO,OAAO;AACjB,UAAM,IAAI,MAAM,iDAAiD;AAAA,EACnE;AACA,MAAI,CAAC,OAAO,QAAQ;AAClB,UAAM,IAAI,MAAM,sDAAsD;AAAA,EACxE;AACA,MAAI,CAAC,OAAO,OAAO,eAAe,CAAC,OAAO,OAAO,aAAa,CAAC,OAAO,OAAO,YAAY;AACvF,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,KAAK,SAAS;AACvB,QA
AI,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO,IAAI,QAAQ;AAChD,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,QAAI,CAAC,OAAO,IAAI,eAAe,CAAC,OAAO,IAAI,iBAAiB;AAC1D,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,OAAO,SAAS;AACzB,QAAI,CAAC,OAAO,MAAM,YAAY;AAC5B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,MAAM;AAGhB,4BAA0B;AAE1B,UAAQ;AAAA,IACN,kDAAkD,OAAO,MAAM,cAAc,OAAO,eAAe,SAAS,UAAU,OAAO,eAAe,QAAQ,IAAI,YAAY,aAAa;AAAA,EACnL;AACF;;;AClDO,SAAS,gBAAgB;AAC9B,SAAO,CAAC,KAAc,KAAe,SAA6B;AAChE,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,eAAe,IAAI,KAAK,KAAK,GAAG;AACtC,QAAI,eAAwB;AAE5B,QAAI,OAAO,SAAU,MAAW;AAC9B,qBAAe;AACf,aAAO,aAAa,IAAI;AAAA,IAC1B;AAGA,QAAI,GAAG,UAAU,MAAM;AACrB,UAAI;AACF,cAAM,SAAS,UAAU;AACzB,cAAM,eAAe,KAAK,IAAI,IAAI;AAElC,cAAM,WAAoC;AAAA,UACxC,QAAQ,IAAI;AAAA,UACZ,KAAK,IAAI,eAAe,IAAI;AAAA,UAC5B,SAAS,IAAI;AAAA,UACb,aAAa,IAAI,QAAQ;AAAA,UACzB,gBAAgB,IAAI;AAAA,UACpB,cAAc,gBAAgB;AAAA,UAC9B;AAAA,UACA,QAAS,IAAY,MAAM,MAAO,IAAY,MAAM,OAAQ,IAAY,UAAU;AAAA,UAClF,WACG,IAAI,QAAQ,iBAAiB,GAAc,MAAM,GAAG,EAAE,CAAC,GAAG,KAAK,KAChE,IAAI,QAAQ,iBACZ;AAAA,UACF,aAAa,OAAO,eAAe;AAAA,UACnC,aAAa,OAAO,eAAe,QAAQ,IAAI,YAAY;AAAA,UAC3D,WAAW,oBAAI,KAAK;AAAA,QACtB;AAEA,gBAAQ,OAAO,OAAO,aAAa,QAAQ;AAAA,MAC7C,QAAQ;AAAA,MAER;AAAA,IACF,CAAC;AAED,SAAK;AAAA,EACP;AACF;;;ACtDA,2BAAqB;AACrB,kBAA0B;AAC1B,WAAsB;AACtB,SAAoB;AAIpB,IAAM,gBAAY,uBAAU,yBAAI;AAKhC,SAAS,oBAA4B;AACnC,QAAM,MAAM,oBAAI,KAAK;AACrB,QAAM,KAAK,IAAI,YAAY,EAAE,QAAQ,SAAS,GAAG;AACjD,SAAO,UAAU,EAAE;AACrB;AAMA,eAAe,gBAA+B;AAC5C,QAAM,SAAS,UAAU;AACzB,QAAM,QAAQ,SAAS;AACvB,QAAM,WAAW,kBAAkB;AACnC,QAAM,cAAc,GAAG,QAAQ;AAG/B,QAAM,UAAe,aAAQ,QAAQ,IAAI,GAAG,qBAAqB;AACjE,MAAI,CAAI,cAAW,OAAO,GAAG;AAC3B,IAAG,aAAU,SAAS,EAAE,WAAW,KAAK,CAAC;AAAA,EAC3C;AAEA,QAAM,UAAe,UAAK,SAAS,QAAQ;AAC3C,QAAM,cAAmB,UAAK,SAAS,WAAW;AAElD,MAAI;AAEF,YAAQ,IAAI,qCAAqC,WAAW,EAAE;AAC9D,UAAM,UAAU,oBAAoB,KAAK,YAAY,OAAO,GAAG;AAG/D,UAAM,UAAU,WAAW,WAAW,MAAM,QAAQ,KAAK,EAAE,KAAK,QAAQ,CAAC;AAEzE,UAAM,QAAW,YAAS,WAAW;AACrC,UAAM,WAAW,MAAM;AAGvB,QAAI,OAAO,OAAO,SAAS;AACzB,YAAM,WAAgB,aAAQ,QAAQ,IAAI,GAA
G,OAAO,MAAM,UAAU;AACpE,UAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,QAAG,aAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,MAC5C;AACA,YAAM,YAAiB,UAAK,UAAU,WAAW;AACjD,MAAG,gBAAa,aAAa,SAAS;AACtC,cAAQ,IAAI,0CAA0C,SAAS,EAAE;AAGjE,YAAM,UAAU;AAAA,QACd,gBAAgB;AAAA,QAChB,YAAY;AAAA,QACZ,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,KAAK,SAAS;AACvB,YAAM,WAAW,aAAa,aAAa,QAAQ;AAAA,IACrD;AAGA,QAAO,cAAW,WAAW,GAAG;AAC9B,MAAG,cAAW,WAAW;AAAA,IAC3B;AACA,QAAO,cAAW,OAAO,GAAG;AAC1B,MAAG,UAAO,SAAS,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,IACrD;AAEA,YAAQ,IAAI,sCAAsC,WAAW,EAAE;AAAA,EACjE,SAAS,KAAK;AACZ,YAAQ,MAAM,mCAAmC,eAAe,QAAQ,IAAI,UAAU,GAAG;AAEzF,UAAM,UAAU;AAAA,MACd,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU,OAAO,KAAK,UAAU,OAAO;AAAA,MACvC,UAAU;AAAA,MACV,QAAQ;AAAA,MACR,cAAc,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,IAC/D,CAAC,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC;AAAA,EAGnB;AACF;AAKA,eAAe,WAAW,UAAkB,UAAkB,UAAiC;AAC7F,QAAM,SAAS,UAAU;AACzB,MAAI,CAAC,OAAO,IAAK,OAAM,IAAI,MAAM,yBAAyB;AAE1D,QAAM,EAAE,UAAU,iBAAiB,IAAI,MAAM,OAAO,oBAAoB;AAExE,QAAM,WAAW,IAAI,SAAS;AAAA,IAC5B,QAAQ,OAAO,IAAI;AAAA,IACnB,aAAa;AAAA,MACX,aAAa,OAAO,IAAI;AAAA,MACxB,iBAAiB,OAAO,IAAI;AAAA,IAC9B;AAAA,EACF,CAAC;AAED,QAAM,aAAgB,gBAAa,QAAQ;AAE3C,QAAM,UAAU,IAAI,iBAAiB;AAAA,IACnC,QAAQ,OAAO,IAAI;AAAA,IACnB,KAAK,WAAW,QAAQ;AAAA,IACxB,MAAM;AAAA,IACN,aAAa;AAAA,EACf,CAAC;AAED,QAAM,SAAS,KAAK,OAAO;AAC3B,UAAQ,IAAI,mDAAmD,QAAQ,EAAE;AAEzE,QAAM,UAAU;AAAA,IACd,gBAAgB;AAAA,IAChB,YAAY;AAAA,IACZ,UAAU;AAAA,IACV;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AACH;AAKA,eAAe,UAAU,OAOP;AAChB,MAAI;AACF,UAAM,SAAS,UAAU;AACzB,UAAMC,SAAQ,SAAS,OAAO,OAAO,UAAU;AAC/C,UAAMA,OAAM,OAAO;AAAA,MACjB,GAAG;AAAA,MACH,WAAW,oBAAI,KAAK;AAAA,IACtB,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAQ;AAAA,MACN;AAAA,MACA,eAAe,QAAQ,IAAI,UAAU;AAAA,IACvC;AAAA,EACF;AACF;AAKA,eAAsB,sBAAqC;AACzD,QAAM,cAAc;AACtB;","names":["mongoose","Model","Model"]}