zync-nest-data-module 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +672 -0
- package/dist/backup/backup.config.d.ts +4 -0
- package/dist/backup/backup.config.js +84 -0
- package/dist/backup/backup.config.js.map +1 -0
- package/dist/backup/backup.interface.d.ts +21 -0
- package/dist/backup/backup.interface.js +3 -0
- package/dist/backup/backup.interface.js.map +1 -0
- package/dist/backup/backup.module.d.ts +2 -0
- package/dist/backup/backup.module.js +24 -0
- package/dist/backup/backup.module.js.map +1 -0
- package/dist/backup/backup.service.d.ts +19 -0
- package/dist/backup/backup.service.js +229 -0
- package/dist/backup/backup.service.js.map +1 -0
- package/dist/backup/index.d.ts +4 -0
- package/dist/backup/index.js +21 -0
- package/dist/backup/index.js.map +1 -0
- package/dist/database/database.module.d.ts +2 -0
- package/dist/database/database.module.js +34 -0
- package/dist/database/database.module.js.map +1 -0
- package/dist/database/database.repository.d.ts +62 -0
- package/dist/database/database.repository.js +259 -0
- package/dist/database/database.repository.js.map +1 -0
- package/dist/database/database.scheme.d.ts +45 -0
- package/dist/database/database.scheme.js +187 -0
- package/dist/database/database.scheme.js.map +1 -0
- package/dist/database/database.service.d.ts +7 -0
- package/dist/database/database.service.js +39 -0
- package/dist/database/database.service.js.map +1 -0
- package/dist/database/database.sync.d.ts +10 -0
- package/dist/database/database.sync.js +44 -0
- package/dist/database/database.sync.js.map +1 -0
- package/dist/database/database.transaction.d.ts +17 -0
- package/dist/database/database.transaction.js +101 -0
- package/dist/database/database.transaction.js.map +1 -0
- package/dist/database/database.uniqueId.d.ts +25 -0
- package/dist/database/database.uniqueId.js +68 -0
- package/dist/database/database.uniqueId.js.map +1 -0
- package/dist/database/database.utils.d.ts +10 -0
- package/dist/database/database.utils.js +119 -0
- package/dist/database/database.utils.js.map +1 -0
- package/dist/database/index.d.ts +8 -0
- package/dist/database/index.js +25 -0
- package/dist/database/index.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +20 -0
- package/dist/index.js.map +1 -0
- package/dist/service/index.d.ts +1 -0
- package/dist/service/index.js +18 -0
- package/dist/service/index.js.map +1 -0
- package/dist/service/service.d.ts +40 -0
- package/dist/service/service.js +182 -0
- package/dist/service/service.js.map +1 -0
- package/dist/tsconfig.lib.tsbuildinfo +1 -0
- package/libs/src/app.controller.ts +84 -0
- package/libs/src/app.module.ts +31 -0
- package/libs/src/backup/backup.config.ts +45 -0
- package/libs/src/backup/backup.interface.ts +21 -0
- package/libs/src/backup/backup.module.ts +11 -0
- package/libs/src/backup/backup.service.ts +283 -0
- package/libs/src/backup/index.ts +4 -0
- package/libs/src/database/database.module.ts +26 -0
- package/libs/src/database/database.repository.ts +358 -0
- package/libs/src/database/database.scheme.ts +128 -0
- package/libs/src/database/database.service.ts +36 -0
- package/libs/src/database/database.sync.ts +61 -0
- package/libs/src/database/database.transaction.ts +101 -0
- package/libs/src/database/database.uniqueId.ts +59 -0
- package/libs/src/database/database.utils.ts +99 -0
- package/libs/src/database/index.ts +8 -0
- package/libs/src/index.ts +3 -0
- package/libs/src/main.ts +62 -0
- package/libs/src/service/index.ts +1 -0
- package/libs/src/service/service.ts +201 -0
- package/libs/src/test/test.dto.ts +41 -0
- package/libs/src/test/test.module.ts +20 -0
- package/libs/src/test/test.repository.ts +44 -0
- package/libs/src/test/test.resolver.ts +44 -0
- package/libs/src/test/test.schema.ts +21 -0
- package/libs/src/test/test.service.ts +19 -0
- package/libs/tsconfig.lib.json +19 -0
- package/nest-cli.json +16 -0
- package/package.json +89 -0
- package/tsconfig.json +29 -0
- package/update-links.js +159 -0
package/libs/src/app.controller.ts
@@ -0,0 +1,84 @@
+import { Controller, Get, Post, Body, Param, Query } from '@nestjs/common';
+import { ApiTags, ApiOperation, ApiResponse, ApiParam, ApiQuery } from '@nestjs/swagger';
+
+@ApiTags('Test')
+@Controller('test')
+export class AppController {
+  @Get()
+  @ApiOperation({ summary: 'Get test message' })
+  @ApiResponse({ status: 200, description: 'Returns a test message' })
+  getTest(): { message: string; timestamp: string } {
+    return {
+      message: 'Zync Nest Library is working!',
+      timestamp: new Date().toISOString(),
+    };
+  }
+
+  @Get('health')
+  @ApiOperation({ summary: 'Health check endpoint' })
+  @ApiResponse({ status: 200, description: 'Returns health status' })
+  getHealth(): { status: string; uptime: number } {
+    return {
+      status: 'healthy',
+      uptime: process.uptime(),
+    };
+  }
+
+  @Post('echo')
+  @ApiOperation({ summary: 'Echo back the request body' })
+  @ApiResponse({ status: 201, description: 'Returns the echoed data' })
+  echoData(@Body() data: any): { echo: any; receivedAt: string } {
+    return {
+      echo: data,
+      receivedAt: new Date().toISOString(),
+    };
+  }
+
+  @Get('params/:id')
+  @ApiOperation({ summary: 'Test path parameters' })
+  @ApiParam({ name: 'id', description: 'Test ID' })
+  @ApiQuery({ name: 'query', required: false, description: 'Optional query parameter' })
+  @ApiResponse({ status: 200, description: 'Returns parameter data' })
+  testParams(
+    @Param('id') id: string,
+    @Query('query') query?: string,
+  ): { id: string; query?: string; timestamp: string } {
+    return {
+      id,
+      query,
+      timestamp: new Date().toISOString(),
+    };
+  }
+
+  @Get('library-info')
+  @ApiOperation({ summary: 'Get library information' })
+  @ApiResponse({ status: 200, description: 'Returns library information' })
+  getLibraryInfo(): {
+    name: string;
+    version: string;
+    description: string;
+    modules: string[];
+  } {
+    return {
+      name: 'zync-nest-library',
+      version: '1.0.23',
+      description: 'NestJS library with database backup and file upload utilities',
+      modules: [
+        'billplz',
+        'dbbackup',
+        'exabytes',
+        'firebase',
+        'googleapi',
+        'htmlConverter',
+        'mailer',
+        'message',
+        'razorpay',
+        'senangpay',
+        'ultramsg',
+        'upload',
+        'utils',
+        'xlsx',
+      ],
+    };
+  }
+}
package/libs/src/app.module.ts
@@ -0,0 +1,31 @@
+import { ApolloDriver, ApolloDriverConfig } from "@nestjs/apollo";
+import { Module } from "@nestjs/common";
+import { GraphQLModule } from "@nestjs/graphql";
+import { join } from "path";
+import { AppController } from "./app.controller";
+import { ApDatabaseModule } from "./database";
+import { ApDbBackupModule } from "./backup";
+import { TestModule } from "./test/test.module";
+
+@Module({
+  imports: [
+    TestModule,
+    GraphQLModule.forRoot<ApolloDriverConfig>({
+      driver: ApolloDriver,
+      // csrfPrevention: false,
+      autoSchemaFile: join(process.cwd(), "src/schema.gql"),
+      subscriptions: {
+        "graphql-ws": true,
+        "subscriptions-transport-ws": true,
+      },
+      playground: true,
+      plugins: [],
+      context: ({ req, res }) => ({ req, res }),
+    }),
+    ApDatabaseModule,
+    ApDbBackupModule,
+  ],
+  controllers: [AppController],
+  exports: [ApDatabaseModule, ApDbBackupModule],
+})
+export class ApDbModule {}
package/libs/src/backup/backup.config.ts
@@ -0,0 +1,45 @@
+import { Injectable } from "@nestjs/common";
+
+import * as dotenv from "dotenv";
+import { IDbBackupConfig } from "./backup.interface";
+import * as Path from "path";
+
+dotenv.config();
+
+@Injectable()
+export class DbBackupConfigService {
+  public getBackupConfig(): IDbBackupConfig {
+    const isDocker =
+      process.env.NODE_ENV === "production" || process.env.is_docker === "true";
+
+    return {
+      env: process.env.app_env || "",
+      mongodb: {
+        connectionStrings: process.env.mongodb_backup_connection_strings
+          ? process.env.mongodb_backup_connection_strings.split(",")
+          : [process.env.mongodb_url || ""],
+      },
+      backup: {
+        enabled: process.env.db_backup_enabled === "true",
+        dir: isDocker
+          ? process.env.db_backup_dir || "/app/backups"
+          : process.env.db_backup_dir
+          ? Path.join(__dirname, "..", "..", "..", "..", process.env.db_backup_dir)
+          : Path.join(__dirname, "..", "..", "..", "..", "db_backups"),
+        maxBackups: parseInt(process.env.db_backup_max || "10", 10),
+        isDownload: false, // download to false if upload to space is true
+        uploadToSpace: true, // upload to space to true if download is false
+      },
+      spaces: {
+        name: process.env.aws_bucket || "",
+        region: process.env.aws_s3_region || "nyc3",
+        key: process.env.aws_access_key_id || "",
+        secret: process.env.aws_secret_access_key || "",
+        dir: process.env.s3_spaces_dir || "db-backups",
+        endpoint:
+          process.env.s3_spaces_endpoint ||
+          "https://sgp1.digitaloceanspaces.com",
+      },
+    };
+  }
+}
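
The config service above is driven entirely by lower-case environment variables loaded via dotenv. Below is a minimal, hypothetical sketch of setting those variables in code and reading the resulting config; the values are illustrative only, while the variable names are exactly the ones read in backup.config.ts.

import { DbBackupConfigService } from "./backup.config";

// Illustrative values; a real deployment would define these in .env or the environment.
process.env.db_backup_enabled = "true";
process.env.mongodb_backup_connection_strings =
  "mongodb://localhost:27017/app_one,mongodb://localhost:27017/app_two";
process.env.db_backup_dir = "backups";
process.env.db_backup_max = "7";
process.env.aws_bucket = "my-space";
process.env.aws_access_key_id = "SPACES_KEY";
process.env.aws_secret_access_key = "SPACES_SECRET";
process.env.s3_spaces_endpoint = "sgp1.digitaloceanspaces.com";

const config = new DbBackupConfigService().getBackupConfig();
// config.mongodb.connectionStrings -> ["mongodb://localhost:27017/app_one", "mongodb://localhost:27017/app_two"]
// config.backup.maxBackups         -> 7
// config.backup.enabled            -> true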
package/libs/src/backup/backup.interface.ts
@@ -0,0 +1,21 @@
+export interface IDbBackupConfig {
+  env: string;
+  mongodb: {
+    connectionStrings: string | string[];
+  };
+  backup: {
+    dir: string;
+    enabled: boolean;
+    maxBackups: number;
+    isDownload: boolean;
+    uploadToSpace: boolean;
+  };
+  spaces: {
+    name: string;
+    region: string;
+    key: string;
+    secret: string;
+    endpoint: string;
+    dir?: string;
+  };
+}
package/libs/src/backup/backup.module.ts
@@ -0,0 +1,11 @@
+import { Module } from "@nestjs/common";
+import { DbBackupService } from "./backup.service";
+import { DbBackupConfigService } from "./backup.config";
+import { ApUploadModule } from "zync-nest-library";
+
+@Module({
+  imports: [ApUploadModule],
+  providers: [DbBackupService, DbBackupConfigService],
+  exports: [DbBackupService, DbBackupConfigService],
+})
+export class ApDbBackupModule {}
package/libs/src/backup/backup.service.ts
@@ -0,0 +1,283 @@
+import { Injectable, Logger } from "@nestjs/common";
+import { exec } from "child_process";
+import { promises as fs } from "fs";
+import * as path from "path";
+import { promisify } from "util";
+import { UploadService } from "zync-nest-library";
+import { DbBackupConfigService } from "./backup.config";
+import { IDbBackupConfig } from "./backup.interface";
+
+const execAsync = promisify(exec);
+
+@Injectable()
+export class DbBackupService {
+  private readonly logger = new Logger(DbBackupService.name);
+
+  constructor(
+    private readonly dbConfigService: DbBackupConfigService,
+    private readonly uploadService: UploadService
+  ) {}
+
+  public async runBackup(): Promise<any> {
+    const config = this.dbConfigService.getBackupConfig();
+
+    if (!config?.backup?.enabled) {
+      return;
+    }
+
+    return await this.createBackup(this.dbConfigService.getBackupConfig());
+  }
+
+  public async createBackup(options: IDbBackupConfig): Promise<any> {
+    try {
+      // Validate configuration before starting backup
+      this.validateBackupOptions(options);
+
+      // Create backup directory if it doesn't exist
+      await this.ensureDirectoryExists(options.backup.dir);
+
+      console.log("Starting backup process...", options.mongodb);
+
+      for (const connectionString of options.mongodb.connectionStrings) {
+        const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
+
+        const dbName = this.parseConnectionStringDatabase(connectionString);
+
+        if (!dbName) {
+          this.logger.warn(
+            `No valid database name found in connection string: ${connectionString}`
+          );
+          continue;
+        }
+
+        this.logger.log(`Creating backup for database: ${dbName}`);
+
+        const backupFileName = `${
+          process.env.app_name || "dev"
+        }_${dbName}_backup_${timestamp}.tar.gz`;
+
+        const localBackupPath = path.join(
+          options.backup.dir,
+          `${process.env.app_name || "dev"}_${dbName}_backup_${timestamp}`
+        );
+        const localBackupFile = path.join(options.backup.dir, backupFileName);
+        // Ensure the local backup directory exists
+        await this.ensureDirectoryExists(localBackupPath);
+        // Create MongoDB dump
+        await this.createMongoDump(connectionString, localBackupPath);
+        // Compress the backup
+        await this.compressBackup(localBackupPath, localBackupFile);
+
+        if (options?.backup?.isDownload) {
+          return await fs.readFile(localBackupFile);
+        }
+
+        if (options?.backup?.uploadToSpace) {
+          await this.uploadToSpaces(
+            options.spaces,
+            backupFileName,
+            localBackupFile
+          );
+        }
+        // Upload to DigitalOcean Spaces
+        // Clean up
+        await this.cleanup(
+          localBackupPath,
+          options.backup.dir,
+          options.backup.maxBackups
+        );
+
+        this.logger.log(`Backup for ${dbName} completed: ${backupFileName}`);
+      }
+    } catch (error) {
+      this.logger.error(`Backup failed: ${error.message}`);
+      throw error;
+    }
+  }
+
+  private validateBackupOptions(options: IDbBackupConfig): void {
+    if (!options.mongodb?.connectionStrings) {
+      throw new Error("MongoDB connection strings are required");
+    }
+
+    const connectionStrings = Array.isArray(options.mongodb.connectionStrings)
+      ? options.mongodb.connectionStrings
+      : [options.mongodb.connectionStrings];
+
+    if (
+      connectionStrings.length === 0 ||
+      connectionStrings.some((cs) => !cs || cs.trim() === "")
+    ) {
+      throw new Error(
+        "At least one valid MongoDB connection string is required"
+      );
+    }
+
+    if (options?.backup?.uploadToSpace) {
+      if (!options.spaces) {
+        throw new Error("DigitalOcean Spaces configuration is required");
+      }
+
+      if (
+        !options.spaces.name ||
+        !options.spaces.key ||
+        !options.spaces.secret ||
+        !options.spaces.endpoint
+      ) {
+        throw new Error(
+          "DigitalOcean Spaces configuration is incomplete (name, key, secret, endpoint are all required)"
+        );
+      }
+    }
+
+    if (!options.backup?.dir) {
+      throw new Error("Backup directory is required");
+    }
+  }
+
+  private async ensureDirectoryExists(dir: string): Promise<void> {
+    try {
+      await fs.mkdir(dir, { recursive: true });
+    } catch (error) {
+      if (error.code !== "EEXIST") {
+        throw error;
+      }
+    }
+  }
+
+  private async createMongoDump(
+    connectionString: string,
+    outputPath: string
+  ): Promise<void> {
+    this.logger.log("Creating MongoDB dump...");
+
+    if (!connectionString) {
+      throw new Error("Connection string is required for MongoDB backup");
+    }
+
+    await execAsync(
+      `mongodump --uri "${connectionString}" --out ${outputPath}`
+    );
+  }
+
+  private async compressBackup(
+    sourcePath: string,
+    destinationFile: string
+  ): Promise<void> {
+    this.logger.log("Compressing backup...");
+    await execAsync(`tar -czvf ${destinationFile} -C ${sourcePath} .`);
+  }
+
+  private async uploadToSpaces(
+    spacesConfig: IDbBackupConfig["spaces"],
+    fileName: string,
+    filePath: string
+  ): Promise<void> {
+    this.logger.log("Uploading to DigitalOcean Spaces...");
+
+    // Validate required spaces configuration
+    if (!spacesConfig.name) {
+      throw new Error("DigitalOcean Spaces bucket name is required");
+    }
+    if (!spacesConfig.key) {
+      throw new Error("DigitalOcean Spaces access key is required");
+    }
+    if (!spacesConfig.secret) {
+      throw new Error("DigitalOcean Spaces secret key is required");
+    }
+    if (!spacesConfig.endpoint) {
+      throw new Error("DigitalOcean Spaces endpoint is required");
+    }
+
+    // Ensure endpoint is a proper URL
+    let endpoint = spacesConfig.endpoint;
+    if (
+      endpoint &&
+      !endpoint.startsWith("http://") &&
+      !endpoint.startsWith("https://")
+    ) {
+      endpoint = `https://${endpoint}`;
+    }
+
+    const fileContent = await fs.readFile(filePath);
+
+    await this.uploadService.upload.uploadBuffer({
+      file: fileContent,
+      filename: fileName,
+      disableTransformName: true,
+      filetype: "application/gzip",
+      dir: spacesConfig.dir || "db-backups",
+    });
+  }
+
+  private async cleanup(
+    backupPath: string,
+    backupDir: string,
+    maxBackups: number
+  ): Promise<void> {
+    this.logger.log("Cleaning up...");
+
+    // Remove the uncompressed backup
+    await fs.rm(backupPath, { recursive: true, force: true });
+
+    // Remove old backups
+    this.cleanupBackups();
+  }
+
+  public async cleanupBackups(): Promise<void> {
+    this.logger.log("Cleaning up...");
+
+    const { backup } = this.dbConfigService.getBackupConfig();
+
+    // Remove old backups
+    const files = await fs.readdir(backup.dir);
+
+    const backupFiles = files
+      .filter((file) => file.endsWith(".tar.gz"))
+      .sort()
+      .reverse();
+
+    if (backupFiles.length > backup.maxBackups) {
+      const toDelete = backupFiles.slice(backup.maxBackups);
+      for (const file of toDelete) {
+        console.log(file, "files to delete");
+        console.log(file?.replace(".tar.gz", ""), "files to delete");
+
+        await fs.unlink(path.join(backup.dir, file));
+        await fs.rm(path.join(backup.dir, file?.replace(".tar.gz", "")), {
+          recursive: true,
+          force: true,
+        });
+      }
+      this.logger.log(`Removed ${toDelete.length} old backup(s)`);
+    }
+  }
+
+  private parseConnectionStringDatabase(
+    connectionString: string
+  ): string | null {
+    try {
+      // Remove any authentication info and query parameters for parsing
+      const cleanUri = connectionString.replace(/^mongodb(\+srv)?:\/\//, "");
+
+      // Find the database name after the last slash and before any query parameters
+      const dbMatch = cleanUri.match(/\/([^/?&]+)(\?|$)/);
+
+      if (
+        dbMatch &&
+        dbMatch[1] &&
+        dbMatch[1] !== "admin" &&
+        dbMatch[1] !== "test"
+      ) {
+        return dbMatch[1];
+      }
+
+      return null;
+    } catch (error) {
+      this.logger.warn(
+        `Failed to parse database from connection string: ${error.message}`
+      );
+      return null;
+    }
+  }
+}
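
DbBackupService exposes runBackup() and cleanupBackups() but does not schedule them itself. A minimal, hypothetical consumer sketch follows; it assumes the package's root index re-exports ApDbBackupModule and DbBackupService, and it uses @nestjs/schedule for the cron trigger, which is not something this diff establishes.

import { Injectable, Module } from "@nestjs/common";
import { Cron, ScheduleModule } from "@nestjs/schedule";
// Assumed re-exports from the package root; adjust the import path if the entry point differs.
import { ApDbBackupModule, DbBackupService } from "zync-nest-data-module";

@Injectable()
class NightlyBackupJob {
  constructor(private readonly backups: DbBackupService) {}

  // Fires daily at 02:00; runBackup() returns early unless db_backup_enabled === "true".
  @Cron("0 2 * * *")
  async handle(): Promise<void> {
    await this.backups.runBackup();
  }
}

@Module({
  imports: [ScheduleModule.forRoot(), ApDbBackupModule],
  providers: [NightlyBackupJob],
})
export class BackupJobModule {}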
package/libs/src/database/database.module.ts
@@ -0,0 +1,26 @@
+import { Global, Module } from "@nestjs/common";
+import { MongooseModule } from "@nestjs/mongoose";
+import { DatabaseService } from "./database.service";
+import { TransactionManager } from "./database.transaction";
+import {
+  ApUniqueIdGenerator,
+  UniqueId,
+  UniqueIdSchema,
+} from "./database.uniqueId";
+import { ConfigModule } from "@nestjs/config";
+
+console.log(process.env.mongodb_url,"db url");
+
+@Global()
+@Module({
+  imports: [
+    ConfigModule.forRoot(),
+    MongooseModule.forRoot(process.env.mongodb_url),
+    MongooseModule.forFeature([
+      { name: UniqueId.name, schema: UniqueIdSchema },
+    ]),
+  ],
+  providers: [TransactionManager, ApUniqueIdGenerator, DatabaseService],
+  exports: [ApUniqueIdGenerator, TransactionManager, DatabaseService],
+})
+export class ApDatabaseModule {}
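
ApDatabaseModule is marked @Global() and passes process.env.mongodb_url to MongooseModule.forRoot() when the module file is evaluated, so the variable must be set before the application imports it. A hedged consumer sketch is below; the provider names come from the exports above, no methods are called because their signatures live in files not shown in this diff, and the package-root import path is assumed.

import { Injectable, Module } from "@nestjs/common";
// Assumed re-exports from the package root.
import {
  ApDatabaseModule,
  DatabaseService,
  TransactionManager,
} from "zync-nest-data-module";

@Injectable()
class ReportingService {
  // Because ApDatabaseModule is global, these providers can be injected from any
  // feature module once the root module has imported ApDatabaseModule a single time.
  constructor(
    private readonly db: DatabaseService,
    private readonly tx: TransactionManager,
  ) {}
}

@Module({
  imports: [ApDatabaseModule],
  providers: [ReportingService],
})
export class ReportingModule {}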