nodejs-quickstart-structure 1.11.1 ā 1.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +9 -0
- package/README.md +1 -1
- package/bin/index.js +2 -2
- package/lib/generator.js +8 -2
- package/lib/modules/app-setup.js +24 -0
- package/lib/modules/config-files.js +18 -1
- package/lib/modules/kafka-setup.js +2 -35
- package/package.json +1 -1
- package/templates/clean-architecture/js/src/index.js.ejs +2 -4
- package/templates/clean-architecture/js/src/infrastructure/config/env.js.ejs +47 -0
- package/templates/clean-architecture/js/src/infrastructure/webserver/middlewares/error.middleware.js +2 -1
- package/templates/clean-architecture/js/src/infrastructure/webserver/server.js.ejs +5 -2
- package/templates/clean-architecture/js/src/interfaces/graphql/resolvers/user.resolvers.js.ejs +4 -1
- package/templates/clean-architecture/ts/src/config/env.ts.ejs +46 -0
- package/templates/clean-architecture/ts/src/index.ts.ejs +9 -12
- package/templates/clean-architecture/ts/src/interfaces/graphql/resolvers/user.resolvers.ts.ejs +4 -1
- package/templates/clean-architecture/ts/src/utils/error.middleware.ts.ejs +2 -1
- package/templates/common/.env.example.ejs +3 -1
- package/templates/common/README.md.ejs +30 -0
- package/templates/common/database/js/mongoose.js.ejs +3 -1
- package/templates/common/database/ts/mongoose.ts.ejs +3 -1
- package/templates/common/docker-compose.yml.ejs +11 -1
- package/templates/common/ecosystem.config.js.ejs +40 -0
- package/templates/common/package.json.ejs +3 -1
- package/templates/mvc/js/src/config/env.js.ejs +46 -0
- package/templates/mvc/js/src/graphql/resolvers/user.resolvers.js.ejs +4 -1
- package/templates/mvc/js/src/index.js.ejs +2 -2
- package/templates/mvc/js/src/utils/error.middleware.js +2 -1
- package/templates/mvc/ts/src/config/env.ts.ejs +45 -0
- package/templates/mvc/ts/src/graphql/resolvers/user.resolvers.ts.ejs +4 -1
- package/templates/mvc/ts/src/index.ts.ejs +8 -12
- package/templates/mvc/ts/src/utils/error.middleware.ts.ejs +2 -1
- package/templates/clean-architecture/js/src/domain/repositories/UserRepository.js +0 -9
package/CHANGELOG.md
CHANGED
|
@@ -5,6 +5,15 @@ All notable changes to this project will be documented in this file.
|
|
|
5
5
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
|
6
6
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
|
7
7
|
|
|
8
|
+
## [1.12.0] - 2026-03-04
|
|
9
|
+
### Added
|
|
10
|
+
- **Zod Environment Validation:** Replaced manual `dotenv` process calls in server entry points with a centralized schema parser.
|
|
11
|
+
- Automatically generates `src/config/env.ts` (or `.js`) validating `NODE_ENV`, `PORT`, and the database, cache, and Kafka connection definitions, gracefully crashing the app at startup if any required variable is missing.
|
|
12
|
+
- **PM2 Deployment Configuration:** Natively supports PM2 ecosystem clustering for VPS and EC2 configurations out-of-the-box.
|
|
13
|
+
- Generates `ecosystem.config.js` intelligently mapping dynamic environments for Redis, Databases, and Kafka without user prompts.
|
|
14
|
+
- Modifies `package.json` with an out-of-the-box `npm run deploy` script bound to `pm2 start ecosystem.config.js --env production`.
|
|
15
|
+
- Upgraded the generated README deployment guides with clear CLI instructions outlining the contrast between running with Docker vs. PM2.
|
|
16
|
+
|
|
8
17
|
## [1.11.1] - 2026-03-03
|
|
9
18
|
### Fixed
|
|
10
19
|
- Fixed relative import paths in Clean Architecture JS `error.middleware.js` ā changed to correct 3-level relative paths (`../../../`).
|
package/README.md
CHANGED
|
@@ -35,6 +35,7 @@ We don't just generate boilerplate; we generate **production-ready** foundations
|
|
|
35
35
|
- **š CI/CD Integration**: Pre-configured workflows for **GitHub Actions**, **Jenkins**, and **GitLab CI**.
|
|
36
36
|
- **ā Git Hooks**: `Husky` and `Lint-Staged` to ensure no bad code is ever committed.
|
|
37
37
|
- **š³ DevOps**: Highly optimized **Multi-Stage Dockerfile** for small, secure production images.
|
|
38
|
+
- **š Deployment**: Ship confidently with an integrated **PM2 Ecosystem Configuration** for zero-downtime reloads and robust process management.
|
|
38
39
|
|
|
39
40
|
## š§© 240+ Project Combinations
|
|
40
41
|
|
|
@@ -92,7 +93,6 @@ The generated project will include:
|
|
|
92
93
|
|
|
93
94
|
- `src/`: Source code (controllers, routes, services/use-cases).
|
|
94
95
|
- `src/errors/`: Custom error classes ā `ApiError`, `NotFoundError`, `BadRequestError`.
|
|
95
|
-
- `src/utils/error.middleware.{ts|js}`: Global Express error handler (logs 500s, returns `{ statusCode, message }`).
|
|
96
96
|
- `flyway/sql/`: SQL migration scripts (if SQL database selected).
|
|
97
97
|
- `docker-compose.yml`: Services configuration for DB, Flyway, and Kafka.
|
|
98
98
|
- `package.json`: Dependencies and scripts (`start`, `dev`, `build`).
|
package/bin/index.js
CHANGED
|
@@ -52,7 +52,7 @@ program
|
|
|
52
52
|
|
|
53
53
|
console.log(chalk.green('\nā Project generated successfully!'));
|
|
54
54
|
|
|
55
|
-
let manualStartInstructions = `\
|
|
55
|
+
let manualStartInstructions = `\n${chalk.yellow('Development:')}\n cd ${answers.projectName}\n npm install`;
|
|
56
56
|
|
|
57
57
|
const needsInfrastructure = answers.database !== 'None' || answers.caching === 'Redis' || answers.communication === 'Kafka';
|
|
58
58
|
|
|
@@ -67,7 +67,7 @@ program
|
|
|
67
67
|
manualStartInstructions += `\n npm run dev`;
|
|
68
68
|
}
|
|
69
69
|
|
|
70
|
-
console.log(chalk.cyan(`\nNext steps:\n cd ${answers.projectName}\n npm install\n docker-compose up\n-----------------------${manualStartInstructions}`));
|
|
70
|
+
console.log(chalk.cyan(`\nNext steps:\n cd ${answers.projectName}\n npm install\n docker-compose up\n-----------------------${manualStartInstructions}\n\n${chalk.yellow('Production (PM2):')}\n npm run build\n npm run deploy\n npx pm2 logs`));
|
|
71
71
|
|
|
72
72
|
} catch (error) {
|
|
73
73
|
console.error(chalk.red('Error generating project:'), error);
|
package/lib/generator.js
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import path from 'path';
|
|
2
2
|
import { fileURLToPath } from 'url';
|
|
3
3
|
import { setupProjectDirectory, copyBaseStructure, copyCommonFiles } from './modules/project-setup.js';
|
|
4
|
-
import { renderPackageJson, renderDockerCompose, renderReadme, renderDockerfile, renderProfessionalConfig, setupCiCd, renderTestSample, renderEnvExample } from './modules/config-files.js';
|
|
5
|
-
import { renderIndexFile, renderErrorMiddleware, renderDynamicComponents, renderSwaggerConfig, setupViews as setupSrcViews } from './modules/app-setup.js';
|
|
4
|
+
import { renderPackageJson, renderDockerCompose, renderReadme, renderDockerfile, renderProfessionalConfig, setupCiCd, renderTestSample, renderEnvExample, renderPm2Config } from './modules/config-files.js';
|
|
5
|
+
import { renderIndexFile, renderEnvConfig, renderErrorMiddleware, renderDynamicComponents, renderSwaggerConfig, setupViews as setupSrcViews } from './modules/app-setup.js';
|
|
6
6
|
import { setupDatabase } from './modules/database-setup.js';
|
|
7
7
|
import { setupKafka, setupViews } from './modules/kafka-setup.js';
|
|
8
8
|
import { setupCaching } from './modules/caching-setup.js';
|
|
@@ -33,6 +33,9 @@ export const generateProject = async (config) => {
|
|
|
33
33
|
// 6. Render index file (ts/js)
|
|
34
34
|
await renderIndexFile(templatePath, targetDir, config);
|
|
35
35
|
|
|
36
|
+
// 6a. Render Environment Configuration
|
|
37
|
+
await renderEnvConfig(templatePath, targetDir, config);
|
|
38
|
+
|
|
36
39
|
// 6a. Render error middleware
|
|
37
40
|
await renderErrorMiddleware(templatePath, targetDir, config);
|
|
38
41
|
|
|
@@ -73,6 +76,9 @@ export const generateProject = async (config) => {
|
|
|
73
76
|
// 15. Env Example
|
|
74
77
|
await renderEnvExample(templatesDir, targetDir, config);
|
|
75
78
|
|
|
79
|
+
// 16. PM2 Configuration
|
|
80
|
+
await renderPm2Config(templatesDir, targetDir, config);
|
|
81
|
+
|
|
76
82
|
console.log(`
|
|
77
83
|
====================================================
|
|
78
84
|
Node.js Project Created Successfully!
|
package/lib/modules/app-setup.js
CHANGED
|
@@ -22,6 +22,30 @@ export const renderIndexFile = async (templatePath, targetDir, config) => {
|
|
|
22
22
|
}
|
|
23
23
|
};
|
|
24
24
|
|
|
25
|
+
export const renderEnvConfig = async (templatePath, targetDir, config) => {
|
|
26
|
+
const { language, architecture, database, caching, communication } = config;
|
|
27
|
+
const envExt = language === 'TypeScript' ? 'ts' : 'js';
|
|
28
|
+
|
|
29
|
+
let configDir = path.join(targetDir, 'src', 'config');
|
|
30
|
+
if (architecture === 'Clean Architecture' && language === 'JavaScript') {
|
|
31
|
+
configDir = path.join(targetDir, 'src', 'infrastructure', 'config');
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
const envTemplatePath = path.join(configDir, `env.${envExt}.ejs`);
|
|
35
|
+
const envDestPath = path.join(configDir, `env.${envExt}`);
|
|
36
|
+
|
|
37
|
+
if (await fs.pathExists(envTemplatePath)) {
|
|
38
|
+
const envTemplate = await fs.readFile(envTemplatePath, 'utf-8');
|
|
39
|
+
const envContent = ejs.render(envTemplate, {
|
|
40
|
+
database,
|
|
41
|
+
caching,
|
|
42
|
+
communication
|
|
43
|
+
});
|
|
44
|
+
await fs.writeFile(envDestPath, envContent);
|
|
45
|
+
await fs.remove(envTemplatePath);
|
|
46
|
+
}
|
|
47
|
+
};
|
|
48
|
+
|
|
25
49
|
export const renderErrorMiddleware = async (templatePath, targetDir, config) => {
|
|
26
50
|
const { language, architecture } = config;
|
|
27
51
|
const errName = language === 'TypeScript' ? 'error.middleware.ts' : 'error.middleware.js';
|
|
@@ -59,6 +59,21 @@ export const renderDockerfile = async (templatesDir, targetDir, config) => {
|
|
|
59
59
|
await fs.writeFile(path.join(targetDir, 'Dockerfile'), dockerfileContent);
|
|
60
60
|
};
|
|
61
61
|
|
|
62
|
+
export const renderPm2Config = async (templatesDir, targetDir, config) => {
|
|
63
|
+
const { projectName, database, dbName, communication, language, caching } = config;
|
|
64
|
+
const pm2ConfigPath = path.join(targetDir, 'ecosystem.config.js');
|
|
65
|
+
const pm2Template = await fs.readFile(path.join(templatesDir, 'common', 'ecosystem.config.js.ejs'), 'utf-8');
|
|
66
|
+
const pm2Content = ejs.render(pm2Template, {
|
|
67
|
+
projectName,
|
|
68
|
+
database,
|
|
69
|
+
dbName,
|
|
70
|
+
communication,
|
|
71
|
+
language,
|
|
72
|
+
caching
|
|
73
|
+
});
|
|
74
|
+
await fs.writeFile(pm2ConfigPath, pm2Content);
|
|
75
|
+
};
|
|
76
|
+
|
|
62
77
|
export const renderProfessionalConfig = async (templatesDir, targetDir, language) => {
|
|
63
78
|
const eslintTemplate = await fs.readFile(path.join(templatesDir, 'common', 'eslint.config.mjs.ejs'), 'utf-8');
|
|
64
79
|
const eslintContent = ejs.render(eslintTemplate, { language });
|
|
@@ -98,7 +113,8 @@ export const renderTestSample = async (templatesDir, targetDir, language) => {
|
|
|
98
113
|
|
|
99
114
|
export const renderEnvExample = async (templatesDir, targetDir, config) => {
|
|
100
115
|
const { database, dbName, communication, projectName, caching } = config;
|
|
101
|
-
const
|
|
116
|
+
const envExamplePath = path.join(targetDir, '.env.example');
|
|
117
|
+
const envPath = path.join(targetDir, '.env');
|
|
102
118
|
const envTemplate = await fs.readFile(path.join(templatesDir, 'common', '.env.example.ejs'), 'utf-8');
|
|
103
119
|
|
|
104
120
|
const envContent = ejs.render(envTemplate, {
|
|
@@ -109,5 +125,6 @@ export const renderEnvExample = async (templatesDir, targetDir, config) => {
|
|
|
109
125
|
caching
|
|
110
126
|
});
|
|
111
127
|
|
|
128
|
+
await fs.writeFile(envExamplePath, envContent);
|
|
112
129
|
await fs.writeFile(envPath, envContent);
|
|
113
130
|
};
|
|
@@ -55,21 +55,12 @@ export const setupKafka = async (templatesDir, targetDir, config) => {
|
|
|
55
55
|
// Cleanup old services folder
|
|
56
56
|
await fs.remove(path.join(targetDir, 'src/services'));
|
|
57
57
|
|
|
58
|
-
// Remove src/config if it was only for Kafka and not needed by other parts
|
|
59
|
-
// But Database setup might assume src/config existence in some templates (though moved to infrastructure/database for clean arch)
|
|
60
|
-
// Safest to leave src/config if non-empty, or remove if empty.
|
|
61
|
-
// For now, mirroring original logic: remove specific REST folders
|
|
62
|
-
|
|
63
58
|
// Remove REST-specific folders (Interfaces)
|
|
64
59
|
await fs.remove(path.join(targetDir, 'src/interfaces/routes'));
|
|
65
60
|
await fs.remove(path.join(targetDir, 'src/interfaces/controllers'));
|
|
66
61
|
|
|
67
|
-
// Original logic removed src/config entirely for
|
|
68
|
-
//
|
|
69
|
-
// await fs.remove(path.join(targetDir, 'src/config'));
|
|
70
|
-
// Yes, it did.
|
|
71
|
-
await fs.remove(path.join(targetDir, 'src/config'));
|
|
72
|
-
|
|
62
|
+
// Original logic removed src/config entirely, but now we use it for Zod env validation in TS.
|
|
63
|
+
// We will no longer delete it.
|
|
73
64
|
} else if (architecture === 'MVC' && (!config.viewEngine || config.viewEngine === 'None')) {
|
|
74
65
|
// MVC Cleanup for Kafka Worker (No views)
|
|
75
66
|
await fs.remove(path.join(targetDir, 'src/controllers'));
|
|
@@ -84,29 +75,5 @@ export const setupViews = async (templatesDir, targetDir, config) => {
|
|
|
84
75
|
if (await fs.pathExists(publicDir)) {
|
|
85
76
|
await fs.copy(publicDir, path.join(targetDir, 'public'));
|
|
86
77
|
}
|
|
87
|
-
|
|
88
|
-
// Copy views mapping
|
|
89
|
-
// Logic handled in database-setup (part of db config block in original) but functionally belongs here or separate.
|
|
90
|
-
// Original: if (viewEngine && viewEngine !== 'None') await fs.copy(...) inside the DB block for MVC.
|
|
91
|
-
// We moved it to database-setup.js to match flow, but let's double check if we missed it there.
|
|
92
|
-
// Checked database-setup.js: It copies views ONLY if database !== 'None' OR if database === 'None'
|
|
93
|
-
// So it is covered. Ideally it should be here, but for now strict refactor keeps it effectively in DB/structure setup phase.
|
|
94
|
-
// To be cleaner, we should move the VIEW copying here.
|
|
95
|
-
|
|
96
|
-
// Moving View Copying Check here for better separation:
|
|
97
|
-
// We need to verify if database-setup.js ALREADY does this.
|
|
98
|
-
// In my prev step for database-setup.js, I included logic:
|
|
99
|
-
// if (architecture === 'MVC') { if (viewEngine...) copy views }
|
|
100
|
-
// So duplication might occur if I add it here too.
|
|
101
|
-
// Let's relies on this module ONLY for public assets for now, or ensure idempotency.
|
|
102
|
-
|
|
103
|
-
// Actually, let's keep it clean. database-setup.js shouldn't handle views.
|
|
104
|
-
// I will assume I can update database-setup.js to remove view copying if I put it here?
|
|
105
|
-
// OR just leave it there for this iteration to avoid breaking changes in flow order.
|
|
106
|
-
// Let's stick to the original flow where possible, but this module is 'kafka-and-views'.
|
|
107
|
-
|
|
108
|
-
// The original logic had view copying inside the "Database Config" block.
|
|
109
|
-
// My database-setup.js preserved that.
|
|
110
|
-
// So this logic here only handles 'public' folder copying which was Step 8 in original.
|
|
111
78
|
}
|
|
112
79
|
};
|
package/package.json
CHANGED
|
@@ -4,8 +4,6 @@ const logger = require('./infrastructure/log/logger');
|
|
|
4
4
|
const { connectKafka, sendMessage } = require('./infrastructure/messaging/kafkaClient');
|
|
5
5
|
<% } -%>
|
|
6
6
|
|
|
7
|
-
const PORT = process.env.PORT || 3000;
|
|
8
|
-
|
|
9
7
|
<%_ if (database !== 'None') { -%>
|
|
10
8
|
// Database Sync
|
|
11
9
|
const syncDatabase = async () => {
|
|
@@ -21,7 +19,7 @@ const syncDatabase = async () => {
|
|
|
21
19
|
<%_ } -%>
|
|
22
20
|
logger.info('Database synced');
|
|
23
21
|
// Start the web server after DB sync
|
|
24
|
-
startServer(
|
|
22
|
+
startServer();
|
|
25
23
|
<%_ if (communication === 'Kafka') { -%>
|
|
26
24
|
// Connect Kafka
|
|
27
25
|
connectKafka().then(async () => {
|
|
@@ -42,7 +40,7 @@ const syncDatabase = async () => {
|
|
|
42
40
|
};
|
|
43
41
|
syncDatabase();
|
|
44
42
|
<%_ } else { -%>
|
|
45
|
-
startServer(
|
|
43
|
+
startServer();
|
|
46
44
|
<%_ if (communication === 'Kafka') { -%>
|
|
47
45
|
// Connect Kafka
|
|
48
46
|
connectKafka().then(async () => {
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
const { z } = require('zod');
|
|
2
|
+
const logger = require('../log/logger');
|
|
3
|
+
|
|
4
|
+
if (process.env.NODE_ENV !== 'production') {
|
|
5
|
+
require('dotenv').config();
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
const envSchema = z.object({
|
|
9
|
+
NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
|
|
10
|
+
PORT: z.string().transform(Number).default('3000'),
|
|
11
|
+
<%_ if (database !== 'None') { -%>
|
|
12
|
+
DB_HOST: z.string(),
|
|
13
|
+
<%_ if (database === 'MySQL') { -%>
|
|
14
|
+
DB_USER: z.string(),
|
|
15
|
+
DB_PASSWORD: z.string(),
|
|
16
|
+
DB_NAME: z.string(),
|
|
17
|
+
DB_PORT: z.string().transform(Number),
|
|
18
|
+
<%_ } else if (database === 'PostgreSQL') { -%>
|
|
19
|
+
DB_USER: z.string(),
|
|
20
|
+
DB_PASSWORD: z.string(),
|
|
21
|
+
DB_NAME: z.string(),
|
|
22
|
+
DB_PORT: z.string().transform(Number),
|
|
23
|
+
<%_ } else if (database === 'MongoDB') { -%>
|
|
24
|
+
DB_NAME: z.string(),
|
|
25
|
+
DB_PORT: z.string().transform(Number),
|
|
26
|
+
<%_ } -%>
|
|
27
|
+
<%_ } -%>
|
|
28
|
+
<%_ if (caching === 'Redis') { -%>
|
|
29
|
+
REDIS_HOST: z.string(),
|
|
30
|
+
REDIS_PORT: z.string().transform(Number),
|
|
31
|
+
REDIS_PASSWORD: z.string().optional(),
|
|
32
|
+
<%_ } -%>
|
|
33
|
+
<%_ if (communication === 'Kafka') { -%>
|
|
34
|
+
KAFKA_BROKER: z.string(),
|
|
35
|
+
<%_ } -%>
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
const _env = envSchema.safeParse(process.env);
|
|
39
|
+
|
|
40
|
+
if (!_env.success) {
|
|
41
|
+
logger.error('ā Invalid environment variables:', _env.error.format());
|
|
42
|
+
process.exit(1);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
const env = _env.data;
|
|
46
|
+
|
|
47
|
+
module.exports = { env };
|
package/templates/clean-architecture/js/src/infrastructure/webserver/middlewares/error.middleware.js
CHANGED
|
@@ -2,7 +2,8 @@ const logger = require('../../log/logger');
|
|
|
2
2
|
const { ApiError } = require('../../../errors/ApiError');
|
|
3
3
|
const HTTP_STATUS = require('../../../utils/httpCodes');
|
|
4
4
|
|
|
5
|
-
|
|
5
|
+
// eslint-disable-next-line no-unused-vars
|
|
6
|
+
const errorMiddleware = (err, req, res, next) => {
|
|
6
7
|
let error = err;
|
|
7
8
|
|
|
8
9
|
if (!(error instanceof ApiError)) {
|
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
const express = require('express');
|
|
2
2
|
const cors = require('cors');
|
|
3
|
-
require('dotenv').config();
|
|
4
3
|
const logger = require('../log/logger');
|
|
5
4
|
const morgan = require('morgan');
|
|
6
5
|
const { errorMiddleware } = require('./middlewares/error.middleware');
|
|
@@ -19,7 +18,11 @@ const { typeDefs, resolvers } = require('../../interfaces/graphql');
|
|
|
19
18
|
const { gqlContext } = require('../../interfaces/graphql/context');
|
|
20
19
|
<%_ } -%>
|
|
21
20
|
|
|
22
|
-
const
|
|
21
|
+
const { env } = require('../config/env');
|
|
22
|
+
|
|
23
|
+
const startServer = async () => {
|
|
24
|
+
// Determine port using the validated env
|
|
25
|
+
const port = env.PORT;
|
|
23
26
|
const app = express();
|
|
24
27
|
|
|
25
28
|
app.use(cors());
|
package/templates/clean-architecture/js/src/interfaces/graphql/resolvers/user.resolvers.js.ejs
CHANGED
|
@@ -12,7 +12,10 @@ const userResolvers = {
|
|
|
12
12
|
createUser: async (_, { name, email }) => {
|
|
13
13
|
return await userController.createUser({ name, email });
|
|
14
14
|
}
|
|
15
|
-
}
|
|
15
|
+
}<%_ if (database === 'MongoDB') { -%>,
|
|
16
|
+
User: {
|
|
17
|
+
id: (parent) => parent.id || parent._id
|
|
18
|
+
}<%_ } %>
|
|
16
19
|
};
|
|
17
20
|
|
|
18
21
|
module.exports = { userResolvers };
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import dotenv from 'dotenv';
|
|
2
|
+
import { z } from 'zod';
|
|
3
|
+
import logger from '@/infrastructure/log/logger';
|
|
4
|
+
|
|
5
|
+
if (process.env.NODE_ENV !== 'production') {
|
|
6
|
+
dotenv.config();
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
const envSchema = z.object({
|
|
10
|
+
NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
|
|
11
|
+
PORT: z.string().transform(Number).default('3000'),
|
|
12
|
+
<%_ if (database !== 'None') { -%>
|
|
13
|
+
DB_HOST: z.string(),
|
|
14
|
+
<%_ if (database === 'MySQL') { -%>
|
|
15
|
+
DB_USER: z.string(),
|
|
16
|
+
DB_PASSWORD: z.string(),
|
|
17
|
+
DB_NAME: z.string(),
|
|
18
|
+
DB_PORT: z.string().transform(Number),
|
|
19
|
+
<%_ } else if (database === 'PostgreSQL') { -%>
|
|
20
|
+
DB_USER: z.string(),
|
|
21
|
+
DB_PASSWORD: z.string(),
|
|
22
|
+
DB_NAME: z.string(),
|
|
23
|
+
DB_PORT: z.string().transform(Number),
|
|
24
|
+
<%_ } else if (database === 'MongoDB') { -%>
|
|
25
|
+
DB_NAME: z.string(),
|
|
26
|
+
DB_PORT: z.string().transform(Number),
|
|
27
|
+
<%_ } -%>
|
|
28
|
+
<%_ } -%>
|
|
29
|
+
<%_ if (caching === 'Redis') { -%>
|
|
30
|
+
REDIS_HOST: z.string(),
|
|
31
|
+
REDIS_PORT: z.string().transform(Number),
|
|
32
|
+
REDIS_PASSWORD: z.string().optional(),
|
|
33
|
+
<%_ } -%>
|
|
34
|
+
<%_ if (communication === 'Kafka') { -%>
|
|
35
|
+
KAFKA_BROKER: z.string(),
|
|
36
|
+
<%_ } -%>
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
const _env = envSchema.safeParse(process.env);
|
|
40
|
+
|
|
41
|
+
if (!_env.success) {
|
|
42
|
+
logger.error('ā Invalid environment variables:', _env.error.format());
|
|
43
|
+
process.exit(1);
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export const env = _env.data;
|
|
@@ -3,7 +3,6 @@ import cors from 'cors';
|
|
|
3
3
|
import helmet from 'helmet';
|
|
4
4
|
import hpp from 'hpp';
|
|
5
5
|
import rateLimit from 'express-rate-limit';
|
|
6
|
-
import dotenv from 'dotenv';
|
|
7
6
|
import logger from '@/infrastructure/log/logger';
|
|
8
7
|
import morgan from 'morgan';
|
|
9
8
|
import { errorMiddleware } from '@/utils/error.middleware';
|
|
@@ -20,12 +19,12 @@ import { unwrapResolverError } from '@apollo/server/errors';
|
|
|
20
19
|
import { ApiError } from '@/errors/ApiError';
|
|
21
20
|
import { typeDefs, resolvers } from '@/interfaces/graphql';
|
|
22
21
|
import { gqlContext, MyContext } from '@/interfaces/graphql/context';
|
|
23
|
-
<% } -%>
|
|
22
|
+
<%_ } -%>
|
|
24
23
|
|
|
25
|
-
|
|
24
|
+
import { env } from '@/config/env';
|
|
26
25
|
|
|
27
26
|
const app = express();
|
|
28
|
-
const port =
|
|
27
|
+
const port = env.PORT;
|
|
29
28
|
|
|
30
29
|
// Security Middleware
|
|
31
30
|
<%_ if (communication === 'GraphQL') { -%>
|
|
@@ -96,15 +95,13 @@ const startServer = async () => {
|
|
|
96
95
|
app.listen(port, () => {
|
|
97
96
|
logger.info(`Server running on port ${port}`);
|
|
98
97
|
<%_ if (communication === 'Kafka') { -%>
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
});
|
|
105
|
-
} catch (err) {
|
|
98
|
+
const kafkaService = new KafkaService();
|
|
99
|
+
kafkaService.connect().then(() => {
|
|
100
|
+
logger.info('Kafka connected');
|
|
101
|
+
kafkaService.sendMessage('test-topic', 'Hello Kafka from Clean Arch TS!');
|
|
102
|
+
}).catch(err => {
|
|
106
103
|
logger.error('Failed to connect to Kafka:', err);
|
|
107
|
-
}
|
|
104
|
+
});
|
|
108
105
|
<%_ } -%>
|
|
109
106
|
});
|
|
110
107
|
};
|
package/templates/clean-architecture/ts/src/interfaces/graphql/resolvers/user.resolvers.ts.ejs
CHANGED
|
@@ -14,5 +14,8 @@ export const userResolvers = {
|
|
|
14
14
|
const user = await userController.createUser({ name, email });
|
|
15
15
|
return user;
|
|
16
16
|
}
|
|
17
|
-
}
|
|
17
|
+
}<%_ if (database === 'MongoDB') { -%>,
|
|
18
|
+
User: {
|
|
19
|
+
id: (parent: { id?: string; _id?: unknown }) => parent.id || parent._id
|
|
20
|
+
}<%_ } %>
|
|
18
21
|
};
|
|
@@ -3,7 +3,8 @@ import logger from '@/infrastructure/log/logger';
|
|
|
3
3
|
import { ApiError } from '@/errors/ApiError';
|
|
4
4
|
import { HTTP_STATUS } from '@/utils/httpCodes';
|
|
5
5
|
|
|
6
|
-
|
|
6
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
7
|
+
export const errorMiddleware = (err: Error, req: Request, res: Response, next: unknown) => {
|
|
7
8
|
let error = err;
|
|
8
9
|
|
|
9
10
|
if (!(error instanceof ApiError)) {
|
|
@@ -183,6 +183,36 @@ docker run -p 3000:3000 <%= projectName %>
|
|
|
183
183
|
```
|
|
184
184
|
<% } -%>
|
|
185
185
|
|
|
186
|
+
## š PM2 Deployment (VPS/EC2)
|
|
187
|
+
This project is pre-configured for direct deployment to a VPS/EC2 instance using **PM2** (via `ecosystem.config.js`).
|
|
188
|
+
1. Install dependencies
|
|
189
|
+
```bash
|
|
190
|
+
npm install
|
|
191
|
+
```
|
|
192
|
+
2. **Start Infrastructure (DB, Redis, Kafka, etc.) in the background**
|
|
193
|
+
*(This specifically starts the background services without running the application inside Docker, allowing PM2 to handle it).*
|
|
194
|
+
```bash
|
|
195
|
+
docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> zookeeper kafka<% } %>
|
|
196
|
+
```
|
|
197
|
+
3. **Wait 5-10s** for the database to fully initialize.
|
|
198
|
+
4. **Deploy the App using PM2 in Cluster Mode**
|
|
199
|
+
```bash
|
|
200
|
+
<% if (language === 'TypeScript') { %>npm run build
|
|
201
|
+
<% } %>npm run deploy
|
|
202
|
+
```
|
|
203
|
+
5. **Check logs**
|
|
204
|
+
```bash
|
|
205
|
+
npx pm2 logs
|
|
206
|
+
```
|
|
207
|
+
6. Stop and remove the PM2 application
|
|
208
|
+
```bash
|
|
209
|
+
npx pm2 delete <%= projectName %>
|
|
210
|
+
```
|
|
211
|
+
7. Stop and remove the Docker infrastructure
|
|
212
|
+
```bash
|
|
213
|
+
docker-compose down
|
|
214
|
+
```
|
|
215
|
+
|
|
186
216
|
## š Security Features
|
|
187
217
|
- **Helmet**: Sets secure HTTP headers.
|
|
188
218
|
- **CORS**: Configured for cross-origin requests.
|
|
@@ -8,7 +8,9 @@ logger = require('../log/logger');
|
|
|
8
8
|
<% } %>
|
|
9
9
|
const connectDB = async () => {
|
|
10
10
|
const dbHost = process.env.DB_HOST || 'localhost';
|
|
11
|
-
const
|
|
11
|
+
const dbPort = process.env.DB_PORT || '27017';
|
|
12
|
+
const dbName = process.env.DB_NAME || '<%= dbName %>';
|
|
13
|
+
const mongoURI = process.env.MONGO_URI || `mongodb://${dbHost}:${dbPort}/${dbName}`;
|
|
12
14
|
|
|
13
15
|
let retries = 5;
|
|
14
16
|
while (retries) {
|
|
@@ -6,7 +6,9 @@ import logger from '@/infrastructure/log/logger';
|
|
|
6
6
|
<% } %>
|
|
7
7
|
const connectDB = async (): Promise<void> => {
|
|
8
8
|
const dbHost = process.env.DB_HOST || 'localhost';
|
|
9
|
-
const
|
|
9
|
+
const dbPort = process.env.DB_PORT || '27017';
|
|
10
|
+
const dbName = process.env.DB_NAME || '<%= dbName %>';
|
|
11
|
+
const mongoURI = process.env.MONGO_URI || `mongodb://${dbHost}:${dbPort}/${dbName}`;
|
|
10
12
|
|
|
11
13
|
let retries = 5;
|
|
12
14
|
while (retries) {
|
|
@@ -28,10 +28,15 @@ services:
|
|
|
28
28
|
- DB_USER=root
|
|
29
29
|
- DB_PASSWORD=root
|
|
30
30
|
- DB_NAME=<%= dbName %>
|
|
31
|
+
- DB_PORT=3306
|
|
31
32
|
<%_ } -%><%_ if (database === 'PostgreSQL') { -%>
|
|
32
33
|
- DB_USER=postgres
|
|
33
34
|
- DB_PASSWORD=root
|
|
34
35
|
- DB_NAME=<%= dbName %>
|
|
36
|
+
- DB_PORT=5432
|
|
37
|
+
<%_ } -%><%_ if (database === 'MongoDB') { -%>
|
|
38
|
+
- DB_NAME=<%= dbName %>
|
|
39
|
+
- DB_PORT=27017
|
|
35
40
|
<%_ } -%>
|
|
36
41
|
<%_ } -%>
|
|
37
42
|
<%_ } else { -%>
|
|
@@ -48,10 +53,15 @@ services:
|
|
|
48
53
|
- DB_USER=root
|
|
49
54
|
- DB_PASSWORD=root
|
|
50
55
|
- DB_NAME=<%= dbName %>
|
|
56
|
+
- DB_PORT=3306
|
|
51
57
|
<%_ } -%><%_ if (database === 'PostgreSQL') { -%>
|
|
52
58
|
- DB_USER=postgres
|
|
53
59
|
- DB_PASSWORD=root
|
|
54
60
|
- DB_NAME=<%= dbName %>
|
|
61
|
+
- DB_PORT=5432
|
|
62
|
+
<%_ } -%><%_ if (database === 'MongoDB') { -%>
|
|
63
|
+
- DB_NAME=<%= dbName %>
|
|
64
|
+
- DB_PORT=27017
|
|
55
65
|
<%_ } -%>
|
|
56
66
|
<%_ } -%>
|
|
57
67
|
<%_ } -%>
|
|
@@ -89,7 +99,7 @@ services:
|
|
|
89
99
|
- mongodb_data:/data/db
|
|
90
100
|
|
|
91
101
|
mongo-migrate:
|
|
92
|
-
image: node:
|
|
102
|
+
image: node:22-alpine
|
|
93
103
|
working_dir: /app
|
|
94
104
|
volumes:
|
|
95
105
|
- .:/app
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
module.exports = {
|
|
2
|
+
apps: [{
|
|
3
|
+
name: "<%= projectName %>",
|
|
4
|
+
script: "<% if (language === 'TypeScript') { %>./dist/index.js<% } else { %>./src/index.js<% } %>", // Entry point
|
|
5
|
+
instances: "max", // Run in Cluster Mode to utilize all CPUs (Note: On Windows, cluster mode may throw `spawn wmic ENOENT` errors due to missing WMIC in Windows 11. To fix, change instances to 1, or install wmic)
|
|
6
|
+
exec_mode: "cluster",
|
|
7
|
+
watch: false, // Disable watch in production
|
|
8
|
+
max_memory_restart: "1G",
|
|
9
|
+
env_production: {
|
|
10
|
+
NODE_ENV: "production",
|
|
11
|
+
PORT: 3000,
|
|
12
|
+
<%_ if (caching === 'Redis') { -%>
|
|
13
|
+
REDIS_HOST: "127.0.0.1",
|
|
14
|
+
REDIS_PORT: 6379,
|
|
15
|
+
REDIS_PASSWORD: "",
|
|
16
|
+
<%_ } -%>
|
|
17
|
+
<%_ if (communication === 'Kafka') { -%>
|
|
18
|
+
KAFKA_BROKER: "127.0.0.1:9092",
|
|
19
|
+
KAFKAJS_NO_PARTITIONER_WARNING: 1,
|
|
20
|
+
<%_ } -%>
|
|
21
|
+
<%_ if (database !== 'None') { -%>
|
|
22
|
+
DB_HOST: "127.0.0.1",
|
|
23
|
+
<%_ if (database === 'MySQL') { -%>
|
|
24
|
+
DB_USER: "root",
|
|
25
|
+
DB_PASSWORD: "root",
|
|
26
|
+
DB_NAME: "<%= dbName %>",
|
|
27
|
+
DB_PORT: 3306
|
|
28
|
+
<%_ } else if (database === 'PostgreSQL') { -%>
|
|
29
|
+
DB_USER: "postgres",
|
|
30
|
+
DB_PASSWORD: "root",
|
|
31
|
+
DB_NAME: "<%= dbName %>",
|
|
32
|
+
DB_PORT: 5432
|
|
33
|
+
<%_ } else if (database === 'MongoDB') { -%>
|
|
34
|
+
DB_NAME: "<%= dbName %>",
|
|
35
|
+
DB_PORT: 27017
|
|
36
|
+
<%_ } -%>
|
|
37
|
+
<%_ } -%>
|
|
38
|
+
}
|
|
39
|
+
}]
|
|
40
|
+
};
|
|
@@ -7,6 +7,7 @@
|
|
|
7
7
|
"start": "<% if (language === 'TypeScript') { %>node dist/index.js<% } else { %>node src/index.js<% } %>",
|
|
8
8
|
"dev": "<% if (language === 'TypeScript') { %>nodemon --exec ts-node -r tsconfig-paths/register src/index.ts<% } else { %>nodemon src/index.js<% } %>"<% if (language === 'TypeScript') { %>,
|
|
9
9
|
"build": "rimraf dist && tsc && tsc-alias<% if (viewEngine && viewEngine !== 'None') { %> && cpx \"src/views/**/*\" dist/views<% } %><% if (communication === 'REST APIs') { %> && cpx \"src/**/*.yml\" dist/<% } %>"<% } %>,
|
|
10
|
+
"deploy": "npx pm2 start ecosystem.config.js --env production",
|
|
10
11
|
"lint": "eslint .",
|
|
11
12
|
"lint:fix": "eslint . --fix",
|
|
12
13
|
"format": "prettier --write .",
|
|
@@ -20,6 +21,7 @@
|
|
|
20
21
|
"dependencies": {
|
|
21
22
|
"express": "^4.18.2",
|
|
22
23
|
"dotenv": "^16.3.1",
|
|
24
|
+
"zod": "^3.22.4",
|
|
23
25
|
<% if (database === 'MySQL') { %> "mysql2": "^3.6.5",
|
|
24
26
|
"sequelize": "^6.35.2",
|
|
25
27
|
<% } -%>
|
|
@@ -81,7 +83,7 @@
|
|
|
81
83
|
"lint-staged": "^15.4.3"<% if (language === 'TypeScript') { %>,
|
|
82
84
|
"typescript-eslint": "^8.24.1",<%_ if (communication === 'REST APIs') { %>
|
|
83
85
|
"@types/swagger-ui-express": "^4.1.6",
|
|
84
|
-
"@types/yamljs": "^0.2.34",<%_ }
|
|
86
|
+
"@types/yamljs": "^0.2.34",<%_ } %>
|
|
85
87
|
"jest": "^29.7.0",
|
|
86
88
|
"ts-jest": "^29.2.5",
|
|
87
89
|
"@types/jest": "^29.5.14",
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
const { z } = require('zod');
|
|
2
|
+
const logger = require('../utils/logger');
|
|
3
|
+
|
|
4
|
+
if (process.env.NODE_ENV !== 'production') {
|
|
5
|
+
require('dotenv').config();
|
|
6
|
+
}
|
|
7
|
+
const envSchema = z.object({
|
|
8
|
+
NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
|
|
9
|
+
PORT: z.string().transform(Number).default('3000'),
|
|
10
|
+
<%_ if (database !== 'None') { -%>
|
|
11
|
+
DB_HOST: z.string(),
|
|
12
|
+
<%_ if (database === 'MySQL') { -%>
|
|
13
|
+
DB_USER: z.string(),
|
|
14
|
+
DB_PASSWORD: z.string(),
|
|
15
|
+
DB_NAME: z.string(),
|
|
16
|
+
DB_PORT: z.string().transform(Number),
|
|
17
|
+
<%_ } else if (database === 'PostgreSQL') { -%>
|
|
18
|
+
DB_USER: z.string(),
|
|
19
|
+
DB_PASSWORD: z.string(),
|
|
20
|
+
DB_NAME: z.string(),
|
|
21
|
+
DB_PORT: z.string().transform(Number),
|
|
22
|
+
<%_ } else if (database === 'MongoDB') { -%>
|
|
23
|
+
DB_NAME: z.string(),
|
|
24
|
+
DB_PORT: z.string().transform(Number),
|
|
25
|
+
<%_ } -%>
|
|
26
|
+
<%_ } -%>
|
|
27
|
+
<%_ if (caching === 'Redis') { -%>
|
|
28
|
+
REDIS_HOST: z.string(),
|
|
29
|
+
REDIS_PORT: z.string().transform(Number),
|
|
30
|
+
REDIS_PASSWORD: z.string().optional(),
|
|
31
|
+
<%_ } -%>
|
|
32
|
+
<%_ if (communication === 'Kafka') { -%>
|
|
33
|
+
KAFKA_BROKER: z.string(),
|
|
34
|
+
<%_ } -%>
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
const _env = envSchema.safeParse(process.env);
|
|
38
|
+
|
|
39
|
+
if (!_env.success) {
|
|
40
|
+
logger.error('ā Invalid environment variables:', _env.error.format());
|
|
41
|
+
process.exit(1);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
const env = _env.data;
|
|
45
|
+
|
|
46
|
+
module.exports = { env };
|
|
@@ -10,7 +10,10 @@ const userResolvers = {
|
|
|
10
10
|
createUser: async (_, { name, email }) => {
|
|
11
11
|
return await userController.createUser({ name, email });
|
|
12
12
|
}
|
|
13
|
-
}
|
|
13
|
+
}<%_ if (database === 'MongoDB') { -%>,
|
|
14
|
+
User: {
|
|
15
|
+
id: (parent) => parent.id || parent._id
|
|
16
|
+
}<%_ } %>
|
|
14
17
|
};
|
|
15
18
|
|
|
16
19
|
module.exports = { userResolvers };
|
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
const express = require('express');
|
|
2
2
|
const cors = require('cors');
|
|
3
|
-
require('dotenv').config();
|
|
4
3
|
<%_ if (communication === 'REST APIs') { -%>const apiRoutes = require('./routes/api');<%_ } -%>
|
|
5
4
|
<%_ if (communication === 'Kafka') { -%>const { connectKafka, sendMessage } = require('./services/kafkaService');<%_ } -%>
|
|
6
5
|
<%_ if (communication === 'GraphQL') { -%>
|
|
@@ -16,9 +15,10 @@ const { gqlContext } = require('./graphql/context');
|
|
|
16
15
|
const swaggerUi = require('swagger-ui-express');
|
|
17
16
|
const swaggerSpecs = require('./config/swagger');
|
|
18
17
|
<%_ } -%>
|
|
18
|
+
const { env } = require('./config/env');
|
|
19
19
|
|
|
20
20
|
const app = express();
|
|
21
|
-
const PORT =
|
|
21
|
+
const PORT = env.PORT;
|
|
22
22
|
const logger = require('./utils/logger');
|
|
23
23
|
const morgan = require('morgan');
|
|
24
24
|
const { errorMiddleware } = require('./utils/error.middleware');
|
|
@@ -2,7 +2,8 @@ const logger = require('./logger');
|
|
|
2
2
|
const { ApiError } = require('../errors/ApiError');
|
|
3
3
|
const HTTP_STATUS = require('./httpCodes');
|
|
4
4
|
|
|
5
|
-
|
|
5
|
+
// eslint-disable-next-line no-unused-vars
|
|
6
|
+
const errorMiddleware = (err, req, res, next) => {
|
|
6
7
|
let error = err;
|
|
7
8
|
|
|
8
9
|
if (!(error instanceof ApiError)) {
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import dotenv from 'dotenv';
|
|
2
|
+
import { z } from 'zod';
|
|
3
|
+
import logger from '@/utils/logger';
|
|
4
|
+
|
|
5
|
+
if (process.env.NODE_ENV !== 'production') {
|
|
6
|
+
dotenv.config();
|
|
7
|
+
}
|
|
8
|
+
const envSchema = z.object({
|
|
9
|
+
NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
|
|
10
|
+
PORT: z.string().transform(Number).default('3000'),
|
|
11
|
+
<%_ if (database !== 'None') { -%>
|
|
12
|
+
DB_HOST: z.string(),
|
|
13
|
+
<%_ if (database === 'MySQL') { -%>
|
|
14
|
+
DB_USER: z.string(),
|
|
15
|
+
DB_PASSWORD: z.string(),
|
|
16
|
+
DB_NAME: z.string(),
|
|
17
|
+
DB_PORT: z.string().transform(Number),
|
|
18
|
+
<%_ } else if (database === 'PostgreSQL') { -%>
|
|
19
|
+
DB_USER: z.string(),
|
|
20
|
+
DB_PASSWORD: z.string(),
|
|
21
|
+
DB_NAME: z.string(),
|
|
22
|
+
DB_PORT: z.string().transform(Number),
|
|
23
|
+
<%_ } else if (database === 'MongoDB') { -%>
|
|
24
|
+
DB_NAME: z.string(),
|
|
25
|
+
DB_PORT: z.string().transform(Number),
|
|
26
|
+
<%_ } -%>
|
|
27
|
+
<%_ } -%>
|
|
28
|
+
<%_ if (caching === 'Redis') { -%>
|
|
29
|
+
REDIS_HOST: z.string(),
|
|
30
|
+
REDIS_PORT: z.string().transform(Number),
|
|
31
|
+
REDIS_PASSWORD: z.string().optional(),
|
|
32
|
+
<%_ } -%>
|
|
33
|
+
<%_ if (communication === 'Kafka') { -%>
|
|
34
|
+
KAFKA_BROKER: z.string(),
|
|
35
|
+
<%_ } -%>
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
const _env = envSchema.safeParse(process.env);
|
|
39
|
+
|
|
40
|
+
if (!_env.success) {
|
|
41
|
+
logger.error('ā Invalid environment variables:', _env.error.format());
|
|
42
|
+
process.exit(1);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export const env = _env.data;
|
|
@@ -14,5 +14,8 @@ export const userResolvers = {
|
|
|
14
14
|
const user = await userController.createUser({ name, email });
|
|
15
15
|
return user;
|
|
16
16
|
}
|
|
17
|
-
}
|
|
17
|
+
}<%_ if (database === 'MongoDB') { -%>,
|
|
18
|
+
User: {
|
|
19
|
+
id: (parent: { id?: string; _id?: unknown }) => parent.id || parent._id
|
|
20
|
+
}<%_ } %>
|
|
18
21
|
};
|
|
@@ -1,9 +1,9 @@
|
|
|
1
|
+
import { env } from '@/config/env';
|
|
1
2
|
import express, { Request, Response } from 'express';
|
|
2
3
|
import cors from 'cors';
|
|
3
4
|
import helmet from 'helmet';
|
|
4
5
|
import hpp from 'hpp';
|
|
5
6
|
import rateLimit from 'express-rate-limit';
|
|
6
|
-
import dotenv from 'dotenv';
|
|
7
7
|
import logger from '@/utils/logger';
|
|
8
8
|
import morgan from 'morgan';
|
|
9
9
|
import { errorMiddleware } from '@/utils/error.middleware';
|
|
@@ -23,10 +23,8 @@ import { typeDefs, resolvers } from '@/graphql';
|
|
|
23
23
|
import { gqlContext, MyContext } from '@/graphql/context';
|
|
24
24
|
<% } -%>
|
|
25
25
|
|
|
26
|
-
dotenv.config();
|
|
27
|
-
|
|
28
26
|
const app = express();
|
|
29
|
-
const port =
|
|
27
|
+
const port = env.PORT;
|
|
30
28
|
|
|
31
29
|
// Security Middleware
|
|
32
30
|
<%_ if (communication === 'GraphQL') { -%>
|
|
@@ -111,15 +109,13 @@ const startServer = async () => {
|
|
|
111
109
|
app.listen(port, () => {
|
|
112
110
|
logger.info(`Server running on port ${port}`);
|
|
113
111
|
<%_ if (communication === 'Kafka') { -%>
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
});
|
|
120
|
-
} catch (err) {
|
|
112
|
+
const kafkaService = new KafkaService();
|
|
113
|
+
kafkaService.connect().then(() => {
|
|
114
|
+
logger.info('Kafka connected');
|
|
115
|
+
kafkaService.sendMessage('test-topic', 'Hello Kafka from MVC TS!');
|
|
116
|
+
}).catch(err => {
|
|
121
117
|
logger.error('Failed to connect to Kafka:', err);
|
|
122
|
-
}
|
|
118
|
+
});
|
|
123
119
|
<%_ } -%>
|
|
124
120
|
});
|
|
125
121
|
};
|
|
@@ -3,7 +3,8 @@ import logger from '@/utils/logger';
|
|
|
3
3
|
import { ApiError } from '@/errors/ApiError';
|
|
4
4
|
import { HTTP_STATUS } from '@/utils/httpCodes';
|
|
5
5
|
|
|
6
|
-
|
|
6
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
7
|
+
export const errorMiddleware = (err: Error, req: Request, res: Response, next: unknown) => {
|
|
7
8
|
let error = err;
|
|
8
9
|
|
|
9
10
|
if (!(error instanceof ApiError)) {
|