nodejs-quickstart-structure 1.16.2 → 1.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/CHANGELOG.md +25 -0
  2. package/LICENSE +15 -0
  3. package/README.md +144 -135
  4. package/bin/index.js +92 -89
  5. package/lib/generator.js +3 -2
  6. package/lib/modules/app-setup.js +49 -12
  7. package/lib/modules/caching-setup.js +1 -1
  8. package/lib/modules/config-files.js +32 -3
  9. package/lib/modules/database-setup.js +2 -2
  10. package/lib/modules/kafka-setup.js +8 -8
  11. package/lib/prompts.js +16 -1
  12. package/package.json +14 -2
  13. package/templates/clean-architecture/ts/src/index.ts.ejs +1 -2
  14. package/templates/common/.cursorrules.ejs +1 -1
  15. package/templates/common/.env.example.ejs +1 -1
  16. package/templates/common/.gitlab-ci.yml.ejs +55 -4
  17. package/templates/common/Dockerfile +12 -2
  18. package/templates/common/Jenkinsfile.ejs +32 -21
  19. package/templates/common/README.md.ejs +19 -9
  20. package/templates/common/SECURITY.md +20 -0
  21. package/templates/common/_github/workflows/{ci.yml → ci.yml.ejs} +13 -7
  22. package/templates/common/_github/workflows/security.yml.ejs +36 -0
  23. package/templates/common/_husky/pre-commit +4 -0
  24. package/templates/common/caching/js/redisClient.spec.js.ejs +0 -2
  25. package/templates/common/docker-compose.yml.ejs +13 -20
  26. package/templates/common/ecosystem.config.js.ejs +1 -1
  27. package/templates/common/jest.config.js.ejs +1 -0
  28. package/templates/common/jest.e2e.config.js.ejs +8 -0
  29. package/templates/common/kafka/js/config/kafka.js +2 -1
  30. package/templates/common/kafka/js/config/kafka.spec.js.ejs +6 -0
  31. package/templates/common/kafka/ts/config/kafka.spec.ts.ejs +6 -0
  32. package/templates/common/kafka/ts/config/kafka.ts +2 -1
  33. package/templates/common/package.json.ejs +14 -9
  34. package/templates/common/prompts/add-feature.md.ejs +1 -1
  35. package/templates/common/prompts/project-context.md.ejs +1 -1
  36. package/templates/common/scripts/run-e2e.js.ejs +63 -0
  37. package/templates/common/sonar-project.properties.ejs +27 -0
  38. package/templates/common/src/tests/e2e/e2e.users.test.js.ejs +49 -0
  39. package/templates/common/src/tests/e2e/e2e.users.test.ts.ejs +49 -0
  40. package/templates/mvc/js/src/index.js.ejs +1 -1
@@ -73,7 +73,7 @@ export const renderErrorMiddleware = async (templatePath, targetDir, config) =>
73
73
  const specExt = language === 'TypeScript' ? 'ts' : 'js';
74
74
  const specTemplatePath = path.join(templatePath, '../../common/src/utils', `errorMiddleware.spec.${specExt}.ejs`);
75
75
  if (await fs.pathExists(specTemplatePath)) {
76
- const testUtilsDir = path.join(targetDir, 'tests', 'utils');
76
+ const testUtilsDir = path.join(targetDir, 'tests', 'unit', 'utils');
77
77
  await fs.ensureDir(testUtilsDir);
78
78
  const specContent = ejs.render(await fs.readFile(specTemplatePath, 'utf-8'), config);
79
79
  await fs.writeFile(path.join(testUtilsDir, `errorMiddleware.spec.${specExt}`), specContent);
@@ -89,7 +89,7 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
89
89
  const userControllerSpecName = language === 'TypeScript' ? 'userController.spec.ts' : 'userController.spec.js';
90
90
 
91
91
  const userControllerPath = path.join(targetDir, 'src/controllers', userControllerName);
92
- const userControllerSpecPath = path.join(targetDir, 'tests/controllers', userControllerSpecName);
92
+ const userControllerSpecPath = path.join(targetDir, 'tests/unit/controllers', userControllerSpecName);
93
93
 
94
94
  const userControllerTemplate = path.join(templatePath, 'src/controllers', `${userControllerName}.ejs`);
95
95
  const userControllerSpecTemplate = path.join(templatePath, 'src/controllers', `${userControllerSpecName}.ejs`);
@@ -101,7 +101,7 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
101
101
  }
102
102
 
103
103
  if (await fs.pathExists(userControllerSpecTemplate)) {
104
- await fs.ensureDir(path.join(targetDir, 'tests/controllers'));
104
+ await fs.ensureDir(path.join(targetDir, 'tests/unit/controllers'));
105
105
  const content = ejs.render(await fs.readFile(userControllerSpecTemplate, 'utf-8'), { ...config });
106
106
  await fs.writeFile(userControllerSpecPath, content);
107
107
  await fs.remove(path.join(targetDir, 'src/controllers', `${userControllerSpecName}.ejs`));
@@ -113,7 +113,7 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
113
113
  const repoSpecName = language === 'TypeScript' ? 'UserRepository.spec.ts' : 'UserRepository.spec.js';
114
114
 
115
115
  const repoPath = path.join(targetDir, 'src/infrastructure/repositories', repoName);
116
- const repoSpecPath = path.join(targetDir, 'tests/infrastructure/repositories', repoSpecName);
116
+ const repoSpecPath = path.join(targetDir, 'tests/unit/infrastructure/repositories', repoSpecName);
117
117
 
118
118
  const repoTemplate = path.join(templatePath, 'src/infrastructure/repositories', `${repoName}.ejs`);
119
119
  const repoSpecTemplate = path.join(templatePath, 'src/infrastructure/repositories', `${repoSpecName}.ejs`);
@@ -124,7 +124,7 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
124
124
  await fs.remove(path.join(targetDir, 'src/infrastructure/repositories', `${repoName}.ejs`));
125
125
  }
126
126
  if (await fs.pathExists(repoSpecTemplate)) {
127
- await fs.ensureDir(path.join(targetDir, 'tests/infrastructure/repositories'));
127
+ await fs.ensureDir(path.join(targetDir, 'tests/unit/infrastructure/repositories'));
128
128
  const content = ejs.render(await fs.readFile(repoSpecTemplate, 'utf-8'), { ...config });
129
129
  await fs.writeFile(repoSpecPath, content);
130
130
  await fs.remove(path.join(targetDir, 'src/infrastructure/repositories', `${repoSpecName}.ejs`));
@@ -134,7 +134,7 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
134
134
  const controllerSpecName = language === 'TypeScript' ? 'userController.spec.ts' : 'userController.spec.js';
135
135
 
136
136
  const controllerPath = path.join(targetDir, 'src/interfaces/controllers', controllerName);
137
- const controllerSpecPath = path.join(targetDir, 'tests/interfaces/controllers', controllerSpecName);
137
+ const controllerSpecPath = path.join(targetDir, 'tests/unit/interfaces/controllers', controllerSpecName);
138
138
 
139
139
  const controllerTemplate = path.join(templatePath, 'src/interfaces/controllers', `${controllerName}.ejs`);
140
140
  const controllerSpecTemplate = path.join(templatePath, 'src/interfaces/controllers', `${controllerSpecName}.ejs`);
@@ -146,7 +146,7 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
146
146
  }
147
147
 
148
148
  if (await fs.pathExists(controllerSpecTemplate)) {
149
- await fs.ensureDir(path.join(targetDir, 'tests/interfaces/controllers'));
149
+ await fs.ensureDir(path.join(targetDir, 'tests/unit/interfaces/controllers'));
150
150
  const content = ejs.render(await fs.readFile(controllerSpecTemplate, 'utf-8'), { ...config });
151
151
  await fs.writeFile(controllerSpecPath, content);
152
152
  await fs.remove(path.join(targetDir, 'src/interfaces/controllers', `${controllerSpecName}.ejs`));
@@ -191,9 +191,9 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
191
191
  // Render health route spec template
192
192
  const healthSpecTemplatePath = path.join(templatePath, '../../common/health', healthExt, `healthRoute.spec.${healthExt}.ejs`);
193
193
  if (await fs.pathExists(healthSpecTemplatePath)) {
194
- let testRouteDestDir = path.join(targetDir, 'tests', 'routes');
194
+ let testRouteDestDir = path.join(targetDir, 'tests', 'unit', 'routes');
195
195
  if (architecture === 'Clean Architecture') {
196
- testRouteDestDir = path.join(targetDir, 'tests', 'interfaces', 'routes');
196
+ testRouteDestDir = path.join(targetDir, 'tests', 'unit', 'interfaces', 'routes');
197
197
  }
198
198
  await fs.ensureDir(testRouteDestDir);
199
199
  const specContent = ejs.render(await fs.readFile(healthSpecTemplatePath, 'utf-8'), config);
@@ -215,13 +215,40 @@ export const renderDynamicComponents = async (templatePath, targetDir, config) =
215
215
  // Render graceful shutdown spec template
216
216
  const shutdownSpecTemplatePath = path.join(templatePath, '../../common/shutdown', shutdownExt, `gracefulShutdown.spec.${shutdownExt}.ejs`);
217
217
  if (await fs.pathExists(shutdownSpecTemplatePath)) {
218
- const testUtilsDestDir = path.join(targetDir, 'tests', 'utils');
218
+ const testUtilsDestDir = path.join(targetDir, 'tests', 'unit', 'utils');
219
219
  await fs.ensureDir(testUtilsDestDir);
220
220
  const specContent = ejs.render(await fs.readFile(shutdownSpecTemplatePath, 'utf-8'), config);
221
221
  await fs.writeFile(path.join(testUtilsDestDir, `gracefulShutdown.spec.${shutdownExt}`), specContent);
222
222
  }
223
223
  }
224
224
 
225
+ // Advanced E2E Testing Generation
226
+ const e2eExt = language === 'TypeScript' ? 'ts' : 'js';
227
+ const e2eTemplatePath = path.join(templatePath, '../../common/src/tests/e2e', `e2e.users.test.${e2eExt}.ejs`);
228
+
229
+ if (await fs.pathExists(e2eTemplatePath)) {
230
+ let e2eDestDir = path.join(targetDir, 'tests', 'e2e');
231
+ await fs.ensureDir(e2eDestDir);
232
+
233
+ const e2eContent = ejs.render(await fs.readFile(e2eTemplatePath, 'utf-8'), { ...config });
234
+ await fs.writeFile(path.join(e2eDestDir, `e2e.users.test.${e2eExt}`), e2eContent);
235
+ }
236
+
237
+ // E2E Test Orchestrator Generation
238
+ const e2eOrchestratorTemplatePath = path.join(templatePath, '../../common/scripts', 'run-e2e.js.ejs');
239
+ if (await fs.pathExists(e2eOrchestratorTemplatePath)) {
240
+ let scriptsDestDir = path.join(targetDir, 'scripts');
241
+ await fs.ensureDir(scriptsDestDir);
242
+
243
+ const orchestratorContent = ejs.render(await fs.readFile(e2eOrchestratorTemplatePath, 'utf-8'), { ...config });
244
+ await fs.writeFile(path.join(scriptsDestDir, 'run-e2e.js'), orchestratorContent);
245
+
246
+ // Cleanup the raw ejs copy in target
247
+ if (await fs.pathExists(path.join(scriptsDestDir, 'run-e2e.js.ejs'))) {
248
+ await fs.remove(path.join(scriptsDestDir, 'run-e2e.js.ejs'));
249
+ }
250
+ }
251
+
225
252
  // GraphQL Setup
226
253
  if (config.communication === 'GraphQL') {
227
254
  const ext = language === 'TypeScript' ? 'ts' : 'js';
@@ -342,13 +369,23 @@ export const processAllTests = async (targetDir, config) => {
342
369
  await processDir(itemPath);
343
370
  } else if (itemPath.endsWith('.spec.ts') ||
344
371
  itemPath.endsWith('.spec.js') ||
372
+ itemPath.endsWith('.test.ts') ||
373
+ itemPath.endsWith('.test.js') ||
345
374
  itemPath.endsWith('.spec.ts.ejs') ||
346
- itemPath.endsWith('.spec.js.ejs')) {
375
+ itemPath.endsWith('.spec.js.ejs') ||
376
+ itemPath.endsWith('.test.ts.ejs') ||
377
+ itemPath.endsWith('.test.js.ejs')) {
347
378
  const relativePath = path.relative(srcDir, itemPath);
348
379
 
349
380
  const cleanRelativePath = relativePath.replace(/\.ejs$/, '');
350
381
 
351
- const targetTestPath = path.join(testsDir, cleanRelativePath);
382
+ // Exclude e2e if it accidentally falls here, as it's processed separately
383
+ if (cleanRelativePath.includes('e2e')) {
384
+ await fs.remove(itemPath);
385
+ continue;
386
+ }
387
+
388
+ const targetTestPath = path.join(testsDir, 'unit', cleanRelativePath);
352
389
 
353
390
  await fs.ensureDir(path.dirname(targetTestPath));
354
391
 
@@ -64,7 +64,7 @@ export const setupCaching = async (templatesDir, targetDir, config) => {
64
64
  const specLoggerPath = architecture === 'Clean Architecture' ? '@/infrastructure/log/logger' : '@/utils/logger';
65
65
  const specRedisPath = architecture === 'Clean Architecture' ? '@/infrastructure/caching/redisClient' : '@/config/redisClient';
66
66
  const specContent = ejs.render(specTemplate, { ...config, loggerPath: specLoggerPath, redisClientPath: specRedisPath });
67
- const specTarget = cacheTarget.replace(`${path.sep}src${path.sep}`, `${path.sep}tests${path.sep}`).replace(`.${langExt}`, `.spec.${langExt}`);
67
+ const specTarget = cacheTarget.replace(`${path.sep}src${path.sep}`, `${path.sep}tests${path.sep}unit${path.sep}`).replace(`.${langExt}`, `.spec.${langExt}`);
68
68
  await fs.ensureDir(path.dirname(specTarget));
69
69
  await fs.writeFile(specTarget, specContent);
70
70
  }
@@ -47,6 +47,25 @@ export const renderProfessionalConfig = async (templatesDir, targetDir, config)
47
47
  const jestTemplate = await fs.readFile(path.join(templatesDir, 'common', 'jest.config.js.ejs'), 'utf-8');
48
48
  const jestContent = ejs.render(jestTemplate, { ...config });
49
49
  await fs.writeFile(path.join(targetDir, 'jest.config.js'), jestContent);
50
+
51
+ // E2E Config
52
+ const jestE2eTemplate = await fs.readFile(path.join(templatesDir, 'common', 'jest.e2e.config.js.ejs'), 'utf-8');
53
+ const jestE2eContent = ejs.render(jestE2eTemplate, { ...config });
54
+ await fs.writeFile(path.join(targetDir, 'jest.e2e.config.js'), jestE2eContent);
55
+
56
+ // 1. Setup Husky pre-commit (Always for Professional Standard)
57
+ const huskyDir = path.join(targetDir, '.husky');
58
+ await fs.ensureDir(huskyDir);
59
+ await fs.copy(path.join(templatesDir, 'common', '_husky', 'pre-commit'), path.join(huskyDir, 'pre-commit'));
60
+
61
+ // 2. Enterprise Security Hardening (Optional)
62
+ if (config.includeSecurity) {
63
+ await fs.copy(path.join(templatesDir, 'common', 'SECURITY.md'), path.join(targetDir, 'SECURITY.md'));
64
+
65
+ const sonarTemplate = await fs.readFile(path.join(templatesDir, 'common', 'sonar-project.properties.ejs'), 'utf-8');
66
+ const sonarContent = ejs.render(sonarTemplate, { ...config });
67
+ await fs.writeFile(path.join(targetDir, 'sonar-project.properties'), sonarContent);
68
+ }
50
69
  };
51
70
 
52
71
  export const renderAiNativeFiles = async (templatesDir, targetDir, config) => {
@@ -75,10 +94,20 @@ export const renderAiNativeFiles = async (templatesDir, targetDir, config) => {
75
94
  };
76
95
 
77
96
  export const setupCiCd = async (templatesDir, targetDir, config) => {
78
- const { ciProvider } = config;
97
+ const { ciProvider, includeSecurity } = config;
79
98
  if (ciProvider === 'GitHub Actions') {
80
- await fs.ensureDir(path.join(targetDir, '.github/workflows'));
81
- await fs.copy(path.join(templatesDir, 'common', '_github/workflows/ci.yml'), path.join(targetDir, '.github/workflows/ci.yml'));
99
+ const workflowsDir = path.join(targetDir, '.github/workflows');
100
+ await fs.ensureDir(workflowsDir);
101
+
102
+ const ciTemplate = await fs.readFile(path.join(templatesDir, 'common', '_github/workflows/ci.yml.ejs'), 'utf-8');
103
+ const ciContent = ejs.render(ciTemplate, { ...config });
104
+ await fs.writeFile(path.join(workflowsDir, 'ci.yml'), ciContent);
105
+
106
+ if (includeSecurity) {
107
+ const securityTemplate = await fs.readFile(path.join(templatesDir, 'common', '_github/workflows/security.yml.ejs'), 'utf-8');
108
+ const securityContent = ejs.render(securityTemplate, { ...config });
109
+ await fs.writeFile(path.join(workflowsDir, 'security.yml'), securityContent);
110
+ }
82
111
  } else if (ciProvider === 'Jenkins') {
83
112
  const jenkinsTemplate = await fs.readFile(path.join(templatesDir, 'common', 'Jenkinsfile.ejs'), 'utf-8');
84
113
  const jenkinsContent = ejs.render(jenkinsTemplate, { ...config });
@@ -67,7 +67,7 @@ export const setupDatabase = async (templatesDir, targetDir, config) => {
67
67
  if (await fs.pathExists(specTemplateSource)) {
68
68
  const specTemplate = await fs.readFile(specTemplateSource, 'utf-8');
69
69
  const specContent = ejs.render(specTemplate, { ...config });
70
- const specTarget = dbConfigTarget.replace(`${path.sep}src${path.sep}`, `${path.sep}tests${path.sep}`).replace(`.${langExt}`, `.spec.${langExt}`);
70
+ const specTarget = dbConfigTarget.replace(`${path.sep}src${path.sep}`, `${path.sep}tests${path.sep}unit${path.sep}`).replace(`.${langExt}`, `.spec.${langExt}`);
71
71
  await fs.ensureDir(path.dirname(specTarget));
72
72
  await fs.writeFile(specTarget, specContent);
73
73
  }
@@ -108,7 +108,7 @@ export const generateModels = async (templatesDir, targetDir, config) => {
108
108
  if (await fs.pathExists(modelSpecTemplateSource)) {
109
109
  const modelSpecTemplate = await fs.readFile(modelSpecTemplateSource, 'utf-8');
110
110
  const modelSpecContent = ejs.render(modelSpecTemplate, { ...config });
111
- const modelSpecTarget = modelTarget.replace(`${path.sep}src${path.sep}`, `${path.sep}tests${path.sep}`).replace(`.${langExt}`, `.spec.${langExt}`);
111
+ const modelSpecTarget = modelTarget.replace(`${path.sep}src${path.sep}`, `${path.sep}tests${path.sep}unit${path.sep}`).replace(`.${langExt}`, `.spec.${langExt}`);
112
112
  await fs.ensureDir(path.dirname(modelSpecTarget));
113
113
  await fs.writeFile(modelSpecTarget, modelSpecContent);
114
114
  }
@@ -63,9 +63,9 @@ export const setupKafka = async (templatesDir, targetDir, config) => {
63
63
  const specContent = ejs.render(await fs.readFile(kafkaConfigSpecTemplate, 'utf-8'), { ...config });
64
64
  let specTarget;
65
65
  if (architecture === 'MVC') {
66
- specTarget = path.join(targetDir, 'tests', 'config', kafkaConfigSpecFileName);
66
+ specTarget = path.join(targetDir, 'tests', 'unit', 'config', kafkaConfigSpecFileName);
67
67
  } else {
68
- specTarget = path.join(targetDir, 'tests', 'infrastructure', 'config', kafkaConfigSpecFileName);
68
+ specTarget = path.join(targetDir, 'tests', 'unit', 'infrastructure', 'config', kafkaConfigSpecFileName);
69
69
  }
70
70
  await fs.ensureDir(path.dirname(specTarget));
71
71
  await fs.writeFile(specTarget, specContent);
@@ -79,7 +79,7 @@ export const setupKafka = async (templatesDir, targetDir, config) => {
79
79
  if (architecture === 'Clean Architecture') {
80
80
  // Clean Architecture Restructuring
81
81
  await fs.ensureDir(path.join(targetDir, 'src/infrastructure/messaging'));
82
- await fs.ensureDir(path.join(targetDir, 'tests/infrastructure/messaging'));
82
+ await fs.ensureDir(path.join(targetDir, 'tests/unit/infrastructure/messaging'));
83
83
  await fs.ensureDir(path.join(targetDir, 'src/infrastructure/config'));
84
84
 
85
85
  const serviceExt = language === 'TypeScript' ? 'ts' : 'js';
@@ -93,7 +93,7 @@ export const setupKafka = async (templatesDir, targetDir, config) => {
93
93
  if (await fs.pathExists(path.join(targetDir, `src/services/kafkaService.spec.${serviceExt}`))) {
94
94
  await fs.move(
95
95
  path.join(targetDir, `src/services/kafkaService.spec.${serviceExt}`),
96
- path.join(targetDir, `tests/infrastructure/messaging/kafkaClient.spec.${serviceExt}`),
96
+ path.join(targetDir, `tests/unit/infrastructure/messaging/kafkaClient.spec.${serviceExt}`),
97
97
  { overwrite: true }
98
98
  );
99
99
  }
@@ -131,7 +131,7 @@ export const setupKafka = async (templatesDir, targetDir, config) => {
131
131
  const specTemplateSource = path.join(templatesDir, 'common', 'kafka', langExt, 'messaging', `${t.src}.spec.${langExt}.ejs`);
132
132
  if (await fs.pathExists(specTemplateSource)) {
133
133
  const specContent = ejs.render(await fs.readFile(specTemplateSource, 'utf-8'), { ...config, loggerPath });
134
- const specDest = path.join(targetDir, 'tests', `${t.dest}.spec.${langExt}`);
134
+ const specDest = path.join(targetDir, 'tests', 'unit', `${t.dest}.spec.${langExt}`);
135
135
  await fs.ensureDir(path.dirname(specDest));
136
136
  await fs.writeFile(specDest, specContent);
137
137
  }
@@ -162,17 +162,17 @@ export const setupKafka = async (templatesDir, targetDir, config) => {
162
162
  const specTemplateSource = path.join(templatesDir, 'common', 'kafka', langExt, 'messaging', `${t.src}.spec.${langExt}.ejs`);
163
163
  if (await fs.pathExists(specTemplateSource)) {
164
164
  const specContent = ejs.render(await fs.readFile(specTemplateSource, 'utf-8'), { ...config, loggerPath });
165
- const specDest = path.join(targetDir, 'tests', `${t.dest}.spec.${langExt}`);
165
+ const specDest = path.join(targetDir, 'tests', 'unit', `${t.dest}.spec.${langExt}`);
166
166
  await fs.ensureDir(path.dirname(specDest));
167
167
  await fs.writeFile(specDest, specContent);
168
168
  }
169
169
  }
170
170
 
171
171
  if (await fs.pathExists(path.join(targetDir, `src/services/kafkaService.spec.${serviceExt}`))) {
172
- await fs.ensureDir(path.join(targetDir, 'tests/services'));
172
+ await fs.ensureDir(path.join(targetDir, 'tests/unit/services'));
173
173
  await fs.move(
174
174
  path.join(targetDir, `src/services/kafkaService.spec.${serviceExt}`),
175
- path.join(targetDir, `tests/services/kafkaService.spec.${serviceExt}`),
175
+ path.join(targetDir, `tests/unit/services/kafkaService.spec.${serviceExt}`),
176
176
  { overwrite: true }
177
177
  );
178
178
  }
package/lib/prompts.js CHANGED
@@ -77,9 +77,24 @@ export const getProjectDetails = async (options = {}) => {
77
77
  choices: ['None', 'GitHub Actions', 'Jenkins', 'GitLab CI'],
78
78
  default: 'None',
79
79
  when: !options.ciProvider
80
+ },
81
+ {
82
+ type: 'select',
83
+ name: 'includeSecurity',
84
+ message: 'Include Enterprise Security Hardening (Big Tech Standard: Snyk, SonarQube)?',
85
+ choices: ['No', 'Yes'],
86
+ default: "No",
87
+ when: (answers) => !options.includeSecurity && (options.ciProvider || answers.ciProvider) !== 'None'
80
88
  }
81
89
  ];
82
90
 
83
91
  const answers = await inquirer.prompt(questions);
84
- return { ...options, ...answers };
92
+ const result = { ...options, ...answers };
93
+
94
+ // Normalize includeSecurity to boolean if it's a string from the select prompt
95
+ if (typeof result.includeSecurity === 'string') {
96
+ result.includeSecurity = result.includeSecurity === 'Yes';
97
+ }
98
+
99
+ return result;
85
100
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "nodejs-quickstart-structure",
3
- "version": "1.16.2",
3
+ "version": "1.18.0",
4
4
  "type": "module",
5
5
  "description": "The ultimate nodejs quickstart structure CLI to scaffold Node.js microservices with MVC or Clean Architecture",
6
6
  "main": "bin/index.js",
@@ -12,7 +12,12 @@
12
12
  "test:e2e": "npm run test:e2e:windows",
13
13
  "test:e2e:windows": "node scripts/validate-windows.js",
14
14
  "test:e2e:linux": "node scripts/validate-linux.js",
15
- "test:verify:mongo": "node scripts/verify-migration.js"
15
+ "test:verify:mongo": "node scripts/verify-migration.js",
16
+ "docs:dev": "vitepress dev docs",
17
+ "docs:build": "vitepress build docs",
18
+ "docs:preview": "vitepress preview docs",
19
+ "security:check": "npm audit && npm run snyk:test",
20
+ "snyk:test": "snyk test"
16
21
  },
17
22
  "keywords": [
18
23
  "nodejs",
@@ -45,6 +50,13 @@
45
50
  "fs-extra": "^11.3.0",
46
51
  "inquirer": "^13.3.2"
47
52
  },
53
+ "overrides": {
54
+ "esbuild": "^0.25.0"
55
+ },
56
+ "devDependencies": {
57
+ "snyk": "^1.1303.2",
58
+ "vitepress": "^1.0.0-rc.45"
59
+ },
48
60
  "files": [
49
61
  "bin",
50
62
  "lib",
@@ -1,3 +1,4 @@
1
+ import { env } from '@/config/env';
1
2
  import express from 'express';
2
3
  import cors from 'cors';
3
4
  import helmet from 'helmet';
@@ -23,8 +24,6 @@ import { typeDefs, resolvers } from '@/interfaces/graphql';
23
24
  import { gqlContext, MyContext } from '@/interfaces/graphql/context';
24
25
  <%_ } -%>
25
26
 
26
- import { env } from '@/config/env';
27
-
28
27
  const app = express();
29
28
  const port = env.PORT;
30
29
 
@@ -20,7 +20,7 @@ When indexing or searching the workspace, ignore the following paths to prevent
20
20
 
21
21
  ### 1. Testing First
22
22
  - Every new service or controller method MUST have a test file in `tests/`.
23
- - **Coverage Gate**: Aim for > 70% coverage (Statement/Line/Function/Branch).
23
+ - **Coverage Gate**: Aim for > 80% coverage (Statement/Line/Function/Branch).
24
24
  - **Format**: Use Jest with the AAA (Arrange, Act, Assert) pattern.
25
25
  - **Isolation**: Mock external dependencies (DB, Redis, etc.) using `jest.mock()`.
26
26
 
@@ -28,7 +28,7 @@ DB_NAME=<%= dbName %>
28
28
 
29
29
  <%_ if (communication === 'Kafka') { -%>
30
30
  # Communication
31
- KAFKA_BROKER=localhost:9092
31
+ KAFKA_BROKER=localhost:9093
32
32
  KAFKA_CLIENT_ID=<%= projectName %>
33
33
  KAFKA_GROUP_ID=<%= projectName %>-group
34
34
  <%_ } -%>
@@ -4,6 +4,10 @@ variables:
4
4
  stages:
5
5
  - lint
6
6
  - test
7
+ <% if (includeSecurity) { %>
8
+ - security
9
+ - quality
10
+ <% } %>
7
11
  - build
8
12
 
9
13
  cache:
@@ -12,22 +16,69 @@ cache:
12
16
 
13
17
  install_dependencies:
14
18
  stage: .pre
15
- image: node:22-alpine
19
+ image: node:22-slim
16
20
  script:
17
21
  - npm ci
18
22
 
19
23
  lint_code:
20
24
  stage: lint
21
- image: node:22-alpine
25
+ image: node:22-slim
22
26
  script:
23
27
  - npm run lint
24
28
 
25
- run_tests:
29
+ run_unit_tests:
26
30
  stage: test
27
- image: node:22-alpine
31
+ image: node:22-slim
28
32
  script:
29
33
  - npm run test:coverage
30
34
 
35
+ run_e2e_tests:
36
+ stage: test
37
+ image: docker:20.10.16
38
+ services:
39
+ - docker:20.10.16-dind
40
+ script:
41
+ - apk add --no-cache nodejs npm docker-compose
42
+ - npm ci
43
+ - npm run test:e2e
44
+ <% if (includeSecurity) { %>
45
+ snyk_scan:
46
+ stage: security
47
+ image: node:22-alpine
48
+ script:
49
+ - npm ci
50
+ - npm run snyk:test
51
+ only:
52
+ - main
53
+
54
+ snyk_container_scan:
55
+ stage: security
56
+ image: docker:20.10.16
57
+ services:
58
+ - docker:20.10.16-dind
59
+ script:
60
+ - apk add --no-cache nodejs npm
61
+ - npm install -g snyk
62
+ - docker build -t <%= projectName %>:latest .
63
+ - snyk container test <%= projectName %>:latest --file=Dockerfile --severity-threshold=high --skip-unused-projects
64
+
65
+ sonarqube_check:
66
+ stage: quality
67
+ image:
68
+ name: sonarsource/sonar-scanner-cli:latest
69
+ entrypoint: [""]
70
+ variables:
71
+ SONAR_USER_HOME: "${CI_PROJECT_DIR}/.sonar"
72
+ GIT_DEPTH: "0"
73
+ cache:
74
+ key: "${CI_JOB_NAME}"
75
+ paths:
76
+ - .sonar/cache
77
+ script:
78
+ - sonar-scanner
79
+ only:
80
+ - main
81
+ <% } %>
31
82
  build_app:
32
83
  stage: build
33
84
  image: node:22-alpine
@@ -1,7 +1,12 @@
1
1
  # ==========================================
2
2
  # Stage 1: Builder
3
3
  # ==========================================
4
- FROM node:22-alpine AS builder
4
+ FROM node:22.22.2-trixie-slim AS builder
5
+
6
+ # Upgrade OS packages to fix upstream vulnerabilities (Snyk-detected)
7
+ RUN apt-get update && apt-get upgrade -y && \
8
+ apt-get install -y --no-install-recommends ca-certificates && \
9
+ rm -rf /var/lib/apt/lists/*
5
10
 
6
11
  WORKDIR /app
7
12
  ENV NPM_CONFIG_UPDATE_NOTIFIER=false
@@ -20,7 +25,12 @@ COPY . .
20
25
  # ==========================================
21
26
  # Stage 2: Production
22
27
  # ==========================================
23
- FROM node:22-alpine AS production
28
+ FROM node:22.22.2-trixie-slim AS production
29
+
30
+ # Upgrade OS packages to fix upstream vulnerabilities (Snyk-detected)
31
+ RUN apt-get update && apt-get upgrade -y && \
32
+ apt-get install -y --no-install-recommends ca-certificates && \
33
+ rm -rf /var/lib/apt/lists/*
24
34
 
25
35
  WORKDIR /app
26
36
 
@@ -19,35 +19,46 @@ pipeline {
19
19
  }
20
20
  }
21
21
 
22
- stage('Test') {
22
+ stage('Unit Test') {
23
23
  steps {
24
24
  sh 'npm run test:coverage'
25
25
  }
26
26
  }
27
27
 
28
- // stage('Build') {
29
- // steps {
30
- // sh 'npm run build'
31
- // }
32
- // }
28
+ stage('E2E Test') {
29
+ steps {
30
+ sh 'npm run test:e2e'
31
+ }
32
+ }
33
33
 
34
- // stage('SonarQube Analysis') {
35
- // environment {
36
- // scannerHome = tool 'SonarScanner'
37
- // }
38
- // steps {
39
- // withSonarQubeEnv('SonarQube') {
40
- // sh "${scannerHome}/bin/sonar-scanner"
41
- // }
42
- // }
43
- // }
34
+ <% if (includeSecurity) { %>
35
+ stage('SonarQube Analysis') {
36
+ environment {
37
+ scannerHome = tool 'SonarScanner'
38
+ }
39
+ steps {
40
+ withSonarQubeEnv('SonarQube') {
41
+ sh "${scannerHome}/bin/sonar-scanner"
42
+ }
43
+ }
44
+ }
44
45
 
45
- // stage('Security Scan') {
46
- // steps {
47
- // sh 'npm audit --audit-level=high'
48
- // }
49
- // }
46
+ stage('Security Scan') {
47
+ steps {
48
+ sh 'npm audit --audit-level=high'
49
+ sh 'npm run snyk:test'
50
+ }
51
+ }
50
52
 
53
+ stage('Snyk Container Scan') {
54
+ steps {
55
+ script {
56
+ sh 'docker build -t <%= projectName %>:latest .'
57
+ sh 'snyk container test <%= projectName %>:latest --file=Dockerfile --severity-threshold=high --skip-unused-projects'
58
+ }
59
+ }
60
+ }
61
+ <% } %>
51
62
  // stage('Docker Build & Push') {
52
63
  // steps {
53
64
  // script {
@@ -3,6 +3,10 @@
3
3
  ![Node.js](https://img.shields.io/badge/Node.js-18%2B-green.svg)
4
4
  ![License](https://img.shields.io/badge/License-ISC-blue.svg)
5
5
  <% if (language === 'TypeScript') { %>![TypeScript](https://img.shields.io/badge/Language-TypeScript-blue.svg)<% } else { %>![JavaScript](https://img.shields.io/badge/Language-JavaScript-yellow.svg)<% } %>
6
+ <% if (includeSecurity) { %>
7
+ [![Snyk Vulnerabilities](https://img.shields.io/snyk/vulnerabilities/github/yourusername/<%= projectName %>?style=flat-square)](https://snyk.io/)
8
+ [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=<%= projectName %>&metric=alert_status)](https://sonarcloud.io/)
9
+ <% } %>
6
10
 
7
11
  A production-ready Node.js microservice generated with **<%= architecture %>** and **<%= database %>**.
8
12
  This project comes pre-configured with industry-standard tooling for **Code Quality**, **Testing**, and **Security**.
@@ -15,6 +19,7 @@ This project comes pre-configured with industry-standard tooling for **Code Qual
15
19
  - **Quality**: Eslint, Prettier, Husky, Lint-Staged.
16
20
  - **Testing**: Jest (Unit & Integration).
17
21
  - **DevOps**: Multi-stage Docker build, CI/CD ready.
22
+ <% if (includeSecurity) { %>- **Enterprise Security**: Snyk SCA, SonarCloud SAST.<% } %>
18
23
 
19
24
  ## 🔄 CI/CD Pipeline
20
25
  <%_ if (ciProvider === 'GitHub Actions') { -%>
@@ -49,19 +54,20 @@ CI/CD is not currently configured, but the project is ready for integration.
49
54
 
50
55
  ### 2. Quick Start
51
56
  ```bash
52
- # Initialize Git (Required for Husky)
57
+ # Initialize Git (Mandatory for Husky hooks)
53
58
  git init
54
59
 
55
60
  # Install dependencies
56
61
  npm install
57
62
 
58
- # Setup Git Hooks (Husky)
59
- npm run prepare
63
+ # Troubleshooting Husky (if skip git init)
64
+ # npx husky install
60
65
 
61
66
  # Start Infrastructure (DB, etc.)
62
67
  docker-compose up -d
63
68
 
64
69
  # Run Development Server
70
+ docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> kafka<% } %>
65
71
  npm run dev
66
72
  ```
67
73
 
@@ -124,7 +130,7 @@ This project demonstrates a production-ready Kafka flow:
124
130
  2. **Consumer**: `WelcomeEmailConsumer` listens to `user-topic` and simulates sending an email.
125
131
 
126
132
  ### How to verify:
127
- 1. Ensure infrastructure is running: `docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> zookeeper kafka<% } %>`
133
+ 1. Ensure infrastructure is running: `docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> kafka<% } %>`
128
134
  2. Start the app: `npm run dev`
129
135
  3. Trigger an event by creating a user (via Postman or curl):
130
136
  ```bash
@@ -167,7 +173,7 @@ To run the Node.js application locally while using Docker for the infrastructure
167
173
 
168
174
  ```bash
169
175
  # Start infrastructure
170
- docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> zookeeper kafka<% } %>
176
+ docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> kafka<% } %>
171
177
 
172
178
  # Start the application
173
179
  npm run dev
@@ -215,7 +221,7 @@ npm install
215
221
  2. **Start Infrastructure (DB, Redis, Kafka, etc.) in the background**
216
222
  *(This specifically starts the background services without running the application inside Docker, allowing PM2 to handle it).*
217
223
  ```bash
218
- docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> zookeeper kafka<% } %>
224
+ docker-compose up -d<% if (database !== 'None') { %> db<% } %><% if (caching === 'Redis') { %> redis<% } %><% if (communication === 'Kafka') { %> kafka<% } %>
219
225
  ```
220
226
  3. **Wait 5-10s** for the database to fully initialize.
221
227
  4. **Deploy the App using PM2 in Cluster Mode**
@@ -241,13 +247,17 @@ docker-compose down
241
247
  - **CORS**: Configured for cross-origin requests.
242
248
  - **Rate Limiting**: Protects against DDoS / Brute-force.
243
249
  - **HPP**: Prevents HTTP Parameter Pollution attacks.
244
-
245
-
250
+ <% if (includeSecurity) { %>
251
+ ### 🛡️ Enterprise Hardening (Big Tech Standard)
252
+ - **Snyk SCA**: Automated dependency vulnerability scanning.
253
+ - **SonarCloud**: Deep static analysis for code quality and security hotspots.
254
+ - **Security Policy**: Standard `SECURITY.md` for vulnerability reporting.
255
+ <% } %>
246
256
  ## 🤖 AI-Native Development
247
257
 
248
258
  This project is "AI-Ready" out of the box. We have pre-configured industry-leading AI context files to bridge the gap between "Generated Code" and "AI-Assisted Development."
249
259
 
250
260
  - **Magic Defaults**: We've automatically tailored your AI context to focus on **<%= projectName %>** and its specific architectural stack (<%= architecture %>, <%= database %>, etc.).
251
- - **Use Cursor?** We've configured **`.cursorrules`** at the root. It enforces project standards (70% coverage, MVC/Clean) directly within the editor.
261
+ - **Use Cursor?** We've configured **`.cursorrules`** at the root. It enforces project standards (80% coverage, MVC/Clean) directly within the editor.
252
262
  - *Pro-tip*: You can customize the `Project Goal` placeholder in `.cursorrules` to help the AI understand your specific business logic!
253
263
  - **Use ChatGPT/Gemini/Claude?** Check the **`prompts/`** directory. It contains highly-specialized Agent Skill templates. You can copy-paste these into any LLM to give it a "Senior Developer" understanding of your codebase immediately.