@devbro/pashmak 0.1.43 → 0.1.44

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/dist/app/console/generate/GenerateApiDocsCommand.d.mts +4 -1
  2. package/dist/app/console/generate/GenerateApiDocsCommand.mjs +56 -4
  3. package/dist/app/console/generate/GenerateApiDocsCommand.mjs.map +1 -1
  4. package/dist/app/console/project/CreateProjectCommand.mjs +27 -2
  5. package/dist/app/console/project/CreateProjectCommand.mjs.map +1 -1
  6. package/dist/app/console/project/base_project/package.json.tpl +7 -2
  7. package/dist/app/console/project/base_project/src/app/queues/index.ts.tpl +9 -0
  8. package/dist/app/console/project/base_project/src/config/caches.ts.tpl +2 -4
  9. package/dist/app/console/project/base_project/src/config/databases.ts.tpl +7 -9
  10. package/dist/app/console/project/base_project/src/config/default.mts.tpl +17 -3
  11. package/dist/app/console/project/base_project/src/config/loggers.ts.tpl +6 -8
  12. package/dist/app/console/project/base_project/src/config/mailer.ts.tpl +15 -16
  13. package/dist/app/console/project/base_project/src/config/queues.ts.tpl +2 -4
  14. package/dist/app/console/project/base_project/src/config/storages.ts.tpl +5 -4
  15. package/dist/app/console/project/base_project/src/helpers/QueryKit.ts.tpl +175 -0
  16. package/dist/app/console/project/base_project/src/helpers/index.ts.tpl +96 -0
  17. package/dist/app/console/project/base_project/src/helpers/validation.ts.tpl +26 -0
  18. package/dist/app/console/project/base_project/src/initialize.ts.tpl +64 -7
  19. package/dist/app/console/project/base_project/src/middlewares.ts.tpl +1 -1
  20. package/dist/bin/app/console/DefaultCommand.cjs +0 -19
  21. package/dist/bin/app/console/KeyGenerateCommand.cjs +0 -19
  22. package/dist/bin/app/console/StartCommand.cjs +0 -19
  23. package/dist/bin/app/console/generate/GenerateApiDocsCommand.cjs +62 -24
  24. package/dist/bin/app/console/generate/GenerateControllerCommand.cjs +0 -19
  25. package/dist/bin/app/console/generate/index.cjs +62 -24
  26. package/dist/bin/app/console/index.cjs +89 -26
  27. package/dist/bin/app/console/migrate/GenerateMigrateCommand.cjs +0 -19
  28. package/dist/bin/app/console/migrate/MigrateCommand.cjs +0 -19
  29. package/dist/bin/app/console/migrate/MigrateRollbackCommand.cjs +0 -19
  30. package/dist/bin/app/console/migrate/index.cjs +0 -19
  31. package/dist/bin/app/console/project/CreateProjectCommand.cjs +27 -2
  32. package/dist/bin/app/console/queue/GenerateQueueMigrateCommand.cjs +0 -19
  33. package/dist/bin/bin/pashmak_cli.cjs +27 -2
  34. package/dist/bin/cache.cjs +0 -19
  35. package/dist/bin/facades.cjs +0 -19
  36. package/dist/bin/factories.cjs +0 -19
  37. package/dist/bin/index.cjs +96 -29
  38. package/dist/bin/middlewares.cjs +0 -19
  39. package/dist/bin/queue.cjs +0 -19
  40. package/dist/bin/router.cjs +0 -9
  41. package/dist/facades.mjs +0 -19
  42. package/dist/facades.mjs.map +1 -1
  43. package/dist/router.d.mts +1 -3
  44. package/dist/router.mjs +1 -9
  45. package/dist/router.mjs.map +1 -1
  46. package/package.json +12 -11
  47. package/dist/app/console/project/base_project/src/helpers.ts.tpl +0 -28
@@ -1,8 +1,11 @@
+ import * as clipanion from 'clipanion';
  import { Command } from 'clipanion';
 
  declare class GenerateApiDocsCommand extends Command {
  static paths: string[][];
- execute(): Promise<void>;
+ static usage: clipanion.Usage;
+ help: boolean;
+ execute(): Promise<0 | undefined>;
  private extractParameters;
  }
 
@@ -1,9 +1,11 @@
  var __defProp = Object.defineProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
  import { cli, router } from "../../../facades.mjs";
- import { Command } from "clipanion";
+ import { Command, Option } from "clipanion";
  import path from "path";
  import * as fs from "fs/promises";
+ import { config } from "../../../config.mjs";
+ import { Arr } from "@devbro/neko-helper";
  class GenerateApiDocsCommand extends Command {
  static {
  __name(this, "GenerateApiDocsCommand");
@@ -12,7 +14,44 @@ class GenerateApiDocsCommand extends Command {
  [`make`, `apidocs`],
  [`generate`, `apidocs`]
  ];
+ static usage = Command.Usage({
+ category: `Generate`,
+ description: `Generate OpenAPI documentation from routes`,
+ details: `
+ This command generates OpenAPI 3.0 specification documentation by analyzing
+ your application's routes and merging with example files.
+
+ The generated documentation includes:
+ - All registered routes with their HTTP methods
+ - Path parameters extracted from route definitions
+ - Request body schemas for POST, PUT, and PATCH methods
+ - Response schemas
+
+ The command will merge files specified in config.api_docs.merge_files
+ and output the final documentation to config.api_docs.output.
+
+ This command depends on config data. make sure your default config contains the following:
+ api_docs: {
+ merge_files: [
+ path.join(__dirname, '../..', 'private', 'openapi_examples.json'),
+ path.join(__dirname, '../..', 'private', 'openapi_base.json'),
+ path.join(__dirname, '../..', 'private', 'openapi_user_changes.json'),
+ ],
+ output: path.join(__dirname, '../..', 'private', 'openapi.json'),
+ }
+ `,
+ examples: [[`Generate API documentation`, `$0 generate apidocs`]]
+ });
+ help = Option.Boolean(`--help,-h`, false, {
+ description: `Show help message for this command`
+ });
  async execute() {
+ if (this.help) {
+ this.context.stdout.write(
+ this.constructor.usage?.toString() || "No help available\n"
+ );
+ return 0;
+ }
  const rootDir = process.cwd();
  this.context.stdout.write(`Generating OpenAPI documentation...
  `);
@@ -77,9 +116,8 @@ class GenerateApiDocsCommand extends Command {
  }
  }
  }
- const publicDir = path.join(rootDir, "public");
- await fs.mkdir(publicDir, { recursive: true });
- const outputPath = path.join(publicDir, "openapi.json");
+ await fs.mkdir(config.get("private_path"), { recursive: true });
+ const outputPath = path.join(config.get("private_path"), "openapi.json");
  await fs.writeFile(
  outputPath,
  JSON.stringify(openApiSpec, null, 2),
@@ -91,6 +129,20 @@ class GenerateApiDocsCommand extends Command {
  );
  this.context.stdout.write(`Total routes documented: ${routes.length}
  `);
+ let files_to_merge = config.get("api_docs.merge_files");
+ let final_api_docs = {};
+ for (let file_path of files_to_merge) {
+ let file_json = JSON.parse(await fs.readFile(file_path, "utf8"));
+ Arr.deepMerge(final_api_docs, file_json);
+ }
+ await fs.writeFile(
+ config.get("api_docs.output"),
+ JSON.stringify(final_api_docs, null, 2)
+ );
+ this.context.stdout.write(
+ `wrote final open api document to : ${config.get("api_docs.output")}
+ `
+ );
  }
  extractParameters(routePath) {
  const paramRegex = /:([a-zA-Z0-9_]+)/g;
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/app/console/generate/GenerateApiDocsCommand.mts"],"sourcesContent":["import { cli, router } from \"../../../facades.mjs\";\nimport { Command } from \"clipanion\";\nimport path from \"path\";\nimport * as fs from \"fs/promises\";\n\nexport class GenerateApiDocsCommand extends Command {\n static paths = [\n [`make`, `apidocs`],\n [`generate`, `apidocs`],\n ];\n\n async execute() {\n const rootDir = process.cwd();\n\n this.context.stdout.write(`Generating OpenAPI documentation...\\n`);\n\n // Get all routes from the router\n const routes = router().routes;\n\n // Generate OpenAPI 3.0 specification\n const openApiSpec = {\n openapi: \"3.0.0\",\n info: {\n title: \"API Documentation\",\n version: \"1.0.0\",\n description: \"Auto-generated API documentation\",\n },\n servers: [\n {\n url: \"/\",\n description: \"Local server\",\n },\n ],\n paths: {} as Record<string, any>,\n };\n\n // Process each route\n for (const route of routes) {\n const routePath = route.path;\n // Convert route path to OpenAPI format (e.g., /api/:id -> /api/{id})\n const openApiPath = routePath.replace(/:([a-zA-Z0-9_]+)/g, \"{$1}\");\n\n if (!openApiSpec.paths[openApiPath]) {\n openApiSpec.paths[openApiPath] = {};\n }\n\n // Add each HTTP method for this route\n for (const method of route.methods) {\n const lowerMethod = method.toLowerCase();\n\n // Skip HEAD as it's usually auto-generated\n if (lowerMethod === \"head\") {\n continue;\n }\n\n openApiSpec.paths[openApiPath][lowerMethod] = {\n summary: `${method} ${routePath}`,\n description: `Endpoint for ${method} ${routePath}`,\n parameters: this.extractParameters(routePath),\n responses: {\n \"200\": {\n description: \"Successful response\",\n content: {\n \"application/json\": {\n schema: {\n type: \"object\",\n },\n },\n },\n },\n \"500\": {\n description: \"Internal server error\",\n },\n },\n };\n\n // Add request body for POST, PUT, PATCH\n if ([\"post\", \"put\", \"patch\"].includes(lowerMethod)) {\n openApiSpec.paths[openApiPath][lowerMethod].requestBody = {\n required: true,\n content: {\n \"application/json\": {\n schema: {\n type: \"object\",\n },\n },\n },\n };\n }\n }\n }\n\n // Ensure public directory exists\n const publicDir = path.join(rootDir, \"public\");\n await fs.mkdir(publicDir, { recursive: true });\n\n // Write the OpenAPI spec to public/openapi.json\n const outputPath = path.join(publicDir, \"openapi.json\");\n await fs.writeFile(\n outputPath,\n JSON.stringify(openApiSpec, null, 2),\n \"utf-8\",\n );\n\n this.context.stdout.write(\n `OpenAPI documentation generated at: ${outputPath}\\n`,\n );\n this.context.stdout.write(`Total routes documented: ${routes.length}\\n`);\n }\n\n private extractParameters(routePath: string): any[] {\n const paramRegex = /:([a-zA-Z0-9_]+)/g;\n const parameters: any[] = [];\n let match;\n\n while ((match = paramRegex.exec(routePath)) !== null) {\n parameters.push({\n name: match[1],\n in: \"path\",\n required: true,\n schema: {\n type: \"string\",\n },\n description: `Path parameter ${match[1]}`,\n });\n }\n\n return parameters;\n 
}\n}\n\ncli().register(GenerateApiDocsCommand);\n"],"mappings":";;AAAA,SAAS,KAAK,cAAc;AAC5B,SAAS,eAAe;AACxB,OAAO,UAAU;AACjB,YAAY,QAAQ;AAEb,MAAM,+BAA+B,QAAQ;AAAA,EALpD,OAKoD;AAAA;AAAA;AAAA,EAClD,OAAO,QAAQ;AAAA,IACb,CAAC,QAAQ,SAAS;AAAA,IAClB,CAAC,YAAY,SAAS;AAAA,EACxB;AAAA,EAEA,MAAM,UAAU;AACd,UAAM,UAAU,QAAQ,IAAI;AAE5B,SAAK,QAAQ,OAAO,MAAM;AAAA,CAAuC;AAGjE,UAAM,SAAS,OAAO,EAAE;AAGxB,UAAM,cAAc;AAAA,MAClB,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,OAAO;AAAA,QACP,SAAS;AAAA,QACT,aAAa;AAAA,MACf;AAAA,MACA,SAAS;AAAA,QACP;AAAA,UACE,KAAK;AAAA,UACL,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA,OAAO,CAAC;AAAA,IACV;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,MAAM;AAExB,YAAM,cAAc,UAAU,QAAQ,qBAAqB,MAAM;AAEjE,UAAI,CAAC,YAAY,MAAM,WAAW,GAAG;AACnC,oBAAY,MAAM,WAAW,IAAI,CAAC;AAAA,MACpC;AAGA,iBAAW,UAAU,MAAM,SAAS;AAClC,cAAM,cAAc,OAAO,YAAY;AAGvC,YAAI,gBAAgB,QAAQ;AAC1B;AAAA,QACF;AAEA,oBAAY,MAAM,WAAW,EAAE,WAAW,IAAI;AAAA,UAC5C,SAAS,GAAG,MAAM,IAAI,SAAS;AAAA,UAC/B,aAAa,gBAAgB,MAAM,IAAI,SAAS;AAAA,UAChD,YAAY,KAAK,kBAAkB,SAAS;AAAA,UAC5C,WAAW;AAAA,YACT,OAAO;AAAA,cACL,aAAa;AAAA,cACb,SAAS;AAAA,gBACP,oBAAoB;AAAA,kBAClB,QAAQ;AAAA,oBACN,MAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,OAAO;AAAA,cACL,aAAa;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAGA,YAAI,CAAC,QAAQ,OAAO,OAAO,EAAE,SAAS,WAAW,GAAG;AAClD,sBAAY,MAAM,WAAW,EAAE,WAAW,EAAE,cAAc;AAAA,YACxD,UAAU;AAAA,YACV,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,YAAY,KAAK,KAAK,SAAS,QAAQ;AAC7C,UAAM,GAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAG7C,UAAM,aAAa,KAAK,KAAK,WAAW,cAAc;AACtD,UAAM,GAAG;AAAA,MACP;AAAA,MACA,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,MACnC;AAAA,IACF;AAEA,SAAK,QAAQ,OAAO;AAAA,MAClB,uCAAuC,UAAU;AAAA;AAAA,IACnD;AACA,SAAK,QAAQ,OAAO,MAAM,4BAA4B,OAAO,MAAM;AAAA,CAAI;AAAA,EACzE;AAAA,EAEQ,kBAAkB,WAA0B;AAClD,UAAM,aAAa;AACnB,UAAM,aAAoB,CAAC;AAC3B,QAAI;AAEJ,YAAQ,QAAQ,WAAW,KAAK,SAAS,OAAO,MAAM;AACpD,iBAAW,KAAK;AAAA,QACd,MAAM,MAAM,CAAC;AAAA,QACb,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,QAAQ;AAAA,UACN,MAAM;AAAA,QACR;AAAA,QACA,aAAa,kBAAkB,MAAM,CAAC,CAAC;AAAA,MACzC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AACF;AAEA,IAAI,EAAE,SAAS,sBAAsB;","names":[]}
+ {"version":3,"sources":["../../../../src/app/console/generate/GenerateApiDocsCommand.mts"],"sourcesContent":["import { cli, router } from \"../../../facades.mjs\";\nimport { Command, Option } from \"clipanion\";\nimport path from \"path\";\nimport * as fs from \"fs/promises\";\nimport { config } from \"../../../config.mjs\";\nimport { Arr } from \"@devbro/neko-helper\";\n\nexport class GenerateApiDocsCommand extends Command {\n static paths = [\n [`make`, `apidocs`],\n [`generate`, `apidocs`],\n ];\n\n static usage = Command.Usage({\n category: `Generate`,\n description: `Generate OpenAPI documentation from routes`,\n details: `\n This command generates OpenAPI 3.0 specification documentation by analyzing\n your application's routes and merging with example files.\n \n The generated documentation includes:\n - All registered routes with their HTTP methods\n - Path parameters extracted from route definitions\n - Request body schemas for POST, PUT, and PATCH methods\n - Response schemas\n \n The command will merge files specified in config.api_docs.merge_files\n and output the final documentation to config.api_docs.output.\n\n This command depends on config data. make sure your default config contains the following:\n api_docs: {\n merge_files: [\n path.join(__dirname, '../..', 'private', 'openapi_examples.json'),\n path.join(__dirname, '../..', 'private', 'openapi_base.json'),\n path.join(__dirname, '../..', 'private', 'openapi_user_changes.json'),\n ],\n output: path.join(__dirname, '../..', 'private', 'openapi.json'),\n }\n `,\n examples: [[`Generate API documentation`, `$0 generate apidocs`]],\n });\n\n help = Option.Boolean(`--help,-h`, false, {\n description: `Show help message for this command`,\n });\n\n async execute() {\n if (this.help) {\n this.context.stdout.write(\n this.constructor.usage?.toString() || \"No help available\\n\",\n );\n return 0;\n }\n\n const rootDir = process.cwd();\n\n this.context.stdout.write(`Generating OpenAPI documentation...\\n`);\n\n // Get all routes from the router\n const routes = router().routes;\n\n // Generate OpenAPI 3.0 specification\n const openApiSpec = {\n openapi: \"3.0.0\",\n info: {\n title: \"API Documentation\",\n version: \"1.0.0\",\n description: \"Auto-generated API documentation\",\n },\n servers: [\n {\n url: \"/\",\n description: \"Local server\",\n },\n ],\n paths: {} as Record<string, any>,\n };\n\n // Process each route\n for (const route of routes) {\n const routePath = route.path;\n // Convert route path to OpenAPI format (e.g., /api/:id -> /api/{id})\n const openApiPath = routePath.replace(/:([a-zA-Z0-9_]+)/g, \"{$1}\");\n\n if (!openApiSpec.paths[openApiPath]) {\n openApiSpec.paths[openApiPath] = {};\n }\n\n // Add each HTTP method for this route\n for (const method of route.methods) {\n const lowerMethod = method.toLowerCase();\n\n // Skip HEAD as it's usually auto-generated\n if (lowerMethod === \"head\") {\n continue;\n }\n\n openApiSpec.paths[openApiPath][lowerMethod] = {\n summary: `${method} ${routePath}`,\n description: `Endpoint for ${method} ${routePath}`,\n parameters: this.extractParameters(routePath),\n responses: {\n \"200\": {\n description: \"Successful response\",\n content: {\n \"application/json\": {\n schema: {\n type: \"object\",\n },\n },\n },\n },\n \"500\": {\n description: \"Internal server error\",\n },\n },\n };\n\n // Add request body for POST, PUT, PATCH\n if ([\"post\", \"put\", \"patch\"].includes(lowerMethod)) {\n openApiSpec.paths[openApiPath][lowerMethod].requestBody = {\n required: true,\n 
content: {\n \"application/json\": {\n schema: {\n type: \"object\",\n },\n },\n },\n };\n }\n }\n }\n\n // Ensure public directory exists\n await fs.mkdir(config.get(\"private_path\"), { recursive: true });\n\n // Write the OpenAPI spec to public/openapi.json\n const outputPath = path.join(config.get(\"private_path\"), \"openapi.json\");\n await fs.writeFile(\n outputPath,\n JSON.stringify(openApiSpec, null, 2),\n \"utf-8\",\n );\n\n this.context.stdout.write(\n `OpenAPI documentation generated at: ${outputPath}\\n`,\n );\n this.context.stdout.write(`Total routes documented: ${routes.length}\\n`);\n\n let files_to_merge: string[] = config.get(\"api_docs.merge_files\");\n let final_api_docs = {};\n for (let file_path of files_to_merge) {\n let file_json = JSON.parse(await fs.readFile(file_path, \"utf8\"));\n Arr.deepMerge(final_api_docs, file_json);\n }\n\n await fs.writeFile(\n config.get(\"api_docs.output\"),\n JSON.stringify(final_api_docs, null, 2),\n );\n\n this.context.stdout.write(\n `wrote final open api document to : ${config.get(\"api_docs.output\")}\\n`,\n );\n }\n\n private extractParameters(routePath: string): any[] {\n const paramRegex = /:([a-zA-Z0-9_]+)/g;\n const parameters: any[] = [];\n let match;\n\n while ((match = paramRegex.exec(routePath)) !== null) {\n parameters.push({\n name: match[1],\n in: \"path\",\n required: true,\n schema: {\n type: \"string\",\n },\n description: `Path parameter ${match[1]}`,\n });\n }\n\n return parameters;\n }\n}\n\ncli().register(GenerateApiDocsCommand);\n"],"mappings":";;AAAA,SAAS,KAAK,cAAc;AAC5B,SAAS,SAAS,cAAc;AAChC,OAAO,UAAU;AACjB,YAAY,QAAQ;AACpB,SAAS,cAAc;AACvB,SAAS,WAAW;AAEb,MAAM,+BAA+B,QAAQ;AAAA,EAPpD,OAOoD;AAAA;AAAA;AAAA,EAClD,OAAO,QAAQ;AAAA,IACb,CAAC,QAAQ,SAAS;AAAA,IAClB,CAAC,YAAY,SAAS;AAAA,EACxB;AAAA,EAEA,OAAO,QAAQ,QAAQ,MAAM;AAAA,IAC3B,UAAU;AAAA,IACV,aAAa;AAAA,IACb,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAuBT,UAAU,CAAC,CAAC,8BAA8B,qBAAqB,CAAC;AAAA,EAClE,CAAC;AAAA,EAED,OAAO,OAAO,QAAQ,aAAa,OAAO;AAAA,IACxC,aAAa;AAAA,EACf,CAAC;AAAA,EAED,MAAM,UAAU;AACd,QAAI,KAAK,MAAM;AACb,WAAK,QAAQ,OAAO;AAAA,QAClB,KAAK,YAAY,OAAO,SAAS,KAAK;AAAA,MACxC;AACA,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,QAAQ,IAAI;AAE5B,SAAK,QAAQ,OAAO,MAAM;AAAA,CAAuC;AAGjE,UAAM,SAAS,OAAO,EAAE;AAGxB,UAAM,cAAc;AAAA,MAClB,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,OAAO;AAAA,QACP,SAAS;AAAA,QACT,aAAa;AAAA,MACf;AAAA,MACA,SAAS;AAAA,QACP;AAAA,UACE,KAAK;AAAA,UACL,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA,OAAO,CAAC;AAAA,IACV;AAGA,eAAW,SAAS,QAAQ;AAC1B,YAAM,YAAY,MAAM;AAExB,YAAM,cAAc,UAAU,QAAQ,qBAAqB,MAAM;AAEjE,UAAI,CAAC,YAAY,MAAM,WAAW,GAAG;AACnC,oBAAY,MAAM,WAAW,IAAI,CAAC;AAAA,MACpC;AAGA,iBAAW,UAAU,MAAM,SAAS;AAClC,cAAM,cAAc,OAAO,YAAY;AAGvC,YAAI,gBAAgB,QAAQ;AAC1B;AAAA,QACF;AAEA,oBAAY,MAAM,WAAW,EAAE,WAAW,IAAI;AAAA,UAC5C,SAAS,GAAG,MAAM,IAAI,SAAS;AAAA,UAC/B,aAAa,gBAAgB,MAAM,IAAI,SAAS;AAAA,UAChD,YAAY,KAAK,kBAAkB,SAAS;AAAA,UAC5C,WAAW;AAAA,YACT,OAAO;AAAA,cACL,aAAa;AAAA,cACb,SAAS;AAAA,gBACP,oBAAoB;AAAA,kBAClB,QAAQ;AAAA,oBACN,MAAM;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,YACA,OAAO;AAAA,cACL,aAAa;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAGA,YAAI,CAAC,QAAQ,OAAO,OAAO,EAAE,SAAS,WAAW,GAAG;AAClD,sBAAY,MAAM,WAAW,EAAE,WAAW,EAAE,cAAc;AAAA,YACxD,UAAU;AAAA,YACV,SAAS;AAAA,cACP,oBAAoB;AAAA,gBAClB,QAAQ;AAAA,kBACN,MAAM;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,UAAM,GAAG,MAAM,OAAO,IAAI,cAAc,GAAG,EAAE,WAAW,KAAK,CAAC;AAG9D,UAAM,aAAa,KAAK,KAAK,OAAO,IAAI,cAAc,GAAG,cAAc;AACvE,UAAM,GAAG;AAAA,MACP;AAAA,MACA,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,MACnC
;AAAA,IACF;AAEA,SAAK,QAAQ,OAAO;AAAA,MAClB,uCAAuC,UAAU;AAAA;AAAA,IACnD;AACA,SAAK,QAAQ,OAAO,MAAM,4BAA4B,OAAO,MAAM;AAAA,CAAI;AAEvE,QAAI,iBAA2B,OAAO,IAAI,sBAAsB;AAChE,QAAI,iBAAiB,CAAC;AACtB,aAAS,aAAa,gBAAgB;AACpC,UAAI,YAAY,KAAK,MAAM,MAAM,GAAG,SAAS,WAAW,MAAM,CAAC;AAC/D,UAAI,UAAU,gBAAgB,SAAS;AAAA,IACzC;AAEA,UAAM,GAAG;AAAA,MACP,OAAO,IAAI,iBAAiB;AAAA,MAC5B,KAAK,UAAU,gBAAgB,MAAM,CAAC;AAAA,IACxC;AAEA,SAAK,QAAQ,OAAO;AAAA,MAClB,sCAAsC,OAAO,IAAI,iBAAiB,CAAC;AAAA;AAAA,IACrE;AAAA,EACF;AAAA,EAEQ,kBAAkB,WAA0B;AAClD,UAAM,aAAa;AACnB,UAAM,aAAoB,CAAC;AAC3B,QAAI;AAEJ,YAAQ,QAAQ,WAAW,KAAK,SAAS,OAAO,MAAM;AACpD,iBAAW,KAAK;AAAA,QACd,MAAM,MAAM,CAAC;AAAA,QACb,IAAI;AAAA,QACJ,UAAU;AAAA,QACV,QAAQ;AAAA,UACN,MAAM;AAAA,QACR;AAAA,QACA,aAAa,kBAAkB,MAAM,CAAC,CAAC;AAAA,MACzC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AACF;AAEA,IAAI,EAAE,SAAS,sBAAsB;","names":[]}
@@ -7,6 +7,7 @@ import * as fs from "fs/promises";
  import { fileURLToPath } from "url";
  import handlebars from "handlebars";
  import { execSync } from "child_process";
+ import { select, Separator } from "@inquirer/prompts";
  class CreateProjectCommand extends Command {
  static {
  __name(this, "CreateProjectCommand");
@@ -53,6 +54,27 @@ class CreateProjectCommand extends Command {
  return 1;
  } catch {
  }
+ const validation_library = await select({
+ message: "Select a package you want for validation",
+ choices: [
+ {
+ name: "Yup",
+ value: "yup",
+ description: "https://github.com/jquense/yup"
+ },
+ {
+ name: "Zod",
+ value: "zod",
+ description: "https://zod.dev/"
+ },
+ new Separator(),
+ {
+ name: "None",
+ value: "none",
+ disabled: false
+ }
+ ]
+ });
  await fs.mkdir(projectPath, { recursive: true });
  console.log(`Created project directory at: ${projectPath}`);
  const dirname = typeof __dirname === "undefined" ? path.dirname(fileURLToPath(import.meta.url)) : __dirname;
@@ -62,7 +84,9 @@ class CreateProjectCommand extends Command {
  }
  console.log(`Using base project path: ${basePath}`);
  const baseProjectPath = basePath;
- await this.processTplFolder(baseProjectPath, projectPath, {});
+ await this.processTplFolder(baseProjectPath, projectPath, {
+ validation_library
+ });
  console.log(`Copied base project files to: ${projectPath}`);
  const packageJsonPath = path.join(projectPath, `package.json`);
  const packageJson = JSON.parse(await fs.readFile(packageJsonPath, `utf-8`));
@@ -92,7 +116,7 @@ class CreateProjectCommand extends Command {
  await fs.mkdir(destPath, { recursive: true });
  await this.processTplFolder(srcPath, destPath, data);
  } else if (file.name.endsWith(".tpl")) {
- await this.processTplFile(srcPath, destPath, {});
+ await this.processTplFile(srcPath, destPath, data);
  } else {
  throw new Error(
  "unexpected non tpl file: " + srcPath + " " + file.name
@@ -101,6 +125,7 @@ class CreateProjectCommand extends Command {
  }
  }
  async processTplFile(src, dest, data = {}) {
+ handlebars.registerHelper("eq", (a, b) => a === b);
  const compiledTemplate = handlebars.compile(
  (await fs.readFile(src)).toString()
  );
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/app/console/project/CreateProjectCommand.mts"],"sourcesContent":["import { Command, Option } from \"clipanion\";\nimport { Case } from \"change-case-all\";\nimport path from \"path\";\nimport * as fs from \"fs/promises\";\nimport { fileURLToPath } from \"url\";\nimport handlebars from \"handlebars\";\nimport { execSync } from \"child_process\";\n\nexport class CreateProjectCommand extends Command {\n static paths = [[`create`, `project`]];\n\n static usage = Command.Usage({\n category: `Project`,\n description: `Create a new project`,\n details: `\n This command creates a new project with the specified name at the given path.\n If no path is provided, the project will be created in the current directory.\n `,\n examples: [\n [\n `Create a new project in specified directory`,\n `create project --path /path/to/my-project --git`,\n ],\n [\n `Create a new project at a specific path with git initialized`,\n `create project --path /path/to/my-project --git`,\n ],\n ],\n });\n\n projectPath = Option.String(\"--path\", { required: true });\n\n git = Option.Boolean(`--git`, false, {\n description: `Initialize a git repository in the new project`,\n });\n\n async folderExists(folderPath: string): Promise<boolean> {\n try {\n const stats = await fs.stat(folderPath);\n return stats.isDirectory();\n } catch (error: any) {\n if (error.code === \"ENOENT\") {\n return false; // Folder does not exist\n }\n throw error; // Other errors (e.g., permission issues)\n }\n }\n\n async execute() {\n // Create the project directory path by joining the specified path and project name\n const projectPath = path.join(this.projectPath);\n // Check if directory already exists\n try {\n await fs.access(projectPath);\n console.error(`Error: Directory ${projectPath} already exists.`);\n return 1;\n } catch {\n // Directory doesn't exist, we can proceed\n }\n\n await fs.mkdir(projectPath, { recursive: true });\n console.log(`Created project directory at: ${projectPath}`);\n\n const dirname =\n typeof __dirname === \"undefined\"\n ? 
path.dirname(fileURLToPath(import.meta.url))\n : __dirname;\n\n let basePath = path.join(dirname, `./base_project`);\n if ((await this.folderExists(basePath)) === false) {\n // we are running a compiled code that was bundled and the code is running from ./dist/bin/ folder.\n basePath = path.join(dirname, `../app/console/project/base_project`);\n }\n\n console.log(`Using base project path: ${basePath}`);\n //copy content of ./base_project to the new project directory\n const baseProjectPath = basePath;\n\n await this.processTplFolder(baseProjectPath, projectPath, {});\n console.log(`Copied base project files to: ${projectPath}`);\n\n //modify package.json with foldername\n const packageJsonPath = path.join(projectPath, `package.json`);\n const packageJson = JSON.parse(await fs.readFile(packageJsonPath, `utf-8`));\n packageJson.name = Case.snake(path.basename(projectPath));\n await fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2));\n console.log(`Updated package.json with project name: ${packageJson.name}`);\n\n if (this.git) {\n try {\n execSync(\n 'git init; git add --all; git commit --allow-empty -m \"chore: first commit for pashmak\"',\n {\n cwd: projectPath,\n },\n );\n } catch (error) {\n console.error(`Failed to create project.`, error);\n return 1;\n }\n }\n }\n\n async processTplFolder(src: string, dest: string, data: any = {}) {\n const files = await fs.readdir(src, { withFileTypes: true });\n for (const file of files) {\n const srcPath = path.join(src, file.name);\n const destPath =\n file.isFile() && file.name.endsWith(\".tpl\")\n ? path.join(dest, file.name.substring(0, file.name.length - 4))\n : path.join(dest, file.name);\n\n if (file.isDirectory()) {\n await fs.mkdir(destPath, { recursive: true });\n await this.processTplFolder(srcPath, destPath, data);\n } else if (file.name.endsWith(\".tpl\")) {\n await this.processTplFile(srcPath, destPath, {});\n } else {\n throw new Error(\n \"unexpected non tpl file: \" + srcPath + \" \" + file.name,\n );\n }\n }\n }\n\n async processTplFile(src: string, dest: string, data: any = {}) {\n const compiledTemplate = handlebars.compile(\n (await fs.readFile(src)).toString(),\n );\n const template = await compiledTemplate(data);\n await fs.writeFile(dest, template);\n 
}\n}\n"],"mappings":";;AAAA,SAAS,SAAS,cAAc;AAChC,SAAS,YAAY;AACrB,OAAO,UAAU;AACjB,YAAY,QAAQ;AACpB,SAAS,qBAAqB;AAC9B,OAAO,gBAAgB;AACvB,SAAS,gBAAgB;AAElB,MAAM,6BAA6B,QAAQ;AAAA,EARlD,OAQkD;AAAA;AAAA;AAAA,EAChD,OAAO,QAAQ,CAAC,CAAC,UAAU,SAAS,CAAC;AAAA,EAErC,OAAO,QAAQ,QAAQ,MAAM;AAAA,IAC3B,UAAU;AAAA,IACV,aAAa;AAAA,IACb,SAAS;AAAA;AAAA;AAAA;AAAA,IAIT,UAAU;AAAA,MACR;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAAA,EAED,cAAc,OAAO,OAAO,UAAU,EAAE,UAAU,KAAK,CAAC;AAAA,EAExD,MAAM,OAAO,QAAQ,SAAS,OAAO;AAAA,IACnC,aAAa;AAAA,EACf,CAAC;AAAA,EAED,MAAM,aAAa,YAAsC;AACvD,QAAI;AACF,YAAM,QAAQ,MAAM,GAAG,KAAK,UAAU;AACtC,aAAO,MAAM,YAAY;AAAA,IAC3B,SAAS,OAAY;AACnB,UAAI,MAAM,SAAS,UAAU;AAC3B,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,UAAU;AAEd,UAAM,cAAc,KAAK,KAAK,KAAK,WAAW;AAE9C,QAAI;AACF,YAAM,GAAG,OAAO,WAAW;AAC3B,cAAQ,MAAM,oBAAoB,WAAW,kBAAkB;AAC/D,aAAO;AAAA,IACT,QAAQ;AAAA,IAER;AAEA,UAAM,GAAG,MAAM,aAAa,EAAE,WAAW,KAAK,CAAC;AAC/C,YAAQ,IAAI,iCAAiC,WAAW,EAAE;AAE1D,UAAM,UACJ,OAAO,cAAc,cACjB,KAAK,QAAQ,cAAc,YAAY,GAAG,CAAC,IAC3C;AAEN,QAAI,WAAW,KAAK,KAAK,SAAS,gBAAgB;AAClD,QAAK,MAAM,KAAK,aAAa,QAAQ,MAAO,OAAO;AAEjD,iBAAW,KAAK,KAAK,SAAS,qCAAqC;AAAA,IACrE;AAEA,YAAQ,IAAI,4BAA4B,QAAQ,EAAE;AAElD,UAAM,kBAAkB;AAExB,UAAM,KAAK,iBAAiB,iBAAiB,aAAa,CAAC,CAAC;AAC5D,YAAQ,IAAI,iCAAiC,WAAW,EAAE;AAG1D,UAAM,kBAAkB,KAAK,KAAK,aAAa,cAAc;AAC7D,UAAM,cAAc,KAAK,MAAM,MAAM,GAAG,SAAS,iBAAiB,OAAO,CAAC;AAC1E,gBAAY,OAAO,KAAK,MAAM,KAAK,SAAS,WAAW,CAAC;AACxD,UAAM,GAAG,UAAU,iBAAiB,KAAK,UAAU,aAAa,MAAM,CAAC,CAAC;AACxE,YAAQ,IAAI,2CAA2C,YAAY,IAAI,EAAE;AAEzE,QAAI,KAAK,KAAK;AACZ,UAAI;AACF;AAAA,UACE;AAAA,UACA;AAAA,YACE,KAAK;AAAA,UACP;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,MAAM,6BAA6B,KAAK;AAChD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,iBAAiB,KAAa,MAAc,OAAY,CAAC,GAAG;AAChE,UAAM,QAAQ,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC3D,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,KAAK,KAAK,KAAK,IAAI;AACxC,YAAM,WACJ,KAAK,OAAO,KAAK,KAAK,KAAK,SAAS,MAAM,IACtC,KAAK,KAAK,MAAM,KAAK,KAAK,UAAU,GAAG,KAAK,KAAK,SAAS,CAAC,CAAC,IAC5D,KAAK,KAAK,MAAM,KAAK,IAAI;AAE/B,UAAI,KAAK,YAAY,GAAG;AACtB,cAAM,GAAG,MAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AAC5C,cAAM,KAAK,iBAAiB,SAAS,UAAU,IAAI;AAAA,MACrD,WAAW,KAAK,KAAK,SAAS,MAAM,GAAG;AACrC,cAAM,KAAK,eAAe,SAAS,UAAU,CAAC,CAAC;AAAA,MACjD,OAAO;AACL,cAAM,IAAI;AAAA,UACR,8BAA8B,UAAU,MAAM,KAAK;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,eAAe,KAAa,MAAc,OAAY,CAAC,GAAG;AAC9D,UAAM,mBAAmB,WAAW;AAAA,OACjC,MAAM,GAAG,SAAS,GAAG,GAAG,SAAS;AAAA,IACpC;AACA,UAAM,WAAW,MAAM,iBAAiB,IAAI;AAC5C,UAAM,GAAG,UAAU,MAAM,QAAQ;AAAA,EACnC;AACF;","names":[]}
+ {"version":3,"sources":["../../../../src/app/console/project/CreateProjectCommand.mts"],"sourcesContent":["import { Command, Option } from \"clipanion\";\nimport { Case } from \"change-case-all\";\nimport path from \"path\";\nimport * as fs from \"fs/promises\";\nimport { fileURLToPath } from \"url\";\nimport handlebars from \"handlebars\";\nimport { execSync } from \"child_process\";\nimport { select, Separator } from \"@inquirer/prompts\";\n\nexport class CreateProjectCommand extends Command {\n static paths = [[`create`, `project`]];\n\n static usage = Command.Usage({\n category: `Project`,\n description: `Create a new project`,\n details: `\n This command creates a new project with the specified name at the given path.\n If no path is provided, the project will be created in the current directory.\n `,\n examples: [\n [\n `Create a new project in specified directory`,\n `create project --path /path/to/my-project --git`,\n ],\n [\n `Create a new project at a specific path with git initialized`,\n `create project --path /path/to/my-project --git`,\n ],\n ],\n });\n\n projectPath = Option.String(\"--path\", { required: true });\n\n git = Option.Boolean(`--git`, false, {\n description: `Initialize a git repository in the new project`,\n });\n\n async folderExists(folderPath: string): Promise<boolean> {\n try {\n const stats = await fs.stat(folderPath);\n return stats.isDirectory();\n } catch (error: any) {\n if (error.code === \"ENOENT\") {\n return false; // Folder does not exist\n }\n throw error; // Other errors (e.g., permission issues)\n }\n }\n\n async execute() {\n // Create the project directory path by joining the specified path and project name\n const projectPath = path.join(this.projectPath);\n // Check if directory already exists\n try {\n await fs.access(projectPath);\n console.error(`Error: Directory ${projectPath} already exists.`);\n return 1;\n } catch {\n // Directory doesn't exist, we can proceed\n }\n\n // ask what library to use for validation: yup or zod or none\n const validation_library = await select({\n message: \"Select a package you want for validation\",\n choices: [\n {\n name: \"Yup\",\n value: \"yup\",\n description: \"https://github.com/jquense/yup\",\n },\n {\n name: \"Zod\",\n value: \"zod\",\n description: \"https://zod.dev/\",\n },\n new Separator(),\n {\n name: \"None\",\n value: \"none\",\n disabled: false,\n },\n ],\n });\n\n await fs.mkdir(projectPath, { recursive: true });\n console.log(`Created project directory at: ${projectPath}`);\n\n const dirname =\n typeof __dirname === \"undefined\"\n ? 
path.dirname(fileURLToPath(import.meta.url))\n : __dirname;\n\n let basePath = path.join(dirname, `./base_project`);\n if ((await this.folderExists(basePath)) === false) {\n // we are running a compiled code that was bundled and the code is running from ./dist/bin/ folder.\n basePath = path.join(dirname, `../app/console/project/base_project`);\n }\n\n console.log(`Using base project path: ${basePath}`);\n //copy content of ./base_project to the new project directory\n const baseProjectPath = basePath;\n\n await this.processTplFolder(baseProjectPath, projectPath, {\n validation_library,\n });\n console.log(`Copied base project files to: ${projectPath}`);\n\n //modify package.json with foldername\n const packageJsonPath = path.join(projectPath, `package.json`);\n const packageJson = JSON.parse(await fs.readFile(packageJsonPath, `utf-8`));\n packageJson.name = Case.snake(path.basename(projectPath));\n await fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2));\n console.log(`Updated package.json with project name: ${packageJson.name}`);\n\n if (this.git) {\n try {\n execSync(\n 'git init; git add --all; git commit --allow-empty -m \"chore: first commit for pashmak\"',\n {\n cwd: projectPath,\n },\n );\n } catch (error) {\n console.error(`Failed to create project.`, error);\n return 1;\n }\n }\n }\n\n async processTplFolder(src: string, dest: string, data: any = {}) {\n const files = await fs.readdir(src, { withFileTypes: true });\n for (const file of files) {\n const srcPath = path.join(src, file.name);\n const destPath =\n file.isFile() && file.name.endsWith(\".tpl\")\n ? path.join(dest, file.name.substring(0, file.name.length - 4))\n : path.join(dest, file.name);\n\n if (file.isDirectory()) {\n await fs.mkdir(destPath, { recursive: true });\n await this.processTplFolder(srcPath, destPath, data);\n } else if (file.name.endsWith(\".tpl\")) {\n await this.processTplFile(srcPath, destPath, data);\n } else {\n throw new Error(\n \"unexpected non tpl file: \" + srcPath + \" \" + file.name,\n );\n }\n }\n }\n\n async processTplFile(src: string, dest: string, data: any = {}) {\n handlebars.registerHelper(\"eq\", (a, b) => a === b);\n\n const compiledTemplate = handlebars.compile(\n (await fs.readFile(src)).toString(),\n );\n const template = await compiledTemplate(data);\n await fs.writeFile(dest, template);\n 
}\n}\n"],"mappings":";;AAAA,SAAS,SAAS,cAAc;AAChC,SAAS,YAAY;AACrB,OAAO,UAAU;AACjB,YAAY,QAAQ;AACpB,SAAS,qBAAqB;AAC9B,OAAO,gBAAgB;AACvB,SAAS,gBAAgB;AACzB,SAAS,QAAQ,iBAAiB;AAE3B,MAAM,6BAA6B,QAAQ;AAAA,EATlD,OASkD;AAAA;AAAA;AAAA,EAChD,OAAO,QAAQ,CAAC,CAAC,UAAU,SAAS,CAAC;AAAA,EAErC,OAAO,QAAQ,QAAQ,MAAM;AAAA,IAC3B,UAAU;AAAA,IACV,aAAa;AAAA,IACb,SAAS;AAAA;AAAA;AAAA;AAAA,IAIT,UAAU;AAAA,MACR;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAAA,EAED,cAAc,OAAO,OAAO,UAAU,EAAE,UAAU,KAAK,CAAC;AAAA,EAExD,MAAM,OAAO,QAAQ,SAAS,OAAO;AAAA,IACnC,aAAa;AAAA,EACf,CAAC;AAAA,EAED,MAAM,aAAa,YAAsC;AACvD,QAAI;AACF,YAAM,QAAQ,MAAM,GAAG,KAAK,UAAU;AACtC,aAAO,MAAM,YAAY;AAAA,IAC3B,SAAS,OAAY;AACnB,UAAI,MAAM,SAAS,UAAU;AAC3B,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,UAAU;AAEd,UAAM,cAAc,KAAK,KAAK,KAAK,WAAW;AAE9C,QAAI;AACF,YAAM,GAAG,OAAO,WAAW;AAC3B,cAAQ,MAAM,oBAAoB,WAAW,kBAAkB;AAC/D,aAAO;AAAA,IACT,QAAQ;AAAA,IAER;AAGA,UAAM,qBAAqB,MAAM,OAAO;AAAA,MACtC,SAAS;AAAA,MACT,SAAS;AAAA,QACP;AAAA,UACE,MAAM;AAAA,UACN,OAAO;AAAA,UACP,aAAa;AAAA,QACf;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,OAAO;AAAA,UACP,aAAa;AAAA,QACf;AAAA,QACA,IAAI,UAAU;AAAA,QACd;AAAA,UACE,MAAM;AAAA,UACN,OAAO;AAAA,UACP,UAAU;AAAA,QACZ;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,GAAG,MAAM,aAAa,EAAE,WAAW,KAAK,CAAC;AAC/C,YAAQ,IAAI,iCAAiC,WAAW,EAAE;AAE1D,UAAM,UACJ,OAAO,cAAc,cACjB,KAAK,QAAQ,cAAc,YAAY,GAAG,CAAC,IAC3C;AAEN,QAAI,WAAW,KAAK,KAAK,SAAS,gBAAgB;AAClD,QAAK,MAAM,KAAK,aAAa,QAAQ,MAAO,OAAO;AAEjD,iBAAW,KAAK,KAAK,SAAS,qCAAqC;AAAA,IACrE;AAEA,YAAQ,IAAI,4BAA4B,QAAQ,EAAE;AAElD,UAAM,kBAAkB;AAExB,UAAM,KAAK,iBAAiB,iBAAiB,aAAa;AAAA,MACxD;AAAA,IACF,CAAC;AACD,YAAQ,IAAI,iCAAiC,WAAW,EAAE;AAG1D,UAAM,kBAAkB,KAAK,KAAK,aAAa,cAAc;AAC7D,UAAM,cAAc,KAAK,MAAM,MAAM,GAAG,SAAS,iBAAiB,OAAO,CAAC;AAC1E,gBAAY,OAAO,KAAK,MAAM,KAAK,SAAS,WAAW,CAAC;AACxD,UAAM,GAAG,UAAU,iBAAiB,KAAK,UAAU,aAAa,MAAM,CAAC,CAAC;AACxE,YAAQ,IAAI,2CAA2C,YAAY,IAAI,EAAE;AAEzE,QAAI,KAAK,KAAK;AACZ,UAAI;AACF;AAAA,UACE;AAAA,UACA;AAAA,YACE,KAAK;AAAA,UACP;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,gBAAQ,MAAM,6BAA6B,KAAK;AAChD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,iBAAiB,KAAa,MAAc,OAAY,CAAC,GAAG;AAChE,UAAM,QAAQ,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC3D,eAAW,QAAQ,OAAO;AACxB,YAAM,UAAU,KAAK,KAAK,KAAK,KAAK,IAAI;AACxC,YAAM,WACJ,KAAK,OAAO,KAAK,KAAK,KAAK,SAAS,MAAM,IACtC,KAAK,KAAK,MAAM,KAAK,KAAK,UAAU,GAAG,KAAK,KAAK,SAAS,CAAC,CAAC,IAC5D,KAAK,KAAK,MAAM,KAAK,IAAI;AAE/B,UAAI,KAAK,YAAY,GAAG;AACtB,cAAM,GAAG,MAAM,UAAU,EAAE,WAAW,KAAK,CAAC;AAC5C,cAAM,KAAK,iBAAiB,SAAS,UAAU,IAAI;AAAA,MACrD,WAAW,KAAK,KAAK,SAAS,MAAM,GAAG;AACrC,cAAM,KAAK,eAAe,SAAS,UAAU,IAAI;AAAA,MACnD,OAAO;AACL,cAAM,IAAI;AAAA,UACR,8BAA8B,UAAU,MAAM,KAAK;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,eAAe,KAAa,MAAc,OAAY,CAAC,GAAG;AAC9D,eAAW,eAAe,MAAM,CAAC,GAAG,MAAM,MAAM,CAAC;AAEjD,UAAM,mBAAmB,WAAW;AAAA,OACjC,MAAM,GAAG,SAAS,GAAG,GAAG,SAAS;AAAA,IACpC;AACA,UAAM,WAAW,MAAM,iBAAiB,IAAI;AAC5C,UAAM,GAAG,UAAU,MAAM,QAAQ;AAAA,EACnC;AACF;","names":[]}
@@ -42,10 +42,15 @@
  "@devbro/pashmak": "0.1.*",
  "bcryptjs": "^3.0.2",
  "clipanion": "^4.0.0-rc.4",
- "dotenv": "^16.5.0",
  "jsonwebtoken": "^9.0.0",
  "tsconfig-paths": "^4.2.0",
- "yup": "^1.6.1"
+ {{#if (eq validation_library "yup")}}
+ "yup": "^1.6.1",
+ {{/if}}
+ {{#if (eq validation_library "zod")}}
+ "zod": "^4.1.12",
+ {{/if}}
+ "dotenv": "^16.5.0"
  },
  "directories": {
  "doc": "docs",
@@ -0,0 +1,9 @@
+ import { queue } from '@devbro/pashmak/facades';
+
+ export function startQueueListeners() {
+ let rc: any = {};
+ // rc.channel_name_1 = queue().listen('channel_name_1', MessageClass1);
+ // rc.channel_name_2 = queue().listen('channel_name_2', MessageClass2);
+
+ return rc;
+ }
@@ -1,7 +1,5 @@
  export default {
- caches: {
- default: {
- type: 'disabled',
- },
+ default: {
+ type: 'disabled',
  },
  };
@@ -1,12 +1,10 @@
  export default {
- databases: {
- default: {
- host: process.env.DB_HOST,
- database: process.env.DB_NAME || 'test_db',
- user: process.env.DB_USER,
- password: process.env.DB_PASSWORD,
- port: parseInt(process.env.DB_PORT || '5432'),
- name: 'db',
- },
+ default: {
+ host: process.env.DB_HOST,
+ database: process.env.DB_NAME || 'test_db',
+ user: process.env.DB_USER,
+ password: process.env.DB_PASSWORD,
+ port: parseInt(process.env.DB_PORT || '5432'),
+ name: 'db',
  },
  };
@@ -3,14 +3,19 @@ import os from 'os';
  import { getEnv } from '@devbro/pashmak/helper';
  import { dirname } from 'path';
  import { fileURLToPath } from 'url';
+ import { loadConfig } from '@devbro/pashmak/config';
 
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);
 
- const extends_list = ['databases', 'storages', 'mailer', 'loggers', 'queues', 'caches'];
-
  export default {
- extends: [...extends_list.map((i) => `./${i}.js`), ...extends_list.map((i) => `./${i}.ts`)],
+ databases: await loadConfig('./databases'),
+ storages: await loadConfig('./storages'),
+ mailer: await loadConfig('./mailer'),
+ loggers: await loadConfig('./loggers'),
+ queues: await loadConfig('./queues'),
+ caches: await loadConfig('./caches'),
+ base_url: getEnv('BASE_URL', 'http://localhost:' + getEnv('PORT', '3000')),
  port: getEnv('PORT', 3000),
  file_upload_path: path.join(os.tmpdir(), ''),
  migration: {
@@ -33,3 +38,12 @@ export default {
  public_path: path.join(__dirname, '../..', 'public'),
  debug_mode: getEnv('APP_DEBUG', false),
  };
+
+ export const $test = {
+ // Test environment overrides
+ };
+
+ export const $prod = {
+ port: getEnv('PORT', 80),
+ debug_mode: false,
+ };
@@ -2,14 +2,12 @@ import { ctxSafe } from '@devbro/pashmak/context';
  import { LogMessage } from '@devbro/pashmak/logger';
 
  export default {
- loggers: {
- default: {
- level: process.env.NODE_ENV === 'test' ? 'silent' : 'info',
- extrasFunction: (message: LogMessage) => {
- let requestId = ctxSafe()?.get('requestId');
- requestId && (message.requestId = requestId);
- return message;
- },
+ default: {
+ level: process.env.NODE_ENV === 'test' ? 'silent' : 'info',
+ extrasFunction: (message: LogMessage) => {
+ let requestId = ctxSafe()?.get('requestId');
+ requestId && (message.requestId = requestId);
+ return message;
  },
  },
  };
@@ -1,23 +1,22 @@
  export default {
- mailer: {
- default: {
- provider: 'MEMORY',
+ default: {
+ provider: 'memory',
+ config: {
  default_from: 'no-reply@devbro.com',
  },
  },
- $prod: {
- mailer: {
- default: {
- provider: 'SES',
- // credentials are loaded as env vars
- },
- },
+ };
+
+ export const $prod = {
+ default: {
+ provider: 'ses',
+ // credentials are loaded as env vars
  },
- $test: {
- mailer: {
- default: {
- provider: 'MEMORY',
- },
- },
+ };
+
+ export const $test = {
+ default: {
+ provider: 'memory',
+ // credentials are loaded as env vars
  },
  };
@@ -1,7 +1,5 @@
  export default {
- queues: {
- default: {
- type: 'database',
- },
+ default: {
+ provider: 'memory',
  },
  };
@@ -1,11 +1,12 @@
+ import { LocalStorageConfig } from '@devbro/pashmak/storage';
  import path from 'path';
  import os from 'os';
 
  export default {
- storages: {
- default: {
- engine: 'local',
+ default: {
+ provider: 'local',
+ config: {
  basePath: path.join(os.tmpdir(), '/app-storage/'),
- },
+ } as LocalStorageConfig,
  },
  };
@@ -0,0 +1,175 @@
+ import { logger } from '@devbro/pashmak/facades';
+ import { JSONValue, Num } from '@devbro/pashmak/helper';
+ import { Query } from '@devbro/pashmak/sql';
+ import { cacheQuery } from '@devbro/pashmak/cache';
+
+ type orderByDirection = 'asc' | 'desc';
+ export class QueryKit {
+ private query: Query;
+ private pagination: { min: number; max: number; sort: '' } = { min: 10, max: 1000, sort: '' };
+ private parameters: Record<string, any> = {};
+ private sorts: Record<string, CustomSort> = {};
+ private filters: Record<string, QueryFilter> = {};
+ private defaultSortKey: string = '';
+ private _cache_results: boolean = false;
+
+ get cacheResults(): boolean {
+ return this._cache_results;
+ }
+
+ set cacheResults(value: boolean) {
+ this._cache_results = value;
+ }
+
+ constructor(query: Query) {
+ this.query = query;
+ }
+
+ setParameters(parameters: Record<string, any>) {
+ this.parameters = parameters;
+ }
+
+ addSort(fields: (string | { key: string; field: string })[]) {
+ for (const field of fields) {
+ if (typeof field === 'string') {
+ this.sorts[field] = {
+ apply: (query: Query, sort_key: string, direction: orderByDirection) => {
+ query.orderBy(sort_key, direction);
+ },
+ };
+ continue;
+ }
+ if (typeof field === 'object' && field.key && field.field) {
+ this.sorts[field.key] = {
+ apply: (query: Query, sort_key: string, direction: orderByDirection) => {
+ query.orderBy(field.field, direction);
+ },
+ };
+ }
+ }
+ }
+
+ addCustomSort(
+ sort_key: string,
+ callback: (query: Query, sort_key: string, direction: orderByDirection) => undefined
+ ) {
+ this.sorts[sort_key] = {
+ apply: callback,
+ };
+ }
+
+ setDefaultSort(sort_key: string) {
+ this.defaultSortKey = sort_key;
+ }
+
+ addFilter(filter: QueryFilter) {
+ this.filters[filter.getKey()] = filter;
+ }
+
+ setPagination(min_pagination: number, max_pagination: number) {
+ this.pagination.min = min_pagination;
+ this.pagination.max = max_pagination;
+ }
+
+ async get() {
+ // Apply filters
+ for (const key in this.parameters.filter) {
+ if (this.filters[key] && this.parameters.filter[key]) {
+ this.filters[key].apply(this.query, key, this.parameters.filter[key]);
+ }
+ }
+
+ let total_rows = await this.query.count();
+
+ // apply sorting
+ let sort_key = this.parameters.sort || this.defaultSortKey;
+ let sort_direction: orderByDirection = 'asc';
+ if (sort_key && sort_key.charAt(0) === '-') {
+ sort_key = sort_key.substring(1);
+ sort_direction = 'desc';
+ }
+ if (this.sorts[sort_key]) {
+ this.sorts[sort_key].apply(this.query, sort_key, sort_direction);
+ }
+
+ // Apply pagination
+ let per_page = this?.parameters?.page?.per_page || this.pagination.max;
+ per_page = Num.clamp(per_page, this.pagination.min, this.pagination.max);
+
+ this.query.limit(per_page);
+
+ // Apply offset if page is provided
+ const page_number: number = this?.parameters?.page?.number || 1;
+ const offset = (page_number - 1) * per_page;
+ this.query.offset(offset);
+
+ // logger().info({ msg: 'Fetching list', query: this.query.toSql() });
+
+ let rows = [];
+ if (this.cacheResults) {
+ rows = await cacheQuery(this.query);
+ } else {
+ rows = await this.query.get();
+ }
+
+ return {
+ data: rows,
+ meta: {
+ first_page: 1,
+ current_page: page_number,
+ last_page: Math.ceil(total_rows / per_page),
+ total: total_rows,
+ per_page: per_page,
+ per_this_page: rows.length,
+ from: offset + 1,
+ to: Math.min(offset + rows.length, total_rows),
+ },
+ };
+ }
+ }
+
+ interface QueryFilter {
+ getKey(): string;
+ apply(query: Query, key: String, value: any): undefined;
+ }
+
+ interface CustomSort {
+ apply(query: Query, sort_key: string, direction: orderByDirection): undefined;
+ }
+
+ export class QueryFilterFactory {
+ static exact(key: string, field: string | undefined = undefined): QueryFilter {
+ return {
+ getKey() {
+ return key;
+ },
+ apply(query: Query, field_name: string, value: any) {
+ query.whereOp(field ?? field_name, '=', value);
+ },
+ };
+ }
+
+ static startsWith(key: string, field: string | undefined = undefined): QueryFilter {
+ return {
+ getKey() {
+ return key;
+ },
+ apply(query: Query, field_name: string, value: any) {
+ query.whereOp(field ?? field_name, 'ILIKE', `${value}%`);
+ },
+ };
+ }
+
+ static custom(field: string, callback: (query: Query, field_name: string, value: any) => Query): QueryFilter {
+ return {
+ getKey() {
+ return field;
+ },
+ apply(query: Query, field_name: string, value: any) {
+ query.whereNested((q) => {
+ callback(q, field_name, value);
+ });
+ },
+ };
+ }
+ }