@devbro/pashmak 0.1.10 → 0.1.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/dist/app/console/migrate/MigrateCommand.d.mts +1 -0
  2. package/dist/app/console/migrate/MigrateCommand.mjs +33 -16
  3. package/dist/app/console/migrate/MigrateCommand.mjs.map +1 -1
  4. package/dist/app/console/migrate/MigrateRollbackCommand.d.mts +1 -1
  5. package/dist/app/console/migrate/MigrateRollbackCommand.mjs +5 -6
  6. package/dist/app/console/migrate/MigrateRollbackCommand.mjs.map +1 -1
  7. package/dist/app/console/migrate/make_migration.tpl +5 -5
  8. package/dist/app/console/queue/GenerateMigrateCommand.d.mts +9 -0
  9. package/dist/app/console/queue/GenerateMigrateCommand.mjs +51 -0
  10. package/dist/app/console/queue/GenerateMigrateCommand.mjs.map +1 -0
  11. package/dist/app/console/queue/queue_migration.tpl +19 -0
  12. package/dist/bin/app/console/DefaultCommand.cjs +254 -25
  13. package/dist/bin/app/console/KeyGenerateCommand.cjs +254 -25
  14. package/dist/bin/app/console/StartCommand.cjs +257 -28
  15. package/dist/bin/app/console/generate/GenerateControllerCommand.cjs +254 -25
  16. package/dist/bin/app/console/generate/index.cjs +254 -25
  17. package/dist/bin/app/console/index.cjs +293 -48
  18. package/dist/bin/app/console/migrate/GenerateMigrateCommand.cjs +254 -25
  19. package/dist/bin/app/console/migrate/MigrateCommand.cjs +288 -42
  20. package/dist/bin/app/console/migrate/MigrateRollbackCommand.cjs +258 -30
  21. package/dist/bin/app/console/migrate/index.cjs +291 -46
  22. package/dist/bin/app/console/queue/GenerateMigrateCommand.cjs +752 -0
  23. package/dist/bin/facades.cjs +257 -26
  24. package/dist/bin/factories.cjs +707 -0
  25. package/dist/bin/index.cjs +312 -54
  26. package/dist/bin/middlewares.cjs +255 -26
  27. package/dist/bin/queue.cjs +99 -0
  28. package/dist/bin/router.cjs +95 -5
  29. package/dist/facades.d.mts +3 -1
  30. package/dist/facades.mjs +15 -27
  31. package/dist/facades.mjs.map +1 -1
  32. package/dist/factories.d.mts +20 -0
  33. package/dist/factories.mjs +83 -0
  34. package/dist/factories.mjs.map +1 -0
  35. package/dist/queue.d.mts +15 -0
  36. package/dist/queue.mjs +73 -0
  37. package/dist/queue.mjs.map +1 -0
  38. package/dist/router.mjs +1 -1
  39. package/dist/router.mjs.map +1 -1
  40. package/package.json +10 -3
@@ -3,6 +3,7 @@ import { Command } from 'clipanion';
  declare class MigrateCommand extends Command {
  static paths: string[][];
  fresh: boolean;
+ refresh: boolean;
  execute(): Promise<void>;
  }
 
@@ -11,29 +11,46 @@ class MigrateCommand extends Command {
  __name(this, "MigrateCommand");
  }
  static paths = [[`migrate`]];
- fresh = Option.Boolean("--fresh", false);
+ fresh = Option.Boolean(`--fresh`, false, {
+ description: `whether to delete and recreate database`
+ });
+ refresh = Option.Boolean(`--refresh`, false, {
+ description: `whether to drop all tables before running migrations by using rollback function`
+ });
  async execute() {
  await context_provider.run(async () => {
  const db = database();
  const schema = db.getSchema();
  if (this.fresh) {
- logger().info("dropping all tables!!");
- let retry = true;
- let retry_count = 0;
- while (retry && retry_count < 10) {
- retry = false;
- retry_count++;
- const tables = await schema.tables();
- for (const table of tables) {
- logger().info(`dropping table ${table.name}`);
- try {
- await schema.dropTable(table.name);
- } catch {
- logger().info(`failed to drop ${table.name}`);
- retry = true;
- }
+ throw new Error("not implemented");
+ }
+ if (this.refresh) {
+ logger().info("reverting all migrations!!");
+ const existing_migrations = await db.runQuery({
+ sql: "select * from migrations order by created_at DESC",
+ bindings: []
+ });
+ const migrationsDir2 = config.get("migration.path");
+ for (const migration_record of existing_migrations) {
+ logger().info(`rolling back ${migration_record.filename}`);
+ try {
+ const MigrationClass = (await import(path.join(migrationsDir2, migration_record.filename))).default;
+ const migrationInstance = new MigrationClass();
+ await migrationInstance.down(db.getSchema());
+ await db.runQuery({
+ sql: "delete from migrations where filename = $1",
+ bindings: [migration_record.filename]
+ });
+ } catch (error) {
+ logger().error(
+ `Failed to rollback migration ${migration_record.filename}: ${error}`
+ );
+ throw error;
  }
  }
+ logger().info(
+ `rolled back ${existing_migrations.length} migrations successfully!`
+ );
  }
  if (!await schema.tableExists("migrations")) {
  await schema.createTable("migrations", (blueprint) => {
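
For context on the new --refresh flag: it reads every row of the migrations table, dynamically imports the recorded file from migration.path, and calls the default export's down(schema) before deleting the row. Any migration written against the project's own template (updated below in make_migration.tpl) already satisfies that contract. A minimal hedged sketch, with a hypothetical file name and table:

    // Hypothetical file <migration.path>/2025_01_01_00000_posts.ts
    import { Migration, Schema, Blueprint } from "@devbro/pashmak/sql";

    export default class PostsMigration extends Migration {
      async up(schema: Schema) {
        // applied by `migrate`
        await schema.createTable("posts", (table: Blueprint) => {
          table.id();
          table.timestamps();
        });
      }

      async down(schema: Schema) {
        // called by `migrate --refresh` and `migrate rollback`
        await schema.dropTableIfExists("posts");
      }
    }
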
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/app/console/migrate/MigrateCommand.mts"],"sourcesContent":["import { cli, db as database, logger } from \"../../../facades.mjs\";\nimport { Command, Option } from \"clipanion\";\nimport { Blueprint } from \"@devbro/neko-sql\";\nimport { context_provider } from \"@devbro/neko-context\";\nimport path from \"path\";\nimport fs from \"fs/promises\";\nimport { config } from \"@devbro/neko-config\";\nimport { Migration } from \"@devbro/neko-sql\";\n\nexport class MigrateCommand extends Command {\n static paths = [[`migrate`]];\n\n fresh = Option.Boolean(\"--fresh\", false);\n\n async execute() {\n await context_provider.run(async () => {\n // this.context.stdout.write(`Hello Migrate Command!\\n`);\n const db = database();\n const schema = db.getSchema();\n\n if (this.fresh) {\n logger().info(\"dropping all tables!!\");\n let retry = true;\n let retry_count = 0;\n while (retry && retry_count < 10) {\n retry = false;\n retry_count++;\n const tables = await schema.tables();\n for (const table of tables) {\n logger().info(`dropping table ${table.name}`);\n try {\n await schema.dropTable(table.name);\n } catch {\n logger().info(`failed to drop ${table.name}`);\n retry = true;\n }\n }\n }\n }\n\n //create migration table if not exists\n if (!(await schema.tableExists(\"migrations\"))) {\n await schema.createTable(\"migrations\", (blueprint: Blueprint) => {\n blueprint.id();\n blueprint.timestamps();\n blueprint.string(\"filename\");\n blueprint.integer(\"batch\");\n });\n }\n\n const migrationsDir = config.get(\"migration.path\");\n let files: string[] = [];\n\n const dirEntries = await fs.readdir(migrationsDir);\n files = dirEntries\n .filter((entry) => entry.endsWith(\".ts\") || entry.endsWith(\".js\"))\n .sort();\n let batch_number = await db.runQuery({\n sql: \"select max(batch) as next_batch from migrations\",\n bindings: [],\n });\n batch_number = batch_number[0].next_batch || 0;\n batch_number++;\n\n const migrations = await db.runQuery({\n sql: \"select * from migrations order by created_at ASC\",\n bindings: [],\n });\n\n const completed_migrations = migrations.map((r: any) => r.filename);\n const pending_migrations = files.filter(\n (file) => !completed_migrations.includes(file),\n );\n\n let migrated_count = 0;\n for (const class_to_migrate of pending_migrations) {\n logger().info(`migrating up ${class_to_migrate}`);\n const ClassToMigrate = (\n await import(path.join(migrationsDir, class_to_migrate))\n ).default;\n const c: Migration = new ClassToMigrate();\n await c.up(db.getSchema());\n await db.runQuery({\n sql: \"insert into migrations (filename, batch) values ($1,$2)\",\n bindings: [class_to_migrate, batch_number],\n });\n migrated_count++;\n }\n\n if (migrated_count === 0) {\n logger().warn(\"no migrations to run!\");\n return;\n }\n\n logger().info(`migrated ${migrated_count} migrations successfully!`);\n });\n 
}\n}\n\ncli().register(MigrateCommand);\n"],"mappings":";;AAAA,SAAS,KAAK,MAAM,UAAU,cAAc;AAC5C,SAAS,SAAS,cAAc;AAEhC,SAAS,wBAAwB;AACjC,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,cAAc;AAGhB,MAAM,uBAAuB,QAAQ;AAAA,EAT5C,OAS4C;AAAA;AAAA;AAAA,EAC1C,OAAO,QAAQ,CAAC,CAAC,SAAS,CAAC;AAAA,EAE3B,QAAQ,OAAO,QAAQ,WAAW,KAAK;AAAA,EAEvC,MAAM,UAAU;AACd,UAAM,iBAAiB,IAAI,YAAY;AAErC,YAAM,KAAK,SAAS;AACpB,YAAM,SAAS,GAAG,UAAU;AAE5B,UAAI,KAAK,OAAO;AACd,eAAO,EAAE,KAAK,uBAAuB;AACrC,YAAI,QAAQ;AACZ,YAAI,cAAc;AAClB,eAAO,SAAS,cAAc,IAAI;AAChC,kBAAQ;AACR;AACA,gBAAM,SAAS,MAAM,OAAO,OAAO;AACnC,qBAAW,SAAS,QAAQ;AAC1B,mBAAO,EAAE,KAAK,kBAAkB,MAAM,IAAI,EAAE;AAC5C,gBAAI;AACF,oBAAM,OAAO,UAAU,MAAM,IAAI;AAAA,YACnC,QAAQ;AACN,qBAAO,EAAE,KAAK,kBAAkB,MAAM,IAAI,EAAE;AAC5C,sBAAQ;AAAA,YACV;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,CAAE,MAAM,OAAO,YAAY,YAAY,GAAI;AAC7C,cAAM,OAAO,YAAY,cAAc,CAAC,cAAyB;AAC/D,oBAAU,GAAG;AACb,oBAAU,WAAW;AACrB,oBAAU,OAAO,UAAU;AAC3B,oBAAU,QAAQ,OAAO;AAAA,QAC3B,CAAC;AAAA,MACH;AAEA,YAAM,gBAAgB,OAAO,IAAI,gBAAgB;AACjD,UAAI,QAAkB,CAAC;AAEvB,YAAM,aAAa,MAAM,GAAG,QAAQ,aAAa;AACjD,cAAQ,WACL,OAAO,CAAC,UAAU,MAAM,SAAS,KAAK,KAAK,MAAM,SAAS,KAAK,CAAC,EAChE,KAAK;AACR,UAAI,eAAe,MAAM,GAAG,SAAS;AAAA,QACnC,KAAK;AAAA,QACL,UAAU,CAAC;AAAA,MACb,CAAC;AACD,qBAAe,aAAa,CAAC,EAAE,cAAc;AAC7C;AAEA,YAAM,aAAa,MAAM,GAAG,SAAS;AAAA,QACnC,KAAK;AAAA,QACL,UAAU,CAAC;AAAA,MACb,CAAC;AAED,YAAM,uBAAuB,WAAW,IAAI,CAAC,MAAW,EAAE,QAAQ;AAClE,YAAM,qBAAqB,MAAM;AAAA,QAC/B,CAAC,SAAS,CAAC,qBAAqB,SAAS,IAAI;AAAA,MAC/C;AAEA,UAAI,iBAAiB;AACrB,iBAAW,oBAAoB,oBAAoB;AACjD,eAAO,EAAE,KAAK,gBAAgB,gBAAgB,EAAE;AAChD,cAAM,kBACJ,MAAM,OAAO,KAAK,KAAK,eAAe,gBAAgB,IACtD;AACF,cAAM,IAAe,IAAI,eAAe;AACxC,cAAM,EAAE,GAAG,GAAG,UAAU,CAAC;AACzB,cAAM,GAAG,SAAS;AAAA,UAChB,KAAK;AAAA,UACL,UAAU,CAAC,kBAAkB,YAAY;AAAA,QAC3C,CAAC;AACD;AAAA,MACF;AAEA,UAAI,mBAAmB,GAAG;AACxB,eAAO,EAAE,KAAK,uBAAuB;AACrC;AAAA,MACF;AAEA,aAAO,EAAE,KAAK,YAAY,cAAc,2BAA2B;AAAA,IACrE,CAAC;AAAA,EACH;AACF;AAEA,IAAI,EAAE,SAAS,cAAc;","names":[]}
+ {"version":3,"sources":["../../../../src/app/console/migrate/MigrateCommand.mts"],"sourcesContent":["import { cli, db as database, logger } from \"../../../facades.mjs\";\nimport { Command, Option } from \"clipanion\";\nimport { Blueprint } from \"@devbro/neko-sql\";\nimport { context_provider } from \"@devbro/neko-context\";\nimport path from \"path\";\nimport fs from \"fs/promises\";\nimport { config } from \"@devbro/neko-config\";\nimport { Migration } from \"@devbro/neko-sql\";\n\nexport class MigrateCommand extends Command {\n static paths = [[`migrate`]];\n\n fresh = Option.Boolean(`--fresh`, false, {\n description: `whether to delete and recreate database`,\n });\n\n refresh = Option.Boolean(`--refresh`, false, {\n description: `whether to drop all tables before running migrations by using rollback function`,\n });\n\n async execute() {\n await context_provider.run(async () => {\n // this.context.stdout.write(`Hello Migrate Command!\\n`);\n const db = database();\n const schema = db.getSchema();\n\n if (this.fresh) {\n throw new Error(\"not implemented\");\n }\n\n if (this.refresh) {\n logger().info(\"reverting all migrations!!\");\n // read all migrations and undo them all\n const existing_migrations = await db.runQuery({\n sql: \"select * from migrations order by created_at DESC\",\n bindings: [],\n });\n\n const migrationsDir = config.get(\"migration.path\");\n\n for (const migration_record of existing_migrations) {\n logger().info(`rolling back ${migration_record.filename}`);\n try {\n const MigrationClass = (\n await import(path.join(migrationsDir, migration_record.filename))\n ).default;\n const migrationInstance: Migration = new MigrationClass();\n\n // Call the down method to rollback the migration\n await migrationInstance.down(db.getSchema());\n\n // Remove the migration record from the migrations table\n await db.runQuery({\n sql: \"delete from migrations where filename = $1\",\n bindings: [migration_record.filename],\n });\n } catch (error) {\n logger().error(\n `Failed to rollback migration ${migration_record.filename}: ${error}`,\n );\n throw error;\n }\n }\n\n logger().info(\n `rolled back ${existing_migrations.length} migrations successfully!`,\n );\n }\n\n //create migration table if not exists\n if (!(await schema.tableExists(\"migrations\"))) {\n await schema.createTable(\"migrations\", (blueprint: Blueprint) => {\n blueprint.id();\n blueprint.timestamps();\n blueprint.string(\"filename\");\n blueprint.integer(\"batch\");\n });\n }\n\n const migrationsDir = config.get(\"migration.path\");\n let files: string[] = [];\n\n const dirEntries = await fs.readdir(migrationsDir);\n files = dirEntries\n .filter((entry) => entry.endsWith(\".ts\") || entry.endsWith(\".js\"))\n .sort();\n let batch_number = await db.runQuery({\n sql: \"select max(batch) as next_batch from migrations\",\n bindings: [],\n });\n batch_number = batch_number[0].next_batch || 0;\n batch_number++;\n\n const migrations = await db.runQuery({\n sql: \"select * from migrations order by created_at ASC\",\n bindings: [],\n });\n\n const completed_migrations = migrations.map((r: any) => r.filename);\n const pending_migrations = files.filter(\n (file) => !completed_migrations.includes(file),\n );\n\n let migrated_count = 0;\n for (const class_to_migrate of pending_migrations) {\n logger().info(`migrating up ${class_to_migrate}`);\n const ClassToMigrate = (\n await import(path.join(migrationsDir, class_to_migrate))\n ).default;\n const c: Migration = new ClassToMigrate();\n await c.up(db.getSchema());\n 
await db.runQuery({\n sql: \"insert into migrations (filename, batch) values ($1,$2)\",\n bindings: [class_to_migrate, batch_number],\n });\n migrated_count++;\n }\n\n if (migrated_count === 0) {\n logger().warn(\"no migrations to run!\");\n return;\n }\n\n logger().info(`migrated ${migrated_count} migrations successfully!`);\n });\n }\n}\n\ncli().register(MigrateCommand);\n"],"mappings":";;AAAA,SAAS,KAAK,MAAM,UAAU,cAAc;AAC5C,SAAS,SAAS,cAAc;AAEhC,SAAS,wBAAwB;AACjC,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,cAAc;AAGhB,MAAM,uBAAuB,QAAQ;AAAA,EAT5C,OAS4C;AAAA;AAAA;AAAA,EAC1C,OAAO,QAAQ,CAAC,CAAC,SAAS,CAAC;AAAA,EAE3B,QAAQ,OAAO,QAAQ,WAAW,OAAO;AAAA,IACvC,aAAa;AAAA,EACf,CAAC;AAAA,EAED,UAAU,OAAO,QAAQ,aAAa,OAAO;AAAA,IAC3C,aAAa;AAAA,EACf,CAAC;AAAA,EAED,MAAM,UAAU;AACd,UAAM,iBAAiB,IAAI,YAAY;AAErC,YAAM,KAAK,SAAS;AACpB,YAAM,SAAS,GAAG,UAAU;AAE5B,UAAI,KAAK,OAAO;AACd,cAAM,IAAI,MAAM,iBAAiB;AAAA,MACnC;AAEA,UAAI,KAAK,SAAS;AAChB,eAAO,EAAE,KAAK,4BAA4B;AAE1C,cAAM,sBAAsB,MAAM,GAAG,SAAS;AAAA,UAC5C,KAAK;AAAA,UACL,UAAU,CAAC;AAAA,QACb,CAAC;AAED,cAAMA,iBAAgB,OAAO,IAAI,gBAAgB;AAEjD,mBAAW,oBAAoB,qBAAqB;AAClD,iBAAO,EAAE,KAAK,gBAAgB,iBAAiB,QAAQ,EAAE;AACzD,cAAI;AACF,kBAAM,kBACJ,MAAM,OAAO,KAAK,KAAKA,gBAAe,iBAAiB,QAAQ,IAC/D;AACF,kBAAM,oBAA+B,IAAI,eAAe;AAGxD,kBAAM,kBAAkB,KAAK,GAAG,UAAU,CAAC;AAG3C,kBAAM,GAAG,SAAS;AAAA,cAChB,KAAK;AAAA,cACL,UAAU,CAAC,iBAAiB,QAAQ;AAAA,YACtC,CAAC;AAAA,UACH,SAAS,OAAO;AACd,mBAAO,EAAE;AAAA,cACP,gCAAgC,iBAAiB,QAAQ,KAAK,KAAK;AAAA,YACrE;AACA,kBAAM;AAAA,UACR;AAAA,QACF;AAEA,eAAO,EAAE;AAAA,UACP,eAAe,oBAAoB,MAAM;AAAA,QAC3C;AAAA,MACF;AAGA,UAAI,CAAE,MAAM,OAAO,YAAY,YAAY,GAAI;AAC7C,cAAM,OAAO,YAAY,cAAc,CAAC,cAAyB;AAC/D,oBAAU,GAAG;AACb,oBAAU,WAAW;AACrB,oBAAU,OAAO,UAAU;AAC3B,oBAAU,QAAQ,OAAO;AAAA,QAC3B,CAAC;AAAA,MACH;AAEA,YAAM,gBAAgB,OAAO,IAAI,gBAAgB;AACjD,UAAI,QAAkB,CAAC;AAEvB,YAAM,aAAa,MAAM,GAAG,QAAQ,aAAa;AACjD,cAAQ,WACL,OAAO,CAAC,UAAU,MAAM,SAAS,KAAK,KAAK,MAAM,SAAS,KAAK,CAAC,EAChE,KAAK;AACR,UAAI,eAAe,MAAM,GAAG,SAAS;AAAA,QACnC,KAAK;AAAA,QACL,UAAU,CAAC;AAAA,MACb,CAAC;AACD,qBAAe,aAAa,CAAC,EAAE,cAAc;AAC7C;AAEA,YAAM,aAAa,MAAM,GAAG,SAAS;AAAA,QACnC,KAAK;AAAA,QACL,UAAU,CAAC;AAAA,MACb,CAAC;AAED,YAAM,uBAAuB,WAAW,IAAI,CAAC,MAAW,EAAE,QAAQ;AAClE,YAAM,qBAAqB,MAAM;AAAA,QAC/B,CAAC,SAAS,CAAC,qBAAqB,SAAS,IAAI;AAAA,MAC/C;AAEA,UAAI,iBAAiB;AACrB,iBAAW,oBAAoB,oBAAoB;AACjD,eAAO,EAAE,KAAK,gBAAgB,gBAAgB,EAAE;AAChD,cAAM,kBACJ,MAAM,OAAO,KAAK,KAAK,eAAe,gBAAgB,IACtD;AACF,cAAM,IAAe,IAAI,eAAe;AACxC,cAAM,EAAE,GAAG,GAAG,UAAU,CAAC;AACzB,cAAM,GAAG,SAAS;AAAA,UAChB,KAAK;AAAA,UACL,UAAU,CAAC,kBAAkB,YAAY;AAAA,QAC3C,CAAC;AACD;AAAA,MACF;AAEA,UAAI,mBAAmB,GAAG;AACxB,eAAO,EAAE,KAAK,uBAAuB;AACrC;AAAA,MACF;AAEA,aAAO,EAAE,KAAK,YAAY,cAAc,2BAA2B;AAAA,IACrE,CAAC;AAAA,EACH;AACF;AAEA,IAAI,EAAE,SAAS,cAAc;","names":["migrationsDir"]}
@@ -2,7 +2,7 @@ import { Command } from 'clipanion';
 
  declare class MigrateRollbackCommand extends Command {
  static paths: string[][];
- steps: number | undefined;
+ steps: number;
  execute(): Promise<void>;
  }
 
@@ -1,6 +1,6 @@
  var __defProp = Object.defineProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
- import { cli, db as database } from "../../../facades.mjs";
+ import { cli, db as database, logger } from "../../../facades.mjs";
  import { Command, Option } from "clipanion";
  import { context_provider } from "@devbro/neko-context";
  import path from "path";
@@ -12,7 +12,7 @@ class MigrateRollbackCommand extends Command {
  __name(this, "MigrateRollbackCommand");
  }
  static paths = [[`migrate`, "rollback"]];
- steps = Option.String(`--steps`, {
+ steps = Option.String(`--steps`, "1", {
  description: `how many migrations to rollback`,
  validator: t.isNumber()
  });
@@ -25,13 +25,12 @@ class MigrateRollbackCommand extends Command {
  const dirEntries = await fs.readdir(migrationsDir);
  files = dirEntries.filter((entry) => entry.endsWith(".ts")).sort();
  const migrations = await db.runQuery({
- sql: "select * from migrations order by created_at DESC",
- bindings: []
+ sql: "select * from migrations order by created_at DESC limit $1",
+ bindings: [this.steps]
  });
- const count = 0;
  for (const migration of migrations) {
  const class_to_migrate = migration.filename;
- this.context.stdout.write(`rolling back ${class_to_migrate}`);
+ logger().info(`rolling back ${class_to_migrate}`);
  const ClassToMigrate = (await import(path.join(migrationsDir, class_to_migrate))).default;
  const c = new ClassToMigrate();
  await c.down(db.getSchema());
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/app/console/migrate/MigrateRollbackCommand.mts"],"sourcesContent":["import { cli, db as database } from \"../../../facades.mjs\";\nimport { Command, Option } from \"clipanion\";\nimport { context_provider } from \"@devbro/neko-context\";\nimport path from \"path\";\nimport fs from \"fs/promises\";\nimport { config } from \"@devbro/neko-config\";\nimport { Migration } from \"@devbro/neko-sql\";\nimport * as t from \"typanion\";\n\nexport class MigrateRollbackCommand extends Command {\n static paths = [[`migrate`, \"rollback\"]];\n\n steps = Option.String(`--steps`, {\n description: `how many migrations to rollback`,\n validator: t.isNumber(),\n });\n\n async execute() {\n await context_provider.run(async () => {\n // this.context.stdout.write(`Hello Migrate Command!\\n`);\n const db = database();\n const schema = db.getSchema();\n\n const migrationsDir = config.get(\"migration.path\");\n let files: string[] = [];\n\n const dirEntries = await fs.readdir(migrationsDir);\n files = dirEntries.filter((entry) => entry.endsWith(\".ts\")).sort();\n\n const migrations = await db.runQuery({\n sql: \"select * from migrations order by created_at DESC\",\n bindings: [],\n });\n\n const count = 0;\n for (const migration of migrations) {\n const class_to_migrate = migration.filename;\n this.context.stdout.write(`rolling back ${class_to_migrate}`);\n\n const ClassToMigrate = (\n await import(path.join(migrationsDir, class_to_migrate))\n ).default;\n\n const c: Migration = new ClassToMigrate();\n await c.down(db.getSchema());\n await db.runQuery({\n sql: \"delete from migrations where id = $1\",\n bindings: [migration.id],\n });\n }\n });\n }\n}\n\ncli().register(MigrateRollbackCommand);\n"],"mappings":";;AAAA,SAAS,KAAK,MAAM,gBAAgB;AACpC,SAAS,SAAS,cAAc;AAChC,SAAS,wBAAwB;AACjC,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,cAAc;AAEvB,YAAY,OAAO;AAEZ,MAAM,+BAA+B,QAAQ;AAAA,EATpD,OASoD;AAAA;AAAA;AAAA,EAClD,OAAO,QAAQ,CAAC,CAAC,WAAW,UAAU,CAAC;AAAA,EAEvC,QAAQ,OAAO,OAAO,WAAW;AAAA,IAC/B,aAAa;AAAA,IACb,WAAW,EAAE,SAAS;AAAA,EACxB,CAAC;AAAA,EAED,MAAM,UAAU;AACd,UAAM,iBAAiB,IAAI,YAAY;AAErC,YAAM,KAAK,SAAS;AACpB,YAAM,SAAS,GAAG,UAAU;AAE5B,YAAM,gBAAgB,OAAO,IAAI,gBAAgB;AACjD,UAAI,QAAkB,CAAC;AAEvB,YAAM,aAAa,MAAM,GAAG,QAAQ,aAAa;AACjD,cAAQ,WAAW,OAAO,CAAC,UAAU,MAAM,SAAS,KAAK,CAAC,EAAE,KAAK;AAEjE,YAAM,aAAa,MAAM,GAAG,SAAS;AAAA,QACnC,KAAK;AAAA,QACL,UAAU,CAAC;AAAA,MACb,CAAC;AAED,YAAM,QAAQ;AACd,iBAAW,aAAa,YAAY;AAClC,cAAM,mBAAmB,UAAU;AACnC,aAAK,QAAQ,OAAO,MAAM,gBAAgB,gBAAgB,EAAE;AAE5D,cAAM,kBACJ,MAAM,OAAO,KAAK,KAAK,eAAe,gBAAgB,IACtD;AAEF,cAAM,IAAe,IAAI,eAAe;AACxC,cAAM,EAAE,KAAK,GAAG,UAAU,CAAC;AAC3B,cAAM,GAAG,SAAS;AAAA,UAChB,KAAK;AAAA,UACL,UAAU,CAAC,UAAU,EAAE;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,IAAI,EAAE,SAAS,sBAAsB;","names":[]}
+ {"version":3,"sources":["../../../../src/app/console/migrate/MigrateRollbackCommand.mts"],"sourcesContent":["import { cli, db as database, logger } from \"../../../facades.mjs\";\nimport { Command, Option } from \"clipanion\";\nimport { context_provider } from \"@devbro/neko-context\";\nimport path from \"path\";\nimport fs from \"fs/promises\";\nimport { config } from \"@devbro/neko-config\";\nimport { Migration } from \"@devbro/neko-sql\";\nimport * as t from \"typanion\";\n\nexport class MigrateRollbackCommand extends Command {\n static paths = [[`migrate`, \"rollback\"]];\n\n steps = Option.String(`--steps`, \"1\", {\n description: `how many migrations to rollback`,\n validator: t.isNumber(),\n });\n\n async execute() {\n await context_provider.run(async () => {\n // this.context.stdout.write(`Hello Migrate Command!\\n`);\n const db = database();\n const schema = db.getSchema();\n\n const migrationsDir = config.get(\"migration.path\");\n let files: string[] = [];\n\n const dirEntries = await fs.readdir(migrationsDir);\n files = dirEntries.filter((entry) => entry.endsWith(\".ts\")).sort();\n\n const migrations = await db.runQuery({\n sql: \"select * from migrations order by created_at DESC limit $1\",\n bindings: [this.steps],\n });\n\n for (const migration of migrations) {\n const class_to_migrate = migration.filename;\n logger().info(`rolling back ${class_to_migrate}`);\n\n const ClassToMigrate = (\n await import(path.join(migrationsDir, class_to_migrate))\n ).default;\n\n const c: Migration = new ClassToMigrate();\n await c.down(db.getSchema());\n await db.runQuery({\n sql: \"delete from migrations where id = $1\",\n bindings: [migration.id],\n });\n }\n });\n }\n}\n\ncli().register(MigrateRollbackCommand);\n"],"mappings":";;AAAA,SAAS,KAAK,MAAM,UAAU,cAAc;AAC5C,SAAS,SAAS,cAAc;AAChC,SAAS,wBAAwB;AACjC,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,SAAS,cAAc;AAEvB,YAAY,OAAO;AAEZ,MAAM,+BAA+B,QAAQ;AAAA,EATpD,OASoD;AAAA;AAAA;AAAA,EAClD,OAAO,QAAQ,CAAC,CAAC,WAAW,UAAU,CAAC;AAAA,EAEvC,QAAQ,OAAO,OAAO,WAAW,KAAK;AAAA,IACpC,aAAa;AAAA,IACb,WAAW,EAAE,SAAS;AAAA,EACxB,CAAC;AAAA,EAED,MAAM,UAAU;AACd,UAAM,iBAAiB,IAAI,YAAY;AAErC,YAAM,KAAK,SAAS;AACpB,YAAM,SAAS,GAAG,UAAU;AAE5B,YAAM,gBAAgB,OAAO,IAAI,gBAAgB;AACjD,UAAI,QAAkB,CAAC;AAEvB,YAAM,aAAa,MAAM,GAAG,QAAQ,aAAa;AACjD,cAAQ,WAAW,OAAO,CAAC,UAAU,MAAM,SAAS,KAAK,CAAC,EAAE,KAAK;AAEjE,YAAM,aAAa,MAAM,GAAG,SAAS;AAAA,QACnC,KAAK;AAAA,QACL,UAAU,CAAC,KAAK,KAAK;AAAA,MACvB,CAAC;AAED,iBAAW,aAAa,YAAY;AAClC,cAAM,mBAAmB,UAAU;AACnC,eAAO,EAAE,KAAK,gBAAgB,gBAAgB,EAAE;AAEhD,cAAM,kBACJ,MAAM,OAAO,KAAK,KAAK,eAAe,gBAAgB,IACtD;AAEF,cAAM,IAAe,IAAI,eAAe;AACxC,cAAM,EAAE,KAAK,GAAG,UAAU,CAAC;AAC3B,cAAM,GAAG,SAAS;AAAA,UAChB,KAAK;AAAA,UACL,UAAU,CAAC,UAAU,EAAE;AAAA,QACzB,CAAC;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAEA,IAAI,EAAE,SAAS,sBAAsB;","names":[]}
@@ -3,13 +3,13 @@ import { Schema, Blueprint } from "@devbro/pashmak/sql";
 
  export default class {{className}} extends Migration {
  async up(schema: Schema) {
- // await schema.createTable("{{tableName}}", (table: Blueprint) => {
- // table.id();
- // table.timestamps();
- // });
+ await schema.createTable("{{tableName}}", (table: Blueprint) => {
+ table.id();
+ table.timestamps();
+ });
  }
 
  async down(schema: Schema) {
- // await schema.dropTable("{{tableName}}");
+ await schema.dropTableIfExists("{{tableName}}");
  }
  }
@@ -0,0 +1,9 @@
+ import { Command } from 'clipanion';
+
+ declare class GenerateMigrateCommand extends Command {
+ static paths: string[][];
+ name: string;
+ execute(): Promise<void>;
+ }
+
+ export { GenerateMigrateCommand };
@@ -0,0 +1,51 @@
+ var __defProp = Object.defineProperty;
+ var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
+ import { cli } from "../../../facades.mjs";
+ import { Command, Option } from "clipanion";
+ import { Case } from "change-case-all";
+ import path from "path";
+ import * as fs from "fs/promises";
+ import { config } from "@devbro/neko-config";
+ import handlebars from "handlebars";
+ import { fileURLToPath } from "url";
+ class GenerateMigrateCommand extends Command {
+ static {
+ __name(this, "GenerateMigrateCommand");
+ }
+ static paths = [[`generate`, `queue`, "migration"]];
+ name = Option.String({ required: true });
+ async execute() {
+ const date = /* @__PURE__ */ new Date();
+ const year = date.getFullYear();
+ const month = String(date.getMonth() + 1).padStart(2, "0");
+ const day = String(date.getDate()).padStart(2, "0");
+ const secondsOfDay = String(
+ date.getHours() * 3600 + date.getMinutes() * 60 + date.getSeconds()
+ ).padStart(5, "0");
+ const fixed_name = "queue_messages";
+ const filename = `${year}_${month}_${day}_${secondsOfDay}_${fixed_name}.ts`;
+ this.context.stdout.write(`creating migration file ${filename}
+ `);
+ await fs.mkdir(config.get("migration.path"), { recursive: true });
+ let dirname = typeof __dirname === "string" ? __dirname : void 0;
+ if (!dirname) {
+ dirname = path.dirname(fileURLToPath(import.meta.url));
+ }
+ const compiledTemplate = handlebars.compile(
+ (await fs.readFile(path.join(dirname, "./queue_migration.tpl"))).toString()
+ );
+ const template = await compiledTemplate({
+ className: Case.pascal(this.name) + "Migration",
+ tableName: Case.snake(this.name)
+ });
+ await fs.writeFile(
+ path.join(config.get("migration.path"), filename),
+ template
+ );
+ }
+ }
+ cli().register(GenerateMigrateCommand);
+ export {
+ GenerateMigrateCommand
+ };
+ //# sourceMappingURL=GenerateMigrateCommand.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../src/app/console/queue/GenerateMigrateCommand.mts"],"sourcesContent":["import { cli } from \"../../../facades.mjs\";\nimport { Command, Option } from \"clipanion\";\nimport { Case } from \"change-case-all\";\nimport path from \"path\";\nimport * as fs from \"fs/promises\";\nimport { config } from \"@devbro/neko-config\";\nimport handlebars from \"handlebars\";\nimport { fileURLToPath } from \"url\";\nimport { table } from \"console\";\n\nexport class GenerateMigrateCommand extends Command {\n static paths = [[`generate`, `queue`, \"migration\"]];\n\n name = Option.String({ required: true });\n\n async execute() {\n const date = new Date();\n const year = date.getFullYear();\n const month = String(date.getMonth() + 1).padStart(2, \"0\");\n const day = String(date.getDate()).padStart(2, \"0\");\n const secondsOfDay = String(\n date.getHours() * 3600 + date.getMinutes() * 60 + date.getSeconds(),\n ).padStart(5, \"0\");\n\n const fixed_name = \"queue_messages\";\n const filename = `${year}_${month}_${day}_${secondsOfDay}_${fixed_name}.ts`;\n this.context.stdout.write(`creating migration file ${filename}\\n`);\n\n await fs.mkdir(config.get(\"migration.path\"), { recursive: true });\n\n let dirname = typeof __dirname === \"string\" ? __dirname : undefined;\n if (!dirname) {\n dirname = path.dirname(fileURLToPath(import.meta.url));\n }\n\n const compiledTemplate = handlebars.compile(\n (\n await fs.readFile(path.join(dirname, \"./queue_migration.tpl\"))\n ).toString(),\n );\n const template = await compiledTemplate({\n className: Case.pascal(this.name) + \"Migration\",\n tableName: Case.snake(this.name),\n });\n\n await fs.writeFile(\n path.join(config.get(\"migration.path\"), filename),\n template,\n );\n }\n}\n\ncli().register(GenerateMigrateCommand);\n"],"mappings":";;AAAA,SAAS,WAAW;AACpB,SAAS,SAAS,cAAc;AAChC,SAAS,YAAY;AACrB,OAAO,UAAU;AACjB,YAAY,QAAQ;AACpB,SAAS,cAAc;AACvB,OAAO,gBAAgB;AACvB,SAAS,qBAAqB;AAGvB,MAAM,+BAA+B,QAAQ;AAAA,EAVpD,OAUoD;AAAA;AAAA;AAAA,EAClD,OAAO,QAAQ,CAAC,CAAC,YAAY,SAAS,WAAW,CAAC;AAAA,EAElD,OAAO,OAAO,OAAO,EAAE,UAAU,KAAK,CAAC;AAAA,EAEvC,MAAM,UAAU;AACd,UAAM,OAAO,oBAAI,KAAK;AACtB,UAAM,OAAO,KAAK,YAAY;AAC9B,UAAM,QAAQ,OAAO,KAAK,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AACzD,UAAM,MAAM,OAAO,KAAK,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAClD,UAAM,eAAe;AAAA,MACnB,KAAK,SAAS,IAAI,OAAO,KAAK,WAAW,IAAI,KAAK,KAAK,WAAW;AAAA,IACpE,EAAE,SAAS,GAAG,GAAG;AAEjB,UAAM,aAAa;AACnB,UAAM,WAAW,GAAG,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,YAAY,IAAI,UAAU;AACtE,SAAK,QAAQ,OAAO,MAAM,2BAA2B,QAAQ;AAAA,CAAI;AAEjE,UAAM,GAAG,MAAM,OAAO,IAAI,gBAAgB,GAAG,EAAE,WAAW,KAAK,CAAC;AAEhE,QAAI,UAAU,OAAO,cAAc,WAAW,YAAY;AAC1D,QAAI,CAAC,SAAS;AACZ,gBAAU,KAAK,QAAQ,cAAc,YAAY,GAAG,CAAC;AAAA,IACvD;AAEA,UAAM,mBAAmB,WAAW;AAAA,OAEhC,MAAM,GAAG,SAAS,KAAK,KAAK,SAAS,uBAAuB,CAAC,GAC7D,SAAS;AAAA,IACb;AACA,UAAM,WAAW,MAAM,iBAAiB;AAAA,MACtC,WAAW,KAAK,OAAO,KAAK,IAAI,IAAI;AAAA,MACpC,WAAW,KAAK,MAAM,KAAK,IAAI;AAAA,IACjC,CAAC;AAED,UAAM,GAAG;AAAA,MACP,KAAK,KAAK,OAAO,IAAI,gBAAgB,GAAG,QAAQ;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAI,EAAE,SAAS,sBAAsB;","names":[]}
@@ -0,0 +1,19 @@
+ import { Migration } from '@devbro/pashmak/sql';
+ import { Schema, Blueprint } from "@devbro/pashmak/sql";
+
+ export default class {{className}} extends Migration {
+ async up(schema: Schema) {
+ await schema.createTable("{{tableName}}", (table: Blueprint) => {
+ table.id();
+ table.timestamps();
+ table.string('channel');
+ table.text('message');
+ table.datetimeTz('last_tried_at').nullable(true);
+ table.text('process_message').default('');
+ });
+ }
+
+ async down(schema: Schema) {
+ await schema.dropTableIfExists("{{tableName}}");
+ }
+ }
@@ -41,6 +41,9 @@ var import_clipanion2 = require("clipanion");
  var import_neko_context = require("@devbro/neko-context");
  var import_errors = require("@devbro/neko-http/errors");
 
+ // ../neko-router/dist/CompiledRoute.mjs
+ var import_stream = require("stream");
+
  // ../neko-router/dist/Middleware.mjs
  var Middleware = class {
  static {
@@ -118,6 +121,9 @@ var CompiledRoute = class {
  if (typeof value.toJson === "function") {
  return traverse(value.toJson());
  }
+ if (typeof value.toJSON === "function") {
+ return traverse(value.toJSON());
+ }
  if (Array.isArray(value)) {
  return value.map(traverse);
  }
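
The added branch means a controller return value carrying a standard toJSON() method is now unwrapped by traverse() before serialization, matching the existing toJson() handling. A small hedged sketch of such a value; the class and its controller wiring are illustrative only:

    // Hypothetical DTO: objects like Date already expose toJSON(), and custom
    // classes can do the same to control how they are serialized.
    class UserDto {
      constructor(private name: string, private createdAt: Date) {}

      toJSON() {
        return { name: this.name, created_at: this.createdAt.toISOString() };
      }
    }

    // A controller that returns new UserDto("neko", new Date()) is now
    // unwrapped by traverse() before the body is converted to a string.
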
@@ -142,7 +148,7 @@ var CompiledRoute = class {
  }
  return String(obj);
  }
- processResponseBody(res, controller_rc) {
+ async processResponseBody(res, controller_rc) {
  if (controller_rc && res.writableEnded) {
  throw new Error("cannot write to response, response has already ended");
  }
@@ -151,18 +157,36 @@ var CompiledRoute = class {
  }
  if (controller_rc) {
  const header_content_type = res.getHeader("Content-Type");
- if (!header_content_type && typeof controller_rc === "object") {
+ if (controller_rc instanceof import_stream.Stream || Buffer.isBuffer(controller_rc)) {
+ await this.writeAsync(res, controller_rc);
+ res.end();
+ } else if (!header_content_type && typeof controller_rc === "object") {
  res.setHeader("Content-Type", "application/json");
+ res.end(this.convertToString(controller_rc));
  } else if (!header_content_type) {
  res.setHeader("Content-Type", "text/plain");
+ res.end(this.convertToString(controller_rc));
+ } else {
+ res.end(this.convertToString(controller_rc));
  }
- res.end(this.convertToString(controller_rc));
  return;
  } else {
  res.statusCode = [200].includes(res.statusCode) ? 204 : res.statusCode;
  res.end();
  }
  }
+ async writeAsync(res, chunk) {
+ return new Promise((resolve, reject) => {
+ const ok = res.write(chunk, (err) => {
+ if (err) reject(err);
+ });
+ if (ok) {
+ resolve(0);
+ } else {
+ res.once("drain", resolve);
+ }
+ });
+ }
  async runMiddlewares(middlewares, req, res) {
  let index = 0;
  const me = this;
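
With processResponseBody now async and stream-aware, a controller return value that is a Stream or Buffer is written out through writeAsync (which waits for 'drain' on back-pressure) instead of being passed to convertToString. A hedged sketch of a handler that benefits; the registration call mirrors Router.addRoute(methods, path, handler) as it appears in this bundle, while the file path and plain-function handler shape are assumptions:

    import { createReadStream } from "fs";

    // Hypothetical download handler: its return value becomes the
    // controller_rc that processResponseBody() receives, so returning a
    // Readable stream now streams the file to the client.
    const downloadReport = async () => {
      return createReadStream("./storage/report.pdf"); // assumed path
    };

    // Assumed wiring, based on Router.addRoute(methods, path, handler):
    // router().addRoute(["GET"], "/reports/latest", downloadReport);
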
@@ -220,7 +244,7 @@ var Route = class {
  i = start;
  } else if (char === "*") {
  let start = i + 1;
- while (start < path2.length && /[a-zA-Z0-9_]/.test(path2[start])) {
+ while (start < path2.length && /[a-zA-Z0-9_\.]/.test(path2[start])) {
  start++;
  }
  tokens.push({ type: "WILDCARD", value: path2.slice(i + 1, start) });
@@ -290,6 +314,10 @@ var Route = class {
  params: r.groups || {}
  };
  }
+ prependMiddleware(middlewares) {
+ this.middlewares = [].concat(middlewares, this.middlewares);
+ return this;
+ }
  addMiddleware(middlewares) {
  this.middlewares = this.middlewares.concat(middlewares);
  return this;
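
The new prependMiddleware() is the counterpart of the existing addMiddleware(): it places middleware at the front of a route's chain rather than appending it. A hedged usage sketch; the route and middleware names are placeholders:

    // Assuming `route` is a Route instance and both values follow the
    // package's Middleware shape; names are illustrative only.
    route
      .addMiddleware([logRequestMiddleware])   // appended: runs after existing middlewares
      .prependMiddleware([authMiddleware]);    // prepended: runs before everything above
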
@@ -304,6 +332,62 @@ var Route = class {
 
  // ../neko-router/dist/Router.mjs
  var import_path = __toESM(require("path"), 1);
+
+ // ../node_modules/url-join/lib/url-join.js
+ function normalize(strArray) {
+ var resultArray = [];
+ if (strArray.length === 0) {
+ return "";
+ }
+ if (typeof strArray[0] !== "string") {
+ throw new TypeError("Url must be a string. Received " + strArray[0]);
+ }
+ if (strArray[0].match(/^[^/:]+:\/*$/) && strArray.length > 1) {
+ var first = strArray.shift();
+ strArray[0] = first + strArray[0];
+ }
+ if (strArray[0].match(/^file:\/\/\//)) {
+ strArray[0] = strArray[0].replace(/^([^/:]+):\/*/, "$1:///");
+ } else {
+ strArray[0] = strArray[0].replace(/^([^/:]+):\/*/, "$1://");
+ }
+ for (var i = 0; i < strArray.length; i++) {
+ var component = strArray[i];
+ if (typeof component !== "string") {
+ throw new TypeError("Url must be a string. Received " + component);
+ }
+ if (component === "") {
+ continue;
+ }
+ if (i > 0) {
+ component = component.replace(/^[\/]+/, "");
+ }
+ if (i < strArray.length - 1) {
+ component = component.replace(/[\/]+$/, "");
+ } else {
+ component = component.replace(/[\/]+$/, "/");
+ }
+ resultArray.push(component);
+ }
+ var str = resultArray.join("/");
+ str = str.replace(/\/(\?|&|#[^!])/g, "$1");
+ var parts = str.split("?");
+ str = parts.shift() + (parts.length > 0 ? "?" : "") + parts.join("&");
+ return str;
+ }
+ __name(normalize, "normalize");
+ function urlJoin() {
+ var input;
+ if (typeof arguments[0] === "object") {
+ input = arguments[0];
+ } else {
+ input = [].slice.call(arguments);
+ }
+ return normalize(input);
+ }
+ __name(urlJoin, "urlJoin");
+
+ // ../neko-router/dist/Router.mjs
  var Router = class {
  static {
  __name(this, "Router");
@@ -328,6 +412,12 @@ var Router = class {
  }).addMiddleware([...controller.baseMiddlewares, ...route.middlewares]);
  }
  }
+ addRouter(path2, router2) {
+ for (const route of router2.routes) {
+ let path22 = urlJoin("/", path2, route.path);
+ this.addRoute(route.methods, path22, route.handler).addMiddleware(router2.getMiddlewares()).addMiddleware(route.getMiddlewares());
+ }
+ }
  addGlobalMiddleware(middlewares) {
  this.middlewares = this.middlewares.concat(middlewares);
  }
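
The new addRouter() mounts every route of a child router under a path prefix (joined with the bundled urlJoin) and carries over both the child router's middlewares and each route's own middlewares. A hedged sketch, assuming Router is importable from @devbro/neko-router and that a plain async function is an acceptable handler:

    import { Router } from "@devbro/neko-router"; // assumed export

    const apiRouter = new Router();
    apiRouter.addRoute(["GET"], "/users", async () => ({ users: [] }));

    const mainRouter = new Router();
    // Re-registers each child route as /api/<path>, so GET /api/users resolves,
    // with apiRouter's middlewares added before each route's own middlewares.
    mainRouter.addRouter("/api", apiRouter);
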
@@ -368,7 +458,7 @@ var import_neko_scheduler = require("@devbro/neko-scheduler");
  var import_neko_helper = require("@devbro/neko-helper");
  var import_neko_context2 = require("@devbro/neko-context");
  var import_neko_storage = require("@devbro/neko-storage");
- var import_neko_mailer = require("@devbro/neko-mailer");
+ var import_neko_mailer2 = require("@devbro/neko-mailer");
  var import_neko_config = require("@devbro/neko-config");
  var import_clipanion = require("clipanion");
 
@@ -379,6 +469,153 @@ __reExport(http_exports, require("@devbro/neko-http"));
  // src/facades.mts
  var yup = __toESM(require("yup"), 1);
  var import_neko_logger = require("@devbro/neko-logger");
+
+ // src/factories.mts
+ var import_neko_mailer = require("@devbro/neko-mailer");
+ var import_neko_queue = require("@devbro/neko-queue");
+ var import_neko_queue2 = require("@devbro/neko-queue");
+
+ // src/queue.mts
+ var queue_exports = {};
+ __export(queue_exports, {
+ DatabaseTransport: () => DatabaseTransport
+ });
+ __reExport(queue_exports, require("@devbro/neko-queue"));
+ var import_neko_sql = require("@devbro/neko-sql");
+ var DatabaseTransport = class {
+ // default to 100 messages per fetch
+ constructor(db_config) {
+ this.db_config = db_config;
+ }
+ static {
+ __name(this, "DatabaseTransport");
+ }
+ listenInterval = 6e4;
+ // default to 1 minute
+ messageLimit = 100;
+ setListenInterval(interval) {
+ this.listenInterval = interval;
+ }
+ setMessageLimit(limit) {
+ this.messageLimit = limit;
+ }
+ async dispatch(channel, message) {
+ const conn = new import_neko_sql.PostgresqlConnection(this.db_config);
+ try {
+ await conn.connect();
+ let q = conn.getQuery();
+ await q.table("queue_messages").insert({
+ channel,
+ message,
+ processed: false,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date(),
+ last_tried_at: null,
+ process_message: ""
+ });
+ } finally {
+ await conn.disconnect();
+ }
+ }
+ async listen(channel, callback) {
+ return new Promise(async (resolve, reject) => {
+ setInterval(async () => {
+ const conn = new import_neko_sql.PostgresqlConnection(this.db_config);
+ try {
+ await conn.connect();
+ let q = conn.getQuery();
+ let messages = await q.table("queue_messages").whereOp("channel", "=", channel).whereOp("processed", "=", false).limit(this.messageLimit).orderBy("last_tried_at", "asc").get();
+ for (let msg of messages) {
+ try {
+ await callback(msg.message);
+ await q.table("queue_messages").whereOp("id", "=", msg.id).update({
+ processed: true,
+ updated_at: /* @__PURE__ */ new Date()
+ });
+ } catch (error) {
+ await q.table("queue_messages").whereOp("id", "=", msg.id).update({
+ processed: false,
+ last_tried_at: /* @__PURE__ */ new Date(),
+ process_message: error.message || "Error processing message"
+ });
+ }
+ }
+ } finally {
+ await conn.disconnect();
+ }
+ }, this.listenInterval);
+ });
+ }
+ };
+
+ // src/factories.mts
+ var FlexibleFactory = class {
+ static {
+ __name(this, "FlexibleFactory");
+ }
+ registry = /* @__PURE__ */ new Map();
+ register(key, ctor) {
+ this.registry.set(key, ctor);
+ }
+ create(key, ...args) {
+ const ctor = this.registry.get(key);
+ if (!ctor) {
+ throw new Error(`No factory registered for key: ${key}`);
+ }
+ return new ctor(...args);
+ }
+ };
+ var MailerFactory = class _MailerFactory {
+ static {
+ __name(this, "MailerFactory");
+ }
+ static instance = new FlexibleFactory();
+ static register(key, factory) {
+ _MailerFactory.instance.register(key, factory);
+ }
+ static create(key, ...args) {
+ return _MailerFactory.instance.create(key, ...args);
+ }
+ };
+ MailerFactory.register("logger", (opt) => {
+ return new import_neko_mailer.FunctionProvider((mail) => {
+ logger().info({
+ msg: "Sending email",
+ mail
+ });
+ });
+ });
+ MailerFactory.register("SES", (opt) => {
+ return new import_neko_mailer.SESProvider(opt);
+ });
+ MailerFactory.register("SMTP", (opt) => {
+ return new import_neko_mailer.SMTPProvider(opt);
+ });
+ MailerFactory.register("MEMORY", (opt) => {
+ return new import_neko_mailer.MemoryProvider();
+ });
+ var QueueFactory = class _QueueFactory {
+ static {
+ __name(this, "QueueFactory");
+ }
+ static instance = new FlexibleFactory();
+ static register(key, factory) {
+ _QueueFactory.instance.register(key, factory);
+ }
+ static create(key, ...args) {
+ return _QueueFactory.instance.create(key, ...args);
+ }
+ };
+ QueueFactory.register("database", (opt) => {
+ let transport = new DatabaseTransport(opt);
+ return new import_neko_queue.QueueConnection(transport);
+ });
+ QueueFactory.register("memory", (opt) => {
+ let transport = new import_neko_queue2.MemoryTransport(opt);
+ return new import_neko_queue.QueueConnection(transport);
+ });
+
+ // src/facades.mts
  var router = (0, import_neko_helper.createSingleton)(() => new Router());
  var scheduler = (0, import_neko_helper.createSingleton)(() => {
  const rc = new import_neko_scheduler.Scheduler();
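
The DatabaseTransport added here polls a queue_messages table (the one created by the new generate queue migration command) on an interval, marks handled rows as processed, and records failures in last_tried_at / process_message. A hedged usage sketch; the @devbro/pashmak/queue import path and the PostgresqlConnection option keys are assumptions:

    import { DatabaseTransport } from "@devbro/pashmak/queue"; // assumed subpath for the new dist/queue.mjs

    // Options are handed straight to PostgresqlConnection; exact keys assumed.
    const transport = new DatabaseTransport({
      host: "localhost",
      port: 5432,
      database: "app",
      user: "app",
      password: "secret",
    });

    transport.setListenInterval(5000); // poll every 5s instead of the 60s default
    transport.setMessageLimit(10);     // fetch at most 10 unprocessed rows per poll

    await transport.dispatch("emails", JSON.stringify({ to: "someone@example.com" }));

    // Starts the polling loop; the returned promise intentionally stays pending.
    transport.listen("emails", async (message) => {
      console.log("processing", message);
    });
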
@@ -448,27 +685,19 @@ var logger = (0, import_neko_helper.createSingleton)((label) => {
  });
  var mailer = (0, import_neko_helper.createSingleton)((label) => {
  const mailer_config = import_neko_config.config.get(["mailer", label].join("."));
- let provider;
- if (mailer_config.provider === "logger") {
- provider = new import_neko_mailer.FunctionProvider((mail) => {
- logger().info({
- msg: "Sending email",
- mail
- });
- });
- } else if (mailer_config.provider === "SES") {
- provider = new import_neko_mailer.SESProvider(mailer_config.config);
- } else if (mailer_config.provider === "SMTP") {
- provider = new import_neko_mailer.SMTPProvider(mailer_config.config);
- } else if (mailer_config.provider === "MEMORY") {
- provider = new import_neko_mailer.MemoryProvider();
- }
- if (!provider) {
- throw new Error(
- `cannot initiate mailer provider: ${mailer_config?.provider}`
- );
+ let provider = MailerFactory.create(
+ mailer_config.provider,
+ mailer_config.config
+ );
+ const rc = new import_neko_mailer2.Mailer(provider);
+ return rc;
+ });
+ var queue = (0, import_neko_helper.createSingleton)(async (label) => {
+ const queue_config = import_neko_config.config.get(["queues", label].join("."));
+ if (!queue_config) {
+ throw new Error(`Queue configuration for '${label}' not found`);
  }
- const rc = new import_neko_mailer.Mailer(provider);
+ const rc = await QueueFactory.create(queue_config.type, queue_config);
  return rc;
  });
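
Because the mailer singleton now resolves its provider through MailerFactory (and the new queue singleton through QueueFactory), an application can register extra providers before the facades are first used. A hedged sketch; the @devbro/pashmak/factories import path and the "noop" key are assumptions:

    import { MailerFactory } from "@devbro/pashmak/factories"; // assumed subpath for the new dist/factories.mjs
    import { FunctionProvider } from "@devbro/neko-mailer";

    // With config `mailer.<label>.provider = "noop"`, mailer(label) would now
    // resolve through this entry instead of the removed if/else chain.
    MailerFactory.register("noop", (opt) => {
      return new FunctionProvider((mail) => {
        // intentionally discard the mail; illustrative only
      });
    });
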