@devbro/pashmak 0.1.25 → 0.1.27

This diff shows the changes between publicly released versions of the package as they appear in its public registry, and is provided for informational purposes only.
@@ -28,6 +28,7 @@ class MigrateCommand extends Command {
  logger().info("reverting all migrations!!");
  const existing_migrations = await db.runQuery({
  sql: "select * from migrations order by created_at DESC",
+ parts: [],
  bindings: []
  });
  const migrationsDir2 = config.get("migration.path");
@@ -39,6 +40,7 @@ class MigrateCommand extends Command {
  await migrationInstance.down(db.getSchema());
  await db.runQuery({
  sql: "delete from migrations where filename = $1",
+ parts: [],
  bindings: [migration_record.filename]
  });
  } catch (error) {
@@ -66,12 +68,14 @@ class MigrateCommand extends Command {
  files = dirEntries.filter((entry) => entry.endsWith(".ts") || entry.endsWith(".js")).sort();
  let batch_number = await db.runQuery({
  sql: "select max(batch) as next_batch from migrations",
+ parts: [],
  bindings: []
  });
  batch_number = batch_number[0].next_batch || 0;
  batch_number++;
  const migrations = await db.runQuery({
  sql: "select * from migrations order by created_at ASC",
+ parts: [],
  bindings: []
  });
  const completed_migrations = migrations.map((r) => r.filename);
@@ -86,6 +90,7 @@ class MigrateCommand extends Command {
  await c.up(db.getSchema());
  await db.runQuery({
  sql: "insert into migrations (filename, batch) values ($1,$2)",
+ parts: [],
  bindings: [class_to_migrate, batch_number]
  });
  migrated_count++;
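
Note: every db.runQuery() call touched in this diff now passes a parts array alongside sql and bindings, which suggests the query object accepted by @devbro/neko-sql gained a parts field. The exact type is not published in this diff; the sketch below is only an illustrative assumption of the new call shape.

    // Assumed shape of the argument accepted by db.runQuery() as of 0.1.27.
    // The interface name and field types are illustrative, not taken from the package.
    interface RunQueryParams {
      sql: string;          // raw SQL using $1, $2, ... placeholders
      parts: unknown[];     // new field; the migrate commands always pass []
      bindings: unknown[];  // positional values for the placeholders
    }

    // Mirrors one of the calls added in this diff:
    const params: RunQueryParams = {
      sql: "select * from migrations order by created_at DESC",
      parts: [],
      bindings: [],
    };
    // await db.runQuery(params);
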
@@ -1 +1 @@
- (previous generated source map for src/app/console/migrate/MigrateCommand.mts)
+ (regenerated source map; the embedded sourcesContent and mappings change only to reflect the parts: [] additions shown above)
@@ -26,6 +26,7 @@ class MigrateRollbackCommand extends Command {
  files = dirEntries.filter((entry) => entry.endsWith(".ts")).sort();
  const migrations = await db.runQuery({
  sql: "select * from migrations order by created_at DESC limit $1",
+ parts: [],
  bindings: [this.steps]
  });
  for (const migration of migrations) {
@@ -36,6 +37,7 @@ class MigrateRollbackCommand extends Command {
  await c.down(db.getSchema());
  await db.runQuery({
  sql: "delete from migrations where id = $1",
+ parts: [],
  bindings: [migration.id]
  });
  }
@@ -1 +1 @@
- (previous generated source map for src/app/console/migrate/MigrateRollbackCommand.mts)
+ (regenerated source map; the embedded sourcesContent and mappings change only to reflect the parts: [] additions shown above)
@@ -115,6 +115,9 @@ var CompiledRoute = class {
  }
  prepareOutputJsonFormat(obj) {
  function traverse(value) {
+ if (value === void 0 || value === null) {
+ return null;
+ }
  if (!value || typeof value !== "object") {
  return value;
  }
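
The new guard normalizes undefined (void 0) and null to null before the existing falsy check can return undefined untouched, presumably so that missing fields serialize as explicit nulls in JSON output. A minimal standalone sketch of the added behavior, with the object/array recursion of the real traverse() elided:

    // Simplified model of the guard added to traverse() in prepareOutputJsonFormat.
    function traverse(value: unknown): unknown {
      if (value === undefined || value === null) {
        return null; // undefined and null both become null
      }
      if (typeof value !== "object") {
        return value; // primitives pass through unchanged
      }
      // ... recurse into arrays and objects as in the original implementation
      return value;
    }

    // traverse(undefined) === null, traverse(null) === null, traverse(0) === 0
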
@@ -491,8 +494,10 @@ var DatabaseTransport = class {
  db_connection: "default",
  listen_interval: 60,
  // seconds
- message_limit: 10
+ message_limit: 10,
  // messages per each fetch
+ max_retry_count: 5
+ // maximum retry count for failed messages
  };
  channels = /* @__PURE__ */ new Map();
  messageQueues = [];
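
The transport's defaults pick up a max_retry_count setting next to the existing message_limit. A sketch of the resulting default config object (field meanings are taken from the inline comments in the diff; nothing beyond these four keys is implied):

    // Default DatabaseTransport configuration after this change.
    const defaults = {
      db_connection: "default",
      listen_interval: 60,  // seconds between fetches
      message_limit: 10,    // messages per each fetch
      max_retry_count: 5,   // maximum retry count for failed messages
    };
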
@@ -501,24 +506,31 @@ var DatabaseTransport = class {
  await import_neko_context2.context_provider.run(async () => {
  const conn = db(this.config.db_connection);
  try {
- await conn.connect();
  let q = conn.getQuery();
- let messages = await q.table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
+ let messages = await conn.getQuery().table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).whereOp("retried_count", "<", this.config.max_retry_count).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
  for (let msg of messages) {
  try {
- let callback = this.channels.get(msg.channel);
- await callback(msg.message);
- await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
- status: "processed",
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processing",
  updated_at: /* @__PURE__ */ new Date(),
  last_tried_at: /* @__PURE__ */ new Date(),
  retried_count: (msg.retried_count || 0) + 1
  });
+ let callback = this.channels.get(msg.channel);
+ await callback(msg.message);
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processed",
+ updated_at: /* @__PURE__ */ new Date()
+ });
  } catch (error) {
+ logger().error("Error processing message:", {
+ error,
+ message_id: msg.id,
+ channel: msg.channel
+ });
  await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
  status: "failed",
- last_tried_at: /* @__PURE__ */ new Date(),
- retried_count: (msg.retried_count || 0) + 1,
+ updated_at: /* @__PURE__ */ new Date(),
  process_message: error.message || "Error processing message"
  });
  }
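
Message handling now claims a row before running its callback: the row is set to "processing" and its retry counter is bumped, the channel callback runs, and only then is the row marked "processed". Failures are logged (with message id and channel) and marked "failed", and the fetch query skips rows whose retried_count has reached max_retry_count. A sketch of that lifecycle under stated assumptions (the markStatus helper and the types are illustrative, not part of the package API):

    type QueueStatus = "pending" | "processing" | "processed" | "failed";

    interface QueueMessage {
      id: number;
      channel: string;
      message: unknown;
      retried_count?: number;
    }

    // markStatus stands in for the conn.getQuery()...update() calls shown in the diff.
    async function processOnce(
      msg: QueueMessage,
      callback: (payload: unknown) => Promise<void>,
      markStatus: (id: number, patch: Record<string, unknown>) => Promise<void>,
    ): Promise<void> {
      // 1. claim the message and bump its retry counter before running the handler
      await markStatus(msg.id, {
        status: "processing",
        updated_at: new Date(),
        last_tried_at: new Date(),
        retried_count: (msg.retried_count || 0) + 1,
      });
      try {
        // 2. run the registered channel callback
        await callback(msg.message);
        // 3. success: mark the row processed
        await markStatus(msg.id, { status: "processed", updated_at: new Date() });
      } catch (error: any) {
        // 4. failure: record the error (the real code also logs it);
        //    rows that hit max_retry_count are skipped by the fetch query
        await markStatus(msg.id, {
          status: "failed",
          updated_at: new Date(),
          process_message: error.message || "Error processing message",
        });
      }
    }
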
@@ -539,27 +551,21 @@ var DatabaseTransport = class {
  }
  async dispatch(channel, message) {
  const conn = db(this.config.db_connection);
- try {
- await conn.connect();
- let schema = conn.getSchema();
- if (await schema.tableExists(this.config.queue_table) === false) {
- return;
- }
- let q = conn.getQuery();
- await q.table(this.config.queue_table).insert({
- channel,
- message,
- processed: false,
- created_at: /* @__PURE__ */ new Date(),
- updated_at: /* @__PURE__ */ new Date(),
- last_tried_at: null,
- process_message: "",
- retried_count: 0,
- status: "pending"
- });
- } finally {
- await conn.disconnect();
+ let schema = conn.getSchema();
+ if (await schema.tableExists(this.config.queue_table) === false) {
+ return;
  }
+ let q = conn.getQuery();
+ await q.table(this.config.queue_table).insert({
+ channel,
+ message,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date(),
+ last_tried_at: null,
+ process_message: "",
+ retried_count: 0,
+ status: "pending"
+ });
  }
  async registerListener(channel, callback) {
  this.channels.set(channel, callback);
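
dispatch() no longer opens and closes its own connection (the try/finally around conn.connect() and conn.disconnect() is gone, so connection lifecycle is presumably managed by the db() facade or the surrounding context), and it no longer writes the redundant processed flag; the inserted row is driven entirely by the status column. Inferred shape of the queued row, based on the insert() call above (column types are assumptions):

    // Columns written by dispatch() for a new queue row (types are illustrative).
    interface QueueRow {
      channel: string;
      message: unknown;
      created_at: Date;
      updated_at: Date;
      last_tried_at: Date | null;
      process_message: string;
      retried_count: number;
      status: "pending" | "processing" | "processed" | "failed";
    }

The four hunks above repeat below, apparently for the package's other bundled outputs; apart from shifted line offsets the changes are identical (file names are not preserved in this extract).
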
@@ -118,6 +118,9 @@ var CompiledRoute = class {
  }
  prepareOutputJsonFormat(obj) {
  function traverse(value) {
+ if (value === void 0 || value === null) {
+ return null;
+ }
  if (!value || typeof value !== "object") {
  return value;
  }
@@ -494,8 +497,10 @@ var DatabaseTransport = class {
  db_connection: "default",
  listen_interval: 60,
  // seconds
- message_limit: 10
+ message_limit: 10,
  // messages per each fetch
+ max_retry_count: 5
+ // maximum retry count for failed messages
  };
  channels = /* @__PURE__ */ new Map();
  messageQueues = [];
@@ -504,24 +509,31 @@ var DatabaseTransport = class {
  await import_neko_context2.context_provider.run(async () => {
  const conn = db(this.config.db_connection);
  try {
- await conn.connect();
  let q = conn.getQuery();
- let messages = await q.table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
+ let messages = await conn.getQuery().table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).whereOp("retried_count", "<", this.config.max_retry_count).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
  for (let msg of messages) {
  try {
- let callback = this.channels.get(msg.channel);
- await callback(msg.message);
- await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
- status: "processed",
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processing",
  updated_at: /* @__PURE__ */ new Date(),
  last_tried_at: /* @__PURE__ */ new Date(),
  retried_count: (msg.retried_count || 0) + 1
  });
+ let callback = this.channels.get(msg.channel);
+ await callback(msg.message);
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processed",
+ updated_at: /* @__PURE__ */ new Date()
+ });
  } catch (error) {
+ logger().error("Error processing message:", {
+ error,
+ message_id: msg.id,
+ channel: msg.channel
+ });
  await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
  status: "failed",
- last_tried_at: /* @__PURE__ */ new Date(),
- retried_count: (msg.retried_count || 0) + 1,
+ updated_at: /* @__PURE__ */ new Date(),
  process_message: error.message || "Error processing message"
  });
  }
@@ -542,27 +554,21 @@ var DatabaseTransport = class {
  }
  async dispatch(channel, message) {
  const conn = db(this.config.db_connection);
- try {
- await conn.connect();
- let schema = conn.getSchema();
- if (await schema.tableExists(this.config.queue_table) === false) {
- return;
- }
- let q = conn.getQuery();
- await q.table(this.config.queue_table).insert({
- channel,
- message,
- processed: false,
- created_at: /* @__PURE__ */ new Date(),
- updated_at: /* @__PURE__ */ new Date(),
- last_tried_at: null,
- process_message: "",
- retried_count: 0,
- status: "pending"
- });
- } finally {
- await conn.disconnect();
+ let schema = conn.getSchema();
+ if (await schema.tableExists(this.config.queue_table) === false) {
+ return;
  }
+ let q = conn.getQuery();
+ await q.table(this.config.queue_table).insert({
+ channel,
+ message,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date(),
+ last_tried_at: null,
+ process_message: "",
+ retried_count: 0,
+ status: "pending"
+ });
  }
  async registerListener(channel, callback) {
  this.channels.set(channel, callback);
@@ -116,6 +116,9 @@ var CompiledRoute = class {
  }
  prepareOutputJsonFormat(obj) {
  function traverse(value) {
+ if (value === void 0 || value === null) {
+ return null;
+ }
  if (!value || typeof value !== "object") {
  return value;
  }
@@ -492,8 +495,10 @@ var DatabaseTransport = class {
  db_connection: "default",
  listen_interval: 60,
  // seconds
- message_limit: 10
+ message_limit: 10,
  // messages per each fetch
+ max_retry_count: 5
+ // maximum retry count for failed messages
  };
  channels = /* @__PURE__ */ new Map();
  messageQueues = [];
@@ -502,24 +507,31 @@ var DatabaseTransport = class {
  await import_neko_context2.context_provider.run(async () => {
  const conn = db(this.config.db_connection);
  try {
- await conn.connect();
  let q = conn.getQuery();
- let messages = await q.table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
+ let messages = await conn.getQuery().table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).whereOp("retried_count", "<", this.config.max_retry_count).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
  for (let msg of messages) {
  try {
- let callback = this.channels.get(msg.channel);
- await callback(msg.message);
- await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
- status: "processed",
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processing",
  updated_at: /* @__PURE__ */ new Date(),
  last_tried_at: /* @__PURE__ */ new Date(),
  retried_count: (msg.retried_count || 0) + 1
  });
+ let callback = this.channels.get(msg.channel);
+ await callback(msg.message);
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processed",
+ updated_at: /* @__PURE__ */ new Date()
+ });
  } catch (error) {
+ logger().error("Error processing message:", {
+ error,
+ message_id: msg.id,
+ channel: msg.channel
+ });
  await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
  status: "failed",
- last_tried_at: /* @__PURE__ */ new Date(),
- retried_count: (msg.retried_count || 0) + 1,
+ updated_at: /* @__PURE__ */ new Date(),
  process_message: error.message || "Error processing message"
  });
  }
@@ -540,27 +552,21 @@ var DatabaseTransport = class {
  }
  async dispatch(channel, message) {
  const conn = db(this.config.db_connection);
- try {
- await conn.connect();
- let schema = conn.getSchema();
- if (await schema.tableExists(this.config.queue_table) === false) {
- return;
- }
- let q = conn.getQuery();
- await q.table(this.config.queue_table).insert({
- channel,
- message,
- processed: false,
- created_at: /* @__PURE__ */ new Date(),
- updated_at: /* @__PURE__ */ new Date(),
- last_tried_at: null,
- process_message: "",
- retried_count: 0,
- status: "pending"
- });
- } finally {
- await conn.disconnect();
+ let schema = conn.getSchema();
+ if (await schema.tableExists(this.config.queue_table) === false) {
+ return;
  }
+ let q = conn.getQuery();
+ await q.table(this.config.queue_table).insert({
+ channel,
+ message,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date(),
+ last_tried_at: null,
+ process_message: "",
+ retried_count: 0,
+ status: "pending"
+ });
  }
  async registerListener(channel, callback) {
  this.channels.set(channel, callback);
@@ -114,6 +114,9 @@ var CompiledRoute = class {
  }
  prepareOutputJsonFormat(obj) {
  function traverse(value) {
+ if (value === void 0 || value === null) {
+ return null;
+ }
  if (!value || typeof value !== "object") {
  return value;
  }
@@ -490,8 +493,10 @@ var DatabaseTransport = class {
  db_connection: "default",
  listen_interval: 60,
  // seconds
- message_limit: 10
+ message_limit: 10,
  // messages per each fetch
+ max_retry_count: 5
+ // maximum retry count for failed messages
  };
  channels = /* @__PURE__ */ new Map();
  messageQueues = [];
@@ -500,24 +505,31 @@ var DatabaseTransport = class {
  await import_neko_context2.context_provider.run(async () => {
  const conn = db(this.config.db_connection);
  try {
- await conn.connect();
  let q = conn.getQuery();
- let messages = await q.table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
+ let messages = await conn.getQuery().table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).whereOp("retried_count", "<", this.config.max_retry_count).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
  for (let msg of messages) {
  try {
- let callback = this.channels.get(msg.channel);
- await callback(msg.message);
- await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
- status: "processed",
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processing",
  updated_at: /* @__PURE__ */ new Date(),
  last_tried_at: /* @__PURE__ */ new Date(),
  retried_count: (msg.retried_count || 0) + 1
  });
+ let callback = this.channels.get(msg.channel);
+ await callback(msg.message);
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processed",
+ updated_at: /* @__PURE__ */ new Date()
+ });
  } catch (error) {
+ logger().error("Error processing message:", {
+ error,
+ message_id: msg.id,
+ channel: msg.channel
+ });
  await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
  status: "failed",
- last_tried_at: /* @__PURE__ */ new Date(),
- retried_count: (msg.retried_count || 0) + 1,
+ updated_at: /* @__PURE__ */ new Date(),
  process_message: error.message || "Error processing message"
  });
  }
@@ -538,27 +550,21 @@ var DatabaseTransport = class {
  }
  async dispatch(channel, message) {
  const conn = db(this.config.db_connection);
- try {
- await conn.connect();
- let schema = conn.getSchema();
- if (await schema.tableExists(this.config.queue_table) === false) {
- return;
- }
- let q = conn.getQuery();
- await q.table(this.config.queue_table).insert({
- channel,
- message,
- processed: false,
- created_at: /* @__PURE__ */ new Date(),
- updated_at: /* @__PURE__ */ new Date(),
- last_tried_at: null,
- process_message: "",
- retried_count: 0,
- status: "pending"
- });
- } finally {
- await conn.disconnect();
+ let schema = conn.getSchema();
+ if (await schema.tableExists(this.config.queue_table) === false) {
+ return;
  }
+ let q = conn.getQuery();
+ await q.table(this.config.queue_table).insert({
+ channel,
+ message,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date(),
+ last_tried_at: null,
+ process_message: "",
+ retried_count: 0,
+ status: "pending"
+ });
  }
  async registerListener(channel, callback) {
  this.channels.set(channel, callback);
@@ -114,6 +114,9 @@ var CompiledRoute = class {
  }
  prepareOutputJsonFormat(obj) {
  function traverse(value) {
+ if (value === void 0 || value === null) {
+ return null;
+ }
  if (!value || typeof value !== "object") {
  return value;
  }
@@ -490,8 +493,10 @@ var DatabaseTransport = class {
  db_connection: "default",
  listen_interval: 60,
  // seconds
- message_limit: 10
+ message_limit: 10,
  // messages per each fetch
+ max_retry_count: 5
+ // maximum retry count for failed messages
  };
  channels = /* @__PURE__ */ new Map();
  messageQueues = [];
@@ -500,24 +505,31 @@ var DatabaseTransport = class {
  await import_neko_context2.context_provider.run(async () => {
  const conn = db(this.config.db_connection);
  try {
- await conn.connect();
  let q = conn.getQuery();
- let messages = await q.table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
+ let messages = await conn.getQuery().table(this.config.queue_table).whereOp("channel", "in", Array.from(this.channels.keys())).whereOp("status", "in", ["pending", "failed"]).whereOp("retried_count", "<", this.config.max_retry_count).limit(this.config.message_limit).orderBy("last_tried_at", "asc").get();
  for (let msg of messages) {
  try {
- let callback = this.channels.get(msg.channel);
- await callback(msg.message);
- await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
- status: "processed",
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processing",
  updated_at: /* @__PURE__ */ new Date(),
  last_tried_at: /* @__PURE__ */ new Date(),
  retried_count: (msg.retried_count || 0) + 1
  });
+ let callback = this.channels.get(msg.channel);
+ await callback(msg.message);
+ await conn.getQuery().table(this.config.queue_table).whereOp("id", "=", msg.id).update({
+ status: "processed",
+ updated_at: /* @__PURE__ */ new Date()
+ });
  } catch (error) {
+ logger().error("Error processing message:", {
+ error,
+ message_id: msg.id,
+ channel: msg.channel
+ });
  await q.table(this.config.queue_table).whereOp("id", "=", msg.id).update({
  status: "failed",
- last_tried_at: /* @__PURE__ */ new Date(),
- retried_count: (msg.retried_count || 0) + 1,
+ updated_at: /* @__PURE__ */ new Date(),
  process_message: error.message || "Error processing message"
  });
  }
@@ -538,27 +550,21 @@ var DatabaseTransport = class {
  }
  async dispatch(channel, message) {
  const conn = db(this.config.db_connection);
- try {
- await conn.connect();
- let schema = conn.getSchema();
- if (await schema.tableExists(this.config.queue_table) === false) {
- return;
- }
- let q = conn.getQuery();
- await q.table(this.config.queue_table).insert({
- channel,
- message,
- processed: false,
- created_at: /* @__PURE__ */ new Date(),
- updated_at: /* @__PURE__ */ new Date(),
- last_tried_at: null,
- process_message: "",
- retried_count: 0,
- status: "pending"
- });
- } finally {
- await conn.disconnect();
+ let schema = conn.getSchema();
+ if (await schema.tableExists(this.config.queue_table) === false) {
+ return;
  }
+ let q = conn.getQuery();
+ await q.table(this.config.queue_table).insert({
+ channel,
+ message,
+ created_at: /* @__PURE__ */ new Date(),
+ updated_at: /* @__PURE__ */ new Date(),
+ last_tried_at: null,
+ process_message: "",
+ retried_count: 0,
+ status: "pending"
+ });
  }
  async registerListener(channel, callback) {
  this.channels.set(channel, callback);