stripe-experiment-sync 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -1,29 +1,17 @@
 import Stripe from 'stripe';
-import { D as DatabaseAdapter } from './adapter-BtXT5w9r.js';
-export { P as PgCompatibleClient } from './adapter-BtXT5w9r.js';
+import pg, { PoolConfig, QueryResult } from 'pg';
+import { ConnectionOptions } from 'node:tls';
 
 type PostgresConfig = {
     schema: string;
-    adapter: DatabaseAdapter;
+    poolConfig: PoolConfig;
 };
 declare class PostgresClient {
     private config;
-    private adapter;
+    pool: pg.Pool;
     constructor(config: PostgresConfig);
-    /**
-     * Get the underlying adapter.
-     * Useful for accessing adapter-specific features.
-     */
-    getAdapter(): DatabaseAdapter;
-    /**
-     * Close all database connections.
-     */
-    end(): Promise<void>;
     delete(table: string, id: string): Promise<boolean>;
-    query<T = any>(text: string, params?: any[]): Promise<{
-        rows: T[];
-        rowCount: number;
-    }>;
+    query(text: string, params?: any[]): Promise<QueryResult>;
     upsertMany<T extends {
         [Key: string]: any;
     }>(entries: T[], table: string): Promise<T[]>;
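
Taken together, this hunk drops the adapter abstraction in favour of a direct dependency on pg: PostgresConfig now carries a PoolConfig and PostgresClient exposes a pg.Pool. A minimal usage sketch of the new shape (the connection string below is a placeholder, not part of the package):

import { PostgresClient } from 'stripe-experiment-sync';

// PostgresClient now builds its own pg.Pool from poolConfig (see the hunk above).
const client = new PostgresClient({
  schema: 'stripe',
  poolConfig: {
    connectionString: 'postgres://user:pass@localhost:5432/app', // placeholder
    max: 10,
  },
});

// query() now resolves to a pg QueryResult instead of the old { rows, rowCount } shape.
const result = await client.query('select 1 as ok');
console.log(result.rows);
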
@@ -80,6 +68,10 @@ declare class PostgresClient {
      * Execute a function while holding an advisory lock.
      * The lock is automatically released after the function completes (success or error).
      *
+     * IMPORTANT: This acquires a dedicated connection from the pool and holds it for the
+     * duration of the function execution. PostgreSQL advisory locks are session-level,
+     * so we must use the same connection for lock acquisition, operations, and release.
+     *
      * @param key - A string key to lock on (will be hashed to an integer)
      * @param fn - The function to execute while holding the lock
      * @returns The result of the function
@@ -112,22 +104,20 @@ declare class PostgresClient {
         runStartedAt: Date;
     } | null>;
     /**
-     * Get full sync run details.
+     * Get sync run config (for concurrency control).
+     * Status is derived from sync_dashboard view.
      */
     getSyncRun(accountId: string, runStartedAt: Date): Promise<{
         accountId: string;
         runStartedAt: Date;
-        status: string;
         maxConcurrent: number;
+        closedAt: Date | null;
     } | null>;
     /**
-     * Mark a sync run as complete.
+     * Close a sync run (mark as done).
+     * Status (complete/error) is derived from object run states.
      */
-    completeSyncRun(accountId: string, runStartedAt: Date): Promise<void>;
-    /**
-     * Mark a sync run as failed.
-     */
-    failSyncRun(accountId: string, runStartedAt: Date, errorMessage: string): Promise<void>;
+    closeSyncRun(accountId: string, runStartedAt: Date): Promise<void>;
     /**
      * Create object run entries for a sync run.
      * All objects start as 'pending'.
@@ -176,12 +166,18 @@ declare class PostgresClient {
     deleteSyncRuns(accountId: string): Promise<void>;
     /**
      * Mark an object sync as complete.
+     * Auto-closes the run when all objects are done.
      */
     completeObjectSync(accountId: string, runStartedAt: Date, object: string): Promise<void>;
     /**
      * Mark an object sync as failed.
+     * Auto-closes the run when all objects are done.
      */
     failObjectSync(accountId: string, runStartedAt: Date, object: string, errorMessage: string): Promise<void>;
+    /**
+     * Check if any object in a run has errored.
+     */
+    hasAnyObjectErrors(accountId: string, runStartedAt: Date): Promise<boolean>;
     /**
      * Count running objects in a run.
      */
@@ -233,11 +229,9 @@ type StripeSyncConfig = {
     * Default: false
     */
    revalidateObjectsViaStripeApi?: Array<RevalidateEntity>;
-    /**
-     * Database adapter for database operations.
-     * Use PgAdapter for Node.js or a custom adapter for other environments (e.g., Deno).
-     */
-    adapter: DatabaseAdapter;
+    /** @deprecated Use `poolConfig` instead. */
+    maxPostgresConnections?: number;
+    poolConfig: PoolConfig;
    logger?: Logger;
    /**
     * Maximum number of retry attempts for 429 rate limit errors.
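
On the StripeSync side, the required adapter field is replaced by poolConfig, and the old maxPostgresConnections knob survives only as a deprecated alias. A hedged sketch of the database-related fields; every other StripeSyncConfig field (Stripe key, logger, retry options) is omitted here:

import type { StripeSyncConfig } from 'stripe-experiment-sync';

// Only the fields visible in this diff; the rest of StripeSyncConfig is unchanged
// and left out of this sketch.
const dbOptions: Partial<StripeSyncConfig> = {
  poolConfig: {
    connectionString: process.env.DATABASE_URL, // placeholder source for the URL
    max: 10,
  },
  // maxPostgresConnections: 10, // still accepted, but deprecated in favor of poolConfig.max
};
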
@@ -579,6 +573,13 @@ declare class StripeSync {
     private fetchMissingEntities;
 }
 
+type MigrationConfig = {
+    databaseUrl: string;
+    ssl?: ConnectionOptions;
+    logger?: Logger;
+};
+declare function runMigrations(config: MigrationConfig): Promise<void>;
+
 /**
  * Hashes a Stripe API key using SHA-256
  * Used to store API key hashes in the database for fast account lookups
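
runMigrations no longer takes an adapter; it accepts a single MigrationConfig and opens its own pg Client internally. A hedged usage sketch (the databaseUrl source and the ssl value are illustrative, not prescribed by the package):

import { runMigrations } from 'stripe-experiment-sync';

// Run the bundled SQL migrations against the target database before starting a sync.
await runMigrations({
  databaseUrl: process.env.DATABASE_URL!, // placeholder; any Postgres connection string
  // ssl: { rejectUnauthorized: false },  // optional TLS settings, only if your server needs them
});
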
@@ -589,14 +590,6 @@ declare class StripeSync {
  */
 declare function hashApiKey(apiKey: string): string;
 
-/**
- * Run database migrations using the provided adapter.
- *
- * @param adapter - Database adapter (PgAdapter or PostgresJsAdapter)
- * @param logger - Optional logger for migration progress
- */
-declare function runMigrations(adapter: DatabaseAdapter, logger?: Logger): Promise<void>;
-
 declare const VERSION: string;
 
-export { DatabaseAdapter, type Logger, PostgresClient, type ProcessNextParams, type ProcessNextResult, type RevalidateEntity, StripeSync, type StripeSyncConfig, type Sync, type SyncBackfill, type SyncEntitlementsParams, type SyncFeaturesParams, type SyncObject, type SyncParams, VERSION, hashApiKey, runMigrations };
+export { type Logger, PostgresClient, type ProcessNextParams, type ProcessNextResult, type RevalidateEntity, StripeSync, type StripeSyncConfig, type Sync, type SyncBackfill, type SyncEntitlementsParams, type SyncFeaturesParams, type SyncObject, type SyncParams, VERSION, hashApiKey, runMigrations };
package/dist/index.js CHANGED
@@ -1,47 +1,25 @@
 // package.json
 var package_default = {
   name: "stripe-experiment-sync",
-  version: "1.0.0",
+  version: "1.0.2",
   private: false,
   description: "Stripe Sync Engine to sync Stripe data to Postgres",
   type: "module",
   main: "./dist/index.cjs",
   exports: {
-    ".": {
-      import: {
-        types: "./dist/index.d.ts",
-        default: "./dist/index.js"
-      },
-      require: {
-        types: "./dist/index.d.cts",
-        default: "./dist/index.cjs"
-      }
-    },
-    "./pg": {
-      import: {
-        types: "./dist/pg.d.ts",
-        default: "./dist/pg.js"
-      },
-      require: {
-        types: "./dist/pg.d.cts",
-        default: "./dist/pg.cjs"
-      }
+    import: {
+      types: "./dist/index.d.ts",
+      import: "./dist/index.js"
     },
-    "./postgres-js": {
-      import: {
-        types: "./dist/postgres-js.d.ts",
-        default: "./dist/postgres-js.js"
-      },
-      require: {
-        types: "./dist/postgres-js.d.cts",
-        default: "./dist/postgres-js.cjs"
-      }
+    require: {
+      types: "./dist/index.d.cts",
+      require: "./dist/index.cjs"
     }
   },
   scripts: {
     clean: "rimraf dist",
     prebuild: "npm run clean",
-    build: "tsup src/index.ts src/pg.ts src/postgres-js.ts --format esm,cjs --dts --shims && cp -r src/database/migrations dist/migrations",
+    build: "tsup src/index.ts --format esm,cjs --dts --shims && cp -r src/database/migrations dist/migrations",
     lint: "eslint src --ext .ts",
     test: "vitest"
   },
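
The ./pg and ./postgres-js subpath exports are removed along with their build targets, so anything previously imported from those entry points (such as the PgAdapter mentioned in the removed 1.0.0 docs) no longer resolves. A before/after import sketch, assuming the adapter lived under the ./pg subpath in 1.0.0:

// 1.0.0 (no longer resolves in 1.0.2):
// import { PgAdapter } from 'stripe-experiment-sync/pg';

// 1.0.2: everything comes from the package root, configured with a pg PoolConfig.
import { StripeSync, PostgresClient, runMigrations } from 'stripe-experiment-sync';
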
@@ -51,7 +29,6 @@ var package_default = {
   dependencies: {
     pg: "^8.16.3",
     "pg-node-migrations": "0.0.8",
-    postgres: "^3.4.7",
     ws: "^8.18.0",
     yesql: "^7.0.0"
   },
@@ -64,7 +41,6 @@ var package_default = {
     "@types/ws": "^8.5.13",
     "@types/yesql": "^4.1.4",
     "@vitest/ui": "^4.0.9",
-    stripe: "^20.0.0",
     vitest: "^3.2.4"
   },
   repository: {
@@ -93,6 +69,7 @@ import Stripe2 from "stripe";
 import { pg as sql2 } from "yesql";
 
 // src/database/postgres.ts
+import pg from "pg";
 import { pg as sql } from "yesql";
 var ORDERED_STRIPE_TABLES = [
   "subscription_items",
@@ -126,22 +103,9 @@ var TABLES_WITH_ACCOUNT_ID = /* @__PURE__ */ new Set(["_managed_webhooks"]);
 var PostgresClient = class {
   constructor(config) {
     this.config = config;
-    this.adapter = config.adapter;
-  }
-  adapter;
-  /**
-   * Get the underlying adapter.
-   * Useful for accessing adapter-specific features.
-   */
-  getAdapter() {
-    return this.adapter;
-  }
-  /**
-   * Close all database connections.
-   */
-  async end() {
-    await this.adapter.end();
+    this.pool = new pg.Pool(config.poolConfig);
   }
+  pool;
   async delete(table, id) {
     const prepared = sql(`
       delete from "${this.config.schema}"."${table}"
@@ -153,7 +117,7 @@ var PostgresClient = class {
   }
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
   async query(text, params) {
-    return this.adapter.query(text, params);
+    return this.pool.query(text, params);
   }
   async upsertMany(entries, table) {
     if (!entries.length) return [];
@@ -172,7 +136,7 @@ var PostgresClient = class {
         "_raw_data" = EXCLUDED."_raw_data"
         RETURNING *
       `;
-      queries.push(this.adapter.query(upsertSql, [rawData]));
+      queries.push(this.pool.query(upsertSql, [rawData]));
     });
     results.push(...await Promise.all(queries));
   }
@@ -211,7 +175,7 @@ var PostgresClient = class {
         cleansed.last_synced_at = timestamp;
         cleansed.account_id = accountId;
         const prepared = sql(upsertSql, { useNullForMissing: true })(cleansed);
-        queries.push(this.adapter.query(prepared.text, prepared.values));
+        queries.push(this.pool.query(prepared.text, prepared.values));
       } else {
         const rawData = JSON.stringify(entry);
         const upsertSql = `
@@ -226,7 +190,7 @@ var PostgresClient = class {
           OR "${table}"."_last_synced_at" < $2
           RETURNING *
         `;
-        queries.push(this.adapter.query(upsertSql, [rawData, timestamp, accountId]));
+        queries.push(this.pool.query(upsertSql, [rawData, timestamp, accountId]));
       }
     });
     results.push(...await Promise.all(queries));
@@ -398,13 +362,27 @@ var PostgresClient = class {
    * Execute a function while holding an advisory lock.
    * The lock is automatically released after the function completes (success or error).
    *
+   * IMPORTANT: This acquires a dedicated connection from the pool and holds it for the
+   * duration of the function execution. PostgreSQL advisory locks are session-level,
+   * so we must use the same connection for lock acquisition, operations, and release.
+   *
    * @param key - A string key to lock on (will be hashed to an integer)
    * @param fn - The function to execute while holding the lock
    * @returns The result of the function
    */
   async withAdvisoryLock(key, fn) {
     const lockId = this.hashToInt32(key);
-    return this.adapter.withAdvisoryLock(lockId, fn);
+    const client = await this.pool.connect();
+    try {
+      await client.query("SELECT pg_advisory_lock($1)", [lockId]);
+      return await fn();
+    } finally {
+      try {
+        await client.query("SELECT pg_advisory_unlock($1)", [lockId]);
+      } finally {
+        client.release();
+      }
+    }
   }
   // =============================================================================
   // Observable Sync System Methods
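
Advisory locking now happens directly on a dedicated pooled connection, since pg_advisory_lock and pg_advisory_unlock are session-level and must run on the same client. A hedged usage sketch (postgresClient is assumed to be a constructed PostgresClient; the lock key and the work inside are illustrative):

// Serialize work per Stripe account across processes sharing the same database.
// 'sync:acct_123' is an illustrative key; any stable string works, it is hashed to an int32.
await postgresClient.withAdvisoryLock('sync:acct_123', async () => {
  // Everything in here runs while the lock is held on one dedicated connection;
  // the nested finally blocks above guarantee unlock and release even on errors.
  await postgresClient.query('select 1');
});
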
@@ -420,12 +398,20 @@ var PostgresClient = class {
    */
   async cancelStaleRuns(accountId) {
     await this.query(
-      `UPDATE "${this.config.schema}"."_sync_run" r
+      `UPDATE "${this.config.schema}"."_sync_obj_run" o
        SET status = 'error',
            error_message = 'Auto-cancelled: stale (no update in 5 min)',
            completed_at = now()
+       WHERE o."_account_id" = $1
+         AND o.status = 'running'
+         AND o.updated_at < now() - interval '5 minutes'`,
+      [accountId]
+    );
+    await this.query(
+      `UPDATE "${this.config.schema}"."_sync_run" r
+       SET closed_at = now()
        WHERE r."_account_id" = $1
-         AND r.status = 'running'
+         AND r.closed_at IS NULL
          AND EXISTS (
            SELECT 1 FROM "${this.config.schema}"."_sync_obj_run" o
            WHERE o."_account_id" = r."_account_id"
@@ -435,7 +421,7 @@ var PostgresClient = class {
            SELECT 1 FROM "${this.config.schema}"."_sync_obj_run" o
            WHERE o."_account_id" = r."_account_id"
            AND o.run_started_at = r.started_at
-           AND o.updated_at >= now() - interval '5 minutes'
+           AND o.status IN ('pending', 'running')
          )`,
       [accountId]
     );
@@ -451,7 +437,7 @@ var PostgresClient = class {
     await this.cancelStaleRuns(accountId);
     const existing = await this.query(
       `SELECT "_account_id", started_at FROM "${this.config.schema}"."_sync_run"
-       WHERE "_account_id" = $1 AND status = 'running'`,
+       WHERE "_account_id" = $1 AND closed_at IS NULL`,
       [accountId]
     );
     if (existing.rows.length > 0) {
@@ -480,7 +466,7 @@ var PostgresClient = class {
   async getActiveSyncRun(accountId) {
     const result = await this.query(
       `SELECT "_account_id", started_at FROM "${this.config.schema}"."_sync_run"
-       WHERE "_account_id" = $1 AND status = 'running'`,
+       WHERE "_account_id" = $1 AND closed_at IS NULL`,
       [accountId]
     );
     if (result.rows.length === 0) return null;
@@ -488,11 +474,12 @@ var PostgresClient = class {
     return { accountId: row._account_id, runStartedAt: row.started_at };
   }
   /**
-   * Get full sync run details.
+   * Get sync run config (for concurrency control).
+   * Status is derived from sync_dashboard view.
    */
   async getSyncRun(accountId, runStartedAt) {
     const result = await this.query(
-      `SELECT "_account_id", started_at, status, max_concurrent
+      `SELECT "_account_id", started_at, max_concurrent, closed_at
        FROM "${this.config.schema}"."_sync_run"
        WHERE "_account_id" = $1 AND started_at = $2`,
       [accountId, runStartedAt]
@@ -502,32 +489,22 @@ var PostgresClient = class {
     return {
       accountId: row._account_id,
       runStartedAt: row.started_at,
-      status: row.status,
-      maxConcurrent: row.max_concurrent
+      maxConcurrent: row.max_concurrent,
+      closedAt: row.closed_at
     };
   }
   /**
-   * Mark a sync run as complete.
+   * Close a sync run (mark as done).
+   * Status (complete/error) is derived from object run states.
    */
-  async completeSyncRun(accountId, runStartedAt) {
+  async closeSyncRun(accountId, runStartedAt) {
     await this.query(
       `UPDATE "${this.config.schema}"."_sync_run"
-       SET status = 'complete', completed_at = now()
-       WHERE "_account_id" = $1 AND started_at = $2`,
+       SET closed_at = now()
+       WHERE "_account_id" = $1 AND started_at = $2 AND closed_at IS NULL`,
       [accountId, runStartedAt]
     );
   }
-  /**
-   * Mark a sync run as failed.
-   */
-  async failSyncRun(accountId, runStartedAt, errorMessage) {
-    await this.query(
-      `UPDATE "${this.config.schema}"."_sync_run"
-       SET status = 'error', error_message = $3, completed_at = now()
-       WHERE "_account_id" = $1 AND started_at = $2`,
-      [accountId, runStartedAt, errorMessage]
-    );
-  }
   /**
    * Create object run entries for a sync run.
    * All objects start as 'pending'.
@@ -651,6 +628,7 @@ var PostgresClient = class {
   }
   /**
    * Mark an object sync as complete.
+   * Auto-closes the run when all objects are done.
    */
   async completeObjectSync(accountId, runStartedAt, object) {
     await this.query(
@@ -659,9 +637,14 @@ var PostgresClient = class {
        WHERE "_account_id" = $1 AND run_started_at = $2 AND object = $3`,
       [accountId, runStartedAt, object]
     );
+    const allDone = await this.areAllObjectsComplete(accountId, runStartedAt);
+    if (allDone) {
+      await this.closeSyncRun(accountId, runStartedAt);
+    }
   }
   /**
    * Mark an object sync as failed.
+   * Auto-closes the run when all objects are done.
    */
   async failObjectSync(accountId, runStartedAt, object, errorMessage) {
     await this.query(
@@ -670,6 +653,21 @@ var PostgresClient = class {
        WHERE "_account_id" = $1 AND run_started_at = $2 AND object = $3`,
       [accountId, runStartedAt, object, errorMessage]
     );
+    const allDone = await this.areAllObjectsComplete(accountId, runStartedAt);
+    if (allDone) {
+      await this.closeSyncRun(accountId, runStartedAt);
+    }
+  }
+  /**
+   * Check if any object in a run has errored.
+   */
+  async hasAnyObjectErrors(accountId, runStartedAt) {
+    const result = await this.query(
+      `SELECT COUNT(*) as count FROM "${this.config.schema}"."_sync_obj_run"
+       WHERE "_account_id" = $1 AND run_started_at = $2 AND status = 'error'`,
+      [accountId, runStartedAt]
+    );
+    return parseInt(result.rows[0].count) > 0;
   }
   /**
    * Count running objects in a run.
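
Run-level status bookkeeping is gone: completeSyncRun/failSyncRun are replaced by closeSyncRun, the run auto-closes once every object run finishes, and success versus failure is derived from the object rows. A hedged sketch of checking a finished run using only the methods visible in this diff (accountId and runStartedAt are assumed to come from getActiveSyncRun or your own records):

// Decide how a run ended under the 1.0.2 model.
const run = await postgresClient.getSyncRun(accountId, runStartedAt);
if (run?.closedAt) {
  const hadErrors = await postgresClient.hasAnyObjectErrors(accountId, runStartedAt);
  console.log(hadErrors ? 'sync run finished with errors' : 'sync run completed cleanly');
}
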
@@ -908,9 +906,22 @@ var StripeSync = class {
       { autoExpandLists: config.autoExpandLists, stripeApiVersion: config.stripeApiVersion },
       "StripeSync initialized"
     );
+    const poolConfig = config.poolConfig ?? {};
+    if (config.databaseUrl) {
+      poolConfig.connectionString = config.databaseUrl;
+    }
+    if (config.maxPostgresConnections) {
+      poolConfig.max = config.maxPostgresConnections;
+    }
+    if (poolConfig.max === void 0) {
+      poolConfig.max = 10;
+    }
+    if (poolConfig.keepAlive === void 0) {
+      poolConfig.keepAlive = true;
+    }
     this.postgresClient = new PostgresClient({
       schema: "stripe",
-      adapter: config.adapter
+      poolConfig
     });
   }
   stripe;
@@ -1252,8 +1263,8 @@ var StripeSync = class {
       // Depends on invoice
       listFn: (p) => this.stripe.creditNotes.list(p),
       upsertFn: (items, id, bf) => this.upsertCreditNotes(items, id, bf),
-      supportsCreatedFilter: false
-      // credit_notes don't support created filter
+      supportsCreatedFilter: true
+      // credit_notes support created filter
     },
     dispute: {
       order: 14,
@@ -1906,14 +1917,10 @@ var StripeSync = class {
           }
         }
       }
-      await this.postgresClient.completeSyncRun(accountId, runStartedAt);
+      await this.postgresClient.closeSyncRun(accountId, runStartedAt);
       return results;
     } catch (error) {
-      await this.postgresClient.failSyncRun(
-        accountId,
-        runStartedAt,
-        error instanceof Error ? error.message : "Unknown error"
-      );
+      await this.postgresClient.closeSyncRun(accountId, runStartedAt);
       throw error;
     }
   }
@@ -2444,12 +2451,13 @@ var StripeSync = class {
     await this.postgresClient.tryStartObjectSync(accountId, runStartedAt, resourceName);
     try {
       const result = await fn(cursor, runStartedAt);
-      await this.postgresClient.completeSyncRun(accountId, runStartedAt);
+      await this.postgresClient.completeObjectSync(accountId, runStartedAt, resourceName);
       return result;
     } catch (error) {
-      await this.postgresClient.failSyncRun(
+      await this.postgresClient.failObjectSync(
         accountId,
         runStartedAt,
+        resourceName,
         error instanceof Error ? error.message : "Unknown error"
       );
       throw error;
@@ -3207,6 +3215,7 @@ function chunkArray(array, chunkSize) {
 }
 
 // src/database/migrate.ts
+import { Client } from "pg";
 import { migrate } from "pg-node-migrations";
 import fs from "fs";
 import path from "path";
@@ -3214,15 +3223,15 @@ import { fileURLToPath } from "url";
 var __filename2 = fileURLToPath(import.meta.url);
 var __dirname2 = path.dirname(__filename2);
 async function doesTableExist(client, schema, tableName) {
-  const result = await client.query({
-    text: `SELECT EXISTS (
+  const result = await client.query(
+    `SELECT EXISTS (
       SELECT 1
       FROM information_schema.tables
       WHERE table_schema = $1
       AND table_name = $2
     )`,
-    values: [schema, tableName]
-  });
+    [schema, tableName]
+  );
   return result.rows[0]?.exists || false;
 }
 async function renameMigrationsTableIfNeeded(client, schema = "stripe", logger) {
@@ -3240,9 +3249,9 @@ async function cleanupSchema(client, schema, logger) {
   await client.query(`CREATE SCHEMA "${schema}"`);
   logger?.info(`Schema "${schema}" has been reset`);
 }
-async function connectAndMigrate(client, migrationsDirectory, logger, logOnError = false) {
+async function connectAndMigrate(client, migrationsDirectory, config, logOnError = false) {
   if (!fs.existsSync(migrationsDirectory)) {
-    logger?.info(`Migrations directory ${migrationsDirectory} not found, skipping`);
+    config.logger?.info(`Migrations directory ${migrationsDirectory} not found, skipping`);
     return;
   }
   const optionalConfig = {
@@ -3253,18 +3262,23 @@ async function connectAndMigrate(client, migrationsDirectory, logger, logOnError
     await migrate({ client }, migrationsDirectory, optionalConfig);
   } catch (error) {
     if (logOnError && error instanceof Error) {
-      logger?.error(error, "Migration error:");
+      config.logger?.error(error, "Migration error:");
     } else {
       throw error;
     }
   }
 }
-async function runMigrations(adapter, logger) {
-  const client = adapter.toPgClient();
+async function runMigrations(config) {
+  const client = new Client({
+    connectionString: config.databaseUrl,
+    ssl: config.ssl,
+    connectionTimeoutMillis: 1e4
+  });
   const schema = "stripe";
   try {
+    await client.connect();
     await client.query(`CREATE SCHEMA IF NOT EXISTS ${schema};`);
-    await renameMigrationsTableIfNeeded(client, schema, logger);
+    await renameMigrationsTableIfNeeded(client, schema, config.logger);
     const tableExists = await doesTableExist(client, schema, "_migrations");
     if (tableExists) {
       const migrationCount = await client.query(
@@ -3272,16 +3286,17 @@ async function runMigrations(adapter, logger) {
       );
       const isEmpty = migrationCount.rows[0]?.count === "0";
       if (isEmpty) {
-        await cleanupSchema(client, schema, logger);
+        await cleanupSchema(client, schema, config.logger);
       }
     }
-    logger?.info("Running migrations");
-    await connectAndMigrate(client, path.resolve(__dirname2, "./migrations"), logger);
+    config.logger?.info("Running migrations");
+    await connectAndMigrate(client, path.resolve(__dirname2, "./migrations"), config);
   } catch (err) {
-    logger?.error(err, "Error running migrations");
+    config.logger?.error(err, "Error running migrations");
     throw err;
   } finally {
-    logger?.info("Finished migrations");
+    await client.end();
+    config.logger?.info("Finished migrations");
   }
 }