stripe-experiment-sync 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +66 -68
- package/dist/index.d.cts +20 -31
- package/dist/index.d.ts +20 -31
- package/dist/index.js +66 -68
- package/package.json +8 -32
- package/dist/adapter-BtXT5w9r.d.cts +0 -51
- package/dist/adapter-BtXT5w9r.d.ts +0 -51
- package/dist/pg.cjs +0 -87
- package/dist/pg.d.cts +0 -28
- package/dist/pg.d.ts +0 -28
- package/dist/pg.js +0 -50
- package/dist/postgres-js.cjs +0 -90
- package/dist/postgres-js.d.cts +0 -31
- package/dist/postgres-js.d.ts +0 -31
- package/dist/postgres-js.js +0 -53
package/dist/index.cjs
CHANGED

@@ -45,47 +45,25 @@ var importMetaUrl = /* @__PURE__ */ getImportMetaUrl();
 // package.json
 var package_default = {
   name: "stripe-experiment-sync",
-  version: "1.0.0",
+  version: "1.0.1",
   private: false,
   description: "Stripe Sync Engine to sync Stripe data to Postgres",
   type: "module",
   main: "./dist/index.cjs",
   exports: {
-    ".": {
-      import: {
-        types: "./dist/index.d.ts",
-        default: "./dist/index.js"
-      },
-      require: {
-        types: "./dist/index.d.cts",
-        default: "./dist/index.cjs"
-      }
-    },
-    "./pg": {
-      import: {
-        types: "./dist/pg.d.ts",
-        default: "./dist/pg.js"
-      },
-      require: {
-        types: "./dist/pg.d.cts",
-        default: "./dist/pg.cjs"
-      }
+    import: {
+      types: "./dist/index.d.ts",
+      import: "./dist/index.js"
     },
-    "./postgres-js": {
-      import: {
-        types: "./dist/postgres-js.d.ts",
-        default: "./dist/postgres-js.js"
-      },
-      require: {
-        types: "./dist/postgres-js.d.cts",
-        default: "./dist/postgres-js.cjs"
-      }
+    require: {
+      types: "./dist/index.d.cts",
+      require: "./dist/index.cjs"
     }
   },
   scripts: {
     clean: "rimraf dist",
     prebuild: "npm run clean",
-    build: "tsup src/index.ts …",
+    build: "tsup src/index.ts --format esm,cjs --dts --shims && cp -r src/database/migrations dist/migrations",
     lint: "eslint src --ext .ts",
     test: "vitest"
   },
@@ -95,7 +73,6 @@ var package_default = {
   dependencies: {
     pg: "^8.16.3",
     "pg-node-migrations": "0.0.8",
-    postgres: "^3.4.7",
     ws: "^8.18.0",
     yesql: "^7.0.0"
   },
@@ -108,7 +85,6 @@ var package_default = {
     "@types/ws": "^8.5.13",
     "@types/yesql": "^4.1.4",
     "@vitest/ui": "^4.0.9",
-    stripe: "^20.0.0",
     vitest: "^3.2.4"
   },
   repository: {
@@ -137,6 +113,7 @@ var import_stripe2 = __toESM(require("stripe"), 1);
 var import_yesql2 = require("yesql");

 // src/database/postgres.ts
+var import_pg = __toESM(require("pg"), 1);
 var import_yesql = require("yesql");
 var ORDERED_STRIPE_TABLES = [
   "subscription_items",
@@ -170,22 +147,9 @@ var TABLES_WITH_ACCOUNT_ID = /* @__PURE__ */ new Set(["_managed_webhooks"]);
 var PostgresClient = class {
   constructor(config) {
     this.config = config;
-    this.adapter = config.adapter;
-  }
-  adapter;
-  /**
-   * Get the underlying adapter.
-   * Useful for accessing adapter-specific features.
-   */
-  getAdapter() {
-    return this.adapter;
-  }
-  /**
-   * Close all database connections.
-   */
-  async end() {
-    await this.adapter.end();
+    this.pool = new import_pg.default.Pool(config.poolConfig);
   }
+  pool;
   async delete(table, id) {
     const prepared = (0, import_yesql.pg)(`
       delete from "${this.config.schema}"."${table}"
@@ -197,7 +161,7 @@ var PostgresClient = class {
   }
   // eslint-disable-next-line @typescript-eslint/no-explicit-any
   async query(text, params) {
-    return this.adapter.query(text, params);
+    return this.pool.query(text, params);
   }
   async upsertMany(entries, table) {
     if (!entries.length) return [];
@@ -216,7 +180,7 @@ var PostgresClient = class {
           "_raw_data" = EXCLUDED."_raw_data"
         RETURNING *
       `;
-      queries.push(this.adapter.query(upsertSql, [rawData]));
+      queries.push(this.pool.query(upsertSql, [rawData]));
     });
     results.push(...await Promise.all(queries));
   }
@@ -255,7 +219,7 @@ var PostgresClient = class {
       cleansed.last_synced_at = timestamp;
       cleansed.account_id = accountId;
       const prepared = (0, import_yesql.pg)(upsertSql, { useNullForMissing: true })(cleansed);
-      queries.push(this.adapter.query(prepared.text, prepared.values));
+      queries.push(this.pool.query(prepared.text, prepared.values));
     } else {
       const rawData = JSON.stringify(entry);
       const upsertSql = `
@@ -270,7 +234,7 @@ var PostgresClient = class {
         OR "${table}"."_last_synced_at" < $2
         RETURNING *
       `;
-      queries.push(this.adapter.query(upsertSql, [rawData, timestamp, accountId]));
+      queries.push(this.pool.query(upsertSql, [rawData, timestamp, accountId]));
     }
   });
   results.push(...await Promise.all(queries));
@@ -442,13 +406,27 @@ var PostgresClient = class {
   * Execute a function while holding an advisory lock.
   * The lock is automatically released after the function completes (success or error).
   *
+  * IMPORTANT: This acquires a dedicated connection from the pool and holds it for the
+  * duration of the function execution. PostgreSQL advisory locks are session-level,
+  * so we must use the same connection for lock acquisition, operations, and release.
+  *
   * @param key - A string key to lock on (will be hashed to an integer)
   * @param fn - The function to execute while holding the lock
   * @returns The result of the function
   */
  async withAdvisoryLock(key, fn) {
    const lockId = this.hashToInt32(key);
-    return this.adapter.withAdvisoryLock(lockId, fn);
+    const client = await this.pool.connect();
+    try {
+      await client.query("SELECT pg_advisory_lock($1)", [lockId]);
+      return await fn();
+    } finally {
+      try {
+        await client.query("SELECT pg_advisory_unlock($1)", [lockId]);
+      } finally {
+        client.release();
+      }
+    }
  }
  // =============================================================================
  // Observable Sync System Methods
@@ -952,9 +930,22 @@ var StripeSync = class {
      { autoExpandLists: config.autoExpandLists, stripeApiVersion: config.stripeApiVersion },
      "StripeSync initialized"
    );
+    const poolConfig = config.poolConfig ?? {};
+    if (config.databaseUrl) {
+      poolConfig.connectionString = config.databaseUrl;
+    }
+    if (config.maxPostgresConnections) {
+      poolConfig.max = config.maxPostgresConnections;
+    }
+    if (poolConfig.max === void 0) {
+      poolConfig.max = 10;
+    }
+    if (poolConfig.keepAlive === void 0) {
+      poolConfig.keepAlive = true;
+    }
    this.postgresClient = new PostgresClient({
      schema: "stripe",
-      adapter: config.adapter
+      poolConfig
    });
  }
  stripe;
@@ -3251,6 +3242,7 @@ function chunkArray(array, chunkSize) {
 }

 // src/database/migrate.ts
+var import_pg2 = require("pg");
 var import_pg_node_migrations = require("pg-node-migrations");
 var import_node_fs = __toESM(require("fs"), 1);
 var import_node_path = __toESM(require("path"), 1);
@@ -3258,15 +3250,15 @@ var import_node_url = require("url");
 var __filename2 = (0, import_node_url.fileURLToPath)(importMetaUrl);
 var __dirname = import_node_path.default.dirname(__filename2);
 async function doesTableExist(client, schema, tableName) {
-  const result = await client.query(
-    …
+  const result = await client.query(
+    `SELECT EXISTS (
      SELECT 1
      FROM information_schema.tables
      WHERE table_schema = $1
      AND table_name = $2
    )`,
-    …
-    …
+    [schema, tableName]
+  );
  return result.rows[0]?.exists || false;
 }
 async function renameMigrationsTableIfNeeded(client, schema = "stripe", logger) {
@@ -3284,9 +3276,9 @@ async function cleanupSchema(client, schema, logger) {
   await client.query(`CREATE SCHEMA "${schema}"`);
   logger?.info(`Schema "${schema}" has been reset`);
 }
-async function connectAndMigrate(client, migrationsDirectory, logger, logOnError = false) {
+async function connectAndMigrate(client, migrationsDirectory, config, logOnError = false) {
   if (!import_node_fs.default.existsSync(migrationsDirectory)) {
-    logger?.info(`Migrations directory ${migrationsDirectory} not found, skipping`);
+    config.logger?.info(`Migrations directory ${migrationsDirectory} not found, skipping`);
     return;
   }
   const optionalConfig = {
@@ -3297,18 +3289,23 @@ async function connectAndMigrate(client, migrationsDirectory, logger, logOnError
     await (0, import_pg_node_migrations.migrate)({ client }, migrationsDirectory, optionalConfig);
   } catch (error) {
     if (logOnError && error instanceof Error) {
-      logger?.error(error, "Migration error:");
+      config.logger?.error(error, "Migration error:");
     } else {
       throw error;
     }
   }
 }
-async function runMigrations(adapter, logger) {
-  const client = adapter.toPgClient();
+async function runMigrations(config) {
+  const client = new import_pg2.Client({
+    connectionString: config.databaseUrl,
+    ssl: config.ssl,
+    connectionTimeoutMillis: 1e4
+  });
   const schema = "stripe";
   try {
+    await client.connect();
     await client.query(`CREATE SCHEMA IF NOT EXISTS ${schema};`);
-    await renameMigrationsTableIfNeeded(client, schema, logger);
+    await renameMigrationsTableIfNeeded(client, schema, config.logger);
     const tableExists = await doesTableExist(client, schema, "_migrations");
     if (tableExists) {
       const migrationCount = await client.query(
@@ -3316,16 +3313,17 @@ async function runMigrations(adapter, logger) {
       );
       const isEmpty = migrationCount.rows[0]?.count === "0";
       if (isEmpty) {
-        await cleanupSchema(client, schema, logger);
+        await cleanupSchema(client, schema, config.logger);
       }
     }
-    logger?.info("Running migrations");
-    await connectAndMigrate(client, import_node_path.default.resolve(__dirname, "./migrations"), logger);
+    config.logger?.info("Running migrations");
+    await connectAndMigrate(client, import_node_path.default.resolve(__dirname, "./migrations"), config);
   } catch (err) {
-    logger?.error(err, "Error running migrations");
+    config.logger?.error(err, "Error running migrations");
     throw err;
   } finally {
-    …
+    await client.end();
+    config.logger?.info("Finished migrations");
   }
 }
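The rewritten withAdvisoryLock above pins lock acquisition, the callback, and the unlock to one dedicated pooled connection, which is what PostgreSQL's session-level advisory locks require. A minimal TypeScript sketch of how a caller might lean on that guarantee; the key string, connection string, and callback body are illustrative and not part of the package:

import { PostgresClient } from "stripe-experiment-sync";

const client = new PostgresClient({
  schema: "stripe",
  poolConfig: { connectionString: process.env.DATABASE_URL }, // assumes DATABASE_URL is set
});

// Two concurrent callers hashing the same key contend on the same int32 lock id,
// so the second callback only starts after the first one's connection unlocks.
await client.withAdvisoryLock("webhook:evt_backfill", async () => {
  await client.query("select 1"); // stand-in for real sync work
});

await client.pool.end();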
package/dist/index.d.cts
CHANGED

@@ -1,29 +1,17 @@
 import Stripe from 'stripe';
-import { …
-…
+import pg, { PoolConfig, QueryResult } from 'pg';
+import { ConnectionOptions } from 'node:tls';

 type PostgresConfig = {
     schema: string;
-    adapter: DatabaseAdapter;
+    poolConfig: PoolConfig;
 };
 declare class PostgresClient {
     private config;
-    adapter: DatabaseAdapter;
+    pool: pg.Pool;
     constructor(config: PostgresConfig);
-    /**
-     * Get the underlying adapter.
-     * Useful for accessing adapter-specific features.
-     */
-    getAdapter(): DatabaseAdapter;
-    /**
-     * Close all database connections.
-     */
-    end(): Promise<void>;
     delete(table: string, id: string): Promise<boolean>;
-    query…
-        rows: T[];
-        rowCount: number;
-    }>;
+    query(text: string, params?: any[]): Promise<QueryResult>;
     upsertMany<T extends {
         [Key: string]: any;
     }>(entries: T[], table: string): Promise<T[]>;
@@ -80,6 +68,10 @@ declare class PostgresClient {
     * Execute a function while holding an advisory lock.
     * The lock is automatically released after the function completes (success or error).
     *
+    * IMPORTANT: This acquires a dedicated connection from the pool and holds it for the
+    * duration of the function execution. PostgreSQL advisory locks are session-level,
+    * so we must use the same connection for lock acquisition, operations, and release.
+    *
     * @param key - A string key to lock on (will be hashed to an integer)
     * @param fn - The function to execute while holding the lock
     * @returns The result of the function
@@ -233,11 +225,9 @@ type StripeSyncConfig = {
     * Default: false
     */
    revalidateObjectsViaStripeApi?: Array<RevalidateEntity>;
-    /**
-     * …
-     * …
-     */
-    adapter: DatabaseAdapter;
+    /** @deprecated Use `poolConfig` instead. */
+    maxPostgresConnections?: number;
+    poolConfig: PoolConfig;
    logger?: Logger;
    /**
     * Maximum number of retry attempts for 429 rate limit errors.
@@ -579,6 +569,13 @@ declare class StripeSync {
    private fetchMissingEntities;
 }

+type MigrationConfig = {
+    databaseUrl: string;
+    ssl?: ConnectionOptions;
+    logger?: Logger;
+};
+declare function runMigrations(config: MigrationConfig): Promise<void>;
+
 /**
  * Hashes a Stripe API key using SHA-256
  * Used to store API key hashes in the database for fast account lookups
@@ -589,14 +586,6 @@ declare class StripeSync {
  */
 declare function hashApiKey(apiKey: string): string;

-/**
- * Run database migrations using the provided adapter.
- *
- * @param adapter - Database adapter (PgAdapter or PostgresJsAdapter)
- * @param logger - Optional logger for migration progress
- */
-declare function runMigrations(adapter: DatabaseAdapter, logger?: Logger): Promise<void>;
-
 declare const VERSION: string;

-export { …
+export { type Logger, PostgresClient, type ProcessNextParams, type ProcessNextResult, type RevalidateEntity, StripeSync, type StripeSyncConfig, type Sync, type SyncBackfill, type SyncEntitlementsParams, type SyncFeaturesParams, type SyncObject, type SyncParams, VERSION, hashApiKey, runMigrations };
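The declarations above describe the new pg-only surface: migrations are driven by a plain config object rather than an adapter. A short sketch against these typings; the connection string is a placeholder, and the ssl value is only an example of a node:tls ConnectionOptions object:

import { runMigrations } from "stripe-experiment-sync";

// 1.0.0's signature was runMigrations(adapter, logger?); 1.0.1 takes one MigrationConfig object.
await runMigrations({
  databaseUrl: process.env.DATABASE_URL ?? "postgres://localhost:5432/postgres", // placeholder
  ssl: { rejectUnauthorized: false }, // example only; omit for non-TLS connections
});

Internally this opens a one-off pg Client with a 10-second connection timeout, creates the stripe schema if needed, runs the migrations bundled under dist/migrations, and always closes the client.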
package/dist/index.d.ts
CHANGED

Identical changes to package/dist/index.d.cts above (+20 / -31): the adapter import is replaced by "import pg, { PoolConfig, QueryResult } from 'pg'" plus "import { ConnectionOptions } from 'node:tls'"; PostgresConfig and PostgresClient expose poolConfig: PoolConfig and a public pool: pg.Pool instead of the adapter (getAdapter() and end() are removed, query() now returns Promise<QueryResult>); the withAdvisoryLock docs gain the dedicated-connection note; StripeSyncConfig swaps adapter: DatabaseAdapter for poolConfig: PoolConfig plus a deprecated maxPostgresConnections; and the adapter-based runMigrations declaration is replaced by runMigrations(config: MigrationConfig) with MigrationConfig = { databaseUrl: string; ssl?: ConnectionOptions; logger?: Logger }.
package/dist/index.js
CHANGED

The ESM bundle receives the same changes as package/dist/index.cjs above (+66 / -68), written in ESM syntax. The embedded package.json block is bumped to 1.0.1 with the simplified exports map and the new build script. In src/database/postgres.ts, "import pg from \"pg\"" is added; PostgresClient drops the adapter field, getAdapter() and end(), constructs this.pool = new pg.Pool(config.poolConfig), and routes query, upsert, and delete calls through this.pool.query(...); withAdvisoryLock takes a dedicated connection from the pool around pg_advisory_lock / pg_advisory_unlock. The StripeSync constructor builds poolConfig from config.poolConfig, config.databaseUrl, and the deprecated config.maxPostgresConnections, defaulting max to 10 and keepAlive to true. In src/database/migrate.ts, "import { Client } from \"pg\"" is added, connectAndMigrate receives the config object instead of a logger, and runMigrations(config) creates new Client({ connectionString: config.databaseUrl, ssl: config.ssl, connectionTimeoutMillis: 1e4 }), connects, creates the stripe schema, runs the bundled migrations, and in its finally block calls client.end() and logs "Finished migrations".
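In both bundles the StripeSync constructor now folds databaseUrl and the deprecated maxPostgresConnections into a single pg PoolConfig with max: 10 and keepAlive: true defaults. The standalone helper below is only an illustrative restatement of that derivation, not an exported function; the real code mutates config.poolConfig in place rather than copying it:

import type { PoolConfig } from "pg";

// Illustrative only — mirrors the 1.0.1 constructor's pool-config derivation.
function derivePoolConfig(config: {
  poolConfig?: PoolConfig;
  databaseUrl?: string;
  maxPostgresConnections?: number; // deprecated in 1.0.1, still honoured
}): PoolConfig {
  const poolConfig: PoolConfig = { ...(config.poolConfig ?? {}) };
  if (config.databaseUrl) poolConfig.connectionString = config.databaseUrl;
  if (config.maxPostgresConnections) poolConfig.max = config.maxPostgresConnections;
  if (poolConfig.max === undefined) poolConfig.max = 10;               // default pool size
  if (poolConfig.keepAlive === undefined) poolConfig.keepAlive = true; // keep sockets alive
  return poolConfig;
}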
package/package.json
CHANGED

@@ -1,46 +1,24 @@
 {
   "name": "stripe-experiment-sync",
-  "version": "1.0.0",
+  "version": "1.0.1",
   "private": false,
   "description": "Stripe Sync Engine to sync Stripe data to Postgres",
   "type": "module",
   "main": "./dist/index.cjs",
   "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/index.d.ts",
-        "default": "./dist/index.js"
-      },
-      "require": {
-        "types": "./dist/index.d.cts",
-        "default": "./dist/index.cjs"
-      }
+    "import": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js"
     },
-    "./pg": {
-      "import": {
-        "types": "./dist/pg.d.ts",
-        "default": "./dist/pg.js"
-      },
-      "require": {
-        "types": "./dist/pg.d.cts",
-        "default": "./dist/pg.cjs"
-      }
-    },
-    "./postgres-js": {
-      "import": {
-        "types": "./dist/postgres-js.d.ts",
-        "default": "./dist/postgres-js.js"
-      },
-      "require": {
-        "types": "./dist/postgres-js.d.cts",
-        "default": "./dist/postgres-js.cjs"
-      }
+    "require": {
+      "types": "./dist/index.d.cts",
+      "require": "./dist/index.cjs"
     }
   },
   "scripts": {
     "clean": "rimraf dist",
     "prebuild": "npm run clean",
-    "build": "tsup src/index.ts …",
+    "build": "tsup src/index.ts --format esm,cjs --dts --shims && cp -r src/database/migrations dist/migrations",
     "lint": "eslint src --ext .ts",
     "test": "vitest"
   },
@@ -50,7 +28,6 @@
   "dependencies": {
     "pg": "^8.16.3",
     "pg-node-migrations": "0.0.8",
-    "postgres": "^3.4.7",
     "ws": "^8.18.0",
     "yesql": "^7.0.0"
   },
@@ -63,7 +40,6 @@
     "@types/ws": "^8.5.13",
     "@types/yesql": "^4.1.4",
     "@vitest/ui": "^4.0.9",
-    "stripe": "^20.0.0",
     "vitest": "^3.2.4"
   },
   "repository": {
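The exports map now exposes only the package root: the "." wrapper and the "./pg" / "./postgres-js" subpaths are gone, and the conditional entries use "import"/"require" keys for the files instead of "default". In practice consumers import everything from the root; names below are taken from the declaration files above, and the commented lines show what no longer resolves in 1.0.1:

// ESM resolves dist/index.js with types from dist/index.d.ts;
// require("stripe-experiment-sync") resolves dist/index.cjs with dist/index.d.cts.
import { StripeSync, PostgresClient, runMigrations, VERSION } from "stripe-experiment-sync";

// Removed in 1.0.1 — these subpath exports (and their dist files) no longer exist:
// import { PgAdapter } from "stripe-experiment-sync/pg";
// import { PostgresJsAdapter } from "stripe-experiment-sync/postgres-js";

console.log(VERSION);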
package/dist/adapter-BtXT5w9r.d.cts
DELETED

@@ -1,51 +0,0 @@
-/**
- * pg-compatible client interface for use with pg-node-migrations.
- * This is the minimal interface required by the migration library.
- */
-interface PgCompatibleClient {
-    query(sql: string | {
-        text: string;
-        values?: unknown[];
-    }): Promise<{
-        rows: unknown[];
-        rowCount: number;
-    }>;
-}
-/**
- * Database adapter interface for abstracting database operations.
- * This allows sync-engine to work with different database clients:
- * - pg (Node.js) - for CLI, tests, existing deployments
- * - postgres.js (Node.js + Deno) - for Supabase Edge Functions
- */
-interface DatabaseAdapter {
-    /**
-     * Execute a SQL query with optional parameters.
-     * @param sql - The SQL query string with $1, $2, etc. placeholders
-     * @param params - Optional array of parameter values
-     * @returns Query result with rows and rowCount
-     */
-    query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<{
-        rows: T[];
-        rowCount: number;
-    }>;
-    /**
-     * Close all connections and clean up resources.
-     */
-    end(): Promise<void>;
-    /**
-     * Execute a function while holding a PostgreSQL advisory lock.
-     * Adapters that don't support locking should just execute fn() directly.
-     *
-     * @param lockId - Integer lock ID (use hashToInt32 to convert string keys)
-     * @param fn - Function to execute while holding the lock
-     * @returns Result of the function
-     */
-    withAdvisoryLock<T>(lockId: number, fn: () => Promise<T>): Promise<T>;
-    /**
-     * Returns a pg-compatible client for use with libraries that expect a pg.Client interface.
-     * Used by pg-node-migrations to run database migrations.
-     */
-    toPgClient(): PgCompatibleClient;
-}
-
-export type { DatabaseAdapter as D, PgCompatibleClient as P };
package/dist/adapter-BtXT5w9r.d.ts
DELETED

Identical in content to package/dist/adapter-BtXT5w9r.d.cts above: the same 51-line PgCompatibleClient / DatabaseAdapter interface declarations are removed from the ESM typings.
package/dist/pg.cjs
DELETED

The CommonJS build of the removed "./pg" entry point (87 lines): tsup's CJS interop preamble (__create, __defProp, __copyProps, __toESM, __toCommonJS), module.exports = __toCommonJS(pg_exports) exporting PgAdapter, and the same PgAdapter class shown in package/dist/pg.js below, loading the driver via var import_pg = __toESM(require("pg"), 1) and creating the pool with new import_pg.default.Pool(config).
package/dist/pg.d.cts
DELETED

@@ -1,28 +0,0 @@
-import { PoolConfig } from 'pg';
-import { D as DatabaseAdapter, P as PgCompatibleClient } from './adapter-BtXT5w9r.cjs';
-
-/**
- * Database adapter implementation using node-postgres (pg).
- * This is the default adapter for Node.js environments.
- */
-declare class PgAdapter implements DatabaseAdapter {
-    private pool;
-    constructor(config: PoolConfig);
-    query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<{
-        rows: T[];
-        rowCount: number;
-    }>;
-    end(): Promise<void>;
-    /**
-     * Execute a function while holding a PostgreSQL advisory lock.
-     * Uses a dedicated connection to ensure lock is held for the duration.
-     */
-    withAdvisoryLock<T>(lockId: number, fn: () => Promise<T>): Promise<T>;
-    /**
-     * Returns a pg-compatible client for use with libraries that expect pg.Client.
-     * Used by pg-node-migrations to run database migrations.
-     */
-    toPgClient(): PgCompatibleClient;
-}
-
-export { PgAdapter };
package/dist/pg.d.ts
DELETED

Identical to package/dist/pg.d.cts above except that it imports DatabaseAdapter and PgCompatibleClient from './adapter-BtXT5w9r.js' rather than './adapter-BtXT5w9r.cjs' (28 lines removed).
package/dist/pg.js
DELETED

@@ -1,50 +0,0 @@
-// src/database/pg-adapter.ts
-import pg from "pg";
-var PgAdapter = class {
-  pool;
-  constructor(config) {
-    this.pool = new pg.Pool(config);
-  }
-  async query(sql, params) {
-    const result = await this.pool.query(sql, params);
-    return {
-      rows: result.rows,
-      rowCount: result.rowCount ?? 0
-    };
-  }
-  async end() {
-    await this.pool.end();
-  }
-  /**
-   * Execute a function while holding a PostgreSQL advisory lock.
-   * Uses a dedicated connection to ensure lock is held for the duration.
-   */
-  async withAdvisoryLock(lockId, fn) {
-    const client = await this.pool.connect();
-    try {
-      await client.query("SELECT pg_advisory_lock($1)", [lockId]);
-      return await fn();
-    } finally {
-      try {
-        await client.query("SELECT pg_advisory_unlock($1)", [lockId]);
-      } finally {
-        client.release();
-      }
-    }
-  }
-  /**
-   * Returns a pg-compatible client for use with libraries that expect pg.Client.
-   * Used by pg-node-migrations to run database migrations.
-   */
-  toPgClient() {
-    return {
-      query: async (sql) => {
-        const result = typeof sql === "string" ? await this.pool.query(sql) : await this.pool.query(sql.text, sql.values);
-        return { rows: result.rows, rowCount: result.rowCount ?? 0 };
-      }
-    };
-  }
-};
-export {
-  PgAdapter
-};
package/dist/postgres-js.cjs
DELETED

The CommonJS build of the removed "./postgres-js" entry point (90 lines): the same tsup CJS interop preamble as pg.cjs, module.exports = __toCommonJS(postgres_js_exports) exporting PostgresJsAdapter, and the same PostgresJsAdapter class shown in package/dist/postgres-js.js below, loading the driver via var import_postgres = __toESM(require("postgres"), 1).
package/dist/postgres-js.d.cts
DELETED

@@ -1,31 +0,0 @@
-import { D as DatabaseAdapter, P as PgCompatibleClient } from './adapter-BtXT5w9r.cjs';
-
-interface PostgresJsConfig {
-    connectionString: string;
-    max?: number;
-}
-/**
- * Database adapter implementation using postgres.js.
- * Works in Node.js, Deno, Bun, and Cloudflare Workers.
- */
-declare class PostgresJsAdapter implements DatabaseAdapter {
-    private sql;
-    constructor(config: PostgresJsConfig);
-    query<T = Record<string, unknown>>(sqlQuery: string, params?: unknown[]): Promise<{
-        rows: T[];
-        rowCount: number;
-    }>;
-    end(): Promise<void>;
-    /**
-     * Execute a function while holding a PostgreSQL advisory lock.
-     * Uses a transaction to ensure lock is held for the duration.
-     */
-    withAdvisoryLock<T>(lockId: number, fn: () => Promise<T>): Promise<T>;
-    /**
-     * Returns a pg-compatible client for use with libraries that expect pg.Client.
-     * Used by pg-node-migrations to run database migrations.
-     */
-    toPgClient(): PgCompatibleClient;
-}
-
-export { PostgresJsAdapter };
package/dist/postgres-js.d.ts
DELETED

Identical to package/dist/postgres-js.d.cts above except that it imports DatabaseAdapter and PgCompatibleClient from './adapter-BtXT5w9r.js' rather than './adapter-BtXT5w9r.cjs' (31 lines removed).
package/dist/postgres-js.js
DELETED

@@ -1,53 +0,0 @@
-// src/database/postgres-js-adapter.ts
-import postgres from "postgres";
-var PostgresJsAdapter = class {
-  sql;
-  constructor(config) {
-    this.sql = postgres(config.connectionString, {
-      max: config.max ?? 10,
-      prepare: false
-      // Required for Supabase connection pooling
-    });
-  }
-  async query(sqlQuery, params) {
-    const result = await this.sql.unsafe(
-      sqlQuery,
-      params
-    );
-    return {
-      rows: [...result],
-      rowCount: result.count ?? result.length
-    };
-  }
-  async end() {
-    await this.sql.end();
-  }
-  /**
-   * Execute a function while holding a PostgreSQL advisory lock.
-   * Uses a transaction to ensure lock is held for the duration.
-   */
-  async withAdvisoryLock(lockId, fn) {
-    const result = await this.sql.begin(async (tx) => {
-      await tx`SELECT pg_advisory_xact_lock(${lockId})`;
-      return await fn();
-    });
-    return result;
-  }
-  /**
-   * Returns a pg-compatible client for use with libraries that expect pg.Client.
-   * Used by pg-node-migrations to run database migrations.
-   */
-  toPgClient() {
-    return {
-      query: async (sql) => {
-        const text = typeof sql === "string" ? sql : sql.text;
-        const values = typeof sql === "string" ? void 0 : sql.values;
-        const rows = await this.sql.unsafe(text, values);
-        return { rows: [...rows], rowCount: rows.length };
-      }
-    };
-  }
-};
-export {
-  PostgresJsAdapter
-};
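For projects that used the deleted adapter entry points, there is no direct equivalent in 1.0.1: the postgres.js driver is dropped entirely, and node-postgres is configured through poolConfig or databaseUrl instead of a hand-built PgAdapter. A rough before/after sketch; the 1.0.0 lines mirror the deleted files above, and the identifiers in the 1.0.1 half are placeholders:

// 1.0.0 (removed):
//   import { PgAdapter } from "stripe-experiment-sync/pg";
//   const adapter = new PgAdapter({ connectionString: databaseUrl });
//   await runMigrations(adapter, logger);
//   // ...pass the adapter in StripeSyncConfig, call adapter.end() on shutdown.

// 1.0.1 replacement:
import { runMigrations, PostgresClient } from "stripe-experiment-sync";

const databaseUrl = process.env.DATABASE_URL ?? ""; // placeholder
await runMigrations({ databaseUrl });

const db = new PostgresClient({ schema: "stripe", poolConfig: { connectionString: databaseUrl } });
// ...use db.query() / db.withAdvisoryLock() where the adapter methods were used before...
await db.pool.end(); // replaces adapter.end()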