stripe-experiment-sync 1.0.8 → 1.0.9-beta.1765909347

This diff covers publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registries.
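
The headline change in this release is an optional Stripe Sigma ingestion path for two new tables (subscription_item_change_events_v2_beta and exchange_rates_from_usd), gated behind a new enableSigmaSync option that defaults to false, plus acceptance of restricted ("rk_") Stripe keys wherever "sk_" keys were accepted before. A minimal sketch of how the flag is wired, based on the CLI and constructor changes below; the environment variable names are the ones this diff reads, while the standalone usage itself is illustrative rather than documented API:

    import { StripeSync } from 'stripe-experiment-sync'

    const stripeSync = new StripeSync({
      databaseUrl: process.env.DATABASE_URL,
      stripeSecretKey: process.env.STRIPE_API_KEY,                // "sk_..." or, as of this release, "rk_..."
      enableSigmaSync: process.env.ENABLE_SIGMA_SYNC === 'true',  // off unless explicitly enabled
    })

    // With the flag off, the two Sigma-backed objects are filtered out of the sync plan:
    console.log(stripeSync.getSupportedSyncObjects())
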
package/dist/cli/lib.cjs CHANGED
@@ -59,6 +59,7 @@ async function loadConfig(options) {
59
59
  config.stripeApiKey = options.stripeKey || process.env.STRIPE_API_KEY || "";
60
60
  config.ngrokAuthToken = options.ngrokToken || process.env.NGROK_AUTH_TOKEN || "";
61
61
  config.databaseUrl = options.databaseUrl || process.env.DATABASE_URL || "";
62
+ config.enableSigmaSync = options.enableSigmaSync ?? (process.env.ENABLE_SIGMA_SYNC !== void 0 ? process.env.ENABLE_SIGMA_SYNC === "true" : void 0);
62
63
  const questions = [];
63
64
  if (!config.stripeApiKey) {
64
65
  questions.push({
@@ -70,8 +71,8 @@ async function loadConfig(options) {
70
71
  if (!input || input.trim() === "") {
71
72
  return "Stripe API key is required";
72
73
  }
73
- if (!input.startsWith("sk_")) {
74
- return 'Stripe API key should start with "sk_"';
74
+ if (!input.startsWith("sk_") && !input.startsWith("rk_")) {
75
+ return 'Stripe API key should start with "sk_" or "rk_"';
75
76
  }
76
77
  return true;
77
78
  }
@@ -94,18 +95,29 @@ async function loadConfig(options) {
94
95
  }
95
96
  });
96
97
  }
98
+ if (config.enableSigmaSync === void 0) {
99
+ questions.push({
100
+ type: "confirm",
101
+ name: "enableSigmaSync",
102
+ message: "Enable Sigma sync? (Requires Sigma access in Stripe API key)",
103
+ default: false
104
+ });
105
+ }
97
106
  if (questions.length > 0) {
98
- console.log(import_chalk.default.yellow("\nMissing required configuration. Please provide:"));
107
+ console.log(import_chalk.default.yellow("\nMissing configuration. Please provide:"));
99
108
  const answers = await import_inquirer.default.prompt(questions);
100
109
  Object.assign(config, answers);
101
110
  }
111
+ if (config.enableSigmaSync === void 0) {
112
+ config.enableSigmaSync = false;
113
+ }
102
114
  return config;
103
115
  }
104
116
 
105
117
  // package.json
106
118
  var package_default = {
107
119
  name: "stripe-experiment-sync",
108
- version: "1.0.8-beta.1765856228",
120
+ version: "1.0.9-beta.1765909347",
109
121
  private: false,
110
122
  description: "Stripe Sync Engine to sync Stripe data to Postgres",
111
123
  type: "module",
@@ -145,6 +157,7 @@ var package_default = {
145
157
  dotenv: "^16.4.7",
146
158
  express: "^4.18.2",
147
159
  inquirer: "^12.3.0",
160
+ papaparse: "5.4.1",
148
161
  pg: "^8.16.3",
149
162
  "pg-node-migrations": "0.0.8",
150
163
  stripe: "^17.7.0",
@@ -156,6 +169,7 @@ var package_default = {
156
169
  "@types/express": "^4.17.21",
157
170
  "@types/inquirer": "^9.0.7",
158
171
  "@types/node": "^24.10.1",
172
+ "@types/papaparse": "5.3.16",
159
173
  "@types/pg": "^8.15.5",
160
174
  "@types/ws": "^8.5.13",
161
175
  "@types/yesql": "^4.1.4",
@@ -185,14 +199,60 @@ var package_default = {
185
199
  };
186
200
 
187
201
  // src/stripeSync.ts
188
- var import_stripe2 = __toESM(require("stripe"), 1);
202
+ var import_stripe3 = __toESM(require("stripe"), 1);
189
203
  var import_yesql2 = require("yesql");
190
204
 
191
205
  // src/database/postgres.ts
192
206
  var import_pg = __toESM(require("pg"), 1);
193
207
  var import_yesql = require("yesql");
208
+
209
+ // src/database/QueryUtils.ts
210
+ var QueryUtils = class _QueryUtils {
211
+ constructor() {
212
+ }
213
+ static quoteIdent(name) {
214
+ return `"${name}"`;
215
+ }
216
+ static quotedList(names) {
217
+ return names.map(_QueryUtils.quoteIdent).join(", ");
218
+ }
219
+ static buildInsertParts(columns) {
220
+ const columnsSql = columns.map((c) => _QueryUtils.quoteIdent(c.column)).join(", ");
221
+ const valuesSql = columns.map((c, i) => {
222
+ const placeholder = `$${i + 1}`;
223
+ return `${placeholder}::${c.pgType}`;
224
+ }).join(", ");
225
+ const params = columns.map((c) => c.value);
226
+ return { columnsSql, valuesSql, params };
227
+ }
228
+ static buildRawJsonUpsertQuery(schema, table, columns, conflictTarget) {
229
+ const { columnsSql, valuesSql, params } = _QueryUtils.buildInsertParts(columns);
230
+ const conflictSql = _QueryUtils.quotedList(conflictTarget);
231
+ const tsParamIdx = columns.findIndex((c) => c.column === "_last_synced_at") + 1;
232
+ if (tsParamIdx <= 0) {
233
+ throw new Error("buildRawJsonUpsertQuery requires _last_synced_at column");
234
+ }
235
+ const sql3 = `
236
+ INSERT INTO ${_QueryUtils.quoteIdent(schema)}.${_QueryUtils.quoteIdent(table)} (${columnsSql})
237
+ VALUES (${valuesSql})
238
+ ON CONFLICT (${conflictSql})
239
+ DO UPDATE SET
240
+ "_raw_data" = EXCLUDED."_raw_data",
241
+ "_last_synced_at" = $${tsParamIdx},
242
+ "_account_id" = EXCLUDED."_account_id"
243
+ WHERE ${_QueryUtils.quoteIdent(table)}."_last_synced_at" IS NULL
244
+ OR ${_QueryUtils.quoteIdent(table)}."_last_synced_at" < $${tsParamIdx}
245
+ RETURNING *
246
+ `;
247
+ return { sql: sql3, params };
248
+ }
249
+ };
250
+
251
+ // src/database/postgres.ts
194
252
  var ORDERED_STRIPE_TABLES = [
253
+ "exchange_rates_from_usd",
195
254
  "subscription_items",
255
+ "subscription_item_change_events_v2_beta",
196
256
  "subscriptions",
197
257
  "subscription_schedules",
198
258
  "checkout_session_line_items",
@@ -262,7 +322,7 @@ var PostgresClient = class {
262
322
  }
263
323
  return results.flatMap((it) => it.rows);
264
324
  }
265
- async upsertManyWithTimestampProtection(entries, table, accountId, syncTimestamp) {
325
+ async upsertManyWithTimestampProtection(entries, table, accountId, syncTimestamp, upsertOptions) {
266
326
  const timestamp = syncTimestamp || (/* @__PURE__ */ new Date()).toISOString();
267
327
  if (!entries.length) return [];
268
328
  const chunkSize = 5;
@@ -297,20 +357,33 @@ var PostgresClient = class {
297
357
  const prepared = (0, import_yesql.pg)(upsertSql, { useNullForMissing: true })(cleansed);
298
358
  queries.push(this.pool.query(prepared.text, prepared.values));
299
359
  } else {
300
- const rawData = JSON.stringify(entry);
301
- const upsertSql = `
302
- INSERT INTO "${this.config.schema}"."${table}" ("_raw_data", "_last_synced_at", "_account_id")
303
- VALUES ($1::jsonb, $2, $3)
304
- ON CONFLICT (id)
305
- DO UPDATE SET
306
- "_raw_data" = EXCLUDED."_raw_data",
307
- "_last_synced_at" = $2,
308
- "_account_id" = EXCLUDED."_account_id"
309
- WHERE "${table}"."_last_synced_at" IS NULL
310
- OR "${table}"."_last_synced_at" < $2
311
- RETURNING *
312
- `;
313
- queries.push(this.pool.query(upsertSql, [rawData, timestamp, accountId]));
360
+ const conflictTarget = upsertOptions?.conflictTarget ?? ["id"];
361
+ const extraColumns = upsertOptions?.extraColumns ?? [];
362
+ if (!conflictTarget.length) {
363
+ throw new Error(`Invalid upsert config for ${table}: conflictTarget must be non-empty`);
364
+ }
365
+ const columns = [
366
+ { column: "_raw_data", pgType: "jsonb", value: JSON.stringify(entry) },
367
+ ...extraColumns.map((c) => ({
368
+ column: c.column,
369
+ pgType: c.pgType,
370
+ value: entry[c.entryKey]
371
+ })),
372
+ { column: "_last_synced_at", pgType: "timestamptz", value: timestamp },
373
+ { column: "_account_id", pgType: "text", value: accountId }
374
+ ];
375
+ for (const c of columns) {
376
+ if (c.value === void 0) {
377
+ throw new Error(`Missing required value for ${table}.${c.column}`);
378
+ }
379
+ }
380
+ const { sql: upsertSql, params } = QueryUtils.buildRawJsonUpsertQuery(
381
+ this.config.schema,
382
+ table,
383
+ columns,
384
+ conflictTarget
385
+ );
386
+ queries.push(this.pool.query(upsertSql, params));
314
387
  }
315
388
  });
316
389
  results.push(...await Promise.all(queries));
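
For tables without a dedicated upsert template, the raw-JSON path now delegates SQL construction to QueryUtils.buildRawJsonUpsertQuery, so Sigma-backed tables can declare a composite conflict target and extra typed columns instead of the previous hard-coded ON CONFLICT (id), while keeping the _last_synced_at guard that stops older syncs from overwriting newer rows. QueryUtils is internal to this bundle, so the standalone call below is illustrative only; the row values are placeholders, but the generated SQL shape follows directly from the code above:

    // Hypothetical standalone call, mirroring what upsertManyWithTimestampProtection
    // assembles for the exchange_rates_from_usd config defined later in this diff.
    const row = { date: '2025-06-01', sell_currency: 'eur' }  // plus the remaining CSV columns
    const accountId = 'acct_123'                              // placeholder
    const timestamp = new Date().toISOString()

    const { sql, params } = QueryUtils.buildRawJsonUpsertQuery(
      'stripe',                       // config.schema, shown literally for illustration
      'exchange_rates_from_usd',
      [
        { column: '_raw_data', pgType: 'jsonb', value: JSON.stringify(row) },
        { column: 'date', pgType: 'date', value: row.date },
        { column: 'sell_currency', pgType: 'text', value: row.sell_currency },
        { column: '_last_synced_at', pgType: 'timestamptz', value: timestamp },
        { column: '_account_id', pgType: 'text', value: accountId }
      ],
      ['_account_id', 'date', 'sell_currency']
    )
    // sql, whitespace aside, comes out as:
    //   INSERT INTO "stripe"."exchange_rates_from_usd"
    //     ("_raw_data", "date", "sell_currency", "_last_synced_at", "_account_id")
    //   VALUES ($1::jsonb, $2::date, $3::text, $4::timestamptz, $5::text)
    //   ON CONFLICT ("_account_id", "date", "sell_currency") DO UPDATE SET
    //     "_raw_data" = EXCLUDED."_raw_data",
    //     "_last_synced_at" = $4,
    //     "_account_id" = EXCLUDED."_account_id"
    //   WHERE "exchange_rates_from_usd"."_last_synced_at" IS NULL
    //      OR "exchange_rates_from_usd"."_last_synced_at" < $4
    //   RETURNING *
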
@@ -710,7 +783,12 @@ var PostgresClient = class {
710
783
  } else {
711
784
  await this.query(
712
785
  `UPDATE "${this.config.schema}"."_sync_obj_runs"
713
- SET cursor = $4, updated_at = now()
786
+ SET cursor = CASE
787
+ WHEN cursor IS NULL THEN $4
788
+ WHEN (cursor COLLATE "C") < ($4::text COLLATE "C") THEN $4
789
+ ELSE cursor
790
+ END,
791
+ updated_at = now()
714
792
  WHERE "_account_id" = $1 AND run_started_at = $2 AND object = $3`,
715
793
  [accountId, runStartedAt, object, cursor]
716
794
  );
@@ -721,10 +799,17 @@ var PostgresClient = class {
721
799
  * This considers completed, error, AND running runs to ensure recovery syncs
722
800
  * don't re-process data that was already synced before a crash.
723
801
  * A 'running' status with a cursor means the process was killed mid-sync.
802
+ *
803
+ * Handles two cursor formats:
804
+ * - Numeric: compared as bigint for correct ordering
805
+ * - Composite cursors: compared as strings with COLLATE "C"
724
806
  */
725
807
  async getLastCompletedCursor(accountId, object) {
726
808
  const result = await this.query(
727
- `SELECT MAX(o.cursor::bigint)::text as cursor
809
+ `SELECT CASE
810
+ WHEN BOOL_OR(o.cursor !~ '^\\d+$') THEN MAX(o.cursor COLLATE "C")
811
+ ELSE MAX(CASE WHEN o.cursor ~ '^\\d+$' THEN o.cursor::bigint END)::text
812
+ END as cursor
728
813
  FROM "${this.config.schema}"."_sync_obj_runs" o
729
814
  WHERE o."_account_id" = $1
730
815
  AND o.object = $2
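
The cursor bookkeeping now has to handle two formats: the numeric created-timestamp cursors used by the existing list-API sync, and the composite Sigma cursors introduced further down, which MAX() must compare bytewise (COLLATE "C") rather than as numbers. A small illustration of why the query branches; the values are invented:

    // Numeric cursors are epoch seconds stored as text, so string MAX() would misorder
    // them; that is what the ::bigint cast on the numeric branch avoids.
    console.log(['999', '1000'].sort())                          // [ '1000', '999' ]
    console.log([999n, 1000n].sort((a, b) => (a < b ? -1 : 1)))  // [ 999n, 1000n ]

    // Composite Sigma cursors are "v<version>" plus delimited column values whose leading
    // component is an ISO-8601 timestamp or date, so plain bytewise comparison already
    // tracks ingestion order on the other branch.
    console.log('2025-06-02T00:00:00.000Z' > '2025-06-01T23:59:59.000Z')  // true
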
@@ -1009,6 +1094,269 @@ function hashApiKey(apiKey) {
1009
1094
  return (0, import_crypto.createHash)("sha256").update(apiKey).digest("hex");
1010
1095
  }
1011
1096
 
1097
+ // src/sigma/sigmaApi.ts
1098
+ var import_papaparse = __toESM(require("papaparse"), 1);
1099
+ var import_stripe2 = __toESM(require("stripe"), 1);
1100
+ var STRIPE_FILES_BASE = "https://files.stripe.com/v1";
1101
+ function sleep2(ms) {
1102
+ return new Promise((resolve) => setTimeout(resolve, ms));
1103
+ }
1104
+ function parseCsvObjects(csv) {
1105
+ const input = csv.replace(/^\uFEFF/, "");
1106
+ const parsed = import_papaparse.default.parse(input, {
1107
+ header: true,
1108
+ skipEmptyLines: "greedy"
1109
+ });
1110
+ if (parsed.errors.length > 0) {
1111
+ throw new Error(`Failed to parse Sigma CSV: ${parsed.errors[0]?.message ?? "unknown error"}`);
1112
+ }
1113
+ return parsed.data.filter((row) => row && Object.keys(row).length > 0).map(
1114
+ (row) => Object.fromEntries(
1115
+ Object.entries(row).map(([k, v]) => [k, v == null || v === "" ? null : String(v)])
1116
+ )
1117
+ );
1118
+ }
1119
+ function normalizeSigmaTimestampToIso(value) {
1120
+ const v = value.trim();
1121
+ if (!v) return null;
1122
+ const hasExplicitTz = /z$|[+-]\d{2}:?\d{2}$/i.test(v);
1123
+ const isoish = v.includes("T") ? v : v.replace(" ", "T");
1124
+ const candidate = hasExplicitTz ? isoish : `${isoish}Z`;
1125
+ const d = new Date(candidate);
1126
+ if (Number.isNaN(d.getTime())) return null;
1127
+ return d.toISOString();
1128
+ }
1129
+ async function fetchStripeText(url, apiKey, options) {
1130
+ const res = await fetch(url, {
1131
+ ...options,
1132
+ headers: {
1133
+ ...options.headers ?? {},
1134
+ Authorization: `Bearer ${apiKey}`
1135
+ }
1136
+ });
1137
+ const text = await res.text();
1138
+ if (!res.ok) {
1139
+ throw new Error(`Sigma file download error (${res.status}) for ${url}: ${text}`);
1140
+ }
1141
+ return text;
1142
+ }
1143
+ async function runSigmaQueryAndDownloadCsv(params) {
1144
+ const pollTimeoutMs = params.pollTimeoutMs ?? 5 * 60 * 1e3;
1145
+ const pollIntervalMs = params.pollIntervalMs ?? 2e3;
1146
+ const stripe = new import_stripe2.default(params.apiKey);
1147
+ const created = await stripe.rawRequest("POST", "/v1/sigma/query_runs", {
1148
+ sql: params.sql
1149
+ });
1150
+ const queryRunId = created.id;
1151
+ const start = Date.now();
1152
+ let current = created;
1153
+ while (current.status === "running") {
1154
+ if (Date.now() - start > pollTimeoutMs) {
1155
+ throw new Error(`Sigma query run timed out after ${pollTimeoutMs}ms: ${queryRunId}`);
1156
+ }
1157
+ await sleep2(pollIntervalMs);
1158
+ current = await stripe.rawRequest(
1159
+ "GET",
1160
+ `/v1/sigma/query_runs/${queryRunId}`,
1161
+ {}
1162
+ );
1163
+ }
1164
+ if (current.status !== "succeeded") {
1165
+ throw new Error(
1166
+ `Sigma query run did not succeed (status=${current.status}) id=${queryRunId} error=${JSON.stringify(
1167
+ current.error
1168
+ )}`
1169
+ );
1170
+ }
1171
+ const fileId = current.result?.file;
1172
+ if (!fileId) {
1173
+ throw new Error(`Sigma query run succeeded but result.file is missing (id=${queryRunId})`);
1174
+ }
1175
+ const csv = await fetchStripeText(
1176
+ `${STRIPE_FILES_BASE}/files/${fileId}/contents`,
1177
+ params.apiKey,
1178
+ { method: "GET" }
1179
+ );
1180
+ return { queryRunId, fileId, csv };
1181
+ }
1182
+
1183
+ // src/sigma/sigmaIngestionConfigs.ts
1184
+ var SIGMA_INGESTION_CONFIGS = {
1185
+ subscription_item_change_events_v2_beta: {
1186
+ sigmaTable: "subscription_item_change_events_v2_beta",
1187
+ destinationTable: "subscription_item_change_events_v2_beta",
1188
+ pageSize: 1e4,
1189
+ cursor: {
1190
+ version: 1,
1191
+ columns: [
1192
+ { column: "event_timestamp", type: "timestamp" },
1193
+ { column: "event_type", type: "string" },
1194
+ { column: "subscription_item_id", type: "string" }
1195
+ ]
1196
+ },
1197
+ upsert: {
1198
+ conflictTarget: ["_account_id", "event_timestamp", "event_type", "subscription_item_id"],
1199
+ extraColumns: [
1200
+ { column: "event_timestamp", pgType: "timestamptz", entryKey: "event_timestamp" },
1201
+ { column: "event_type", pgType: "text", entryKey: "event_type" },
1202
+ { column: "subscription_item_id", pgType: "text", entryKey: "subscription_item_id" }
1203
+ ]
1204
+ }
1205
+ },
1206
+ exchange_rates_from_usd: {
1207
+ sigmaTable: "exchange_rates_from_usd",
1208
+ destinationTable: "exchange_rates_from_usd",
1209
+ pageSize: 1e4,
1210
+ cursor: {
1211
+ version: 1,
1212
+ columns: [
1213
+ { column: "date", type: "string" },
1214
+ { column: "sell_currency", type: "string" }
1215
+ ]
1216
+ },
1217
+ upsert: {
1218
+ conflictTarget: ["_account_id", "date", "sell_currency"],
1219
+ extraColumns: [
1220
+ { column: "date", pgType: "date", entryKey: "date" },
1221
+ { column: "sell_currency", pgType: "text", entryKey: "sell_currency" }
1222
+ ]
1223
+ }
1224
+ }
1225
+ };
1226
+
1227
+ // src/sigma/sigmaIngestion.ts
1228
+ var SIGMA_CURSOR_DELIM = "";
1229
+ function escapeSigmaSqlStringLiteral(value) {
1230
+ return value.replace(/'/g, "''");
1231
+ }
1232
+ function formatSigmaTimestampForSqlLiteral(date) {
1233
+ return date.toISOString().replace("T", " ").replace("Z", "");
1234
+ }
1235
+ function decodeSigmaCursorValues(spec, cursor) {
1236
+ const prefix = `v${spec.version}${SIGMA_CURSOR_DELIM}`;
1237
+ if (!cursor.startsWith(prefix)) {
1238
+ throw new Error(
1239
+ `Unrecognized Sigma cursor format (expected prefix ${JSON.stringify(prefix)}): ${cursor}`
1240
+ );
1241
+ }
1242
+ const parts = cursor.split(SIGMA_CURSOR_DELIM);
1243
+ const expected = 1 + spec.columns.length;
1244
+ if (parts.length !== expected) {
1245
+ throw new Error(`Malformed Sigma cursor: expected ${expected} parts, got ${parts.length}`);
1246
+ }
1247
+ return parts.slice(1);
1248
+ }
1249
+ function encodeSigmaCursor(spec, values) {
1250
+ if (values.length !== spec.columns.length) {
1251
+ throw new Error(
1252
+ `Cannot encode Sigma cursor: expected ${spec.columns.length} values, got ${values.length}`
1253
+ );
1254
+ }
1255
+ for (const v of values) {
1256
+ if (v.includes(SIGMA_CURSOR_DELIM)) {
1257
+ throw new Error("Cannot encode Sigma cursor: value contains delimiter character");
1258
+ }
1259
+ }
1260
+ return [`v${spec.version}`, ...values].join(SIGMA_CURSOR_DELIM);
1261
+ }
1262
+ function sigmaSqlLiteralForCursorValue(spec, rawValue) {
1263
+ switch (spec.type) {
1264
+ case "timestamp": {
1265
+ const d = new Date(rawValue);
1266
+ if (Number.isNaN(d.getTime())) {
1267
+ throw new Error(`Invalid timestamp cursor value for ${spec.column}: ${rawValue}`);
1268
+ }
1269
+ return `timestamp '${formatSigmaTimestampForSqlLiteral(d)}'`;
1270
+ }
1271
+ case "number": {
1272
+ if (!/^-?\d+(\.\d+)?$/.test(rawValue)) {
1273
+ throw new Error(`Invalid numeric cursor value for ${spec.column}: ${rawValue}`);
1274
+ }
1275
+ return rawValue;
1276
+ }
1277
+ case "string":
1278
+ return `'${escapeSigmaSqlStringLiteral(rawValue)}'`;
1279
+ }
1280
+ }
1281
+ function buildSigmaCursorWhereClause(spec, cursorValues) {
1282
+ if (cursorValues.length !== spec.columns.length) {
1283
+ throw new Error(
1284
+ `Cannot build Sigma cursor predicate: expected ${spec.columns.length} values, got ${cursorValues.length}`
1285
+ );
1286
+ }
1287
+ const cols = spec.columns.map((c) => c.column);
1288
+ const lits = spec.columns.map((c, i) => sigmaSqlLiteralForCursorValue(c, cursorValues[i] ?? ""));
1289
+ const ors = [];
1290
+ for (let i = 0; i < cols.length; i++) {
1291
+ const ands = [];
1292
+ for (let j = 0; j < i; j++) {
1293
+ ands.push(`${cols[j]} = ${lits[j]}`);
1294
+ }
1295
+ ands.push(`${cols[i]} > ${lits[i]}`);
1296
+ ors.push(`(${ands.join(" AND ")})`);
1297
+ }
1298
+ return ors.join(" OR ");
1299
+ }
1300
+ function buildSigmaQuery(config, cursor) {
1301
+ const select = config.select === void 0 || config.select === "*" ? "*" : config.select.join(", ");
1302
+ const whereParts = [];
1303
+ if (config.additionalWhere) {
1304
+ whereParts.push(`(${config.additionalWhere})`);
1305
+ }
1306
+ if (cursor) {
1307
+ const values = decodeSigmaCursorValues(config.cursor, cursor);
1308
+ const predicate = buildSigmaCursorWhereClause(config.cursor, values);
1309
+ whereParts.push(`(${predicate})`);
1310
+ }
1311
+ const whereClause = whereParts.length > 0 ? `WHERE ${whereParts.join(" AND ")}` : "";
1312
+ const orderBy = config.cursor.columns.map((c) => c.column).join(", ");
1313
+ return [
1314
+ `SELECT ${select} FROM ${config.sigmaTable}`,
1315
+ whereClause,
1316
+ `ORDER BY ${orderBy} ASC`,
1317
+ `LIMIT ${config.pageSize}`
1318
+ ].filter(Boolean).join(" ");
1319
+ }
1320
+ function defaultSigmaRowToEntry(config, row) {
1321
+ const out = { ...row };
1322
+ for (const col of config.cursor.columns) {
1323
+ const raw = row[col.column];
1324
+ if (raw == null) {
1325
+ throw new Error(`Sigma row missing required cursor column: ${col.column}`);
1326
+ }
1327
+ if (col.type === "timestamp") {
1328
+ const normalized = normalizeSigmaTimestampToIso(raw);
1329
+ if (!normalized) {
1330
+ throw new Error(`Sigma row has invalid timestamp for ${col.column}: ${raw}`);
1331
+ }
1332
+ out[col.column] = normalized;
1333
+ } else if (col.type === "string") {
1334
+ const v = raw.trim();
1335
+ if (!v) {
1336
+ throw new Error(`Sigma row has empty string for required cursor column: ${col.column}`);
1337
+ }
1338
+ out[col.column] = v;
1339
+ } else {
1340
+ const v = raw.trim();
1341
+ if (!v) {
1342
+ throw new Error(`Sigma row has empty value for required cursor column: ${col.column}`);
1343
+ }
1344
+ out[col.column] = v;
1345
+ }
1346
+ }
1347
+ return out;
1348
+ }
1349
+ function sigmaCursorFromEntry(config, entry) {
1350
+ const values = config.cursor.columns.map((c) => {
1351
+ const raw = entry[c.column];
1352
+ if (raw == null) {
1353
+ throw new Error(`Cannot build cursor: entry missing ${c.column}`);
1354
+ }
1355
+ return String(raw);
1356
+ });
1357
+ return encodeSigmaCursor(config.cursor, values);
1358
+ }
1359
+
1012
1360
  // src/stripeSync.ts
1013
1361
  function getUniqueIds(entries, key) {
1014
1362
  const set = new Set(
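
The Sigma ingestion module above pages through each Sigma table with keyset pagination: the cursor columns double as the ORDER BY and as a "strictly after the last row" predicate, so a resumed or crashed run continues from the last row it upserted instead of re-reading the table. A sketch of what buildSigmaQuery emits for the subscription_item_change_events_v2_beta config once a cursor exists; the cursor values are invented and the whitespace is reflowed here for readability (the real output is a single line):

    const sigmaSql = `
      SELECT * FROM subscription_item_change_events_v2_beta
      WHERE ((event_timestamp > timestamp '2025-06-01 12:00:00.000')
          OR (event_timestamp = timestamp '2025-06-01 12:00:00.000' AND event_type > 'created')
          OR (event_timestamp = timestamp '2025-06-01 12:00:00.000' AND event_type = 'created'
              AND subscription_item_id > 'si_123'))
      ORDER BY event_timestamp, event_type, subscription_item_id ASC
      LIMIT 10000`

fetchOneSigmaPage, defined later in this diff, runs that query through Stripe's Sigma query-run API, upserts the resulting CSV rows, and treats a page of exactly pageSize rows as a signal that more pages remain, persisting a new cursor built from the last row.
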
@@ -1019,7 +1367,7 @@ function getUniqueIds(entries, key) {
1019
1367
  var StripeSync = class {
1020
1368
  constructor(config) {
1021
1369
  this.config = config;
1022
- const baseStripe = new import_stripe2.default(config.stripeSecretKey, {
1370
+ const baseStripe = new import_stripe3.default(config.stripeSecretKey, {
1023
1371
  // https://github.com/stripe/stripe-node#configuration
1024
1372
  // @ts-ignore
1025
1373
  apiVersion: config.stripeApiVersion,
@@ -1420,6 +1768,17 @@ var StripeSync = class {
1420
1768
  listFn: (p) => this.stripe.checkout.sessions.list(p),
1421
1769
  upsertFn: (items, id) => this.upsertCheckoutSessions(items, id),
1422
1770
  supportsCreatedFilter: true
1771
+ },
1772
+ // Sigma-backed resources
1773
+ subscription_item_change_events_v2_beta: {
1774
+ order: 18,
1775
+ supportsCreatedFilter: false,
1776
+ sigma: SIGMA_INGESTION_CONFIGS.subscription_item_change_events_v2_beta
1777
+ },
1778
+ exchange_rates_from_usd: {
1779
+ order: 19,
1780
+ supportsCreatedFilter: false,
1781
+ sigma: SIGMA_INGESTION_CONFIGS.exchange_rates_from_usd
1423
1782
  }
1424
1783
  };
1425
1784
  async processEvent(event) {
@@ -1452,7 +1811,13 @@ var StripeSync = class {
1452
1811
  * Order is determined by the `order` field in resourceRegistry.
1453
1812
  */
1454
1813
  getSupportedSyncObjects() {
1455
- return Object.entries(this.resourceRegistry).sort(([, a], [, b]) => a.order - b.order).map(([key]) => key);
1814
+ const all = Object.entries(this.resourceRegistry).sort(([, a], [, b]) => a.order - b.order).map(([key]) => key);
1815
+ if (!this.config.enableSigmaSync) {
1816
+ return all.filter(
1817
+ (o) => o !== "subscription_item_change_events_v2_beta" && o !== "exchange_rates_from_usd"
1818
+ );
1819
+ }
1820
+ return all;
1456
1821
  }
1457
1822
  // Event handler methods
1458
1823
  async handleChargeEvent(event, accountId) {
@@ -1531,7 +1896,7 @@ var StripeSync = class {
1531
1896
  );
1532
1897
  await this.upsertProducts([product], accountId, this.getSyncTimestamp(event, refetched));
1533
1898
  } catch (err) {
1534
- if (err instanceof import_stripe2.default.errors.StripeAPIError && err.code === "resource_missing") {
1899
+ if (err instanceof import_stripe3.default.errors.StripeAPIError && err.code === "resource_missing") {
1535
1900
  const product = event.data.object;
1536
1901
  await this.deleteProduct(product.id);
1537
1902
  } else {
@@ -1551,7 +1916,7 @@ var StripeSync = class {
1551
1916
  );
1552
1917
  await this.upsertPrices([price], accountId, false, this.getSyncTimestamp(event, refetched));
1553
1918
  } catch (err) {
1554
- if (err instanceof import_stripe2.default.errors.StripeAPIError && err.code === "resource_missing") {
1919
+ if (err instanceof import_stripe3.default.errors.StripeAPIError && err.code === "resource_missing") {
1555
1920
  const price = event.data.object;
1556
1921
  await this.deletePrice(price.id);
1557
1922
  } else {
@@ -1571,7 +1936,7 @@ var StripeSync = class {
1571
1936
  );
1572
1937
  await this.upsertPlans([plan], accountId, false, this.getSyncTimestamp(event, refetched));
1573
1938
  } catch (err) {
1574
- if (err instanceof import_stripe2.default.errors.StripeAPIError && err.code === "resource_missing") {
1939
+ if (err instanceof import_stripe3.default.errors.StripeAPIError && err.code === "resource_missing") {
1575
1940
  const plan = event.data.object;
1576
1941
  await this.deletePlan(plan.id);
1577
1942
  } else {
@@ -1818,10 +2183,10 @@ var StripeSync = class {
1818
2183
  let cursor = null;
1819
2184
  if (!params?.created) {
1820
2185
  if (objRun?.cursor) {
1821
- cursor = parseInt(objRun.cursor);
2186
+ cursor = objRun.cursor;
1822
2187
  } else {
1823
2188
  const lastCursor = await this.postgresClient.getLastCompletedCursor(accountId, resourceName);
1824
- cursor = lastCursor ? parseInt(lastCursor) : null;
2189
+ cursor = lastCursor ?? null;
1825
2190
  }
1826
2191
  }
1827
2192
  const result = await this.fetchOnePage(
@@ -1876,9 +2241,18 @@ var StripeSync = class {
1876
2241
  throw new Error(`Unsupported object type for processNext: ${object}`);
1877
2242
  }
1878
2243
  try {
2244
+ if (config.sigma) {
2245
+ return await this.fetchOneSigmaPage(
2246
+ accountId,
2247
+ resourceName,
2248
+ runStartedAt,
2249
+ cursor,
2250
+ config.sigma
2251
+ );
2252
+ }
1879
2253
  const listParams = { limit };
1880
2254
  if (config.supportsCreatedFilter) {
1881
- const created = params?.created ?? (cursor ? { gte: cursor } : void 0);
2255
+ const created = params?.created ?? (cursor && /^\d+$/.test(cursor) ? { gte: Number.parseInt(cursor, 10) } : void 0);
1882
2256
  if (created) {
1883
2257
  listParams.created = created;
1884
2258
  }
@@ -1923,6 +2297,97 @@ var StripeSync = class {
1923
2297
  throw error;
1924
2298
  }
1925
2299
  }
2300
+ async getSigmaFallbackCursorFromDestination(accountId, sigmaConfig) {
2301
+ const cursorCols = sigmaConfig.cursor.columns;
2302
+ const selectCols = cursorCols.map((c) => `"${c.column}"`).join(", ");
2303
+ const orderBy = cursorCols.map((c) => `"${c.column}" DESC`).join(", ");
2304
+ const result = await this.postgresClient.query(
2305
+ `SELECT ${selectCols}
2306
+ FROM "stripe"."${sigmaConfig.destinationTable}"
2307
+ WHERE "_account_id" = $1
2308
+ ORDER BY ${orderBy}
2309
+ LIMIT 1`,
2310
+ [accountId]
2311
+ );
2312
+ if (result.rows.length === 0) return null;
2313
+ const row = result.rows[0];
2314
+ const entryForCursor = {};
2315
+ for (const c of cursorCols) {
2316
+ const v = row[c.column];
2317
+ if (v == null) {
2318
+ throw new Error(
2319
+ `Sigma fallback cursor query returned null for ${sigmaConfig.destinationTable}.${c.column}`
2320
+ );
2321
+ }
2322
+ if (c.type === "timestamp") {
2323
+ const d = v instanceof Date ? v : new Date(String(v));
2324
+ if (Number.isNaN(d.getTime())) {
2325
+ throw new Error(
2326
+ `Sigma fallback cursor query returned invalid timestamp for ${sigmaConfig.destinationTable}.${c.column}: ${String(
2327
+ v
2328
+ )}`
2329
+ );
2330
+ }
2331
+ entryForCursor[c.column] = d.toISOString();
2332
+ } else {
2333
+ entryForCursor[c.column] = String(v);
2334
+ }
2335
+ }
2336
+ return sigmaCursorFromEntry(sigmaConfig, entryForCursor);
2337
+ }
2338
+ async fetchOneSigmaPage(accountId, resourceName, runStartedAt, cursor, sigmaConfig) {
2339
+ if (!this.config.stripeSecretKey) {
2340
+ throw new Error("Sigma sync requested but stripeSecretKey is not configured.");
2341
+ }
2342
+ if (resourceName !== sigmaConfig.destinationTable) {
2343
+ throw new Error(
2344
+ `Sigma sync config mismatch: resourceName=${resourceName} destinationTable=${sigmaConfig.destinationTable}`
2345
+ );
2346
+ }
2347
+ const effectiveCursor = cursor ?? await this.getSigmaFallbackCursorFromDestination(accountId, sigmaConfig);
2348
+ const sigmaSql = buildSigmaQuery(sigmaConfig, effectiveCursor);
2349
+ this.config.logger?.info(
2350
+ { object: resourceName, pageSize: sigmaConfig.pageSize, hasCursor: Boolean(effectiveCursor) },
2351
+ "Sigma sync: running query"
2352
+ );
2353
+ const { queryRunId, fileId, csv } = await runSigmaQueryAndDownloadCsv({
2354
+ apiKey: this.config.stripeSecretKey,
2355
+ sql: sigmaSql,
2356
+ logger: this.config.logger
2357
+ });
2358
+ const rows = parseCsvObjects(csv);
2359
+ if (rows.length === 0) {
2360
+ await this.postgresClient.completeObjectSync(accountId, runStartedAt, resourceName);
2361
+ return { processed: 0, hasMore: false, runStartedAt };
2362
+ }
2363
+ const entries = rows.map(
2364
+ (row) => defaultSigmaRowToEntry(sigmaConfig, row)
2365
+ );
2366
+ this.config.logger?.info(
2367
+ { object: resourceName, rows: entries.length, queryRunId, fileId },
2368
+ "Sigma sync: upserting rows"
2369
+ );
2370
+ await this.postgresClient.upsertManyWithTimestampProtection(
2371
+ entries,
2372
+ resourceName,
2373
+ accountId,
2374
+ void 0,
2375
+ sigmaConfig.upsert
2376
+ );
2377
+ await this.postgresClient.incrementObjectProgress(
2378
+ accountId,
2379
+ runStartedAt,
2380
+ resourceName,
2381
+ entries.length
2382
+ );
2383
+ const newCursor = sigmaCursorFromEntry(sigmaConfig, entries[entries.length - 1]);
2384
+ await this.postgresClient.updateObjectCursor(accountId, runStartedAt, resourceName, newCursor);
2385
+ const hasMore = rows.length === sigmaConfig.pageSize;
2386
+ if (!hasMore) {
2387
+ await this.postgresClient.completeObjectSync(accountId, runStartedAt, resourceName);
2388
+ }
2389
+ return { processed: entries.length, hasMore, runStartedAt };
2390
+ }
1926
2391
  /**
1927
2392
  * Process all pages for all (or specified) object types until complete.
1928
2393
  *
@@ -2051,6 +2516,12 @@ var StripeSync = class {
2051
2516
  case "checkout_sessions":
2052
2517
  results.checkoutSessions = result;
2053
2518
  break;
2519
+ case "subscription_item_change_events_v2_beta":
2520
+ results.subscriptionItemChangeEventsV2Beta = result;
2521
+ break;
2522
+ case "exchange_rates_from_usd":
2523
+ results.exchangeRatesFromUsd = result;
2524
+ break;
2054
2525
  }
2055
2526
  }
2056
2527
  }
@@ -3644,14 +4115,14 @@ Creating ngrok tunnel for port ${port}...`));
3644
4115
  // src/supabase/supabase.ts
3645
4116
  var import_supabase_management_js = require("supabase-management-js");
3646
4117
 
3647
- // raw-ts:/Users/lfdepombo/src/stripe-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-setup.ts
4118
+ // raw-ts:/home/runner/work/sync-engine/sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-setup.ts
3648
4119
  var stripe_setup_default = "import { StripeSync, runMigrations } from 'npm:stripe-experiment-sync'\n\nDeno.serve(async (req) => {\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n let stripeSync = null\n try {\n // Get and validate database URL\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n throw new Error('SUPABASE_DB_URL environment variable is not set')\n }\n // Remove sslmode from connection string (not supported by pg in Deno)\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n await runMigrations({ databaseUrl: dbUrl })\n\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 2 }, // Need 2 for advisory lock + queries\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY'),\n })\n\n // Release any stale advisory locks from previous timeouts\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n\n // Construct webhook URL from SUPABASE_URL (available in all Edge Functions)\n const supabaseUrl = Deno.env.get('SUPABASE_URL')\n if (!supabaseUrl) {\n throw new Error('SUPABASE_URL environment variable is not set')\n }\n const webhookUrl = supabaseUrl + '/functions/v1/stripe-webhook'\n\n const webhook = await stripeSync.findOrCreateManagedWebhook(webhookUrl)\n\n await stripeSync.postgresClient.pool.end()\n\n return new Response(\n JSON.stringify({\n success: true,\n message: 'Setup complete',\n webhookId: webhook.id,\n }),\n {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } catch (error) {\n console.error('Setup error:', error)\n // Cleanup on error\n if (stripeSync) {\n try {\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n await stripeSync.postgresClient.pool.end()\n } catch (cleanupErr) {\n console.warn('Cleanup failed:', cleanupErr)\n }\n }\n return new Response(JSON.stringify({ success: false, error: error.message }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n})\n";
3649
4120
 
3650
- // raw-ts:/Users/lfdepombo/src/stripe-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-webhook.ts
4121
+ // raw-ts:/home/runner/work/sync-engine/sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-webhook.ts
3651
4122
  var stripe_webhook_default = "import { StripeSync } from 'npm:stripe-experiment-sync'\n\nDeno.serve(async (req) => {\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n const sig = req.headers.get('stripe-signature')\n if (!sig) {\n return new Response('Missing stripe-signature header', { status: 400 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n const stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n })\n\n try {\n const rawBody = new Uint8Array(await req.arrayBuffer())\n await stripeSync.processWebhook(rawBody, sig)\n return new Response(JSON.stringify({ received: true }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Webhook processing error:', error)\n const isSignatureError =\n error.message?.includes('signature') || error.type === 'StripeSignatureVerificationError'\n const status = isSignatureError ? 400 : 500\n return new Response(JSON.stringify({ error: error.message }), {\n status,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n await stripeSync.postgresClient.pool.end()\n }\n})\n";
3652
4123
 
3653
- // raw-ts:/Users/lfdepombo/src/stripe-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-worker.ts
3654
- var stripe_worker_default = "/**\n * Stripe Sync Worker\n *\n * Triggered by pg_cron every 10 seconds. Uses pgmq for durable work queue.\n *\n * Flow:\n * 1. Read batch of messages from pgmq (qty=10, vt=60s)\n * 2. If queue empty: enqueue all objects (continuous sync)\n * 3. Process messages in parallel (Promise.all):\n * - processNext(object)\n * - Delete message on success\n * - Re-enqueue if hasMore\n * 4. Return results summary\n *\n * Concurrency:\n * - Multiple workers can run concurrently via overlapping pg_cron triggers.\n * - Each worker processes its batch of messages in parallel (Promise.all).\n * - pgmq visibility timeout prevents duplicate message reads across workers.\n * - processNext() is idempotent (uses internal cursor tracking), so duplicate\n * processing on timeout/crash is safe.\n */\n\nimport { StripeSync } from 'npm:stripe-experiment-sync'\nimport postgres from 'npm:postgres'\n\nconst QUEUE_NAME = 'stripe_sync_work'\nconst VISIBILITY_TIMEOUT = 60 // seconds\nconst BATCH_SIZE = 10\n\nDeno.serve(async (req) => {\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n let sql\n let stripeSync\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n } catch (error) {\n return new Response(\n JSON.stringify({\n error: 'Failed to create postgres connection',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n })\n } catch (error) {\n await sql.end()\n return new Response(\n JSON.stringify({\n error: 'Failed to create StripeSync',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n // Read batch of messages from queue\n const messages = await sql`\n SELECT * FROM pgmq.read(${QUEUE_NAME}::text, ${VISIBILITY_TIMEOUT}::int, ${BATCH_SIZE}::int)\n `\n\n // If queue empty, enqueue all objects for continuous sync\n if (messages.length === 0) {\n // Create sync run to make enqueued work visible (status='pending')\n const { objects } = await stripeSync.joinOrCreateSyncRun('worker')\n const msgs = objects.map((object) => JSON.stringify({ object }))\n\n await sql`\n SELECT pgmq.send_batch(\n ${QUEUE_NAME}::text,\n ${sql.array(msgs)}::jsonb[]\n )\n `\n\n return new Response(JSON.stringify({ enqueued: objects.length, objects }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n // Process messages in parallel\n const results = await Promise.all(\n messages.map(async (msg) => {\n const { object } = msg.message as { object: string }\n\n try {\n const result = await stripeSync.processNext(object)\n\n // Delete message on success (cast to bigint to disambiguate overloaded function)\n await sql`SELECT pgmq.delete(${QUEUE_NAME}::text, ${msg.msg_id}::bigint)`\n\n // Re-enqueue if more pages\n if (result.hasMore) {\n await sql`SELECT pgmq.send(${QUEUE_NAME}::text, ${sql.json({ object })}::jsonb)`\n }\n\n return { object, ...result }\n } catch (error) {\n // Log error but continue to next message\n 
// Message will become visible again after visibility timeout\n console.error(`Error processing ${object}:`, error)\n return {\n object,\n processed: 0,\n hasMore: false,\n error: error.message,\n stack: error.stack,\n }\n }\n })\n )\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Worker error:', error)\n return new Response(JSON.stringify({ error: error.message, stack: error.stack }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n if (sql) await sql.end()\n if (stripeSync) await stripeSync.postgresClient.pool.end()\n }\n})\n";
4124
+ // raw-ts:/home/runner/work/sync-engine/sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-worker.ts
4125
+ var stripe_worker_default = "/**\n * Stripe Sync Worker\n *\n * Triggered by pg_cron every 10 seconds. Uses pgmq for durable work queue.\n *\n * Flow:\n * 1. Read batch of messages from pgmq (qty=10, vt=60s)\n * 2. If queue empty: enqueue all objects (continuous sync)\n * 3. Process messages in parallel (Promise.all):\n * - processNext(object)\n * - Delete message on success\n * - Re-enqueue if hasMore\n * 4. Return results summary\n *\n * Concurrency:\n * - Multiple workers can run concurrently via overlapping pg_cron triggers.\n * - Each worker processes its batch of messages in parallel (Promise.all).\n * - pgmq visibility timeout prevents duplicate message reads across workers.\n * - processNext() is idempotent (uses internal cursor tracking), so duplicate\n * processing on timeout/crash is safe.\n */\n\nimport { StripeSync } from 'npm:stripe-experiment-sync'\nimport postgres from 'npm:postgres'\n\nconst QUEUE_NAME = 'stripe_sync_work'\nconst VISIBILITY_TIMEOUT = 60 // seconds\nconst BATCH_SIZE = 10\n\nDeno.serve(async (req) => {\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n let sql\n let stripeSync\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n } catch (error) {\n return new Response(\n JSON.stringify({\n error: 'Failed to create postgres connection',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n enableSigmaSync: (Deno.env.get('ENABLE_SIGMA_SYNC') ?? 
'false') === 'true',\n })\n } catch (error) {\n await sql.end()\n return new Response(\n JSON.stringify({\n error: 'Failed to create StripeSync',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n // Read batch of messages from queue\n const messages = await sql`\n SELECT * FROM pgmq.read(${QUEUE_NAME}::text, ${VISIBILITY_TIMEOUT}::int, ${BATCH_SIZE}::int)\n `\n\n // If queue empty, enqueue all objects for continuous sync\n if (messages.length === 0) {\n // Create sync run to make enqueued work visible (status='pending')\n const { objects } = await stripeSync.joinOrCreateSyncRun('worker')\n const msgs = objects.map((object) => JSON.stringify({ object }))\n\n await sql`\n SELECT pgmq.send_batch(\n ${QUEUE_NAME}::text,\n ${sql.array(msgs)}::jsonb[]\n )\n `\n\n return new Response(JSON.stringify({ enqueued: objects.length, objects }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n // Process messages in parallel\n const results = await Promise.all(\n messages.map(async (msg) => {\n const { object } = msg.message as { object: string }\n\n try {\n const result = await stripeSync.processNext(object)\n\n // Delete message on success (cast to bigint to disambiguate overloaded function)\n await sql`SELECT pgmq.delete(${QUEUE_NAME}::text, ${msg.msg_id}::bigint)`\n\n // Re-enqueue if more pages\n if (result.hasMore) {\n await sql`SELECT pgmq.send(${QUEUE_NAME}::text, ${sql.json({ object })}::jsonb)`\n }\n\n return { object, ...result }\n } catch (error) {\n // Log error but continue to next message\n // Message will become visible again after visibility timeout\n console.error(`Error processing ${object}:`, error)\n return {\n object,\n processed: 0,\n hasMore: false,\n error: error.message,\n stack: error.stack,\n }\n }\n })\n )\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Worker error:', error)\n return new Response(JSON.stringify({ error: error.message, stack: error.stack }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n if (sql) await sql.end()\n if (stripeSync) await stripeSync.postgresClient.pool.end()\n }\n})\n";
3655
4126
 
3656
4127
  // src/supabase/edge-function-code.ts
3657
4128
  var setupFunctionCode = stripe_setup_default;
@@ -3659,7 +4130,7 @@ var webhookFunctionCode = stripe_webhook_default;
3659
4130
  var workerFunctionCode = stripe_worker_default;
3660
4131
 
3661
4132
  // src/supabase/supabase.ts
3662
- var import_stripe3 = __toESM(require("stripe"), 1);
4133
+ var import_stripe4 = __toESM(require("stripe"), 1);
3663
4134
  var STRIPE_SCHEMA_COMMENT_PREFIX = "stripe-sync";
3664
4135
  var INSTALLATION_STARTED_SUFFIX = "installation:started";
3665
4136
  var INSTALLATION_ERROR_SUFFIX = "installation:error";
@@ -3919,7 +4390,7 @@ var SupabaseSetupClient = class {
3919
4390
  * Removes all Edge Functions, secrets, database resources, and Stripe webhooks
3920
4391
  */
3921
4392
  async uninstall(stripeSecretKey) {
3922
- const stripe = new import_stripe3.default(stripeSecretKey, { apiVersion: "2025-02-24.acacia" });
4393
+ const stripe = new import_stripe4.default(stripeSecretKey, { apiVersion: "2025-02-24.acacia" });
3923
4394
  try {
3924
4395
  try {
3925
4396
  const webhookResult = await this.runSQL(`
@@ -3958,6 +4429,17 @@ var SupabaseSetupClient = class {
3958
4429
  } catch (err) {
3959
4430
  console.warn("Could not delete vault secret:", err);
3960
4431
  }
4432
+ try {
4433
+ await this.runSQL(`
4434
+ SELECT pg_terminate_backend(pid)
4435
+ FROM pg_stat_activity
4436
+ WHERE datname = current_database()
4437
+ AND pid != pg_backend_pid()
4438
+ AND query ILIKE '%stripe.%'
4439
+ `);
4440
+ } catch (err) {
4441
+ console.warn("Could not terminate connections:", err);
4442
+ }
3961
4443
  await this.runSQL(`DROP SCHEMA IF EXISTS stripe CASCADE`);
3962
4444
  } catch (error) {
3963
4445
  throw new Error(`Uninstall failed: ${error instanceof Error ? error.message : String(error)}`);
@@ -4054,7 +4536,9 @@ var VALID_SYNC_OBJECTS = [
4054
4536
  "credit_note",
4055
4537
  "early_fraud_warning",
4056
4538
  "refund",
4057
- "checkout_sessions"
4539
+ "checkout_sessions",
4540
+ "subscription_item_change_events_v2_beta",
4541
+ "exchange_rates_from_usd"
4058
4542
  ];
4059
4543
  async function backfillCommand(options, entityName) {
4060
4544
  let stripeSync = null;
@@ -4083,8 +4567,8 @@ async function backfillCommand(options, entityName) {
4083
4567
  if (!input || input.trim() === "") {
4084
4568
  return "Stripe API key is required";
4085
4569
  }
4086
- if (!input.startsWith("sk_")) {
4087
- return 'Stripe API key should start with "sk_"';
4570
+ if (!input.startsWith("sk_") && !input.startsWith("rk_")) {
4571
+ return 'Stripe API key should start with "sk_" or "rk_"';
4088
4572
  }
4089
4573
  return true;
4090
4574
  }
@@ -4141,6 +4625,7 @@ async function backfillCommand(options, entityName) {
4141
4625
  stripeSync = new StripeSync({
4142
4626
  databaseUrl: config.databaseUrl,
4143
4627
  stripeSecretKey: config.stripeApiKey,
4628
+ enableSigmaSync: process.env.ENABLE_SIGMA_SYNC === "true",
4144
4629
  stripeApiVersion: process.env.STRIPE_API_VERSION || "2020-08-27",
4145
4630
  autoExpandLists: process.env.AUTO_EXPAND_LISTS === "true",
4146
4631
  backfillRelatedEntities: process.env.BACKFILL_RELATED_ENTITIES !== "false",
@@ -4304,6 +4789,7 @@ Mode: ${modeLabel}`));
4304
4789
  stripeSync = new StripeSync({
4305
4790
  databaseUrl: config.databaseUrl,
4306
4791
  stripeSecretKey: config.stripeApiKey,
4792
+ enableSigmaSync: config.enableSigmaSync,
4307
4793
  stripeApiVersion: process.env.STRIPE_API_VERSION || "2020-08-27",
4308
4794
  autoExpandLists: process.env.AUTO_EXPAND_LISTS === "true",
4309
4795
  backfillRelatedEntities: process.env.BACKFILL_RELATED_ENTITIES !== "false",
@@ -4451,7 +4937,8 @@ async function installCommand(options) {
4451
4937
  mask: "*",
4452
4938
  validate: (input) => {
4453
4939
  if (!input.trim()) return "Stripe key is required";
4454
- if (!input.startsWith("sk_")) return 'Stripe key should start with "sk_"';
4940
+ if (!input.startsWith("sk_") && !input.startsWith("rk_"))
4941
+ return 'Stripe key should start with "sk_" or "rk_"';
4455
4942
  return true;
4456
4943
  }
4457
4944
  });
@@ -4521,7 +5008,8 @@ async function uninstallCommand(options) {
4521
5008
  mask: "*",
4522
5009
  validate: (input) => {
4523
5010
  if (!input.trim()) return "Stripe key is required";
4524
- if (!input.startsWith("sk_")) return 'Stripe key should start with "sk_"';
5011
+ if (!input.startsWith("sk_") && !input.startsWith("rk_"))
5012
+ return 'Stripe key should start with "sk_" or "rk_"';
4525
5013
  return true;
4526
5014
  }
4527
5015
  });