stripe-experiment-sync 1.0.8 → 1.0.9-beta.1765909347

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -33,7 +33,7 @@ var import_commander = require("commander");
  // package.json
  var package_default = {
  name: "stripe-experiment-sync",
- version: "1.0.8-beta.1765856228",
+ version: "1.0.9-beta.1765909347",
  private: false,
  description: "Stripe Sync Engine to sync Stripe data to Postgres",
  type: "module",
@@ -73,6 +73,7 @@ var package_default = {
  dotenv: "^16.4.7",
  express: "^4.18.2",
  inquirer: "^12.3.0",
+ papaparse: "5.4.1",
  pg: "^8.16.3",
  "pg-node-migrations": "0.0.8",
  stripe: "^17.7.0",
@@ -84,6 +85,7 @@ var package_default = {
  "@types/express": "^4.17.21",
  "@types/inquirer": "^9.0.7",
  "@types/node": "^24.10.1",
+ "@types/papaparse": "5.3.16",
  "@types/pg": "^8.15.5",
  "@types/ws": "^8.5.13",
  "@types/yesql": "^4.1.4",
@@ -127,6 +129,7 @@ async function loadConfig(options) {
  config.stripeApiKey = options.stripeKey || process.env.STRIPE_API_KEY || "";
  config.ngrokAuthToken = options.ngrokToken || process.env.NGROK_AUTH_TOKEN || "";
  config.databaseUrl = options.databaseUrl || process.env.DATABASE_URL || "";
+ config.enableSigmaSync = options.enableSigmaSync ?? (process.env.ENABLE_SIGMA_SYNC !== void 0 ? process.env.ENABLE_SIGMA_SYNC === "true" : void 0);
  const questions = [];
  if (!config.stripeApiKey) {
  questions.push({
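
A quick illustration of the enableSigmaSync resolution added above (a sketch, not code from the package): the CLI flag wins, then ENABLE_SIGMA_SYNC, and when neither is set the value stays undefined so the new confirm prompt below can ask for it.

// resolveEnableSigmaSync is a hypothetical helper mirroring the added line
function resolveEnableSigmaSync(flag, env) {
  return flag ?? (env !== undefined ? env === "true" : undefined);
}
resolveEnableSigmaSync(true, "false");        // true — the CLI flag wins
resolveEnableSigmaSync(undefined, "true");    // true — env var parsed as a boolean
resolveEnableSigmaSync(undefined, undefined); // undefined — the prompt will ask, defaulting to false
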
@@ -138,8 +141,8 @@ async function loadConfig(options) {
  if (!input || input.trim() === "") {
  return "Stripe API key is required";
  }
- if (!input.startsWith("sk_")) {
- return 'Stripe API key should start with "sk_"';
+ if (!input.startsWith("sk_") && !input.startsWith("rk_")) {
+ return 'Stripe API key should start with "sk_" or "rk_"';
  }
  return true;
  }
@@ -162,23 +165,80 @@ async function loadConfig(options) {
  }
  });
  }
+ if (config.enableSigmaSync === void 0) {
+ questions.push({
+ type: "confirm",
+ name: "enableSigmaSync",
+ message: "Enable Sigma sync? (Requires Sigma access in Stripe API key)",
+ default: false
+ });
+ }
  if (questions.length > 0) {
- console.log(import_chalk.default.yellow("\nMissing required configuration. Please provide:"));
+ console.log(import_chalk.default.yellow("\nMissing configuration. Please provide:"));
  const answers = await import_inquirer.default.prompt(questions);
  Object.assign(config, answers);
  }
+ if (config.enableSigmaSync === void 0) {
+ config.enableSigmaSync = false;
+ }
  return config;
  }

  // src/stripeSync.ts
- var import_stripe2 = __toESM(require("stripe"), 1);
+ var import_stripe3 = __toESM(require("stripe"), 1);
  var import_yesql2 = require("yesql");

  // src/database/postgres.ts
  var import_pg = __toESM(require("pg"), 1);
  var import_yesql = require("yesql");
+
+ // src/database/QueryUtils.ts
+ var QueryUtils = class _QueryUtils {
+ constructor() {
+ }
+ static quoteIdent(name) {
+ return `"${name}"`;
+ }
+ static quotedList(names) {
+ return names.map(_QueryUtils.quoteIdent).join(", ");
+ }
+ static buildInsertParts(columns) {
+ const columnsSql = columns.map((c) => _QueryUtils.quoteIdent(c.column)).join(", ");
+ const valuesSql = columns.map((c, i) => {
+ const placeholder = `$${i + 1}`;
+ return `${placeholder}::${c.pgType}`;
+ }).join(", ");
+ const params = columns.map((c) => c.value);
+ return { columnsSql, valuesSql, params };
+ }
+ static buildRawJsonUpsertQuery(schema, table, columns, conflictTarget) {
+ const { columnsSql, valuesSql, params } = _QueryUtils.buildInsertParts(columns);
+ const conflictSql = _QueryUtils.quotedList(conflictTarget);
+ const tsParamIdx = columns.findIndex((c) => c.column === "_last_synced_at") + 1;
+ if (tsParamIdx <= 0) {
+ throw new Error("buildRawJsonUpsertQuery requires _last_synced_at column");
+ }
+ const sql3 = `
+ INSERT INTO ${_QueryUtils.quoteIdent(schema)}.${_QueryUtils.quoteIdent(table)} (${columnsSql})
+ VALUES (${valuesSql})
+ ON CONFLICT (${conflictSql})
+ DO UPDATE SET
+ "_raw_data" = EXCLUDED."_raw_data",
+ "_last_synced_at" = $${tsParamIdx},
+ "_account_id" = EXCLUDED."_account_id"
+ WHERE ${_QueryUtils.quoteIdent(table)}."_last_synced_at" IS NULL
+ OR ${_QueryUtils.quoteIdent(table)}."_last_synced_at" < $${tsParamIdx}
+ RETURNING *
+ `;
+ return { sql: sql3, params };
+ }
+ };
+
+ // src/database/postgres.ts
  var ORDERED_STRIPE_TABLES = [
+ "exchange_rates_from_usd",
  "subscription_items",
+ "subscription_item_change_events_v2_beta",
  "subscriptions",
  "subscription_schedules",
  "checkout_session_line_items",
@@ -248,7 +308,7 @@ var PostgresClient = class {
  }
  return results.flatMap((it) => it.rows);
  }
- async upsertManyWithTimestampProtection(entries, table, accountId, syncTimestamp) {
+ async upsertManyWithTimestampProtection(entries, table, accountId, syncTimestamp, upsertOptions) {
  const timestamp = syncTimestamp || (/* @__PURE__ */ new Date()).toISOString();
  if (!entries.length) return [];
  const chunkSize = 5;
@@ -283,20 +343,33 @@ var PostgresClient = class {
  const prepared = (0, import_yesql.pg)(upsertSql, { useNullForMissing: true })(cleansed);
  queries.push(this.pool.query(prepared.text, prepared.values));
  } else {
- const rawData = JSON.stringify(entry);
- const upsertSql = `
- INSERT INTO "${this.config.schema}"."${table}" ("_raw_data", "_last_synced_at", "_account_id")
- VALUES ($1::jsonb, $2, $3)
- ON CONFLICT (id)
- DO UPDATE SET
- "_raw_data" = EXCLUDED."_raw_data",
- "_last_synced_at" = $2,
- "_account_id" = EXCLUDED."_account_id"
- WHERE "${table}"."_last_synced_at" IS NULL
- OR "${table}"."_last_synced_at" < $2
- RETURNING *
- `;
- queries.push(this.pool.query(upsertSql, [rawData, timestamp, accountId]));
+ const conflictTarget = upsertOptions?.conflictTarget ?? ["id"];
+ const extraColumns = upsertOptions?.extraColumns ?? [];
+ if (!conflictTarget.length) {
+ throw new Error(`Invalid upsert config for ${table}: conflictTarget must be non-empty`);
+ }
+ const columns = [
+ { column: "_raw_data", pgType: "jsonb", value: JSON.stringify(entry) },
+ ...extraColumns.map((c) => ({
+ column: c.column,
+ pgType: c.pgType,
+ value: entry[c.entryKey]
+ })),
+ { column: "_last_synced_at", pgType: "timestamptz", value: timestamp },
+ { column: "_account_id", pgType: "text", value: accountId }
+ ];
+ for (const c of columns) {
+ if (c.value === void 0) {
+ throw new Error(`Missing required value for ${table}.${c.column}`);
+ }
+ }
+ const { sql: upsertSql, params } = QueryUtils.buildRawJsonUpsertQuery(
+ this.config.schema,
+ table,
+ columns,
+ conflictTarget
+ );
+ queries.push(this.pool.query(upsertSql, params));
  }
  });
  results.push(...await Promise.all(queries));
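
For reference, a sketch (not part of the package output) of the statement QueryUtils.buildRawJsonUpsertQuery assembles when the new upsertOptions are supplied — here for the exchange_rates_from_usd config defined later in this diff, assuming the default "stripe" schema:

// columns: _raw_data (jsonb), date (date), sell_currency (text), _last_synced_at (timestamptz), _account_id (text)
// INSERT INTO "stripe"."exchange_rates_from_usd" ("_raw_data", "date", "sell_currency", "_last_synced_at", "_account_id")
// VALUES ($1::jsonb, $2::date, $3::text, $4::timestamptz, $5::text)
// ON CONFLICT ("_account_id", "date", "sell_currency")
// DO UPDATE SET "_raw_data" = EXCLUDED."_raw_data", "_last_synced_at" = $4, "_account_id" = EXCLUDED."_account_id"
// WHERE "exchange_rates_from_usd"."_last_synced_at" IS NULL OR "exchange_rates_from_usd"."_last_synced_at" < $4
// RETURNING *

Without upsertOptions the behavior matches the removed inline SQL: conflictTarget defaults to ["id"] and only _raw_data, _last_synced_at, and _account_id are written.
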
@@ -696,7 +769,12 @@ var PostgresClient = class {
  } else {
  await this.query(
  `UPDATE "${this.config.schema}"."_sync_obj_runs"
- SET cursor = $4, updated_at = now()
+ SET cursor = CASE
+ WHEN cursor IS NULL THEN $4
+ WHEN (cursor COLLATE "C") < ($4::text COLLATE "C") THEN $4
+ ELSE cursor
+ END,
+ updated_at = now()
  WHERE "_account_id" = $1 AND run_started_at = $2 AND object = $3`,
  [accountId, runStartedAt, object, cursor]
  );
@@ -707,10 +785,17 @@ var PostgresClient = class {
  * This considers completed, error, AND running runs to ensure recovery syncs
  * don't re-process data that was already synced before a crash.
  * A 'running' status with a cursor means the process was killed mid-sync.
+ *
+ * Handles two cursor formats:
+ * - Numeric: compared as bigint for correct ordering
+ * - Composite cursors: compared as strings with COLLATE "C"
  */
  async getLastCompletedCursor(accountId, object) {
  const result = await this.query(
- `SELECT MAX(o.cursor::bigint)::text as cursor
+ `SELECT CASE
+ WHEN BOOL_OR(o.cursor !~ '^\\d+$') THEN MAX(o.cursor COLLATE "C")
+ ELSE MAX(CASE WHEN o.cursor ~ '^\\d+$' THEN o.cursor::bigint END)::text
+ END as cursor
  FROM "${this.config.schema}"."_sync_obj_runs" o
  WHERE o."_account_id" = $1
  AND o.object = $2
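
The new CASE expression picks one "latest" cursor per object even though two cursor formats now coexist. A minimal JS sketch of the same selection rule (illustrative only; pickLastCursor is not part of the package):

function pickLastCursor(cursors) {
  // cursors: non-null cursor strings stored for one (_account_id, object) pair
  if (cursors.length === 0) return null;
  const allNumeric = cursors.every((c) => /^\d+$/.test(c));
  if (!allNumeric) {
    // composite Sigma cursors present: byte-wise string max, as with COLLATE "C"
    return cursors.reduce((a, b) => (b > a ? b : a));
  }
  // purely numeric cursors: compare as integers, return as text
  return String(cursors.map(BigInt).reduce((a, b) => (a > b ? a : b)));
}
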
@@ -995,6 +1080,269 @@ function hashApiKey(apiKey) {
  return (0, import_crypto.createHash)("sha256").update(apiKey).digest("hex");
  }

+ // src/sigma/sigmaApi.ts
+ var import_papaparse = __toESM(require("papaparse"), 1);
+ var import_stripe2 = __toESM(require("stripe"), 1);
+ var STRIPE_FILES_BASE = "https://files.stripe.com/v1";
+ function sleep2(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ function parseCsvObjects(csv) {
+ const input = csv.replace(/^\uFEFF/, "");
+ const parsed = import_papaparse.default.parse(input, {
+ header: true,
+ skipEmptyLines: "greedy"
+ });
+ if (parsed.errors.length > 0) {
+ throw new Error(`Failed to parse Sigma CSV: ${parsed.errors[0]?.message ?? "unknown error"}`);
+ }
+ return parsed.data.filter((row) => row && Object.keys(row).length > 0).map(
+ (row) => Object.fromEntries(
+ Object.entries(row).map(([k, v]) => [k, v == null || v === "" ? null : String(v)])
+ )
+ );
+ }
+ function normalizeSigmaTimestampToIso(value) {
+ const v = value.trim();
+ if (!v) return null;
+ const hasExplicitTz = /z$|[+-]\d{2}:?\d{2}$/i.test(v);
+ const isoish = v.includes("T") ? v : v.replace(" ", "T");
+ const candidate = hasExplicitTz ? isoish : `${isoish}Z`;
+ const d = new Date(candidate);
+ if (Number.isNaN(d.getTime())) return null;
+ return d.toISOString();
+ }
+ async function fetchStripeText(url, apiKey, options) {
+ const res = await fetch(url, {
+ ...options,
+ headers: {
+ ...options.headers ?? {},
+ Authorization: `Bearer ${apiKey}`
+ }
+ });
+ const text = await res.text();
+ if (!res.ok) {
+ throw new Error(`Sigma file download error (${res.status}) for ${url}: ${text}`);
+ }
+ return text;
+ }
+ async function runSigmaQueryAndDownloadCsv(params) {
+ const pollTimeoutMs = params.pollTimeoutMs ?? 5 * 60 * 1e3;
+ const pollIntervalMs = params.pollIntervalMs ?? 2e3;
+ const stripe = new import_stripe2.default(params.apiKey);
+ const created = await stripe.rawRequest("POST", "/v1/sigma/query_runs", {
+ sql: params.sql
+ });
+ const queryRunId = created.id;
+ const start = Date.now();
+ let current = created;
+ while (current.status === "running") {
+ if (Date.now() - start > pollTimeoutMs) {
+ throw new Error(`Sigma query run timed out after ${pollTimeoutMs}ms: ${queryRunId}`);
+ }
+ await sleep2(pollIntervalMs);
+ current = await stripe.rawRequest(
+ "GET",
+ `/v1/sigma/query_runs/${queryRunId}`,
+ {}
+ );
+ }
+ if (current.status !== "succeeded") {
+ throw new Error(
+ `Sigma query run did not succeed (status=${current.status}) id=${queryRunId} error=${JSON.stringify(
+ current.error
+ )}`
+ );
+ }
+ const fileId = current.result?.file;
+ if (!fileId) {
+ throw new Error(`Sigma query run succeeded but result.file is missing (id=${queryRunId})`);
+ }
+ const csv = await fetchStripeText(
+ `${STRIPE_FILES_BASE}/files/${fileId}/contents`,
+ params.apiKey,
+ { method: "GET" }
+ );
+ return { queryRunId, fileId, csv };
+ }
+
+ // src/sigma/sigmaIngestionConfigs.ts
+ var SIGMA_INGESTION_CONFIGS = {
+ subscription_item_change_events_v2_beta: {
+ sigmaTable: "subscription_item_change_events_v2_beta",
+ destinationTable: "subscription_item_change_events_v2_beta",
+ pageSize: 1e4,
+ cursor: {
+ version: 1,
+ columns: [
+ { column: "event_timestamp", type: "timestamp" },
+ { column: "event_type", type: "string" },
+ { column: "subscription_item_id", type: "string" }
+ ]
+ },
+ upsert: {
+ conflictTarget: ["_account_id", "event_timestamp", "event_type", "subscription_item_id"],
+ extraColumns: [
+ { column: "event_timestamp", pgType: "timestamptz", entryKey: "event_timestamp" },
+ { column: "event_type", pgType: "text", entryKey: "event_type" },
+ { column: "subscription_item_id", pgType: "text", entryKey: "subscription_item_id" }
+ ]
+ }
+ },
+ exchange_rates_from_usd: {
+ sigmaTable: "exchange_rates_from_usd",
+ destinationTable: "exchange_rates_from_usd",
+ pageSize: 1e4,
+ cursor: {
+ version: 1,
+ columns: [
+ { column: "date", type: "string" },
+ { column: "sell_currency", type: "string" }
+ ]
+ },
+ upsert: {
+ conflictTarget: ["_account_id", "date", "sell_currency"],
+ extraColumns: [
+ { column: "date", pgType: "date", entryKey: "date" },
+ { column: "sell_currency", pgType: "text", entryKey: "sell_currency" }
+ ]
+ }
+ }
+ };
+
+ // src/sigma/sigmaIngestion.ts
+ var SIGMA_CURSOR_DELIM = "";
+ function escapeSigmaSqlStringLiteral(value) {
+ return value.replace(/'/g, "''");
+ }
+ function formatSigmaTimestampForSqlLiteral(date) {
+ return date.toISOString().replace("T", " ").replace("Z", "");
+ }
+ function decodeSigmaCursorValues(spec, cursor) {
+ const prefix = `v${spec.version}${SIGMA_CURSOR_DELIM}`;
+ if (!cursor.startsWith(prefix)) {
+ throw new Error(
+ `Unrecognized Sigma cursor format (expected prefix ${JSON.stringify(prefix)}): ${cursor}`
+ );
+ }
+ const parts = cursor.split(SIGMA_CURSOR_DELIM);
+ const expected = 1 + spec.columns.length;
+ if (parts.length !== expected) {
+ throw new Error(`Malformed Sigma cursor: expected ${expected} parts, got ${parts.length}`);
+ }
+ return parts.slice(1);
+ }
+ function encodeSigmaCursor(spec, values) {
+ if (values.length !== spec.columns.length) {
+ throw new Error(
+ `Cannot encode Sigma cursor: expected ${spec.columns.length} values, got ${values.length}`
+ );
+ }
+ for (const v of values) {
+ if (v.includes(SIGMA_CURSOR_DELIM)) {
+ throw new Error("Cannot encode Sigma cursor: value contains delimiter character");
+ }
+ }
+ return [`v${spec.version}`, ...values].join(SIGMA_CURSOR_DELIM);
+ }
+ function sigmaSqlLiteralForCursorValue(spec, rawValue) {
+ switch (spec.type) {
+ case "timestamp": {
+ const d = new Date(rawValue);
+ if (Number.isNaN(d.getTime())) {
+ throw new Error(`Invalid timestamp cursor value for ${spec.column}: ${rawValue}`);
+ }
+ return `timestamp '${formatSigmaTimestampForSqlLiteral(d)}'`;
+ }
+ case "number": {
+ if (!/^-?\d+(\.\d+)?$/.test(rawValue)) {
+ throw new Error(`Invalid numeric cursor value for ${spec.column}: ${rawValue}`);
+ }
+ return rawValue;
+ }
+ case "string":
+ return `'${escapeSigmaSqlStringLiteral(rawValue)}'`;
+ }
+ }
+ function buildSigmaCursorWhereClause(spec, cursorValues) {
+ if (cursorValues.length !== spec.columns.length) {
+ throw new Error(
+ `Cannot build Sigma cursor predicate: expected ${spec.columns.length} values, got ${cursorValues.length}`
+ );
+ }
+ const cols = spec.columns.map((c) => c.column);
+ const lits = spec.columns.map((c, i) => sigmaSqlLiteralForCursorValue(c, cursorValues[i] ?? ""));
+ const ors = [];
+ for (let i = 0; i < cols.length; i++) {
+ const ands = [];
+ for (let j = 0; j < i; j++) {
+ ands.push(`${cols[j]} = ${lits[j]}`);
+ }
+ ands.push(`${cols[i]} > ${lits[i]}`);
+ ors.push(`(${ands.join(" AND ")})`);
+ }
+ return ors.join(" OR ");
+ }
+ function buildSigmaQuery(config, cursor) {
+ const select = config.select === void 0 || config.select === "*" ? "*" : config.select.join(", ");
+ const whereParts = [];
+ if (config.additionalWhere) {
+ whereParts.push(`(${config.additionalWhere})`);
+ }
+ if (cursor) {
+ const values = decodeSigmaCursorValues(config.cursor, cursor);
+ const predicate = buildSigmaCursorWhereClause(config.cursor, values);
+ whereParts.push(`(${predicate})`);
+ }
+ const whereClause = whereParts.length > 0 ? `WHERE ${whereParts.join(" AND ")}` : "";
+ const orderBy = config.cursor.columns.map((c) => c.column).join(", ");
+ return [
+ `SELECT ${select} FROM ${config.sigmaTable}`,
+ whereClause,
+ `ORDER BY ${orderBy} ASC`,
+ `LIMIT ${config.pageSize}`
+ ].filter(Boolean).join(" ");
+ }
+ function defaultSigmaRowToEntry(config, row) {
+ const out = { ...row };
+ for (const col of config.cursor.columns) {
+ const raw = row[col.column];
+ if (raw == null) {
+ throw new Error(`Sigma row missing required cursor column: ${col.column}`);
+ }
+ if (col.type === "timestamp") {
+ const normalized = normalizeSigmaTimestampToIso(raw);
+ if (!normalized) {
+ throw new Error(`Sigma row has invalid timestamp for ${col.column}: ${raw}`);
+ }
+ out[col.column] = normalized;
+ } else if (col.type === "string") {
+ const v = raw.trim();
+ if (!v) {
+ throw new Error(`Sigma row has empty string for required cursor column: ${col.column}`);
+ }
+ out[col.column] = v;
+ } else {
+ const v = raw.trim();
+ if (!v) {
+ throw new Error(`Sigma row has empty value for required cursor column: ${col.column}`);
+ }
+ out[col.column] = v;
+ }
+ }
+ return out;
+ }
+ function sigmaCursorFromEntry(config, entry) {
+ const values = config.cursor.columns.map((c) => {
+ const raw = entry[c.column];
+ if (raw == null) {
+ throw new Error(`Cannot build cursor: entry missing ${c.column}`);
+ }
+ return String(raw);
+ });
+ return encodeSigmaCursor(config.cursor, values);
+ }
+
  // src/stripeSync.ts
  function getUniqueIds(entries, key) {
  const set = new Set(
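
To make the generated query shape concrete, a sketch (not from the package) of what buildSigmaQuery returns for the exchange_rates_from_usd config above when no cursor is stored yet:

buildSigmaQuery(SIGMA_INGESTION_CONFIGS.exchange_rates_from_usd, null);
// => "SELECT * FROM exchange_rates_from_usd ORDER BY date, sell_currency ASC LIMIT 10000"

// With a stored cursor, buildSigmaCursorWhereClause adds a keyset predicate; for
// hypothetical cursor values "2024-01-01" and "eur" the query becomes:
// "SELECT * FROM exchange_rates_from_usd
//  WHERE ((date > '2024-01-01') OR (date = '2024-01-01' AND sell_currency > 'eur'))
//  ORDER BY date, sell_currency ASC LIMIT 10000"
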
@@ -1005,7 +1353,7 @@ function getUniqueIds(entries, key) {
  var StripeSync = class {
  constructor(config) {
  this.config = config;
- const baseStripe = new import_stripe2.default(config.stripeSecretKey, {
+ const baseStripe = new import_stripe3.default(config.stripeSecretKey, {
  // https://github.com/stripe/stripe-node#configuration
  // @ts-ignore
  apiVersion: config.stripeApiVersion,
@@ -1406,6 +1754,17 @@ var StripeSync = class {
  listFn: (p) => this.stripe.checkout.sessions.list(p),
  upsertFn: (items, id) => this.upsertCheckoutSessions(items, id),
  supportsCreatedFilter: true
+ },
+ // Sigma-backed resources
+ subscription_item_change_events_v2_beta: {
+ order: 18,
+ supportsCreatedFilter: false,
+ sigma: SIGMA_INGESTION_CONFIGS.subscription_item_change_events_v2_beta
+ },
+ exchange_rates_from_usd: {
+ order: 19,
+ supportsCreatedFilter: false,
+ sigma: SIGMA_INGESTION_CONFIGS.exchange_rates_from_usd
  }
  };
  async processEvent(event) {
@@ -1438,7 +1797,13 @@ var StripeSync = class {
  * Order is determined by the `order` field in resourceRegistry.
  */
  getSupportedSyncObjects() {
- return Object.entries(this.resourceRegistry).sort(([, a], [, b]) => a.order - b.order).map(([key]) => key);
+ const all = Object.entries(this.resourceRegistry).sort(([, a], [, b]) => a.order - b.order).map(([key]) => key);
+ if (!this.config.enableSigmaSync) {
+ return all.filter(
+ (o) => o !== "subscription_item_change_events_v2_beta" && o !== "exchange_rates_from_usd"
+ );
+ }
+ return all;
  }
  // Event handler methods
  async handleChargeEvent(event, accountId) {
@@ -1517,7 +1882,7 @@ var StripeSync = class {
  );
  await this.upsertProducts([product], accountId, this.getSyncTimestamp(event, refetched));
  } catch (err) {
- if (err instanceof import_stripe2.default.errors.StripeAPIError && err.code === "resource_missing") {
+ if (err instanceof import_stripe3.default.errors.StripeAPIError && err.code === "resource_missing") {
  const product = event.data.object;
  await this.deleteProduct(product.id);
  } else {
@@ -1537,7 +1902,7 @@ var StripeSync = class {
  );
  await this.upsertPrices([price], accountId, false, this.getSyncTimestamp(event, refetched));
  } catch (err) {
- if (err instanceof import_stripe2.default.errors.StripeAPIError && err.code === "resource_missing") {
+ if (err instanceof import_stripe3.default.errors.StripeAPIError && err.code === "resource_missing") {
  const price = event.data.object;
  await this.deletePrice(price.id);
  } else {
@@ -1557,7 +1922,7 @@ var StripeSync = class {
  );
  await this.upsertPlans([plan], accountId, false, this.getSyncTimestamp(event, refetched));
  } catch (err) {
- if (err instanceof import_stripe2.default.errors.StripeAPIError && err.code === "resource_missing") {
+ if (err instanceof import_stripe3.default.errors.StripeAPIError && err.code === "resource_missing") {
  const plan = event.data.object;
  await this.deletePlan(plan.id);
  } else {
@@ -1804,10 +2169,10 @@ var StripeSync = class {
  let cursor = null;
  if (!params?.created) {
  if (objRun?.cursor) {
- cursor = parseInt(objRun.cursor);
+ cursor = objRun.cursor;
  } else {
  const lastCursor = await this.postgresClient.getLastCompletedCursor(accountId, resourceName);
- cursor = lastCursor ? parseInt(lastCursor) : null;
+ cursor = lastCursor ?? null;
  }
  }
  const result = await this.fetchOnePage(
@@ -1862,9 +2227,18 @@ var StripeSync = class {
  throw new Error(`Unsupported object type for processNext: ${object}`);
  }
  try {
+ if (config.sigma) {
+ return await this.fetchOneSigmaPage(
+ accountId,
+ resourceName,
+ runStartedAt,
+ cursor,
+ config.sigma
+ );
+ }
  const listParams = { limit };
  if (config.supportsCreatedFilter) {
- const created = params?.created ?? (cursor ? { gte: cursor } : void 0);
+ const created = params?.created ?? (cursor && /^\d+$/.test(cursor) ? { gte: Number.parseInt(cursor, 10) } : void 0);
  if (created) {
  listParams.created = created;
  }
@@ -1909,6 +2283,97 @@ var StripeSync = class {
  throw error;
  }
  }
+ async getSigmaFallbackCursorFromDestination(accountId, sigmaConfig) {
+ const cursorCols = sigmaConfig.cursor.columns;
+ const selectCols = cursorCols.map((c) => `"${c.column}"`).join(", ");
+ const orderBy = cursorCols.map((c) => `"${c.column}" DESC`).join(", ");
+ const result = await this.postgresClient.query(
+ `SELECT ${selectCols}
+ FROM "stripe"."${sigmaConfig.destinationTable}"
+ WHERE "_account_id" = $1
+ ORDER BY ${orderBy}
+ LIMIT 1`,
+ [accountId]
+ );
+ if (result.rows.length === 0) return null;
+ const row = result.rows[0];
+ const entryForCursor = {};
+ for (const c of cursorCols) {
+ const v = row[c.column];
+ if (v == null) {
+ throw new Error(
+ `Sigma fallback cursor query returned null for ${sigmaConfig.destinationTable}.${c.column}`
+ );
+ }
+ if (c.type === "timestamp") {
+ const d = v instanceof Date ? v : new Date(String(v));
+ if (Number.isNaN(d.getTime())) {
+ throw new Error(
+ `Sigma fallback cursor query returned invalid timestamp for ${sigmaConfig.destinationTable}.${c.column}: ${String(
+ v
+ )}`
+ );
+ }
+ entryForCursor[c.column] = d.toISOString();
+ } else {
+ entryForCursor[c.column] = String(v);
+ }
+ }
+ return sigmaCursorFromEntry(sigmaConfig, entryForCursor);
+ }
+ async fetchOneSigmaPage(accountId, resourceName, runStartedAt, cursor, sigmaConfig) {
+ if (!this.config.stripeSecretKey) {
+ throw new Error("Sigma sync requested but stripeSecretKey is not configured.");
+ }
+ if (resourceName !== sigmaConfig.destinationTable) {
+ throw new Error(
+ `Sigma sync config mismatch: resourceName=${resourceName} destinationTable=${sigmaConfig.destinationTable}`
+ );
+ }
+ const effectiveCursor = cursor ?? await this.getSigmaFallbackCursorFromDestination(accountId, sigmaConfig);
+ const sigmaSql = buildSigmaQuery(sigmaConfig, effectiveCursor);
+ this.config.logger?.info(
+ { object: resourceName, pageSize: sigmaConfig.pageSize, hasCursor: Boolean(effectiveCursor) },
+ "Sigma sync: running query"
+ );
+ const { queryRunId, fileId, csv } = await runSigmaQueryAndDownloadCsv({
+ apiKey: this.config.stripeSecretKey,
+ sql: sigmaSql,
+ logger: this.config.logger
+ });
+ const rows = parseCsvObjects(csv);
+ if (rows.length === 0) {
+ await this.postgresClient.completeObjectSync(accountId, runStartedAt, resourceName);
+ return { processed: 0, hasMore: false, runStartedAt };
+ }
+ const entries = rows.map(
+ (row) => defaultSigmaRowToEntry(sigmaConfig, row)
+ );
+ this.config.logger?.info(
+ { object: resourceName, rows: entries.length, queryRunId, fileId },
+ "Sigma sync: upserting rows"
+ );
+ await this.postgresClient.upsertManyWithTimestampProtection(
+ entries,
+ resourceName,
+ accountId,
+ void 0,
+ sigmaConfig.upsert
+ );
+ await this.postgresClient.incrementObjectProgress(
+ accountId,
+ runStartedAt,
+ resourceName,
+ entries.length
+ );
+ const newCursor = sigmaCursorFromEntry(sigmaConfig, entries[entries.length - 1]);
+ await this.postgresClient.updateObjectCursor(accountId, runStartedAt, resourceName, newCursor);
+ const hasMore = rows.length === sigmaConfig.pageSize;
+ if (!hasMore) {
+ await this.postgresClient.completeObjectSync(accountId, runStartedAt, resourceName);
+ }
+ return { processed: entries.length, hasMore, runStartedAt };
+ }
  /**
  * Process all pages for all (or specified) object types until complete.
  *
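
For orientation, a hedged sketch of driving the new Sigma path from application code (not part of the package; the constructor options mirror the CLI's backfill command, and processNext is the same entry point the Supabase worker uses later in this diff):

import { StripeSync } from "stripe-experiment-sync";

const stripeSync = new StripeSync({
  databaseUrl: process.env.DATABASE_URL,
  stripeSecretKey: process.env.STRIPE_API_KEY,
  enableSigmaSync: true
});

// Each call runs one Sigma query page (up to pageSize rows), upserts the CSV rows
// with timestamp protection, and advances the stored composite cursor.
let hasMore = true;
while (hasMore) {
  ({ hasMore } = await stripeSync.processNext("exchange_rates_from_usd"));
}
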
@@ -2037,6 +2502,12 @@ var StripeSync = class {
  case "checkout_sessions":
  results.checkoutSessions = result;
  break;
+ case "subscription_item_change_events_v2_beta":
+ results.subscriptionItemChangeEventsV2Beta = result;
+ break;
+ case "exchange_rates_from_usd":
+ results.exchangeRatesFromUsd = result;
+ break;
  }
  }
  }
@@ -3630,14 +4101,14 @@ Creating ngrok tunnel for port ${port}...`));
  // src/supabase/supabase.ts
  var import_supabase_management_js = require("supabase-management-js");

- // raw-ts:/Users/lfdepombo/src/stripe-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-setup.ts
+ // raw-ts:/home/runner/work/sync-engine/sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-setup.ts
  var stripe_setup_default = "import { StripeSync, runMigrations } from 'npm:stripe-experiment-sync'\n\nDeno.serve(async (req) => {\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n let stripeSync = null\n try {\n // Get and validate database URL\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n throw new Error('SUPABASE_DB_URL environment variable is not set')\n }\n // Remove sslmode from connection string (not supported by pg in Deno)\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n await runMigrations({ databaseUrl: dbUrl })\n\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 2 }, // Need 2 for advisory lock + queries\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY'),\n })\n\n // Release any stale advisory locks from previous timeouts\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n\n // Construct webhook URL from SUPABASE_URL (available in all Edge Functions)\n const supabaseUrl = Deno.env.get('SUPABASE_URL')\n if (!supabaseUrl) {\n throw new Error('SUPABASE_URL environment variable is not set')\n }\n const webhookUrl = supabaseUrl + '/functions/v1/stripe-webhook'\n\n const webhook = await stripeSync.findOrCreateManagedWebhook(webhookUrl)\n\n await stripeSync.postgresClient.pool.end()\n\n return new Response(\n JSON.stringify({\n success: true,\n message: 'Setup complete',\n webhookId: webhook.id,\n }),\n {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } catch (error) {\n console.error('Setup error:', error)\n // Cleanup on error\n if (stripeSync) {\n try {\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n await stripeSync.postgresClient.pool.end()\n } catch (cleanupErr) {\n console.warn('Cleanup failed:', cleanupErr)\n }\n }\n return new Response(JSON.stringify({ success: false, error: error.message }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n})\n";

- // raw-ts:/Users/lfdepombo/src/stripe-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-webhook.ts
+ // raw-ts:/home/runner/work/sync-engine/sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-webhook.ts
  var stripe_webhook_default = "import { StripeSync } from 'npm:stripe-experiment-sync'\n\nDeno.serve(async (req) => {\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n const sig = req.headers.get('stripe-signature')\n if (!sig) {\n return new Response('Missing stripe-signature header', { status: 400 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n const stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n })\n\n try {\n const rawBody = new Uint8Array(await req.arrayBuffer())\n await stripeSync.processWebhook(rawBody, sig)\n return new Response(JSON.stringify({ received: true }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Webhook processing error:', error)\n const isSignatureError =\n error.message?.includes('signature') || error.type === 'StripeSignatureVerificationError'\n const status = isSignatureError ? 400 : 500\n return new Response(JSON.stringify({ error: error.message }), {\n status,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n await stripeSync.postgresClient.pool.end()\n }\n})\n";

- // raw-ts:/Users/lfdepombo/src/stripe-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-worker.ts
- var stripe_worker_default = "/**\n * Stripe Sync Worker\n *\n * Triggered by pg_cron every 10 seconds. Uses pgmq for durable work queue.\n *\n * Flow:\n * 1. Read batch of messages from pgmq (qty=10, vt=60s)\n * 2. If queue empty: enqueue all objects (continuous sync)\n * 3. Process messages in parallel (Promise.all):\n * - processNext(object)\n * - Delete message on success\n * - Re-enqueue if hasMore\n * 4. Return results summary\n *\n * Concurrency:\n * - Multiple workers can run concurrently via overlapping pg_cron triggers.\n * - Each worker processes its batch of messages in parallel (Promise.all).\n * - pgmq visibility timeout prevents duplicate message reads across workers.\n * - processNext() is idempotent (uses internal cursor tracking), so duplicate\n * processing on timeout/crash is safe.\n */\n\nimport { StripeSync } from 'npm:stripe-experiment-sync'\nimport postgres from 'npm:postgres'\n\nconst QUEUE_NAME = 'stripe_sync_work'\nconst VISIBILITY_TIMEOUT = 60 // seconds\nconst BATCH_SIZE = 10\n\nDeno.serve(async (req) => {\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n let sql\n let stripeSync\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n } catch (error) {\n return new Response(\n JSON.stringify({\n error: 'Failed to create postgres connection',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n })\n } catch (error) {\n await sql.end()\n return new Response(\n JSON.stringify({\n error: 'Failed to create StripeSync',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n // Read batch of messages from queue\n const messages = await sql`\n SELECT * FROM pgmq.read(${QUEUE_NAME}::text, ${VISIBILITY_TIMEOUT}::int, ${BATCH_SIZE}::int)\n `\n\n // If queue empty, enqueue all objects for continuous sync\n if (messages.length === 0) {\n // Create sync run to make enqueued work visible (status='pending')\n const { objects } = await stripeSync.joinOrCreateSyncRun('worker')\n const msgs = objects.map((object) => JSON.stringify({ object }))\n\n await sql`\n SELECT pgmq.send_batch(\n ${QUEUE_NAME}::text,\n ${sql.array(msgs)}::jsonb[]\n )\n `\n\n return new Response(JSON.stringify({ enqueued: objects.length, objects }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n // Process messages in parallel\n const results = await Promise.all(\n messages.map(async (msg) => {\n const { object } = msg.message as { object: string }\n\n try {\n const result = await stripeSync.processNext(object)\n\n // Delete message on success (cast to bigint to disambiguate overloaded function)\n await sql`SELECT pgmq.delete(${QUEUE_NAME}::text, ${msg.msg_id}::bigint)`\n\n // Re-enqueue if more pages\n if (result.hasMore) {\n await sql`SELECT pgmq.send(${QUEUE_NAME}::text, ${sql.json({ object })}::jsonb)`\n }\n\n return { object, ...result }\n } catch (error) {\n // Log error but continue to next message\n 
// Message will become visible again after visibility timeout\n console.error(`Error processing ${object}:`, error)\n return {\n object,\n processed: 0,\n hasMore: false,\n error: error.message,\n stack: error.stack,\n }\n }\n })\n )\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Worker error:', error)\n return new Response(JSON.stringify({ error: error.message, stack: error.stack }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n if (sql) await sql.end()\n if (stripeSync) await stripeSync.postgresClient.pool.end()\n }\n})\n";
+ // raw-ts:/home/runner/work/sync-engine/sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-worker.ts
+ var stripe_worker_default = "/**\n * Stripe Sync Worker\n *\n * Triggered by pg_cron every 10 seconds. Uses pgmq for durable work queue.\n *\n * Flow:\n * 1. Read batch of messages from pgmq (qty=10, vt=60s)\n * 2. If queue empty: enqueue all objects (continuous sync)\n * 3. Process messages in parallel (Promise.all):\n * - processNext(object)\n * - Delete message on success\n * - Re-enqueue if hasMore\n * 4. Return results summary\n *\n * Concurrency:\n * - Multiple workers can run concurrently via overlapping pg_cron triggers.\n * - Each worker processes its batch of messages in parallel (Promise.all).\n * - pgmq visibility timeout prevents duplicate message reads across workers.\n * - processNext() is idempotent (uses internal cursor tracking), so duplicate\n * processing on timeout/crash is safe.\n */\n\nimport { StripeSync } from 'npm:stripe-experiment-sync'\nimport postgres from 'npm:postgres'\n\nconst QUEUE_NAME = 'stripe_sync_work'\nconst VISIBILITY_TIMEOUT = 60 // seconds\nconst BATCH_SIZE = 10\n\nDeno.serve(async (req) => {\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n let sql\n let stripeSync\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n } catch (error) {\n return new Response(\n JSON.stringify({\n error: 'Failed to create postgres connection',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n enableSigmaSync: (Deno.env.get('ENABLE_SIGMA_SYNC') ?? 
'false') === 'true',\n })\n } catch (error) {\n await sql.end()\n return new Response(\n JSON.stringify({\n error: 'Failed to create StripeSync',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n // Read batch of messages from queue\n const messages = await sql`\n SELECT * FROM pgmq.read(${QUEUE_NAME}::text, ${VISIBILITY_TIMEOUT}::int, ${BATCH_SIZE}::int)\n `\n\n // If queue empty, enqueue all objects for continuous sync\n if (messages.length === 0) {\n // Create sync run to make enqueued work visible (status='pending')\n const { objects } = await stripeSync.joinOrCreateSyncRun('worker')\n const msgs = objects.map((object) => JSON.stringify({ object }))\n\n await sql`\n SELECT pgmq.send_batch(\n ${QUEUE_NAME}::text,\n ${sql.array(msgs)}::jsonb[]\n )\n `\n\n return new Response(JSON.stringify({ enqueued: objects.length, objects }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n // Process messages in parallel\n const results = await Promise.all(\n messages.map(async (msg) => {\n const { object } = msg.message as { object: string }\n\n try {\n const result = await stripeSync.processNext(object)\n\n // Delete message on success (cast to bigint to disambiguate overloaded function)\n await sql`SELECT pgmq.delete(${QUEUE_NAME}::text, ${msg.msg_id}::bigint)`\n\n // Re-enqueue if more pages\n if (result.hasMore) {\n await sql`SELECT pgmq.send(${QUEUE_NAME}::text, ${sql.json({ object })}::jsonb)`\n }\n\n return { object, ...result }\n } catch (error) {\n // Log error but continue to next message\n // Message will become visible again after visibility timeout\n console.error(`Error processing ${object}:`, error)\n return {\n object,\n processed: 0,\n hasMore: false,\n error: error.message,\n stack: error.stack,\n }\n }\n })\n )\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Worker error:', error)\n return new Response(JSON.stringify({ error: error.message, stack: error.stack }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n if (sql) await sql.end()\n if (stripeSync) await stripeSync.postgresClient.pool.end()\n }\n})\n";

  // src/supabase/edge-function-code.ts
  var setupFunctionCode = stripe_setup_default;
@@ -3645,7 +4116,7 @@ var webhookFunctionCode = stripe_webhook_default;
  var workerFunctionCode = stripe_worker_default;

  // src/supabase/supabase.ts
- var import_stripe3 = __toESM(require("stripe"), 1);
+ var import_stripe4 = __toESM(require("stripe"), 1);
  var STRIPE_SCHEMA_COMMENT_PREFIX = "stripe-sync";
  var INSTALLATION_STARTED_SUFFIX = "installation:started";
  var INSTALLATION_ERROR_SUFFIX = "installation:error";
@@ -3905,7 +4376,7 @@ var SupabaseSetupClient = class {
  * Removes all Edge Functions, secrets, database resources, and Stripe webhooks
  */
  async uninstall(stripeSecretKey) {
- const stripe = new import_stripe3.default(stripeSecretKey, { apiVersion: "2025-02-24.acacia" });
+ const stripe = new import_stripe4.default(stripeSecretKey, { apiVersion: "2025-02-24.acacia" });
  try {
  try {
  const webhookResult = await this.runSQL(`
@@ -3944,6 +4415,17 @@ var SupabaseSetupClient = class {
  } catch (err) {
  console.warn("Could not delete vault secret:", err);
  }
+ try {
+ await this.runSQL(`
+ SELECT pg_terminate_backend(pid)
+ FROM pg_stat_activity
+ WHERE datname = current_database()
+ AND pid != pg_backend_pid()
+ AND query ILIKE '%stripe.%'
+ `);
+ } catch (err) {
+ console.warn("Could not terminate connections:", err);
+ }
  await this.runSQL(`DROP SCHEMA IF EXISTS stripe CASCADE`);
  } catch (error) {
  throw new Error(`Uninstall failed: ${error instanceof Error ? error.message : String(error)}`);
@@ -4040,7 +4522,9 @@ var VALID_SYNC_OBJECTS = [
  "credit_note",
  "early_fraud_warning",
  "refund",
- "checkout_sessions"
+ "checkout_sessions",
+ "subscription_item_change_events_v2_beta",
+ "exchange_rates_from_usd"
  ];
  async function backfillCommand(options, entityName) {
  let stripeSync = null;
@@ -4069,8 +4553,8 @@ async function backfillCommand(options, entityName) {
  if (!input || input.trim() === "") {
  return "Stripe API key is required";
  }
- if (!input.startsWith("sk_")) {
- return 'Stripe API key should start with "sk_"';
+ if (!input.startsWith("sk_") && !input.startsWith("rk_")) {
+ return 'Stripe API key should start with "sk_" or "rk_"';
  }
  return true;
  }
@@ -4127,6 +4611,7 @@ async function backfillCommand(options, entityName) {
  stripeSync = new StripeSync({
  databaseUrl: config.databaseUrl,
  stripeSecretKey: config.stripeApiKey,
+ enableSigmaSync: process.env.ENABLE_SIGMA_SYNC === "true",
  stripeApiVersion: process.env.STRIPE_API_VERSION || "2020-08-27",
  autoExpandLists: process.env.AUTO_EXPAND_LISTS === "true",
  backfillRelatedEntities: process.env.BACKFILL_RELATED_ENTITIES !== "false",
@@ -4290,6 +4775,7 @@ Mode: ${modeLabel}`));
  stripeSync = new StripeSync({
  databaseUrl: config.databaseUrl,
  stripeSecretKey: config.stripeApiKey,
+ enableSigmaSync: config.enableSigmaSync,
  stripeApiVersion: process.env.STRIPE_API_VERSION || "2020-08-27",
  autoExpandLists: process.env.AUTO_EXPAND_LISTS === "true",
  backfillRelatedEntities: process.env.BACKFILL_RELATED_ENTITIES !== "false",
@@ -4437,7 +4923,8 @@ async function installCommand(options) {
  mask: "*",
  validate: (input) => {
  if (!input.trim()) return "Stripe key is required";
- if (!input.startsWith("sk_")) return 'Stripe key should start with "sk_"';
+ if (!input.startsWith("sk_") && !input.startsWith("rk_"))
+ return 'Stripe key should start with "sk_" or "rk_"';
  return true;
  }
  });
@@ -4507,7 +4994,8 @@ async function uninstallCommand(options) {
  mask: "*",
  validate: (input) => {
  if (!input.trim()) return "Stripe key is required";
- if (!input.startsWith("sk_")) return 'Stripe key should start with "sk_"';
+ if (!input.startsWith("sk_") && !input.startsWith("rk_"))
+ return 'Stripe key should start with "sk_" or "rk_"';
  return true;
  }
  });
@@ -4554,11 +5042,12 @@ program.command("migrate").description("Run database migrations only").option("-
  databaseUrl: options.databaseUrl
  });
  });
- program.command("start").description("Start Stripe sync").option("--stripe-key <key>", "Stripe API key (or STRIPE_API_KEY env)").option("--ngrok-token <token>", "ngrok auth token (or NGROK_AUTH_TOKEN env)").option("--database-url <url>", "Postgres DATABASE_URL (or DATABASE_URL env)").action(async (options) => {
+ program.command("start").description("Start Stripe sync").option("--stripe-key <key>", "Stripe API key (or STRIPE_API_KEY env)").option("--ngrok-token <token>", "ngrok auth token (or NGROK_AUTH_TOKEN env)").option("--database-url <url>", "Postgres DATABASE_URL (or DATABASE_URL env)").option("--sigma", "Sync Sigma data (requires Sigma access in Stripe API key)").action(async (options) => {
  await syncCommand({
  stripeKey: options.stripeKey,
  ngrokToken: options.ngrokToken,
- databaseUrl: options.databaseUrl
+ databaseUrl: options.databaseUrl,
+ enableSigmaSync: options.sigma
  });
  });
  program.command("backfill <entityName>").description("Backfill a specific entity type from Stripe (e.g., customer, invoice, product)").option("--stripe-key <key>", "Stripe API key (or STRIPE_API_KEY env)").option("--database-url <url>", "Postgres DATABASE_URL (or DATABASE_URL env)").action(async (entityName, options) => {
@@ -4572,14 +5061,14 @@ program.command("backfill <entityName>").description("Backfill a specific entity
  });
  var supabase = program.command("supabase").description("Supabase Edge Functions commands");
  supabase.command("install").description("Install Stripe sync to Supabase Edge Functions").option("--token <token>", "Supabase access token (or SUPABASE_ACCESS_TOKEN env)").option("--project <ref>", "Supabase project ref (or SUPABASE_PROJECT_REF env)").option("--stripe-key <key>", "Stripe API key (or STRIPE_API_KEY env)").option(
- "--version <version>",
+ "--package-version <version>",
  "Package version to install (e.g., 1.0.8-beta.1, defaults to latest)"
  ).action(async (options) => {
  await installCommand({
  supabaseAccessToken: options.token,
  supabaseProjectRef: options.project,
  stripeKey: options.stripeKey,
- packageVersion: options.version
+ packageVersion: options.packageVersion
  });
  });
  supabase.command("uninstall").description("Uninstall Stripe sync from Supabase Edge Functions").option("--token <token>", "Supabase access token (or SUPABASE_ACCESS_TOKEN env)").option("--project <ref>", "Supabase project ref (or SUPABASE_PROJECT_REF env)").option("--stripe-key <key>", "Stripe API key (or STRIPE_API_KEY env)").action(async (options) => {