@paymentsdb/sync-engine 0.0.4 → 0.0.6

@@ -1,7 +1,7 @@
1
1
  // package.json
2
2
  var package_default = {
3
3
  name: "@paymentsdb/sync-engine",
4
- version: "0.0.4",
4
+ version: "0.0.5",
5
5
  private: false,
6
6
  description: "Stripe Sync Engine to sync Stripe data to Postgres",
7
7
  type: "module",
@@ -31,7 +31,9 @@ var package_default = {
31
31
  prebuild: "npm run clean",
32
32
  build: "tsup src/index.ts src/supabase/index.ts src/cli/index.ts src/cli/lib.ts --format esm,cjs --dts --shims && cp -r src/database/migrations dist/migrations",
33
33
  lint: "eslint src --ext .ts",
34
- test: "vitest"
34
+ test: "vitest",
35
+ "test:integration": "TEST_POSTGRES_DB_URL=postgresql://postgres:postgres@localhost:55432/postgres vitest run src/stripeSync*integration.test.ts",
36
+ "generate:sigma-schema": "tsx src/sigma/schema/fetch-schema.ts"
35
37
  },
36
38
  files: [
37
39
  "dist"
@@ -1,23 +1,27 @@
1
1
  import {
2
2
  package_default
3
- } from "./chunk-FII5OTPO.js";
3
+ } from "./chunk-E6BGC7CB.js";
4
4
 
5
5
  // src/supabase/supabase.ts
6
6
  import { SupabaseManagementAPI } from "supabase-management-js";
7
7
 
8
- // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/src/supabase/edge-functions/stripe-setup.ts
9
- var stripe_setup_default = "import { StripeSync, runMigrations, VERSION } from 'npm:@paymentsdb/sync-engine'\nimport postgres from 'npm:postgres'\n\n// Get management API base URL from environment variable (for testing against localhost/staging)\n// Caller should provide full URL with protocol (e.g., http://localhost:54323 or https://api.supabase.com)\nconst MGMT_API_BASE_RAW = Deno.env.get('MANAGEMENT_API_URL') || 'https://api.supabase.com'\nconst MGMT_API_BASE = MGMT_API_BASE_RAW.match(/^https?:\\/\\//)\n ? MGMT_API_BASE_RAW\n : `https://${MGMT_API_BASE_RAW}`\n\n// Helper to validate accessToken against Management API\nasync function validateAccessToken(projectRef: string, accessToken: string): Promise<boolean> {\n // Try to fetch project details using the access token\n // This validates that the token is valid for the management API\n const url = `${MGMT_API_BASE}/v1/projects/${projectRef}`\n const response = await fetch(url, {\n method: 'GET',\n headers: {\n Authorization: `Bearer ${accessToken}`,\n 'Content-Type': 'application/json',\n },\n })\n\n // If we can successfully get the project, the token is valid\n return response.ok\n}\n\n// Helper to delete edge function via Management API\nasync function deleteEdgeFunction(\n projectRef: string,\n functionSlug: string,\n accessToken: string\n): Promise<void> {\n const url = `${MGMT_API_BASE}/v1/projects/${projectRef}/functions/${functionSlug}`\n const response = await fetch(url, {\n method: 'DELETE',\n headers: {\n Authorization: `Bearer ${accessToken}`,\n 'Content-Type': 'application/json',\n },\n })\n\n if (!response.ok && response.status !== 404) {\n const text = await response.text()\n throw new Error(`Failed to delete function ${functionSlug}: ${response.status} ${text}`)\n }\n}\n\n// Helper to delete secrets via Management API\nasync function deleteSecret(\n projectRef: string,\n secretName: string,\n accessToken: string\n): Promise<void> {\n const url = `${MGMT_API_BASE}/v1/projects/${projectRef}/secrets`\n const response = await fetch(url, {\n method: 'DELETE',\n headers: {\n Authorization: `Bearer ${accessToken}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify([secretName]),\n })\n\n if (!response.ok && response.status !== 404) {\n const text = await response.text()\n console.warn(`Failed to delete secret ${secretName}: ${response.status} ${text}`)\n }\n}\n\nDeno.serve(async (req) => {\n // Extract project ref from SUPABASE_URL (format: https://{projectRef}.{base})\n const supabaseUrl = Deno.env.get('SUPABASE_URL')\n if (!supabaseUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_URL not set' }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n const projectRef = new URL(supabaseUrl).hostname.split('.')[0]\n\n // Validate access token for all requests\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const accessToken = authHeader.substring(7) // Remove 'Bearer '\n const isValid = await validateAccessToken(projectRef, accessToken)\n if (!isValid) {\n return new Response('Forbidden: Invalid access token for this project', { status: 403 })\n }\n\n // Handle GET requests for status\n if (req.method === 'GET') {\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n const dbUrl = 
rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n let sql\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n\n // Query installation status from schema comment\n const commentResult = await sql`\n SELECT obj_description(oid, 'pg_namespace') as comment\n FROM pg_namespace\n WHERE nspname = 'stripe'\n `\n\n const comment = commentResult[0]?.comment || null\n let installationStatus = 'not_installed'\n\n if (comment && comment.includes('stripe-sync')) {\n // Parse installation status from comment\n if (comment.includes('installation:started')) {\n installationStatus = 'installing'\n } else if (comment.includes('installation:error')) {\n installationStatus = 'error'\n } else if (comment.includes('installed')) {\n installationStatus = 'installed'\n }\n }\n\n // Query sync runs (only if schema exists)\n let syncStatus = []\n if (comment) {\n try {\n syncStatus = await sql`\n SELECT DISTINCT ON (account_id)\n account_id, started_at, closed_at, status, error_message,\n total_processed, total_objects, complete_count, error_count,\n running_count, pending_count, triggered_by, max_concurrent\n FROM stripe.sync_runs\n ORDER BY account_id, started_at DESC\n `\n } catch (err) {\n // Ignore errors if sync_runs view doesn't exist yet\n console.warn('sync_runs query failed (may not exist yet):', err)\n }\n }\n\n return new Response(\n JSON.stringify({\n package_version: VERSION,\n installation_status: installationStatus,\n sync_status: syncStatus,\n }),\n {\n status: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Cache-Control': 'no-cache, no-store, must-revalidate',\n },\n }\n )\n } catch (error) {\n console.error('Status query error:', error)\n return new Response(\n JSON.stringify({\n error: error.message,\n package_version: VERSION,\n installation_status: 'not_installed',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } finally {\n if (sql) await sql.end()\n }\n }\n\n // Handle DELETE requests for uninstall\n if (req.method === 'DELETE') {\n let stripeSync = null\n try {\n // Get and validate database URL\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n throw new Error('SUPABASE_DB_URL environment variable is not set')\n }\n // Remove sslmode from connection string (not supported by pg in Deno)\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n // Stripe key is required for uninstall to delete webhooks\n const stripeKey = Deno.env.get('STRIPE_SECRET_KEY')\n if (!stripeKey) {\n throw new Error('STRIPE_SECRET_KEY environment variable is required for uninstall')\n }\n\n // Step 1: Delete Stripe webhooks and clean up database\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 2 },\n stripeSecretKey: stripeKey,\n appName: Deno.env.get('STRIPE_APP_NAME') || 'PaymentsDB',\n })\n\n // Delete all managed webhooks\n const webhooks = await stripeSync.listManagedWebhooks()\n for (const webhook of webhooks) {\n try {\n await stripeSync.deleteManagedWebhook(webhook.id)\n console.log(`Deleted webhook: ${webhook.id}`)\n } catch (err) {\n console.warn(`Could not delete webhook ${webhook.id}:`, err)\n }\n }\n\n // Unschedule pg_cron job\n try {\n await stripeSync.postgresClient.query(`\n DO $$\n BEGIN\n IF EXISTS (SELECT 1 FROM cron.job WHERE jobname = 'stripe-sync-worker') THEN\n PERFORM cron.unschedule('stripe-sync-worker');\n END IF;\n END $$;\n `)\n } catch (err) {\n console.warn('Could not unschedule pg_cron job:', err)\n }\n\n // Delete vault secret\n try 
{\n await stripeSync.postgresClient.query(`\n DELETE FROM vault.secrets\n WHERE name = 'stripe_sync_worker_secret'\n `)\n } catch (err) {\n console.warn('Could not delete vault secret:', err)\n }\n\n // Terminate connections holding locks on stripe schema\n try {\n await stripeSync.postgresClient.query(`\n SELECT pg_terminate_backend(pid)\n FROM pg_locks l\n JOIN pg_class c ON l.relation = c.oid\n JOIN pg_namespace n ON c.relnamespace = n.oid\n WHERE n.nspname = 'stripe'\n AND l.pid != pg_backend_pid()\n `)\n } catch (err) {\n console.warn('Could not terminate connections:', err)\n }\n\n // Drop schema with retry\n let dropAttempts = 0\n const maxAttempts = 3\n while (dropAttempts < maxAttempts) {\n try {\n await stripeSync.postgresClient.query('DROP SCHEMA IF EXISTS stripe CASCADE')\n break // Success, exit loop\n } catch (err) {\n dropAttempts++\n if (dropAttempts >= maxAttempts) {\n throw new Error(\n `Failed to drop schema after ${maxAttempts} attempts. ` +\n `There may be active connections or locks on the stripe schema. ` +\n `Error: ${err.message}`\n )\n }\n // Wait 1 second before retrying\n await new Promise((resolve) => setTimeout(resolve, 1000))\n }\n }\n\n await stripeSync.postgresClient.pool.end()\n\n // Step 2: Delete Supabase secrets\n try {\n await deleteSecret(projectRef, 'STRIPE_SECRET_KEY', accessToken)\n } catch (err) {\n console.warn('Could not delete STRIPE_SECRET_KEY secret:', err)\n }\n\n // Step 3: Delete Edge Functions\n try {\n await deleteEdgeFunction(projectRef, 'stripe-setup', accessToken)\n } catch (err) {\n console.warn('Could not delete stripe-setup function:', err)\n }\n\n try {\n await deleteEdgeFunction(projectRef, 'stripe-webhook', accessToken)\n } catch (err) {\n console.warn('Could not delete stripe-webhook function:', err)\n }\n\n try {\n await deleteEdgeFunction(projectRef, 'stripe-worker', accessToken)\n } catch (err) {\n console.warn('Could not delete stripe-worker function:', err)\n }\n\n return new Response(\n JSON.stringify({\n success: true,\n message: 'Uninstall complete',\n }),\n {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } catch (error) {\n console.error('Uninstall error:', error)\n // Cleanup on error\n if (stripeSync) {\n try {\n await stripeSync.postgresClient.pool.end()\n } catch (cleanupErr) {\n console.warn('Cleanup failed:', cleanupErr)\n }\n }\n return new Response(JSON.stringify({ success: false, error: error.message }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n }\n\n // Handle POST requests for install\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n let stripeSync = null\n try {\n // Get and validate database URL\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n throw new Error('SUPABASE_DB_URL environment variable is not set')\n }\n // Remove sslmode from connection string (not supported by pg in Deno)\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n await runMigrations({ databaseUrl: dbUrl })\n\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 2 }, // Need 2 for advisory lock + queries\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY'),\n appName: Deno.env.get('STRIPE_APP_NAME') || 'PaymentsDB',\n })\n\n // Release any stale advisory locks from previous timeouts\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n\n // Construct webhook URL from SUPABASE_URL (available in all Edge Functions)\n 
const supabaseUrl = Deno.env.get('SUPABASE_URL')\n if (!supabaseUrl) {\n throw new Error('SUPABASE_URL environment variable is not set')\n }\n const webhookUrl = supabaseUrl + '/functions/v1/stripe-webhook'\n\n const webhook = await stripeSync.findOrCreateManagedWebhook(webhookUrl)\n\n await stripeSync.postgresClient.pool.end()\n\n return new Response(\n JSON.stringify({\n success: true,\n message: 'Setup complete',\n webhookId: webhook.id,\n }),\n {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } catch (error) {\n console.error('Setup error:', error)\n // Cleanup on error\n if (stripeSync) {\n try {\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n await stripeSync.postgresClient.pool.end()\n } catch (cleanupErr) {\n console.warn('Cleanup failed:', cleanupErr)\n }\n }\n return new Response(JSON.stringify({ success: false, error: error.message }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n})\n";
8
+ // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-setup.ts
9
+ var stripe_setup_default = "import { StripeSync, runMigrations, VERSION } from 'npm:@paymentsdb/sync-engine'\nimport postgres from 'npm:postgres'\n\n// Get management API base URL from environment variable (for testing against localhost/staging)\n// Caller should provide full URL with protocol (e.g., http://localhost:54323 or https://api.supabase.com)\nconst MGMT_API_BASE_RAW = Deno.env.get('MANAGEMENT_API_URL') || 'https://api.supabase.com'\nconst MGMT_API_BASE = MGMT_API_BASE_RAW.match(/^https?:\\/\\//)\n ? MGMT_API_BASE_RAW\n : `https://${MGMT_API_BASE_RAW}`\n\n// Helper to validate accessToken against Management API\nasync function validateAccessToken(projectRef: string, accessToken: string): Promise<boolean> {\n // Try to fetch project details using the access token\n // This validates that the token is valid for the management API\n const url = `${MGMT_API_BASE}/v1/projects/${projectRef}`\n const response = await fetch(url, {\n method: 'GET',\n headers: {\n Authorization: `Bearer ${accessToken}`,\n 'Content-Type': 'application/json',\n },\n })\n\n // If we can successfully get the project, the token is valid\n return response.ok\n}\n\n// Helper to delete edge function via Management API\nasync function deleteEdgeFunction(\n projectRef: string,\n functionSlug: string,\n accessToken: string\n): Promise<void> {\n const url = `${MGMT_API_BASE}/v1/projects/${projectRef}/functions/${functionSlug}`\n const response = await fetch(url, {\n method: 'DELETE',\n headers: {\n Authorization: `Bearer ${accessToken}`,\n 'Content-Type': 'application/json',\n },\n })\n\n if (!response.ok && response.status !== 404) {\n const text = await response.text()\n throw new Error(`Failed to delete function ${functionSlug}: ${response.status} ${text}`)\n }\n}\n\n// Helper to delete secrets via Management API\nasync function deleteSecret(\n projectRef: string,\n secretName: string,\n accessToken: string\n): Promise<void> {\n const url = `${MGMT_API_BASE}/v1/projects/${projectRef}/secrets`\n const response = await fetch(url, {\n method: 'DELETE',\n headers: {\n Authorization: `Bearer ${accessToken}`,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify([secretName]),\n })\n\n if (!response.ok && response.status !== 404) {\n const text = await response.text()\n console.warn(`Failed to delete secret ${secretName}: ${response.status} ${text}`)\n }\n}\n\nDeno.serve(async (req) => {\n // Extract project ref from SUPABASE_URL (format: https://{projectRef}.{base})\n const supabaseUrl = Deno.env.get('SUPABASE_URL')\n if (!supabaseUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_URL not set' }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n const projectRef = new URL(supabaseUrl).hostname.split('.')[0]\n\n // Validate access token for all requests\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const accessToken = authHeader.substring(7) // Remove 'Bearer '\n const isValid = await validateAccessToken(projectRef, accessToken)\n if (!isValid) {\n return new Response('Forbidden: Invalid access token for this project', { status: 403 })\n }\n\n // Handle GET requests for status\n if (req.method === 'GET') {\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n const dbUrl = 
rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n let sql\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n\n // Query installation status from schema comment\n const commentResult = await sql`\n SELECT obj_description(oid, 'pg_namespace') as comment\n FROM pg_namespace\n WHERE nspname = 'stripe'\n `\n\n const comment = commentResult[0]?.comment || null\n let installationStatus = 'not_installed'\n\n if (comment && comment.includes('stripe-sync')) {\n // Parse installation status from comment\n if (comment.includes('installation:started')) {\n installationStatus = 'installing'\n } else if (comment.includes('installation:error')) {\n installationStatus = 'error'\n } else if (comment.includes('installed')) {\n installationStatus = 'installed'\n }\n }\n\n // Query sync runs (only if schema exists)\n let syncStatus = []\n if (comment) {\n try {\n syncStatus = await sql`\n SELECT DISTINCT ON (account_id)\n account_id, started_at, closed_at, status, error_message,\n total_processed, total_objects, complete_count, error_count,\n running_count, pending_count, triggered_by, max_concurrent\n FROM stripe.sync_runs\n ORDER BY account_id, started_at DESC\n `\n } catch (err) {\n // Ignore errors if sync_runs view doesn't exist yet\n console.warn('sync_runs query failed (may not exist yet):', err)\n }\n }\n\n return new Response(\n JSON.stringify({\n package_version: VERSION,\n installation_status: installationStatus,\n sync_status: syncStatus,\n }),\n {\n status: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Cache-Control': 'no-cache, no-store, must-revalidate',\n },\n }\n )\n } catch (error) {\n console.error('Status query error:', error)\n return new Response(\n JSON.stringify({\n error: error.message,\n package_version: VERSION,\n installation_status: 'not_installed',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } finally {\n if (sql) await sql.end()\n }\n }\n\n // Handle DELETE requests for uninstall\n if (req.method === 'DELETE') {\n let stripeSync = null\n try {\n // Get and validate database URL\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n throw new Error('SUPABASE_DB_URL environment variable is not set')\n }\n // Remove sslmode from connection string (not supported by pg in Deno)\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n // Stripe key is required for uninstall to delete webhooks\n const stripeKey = Deno.env.get('STRIPE_SECRET_KEY')\n if (!stripeKey) {\n throw new Error('STRIPE_SECRET_KEY environment variable is required for uninstall')\n }\n\n // Step 1: Delete Stripe webhooks and clean up database\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 2 },\n stripeSecretKey: stripeKey,\n appName: Deno.env.get('STRIPE_APP_NAME') || 'PaymentsDB',\n })\n\n // Delete all managed webhooks\n const webhooks = await stripeSync.listManagedWebhooks()\n for (const webhook of webhooks) {\n try {\n await stripeSync.deleteManagedWebhook(webhook.id)\n console.log(`Deleted webhook: ${webhook.id}`)\n } catch (err) {\n console.warn(`Could not delete webhook ${webhook.id}:`, err)\n }\n }\n\n // Unschedule pg_cron jobs\n try {\n await stripeSync.postgresClient.query(`\n DO $$\n BEGIN\n IF EXISTS (SELECT 1 FROM cron.job WHERE jobname = 'stripe-sync-worker') THEN\n PERFORM cron.unschedule('stripe-sync-worker');\n END IF;\n IF EXISTS (SELECT 1 FROM cron.job WHERE jobname = 'stripe-sigma-worker') THEN\n PERFORM 
cron.unschedule('stripe-sigma-worker');\n END IF;\n END $$;\n `)\n } catch (err) {\n console.warn('Could not unschedule pg_cron job:', err)\n }\n\n // Delete vault secrets\n try {\n await stripeSync.postgresClient.query(`\n DELETE FROM vault.secrets\n WHERE name IN ('stripe_sync_worker_secret', 'stripe_sigma_worker_secret')\n `)\n } catch (err) {\n console.warn('Could not delete vault secret:', err)\n }\n\n // Drop Sigma self-trigger function if present\n try {\n await stripeSync.postgresClient.query(`\n DROP FUNCTION IF EXISTS stripe.trigger_sigma_worker();\n `)\n } catch (err) {\n console.warn('Could not drop sigma trigger function:', err)\n }\n\n // Terminate connections holding locks on stripe schema\n try {\n await stripeSync.postgresClient.query(`\n SELECT pg_terminate_backend(pid)\n FROM pg_locks l\n JOIN pg_class c ON l.relation = c.oid\n JOIN pg_namespace n ON c.relnamespace = n.oid\n WHERE n.nspname = 'stripe'\n AND l.pid != pg_backend_pid()\n `)\n } catch (err) {\n console.warn('Could not terminate connections:', err)\n }\n\n // Drop schema with retry\n let dropAttempts = 0\n const maxAttempts = 3\n while (dropAttempts < maxAttempts) {\n try {\n await stripeSync.postgresClient.query('DROP SCHEMA IF EXISTS stripe CASCADE')\n break // Success, exit loop\n } catch (err) {\n dropAttempts++\n if (dropAttempts >= maxAttempts) {\n throw new Error(\n `Failed to drop schema after ${maxAttempts} attempts. ` +\n `There may be active connections or locks on the stripe schema. ` +\n `Error: ${err.message}`\n )\n }\n // Wait 1 second before retrying\n await new Promise((resolve) => setTimeout(resolve, 1000))\n }\n }\n\n await stripeSync.postgresClient.pool.end()\n\n // Step 2: Delete Supabase secrets\n try {\n await deleteSecret(projectRef, 'STRIPE_SECRET_KEY', accessToken)\n } catch (err) {\n console.warn('Could not delete STRIPE_SECRET_KEY secret:', err)\n }\n\n // Step 3: Delete Edge Functions\n try {\n await deleteEdgeFunction(projectRef, 'stripe-setup', accessToken)\n } catch (err) {\n console.warn('Could not delete stripe-setup function:', err)\n }\n\n try {\n await deleteEdgeFunction(projectRef, 'stripe-webhook', accessToken)\n } catch (err) {\n console.warn('Could not delete stripe-webhook function:', err)\n }\n\n try {\n await deleteEdgeFunction(projectRef, 'stripe-worker', accessToken)\n } catch (err) {\n console.warn('Could not delete stripe-worker function:', err)\n }\n\n return new Response(\n JSON.stringify({\n success: true,\n message: 'Uninstall complete',\n }),\n {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } catch (error) {\n console.error('Uninstall error:', error)\n // Cleanup on error\n if (stripeSync) {\n try {\n await stripeSync.postgresClient.pool.end()\n } catch (cleanupErr) {\n console.warn('Cleanup failed:', cleanupErr)\n }\n }\n return new Response(JSON.stringify({ success: false, error: error.message }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n }\n\n // Handle POST requests for install\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n let stripeSync = null\n try {\n // Get and validate database URL\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n throw new Error('SUPABASE_DB_URL environment variable is not set')\n }\n // Remove sslmode from connection string (not supported by pg in Deno)\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n const enableSigma = (Deno.env.get('ENABLE_SIGMA') ?? 
'false') === 'true'\n await runMigrations({ databaseUrl: dbUrl, enableSigma })\n\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 2 }, // Need 2 for advisory lock + queries\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY'),\n appName: Deno.env.get('STRIPE_APP_NAME') || 'PaymentsDB',\n })\n\n // Release any stale advisory locks from previous timeouts\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n\n // Construct webhook URL from SUPABASE_URL (available in all Edge Functions)\n const supabaseUrl = Deno.env.get('SUPABASE_URL')\n if (!supabaseUrl) {\n throw new Error('SUPABASE_URL environment variable is not set')\n }\n const webhookUrl = supabaseUrl + '/functions/v1/stripe-webhook'\n\n const webhook = await stripeSync.findOrCreateManagedWebhook(webhookUrl)\n\n await stripeSync.postgresClient.pool.end()\n\n return new Response(\n JSON.stringify({\n success: true,\n message: 'Setup complete',\n webhookId: webhook.id,\n }),\n {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n }\n )\n } catch (error) {\n console.error('Setup error:', error)\n // Cleanup on error\n if (stripeSync) {\n try {\n await stripeSync.postgresClient.query('SELECT pg_advisory_unlock_all()')\n await stripeSync.postgresClient.pool.end()\n } catch (cleanupErr) {\n console.warn('Cleanup failed:', cleanupErr)\n }\n }\n return new Response(JSON.stringify({ success: false, error: error.message }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n})\n";
10
10
 
11
- // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/src/supabase/edge-functions/stripe-webhook.ts
11
+ // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-webhook.ts
12
12
  var stripe_webhook_default = "import { StripeSync } from 'npm:@paymentsdb/sync-engine'\n\nDeno.serve(async (req) => {\n if (req.method !== 'POST') {\n return new Response('Method not allowed', { status: 405 })\n }\n\n const sig = req.headers.get('stripe-signature')\n if (!sig) {\n return new Response('Missing stripe-signature header', { status: 400 })\n }\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n const stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n appName: Deno.env.get('STRIPE_APP_NAME') || 'PaymentsDB',\n })\n\n try {\n const rawBody = new Uint8Array(await req.arrayBuffer())\n await stripeSync.processWebhook(rawBody, sig)\n return new Response(JSON.stringify({ received: true }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Webhook processing error:', error)\n const isSignatureError =\n error.message?.includes('signature') || error.type === 'StripeSignatureVerificationError'\n const status = isSignatureError ? 400 : 500\n return new Response(JSON.stringify({ error: error.message }), {\n status,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n await stripeSync.postgresClient.pool.end()\n }\n})\n";
13
13
 
14
- // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/src/supabase/edge-functions/stripe-worker.ts
14
+ // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/packages/sync-engine/src/supabase/edge-functions/stripe-worker.ts
15
15
  var stripe_worker_default = "/**\n * Stripe Sync Worker\n *\n * Triggered by pg_cron at a configurable interval (default: 60 seconds). Uses pgmq for durable work queue.\n *\n * Flow:\n * 1. Read batch of messages from pgmq (qty=10, vt=60s)\n * 2. If queue empty: enqueue all objects (continuous sync)\n * 3. Process messages in parallel (Promise.all):\n * - processNext(object)\n * - Delete message on success\n * - Re-enqueue if hasMore\n * 4. Return results summary\n *\n * Concurrency:\n * - Multiple workers can run concurrently via overlapping pg_cron triggers.\n * - Each worker processes its batch of messages in parallel (Promise.all).\n * - pgmq visibility timeout prevents duplicate message reads across workers.\n * - processNext() is idempotent (uses internal cursor tracking), so duplicate\n * processing on timeout/crash is safe.\n */\n\nimport { StripeSync } from 'npm:@paymentsdb/sync-engine'\nimport postgres from 'npm:postgres'\n\nconst QUEUE_NAME = 'stripe_sync_work'\nconst VISIBILITY_TIMEOUT = 60 // seconds\nconst BATCH_SIZE = 10\n\nDeno.serve(async (req) => {\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const token = authHeader.substring(7) // Remove 'Bearer '\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return new Response(JSON.stringify({ error: 'SUPABASE_DB_URL not set' }), { status: 500 })\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n let sql\n let stripeSync\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n } catch (error) {\n return new Response(\n JSON.stringify({\n error: 'Failed to create postgres connection',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n // Validate that the token matches the unique worker secret stored in vault\n const vaultResult = await sql`\n SELECT decrypted_secret\n FROM vault.decrypted_secrets\n WHERE name = 'stripe_sync_worker_secret'\n `\n\n if (vaultResult.length === 0) {\n await sql.end()\n return new Response('Worker secret not configured in vault', { status: 500 })\n }\n\n const storedSecret = vaultResult[0].decrypted_secret\n if (token !== storedSecret) {\n await sql.end()\n return new Response('Forbidden: Invalid worker secret', { status: 403 })\n }\n\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n enableSigma: (Deno.env.get('ENABLE_SIGMA') ?? 
'false') === 'true',\n appName: Deno.env.get('STRIPE_APP_NAME') || 'PaymentsDB',\n })\n } catch (error) {\n await sql.end()\n return new Response(\n JSON.stringify({\n error: 'Failed to create StripeSync',\n details: error.message,\n stack: error.stack,\n }),\n { status: 500, headers: { 'Content-Type': 'application/json' } }\n )\n }\n\n try {\n // Read batch of messages from queue\n const messages = await sql`\n SELECT * FROM pgmq.read(${QUEUE_NAME}::text, ${VISIBILITY_TIMEOUT}::int, ${BATCH_SIZE}::int)\n `\n\n // If queue empty, enqueue all objects for continuous sync\n if (messages.length === 0) {\n // Create sync run to make enqueued work visible (status='pending')\n const { objects } = await stripeSync.joinOrCreateSyncRun('worker')\n const msgs = objects.map((object) => JSON.stringify({ object }))\n\n await sql`\n SELECT pgmq.send_batch(\n ${QUEUE_NAME}::text,\n ${sql.array(msgs)}::jsonb[]\n )\n `\n\n return new Response(JSON.stringify({ enqueued: objects.length, objects }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n }\n\n // Process messages in parallel\n const results = await Promise.all(\n messages.map(async (msg) => {\n const { object } = msg.message as { object: string }\n\n try {\n const result = await stripeSync.processNext(object)\n\n // Delete message on success (cast to bigint to disambiguate overloaded function)\n await sql`SELECT pgmq.delete(${QUEUE_NAME}::text, ${msg.msg_id}::bigint)`\n\n // Re-enqueue if more pages\n if (result.hasMore) {\n await sql`SELECT pgmq.send(${QUEUE_NAME}::text, ${sql.json({ object })}::jsonb)`\n }\n\n return { object, ...result }\n } catch (error) {\n // Log error but continue to next message\n // Message will become visible again after visibility timeout\n console.error(`Error processing ${object}:`, error)\n return {\n object,\n processed: 0,\n hasMore: false,\n error: error.message,\n stack: error.stack,\n }\n }\n })\n )\n\n return new Response(JSON.stringify({ results }), {\n status: 200,\n headers: { 'Content-Type': 'application/json' },\n })\n } catch (error) {\n console.error('Worker error:', error)\n return new Response(JSON.stringify({ error: error.message, stack: error.stack }), {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n })\n } finally {\n if (sql) await sql.end()\n if (stripeSync) await stripeSync.postgresClient.pool.end()\n }\n})\n";
16
16
 
17
+ // raw-ts:/Users/prasoon/work/paymentsdb-sync-engine/packages/sync-engine/src/supabase/edge-functions/sigma-data-worker.ts
18
+ var sigma_data_worker_default = "/**\n * Stripe Sigma Data Worker.\n *\n * Hourly cron starts a run; self-trigger continues until all objects finish.\n * Progress persists in _sync_runs and _sync_obj_runs across invocations.\n */\n\nimport { StripeSync } from 'npm:stripe-experiment-sync'\nimport postgres from 'npm:postgres'\n\nconst BATCH_SIZE = 1\nconst MAX_RUN_AGE_MS = 6 * 60 * 60 * 1000\nconst jsonResponse = (body: unknown, status = 200) =>\n new Response(JSON.stringify(body), {\n status,\n headers: { 'Content-Type': 'application/json' },\n })\n\nDeno.serve(async (req) => {\n const authHeader = req.headers.get('Authorization')\n if (!authHeader?.startsWith('Bearer ')) {\n return new Response('Unauthorized', { status: 401 })\n }\n\n const token = authHeader.substring(7)\n\n const rawDbUrl = Deno.env.get('SUPABASE_DB_URL')\n if (!rawDbUrl) {\n return jsonResponse({ error: 'SUPABASE_DB_URL not set' }, 500)\n }\n const dbUrl = rawDbUrl.replace(/[?&]sslmode=[^&]*/g, '').replace(/[?&]$/, '')\n\n let sql: ReturnType<typeof postgres> | undefined\n let stripeSync: StripeSync | undefined\n\n try {\n sql = postgres(dbUrl, { max: 1, prepare: false })\n } catch (error) {\n return jsonResponse(\n {\n error: 'Failed to create postgres connection',\n details: error.message,\n stack: error.stack,\n },\n 500\n )\n }\n\n try {\n // Validate the token against vault secret\n const vaultResult = await sql`\n SELECT decrypted_secret\n FROM vault.decrypted_secrets\n WHERE name = 'stripe_sigma_worker_secret'\n `\n\n if (vaultResult.length === 0) {\n await sql.end()\n return new Response('Sigma worker secret not configured in vault', { status: 500 })\n }\n\n const storedSecret = vaultResult[0].decrypted_secret\n if (token !== storedSecret) {\n await sql.end()\n return new Response('Forbidden: Invalid sigma worker secret', { status: 403 })\n }\n\n stripeSync = new StripeSync({\n poolConfig: { connectionString: dbUrl, max: 1 },\n stripeSecretKey: Deno.env.get('STRIPE_SECRET_KEY')!,\n enableSigma: true,\n sigmaPageSizeOverride: 1000,\n })\n } catch (error) {\n await sql.end()\n return jsonResponse(\n {\n error: 'Failed to create StripeSync',\n details: error.message,\n stack: error.stack,\n },\n 500\n )\n }\n\n try {\n const accountId = await stripeSync.getAccountId()\n const sigmaObjects = stripeSync.getSupportedSigmaObjects()\n\n if (sigmaObjects.length === 0) {\n return jsonResponse({ message: 'No Sigma objects configured for sync' })\n }\n\n // Get or create sync run for sigma-worker (isolated from stripe-worker)\n const runResult = await stripeSync.postgresClient.getOrCreateSyncRun(accountId, 'sigma-worker')\n const runStartedAt =\n runResult?.runStartedAt ??\n (await stripeSync.postgresClient.getActiveSyncRun(accountId, 'sigma-worker'))?.runStartedAt\n\n if (!runStartedAt) {\n throw new Error('Failed to get or create sync run for sigma worker')\n }\n\n // Legacy cleanup: remove any prefixed sigma object runs that can block concurrency.\n // Previous versions stored objects as \"sigma.<table>\" which no longer matches processNext.\n await stripeSync.postgresClient.query(\n `UPDATE \"stripe\".\"_sync_obj_runs\"\n SET status = 'error',\n error_message = 'Legacy sigma worker prefix run (sigma.*); superseded by unprefixed runs',\n completed_at = now()\n WHERE \"_account_id\" = $1\n AND run_started_at = $2\n AND object LIKE 'sigma.%'\n AND status IN ('pending', 'running')`,\n [accountId, runStartedAt]\n )\n\n // Stop self-triggering after MAX_RUN_AGE_MS.\n const runAgeMs = Date.now() - runStartedAt.getTime()\n if 
(runAgeMs > MAX_RUN_AGE_MS) {\n console.warn(\n `Sigma worker: run too old (${Math.round(runAgeMs / 1000 / 60)} min), closing without self-trigger`\n )\n await stripeSync.postgresClient.closeSyncRun(accountId, runStartedAt)\n return jsonResponse({\n message: 'Sigma run exceeded max age, closed without processing',\n runAgeMinutes: Math.round(runAgeMs / 1000 / 60),\n selfTriggered: false,\n })\n }\n\n // Create object runs for all sigma objects (idempotent).\n await stripeSync.postgresClient.createObjectRuns(accountId, runStartedAt, sigmaObjects)\n await stripeSync.postgresClient.ensureSyncRunMaxConcurrent(accountId, runStartedAt, BATCH_SIZE)\n\n // Prefer running objects; otherwise claim pending ones.\n const runningObjects = await stripeSync.postgresClient.listObjectsByStatus(\n accountId,\n runStartedAt,\n 'running',\n sigmaObjects\n )\n\n const objectsToProcess = runningObjects.slice(0, BATCH_SIZE)\n let pendingObjects: string[] = []\n\n if (objectsToProcess.length === 0) {\n pendingObjects = await stripeSync.postgresClient.listObjectsByStatus(\n accountId,\n runStartedAt,\n 'pending',\n sigmaObjects\n )\n\n for (const objectKey of pendingObjects) {\n if (objectsToProcess.length >= BATCH_SIZE) break\n const started = await stripeSync.postgresClient.tryStartObjectSync(\n accountId,\n runStartedAt,\n objectKey\n )\n if (started) {\n objectsToProcess.push(objectKey)\n }\n }\n }\n\n if (objectsToProcess.length === 0) {\n if (pendingObjects.length === 0) {\n console.info('Sigma worker: all objects complete or errored - run finished')\n return jsonResponse({ message: 'Sigma sync run complete', selfTriggered: false })\n }\n\n console.info('Sigma worker: at concurrency limit, will self-trigger', {\n pendingCount: pendingObjects.length,\n })\n let selfTriggered = false\n try {\n await sql`SELECT stripe.trigger_sigma_worker()`\n selfTriggered = true\n } catch (error) {\n console.warn('Failed to self-trigger sigma worker:', error.message)\n }\n\n return jsonResponse({\n message: 'At concurrency limit',\n pendingCount: pendingObjects.length,\n selfTriggered,\n })\n }\n\n // Process objects sequentially (one lifecycle per invocation).\n const results: Array<Record<string, unknown>> = []\n\n for (const object of objectsToProcess) {\n const objectKey = object\n try {\n console.info(`Sigma worker: processing ${object}`)\n\n // Process one sigma page and upsert results.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const result = await stripeSync.processNext(object as any, {\n runStartedAt,\n triggeredBy: 'sigma-worker',\n })\n\n results.push({\n object,\n processed: result.processed,\n hasMore: result.hasMore,\n status: 'success',\n })\n\n if (result.hasMore) {\n console.info(\n `Sigma worker: ${object} has more pages, processed ${result.processed} rows so far`\n )\n } else {\n console.info(`Sigma worker: ${object} complete, processed ${result.processed} rows`)\n }\n } catch (error) {\n console.error(`Sigma worker: error processing ${object}:`, error)\n\n // Mark object as failed and move on (no retries)\n await stripeSync.postgresClient.failObjectSync(\n accountId,\n runStartedAt,\n objectKey,\n error.message ?? 
'Unknown error'\n )\n\n results.push({\n object,\n processed: 0,\n hasMore: false,\n status: 'error',\n error: error.message,\n })\n }\n }\n\n // Determine if self-trigger is needed\n const pendingAfter = await stripeSync.postgresClient.listObjectsByStatus(\n accountId,\n runStartedAt,\n 'pending',\n sigmaObjects\n )\n const runningAfter = await stripeSync.postgresClient.listObjectsByStatus(\n accountId,\n runStartedAt,\n 'running',\n sigmaObjects\n )\n\n // Calculate remaining run time for logging\n const remainingMs = MAX_RUN_AGE_MS - (Date.now() - runStartedAt.getTime())\n const remainingMinutes = Math.round(remainingMs / 1000 / 60)\n\n // Only self-trigger if there are pending or running objects AND run hasn't timed out\n const shouldSelfTrigger =\n (pendingAfter.length > 0 || runningAfter.length > 0) && remainingMs > 0\n\n let selfTriggered = false\n if (shouldSelfTrigger) {\n console.info('Sigma worker: more work remains, self-triggering', {\n pending: pendingAfter.length,\n running: runningAfter.length,\n remainingMinutes,\n })\n try {\n await sql`SELECT stripe.trigger_sigma_worker()`\n selfTriggered = true\n } catch (error) {\n console.warn('Failed to self-trigger sigma worker:', error.message)\n }\n } else if (pendingAfter.length > 0 || runningAfter.length > 0) {\n // Would self-trigger but run timed out\n console.warn('Sigma worker: work remains but run timed out, closing', {\n pending: pendingAfter.length,\n running: runningAfter.length,\n })\n await stripeSync.postgresClient.closeSyncRun(accountId, runStartedAt)\n } else {\n console.info('Sigma worker: no more work, run complete')\n }\n\n return jsonResponse({\n results,\n selfTriggered,\n remaining: { pending: pendingAfter.length, running: runningAfter.length },\n })\n } catch (error) {\n console.error('Sigma worker error:', error)\n return jsonResponse({ error: error.message, stack: error.stack }, 500)\n } finally {\n if (sql) await sql.end()\n if (stripeSync) await stripeSync.postgresClient.pool.end()\n }\n})\n";
19
+
17
20
  // src/supabase/edge-function-code.ts
18
21
  var setupFunctionCode = stripe_setup_default;
19
22
  var webhookFunctionCode = stripe_webhook_default;
20
23
  var workerFunctionCode = stripe_worker_default;
24
+ var sigmaWorkerFunctionCode = sigma_data_worker_default;
21
25
 
22
26
  // src/supabase/supabase.ts
23
27
  var STRIPE_SCHEMA_COMMENT_PREFIX = "stripe-sync";
@@ -156,6 +160,61 @@ var SupabaseSetupClient = class {
156
160
  `;
157
161
  await this.runSQL(sql);
158
162
  }
163
+ /**
164
+ * Setup pg_cron job for Sigma data worker (every 12 hours)
165
+ * Creates secret, self-trigger function, and cron job
166
+ */
167
+ async setupSigmaPgCronJob() {
168
+ const sigmaWorkerSecret = crypto.randomUUID();
169
+ const escapedSigmaWorkerSecret = sigmaWorkerSecret.replace(/'/g, "''");
170
+ const sql = `
171
+ -- Enable extensions
172
+ CREATE EXTENSION IF NOT EXISTS pg_cron;
173
+ CREATE EXTENSION IF NOT EXISTS pg_net;
174
+
175
+ -- Store unique sigma worker secret in vault
176
+ DELETE FROM vault.secrets WHERE name = 'stripe_sigma_worker_secret';
177
+ SELECT vault.create_secret('${escapedSigmaWorkerSecret}', 'stripe_sigma_worker_secret');
178
+
179
+ -- Create self-trigger function for sigma worker continuation
180
+ -- This allows the worker to trigger itself when there's more work
181
+ CREATE OR REPLACE FUNCTION stripe.trigger_sigma_worker()
182
+ RETURNS void
183
+ LANGUAGE plpgsql
184
+ SECURITY DEFINER
185
+ AS $$
186
+ BEGIN
187
+ PERFORM net.http_post(
188
+ url := 'https://${this.projectRef}.${this.projectBaseUrl}/functions/v1/sigma-data-worker',
189
+ headers := jsonb_build_object(
190
+ 'Authorization', 'Bearer ' || (SELECT decrypted_secret FROM vault.decrypted_secrets WHERE name = 'stripe_sigma_worker_secret')
191
+ )
192
+ );
193
+ END;
194
+ $$;
195
+
196
+ -- Delete existing sigma job if it exists
197
+ SELECT cron.unschedule('stripe-sigma-worker') WHERE EXISTS (
198
+ SELECT 1 FROM cron.job WHERE jobname = 'stripe-sigma-worker'
199
+ );
200
+
201
+ -- Create cron job for Sigma sync
202
+ -- Runs at 00:00 and 12:00 UTC
203
+ SELECT cron.schedule(
204
+ 'stripe-sigma-worker',
205
+ '0 */12 * * *',
206
+ $$
207
+ SELECT net.http_post(
208
+ url := 'https://${this.projectRef}.${this.projectBaseUrl}/functions/v1/sigma-data-worker',
209
+ headers := jsonb_build_object(
210
+ 'Authorization', 'Bearer ' || (SELECT decrypted_secret FROM vault.decrypted_secrets WHERE name = 'stripe_sigma_worker_secret')
211
+ )
212
+ )
213
+ $$
214
+ );
215
+ `;
216
+ await this.runSQL(sql);
217
+ }
159
218
  /**
160
219
  * Get the webhook URL for this project
161
220
  */
@@ -326,7 +385,7 @@ var SupabaseSetupClient = class {
326
385
  `from 'npm:@paymentsdb/sync-engine@${version}'`
327
386
  );
328
387
  }
329
- async install(stripeKey, packageVersion, workerIntervalSeconds) {
388
+ async install(stripeKey, packageVersion, workerIntervalSeconds, enableSigma) {
330
389
  const trimmedStripeKey = stripeKey.trim();
331
390
  if (!trimmedStripeKey.startsWith("sk_") && !trimmedStripeKey.startsWith("rk_")) {
332
391
  throw new Error('Stripe key should start with "sk_" or "rk_"');
@@ -344,16 +403,26 @@ var SupabaseSetupClient = class {
344
403
  await this.deployFunction("stripe-setup", versionedSetup, false);
345
404
  await this.deployFunction("stripe-webhook", versionedWebhook, false);
346
405
  await this.deployFunction("stripe-worker", versionedWorker, false);
406
+ if (enableSigma) {
407
+ const versionedSigmaWorker = this.injectPackageVersion(sigmaWorkerFunctionCode, version);
408
+ await this.deployFunction("sigma-data-worker", versionedSigmaWorker, false);
409
+ }
347
410
  const secrets = [{ name: "STRIPE_SECRET_KEY", value: trimmedStripeKey }];
348
411
  if (this.supabaseManagementUrl) {
349
412
  secrets.push({ name: "MANAGEMENT_API_URL", value: this.supabaseManagementUrl });
350
413
  }
414
+ if (enableSigma) {
415
+ secrets.push({ name: "ENABLE_SIGMA", value: "true" });
416
+ }
351
417
  await this.setSecrets(secrets);
352
418
  const setupResult = await this.invokeFunction("stripe-setup", this.accessToken);
353
419
  if (!setupResult.success) {
354
420
  throw new Error(`Setup failed: ${setupResult.error}`);
355
421
  }
356
422
  await this.setupPgCronJob(workerIntervalSeconds);
423
+ if (enableSigma) {
424
+ await this.setupSigmaPgCronJob();
425
+ }
357
426
  await this.updateInstallationComment(
358
427
  `${STRIPE_SCHEMA_COMMENT_PREFIX} v${package_default.version} ${INSTALLATION_INSTALLED_SUFFIX}`
359
428
  );
@@ -371,7 +440,8 @@ async function install(params) {
371
440
  supabaseProjectRef,
372
441
  stripeKey,
373
442
  packageVersion,
374
- workerIntervalSeconds
443
+ workerIntervalSeconds,
444
+ enableSigma
375
445
  } = params;
376
446
  const client = new SupabaseSetupClient({
377
447
  accessToken: supabaseAccessToken,
@@ -379,7 +449,7 @@ async function install(params) {
379
449
  projectBaseUrl: params.baseProjectUrl,
380
450
  supabaseManagementUrl: params.supabaseManagementUrl
381
451
  });
382
- await client.install(stripeKey, packageVersion, workerIntervalSeconds);
452
+ await client.install(stripeKey, packageVersion, workerIntervalSeconds, enableSigma);
383
453
  }
384
454
  async function uninstall(params) {
385
455
  const { supabaseAccessToken, supabaseProjectRef } = params;
@@ -396,6 +466,7 @@ export {
396
466
  setupFunctionCode,
397
467
  webhookFunctionCode,
398
468
  workerFunctionCode,
469
+ sigmaWorkerFunctionCode,
399
470
  STRIPE_SCHEMA_COMMENT_PREFIX,
400
471
  INSTALLATION_STARTED_SUFFIX,
401
472
  INSTALLATION_ERROR_SUFFIX,
@@ -1,12 +1,13 @@
1
1
  import {
2
+ SIGMA_INGESTION_CONFIGS,
2
3
  StripeSync,
3
4
  createStripeWebSocketClient,
4
5
  runMigrations
5
- } from "./chunk-UD6RQUDV.js";
6
+ } from "./chunk-2KB2ISYF.js";
6
7
  import {
7
8
  install,
8
9
  uninstall
9
- } from "./chunk-WQOTGHLT.js";
10
+ } from "./chunk-GXIMCH5Y.js";
10
11
 
11
12
  // src/cli/config.ts
12
13
  import dotenv from "dotenv";
@@ -130,22 +131,23 @@ var VALID_SYNC_OBJECTS = [
130
131
  "credit_note",
131
132
  "early_fraud_warning",
132
133
  "refund",
133
- "checkout_sessions",
134
- "subscription_item_change_events_v2_beta",
135
- "exchange_rates_from_usd"
134
+ "checkout_sessions"
136
135
  ];
137
136
  async function backfillCommand(options, entityName) {
138
137
  let stripeSync = null;
139
138
  try {
140
- if (!VALID_SYNC_OBJECTS.includes(entityName)) {
139
+ dotenv2.config();
140
+ const enableSigma = options.enableSigma ?? process.env.ENABLE_SIGMA === "true";
141
+ const sigmaTableNames = enableSigma ? Object.keys(SIGMA_INGESTION_CONFIGS) : [];
142
+ const validEntities = [...VALID_SYNC_OBJECTS, ...sigmaTableNames];
143
+ if (!validEntities.includes(entityName)) {
144
+ const entityList = enableSigma ? `${VALID_SYNC_OBJECTS.join(", ")}, and ${sigmaTableNames.length} sigma tables` : VALID_SYNC_OBJECTS.join(", ");
141
145
  console.error(
142
- chalk3.red(
143
- `Error: Invalid entity name "${entityName}". Valid entities are: ${VALID_SYNC_OBJECTS.join(", ")}`
144
- )
146
+ chalk3.red(`Error: Invalid entity name "${entityName}". Valid entities are: ${entityList}`)
145
147
  );
146
148
  process.exit(1);
147
149
  }
148
- dotenv2.config();
150
+ const isSigmaTable = sigmaTableNames.includes(entityName);
149
151
  let stripeApiKey = options.stripeKey || process.env.STRIPE_API_KEY || process.env.STRIPE_SECRET_KEY || "";
150
152
  let databaseUrl = options.databaseUrl || process.env.DATABASE_URL || "";
151
153
  if (!stripeApiKey || !databaseUrl) {
@@ -198,11 +200,13 @@ async function backfillCommand(options, entityName) {
198
200
  ngrokAuthToken: ""
199
201
  // Not needed for backfill
200
202
  };
201
- console.log(chalk3.blue(`Backfilling ${entityName} from Stripe in 'stripe' schema...`));
203
+ const schemaName = isSigmaTable ? "sigma" : "stripe";
204
+ console.log(chalk3.blue(`Backfilling ${entityName} from Stripe in '${schemaName}' schema...`));
202
205
  console.log(chalk3.gray(`Database: ${config.databaseUrl.replace(/:[^:@]+@/, ":****@")}`));
203
206
  try {
204
207
  await runMigrations({
205
- databaseUrl: config.databaseUrl
208
+ databaseUrl: config.databaseUrl,
209
+ enableSigma
206
210
  });
207
211
  } catch (migrationError) {
208
212
  console.error(chalk3.red("Failed to run migrations:"));
@@ -219,18 +223,49 @@ async function backfillCommand(options, entityName) {
219
223
  stripeSync = new StripeSync({
220
224
  databaseUrl: config.databaseUrl,
221
225
  stripeSecretKey: config.stripeApiKey,
222
- enableSigma: process.env.ENABLE_SIGMA === "true",
226
+ enableSigma,
223
227
  stripeApiVersion: process.env.STRIPE_API_VERSION || "2020-08-27",
224
228
  autoExpandLists: process.env.AUTO_EXPAND_LISTS === "true",
225
229
  backfillRelatedEntities: process.env.BACKFILL_RELATED_ENTITIES !== "false",
226
230
  poolConfig
227
231
  });
228
- const result = await stripeSync.processUntilDone({ object: entityName });
229
- const totalSynced = Object.values(result).reduce(
230
- (sum, syncResult) => sum + (syncResult?.synced || 0),
231
- 0
232
- );
233
- console.log(chalk3.green(`\u2713 Backfill complete: ${totalSynced} ${entityName} objects synced`));
232
+ if (entityName === "all") {
233
+ const backfill = await stripeSync.processUntilDoneParallel({
234
+ object: "all",
235
+ triggeredBy: "cli-backfill",
236
+ maxParallel: 10,
237
+ skipInaccessibleSigmaTables: true
238
+ });
239
+ const objectCount = Object.keys(backfill.totals).length;
240
+ console.log(
241
+ chalk3.green(
242
+ `\u2713 Backfill complete: ${backfill.totalSynced} rows synced across ${objectCount} objects`
243
+ )
244
+ );
245
+ if (backfill.skipped.length > 0) {
246
+ console.log(
247
+ chalk3.yellow(
248
+ `Skipped ${backfill.skipped.length} Sigma tables without access: ${backfill.skipped.join(", ")}`
249
+ )
250
+ );
251
+ }
252
+ if (backfill.errors.length > 0) {
253
+ console.log(chalk3.red(`Backfill finished with ${backfill.errors.length} errors:`));
254
+ for (const err of backfill.errors) {
255
+ console.log(chalk3.red(` - ${err.object}: ${err.message}`));
256
+ }
257
+ }
258
+ } else {
259
+ const result = await stripeSync.processUntilDone({ object: entityName });
260
+ const totalSynced = Object.values(result).reduce(
261
+ (sum, syncResult) => sum + (syncResult?.synced || 0),
262
+ 0
263
+ );
264
+ const tableType = isSigmaTable ? "(sigma)" : "";
265
+ console.log(
266
+ chalk3.green(`\u2713 Backfill complete: ${totalSynced} ${entityName} ${tableType} rows synced`)
267
+ );
268
+ }
234
269
  await stripeSync.close();
235
270
  } catch (error) {
236
271
  if (error instanceof Error) {
@@ -270,11 +305,16 @@ async function migrateCommand(options) {
270
305
  ]);
271
306
  databaseUrl = answers.databaseUrl;
272
307
  }
308
+ const enableSigma = options.enableSigma ?? process.env.ENABLE_SIGMA === "true";
273
309
  console.log(chalk3.blue("Running database migrations in 'stripe' schema..."));
274
310
  console.log(chalk3.gray(`Database: ${databaseUrl.replace(/:[^:@]+@/, ":****@")}`));
311
+ if (enableSigma) {
312
+ console.log(chalk3.blue("Sigma tables enabled"));
313
+ }
275
314
  try {
276
315
  await runMigrations({
277
- databaseUrl
316
+ databaseUrl,
317
+ enableSigma
278
318
  });
279
319
  console.log(chalk3.green("\u2713 Migrations completed successfully"));
280
320
  } catch (migrationError) {
@@ -366,7 +406,8 @@ Mode: ${modeLabel}`));
366
406
  console.log(chalk3.gray(`Database: ${maskedDbUrl}`));
367
407
  try {
368
408
  await runMigrations({
369
- databaseUrl: config.databaseUrl
409
+ databaseUrl: config.databaseUrl,
410
+ enableSigma: config.enableSigma
370
411
  });
371
412
  } catch (migrationError) {
372
413
  console.error(chalk3.red("Failed to run migrations:"));
@@ -474,13 +515,38 @@ Starting server on port ${port}...`));
474
515
  console.log(chalk3.green(`\u2713 Server started on port ${port}`));
475
516
  }
476
517
  if (process.env.SKIP_BACKFILL !== "true") {
477
- console.log(chalk3.blue("\nStarting initial sync of all Stripe data..."));
478
- const syncResult = await stripeSync.processUntilDone();
479
- const totalSynced = Object.values(syncResult).reduce(
480
- (sum, result) => sum + (result?.synced || 0),
481
- 0
518
+ if (!stripeSync) {
519
+ throw new Error("StripeSync not initialized.");
520
+ }
521
+ console.log(chalk3.blue("\nStarting historical backfill (parallel sweep)..."));
522
+ const backfill = await stripeSync.processUntilDoneParallel({
523
+ triggeredBy: "cli-historical-backfill",
524
+ maxParallel: 10,
525
+ skipInaccessibleSigmaTables: true
526
+ });
527
+ const objectCount = Object.keys(backfill.totals).length;
528
+ console.log(
529
+ chalk3.green(
530
+ `\u2713 Historical backfill complete: ${backfill.totalSynced} rows synced across ${objectCount} objects`
531
+ )
482
532
  );
483
- console.log(chalk3.green(`\u2713 Sync complete: ${totalSynced} objects synced`));
533
+ if (backfill.skipped.length > 0) {
534
+ console.log(
535
+ chalk3.yellow(
536
+ `Skipped ${backfill.skipped.length} Sigma tables without access: ${backfill.skipped.join(", ")}`
537
+ )
538
+ );
539
+ }
540
+ if (backfill.errors.length > 0) {
541
+ console.log(
542
+ chalk3.red(
543
+ `Historical backfill finished with ${backfill.errors.length} errors. See logs above.`
544
+ )
545
+ );
546
+ }
547
+ console.log(chalk3.blue("\nStarting incremental backfill..."));
548
+ await stripeSync.processUntilDone();
549
+ console.log(chalk3.green("\u2713 Incremental backfill complete"));
484
550
  } else {
485
551
  console.log(chalk3.yellow("\n\u23ED\uFE0F Skipping initial sync (SKIP_BACKFILL=true)"));
486
552
  }
@@ -554,7 +620,8 @@ async function installCommand(options) {
554
620
  stripeKey,
555
621
  packageVersion: options.packageVersion,
556
622
  workerIntervalSeconds: options.workerInterval,
557
- supabaseManagementUrl
623
+ supabaseManagementUrl,
624
+ enableSigma: options.enableSigma
558
625
  });
559
626
  console.log(chalk3.cyan("\n\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501"));
560
627
  console.log(chalk3.cyan.bold(" Installation Complete!"));