pgflow 0.0.0-test-snapshot-releases-8d5d9bc1-20250922101013 → 0.0.0-testsnap-c8dfbf89-20251207204700

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/README.md +5 -6
  2. package/dist/commands/compile/index.d.ts +11 -0
  3. package/dist/commands/compile/index.d.ts.map +1 -0
  4. package/dist/commands/compile/index.js +156 -0
  5. package/dist/commands/install/copy-migrations.d.ts +5 -0
  6. package/dist/commands/install/copy-migrations.d.ts.map +1 -0
  7. package/dist/commands/install/copy-migrations.js +212 -0
  8. package/dist/commands/install/create-edge-function.d.ts +5 -0
  9. package/dist/commands/install/create-edge-function.d.ts.map +1 -0
  10. package/dist/commands/install/create-edge-function.js +75 -0
  11. package/dist/commands/install/create-example-worker.d.ts +5 -0
  12. package/dist/commands/install/create-example-worker.d.ts.map +1 -0
  13. package/dist/commands/install/create-example-worker.js +75 -0
  14. package/dist/commands/install/create-flows-directory.d.ts +5 -0
  15. package/dist/commands/install/create-flows-directory.d.ts.map +1 -0
  16. package/dist/commands/install/create-flows-directory.js +79 -0
  17. package/dist/commands/install/index.d.ts +4 -0
  18. package/dist/commands/install/index.d.ts.map +1 -0
  19. package/dist/commands/install/index.js +104 -0
  20. package/dist/commands/install/supabase-path-prompt.d.ts +4 -0
  21. package/dist/commands/install/supabase-path-prompt.d.ts.map +1 -0
  22. package/dist/commands/install/supabase-path-prompt.js +45 -0
  23. package/dist/commands/install/update-config-toml.d.ts +19 -0
  24. package/dist/commands/install/update-config-toml.d.ts.map +1 -0
  25. package/dist/commands/install/update-config-toml.js +125 -0
  26. package/dist/commands/install/update-env-file.d.ts +12 -0
  27. package/dist/commands/install/update-env-file.d.ts.map +1 -0
  28. package/dist/commands/install/update-env-file.js +86 -0
  29. package/dist/index.d.ts +3 -0
  30. package/dist/index.d.ts.map +1 -0
  31. package/dist/index.js +74 -0
  32. package/dist/package.json +39 -0
  33. package/dist/tsconfig.lib.tsbuildinfo +1 -0
  34. package/dist/utils/get-version.d.ts +6 -0
  35. package/dist/utils/get-version.d.ts.map +1 -0
  36. package/dist/utils/get-version.js +21 -0
  37. package/package.json +4 -3
package/README.md CHANGED
@@ -1,4 +1,4 @@
- # @pgflow/cli
+ # pgflow CLI
  
  The Command Line Interface for pgflow - a PostgreSQL-native workflow engine.
  
@@ -7,7 +7,7 @@ The Command Line Interface for pgflow - a PostgreSQL-native workflow engine.
  
  ## Overview
  
- `@pgflow/cli` provides essential tools for setting up, managing, and deploying pgflow workflows in your Supabase environment. The CLI handles:
+ This package provides essential tools for setting up, managing, and deploying pgflow workflows in your Supabase environment. The CLI handles:
  
  - Installing pgflow in your Supabase project
  - Compiling TypeScript workflow definitions into SQL migrations
@@ -15,8 +15,7 @@ The Command Line Interface for pgflow - a PostgreSQL-native workflow engine.
  
  ## Prerequisites
  
- - Supabase CLI v2.0.2 or higher
- - Deno v1.45.x or higher (for flow compilation)
+ - Supabase CLI v2.50.3 or higher
  - Local Supabase project initialized
  
  ## Installation
@@ -65,13 +64,13 @@ The installer will:
  Convert a TypeScript flow definition into a SQL migration:
  
  ```bash
- npx pgflow@latest compile supabase/functions/_flows/my_flow.ts
+ npx pgflow@latest compile my_flow
  ```
  
  Options:
  
- - `--deno-json <path>` - Path to custom deno.json with import map
  - `--supabase-path <path>` - Path to custom Supabase directory
+ - `--control-plane-url <url>` - ControlPlane URL (default: `http://127.0.0.1:54321/functions/v1/pgflow`)
  
  The compiler will:
  
package/dist/commands/compile/index.d.ts ADDED
@@ -0,0 +1,11 @@
+ import { type Command } from 'commander';
+ /**
+  * Fetch flow SQL from ControlPlane HTTP endpoint
+  */
+ export declare function fetchFlowSQL(flowSlug: string, controlPlaneUrl: string, secretKey: string): Promise<{
+     flowSlug: string;
+     sql: string[];
+ }>;
+ declare const _default: (program: Command) => void;
+ export default _default;
+ //# sourceMappingURL=index.d.ts.map
package/dist/commands/compile/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAU,MAAM,WAAW,CAAC;AAMjD;;GAEG;AACH,wBAAsB,YAAY,CAChC,QAAQ,EAAE,MAAM,EAChB,eAAe,EAAE,MAAM,EACvB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,EAAE,CAAA;CAAE,CAAC,CAsE9C;yBAEe,SAAS,OAAO;AAAhC,wBAkIE"}
package/dist/commands/compile/index.js ADDED
@@ -0,0 +1,156 @@
+ import { Option } from 'commander';
+ import chalk from 'chalk';
+ import { intro, log, outro } from '@clack/prompts';
+ import path from 'path';
+ import fs from 'fs';
+ /**
+  * Fetch flow SQL from ControlPlane HTTP endpoint
+  */
+ export async function fetchFlowSQL(flowSlug, controlPlaneUrl, secretKey) {
+     const url = `${controlPlaneUrl}/flows/${flowSlug}`;
+     try {
+         const response = await fetch(url, {
+             headers: {
+                 'Authorization': `Bearer ${secretKey}`,
+                 'apikey': secretKey,
+                 'Content-Type': 'application/json',
+             },
+         });
+         if (response.status === 404) {
+             let errorData = {};
+             try {
+                 errorData = await response.json();
+             }
+             catch {
+                 // JSON parse failed - likely Supabase gateway error (HTML or plain text)
+             }
+             // Check if this is our ControlPlane's 404 (has 'Flow Not Found' error)
+             // vs Supabase gateway's 404 (function doesn't exist)
+             if (errorData.error === 'Flow Not Found') {
+                 throw new Error(`Flow '${flowSlug}' not found.\n\n` +
+                     `${errorData.message || 'Did you add it to supabase/functions/pgflow/index.ts?'}\n\n` +
+                     `Fix:\n` +
+                     `1. Add your flow to supabase/functions/pgflow/index.ts\n` +
+                     `2. Restart edge functions: supabase functions serve`);
+             }
+             // ControlPlane edge function itself doesn't exist
+             throw new Error('ControlPlane edge function not found.\n\n' +
+                 'The pgflow edge function is not installed or not running.\n\n' +
+                 'Fix:\n' +
+                 '1. Run: npx pgflow install\n' +
+                 '2. Start edge functions: supabase functions serve\n\n' +
+                 'Or use previous version: npx pgflow@0.8.0 compile path/to/flow.ts');
+         }
+         if (!response.ok) {
+             const errorText = await response.text();
+             throw new Error(`HTTP ${response.status}: ${errorText}`);
+         }
+         return await response.json();
+     }
+     catch (error) {
+         if (error instanceof Error) {
+             // Check for connection refused errors
+             if (error.message.includes('ECONNREFUSED') ||
+                 error.message.includes('fetch failed')) {
+                 throw new Error('Could not connect to ControlPlane.\n\n' +
+                     'Fix options:\n' +
+                     '1. Start Supabase: supabase start\n' +
+                     '2. Start edge functions: supabase functions serve\n\n' +
+                     'Or use previous version: npx pgflow@0.8.0 compile path/to/flow.ts');
+             }
+             throw error;
+         }
+         throw new Error(`Unknown error: ${String(error)}`);
+     }
+ }
+ export default (program) => {
+     program
+         .command('compile')
+         .description('Compiles a flow into SQL migration via ControlPlane HTTP')
+         .argument('<flowSlug>', 'Flow slug to compile (e.g., my_flow)')
+         .option('--supabase-path <supabasePath>', 'Path to the Supabase folder')
+         .option('--control-plane-url <url>', 'Control plane URL', 'http://127.0.0.1:54321/functions/v1/pgflow')
+         .addOption(new Option('--secret-key [key]', 'Supabase anon/service_role key')
+             .hideHelp())
+         .action(async (flowSlug, options) => {
+             intro('pgflow - Compile Flow to SQL');
+             try {
+                 // Validate Supabase path
+                 let supabasePath;
+                 if (options.supabasePath) {
+                     supabasePath = path.resolve(process.cwd(), options.supabasePath);
+                 }
+                 else {
+                     // Default to ./supabase/ if not provided
+                     supabasePath = path.resolve(process.cwd(), 'supabase');
+                 }
+                 // Check if Supabase path exists
+                 if (!fs.existsSync(supabasePath)) {
+                     log.error(`Supabase directory not found: ${supabasePath}\n` +
+                         `Please provide a valid Supabase path using --supabase-path option or ensure ./supabase/ directory exists.`);
+                     process.exit(1);
+                 }
+                 // Create migrations directory if it doesn't exist
+                 const migrationsDir = path.resolve(supabasePath, 'migrations');
+                 if (!fs.existsSync(migrationsDir)) {
+                     fs.mkdirSync(migrationsDir, { recursive: true });
+                     log.success(`Created migrations directory: ${migrationsDir}`);
+                 }
+                 // Check for existing migrations
+                 const existingMigrations = fs
+                     .readdirSync(migrationsDir)
+                     .filter((file) => file.endsWith(`_create_${flowSlug}_flow.sql`));
+                 if (existingMigrations.length > 0) {
+                     log.warn(`Found existing migration(s) for '${flowSlug}':\n` +
+                         existingMigrations.map((f) => ` ${f}`).join('\n') +
+                         '\nCreating new migration anyway...');
+                 }
+                 // Fetch flow SQL from ControlPlane
+                 log.info(`Compiling flow: ${flowSlug}`);
+                 const result = await fetchFlowSQL(flowSlug, options.controlPlaneUrl, options.secretKey);
+                 // Validate result
+                 if (!result.sql || result.sql.length === 0) {
+                     throw new Error('ControlPlane returned empty SQL');
+                 }
+                 // Join SQL statements
+                 const compiledSql = result.sql.join('\n') + '\n';
+                 // Generate timestamp for migration file in format YYYYMMDDHHMMSS using UTC
+                 const now = new Date();
+                 const timestamp = [
+                     now.getUTCFullYear(),
+                     String(now.getUTCMonth() + 1).padStart(2, '0'),
+                     String(now.getUTCDate()).padStart(2, '0'),
+                     String(now.getUTCHours()).padStart(2, '0'),
+                     String(now.getUTCMinutes()).padStart(2, '0'),
+                     String(now.getUTCSeconds()).padStart(2, '0'),
+                 ].join('');
+                 // Create migration filename in the format: <timestamp>_create_<flow_slug>_flow.sql
+                 const migrationFileName = `${timestamp}_create_${flowSlug}_flow.sql`;
+                 const migrationFilePath = path.join(migrationsDir, migrationFileName);
+                 // Write the SQL to a migration file
+                 fs.writeFileSync(migrationFilePath, compiledSql);
+                 // Show the migration file path relative to the current directory
+                 const relativeFilePath = path.relative(process.cwd(), migrationFilePath);
+                 log.success(`Migration file created: ${relativeFilePath}`);
+                 // Display next steps with outro
+                 outro([
+                     chalk.green.bold('✓ Flow compilation completed successfully!'),
+                     '',
+                     `- Run ${chalk.cyan('supabase migration up')} to apply the migration`,
+                     '',
+                     chalk.bold('Continue the setup:'),
+                     chalk.blue.underline('https://pgflow.dev/getting-started/run-flow/'),
+                 ].join('\n'));
+             }
+             catch (error) {
+                 log.error(`Compilation failed: ${error instanceof Error ? error.message : String(error)}`);
+                 outro([
+                     chalk.bold('Compilation failed!'),
+                     '',
+                     chalk.bold('For troubleshooting help:'),
+                     chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/'),
+                 ].join('\n'));
+                 process.exit(1);
+             }
+         });
+ };
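
Stripped of the CLI plumbing, the new compile path is a single HTTP round trip: GET `<control-plane-url>/flows/<flowSlug>` with the Supabase key in both the `Authorization` and `apikey` headers, returning `{ flowSlug, sql: string[] }`. A minimal sketch of that contract, assuming a locally running stack; the placeholder key and function name are illustrative, only the URL shape, headers, and response shape come from the code above:

```ts
// Sketch: calling the pgflow ControlPlane compile endpoint directly.
// SUPABASE_ANON_KEY is a placeholder you must provide yourself.
const controlPlaneUrl = 'http://127.0.0.1:54321/functions/v1/pgflow';
const anonKey = process.env.SUPABASE_ANON_KEY ?? '';

async function compileFlow(flowSlug: string): Promise<string> {
  const response = await fetch(`${controlPlaneUrl}/flows/${flowSlug}`, {
    headers: {
      Authorization: `Bearer ${anonKey}`,
      apikey: anonKey,
      'Content-Type': 'application/json',
    },
  });
  if (!response.ok) {
    throw new Error(`HTTP ${response.status}: ${await response.text()}`);
  }
  const { sql } = (await response.json()) as { flowSlug: string; sql: string[] };
  // Join the statements exactly as the CLI does before writing the migration file
  return sql.join('\n') + '\n';
}

compileFlow('my_flow').then((migrationSql) => console.log(migrationSql));
```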
package/dist/commands/install/copy-migrations.d.ts ADDED
@@ -0,0 +1,5 @@
+ export declare function copyMigrations({ supabasePath, autoConfirm, }: {
+     supabasePath: string;
+     autoConfirm?: boolean;
+ }): Promise<boolean>;
+ //# sourceMappingURL=copy-migrations.d.ts.map
package/dist/commands/install/copy-migrations.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AA6JA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAyHnB"}
package/dist/commands/install/copy-migrations.js ADDED
@@ -0,0 +1,212 @@
+ import fs from 'fs';
+ import path from 'path';
+ import { createRequire } from 'module';
+ import { fileURLToPath } from 'url';
+ import { log, confirm } from '@clack/prompts';
+ import chalk from 'chalk';
+ // Get the directory name in ES modules
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
+ // Create a require function to use require.resolve
+ const require = createRequire(import.meta.url);
+ // Function to find migrations directory
+ function findMigrationsDirectory() {
+     try {
+         // First try: resolve from installed @pgflow/core package
+         const corePackageJsonPath = require.resolve('@pgflow/core/package.json');
+         const corePackageFolder = path.dirname(corePackageJsonPath);
+         const packageMigrationsPath = path.join(corePackageFolder, 'dist', 'supabase', 'migrations');
+         if (fs.existsSync(packageMigrationsPath)) {
+             return packageMigrationsPath;
+         }
+         // If that fails, try development path
+         log.info('Could not find migrations in installed package, trying development paths...');
+     }
+     catch (error) {
+         log.info('Could not resolve @pgflow/core package, trying development paths...');
+     }
+     // Try development paths
+     // 1. Try relative to CLI dist folder (when running built CLI)
+     const distRelativePath = path.resolve(__dirname, '../../../../core/supabase/migrations');
+     if (fs.existsSync(distRelativePath)) {
+         return distRelativePath;
+     }
+     // 2. Try relative to CLI source folder (when running from source)
+     const sourceRelativePath = path.resolve(__dirname, '../../../../../core/supabase/migrations');
+     if (fs.existsSync(sourceRelativePath)) {
+         return sourceRelativePath;
+     }
+     // 3. Try local migrations directory (for backward compatibility)
+     const localMigrationsPath = path.resolve(__dirname, '../../migrations');
+     if (fs.existsSync(localMigrationsPath)) {
+         return localMigrationsPath;
+     }
+     // No migrations found
+     return null;
+ }
+ // Helper function to get the timestamp part from a migration filename
+ function getTimestampFromFilename(filename) {
+     const match = filename.match(/^(\d+)_/);
+     // Return the timestamp only if it exists and has the correct length (14 digits)
+     if (match && match[1] && match[1].length === 14 && /^\d{14}$/.test(match[1])) {
+         return match[1];
+     }
+     return '';
+ }
+ // Helper function to format a Date object into a migration timestamp string (YYYYMMDDhhmmss) using UTC
+ function formatDateToTimestamp(date) {
+     const year = date.getUTCFullYear();
+     const month = String(date.getUTCMonth() + 1).padStart(2, '0');
+     const day = String(date.getUTCDate()).padStart(2, '0');
+     const hours = String(date.getUTCHours()).padStart(2, '0');
+     const minutes = String(date.getUTCMinutes()).padStart(2, '0');
+     const seconds = String(date.getUTCSeconds()).padStart(2, '0');
+     return `${year}${month}${day}${hours}${minutes}${seconds}`;
+ }
+ // Helper function to parse a timestamp string into a Date object (interpreted as UTC)
+ function parseTimestampToDate(timestamp) {
+     // Validate format: YYYYMMDDhhmmss
+     if (!timestamp || timestamp.length !== 14 || !/^\d{14}$/.test(timestamp)) {
+         return null;
+     }
+     const year = parseInt(timestamp.substring(0, 4), 10);
+     const month = parseInt(timestamp.substring(4, 6), 10) - 1; // months are 0-indexed in JS Date
+     const day = parseInt(timestamp.substring(6, 8), 10);
+     const hours = parseInt(timestamp.substring(8, 10), 10);
+     const minutes = parseInt(timestamp.substring(10, 12), 10);
+     const seconds = parseInt(timestamp.substring(12, 14), 10);
+     // Create date in UTC and validate (invalid dates like Feb 31 will be auto-corrected by JS Date)
+     const date = new Date(Date.UTC(year, month, day, hours, minutes, seconds));
+     // Additional validation to ensure the parsed date matches the input
+     // This catches edge cases like month=13 that JS Date would autocorrect
+     if (date.getUTCFullYear() !== year ||
+         date.getUTCMonth() !== month ||
+         date.getUTCDate() !== day ||
+         date.getUTCHours() !== hours ||
+         date.getUTCMinutes() !== minutes ||
+         date.getUTCSeconds() !== seconds) {
+         return null;
+     }
+     return date;
+ }
+ // Helper function to generate a new timestamp that's higher than the reference timestamp (using UTC)
+ function generateNewTimestamp(referenceTimestamp, increment = 1) {
+     // First try to parse the reference timestamp to a Date
+     const parsedDate = parseTimestampToDate(referenceTimestamp);
+     // If we couldn't parse it, use current UTC time
+     if (!parsedDate) {
+         return formatDateToTimestamp(new Date());
+     }
+     // Add the specified number of seconds (default: 1)
+     parsedDate.setUTCSeconds(parsedDate.getUTCSeconds() + increment);
+     // Get current UTC time for comparison
+     const now = new Date();
+     // Return either the incremented timestamp or current time, whichever is later
+     // This ensures we never go backwards in time
+     if (parsedDate > now) {
+         return formatDateToTimestamp(parsedDate);
+     }
+     else {
+         // If we're already at or past current time, add increment to now
+         now.setUTCSeconds(now.getUTCSeconds() + increment);
+         return formatDateToTimestamp(now);
+     }
+ }
+ // Find the migrations directory
+ const sourcePath = findMigrationsDirectory();
+ export async function copyMigrations({ supabasePath, autoConfirm = false, }) {
+     const migrationsPath = path.join(supabasePath, 'migrations');
+     if (!fs.existsSync(migrationsPath)) {
+         fs.mkdirSync(migrationsPath);
+     }
+     // Check if pgflow migrations directory exists
+     if (!sourcePath || !fs.existsSync(sourcePath)) {
+         log.error(`Could not find migrations directory`);
+         log.warn('This might happen if @pgflow/core is not properly installed or built.');
+         log.info('Make sure @pgflow/core is installed and contains the migrations.');
+         log.info('If running in development mode, try building the core package first with: nx build core');
+         return false;
+     }
+     // Get all existing migrations in user's directory
+     const existingFiles = fs.existsSync(migrationsPath)
+         ? fs.readdirSync(migrationsPath)
+         : [];
+     // Find the latest migration timestamp in user's directory
+     let latestTimestamp = '00000000000000';
+     for (const file of existingFiles) {
+         if (file.endsWith('.sql')) {
+             const timestamp = getTimestampFromFilename(file);
+             // Only consider timestamps that have been validated by getTimestampFromFilename
+             // to have the correct length and format
+             if (timestamp && timestamp.length === 14) {
+                 const parsedDate = parseTimestampToDate(timestamp);
+                 // If we have a valid date and this timestamp is newer, update latestTimestamp
+                 if (parsedDate && parseInt(timestamp, 10) > parseInt(latestTimestamp, 10)) {
+                     latestTimestamp = timestamp;
+                 }
+             }
+         }
+     }
+     // Get all source migrations
+     const sourceFiles = fs
+         .readdirSync(sourcePath)
+         .filter((file) => file.endsWith('.sql'));
+     const filesToCopy = [];
+     const skippedFiles = [];
+     // Check which migrations need to be installed
+     for (const sourceFile of sourceFiles) {
+         // Check if this migration is already installed (by checking if the original filename
+         // appears in any existing migration filename)
+         const isAlreadyInstalled = existingFiles.some((existingFile) => existingFile.includes(sourceFile));
+         if (isAlreadyInstalled) {
+             skippedFiles.push(sourceFile);
+         }
+         else {
+             filesToCopy.push({
+                 source: sourceFile,
+                 destination: sourceFile, // Will be updated later with new timestamp
+             });
+         }
+     }
+     // If no files to copy, show message and return false (no changes made)
+     if (filesToCopy.length === 0) {
+         log.success('Migrations already up to date');
+         return false;
+     }
+     // Generate new timestamps for migrations to install
+     let baseTimestamp = latestTimestamp;
+     filesToCopy.forEach((file) => {
+         // Generate timestamp with increasing values to maintain order
+         // Each iteration uses the timestamp generated by the previous iteration as the base
+         baseTimestamp = generateNewTimestamp(baseTimestamp);
+         // Create new filename with format: newTimestamp_originalFilename
+         file.destination = `${baseTimestamp}_${file.source}`;
+     });
+     // Show preview and ask for confirmation only when not auto-confirming
+     if (!autoConfirm) {
+         const migrationLines = filesToCopy.map((file) => {
+             return ` ${chalk.bold(file.source)}`;
+         });
+         const summaryMsg = [
+             `Add to ${chalk.cyan('migrations/')} ${chalk.dim('(database schema for workflow engine)')}:`,
+             '',
+             ...migrationLines,
+         ].join('\n');
+         log.info(summaryMsg);
+         const confirmResult = await confirm({
+             message: `Add ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''}?`,
+         });
+         if (confirmResult !== true) {
+             log.warn('Migration installation skipped');
+             return false;
+         }
+     }
+     // Install migrations with new filenames
+     for (const file of filesToCopy) {
+         const sourcePath1 = path.join(sourcePath, file.source);
+         const destinationPath = path.join(migrationsPath, file.destination);
+         fs.copyFileSync(sourcePath1, destinationPath);
+     }
+     log.success(`Installed ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''}`);
+     return true; // Return true to indicate migrations were copied
+ }
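
The renaming scheme above is easy to miss inside the helpers: each bundled migration is copied under a fresh UTC timestamp that sorts strictly after the newest migration already in `supabase/migrations/`, and the original filename is kept as a suffix so reinstalls can be detected. A condensed sketch of that scheme (the file names are hypothetical, and the real code additionally falls back to the current time when the reference timestamp is invalid or already in the past):

```ts
// Condensed sketch of the re-stamping used by copyMigrations: bump the
// reference timestamp by one second and prefix it to the original filename.
function bumpTimestamp(ts: string, seconds = 1): string {
  const d = new Date(Date.UTC(
    +ts.slice(0, 4), +ts.slice(4, 6) - 1, +ts.slice(6, 8),
    +ts.slice(8, 10), +ts.slice(10, 12), +ts.slice(12, 14) + seconds,
  ));
  return d.toISOString().replace(/[-:T]/g, '').slice(0, 14);
}

let base = '20250101120000'; // latest migration already in supabase/migrations/
// Hypothetical bundled migration names, for illustration only
for (const source of ['000_pgflow_initial.sql', '010_pgflow_indexes.sql']) {
  base = bumpTimestamp(base);
  console.log(`${base}_${source}`);
  // -> 20250101120001_000_pgflow_initial.sql
  // -> 20250101120002_010_pgflow_indexes.sql
}
```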
package/dist/commands/install/create-edge-function.d.ts ADDED
@@ -0,0 +1,5 @@
+ export declare function createEdgeFunction({ supabasePath, autoConfirm, }: {
+     supabasePath: string;
+     autoConfirm?: boolean;
+ }): Promise<boolean>;
+ //# sourceMappingURL=create-edge-function.d.ts.map
package/dist/commands/install/create-edge-function.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"create-edge-function.d.ts","sourceRoot":"","sources":["../../../src/commands/install/create-edge-function.ts"],"names":[],"mappings":"AA0BA,wBAAsB,kBAAkB,CAAC,EACvC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAkEnB"}
package/dist/commands/install/create-edge-function.js ADDED
@@ -0,0 +1,75 @@
+ import fs from 'fs';
+ import path from 'path';
+ import { log, confirm } from '@clack/prompts';
+ import chalk from 'chalk';
+ import { getVersion } from '../../utils/get-version.js';
+ const INDEX_TS_TEMPLATE = `import { ControlPlane } from '@pgflow/edge-worker';
+ import * as flows from '../../flows/index.ts';
+
+ ControlPlane.serve(flows);
+ `;
+ const DENO_JSON_TEMPLATE = (version) => `{
+ "imports": {
+ "@pgflow/core": "npm:@pgflow/core@${version}",
+ "@pgflow/core/": "npm:@pgflow/core@${version}/",
+ "@pgflow/dsl": "npm:@pgflow/dsl@${version}",
+ "@pgflow/dsl/": "npm:@pgflow/dsl@${version}/",
+ "@pgflow/dsl/supabase": "npm:@pgflow/dsl@${version}/supabase",
+ "@pgflow/edge-worker": "jsr:@pgflow/edge-worker@${version}",
+ "@pgflow/edge-worker/": "jsr:@pgflow/edge-worker@${version}/",
+ "@pgflow/edge-worker/_internal": "jsr:@pgflow/edge-worker@${version}/_internal"
+ }
+ }
+ `;
+ export async function createEdgeFunction({ supabasePath, autoConfirm = false, }) {
+     const functionsDir = path.join(supabasePath, 'functions');
+     const pgflowFunctionDir = path.join(functionsDir, 'pgflow');
+     const indexPath = path.join(pgflowFunctionDir, 'index.ts');
+     const denoJsonPath = path.join(pgflowFunctionDir, 'deno.json');
+     // Relative paths for display
+     const relativeFunctionDir = 'supabase/functions/pgflow';
+     const relativeIndexPath = `${relativeFunctionDir}/index.ts`;
+     const relativeDenoJsonPath = `${relativeFunctionDir}/deno.json`;
+     // Check what needs to be created
+     const filesToCreate = [];
+     if (!fs.existsSync(indexPath)) {
+         filesToCreate.push({ path: indexPath, relativePath: relativeIndexPath });
+     }
+     if (!fs.existsSync(denoJsonPath)) {
+         filesToCreate.push({ path: denoJsonPath, relativePath: relativeDenoJsonPath });
+     }
+     // If all files exist, return success
+     if (filesToCreate.length === 0) {
+         log.success('Control Plane already up to date');
+         return false;
+     }
+     // Show preview and ask for confirmation only when not auto-confirming
+     if (!autoConfirm) {
+         const summaryMsg = [
+             `Create ${chalk.cyan('functions/pgflow/')} ${chalk.dim('(Control Plane for flow registration and compilation)')}:`,
+             '',
+             ...filesToCreate.map((file) => ` ${chalk.bold(path.basename(file.relativePath))}`),
+         ].join('\n');
+         log.info(summaryMsg);
+         const confirmResult = await confirm({
+             message: `Create functions/pgflow/?`,
+         });
+         if (confirmResult !== true) {
+             log.warn('Control Plane installation skipped');
+             return false;
+         }
+     }
+     // Create the directory if it doesn't exist
+     if (!fs.existsSync(pgflowFunctionDir)) {
+         fs.mkdirSync(pgflowFunctionDir, { recursive: true });
+     }
+     // Create files
+     if (filesToCreate.some((f) => f.path === indexPath)) {
+         fs.writeFileSync(indexPath, INDEX_TS_TEMPLATE);
+     }
+     if (filesToCreate.some((f) => f.path === denoJsonPath)) {
+         fs.writeFileSync(denoJsonPath, DENO_JSON_TEMPLATE(getVersion()));
+     }
+     log.success('Control Plane installed');
+     return true;
+ }
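
The generated `supabase/functions/pgflow/index.ts` imports `* as flows from '../../flows/index.ts'`, i.e. a barrel file under `supabase/flows/` that is created by `create-flows-directory.js` and not shown in this excerpt. A hypothetical sketch of what that barrel might contain so that `ControlPlane.serve(flows)` and the `greet-user-worker` resolve the same flow; the export name and file come from the worker template above, the barrel layout itself is an assumption:

```ts
// Hypothetical supabase/flows/index.ts — a barrel re-exporting flow
// definitions so the Control Plane can serve them by slug. Only GreetUser /
// greet-user.ts appear in this diff; this is not necessarily the file the
// installer actually writes.
export { GreetUser } from './greet-user.ts';
```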
package/dist/commands/install/create-example-worker.d.ts ADDED
@@ -0,0 +1,5 @@
+ export declare function createExampleWorker({ supabasePath, autoConfirm, }: {
+     supabasePath: string;
+     autoConfirm?: boolean;
+ }): Promise<boolean>;
+ //# sourceMappingURL=create-example-worker.d.ts.map
package/dist/commands/install/create-example-worker.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"create-example-worker.d.ts","sourceRoot":"","sources":["../../../src/commands/install/create-example-worker.ts"],"names":[],"mappings":"AA0BA,wBAAsB,mBAAmB,CAAC,EACxC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAkEnB"}
package/dist/commands/install/create-example-worker.js ADDED
@@ -0,0 +1,75 @@
+ import fs from 'fs';
+ import path from 'path';
+ import { log, confirm } from '@clack/prompts';
+ import chalk from 'chalk';
+ import { getVersion } from '../../utils/get-version.js';
+ const INDEX_TS_TEMPLATE = `import { EdgeWorker } from '@pgflow/edge-worker';
+ import { GreetUser } from '../../flows/greet-user.ts';
+
+ EdgeWorker.start(GreetUser);
+ `;
+ const DENO_JSON_TEMPLATE = (version) => `{
+ "imports": {
+ "@pgflow/core": "npm:@pgflow/core@${version}",
+ "@pgflow/core/": "npm:@pgflow/core@${version}/",
+ "@pgflow/dsl": "npm:@pgflow/dsl@${version}",
+ "@pgflow/dsl/": "npm:@pgflow/dsl@${version}/",
+ "@pgflow/dsl/supabase": "npm:@pgflow/dsl@${version}/supabase",
+ "@pgflow/edge-worker": "jsr:@pgflow/edge-worker@${version}",
+ "@pgflow/edge-worker/": "jsr:@pgflow/edge-worker@${version}/",
+ "@pgflow/edge-worker/_internal": "jsr:@pgflow/edge-worker@${version}/_internal"
+ }
+ }
+ `;
+ export async function createExampleWorker({ supabasePath, autoConfirm = false, }) {
+     const functionsDir = path.join(supabasePath, 'functions');
+     const workerDir = path.join(functionsDir, 'greet-user-worker');
+     const indexPath = path.join(workerDir, 'index.ts');
+     const denoJsonPath = path.join(workerDir, 'deno.json');
+     // Relative paths for display
+     const relativeWorkerDir = 'supabase/functions/greet-user-worker';
+     const relativeIndexPath = `${relativeWorkerDir}/index.ts`;
+     const relativeDenoJsonPath = `${relativeWorkerDir}/deno.json`;
+     // Check what needs to be created
+     const filesToCreate = [];
+     if (!fs.existsSync(indexPath)) {
+         filesToCreate.push({ path: indexPath, relativePath: relativeIndexPath });
+     }
+     if (!fs.existsSync(denoJsonPath)) {
+         filesToCreate.push({ path: denoJsonPath, relativePath: relativeDenoJsonPath });
+     }
+     // If all files exist, return success
+     if (filesToCreate.length === 0) {
+         log.success('Example worker already up to date');
+         return false;
+     }
+     // Show preview and ask for confirmation only when not auto-confirming
+     if (!autoConfirm) {
+         const summaryMsg = [
+             `Create ${chalk.cyan('functions/greet-user-worker/')} ${chalk.dim('(example worker for GreetUser flow)')}:`,
+             '',
+             ...filesToCreate.map((file) => ` ${chalk.bold(path.basename(file.relativePath))}`),
+         ].join('\n');
+         log.info(summaryMsg);
+         const confirmResult = await confirm({
+             message: `Create functions/greet-user-worker/?`,
+         });
+         if (confirmResult !== true) {
+             log.warn('Example worker installation skipped');
+             return false;
+         }
+     }
+     // Create the directory if it doesn't exist
+     if (!fs.existsSync(workerDir)) {
+         fs.mkdirSync(workerDir, { recursive: true });
+     }
+     // Create files
+     if (filesToCreate.some((f) => f.path === indexPath)) {
+         fs.writeFileSync(indexPath, INDEX_TS_TEMPLATE);
+     }
+     if (filesToCreate.some((f) => f.path === denoJsonPath)) {
+         fs.writeFileSync(denoJsonPath, DENO_JSON_TEMPLATE(getVersion()));
+     }
+     log.success('Example worker created');
+     return true;
+ }
package/dist/commands/install/create-flows-directory.d.ts ADDED
@@ -0,0 +1,5 @@
+ export declare function createFlowsDirectory({ supabasePath, autoConfirm, }: {
+     supabasePath: string;
+     autoConfirm?: boolean;
+ }): Promise<boolean>;
+ //# sourceMappingURL=create-flows-directory.d.ts.map
package/dist/commands/install/create-flows-directory.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"create-flows-directory.d.ts","sourceRoot":"","sources":["../../../src/commands/install/create-flows-directory.ts"],"names":[],"mappings":"AA+BA,wBAAsB,oBAAoB,CAAC,EACzC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAiEnB"}