pgflow 0.8.0 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -4
- package/dist/commands/compile/index.d.ts +7 -0
- package/dist/commands/compile/index.d.ts.map +1 -1
- package/dist/commands/compile/index.js +85 -130
- package/dist/commands/install/copy-migrations.d.ts.map +1 -1
- package/dist/commands/install/copy-migrations.js +19 -60
- package/dist/commands/install/create-edge-function.d.ts +5 -0
- package/dist/commands/install/create-edge-function.d.ts.map +1 -0
- package/dist/commands/install/create-edge-function.js +75 -0
- package/dist/commands/install/create-example-worker.d.ts +5 -0
- package/dist/commands/install/create-example-worker.d.ts.map +1 -0
- package/dist/commands/install/create-example-worker.js +75 -0
- package/dist/commands/install/create-flows-directory.d.ts +5 -0
- package/dist/commands/install/create-flows-directory.d.ts.map +1 -0
- package/dist/commands/install/create-flows-directory.js +79 -0
- package/dist/commands/install/index.d.ts.map +1 -1
- package/dist/commands/install/index.js +75 -61
- package/dist/commands/install/supabase-path-prompt.d.ts.map +1 -1
- package/dist/commands/install/supabase-path-prompt.js +0 -4
- package/dist/commands/install/update-config-toml.d.ts.map +1 -1
- package/dist/commands/install/update-config-toml.js +33 -27
- package/dist/commands/install/update-env-file.d.ts.map +1 -1
- package/dist/commands/install/update-env-file.js +18 -40
- package/dist/index.js +10 -26
- package/dist/package.json +1 -1
- package/dist/utils/get-version.d.ts +6 -0
- package/dist/utils/get-version.d.ts.map +1 -0
- package/dist/utils/get-version.js +21 -0
- package/package.json +2 -2
- package/dist/deno/internal_compile.js +0 -55
package/README.md
CHANGED
|
@@ -15,8 +15,7 @@ This package provides essential tools for setting up, managing, and deploying pg
|
|
|
15
15
|
|
|
16
16
|
## Prerequisites
|
|
17
17
|
|
|
18
|
-
- Supabase CLI v2.
|
|
19
|
-
- Deno v2.1.x or higher (for flow compilation)
|
|
18
|
+
- Supabase CLI v2.50.3 or higher
|
|
20
19
|
- Local Supabase project initialized
|
|
21
20
|
|
|
22
21
|
## Installation
|
|
@@ -65,13 +64,13 @@ The installer will:
|
|
|
65
64
|
Convert a TypeScript flow definition into a SQL migration:
|
|
66
65
|
|
|
67
66
|
```bash
|
|
68
|
-
npx pgflow@latest compile
|
|
67
|
+
npx pgflow@latest compile my_flow
|
|
69
68
|
```
|
|
70
69
|
|
|
71
70
|
Options:
|
|
72
71
|
|
|
73
|
-
- `--deno-json <path>` - Path to custom deno.json configuration file
|
|
74
72
|
- `--supabase-path <path>` - Path to custom Supabase directory
|
|
73
|
+
- `--control-plane-url <url>` - ControlPlane URL (default: `http://127.0.0.1:54321/functions/v1/pgflow`)
|
|
75
74
|
|
|
76
75
|
The compiler will:
|
|
77
76
|
|
|
@@ -1,4 +1,11 @@
|
|
|
1
1
|
import { type Command } from 'commander';
|
|
2
|
+
/**
|
|
3
|
+
* Fetch flow SQL from ControlPlane HTTP endpoint
|
|
4
|
+
*/
|
|
5
|
+
export declare function fetchFlowSQL(flowSlug: string, controlPlaneUrl: string, secretKey: string): Promise<{
|
|
6
|
+
flowSlug: string;
|
|
7
|
+
sql: string[];
|
|
8
|
+
}>;
|
|
2
9
|
declare const _default: (program: Command) => void;
|
|
3
10
|
export default _default;
|
|
4
11
|
//# sourceMappingURL=index.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAU,MAAM,WAAW,CAAC;AAMjD;;GAEG;AACH,wBAAsB,YAAY,CAChC,QAAQ,EAAE,MAAM,EAChB,eAAe,EAAE,MAAM,EACvB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,EAAE,CAAA;CAAE,CAAC,CAsE9C;yBAEe,SAAS,OAAO;AAAhC,wBAkIE"}
|
|
@@ -1,73 +1,80 @@
|
|
|
1
|
+
import { Option } from 'commander';
|
|
1
2
|
import chalk from 'chalk';
|
|
2
|
-
import { intro, log,
|
|
3
|
+
import { intro, log, outro } from '@clack/prompts';
|
|
3
4
|
import path from 'path';
|
|
4
5
|
import fs from 'fs';
|
|
5
|
-
import { spawn } from 'child_process';
|
|
6
|
-
import { fileURLToPath } from 'url';
|
|
7
|
-
// Get the directory name in ES modules
|
|
8
|
-
const __filename = fileURLToPath(import.meta.url);
|
|
9
|
-
const __dirname = path.dirname(__filename);
|
|
10
6
|
/**
|
|
11
|
-
*
|
|
12
|
-
* Each argument is displayed on a separate line for better readability
|
|
7
|
+
* Fetch flow SQL from ControlPlane HTTP endpoint
|
|
13
8
|
*/
|
|
14
|
-
function
|
|
15
|
-
const
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
9
|
+
export async function fetchFlowSQL(flowSlug, controlPlaneUrl, secretKey) {
|
|
10
|
+
const url = `${controlPlaneUrl}/flows/${flowSlug}`;
|
|
11
|
+
try {
|
|
12
|
+
const response = await fetch(url, {
|
|
13
|
+
headers: {
|
|
14
|
+
'Authorization': `Bearer ${secretKey}`,
|
|
15
|
+
'apikey': secretKey,
|
|
16
|
+
'Content-Type': 'application/json',
|
|
17
|
+
},
|
|
18
|
+
});
|
|
19
|
+
if (response.status === 404) {
|
|
20
|
+
let errorData = {};
|
|
21
|
+
try {
|
|
22
|
+
errorData = await response.json();
|
|
23
|
+
}
|
|
24
|
+
catch {
|
|
25
|
+
// JSON parse failed - likely Supabase gateway error (HTML or plain text)
|
|
26
|
+
}
|
|
27
|
+
// Check if this is our ControlPlane's 404 (has 'Flow Not Found' error)
|
|
28
|
+
// vs Supabase gateway's 404 (function doesn't exist)
|
|
29
|
+
if (errorData.error === 'Flow Not Found') {
|
|
30
|
+
throw new Error(`Flow '${flowSlug}' not found.\n\n` +
|
|
31
|
+
`${errorData.message || 'Did you add it to supabase/functions/pgflow/index.ts?'}\n\n` +
|
|
32
|
+
`Fix:\n` +
|
|
33
|
+
`1. Add your flow to supabase/functions/pgflow/index.ts\n` +
|
|
34
|
+
`2. Restart edge functions: supabase functions serve`);
|
|
35
|
+
}
|
|
36
|
+
// ControlPlane edge function itself doesn't exist
|
|
37
|
+
throw new Error('ControlPlane edge function not found.\n\n' +
|
|
38
|
+
'The pgflow edge function is not installed or not running.\n\n' +
|
|
39
|
+
'Fix:\n' +
|
|
40
|
+
'1. Run: npx pgflow install\n' +
|
|
41
|
+
'2. Start edge functions: supabase functions serve\n\n' +
|
|
42
|
+
'Or use previous version: npx pgflow@0.8.0 compile path/to/flow.ts');
|
|
21
43
|
}
|
|
22
|
-
|
|
23
|
-
|
|
44
|
+
if (!response.ok) {
|
|
45
|
+
const errorText = await response.text();
|
|
46
|
+
throw new Error(`HTTP ${response.status}: ${errorText}`);
|
|
24
47
|
}
|
|
25
|
-
|
|
26
|
-
|
|
48
|
+
return await response.json();
|
|
49
|
+
}
|
|
50
|
+
catch (error) {
|
|
51
|
+
if (error instanceof Error) {
|
|
52
|
+
// Check for connection refused errors
|
|
53
|
+
if (error.message.includes('ECONNREFUSED') ||
|
|
54
|
+
error.message.includes('fetch failed')) {
|
|
55
|
+
throw new Error('Could not connect to ControlPlane.\n\n' +
|
|
56
|
+
'Fix options:\n' +
|
|
57
|
+
'1. Start Supabase: supabase start\n' +
|
|
58
|
+
'2. Start edge functions: supabase functions serve\n\n' +
|
|
59
|
+
'Or use previous version: npx pgflow@0.8.0 compile path/to/flow.ts');
|
|
60
|
+
}
|
|
61
|
+
throw error;
|
|
27
62
|
}
|
|
28
|
-
|
|
29
|
-
}
|
|
30
|
-
return `$ ${cmd}\n${formattedArgs.join('\n')}`;
|
|
31
|
-
}
|
|
32
|
-
/**
|
|
33
|
-
* Creates a task log entry with a command and its output
|
|
34
|
-
*/
|
|
35
|
-
function createTaskLog(command, args, output) {
|
|
36
|
-
return [
|
|
37
|
-
chalk.bold('Command:'),
|
|
38
|
-
formatCommand(command, args),
|
|
39
|
-
'',
|
|
40
|
-
chalk.bold('Output:'),
|
|
41
|
-
output.trim() ? output.trim() : '(no output)',
|
|
42
|
-
].join('\n');
|
|
63
|
+
throw new Error(`Unknown error: ${String(error)}`);
|
|
64
|
+
}
|
|
43
65
|
}
|
|
44
66
|
export default (program) => {
|
|
45
67
|
program
|
|
46
68
|
.command('compile')
|
|
47
|
-
.description('Compiles a
|
|
48
|
-
.argument('<
|
|
49
|
-
.option('--deno-json <denoJsonPath>', 'Path to deno.json configuration file')
|
|
69
|
+
.description('Compiles a flow into SQL migration via ControlPlane HTTP')
|
|
70
|
+
.argument('<flowSlug>', 'Flow slug to compile (e.g., my_flow)')
|
|
50
71
|
.option('--supabase-path <supabasePath>', 'Path to the Supabase folder')
|
|
51
|
-
.
|
|
72
|
+
.option('--control-plane-url <url>', 'Control plane URL', 'http://127.0.0.1:54321/functions/v1/pgflow')
|
|
73
|
+
.addOption(new Option('--secret-key [key]', 'Supabase anon/service_role key')
|
|
74
|
+
.hideHelp())
|
|
75
|
+
.action(async (flowSlug, options) => {
|
|
52
76
|
intro('pgflow - Compile Flow to SQL');
|
|
53
77
|
try {
|
|
54
|
-
// Resolve paths
|
|
55
|
-
const resolvedFlowPath = path.resolve(process.cwd(), flowPath);
|
|
56
|
-
// Only resolve denoJsonPath if it's provided
|
|
57
|
-
let resolvedDenoJsonPath;
|
|
58
|
-
if (options.denoJson) {
|
|
59
|
-
resolvedDenoJsonPath = path.resolve(process.cwd(), options.denoJson);
|
|
60
|
-
// Validate deno.json path if provided
|
|
61
|
-
if (!fs.existsSync(resolvedDenoJsonPath)) {
|
|
62
|
-
log.error(`deno.json file not found: ${resolvedDenoJsonPath}`);
|
|
63
|
-
process.exit(1);
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
// Validate flow path
|
|
67
|
-
if (!fs.existsSync(resolvedFlowPath)) {
|
|
68
|
-
log.error(`Flow file not found: ${resolvedFlowPath}`);
|
|
69
|
-
process.exit(1);
|
|
70
|
-
}
|
|
71
78
|
// Validate Supabase path
|
|
72
79
|
let supabasePath;
|
|
73
80
|
if (options.supabasePath) {
|
|
@@ -83,14 +90,30 @@ export default (program) => {
|
|
|
83
90
|
`Please provide a valid Supabase path using --supabase-path option or ensure ./supabase/ directory exists.`);
|
|
84
91
|
process.exit(1);
|
|
85
92
|
}
|
|
86
|
-
// Find the internal_compile.js script
|
|
87
|
-
const internalCompileScript = path.resolve(__dirname, '../../deno/internal_compile.js');
|
|
88
93
|
// Create migrations directory if it doesn't exist
|
|
89
94
|
const migrationsDir = path.resolve(supabasePath, 'migrations');
|
|
90
95
|
if (!fs.existsSync(migrationsDir)) {
|
|
91
96
|
fs.mkdirSync(migrationsDir, { recursive: true });
|
|
92
97
|
log.success(`Created migrations directory: ${migrationsDir}`);
|
|
93
98
|
}
|
|
99
|
+
// Check for existing migrations
|
|
100
|
+
const existingMigrations = fs
|
|
101
|
+
.readdirSync(migrationsDir)
|
|
102
|
+
.filter((file) => file.endsWith(`_create_${flowSlug}_flow.sql`));
|
|
103
|
+
if (existingMigrations.length > 0) {
|
|
104
|
+
log.warn(`Found existing migration(s) for '${flowSlug}':\n` +
|
|
105
|
+
existingMigrations.map((f) => ` ${f}`).join('\n') +
|
|
106
|
+
'\nCreating new migration anyway...');
|
|
107
|
+
}
|
|
108
|
+
// Fetch flow SQL from ControlPlane
|
|
109
|
+
log.info(`Compiling flow: ${flowSlug}`);
|
|
110
|
+
const result = await fetchFlowSQL(flowSlug, options.controlPlaneUrl, options.secretKey);
|
|
111
|
+
// Validate result
|
|
112
|
+
if (!result.sql || result.sql.length === 0) {
|
|
113
|
+
throw new Error('ControlPlane returned empty SQL');
|
|
114
|
+
}
|
|
115
|
+
// Join SQL statements
|
|
116
|
+
const compiledSql = result.sql.join('\n') + '\n';
|
|
94
117
|
// Generate timestamp for migration file in format YYYYMMDDHHMMSS using UTC
|
|
95
118
|
const now = new Date();
|
|
96
119
|
const timestamp = [
|
|
@@ -101,25 +124,8 @@ export default (program) => {
|
|
|
101
124
|
String(now.getUTCMinutes()).padStart(2, '0'),
|
|
102
125
|
String(now.getUTCSeconds()).padStart(2, '0'),
|
|
103
126
|
].join('');
|
|
104
|
-
//
|
|
105
|
-
|
|
106
|
-
const compiledSql = await runDenoCompilation(internalCompileScript, resolvedFlowPath, resolvedDenoJsonPath);
|
|
107
|
-
// Extract flow name from the first line of the SQL output using regex
|
|
108
|
-
// Looking for pattern: SELECT pgflow.create_flow('flow_name', ...);
|
|
109
|
-
const flowNameMatch = compiledSql.match(/SELECT\s+pgflow\.create_flow\s*\(\s*'([^']+)'/i);
|
|
110
|
-
// Use extracted flow name or fallback to the file basename if extraction fails
|
|
111
|
-
let flowName;
|
|
112
|
-
if (flowNameMatch && flowNameMatch[1]) {
|
|
113
|
-
flowName = flowNameMatch[1];
|
|
114
|
-
log.info(`Extracted flow name: ${flowName}`);
|
|
115
|
-
}
|
|
116
|
-
else {
|
|
117
|
-
// Fallback to file basename if regex doesn't match
|
|
118
|
-
flowName = path.basename(resolvedFlowPath, path.extname(resolvedFlowPath));
|
|
119
|
-
log.warn(`Could not extract flow name from SQL, using file basename: ${flowName}`);
|
|
120
|
-
}
|
|
121
|
-
// Create migration filename in the format: <timestamp>_create_<flow_name>_flow.sql
|
|
122
|
-
const migrationFileName = `${timestamp}_create_${flowName}_flow.sql`;
|
|
127
|
+
// Create migration filename in the format: <timestamp>_create_<flow_slug>_flow.sql
|
|
128
|
+
const migrationFileName = `${timestamp}_create_${flowSlug}_flow.sql`;
|
|
123
129
|
const migrationFilePath = path.join(migrationsDir, migrationFileName);
|
|
124
130
|
// Write the SQL to a migration file
|
|
125
131
|
fs.writeFileSync(migrationFilePath, compiledSql);
|
|
@@ -128,12 +134,12 @@ export default (program) => {
|
|
|
128
134
|
log.success(`Migration file created: ${relativeFilePath}`);
|
|
129
135
|
// Display next steps with outro
|
|
130
136
|
outro([
|
|
131
|
-
chalk.bold('Flow compilation completed successfully!'),
|
|
137
|
+
chalk.green.bold('✓ Flow compilation completed successfully!'),
|
|
132
138
|
'',
|
|
133
139
|
`- Run ${chalk.cyan('supabase migration up')} to apply the migration`,
|
|
134
140
|
'',
|
|
135
141
|
chalk.bold('Continue the setup:'),
|
|
136
|
-
chalk.blue.underline('https://pgflow.dev/getting-started/run-flow/')
|
|
142
|
+
chalk.blue.underline('https://pgflow.dev/getting-started/run-flow/'),
|
|
137
143
|
].join('\n'));
|
|
138
144
|
}
|
|
139
145
|
catch (error) {
|
|
@@ -142,60 +148,9 @@ export default (program) => {
|
|
|
142
148
|
chalk.bold('Compilation failed!'),
|
|
143
149
|
'',
|
|
144
150
|
chalk.bold('For troubleshooting help:'),
|
|
145
|
-
chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/')
|
|
151
|
+
chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/'),
|
|
146
152
|
].join('\n'));
|
|
147
153
|
process.exit(1);
|
|
148
154
|
}
|
|
149
155
|
});
|
|
150
156
|
};
|
|
151
|
-
/**
|
|
152
|
-
* Runs the Deno compilation script and returns the compiled SQL
|
|
153
|
-
*/
|
|
154
|
-
async function runDenoCompilation(scriptPath, flowPath, denoJsonPath) {
|
|
155
|
-
return new Promise((resolve, reject) => {
|
|
156
|
-
// Validate input paths
|
|
157
|
-
if (!scriptPath) {
|
|
158
|
-
return reject(new Error('Internal script path is required'));
|
|
159
|
-
}
|
|
160
|
-
if (!flowPath) {
|
|
161
|
-
return reject(new Error('Flow path is required'));
|
|
162
|
-
}
|
|
163
|
-
// Build the command arguments array
|
|
164
|
-
const args = ['run', '--allow-read', '--allow-net', '--allow-env'];
|
|
165
|
-
// Only add the config argument if denoJsonPath is provided and valid
|
|
166
|
-
if (denoJsonPath && typeof denoJsonPath === 'string') {
|
|
167
|
-
args.push(`--config=${denoJsonPath}`);
|
|
168
|
-
}
|
|
169
|
-
// Add the script path and flow path
|
|
170
|
-
args.push(scriptPath, flowPath);
|
|
171
|
-
// Log the command for debugging with colored output
|
|
172
|
-
log.info('Running Deno compiler');
|
|
173
|
-
const deno = spawn('deno', args);
|
|
174
|
-
let stdout = '';
|
|
175
|
-
let stderr = '';
|
|
176
|
-
deno.stdout.on('data', (data) => {
|
|
177
|
-
stdout += data.toString();
|
|
178
|
-
});
|
|
179
|
-
deno.stderr.on('data', (data) => {
|
|
180
|
-
stderr += data.toString();
|
|
181
|
-
});
|
|
182
|
-
deno.on('close', (code) => {
|
|
183
|
-
// Always display the task log with command and output
|
|
184
|
-
note(createTaskLog('deno', args, stdout));
|
|
185
|
-
if (code === 0) {
|
|
186
|
-
if (stdout.trim().length === 0) {
|
|
187
|
-
reject(new Error('Compilation produced no output'));
|
|
188
|
-
}
|
|
189
|
-
else {
|
|
190
|
-
resolve(stdout);
|
|
191
|
-
}
|
|
192
|
-
}
|
|
193
|
-
else {
|
|
194
|
-
reject(new Error(`Deno process exited with code ${code}${stderr ? `\n${stderr}` : ''}`));
|
|
195
|
-
}
|
|
196
|
-
});
|
|
197
|
-
deno.on('error', (err) => {
|
|
198
|
-
reject(new Error(`Failed to start Deno process: ${err.message}. Make sure Deno is installed.`));
|
|
199
|
-
});
|
|
200
|
-
});
|
|
201
|
-
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AA6JA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,
|
|
1
|
+
{"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AA6JA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAyHnB"}
|
|
@@ -2,7 +2,7 @@ import fs from 'fs';
|
|
|
2
2
|
import path from 'path';
|
|
3
3
|
import { createRequire } from 'module';
|
|
4
4
|
import { fileURLToPath } from 'url';
|
|
5
|
-
import { log, confirm
|
|
5
|
+
import { log, confirm } from '@clack/prompts';
|
|
6
6
|
import chalk from 'chalk';
|
|
7
7
|
// Get the directory name in ES modules
|
|
8
8
|
const __filename = fileURLToPath(import.meta.url);
|
|
@@ -168,30 +168,9 @@ export async function copyMigrations({ supabasePath, autoConfirm = false, }) {
|
|
|
168
168
|
});
|
|
169
169
|
}
|
|
170
170
|
}
|
|
171
|
-
// If no files to copy, show message
|
|
171
|
+
// If no files to copy, show message and return false (no changes made)
|
|
172
172
|
if (filesToCopy.length === 0) {
|
|
173
|
-
|
|
174
|
-
log.success('All pgflow migrations are already in place');
|
|
175
|
-
// Show details of already installed migrations
|
|
176
|
-
if (skippedFiles.length > 0) {
|
|
177
|
-
const detailedMsg = [
|
|
178
|
-
'Already installed migrations:',
|
|
179
|
-
...skippedFiles.map((file) => {
|
|
180
|
-
// Find the matching existing file to show how it was installed
|
|
181
|
-
const matchingFile = existingFiles.find((existingFile) => existingFile.includes(file));
|
|
182
|
-
if (matchingFile === file) {
|
|
183
|
-
// Installed with old direct method
|
|
184
|
-
return ` ${chalk.dim('•')} ${chalk.bold(file)}`;
|
|
185
|
-
}
|
|
186
|
-
else {
|
|
187
|
-
// Installed with new timestamped method
|
|
188
|
-
const timestampPart = matchingFile?.substring(0, matchingFile.indexOf(file) - 1) || '';
|
|
189
|
-
return ` ${chalk.dim('•')} ${chalk.dim(timestampPart + '_')}${chalk.bold(file)}`;
|
|
190
|
-
}
|
|
191
|
-
}),
|
|
192
|
-
].join('\n');
|
|
193
|
-
note(detailedMsg, 'Existing pgflow Migrations');
|
|
194
|
-
}
|
|
173
|
+
log.success('Migrations already up to date');
|
|
195
174
|
return false;
|
|
196
175
|
}
|
|
197
176
|
// Generate new timestamps for migrations to install
|
|
@@ -203,36 +182,24 @@ export async function copyMigrations({ supabasePath, autoConfirm = false, }) {
|
|
|
203
182
|
// Create new filename with format: newTimestamp_originalFilename
|
|
204
183
|
file.destination = `${baseTimestamp}_${file.source}`;
|
|
205
184
|
});
|
|
206
|
-
|
|
207
|
-
// Prepare summary message with colored output
|
|
208
|
-
const summaryParts = [];
|
|
209
|
-
if (filesToCopy.length > 0) {
|
|
210
|
-
summaryParts.push(`${chalk.green('New migrations to install:')}\n${filesToCopy
|
|
211
|
-
.map((file) => {
|
|
212
|
-
// Extract the timestamp part from the new filename
|
|
213
|
-
const newTimestamp = file.destination.substring(0, 14);
|
|
214
|
-
// Format: dim timestamp + bright original name
|
|
215
|
-
return `${chalk.green('+')} ${file.source} → ${chalk.dim(newTimestamp + '_')}${chalk.bold(file.source)}`;
|
|
216
|
-
})
|
|
217
|
-
.join('\n')}`);
|
|
218
|
-
}
|
|
219
|
-
if (skippedFiles.length > 0) {
|
|
220
|
-
summaryParts.push(`${chalk.yellow('Already installed:')}\n${skippedFiles
|
|
221
|
-
.map((file) => `${chalk.yellow('•')} ${file}`)
|
|
222
|
-
.join('\n')}`);
|
|
223
|
-
}
|
|
224
|
-
// Show summary and ask for confirmation if not auto-confirming
|
|
225
|
-
note(summaryParts.join('\n\n'), 'pgflow Migrations');
|
|
226
|
-
let shouldContinue = autoConfirm;
|
|
185
|
+
// Show preview and ask for confirmation only when not auto-confirming
|
|
227
186
|
if (!autoConfirm) {
|
|
187
|
+
const migrationLines = filesToCopy.map((file) => {
|
|
188
|
+
return ` ${chalk.bold(file.source)}`;
|
|
189
|
+
});
|
|
190
|
+
const summaryMsg = [
|
|
191
|
+
`Add to ${chalk.cyan('migrations/')} ${chalk.dim('(database schema for workflow engine)')}:`,
|
|
192
|
+
'',
|
|
193
|
+
...migrationLines,
|
|
194
|
+
].join('\n');
|
|
195
|
+
log.info(summaryMsg);
|
|
228
196
|
const confirmResult = await confirm({
|
|
229
|
-
message: `
|
|
197
|
+
message: `Add ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''}?`,
|
|
230
198
|
});
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
return false;
|
|
199
|
+
if (confirmResult !== true) {
|
|
200
|
+
log.warn('Migration installation skipped');
|
|
201
|
+
return false;
|
|
202
|
+
}
|
|
236
203
|
}
|
|
237
204
|
// Install migrations with new filenames
|
|
238
205
|
for (const file of filesToCopy) {
|
|
@@ -240,14 +207,6 @@ export async function copyMigrations({ supabasePath, autoConfirm = false, }) {
|
|
|
240
207
|
const destinationPath = path.join(migrationsPath, file.destination);
|
|
241
208
|
fs.copyFileSync(sourcePath1, destinationPath);
|
|
242
209
|
}
|
|
243
|
-
|
|
244
|
-
const detailedSuccessMsg = [
|
|
245
|
-
`Installed ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to your Supabase project:`,
|
|
246
|
-
...filesToCopy.map((file) => {
|
|
247
|
-
const newTimestamp = file.destination.substring(0, 14);
|
|
248
|
-
return ` ${chalk.dim(newTimestamp + '_')}${chalk.bold(file.source)}`;
|
|
249
|
-
}),
|
|
250
|
-
].join('\n');
|
|
251
|
-
log.success(detailedSuccessMsg);
|
|
210
|
+
log.success(`Installed ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''}`);
|
|
252
211
|
return true; // Return true to indicate migrations were copied
|
|
253
212
|
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"create-edge-function.d.ts","sourceRoot":"","sources":["../../../src/commands/install/create-edge-function.ts"],"names":[],"mappings":"AA0BA,wBAAsB,kBAAkB,CAAC,EACvC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAkEnB"}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { log, confirm } from '@clack/prompts';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
import { getVersion } from '../../utils/get-version.js';
|
|
6
|
+
const INDEX_TS_TEMPLATE = `import { ControlPlane } from '@pgflow/edge-worker';
|
|
7
|
+
import * as flows from '../../flows/index.ts';
|
|
8
|
+
|
|
9
|
+
ControlPlane.serve(flows);
|
|
10
|
+
`;
|
|
11
|
+
const DENO_JSON_TEMPLATE = (version) => `{
|
|
12
|
+
"imports": {
|
|
13
|
+
"@pgflow/core": "npm:@pgflow/core@${version}",
|
|
14
|
+
"@pgflow/core/": "npm:@pgflow/core@${version}/",
|
|
15
|
+
"@pgflow/dsl": "npm:@pgflow/dsl@${version}",
|
|
16
|
+
"@pgflow/dsl/": "npm:@pgflow/dsl@${version}/",
|
|
17
|
+
"@pgflow/dsl/supabase": "npm:@pgflow/dsl@${version}/supabase",
|
|
18
|
+
"@pgflow/edge-worker": "jsr:@pgflow/edge-worker@${version}",
|
|
19
|
+
"@pgflow/edge-worker/": "jsr:@pgflow/edge-worker@${version}/",
|
|
20
|
+
"@pgflow/edge-worker/_internal": "jsr:@pgflow/edge-worker@${version}/_internal"
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
`;
|
|
24
|
+
export async function createEdgeFunction({ supabasePath, autoConfirm = false, }) {
|
|
25
|
+
const functionsDir = path.join(supabasePath, 'functions');
|
|
26
|
+
const pgflowFunctionDir = path.join(functionsDir, 'pgflow');
|
|
27
|
+
const indexPath = path.join(pgflowFunctionDir, 'index.ts');
|
|
28
|
+
const denoJsonPath = path.join(pgflowFunctionDir, 'deno.json');
|
|
29
|
+
// Relative paths for display
|
|
30
|
+
const relativeFunctionDir = 'supabase/functions/pgflow';
|
|
31
|
+
const relativeIndexPath = `${relativeFunctionDir}/index.ts`;
|
|
32
|
+
const relativeDenoJsonPath = `${relativeFunctionDir}/deno.json`;
|
|
33
|
+
// Check what needs to be created
|
|
34
|
+
const filesToCreate = [];
|
|
35
|
+
if (!fs.existsSync(indexPath)) {
|
|
36
|
+
filesToCreate.push({ path: indexPath, relativePath: relativeIndexPath });
|
|
37
|
+
}
|
|
38
|
+
if (!fs.existsSync(denoJsonPath)) {
|
|
39
|
+
filesToCreate.push({ path: denoJsonPath, relativePath: relativeDenoJsonPath });
|
|
40
|
+
}
|
|
41
|
+
// If all files exist, return success
|
|
42
|
+
if (filesToCreate.length === 0) {
|
|
43
|
+
log.success('Control Plane already up to date');
|
|
44
|
+
return false;
|
|
45
|
+
}
|
|
46
|
+
// Show preview and ask for confirmation only when not auto-confirming
|
|
47
|
+
if (!autoConfirm) {
|
|
48
|
+
const summaryMsg = [
|
|
49
|
+
`Create ${chalk.cyan('functions/pgflow/')} ${chalk.dim('(Control Plane for flow registration and compilation)')}:`,
|
|
50
|
+
'',
|
|
51
|
+
...filesToCreate.map((file) => ` ${chalk.bold(path.basename(file.relativePath))}`),
|
|
52
|
+
].join('\n');
|
|
53
|
+
log.info(summaryMsg);
|
|
54
|
+
const confirmResult = await confirm({
|
|
55
|
+
message: `Create functions/pgflow/?`,
|
|
56
|
+
});
|
|
57
|
+
if (confirmResult !== true) {
|
|
58
|
+
log.warn('Control Plane installation skipped');
|
|
59
|
+
return false;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
// Create the directory if it doesn't exist
|
|
63
|
+
if (!fs.existsSync(pgflowFunctionDir)) {
|
|
64
|
+
fs.mkdirSync(pgflowFunctionDir, { recursive: true });
|
|
65
|
+
}
|
|
66
|
+
// Create files
|
|
67
|
+
if (filesToCreate.some((f) => f.path === indexPath)) {
|
|
68
|
+
fs.writeFileSync(indexPath, INDEX_TS_TEMPLATE);
|
|
69
|
+
}
|
|
70
|
+
if (filesToCreate.some((f) => f.path === denoJsonPath)) {
|
|
71
|
+
fs.writeFileSync(denoJsonPath, DENO_JSON_TEMPLATE(getVersion()));
|
|
72
|
+
}
|
|
73
|
+
log.success('Control Plane installed');
|
|
74
|
+
return true;
|
|
75
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"create-example-worker.d.ts","sourceRoot":"","sources":["../../../src/commands/install/create-example-worker.ts"],"names":[],"mappings":"AA0BA,wBAAsB,mBAAmB,CAAC,EACxC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAkEnB"}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { log, confirm } from '@clack/prompts';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
import { getVersion } from '../../utils/get-version.js';
|
|
6
|
+
const INDEX_TS_TEMPLATE = `import { EdgeWorker } from '@pgflow/edge-worker';
|
|
7
|
+
import { GreetUser } from '../../flows/greet-user.ts';
|
|
8
|
+
|
|
9
|
+
EdgeWorker.start(GreetUser);
|
|
10
|
+
`;
|
|
11
|
+
const DENO_JSON_TEMPLATE = (version) => `{
|
|
12
|
+
"imports": {
|
|
13
|
+
"@pgflow/core": "npm:@pgflow/core@${version}",
|
|
14
|
+
"@pgflow/core/": "npm:@pgflow/core@${version}/",
|
|
15
|
+
"@pgflow/dsl": "npm:@pgflow/dsl@${version}",
|
|
16
|
+
"@pgflow/dsl/": "npm:@pgflow/dsl@${version}/",
|
|
17
|
+
"@pgflow/dsl/supabase": "npm:@pgflow/dsl@${version}/supabase",
|
|
18
|
+
"@pgflow/edge-worker": "jsr:@pgflow/edge-worker@${version}",
|
|
19
|
+
"@pgflow/edge-worker/": "jsr:@pgflow/edge-worker@${version}/",
|
|
20
|
+
"@pgflow/edge-worker/_internal": "jsr:@pgflow/edge-worker@${version}/_internal"
|
|
21
|
+
}
|
|
22
|
+
}
|
|
23
|
+
`;
|
|
24
|
+
export async function createExampleWorker({ supabasePath, autoConfirm = false, }) {
|
|
25
|
+
const functionsDir = path.join(supabasePath, 'functions');
|
|
26
|
+
const workerDir = path.join(functionsDir, 'greet-user-worker');
|
|
27
|
+
const indexPath = path.join(workerDir, 'index.ts');
|
|
28
|
+
const denoJsonPath = path.join(workerDir, 'deno.json');
|
|
29
|
+
// Relative paths for display
|
|
30
|
+
const relativeWorkerDir = 'supabase/functions/greet-user-worker';
|
|
31
|
+
const relativeIndexPath = `${relativeWorkerDir}/index.ts`;
|
|
32
|
+
const relativeDenoJsonPath = `${relativeWorkerDir}/deno.json`;
|
|
33
|
+
// Check what needs to be created
|
|
34
|
+
const filesToCreate = [];
|
|
35
|
+
if (!fs.existsSync(indexPath)) {
|
|
36
|
+
filesToCreate.push({ path: indexPath, relativePath: relativeIndexPath });
|
|
37
|
+
}
|
|
38
|
+
if (!fs.existsSync(denoJsonPath)) {
|
|
39
|
+
filesToCreate.push({ path: denoJsonPath, relativePath: relativeDenoJsonPath });
|
|
40
|
+
}
|
|
41
|
+
// If all files exist, return success
|
|
42
|
+
if (filesToCreate.length === 0) {
|
|
43
|
+
log.success('Example worker already up to date');
|
|
44
|
+
return false;
|
|
45
|
+
}
|
|
46
|
+
// Show preview and ask for confirmation only when not auto-confirming
|
|
47
|
+
if (!autoConfirm) {
|
|
48
|
+
const summaryMsg = [
|
|
49
|
+
`Create ${chalk.cyan('functions/greet-user-worker/')} ${chalk.dim('(example worker for GreetUser flow)')}:`,
|
|
50
|
+
'',
|
|
51
|
+
...filesToCreate.map((file) => ` ${chalk.bold(path.basename(file.relativePath))}`),
|
|
52
|
+
].join('\n');
|
|
53
|
+
log.info(summaryMsg);
|
|
54
|
+
const confirmResult = await confirm({
|
|
55
|
+
message: `Create functions/greet-user-worker/?`,
|
|
56
|
+
});
|
|
57
|
+
if (confirmResult !== true) {
|
|
58
|
+
log.warn('Example worker installation skipped');
|
|
59
|
+
return false;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
// Create the directory if it doesn't exist
|
|
63
|
+
if (!fs.existsSync(workerDir)) {
|
|
64
|
+
fs.mkdirSync(workerDir, { recursive: true });
|
|
65
|
+
}
|
|
66
|
+
// Create files
|
|
67
|
+
if (filesToCreate.some((f) => f.path === indexPath)) {
|
|
68
|
+
fs.writeFileSync(indexPath, INDEX_TS_TEMPLATE);
|
|
69
|
+
}
|
|
70
|
+
if (filesToCreate.some((f) => f.path === denoJsonPath)) {
|
|
71
|
+
fs.writeFileSync(denoJsonPath, DENO_JSON_TEMPLATE(getVersion()));
|
|
72
|
+
}
|
|
73
|
+
log.success('Example worker created');
|
|
74
|
+
return true;
|
|
75
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"create-flows-directory.d.ts","sourceRoot":"","sources":["../../../src/commands/install/create-flows-directory.ts"],"names":[],"mappings":"AA+BA,wBAAsB,oBAAoB,CAAC,EACzC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAiEnB"}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { log, confirm } from '@clack/prompts';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
const INDEX_TS_TEMPLATE = `// Re-export all flows from this directory
|
|
6
|
+
// Example: export { MyFlow } from './my-flow.ts';
|
|
7
|
+
|
|
8
|
+
export { GreetUser } from './greet-user.ts';
|
|
9
|
+
`;
|
|
10
|
+
const GREET_USER_TEMPLATE = `import { Flow } from '@pgflow/dsl';
|
|
11
|
+
|
|
12
|
+
type Input = {
|
|
13
|
+
firstName: string;
|
|
14
|
+
lastName: string;
|
|
15
|
+
};
|
|
16
|
+
|
|
17
|
+
export const GreetUser = new Flow<Input>({
|
|
18
|
+
slug: 'greetUser',
|
|
19
|
+
})
|
|
20
|
+
.step(
|
|
21
|
+
{ slug: 'fullName' },
|
|
22
|
+
(input) => \`\${input.run.firstName} \${input.run.lastName}\`
|
|
23
|
+
)
|
|
24
|
+
.step(
|
|
25
|
+
{ slug: 'greeting', dependsOn: ['fullName'] },
|
|
26
|
+
(input) => \`Hello, \${input.fullName}!\`
|
|
27
|
+
);
|
|
28
|
+
`;
|
|
29
|
+
/**
 * Scaffold the supabase/flows directory with an index.ts barrel file and the
 * GreetUser example flow. Files that already exist are left untouched; when
 * not auto-confirming, a preview is shown and the user is asked before any
 * file is written.
 *
 * @param {object} opts
 * @param {string} opts.supabasePath - Path to the supabase/ directory.
 * @param {boolean} [opts.autoConfirm=false] - Skip the interactive prompt.
 * @returns {Promise<boolean>} true when at least one file was created,
 *   false when everything was already present or the user declined.
 */
export async function createFlowsDirectory({ supabasePath, autoConfirm = false, }) {
    const flowsDir = path.join(supabasePath, 'flows');
    const relativeFlowsDir = 'supabase/flows';
    // Candidate files paired with their display paths and template contents.
    const candidates = [
        {
            path: path.join(flowsDir, 'index.ts'),
            relativePath: `${relativeFlowsDir}/index.ts`,
            content: INDEX_TS_TEMPLATE,
        },
        {
            path: path.join(flowsDir, 'greet-user.ts'),
            relativePath: `${relativeFlowsDir}/greet-user.ts`,
            content: GREET_USER_TEMPLATE,
        },
    ];
    // Only files that are missing on disk get created.
    const pending = candidates.filter((file) => !fs.existsSync(file.path));
    if (pending.length === 0) {
        log.success('Flows directory already up to date');
        return false;
    }
    // Preview and confirmation are skipped entirely under autoConfirm.
    if (!autoConfirm) {
        const previewLines = [
            `Create ${chalk.cyan('flows/')} ${chalk.dim('(flow definitions directory)')}:`,
            '',
        ];
        for (const file of pending) {
            previewLines.push(` ${chalk.bold(path.basename(file.relativePath))}`);
        }
        log.info(previewLines.join('\n'));
        const answer = await confirm({
            message: `Create flows/?`,
        });
        // confirm() can return a cancel symbol; only a literal true proceeds.
        if (answer !== true) {
            log.warn('Flows directory installation skipped');
            return false;
        }
    }
    // Ensure the directory exists before writing any files into it.
    if (!fs.existsSync(flowsDir)) {
        fs.mkdirSync(flowsDir, { recursive: true });
    }
    for (const file of pending) {
        fs.writeFileSync(file.path, file.content);
    }
    log.success('Flows directory created');
    return true;
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/install/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/install/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;yBAWzB,SAAS,OAAO;AAAhC,wBA6HE"}
|
|
@@ -1,8 +1,11 @@
|
|
|
1
|
-
import { intro,
|
|
1
|
+
import { intro, log, confirm, cancel, outro } from '@clack/prompts';
|
|
2
2
|
import chalk from 'chalk';
|
|
3
3
|
import { copyMigrations } from './copy-migrations.js';
|
|
4
4
|
import { updateConfigToml } from './update-config-toml.js';
|
|
5
5
|
import { updateEnvFile } from './update-env-file.js';
|
|
6
|
+
import { createEdgeFunction } from './create-edge-function.js';
|
|
7
|
+
import { createFlowsDirectory } from './create-flows-directory.js';
|
|
8
|
+
import { createExampleWorker } from './create-example-worker.js';
|
|
6
9
|
import { supabasePathPrompt } from './supabase-path-prompt.js';
|
|
7
10
|
export default (program) => {
|
|
8
11
|
program
|
|
@@ -12,79 +15,90 @@ export default (program) => {
|
|
|
12
15
|
.option('-y, --yes', 'Automatically confirm all prompts', false)
|
|
13
16
|
.action(async (options) => {
|
|
14
17
|
intro('Installing pgflow in your Supabase project');
|
|
15
|
-
//
|
|
16
|
-
const
|
|
17
|
-
|
|
18
|
-
supabasePath: () => supabasePathPrompt({ supabasePath: options.supabasePath }),
|
|
19
|
-
// Step 2: Update config.toml
|
|
20
|
-
configUpdate: async ({ results: { supabasePath } }) => {
|
|
21
|
-
if (!supabasePath)
|
|
22
|
-
return false;
|
|
23
|
-
return await updateConfigToml({
|
|
24
|
-
supabasePath,
|
|
25
|
-
autoConfirm: options.yes,
|
|
26
|
-
});
|
|
27
|
-
},
|
|
28
|
-
// Step 3: Copy migrations
|
|
29
|
-
migrations: async ({ results: { supabasePath } }) => {
|
|
30
|
-
if (!supabasePath)
|
|
31
|
-
return false;
|
|
32
|
-
return await copyMigrations({
|
|
33
|
-
supabasePath,
|
|
34
|
-
autoConfirm: options.yes,
|
|
35
|
-
});
|
|
36
|
-
},
|
|
37
|
-
// Step 4: Update environment variables
|
|
38
|
-
envFile: async ({ results: { supabasePath } }) => {
|
|
39
|
-
if (!supabasePath)
|
|
40
|
-
return false;
|
|
41
|
-
return await updateEnvFile({
|
|
42
|
-
supabasePath,
|
|
43
|
-
autoConfirm: options.yes,
|
|
44
|
-
});
|
|
45
|
-
},
|
|
46
|
-
}, {
|
|
47
|
-
// Handle cancellation
|
|
48
|
-
onCancel: () => {
|
|
49
|
-
cancel('Installation cancelled');
|
|
50
|
-
process.exit(1);
|
|
51
|
-
},
|
|
18
|
+
// Step 1: Get supabase path
|
|
19
|
+
const supabasePathResult = await supabasePathPrompt({
|
|
20
|
+
supabasePath: options.supabasePath,
|
|
52
21
|
});
|
|
53
|
-
|
|
54
|
-
const supabasePath = results.supabasePath;
|
|
55
|
-
const configUpdate = results.configUpdate;
|
|
56
|
-
const migrations = results.migrations;
|
|
57
|
-
const envFile = results.envFile;
|
|
58
|
-
// Exit if supabasePath is null (validation failed or user cancelled)
|
|
59
|
-
if (!supabasePath) {
|
|
22
|
+
if (!supabasePathResult || typeof supabasePathResult === 'symbol') {
|
|
60
23
|
cancel('Installation cancelled - valid Supabase path is required');
|
|
61
24
|
process.exit(1);
|
|
62
25
|
}
|
|
63
|
-
|
|
26
|
+
const supabasePath = supabasePathResult;
|
|
27
|
+
// Step 2: Show summary and get single confirmation
|
|
28
|
+
const summaryMsg = [
|
|
29
|
+
'This will:',
|
|
30
|
+
'',
|
|
31
|
+
` • Update ${chalk.cyan('supabase/config.toml')} ${chalk.dim('(enable pooler, per_worker runtime)')}`,
|
|
32
|
+
` • Add pgflow migrations to ${chalk.cyan('supabase/migrations/')}`,
|
|
33
|
+
` • Create ${chalk.cyan('supabase/flows/')} ${chalk.dim('(flow definitions with GreetUser example)')}`,
|
|
34
|
+
` • Create Control Plane in ${chalk.cyan('supabase/functions/pgflow/')}`,
|
|
35
|
+
` • Create ${chalk.cyan('supabase/functions/greet-user-worker/')} ${chalk.dim('(example worker)')}`,
|
|
36
|
+
` • Configure ${chalk.cyan('supabase/functions/.env')}`,
|
|
37
|
+
'',
|
|
38
|
+
` ${chalk.green('✓ Safe to re-run - completed steps will be skipped')}`,
|
|
39
|
+
].join('\n');
|
|
40
|
+
log.info(summaryMsg);
|
|
41
|
+
let shouldProceed = options.yes;
|
|
42
|
+
if (!options.yes) {
|
|
43
|
+
const confirmResult = await confirm({
|
|
44
|
+
message: 'Proceed?',
|
|
45
|
+
});
|
|
46
|
+
if (confirmResult !== true) {
|
|
47
|
+
cancel('Installation cancelled');
|
|
48
|
+
process.exit(1);
|
|
49
|
+
}
|
|
50
|
+
shouldProceed = true;
|
|
51
|
+
}
|
|
52
|
+
if (!shouldProceed) {
|
|
53
|
+
cancel('Installation cancelled');
|
|
54
|
+
process.exit(1);
|
|
55
|
+
}
|
|
56
|
+
// Step 3: Run all installation steps with autoConfirm
|
|
57
|
+
const configUpdate = await updateConfigToml({
|
|
58
|
+
supabasePath,
|
|
59
|
+
autoConfirm: true,
|
|
60
|
+
});
|
|
61
|
+
const migrations = await copyMigrations({
|
|
62
|
+
supabasePath,
|
|
63
|
+
autoConfirm: true,
|
|
64
|
+
});
|
|
65
|
+
const flowsDirectory = await createFlowsDirectory({
|
|
66
|
+
supabasePath,
|
|
67
|
+
autoConfirm: true,
|
|
68
|
+
});
|
|
69
|
+
const edgeFunction = await createEdgeFunction({
|
|
70
|
+
supabasePath,
|
|
71
|
+
autoConfirm: true,
|
|
72
|
+
});
|
|
73
|
+
const exampleWorker = await createExampleWorker({
|
|
74
|
+
supabasePath,
|
|
75
|
+
autoConfirm: true,
|
|
76
|
+
});
|
|
77
|
+
const envFile = await updateEnvFile({
|
|
78
|
+
supabasePath,
|
|
79
|
+
autoConfirm: true,
|
|
80
|
+
});
|
|
81
|
+
// Step 4: Show completion message
|
|
64
82
|
const outroMessages = [];
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
outroMessages.push(chalk.bold('pgflow setup completed successfully!'));
|
|
83
|
+
if (migrations || configUpdate || flowsDirectory || edgeFunction || exampleWorker || envFile) {
|
|
84
|
+
outroMessages.push(chalk.green.bold('✓ Installation complete!'));
|
|
68
85
|
}
|
|
69
86
|
else {
|
|
70
|
-
outroMessages.push(chalk.bold('pgflow is already
|
|
87
|
+
outroMessages.push(chalk.green.bold('✓ pgflow is already installed - no changes needed!'));
|
|
71
88
|
}
|
|
72
|
-
// Add
|
|
89
|
+
// Add numbered next steps
|
|
73
90
|
outroMessages.push('');
|
|
74
|
-
|
|
91
|
+
outroMessages.push('Next steps:');
|
|
92
|
+
let stepNumber = 1;
|
|
75
93
|
if (configUpdate || envFile) {
|
|
76
|
-
outroMessages.push(
|
|
94
|
+
outroMessages.push(` ${stepNumber}. Restart Supabase: ${chalk.cyan('supabase stop && supabase start')}`);
|
|
95
|
+
stepNumber++;
|
|
77
96
|
}
|
|
78
97
|
if (migrations) {
|
|
79
|
-
outroMessages.push(
|
|
80
|
-
|
|
81
|
-
// Always add documentation link with consistent formatting
|
|
82
|
-
if (outroMessages.length > 2) {
|
|
83
|
-
// If we have specific steps, add another newline
|
|
84
|
-
outroMessages.push('');
|
|
98
|
+
outroMessages.push(` ${stepNumber}. Apply migrations: ${chalk.cyan('supabase migrations up')}`);
|
|
99
|
+
stepNumber++;
|
|
85
100
|
}
|
|
86
|
-
outroMessages.push(
|
|
87
|
-
// Single outro for all paths
|
|
101
|
+
outroMessages.push(` ${stepNumber}. Run the example: ${chalk.blue.underline('https://pgflow.dev/get-started/flows/quickstart/')}`);
|
|
88
102
|
outro(outroMessages.join('\n'));
|
|
89
103
|
});
|
|
90
104
|
};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"supabase-path-prompt.d.ts","sourceRoot":"","sources":["../../../src/commands/install/supabase-path-prompt.ts"],"names":[],"mappings":"AAIA,wBAAsB,kBAAkB,CAAC,OAAO,CAAC,EAAE;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,
|
|
1
|
+
{"version":3,"file":"supabase-path-prompt.d.ts","sourceRoot":"","sources":["../../../src/commands/install/supabase-path-prompt.ts"],"names":[],"mappings":"AAIA,wBAAsB,kBAAkB,CAAC,OAAO,CAAC,EAAE;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,4BAwC3E"}
|
|
@@ -22,10 +22,6 @@ export async function supabasePathPrompt(options) {
|
|
|
22
22
|
break;
|
|
23
23
|
}
|
|
24
24
|
}
|
|
25
|
-
// Always prompt for detected paths - don't skip
|
|
26
|
-
if (detectedPath) {
|
|
27
|
-
log.info(`Found Supabase project at: ${detectedPath}`);
|
|
28
|
-
}
|
|
29
25
|
const promptMessage = 'Where is your Supabase project located?';
|
|
30
26
|
const supabasePath = await text({
|
|
31
27
|
message: promptMessage,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"update-config-toml.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-config-toml.ts"],"names":[],"mappings":"AAqBA;;;;;;;;;;;;;GAaG;AACH,wBAAsB,gBAAgB,CAAC,EACrC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,
|
|
1
|
+
{"version":3,"file":"update-config-toml.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-config-toml.ts"],"names":[],"mappings":"AAqBA;;;;;;;;;;;;;GAaG;AACH,wBAAsB,gBAAgB,CAAC,EACrC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CA4HnB"}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import fs from 'fs';
|
|
2
2
|
import path from 'path';
|
|
3
|
-
import { log, confirm
|
|
3
|
+
import { log, confirm } from '@clack/prompts';
|
|
4
4
|
import * as TOML from '@decimalturn/toml-patch';
|
|
5
5
|
import chalk from 'chalk';
|
|
6
6
|
/**
|
|
@@ -45,36 +45,42 @@ export async function updateConfigToml({ supabasePath, autoConfirm = false, }) {
|
|
|
45
45
|
currentSettings.poolMode !== 'transaction' ||
|
|
46
46
|
currentSettings.edgeRuntimePolicy !== 'per_worker';
|
|
47
47
|
if (!needsChanges) {
|
|
48
|
-
log.success('
|
|
48
|
+
log.success('Configuration already up to date');
|
|
49
49
|
return false;
|
|
50
50
|
}
|
|
51
|
-
|
|
52
|
-
if (currentSettings.poolerEnabled !== true) {
|
|
53
|
-
changes.push(`${chalk.bold('Enable connection pooler:')}
|
|
54
|
-
${chalk.red(`- enabled = ${currentSettings.poolerEnabled}`)}
|
|
55
|
-
${chalk.green('+ enabled = true')}`);
|
|
56
|
-
}
|
|
57
|
-
if (currentSettings.poolMode !== 'transaction') {
|
|
58
|
-
changes.push(`${chalk.bold('Set pool mode to transaction:')}
|
|
59
|
-
${chalk.red(`- pool_mode = "${currentSettings.poolMode}"`)}
|
|
60
|
-
${chalk.green('+ pool_mode = "transaction"')}`);
|
|
61
|
-
}
|
|
62
|
-
if (currentSettings.edgeRuntimePolicy !== 'per_worker') {
|
|
63
|
-
changes.push(`${chalk.bold('Set edge runtime policy:')}
|
|
64
|
-
${chalk.red(`- policy = "${currentSettings.edgeRuntimePolicy}"`)}
|
|
65
|
-
${chalk.green('+ policy = "per_worker"')}`);
|
|
66
|
-
}
|
|
67
|
-
note(changes.join('\n\n'), 'Required Configuration Changes');
|
|
68
|
-
let shouldContinue = autoConfirm;
|
|
51
|
+
// Show preview and ask for confirmation only when not auto-confirming
|
|
69
52
|
if (!autoConfirm) {
|
|
53
|
+
const changes = [];
|
|
54
|
+
// Connection pooler changes
|
|
55
|
+
const poolerChanges = [];
|
|
56
|
+
if (currentSettings.poolerEnabled !== true) {
|
|
57
|
+
poolerChanges.push(`enabled = ${currentSettings.poolerEnabled} ${chalk.dim('->')} ${chalk.green('true')}`);
|
|
58
|
+
}
|
|
59
|
+
if (currentSettings.poolMode !== 'transaction') {
|
|
60
|
+
poolerChanges.push(`pool_mode = "${currentSettings.poolMode}" ${chalk.dim('->')} ${chalk.green('"transaction"')}`);
|
|
61
|
+
}
|
|
62
|
+
if (poolerChanges.length > 0) {
|
|
63
|
+
changes.push(` ${chalk.bold('[db.pooler]')} ${chalk.dim('(required for pgflow worker)')}`);
|
|
64
|
+
poolerChanges.forEach(change => changes.push(` ${change}`));
|
|
65
|
+
}
|
|
66
|
+
// Edge runtime changes
|
|
67
|
+
if (currentSettings.edgeRuntimePolicy !== 'per_worker') {
|
|
68
|
+
changes.push(` ${chalk.bold('[edge_runtime]')} ${chalk.dim('(required for long-running tasks)')}`);
|
|
69
|
+
changes.push(` policy = "${currentSettings.edgeRuntimePolicy}" ${chalk.dim('->')} ${chalk.green('"per_worker"')}`);
|
|
70
|
+
}
|
|
71
|
+
const summaryMsg = [
|
|
72
|
+
`Update ${chalk.cyan('config.toml')}:`,
|
|
73
|
+
'',
|
|
74
|
+
...changes,
|
|
75
|
+
].join('\n');
|
|
76
|
+
log.info(summaryMsg);
|
|
70
77
|
const confirmResult = await confirm({
|
|
71
|
-
message: `Update
|
|
78
|
+
message: `Update config.toml? (backup will be created)`,
|
|
72
79
|
});
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
return false;
|
|
80
|
+
if (confirmResult !== true) {
|
|
81
|
+
log.warn('Configuration update skipped');
|
|
82
|
+
return false;
|
|
83
|
+
}
|
|
78
84
|
}
|
|
79
85
|
// Update Supabase configuration
|
|
80
86
|
// Create backup
|
|
@@ -109,7 +115,7 @@ ${chalk.green('+ policy = "per_worker"')}`);
|
|
|
109
115
|
log.error(`Failed to write ${configPath}: ${errorMsg}`);
|
|
110
116
|
throw new Error(`Failed to write ${configPath}: ${errorMsg}`);
|
|
111
117
|
}
|
|
112
|
-
log.success('
|
|
118
|
+
log.success('Configuration updated');
|
|
113
119
|
return true;
|
|
114
120
|
}
|
|
115
121
|
catch (error) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"update-env-file.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-env-file.ts"],"names":[],"mappings":"AAKA;;;;;;GAMG;AACH,wBAAsB,aAAa,CAAC,EAClC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,
|
|
1
|
+
{"version":3,"file":"update-env-file.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-env-file.ts"],"names":[],"mappings":"AAKA;;;;;;GAMG;AACH,wBAAsB,aAAa,CAAC,EAClC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CA2FnB"}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import fs from 'fs';
|
|
2
2
|
import path from 'path';
|
|
3
|
-
import { log,
|
|
3
|
+
import { log, confirm } from '@clack/prompts';
|
|
4
4
|
import chalk from 'chalk';
|
|
5
5
|
/**
|
|
6
6
|
* Updates the functions/.env file with required environment variables for pgflow
|
|
@@ -33,57 +33,35 @@ export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
|
|
|
33
33
|
}
|
|
34
34
|
// Prepare new content
|
|
35
35
|
let newContent = currentContent;
|
|
36
|
-
// Build diff preview
|
|
37
|
-
const missingVars = [];
|
|
38
|
-
const existingVars = [];
|
|
39
36
|
// Check which variables need to be added
|
|
37
|
+
const missingVars = [];
|
|
40
38
|
for (const [key, value] of Object.entries(envVars)) {
|
|
41
39
|
if (!newContent.includes(`${key}=`)) {
|
|
42
40
|
missingVars.push({ key, value });
|
|
43
41
|
}
|
|
44
|
-
else {
|
|
45
|
-
existingVars.push(key);
|
|
46
|
-
}
|
|
47
42
|
}
|
|
48
43
|
// If no changes needed, return early
|
|
49
44
|
if (missingVars.length === 0) {
|
|
50
|
-
log.success('Environment variables
|
|
45
|
+
log.success('Environment variables already configured');
|
|
51
46
|
return false;
|
|
52
47
|
}
|
|
53
|
-
|
|
54
|
-
// Build diff preview
|
|
55
|
-
const diffPreview = [];
|
|
56
|
-
if (isNewFile) {
|
|
57
|
-
diffPreview.push(`${chalk.green('Creating new .env file with:')}`);
|
|
58
|
-
}
|
|
59
|
-
else {
|
|
60
|
-
diffPreview.push(`${chalk.green('Adding to existing .env file:')}`);
|
|
61
|
-
}
|
|
62
|
-
// Show variables to be added
|
|
63
|
-
for (const { key, value } of missingVars) {
|
|
64
|
-
diffPreview.push(`${chalk.green('+')} ${key}="${value}"`);
|
|
65
|
-
}
|
|
66
|
-
// Show existing variables if any
|
|
67
|
-
if (existingVars.length > 0) {
|
|
68
|
-
diffPreview.push('');
|
|
69
|
-
diffPreview.push(`${chalk.yellow('Already present:')}`);
|
|
70
|
-
for (const key of existingVars) {
|
|
71
|
-
diffPreview.push(`${chalk.yellow('•')} ${key}`);
|
|
72
|
-
}
|
|
73
|
-
}
|
|
74
|
-
// Show the diff preview
|
|
75
|
-
note(diffPreview.join('\n'), 'Environment Variables');
|
|
76
|
-
// Ask for confirmation if not auto-confirming
|
|
77
|
-
let shouldContinue = autoConfirm;
|
|
48
|
+
// Show preview and ask for confirmation only when not auto-confirming
|
|
78
49
|
if (!autoConfirm) {
|
|
50
|
+
const summaryParts = [
|
|
51
|
+
isNewFile
|
|
52
|
+
? `Create ${chalk.cyan('functions/.env')} ${chalk.dim('(worker configuration)')}:`
|
|
53
|
+
: `Update ${chalk.cyan('functions/.env')} ${chalk.dim('(worker configuration)')}:`,
|
|
54
|
+
'',
|
|
55
|
+
...missingVars.map(({ key, value }) => ` ${chalk.bold(key)}="${value}"`),
|
|
56
|
+
];
|
|
57
|
+
log.info(summaryParts.join('\n'));
|
|
79
58
|
const confirmResult = await confirm({
|
|
80
|
-
message: `Update
|
|
59
|
+
message: isNewFile ? `Create functions/.env?` : `Update functions/.env?`,
|
|
81
60
|
});
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
return false;
|
|
61
|
+
if (confirmResult !== true) {
|
|
62
|
+
log.warn('Environment variable update skipped');
|
|
63
|
+
return false;
|
|
64
|
+
}
|
|
87
65
|
}
|
|
88
66
|
// Update environment variables
|
|
89
67
|
// Apply changes if confirmed
|
|
@@ -98,7 +76,7 @@ export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
|
|
|
98
76
|
// Write the file if changes were made
|
|
99
77
|
try {
|
|
100
78
|
fs.writeFileSync(envFilePath, newContent);
|
|
101
|
-
log.success('Environment variables
|
|
79
|
+
log.success('Environment variables configured');
|
|
102
80
|
return true;
|
|
103
81
|
}
|
|
104
82
|
catch (error) {
|
package/dist/index.js
CHANGED
|
@@ -1,10 +1,8 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { Command } from 'commander';
|
|
3
|
-
import { fileURLToPath } from 'url';
|
|
4
|
-
import { readFileSync } from 'fs';
|
|
5
|
-
import { dirname, join } from 'path';
|
|
6
3
|
import installCommand from './commands/install/index.js';
|
|
7
4
|
import compileCommand from './commands/compile/index.js';
|
|
5
|
+
import { getVersion } from './utils/get-version.js';
|
|
8
6
|
// Create a function to handle errors
|
|
9
7
|
const errorHandler = (error) => {
|
|
10
8
|
console.error('Error:', error instanceof Error ? error.message : String(error));
|
|
@@ -12,21 +10,6 @@ const errorHandler = (error) => {
|
|
|
12
10
|
};
|
|
13
11
|
// Set up process-wide unhandled rejection handler
|
|
14
12
|
process.on('unhandledRejection', errorHandler);
|
|
15
|
-
// Function to get version from package.json
|
|
16
|
-
function getVersion() {
|
|
17
|
-
const __filename = fileURLToPath(import.meta.url);
|
|
18
|
-
const __dirname = dirname(__filename);
|
|
19
|
-
const packageJsonPath = join(__dirname, '..', 'package.json');
|
|
20
|
-
try {
|
|
21
|
-
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
|
|
22
|
-
return packageJson.version || 'unknown';
|
|
23
|
-
}
|
|
24
|
-
catch (error) {
|
|
25
|
-
// Log error but don't display it to the user when showing version
|
|
26
|
-
console.error('Error reading package.json:', error);
|
|
27
|
-
return 'unknown';
|
|
28
|
-
}
|
|
29
|
-
}
|
|
30
13
|
const program = new Command();
|
|
31
14
|
program
|
|
32
15
|
.name('npx pgflow')
|
|
@@ -50,18 +33,19 @@ const g = chalk.hex('#9ece6a'); // vibrant green
|
|
|
50
33
|
const l = chalk.hex('#2ac3de'); // bright teal/cyan
|
|
51
34
|
// const o = chalk.hex('#ff9e64'); // orange
|
|
52
35
|
// const w = chalk.hex('#f7768e'); // magenta/pink
|
|
36
|
+
const d = chalk.dim; // dim for secondary text
|
|
53
37
|
const banner = [
|
|
54
|
-
`
|
|
55
|
-
`
|
|
56
|
-
`
|
|
57
|
-
`
|
|
58
|
-
`
|
|
59
|
-
`
|
|
38
|
+
` ${l('__ _')}`,
|
|
39
|
+
` ${g('_ __ __ _')} ${l('/ _| | _____ __')}`,
|
|
40
|
+
` ${g("| '_ \\ / _'")} ${l('| |_| |/ _ \\ \\ /\\ / /')}`,
|
|
41
|
+
` ${g('| |_) | (_|')} ${l('| _| | (_) \\ V V /')}`,
|
|
42
|
+
` ${g('| .__/ \\__,')} ${l('|_| |_|\\___/ \\_/\\_/')}`,
|
|
43
|
+
` ${g('|_| |___/')} ${d('v' + getVersion())}`,
|
|
44
|
+
``,
|
|
45
|
+
` ${l('Workflows in Supabase')} ${d('|')} ${g.underline('pgflow.dev')}`,
|
|
60
46
|
].join('\n');
|
|
61
47
|
console.log(banner);
|
|
62
48
|
console.log();
|
|
63
|
-
console.log();
|
|
64
|
-
console.log();
|
|
65
49
|
// Use a promise-aware approach to parse arguments
|
|
66
50
|
async function main() {
|
|
67
51
|
try {
|
package/dist/package.json
CHANGED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"get-version.d.ts","sourceRoot":"","sources":["../../src/utils/get-version.ts"],"names":[],"mappings":"AAIA;;;GAGG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAanC"}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { fileURLToPath } from 'url';
import { readFileSync } from 'fs';
import { dirname, join } from 'path';
/**
 * Resolve the CLI's own version string.
 *
 * Reads the "version" field from the package.json located two directories
 * above this compiled module (dist/utils/ -> package root). Returns
 * 'unknown' when the manifest is missing, unreadable, or has no version.
 */
export function getVersion() {
    const moduleDir = dirname(fileURLToPath(import.meta.url));
    const manifestPath = join(moduleDir, '..', '..', 'package.json');
    try {
        const manifest = JSON.parse(readFileSync(manifestPath, 'utf8'));
        return manifest.version || 'unknown';
    }
    catch (error) {
        // Log error but don't display it to the user when showing version
        console.error('Error reading package.json:', error);
        return 'unknown';
    }
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "pgflow",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.9.0",
|
|
4
4
|
"license": "Apache-2.0",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -25,7 +25,7 @@
|
|
|
25
25
|
"@decimalturn/toml-patch": "0.3.7",
|
|
26
26
|
"chalk": "^5.4.1",
|
|
27
27
|
"commander": "^13.1.0",
|
|
28
|
-
"@pgflow/core": "0.
|
|
28
|
+
"@pgflow/core": "0.9.0"
|
|
29
29
|
},
|
|
30
30
|
"publishConfig": {
|
|
31
31
|
"access": "public"
|
|
@@ -1,55 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* internal_compile.js
|
|
3
|
-
*
|
|
4
|
-
* This script is executed by Deno to compile a Flow into SQL statements.
|
|
5
|
-
* It takes a path to a flow file as an argument, imports the default export,
|
|
6
|
-
* and passes it to compileFlow() from the DSL package.
|
|
7
|
-
*/
|
|
8
|
-
|
|
9
|
-
// Import the compileFlow function directly from @pgflow/dsl
|
|
10
|
-
// The import map in deno.json will resolve this import
|
|
11
|
-
import { compileFlow } from '@pgflow/dsl';
|
|
12
|
-
|
|
13
|
-
// Get the flow file path from command line arguments
|
|
14
|
-
const flowFilePath = Deno.args[0];
|
|
15
|
-
|
|
16
|
-
if (!flowFilePath) {
|
|
17
|
-
console.error('Error: No flow file path provided');
|
|
18
|
-
Deno.exit(1);
|
|
19
|
-
}
|
|
20
|
-
|
|
21
|
-
try {
|
|
22
|
-
// Dynamically import the flow file
|
|
23
|
-
const flowModule = await import(`file://${flowFilePath}`);
|
|
24
|
-
|
|
25
|
-
// Check if there's a default export
|
|
26
|
-
if (!flowModule.default) {
|
|
27
|
-
console.error(`Error: No default export found in ${flowFilePath}`);
|
|
28
|
-
Deno.exit(1);
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
// Get the flow instance
|
|
32
|
-
const flow = flowModule.default;
|
|
33
|
-
|
|
34
|
-
// Compile the flow to SQL
|
|
35
|
-
const sqlStatements = compileFlow(flow);
|
|
36
|
-
|
|
37
|
-
// Output the SQL statements to stdout
|
|
38
|
-
console.log(sqlStatements.join('\n'));
|
|
39
|
-
} catch (error) {
|
|
40
|
-
console.error(`Error compiling flow: ${error.message}`);
|
|
41
|
-
|
|
42
|
-
// If the error is related to importing compileFlow, provide more detailed error
|
|
43
|
-
if (error.message.includes('@pgflow/dsl')) {
|
|
44
|
-
console.error(
|
|
45
|
-
'Failed to import compileFlow from @pgflow/dsl. This might be due to:'
|
|
46
|
-
);
|
|
47
|
-
console.error(
|
|
48
|
-
'1. The function not being exported correctly from the package'
|
|
49
|
-
);
|
|
50
|
-
console.error('2. A version mismatch between the CLI and DSL packages');
|
|
51
|
-
console.error('3. Issues with the Deno import map configuration');
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
Deno.exit(1);
|
|
55
|
-
}
|