pgflow 0.0.0-test-snapshot-releases2-8d5d9bc1-20250922101158 → 0.0.0-update-supabase-868977e5-20251119071204
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -5
- package/dist/commands/compile/index.d.ts +4 -0
- package/dist/commands/compile/index.d.ts.map +1 -0
- package/dist/commands/compile/index.js +201 -0
- package/dist/commands/install/copy-migrations.d.ts +5 -0
- package/dist/commands/install/copy-migrations.d.ts.map +1 -0
- package/dist/commands/install/copy-migrations.js +253 -0
- package/dist/commands/install/index.d.ts +4 -0
- package/dist/commands/install/index.d.ts.map +1 -0
- package/dist/commands/install/index.js +90 -0
- package/dist/commands/install/supabase-path-prompt.d.ts +4 -0
- package/dist/commands/install/supabase-path-prompt.d.ts.map +1 -0
- package/dist/commands/install/supabase-path-prompt.js +49 -0
- package/dist/commands/install/update-config-toml.d.ts +19 -0
- package/dist/commands/install/update-config-toml.d.ts.map +1 -0
- package/dist/commands/install/update-config-toml.js +119 -0
- package/dist/commands/install/update-env-file.d.ts +12 -0
- package/dist/commands/install/update-env-file.d.ts.map +1 -0
- package/dist/commands/install/update-env-file.js +109 -0
- package/dist/deno/internal_compile.js +55 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +90 -0
- package/dist/package.json +39 -0
- package/dist/tsconfig.lib.tsbuildinfo +1 -0
- package/package.json +4 -3
package/README.md
CHANGED
```diff
@@ -1,4 +1,4 @@
-#
+# pgflow CLI
 
 The Command Line Interface for pgflow - a PostgreSQL-native workflow engine.
 
@@ -7,7 +7,7 @@ The Command Line Interface for pgflow - a PostgreSQL-native workflow engine.
 
 ## Overview
 
-
+This package provides essential tools for setting up, managing, and deploying pgflow workflows in your Supabase environment. The CLI handles:
 
 - Installing pgflow in your Supabase project
 - Compiling TypeScript workflow definitions into SQL migrations
@@ -15,8 +15,8 @@ The Command Line Interface for pgflow - a PostgreSQL-native workflow engine.
 
 ## Prerequisites
 
-- Supabase CLI v2.
-- Deno
+- Supabase CLI v2.34.3 or higher
+- Deno v2.1.x or higher (for flow compilation)
 - Local Supabase project initialized
 
 ## Installation
@@ -70,7 +70,7 @@ npx pgflow@latest compile supabase/functions/_flows/my_flow.ts
 
 Options:
 
-- `--deno-json <path>` - Path to custom deno.json
+- `--deno-json <path>` - Path to custom deno.json configuration file
 - `--supabase-path <path>` - Path to custom Supabase directory
 
 The compiler will:
```
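For context on what `compile` consumes: the flow file must default-export a flow built with `@pgflow/dsl` (the Deno shim shipped in this release imports that default export and passes it to `compileFlow()`). A minimal sketch follows; the `Flow` builder options and step-handler signature are assumptions about the DSL, not taken from this diff - only the default-export requirement and the `supabase/functions/_flows/` location are.

```ts
// supabase/functions/_flows/my_flow.ts - hypothetical example flow.
// The slug is what ends up in the generated migration's
// `SELECT pgflow.create_flow('my_flow', ...)` statement.
import { Flow } from '@pgflow/dsl';

export default new Flow<{ name: string }>({ slug: 'my_flow' })
  // step shape is an assumed @pgflow/dsl API; check the installed version
  .step({ slug: 'greet' }, async (input) => `Hello, ${input.run.name}!`);
```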
package/dist/commands/compile/index.d.ts.map
ADDED
```json
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;yBAmDzB,SAAS,OAAO;AAAhC,wBAsJE"}
```
package/dist/commands/compile/index.js
ADDED
```js
import chalk from 'chalk';
import { intro, log, note, outro } from '@clack/prompts';
import path from 'path';
import fs from 'fs';
import { spawn } from 'child_process';
import { fileURLToPath } from 'url';

// Get the directory name in ES modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Formats a command and its arguments for display with syntax highlighting
 * Each argument is displayed on a separate line for better readability
 */
function formatCommand(command, args) {
  const cmd = chalk.cyan(command);
  const formattedArgs = args.map((arg) => {
    // Highlight config and file paths differently
    if (arg.startsWith('--config=')) {
      const [flag, value] = arg.split('=');
      return ` ${chalk.yellow(flag)}=${chalk.green(value)}`;
    }
    else if (arg.startsWith('--')) {
      return ` ${chalk.yellow(arg)}`;
    }
    else if (arg.endsWith('.ts') || arg.endsWith('.json')) {
      return ` ${chalk.green(arg)}`;
    }
    return ` ${chalk.white(arg)}`;
  });
  return `$ ${cmd}\n${formattedArgs.join('\n')}`;
}

/**
 * Creates a task log entry with a command and its output
 */
function createTaskLog(command, args, output) {
  return [
    chalk.bold('Command:'),
    formatCommand(command, args),
    '',
    chalk.bold('Output:'),
    output.trim() ? output.trim() : '(no output)',
  ].join('\n');
}

export default (program) => {
  program
    .command('compile')
    .description('Compiles a TypeScript-defined flow into SQL migration')
    .argument('<flowPath>', 'Path to the flow TypeScript file')
    .option('--deno-json <denoJsonPath>', 'Path to deno.json configuration file')
    .option('--supabase-path <supabasePath>', 'Path to the Supabase folder')
    .action(async (flowPath, options) => {
      intro('pgflow - Compile Flow to SQL');
      try {
        // Resolve paths
        const resolvedFlowPath = path.resolve(process.cwd(), flowPath);
        // Only resolve denoJsonPath if it's provided
        let resolvedDenoJsonPath;
        if (options.denoJson) {
          resolvedDenoJsonPath = path.resolve(process.cwd(), options.denoJson);
          // Validate deno.json path if provided
          if (!fs.existsSync(resolvedDenoJsonPath)) {
            log.error(`deno.json file not found: ${resolvedDenoJsonPath}`);
            process.exit(1);
          }
        }
        // Validate flow path
        if (!fs.existsSync(resolvedFlowPath)) {
          log.error(`Flow file not found: ${resolvedFlowPath}`);
          process.exit(1);
        }
        // Validate Supabase path
        let supabasePath;
        if (options.supabasePath) {
          supabasePath = path.resolve(process.cwd(), options.supabasePath);
        }
        else {
          // Default to ./supabase/ if not provided
          supabasePath = path.resolve(process.cwd(), 'supabase');
        }
        // Check if Supabase path exists
        if (!fs.existsSync(supabasePath)) {
          log.error(`Supabase directory not found: ${supabasePath}\n` +
            `Please provide a valid Supabase path using --supabase-path option or ensure ./supabase/ directory exists.`);
          process.exit(1);
        }
        // Find the internal_compile.js script
        const internalCompileScript = path.resolve(__dirname, '../../deno/internal_compile.js');
        // Create migrations directory if it doesn't exist
        const migrationsDir = path.resolve(supabasePath, 'migrations');
        if (!fs.existsSync(migrationsDir)) {
          fs.mkdirSync(migrationsDir, { recursive: true });
          log.success(`Created migrations directory: ${migrationsDir}`);
        }
        // Generate timestamp for migration file in format YYYYMMDDHHMMSS using UTC
        const now = new Date();
        const timestamp = [
          now.getUTCFullYear(),
          String(now.getUTCMonth() + 1).padStart(2, '0'),
          String(now.getUTCDate()).padStart(2, '0'),
          String(now.getUTCHours()).padStart(2, '0'),
          String(now.getUTCMinutes()).padStart(2, '0'),
          String(now.getUTCSeconds()).padStart(2, '0'),
        ].join('');
        // Run the compilation
        log.info(`Compiling flow: ${path.basename(resolvedFlowPath)}`);
        const compiledSql = await runDenoCompilation(internalCompileScript, resolvedFlowPath, resolvedDenoJsonPath);
        // Extract flow name from the first line of the SQL output using regex
        // Looking for pattern: SELECT pgflow.create_flow('flow_name', ...);
        const flowNameMatch = compiledSql.match(/SELECT\s+pgflow\.create_flow\s*\(\s*'([^']+)'/i);
        // Use extracted flow name or fallback to the file basename if extraction fails
        let flowName;
        if (flowNameMatch && flowNameMatch[1]) {
          flowName = flowNameMatch[1];
          log.info(`Extracted flow name: ${flowName}`);
        }
        else {
          // Fallback to file basename if regex doesn't match
          flowName = path.basename(resolvedFlowPath, path.extname(resolvedFlowPath));
          log.warn(`Could not extract flow name from SQL, using file basename: ${flowName}`);
        }
        // Create migration filename in the format: <timestamp>_create_<flow_name>_flow.sql
        const migrationFileName = `${timestamp}_create_${flowName}_flow.sql`;
        const migrationFilePath = path.join(migrationsDir, migrationFileName);
        // Write the SQL to a migration file
        fs.writeFileSync(migrationFilePath, compiledSql);
        // Show the migration file path relative to the current directory
        const relativeFilePath = path.relative(process.cwd(), migrationFilePath);
        log.success(`Migration file created: ${relativeFilePath}`);
        // Display next steps with outro
        outro([
          chalk.bold('Flow compilation completed successfully!'),
          '',
          `- Run ${chalk.cyan('supabase migration up')} to apply the migration`,
          '',
          chalk.bold('Continue the setup:'),
          chalk.blue.underline('https://pgflow.dev/getting-started/run-flow/')
        ].join('\n'));
      }
      catch (error) {
        log.error(`Compilation failed: ${error instanceof Error ? error.message : String(error)}`);
        outro([
          chalk.bold('Compilation failed!'),
          '',
          chalk.bold('For troubleshooting help:'),
          chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/')
        ].join('\n'));
        process.exit(1);
      }
    });
};

/**
 * Runs the Deno compilation script and returns the compiled SQL
 */
async function runDenoCompilation(scriptPath, flowPath, denoJsonPath) {
  return new Promise((resolve, reject) => {
    // Validate input paths
    if (!scriptPath) {
      return reject(new Error('Internal script path is required'));
    }
    if (!flowPath) {
      return reject(new Error('Flow path is required'));
    }
    // Build the command arguments array
    const args = ['run', '--allow-read', '--allow-net', '--allow-env'];
    // Only add the config argument if denoJsonPath is provided and valid
    if (denoJsonPath && typeof denoJsonPath === 'string') {
      args.push(`--config=${denoJsonPath}`);
    }
    // Add the script path and flow path
    args.push(scriptPath, flowPath);
    // Log the command for debugging with colored output
    log.info('Running Deno compiler');
    const deno = spawn('deno', args);
    let stdout = '';
    let stderr = '';
    deno.stdout.on('data', (data) => {
      stdout += data.toString();
    });
    deno.stderr.on('data', (data) => {
      stderr += data.toString();
    });
    deno.on('close', (code) => {
      // Always display the task log with command and output
      note(createTaskLog('deno', args, stdout));
      if (code === 0) {
        if (stdout.trim().length === 0) {
          reject(new Error('Compilation produced no output'));
        }
        else {
          resolve(stdout);
        }
      }
      else {
        reject(new Error(`Deno process exited with code ${code}${stderr ? `\n${stderr}` : ''}`));
      }
    });
    deno.on('error', (err) => {
      reject(new Error(`Failed to start Deno process: ${err.message}. Make sure Deno is installed.`));
    });
  });
}
```
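A quick way to see the filename scheme implemented above: the timestamp is always UTC in `YYYYMMDDHHMMSS` form, and the flow name comes from the compiled SQL. A standalone sketch (the date and flow name are hypothetical):

```ts
// Reproduces the migration filename construction from compile/index.js.
const now = new Date(Date.UTC(2025, 10, 19, 7, 12, 4)); // months are 0-indexed
const timestamp = [
  now.getUTCFullYear(),
  String(now.getUTCMonth() + 1).padStart(2, '0'),
  String(now.getUTCDate()).padStart(2, '0'),
  String(now.getUTCHours()).padStart(2, '0'),
  String(now.getUTCMinutes()).padStart(2, '0'),
  String(now.getUTCSeconds()).padStart(2, '0'),
].join('');
console.log(`${timestamp}_create_my_flow_flow.sql`);
// -> 20251119071204_create_my_flow_flow.sql
```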
package/dist/commands/install/copy-migrations.d.ts.map
ADDED
```json
{"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AA6JA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CA6LnB"}
```
package/dist/commands/install/copy-migrations.js
ADDED
```js
import fs from 'fs';
import path from 'path';
import { createRequire } from 'module';
import { fileURLToPath } from 'url';
import { log, confirm, note } from '@clack/prompts';
import chalk from 'chalk';

// Get the directory name in ES modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Create a require function to use require.resolve
const require = createRequire(import.meta.url);

// Function to find migrations directory
function findMigrationsDirectory() {
  try {
    // First try: resolve from installed @pgflow/core package
    const corePackageJsonPath = require.resolve('@pgflow/core/package.json');
    const corePackageFolder = path.dirname(corePackageJsonPath);
    const packageMigrationsPath = path.join(corePackageFolder, 'dist', 'supabase', 'migrations');
    if (fs.existsSync(packageMigrationsPath)) {
      return packageMigrationsPath;
    }
    // If that fails, try development path
    log.info('Could not find migrations in installed package, trying development paths...');
  }
  catch (error) {
    log.info('Could not resolve @pgflow/core package, trying development paths...');
  }
  // Try development paths
  // 1. Try relative to CLI dist folder (when running built CLI)
  const distRelativePath = path.resolve(__dirname, '../../../../core/supabase/migrations');
  if (fs.existsSync(distRelativePath)) {
    return distRelativePath;
  }
  // 2. Try relative to CLI source folder (when running from source)
  const sourceRelativePath = path.resolve(__dirname, '../../../../../core/supabase/migrations');
  if (fs.existsSync(sourceRelativePath)) {
    return sourceRelativePath;
  }
  // 3. Try local migrations directory (for backward compatibility)
  const localMigrationsPath = path.resolve(__dirname, '../../migrations');
  if (fs.existsSync(localMigrationsPath)) {
    return localMigrationsPath;
  }
  // No migrations found
  return null;
}

// Helper function to get the timestamp part from a migration filename
function getTimestampFromFilename(filename) {
  const match = filename.match(/^(\d+)_/);
  // Return the timestamp only if it exists and has the correct length (14 digits)
  if (match && match[1] && match[1].length === 14 && /^\d{14}$/.test(match[1])) {
    return match[1];
  }
  return '';
}

// Helper function to format a Date object into a migration timestamp string (YYYYMMDDhhmmss) using UTC
function formatDateToTimestamp(date) {
  const year = date.getUTCFullYear();
  const month = String(date.getUTCMonth() + 1).padStart(2, '0');
  const day = String(date.getUTCDate()).padStart(2, '0');
  const hours = String(date.getUTCHours()).padStart(2, '0');
  const minutes = String(date.getUTCMinutes()).padStart(2, '0');
  const seconds = String(date.getUTCSeconds()).padStart(2, '0');
  return `${year}${month}${day}${hours}${minutes}${seconds}`;
}

// Helper function to parse a timestamp string into a Date object (interpreted as UTC)
function parseTimestampToDate(timestamp) {
  // Validate format: YYYYMMDDhhmmss
  if (!timestamp || timestamp.length !== 14 || !/^\d{14}$/.test(timestamp)) {
    return null;
  }
  const year = parseInt(timestamp.substring(0, 4), 10);
  const month = parseInt(timestamp.substring(4, 6), 10) - 1; // months are 0-indexed in JS Date
  const day = parseInt(timestamp.substring(6, 8), 10);
  const hours = parseInt(timestamp.substring(8, 10), 10);
  const minutes = parseInt(timestamp.substring(10, 12), 10);
  const seconds = parseInt(timestamp.substring(12, 14), 10);
  // Create date in UTC and validate (invalid dates like Feb 31 will be auto-corrected by JS Date)
  const date = new Date(Date.UTC(year, month, day, hours, minutes, seconds));
  // Additional validation to ensure the parsed date matches the input
  // This catches edge cases like month=13 that JS Date would autocorrect
  if (date.getUTCFullYear() !== year ||
    date.getUTCMonth() !== month ||
    date.getUTCDate() !== day ||
    date.getUTCHours() !== hours ||
    date.getUTCMinutes() !== minutes ||
    date.getUTCSeconds() !== seconds) {
    return null;
  }
  return date;
}

// Helper function to generate a new timestamp that's higher than the reference timestamp (using UTC)
function generateNewTimestamp(referenceTimestamp, increment = 1) {
  // First try to parse the reference timestamp to a Date
  const parsedDate = parseTimestampToDate(referenceTimestamp);
  // If we couldn't parse it, use current UTC time
  if (!parsedDate) {
    return formatDateToTimestamp(new Date());
  }
  // Add the specified number of seconds (default: 1)
  parsedDate.setUTCSeconds(parsedDate.getUTCSeconds() + increment);
  // Get current UTC time for comparison
  const now = new Date();
  // Return either the incremented timestamp or current time, whichever is later
  // This ensures we never go backwards in time
  if (parsedDate > now) {
    return formatDateToTimestamp(parsedDate);
  }
  else {
    // If we're already at or past current time, add increment to now
    now.setUTCSeconds(now.getUTCSeconds() + increment);
    return formatDateToTimestamp(now);
  }
}

// Find the migrations directory
const sourcePath = findMigrationsDirectory();

export async function copyMigrations({ supabasePath, autoConfirm = false, }) {
  const migrationsPath = path.join(supabasePath, 'migrations');
  if (!fs.existsSync(migrationsPath)) {
    fs.mkdirSync(migrationsPath);
  }
  // Check if pgflow migrations directory exists
  if (!sourcePath || !fs.existsSync(sourcePath)) {
    log.error(`Could not find migrations directory`);
    log.warn('This might happen if @pgflow/core is not properly installed or built.');
    log.info('Make sure @pgflow/core is installed and contains the migrations.');
    log.info('If running in development mode, try building the core package first with: nx build core');
    return false;
  }
  // Get all existing migrations in user's directory
  const existingFiles = fs.existsSync(migrationsPath)
    ? fs.readdirSync(migrationsPath)
    : [];
  // Find the latest migration timestamp in user's directory
  let latestTimestamp = '00000000000000';
  for (const file of existingFiles) {
    if (file.endsWith('.sql')) {
      const timestamp = getTimestampFromFilename(file);
      // Only consider timestamps that have been validated by getTimestampFromFilename
      // to have the correct length and format
      if (timestamp && timestamp.length === 14) {
        const parsedDate = parseTimestampToDate(timestamp);
        // If we have a valid date and this timestamp is newer, update latestTimestamp
        if (parsedDate && parseInt(timestamp, 10) > parseInt(latestTimestamp, 10)) {
          latestTimestamp = timestamp;
        }
      }
    }
  }
  // Get all source migrations
  const sourceFiles = fs
    .readdirSync(sourcePath)
    .filter((file) => file.endsWith('.sql'));
  const filesToCopy = [];
  const skippedFiles = [];
  // Check which migrations need to be installed
  for (const sourceFile of sourceFiles) {
    // Check if this migration is already installed (by checking if the original filename
    // appears in any existing migration filename)
    const isAlreadyInstalled = existingFiles.some((existingFile) => existingFile.includes(sourceFile));
    if (isAlreadyInstalled) {
      skippedFiles.push(sourceFile);
    }
    else {
      filesToCopy.push({
        source: sourceFile,
        destination: sourceFile, // Will be updated later with new timestamp
      });
    }
  }
  // If no files to copy, show message with details and return false (no changes made)
  if (filesToCopy.length === 0) {
    // Show success message
    log.success('All pgflow migrations are already in place');
    // Show details of already installed migrations
    if (skippedFiles.length > 0) {
      const detailedMsg = [
        'Already installed migrations:',
        ...skippedFiles.map((file) => {
          // Find the matching existing file to show how it was installed
          const matchingFile = existingFiles.find((existingFile) => existingFile.includes(file));
          if (matchingFile === file) {
            // Installed with old direct method
            return ` ${chalk.dim('•')} ${chalk.bold(file)}`;
          }
          else {
            // Installed with new timestamped method
            const timestampPart = matchingFile?.substring(0, matchingFile.indexOf(file) - 1) || '';
            return ` ${chalk.dim('•')} ${chalk.dim(timestampPart + '_')}${chalk.bold(file)}`;
          }
        }),
      ].join('\n');
      note(detailedMsg, 'Existing pgflow Migrations');
    }
    return false;
  }
  // Generate new timestamps for migrations to install
  let baseTimestamp = latestTimestamp;
  filesToCopy.forEach((file) => {
    // Generate timestamp with increasing values to maintain order
    // Each iteration uses the timestamp generated by the previous iteration as the base
    baseTimestamp = generateNewTimestamp(baseTimestamp);
    // Create new filename with format: newTimestamp_originalFilename
    file.destination = `${baseTimestamp}_${file.source}`;
  });
  log.info(`Found ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to install`);
  // Prepare summary message with colored output
  const summaryParts = [];
  if (filesToCopy.length > 0) {
    summaryParts.push(`${chalk.green('New migrations to install:')}\n${filesToCopy
      .map((file) => {
        // Extract the timestamp part from the new filename
        const newTimestamp = file.destination.substring(0, 14);
        // Format: dim timestamp + bright original name
        return `${chalk.green('+')} ${file.source} → ${chalk.dim(newTimestamp + '_')}${chalk.bold(file.source)}`;
      })
      .join('\n')}`);
  }
  if (skippedFiles.length > 0) {
    summaryParts.push(`${chalk.yellow('Already installed:')}\n${skippedFiles
      .map((file) => `${chalk.yellow('•')} ${file}`)
      .join('\n')}`);
  }
  // Show summary and ask for confirmation if not auto-confirming
  note(summaryParts.join('\n\n'), 'pgflow Migrations');
  let shouldContinue = autoConfirm;
  if (!autoConfirm) {
    const confirmResult = await confirm({
      message: `Install ${filesToCopy.length} new migration${filesToCopy.length !== 1 ? 's' : ''}?`,
    });
    shouldContinue = confirmResult === true;
  }
  if (!shouldContinue) {
    log.warn('Migration installation skipped');
    return false;
  }
  // Install migrations with new filenames
  for (const file of filesToCopy) {
    const sourcePath1 = path.join(sourcePath, file.source);
    const destinationPath = path.join(migrationsPath, file.destination);
    fs.copyFileSync(sourcePath1, destinationPath);
  }
  // Show detailed success message with styled filenames
  const detailedSuccessMsg = [
    `Installed ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to your Supabase project:`,
    ...filesToCopy.map((file) => {
      const newTimestamp = file.destination.substring(0, 14);
      return ` ${chalk.dim(newTimestamp + '_')}${chalk.bold(file.source)}`;
    }),
  ].join('\n');
  log.success(detailedSuccessMsg);
  return true; // Return true to indicate migrations were copied
}
```
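The "already installed" test above is a plain substring check, so a pgflow migration counts as present whether it was copied verbatim (the old direct scheme) or re-stamped as `<timestamp>_<original>` (the new scheme). A small sketch with hypothetical filenames:

```ts
// Mirrors the isAlreadyInstalled check in copyMigrations above.
const existingFiles = [
  '20250101120000_create_users.sql',       // user's own migration
  '20251119071205_000_initial_pgflow.sql', // re-stamped pgflow migration
];
const sourceFile = '000_initial_pgflow.sql';
const isAlreadyInstalled = existingFiles.some((f) => f.includes(sourceFile));
console.log(isAlreadyInstalled); // true - skipped on the next install run
```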
package/dist/commands/install/index.d.ts.map
ADDED
```json
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/install/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;yBAQzB,SAAS,OAAO;AAAhC,wBAiHE"}
```
package/dist/commands/install/index.js
ADDED
```js
import { intro, group, cancel, outro } from '@clack/prompts';
import chalk from 'chalk';
import { copyMigrations } from './copy-migrations.js';
import { updateConfigToml } from './update-config-toml.js';
import { updateEnvFile } from './update-env-file.js';
import { supabasePathPrompt } from './supabase-path-prompt.js';

export default (program) => {
  program
    .command('install')
    .description('Set up pgflow in your Supabase project')
    .option('--supabase-path <path>', 'Path to the Supabase folder')
    .option('-y, --yes', 'Automatically confirm all prompts', false)
    .action(async (options) => {
      intro('Installing pgflow in your Supabase project');
      // Use the group feature to organize installation steps
      const results = await group({
        // Step 1: Determine Supabase path
        supabasePath: () => supabasePathPrompt({ supabasePath: options.supabasePath }),
        // Step 2: Update config.toml
        configUpdate: async ({ results: { supabasePath } }) => {
          if (!supabasePath)
            return false;
          return await updateConfigToml({
            supabasePath,
            autoConfirm: options.yes,
          });
        },
        // Step 3: Copy migrations
        migrations: async ({ results: { supabasePath } }) => {
          if (!supabasePath)
            return false;
          return await copyMigrations({
            supabasePath,
            autoConfirm: options.yes,
          });
        },
        // Step 4: Update environment variables
        envFile: async ({ results: { supabasePath } }) => {
          if (!supabasePath)
            return false;
          return await updateEnvFile({
            supabasePath,
            autoConfirm: options.yes,
          });
        },
      }, {
        // Handle cancellation
        onCancel: () => {
          cancel('Installation cancelled');
          process.exit(1);
        },
      });
      // Extract the results from the group operation
      const supabasePath = results.supabasePath;
      const configUpdate = results.configUpdate;
      const migrations = results.migrations;
      const envFile = results.envFile;
      // Exit if supabasePath is null (validation failed or user cancelled)
      if (!supabasePath) {
        cancel('Installation cancelled - valid Supabase path is required');
        process.exit(1);
      }
      // Show completion message
      const outroMessages = [];
      // Always start with a bolded acknowledgement
      if (migrations || configUpdate || envFile) {
        outroMessages.push(chalk.bold('pgflow setup completed successfully!'));
      }
      else {
        outroMessages.push(chalk.bold('pgflow is already properly configured - no changes needed!'));
      }
      // Add a newline after the acknowledgement
      outroMessages.push('');
      // Add specific next steps if changes were made
      if (configUpdate || envFile) {
        outroMessages.push(`- Restart your Supabase instance for configuration changes to take effect`);
      }
      if (migrations) {
        outroMessages.push(`- Apply the migrations with: ${chalk.cyan('supabase migrations up')}`);
      }
      // Always add documentation link with consistent formatting
      if (outroMessages.length > 2) {
        // If we have specific steps, add another newline
        outroMessages.push('');
      }
      outroMessages.push(chalk.bold('Continue the setup:'), chalk.blue.underline('https://pgflow.dev/getting-started/create-first-flow/'));
      // Single outro for all paths
      outro(outroMessages.join('\n'));
    });
};
```
package/dist/commands/install/supabase-path-prompt.d.ts.map
ADDED
```json
{"version":3,"file":"supabase-path-prompt.d.ts","sourceRoot":"","sources":["../../../src/commands/install/supabase-path-prompt.ts"],"names":[],"mappings":"AAIA,wBAAsB,kBAAkB,CAAC,OAAO,CAAC,EAAE;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,4BA6C3E"}
```
package/dist/commands/install/supabase-path-prompt.js
ADDED
```js
import fs from 'fs';
import path from 'path';
import { text, log } from '@clack/prompts';

export async function supabasePathPrompt(options) {
  // If supabasePath is provided as an option and it's valid, use it directly without prompting
  if (options?.supabasePath) {
    const validationError = validate(options.supabasePath);
    if (validationError === undefined) {
      log.info(`Using Supabase project at: ${options.supabasePath}`);
      return options.supabasePath;
    }
    // If validation failed, log the error and continue to prompt
    log.warn(validationError);
  }
  // Try to detect the Supabase directory automatically
  const possiblePaths = ['./supabase', '../supabase', '../../supabase'];
  let detectedPath = '';
  for (const testPath of possiblePaths) {
    if (fs.existsSync(testPath) &&
      fs.existsSync(path.join(testPath, 'config.toml'))) {
      detectedPath = testPath;
      break;
    }
  }
  // Always prompt for detected paths - don't skip
  if (detectedPath) {
    log.info(`Found Supabase project at: ${detectedPath}`);
  }
  const promptMessage = 'Where is your Supabase project located?';
  const supabasePath = await text({
    message: promptMessage,
    placeholder: detectedPath || 'supabase/',
    initialValue: options?.supabasePath || detectedPath,
    validate,
  });
  if (!supabasePath) {
    throw new Error('User cancelled');
  }
  return supabasePath;
}

function validate(inputPath) {
  if (!fs.existsSync(inputPath)) {
    return `Directory not found: ${inputPath}`;
  }
  if (!fs.existsSync(path.join(inputPath, 'config.toml'))) {
    return `Not a valid Supabase project (missing config.toml)`;
  }
  return undefined;
}
```
package/dist/commands/install/update-config-toml.d.ts
ADDED
```ts
/**
 * Updates the config.toml file with necessary configurations for EdgeWorker
 * while preserving comments and formatting
 *
 * Makes the following changes:
 * 1. Enables the connection pooler
 * 2. Ensures pool_mode is set to "transaction"
 * 3. Changes edge_runtime policy from "oneshot" to "per_worker"
 * 4. Creates a backup of the original config.toml file before making changes
 *
 * @param options.supabasePath - Path to the supabase directory
 * @param options.autoConfirm - Whether to automatically confirm changes
 * @returns Promise<boolean> - True if changes were made, false otherwise
 */
export declare function updateConfigToml({ supabasePath, autoConfirm, }: {
    supabasePath: string;
    autoConfirm?: boolean;
}): Promise<boolean>;
//# sourceMappingURL=update-config-toml.d.ts.map
```
package/dist/commands/install/update-config-toml.d.ts.map
ADDED
```json
{"version":3,"file":"update-config-toml.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-config-toml.ts"],"names":[],"mappings":"AAqBA;;;;;;;;;;;;;GAaG;AACH,wBAAsB,gBAAgB,CAAC,EACrC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAwHnB"}
```
package/dist/commands/install/update-config-toml.js
ADDED
```js
import fs from 'fs';
import path from 'path';
import { log, confirm, note } from '@clack/prompts';
import * as TOML from '@decimalturn/toml-patch';
import chalk from 'chalk';

/**
 * Updates the config.toml file with necessary configurations for EdgeWorker
 * while preserving comments and formatting
 *
 * Makes the following changes:
 * 1. Enables the connection pooler
 * 2. Ensures pool_mode is set to "transaction"
 * 3. Changes edge_runtime policy from "oneshot" to "per_worker"
 * 4. Creates a backup of the original config.toml file before making changes
 *
 * @param options.supabasePath - Path to the supabase directory
 * @param options.autoConfirm - Whether to automatically confirm changes
 * @returns Promise<boolean> - True if changes were made, false otherwise
 */
export async function updateConfigToml({ supabasePath, autoConfirm = false, }) {
  // Check Supabase configuration
  const configPath = path.join(supabasePath, 'config.toml');
  const backupPath = `${configPath}.backup`;
  try {
    if (!fs.existsSync(configPath)) {
      log.error(`config.toml not found at ${configPath}`);
      throw new Error(`config.toml not found at ${configPath}`);
    }
    const configContent = fs.readFileSync(configPath, 'utf8');
    let config;
    try {
      config = TOML.parse(configContent);
    }
    catch (parseError) {
      const errorMsg = parseError instanceof Error ? parseError.message : String(parseError);
      log.error(`Invalid TOML syntax in ${configPath}: ${errorMsg}`);
      throw new Error(`Invalid TOML syntax in ${configPath}: ${errorMsg}`);
    }
    const currentSettings = {
      poolerEnabled: config.db?.pooler?.enabled ?? false,
      poolMode: config.db?.pooler?.pool_mode ?? 'none',
      edgeRuntimePolicy: config.edge_runtime?.policy ?? 'oneshot',
    };
    const needsChanges = currentSettings.poolerEnabled !== true ||
      currentSettings.poolMode !== 'transaction' ||
      currentSettings.edgeRuntimePolicy !== 'per_worker';
    if (!needsChanges) {
      log.success('Supabase configuration is already set up for pgflow');
      return false;
    }
    const changes = [];
    if (currentSettings.poolerEnabled !== true) {
      changes.push(`${chalk.bold('Enable connection pooler:')}
${chalk.red(`- enabled = ${currentSettings.poolerEnabled}`)}
${chalk.green('+ enabled = true')}`);
    }
    if (currentSettings.poolMode !== 'transaction') {
      changes.push(`${chalk.bold('Set pool mode to transaction:')}
${chalk.red(`- pool_mode = "${currentSettings.poolMode}"`)}
${chalk.green('+ pool_mode = "transaction"')}`);
    }
    if (currentSettings.edgeRuntimePolicy !== 'per_worker') {
      changes.push(`${chalk.bold('Set edge runtime policy:')}
${chalk.red(`- policy = "${currentSettings.edgeRuntimePolicy}"`)}
${chalk.green('+ policy = "per_worker"')}`);
    }
    note(changes.join('\n\n'), 'Required Configuration Changes');
    let shouldContinue = autoConfirm;
    if (!autoConfirm) {
      const confirmResult = await confirm({
        message: `Update Supabase configuration? (a backup will be created)`,
      });
      shouldContinue = confirmResult === true;
    }
    if (!shouldContinue) {
      log.warn('Configuration update skipped');
      return false;
    }
    // Update Supabase configuration
    // Create backup
    fs.copyFileSync(configPath, backupPath);
    const updatedConfig = { ...config };
    // Ensure required objects exist and set values
    if (!updatedConfig.db)
      updatedConfig.db = {};
    if (!updatedConfig.db.pooler)
      updatedConfig.db.pooler = {};
    if (!updatedConfig.edge_runtime)
      updatedConfig.edge_runtime = {};
    updatedConfig.db.pooler.enabled = true;
    updatedConfig.db.pooler.pool_mode = 'transaction';
    updatedConfig.edge_runtime.policy = 'per_worker';
    let updatedContent;
    try {
      updatedContent = TOML.patch(configContent, updatedConfig, {
        trailingComma: false,
      });
    }
    catch (stringifyError) {
      const errorMsg = stringifyError instanceof Error ? stringifyError.message : String(stringifyError);
      log.error(`Failed to generate TOML for ${configPath}: ${errorMsg}`);
      throw new Error(`Failed to generate TOML for ${configPath}: ${errorMsg}`);
    }
    try {
      fs.writeFileSync(configPath, updatedContent);
    }
    catch (writeError) {
      const errorMsg = writeError instanceof Error ? writeError.message : String(writeError);
      log.error(`Failed to write ${configPath}: ${errorMsg}`);
      throw new Error(`Failed to write ${configPath}: ${errorMsg}`);
    }
    log.success('Supabase configuration updated successfully');
    return true;
  }
  catch (error) {
    log.error(`Configuration update failed: ${error instanceof Error ? error.message : String(error)}`);
    throw error;
  }
}
```
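The essence of the function above as a standalone sketch, reusing the same `TOML.parse`/`TOML.patch` calls from `@decimalturn/toml-patch` (the config path is hypothetical; the backup and prompt logic is omitted):

```ts
import fs from 'fs';
import * as TOML from '@decimalturn/toml-patch';

const configPath = './supabase/config.toml'; // hypothetical location
const content = fs.readFileSync(configPath, 'utf8');
const config = TOML.parse(content);
config.db = config.db ?? {};
config.db.pooler = config.db.pooler ?? {};
config.edge_runtime = config.edge_runtime ?? {};
config.db.pooler.enabled = true;            // [db.pooler] enabled = true
config.db.pooler.pool_mode = 'transaction'; // pool_mode = "transaction"
config.edge_runtime.policy = 'per_worker';  // [edge_runtime] policy = "per_worker"
// patch() rewrites only the changed values, preserving comments and formatting
fs.writeFileSync(configPath, TOML.patch(content, config, { trailingComma: false }));
```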
package/dist/commands/install/update-env-file.d.ts
ADDED
```ts
/**
 * Updates the functions/.env file with required environment variables for pgflow
 *
 * @param options.supabasePath - Path to the supabase directory
 * @param options.autoConfirm - Whether to automatically confirm changes
 * @returns Promise<boolean> - True if changes were made, false otherwise
 */
export declare function updateEnvFile({ supabasePath, autoConfirm, }: {
    supabasePath: string;
    autoConfirm?: boolean;
}): Promise<boolean>;
//# sourceMappingURL=update-env-file.d.ts.map
```
package/dist/commands/install/update-env-file.d.ts.map
ADDED
```json
{"version":3,"file":"update-env-file.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-env-file.ts"],"names":[],"mappings":"AAKA;;;;;;GAMG;AACH,wBAAsB,aAAa,CAAC,EAClC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAqHnB"}
```
package/dist/commands/install/update-env-file.js
ADDED
```js
import fs from 'fs';
import path from 'path';
import { log, note, confirm } from '@clack/prompts';
import chalk from 'chalk';

/**
 * Updates the functions/.env file with required environment variables for pgflow
 *
 * @param options.supabasePath - Path to the supabase directory
 * @param options.autoConfirm - Whether to automatically confirm changes
 * @returns Promise<boolean> - True if changes were made, false otherwise
 */
export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
  // Check environment variables
  const functionsDir = path.join(supabasePath, 'functions');
  const envFilePath = path.join(functionsDir, '.env');
  // Create functions directory if it doesn't exist
  if (!fs.existsSync(functionsDir)) {
    fs.mkdirSync(functionsDir, { recursive: true });
  }
  // Variables to add
  const envVars = {
    EDGE_WORKER_DB_URL: 'postgresql://postgres.pooler-dev:postgres@pooler:6543/postgres',
    EDGE_WORKER_LOG_LEVEL: 'info',
  };
  // Check if file exists and read its content
  let currentContent = '';
  let isNewFile = false;
  if (fs.existsSync(envFilePath)) {
    currentContent = fs.readFileSync(envFilePath, 'utf8');
  }
  else {
    isNewFile = true;
  }
  // Prepare new content
  let newContent = currentContent;
  // Build diff preview
  const missingVars = [];
  const existingVars = [];
  // Check which variables need to be added
  for (const [key, value] of Object.entries(envVars)) {
    if (!newContent.includes(`${key}=`)) {
      missingVars.push({ key, value });
    }
    else {
      existingVars.push(key);
    }
  }
  // If no changes needed, return early
  if (missingVars.length === 0) {
    log.success('Environment variables are already set');
    return false;
  }
  log.info(`Found ${missingVars.length} variable${missingVars.length !== 1 ? 's' : ''} to add`);
  // Build diff preview
  const diffPreview = [];
  if (isNewFile) {
    diffPreview.push(`${chalk.green('Creating new .env file with:')}`);
  }
  else {
    diffPreview.push(`${chalk.green('Adding to existing .env file:')}`);
  }
  // Show variables to be added
  for (const { key, value } of missingVars) {
    diffPreview.push(`${chalk.green('+')} ${key}="${value}"`);
  }
  // Show existing variables if any
  if (existingVars.length > 0) {
    diffPreview.push('');
    diffPreview.push(`${chalk.yellow('Already present:')}`);
    for (const key of existingVars) {
      diffPreview.push(`${chalk.yellow('•')} ${key}`);
    }
  }
  // Show the diff preview
  note(diffPreview.join('\n'), 'Environment Variables');
  // Ask for confirmation if not auto-confirming
  let shouldContinue = autoConfirm;
  if (!autoConfirm) {
    const confirmResult = await confirm({
      message: `Update environment variables?`,
    });
    shouldContinue = confirmResult === true;
  }
  if (!shouldContinue) {
    log.warn('Environment variable update skipped');
    return false;
  }
  // Update environment variables
  // Apply changes if confirmed
  for (const { key, value } of missingVars) {
    // Add a newline at the end if the file doesn't end with one and isn't empty
    if (newContent && !newContent.endsWith('\n')) {
      newContent += '\n';
    }
    // Add the new variable
    newContent += `${key}="${value}"\n`;
  }
  // Write the file if changes were made
  try {
    fs.writeFileSync(envFilePath, newContent);
    log.success('Environment variables updated successfully');
    return true;
  }
  catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    log.error(`Failed to update environment variables: ${errorMessage}`);
    return false;
  }
}
```
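One behavior worth noting: the "already present" test above is a plain substring check on `KEY=`, so even a commented-out assignment in `functions/.env` counts as present and will not be rewritten:

```ts
// Mirrors the check in updateEnvFile; the .env line is hypothetical.
const currentContent = '# EDGE_WORKER_LOG_LEVEL=debug\n';
console.log(currentContent.includes('EDGE_WORKER_LOG_LEVEL=')); // true -> skipped
```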
package/dist/deno/internal_compile.js
ADDED
```js
/**
 * internal_compile.js
 *
 * This script is executed by Deno to compile a Flow into SQL statements.
 * It takes a path to a flow file as an argument, imports the default export,
 * and passes it to compileFlow() from the DSL package.
 */

// Import the compileFlow function directly from @pgflow/dsl
// The import map in deno.json will resolve this import
import { compileFlow } from '@pgflow/dsl';

// Get the flow file path from command line arguments
const flowFilePath = Deno.args[0];

if (!flowFilePath) {
  console.error('Error: No flow file path provided');
  Deno.exit(1);
}

try {
  // Dynamically import the flow file
  const flowModule = await import(`file://${flowFilePath}`);

  // Check if there's a default export
  if (!flowModule.default) {
    console.error(`Error: No default export found in ${flowFilePath}`);
    Deno.exit(1);
  }

  // Get the flow instance
  const flow = flowModule.default;

  // Compile the flow to SQL
  const sqlStatements = compileFlow(flow);

  // Output the SQL statements to stdout
  console.log(sqlStatements.join('\n'));
} catch (error) {
  console.error(`Error compiling flow: ${error.message}`);

  // If the error is related to importing compileFlow, provide more detailed error
  if (error.message.includes('@pgflow/dsl')) {
    console.error(
      'Failed to import compileFlow from @pgflow/dsl. This might be due to:'
    );
    console.error(
      '1. The function not being exported correctly from the package'
    );
    console.error('2. A version mismatch between the CLI and DSL packages');
    console.error('3. Issues with the Deno import map configuration');
  }

  Deno.exit(1);
}
```
package/dist/index.d.ts
ADDED

package/dist/index.d.ts.map
ADDED
```json
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":""}
```
package/dist/index.js
ADDED
```js
#!/usr/bin/env node
import { Command } from 'commander';
import { fileURLToPath } from 'url';
import { readFileSync } from 'fs';
import { dirname, join } from 'path';
import installCommand from './commands/install/index.js';
import compileCommand from './commands/compile/index.js';

// Create a function to handle errors
const errorHandler = (error) => {
  console.error('Error:', error instanceof Error ? error.message : String(error));
  process.exit(1);
};
// Set up process-wide unhandled rejection handler
process.on('unhandledRejection', errorHandler);

// Function to get version from package.json
function getVersion() {
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);
  const packageJsonPath = join(__dirname, '..', 'package.json');
  try {
    const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
    return packageJson.version || 'unknown';
  }
  catch (error) {
    // Log error but don't display it to the user when showing version
    console.error('Error reading package.json:', error);
    return 'unknown';
  }
}

const program = new Command();
program
  .name('npx pgflow')
  .description('Command line interface to help you work with pgflow')
  .version(getVersion())
  .exitOverride((err) => {
    // Don't treat version display as an error
    if (err.code === 'commander.version') {
      process.exit(0);
    }
    throw err;
  });
// Register commands
installCommand(program);
compileCommand(program);

import chalk from 'chalk';
// Tokyo Night inspired colors
// const p = chalk.hex('#7aa2f7'); // blue-violet
const g = chalk.hex('#9ece6a'); // vibrant green
// const f = chalk.hex('#bb9af7'); // light purple/pink
const l = chalk.hex('#2ac3de'); // bright teal/cyan
// const o = chalk.hex('#ff9e64'); // orange
// const w = chalk.hex('#f7768e'); // magenta/pink
const banner = [
  `            ${l('__  _')}                    `,
  `  ${g('_ __   __ _')} ${l('/ _| | _____      __')} `,
  `  ${g("| '_ \\ / _'")} ${l('| |_| |/ _ \\ \\ /\\ / /')} `,
  `  ${g('| |_) | (_|')} ${l('|  _| | (_) \\ V  V / ')} `,
  `  ${g('| .__/ \\__,')} ${l('|_| |_|\\___/ \\_/\\_/  ')} `,
  `  ${g('|_|    |___/')}`,
].join('\n');
console.log(banner);
console.log();
console.log();
console.log();

// Use a promise-aware approach to parse arguments
async function main() {
  try {
    await program.parseAsync(process.argv);
    // If we get here with no command specified, it's not an error
    process.exitCode = 0;
  }
  catch (err) {
    // Commander throws a CommanderError when help or version is displayed
    // We want to exit with code 0 in these cases
    if (err &&
      typeof err === 'object' &&
      'code' in err &&
      (err.code === 'commander.helpDisplayed' ||
        err.code === 'commander.help' ||
        err.code === 'commander.version')) {
      process.exitCode = 0;
    }
    else {
      // For other errors, use our error handler
      errorHandler(err);
    }
  }
}
// Execute and handle any errors
main();
```
package/dist/package.json
ADDED
```json
{
  "name": "pgflow",
  "version": "0.0.0-update-supabase-868977e5-20251119071204",
  "license": "Apache-2.0",
  "type": "module",
  "main": "./dist/index.js",
  "typings": "./dist/index.d.ts",
  "bin": "./dist/index.js",
  "exports": {
    "./package.json": "./package.json",
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
  },
  "types": "./dist/index.d.ts",
  "module": "./dist/index.js",
  "devDependencies": {
    "@types/node": "^22.14.1",
    "tsx": "^4.19.3"
  },
  "dependencies": {
    "@clack/prompts": "^0.10.1",
    "@commander-js/extra-typings": "^13.1.0",
    "@decimalturn/toml-patch": "0.3.7",
    "@pgflow/core": "workspace:*",
    "chalk": "^5.4.1",
    "commander": "^13.1.0"
  },
  "publishConfig": {
    "access": "public"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "postinstall": "chmod +x dist/index.js || true"
  }
}
```
package/dist/tsconfig.lib.tsbuildinfo
ADDED
```json
{"version":"5.8.3"}
```
package/package.json
CHANGED
```diff
@@ -1,6 +1,7 @@
 {
   "name": "pgflow",
-  "version": "0.0.0-test-snapshot-releases2-8d5d9bc1-20250922101158",
+  "version": "0.0.0-update-supabase-868977e5-20251119071204",
+  "license": "Apache-2.0",
   "type": "module",
   "main": "./dist/index.js",
   "typings": "./dist/index.d.ts",
@@ -21,10 +22,10 @@
   "dependencies": {
     "@clack/prompts": "^0.10.1",
     "@commander-js/extra-typings": "^13.1.0",
+    "@decimalturn/toml-patch": "0.3.7",
     "chalk": "^5.4.1",
     "commander": "^13.1.0",
-    "
-    "@pgflow/core": "0.0.0-test-snapshot-releases2-8d5d9bc1-20250922101158"
+    "@pgflow/core": "0.0.0-update-supabase-868977e5-20251119071204"
   },
   "publishConfig": {
     "access": "public"
```