pgflow 0.1.22 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/compile/index.d.ts.map +1 -1
- package/dist/commands/compile/index.js +50 -11
- package/dist/commands/install/copy-migrations.d.ts.map +1 -1
- package/dist/commands/install/copy-migrations.js +6 -5
- package/dist/commands/install/index.d.ts.map +1 -1
- package/dist/commands/install/index.js +71 -63
- package/dist/commands/install/supabase-path-prompt.d.ts +3 -1
- package/dist/commands/install/supabase-path-prompt.d.ts.map +1 -1
- package/dist/commands/install/supabase-path-prompt.js +19 -4
- package/dist/commands/install/update-config-toml.d.ts +1 -1
- package/dist/commands/install/update-config-toml.d.ts.map +1 -1
- package/dist/commands/install/update-config-toml.js +8 -6
- package/dist/commands/install/update-env-file.d.ts.map +1 -1
- package/dist/commands/install/update-env-file.js +5 -5
- package/dist/index.js +1 -3
- package/dist/package.json +1 -1
- package/package.json +2 -2
package/dist/commands/compile/index.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;kCAmDhB,OAAO;AAAhC,wBAsJE"}
package/dist/commands/compile/index.js CHANGED
@@ -1,5 +1,5 @@
 import chalk from 'chalk';
-import { intro, log,
+import { intro, log, note, outro } from '@clack/prompts';
 import path from 'path';
 import fs from 'fs';
 import { spawn } from 'child_process';
@@ -29,6 +29,18 @@ function formatCommand(command, args) {
     });
     return `$ ${cmd}\n${formattedArgs.join('\n')}`;
 }
+/**
+ * Creates a task log entry with a command and its output
+ */
+function createTaskLog(command, args, output) {
+    return [
+        chalk.bold('Command:'),
+        formatCommand(command, args),
+        '',
+        chalk.bold('Output:'),
+        output.trim() ? output.trim() : '(no output)',
+    ].join('\n');
+}
 export default (program) => {
     program
         .command('compile')
@@ -77,7 +89,7 @@ export default (program) => {
             const migrationsDir = path.resolve(supabasePath, 'migrations');
             if (!fs.existsSync(migrationsDir)) {
                 fs.mkdirSync(migrationsDir, { recursive: true });
-                log.
+                log.success(`Created migrations directory: ${migrationsDir}`);
             }
             // Generate timestamp for migration file in format YYYYMMDDHHMMSS
             const now = new Date();
@@ -89,24 +101,49 @@ export default (program) => {
                String(now.getMinutes()).padStart(2, '0'),
                String(now.getSeconds()).padStart(2, '0'),
            ].join('');
-           // Extract the base filename without extension from the flow path
-           const flowBasename = path.basename(resolvedFlowPath, path.extname(resolvedFlowPath));
-           // Create migration filename in the format: <timestamp>_create_<flow_file_basename>_flow.sql
-           const migrationFileName = `${timestamp}_create_${flowBasename}_flow.sql`;
-           const migrationFilePath = path.join(migrationsDir, migrationFileName);
            // Run the compilation
-
-           s.start(`Compiling flow: ${path.basename(resolvedFlowPath)}`);
+           log.info(`Compiling flow: ${path.basename(resolvedFlowPath)}`);
            const compiledSql = await runDenoCompilation(internalCompileScript, resolvedFlowPath, resolvedDenoJsonPath);
+           // Extract flow name from the first line of the SQL output using regex
+           // Looking for pattern: SELECT pgflow.create_flow('flow_name', ...);
+           const flowNameMatch = compiledSql.match(/SELECT\s+pgflow\.create_flow\s*\(\s*'([^']+)'/i);
+           // Use extracted flow name or fallback to the file basename if extraction fails
+           let flowName;
+           if (flowNameMatch && flowNameMatch[1]) {
+               flowName = flowNameMatch[1];
+               log.info(`Extracted flow name: ${flowName}`);
+           }
+           else {
+               // Fallback to file basename if regex doesn't match
+               flowName = path.basename(resolvedFlowPath, path.extname(resolvedFlowPath));
+               log.warn(`Could not extract flow name from SQL, using file basename: ${flowName}`);
+           }
+           // Create migration filename in the format: <timestamp>_create_<flow_name>_flow.sql
+           const migrationFileName = `${timestamp}_create_${flowName}_flow.sql`;
+           const migrationFilePath = path.join(migrationsDir, migrationFileName);
            // Write the SQL to a migration file
            fs.writeFileSync(migrationFilePath, compiledSql);
-           s.stop(`Successfully compiled flow to SQL`);
            // Show the migration file path relative to the current directory
            const relativeFilePath = path.relative(process.cwd(), migrationFilePath);
            log.success(`Migration file created: ${relativeFilePath}`);
+           // Display next steps with outro
+           outro([
+               chalk.bold('Flow compilation completed successfully!'),
+               '',
+               `- Run ${chalk.cyan('supabase migration up')} to apply the migration`,
+               '',
+               chalk.bold('Continue the setup:'),
+               chalk.blue.underline('https://pgflow.dev/getting-started/run-flow/')
+           ].join('\n'));
        }
        catch (error) {
            log.error(`Compilation failed: ${error instanceof Error ? error.message : String(error)}`);
+           outro([
+               chalk.bold('Compilation failed!'),
+               '',
+               chalk.bold('For troubleshooting help:'),
+               chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/')
+           ].join('\n'));
            process.exit(1);
        }
    });
@@ -132,7 +169,7 @@ async function runDenoCompilation(scriptPath, flowPath, denoJsonPath) {
     // Add the script path and flow path
     args.push(scriptPath, flowPath);
     // Log the command for debugging with colored output
-
+    log.info('Running Deno compiler');
     const deno = spawn('deno', args);
     let stdout = '';
     let stderr = '';
@@ -143,6 +180,8 @@ async function runDenoCompilation(scriptPath, flowPath, denoJsonPath) {
         stderr += data.toString();
     });
     deno.on('close', (code) => {
+        // Always display the task log with command and output
+        note(createTaskLog('deno', args, stdout));
         if (code === 0) {
             if (stdout.trim().length === 0) {
                 reject(new Error('Compilation produced no output'));
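In 0.2.0 the compile command names the migration after the flow name declared in the generated SQL rather than after the flow file's basename, falling back to the basename only when the pattern is absent. A minimal sketch of that extraction, reusing the regex from the hunk above; the helper name and the sample SQL are illustrative, not taken from the package:

```js
import path from 'path';

// Mirror of the flow-name extraction added in compile/index.js (hypothetical helper).
function deriveFlowName(compiledSql, flowFilePath) {
  // Looks for: SELECT pgflow.create_flow('flow_name', ...)
  const match = compiledSql.match(/SELECT\s+pgflow\.create_flow\s*\(\s*'([^']+)'/i);
  if (match && match[1]) {
    return match[1];
  }
  // Fallback: the flow file's basename without its extension
  return path.basename(flowFilePath, path.extname(flowFilePath));
}

// Hypothetical compiled output:
const sql = "SELECT pgflow.create_flow('greet_user', max_attempts => 3);";
console.log(deriveFlowName(sql, '/flows/greet-user.ts')); // "greet_user"
```

So a file named greet-user.ts whose flow is declared as greet_user now yields <timestamp>_create_greet_user_flow.sql instead of <timestamp>_create_greet-user_flow.sql.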
package/dist/commands/install/copy-migrations.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AAyEA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,EACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,
+{"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AAyEA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,EACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAoGnB"}
package/dist/commands/install/copy-migrations.js CHANGED
@@ -47,6 +47,7 @@ function findMigrationsDirectory() {
 // Find the migrations directory
 const sourcePath = findMigrationsDirectory();
 export async function copyMigrations({ supabasePath, autoConfirm = false }) {
+    // Check migrations
     const migrationsPath = path.join(supabasePath, 'migrations');
     if (!fs.existsSync(migrationsPath)) {
         fs.mkdirSync(migrationsPath);
@@ -54,7 +55,7 @@ export async function copyMigrations({ supabasePath, autoConfirm = false }) {
     // Check if pgflow migrations directory exists
     if (!sourcePath || !fs.existsSync(sourcePath)) {
         log.error(`Could not find migrations directory`);
-        log.
+        log.warn('This might happen if @pgflow/core is not properly installed or built.');
         log.info('Make sure @pgflow/core is installed and contains the migrations.');
         log.info('If running in development mode, try building the core package first with: nx build core');
         return false;
@@ -78,9 +79,10 @@ export async function copyMigrations({ supabasePath, autoConfirm = false }) {
     }
     // If no files to copy, show message and return false (no changes made)
     if (filesToCopy.length === 0) {
-        log.
+        log.success('All pgflow migrations are already in place');
         return false;
     }
+    log.info(`Found ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to install`);
     // Prepare summary message with colored output
     const summaryParts = [];
     if (filesToCopy.length > 0) {
@@ -101,16 +103,15 @@ ${skippedFiles.map((file) => `${chalk.yellow('•')} ${file}`).join('\n')}`);
         shouldContinue = confirmResult === true;
     }
     if (!shouldContinue) {
-        log.
+        log.warn('Migration installation skipped');
         return false;
     }
-
+    // Install migrations
     // Copy the files
     for (const file of filesToCopy) {
         const source = path.join(sourcePath, file);
         const destination = path.join(migrationsPath, file);
         fs.copyFileSync(source, destination);
-        log.step(`Added ${file}`);
     }
     log.success(`Installed ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to your Supabase project`);
     return true; // Return true to indicate migrations were copied
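These hunks only show the copy loop and the new log lines; how filesToCopy and skippedFiles are split is outside the diff. A plausible sketch, assuming bundled migrations are matched by filename against the target supabase/migrations directory:

```js
import fs from 'fs';

// Assumed helper: partition bundled @pgflow/core migrations into files to copy
// vs. files already present in the user's supabase/migrations directory.
function planMigrationCopy(sourcePath, migrationsPath) {
  const existing = new Set(fs.readdirSync(migrationsPath));
  const filesToCopy = [];
  const skippedFiles = [];
  for (const file of fs.readdirSync(sourcePath).filter((f) => f.endsWith('.sql'))) {
    (existing.has(file) ? skippedFiles : filesToCopy).push(file);
  }
  return { filesToCopy, skippedFiles };
}
```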
package/dist/commands/install/index.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/install/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;kCAQhB,OAAO;AAAhC,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/install/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;kCAQhB,OAAO;AAAhC,wBAiHE"}
package/dist/commands/install/index.js CHANGED
@@ -1,9 +1,9 @@
-import { intro,
+import { intro, group, cancel, outro } from '@clack/prompts';
+import chalk from 'chalk';
 import { copyMigrations } from './copy-migrations.js';
 import { updateConfigToml } from './update-config-toml.js';
 import { updateEnvFile } from './update-env-file.js';
-import
-import fs from 'fs';
+import { supabasePathPrompt } from './supabase-path-prompt.js';
 export default (program) => {
     program
         .command('install')
@@ -11,72 +11,80 @@ export default (program) => {
         .option('--supabase-path <path>', 'Path to the Supabase folder')
         .option('-y, --yes', 'Automatically confirm all prompts', false)
         .action(async (options) => {
-        intro('pgflow
-        //
-
-
-        supabasePath
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        intro('Installing pgflow in your Supabase project');
+        // Use the group feature to organize installation steps
+        const results = await group({
+            // Step 1: Determine Supabase path
+            supabasePath: () => supabasePathPrompt({ supabasePath: options.supabasePath }),
+            // Step 2: Update config.toml
+            configUpdate: async ({ results: { supabasePath } }) => {
+                if (!supabasePath)
+                    return false;
+                return await updateConfigToml({
+                    supabasePath,
+                    autoConfirm: options.yes,
+                });
+            },
+            // Step 3: Copy migrations
+            migrations: async ({ results: { supabasePath } }) => {
+                if (!supabasePath)
+                    return false;
+                return await copyMigrations({
+                    supabasePath,
+                    autoConfirm: options.yes,
+                });
+            },
+            // Step 4: Update environment variables
+            envFile: async ({ results: { supabasePath } }) => {
+                if (!supabasePath)
+                    return false;
+                return await updateEnvFile({
+                    supabasePath,
+                    autoConfirm: options.yes,
+                });
+            },
+        }, {
+            // Handle cancellation
+            onCancel: () => {
+                cancel('Installation cancelled');
                 process.exit(1);
-        }
-        }
-        //
-
-
-
-
-        if
-
+            },
+        });
+        // Extract the results from the group operation
+        const supabasePath = results.supabasePath;
+        const configUpdate = results.configUpdate;
+        const migrations = results.migrations;
+        const envFile = results.envFile;
+        // Exit if supabasePath is null (validation failed or user cancelled)
+        if (!supabasePath) {
+            cancel('Installation cancelled - valid Supabase path is required');
            process.exit(1);
        }
-        // First update config.toml, then copy migrations
-        const configUpdated = await updateConfigToml({
-            supabasePath,
-            autoConfirm: options.yes
-        });
-        const migrationsCopied = await copyMigrations({
-            supabasePath,
-            autoConfirm: options.yes
-        });
-        const envFileCreated = await updateEnvFile({
-            supabasePath,
-            autoConfirm: options.yes
-        });
        // Show completion message
-
-
-
-
-        if (configUpdated || envFileCreated) {
-            nextSteps.push('• Restart your Supabase instance for configuration changes to take effect');
-        }
-        if (migrationsCopied) {
-            nextSteps.push('• Apply the migrations with: supabase db push');
-        }
-        if (nextSteps.length > 0) {
-            note(nextSteps.join('\n'), 'Next steps');
-        }
+        const outroMessages = [];
+        // Always start with a bolded acknowledgement
+        if (migrations || configUpdate || envFile) {
+            outroMessages.push(chalk.bold('pgflow setup completed successfully!'));
        }
        else {
-
+            outroMessages.push(chalk.bold('pgflow is already properly configured - no changes needed!'));
+        }
+        // Add a newline after the acknowledgement
+        outroMessages.push('');
+        // Add specific next steps if changes were made
+        if (configUpdate || envFile) {
+            outroMessages.push(`- Restart your Supabase instance for configuration changes to take effect`);
+        }
+        if (migrations) {
+            outroMessages.push(`- Apply the migrations with: ${chalk.cyan('supabase migrations up')}`);
+        }
+        // Always add documentation link with consistent formatting
+        if (outroMessages.length > 2) {
+            // If we have specific steps, add another newline
+            outroMessages.push('');
        }
+        outroMessages.push(chalk.bold('Continue the setup:'), chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/'));
+        // Single outro for all paths
+        outro(outroMessages.join('\n'));
    });
 };
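The rewritten install command drives its steps through group from @clack/prompts: each named step can read the results of earlier steps, and a shared onCancel handler aborts the whole run. A stripped-down sketch of that pattern outside pgflow (prompt names are illustrative):

```js
import { group, text, confirm, cancel } from '@clack/prompts';

const results = await group(
  {
    // Each key becomes a property on `results`; later steps receive earlier answers.
    projectPath: () => text({ message: 'Where is your project?' }),
    proceed: ({ results: { projectPath } }) =>
      confirm({ message: `Install into ${projectPath}?` }),
  },
  {
    // Runs when the user cancels any prompt in the group.
    onCancel: () => {
      cancel('Installation cancelled');
      process.exit(1);
    },
  }
);

console.log(results.projectPath, results.proceed);
```

Compared with 0.1.x, which awaited updateConfigToml, copyMigrations, and updateEnvFile one after another, each step now short-circuits to false when no valid supabasePath was resolved.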
package/dist/commands/install/supabase-path-prompt.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"supabase-path-prompt.d.ts","sourceRoot":"","sources":["../../../src/commands/install/supabase-path-prompt.ts"],"names":[],"mappings":"AAIA,wBAAsB,kBAAkB,
+{"version":3,"file":"supabase-path-prompt.d.ts","sourceRoot":"","sources":["../../../src/commands/install/supabase-path-prompt.ts"],"names":[],"mappings":"AAIA,wBAAsB,kBAAkB,CAAC,OAAO,CAAC,EAAE;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,4BA6C3E"}
package/dist/commands/install/supabase-path-prompt.js CHANGED
@@ -1,7 +1,17 @@
 import fs from 'fs';
 import path from 'path';
 import { text, log } from '@clack/prompts';
-export async function supabasePathPrompt() {
+export async function supabasePathPrompt(options) {
+    // If supabasePath is provided as an option and it's valid, use it directly without prompting
+    if (options?.supabasePath) {
+        const validationError = validate(options.supabasePath);
+        if (validationError === undefined) {
+            log.info(`Using Supabase project at: ${options.supabasePath}`);
+            return options.supabasePath;
+        }
+        // If validation failed, log the error and continue to prompt
+        log.warn(validationError);
+    }
     // Try to detect the Supabase directory automatically
     const possiblePaths = ['./supabase', '../supabase', '../../supabase'];
     let detectedPath = '';
@@ -12,16 +22,21 @@ export async function supabasePathPrompt() {
             break;
         }
     }
+    // Always prompt for detected paths - don't skip
     if (detectedPath) {
-        log.
+        log.info(`Found Supabase project at: ${detectedPath}`);
     }
     const promptMessage = 'Where is your Supabase project located?';
-
+    const supabasePath = await text({
         message: promptMessage,
         placeholder: detectedPath || 'supabase/',
-        initialValue: detectedPath,
+        initialValue: options?.supabasePath || detectedPath,
         validate,
     });
+    if (!supabasePath) {
+        throw new Error('User cancelled');
+    }
+    return supabasePath;
 }
 function validate(inputPath) {
     if (!fs.existsSync(inputPath)) {
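supabasePathPrompt now accepts an optional options object, so a valid --supabase-path flag skips the interactive prompt entirely, while an invalid one logs a warning and falls back to auto-detection plus prompting. Usage, roughly:

```js
import { supabasePathPrompt } from './supabase-path-prompt.js';

// Valid path passed through from --supabase-path: returned directly, no prompt shown.
const fromFlag = await supabasePathPrompt({ supabasePath: './supabase' });

// No option (or an invalid path): auto-detects ./supabase, ../supabase, ../../supabase,
// then falls back to the text prompt, throwing 'User cancelled' if the prompt is aborted.
const interactive = await supabasePathPrompt();
```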
package/dist/commands/install/update-config-toml.d.ts CHANGED
@@ -12,7 +12,7 @@
  * @param options.autoConfirm - Whether to automatically confirm changes
  * @returns Promise<boolean> - True if changes were made, false otherwise
  */
-export declare function updateConfigToml({ supabasePath, autoConfirm }: {
+export declare function updateConfigToml({ supabasePath, autoConfirm, }: {
     supabasePath: string;
     autoConfirm?: boolean;
 }): Promise<boolean>;
package/dist/commands/install/update-config-toml.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"update-config-toml.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-config-toml.ts"],"names":[],"mappings":"AAqBA;;;;;;;;;;;;;GAaG;AACH,wBAAsB,gBAAgB,CAAC,EACrC,YAAY,EACZ,WAAmB,
+{"version":3,"file":"update-config-toml.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-config-toml.ts"],"names":[],"mappings":"AAqBA;;;;;;;;;;;;;GAaG;AACH,wBAAsB,gBAAgB,CAAC,EACrC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CA8HnB"}
package/dist/commands/install/update-config-toml.js CHANGED
@@ -17,11 +17,13 @@ import chalk from 'chalk';
  * @param options.autoConfirm - Whether to automatically confirm changes
  * @returns Promise<boolean> - True if changes were made, false otherwise
  */
-export async function updateConfigToml({ supabasePath, autoConfirm = false }) {
+export async function updateConfigToml({ supabasePath, autoConfirm = false, }) {
+    // Check Supabase configuration
     const configPath = path.join(supabasePath, 'config.toml');
     const backupPath = `${configPath}.backup`;
     try {
         if (!fs.existsSync(configPath)) {
+            log.error(`config.toml not found at ${configPath}`);
             throw new Error(`config.toml not found at ${configPath}`);
         }
         const configContent = fs.readFileSync(configPath, 'utf8');
@@ -35,7 +37,7 @@ export async function updateConfigToml({ supabasePath, autoConfirm = false }) {
             currentSettings.poolMode !== 'transaction' ||
             currentSettings.edgeRuntimePolicy !== 'per_worker';
         if (!needsChanges) {
-            log.
+            log.success('Supabase configuration is already set up for pgflow');
             return false;
         }
         const changes = [];
@@ -63,12 +65,12 @@ ${chalk.green('+ policy = "per_worker"')}`);
             shouldContinue = confirmResult === true;
         }
         if (!shouldContinue) {
-            log.
+            log.warn('Configuration update skipped');
             return false;
         }
+        // Update Supabase configuration
+        // Create backup
         fs.copyFileSync(configPath, backupPath);
-        log.step(`Created backup of config.toml`);
-        log.step(`Updating Supabase configuration...`);
         const updatedConfig = { ...config };
         // Ensure required objects exist and set values
         if (!updatedConfig.db)
@@ -105,7 +107,7 @@ ${chalk.green('+ policy = "per_worker"')}`);
         // Fix edge_runtime.policy line - transforms "policy = "per_worker"," into "policy = "per_worker""
         /policy = "per_worker",(\s*$|\s*#|\s*\n)/g, 'policy = "per_worker"$1');
         fs.writeFileSync(configPath, updatedContent);
-        log.success(
+        log.success('Supabase configuration updated successfully');
         return true;
     }
     catch (error) {
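The needsChanges check compares the parsed config against the two settings pgflow relies on: a transaction-mode connection pooler and a per_worker edge runtime policy. A sketch of the equivalent check; the exact key paths are an assumption based on the standard Supabase config.toml layout, since the hunks only show the derived currentSettings fields:

```js
// Assumed shape of the check implied by the hunk above.
// `config` is the parsed config.toml object (the package depends on toml-patch).
function needsPgflowChanges(config) {
  const poolMode = config.db?.pooler?.pool_mode;          // assumed key path
  const edgeRuntimePolicy = config.edge_runtime?.policy;  // assumed key path
  return poolMode !== 'transaction' || edgeRuntimePolicy !== 'per_worker';
}
```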
package/dist/commands/install/update-env-file.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"update-env-file.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-env-file.ts"],"names":[],"mappings":"AAKA;;;;;;GAMG;AACH,wBAAsB,aAAa,CAAC,EAClC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,
+{"version":3,"file":"update-env-file.d.ts","sourceRoot":"","sources":["../../../src/commands/install/update-env-file.ts"],"names":[],"mappings":"AAKA;;;;;;GAMG;AACH,wBAAsB,aAAa,CAAC,EAClC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CAqHnB"}
package/dist/commands/install/update-env-file.js CHANGED
@@ -10,11 +10,11 @@ import chalk from 'chalk';
  * @returns Promise<boolean> - True if changes were made, false otherwise
  */
 export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
+    // Check environment variables
     const functionsDir = path.join(supabasePath, 'functions');
     const envFilePath = path.join(functionsDir, '.env');
     // Create functions directory if it doesn't exist
     if (!fs.existsSync(functionsDir)) {
-        log.step('Creating functions directory...');
         fs.mkdirSync(functionsDir, { recursive: true });
     }
     // Variables to add
@@ -29,7 +29,6 @@ export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
         currentContent = fs.readFileSync(envFilePath, 'utf8');
     }
     else {
-        log.step('Creating new .env file...');
         isNewFile = true;
     }
     // Prepare new content
@@ -48,9 +47,10 @@ export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
     }
     // If no changes needed, return early
     if (missingVars.length === 0) {
-        log.
+        log.success('Environment variables are already set');
         return false;
     }
+    log.info(`Found ${missingVars.length} variable${missingVars.length !== 1 ? 's' : ''} to add`);
     // Build diff preview
     const diffPreview = [];
     if (isNewFile) {
@@ -82,9 +82,10 @@ export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
         shouldContinue = confirmResult === true;
     }
     if (!shouldContinue) {
-        log.
+        log.warn('Environment variable update skipped');
         return false;
     }
+    // Update environment variables
     // Apply changes if confirmed
     for (const { key, value } of missingVars) {
         // Add a newline at the end if the file doesn't end with one and isn't empty
@@ -93,7 +94,6 @@ export async function updateEnvFile({ supabasePath, autoConfirm = false, }) {
         }
         // Add the new variable
         newContent += `${key}="${value}"\n`;
-        log.step(`Adding ${key} environment variable`);
     }
     // Write the file if changes were made
     try {
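The env updater appends only the variables that are still missing, writing each as KEY="value" and preserving the existing file contents. A stand-alone sketch of the same append logic; the detection regex and the variable name below are assumptions, since the real variable list sits outside these hunks:

```js
import fs from 'fs';

// Assumed stand-alone version of the append logic shown in the hunks above.
function appendMissingVars(envFilePath, requiredVars) {
  let content = fs.existsSync(envFilePath) ? fs.readFileSync(envFilePath, 'utf8') : '';
  const missing = requiredVars.filter(({ key }) => !new RegExp(`^${key}=`, 'm').test(content));
  for (const { key, value } of missing) {
    // Add a newline at the end if the file doesn't end with one and isn't empty
    if (content.length > 0 && !content.endsWith('\n')) content += '\n';
    content += `${key}="${value}"\n`;
  }
  if (missing.length > 0) fs.writeFileSync(envFilePath, content);
  return missing.length > 0; // mirrors the "true if changes were made" contract
}

// Hypothetical variable name - not taken from the package:
appendMissingVars('./supabase/functions/.env', [{ key: 'EXAMPLE_SECRET', value: 'change-me' }]);
```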
package/dist/index.js CHANGED
@@ -46,7 +46,7 @@ import chalk from 'chalk';
 // Tokyo Night inspired colors
 // const p = chalk.hex('#7aa2f7'); // blue-violet
 const g = chalk.hex('#9ece6a'); // vibrant green
-const f = chalk.hex('#bb9af7'); // light purple/pink
+// const f = chalk.hex('#bb9af7'); // light purple/pink
 const l = chalk.hex('#2ac3de'); // bright teal/cyan
 // const o = chalk.hex('#ff9e64'); // orange
 // const w = chalk.hex('#f7768e'); // magenta/pink
@@ -57,8 +57,6 @@ const banner = [
     ` ${g('| |_) | (_|')} ${l('| _| | (_) \\ V V /')} `,
     ` ${g('| .__/ \\__,')} ${l('|_| |_|\\___/ \\_/\\_/')} `,
     ` ${g('|_| |___/')}`,
-    ``,
-    ` ${f('Postgres-native Workflow Engine')}`,
 ].join('\n');
 console.log(banner);
 console.log();
package/dist/package.json CHANGED
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pgflow",
-  "version": "0.
+  "version": "0.2.0",
   "type": "module",
   "main": "./dist/index.js",
   "typings": "./dist/index.d.ts",
@@ -24,7 +24,7 @@
     "chalk": "^5.4.1",
     "commander": "^13.1.0",
     "toml-patch": "^0.2.3",
-    "@pgflow/core": "0.
+    "@pgflow/core": "0.2.0"
   },
   "publishConfig": {
     "access": "public"