pgflow 0.0.0-add-workerconfig-to-context--20250905094004-b98e1fec-20250905074005 → 0.0.0-test-snapshot-releases-8d5d9bc1-20250922101013

This diff shows the contents of publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (61)
  1. package/package.json +2 -2
  2. package/dist/commands/compile/index.d.ts +0 -4
  3. package/dist/commands/compile/index.d.ts.map +0 -1
  4. package/dist/commands/compile/index.js +0 -201
  5. package/dist/commands/install/copy-migrations.d.ts +0 -5
  6. package/dist/commands/install/copy-migrations.d.ts.map +0 -1
  7. package/dist/commands/install/copy-migrations.js +0 -253
  8. package/dist/commands/install/index.d.ts +0 -4
  9. package/dist/commands/install/index.d.ts.map +0 -1
  10. package/dist/commands/install/index.js +0 -90
  11. package/dist/commands/install/supabase-path-prompt.d.ts +0 -4
  12. package/dist/commands/install/supabase-path-prompt.d.ts.map +0 -1
  13. package/dist/commands/install/supabase-path-prompt.js +0 -49
  14. package/dist/commands/install/update-config-toml.d.ts +0 -19
  15. package/dist/commands/install/update-config-toml.d.ts.map +0 -1
  16. package/dist/commands/install/update-config-toml.js +0 -117
  17. package/dist/commands/install/update-env-file.d.ts +0 -12
  18. package/dist/commands/install/update-env-file.d.ts.map +0 -1
  19. package/dist/commands/install/update-env-file.js +0 -109
  20. package/dist/deno/internal_compile.js +0 -55
  21. package/dist/deno.lock +0 -28
  22. package/dist/index.d.ts +0 -3
  23. package/dist/index.d.ts.map +0 -1
  24. package/dist/index.js +0 -90
  25. package/dist/node_modules/.deno/.deno.lock +0 -0
  26. package/dist/node_modules/.deno/.deno.lock.poll +0 -1
  27. package/dist/node_modules/.deno/.setup-cache.bin +0 -0
  28. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/.initialized +0 -0
  29. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/LICENSE +0 -202
  30. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/README.md +0 -203
  31. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/CHANGELOG.md +0 -409
  32. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/README.md +0 -203
  33. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/compile-flow.d.ts +0 -10
  34. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/compile-flow.d.ts.map +0 -1
  35. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/compile-flow.js +0 -45
  36. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/dsl.d.ts +0 -149
  37. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/dsl.d.ts.map +0 -1
  38. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/dsl.js +0 -90
  39. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/example-flow.d.ts +0 -29
  40. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/example-flow.d.ts.map +0 -1
  41. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/example-flow.js +0 -41
  42. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/index.d.ts +0 -3
  43. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/index.d.ts.map +0 -1
  44. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/index.js +0 -2
  45. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/package.json +0 -35
  46. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/platforms/index.d.ts +0 -2
  47. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/platforms/index.d.ts.map +0 -1
  48. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/platforms/index.js +0 -2
  49. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/platforms/supabase.d.ts +0 -32
  50. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/platforms/supabase.d.ts.map +0 -1
  51. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/platforms/supabase.js +0 -4
  52. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/supabase.d.ts +0 -2
  53. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/supabase.d.ts.map +0 -1
  54. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/supabase.js +0 -2
  55. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/tsconfig.lib.tsbuildinfo +0 -1
  56. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/utils.d.ts +0 -37
  57. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/utils.d.ts.map +0 -1
  58. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/dist/utils.js +0 -73
  59. package/dist/node_modules/.deno/@pgflow+dsl@0.6.0/node_modules/@pgflow/dsl/package.json +0 -35
  60. package/dist/package.json +0 -38
  61. package/dist/tsconfig.lib.tsbuildinfo +0 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pgflow",
- "version": "0.0.0-add-workerconfig-to-context--20250905094004-b98e1fec-20250905074005",
+ "version": "0.0.0-test-snapshot-releases-8d5d9bc1-20250922101013",
  "type": "module",
  "main": "./dist/index.js",
  "typings": "./dist/index.d.ts",
@@ -24,7 +24,7 @@
  "chalk": "^5.4.1",
  "commander": "^13.1.0",
  "toml-patch": "^0.2.3",
- "@pgflow/core": "0.0.0-add-workerconfig-to-context--20250905094004-b98e1fec-20250905074005"
+ "@pgflow/core": "0.0.0-test-snapshot-releases-8d5d9bc1-20250922101013"
  },
  "publishConfig": {
  "access": "public"
package/dist/commands/compile/index.d.ts DELETED
@@ -1,4 +0,0 @@
- import { type Command } from 'commander';
- declare const _default: (program: Command) => void;
- export default _default;
- //# sourceMappingURL=index.d.ts.map
package/dist/commands/compile/index.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/compile/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;yBAmDzB,SAAS,OAAO;AAAhC,wBAsJE"}
package/dist/commands/compile/index.js DELETED
@@ -1,201 +0,0 @@
- import chalk from 'chalk';
- import { intro, log, note, outro } from '@clack/prompts';
- import path from 'path';
- import fs from 'fs';
- import { spawn } from 'child_process';
- import { fileURLToPath } from 'url';
- // Get the directory name in ES modules
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = path.dirname(__filename);
- /**
- * Formats a command and its arguments for display with syntax highlighting
- * Each argument is displayed on a separate line for better readability
- */
- function formatCommand(command, args) {
- const cmd = chalk.cyan(command);
- const formattedArgs = args.map((arg) => {
- // Highlight import map and file paths differently
- if (arg.startsWith('--import-map=')) {
- const [flag, value] = arg.split('=');
- return ` ${chalk.yellow(flag)}=${chalk.green(value)}`;
- }
- else if (arg.startsWith('--')) {
- return ` ${chalk.yellow(arg)}`;
- }
- else if (arg.endsWith('.ts') || arg.endsWith('.json')) {
- return ` ${chalk.green(arg)}`;
- }
- return ` ${chalk.white(arg)}`;
- });
- return `$ ${cmd}\n${formattedArgs.join('\n')}`;
- }
- /**
- * Creates a task log entry with a command and its output
- */
- function createTaskLog(command, args, output) {
- return [
- chalk.bold('Command:'),
- formatCommand(command, args),
- '',
- chalk.bold('Output:'),
- output.trim() ? output.trim() : '(no output)',
- ].join('\n');
- }
- export default (program) => {
- program
- .command('compile')
- .description('Compiles a TypeScript-defined flow into SQL migration')
- .argument('<flowPath>', 'Path to the flow TypeScript file')
- .option('--deno-json <denoJsonPath>', 'Path to deno.json with valid importMap')
- .option('--supabase-path <supabasePath>', 'Path to the Supabase folder')
- .action(async (flowPath, options) => {
- intro('pgflow - Compile Flow to SQL');
- try {
- // Resolve paths
- const resolvedFlowPath = path.resolve(process.cwd(), flowPath);
- // Only resolve denoJsonPath if it's provided
- let resolvedDenoJsonPath;
- if (options.denoJson) {
- resolvedDenoJsonPath = path.resolve(process.cwd(), options.denoJson);
- // Validate deno.json path if provided
- if (!fs.existsSync(resolvedDenoJsonPath)) {
- log.error(`deno.json file not found: ${resolvedDenoJsonPath}`);
- process.exit(1);
- }
- }
- // Validate flow path
- if (!fs.existsSync(resolvedFlowPath)) {
- log.error(`Flow file not found: ${resolvedFlowPath}`);
- process.exit(1);
- }
- // Validate Supabase path
- let supabasePath;
- if (options.supabasePath) {
- supabasePath = path.resolve(process.cwd(), options.supabasePath);
- }
- else {
- // Default to ./supabase/ if not provided
- supabasePath = path.resolve(process.cwd(), 'supabase');
- }
- // Check if Supabase path exists
- if (!fs.existsSync(supabasePath)) {
- log.error(`Supabase directory not found: ${supabasePath}\n` +
- `Please provide a valid Supabase path using --supabase-path option or ensure ./supabase/ directory exists.`);
- process.exit(1);
- }
- // Find the internal_compile.js script
- const internalCompileScript = path.resolve(__dirname, '../../deno/internal_compile.js');
- // Create migrations directory if it doesn't exist
- const migrationsDir = path.resolve(supabasePath, 'migrations');
- if (!fs.existsSync(migrationsDir)) {
- fs.mkdirSync(migrationsDir, { recursive: true });
- log.success(`Created migrations directory: ${migrationsDir}`);
- }
- // Generate timestamp for migration file in format YYYYMMDDHHMMSS using UTC
- const now = new Date();
- const timestamp = [
- now.getUTCFullYear(),
- String(now.getUTCMonth() + 1).padStart(2, '0'),
- String(now.getUTCDate()).padStart(2, '0'),
- String(now.getUTCHours()).padStart(2, '0'),
- String(now.getUTCMinutes()).padStart(2, '0'),
- String(now.getUTCSeconds()).padStart(2, '0'),
- ].join('');
- // Run the compilation
- log.info(`Compiling flow: ${path.basename(resolvedFlowPath)}`);
- const compiledSql = await runDenoCompilation(internalCompileScript, resolvedFlowPath, resolvedDenoJsonPath);
- // Extract flow name from the first line of the SQL output using regex
- // Looking for pattern: SELECT pgflow.create_flow('flow_name', ...);
- const flowNameMatch = compiledSql.match(/SELECT\s+pgflow\.create_flow\s*\(\s*'([^']+)'/i);
- // Use extracted flow name or fallback to the file basename if extraction fails
- let flowName;
- if (flowNameMatch && flowNameMatch[1]) {
- flowName = flowNameMatch[1];
- log.info(`Extracted flow name: ${flowName}`);
- }
- else {
- // Fallback to file basename if regex doesn't match
- flowName = path.basename(resolvedFlowPath, path.extname(resolvedFlowPath));
- log.warn(`Could not extract flow name from SQL, using file basename: ${flowName}`);
- }
- // Create migration filename in the format: <timestamp>_create_<flow_name>_flow.sql
- const migrationFileName = `${timestamp}_create_${flowName}_flow.sql`;
- const migrationFilePath = path.join(migrationsDir, migrationFileName);
- // Write the SQL to a migration file
- fs.writeFileSync(migrationFilePath, compiledSql);
- // Show the migration file path relative to the current directory
- const relativeFilePath = path.relative(process.cwd(), migrationFilePath);
- log.success(`Migration file created: ${relativeFilePath}`);
- // Display next steps with outro
- outro([
- chalk.bold('Flow compilation completed successfully!'),
- '',
- `- Run ${chalk.cyan('supabase migration up')} to apply the migration`,
- '',
- chalk.bold('Continue the setup:'),
- chalk.blue.underline('https://pgflow.dev/getting-started/run-flow/')
- ].join('\n'));
- }
- catch (error) {
- log.error(`Compilation failed: ${error instanceof Error ? error.message : String(error)}`);
- outro([
- chalk.bold('Compilation failed!'),
- '',
- chalk.bold('For troubleshooting help:'),
- chalk.blue.underline('https://pgflow.dev/getting-started/compile-to-sql/')
- ].join('\n'));
- process.exit(1);
- }
- });
- };
- /**
- * Runs the Deno compilation script and returns the compiled SQL
- */
- async function runDenoCompilation(scriptPath, flowPath, denoJsonPath) {
- return new Promise((resolve, reject) => {
- // Validate input paths
- if (!scriptPath) {
- return reject(new Error('Internal script path is required'));
- }
- if (!flowPath) {
- return reject(new Error('Flow path is required'));
- }
- // Build the command arguments array
- const args = ['run', '--allow-read', '--allow-net', '--allow-env'];
- // Only add the import-map argument if denoJsonPath is provided and valid
- if (denoJsonPath && typeof denoJsonPath === 'string') {
- args.push(`--import-map=${denoJsonPath}`);
- }
- // Add the script path and flow path
- args.push(scriptPath, flowPath);
- // Log the command for debugging with colored output
- log.info('Running Deno compiler');
- const deno = spawn('deno', args);
- let stdout = '';
- let stderr = '';
- deno.stdout.on('data', (data) => {
- stdout += data.toString();
- });
- deno.stderr.on('data', (data) => {
- stderr += data.toString();
- });
- deno.on('close', (code) => {
- // Always display the task log with command and output
- note(createTaskLog('deno', args, stdout));
- if (code === 0) {
- if (stdout.trim().length === 0) {
- reject(new Error('Compilation produced no output'));
- }
- else {
- resolve(stdout);
- }
- }
- else {
- reject(new Error(`Deno process exited with code ${code}${stderr ? `\n${stderr}` : ''}`));
- }
- });
- deno.on('error', (err) => {
- reject(new Error(`Failed to start Deno process: ${err.message}. Make sure Deno is installed.`));
- });
- });
- }
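For quick reference, a minimal sketch (not part of the package; names are illustrative) of how the removed compile command above appears to name the generated migration: a UTC YYYYMMDDHHMMSS timestamp plus a flow name extracted from the emitted pgflow.create_flow(...) call, with the file basename as fallback.

// Hypothetical helper distilled from the removed compile/index.js above; not shipped code.
function migrationFileName(compiledSql, fallbackBasename, now = new Date()) {
  const timestamp = [
    now.getUTCFullYear(),
    String(now.getUTCMonth() + 1).padStart(2, '0'),
    String(now.getUTCDate()).padStart(2, '0'),
    String(now.getUTCHours()).padStart(2, '0'),
    String(now.getUTCMinutes()).padStart(2, '0'),
    String(now.getUTCSeconds()).padStart(2, '0'),
  ].join('');
  const match = compiledSql.match(/SELECT\s+pgflow\.create_flow\s*\(\s*'([^']+)'/i);
  const flowName = (match && match[1]) || fallbackBasename;
  return `${timestamp}_create_${flowName}_flow.sql`;
}
// migrationFileName("SELECT pgflow.create_flow('greet_user', ...);", 'greet-user')
// -> e.g. '20250922101013_create_greet_user_flow.sql' ('greet_user' is an illustrative flow name)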
package/dist/commands/install/copy-migrations.d.ts DELETED
@@ -1,5 +0,0 @@
- export declare function copyMigrations({ supabasePath, autoConfirm, }: {
- supabasePath: string;
- autoConfirm?: boolean;
- }): Promise<boolean>;
- //# sourceMappingURL=copy-migrations.d.ts.map
package/dist/commands/install/copy-migrations.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"copy-migrations.d.ts","sourceRoot":"","sources":["../../../src/commands/install/copy-migrations.ts"],"names":[],"mappings":"AA6JA,wBAAsB,cAAc,CAAC,EACnC,YAAY,EACZ,WAAmB,GACpB,EAAE;IACD,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,GAAG,OAAO,CAAC,OAAO,CAAC,CA6LnB"}
package/dist/commands/install/copy-migrations.js DELETED
@@ -1,253 +0,0 @@
- import fs from 'fs';
- import path from 'path';
- import { createRequire } from 'module';
- import { fileURLToPath } from 'url';
- import { log, confirm, note } from '@clack/prompts';
- import chalk from 'chalk';
- // Get the directory name in ES modules
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = path.dirname(__filename);
- // Create a require function to use require.resolve
- const require = createRequire(import.meta.url);
- // Function to find migrations directory
- function findMigrationsDirectory() {
- try {
- // First try: resolve from installed @pgflow/core package
- const corePackageJsonPath = require.resolve('@pgflow/core/package.json');
- const corePackageFolder = path.dirname(corePackageJsonPath);
- const packageMigrationsPath = path.join(corePackageFolder, 'dist', 'supabase', 'migrations');
- if (fs.existsSync(packageMigrationsPath)) {
- return packageMigrationsPath;
- }
- // If that fails, try development path
- log.info('Could not find migrations in installed package, trying development paths...');
- }
- catch (error) {
- log.info('Could not resolve @pgflow/core package, trying development paths...');
- }
- // Try development paths
- // 1. Try relative to CLI dist folder (when running built CLI)
- const distRelativePath = path.resolve(__dirname, '../../../../core/supabase/migrations');
- if (fs.existsSync(distRelativePath)) {
- return distRelativePath;
- }
- // 2. Try relative to CLI source folder (when running from source)
- const sourceRelativePath = path.resolve(__dirname, '../../../../../core/supabase/migrations');
- if (fs.existsSync(sourceRelativePath)) {
- return sourceRelativePath;
- }
- // 3. Try local migrations directory (for backward compatibility)
- const localMigrationsPath = path.resolve(__dirname, '../../migrations');
- if (fs.existsSync(localMigrationsPath)) {
- return localMigrationsPath;
- }
- // No migrations found
- return null;
- }
- // Helper function to get the timestamp part from a migration filename
- function getTimestampFromFilename(filename) {
- const match = filename.match(/^(\d+)_/);
- // Return the timestamp only if it exists and has the correct length (14 digits)
- if (match && match[1] && match[1].length === 14 && /^\d{14}$/.test(match[1])) {
- return match[1];
- }
- return '';
- }
- // Helper function to format a Date object into a migration timestamp string (YYYYMMDDhhmmss) using UTC
- function formatDateToTimestamp(date) {
- const year = date.getUTCFullYear();
- const month = String(date.getUTCMonth() + 1).padStart(2, '0');
- const day = String(date.getUTCDate()).padStart(2, '0');
- const hours = String(date.getUTCHours()).padStart(2, '0');
- const minutes = String(date.getUTCMinutes()).padStart(2, '0');
- const seconds = String(date.getUTCSeconds()).padStart(2, '0');
- return `${year}${month}${day}${hours}${minutes}${seconds}`;
- }
- // Helper function to parse a timestamp string into a Date object (interpreted as UTC)
- function parseTimestampToDate(timestamp) {
- // Validate format: YYYYMMDDhhmmss
- if (!timestamp || timestamp.length !== 14 || !/^\d{14}$/.test(timestamp)) {
- return null;
- }
- const year = parseInt(timestamp.substring(0, 4), 10);
- const month = parseInt(timestamp.substring(4, 6), 10) - 1; // months are 0-indexed in JS Date
- const day = parseInt(timestamp.substring(6, 8), 10);
- const hours = parseInt(timestamp.substring(8, 10), 10);
- const minutes = parseInt(timestamp.substring(10, 12), 10);
- const seconds = parseInt(timestamp.substring(12, 14), 10);
- // Create date in UTC and validate (invalid dates like Feb 31 will be auto-corrected by JS Date)
- const date = new Date(Date.UTC(year, month, day, hours, minutes, seconds));
- // Additional validation to ensure the parsed date matches the input
- // This catches edge cases like month=13 that JS Date would autocorrect
- if (date.getUTCFullYear() !== year ||
- date.getUTCMonth() !== month ||
- date.getUTCDate() !== day ||
- date.getUTCHours() !== hours ||
- date.getUTCMinutes() !== minutes ||
- date.getUTCSeconds() !== seconds) {
- return null;
- }
- return date;
- }
- // Helper function to generate a new timestamp that's higher than the reference timestamp (using UTC)
- function generateNewTimestamp(referenceTimestamp, increment = 1) {
- // First try to parse the reference timestamp to a Date
- const parsedDate = parseTimestampToDate(referenceTimestamp);
- // If we couldn't parse it, use current UTC time
- if (!parsedDate) {
- return formatDateToTimestamp(new Date());
- }
- // Add the specified number of seconds (default: 1)
- parsedDate.setUTCSeconds(parsedDate.getUTCSeconds() + increment);
- // Get current UTC time for comparison
- const now = new Date();
- // Return either the incremented timestamp or current time, whichever is later
- // This ensures we never go backwards in time
- if (parsedDate > now) {
- return formatDateToTimestamp(parsedDate);
- }
- else {
- // If we're already at or past current time, add increment to now
- now.setUTCSeconds(now.getUTCSeconds() + increment);
- return formatDateToTimestamp(now);
- }
- }
- // Find the migrations directory
- const sourcePath = findMigrationsDirectory();
- export async function copyMigrations({ supabasePath, autoConfirm = false, }) {
- const migrationsPath = path.join(supabasePath, 'migrations');
- if (!fs.existsSync(migrationsPath)) {
- fs.mkdirSync(migrationsPath);
- }
- // Check if pgflow migrations directory exists
- if (!sourcePath || !fs.existsSync(sourcePath)) {
- log.error(`Could not find migrations directory`);
- log.warn('This might happen if @pgflow/core is not properly installed or built.');
- log.info('Make sure @pgflow/core is installed and contains the migrations.');
- log.info('If running in development mode, try building the core package first with: nx build core');
- return false;
- }
- // Get all existing migrations in user's directory
- const existingFiles = fs.existsSync(migrationsPath)
- ? fs.readdirSync(migrationsPath)
- : [];
- // Find the latest migration timestamp in user's directory
- let latestTimestamp = '00000000000000';
- for (const file of existingFiles) {
- if (file.endsWith('.sql')) {
- const timestamp = getTimestampFromFilename(file);
- // Only consider timestamps that have been validated by getTimestampFromFilename
- // to have the correct length and format
- if (timestamp && timestamp.length === 14) {
- const parsedDate = parseTimestampToDate(timestamp);
- // If we have a valid date and this timestamp is newer, update latestTimestamp
- if (parsedDate && parseInt(timestamp, 10) > parseInt(latestTimestamp, 10)) {
- latestTimestamp = timestamp;
- }
- }
- }
- }
- // Get all source migrations
- const sourceFiles = fs
- .readdirSync(sourcePath)
- .filter((file) => file.endsWith('.sql'));
- const filesToCopy = [];
- const skippedFiles = [];
- // Check which migrations need to be installed
- for (const sourceFile of sourceFiles) {
- // Check if this migration is already installed (by checking if the original filename
- // appears in any existing migration filename)
- const isAlreadyInstalled = existingFiles.some((existingFile) => existingFile.includes(sourceFile));
- if (isAlreadyInstalled) {
- skippedFiles.push(sourceFile);
- }
- else {
- filesToCopy.push({
- source: sourceFile,
- destination: sourceFile, // Will be updated later with new timestamp
- });
- }
- }
- // If no files to copy, show message with details and return false (no changes made)
- if (filesToCopy.length === 0) {
- // Show success message
- log.success('All pgflow migrations are already in place');
- // Show details of already installed migrations
- if (skippedFiles.length > 0) {
- const detailedMsg = [
- 'Already installed migrations:',
- ...skippedFiles.map((file) => {
- // Find the matching existing file to show how it was installed
- const matchingFile = existingFiles.find((existingFile) => existingFile.includes(file));
- if (matchingFile === file) {
- // Installed with old direct method
- return ` ${chalk.dim('•')} ${chalk.bold(file)}`;
- }
- else {
- // Installed with new timestamped method
- const timestampPart = matchingFile?.substring(0, matchingFile.indexOf(file) - 1) || '';
- return ` ${chalk.dim('•')} ${chalk.dim(timestampPart + '_')}${chalk.bold(file)}`;
- }
- }),
- ].join('\n');
- note(detailedMsg, 'Existing pgflow Migrations');
- }
- return false;
- }
- // Generate new timestamps for migrations to install
- let baseTimestamp = latestTimestamp;
- filesToCopy.forEach((file) => {
- // Generate timestamp with increasing values to maintain order
- // Each iteration uses the timestamp generated by the previous iteration as the base
- baseTimestamp = generateNewTimestamp(baseTimestamp);
- // Create new filename with format: newTimestamp_originalFilename
- file.destination = `${baseTimestamp}_${file.source}`;
- });
- log.info(`Found ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to install`);
- // Prepare summary message with colored output
- const summaryParts = [];
- if (filesToCopy.length > 0) {
- summaryParts.push(`${chalk.green('New migrations to install:')}\n${filesToCopy
- .map((file) => {
- // Extract the timestamp part from the new filename
- const newTimestamp = file.destination.substring(0, 14);
- // Format: dim timestamp + bright original name
- return `${chalk.green('+')} ${file.source} → ${chalk.dim(newTimestamp + '_')}${chalk.bold(file.source)}`;
- })
- .join('\n')}`);
- }
- if (skippedFiles.length > 0) {
- summaryParts.push(`${chalk.yellow('Already installed:')}\n${skippedFiles
- .map((file) => `${chalk.yellow('•')} ${file}`)
- .join('\n')}`);
- }
- // Show summary and ask for confirmation if not auto-confirming
- note(summaryParts.join('\n\n'), 'pgflow Migrations');
- let shouldContinue = autoConfirm;
- if (!autoConfirm) {
- const confirmResult = await confirm({
- message: `Install ${filesToCopy.length} new migration${filesToCopy.length !== 1 ? 's' : ''}?`,
- });
- shouldContinue = confirmResult === true;
- }
- if (!shouldContinue) {
- log.warn('Migration installation skipped');
- return false;
- }
- // Install migrations with new filenames
- for (const file of filesToCopy) {
- const sourcePath1 = path.join(sourcePath, file.source);
- const destinationPath = path.join(migrationsPath, file.destination);
- fs.copyFileSync(sourcePath1, destinationPath);
- }
- // Show detailed success message with styled filenames
- const detailedSuccessMsg = [
- `Installed ${filesToCopy.length} migration${filesToCopy.length !== 1 ? 's' : ''} to your Supabase project:`,
- ...filesToCopy.map((file) => {
- const newTimestamp = file.destination.substring(0, 14);
- return ` ${chalk.dim(newTimestamp + '_')}${chalk.bold(file.source)}`;
- }),
- ].join('\n');
- log.success(detailedSuccessMsg);
- return true; // Return true to indicate migrations were copied
- }
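A minimal sketch (not part of the package; filenames are illustrative) of the two rules the removed copy-migrations.js above applies: a bundled migration counts as already installed when its original name appears inside an existing filename; otherwise it is copied under a fresh UTC timestamp prefix that stays ahead of the newest existing migration.

// Hypothetical example values; the checks mirror the removed code above.
const bundled = '20240101000000_pgflow_initial.sql';   // illustrative name for a file shipped with @pgflow/core
const existing = ['20250922101013_20240101000000_pgflow_initial.sql'];
const alreadyInstalled = existing.some((f) => f.includes(bundled)); // true -> skipped
// If it were not installed, it would be copied as `${generateNewTimestamp(latestExisting)}_${bundled}`,
// where each generated timestamp is at least one second past the previous one and never behind the current time.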
package/dist/commands/install/index.d.ts DELETED
@@ -1,4 +0,0 @@
- import { type Command } from 'commander';
- declare const _default: (program: Command) => void;
- export default _default;
- //# sourceMappingURL=index.d.ts.map
package/dist/commands/install/index.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/commands/install/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,OAAO,EAAE,MAAM,WAAW,CAAC;yBAQzB,SAAS,OAAO;AAAhC,wBAiHE"}
package/dist/commands/install/index.js DELETED
@@ -1,90 +0,0 @@
- import { intro, group, cancel, outro } from '@clack/prompts';
- import chalk from 'chalk';
- import { copyMigrations } from './copy-migrations.js';
- import { updateConfigToml } from './update-config-toml.js';
- import { updateEnvFile } from './update-env-file.js';
- import { supabasePathPrompt } from './supabase-path-prompt.js';
- export default (program) => {
- program
- .command('install')
- .description('Set up pgflow in your Supabase project')
- .option('--supabase-path <path>', 'Path to the Supabase folder')
- .option('-y, --yes', 'Automatically confirm all prompts', false)
- .action(async (options) => {
- intro('Installing pgflow in your Supabase project');
- // Use the group feature to organize installation steps
- const results = await group({
- // Step 1: Determine Supabase path
- supabasePath: () => supabasePathPrompt({ supabasePath: options.supabasePath }),
- // Step 2: Update config.toml
- configUpdate: async ({ results: { supabasePath } }) => {
- if (!supabasePath)
- return false;
- return await updateConfigToml({
- supabasePath,
- autoConfirm: options.yes,
- });
- },
- // Step 3: Copy migrations
- migrations: async ({ results: { supabasePath } }) => {
- if (!supabasePath)
- return false;
- return await copyMigrations({
- supabasePath,
- autoConfirm: options.yes,
- });
- },
- // Step 4: Update environment variables
- envFile: async ({ results: { supabasePath } }) => {
- if (!supabasePath)
- return false;
- return await updateEnvFile({
- supabasePath,
- autoConfirm: options.yes,
- });
- },
- }, {
- // Handle cancellation
- onCancel: () => {
- cancel('Installation cancelled');
- process.exit(1);
- },
- });
- // Extract the results from the group operation
- const supabasePath = results.supabasePath;
- const configUpdate = results.configUpdate;
- const migrations = results.migrations;
- const envFile = results.envFile;
- // Exit if supabasePath is null (validation failed or user cancelled)
- if (!supabasePath) {
- cancel('Installation cancelled - valid Supabase path is required');
- process.exit(1);
- }
- // Show completion message
- const outroMessages = [];
- // Always start with a bolded acknowledgement
- if (migrations || configUpdate || envFile) {
- outroMessages.push(chalk.bold('pgflow setup completed successfully!'));
- }
- else {
- outroMessages.push(chalk.bold('pgflow is already properly configured - no changes needed!'));
- }
- // Add a newline after the acknowledgement
- outroMessages.push('');
- // Add specific next steps if changes were made
- if (configUpdate || envFile) {
- outroMessages.push(`- Restart your Supabase instance for configuration changes to take effect`);
- }
- if (migrations) {
- outroMessages.push(`- Apply the migrations with: ${chalk.cyan('supabase migrations up')}`);
- }
- // Always add documentation link with consistent formatting
- if (outroMessages.length > 2) {
- // If we have specific steps, add another newline
- outroMessages.push('');
- }
- outroMessages.push(chalk.bold('Continue the setup:'), chalk.blue.underline('https://pgflow.dev/getting-started/create-first-flow/'));
- // Single outro for all paths
- outro(outroMessages.join('\n'));
- });
- };
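A minimal sketch (not part of the package) of the @clack/prompts group() pattern the removed install command above builds on: each step is a function, and later steps read earlier answers from results. Trimmed here to two steps instead of four.

// Hypothetical, self-contained ESM example; messages and keys are illustrative.
import { group, cancel, text, confirm } from '@clack/prompts';

const answers = await group(
  {
    supabasePath: () => text({ message: 'Where is your Supabase project located?' }),
    migrations: async ({ results: { supabasePath } }) =>
      supabasePath ? confirm({ message: `Copy pgflow migrations into ${supabasePath}?` }) : false,
  },
  {
    onCancel: () => {
      cancel('Installation cancelled');
      process.exit(1);
    },
  }
);
// answers.supabasePath and answers.migrations now hold the collected results.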
package/dist/commands/install/supabase-path-prompt.d.ts DELETED
@@ -1,4 +0,0 @@
- export declare function supabasePathPrompt(options?: {
- supabasePath?: string;
- }): Promise<string | symbol>;
- //# sourceMappingURL=supabase-path-prompt.d.ts.map
package/dist/commands/install/supabase-path-prompt.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"supabase-path-prompt.d.ts","sourceRoot":"","sources":["../../../src/commands/install/supabase-path-prompt.ts"],"names":[],"mappings":"AAIA,wBAAsB,kBAAkB,CAAC,OAAO,CAAC,EAAE;IAAE,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,4BA6C3E"}
package/dist/commands/install/supabase-path-prompt.js DELETED
@@ -1,49 +0,0 @@
- import fs from 'fs';
- import path from 'path';
- import { text, log } from '@clack/prompts';
- export async function supabasePathPrompt(options) {
- // If supabasePath is provided as an option and it's valid, use it directly without prompting
- if (options?.supabasePath) {
- const validationError = validate(options.supabasePath);
- if (validationError === undefined) {
- log.info(`Using Supabase project at: ${options.supabasePath}`);
- return options.supabasePath;
- }
- // If validation failed, log the error and continue to prompt
- log.warn(validationError);
- }
- // Try to detect the Supabase directory automatically
- const possiblePaths = ['./supabase', '../supabase', '../../supabase'];
- let detectedPath = '';
- for (const testPath of possiblePaths) {
- if (fs.existsSync(testPath) &&
- fs.existsSync(path.join(testPath, 'config.toml'))) {
- detectedPath = testPath;
- break;
- }
- }
- // Always prompt for detected paths - don't skip
- if (detectedPath) {
- log.info(`Found Supabase project at: ${detectedPath}`);
- }
- const promptMessage = 'Where is your Supabase project located?';
- const supabasePath = await text({
- message: promptMessage,
- placeholder: detectedPath || 'supabase/',
- initialValue: options?.supabasePath || detectedPath,
- validate,
- });
- if (!supabasePath) {
- throw new Error('User cancelled');
- }
- return supabasePath;
- }
- function validate(inputPath) {
- if (!fs.existsSync(inputPath)) {
- return `Directory not found: ${inputPath}`;
- }
- if (!fs.existsSync(path.join(inputPath, 'config.toml'))) {
- return `Not a valid Supabase project (missing config.toml)`;
- }
- return undefined;
- }