pgpm 2.9.1 → 2.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -0
- package/commands/dump.d.ts +4 -0
- package/commands/dump.js +166 -0
- package/commands/init/index.js +114 -65
- package/commands.js +2 -0
- package/esm/commands/dump.js +161 -0
- package/esm/commands/init/index.js +114 -65
- package/esm/commands.js +2 -0
- package/esm/index.js +1 -0
- package/esm/utils/display.js +1 -0
- package/index.d.ts +1 -0
- package/index.js +3 -1
- package/package.json +11 -10
- package/utils/display.d.ts +1 -1
- package/utils/display.js +1 -0
package/README.md
CHANGED
|
@@ -328,6 +328,28 @@ pgpm package --no-plan
|
|
|
328
328
|
|
|
329
329
|
### Utilities
|
|
330
330
|
|
|
331
|
+
#### `pgpm dump`
|
|
332
|
+
|
|
333
|
+
Dump a postgres database to a sql file.
|
|
334
|
+
|
|
335
|
+
```bash
|
|
336
|
+
# dump to default timestamped file
|
|
337
|
+
pgpm dump --database mydb
|
|
338
|
+
|
|
339
|
+
# interactive mode (prompts for database)
|
|
340
|
+
pgpm dump
|
|
341
|
+
|
|
342
|
+
# dump to specific file
|
|
343
|
+
pgpm dump --database mydb --out ./backup.sql
|
|
344
|
+
|
|
345
|
+
# dump from a specific working directory
|
|
346
|
+
pgpm dump --database mydb --cwd ./packages/my-module
|
|
347
|
+
|
|
348
|
+
# dump with pruning
|
|
349
|
+
# useful for creating test fixtures or development snapshots
|
|
350
|
+
pgpm dump --database mydb --database-id <uuid>
|
|
351
|
+
```
|
|
352
|
+
|
|
331
353
|
#### `pgpm export`
|
|
332
354
|
|
|
333
355
|
Export migrations from existing databases.
|
package/commands/dump.js
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const logger_1 = require("@pgpmjs/logger");
|
|
7
|
+
const pg_env_1 = require("pg-env");
|
|
8
|
+
const pg_cache_1 = require("pg-cache");
|
|
9
|
+
const child_process_1 = require("child_process");
|
|
10
|
+
const fs_1 = __importDefault(require("fs"));
|
|
11
|
+
const path_1 = __importDefault(require("path"));
|
|
12
|
+
const quote_utils_1 = require("pgsql-deparser/utils/quote-utils");
|
|
13
|
+
const utils_1 = require("../utils");
|
|
14
|
+
const log = new logger_1.Logger('dump');
|
|
15
|
+
const dumpUsageText = `
|
|
16
|
+
Dump Command:
|
|
17
|
+
|
|
18
|
+
pgpm dump [options]
|
|
19
|
+
|
|
20
|
+
Dump a postgres database to a sql file.
|
|
21
|
+
|
|
22
|
+
Options:
|
|
23
|
+
--help, -h Show this help message
|
|
24
|
+
--db, --database <name> Target postgres database name
|
|
25
|
+
--out <path> Output file path
|
|
26
|
+
--database-id <id> When set, the dump will include a prune step that keeps only this database_id after restore
|
|
27
|
+
--cwd <directory> Working directory (default: current directory)
|
|
28
|
+
|
|
29
|
+
Examples:
|
|
30
|
+
pgpm dump
|
|
31
|
+
pgpm dump --database mydb
|
|
32
|
+
pgpm dump --database mydb --out ./mydb.sql
|
|
33
|
+
pgpm dump --database mydb --database-id 00000000-0000-0000-0000-000000000000
|
|
34
|
+
`;
|
|
35
|
+
function nowStamp() {
    // Timestamp used in default dump filenames, e.g. "20240131-093045".
    const now = new Date();
    const two = (value) => String(value).padStart(2, '0');
    const datePart = `${now.getFullYear()}${two(now.getMonth() + 1)}${two(now.getDate())}`;
    const timePart = `${two(now.getHours())}${two(now.getMinutes())}${two(now.getSeconds())}`;
    return `${datePart}-${timePart}`;
}
|
|
40
|
+
async function runPgDump(args, env) {
    // Spawn pg_dump with inherited stdio and resolve only on a clean exit.
    await new Promise((resolve, reject) => {
        const proc = (0, child_process_1.spawn)('pg_dump', args, {
            env,
            stdio: 'inherit',
            shell: false
        });
        proc.on('error', (err) => {
            // ENOENT means the pg_dump binary itself could not be found.
            if (err.code === 'ENOENT') {
                log.error('pg_dump not found; ensure PostgreSQL client tools are installed and in PATH');
            }
            reject(err);
        });
        proc.on('close', (code) => {
            if (code !== 0) {
                reject(new Error(`pg_dump exited with code ${code ?? 1}`));
            }
            else {
                resolve();
            }
        });
    });
}
|
|
62
|
+
async function resolveDatabaseId(dbname, databaseIdRaw) {
    // Accept either a database id or a database name and return the matching
    // { id, name } pair; null when no row matches.
    const pool = (0, pg_cache_1.getPgPool)((0, pg_env_1.getPgEnvOptions)({ database: dbname }));
    const res = await pool.query(`select id, name from metaschema_public.database order by name`);
    // Try the id column first, then fall back to matching by name.
    const match = res.rows.find((r) => String(r.id) === databaseIdRaw)
        ?? res.rows.find((r) => String(r.name) === databaseIdRaw);
    if (!match) {
        return null;
    }
    return { id: String(match.id), name: String(match.name) };
}
|
|
73
|
+
async function buildPruneSql(dbname, databaseId) {
    // Build a SQL script (appended to the dump) that, after restore, deletes
    // every row whose database_id differs from `databaseId`.
    const pool = (0, pg_cache_1.getPgPool)((0, pg_env_1.getPgEnvOptions)({ database: dbname }));
    // Find every non-system base table that carries a database_id column.
    const tables = await pool.query(`
    select c.table_schema, c.table_name
    from information_schema.columns c
    join information_schema.tables t
      on t.table_schema = c.table_schema
     and t.table_name = c.table_name
    where c.column_name = 'database_id'
      and t.table_type = 'BASE TABLE'
      and c.table_schema not in ('pg_catalog', 'information_schema')
    order by c.table_schema, c.table_name
  `);
    // Use formatEString to safely escape the UUID/string literal.
    // Hoisted: the literal is loop-invariant (previously recomputed per table
    // and duplicated again after the loop).
    const dbIdLiteral = quote_utils_1.QuoteUtils.formatEString(databaseId);
    // RAISE treats its string as a format: '' escapes a quote, %% a percent.
    // Previously the raw value was interpolated, so a quote or % in it would
    // break the generated script.
    const noticeId = String(databaseId).replace(/'/g, "''").replace(/%/g, '%%');
    const lines = [];
    lines.push('');
    lines.push('-- pgpm dump prune');
    lines.push('-- this section keeps only one database_id after restore');
    lines.push('do $$ begin');
    lines.push(`  raise notice 'pruning data to database_id ${noticeId}';`);
    lines.push('end $$;');
    // Disable triggers/FK enforcement while pruning restored data.
    lines.push('set session_replication_role = replica;');
    for (const row of tables.rows) {
        const schema = String(row.table_schema);
        const table = String(row.table_name);
        // Use QuoteUtils for robust identifier quoting
        const qualified = quote_utils_1.QuoteUtils.quoteQualifiedIdentifier(schema, table);
        lines.push(`delete from ${qualified} where database_id <> ${dbIdLiteral};`);
    }
    // Handle metaschema_public.database deletion (keyed by id, not database_id)
    const metaschemaDb = quote_utils_1.QuoteUtils.quoteQualifiedIdentifier('metaschema_public', 'database');
    lines.push(`delete from ${metaschemaDb} where id <> ${dbIdLiteral};`);
    lines.push('set session_replication_role = origin;');
    lines.push('do $$ begin');
    lines.push(`  raise notice 'prune done';`);
    lines.push('end $$;');
    lines.push('');
    return lines.join('\n');
}
|
|
114
|
+
// Helper to retrieve argument from parsed argv or positional _ array
function getArg(argv, key) {
    // Prefer the parsed flag when it carries a truthy value.
    if (argv[key]) {
        return argv[key];
    }
    // Otherwise scan the positional args for a literal "--key value" pair.
    const positionals = argv._ || [];
    const flagIndex = positionals.indexOf(`--${key}`);
    const hasValue = flagIndex > -1 && flagIndex + 1 < positionals.length;
    return hasValue ? positionals[flagIndex + 1] : undefined;
}
|
|
125
|
+
exports.default = async (argv, prompter, _options) => {
|
|
126
|
+
if (argv.help || argv.h) {
|
|
127
|
+
console.log(dumpUsageText);
|
|
128
|
+
process.exit(0);
|
|
129
|
+
}
|
|
130
|
+
const cwd = argv.cwd || process.cwd();
|
|
131
|
+
const dbname = await (0, utils_1.getTargetDatabase)(argv, prompter, { message: 'Select database' });
|
|
132
|
+
const outPath = path_1.default.resolve(cwd, argv.out || `pgpm-dump-${dbname}-${nowStamp()}.sql`);
|
|
133
|
+
fs_1.default.mkdirSync(path_1.default.dirname(outPath), { recursive: true });
|
|
134
|
+
let databaseIdInfo = null;
|
|
135
|
+
const databaseIdRaw = getArg(argv, 'database-id');
|
|
136
|
+
if (databaseIdRaw) {
|
|
137
|
+
databaseIdInfo = await resolveDatabaseId(dbname, databaseIdRaw);
|
|
138
|
+
if (!databaseIdInfo) {
|
|
139
|
+
throw new Error(`unknown database-id ${databaseIdRaw}`);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
log.info(`dumping database ${dbname}`);
|
|
143
|
+
log.info(`writing to ${outPath}`);
|
|
144
|
+
if (databaseIdInfo) {
|
|
145
|
+
log.info(`database id ${databaseIdInfo.id}`);
|
|
146
|
+
}
|
|
147
|
+
const pgEnv = (0, pg_env_1.getPgEnvOptions)({ database: dbname });
|
|
148
|
+
const spawnEnv = (0, pg_env_1.getSpawnEnvWithPg)(pgEnv);
|
|
149
|
+
const args = [
|
|
150
|
+
'--format=plain',
|
|
151
|
+
'--no-owner',
|
|
152
|
+
'--no-privileges',
|
|
153
|
+
'--file',
|
|
154
|
+
outPath,
|
|
155
|
+
dbname
|
|
156
|
+
];
|
|
157
|
+
await runPgDump(args, spawnEnv);
|
|
158
|
+
if (databaseIdInfo) {
|
|
159
|
+
const pruneSql = await buildPruneSql(dbname, databaseIdInfo.id);
|
|
160
|
+
// Use writeFileSync with 'a' flag for explicit append as requested
|
|
161
|
+
fs_1.default.writeFileSync(outPath, pruneSql, { encoding: 'utf8', flag: 'a' });
|
|
162
|
+
log.info('added prune section to dump file');
|
|
163
|
+
}
|
|
164
|
+
log.success('dump complete');
|
|
165
|
+
return argv;
|
|
166
|
+
};
|
package/commands/init/index.js
CHANGED
|
@@ -7,6 +7,7 @@ exports.createInitUsageText = void 0;
|
|
|
7
7
|
const fs_1 = __importDefault(require("fs"));
|
|
8
8
|
const path_1 = __importDefault(require("path"));
|
|
9
9
|
const core_1 = require("@pgpmjs/core");
|
|
10
|
+
const env_1 = require("@pgpmjs/env");
|
|
10
11
|
const types_1 = require("@pgpmjs/types");
|
|
11
12
|
const inquirerer_1 = require("inquirerer");
|
|
12
13
|
const DEFAULT_MOTD = `
|
|
@@ -97,7 +98,7 @@ async function handleInit(argv, prompter) {
|
|
|
97
98
|
cwd,
|
|
98
99
|
});
|
|
99
100
|
}
|
|
100
|
-
// Default to module init (for 'module' type or unknown types)
|
|
101
|
+
// Default to module init (for 'module' type, 'generic' type, or unknown types)
|
|
101
102
|
return handleModuleInit(argv, prompter, {
|
|
102
103
|
fromPath,
|
|
103
104
|
templateRepo,
|
|
@@ -105,6 +106,7 @@ async function handleInit(argv, prompter) {
|
|
|
105
106
|
dir,
|
|
106
107
|
noTty,
|
|
107
108
|
cwd,
|
|
109
|
+
requiresWorkspace: inspection.config?.requiresWorkspace,
|
|
108
110
|
}, wasExplicitModuleRequest);
|
|
109
111
|
}
|
|
110
112
|
async function handleBoilerplateInit(argv, prompter, ctx) {
|
|
@@ -172,7 +174,7 @@ async function handleBoilerplateInit(argv, prompter, ctx) {
|
|
|
172
174
|
cwd: ctx.cwd,
|
|
173
175
|
});
|
|
174
176
|
}
|
|
175
|
-
// Default to module init (for 'module' type or unknown types)
|
|
177
|
+
// Default to module init (for 'module' type, 'generic' type, or unknown types)
|
|
176
178
|
// When using --boilerplate, user made an explicit choice, so treat as explicit request
|
|
177
179
|
return handleModuleInit(argv, prompter, {
|
|
178
180
|
fromPath,
|
|
@@ -181,6 +183,7 @@ async function handleBoilerplateInit(argv, prompter, ctx) {
|
|
|
181
183
|
dir: ctx.dir,
|
|
182
184
|
noTty: ctx.noTty,
|
|
183
185
|
cwd: ctx.cwd,
|
|
186
|
+
requiresWorkspace: inspection.config?.requiresWorkspace,
|
|
184
187
|
}, true);
|
|
185
188
|
}
|
|
186
189
|
async function handleWorkspaceInit(argv, prompter, ctx) {
|
|
@@ -233,49 +236,67 @@ async function handleWorkspaceInit(argv, prompter, ctx) {
|
|
|
233
236
|
return { ...argv, ...answers, cwd: targetPath };
|
|
234
237
|
}
|
|
235
238
|
async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest = false) {
|
|
239
|
+
// Determine workspace requirement (defaults to 'pgpm' for backward compatibility)
|
|
240
|
+
const workspaceType = ctx.requiresWorkspace ?? 'pgpm';
|
|
241
|
+
// Whether this is a pgpm-managed template (creates pgpm.plan, .control files)
|
|
242
|
+
const isPgpmTemplate = workspaceType === 'pgpm';
|
|
236
243
|
const project = new core_1.PgpmPackage(ctx.cwd);
|
|
237
|
-
if
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
if (
|
|
242
|
-
|
|
243
|
-
|
|
244
|
+
// Check workspace requirement based on type (skip if workspaceType is false)
|
|
245
|
+
if (workspaceType !== false) {
|
|
246
|
+
let workspacePath;
|
|
247
|
+
let workspaceTypeName = '';
|
|
248
|
+
if (workspaceType === 'pgpm') {
|
|
249
|
+
workspacePath = project.workspacePath;
|
|
250
|
+
workspaceTypeName = 'PGPM';
|
|
244
251
|
}
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
252
|
+
else {
|
|
253
|
+
workspacePath = (0, env_1.resolveWorkspaceByType)(ctx.cwd, workspaceType);
|
|
254
|
+
workspaceTypeName = workspaceType.toUpperCase();
|
|
255
|
+
}
|
|
256
|
+
if (!workspacePath) {
|
|
257
|
+
const noTty = Boolean(argv.noTty || argv['no-tty'] || process.env.CI === 'true');
|
|
258
|
+
// If user explicitly requested module init or we're in non-interactive mode,
|
|
259
|
+
// just show the error with helpful guidance
|
|
260
|
+
if (wasExplicitModuleRequest || noTty) {
|
|
261
|
+
process.stderr.write(`Not inside a ${workspaceTypeName} workspace.\n`);
|
|
262
|
+
throw types_1.errors.NOT_IN_WORKSPACE({});
|
|
263
|
+
}
|
|
264
|
+
// Only offer to create a workspace for pgpm templates
|
|
265
|
+
if (workspaceType === 'pgpm') {
|
|
266
|
+
const recoveryQuestion = [
|
|
267
|
+
{
|
|
268
|
+
name: 'workspace',
|
|
269
|
+
alias: 'w',
|
|
270
|
+
message: `You are not inside a ${workspaceTypeName} workspace. Would you like to create a new workspace instead?`,
|
|
271
|
+
type: 'confirm',
|
|
272
|
+
required: true,
|
|
273
|
+
},
|
|
274
|
+
];
|
|
275
|
+
const { workspace } = await prompter.prompt(argv, recoveryQuestion);
|
|
276
|
+
if (workspace) {
|
|
277
|
+
return handleWorkspaceInit(argv, prompter, {
|
|
278
|
+
fromPath: 'workspace',
|
|
279
|
+
templateRepo: ctx.templateRepo,
|
|
280
|
+
branch: ctx.branch,
|
|
281
|
+
dir: ctx.dir,
|
|
282
|
+
noTty: ctx.noTty,
|
|
283
|
+
cwd: ctx.cwd,
|
|
284
|
+
});
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
// User declined or non-pgpm workspace type, show the error
|
|
288
|
+
process.stderr.write(`Not inside a ${workspaceTypeName} workspace.\n`);
|
|
289
|
+
throw types_1.errors.NOT_IN_WORKSPACE({});
|
|
265
290
|
}
|
|
266
|
-
// User declined, show the error
|
|
267
|
-
process.stderr.write('Not inside a PGPM workspace.\n');
|
|
268
|
-
throw types_1.errors.NOT_IN_WORKSPACE({});
|
|
269
291
|
}
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
292
|
+
// Only check workspace directory constraints if we're in a workspace
|
|
293
|
+
if (project.workspacePath) {
|
|
294
|
+
if (!project.isInsideAllowedDirs(ctx.cwd) && !project.isInWorkspace() && !project.isParentOfAllowedDirs(ctx.cwd)) {
|
|
295
|
+
process.stderr.write('You must be inside the workspace root or a parent directory of modules (like packages/).\n');
|
|
296
|
+
throw types_1.errors.NOT_IN_WORKSPACE_MODULE({});
|
|
297
|
+
}
|
|
273
298
|
}
|
|
274
|
-
|
|
275
|
-
// Note: moduleName is needed here before scaffolding because initModule creates
|
|
276
|
-
// the directory first, then scaffolds. The boilerplate's ____moduleName____ question
|
|
277
|
-
// gets skipped because the answer is already passed through. So users only see it
|
|
278
|
-
// once, but the definition exists in two places for this architectural reason.
|
|
299
|
+
// Build questions based on whether this is a pgpm template
|
|
279
300
|
const moduleQuestions = [
|
|
280
301
|
{
|
|
281
302
|
name: 'moduleName',
|
|
@@ -283,7 +304,11 @@ async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest =
|
|
|
283
304
|
required: true,
|
|
284
305
|
type: 'text',
|
|
285
306
|
},
|
|
286
|
-
|
|
307
|
+
];
|
|
308
|
+
// Only ask for extensions if this is a pgpm template
|
|
309
|
+
if (isPgpmTemplate && project.workspacePath) {
|
|
310
|
+
const availExtensions = await project.getAvailableModules();
|
|
311
|
+
moduleQuestions.push({
|
|
287
312
|
name: 'extensions',
|
|
288
313
|
message: 'Which extensions?',
|
|
289
314
|
options: availExtensions,
|
|
@@ -291,36 +316,61 @@ async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest =
|
|
|
291
316
|
allowCustomOptions: true,
|
|
292
317
|
required: true,
|
|
293
318
|
default: ['plpgsql', 'uuid-ossp'],
|
|
294
|
-
}
|
|
295
|
-
|
|
319
|
+
});
|
|
320
|
+
}
|
|
296
321
|
const answers = await prompter.prompt(argv, moduleQuestions);
|
|
297
322
|
const modName = (0, core_1.sluggify)(answers.moduleName);
|
|
298
|
-
const extensions = answers.extensions
|
|
299
|
-
|
|
300
|
-
|
|
323
|
+
const extensions = isPgpmTemplate && answers.extensions
|
|
324
|
+
? answers.extensions
|
|
325
|
+
.filter((opt) => opt.selected)
|
|
326
|
+
.map((opt) => opt.name)
|
|
327
|
+
: [];
|
|
301
328
|
const templateAnswers = {
|
|
302
329
|
...argv,
|
|
303
330
|
...answers,
|
|
304
331
|
moduleName: modName,
|
|
305
332
|
packageIdentifier: argv.packageIdentifier || modName
|
|
306
333
|
};
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
334
|
+
// Determine output path based on whether we're in a workspace
|
|
335
|
+
let modulePath;
|
|
336
|
+
if (project.workspacePath) {
|
|
337
|
+
// Use workspace-aware initModule
|
|
338
|
+
await project.initModule({
|
|
339
|
+
name: modName,
|
|
340
|
+
description: answers.description || modName,
|
|
341
|
+
author: answers.author || modName,
|
|
342
|
+
extensions,
|
|
343
|
+
templateRepo: ctx.templateRepo,
|
|
344
|
+
templatePath: ctx.fromPath,
|
|
345
|
+
branch: ctx.branch,
|
|
346
|
+
dir: ctx.dir,
|
|
347
|
+
toolName: core_1.DEFAULT_TEMPLATE_TOOL_NAME,
|
|
348
|
+
answers: templateAnswers,
|
|
349
|
+
noTty: ctx.noTty,
|
|
350
|
+
pgpm: isPgpmTemplate,
|
|
351
|
+
});
|
|
352
|
+
const isRoot = path_1.default.resolve(project.workspacePath) === path_1.default.resolve(ctx.cwd);
|
|
353
|
+
modulePath = isRoot
|
|
354
|
+
? path_1.default.join(ctx.cwd, 'packages', modName)
|
|
355
|
+
: path_1.default.join(ctx.cwd, modName);
|
|
356
|
+
}
|
|
357
|
+
else {
|
|
358
|
+
// Not in a workspace - scaffold directly to current directory
|
|
359
|
+
modulePath = path_1.default.join(ctx.cwd, modName);
|
|
360
|
+
fs_1.default.mkdirSync(modulePath, { recursive: true });
|
|
361
|
+
await (0, core_1.scaffoldTemplate)({
|
|
362
|
+
fromPath: ctx.fromPath,
|
|
363
|
+
outputDir: modulePath,
|
|
364
|
+
templateRepo: ctx.templateRepo,
|
|
365
|
+
branch: ctx.branch,
|
|
366
|
+
dir: ctx.dir,
|
|
367
|
+
answers: templateAnswers,
|
|
368
|
+
noTty: ctx.noTty,
|
|
369
|
+
toolName: core_1.DEFAULT_TEMPLATE_TOOL_NAME,
|
|
370
|
+
cwd: ctx.cwd,
|
|
371
|
+
prompter
|
|
372
|
+
});
|
|
373
|
+
}
|
|
324
374
|
const motdPath = path_1.default.join(modulePath, '.motd');
|
|
325
375
|
let motd = DEFAULT_MOTD;
|
|
326
376
|
if (fs_1.default.existsSync(motdPath)) {
|
|
@@ -336,7 +386,6 @@ async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest =
|
|
|
336
386
|
if (!motd.endsWith('\n')) {
|
|
337
387
|
process.stdout.write('\n');
|
|
338
388
|
}
|
|
339
|
-
|
|
340
|
-
process.stdout.write(`\n✨ Enjoy!\n\ncd ./${relPath}\n`);
|
|
389
|
+
process.stdout.write(`\n✨ Enjoy!\n\ncd ./${modName}\n`);
|
|
341
390
|
return { ...argv, ...answers };
|
|
342
391
|
}
|
package/commands.js
CHANGED
|
@@ -14,6 +14,7 @@ const cache_1 = __importDefault(require("./commands/cache"));
|
|
|
14
14
|
const clear_1 = __importDefault(require("./commands/clear"));
|
|
15
15
|
const deploy_1 = __importDefault(require("./commands/deploy"));
|
|
16
16
|
const docker_1 = __importDefault(require("./commands/docker"));
|
|
17
|
+
const dump_1 = __importDefault(require("./commands/dump"));
|
|
17
18
|
const env_1 = __importDefault(require("./commands/env"));
|
|
18
19
|
const export_1 = __importDefault(require("./commands/export"));
|
|
19
20
|
const extension_1 = __importDefault(require("./commands/extension"));
|
|
@@ -50,6 +51,7 @@ const createPgpmCommandMap = (skipPgTeardown = false) => {
|
|
|
50
51
|
clear: pgt(clear_1.default),
|
|
51
52
|
deploy: pgt(deploy_1.default),
|
|
52
53
|
docker: docker_1.default,
|
|
54
|
+
dump: pgt(dump_1.default),
|
|
53
55
|
env: env_1.default,
|
|
54
56
|
verify: pgt(verify_1.default),
|
|
55
57
|
revert: pgt(revert_1.default),
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import { Logger } from '@pgpmjs/logger';
|
|
2
|
+
import { getPgEnvOptions, getSpawnEnvWithPg } from 'pg-env';
|
|
3
|
+
import { getPgPool } from 'pg-cache';
|
|
4
|
+
import { spawn } from 'child_process';
|
|
5
|
+
import fs from 'fs';
|
|
6
|
+
import path from 'path';
|
|
7
|
+
import { QuoteUtils } from 'pgsql-deparser/utils/quote-utils';
|
|
8
|
+
import { getTargetDatabase } from '../utils';
|
|
9
|
+
const log = new Logger('dump');
|
|
10
|
+
const dumpUsageText = `
|
|
11
|
+
Dump Command:
|
|
12
|
+
|
|
13
|
+
pgpm dump [options]
|
|
14
|
+
|
|
15
|
+
Dump a postgres database to a sql file.
|
|
16
|
+
|
|
17
|
+
Options:
|
|
18
|
+
--help, -h Show this help message
|
|
19
|
+
--db, --database <name> Target postgres database name
|
|
20
|
+
--out <path> Output file path
|
|
21
|
+
--database-id <id> When set, the dump will include a prune step that keeps only this database_id after restore
|
|
22
|
+
--cwd <directory> Working directory (default: current directory)
|
|
23
|
+
|
|
24
|
+
Examples:
|
|
25
|
+
pgpm dump
|
|
26
|
+
pgpm dump --database mydb
|
|
27
|
+
pgpm dump --database mydb --out ./mydb.sql
|
|
28
|
+
pgpm dump --database mydb --database-id 00000000-0000-0000-0000-000000000000
|
|
29
|
+
`;
|
|
30
|
+
function nowStamp() {
    // Timestamp used in default dump filenames, e.g. "20240131-093045".
    const now = new Date();
    const two = (value) => String(value).padStart(2, '0');
    const datePart = `${now.getFullYear()}${two(now.getMonth() + 1)}${two(now.getDate())}`;
    const timePart = `${two(now.getHours())}${two(now.getMinutes())}${two(now.getSeconds())}`;
    return `${datePart}-${timePart}`;
}
|
|
35
|
+
async function runPgDump(args, env) {
    // Spawn pg_dump with inherited stdio and resolve only on a clean exit.
    await new Promise((resolve, reject) => {
        const proc = spawn('pg_dump', args, {
            env,
            stdio: 'inherit',
            shell: false
        });
        proc.on('error', (err) => {
            // ENOENT means the pg_dump binary itself could not be found.
            if (err.code === 'ENOENT') {
                log.error('pg_dump not found; ensure PostgreSQL client tools are installed and in PATH');
            }
            reject(err);
        });
        proc.on('close', (code) => {
            if (code !== 0) {
                reject(new Error(`pg_dump exited with code ${code ?? 1}`));
            }
            else {
                resolve();
            }
        });
    });
}
|
|
57
|
+
async function resolveDatabaseId(dbname, databaseIdRaw) {
    // Accept either a database id or a database name and return the matching
    // { id, name } pair; null when no row matches.
    const pool = getPgPool(getPgEnvOptions({ database: dbname }));
    const res = await pool.query(`select id, name from metaschema_public.database order by name`);
    // Try the id column first, then fall back to matching by name.
    const match = res.rows.find((r) => String(r.id) === databaseIdRaw)
        ?? res.rows.find((r) => String(r.name) === databaseIdRaw);
    if (!match) {
        return null;
    }
    return { id: String(match.id), name: String(match.name) };
}
|
|
68
|
+
async function buildPruneSql(dbname, databaseId) {
    // Build a SQL script (appended to the dump) that, after restore, deletes
    // every row whose database_id differs from `databaseId`.
    const pool = getPgPool(getPgEnvOptions({ database: dbname }));
    // Find every non-system base table that carries a database_id column.
    const tables = await pool.query(`
    select c.table_schema, c.table_name
    from information_schema.columns c
    join information_schema.tables t
      on t.table_schema = c.table_schema
     and t.table_name = c.table_name
    where c.column_name = 'database_id'
      and t.table_type = 'BASE TABLE'
      and c.table_schema not in ('pg_catalog', 'information_schema')
    order by c.table_schema, c.table_name
  `);
    // Use formatEString to safely escape the UUID/string literal.
    // Hoisted: the literal is loop-invariant (previously recomputed per table
    // and duplicated again after the loop).
    const dbIdLiteral = QuoteUtils.formatEString(databaseId);
    // RAISE treats its string as a format: '' escapes a quote, %% a percent.
    // Previously the raw value was interpolated, so a quote or % in it would
    // break the generated script.
    const noticeId = String(databaseId).replace(/'/g, "''").replace(/%/g, '%%');
    const lines = [];
    lines.push('');
    lines.push('-- pgpm dump prune');
    lines.push('-- this section keeps only one database_id after restore');
    lines.push('do $$ begin');
    lines.push(`  raise notice 'pruning data to database_id ${noticeId}';`);
    lines.push('end $$;');
    // Disable triggers/FK enforcement while pruning restored data.
    lines.push('set session_replication_role = replica;');
    for (const row of tables.rows) {
        const schema = String(row.table_schema);
        const table = String(row.table_name);
        // Use QuoteUtils for robust identifier quoting
        const qualified = QuoteUtils.quoteQualifiedIdentifier(schema, table);
        lines.push(`delete from ${qualified} where database_id <> ${dbIdLiteral};`);
    }
    // Handle metaschema_public.database deletion (keyed by id, not database_id)
    const metaschemaDb = QuoteUtils.quoteQualifiedIdentifier('metaschema_public', 'database');
    lines.push(`delete from ${metaschemaDb} where id <> ${dbIdLiteral};`);
    lines.push('set session_replication_role = origin;');
    lines.push('do $$ begin');
    lines.push(`  raise notice 'prune done';`);
    lines.push('end $$;');
    lines.push('');
    return lines.join('\n');
}
|
|
109
|
+
// Helper to retrieve argument from parsed argv or positional _ array
function getArg(argv, key) {
    // Prefer the parsed flag when it carries a truthy value.
    if (argv[key]) {
        return argv[key];
    }
    // Otherwise scan the positional args for a literal "--key value" pair.
    const positionals = argv._ || [];
    const flagIndex = positionals.indexOf(`--${key}`);
    const hasValue = flagIndex > -1 && flagIndex + 1 < positionals.length;
    return hasValue ? positionals[flagIndex + 1] : undefined;
}
|
|
120
|
+
export default async (argv, prompter, _options) => {
    // `pgpm dump`: run pg_dump into a sql file, optionally appending a prune
    // script that keeps a single database_id after restore.
    if (argv.help || argv.h) {
        console.log(dumpUsageText);
        process.exit(0);
    }
    const workingDir = argv.cwd || process.cwd();
    const dbname = await getTargetDatabase(argv, prompter, { message: 'Select database' });
    const defaultFile = `pgpm-dump-${dbname}-${nowStamp()}.sql`;
    const outPath = path.resolve(workingDir, argv.out || defaultFile);
    fs.mkdirSync(path.dirname(outPath), { recursive: true });
    // Resolve --database-id (accepts id or name) before dumping so bad input fails fast.
    const databaseIdRaw = getArg(argv, 'database-id');
    let databaseIdInfo = null;
    if (databaseIdRaw) {
        databaseIdInfo = await resolveDatabaseId(dbname, databaseIdRaw);
        if (!databaseIdInfo) {
            throw new Error(`unknown database-id ${databaseIdRaw}`);
        }
    }
    log.info(`dumping database ${dbname}`);
    log.info(`writing to ${outPath}`);
    if (databaseIdInfo) {
        log.info(`database id ${databaseIdInfo.id}`);
    }
    const pgEnv = getPgEnvOptions({ database: dbname });
    const spawnEnv = getSpawnEnvWithPg(pgEnv);
    const dumpArgs = [
        '--format=plain',
        '--no-owner',
        '--no-privileges',
        '--file',
        outPath,
        dbname
    ];
    await runPgDump(dumpArgs, spawnEnv);
    if (databaseIdInfo) {
        const pruneSql = await buildPruneSql(dbname, databaseIdInfo.id);
        // Use writeFileSync with 'a' flag for explicit append as requested
        fs.writeFileSync(outPath, pruneSql, { encoding: 'utf8', flag: 'a' });
        log.info('added prune section to dump file');
    }
    log.success('dump complete');
    return argv;
};
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import fs from 'fs';
|
|
2
2
|
import path from 'path';
|
|
3
3
|
import { DEFAULT_TEMPLATE_REPO, DEFAULT_TEMPLATE_TOOL_NAME, inspectTemplate, PgpmPackage, resolveBoilerplateBaseDir, scaffoldTemplate, scanBoilerplates, sluggify, } from '@pgpmjs/core';
|
|
4
|
+
import { resolveWorkspaceByType } from '@pgpmjs/env';
|
|
4
5
|
import { errors } from '@pgpmjs/types';
|
|
5
6
|
import { registerDefaultResolver } from 'inquirerer';
|
|
6
7
|
const DEFAULT_MOTD = `
|
|
@@ -90,7 +91,7 @@ async function handleInit(argv, prompter) {
|
|
|
90
91
|
cwd,
|
|
91
92
|
});
|
|
92
93
|
}
|
|
93
|
-
// Default to module init (for 'module' type or unknown types)
|
|
94
|
+
// Default to module init (for 'module' type, 'generic' type, or unknown types)
|
|
94
95
|
return handleModuleInit(argv, prompter, {
|
|
95
96
|
fromPath,
|
|
96
97
|
templateRepo,
|
|
@@ -98,6 +99,7 @@ async function handleInit(argv, prompter) {
|
|
|
98
99
|
dir,
|
|
99
100
|
noTty,
|
|
100
101
|
cwd,
|
|
102
|
+
requiresWorkspace: inspection.config?.requiresWorkspace,
|
|
101
103
|
}, wasExplicitModuleRequest);
|
|
102
104
|
}
|
|
103
105
|
async function handleBoilerplateInit(argv, prompter, ctx) {
|
|
@@ -165,7 +167,7 @@ async function handleBoilerplateInit(argv, prompter, ctx) {
|
|
|
165
167
|
cwd: ctx.cwd,
|
|
166
168
|
});
|
|
167
169
|
}
|
|
168
|
-
// Default to module init (for 'module' type or unknown types)
|
|
170
|
+
// Default to module init (for 'module' type, 'generic' type, or unknown types)
|
|
169
171
|
// When using --boilerplate, user made an explicit choice, so treat as explicit request
|
|
170
172
|
return handleModuleInit(argv, prompter, {
|
|
171
173
|
fromPath,
|
|
@@ -174,6 +176,7 @@ async function handleBoilerplateInit(argv, prompter, ctx) {
|
|
|
174
176
|
dir: ctx.dir,
|
|
175
177
|
noTty: ctx.noTty,
|
|
176
178
|
cwd: ctx.cwd,
|
|
179
|
+
requiresWorkspace: inspection.config?.requiresWorkspace,
|
|
177
180
|
}, true);
|
|
178
181
|
}
|
|
179
182
|
async function handleWorkspaceInit(argv, prompter, ctx) {
|
|
@@ -226,49 +229,67 @@ async function handleWorkspaceInit(argv, prompter, ctx) {
|
|
|
226
229
|
return { ...argv, ...answers, cwd: targetPath };
|
|
227
230
|
}
|
|
228
231
|
async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest = false) {
|
|
232
|
+
// Determine workspace requirement (defaults to 'pgpm' for backward compatibility)
|
|
233
|
+
const workspaceType = ctx.requiresWorkspace ?? 'pgpm';
|
|
234
|
+
// Whether this is a pgpm-managed template (creates pgpm.plan, .control files)
|
|
235
|
+
const isPgpmTemplate = workspaceType === 'pgpm';
|
|
229
236
|
const project = new PgpmPackage(ctx.cwd);
|
|
230
|
-
if
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
if (
|
|
235
|
-
|
|
236
|
-
|
|
237
|
+
// Check workspace requirement based on type (skip if workspaceType is false)
|
|
238
|
+
if (workspaceType !== false) {
|
|
239
|
+
let workspacePath;
|
|
240
|
+
let workspaceTypeName = '';
|
|
241
|
+
if (workspaceType === 'pgpm') {
|
|
242
|
+
workspacePath = project.workspacePath;
|
|
243
|
+
workspaceTypeName = 'PGPM';
|
|
237
244
|
}
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
245
|
+
else {
|
|
246
|
+
workspacePath = resolveWorkspaceByType(ctx.cwd, workspaceType);
|
|
247
|
+
workspaceTypeName = workspaceType.toUpperCase();
|
|
248
|
+
}
|
|
249
|
+
if (!workspacePath) {
|
|
250
|
+
const noTty = Boolean(argv.noTty || argv['no-tty'] || process.env.CI === 'true');
|
|
251
|
+
// If user explicitly requested module init or we're in non-interactive mode,
|
|
252
|
+
// just show the error with helpful guidance
|
|
253
|
+
if (wasExplicitModuleRequest || noTty) {
|
|
254
|
+
process.stderr.write(`Not inside a ${workspaceTypeName} workspace.\n`);
|
|
255
|
+
throw errors.NOT_IN_WORKSPACE({});
|
|
256
|
+
}
|
|
257
|
+
// Only offer to create a workspace for pgpm templates
|
|
258
|
+
if (workspaceType === 'pgpm') {
|
|
259
|
+
const recoveryQuestion = [
|
|
260
|
+
{
|
|
261
|
+
name: 'workspace',
|
|
262
|
+
alias: 'w',
|
|
263
|
+
message: `You are not inside a ${workspaceTypeName} workspace. Would you like to create a new workspace instead?`,
|
|
264
|
+
type: 'confirm',
|
|
265
|
+
required: true,
|
|
266
|
+
},
|
|
267
|
+
];
|
|
268
|
+
const { workspace } = await prompter.prompt(argv, recoveryQuestion);
|
|
269
|
+
if (workspace) {
|
|
270
|
+
return handleWorkspaceInit(argv, prompter, {
|
|
271
|
+
fromPath: 'workspace',
|
|
272
|
+
templateRepo: ctx.templateRepo,
|
|
273
|
+
branch: ctx.branch,
|
|
274
|
+
dir: ctx.dir,
|
|
275
|
+
noTty: ctx.noTty,
|
|
276
|
+
cwd: ctx.cwd,
|
|
277
|
+
});
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
// User declined or non-pgpm workspace type, show the error
|
|
281
|
+
process.stderr.write(`Not inside a ${workspaceTypeName} workspace.\n`);
|
|
282
|
+
throw errors.NOT_IN_WORKSPACE({});
|
|
258
283
|
}
|
|
259
|
-
// User declined, show the error
|
|
260
|
-
process.stderr.write('Not inside a PGPM workspace.\n');
|
|
261
|
-
throw errors.NOT_IN_WORKSPACE({});
|
|
262
284
|
}
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
285
|
+
// Only check workspace directory constraints if we're in a workspace
|
|
286
|
+
if (project.workspacePath) {
|
|
287
|
+
if (!project.isInsideAllowedDirs(ctx.cwd) && !project.isInWorkspace() && !project.isParentOfAllowedDirs(ctx.cwd)) {
|
|
288
|
+
process.stderr.write('You must be inside the workspace root or a parent directory of modules (like packages/).\n');
|
|
289
|
+
throw errors.NOT_IN_WORKSPACE_MODULE({});
|
|
290
|
+
}
|
|
266
291
|
}
|
|
267
|
-
|
|
268
|
-
// Note: moduleName is needed here before scaffolding because initModule creates
|
|
269
|
-
// the directory first, then scaffolds. The boilerplate's ____moduleName____ question
|
|
270
|
-
// gets skipped because the answer is already passed through. So users only see it
|
|
271
|
-
// once, but the definition exists in two places for this architectural reason.
|
|
292
|
+
// Build questions based on whether this is a pgpm template
|
|
272
293
|
const moduleQuestions = [
|
|
273
294
|
{
|
|
274
295
|
name: 'moduleName',
|
|
@@ -276,7 +297,11 @@ async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest =
|
|
|
276
297
|
required: true,
|
|
277
298
|
type: 'text',
|
|
278
299
|
},
|
|
279
|
-
|
|
300
|
+
];
|
|
301
|
+
// Only ask for extensions if this is a pgpm template
|
|
302
|
+
if (isPgpmTemplate && project.workspacePath) {
|
|
303
|
+
const availExtensions = await project.getAvailableModules();
|
|
304
|
+
moduleQuestions.push({
|
|
280
305
|
name: 'extensions',
|
|
281
306
|
message: 'Which extensions?',
|
|
282
307
|
options: availExtensions,
|
|
@@ -284,36 +309,61 @@ async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest =
|
|
|
284
309
|
allowCustomOptions: true,
|
|
285
310
|
required: true,
|
|
286
311
|
default: ['plpgsql', 'uuid-ossp'],
|
|
287
|
-
}
|
|
288
|
-
|
|
312
|
+
});
|
|
313
|
+
}
|
|
289
314
|
const answers = await prompter.prompt(argv, moduleQuestions);
|
|
290
315
|
const modName = sluggify(answers.moduleName);
|
|
291
|
-
const extensions = answers.extensions
|
|
292
|
-
|
|
293
|
-
|
|
316
|
+
const extensions = isPgpmTemplate && answers.extensions
|
|
317
|
+
? answers.extensions
|
|
318
|
+
.filter((opt) => opt.selected)
|
|
319
|
+
.map((opt) => opt.name)
|
|
320
|
+
: [];
|
|
294
321
|
const templateAnswers = {
|
|
295
322
|
...argv,
|
|
296
323
|
...answers,
|
|
297
324
|
moduleName: modName,
|
|
298
325
|
packageIdentifier: argv.packageIdentifier || modName
|
|
299
326
|
};
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
327
|
+
// Determine output path based on whether we're in a workspace
|
|
328
|
+
let modulePath;
|
|
329
|
+
if (project.workspacePath) {
|
|
330
|
+
// Use workspace-aware initModule
|
|
331
|
+
await project.initModule({
|
|
332
|
+
name: modName,
|
|
333
|
+
description: answers.description || modName,
|
|
334
|
+
author: answers.author || modName,
|
|
335
|
+
extensions,
|
|
336
|
+
templateRepo: ctx.templateRepo,
|
|
337
|
+
templatePath: ctx.fromPath,
|
|
338
|
+
branch: ctx.branch,
|
|
339
|
+
dir: ctx.dir,
|
|
340
|
+
toolName: DEFAULT_TEMPLATE_TOOL_NAME,
|
|
341
|
+
answers: templateAnswers,
|
|
342
|
+
noTty: ctx.noTty,
|
|
343
|
+
pgpm: isPgpmTemplate,
|
|
344
|
+
});
|
|
345
|
+
const isRoot = path.resolve(project.workspacePath) === path.resolve(ctx.cwd);
|
|
346
|
+
modulePath = isRoot
|
|
347
|
+
? path.join(ctx.cwd, 'packages', modName)
|
|
348
|
+
: path.join(ctx.cwd, modName);
|
|
349
|
+
}
|
|
350
|
+
else {
|
|
351
|
+
// Not in a workspace - scaffold directly to current directory
|
|
352
|
+
modulePath = path.join(ctx.cwd, modName);
|
|
353
|
+
fs.mkdirSync(modulePath, { recursive: true });
|
|
354
|
+
await scaffoldTemplate({
|
|
355
|
+
fromPath: ctx.fromPath,
|
|
356
|
+
outputDir: modulePath,
|
|
357
|
+
templateRepo: ctx.templateRepo,
|
|
358
|
+
branch: ctx.branch,
|
|
359
|
+
dir: ctx.dir,
|
|
360
|
+
answers: templateAnswers,
|
|
361
|
+
noTty: ctx.noTty,
|
|
362
|
+
toolName: DEFAULT_TEMPLATE_TOOL_NAME,
|
|
363
|
+
cwd: ctx.cwd,
|
|
364
|
+
prompter
|
|
365
|
+
});
|
|
366
|
+
}
|
|
317
367
|
const motdPath = path.join(modulePath, '.motd');
|
|
318
368
|
let motd = DEFAULT_MOTD;
|
|
319
369
|
if (fs.existsSync(motdPath)) {
|
|
@@ -329,7 +379,6 @@ async function handleModuleInit(argv, prompter, ctx, wasExplicitModuleRequest =
|
|
|
329
379
|
if (!motd.endsWith('\n')) {
|
|
330
380
|
process.stdout.write('\n');
|
|
331
381
|
}
|
|
332
|
-
|
|
333
|
-
process.stdout.write(`\n✨ Enjoy!\n\ncd ./${relPath}\n`);
|
|
382
|
+
process.stdout.write(`\n✨ Enjoy!\n\ncd ./${modName}\n`);
|
|
334
383
|
return { ...argv, ...answers };
|
|
335
384
|
}
|
package/esm/commands.js
CHANGED
|
@@ -8,6 +8,7 @@ import cache from './commands/cache';
|
|
|
8
8
|
import clear from './commands/clear';
|
|
9
9
|
import deploy from './commands/deploy';
|
|
10
10
|
import docker from './commands/docker';
|
|
11
|
+
import dump from './commands/dump';
|
|
11
12
|
import env from './commands/env';
|
|
12
13
|
import _export from './commands/export';
|
|
13
14
|
import extension from './commands/extension';
|
|
@@ -44,6 +45,7 @@ export const createPgpmCommandMap = (skipPgTeardown = false) => {
|
|
|
44
45
|
clear: pgt(clear),
|
|
45
46
|
deploy: pgt(deploy),
|
|
46
47
|
docker,
|
|
48
|
+
dump: pgt(dump),
|
|
47
49
|
env,
|
|
48
50
|
verify: pgt(verify),
|
|
49
51
|
revert: pgt(revert),
|
package/esm/index.js
CHANGED
|
@@ -10,6 +10,7 @@ export { default as analyze } from './commands/analyze';
|
|
|
10
10
|
export { default as clear } from './commands/clear';
|
|
11
11
|
export { default as deploy } from './commands/deploy';
|
|
12
12
|
export { default as docker } from './commands/docker';
|
|
13
|
+
export { default as dump } from './commands/dump';
|
|
13
14
|
export { default as env } from './commands/env';
|
|
14
15
|
export { default as _export } from './commands/export';
|
|
15
16
|
export { default as extension } from './commands/extension';
|
package/esm/utils/display.js
CHANGED
|
@@ -18,6 +18,7 @@ export const usageText = `
|
|
|
18
18
|
upgrade Upgrade installed pgpm modules to latest versions (alias: up)
|
|
19
19
|
|
|
20
20
|
Database Administration:
|
|
21
|
+
dump Dump a database to a sql file
|
|
21
22
|
kill Terminate database connections and optionally drop databases
|
|
22
23
|
install Install database modules
|
|
23
24
|
tag Add tags to changes for versioning
|
package/index.d.ts
CHANGED
|
@@ -9,6 +9,7 @@ export { default as analyze } from './commands/analyze';
|
|
|
9
9
|
export { default as clear } from './commands/clear';
|
|
10
10
|
export { default as deploy } from './commands/deploy';
|
|
11
11
|
export { default as docker } from './commands/docker';
|
|
12
|
+
export { default as dump } from './commands/dump';
|
|
12
13
|
export { default as env } from './commands/env';
|
|
13
14
|
export { default as _export } from './commands/export';
|
|
14
15
|
export { default as extension } from './commands/extension';
|
package/index.js
CHANGED
|
@@ -18,7 +18,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
18
18
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
19
19
|
};
|
|
20
20
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
21
|
-
exports.options = exports.verify = exports.testPackages = exports.tag = exports.revert = exports.renameCmd = exports.remove = exports.plan = exports._package = exports.migrate = exports.kill = exports.install = exports.extension = exports._export = exports.env = exports.docker = exports.deploy = exports.clear = exports.analyze = exports.adminUsers = exports.add = exports.createPgpmCommandMap = exports.createInitUsageText = void 0;
|
|
21
|
+
exports.options = exports.verify = exports.testPackages = exports.tag = exports.revert = exports.renameCmd = exports.remove = exports.plan = exports._package = exports.migrate = exports.kill = exports.install = exports.extension = exports._export = exports.env = exports.dump = exports.docker = exports.deploy = exports.clear = exports.analyze = exports.adminUsers = exports.add = exports.createPgpmCommandMap = exports.createInitUsageText = void 0;
|
|
22
22
|
const find_and_require_package_json_1 = require("find-and-require-package-json");
|
|
23
23
|
const inquirerer_1 = require("inquirerer");
|
|
24
24
|
const commands_1 = require("./commands");
|
|
@@ -37,6 +37,8 @@ var deploy_1 = require("./commands/deploy");
|
|
|
37
37
|
Object.defineProperty(exports, "deploy", { enumerable: true, get: function () { return __importDefault(deploy_1).default; } });
|
|
38
38
|
var docker_1 = require("./commands/docker");
|
|
39
39
|
Object.defineProperty(exports, "docker", { enumerable: true, get: function () { return __importDefault(docker_1).default; } });
|
|
40
|
+
var dump_1 = require("./commands/dump");
|
|
41
|
+
Object.defineProperty(exports, "dump", { enumerable: true, get: function () { return __importDefault(dump_1).default; } });
|
|
40
42
|
var env_1 = require("./commands/env");
|
|
41
43
|
Object.defineProperty(exports, "env", { enumerable: true, get: function () { return __importDefault(env_1).default; } });
|
|
42
44
|
var export_1 = require("./commands/export");
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "pgpm",
|
|
3
|
-
"version": "2.
|
|
3
|
+
"version": "2.10.1",
|
|
4
4
|
"author": "Constructive <developers@constructive.io>",
|
|
5
5
|
"description": "PostgreSQL Package Manager - Database migration and package management CLI",
|
|
6
6
|
"main": "index.js",
|
|
@@ -33,7 +33,7 @@
|
|
|
33
33
|
"test:watch": "jest --watch"
|
|
34
34
|
},
|
|
35
35
|
"devDependencies": {
|
|
36
|
-
"@inquirerer/test": "^1.2.
|
|
36
|
+
"@inquirerer/test": "^1.2.5",
|
|
37
37
|
"@types/js-yaml": "^4.0.9",
|
|
38
38
|
"@types/minimist": "^1.2.5",
|
|
39
39
|
"@types/node": "^20.12.7",
|
|
@@ -46,19 +46,20 @@
|
|
|
46
46
|
"ts-node": "^10.9.2"
|
|
47
47
|
},
|
|
48
48
|
"dependencies": {
|
|
49
|
-
"@inquirerer/utils": "^3.1.
|
|
50
|
-
"@pgpmjs/core": "^4.
|
|
51
|
-
"@pgpmjs/env": "^2.9.
|
|
49
|
+
"@inquirerer/utils": "^3.1.3",
|
|
50
|
+
"@pgpmjs/core": "^4.10.1",
|
|
51
|
+
"@pgpmjs/env": "^2.9.3",
|
|
52
52
|
"@pgpmjs/logger": "^1.3.7",
|
|
53
53
|
"@pgpmjs/types": "^2.14.0",
|
|
54
|
-
"appstash": "^0.2.
|
|
55
|
-
"find-and-require-package-json": "^0.8.
|
|
56
|
-
"genomic": "^5.2.
|
|
57
|
-
"inquirerer": "^4.
|
|
54
|
+
"appstash": "^0.2.8",
|
|
55
|
+
"find-and-require-package-json": "^0.8.6",
|
|
56
|
+
"genomic": "^5.2.3",
|
|
57
|
+
"inquirerer": "^4.3.1",
|
|
58
58
|
"js-yaml": "^4.1.0",
|
|
59
59
|
"minimist": "^1.2.8",
|
|
60
60
|
"pg-cache": "^1.6.14",
|
|
61
61
|
"pg-env": "^1.2.5",
|
|
62
|
+
"pgsql-deparser": "^17.17.2",
|
|
62
63
|
"semver": "^7.6.2",
|
|
63
64
|
"shelljs": "^0.10.0",
|
|
64
65
|
"yanse": "^0.1.11"
|
|
@@ -75,5 +76,5 @@
|
|
|
75
76
|
"pg",
|
|
76
77
|
"pgsql"
|
|
77
78
|
],
|
|
78
|
-
"gitHead": "
|
|
79
|
+
"gitHead": "22c89cfc6f67879e77ca47f652c31c80828665f5"
|
|
79
80
|
}
|
package/utils/display.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
export declare const usageText = "\n Usage: pgpm <command> [options]\n\n Core Database Operations:\n add Add database changes to plans and create SQL files\n deploy Deploy database changes and migrations\n verify Verify database state and migrations\n revert Revert database changes and migrations\n\n Project Management:\n init Initialize workspace or module\n extension Manage module dependencies\n plan Generate module deployment plans\n package Package module for distribution\n export Export database migrations from existing databases\n update Update pgpm to the latest version\n cache Manage cached templates (clean)\n upgrade Upgrade installed pgpm modules to latest versions (alias: up)\n\n Database Administration:\n kill Terminate database connections and optionally drop databases\n install Install database modules\n tag Add tags to changes for versioning\n clear Clear database state\n remove Remove database changes\n analyze Analyze database structure\n rename Rename database changes\n admin-users Manage admin users\n\n Testing:\n test-packages Run integration tests on all workspace packages\n\n Migration Tools:\n migrate Migration management subcommands\n init Initialize migration tracking\n status Show migration status\n list List all changes\n deps Show change dependencies\n \n Development Tools:\n docker Manage PostgreSQL Docker containers (start/stop)\n env Manage PostgreSQL environment variables\n test-packages Run integration tests on workspace packages\n \n Global Options:\n -h, --help Display this help information\n -v, --version Display version information\n --cwd <directory> Working directory (default: current directory)\n\n Individual Command Help:\n pgpm <command> --help Display detailed help for specific command\n pgpm <command> -h Display detailed help for specific command\n\n Examples:\n pgpm deploy --help Show deploy command options\n pgpm init workspace Initialize new workspace\n pgpm install @pgpm/base32 Install a database module\n ";
|
|
1
|
+
export declare const usageText = "\n Usage: pgpm <command> [options]\n\n Core Database Operations:\n add Add database changes to plans and create SQL files\n deploy Deploy database changes and migrations\n verify Verify database state and migrations\n revert Revert database changes and migrations\n\n Project Management:\n init Initialize workspace or module\n extension Manage module dependencies\n plan Generate module deployment plans\n package Package module for distribution\n export Export database migrations from existing databases\n update Update pgpm to the latest version\n cache Manage cached templates (clean)\n upgrade Upgrade installed pgpm modules to latest versions (alias: up)\n\n Database Administration:\n dump Dump a database to a sql file\n kill Terminate database connections and optionally drop databases\n install Install database modules\n tag Add tags to changes for versioning\n clear Clear database state\n remove Remove database changes\n analyze Analyze database structure\n rename Rename database changes\n admin-users Manage admin users\n\n Testing:\n test-packages Run integration tests on all workspace packages\n\n Migration Tools:\n migrate Migration management subcommands\n init Initialize migration tracking\n status Show migration status\n list List all changes\n deps Show change dependencies\n \n Development Tools:\n docker Manage PostgreSQL Docker containers (start/stop)\n env Manage PostgreSQL environment variables\n test-packages Run integration tests on workspace packages\n \n Global Options:\n -h, --help Display this help information\n -v, --version Display version information\n --cwd <directory> Working directory (default: current directory)\n\n Individual Command Help:\n pgpm <command> --help Display detailed help for specific command\n pgpm <command> -h Display detailed help for specific command\n\n Examples:\n pgpm deploy --help Show deploy command options\n pgpm init workspace Initialize new workspace\n pgpm install @pgpm/base32 
Install a database module\n ";
|
package/utils/display.js
CHANGED
|
@@ -21,6 +21,7 @@ exports.usageText = `
|
|
|
21
21
|
upgrade Upgrade installed pgpm modules to latest versions (alias: up)
|
|
22
22
|
|
|
23
23
|
Database Administration:
|
|
24
|
+
dump Dump a database to a sql file
|
|
24
25
|
kill Terminate database connections and optionally drop databases
|
|
25
26
|
install Install database modules
|
|
26
27
|
tag Add tags to changes for versioning
|