@pgpmjs/core 3.0.5 → 3.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +17 -17
- package/core/class/pgpm.js +6 -6
- package/core/template-scaffold.js +2 -22
- package/esm/core/class/pgpm.js +6 -6
- package/esm/core/template-scaffold.js +2 -22
- package/esm/export/export-migrations.js +2 -3
- package/esm/files/plan/generator.js +2 -2
- package/esm/files/plan/writer.js +3 -3
- package/esm/migrate/client.js +1 -1
- package/esm/projects/deploy.js +1 -1
- package/esm/projects/revert.js +1 -1
- package/esm/projects/verify.js +1 -1
- package/esm/utils/debug.js +13 -13
- package/esm/workspace/paths.js +7 -7
- package/export/export-migrations.js +2 -3
- package/files/plan/generator.js +2 -2
- package/files/plan/writer.js +3 -3
- package/migrate/client.js +1 -1
- package/package.json +12 -15
- package/projects/deploy.js +1 -1
- package/projects/revert.js +1 -1
- package/projects/verify.js +1 -1
- package/utils/debug.js +13 -13
- package/workspace/paths.d.ts +2 -2
- package/workspace/paths.js +9 -9
package/README.md
CHANGED
```diff
@@ -51,10 +51,10 @@ Common issues and solutions for pgpm, PostgreSQL, and testing.
 
 ### 🧪 Testing
 
-* [pgsql-test](https://github.com/constructive-io/constructive/tree/main/
-* [supabase-test](https://github.com/constructive-io/constructive/tree/main/
-* [graphile-test](https://github.com/constructive-io/constructive/tree/main/
-* [pg-query-context](https://github.com/constructive-io/constructive/tree/main/
+* [pgsql-test](https://github.com/constructive-io/constructive/tree/main/postgres/pgsql-test): **📊 Isolated testing environments** with per-test transaction rollbacks—ideal for integration tests, complex migrations, and RLS simulation.
+* [supabase-test](https://github.com/constructive-io/constructive/tree/main/postgres/supabase-test): **🧪 Supabase-native test harness** preconfigured for the local Supabase stack—per-test rollbacks, JWT/role context helpers, and CI/GitHub Actions ready.
+* [graphile-test](https://github.com/constructive-io/constructive/tree/main/graphile/graphile-test): **🔐 Authentication mocking** for Graphile-focused test helpers and emulating row-level security contexts.
+* [pg-query-context](https://github.com/constructive-io/constructive/tree/main/postgres/pg-query-context): **🔒 Session context injection** to add session-local context (e.g., `SET LOCAL`) into queries—ideal for setting `role`, `jwt.claims`, and other session settings.
 
 ### 🧠 Parsing & AST
 
@@ -68,25 +68,25 @@ Common issues and solutions for pgpm, PostgreSQL, and testing.
 
 ### 🚀 API & Dev Tools
 
-* [
-* [
+* [@constructive-io/graphql-server](https://github.com/constructive-io/constructive/tree/main/graphql/server): **⚡ Express-based API server** powered by PostGraphile to expose a secure, scalable GraphQL API over your Postgres database.
+* [@constructive-io/graphql-explorer](https://github.com/constructive-io/constructive/tree/main/graphql/explorer): **🔎 Visual API explorer** with GraphiQL for browsing across all databases and schemas—useful for debugging, documentation, and API prototyping.
 
 ### 🔁 Streaming & Uploads
 
-* [etag-hash](https://github.com/constructive-io/constructive/tree/main/
-* [etag-stream](https://github.com/constructive-io/constructive/tree/main/
-* [uuid-hash](https://github.com/constructive-io/constructive/tree/main/
-* [uuid-stream](https://github.com/constructive-io/constructive/tree/main/
-* [
-* [
+* [etag-hash](https://github.com/constructive-io/constructive/tree/main/streaming/etag-hash): **🏷️ S3-compatible ETags** created by streaming and hashing file uploads in chunks.
+* [etag-stream](https://github.com/constructive-io/constructive/tree/main/streaming/etag-stream): **🔄 ETag computation** via Node stream transformer during upload or transfer.
+* [uuid-hash](https://github.com/constructive-io/constructive/tree/main/streaming/uuid-hash): **🆔 Deterministic UUIDs** generated from hashed content, great for deduplication and asset referencing.
+* [uuid-stream](https://github.com/constructive-io/constructive/tree/main/streaming/uuid-stream): **🌊 Streaming UUID generation** based on piped file content—ideal for upload pipelines.
+* [@constructive-io/s3-streamer](https://github.com/constructive-io/constructive/tree/main/streaming/s3-streamer): **📤 Direct S3 streaming** for large files with support for metadata injection and content validation.
+* [@constructive-io/upload-names](https://github.com/constructive-io/constructive/tree/main/streaming/upload-names): **📂 Collision-resistant filenames** utility for structured and unique file names for uploads.
 
 ### 🧰 CLI & Codegen
 
-* [pgpm](https://github.com/constructive-io/constructive/tree/main/
-* [@
-* [
-* [@
-* [@
+* [pgpm](https://github.com/constructive-io/constructive/tree/main/pgpm/pgpm): **🖥️ PostgreSQL Package Manager** for modular Postgres development. Works with database workspaces, scaffolding, migrations, seeding, and installing database packages.
+* [@constructive-io/cli](https://github.com/constructive-io/constructive/tree/main/packages/cli): **🖥️ Command-line toolkit** for managing Constructive projects—supports database scaffolding, migrations, seeding, code generation, and automation.
+* [@constructive-io/graphql-codegen](https://github.com/constructive-io/constructive/tree/main/graphql/codegen): **✨ GraphQL code generation** (types, operations, SDK) from schema/endpoint introspection.
+* [@constructive-io/query-builder](https://github.com/constructive-io/constructive/tree/main/packages/query-builder): **🏗️ SQL constructor** providing a robust TypeScript-based query builder for dynamic generation of `SELECT`, `INSERT`, `UPDATE`, `DELETE`, and stored procedure calls—supports advanced SQL features like `JOIN`, `GROUP BY`, and schema-qualified queries.
+* [@constructive-io/graphql-query](https://github.com/constructive-io/constructive/tree/main/graphql/query): **🧩 Fluent GraphQL builder** for PostGraphile schemas. ⚡ Schema-aware via introspection, 🧩 composable and ergonomic for building deeply nested queries.
 
 ## Credits
 
```
package/core/class/pgpm.js
CHANGED
```diff
@@ -175,7 +175,7 @@ class PgpmPackage {
 targetPath = path_1.default.join(this.cwd, modName);
 }
 else if (isInsideModule) {
-console.error(yanse_1.default.red(`Error: Cannot create a module inside an existing module. Please run '
+console.error(yanse_1.default.red(`Error: Cannot create a module inside an existing module. Please run 'pgpm init' from the workspace root or from a parent directory like 'packages/'.`));
 process.exit(1);
 }
 else {
@@ -622,8 +622,8 @@ class PgpmPackage {
 name: tagName,
 change: targetChange,
 timestamp: (0, generator_1.getNow)(),
-planner: '
-email: '
+planner: 'constructive',
+email: 'constructive@5b0c196eeb62',
 comment
 };
 plan.tags.push(newTag);
@@ -690,8 +690,8 @@ class PgpmPackage {
 name: changeName,
 dependencies: dependencies || [],
 timestamp: (0, generator_1.getNow)(),
-planner: '
-email: '
+planner: 'constructive',
+email: 'constructive@5b0c196eeb62',
 comment: comment || `add ${changeName}`
 };
 plan.changes.push(newChange);
@@ -797,7 +797,7 @@ ${dependencies.length > 0 ? dependencies.map(dep => `-- requires: ${dep}`).join(
 const newlyAdded = [];
 for (const pkgstr of pkgstrs) {
 const { name } = (0, parse_package_name_1.parse)(pkgstr);
-const tempDir = fs_1.default.mkdtempSync(path_1.default.join(os_1.default.tmpdir(), '
+const tempDir = fs_1.default.mkdtempSync(path_1.default.join(os_1.default.tmpdir(), 'pgpm-install-'));
 try {
 process.chdir(tempDir);
 (0, child_process_1.execSync)(`npm install ${pkgstr} --production --prefix ./extensions`, {
```
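The middle hunks above only change the default planner identity baked into new plan entries (the previously hardcoded values are truncated in this diff and are not reproduced here). A minimal sketch of the resulting entry shape, with the caller-supplied fields filled in with hypothetical placeholders:

```js
// Sketch only: shape of the object pushed onto plan.changes after this change.
// 'my_change', the timestamp, and the comment are hypothetical placeholders;
// only planner and email are the new defaults shown in the diff.
const newChange = {
  name: 'my_change',
  dependencies: [],
  timestamp: '2024-01-01T00:00:00Z',   // stand-in for (0, generator_1.getNow)()
  planner: 'constructive',
  email: 'constructive@5b0c196eeb62',
  comment: 'add my_change'             // comment || `add ${changeName}`
};
```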
package/core/template-scaffold.js
CHANGED

```diff
@@ -17,22 +17,6 @@ const templatizer = new create_gen_app_1.Templatizer();
 const looksLikePath = (value) => {
 return (value.startsWith('.') || value.startsWith('/') || value.startsWith('~'));
 };
-const normalizeQuestions = (questions) => questions?.map((q) => ({
-...q,
-type: q.type || 'text',
-}));
-const attachQuestionsToTemplatizer = (templ, questions) => {
-if (!questions?.length || typeof templ?.extract !== 'function')
-return;
-const originalExtract = templ.extract.bind(templ);
-templ.extract = async (templateDir) => {
-const extracted = await originalExtract(templateDir);
-extracted.projectQuestions = {
-questions: normalizeQuestions(questions),
-};
-return extracted;
-};
-};
 /**
 * Resolve the template path using the new metadata-driven resolution.
 *
@@ -98,10 +82,8 @@ async function scaffoldTemplate(options) {
 fs_1.default.existsSync(resolvedRepo) &&
 fs_1.default.statSync(resolvedRepo).isDirectory()) {
 const { fromPath, resolvedTemplatePath } = resolveFromPath(resolvedRepo, templatePath, type, dir);
-// Read boilerplate config for questions
+// Read boilerplate config for questions (create-gen-app now handles .boilerplate.json natively)
 const boilerplateConfig = (0, boilerplate_scanner_1.readBoilerplateConfig)(resolvedTemplatePath);
-// Inject questions into the templatizer pipeline so prompt types and defaults are applied
-attachQuestionsToTemplatizer(templatizer, boilerplateConfig?.questions);
 await templatizer.process(resolvedRepo, outputDir, {
 argv: answers,
 noTty,
@@ -149,10 +131,8 @@ async function scaffoldTemplate(options) {
 templateDir = tempDest;
 }
 const { fromPath, resolvedTemplatePath } = resolveFromPath(templateDir, templatePath, type, dir);
-// Read boilerplate config for questions
+// Read boilerplate config for questions (create-gen-app now handles .boilerplate.json natively)
 const boilerplateConfig = (0, boilerplate_scanner_1.readBoilerplateConfig)(resolvedTemplatePath);
-// Inject questions into the templatizer pipeline so prompt types and defaults are applied
-attachQuestionsToTemplatizer(templatizer, boilerplateConfig?.questions);
 await templatizer.process(templateDir, outputDir, {
 argv: answers,
 noTty,
```
package/esm/core/class/pgpm.js
CHANGED
```diff
@@ -136,7 +136,7 @@ export class PgpmPackage {
 targetPath = path.join(this.cwd, modName);
 }
 else if (isInsideModule) {
-console.error(yanse.red(`Error: Cannot create a module inside an existing module. Please run '
+console.error(yanse.red(`Error: Cannot create a module inside an existing module. Please run 'pgpm init' from the workspace root or from a parent directory like 'packages/'.`));
 process.exit(1);
 }
 else {
@@ -583,8 +583,8 @@ export class PgpmPackage {
 name: tagName,
 change: targetChange,
 timestamp: getPlanTimestamp(),
-planner: '
-email: '
+planner: 'constructive',
+email: 'constructive@5b0c196eeb62',
 comment
 };
 plan.tags.push(newTag);
@@ -651,8 +651,8 @@ export class PgpmPackage {
 name: changeName,
 dependencies: dependencies || [],
 timestamp: getPlanTimestamp(),
-planner: '
-email: '
+planner: 'constructive',
+email: 'constructive@5b0c196eeb62',
 comment: comment || `add ${changeName}`
 };
 plan.changes.push(newChange);
@@ -758,7 +758,7 @@ ${dependencies.length > 0 ? dependencies.map(dep => `-- requires: ${dep}`).join(
 const newlyAdded = [];
 for (const pkgstr of pkgstrs) {
 const { name } = parse(pkgstr);
-const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), '
+const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'pgpm-install-'));
 try {
 process.chdir(tempDir);
 execSync(`npm install ${pkgstr} --production --prefix ./extensions`, {
```
package/esm/core/template-scaffold.js
CHANGED

```diff
@@ -10,22 +10,6 @@ const templatizer = new Templatizer();
 const looksLikePath = (value) => {
 return (value.startsWith('.') || value.startsWith('/') || value.startsWith('~'));
 };
-const normalizeQuestions = (questions) => questions?.map((q) => ({
-...q,
-type: q.type || 'text',
-}));
-const attachQuestionsToTemplatizer = (templ, questions) => {
-if (!questions?.length || typeof templ?.extract !== 'function')
-return;
-const originalExtract = templ.extract.bind(templ);
-templ.extract = async (templateDir) => {
-const extracted = await originalExtract(templateDir);
-extracted.projectQuestions = {
-questions: normalizeQuestions(questions),
-};
-return extracted;
-};
-};
 /**
 * Resolve the template path using the new metadata-driven resolution.
 *
@@ -91,10 +75,8 @@ export async function scaffoldTemplate(options) {
 fs.existsSync(resolvedRepo) &&
 fs.statSync(resolvedRepo).isDirectory()) {
 const { fromPath, resolvedTemplatePath } = resolveFromPath(resolvedRepo, templatePath, type, dir);
-// Read boilerplate config for questions
+// Read boilerplate config for questions (create-gen-app now handles .boilerplate.json natively)
 const boilerplateConfig = readBoilerplateConfig(resolvedTemplatePath);
-// Inject questions into the templatizer pipeline so prompt types and defaults are applied
-attachQuestionsToTemplatizer(templatizer, boilerplateConfig?.questions);
 await templatizer.process(resolvedRepo, outputDir, {
 argv: answers,
 noTty,
@@ -142,10 +124,8 @@ export async function scaffoldTemplate(options) {
 templateDir = tempDest;
 }
 const { fromPath, resolvedTemplatePath } = resolveFromPath(templateDir, templatePath, type, dir);
-// Read boilerplate config for questions
+// Read boilerplate config for questions (create-gen-app now handles .boilerplate.json natively)
 const boilerplateConfig = readBoilerplateConfig(resolvedTemplatePath);
-// Inject questions into the templatizer pipeline so prompt types and defaults are applied
-attachQuestionsToTemplatizer(templatizer, boilerplateConfig?.questions);
 await templatizer.process(templateDir, outputDir, {
 argv: answers,
 noTty,
```
package/esm/export/export-migrations.js
CHANGED

```diff
@@ -56,8 +56,7 @@ const exportMigrationsToDisk = async ({ project, options, database, databaseId,
 'pgpm-stamps',
 'pgpm-base32',
 'pgpm-totp',
-'pgpm-types'
-'pgpm-default-roles'
+'pgpm-types'
 ]
 });
 writeSqitchPlan(results.rows, opts);
@@ -162,7 +161,7 @@ const preparePackage = async ({ project, author, outdir, name, extensions }) =>
 * Generates a function for replacing schema names and extension names in strings.
 */
 const makeReplacer = ({ schemas, name }) => {
-const replacements = ['
+const replacements = ['constructive-extension-name', name];
 const schemaReplacers = schemas.map((schema) => [
 schema.schema_name,
 toSnakeCase(`${name}_${schema.name}`)
```
package/esm/files/plan/generator.js
CHANGED

```diff
@@ -33,10 +33,10 @@ export function generatePlan(options) {
 // Generate the plan entries
 entries.forEach(entry => {
 if (entry.dependencies && entry.dependencies.length > 0) {
-planfile.push(`${entry.change} [${entry.dependencies.join(' ')}] ${now}
+planfile.push(`${entry.change} [${entry.dependencies.join(' ')}] ${now} constructive <constructive@5b0c196eeb62>${entry.comment ? ` # ${entry.comment}` : ''}`);
 }
 else {
-planfile.push(`${entry.change} ${now}
+planfile.push(`${entry.change} ${now} constructive <constructive@5b0c196eeb62>${entry.comment ? ` # ${entry.comment}` : ''}`);
 }
 });
 return planfile.join('\n');
```
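The two template literals above change only the planner identity that `generatePlan` writes into each plan entry. A quick illustration of the resulting line format; the change name, dependency, timestamp, and comment below are hypothetical placeholders, and only the `constructive <constructive@5b0c196eeb62>` identity comes from the diff:

```js
// Illustration only: the entry format produced by the template literal above.
const now = '2017-08-11T08:11:51Z';
const entry = { change: 'schemas/app', dependencies: ['schemas/base'], comment: 'add schemas/app' };
const line = `${entry.change} [${entry.dependencies.join(' ')}] ${now} constructive <constructive@5b0c196eeb62>${entry.comment ? ` # ${entry.comment}` : ''}`;
// -> "schemas/app [schemas/base] 2017-08-11T08:11:51Z constructive <constructive@5b0c196eeb62> # add schemas/app"
```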
package/esm/files/plan/writer.js
CHANGED
```diff
@@ -7,12 +7,12 @@ export function writeSqitchPlan(rows, opts) {
 const dir = path.resolve(path.join(opts.outdir, opts.name));
 fs.mkdirSync(dir, { recursive: true });
 const date = () => '2017-08-11T08:11:51Z'; // stubbed timestamp
-const author = opts.author || '
+const author = opts.author || 'constructive';
 const email = `${author}@5b0c196eeb62`;
 const duplicates = {};
 const plan = opts.replacer(`%syntax-version=1.0.0
-%project=
-%uri=
+%project=constructive-extension-name
+%uri=constructive-extension-name
 
 ${rows
 .map((row) => {
```
package/esm/migrate/client.js
CHANGED
```diff
@@ -56,7 +56,7 @@ export class PgpmMigrate {
 if (this.initialized)
 return;
 try {
-log.info('Checking
+log.info('Checking Constructive migration schema...');
 // Check if pgpm_migrate schema exists
 const result = await this.pool.query(`
 SELECT schema_name
```
package/esm/projects/deploy.js
CHANGED
```diff
@@ -91,7 +91,7 @@ export const deployProject = async (opts, name, database, pkg, toChange) => {
 }
 else {
 // Use new migration system
-log.debug(`→ Command:
+log.debug(`→ Command: constructive migrate deploy db:pg:${database}`);
 try {
 const client = new PgpmMigrate(mergedOpts.pg);
 const result = await client.deploy({
```
package/esm/projects/revert.js
CHANGED
```diff
@@ -46,7 +46,7 @@ export const revertProject = async (opts, name, database, pkg, options) => {
 log.info(`📂 Reverting local module: ${extension}`);
 log.debug(`→ Path: ${modulePath}`);
 // Use new migration system
-log.debug(`→ Command:
+log.debug(`→ Command: constructive migrate revert db:pg:${database}`);
 try {
 const client = new PgpmMigrate(opts.pg);
 const result = await client.revert({
```
package/esm/projects/verify.js
CHANGED
```diff
@@ -34,7 +34,7 @@ export const verifyProject = async (opts, name, database, pkg, options) => {
 const modulePath = resolve(pkg.workspacePath, modules[extension].path);
 log.info(`📂 Verifying local module: ${extension}`);
 log.debug(`→ Path: ${modulePath}`);
-log.debug(`→ Command:
+log.debug(`→ Command: constructive migrate verify db:pg:${database}`);
 try {
 const client = new PgpmMigrate(opts.pg);
 const result = await client.verify({
```
package/esm/utils/debug.js
CHANGED
```diff
@@ -74,11 +74,11 @@ export class DebugHelper {
 log.debug(`❌ Transaction rolled back after ${duration || 0}ms`);
 }
 static fromEnvironment() {
-const enabled = process.env.
-const logLevel = process.env.
-const showStackTrace = process.env.
-const showQueryParams = process.env.
-const showFullSQL = process.env.
+const enabled = process.env.CONSTRUCTIVE_DEBUG === 'true' || process.env.DEBUG === 'constructive*';
+const logLevel = process.env.CONSTRUCTIVE_DEBUG_LEVEL || 'debug';
+const showStackTrace = process.env.CONSTRUCTIVE_DEBUG_STACK !== 'false';
+const showQueryParams = process.env.CONSTRUCTIVE_DEBUG_PARAMS !== 'false';
+const showFullSQL = process.env.CONSTRUCTIVE_DEBUG_SQL !== 'false';
 return new DebugHelper({
 enabled,
 logLevel,
@@ -92,18 +92,18 @@ export class DebugHelper {
 export const debugHelper = DebugHelper.fromEnvironment();
 // Utility functions
 export function enableDebugMode() {
-process.env.
+process.env.CONSTRUCTIVE_DEBUG = 'true';
 process.env.LOG_LEVEL = 'debug';
 log.info('🔍 Debug mode enabled');
-log.info(' Set
-log.info(' Set
-log.info(' Set
-log.info(' Set
-log.info(' Set
+log.info(' Set CONSTRUCTIVE_DEBUG=false to disable');
+log.info(' Set CONSTRUCTIVE_DEBUG_LEVEL=info|warn|error|debug to change log level');
+log.info(' Set CONSTRUCTIVE_DEBUG_STACK=false to hide stack traces');
+log.info(' Set CONSTRUCTIVE_DEBUG_PARAMS=false to hide query parameters');
+log.info(' Set CONSTRUCTIVE_DEBUG_SQL=false to hide full SQL scripts');
 }
 export function createDebugSummary(error, context) {
 const summary = [];
-summary.push('===
+summary.push('=== Constructive Debug Summary ===');
 summary.push('');
 if (error) {
 summary.push('Error Information:');
@@ -136,7 +136,7 @@ export function createDebugSummary(error, context) {
 summary.push('');
 }
 summary.push('Debugging Tips:');
-summary.push(' 1. Run with
+summary.push(' 1. Run with CONSTRUCTIVE_DEBUG=true for more details');
 summary.push(' 2. Check the transaction query history above');
 summary.push(' 3. Verify your SQL scripts for syntax errors');
 summary.push(' 4. Ensure dependencies are applied in correct order');
```
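The renamed environment variables above follow one pattern: `CONSTRUCTIVE_DEBUG` opts in, and the remaining flags default to on unless explicitly set to `'false'`. A minimal sketch of how they drive `DebugHelper.fromEnvironment()`; the variable names and semantics come from the hunks above, while the specific values are examples only:

```js
// Example values only; the semantics mirror the checks in DebugHelper.fromEnvironment() above.
process.env.CONSTRUCTIVE_DEBUG = 'true';         // or DEBUG='constructive*'
process.env.CONSTRUCTIVE_DEBUG_LEVEL = 'info';   // falls back to 'debug' when unset
process.env.CONSTRUCTIVE_DEBUG_STACK = 'false';  // hide stack traces
process.env.CONSTRUCTIVE_DEBUG_PARAMS = 'false'; // hide query parameters
process.env.CONSTRUCTIVE_DEBUG_SQL = 'false';    // hide full SQL scripts

// enabled         = CONSTRUCTIVE_DEBUG === 'true' || DEBUG === 'constructive*'
// logLevel        = CONSTRUCTIVE_DEBUG_LEVEL || 'debug'
// showStackTrace  = CONSTRUCTIVE_DEBUG_STACK  !== 'false'
// showQueryParams = CONSTRUCTIVE_DEBUG_PARAMS !== 'false'
// showFullSQL     = CONSTRUCTIVE_DEBUG_SQL    !== 'false'
```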
package/esm/workspace/paths.js
CHANGED
```diff
@@ -1,7 +1,7 @@
 import { walkUp } from './utils';
 const PROJECT_FILES = {
 PLAN: 'pgpm.plan',
-
+PGPM: 'pgpm.json',
 };
 /**
 * Finds the module path by looking for pgpm.plan.
@@ -12,20 +12,20 @@ export const modulePath = (cwd = process.cwd()) => {
 return walkUp(cwd, PROJECT_FILES.PLAN);
 };
 /**
-* Finds the
+* Finds the pgpm project path.
 * @param cwd - Current working directory.
 * @returns A promise that resolves to the directory path containing `pgpm.json`.
 */
-export const
-return walkUp(cwd, PROJECT_FILES.
+export const pgpmPath = (cwd = process.cwd()) => {
+return walkUp(cwd, PROJECT_FILES.PGPM);
 };
 export const getWorkspacePath = (cwd) => {
 let workspacePath;
 try {
-workspacePath =
+workspacePath = pgpmPath(cwd);
 }
 catch (err) {
-console.error('Error: You must be in a
+console.error('Error: You must be in a pgpm workspace. You can initialize one with `pgpm init workspace`.');
 process.exit(1);
 }
 return workspacePath;
@@ -36,7 +36,7 @@ export const getModulePath = (cwd) => {
 pkgPath = modulePath(cwd);
 }
 catch (err) {
-console.error('Error: You must be in a
+console.error('Error: You must be in a pgpm module. You can initialize one with the `init` command.');
 process.exit(1);
 }
 return pkgPath;
```
package/export/export-migrations.js
CHANGED

```diff
@@ -62,8 +62,7 @@ const exportMigrationsToDisk = async ({ project, options, database, databaseId,
 'pgpm-stamps',
 'pgpm-base32',
 'pgpm-totp',
-'pgpm-types'
-'pgpm-default-roles'
+'pgpm-types'
 ]
 });
 (0, files_1.writeSqitchPlan)(results.rows, opts);
@@ -169,7 +168,7 @@ const preparePackage = async ({ project, author, outdir, name, extensions }) =>
 * Generates a function for replacing schema names and extension names in strings.
 */
 const makeReplacer = ({ schemas, name }) => {
-const replacements = ['
+const replacements = ['constructive-extension-name', name];
 const schemaReplacers = schemas.map((schema) => [
 schema.schema_name,
 (0, komoji_1.toSnakeCase)(`${name}_${schema.name}`)
```
package/files/plan/generator.js
CHANGED
```diff
@@ -41,10 +41,10 @@ function generatePlan(options) {
 // Generate the plan entries
 entries.forEach(entry => {
 if (entry.dependencies && entry.dependencies.length > 0) {
-planfile.push(`${entry.change} [${entry.dependencies.join(' ')}] ${now}
+planfile.push(`${entry.change} [${entry.dependencies.join(' ')}] ${now} constructive <constructive@5b0c196eeb62>${entry.comment ? ` # ${entry.comment}` : ''}`);
 }
 else {
-planfile.push(`${entry.change} ${now}
+planfile.push(`${entry.change} ${now} constructive <constructive@5b0c196eeb62>${entry.comment ? ` # ${entry.comment}` : ''}`);
 }
 });
 return planfile.join('\n');
```
package/files/plan/writer.js
CHANGED
```diff
@@ -17,12 +17,12 @@ function writeSqitchPlan(rows, opts) {
 const dir = path_1.default.resolve(path_1.default.join(opts.outdir, opts.name));
 fs_1.default.mkdirSync(dir, { recursive: true });
 const date = () => '2017-08-11T08:11:51Z'; // stubbed timestamp
-const author = opts.author || '
+const author = opts.author || 'constructive';
 const email = `${author}@5b0c196eeb62`;
 const duplicates = {};
 const plan = opts.replacer(`%syntax-version=1.0.0
-%project=
-%uri=
+%project=constructive-extension-name
+%uri=constructive-extension-name
 
 ${rows
 .map((row) => {
```
package/migrate/client.js
CHANGED
```diff
@@ -59,7 +59,7 @@ class PgpmMigrate {
 if (this.initialized)
 return;
 try {
-log.info('Checking
+log.info('Checking Constructive migration schema...');
 // Check if pgpm_migrate schema exists
 const result = await this.pool.query(`
 SELECT schema_name
```
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
 "name": "@pgpmjs/core",
-"version": "3.0.
+"version": "3.0.7",
 "author": "Constructive <developers@constructive.io>",
 "description": "PGPM Package and Migration Tools",
 "main": "index.js",
@@ -28,7 +28,7 @@
 "build": "makage build && npm run copy",
 "build:dev": "makage build --dev",
 "lint": "eslint . --fix",
-"test": "jest",
+"test": "jest --passWithNoTests",
 "test:watch": "jest --watch"
 },
 "keywords": [
@@ -41,30 +41,27 @@
 "sqitch"
 ],
 "devDependencies": {
+"@pgsql/types": "^17.6.2",
 "@types/pg": "^8.16.0",
 "copyfiles": "^2.4.1",
-"makage": "^0.1.
+"makage": "^0.1.9"
 },
 "dependencies": {
-"@pgpmjs/env": "^2.8.
-"@pgpmjs/logger": "^1.3.
-"@pgpmjs/server-utils": "^2.8.
-"@pgpmjs/types": "^2.12.
-"
-"create-gen-app": "^0.3.6",
+"@pgpmjs/env": "^2.8.5",
+"@pgpmjs/logger": "^1.3.5",
+"@pgpmjs/server-utils": "^2.8.7",
+"@pgpmjs/types": "^2.12.5",
+"create-gen-app": "^0.4.0",
 "csv-to-pg": "^2.0.10",
 "glob": "^13.0.0",
 "komoji": "^0.7.11",
 "parse-package-name": "^1.0.0",
 "pg": "^8.16.3",
-"pg-cache": "^1.6.
-"pg-env": "^1.2.
+"pg-cache": "^1.6.7",
+"pg-env": "^1.2.4",
 "pgsql-deparser": "^17.12.2",
 "pgsql-parser": "^17.9.2",
 "yanse": "^0.1.8"
 },
-"
-"@pgsql/types": "^17.6.1"
-},
-"gitHead": "86d74dc4fce9051df0d2b5bcc163607aba42f009"
+"gitHead": "e45ec95404e48d0c0542da882a3baea0cd6de1c7"
 }
```
package/projects/deploy.js
CHANGED
```diff
@@ -127,7 +127,7 @@ const deployProject = async (opts, name, database, pkg, toChange) => {
 }
 else {
 // Use new migration system
-log.debug(`→ Command:
+log.debug(`→ Command: constructive migrate deploy db:pg:${database}`);
 try {
 const client = new client_1.PgpmMigrate(mergedOpts.pg);
 const result = await client.deploy({
```
package/projects/revert.js
CHANGED
```diff
@@ -82,7 +82,7 @@ const revertProject = async (opts, name, database, pkg, options) => {
 log.info(`📂 Reverting local module: ${extension}`);
 log.debug(`→ Path: ${modulePath}`);
 // Use new migration system
-log.debug(`→ Command:
+log.debug(`→ Command: constructive migrate revert db:pg:${database}`);
 try {
 const client = new client_1.PgpmMigrate(opts.pg);
 const result = await client.revert({
```
package/projects/verify.js
CHANGED
```diff
@@ -70,7 +70,7 @@ const verifyProject = async (opts, name, database, pkg, options) => {
 const modulePath = (0, path_1.resolve)(pkg.workspacePath, modules[extension].path);
 log.info(`📂 Verifying local module: ${extension}`);
 log.debug(`→ Path: ${modulePath}`);
-log.debug(`→ Command:
+log.debug(`→ Command: constructive migrate verify db:pg:${database}`);
 try {
 const client = new client_1.PgpmMigrate(opts.pg);
 const result = await client.verify({
```
package/utils/debug.js
CHANGED
```diff
@@ -79,11 +79,11 @@ class DebugHelper {
 log.debug(`❌ Transaction rolled back after ${duration || 0}ms`);
 }
 static fromEnvironment() {
-const enabled = process.env.
-const logLevel = process.env.
-const showStackTrace = process.env.
-const showQueryParams = process.env.
-const showFullSQL = process.env.
+const enabled = process.env.CONSTRUCTIVE_DEBUG === 'true' || process.env.DEBUG === 'constructive*';
+const logLevel = process.env.CONSTRUCTIVE_DEBUG_LEVEL || 'debug';
+const showStackTrace = process.env.CONSTRUCTIVE_DEBUG_STACK !== 'false';
+const showQueryParams = process.env.CONSTRUCTIVE_DEBUG_PARAMS !== 'false';
+const showFullSQL = process.env.CONSTRUCTIVE_DEBUG_SQL !== 'false';
 return new DebugHelper({
 enabled,
 logLevel,
@@ -98,18 +98,18 @@ exports.DebugHelper = DebugHelper;
 exports.debugHelper = DebugHelper.fromEnvironment();
 // Utility functions
 function enableDebugMode() {
-process.env.
+process.env.CONSTRUCTIVE_DEBUG = 'true';
 process.env.LOG_LEVEL = 'debug';
 log.info('🔍 Debug mode enabled');
-log.info(' Set
-log.info(' Set
-log.info(' Set
-log.info(' Set
-log.info(' Set
+log.info(' Set CONSTRUCTIVE_DEBUG=false to disable');
+log.info(' Set CONSTRUCTIVE_DEBUG_LEVEL=info|warn|error|debug to change log level');
+log.info(' Set CONSTRUCTIVE_DEBUG_STACK=false to hide stack traces');
+log.info(' Set CONSTRUCTIVE_DEBUG_PARAMS=false to hide query parameters');
+log.info(' Set CONSTRUCTIVE_DEBUG_SQL=false to hide full SQL scripts');
 }
 function createDebugSummary(error, context) {
 const summary = [];
-summary.push('===
+summary.push('=== Constructive Debug Summary ===');
 summary.push('');
 if (error) {
 summary.push('Error Information:');
@@ -142,7 +142,7 @@ function createDebugSummary(error, context) {
 summary.push('');
 }
 summary.push('Debugging Tips:');
-summary.push(' 1. Run with
+summary.push(' 1. Run with CONSTRUCTIVE_DEBUG=true for more details');
 summary.push(' 2. Check the transaction query history above');
 summary.push(' 3. Verify your SQL scripts for syntax errors');
 summary.push(' 4. Ensure dependencies are applied in correct order');
```
package/workspace/paths.d.ts
CHANGED
```diff
@@ -5,10 +5,10 @@
 */
 export declare const modulePath: (cwd?: string) => string;
 /**
-* Finds the
+* Finds the pgpm project path.
 * @param cwd - Current working directory.
 * @returns A promise that resolves to the directory path containing `pgpm.json`.
 */
-export declare const
+export declare const pgpmPath: (cwd?: string) => string;
 export declare const getWorkspacePath: (cwd: string) => string;
 export declare const getModulePath: (cwd: string) => string;
```
package/workspace/paths.js
CHANGED
```diff
@@ -1,10 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getModulePath = exports.getWorkspacePath = exports.
+exports.getModulePath = exports.getWorkspacePath = exports.pgpmPath = exports.modulePath = void 0;
 const utils_1 = require("./utils");
 const PROJECT_FILES = {
 PLAN: 'pgpm.plan',
-
+PGPM: 'pgpm.json',
 };
 /**
 * Finds the module path by looking for pgpm.plan.
@@ -16,21 +16,21 @@ const modulePath = (cwd = process.cwd()) => {
 };
 exports.modulePath = modulePath;
 /**
-* Finds the
+* Finds the pgpm project path.
 * @param cwd - Current working directory.
 * @returns A promise that resolves to the directory path containing `pgpm.json`.
 */
-const
-return (0, utils_1.walkUp)(cwd, PROJECT_FILES.
+const pgpmPath = (cwd = process.cwd()) => {
+return (0, utils_1.walkUp)(cwd, PROJECT_FILES.PGPM);
 };
-exports.
+exports.pgpmPath = pgpmPath;
 const getWorkspacePath = (cwd) => {
 let workspacePath;
 try {
-workspacePath = (0, exports.
+workspacePath = (0, exports.pgpmPath)(cwd);
 }
 catch (err) {
-console.error('Error: You must be in a
+console.error('Error: You must be in a pgpm workspace. You can initialize one with `pgpm init workspace`.');
 process.exit(1);
 }
 return workspacePath;
@@ -42,7 +42,7 @@ const getModulePath = (cwd) => {
 pkgPath = (0, exports.modulePath)(cwd);
 }
 catch (err) {
-console.error('Error: You must be in a
+console.error('Error: You must be in a pgpm module. You can initialize one with the `init` command.');
 process.exit(1);
 }
 return pkgPath;
```
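For reference, the rename in the compiled and declaration files above boils down to: `pgpmPath` walks up from `cwd` to the nearest directory containing `pgpm.json` (the workspace marker), while `modulePath` keeps looking for `pgpm.plan`. A minimal usage sketch follows; the require path is an assumption and is not established by this diff, while the function names and file markers come from the hunks above.

```js
// Sketch only: the require path below is an assumption; the function names and
// file markers (pgpm.json for workspaces, pgpm.plan for modules) come from the diffs above.
const { pgpmPath, getWorkspacePath, getModulePath } = require('@pgpmjs/core/workspace/paths');

// Walks up from cwd until a directory containing pgpm.json is found.
const workspaceRoot = pgpmPath(process.cwd());

// Same lookup, but prints an error and exits if no pgpm workspace is found.
const safeWorkspaceRoot = getWorkspacePath(process.cwd());

// Walks up looking for pgpm.plan (the module marker) instead.
const moduleRoot = getModulePath(process.cwd());
```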