@pgpmjs/core 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +23 -0
- package/README.md +99 -0
- package/core/boilerplate-scanner.d.ts +41 -0
- package/core/boilerplate-scanner.js +106 -0
- package/core/boilerplate-types.d.ts +52 -0
- package/core/boilerplate-types.js +6 -0
- package/core/class/pgpm.d.ts +150 -0
- package/core/class/pgpm.js +1470 -0
- package/core/template-scaffold.d.ts +29 -0
- package/core/template-scaffold.js +168 -0
- package/esm/core/boilerplate-scanner.js +96 -0
- package/esm/core/boilerplate-types.js +5 -0
- package/esm/core/class/pgpm.js +1430 -0
- package/esm/core/template-scaffold.js +161 -0
- package/esm/export/export-meta.js +240 -0
- package/esm/export/export-migrations.js +180 -0
- package/esm/extensions/extensions.js +31 -0
- package/esm/files/extension/index.js +3 -0
- package/esm/files/extension/reader.js +79 -0
- package/esm/files/extension/writer.js +63 -0
- package/esm/files/index.js +6 -0
- package/esm/files/plan/generator.js +49 -0
- package/esm/files/plan/index.js +5 -0
- package/esm/files/plan/parser.js +296 -0
- package/esm/files/plan/validators.js +181 -0
- package/esm/files/plan/writer.js +114 -0
- package/esm/files/sql/index.js +1 -0
- package/esm/files/sql/writer.js +107 -0
- package/esm/files/sql-scripts/index.js +2 -0
- package/esm/files/sql-scripts/reader.js +19 -0
- package/esm/files/types/index.js +1 -0
- package/esm/files/types/package.js +1 -0
- package/esm/index.js +21 -0
- package/esm/init/client.js +144 -0
- package/esm/init/sql/bootstrap-roles.sql +55 -0
- package/esm/init/sql/bootstrap-test-roles.sql +72 -0
- package/esm/migrate/clean.js +23 -0
- package/esm/migrate/client.js +551 -0
- package/esm/migrate/index.js +5 -0
- package/esm/migrate/sql/procedures.sql +258 -0
- package/esm/migrate/sql/schema.sql +37 -0
- package/esm/migrate/types.js +1 -0
- package/esm/migrate/utils/event-logger.js +28 -0
- package/esm/migrate/utils/hash.js +27 -0
- package/esm/migrate/utils/transaction.js +125 -0
- package/esm/modules/modules.js +49 -0
- package/esm/packaging/package.js +96 -0
- package/esm/packaging/transform.js +70 -0
- package/esm/projects/deploy.js +123 -0
- package/esm/projects/revert.js +75 -0
- package/esm/projects/verify.js +61 -0
- package/esm/resolution/deps.js +526 -0
- package/esm/resolution/resolve.js +101 -0
- package/esm/utils/debug.js +147 -0
- package/esm/utils/target-utils.js +37 -0
- package/esm/workspace/paths.js +43 -0
- package/esm/workspace/utils.js +31 -0
- package/export/export-meta.d.ts +8 -0
- package/export/export-meta.js +244 -0
- package/export/export-migrations.d.ts +17 -0
- package/export/export-migrations.js +187 -0
- package/extensions/extensions.d.ts +5 -0
- package/extensions/extensions.js +35 -0
- package/files/extension/index.d.ts +2 -0
- package/files/extension/index.js +19 -0
- package/files/extension/reader.d.ts +24 -0
- package/files/extension/reader.js +86 -0
- package/files/extension/writer.d.ts +39 -0
- package/files/extension/writer.js +70 -0
- package/files/index.d.ts +5 -0
- package/files/index.js +22 -0
- package/files/plan/generator.d.ts +22 -0
- package/files/plan/generator.js +57 -0
- package/files/plan/index.d.ts +4 -0
- package/files/plan/index.js +21 -0
- package/files/plan/parser.d.ts +27 -0
- package/files/plan/parser.js +303 -0
- package/files/plan/validators.d.ts +52 -0
- package/files/plan/validators.js +187 -0
- package/files/plan/writer.d.ts +27 -0
- package/files/plan/writer.js +124 -0
- package/files/sql/index.d.ts +1 -0
- package/files/sql/index.js +17 -0
- package/files/sql/writer.d.ts +12 -0
- package/files/sql/writer.js +114 -0
- package/files/sql-scripts/index.d.ts +1 -0
- package/files/sql-scripts/index.js +18 -0
- package/files/sql-scripts/reader.d.ts +8 -0
- package/files/sql-scripts/reader.js +23 -0
- package/files/types/index.d.ts +46 -0
- package/files/types/index.js +17 -0
- package/files/types/package.d.ts +20 -0
- package/files/types/package.js +2 -0
- package/index.d.ts +21 -0
- package/index.js +45 -0
- package/init/client.d.ts +26 -0
- package/init/client.js +148 -0
- package/init/sql/bootstrap-roles.sql +55 -0
- package/init/sql/bootstrap-test-roles.sql +72 -0
- package/migrate/clean.d.ts +1 -0
- package/migrate/clean.js +27 -0
- package/migrate/client.d.ts +80 -0
- package/migrate/client.js +555 -0
- package/migrate/index.d.ts +5 -0
- package/migrate/index.js +21 -0
- package/migrate/sql/procedures.sql +258 -0
- package/migrate/sql/schema.sql +37 -0
- package/migrate/types.d.ts +67 -0
- package/migrate/types.js +2 -0
- package/migrate/utils/event-logger.d.ts +13 -0
- package/migrate/utils/event-logger.js +32 -0
- package/migrate/utils/hash.d.ts +12 -0
- package/migrate/utils/hash.js +32 -0
- package/migrate/utils/transaction.d.ts +27 -0
- package/migrate/utils/transaction.js +129 -0
- package/modules/modules.d.ts +31 -0
- package/modules/modules.js +56 -0
- package/package.json +70 -0
- package/packaging/package.d.ts +19 -0
- package/packaging/package.js +102 -0
- package/packaging/transform.d.ts +22 -0
- package/packaging/transform.js +75 -0
- package/projects/deploy.d.ts +8 -0
- package/projects/deploy.js +160 -0
- package/projects/revert.d.ts +15 -0
- package/projects/revert.js +112 -0
- package/projects/verify.d.ts +8 -0
- package/projects/verify.js +98 -0
- package/resolution/deps.d.ts +57 -0
- package/resolution/deps.js +531 -0
- package/resolution/resolve.d.ts +37 -0
- package/resolution/resolve.js +107 -0
- package/utils/debug.d.ts +21 -0
- package/utils/debug.js +153 -0
- package/utils/target-utils.d.ts +5 -0
- package/utils/target-utils.js +40 -0
- package/workspace/paths.d.ts +14 -0
- package/workspace/paths.js +50 -0
- package/workspace/utils.d.ts +8 -0
- package/workspace/utils.js +36 -0
package/esm/migrate/sql/procedures.sql
@@ -0,0 +1,258 @@
+-- Register a package (auto-called by deploy if needed)
+CREATE PROCEDURE pgpm_migrate.register_package(p_package TEXT)
+LANGUAGE plpgsql AS $$
+BEGIN
+  INSERT INTO pgpm_migrate.packages (package)
+  VALUES (p_package)
+  ON CONFLICT (package) DO NOTHING;
+END;
+$$;
+
+-- Check if a change is deployed (handles both local and cross-package dependencies)
+CREATE FUNCTION pgpm_migrate.is_deployed(
+  p_package TEXT,
+  p_change_name TEXT
+)
+RETURNS BOOLEAN
+LANGUAGE plpgsql STABLE AS $$
+DECLARE
+  v_actual_package TEXT;
+  v_actual_change TEXT;
+  v_colon_pos INT;
+BEGIN
+  -- Check if change_name contains a package prefix (cross-package dependency)
+  v_colon_pos := position(':' in p_change_name);
+
+  IF v_colon_pos > 0 THEN
+    -- Split into package and change name
+    v_actual_package := substring(p_change_name from 1 for v_colon_pos - 1);
+    v_actual_change := substring(p_change_name from v_colon_pos + 1);
+  ELSE
+    -- Use provided package as default
+    v_actual_package := p_package;
+    v_actual_change := p_change_name;
+  END IF;
+
+  RETURN EXISTS (
+    SELECT 1 FROM pgpm_migrate.changes
+    WHERE package = v_actual_package
+      AND change_name = v_actual_change
+  );
+END;
+$$;
+
+-- Deploy a change
+CREATE PROCEDURE pgpm_migrate.deploy(
+  p_package TEXT,
+  p_change_name TEXT,
+  p_script_hash TEXT,
+  p_requires TEXT[],
+  p_deploy_sql TEXT,
+  p_log_only BOOLEAN DEFAULT FALSE
+)
+LANGUAGE plpgsql AS $$
+DECLARE
+  v_change_id TEXT;
+BEGIN
+  -- Ensure package exists
+  CALL pgpm_migrate.register_package(p_package);
+
+  -- Generate simple ID
+  v_change_id := encode(sha256((p_package || p_change_name || p_script_hash)::bytea), 'hex');
+
+  -- Check if already deployed
+  IF pgpm_migrate.is_deployed(p_package, p_change_name) THEN
+    -- Check if it's the same script (by hash)
+    IF EXISTS (
+      SELECT 1 FROM pgpm_migrate.changes
+      WHERE package = p_package
+        AND change_name = p_change_name
+        AND script_hash = p_script_hash
+    ) THEN
+      -- Same change with same content, skip silently
+      RETURN;
+    ELSE
+      -- Different content, this is an error
+      RAISE EXCEPTION 'Change % already deployed in package % with different content', p_change_name, p_package;
+    END IF;
+  END IF;
+
+  -- Check dependencies
+  IF p_requires IS NOT NULL THEN
+    DECLARE
+      missing_changes TEXT[];
+    BEGIN
+      SELECT array_agg(req) INTO missing_changes
+      FROM unnest(p_requires) AS req
+      WHERE NOT pgpm_migrate.is_deployed(p_package, req);
+
+      IF array_length(missing_changes, 1) > 0 THEN
+        RAISE EXCEPTION 'Missing required changes for %: %', p_change_name, array_to_string(missing_changes, ', ');
+      END IF;
+    END;
+  END IF;
+
+  -- Execute deploy (skip if log-only mode)
+  IF NOT p_log_only THEN
+    BEGIN
+      EXECUTE p_deploy_sql;
+    EXCEPTION WHEN OTHERS THEN
+      RAISE;
+    END;
+  END IF;
+
+  -- Record deployment
+  INSERT INTO pgpm_migrate.changes (change_id, change_name, package, script_hash)
+  VALUES (v_change_id, p_change_name, p_package, p_script_hash);
+
+  -- Record dependencies (INSERTED AFTER SUCCESSFUL DEPLOYMENT)
+  IF p_requires IS NOT NULL THEN
+    INSERT INTO pgpm_migrate.dependencies (change_id, requires)
+    SELECT v_change_id, req FROM unnest(p_requires) AS req;
+  END IF;
+
+  -- Log success
+  INSERT INTO pgpm_migrate.events (event_type, change_name, package)
+  VALUES ('deploy', p_change_name, p_package);
+END;
+$$;
+
+-- Revert a change
+CREATE PROCEDURE pgpm_migrate.revert(
+  p_package TEXT,
+  p_change_name TEXT,
+  p_revert_sql TEXT
+)
+LANGUAGE plpgsql AS $$
+BEGIN
+  -- Check if deployed
+  IF NOT pgpm_migrate.is_deployed(p_package, p_change_name) THEN
+    RAISE EXCEPTION 'Change % not deployed in package %', p_change_name, p_package;
+  END IF;
+
+  -- Check if other changes depend on this (including cross-package dependencies)
+  IF EXISTS (
+    SELECT 1 FROM pgpm_migrate.dependencies d
+    JOIN pgpm_migrate.changes c ON c.change_id = d.change_id
+    WHERE (
+      -- Local dependency within same package
+      (d.requires = p_change_name AND c.package = p_package)
+      OR
+      -- Cross-package dependency
+      (d.requires = p_package || ':' || p_change_name)
+    )
+  ) THEN
+    -- Get list of dependent changes for better error message
+    DECLARE
+      dependent_changes TEXT;
+    BEGIN
+      SELECT string_agg(
+        CASE
+          WHEN d.requires = p_change_name THEN c.change_name
+          ELSE c.package || ':' || c.change_name
+        END,
+        ', '
+      ) INTO dependent_changes
+      FROM pgpm_migrate.dependencies d
+      JOIN pgpm_migrate.changes c ON c.change_id = d.change_id
+      WHERE (
+        (d.requires = p_change_name AND c.package = p_package)
+        OR
+        (d.requires = p_package || ':' || p_change_name)
+      );
+
+      RAISE EXCEPTION 'Cannot revert %: required by %', p_change_name, dependent_changes;
+    END;
+  END IF;
+
+  -- Execute revert
+  EXECUTE p_revert_sql;
+
+  -- Remove from deployed
+  DELETE FROM pgpm_migrate.changes
+  WHERE package = p_package AND change_name = p_change_name;
+
+  -- Log revert
+  INSERT INTO pgpm_migrate.events (event_type, change_name, package)
+  VALUES ('revert', p_change_name, p_package);
+END;
+$$;
+
+-- Verify a change
+CREATE FUNCTION pgpm_migrate.verify(
+  p_package TEXT,
+  p_change_name TEXT,
+  p_verify_sql TEXT
+)
+RETURNS BOOLEAN
+LANGUAGE plpgsql AS $$
+BEGIN
+  EXECUTE p_verify_sql;
+  RETURN TRUE;
+EXCEPTION WHEN OTHERS THEN
+  RETURN FALSE;
+END;
+$$;
+
+-- List deployed changes
+CREATE FUNCTION pgpm_migrate.deployed_changes(
+  p_package TEXT DEFAULT NULL
+)
+RETURNS TABLE(package TEXT, change_name TEXT, deployed_at TIMESTAMPTZ)
+LANGUAGE sql STABLE AS $$
+  SELECT package, change_name, deployed_at
+  FROM pgpm_migrate.changes
+  WHERE p_package IS NULL OR package = p_package
+  ORDER BY deployed_at;
+$$;
+
+-- Get changes that depend on a given change
+CREATE FUNCTION pgpm_migrate.get_dependents(
+  p_package TEXT,
+  p_change_name TEXT
+)
+RETURNS TABLE(package TEXT, change_name TEXT, dependency TEXT)
+LANGUAGE sql STABLE AS $$
+  SELECT c.package, c.change_name, d.requires as dependency
+  FROM pgpm_migrate.dependencies d
+  JOIN pgpm_migrate.changes c ON c.change_id = d.change_id
+  WHERE (
+    -- Local dependency within same package
+    (d.requires = p_change_name AND c.package = p_package)
+    OR
+    -- Cross-package dependency
+    (d.requires = p_package || ':' || p_change_name)
+  )
+  ORDER BY c.package, c.change_name;
+$$;
+
+-- Get deployment status
+CREATE FUNCTION pgpm_migrate.status(
+  p_package TEXT DEFAULT NULL
+)
+RETURNS TABLE(
+  package TEXT,
+  total_deployed INTEGER,
+  last_change TEXT,
+  last_deployed TIMESTAMPTZ
+)
+LANGUAGE sql STABLE AS $$
+  WITH latest AS (
+    SELECT DISTINCT ON (package)
+      package,
+      change_name,
+      deployed_at
+    FROM pgpm_migrate.changes
+    WHERE p_package IS NULL OR package = p_package
+    ORDER BY package, deployed_at DESC
+  )
+  SELECT
+    c.package,
+    COUNT(*)::INTEGER AS total_deployed,
+    l.change_name AS last_change,
+    l.deployed_at AS last_deployed
+  FROM pgpm_migrate.changes c
+  JOIN latest l ON l.package = c.package
+  WHERE p_package IS NULL OR c.package = p_package
+  GROUP BY c.package, l.change_name, l.deployed_at;
+$$;
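Note: the procedures above are the runtime surface that the migrate client drives. As a rough illustration of the calling convention only (the package's own wiring lives in migrate/client.js, which is not part of this hunk), a deploy and revert could be issued from node-postgres like this; the package name, change names, SQL strings and hash value are placeholders:

import pg from 'pg';

const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL });

// Deploy a change; p_requires may mix local names ('schema') and
// cross-package names ('other-pkg:schema').
await pool.query(
  'CALL pgpm_migrate.deploy($1, $2, $3, $4, $5, $6)',
  [
    'my-pkg',                        // p_package (placeholder)
    'tables/users',                  // p_change_name (placeholder)
    '<sha256-of-deploy-script>',     // p_script_hash (placeholder)
    ['schema', 'other-pkg:schema'],  // p_requires
    'CREATE TABLE users (id INT)',   // p_deploy_sql, run via EXECUTE
    false                            // p_log_only
  ]
);

// Revert runs p_revert_sql and refuses if other changes still depend on this one.
await pool.query('CALL pgpm_migrate.revert($1, $2, $3)', [
  'my-pkg',
  'tables/users',
  'DROP TABLE users'
]);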
package/esm/migrate/sql/schema.sql
@@ -0,0 +1,37 @@
+-- Create schema
+CREATE SCHEMA pgpm_migrate;
+
+-- 1. Packages (minimal - just name and timestamp)
+CREATE TABLE pgpm_migrate.packages (
+  package TEXT PRIMARY KEY,
+  created_at TIMESTAMPTZ NOT NULL DEFAULT clock_timestamp()
+);
+
+-- 2. Deployed changes (what's currently deployed)
+CREATE TABLE pgpm_migrate.changes (
+  change_id TEXT PRIMARY KEY,
+  change_name TEXT NOT NULL,
+  package TEXT NOT NULL REFERENCES pgpm_migrate.packages(package),
+  script_hash TEXT NOT NULL,
+  deployed_at TIMESTAMPTZ NOT NULL DEFAULT clock_timestamp(),
+  UNIQUE(package, change_name),
+  UNIQUE(package, script_hash)
+);
+
+-- 3. Dependencies (what depends on what)
+CREATE TABLE pgpm_migrate.dependencies (
+  change_id TEXT NOT NULL REFERENCES pgpm_migrate.changes(change_id) ON DELETE CASCADE,
+  requires TEXT NOT NULL,
+  PRIMARY KEY (change_id, requires)
+);
+
+-- 4. Event log (minimal history for rollback)
+CREATE TABLE pgpm_migrate.events (
+  event_id SERIAL PRIMARY KEY,
+  event_type TEXT NOT NULL CHECK (event_type IN ('deploy', 'revert', 'verify')),
+  change_name TEXT NOT NULL,
+  package TEXT NOT NULL,
+  occurred_at TIMESTAMPTZ NOT NULL DEFAULT clock_timestamp(),
+  error_message TEXT,
+  error_code TEXT
+);
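Note: the read-side helpers defined in procedures.sql query these tables. A small sketch of inspecting state with node-postgres; the function and table names come from this diff, while the connection settings and sample package/change names are placeholders:

import pg from 'pg';

const pool = new pg.Pool(); // connection settings taken from PG* env vars

// Per-package summary: total deployed, last change, last deploy time.
const { rows: status } = await pool.query('SELECT * FROM pgpm_migrate.status($1)', ['my-pkg']);

// Everything deployed, in deploy order (pass NULL for all packages).
const { rows: changes } = await pool.query('SELECT * FROM pgpm_migrate.deployed_changes(NULL)');

// Who depends on a change (matches both 'name' and 'pkg:name' dependency rows).
const { rows: dependents } = await pool.query(
  'SELECT * FROM pgpm_migrate.get_dependents($1, $2)',
  ['my-pkg', 'tables/users']
);

console.log({ status, changes, dependents });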
package/esm/migrate/types.js
@@ -0,0 +1 @@
+export {};
package/esm/migrate/utils/event-logger.js
@@ -0,0 +1,28 @@
+import { Logger } from '@pgpmjs/logger';
+import { getPgPool } from 'pg-cache';
+const log = new Logger('migrate:event-logger');
+export class EventLogger {
+    pool;
+    constructor(config) {
+        this.pool = getPgPool(config);
+    }
+    async logEvent(entry) {
+        try {
+            await this.pool.query(`
+        INSERT INTO pgpm_migrate.events
+        (event_type, change_name, package, error_message, error_code)
+        VALUES ($1::TEXT, $2::TEXT, $3::TEXT, $4::TEXT, $5::TEXT)
+      `, [
+                entry.eventType,
+                entry.changeName,
+                entry.package,
+                entry.errorMessage || null,
+                entry.errorCode || null
+            ]);
+            log.debug(`Logged ${entry.eventType} event for ${entry.package}:${entry.changeName}`);
+        }
+        catch (error) {
+            log.error(`Failed to log event: ${error.message}`);
+        }
+    }
+}
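Note: a minimal usage sketch for the logger above. The entry field names (eventType, changeName, package, errorMessage, errorCode) come from the INSERT statement in this file; the import path and the config shape accepted by getPgPool() are assumptions to be checked against the package's exports and pg-cache:

// Illustrative import path; verify against the package's "exports" map.
import { EventLogger } from '@pgpmjs/core/migrate/utils/event-logger';

// Assumed pg-style connection config; adjust to your environment.
const events = new EventLogger({
  host: 'localhost',
  port: 5432,
  database: 'appdb',
  user: 'postgres',
  password: 'postgres'
});

// Logging failures are caught internally, so this call does not throw on DB errors.
await events.logEvent({
  eventType: 'deploy',
  changeName: 'tables/users',
  package: 'my-pkg'
});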
package/esm/migrate/utils/hash.js
@@ -0,0 +1,27 @@
+import { createHash } from 'crypto';
+import { readFile } from 'fs/promises';
+import { parse } from 'pgsql-parser';
+import { cleanTree } from '../../packaging/package';
+/**
+ * Generate SHA256 hash of a file's contents
+ */
+export async function hashFile(filePath) {
+    const content = await readFile(filePath, 'utf-8');
+    return createHash('sha256').update(content).digest('hex');
+}
+/**
+ * Generate SHA256 hash of a string
+ */
+export function hashString(content) {
+    return createHash('sha256').update(content).digest('hex');
+}
+/**
+ * Generate SHA256 hash of a SQL file's parsed and cleaned AST
+ */
+export async function hashSqlFile(filePath) {
+    const content = await readFile(filePath, 'utf-8');
+    const parsed = await parse(content);
+    const cleaned = cleanTree(parsed);
+    const astString = JSON.stringify(cleaned);
+    return hashString(astString);
+}
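Note: hashSqlFile hashes the parsed, cleaned AST (cleanTree strips location fields), while hashFile hashes the raw bytes, so formatting-only edits should change the former's input far less than the latter's. A short sketch; the import path and file path are placeholders:

// Illustrative import path; verify against the package's "exports" map.
import { hashFile, hashSqlFile } from '@pgpmjs/core/migrate/utils/hash';

const file = './deploy/tables/users.sql'; // placeholder deploy script

const rawHash = await hashFile(file);     // SHA256 of the raw file bytes
const astHash = await hashSqlFile(file);  // SHA256 of the parsed, cleaned AST

// Reflowing whitespace in the SQL changes rawHash, but is expected to leave
// astHash stable, since location info is removed before hashing.
console.log({ rawHash, astHash });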
package/esm/migrate/utils/transaction.js
@@ -0,0 +1,125 @@
+import { Logger } from '@pgpmjs/logger';
+const log = new Logger('migrate:transaction');
+/**
+ * Execute a function within a transaction context
+ * If useTransaction is true, wraps the execution in a transaction
+ * If false, uses the pool directly without transaction
+ */
+export async function withTransaction(pool, options, fn) {
+    const queryHistory = [];
+    const addQuery = (query, params, startTime) => {
+        queryHistory.push({
+            query,
+            params,
+            timestamp: Date.now(),
+            duration: startTime ? Date.now() - startTime : undefined
+        });
+    };
+    if (!options.useTransaction) {
+        // No transaction - use pool directly
+        log.debug('Executing without transaction');
+        return fn({ client: pool, isTransaction: false, queryHistory, addQuery });
+    }
+    // Use transaction
+    const client = await pool.connect();
+    const transactionStartTime = Date.now();
+    log.debug('Starting transaction');
+    try {
+        const beginTime = Date.now();
+        await client.query('BEGIN');
+        addQuery('BEGIN', [], beginTime);
+        const result = await fn({ client, isTransaction: true, queryHistory, addQuery });
+        const commitTime = Date.now();
+        await client.query('COMMIT');
+        addQuery('COMMIT', [], commitTime);
+        const transactionDuration = Date.now() - transactionStartTime;
+        log.debug(`Transaction committed successfully in ${transactionDuration}ms`);
+        return result;
+    }
+    catch (error) {
+        const rollbackTime = Date.now();
+        try {
+            await client.query('ROLLBACK');
+            addQuery('ROLLBACK', [], rollbackTime);
+        }
+        catch (rollbackError) {
+            log.error('Failed to rollback transaction:', rollbackError);
+        }
+        const transactionDuration = Date.now() - transactionStartTime;
+        // Enhanced error logging with context
+        const errorLines = [];
+        errorLines.push(`Transaction rolled back due to error after ${transactionDuration}ms:`);
+        errorLines.push(`Error Code: ${error.code || 'N/A'}`);
+        errorLines.push(`Error Message: ${error.message || 'N/A'}`);
+        // Log query history for debugging
+        if (queryHistory.length > 0) {
+            errorLines.push('Query history for this transaction:');
+            queryHistory.forEach((entry, index) => {
+                const duration = entry.duration ? ` (${entry.duration}ms)` : '';
+                const params = entry.params && entry.params.length > 0
+                    ? ` with params: ${JSON.stringify(entry.params.slice(0, 2))}${entry.params.length > 2 ? '...' : ''}`
+                    : '';
+                errorLines.push(` ${index + 1}. ${entry.query.split('\n')[0].trim()}${params}${duration}`);
+            });
+        }
+        // For transaction aborted errors, provide additional context
+        if (error.code === '25P02') {
+            errorLines.push('🔍 Debug Info: Transaction was aborted due to a previous error.');
+            errorLines.push(' This usually means a previous command in the transaction failed.');
+            errorLines.push(' Check the query history above to identify the failing command.');
+        }
+        // Log the consolidated error message
+        log.error(errorLines.join('\n'));
+        throw error;
+    }
+    finally {
+        client.release();
+    }
+}
+/**
+ * Helper to execute a query within a transaction context with enhanced logging
+ */
+export async function executeQuery(context, query, params) {
+    const startTime = Date.now();
+    try {
+        const result = await context.client.query(query, params);
+        const duration = Date.now() - startTime;
+        // Add to query history
+        context.addQuery(query, params, startTime);
+        // Log slow queries for debugging
+        if (duration > 1000) {
+            log.warn(`Slow query detected (${duration}ms): ${query.split('\n')[0].trim()}`);
+        }
+        return result;
+    }
+    catch (error) {
+        const duration = Date.now() - startTime;
+        // Add failed query to history
+        context.addQuery(query, params, startTime);
+        // Enhanced error logging
+        const errorLines = [];
+        errorLines.push(`Query failed after ${duration}ms:`);
+        errorLines.push(` Query: ${query.split('\n')[0].trim()}`);
+        if (params && params.length > 0) {
+            errorLines.push(` Params: ${JSON.stringify(params.slice(0, 3))}${params.length > 3 ? '...' : ''}`);
+        }
+        errorLines.push(` Error Code: ${error.code || 'N/A'}`);
+        errorLines.push(` Error Message: ${error.message || 'N/A'}`);
+        // Provide debugging hints for common errors
+        if (error.code === '42P01') {
+            errorLines.push('💡 Hint: Relation (table/view) does not exist. Check if migrations are applied in correct order.');
+        }
+        else if (error.code === '42883') {
+            errorLines.push('💡 Hint: Function does not exist. Check if required extensions or functions are installed.');
+        }
+        else if (error.code === '23505') {
+            errorLines.push('💡 Hint: Unique constraint violation. Check for duplicate data.');
+        }
+        else if (error.code === '23503') {
+            errorLines.push('💡 Hint: Foreign key constraint violation. Check referential integrity.');
+        }
+        // Log the consolidated error message
+        log.error(errorLines.join('\n'));
+        throw error;
+    }
+}
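Note: a hedged sketch of how these helpers compose, assuming a node-postgres Pool; the context object shape (client, queryHistory, addQuery) matches what withTransaction constructs above, while the import path and the sample SQL are placeholders:

import pg from 'pg';
// Illustrative import path; verify against the package's "exports" map.
import { withTransaction, executeQuery } from '@pgpmjs/core/migrate/utils/transaction';

const pool = new pg.Pool();

// Both statements run inside one BEGIN/COMMIT; on error everything is rolled
// back and the accumulated query history is included in the logged report.
await withTransaction(pool, { useTransaction: true }, async (ctx) => {
  await executeQuery(ctx, 'CREATE TABLE IF NOT EXISTS notes (id SERIAL PRIMARY KEY, body TEXT)');
  await executeQuery(ctx, 'INSERT INTO notes (body) VALUES ($1)', ['hello']);
  return ctx.queryHistory.length; // BEGIN plus the two queries so far
});

// With useTransaction: false the pool is used directly, with no BEGIN/COMMIT.
await withTransaction(pool, { useTransaction: false }, (ctx) =>
  executeQuery(ctx, 'SELECT 1')
);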
package/esm/modules/modules.js
@@ -0,0 +1,49 @@
+import { getLatestChange } from '../files';
+import { errors } from '@pgpmjs/types';
+/**
+ * Get the latest change from the pgpm.plan file for a specific module.
+ */
+export const latestChange = (sqlmodule, modules, basePath) => {
+    const module = modules[sqlmodule];
+    if (!module) {
+        throw errors.MODULE_NOT_FOUND({ name: sqlmodule });
+    }
+    const planPath = `${basePath}/${module.path}/pgpm.plan`;
+    return getLatestChange(planPath);
+};
+/**
+ * Get the latest change and version for a specific module.
+ */
+export const latestChangeAndVersion = (sqlmodule, modules, basePath) => {
+    const module = modules[sqlmodule];
+    if (!module) {
+        throw errors.MODULE_NOT_FOUND({ name: sqlmodule });
+    }
+    const planPath = `${basePath}/${module.path}/pgpm.plan`;
+    const change = getLatestChange(planPath);
+    const pkg = require(`${basePath}/${module.path}/package.json`);
+    return { change, version: pkg.version };
+};
+/**
+ * Get extensions and modules required by a specific module.
+ */
+export const getExtensionsAndModules = (sqlmodule, modules) => {
+    const module = modules[sqlmodule];
+    if (!module) {
+        throw errors.MODULE_NOT_FOUND({ name: sqlmodule });
+    }
+    const native = module.requires.filter((req) => !Object.keys(modules).includes(req));
+    const sqitch = module.requires.filter((req) => Object.keys(modules).includes(req));
+    return { native, sqitch };
+};
+/**
+ * Get extensions and modules with their latest changes and versions.
+ */
+export const getExtensionsAndModulesChanges = (sqlmodule, modules, basePath) => {
+    const { native, sqitch } = getExtensionsAndModules(sqlmodule, modules);
+    const sqitchWithDetails = sqitch.map((mod) => {
+        const { change, version } = latestChangeAndVersion(mod, modules, basePath);
+        return { name: mod, latest: change, version };
+    });
+    return { native, sqitch: sqitchWithDetails };
+};
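Note: these helpers take a modules map keyed by module name; the per-entry shape used below ({ path, requires }) is inferred from the property accesses above, and the import path, workspace paths and module names are placeholders:

// Illustrative import path; verify against the package's "exports" map.
import {
  latestChange,
  getExtensionsAndModules,
  getExtensionsAndModulesChanges
} from '@pgpmjs/core/modules/modules';

// Inferred shape: { [name]: { path: string, requires: string[] } }
const modules = {
  'my-app':  { path: 'packages/my-app',  requires: ['uuid-ossp', 'my-base'] },
  'my-base': { path: 'packages/my-base', requires: [] }
};

// 'uuid-ossp' is not a known module, so it is classified as a native extension.
const { native, sqitch } = getExtensionsAndModules('my-app', modules);

// Reads packages/my-base/pgpm.plan and package.json under the base path.
const detailed = getExtensionsAndModulesChanges('my-app', modules, '/path/to/workspace');
const latest = latestChange('my-base', modules, '/path/to/workspace');

console.log({ native, sqitch, detailed, latest });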
package/esm/packaging/package.js
@@ -0,0 +1,96 @@
+import { Logger } from '@pgpmjs/logger';
+import { mkdirSync, readFileSync, rmSync, writeFileSync } from 'fs';
+import { relative } from 'path';
+import { deparse } from 'pgsql-deparser';
+import { parse } from 'pgsql-parser';
+import { getExtensionName } from '../files';
+import { resolve, resolveWithPlan } from '../resolution/resolve';
+import { transformProps } from './transform';
+const log = new Logger('package');
+const noop = () => undefined;
+export const cleanTree = (tree) => {
+    return transformProps(tree, {
+        stmt_len: noop,
+        stmt_location: noop,
+        location: noop,
+    });
+};
+const filterStatements = (stmts, extension) => {
+    if (!extension)
+        return stmts;
+    return stmts.filter(node => {
+        const stmt = node.stmt;
+        return !stmt.hasOwnProperty('TransactionStmt') &&
+            !stmt.hasOwnProperty('CreateExtensionStmt');
+    });
+};
+export const packageModule = async (packageDir, { usePlan = true, extension = true, pretty = true, functionDelimiter = '$EOFCODE$' } = {}) => {
+    const resolveFn = usePlan ? resolveWithPlan : resolve;
+    const sql = resolveFn(packageDir);
+    if (!sql?.trim()) {
+        log.warn(`⚠️ No SQL generated for module at ${packageDir}. Skipping.`);
+        return { sql: '' };
+    }
+    const extname = getExtensionName(packageDir);
+    try {
+        const parsed = await parse(sql);
+        parsed.stmts = filterStatements(parsed.stmts, extension);
+        const topLine = extension
+            ? `\\echo Use "CREATE EXTENSION ${extname}" to load this file. \\quit\n`
+            : '';
+        const finalSql = await deparse(parsed, {
+            pretty,
+            functionDelimiter
+        });
+        const tree1 = parsed.stmts;
+        const tree2 = await parse(finalSql);
+        const results = {
+            sql: `${topLine}${finalSql}`,
+        };
+        const diff = JSON.stringify(cleanTree(tree1)) !== JSON.stringify(cleanTree(tree2));
+        if (diff) {
+            results.diff = true;
+            results.tree1 = JSON.stringify(cleanTree(tree1), null, 2);
+            results.tree2 = JSON.stringify(cleanTree(tree2), null, 2);
+        }
+        return results;
+    }
+    catch (e) {
+        log.error(`❌ Failed to parse SQL for ${packageDir}`);
+        console.error(e);
+        throw e;
+    }
+};
+export const writePackage = async ({ version, extension = true, usePlan = true, packageDir, }) => {
+    const pkgPath = `${packageDir}/package.json`;
+    const pkg = require(pkgPath);
+    const extname = await getExtensionName(packageDir);
+    const makePath = `${packageDir}/Makefile`;
+    const controlPath = `${packageDir}/${extname}.control`;
+    const sqlFileName = `${extname}--${version}.sql`;
+    const Makefile = readFileSync(makePath, 'utf-8');
+    const control = readFileSync(controlPath, 'utf-8');
+    const { sql, diff, tree1, tree2 } = await packageModule(packageDir, {
+        extension,
+        usePlan,
+    });
+    const outPath = extension ? `${packageDir}/sql` : `${packageDir}/out`;
+    rmSync(outPath, { recursive: true, force: true });
+    mkdirSync(outPath, { recursive: true });
+    if (extension) {
+        writeFileSync(controlPath, control.replace(/default_version = '[0-9\.]+'/, `default_version = '${version}'`));
+        pkg.version = version;
+        writeFileSync(pkgPath, JSON.stringify(pkg, null, 2));
+        const regex = new RegExp(`${extname}--[0-9.]+.sql`);
+        writeFileSync(makePath, Makefile.replace(regex, sqlFileName));
+    }
+    if (diff) {
+        log.warn(`⚠️ SQL diff exists! Review the ${relative(packageDir, outPath)}/ folder.`);
+        // Uncomment if needed:
+        // writeFileSync(`${outPath}/orig.${sqlFileName}.tree.json`, tree1);
+        // writeFileSync(`${outPath}/parsed.${sqlFileName}.tree.json`, tree2);
+    }
+    const writePath = `${outPath}/${sqlFileName}`;
+    writeFileSync(writePath, sql);
+    log.success(`${relative(packageDir, writePath)} written`);
+};
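Note: writePackage rewrites the module's .control, Makefile and package.json in place and emits <extname>--<version>.sql under sql/ (or out/ when extension is false). A hedged usage sketch; the import path, directory layout and version number are placeholders:

// Illustrative import path; verify against the package's "exports" map.
import { packageModule, writePackage } from '@pgpmjs/core/packaging/package';

// Build the flattened SQL without touching the filesystem.
const { sql, diff } = await packageModule('/path/to/packages/my-ext', {
  usePlan: true,    // resolve change order from pgpm.plan
  extension: true,  // drop BEGIN/COMMIT and CREATE EXTENSION statements
  pretty: true
});
if (diff) console.warn('deparsed SQL differs structurally from the source AST');

// Bump to 1.2.0 and write my-ext--1.2.0.sql plus updated control/Makefile/package.json.
await writePackage({
  packageDir: '/path/to/packages/my-ext',
  version: '1.2.0',
  extension: true,
  usePlan: true
});

console.log(sql.slice(0, 80));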