@hexaijs/postgres 0.4.0 → 0.5.1
This diff compares the contents of publicly available package versions that have been released to one of the supported registries, as they appear in those public registries. It is provided for informational purposes only.
- package/dist/helpers-vPAudN_S.d.ts +125 -0
- package/dist/index.d.ts +64 -8
- package/dist/index.js +828 -29
- package/dist/index.js.map +1 -1
- package/dist/test.d.ts +9 -8
- package/dist/test.js +683 -246
- package/dist/test.js.map +1 -1
- package/package.json +7 -7
- package/dist/config/index.d.ts +0 -3
- package/dist/config/index.d.ts.map +0 -1
- package/dist/config/index.js +0 -19
- package/dist/config/index.js.map +0 -1
- package/dist/config/postgres-config-spec.d.ts +0 -32
- package/dist/config/postgres-config-spec.d.ts.map +0 -1
- package/dist/config/postgres-config-spec.js +0 -49
- package/dist/config/postgres-config-spec.js.map +0 -1
- package/dist/config/postgres-config.d.ts +0 -59
- package/dist/config/postgres-config.d.ts.map +0 -1
- package/dist/config/postgres-config.js +0 -181
- package/dist/config/postgres-config.js.map +0 -1
- package/dist/helpers.d.ts +0 -57
- package/dist/helpers.d.ts.map +0 -1
- package/dist/helpers.js +0 -276
- package/dist/helpers.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/postgres-event-store.d.ts +0 -18
- package/dist/postgres-event-store.d.ts.map +0 -1
- package/dist/postgres-event-store.js +0 -83
- package/dist/postgres-event-store.js.map +0 -1
- package/dist/postgres-unit-of-work.d.ts +0 -24
- package/dist/postgres-unit-of-work.d.ts.map +0 -1
- package/dist/postgres-unit-of-work.js +0 -308
- package/dist/postgres-unit-of-work.js.map +0 -1
- package/dist/run-hexai-migrations.d.ts +0 -3
- package/dist/run-hexai-migrations.d.ts.map +0 -1
- package/dist/run-hexai-migrations.js +0 -17
- package/dist/run-hexai-migrations.js.map +0 -1
- package/dist/run-migrations.d.ts +0 -11
- package/dist/run-migrations.d.ts.map +0 -1
- package/dist/run-migrations.js +0 -202
- package/dist/run-migrations.js.map +0 -1
- package/dist/test-fixtures/config.d.ts +0 -5
- package/dist/test-fixtures/config.d.ts.map +0 -1
- package/dist/test-fixtures/config.js +0 -14
- package/dist/test-fixtures/config.js.map +0 -1
- package/dist/test-fixtures/hooks.d.ts +0 -8
- package/dist/test-fixtures/hooks.d.ts.map +0 -1
- package/dist/test-fixtures/hooks.js +0 -77
- package/dist/test-fixtures/hooks.js.map +0 -1
- package/dist/test-fixtures/index.d.ts +0 -3
- package/dist/test-fixtures/index.d.ts.map +0 -1
- package/dist/test-fixtures/index.js +0 -19
- package/dist/test-fixtures/index.js.map +0 -1
- package/dist/test.d.ts.map +0 -1
- package/dist/types.d.ts +0 -14
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -11
- package/dist/types.js.map +0 -1
package/dist/index.js
CHANGED
@@ -1,30 +1,829 @@
- [29 lines of the previous 0.4.0 bundle removed; their content is not rendered in this diff view]
+import { AsyncLocalStorage } from 'async_hooks';
+import * as pg2 from 'pg';
+import { Propagation, Message } from '@hexaijs/core';
+import * as fs2 from 'fs/promises';
+import * as path2 from 'path';
+import path2__default from 'path';
+import runner from 'node-pg-migrate';
+import { fileURLToPath } from 'url';
+
+// src/postgres-unit-of-work.ts
+
+// src/config/postgres-config.ts
+var PostgresConfig = class _PostgresConfig {
+  host;
+  database;
+  user;
+  port;
+  password;
+  pool;
+  constructor(config) {
+    this.database = config.database;
+    this.password = config.password;
+    this.host = config.host ?? "localhost";
+    this.user = config.user ?? "postgres";
+    this.port = config.port ?? 5432;
+    this.pool = config.pool;
+  }
+  static fromUrl(value) {
+    return new _PostgresConfig(_PostgresConfig.parseUrl(value));
+  }
+  /**
+   * Creates a PostgresConfig from environment variables.
+   *
+   * @param prefix - Environment variable prefix
+   * @param options - Loading options (mode: "url" | "fields")
+   * @throws Error if required environment variables are not set
+   *
+   * @example
+   * ```typescript
+   * // URL mode (default): reads ASSIGNMENT_DB_URL
+   * const config = PostgresConfig.fromEnv("ASSIGNMENT_DB");
+   *
+   * // Fields mode: reads POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DATABASE, POSTGRES_USER, POSTGRES_PASSWORD
+   * const config = PostgresConfig.fromEnv("POSTGRES", { mode: "fields" });
+   * ```
+   */
+  static fromEnv(prefix, options) {
+    const mode = options?.mode ?? "url";
+    if (mode === "url") {
+      const envKey = `${prefix}_URL`;
+      const url = process.env[envKey];
+      if (!url) {
+        throw new Error(`Environment variable ${envKey} is not set`);
+      }
+      return _PostgresConfig.fromUrl(url);
+    }
+    const database = process.env[`${prefix}_DATABASE`];
+    if (!database) {
+      throw new Error(
+        `Environment variable ${prefix}_DATABASE is not set`
+      );
+    }
+    return new _PostgresConfig({
+      database,
+      host: process.env[`${prefix}_HOST`],
+      port: process.env[`${prefix}_PORT`] ? parseInt(process.env[`${prefix}_PORT`]) : void 0,
+      user: process.env[`${prefix}_USER`],
+      password: process.env[`${prefix}_PASSWORD`]
+    });
+  }
+  static parseUrl(value) {
+    const regex = /postgres(ql)?:\/\/(?<user>[^:/]+)(:(?<password>[^@]+))?@(?<host>[^:/]+)(:(?<port>\d+))?\/(?<database>.+)/;
+    const matches = value.match(regex);
+    if (!matches?.groups) {
+      throw new Error(`Invalid postgres url: ${value}`);
+    }
+    const { user, password, host, port, database } = matches.groups;
+    return {
+      host,
+      database,
+      user,
+      port: port ? parseInt(port) : 5432,
+      password
+    };
+  }
+  withDatabase(database) {
+    return new _PostgresConfig({
+      host: this.host,
+      database,
+      user: this.user,
+      port: this.port,
+      password: this.password,
+      pool: this.pool
+    });
+  }
+  withUser(user) {
+    return new _PostgresConfig({
+      host: this.host,
+      database: this.database,
+      user,
+      port: this.port,
+      password: this.password,
+      pool: this.pool
+    });
+  }
+  withPassword(password) {
+    return new _PostgresConfig({
+      host: this.host,
+      database: this.database,
+      user: this.user,
+      port: this.port,
+      password,
+      pool: this.pool
+    });
+  }
+  withHost(host) {
+    return new _PostgresConfig({
+      host,
+      database: this.database,
+      user: this.user,
+      port: this.port,
+      password: this.password,
+      pool: this.pool
+    });
+  }
+  withPort(port) {
+    return new _PostgresConfig({
+      host: this.host,
+      database: this.database,
+      user: this.user,
+      port,
+      password: this.password,
+      pool: this.pool
+    });
+  }
+  withPoolSize(size) {
+    return new _PostgresConfig({
+      host: this.host,
+      database: this.database,
+      user: this.user,
+      port: this.port,
+      password: this.password,
+      pool: { ...this.pool, size }
+    });
+  }
+  withConnectionTimeout(connectionTimeout) {
+    return new _PostgresConfig({
+      host: this.host,
+      database: this.database,
+      user: this.user,
+      port: this.port,
+      password: this.password,
+      pool: { ...this.pool, connectionTimeout }
+    });
+  }
+  withIdleTimeout(idleTimeout) {
+    return new _PostgresConfig({
+      host: this.host,
+      database: this.database,
+      user: this.user,
+      port: this.port,
+      password: this.password,
+      pool: { ...this.pool, idleTimeout }
+    });
+  }
+  toString() {
+    let url = `postgres://${this.user}`;
+    if (this.password) {
+      url += `:${this.password}`;
+    }
+    url += `@${this.host}:${this.port}/${this.database}`;
+    const queryParams = [];
+    if (this.pool?.size !== void 0) {
+      queryParams.push(`pool_size=${this.pool.size}`);
+    }
+    if (this.pool?.connectionTimeout !== void 0) {
+      queryParams.push(
+        `connection_timeout=${this.pool.connectionTimeout}`
+      );
+    }
+    if (this.pool?.idleTimeout !== void 0) {
+      queryParams.push(`idle_timeout=${this.pool.idleTimeout}`);
+    }
+    if (queryParams.length > 0) {
+      url += `?${queryParams.join("&")}`;
+    }
+    return url;
+  }
+};
+
+// src/config/postgres-config-spec.ts
+var PostgresConfigSpec = class {
+  constructor(prefix, mode = "url") {
+    this.prefix = prefix;
+    this.mode = mode;
+  }
+  _type = "postgres";
+  resolve(errors) {
+    try {
+      return PostgresConfig.fromEnv(this.prefix, { mode: this.mode });
+    } catch (e) {
+      errors.push(e.message);
+      return void 0;
+    }
+  }
+};
+function postgresConfig(prefix, mode = "url") {
+  return new PostgresConfigSpec(prefix, mode);
+}
+
+// src/types.ts
+var IsolationLevel = /* @__PURE__ */ ((IsolationLevel2) => {
+  IsolationLevel2["READ_UNCOMMITTED"] = "read uncommitted";
+  IsolationLevel2["READ_COMMITTED"] = "read committed";
+  IsolationLevel2["REPEATABLE_READ"] = "repeatable read";
+  IsolationLevel2["SERIALIZABLE"] = "serializable";
+  return IsolationLevel2;
+})(IsolationLevel || {});
+var ClientWrapper = class {
+  client;
+  getClient() {
+    return this.client;
+  }
+  constructor(urlOrClient) {
+    if (urlOrClient instanceof PostgresConfig || typeof urlOrClient === "string") {
+      this.client = new pg2.Client({
+        connectionString: urlOrClient.toString()
+      });
+    } else {
+      this.client = urlOrClient;
+    }
+  }
+  async withClient(work) {
+    await ensureConnection(this.client);
+    return work(this.client);
+  }
+  async query(query, params) {
+    const result = await this.withClient(
+      (client) => client.query(query, params)
+    );
+    return result.rows;
+  }
+  async close() {
+    await this.client.end();
+  }
+};
+var DatabaseManager = class extends ClientWrapper {
+  async createDatabase(name) {
+    const exists = await this.query(
+      `SELECT 1 FROM pg_database WHERE datname = '${name}'`
+    );
+    if (exists.length === 0) {
+      await this.client.query(`CREATE DATABASE ${name}`);
+    }
+  }
+  async dropDatabase(name) {
+    await this.query(`DROP DATABASE IF EXISTS ${name}`);
+  }
+};
+var TableManager = class extends ClientWrapper {
+  async getTableSchema(tableName) {
+    const result = await this.query(`
+      SELECT
+        column_name AS column,
+        data_type AS type
+      FROM information_schema.columns
+      WHERE table_name = '${tableName}';
+    `);
+    return result.map((row) => ({
+      column: row.column,
+      type: row.type
+    }));
+  }
+  async tableExists(tableName) {
+    const result = await this.query(`
+      SELECT
+        table_name
+      FROM information_schema.tables
+      WHERE table_name = '${tableName}';
+    `);
+    return result.length > 0;
+  }
+  async createTable(name, columns) {
+    if (await this.tableExists(name)) {
+      return;
+    }
+    const query = `
+      CREATE TABLE ${name} (
+        ${columns.map((column) => `${column.name} ${column.property}`).join(", ")}
+      );
+    `;
+    await this.query(query);
+  }
+  async dropTable(name) {
+    await this.query(`DROP TABLE IF EXISTS "${name}";`);
+  }
+  async truncateTable(name) {
+    await this.query(`TRUNCATE TABLE "${name}" RESTART IDENTITY CASCADE;`);
+  }
+  async truncateAllTables() {
+    const tables = await this.getTableNames();
+    await Promise.all(tables.map((table) => this.truncateTable(table)));
+  }
+  async dropAllTables() {
+    const tables = await this.getTableNames();
+    await Promise.all(tables.map((table) => this.dropTable(table)));
+  }
+  async getTableNames() {
+    const result = await this.query(`
+      SELECT table_name
+      FROM information_schema.tables
+      WHERE table_schema = 'public'
+        AND table_type = 'BASE TABLE';
+    `);
+    return result.map((row) => row.table_name);
+  }
+};
+async function ensureConnection(client) {
+  try {
+    await client.connect();
+  } catch (e) {
+    if (e.message.includes("already")) ; else {
+      throw e;
+    }
+  }
+}
+
+// src/postgres-unit-of-work.ts
+var DefaultPostgresUnitOfWork = class {
+  constructor(clientFactory, clientCleanUp) {
+    this.clientFactory = clientFactory;
+    this.clientCleanUp = clientCleanUp;
+  }
+  transactionStorage = new AsyncLocalStorage();
+  getClient() {
+    const current = this.getCurrentTransaction();
+    if (!current) {
+      throw new Error("Unit of work not started");
+    }
+    return current.getClient();
+  }
+  async wrap(fn, options = {}) {
+    const resolvedOptions = this.resolveOptions(options);
+    const transaction = this.resolveTransaction(resolvedOptions);
+    return this.executeInContext(
+      transaction,
+      (tx) => tx.execute(fn, resolvedOptions)
+    );
+  }
+  async withClient(fn) {
+    const currentTransaction = this.getCurrentTransaction();
+    if (currentTransaction) {
+      return fn(currentTransaction.getClient());
+    }
+    const client = await this.clientFactory();
+    try {
+      await ensureConnection(client);
+      return await fn(client);
+    } finally {
+      await this.clientCleanUp?.(client);
+    }
+  }
+  getCurrentTransaction() {
+    return this.transactionStorage.getStore() ?? null;
+  }
+  resolveOptions(options) {
+    return {
+      propagation: Propagation.EXISTING,
+      ...options
+    };
+  }
+  resolveTransaction(options) {
+    if (options.propagation === Propagation.NEW) {
+      return this.createTransaction();
+    }
+    return this.getCurrentTransaction() ?? this.createTransaction();
+  }
+  createTransaction() {
+    return new PostgresTransaction(this.clientFactory, this.clientCleanUp);
+  }
+  executeInContext(transaction, callback) {
+    return this.transactionStorage.run(
+      transaction,
+      () => callback(transaction)
+    );
+  }
+};
+var PostgresTransaction = class {
+  constructor(clientFactory, clientCleanUp) {
+    this.clientFactory = clientFactory;
+    this.clientCleanUp = clientCleanUp;
+  }
+  initialized = false;
+  closed = false;
+  abortError;
+  nestingDepth = 0;
+  options;
+  client;
+  savepoints = [];
+  async execute(fn, options) {
+    this.options = options;
+    await this.ensureStarted();
+    const executor = this.resolveExecutor(options.propagation);
+    return executor === this ? this.runWithLifecycle(fn) : executor.execute(fn, options);
+  }
+  getClient() {
+    return this.client;
+  }
+  async ensureStarted() {
+    if (this.initialized) {
+      return;
+    }
+    this.initialized = true;
+    await this.initializeClient();
+    await this.beginTransaction();
+  }
+  async initializeClient() {
+    const client = await this.clientFactory();
+    if (!("query" in client)) {
+      throw new Error("Client factory must return a pg.ClientBase");
+    }
+    await ensureConnection(client);
+    this.client = client;
+  }
+  async beginTransaction() {
+    await this.client.query("BEGIN");
+    const isolationLevel = this.options.isolationLevel ?? "read committed" /* READ_COMMITTED */;
+    if (isolationLevel !== "read committed" /* READ_COMMITTED */) {
+      await this.client.query(
+        `SET TRANSACTION ISOLATION LEVEL ${isolationLevel}`
+      );
+    }
+  }
+  async runWithLifecycle(fn) {
+    try {
+      return await this.executeWithNesting(fn);
+    } catch (e) {
+      console.error(`Transaction aborting, error in transaction:`);
+      console.error(e);
+      this.markAsAborted(e);
+      throw e;
+    } finally {
+      await this.finalizeIfRoot();
+    }
+  }
+  async executeWithNesting(fn) {
+    this.nestingDepth++;
+    try {
+      return await fn(this.client);
+    } finally {
+      this.nestingDepth--;
+    }
+  }
+  markAsAborted(error) {
+    this.abortError = error;
+  }
+  async finalizeIfRoot() {
+    if (this.nestingDepth === 0) {
+      await (this.isAborted() ? this.rollback() : this.commit());
+    }
+  }
+  resolveExecutor(propagation) {
+    if (this.nestingDepth === 0) {
+      return this;
+    }
+    return propagation === Propagation.NESTED ? this.createSavepoint() : this.findActiveSavepoint() ?? this;
+  }
+  createSavepoint() {
+    const savepoint = new Savepoint(
+      `sp_${this.savepoints.length + 1}`,
+      this.client,
+      () => this.removeSavepoint()
+    );
+    this.savepoints.push(savepoint);
+    return savepoint;
+  }
+  findActiveSavepoint() {
+    for (let i = this.savepoints.length - 1; i >= 0; i--) {
+      if (!this.savepoints[i].isClosed()) {
+        return this.savepoints[i];
+      }
+    }
+  }
+  removeSavepoint() {
+    this.savepoints.pop();
+  }
+  async commit() {
+    if (this.closed) {
+      return;
+    }
+    this.closed = true;
+    await this.client.query("COMMIT");
+    await this.clientCleanUp?.(this.client);
+  }
+  async rollback() {
+    if (this.closed) {
+      return;
+    }
+    this.closed = true;
+    try {
+      await this.client.query("ROLLBACK");
+    } catch (e) {
+      if (e instanceof Error && e.message.includes("Client was closed and is not queryable")) {
+        return;
+      }
+      throw e;
+    }
+    await this.clientCleanUp?.(this.client);
+  }
+  isAborted() {
+    return this.abortError !== void 0 && !this.closed;
+  }
+};
+var Savepoint = class {
+  constructor(name, client, onClose) {
+    this.name = name;
+    this.client = client;
+    this.onClose = onClose;
+  }
+  initialized = false;
+  closed = false;
+  abortError;
+  nestingDepth = 0;
+  async execute(fn) {
+    await this.ensureStarted();
+    return this.runWithLifecycle(fn);
+  }
+  isClosed() {
+    return this.closed;
+  }
+  async ensureStarted() {
+    if (this.initialized) {
+      return;
+    }
+    this.initialized = true;
+    await this.client.query(`SAVEPOINT ${this.name}`);
+  }
+  async runWithLifecycle(fn) {
+    this.nestingDepth++;
+    try {
+      return await fn(this.client);
+    } catch (e) {
+      this.markAsAborted(e);
+      throw e;
+    } finally {
+      this.nestingDepth--;
+      await this.finalizeIfRoot();
+    }
+  }
+  markAsAborted(error) {
+    this.abortError = error;
+  }
+  async finalizeIfRoot() {
+    if (this.nestingDepth === 0) {
+      await (this.isAborted() ? this.rollback() : this.commit());
+    }
+  }
+  async commit() {
+    if (this.closed) {
+      return;
+    }
+    this.closed = true;
+    await this.client.query(`RELEASE SAVEPOINT ${this.name}`);
+    this.onClose();
+  }
+  async rollback() {
+    if (this.closed) {
+      return;
+    }
+    this.closed = true;
+    await this.client.query(`ROLLBACK TO SAVEPOINT ${this.name}`);
+    this.onClose();
+  }
+  isAborted() {
+    return this.abortError !== void 0 && !this.closed;
+  }
+};
+function createPostgresUnitOfWork(source) {
+  if (source instanceof pg2.Pool) {
+    return new DefaultPostgresUnitOfWork(
+      async () => source.connect(),
+      (client) => client.release()
+    );
+  }
+  const connectionString = source instanceof PostgresConfig ? source.toString() : source;
+  return new DefaultPostgresUnitOfWork(
+    () => new pg2.Client({ connectionString }),
+    (client) => client.end()
+  );
+}
+function extractNumericPrefix(filename) {
+  const match = filename.match(/^(\d+)/);
+  return match ? parseInt(match[1], 10) : 0;
+}
+async function ensureTableCompatibility(client, tableName) {
+  const tableExists = await client.query(
+    `
+      SELECT 1 FROM information_schema.tables
+      WHERE table_name = $1
+    `,
+    [tableName]
+  );
+  if (tableExists.rows.length === 0) return;
+  const hasAppliedAt = await client.query(
+    `
+      SELECT 1 FROM information_schema.columns
+      WHERE table_name = $1 AND column_name = 'applied_at'
+    `,
+    [tableName]
+  );
+  if (hasAppliedAt.rows.length > 0) {
+    await client.query(`
+      ALTER TABLE "${tableName}"
+      RENAME COLUMN applied_at TO run_on
+    `);
+    console.log(`Migrated table ${tableName}: applied_at \u2192 run_on`);
+  }
+}
+async function isSqlMigrationFormat(dir) {
+  try {
+    const entries = await fs2.readdir(dir, { withFileTypes: true });
+    for (const entry of entries) {
+      if (entry.isDirectory()) {
+        const sqlPath = path2.join(dir, entry.name, "migration.sql");
+        try {
+          await fs2.access(sqlPath);
+          return true;
+        } catch {
+        }
+      }
+    }
+    return false;
+  } catch {
+    return false;
+  }
+}
+async function runSqlMigrations(client, dir, migrationsTable, dryRun) {
+  await client.query(`
+    CREATE TABLE IF NOT EXISTS "${migrationsTable}" (
+      id SERIAL PRIMARY KEY,
+      name VARCHAR(255) NOT NULL,
+      run_on TIMESTAMP NOT NULL DEFAULT NOW()
+    )
+  `);
+  const appliedResult = await client.query(
+    `SELECT name FROM "${migrationsTable}" ORDER BY run_on ASC`
+  );
+  const appliedMigrations = new Set(appliedResult.rows.map((r) => r.name));
+  const entries = await fs2.readdir(dir, { withFileTypes: true });
+  const migrationDirs = entries.filter((e) => e.isDirectory()).map((e) => e.name).sort((a, b) => extractNumericPrefix(a) - extractNumericPrefix(b));
+  const migrationsToApply = [];
+  for (const migrationDir of migrationDirs) {
+    if (appliedMigrations.has(migrationDir)) {
+      continue;
+    }
+    const sqlPath = path2.join(dir, migrationDir, "migration.sql");
+    try {
+      const sql = await fs2.readFile(sqlPath, "utf-8");
+      migrationsToApply.push({ name: migrationDir, sql });
+    } catch {
+    }
+  }
+  if (migrationsToApply.length === 0) {
+    console.log("No migrations to run!");
+    return;
+  }
+  console.log(`> Migrating files:`);
+  for (const migration of migrationsToApply) {
+    console.log(`> - ${migration.name}`);
+  }
+  if (dryRun) {
+    console.log("Dry run - no migrations applied");
+    return;
+  }
+  for (const migration of migrationsToApply) {
+    console.log(`### MIGRATION ${migration.name} (UP) ###`);
+    await client.query(migration.sql);
+    await client.query(
+      `INSERT INTO "${migrationsTable}" (name) VALUES ($1)`,
+      [migration.name]
+    );
+  }
+}
+async function runMigrations({
+  namespace,
+  url,
+  dir,
+  direction = "up",
+  count,
+  dryRun = false
+}) {
+  const migrationsTable = namespace ? `hexai__migrations_${namespace}` : "hexai__migrations";
+  const client = new pg2.Client(url);
+  try {
+    await client.connect();
+    await ensureTableCompatibility(client, migrationsTable);
+    const isSqlFormat = await isSqlMigrationFormat(dir);
+    if (isSqlFormat) {
+      await runSqlMigrations(client, dir, migrationsTable, dryRun);
+    } else {
+      await client.end();
+      await runner({
+        databaseUrl: url.toString(),
+        dir,
+        direction,
+        count,
+        migrationsTable,
+        dryRun,
+        singleTransaction: true,
+        log: (msg) => {
+          if (!msg.startsWith("Can't determine timestamp for")) {
+            console.log(msg);
+          }
+        }
+      });
+      return;
+    }
+  } finally {
+    try {
+      await client.end();
+    } catch {
+    }
+  }
+}
+var __dirname$1 = path2__default.dirname(fileURLToPath(import.meta.url));
+var MIGRATIONS_DIR = path2__default.join(__dirname$1, "../migrations");
+async function runHexaiMigrations(dbUrl) {
+  await runMigrations({
+    dir: MIGRATIONS_DIR,
+    url: dbUrl,
+    namespace: "hexai"
+  });
+}
+var DEFAULT_TABLE_NAME = "hexai__events";
+var COLUMNS_PER_EVENT = 3;
+var PostgresEventStore = class {
+  constructor(client, config = {}) {
+    this.client = client;
+    this.tableName = config.tableName ?? DEFAULT_TABLE_NAME;
+  }
+  tableName;
+  async store(event) {
+    const serialized = event.serialize();
+    const result = await this.client.query(
+      `INSERT INTO ${this.tableName} (message_type, headers, payload)
+       VALUES ($1, $2, $3)
+       RETURNING position`,
+      [
+        event.getMessageType(),
+        JSON.stringify(serialized.headers),
+        JSON.stringify(serialized.payload)
+      ]
+    );
+    return {
+      position: +result.rows[0].position,
+      event
+    };
+  }
+  async storeAll(events) {
+    if (events.length === 0) {
+      return [];
+    }
+    const values = [];
+    const placeholders = [];
+    events.forEach((event, index) => {
+      const serialized = event.serialize();
+      const offset = index * COLUMNS_PER_EVENT;
+      placeholders.push(
+        `($${offset + 1}, $${offset + 2}, $${offset + 3})`
+      );
+      values.push(
+        event.getMessageType(),
+        JSON.stringify(serialized.headers),
+        JSON.stringify(serialized.payload)
+      );
+    });
+    const result = await this.client.query(
+      `INSERT INTO ${this.tableName} (message_type, headers, payload)
+       VALUES ${placeholders.join(", ")}
+       RETURNING position`,
+      values
+    );
+    return result.rows.map((row, index) => ({
+      position: +row.position,
+      event: events[index]
+    }));
+  }
+  async fetch(afterPosition, limit) {
+    const lastPosition = await this.getLastPosition();
+    let query = `
+      SELECT position, message_type, headers, payload
+      FROM ${this.tableName}
+      WHERE position > $1
+      ORDER BY position ASC
+    `;
+    const params = [afterPosition];
+    if (limit !== void 0) {
+      query += ` LIMIT $2`;
+      params.push(limit);
+    }
+    const result = await this.client.query(query, params);
+    const events = result.rows.map(
+      (row) => this.deserializeRow(row)
+    );
+    return {
+      events,
+      lastPosition
+    };
+  }
+  async getLastPosition() {
+    const result = await this.client.query(
+      `SELECT MAX(position) as max FROM ${this.tableName}`
+    );
+    return +(result.rows[0].max ?? 0);
+  }
+  deserializeRow(row) {
+    const headers = row.headers;
+    const payload = row.payload;
+    const event = Message.from(payload, headers);
+    return {
+      position: +row.position,
+      event
+    };
+  }
+};
+
+export { ClientWrapper, DatabaseManager, DefaultPostgresUnitOfWork, IsolationLevel, PostgresConfig, PostgresConfigSpec, PostgresEventStore, TableManager, createPostgresUnitOfWork, ensureConnection, postgresConfig, runHexaiMigrations, runMigrations };
+//# sourceMappingURL=index.js.map
 //# sourceMappingURL=index.js.map
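The per-module dist files deleted in the list above reappear here as sections of a single bundle: the rebuilt dist/index.js inlines the config, types, unit-of-work, migration-runner, and event-store code that previously shipped as separate files. A minimal usage sketch of the exported API, based only on the signatures visible in this diff — the APP_DB prefix, the example query, and the option values are illustrative and not part of the package:

import type { ClientBase } from "pg";
import {
  PostgresConfig,
  createPostgresUnitOfWork,
  runHexaiMigrations,
  IsolationLevel,
} from "@hexaijs/postgres";

// URL mode (the default) reads `${prefix}_URL`, so this expects APP_DB_URL to be set.
const config = PostgresConfig.fromEnv("APP_DB");

// Apply the migrations bundled with the package against that database.
await runHexaiMigrations(config.toString());

// A unit of work can be built from a pg.Pool, a PostgresConfig, or a connection string.
const uow = createPostgresUnitOfWork(config);

// wrap() opens a transaction, hands the pg client to the callback, commits on success,
// and rolls back if the callback throws; options are forwarded to the transaction.
const eventCount = await uow.wrap(
  async (client: ClientBase) => {
    const result = await client.query("SELECT COUNT(*) AS n FROM hexai__events");
    return Number(result.rows[0].n);
  },
  { isolationLevel: IsolationLevel.SERIALIZABLE }
);

Nested wrap() calls follow the Propagation option from @hexaijs/core: EXISTING (the default) joins the surrounding transaction, NEW starts a separate one, and NESTED runs inside a SAVEPOINT as implemented by PostgresTransaction.createSavepoint().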