@event-driven-io/dumbo 0.13.0-beta.2 → 0.13.0-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3B24ORM3.js +34 -0
- package/dist/chunk-3B24ORM3.js.map +1 -0
- package/dist/chunk-FXETEUW2.cjs +34 -0
- package/dist/chunk-FXETEUW2.cjs.map +1 -0
- package/dist/chunk-JDP6VMRY.cjs +556 -0
- package/dist/chunk-JDP6VMRY.cjs.map +1 -0
- package/dist/{chunk-OJ34O3Q2.cjs → chunk-PYTHH6WO.cjs} +2161 -1639
- package/dist/chunk-PYTHH6WO.cjs.map +1 -0
- package/dist/chunk-SQXAAA3N.cjs +481 -0
- package/dist/chunk-SQXAAA3N.cjs.map +1 -0
- package/dist/chunk-U445U32Q.js +83 -0
- package/dist/chunk-U445U32Q.js.map +1 -0
- package/dist/{chunk-XVV3OOQX.js → chunk-USPL7TWB.js} +209 -58
- package/dist/chunk-USPL7TWB.js.map +1 -0
- package/dist/chunk-V3QUMPUM.js +481 -0
- package/dist/chunk-V3QUMPUM.js.map +1 -0
- package/dist/chunk-VIQJEUVF.cjs +83 -0
- package/dist/chunk-VIQJEUVF.cjs.map +1 -0
- package/dist/{chunk-N7RWT46K.js → chunk-ZH6YVE3B.js} +2132 -1610
- package/dist/chunk-ZH6YVE3B.js.map +1 -0
- package/dist/cloudflare.cjs +451 -0
- package/dist/cloudflare.cjs.map +1 -0
- package/dist/cloudflare.d.cts +92 -0
- package/dist/cloudflare.d.ts +92 -0
- package/dist/cloudflare.js +451 -0
- package/dist/cloudflare.js.map +1 -0
- package/dist/{columnProcessors-DMPpTPqM.d.ts → columnProcessors-BwcyMnUX.d.ts} +1 -1
- package/dist/{columnProcessors-BX-sH7ah.d.cts → columnProcessors-CO9z2XBz.d.cts} +1 -1
- package/dist/{connectionString-B1wm0TFc.d.cts → connectionString-DHJiGj2D.d.cts} +339 -129
- package/dist/{connectionString-B1wm0TFc.d.ts → connectionString-DHJiGj2D.d.ts} +339 -129
- package/dist/index.cjs +96 -4
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +23 -18
- package/dist/index.d.ts +23 -18
- package/dist/index.js +99 -7
- package/dist/index.js.map +1 -1
- package/dist/pg.cjs +375 -9
- package/dist/pg.cjs.map +1 -1
- package/dist/pg.d.cts +76 -104
- package/dist/pg.d.ts +76 -104
- package/dist/pg.js +427 -61
- package/dist/pg.js.map +1 -1
- package/dist/postgresql.cjs +50 -0
- package/dist/postgresql.cjs.map +1 -0
- package/dist/postgresql.d.cts +61 -0
- package/dist/postgresql.d.ts +61 -0
- package/dist/postgresql.js +50 -0
- package/dist/sqlite.cjs +56 -0
- package/dist/sqlite.cjs.map +1 -0
- package/dist/{index-C0h0c380.d.cts → sqlite.d.cts} +24 -7
- package/dist/{index-C2z_XBn6.d.ts → sqlite.d.ts} +24 -7
- package/dist/sqlite.js +56 -0
- package/dist/sqlite3.cjs +240 -11
- package/dist/sqlite3.cjs.map +1 -1
- package/dist/sqlite3.d.cts +38 -18
- package/dist/sqlite3.d.ts +38 -18
- package/dist/sqlite3.js +251 -22
- package/dist/sqlite3.js.map +1 -1
- package/package.json +69 -29
- package/dist/chunk-A7TC7IOP.cjs +0 -55
- package/dist/chunk-A7TC7IOP.cjs.map +0 -1
- package/dist/chunk-F7JQ7BX7.js +0 -240
- package/dist/chunk-F7JQ7BX7.js.map +0 -1
- package/dist/chunk-I34X53VL.js +0 -688
- package/dist/chunk-I34X53VL.js.map +0 -1
- package/dist/chunk-ISNF6USX.cjs +0 -688
- package/dist/chunk-ISNF6USX.cjs.map +0 -1
- package/dist/chunk-IYEHOE4S.cjs +0 -405
- package/dist/chunk-IYEHOE4S.cjs.map +0 -1
- package/dist/chunk-L2YZQAG3.cjs +0 -240
- package/dist/chunk-L2YZQAG3.cjs.map +0 -1
- package/dist/chunk-N7RWT46K.js.map +0 -1
- package/dist/chunk-OJ34O3Q2.cjs.map +0 -1
- package/dist/chunk-TXSETOGH.js +0 -55
- package/dist/chunk-TXSETOGH.js.map +0 -1
- package/dist/chunk-XVV3OOQX.js.map +0 -1
- package/dist/d1.cjs +0 -277
- package/dist/d1.cjs.map +0 -1
- package/dist/d1.d.cts +0 -72
- package/dist/d1.d.ts +0 -72
- package/dist/d1.js +0 -277
- package/dist/d1.js.map +0 -1
- package/dist/pg-3ACXFMU4.cjs +0 -59
- package/dist/pg-3ACXFMU4.cjs.map +0 -1
- package/dist/pg-GHOW3XSG.js +0 -59
- package/dist/sqlite3-EEIKQCJR.js +0 -25
- package/dist/sqlite3-SE4DDYZE.cjs +0 -25
- package/dist/sqlite3-SE4DDYZE.cjs.map +0 -1
- /package/dist/{pg-GHOW3XSG.js.map → postgresql.js.map} +0 -0
- /package/dist/{sqlite3-EEIKQCJR.js.map → sqlite.js.map} +0 -0
package/dist/index.js
CHANGED
|
@@ -1,41 +1,57 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
parseConnectionString
|
|
4
|
-
} from "./chunk-TXSETOGH.js";
|
|
1
|
+
import "./chunk-U445U32Q.js";
|
|
2
|
+
import "./chunk-3B24ORM3.js";
|
|
5
3
|
import {
|
|
6
4
|
ANSISQLIdentifierQuote,
|
|
7
5
|
ANSISQLParamPlaceholder,
|
|
6
|
+
AdminShutdownError,
|
|
8
7
|
AutoIncrementSQLColumnToken,
|
|
8
|
+
BatchCommandNoChangesError,
|
|
9
9
|
BigIntegerToken,
|
|
10
10
|
BigSerialToken,
|
|
11
|
+
CheckViolationError,
|
|
11
12
|
ColumnTypeToken,
|
|
12
13
|
ColumnURN,
|
|
13
14
|
ColumnURNType,
|
|
15
|
+
ConcurrencyError,
|
|
16
|
+
ConnectionError,
|
|
17
|
+
DataError,
|
|
14
18
|
DatabaseSchemaURN,
|
|
15
19
|
DatabaseSchemaURNType,
|
|
16
20
|
DatabaseURN,
|
|
17
21
|
DatabaseURNType,
|
|
22
|
+
DeadlockError,
|
|
18
23
|
DefaultMapSQLParamValueOptions,
|
|
19
24
|
DumboDatabaseDriverRegistry,
|
|
25
|
+
DumboDatabaseMetadataRegistry,
|
|
26
|
+
DumboError,
|
|
27
|
+
ExclusionViolationError,
|
|
20
28
|
ExpandArrayProcessor,
|
|
21
29
|
ExpandSQLInProcessor,
|
|
30
|
+
ForeignKeyViolationError,
|
|
22
31
|
FormatIdentifierProcessor,
|
|
23
32
|
IndexURN,
|
|
24
33
|
IndexURNType,
|
|
34
|
+
InsufficientResourcesError,
|
|
25
35
|
IntegerToken,
|
|
36
|
+
IntegrityConstraintViolationError,
|
|
37
|
+
InvalidOperationError,
|
|
26
38
|
JSONBToken,
|
|
39
|
+
JSONCodec,
|
|
27
40
|
JSONReplacer,
|
|
28
41
|
JSONReplacers,
|
|
29
42
|
JSONReviver,
|
|
30
43
|
JSONRevivers,
|
|
31
44
|
JSONSerializer,
|
|
45
|
+
LockNotAvailableError,
|
|
32
46
|
LogLevel,
|
|
33
47
|
LogStyle,
|
|
34
48
|
MIGRATIONS_LOCK_ID,
|
|
35
49
|
MapLiteralProcessor,
|
|
36
50
|
NoDatabaseLock,
|
|
51
|
+
NotNullViolationError,
|
|
37
52
|
ParametrizedSQLBuilder,
|
|
38
|
-
|
|
53
|
+
QueryCanceledError,
|
|
54
|
+
RawSQL,
|
|
39
55
|
SQL,
|
|
40
56
|
SQLArray,
|
|
41
57
|
SQLColumnToken,
|
|
@@ -52,11 +68,15 @@ import {
|
|
|
52
68
|
SQLValueMapper,
|
|
53
69
|
SchemaComponentMigrator,
|
|
54
70
|
SerialToken,
|
|
71
|
+
SerializationError,
|
|
72
|
+
SystemError,
|
|
55
73
|
TableURN,
|
|
56
74
|
TableURNType,
|
|
57
75
|
TimestampToken,
|
|
58
76
|
TimestamptzToken,
|
|
59
77
|
TokenizedSQL,
|
|
78
|
+
TransientDatabaseError,
|
|
79
|
+
UniqueConstraintError,
|
|
60
80
|
VarcharToken,
|
|
61
81
|
ansiSqlReservedMap,
|
|
62
82
|
canHandleDriverWithConnectionString,
|
|
@@ -81,6 +101,7 @@ import {
|
|
|
81
101
|
defaultProcessorsRegistry,
|
|
82
102
|
describeSQL,
|
|
83
103
|
dumboDatabaseDriverRegistry,
|
|
104
|
+
dumboDatabaseMetadataRegistry,
|
|
84
105
|
dumboSchema,
|
|
85
106
|
executeInAmbientConnection,
|
|
86
107
|
executeInNewConnection,
|
|
@@ -94,7 +115,10 @@ import {
|
|
|
94
115
|
formatSQL,
|
|
95
116
|
fromDatabaseDriverType,
|
|
96
117
|
getDatabaseDriverName,
|
|
118
|
+
getDatabaseMetadata,
|
|
97
119
|
getDatabaseType,
|
|
120
|
+
getDefaultDatabase,
|
|
121
|
+
getDefaultDatabaseAsync,
|
|
98
122
|
getDefaultMigratorOptionsFromRegistry,
|
|
99
123
|
getFormatter,
|
|
100
124
|
indexSchemaComponent,
|
|
@@ -104,6 +128,7 @@ import {
|
|
|
104
128
|
jsonSerializer,
|
|
105
129
|
mapANSISQLParamPlaceholder,
|
|
106
130
|
mapColumnToBigint,
|
|
131
|
+
mapColumnToDate,
|
|
107
132
|
mapColumnToJSON,
|
|
108
133
|
mapDefaultSQLColumnProcessors,
|
|
109
134
|
mapRows,
|
|
@@ -117,6 +142,7 @@ import {
|
|
|
117
142
|
registerDefaultMigratorOptions,
|
|
118
143
|
registerFormatter,
|
|
119
144
|
relationship,
|
|
145
|
+
resolveDatabaseMetadata,
|
|
120
146
|
runSQLMigrations,
|
|
121
147
|
schemaComponent,
|
|
122
148
|
schemaComponentURN,
|
|
@@ -133,41 +159,97 @@ import {
|
|
|
133
159
|
transactionFactoryWithAmbientConnection,
|
|
134
160
|
transactionFactoryWithDbClient,
|
|
135
161
|
transactionFactoryWithNewConnection
|
|
136
|
-
} from "./chunk-
|
|
162
|
+
} from "./chunk-ZH6YVE3B.js";
|
|
163
|
+
|
|
164
|
+
// src/storage/all/connections/connectionString.ts
|
|
165
|
+
var parseConnectionString = (connectionString) => {
|
|
166
|
+
if (connectionString.startsWith("postgresql://") || connectionString.startsWith("postgres://")) {
|
|
167
|
+
return {
|
|
168
|
+
databaseType: "PostgreSQL",
|
|
169
|
+
driverName: "pg"
|
|
170
|
+
};
|
|
171
|
+
}
|
|
172
|
+
if (connectionString.startsWith("file:") || connectionString === ":memory:" || connectionString.startsWith("/") || connectionString.startsWith("./")) {
|
|
173
|
+
return {
|
|
174
|
+
databaseType: "SQLite",
|
|
175
|
+
driverName: "sqlite3"
|
|
176
|
+
};
|
|
177
|
+
}
|
|
178
|
+
if (connectionString.startsWith("d1:")) {
|
|
179
|
+
return {
|
|
180
|
+
databaseType: "SQLite",
|
|
181
|
+
driverName: "d1"
|
|
182
|
+
};
|
|
183
|
+
}
|
|
184
|
+
throw new Error(
|
|
185
|
+
`Unsupported database connection string: ${connectionString}`
|
|
186
|
+
);
|
|
187
|
+
};
|
|
188
|
+
|
|
189
|
+
// src/storage/all/index.ts
|
|
190
|
+
function dumbo(options) {
|
|
191
|
+
const { driverType } = options;
|
|
192
|
+
const driver = options.driver ?? dumboDatabaseDriverRegistry.tryGet(options);
|
|
193
|
+
if (driver === null) {
|
|
194
|
+
throw new Error(`No plugin found for driver type: ${driverType}`);
|
|
195
|
+
}
|
|
196
|
+
return driver.createPool({
|
|
197
|
+
...options,
|
|
198
|
+
driverType: driver.driverType
|
|
199
|
+
});
|
|
200
|
+
}
|
|
137
201
|
export {
|
|
138
202
|
ANSISQLIdentifierQuote,
|
|
139
203
|
ANSISQLParamPlaceholder,
|
|
204
|
+
AdminShutdownError,
|
|
140
205
|
AutoIncrementSQLColumnToken,
|
|
206
|
+
BatchCommandNoChangesError,
|
|
141
207
|
BigIntegerToken,
|
|
142
208
|
BigSerialToken,
|
|
209
|
+
CheckViolationError,
|
|
143
210
|
ColumnTypeToken,
|
|
144
211
|
ColumnURN,
|
|
145
212
|
ColumnURNType,
|
|
213
|
+
ConcurrencyError,
|
|
214
|
+
ConnectionError,
|
|
215
|
+
DataError,
|
|
146
216
|
DatabaseSchemaURN,
|
|
147
217
|
DatabaseSchemaURNType,
|
|
148
218
|
DatabaseURN,
|
|
149
219
|
DatabaseURNType,
|
|
220
|
+
DeadlockError,
|
|
150
221
|
DefaultMapSQLParamValueOptions,
|
|
151
222
|
DumboDatabaseDriverRegistry,
|
|
223
|
+
DumboDatabaseMetadataRegistry,
|
|
224
|
+
DumboError,
|
|
225
|
+
ExclusionViolationError,
|
|
152
226
|
ExpandArrayProcessor,
|
|
153
227
|
ExpandSQLInProcessor,
|
|
228
|
+
ForeignKeyViolationError,
|
|
154
229
|
FormatIdentifierProcessor,
|
|
155
230
|
IndexURN,
|
|
156
231
|
IndexURNType,
|
|
232
|
+
InsufficientResourcesError,
|
|
157
233
|
IntegerToken,
|
|
234
|
+
IntegrityConstraintViolationError,
|
|
235
|
+
InvalidOperationError,
|
|
158
236
|
JSONBToken,
|
|
237
|
+
JSONCodec,
|
|
159
238
|
JSONReplacer,
|
|
160
239
|
JSONReplacers,
|
|
161
240
|
JSONReviver,
|
|
162
241
|
JSONRevivers,
|
|
163
242
|
JSONSerializer,
|
|
243
|
+
LockNotAvailableError,
|
|
164
244
|
LogLevel,
|
|
165
245
|
LogStyle,
|
|
166
246
|
MIGRATIONS_LOCK_ID,
|
|
167
247
|
MapLiteralProcessor,
|
|
168
248
|
NoDatabaseLock,
|
|
249
|
+
NotNullViolationError,
|
|
169
250
|
ParametrizedSQLBuilder,
|
|
170
|
-
|
|
251
|
+
QueryCanceledError,
|
|
252
|
+
RawSQL,
|
|
171
253
|
SQL,
|
|
172
254
|
SQLArray,
|
|
173
255
|
SQLColumnToken,
|
|
@@ -184,11 +266,15 @@ export {
|
|
|
184
266
|
SQLValueMapper,
|
|
185
267
|
SchemaComponentMigrator,
|
|
186
268
|
SerialToken,
|
|
269
|
+
SerializationError,
|
|
270
|
+
SystemError,
|
|
187
271
|
TableURN,
|
|
188
272
|
TableURNType,
|
|
189
273
|
TimestampToken,
|
|
190
274
|
TimestamptzToken,
|
|
191
275
|
TokenizedSQL,
|
|
276
|
+
TransientDatabaseError,
|
|
277
|
+
UniqueConstraintError,
|
|
192
278
|
VarcharToken,
|
|
193
279
|
ansiSqlReservedMap,
|
|
194
280
|
canHandleDriverWithConnectionString,
|
|
@@ -214,6 +300,7 @@ export {
|
|
|
214
300
|
describeSQL,
|
|
215
301
|
dumbo,
|
|
216
302
|
dumboDatabaseDriverRegistry,
|
|
303
|
+
dumboDatabaseMetadataRegistry,
|
|
217
304
|
dumboSchema,
|
|
218
305
|
executeInAmbientConnection,
|
|
219
306
|
executeInNewConnection,
|
|
@@ -227,7 +314,10 @@ export {
|
|
|
227
314
|
formatSQL,
|
|
228
315
|
fromDatabaseDriverType,
|
|
229
316
|
getDatabaseDriverName,
|
|
317
|
+
getDatabaseMetadata,
|
|
230
318
|
getDatabaseType,
|
|
319
|
+
getDefaultDatabase,
|
|
320
|
+
getDefaultDatabaseAsync,
|
|
231
321
|
getDefaultMigratorOptionsFromRegistry,
|
|
232
322
|
getFormatter,
|
|
233
323
|
indexSchemaComponent,
|
|
@@ -237,6 +327,7 @@ export {
|
|
|
237
327
|
jsonSerializer,
|
|
238
328
|
mapANSISQLParamPlaceholder,
|
|
239
329
|
mapColumnToBigint,
|
|
330
|
+
mapColumnToDate,
|
|
240
331
|
mapColumnToJSON,
|
|
241
332
|
mapDefaultSQLColumnProcessors,
|
|
242
333
|
mapRows,
|
|
@@ -251,6 +342,7 @@ export {
|
|
|
251
342
|
registerDefaultMigratorOptions,
|
|
252
343
|
registerFormatter,
|
|
253
344
|
relationship,
|
|
345
|
+
resolveDatabaseMetadata,
|
|
254
346
|
runSQLMigrations,
|
|
255
347
|
schemaComponent,
|
|
256
348
|
schemaComponentURN,
|
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
1
|
+
{"version":3,"sources":["../src/storage/all/connections/connectionString.ts","../src/storage/all/index.ts"],"sourcesContent":["import type { DatabaseDriverTypeParts, DatabaseType } from '../../../core';\n\nexport type DatabaseConnectionString<\n DatabaseTypeName extends DatabaseType = DatabaseType,\n Format extends string = string,\n> = Format & {\n _databaseType: DatabaseTypeName;\n};\n\nexport const parseConnectionString = (\n connectionString: DatabaseConnectionString | string,\n): DatabaseDriverTypeParts => {\n if (\n connectionString.startsWith('postgresql://') ||\n connectionString.startsWith('postgres://')\n ) {\n return {\n databaseType: 'PostgreSQL',\n driverName: 'pg',\n };\n }\n\n if (\n connectionString.startsWith('file:') ||\n connectionString === ':memory:' ||\n connectionString.startsWith('/') ||\n connectionString.startsWith('./')\n ) {\n return {\n databaseType: 'SQLite',\n driverName: 'sqlite3',\n };\n }\n\n if (connectionString.startsWith('d1:')) {\n return {\n databaseType: 'SQLite',\n driverName: 'd1',\n };\n }\n\n throw new Error(\n `Unsupported database connection string: ${connectionString}`,\n );\n};\n","import {\n dumboDatabaseDriverRegistry,\n type AnyDumboDatabaseDriver,\n type DumboConnectionOptions,\n type ExtractDumboDatabaseDriverOptions,\n type ExtractDumboTypeFromDriver,\n type JSONSerializationOptions,\n} from '../../core';\n\nexport * from './connections';\n\nexport function dumbo<Driver extends AnyDumboDatabaseDriver>(\n options: ExtractDumboDatabaseDriverOptions<Driver> & {\n driver: Driver;\n } & JSONSerializationOptions,\n): ExtractDumboTypeFromDriver<Driver>;\n\nexport function dumbo<\n DatabaseDriver extends AnyDumboDatabaseDriver = AnyDumboDatabaseDriver,\n ConnectionOptions extends DumboConnectionOptions<DatabaseDriver> =\n DumboConnectionOptions<DatabaseDriver>,\n>(\n options: ConnectionOptions & { driver?: never },\n): ExtractDumboTypeFromDriver<DatabaseDriver>;\n\nexport function dumbo<\n DatabaseDriver extends 
AnyDumboDatabaseDriver = AnyDumboDatabaseDriver,\n>(\n options: DumboConnectionOptions<DatabaseDriver>,\n): ExtractDumboTypeFromDriver<DatabaseDriver> {\n const { driverType } = options;\n\n const driver =\n options.driver ??\n dumboDatabaseDriverRegistry.tryGet<DatabaseDriver>(options);\n\n if (driver === null) {\n throw new Error(`No plugin found for driver type: ${driverType}`);\n }\n\n return driver.createPool({\n ...options,\n driverType: driver.driverType,\n }) as ExtractDumboTypeFromDriver<DatabaseDriver>;\n}\n\nimport '../postgresql/core/schema/postgreSQLMetadata';\nimport '../sqlite/core/schema/sqliteMetadata';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AASO,IAAM,wBAAwB,CACnC,qBAC4B;AAC5B,MACE,iBAAiB,WAAW,eAAe,KAC3C,iBAAiB,WAAW,aAAa,GACzC;AACA,WAAO;AAAA,MACL,cAAc;AAAA,MACd,YAAY;AAAA,IACd;AAAA,EACF;AAEA,MACE,iBAAiB,WAAW,OAAO,KACnC,qBAAqB,cACrB,iBAAiB,WAAW,GAAG,KAC/B,iBAAiB,WAAW,IAAI,GAChC;AACA,WAAO;AAAA,MACL,cAAc;AAAA,MACd,YAAY;AAAA,IACd;AAAA,EACF;AAEA,MAAI,iBAAiB,WAAW,KAAK,GAAG;AACtC,WAAO;AAAA,MACL,cAAc;AAAA,MACd,YAAY;AAAA,IACd;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR,2CAA2C,gBAAgB;AAAA,EAC7D;AACF;;;ACnBO,SAAS,MAGd,SAC4C;AAC5C,QAAM,EAAE,WAAW,IAAI;AAEvB,QAAM,SACJ,QAAQ,UACR,4BAA4B,OAAuB,OAAO;AAE5D,MAAI,WAAW,MAAM;AACnB,UAAM,IAAI,MAAM,oCAAoC,UAAU,EAAE;AAAA,EAClE;AAEA,SAAO,OAAO,WAAW;AAAA,IACvB,GAAG;AAAA,IACH,YAAY,OAAO;AAAA,EACrB,CAAC;AACH;","names":[]}
|
package/dist/pg.cjs
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true});
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
|
|
2
2
|
|
|
3
3
|
|
|
4
4
|
|
|
@@ -11,6 +11,7 @@
|
|
|
11
11
|
|
|
12
12
|
|
|
13
13
|
|
|
14
|
+
var _chunkSQXAAA3Ncjs = require('./chunk-SQXAAA3N.cjs');
|
|
14
15
|
|
|
15
16
|
|
|
16
17
|
|
|
@@ -21,6 +22,7 @@
|
|
|
21
22
|
|
|
22
23
|
|
|
23
24
|
|
|
25
|
+
var _chunkVIQJEUVFcjs = require('./chunk-VIQJEUVF.cjs');
|
|
24
26
|
|
|
25
27
|
|
|
26
28
|
|
|
@@ -32,18 +34,387 @@
|
|
|
32
34
|
|
|
33
35
|
|
|
34
36
|
|
|
37
|
+
var _chunkPYTHH6WOcjs = require('./chunk-PYTHH6WO.cjs');
|
|
35
38
|
|
|
39
|
+
// src/storage/postgresql/pg/connections/connection.ts
|
|
40
|
+
var _pg = require('pg'); var _pg2 = _interopRequireDefault(_pg);
|
|
36
41
|
|
|
42
|
+
// src/storage/postgresql/pg/execute/execute.ts
|
|
37
43
|
|
|
44
|
+
var isPgNativePool = (poolOrClient) => {
|
|
45
|
+
return poolOrClient instanceof _pg2.default.Pool;
|
|
46
|
+
};
|
|
47
|
+
var isPgClient = (poolOrClient) => poolOrClient instanceof _pg2.default.Client;
|
|
48
|
+
var isPgPoolClient = (poolOrClient) => "release" in poolOrClient && typeof poolOrClient.release === "function";
|
|
49
|
+
var pgExecute = async (poolOrClient, handle) => {
|
|
50
|
+
const client = isPgNativePool(poolOrClient) ? await poolOrClient.connect() : poolOrClient;
|
|
51
|
+
try {
|
|
52
|
+
return await handle(client);
|
|
53
|
+
} finally {
|
|
54
|
+
if (isPgNativePool(poolOrClient) && isPgPoolClient(client))
|
|
55
|
+
client.release();
|
|
56
|
+
}
|
|
57
|
+
};
|
|
58
|
+
var pgSQLExecutor = ({
|
|
59
|
+
serializer
|
|
60
|
+
}) => ({
|
|
61
|
+
driverType: PgDriverType,
|
|
62
|
+
query: async (client, sql, options) => {
|
|
63
|
+
const results = await batchQuery(
|
|
64
|
+
client,
|
|
65
|
+
[sql],
|
|
66
|
+
serializer,
|
|
67
|
+
options
|
|
68
|
+
);
|
|
69
|
+
return results[0];
|
|
70
|
+
},
|
|
71
|
+
batchQuery: (client, sqls, options) => batchQuery(client, sqls, serializer, options),
|
|
72
|
+
command: async (client, sql, options) => {
|
|
73
|
+
const results = await batchCommand(
|
|
74
|
+
client,
|
|
75
|
+
[sql],
|
|
76
|
+
serializer,
|
|
77
|
+
options
|
|
78
|
+
);
|
|
79
|
+
return results[0];
|
|
80
|
+
},
|
|
81
|
+
batchCommand: (client, sqls, options) => batchCommand(client, sqls, serializer, options),
|
|
82
|
+
formatter: _chunkSQXAAA3Ncjs.pgFormatter
|
|
83
|
+
});
|
|
84
|
+
async function batchQuery(client, sqls, serializer, options) {
|
|
85
|
+
const results = Array(
|
|
86
|
+
sqls.length
|
|
87
|
+
);
|
|
88
|
+
if (_optionalChain([options, 'optionalAccess', _ => _.timeoutMs])) {
|
|
89
|
+
await client.query(`SET statement_timeout = ${options.timeoutMs}`);
|
|
90
|
+
}
|
|
91
|
+
for (let i = 0; i < sqls.length; i++) {
|
|
92
|
+
const { query, params } = _chunkSQXAAA3Ncjs.pgFormatter.format(sqls[i], { serializer });
|
|
93
|
+
_chunkPYTHH6WOcjs.tracer.info("db:sql:query", {
|
|
94
|
+
query,
|
|
95
|
+
params,
|
|
96
|
+
debugSQL: _chunkSQXAAA3Ncjs.pgFormatter.describe(sqls[i], { serializer })
|
|
97
|
+
});
|
|
98
|
+
try {
|
|
99
|
+
let result = params.length > 0 ? await client.query(query, params) : await client.query(query);
|
|
100
|
+
if (_optionalChain([options, 'optionalAccess', _2 => _2.mapping])) {
|
|
101
|
+
result = {
|
|
102
|
+
...result,
|
|
103
|
+
rows: result.rows.map(
|
|
104
|
+
(row) => _chunkPYTHH6WOcjs.mapSQLQueryResult.call(void 0, row, options.mapping)
|
|
105
|
+
)
|
|
106
|
+
};
|
|
107
|
+
}
|
|
108
|
+
results[i] = { rowCount: result.rowCount, rows: result.rows };
|
|
109
|
+
} catch (error) {
|
|
110
|
+
_chunkPYTHH6WOcjs.tracer.error("db:sql:batch_query:execute:error", { error });
|
|
111
|
+
throw _chunkSQXAAA3Ncjs.mapPostgresError.call(void 0, error);
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
return results;
|
|
115
|
+
}
|
|
116
|
+
async function batchCommand(client, sqls, serializer, options) {
|
|
117
|
+
const results = Array(
|
|
118
|
+
sqls.length
|
|
119
|
+
);
|
|
120
|
+
if (_optionalChain([options, 'optionalAccess', _3 => _3.timeoutMs])) {
|
|
121
|
+
await client.query(`SET statement_timeout = ${options.timeoutMs}`);
|
|
122
|
+
}
|
|
123
|
+
for (let i = 0; i < sqls.length; i++) {
|
|
124
|
+
const { query, params } = _chunkSQXAAA3Ncjs.pgFormatter.format(sqls[i], { serializer });
|
|
125
|
+
_chunkPYTHH6WOcjs.tracer.info("db:sql:command", {
|
|
126
|
+
query,
|
|
127
|
+
params,
|
|
128
|
+
debugSQL: _chunkSQXAAA3Ncjs.pgFormatter.describe(sqls[i], { serializer })
|
|
129
|
+
});
|
|
130
|
+
try {
|
|
131
|
+
let result = params.length > 0 ? await client.query(query, params) : await client.query(query);
|
|
132
|
+
if (_optionalChain([options, 'optionalAccess', _4 => _4.mapping])) {
|
|
133
|
+
result = {
|
|
134
|
+
...result,
|
|
135
|
+
rows: result.rows.map(
|
|
136
|
+
(row) => _chunkPYTHH6WOcjs.mapSQLQueryResult.call(void 0, row, options.mapping)
|
|
137
|
+
)
|
|
138
|
+
};
|
|
139
|
+
}
|
|
140
|
+
results[i] = { rowCount: result.rowCount, rows: result.rows };
|
|
141
|
+
if (_optionalChain([options, 'optionalAccess', _5 => _5.assertChanges]) && (_nullishCoalesce(results[i].rowCount, () => ( 0))) === 0) {
|
|
142
|
+
throw new (0, _chunkPYTHH6WOcjs.BatchCommandNoChangesError)(i);
|
|
143
|
+
}
|
|
144
|
+
} catch (error) {
|
|
145
|
+
_chunkPYTHH6WOcjs.tracer.error("db:sql:batch_command:execute:error", { error });
|
|
146
|
+
throw _chunkSQXAAA3Ncjs.mapPostgresError.call(void 0, error);
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
return results;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// src/storage/postgresql/pg/connections/transaction.ts
|
|
153
|
+
var pgTransaction = (connection, serializer) => (getClient, options) => ({
|
|
154
|
+
connection: connection(),
|
|
155
|
+
driverType: PgDriverType,
|
|
156
|
+
begin: async () => {
|
|
157
|
+
const client = await getClient;
|
|
158
|
+
await client.query("BEGIN");
|
|
159
|
+
},
|
|
160
|
+
commit: async () => {
|
|
161
|
+
const client = await getClient;
|
|
162
|
+
try {
|
|
163
|
+
await client.query("COMMIT");
|
|
164
|
+
} finally {
|
|
165
|
+
if (_optionalChain([options, 'optionalAccess', _6 => _6.close])) await _optionalChain([options, 'optionalAccess', _7 => _7.close, 'call', _8 => _8(client)]);
|
|
166
|
+
}
|
|
167
|
+
},
|
|
168
|
+
rollback: async (error) => {
|
|
169
|
+
const client = await getClient;
|
|
170
|
+
try {
|
|
171
|
+
await client.query("ROLLBACK");
|
|
172
|
+
} finally {
|
|
173
|
+
if (_optionalChain([options, 'optionalAccess', _9 => _9.close])) await _optionalChain([options, 'optionalAccess', _10 => _10.close, 'call', _11 => _11(client, error)]);
|
|
174
|
+
}
|
|
175
|
+
},
|
|
176
|
+
execute: _chunkPYTHH6WOcjs.sqlExecutor.call(void 0, pgSQLExecutor({ serializer }), {
|
|
177
|
+
connect: () => getClient
|
|
178
|
+
})
|
|
179
|
+
});
|
|
180
|
+
|
|
181
|
+
// src/storage/postgresql/pg/connections/connection.ts
|
|
182
|
+
var PgDriverType = "PostgreSQL:pg";
|
|
183
|
+
var pgClientConnection = (options) => {
|
|
184
|
+
const { connect, close } = options;
|
|
185
|
+
return _chunkPYTHH6WOcjs.createConnection.call(void 0, {
|
|
186
|
+
driverType: PgDriverType,
|
|
187
|
+
connect,
|
|
188
|
+
close,
|
|
189
|
+
initTransaction: (connection) => pgTransaction(connection, options.serializer),
|
|
190
|
+
executor: pgSQLExecutor,
|
|
191
|
+
serializer: options.serializer
|
|
192
|
+
});
|
|
193
|
+
};
|
|
194
|
+
var pgPoolClientConnection = (options) => {
|
|
195
|
+
const { connect, close } = options;
|
|
196
|
+
return _chunkPYTHH6WOcjs.createConnection.call(void 0, {
|
|
197
|
+
driverType: PgDriverType,
|
|
198
|
+
connect,
|
|
199
|
+
close,
|
|
200
|
+
initTransaction: (connection) => pgTransaction(connection, options.serializer),
|
|
201
|
+
executor: pgSQLExecutor,
|
|
202
|
+
serializer: options.serializer
|
|
203
|
+
});
|
|
204
|
+
};
|
|
205
|
+
function pgConnection(options) {
|
|
206
|
+
return options.type === "Client" ? pgClientConnection(options) : pgPoolClientConnection(options);
|
|
207
|
+
}
|
|
208
|
+
var checkConnection = async (connectionString) => {
|
|
209
|
+
const client = new _pg2.default.Client({
|
|
210
|
+
connectionString
|
|
211
|
+
});
|
|
212
|
+
try {
|
|
213
|
+
await client.connect();
|
|
214
|
+
return { successful: true };
|
|
215
|
+
} catch (error) {
|
|
216
|
+
const code = error instanceof Error && "code" in error && typeof error.code === "string" ? error.code : void 0;
|
|
217
|
+
return {
|
|
218
|
+
successful: false,
|
|
219
|
+
errorType: code === "ECONNREFUSED" ? "ConnectionRefused" : code === "28P01" ? "Authentication" : "Unknown",
|
|
220
|
+
code,
|
|
221
|
+
error
|
|
222
|
+
};
|
|
223
|
+
} finally {
|
|
224
|
+
await client.end();
|
|
225
|
+
}
|
|
226
|
+
};
|
|
227
|
+
|
|
228
|
+
// src/storage/postgresql/pg/connections/pool.ts
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
// src/storage/postgresql/pg/serialization/index.ts
|
|
232
|
+
var setPgTypeParser = (client, options) => {
|
|
233
|
+
if (_optionalChain([options, 'optionalAccess', _12 => _12.parseBigInts]) === true)
|
|
234
|
+
client.setTypeParser(20, (val) => BigInt(val));
|
|
235
|
+
if (_optionalChain([options, 'optionalAccess', _13 => _13.serializer])) {
|
|
236
|
+
client.setTypeParser(3802, (val) => options.serializer.deserialize(val));
|
|
237
|
+
client.setTypeParser(114, (val) => options.serializer.deserialize(val));
|
|
238
|
+
}
|
|
239
|
+
};
|
|
240
|
+
|
|
241
|
+
// src/storage/postgresql/pg/connections/pool.ts
|
|
242
|
+
var pgNativePool = (options) => {
|
|
243
|
+
const { connectionString, database } = options;
|
|
244
|
+
const pool = getPgPool({ connectionString, database });
|
|
245
|
+
const getConnection = () => pgConnection({
|
|
246
|
+
type: "PoolClient",
|
|
247
|
+
connect: async () => {
|
|
248
|
+
const client = await pool.connect();
|
|
249
|
+
setPgTypeParser(client, {
|
|
250
|
+
parseBigInts: true,
|
|
251
|
+
serializer: options.serializer
|
|
252
|
+
});
|
|
253
|
+
return client;
|
|
254
|
+
},
|
|
255
|
+
close: (client) => Promise.resolve(client.release()),
|
|
256
|
+
serializer: options.serializer
|
|
257
|
+
});
|
|
258
|
+
const open = () => Promise.resolve(getConnection());
|
|
259
|
+
const close = () => endPgPool({ connectionString, database });
|
|
260
|
+
return _chunkPYTHH6WOcjs.createConnectionPool.call(void 0, {
|
|
261
|
+
driverType: PgDriverType,
|
|
262
|
+
connection: open,
|
|
263
|
+
close,
|
|
264
|
+
getConnection
|
|
265
|
+
});
|
|
266
|
+
};
|
|
267
|
+
var pgAmbientNativePool = (options) => {
|
|
268
|
+
const { pool } = options;
|
|
269
|
+
return _chunkPYTHH6WOcjs.createConnectionPool.call(void 0, {
|
|
270
|
+
driverType: PgDriverType,
|
|
271
|
+
getConnection: () => pgConnection({
|
|
272
|
+
type: "PoolClient",
|
|
273
|
+
connect: () => pool.connect(),
|
|
274
|
+
close: (client) => Promise.resolve(client.release()),
|
|
275
|
+
serializer: options.serializer
|
|
276
|
+
})
|
|
277
|
+
});
|
|
278
|
+
};
|
|
279
|
+
var pgAmbientConnectionPool = (options) => {
|
|
280
|
+
const { connection } = options;
|
|
281
|
+
return _chunkPYTHH6WOcjs.createAmbientConnectionPool.call(void 0, {
|
|
282
|
+
driverType: PgDriverType,
|
|
283
|
+
connection
|
|
284
|
+
});
|
|
285
|
+
};
|
|
286
|
+
var pgClientPool = (options) => {
|
|
287
|
+
const { connectionString, database } = options;
|
|
288
|
+
return _chunkPYTHH6WOcjs.createConnectionPool.call(void 0, {
|
|
289
|
+
driverType: PgDriverType,
|
|
290
|
+
getConnection: () => {
|
|
291
|
+
const connect = async () => {
|
|
292
|
+
const client = new _pg2.default.Client({ connectionString, database });
|
|
293
|
+
setPgTypeParser(client, {
|
|
294
|
+
parseBigInts: true,
|
|
295
|
+
serializer: options.serializer
|
|
296
|
+
});
|
|
297
|
+
await client.connect();
|
|
298
|
+
return client;
|
|
299
|
+
};
|
|
300
|
+
return pgConnection({
|
|
301
|
+
type: "Client",
|
|
302
|
+
connect,
|
|
303
|
+
close: (client) => client.end(),
|
|
304
|
+
serializer: options.serializer
|
|
305
|
+
});
|
|
306
|
+
}
|
|
307
|
+
});
|
|
308
|
+
};
|
|
309
|
+
var pgAmbientClientPool = (options) => {
|
|
310
|
+
const { client } = options;
|
|
311
|
+
const getConnection = () => {
|
|
312
|
+
const connect = () => Promise.resolve(client);
|
|
313
|
+
return pgConnection({
|
|
314
|
+
type: "Client",
|
|
315
|
+
connect,
|
|
316
|
+
close: () => Promise.resolve(),
|
|
317
|
+
serializer: options.serializer
|
|
318
|
+
});
|
|
319
|
+
};
|
|
320
|
+
const open = () => Promise.resolve(getConnection());
|
|
321
|
+
const close = () => Promise.resolve();
|
|
322
|
+
return _chunkPYTHH6WOcjs.createConnectionPool.call(void 0, {
|
|
323
|
+
driverType: PgDriverType,
|
|
324
|
+
connection: open,
|
|
325
|
+
close,
|
|
326
|
+
getConnection
|
|
327
|
+
});
|
|
328
|
+
};
|
|
329
|
+
function pgPool(options) {
|
|
330
|
+
const { connectionString, database } = options;
|
|
331
|
+
const serializer = _nullishCoalesce(_optionalChain([options, 'access', _14 => _14.serialization, 'optionalAccess', _15 => _15.serializer]), () => ( _chunkPYTHH6WOcjs.JSONSerializer));
|
|
332
|
+
if ("client" in options && options.client)
|
|
333
|
+
return pgAmbientClientPool({ client: options.client, serializer });
|
|
334
|
+
if ("connection" in options && options.connection)
|
|
335
|
+
return pgAmbientConnectionPool({
|
|
336
|
+
connection: options.connection
|
|
337
|
+
});
|
|
338
|
+
if ("pooled" in options && options.pooled === false)
|
|
339
|
+
return pgClientPool({ connectionString, database, serializer });
|
|
340
|
+
if ("pool" in options && options.pool)
|
|
341
|
+
return pgAmbientNativePool({ pool: options.pool, serializer });
|
|
342
|
+
return pgNativePool({
|
|
343
|
+
connectionString,
|
|
344
|
+
database,
|
|
345
|
+
serializer
|
|
346
|
+
});
|
|
347
|
+
}
|
|
348
|
+
var pools = /* @__PURE__ */ new Map();
|
|
349
|
+
var usageCounter = /* @__PURE__ */ new Map();
|
|
350
|
+
// Returns a cached pg.Pool for the given connection string (or pool
// options object), creating and caching a new pool on first use.
// Every call increments the usage counter so endPgPool can
// reference-count shutdowns.
var getPgPool = (connectionStringOrOptions) => {
  const isPlainString = typeof connectionStringOrOptions === "string";
  const poolOptions = isPlainString
    ? { connectionString: connectionStringOrOptions }
    : connectionStringOrOptions;
  const connectionString = poolOptions.connectionString;
  const { parseDatabaseName, defaultPostgreSqlDatabase } = _chunkVIQJEUVFcjs;
  // Resolve the database: explicit option, then parsed from the connection
  // string (with the driver default as fallback), otherwise undefined.
  const database =
    poolOptions.database ??
    (poolOptions.connectionString
      ? parseDatabaseName(poolOptions.connectionString) ??
        defaultPostgreSqlDatabase
      : void 0);
  const lookupKey = key(connectionString, database);
  updatePoolUsageCounter(lookupKey, 1);
  const cached = pools.get(lookupKey);
  if (cached) {
    return cached;
  }
  const fresh = new _pg2.default.Pool(poolOptions);
  pools.set(lookupKey, fresh);
  return fresh;
};
|
|
358
|
+
// Decrements the usage counter for the pool identified by the
// connectionString/database pair and closes it once no users remain
// (or immediately when `force` is true). No-op when no matching pool
// is cached.
var endPgPool = async ({
  connectionString,
  database,
  force
}) => {
  const { parseDatabaseName } = _chunkVIQJEUVFcjs;
  const dbName = database ?? parseDatabaseName(connectionString) ?? void 0;
  const lookupKey = key(connectionString, dbName);
  const pool = pools.get(lookupKey);
  if (!pool) {
    return;
  }
  // Only decrement when a pool actually exists (matches original
  // short-circuit order).
  const remaining = updatePoolUsageCounter(lookupKey, -1);
  if (remaining <= 0 || force === true) {
    await onEndPool(lookupKey, pool);
  }
};
|
|
370
|
+
// Ends the given pg.Pool and always evicts it from the cache.
// Shutdown failures are logged through the tracer rather than rethrown
// (best-effort close).
var onEndPool = async (lookupKey, pool) => {
  try {
    await pool.end();
  } catch (err) {
    _chunkPYTHH6WOcjs.tracer.error("connection-closing-error", {
      lookupKey,
      error: err
    });
  }
  pools.delete(lookupKey);
};
|
|
378
|
+
// Closes every cached pool in parallel; resolves once all have ended.
// A snapshot of the entries is taken first because onEndPool deletes
// from the map it iterates.
var endAllPgPools = () => {
  const snapshot = [...pools];
  return Promise.all(
    snapshot.map(([lookupKey, pool]) => onEndPool(lookupKey, pool))
  );
};
|
|
381
|
+
// Cache lookup key: the connection string joined with the resolved
// database name (driver default when none is given).
var key = (connectionString, database) => {
  const dbName = database ?? _chunkVIQJEUVFcjs.defaultPostgreSqlDatabase;
  return `${connectionString}|${dbName}`;
};
|
|
382
|
+
// Adjusts the per-pool usage counter by `by` (may be negative) and
// returns the updated value.
var updatePoolUsageCounter = (lookupKey, by) => {
  const next = (usageCounter.get(lookupKey) ?? 0) + by;
  usageCounter.set(lookupKey, next);
  return next;
};
|
|
38
388
|
|
|
389
|
+
// src/storage/postgresql/pg/index.ts
|
|
390
|
+
// Attempts to parse the given string as a PostgreSQL connection string;
// returns null instead of throwing when it is not valid.
var tryParseConnectionString = (connectionString) => {
  const { PostgreSQLConnectionString } = _chunkVIQJEUVFcjs;
  try {
    return PostgreSQLConnectionString(connectionString);
  } catch {
    return null;
  }
};
|
|
397
|
+
// Dumbo database-driver descriptor for the node-postgres ("pg") driver:
// bundles pool creation, SQL formatting, default migrator options,
// connection-string detection and database metadata into one
// registrable unit.
var pgDumboDriver = {
  driverType: PgDriverType,
  // Factory the registry uses to build a pool from caller options.
  createPool: (options) => pgPool(options),
  sqlFormatter: _chunkSQXAAA3Ncjs.pgFormatter,
  defaultMigratorOptions: _chunkSQXAAA3Ncjs.DefaultPostgreSQLMigratorOptions,
  // The driver is selected when the connection string parses as PostgreSQL.
  canHandle: _chunkPYTHH6WOcjs.canHandleDriverWithConnectionString.call(void 0,
    PgDriverType,
    tryParseConnectionString
  ),
  databaseMetadata: _chunkVIQJEUVFcjs.postgreSQLMetadata
};
|
|
408
|
+
// Registers the pg driver descriptor in the global dumbo driver registry.
var usePgDumboDriver = () => {
  _chunkPYTHH6WOcjs.dumboDatabaseDriverRegistry.register(PgDriverType, pgDumboDriver);
};
// Self-register at module load so importing this bundle enables the driver.
usePgDumboDriver();
|
|
39
412
|
|
|
40
413
|
|
|
41
414
|
|
|
42
415
|
|
|
43
416
|
|
|
44
417
|
|
|
45
|
-
var _chunkISNF6USXcjs = require('./chunk-ISNF6USX.cjs');
|
|
46
|
-
require('./chunk-OJ34O3Q2.cjs');
|
|
47
418
|
|
|
48
419
|
|
|
49
420
|
|
|
@@ -84,10 +455,5 @@ require('./chunk-OJ34O3Q2.cjs');
|
|
|
84
455
|
|
|
85
456
|
|
|
86
457
|
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
exports.AdvisoryLock = _chunkISNF6USXcjs.AdvisoryLock; exports.DefaultPostgreSQLMigratorOptions = _chunkISNF6USXcjs.DefaultPostgreSQLMigratorOptions; exports.NodePostgresDriverType = _chunkISNF6USXcjs.NodePostgresDriverType; exports.PostgreSQLConnectionString = _chunkISNF6USXcjs.PostgreSQLConnectionString; exports.PostgreSQLDatabaseName = _chunkISNF6USXcjs.PostgreSQLDatabaseName; exports.acquireAdvisoryLock = _chunkISNF6USXcjs.acquireAdvisoryLock; exports.advisoryLock = _chunkISNF6USXcjs.advisoryLock; exports.checkConnection = _chunkISNF6USXcjs.checkConnection; exports.connectionPool = _chunkISNF6USXcjs.connectionPool; exports.databaseDriver = _chunkISNF6USXcjs.pgDatabaseDriver; exports.defaultPostgreSQLConnectionString = _chunkISNF6USXcjs.defaultPostgreSQLConnectionString; exports.defaultPostgreSqlDatabase = _chunkISNF6USXcjs.defaultPostgreSqlDatabase; exports.endAllPools = _chunkISNF6USXcjs.endAllPools; exports.endPool = _chunkISNF6USXcjs.endPool; exports.functionExists = _chunkISNF6USXcjs.functionExists; exports.functionExistsSQL = _chunkISNF6USXcjs.functionExistsSQL; exports.getDatabaseNameOrDefault = _chunkISNF6USXcjs.getDatabaseNameOrDefault; exports.getPool = _chunkISNF6USXcjs.getPool; exports.isNodePostgresClient = _chunkISNF6USXcjs.isNodePostgresClient; exports.isNodePostgresNativePool = _chunkISNF6USXcjs.isNodePostgresNativePool; exports.isNodePostgresPoolClient = _chunkISNF6USXcjs.isNodePostgresPoolClient; exports.nodePostgresAmbientClientPool = _chunkISNF6USXcjs.nodePostgresAmbientClientPool; exports.nodePostgresAmbientConnectionPool = _chunkISNF6USXcjs.nodePostgresAmbientConnectionPool; exports.nodePostgresAmbientNativePool = _chunkISNF6USXcjs.nodePostgresAmbientNativePool; exports.nodePostgresClientConnection = _chunkISNF6USXcjs.nodePostgresClientConnection; exports.nodePostgresClientPool = _chunkISNF6USXcjs.nodePostgresClientPool; exports.nodePostgresConnection = _chunkISNF6USXcjs.nodePostgresConnection; exports.nodePostgresExecute = 
_chunkISNF6USXcjs.nodePostgresExecute; exports.nodePostgresNativePool = _chunkISNF6USXcjs.nodePostgresNativePool; exports.nodePostgresPool = _chunkISNF6USXcjs.nodePostgresPool; exports.nodePostgresPoolClientConnection = _chunkISNF6USXcjs.nodePostgresPoolClientConnection; exports.nodePostgresSQLExecutor = _chunkISNF6USXcjs.nodePostgresSQLExecutor; exports.nodePostgresTransaction = _chunkISNF6USXcjs.nodePostgresTransaction; exports.onEndPool = _chunkISNF6USXcjs.onEndPool; exports.pgDatabaseDriver = _chunkISNF6USXcjs.pgDatabaseDriver; exports.pgFormatter = _chunkISNF6USXcjs.pgFormatter; exports.postgreSQLColumnProcessors = _chunkISNF6USXcjs.postgreSQLColumnProcessors; exports.postgresPool = _chunkISNF6USXcjs.postgresPool; exports.releaseAdvisoryLock = _chunkISNF6USXcjs.releaseAdvisoryLock; exports.setNodePostgresTypeParser = _chunkISNF6USXcjs.setNodePostgresTypeParser; exports.tableExists = _chunkISNF6USXcjs.tableExists; exports.tableExistsSQL = _chunkISNF6USXcjs.tableExistsSQL; exports.tryAcquireAdvisoryLock = _chunkISNF6USXcjs.tryAcquireAdvisoryLock; exports.usePgDatabaseDriver = _chunkISNF6USXcjs.usePgDatabaseDriver;
|
|
458
|
+
exports.AdvisoryLock = _chunkSQXAAA3Ncjs.AdvisoryLock; exports.DefaultPostgreSQLMigratorOptions = _chunkSQXAAA3Ncjs.DefaultPostgreSQLMigratorOptions; exports.PgDriverType = PgDriverType; exports.PostgreSQLArrayProcessor = _chunkSQXAAA3Ncjs.PostgreSQLArrayProcessor; exports.PostgreSQLConnectionString = _chunkVIQJEUVFcjs.PostgreSQLConnectionString; exports.PostgreSQLDatabaseName = _chunkSQXAAA3Ncjs.PostgreSQLDatabaseName; exports.PostgreSQLExpandSQLInProcessor = _chunkSQXAAA3Ncjs.PostgreSQLExpandSQLInProcessor; exports.acquireAdvisoryLock = _chunkSQXAAA3Ncjs.acquireAdvisoryLock; exports.advisoryLock = _chunkSQXAAA3Ncjs.advisoryLock; exports.checkConnection = checkConnection; exports.defaultPostgreSQLConnectionString = _chunkVIQJEUVFcjs.defaultPostgreSQLConnectionString; exports.defaultPostgreSqlDatabase = _chunkVIQJEUVFcjs.defaultPostgreSqlDatabase; exports.endAllPgPools = endAllPgPools; exports.endPgPool = endPgPool; exports.functionExists = _chunkVIQJEUVFcjs.functionExists; exports.functionExistsSQL = _chunkVIQJEUVFcjs.functionExistsSQL; exports.getPgPool = getPgPool; exports.isPgClient = isPgClient; exports.isPgNativePool = isPgNativePool; exports.isPgPoolClient = isPgPoolClient; exports.mapPostgresError = _chunkSQXAAA3Ncjs.mapPostgresError; exports.onEndPool = onEndPool; exports.parseDatabaseName = _chunkVIQJEUVFcjs.parseDatabaseName; exports.pgAmbientClientPool = pgAmbientClientPool; exports.pgAmbientConnectionPool = pgAmbientConnectionPool; exports.pgAmbientNativePool = pgAmbientNativePool; exports.pgClientConnection = pgClientConnection; exports.pgClientPool = pgClientPool; exports.pgConnection = pgConnection; exports.pgDumboDriver = pgDumboDriver; exports.pgExecute = pgExecute; exports.pgFormatter = _chunkSQXAAA3Ncjs.pgFormatter; exports.pgNativePool = pgNativePool; exports.pgPool = pgPool; exports.pgPoolClientConnection = pgPoolClientConnection; exports.pgSQLExecutor = pgSQLExecutor; exports.pgTransaction = pgTransaction; exports.postgreSQLColumnProcessors = 
_chunkSQXAAA3Ncjs.postgreSQLColumnProcessors; exports.postgreSQLMetadata = _chunkVIQJEUVFcjs.postgreSQLMetadata; exports.releaseAdvisoryLock = _chunkSQXAAA3Ncjs.releaseAdvisoryLock; exports.setPgTypeParser = setPgTypeParser; exports.tableExists = _chunkVIQJEUVFcjs.tableExists; exports.tableExistsSQL = _chunkVIQJEUVFcjs.tableExistsSQL; exports.tryAcquireAdvisoryLock = _chunkSQXAAA3Ncjs.tryAcquireAdvisoryLock; exports.usePgDumboDriver = usePgDumboDriver;
|
|
93
459
|
//# sourceMappingURL=pg.cjs.map
|