@depup/postgres 3.4.8-depup.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,53 @@
1
// Error subclass for server ErrorResponse payloads: carries every protocol
// field (severity, code, detail, …) as an own property on the instance.
export class PostgresError extends Error {
  constructor(info) {
    super(info.message)
    // Set name first so a protocol field named `name` (if ever present)
    // would win, matching the original assignment order.
    this.name = this.constructor.name
    Object.assign(this, info)
  }
}
8
+
9
// Factory helpers for every error shape this client produces:
// connection-level network errors, server ErrorResponses, generic coded
// errors, and unsupported-protocol-message errors.
export const Errors = {
  connection,
  postgres,
  generic,
  notSupported
}
15
+
16
// Builds a Node-style network error for a failed/closed connection.
// `x` is the error code (e.g. 'CONNECTION_ENDED'); address/port come from the
// socket when available, otherwise from the options; a unix-socket `path`
// replaces host:port entirely.
function connection(x, options, socket) {
  const { host, port } = socket || options
  const target = options.path || (host + ':' + port)
  const error = new Error('write ' + x + ' ' + target)
  error.code = x
  error.errno = x
  error.address = options.path || host
  if (!options.path)
    error.port = port
  // Hide this factory frame from the captured stack.
  Error.captureStackTrace(error, connection)
  return error
}
29
+
30
// Wraps a raw server ErrorResponse payload in a PostgresError, hiding this
// factory frame from the captured stack.
function postgres(x) {
  const err = new PostgresError(x)
  Error.captureStackTrace(err, postgres)
  return err
}
35
+
36
// Creates a plain Error tagged with an SQLSTATE-style code, message formed
// as "<code>: <message>".
function generic(code, message) {
  const err = new Error(code + ': ' + message)
  err.code = code
  Error.captureStackTrace(err, generic)
  return err
}
41
+
42
/* c8 ignore next 8 */
// Error for backend ("B") protocol messages this client does not implement;
// `name` records the unsupported message type.
function notSupported(x) {
  const err = new Error(x + ' (B) is not supported')
  err.code = 'MESSAGE_NOT_SUPPORTED'
  err.name = x
  Error.captureStackTrace(err, notSupported)
  return err
}
@@ -0,0 +1,568 @@
1
+ import { process } from '../polyfills.js'
2
+ import { os } from '../polyfills.js'
3
+ import { fs } from '../polyfills.js'
4
+
5
+ import {
6
+ mergeUserTypes,
7
+ inferType,
8
+ Parameter,
9
+ Identifier,
10
+ Builder,
11
+ toPascal,
12
+ pascal,
13
+ toCamel,
14
+ camel,
15
+ toKebab,
16
+ kebab,
17
+ fromPascal,
18
+ fromCamel,
19
+ fromKebab
20
+ } from './types.js'
21
+
22
+ import Connection from './connection.js'
23
+ import { Query, CLOSE } from './query.js'
24
+ import Queue from './queue.js'
25
+ import { Errors, PostgresError } from './errors.js'
26
+ import Subscribe from './subscribe.js'
27
+ import largeObject from './large.js'
28
+
29
// Attach static helpers to the Postgres factory: the PostgresError class,
// the built-in column-name transforms, and a ready-made BigInt custom type
// (oid 20 — presumably int8; verify against the types module) that users can
// opt into via `types: { bigint: Postgres.BigInt }`.
Object.assign(Postgres, {
  PostgresError,
  toPascal,
  pascal,
  toCamel,
  camel,
  toKebab,
  kebab,
  fromPascal,
  fromCamel,
  fromKebab,
  BigInt: {
    to: 20,
    from: [20],
    parse: x => BigInt(x), // eslint-disable-line
    serialize: x => x.toString()
  }
})

export default Postgres
49
+
50
/**
 * Factory for a `sql` instance backed by a connection pool.
 *
 * (a, b) mirrors the public signature: (url, options) or just (options).
 * The returned tagged-template function doubles as the namespace for
 * unsafe/file/begin/listen/notify/reserve/close/end and friends.
 */
function Postgres(a, b) {
  const options = parseOptions(a, b)
      , subscribe = options.no_subscribe || Subscribe(Postgres, { ...options })

  // Set once end() is called; new queries are rejected afterwards.
  let ending = false

  // Pending queries plus one queue per connection lifecycle state; every
  // connection lives in exactly one of these queues at a time.
  const queries = Queue()
      , connecting = Queue()
      , reserved = Queue()
      , closed = Queue()
      , ended = Queue()
      , open = Queue()
      , busy = Queue()
      , full = Queue()
      , queues = { connecting, reserved, closed, ended, open, busy, full }

  // Eagerly create `max` connection objects (they connect lazily on demand).
  const connections = [...Array(options.max)].map(() => Connection(options, queues, { onopen, onend, onclose }))

  const sql = Sql(handler)

  Object.assign(sql, {
    get parameters() { return options.parameters },
    largeObject: largeObject.bind(null, sql),
    subscribe,
    CLOSE,
    END: CLOSE,
    PostgresError,
    options,
    reserve,
    listen,
    begin,
    close,
    end
  })

  return sql

  // Builds a tagged-template `sql` function bound to a specific query handler
  // (the pool-wide handler here; reserve()/begin() pass their own).
  function Sql(handler) {
    handler.debug = options.debug

    // Expose one helper per user-defined type: sql.types.name(x) -> Parameter.
    Object.entries(options.types).reduce((acc, [name, type]) => {
      acc[name] = (x) => new Parameter(x, type.to)
      return acc
    }, typed)

    Object.assign(sql, {
      types: typed,
      typed,
      unsafe,
      notify,
      array,
      json,
      file
    })

    return sql

    function typed(value, type) {
      return new Parameter(value, type)
    }

    // Tagged-template call -> Query; plain string with no args -> Identifier
    // (dynamic table/column name); anything else -> Builder (dynamic
    // insert/update/in helpers).
    function sql(strings, ...args) {
      const query = strings && Array.isArray(strings.raw)
        ? new Query(strings, args, handler, cancel)
        : typeof strings === 'string' && !args.length
          ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
          : new Builder(strings, args)
      return query
    }

    // Executes a raw query string; (string, options) is accepted as a
    // two-argument shorthand. Unprepared, and simple-protocol when there are
    // no parameters.
    function unsafe(string, args = [], options = {}) {
      arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
      const query = new Query([string], args, handler, cancel, {
        prepare: false,
        ...options,
        simple: 'simple' in options ? options.simple : args.length === 0
      })
      return query
    }

    // Like unsafe(), but reads the query text from a file before executing.
    function file(path, args = [], options = {}) {
      arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
      const query = new Query([], args, (query) => {
        fs.readFile(path, 'utf8', (err, string) => {
          if (err)
            return query.reject(err)

          query.strings = [string]
          handler(query)
        })
      }, cancel, {
        ...options,
        simple: 'simple' in options ? options.simple : args.length === 0
      })
      return query
    }
  }

  // LISTEN on a channel. All listeners share one dedicated single-connection
  // instance (cached on listen.sql); on unexpected close every channel is
  // re-subscribed on the replacement connection.
  async function listen(name, fn, onlisten) {
    const listener = { fn, onlisten }

    const sql = listen.sql || (listen.sql = Postgres({
      ...options,
      max: 1,
      idle_timeout: null,
      max_lifetime: null,
      fetch_types: false,
      onclose() {
        // Re-establish every channel; individual failures are best-effort.
        Object.entries(listen.channels).forEach(([name, { listeners }]) => {
          delete listen.channels[name]
          Promise.all(listeners.map(l => listen(name, l.fn, l.onlisten).catch(() => { /* noop */ })))
        })
      },
      onnotify(c, x) {
        c in listen.channels && listen.channels[c].listeners.forEach(l => l.fn(x))
      }
    }))

    const channels = listen.channels || (listen.channels = {})
        , exists = name in channels

    if (exists) {
      channels[name].listeners.push(listener)
      const result = await channels[name].result
      listener.onlisten && listener.onlisten()
      return { state: result.state, unlisten }
    }

    // First listener for this channel: issue LISTEN with the channel name
    // quoted as an identifier (embedded quotes doubled).
    channels[name] = { result: sql`listen ${
      sql.unsafe('"' + name.replace(/"/g, '""') + '"')
    }`, listeners: [listener] }
    const result = await channels[name].result
    listener.onlisten && listener.onlisten()
    return { state: result.state, unlisten }

    // Removes this listener; issues UNLISTEN once the channel has none left.
    async function unlisten() {
      if (name in channels === false)
        return

      channels[name].listeners = channels[name].listeners.filter(x => x !== listener)
      if (channels[name].listeners.length)
        return

      delete channels[name]
      return sql`unlisten ${
        sql.unsafe('"' + name.replace(/"/g, '""') + '"')
      }`
    }
  }

  // NOTIFY helper; payload is coerced to a string.
  async function notify(channel, payload) {
    return await sql`select pg_notify(${ channel }, ${ '' + payload })`
  }

  // Takes one connection out of the pool and returns a dedicated `sql` bound
  // to it; call sql.release() to hand the connection back to the pool.
  async function reserve() {
    const queue = Queue()
    const c = open.length
      ? open.shift()
      : await new Promise((resolve, reject) => {
        const query = { reserve: resolve, reject }
        queries.push(query)
        closed.length && connect(closed.shift(), query)
      })

    move(c, reserved)
    // When the connection frees up, drain this reservation's private queue.
    c.reserved = () => queue.length
      ? c.execute(queue.shift())
      : move(c, reserved)
    c.reserved.release = true

    const sql = Sql(handler)
    sql.release = () => {
      c.reserved = null
      onopen(c)
    }

    return sql

    function handler(q) {
      c.queue === full
        ? queue.push(q)
        : c.execute(q) || move(c, full)
    }
  }

  // Runs fn inside BEGIN/COMMIT on a single connection, rolling back on
  // error. Nested sql.savepoint() calls map to SAVEPOINT / ROLLBACK TO, and
  // sql.prepare(name) switches the final COMMIT to PREPARE TRANSACTION.
  async function begin(options, fn) {
    !fn && (fn = options, options = '')
    const queries = Queue()
    let savepoints = 0
      , connection
      , prepare = null

    try {
      // `options` is sanitized to letters/spaces (e.g. "read only").
      await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).execute()
      return await Promise.race([
        scope(connection, fn),
        // A dropped connection mid-transaction rejects the race.
        new Promise((_, reject) => connection.onclose = reject)
      ])
    } catch (error) {
      throw error
    }

    async function scope(c, fn, name) {
      const sql = Sql(handler)
      sql.savepoint = savepoint
      // NOTE(review): replace() here has no replacement argument, so matches
      // become the literal string "undefined" — verify against upstream.
      sql.prepare = x => prepare = x.replace(/[^a-z0-9$-_. ]/gi)
      let uncaughtError
        , result

      name && await sql`savepoint ${ sql(name) }`
      try {
        result = await new Promise((resolve, reject) => {
          const x = fn(sql)
          Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
        })

        if (uncaughtError)
          throw uncaughtError
      } catch (e) {
        await (name
          ? sql`rollback to ${ sql(name) }`
          : sql`rollback`
        )
        // Prefer the first real error over the 25P02 "transaction aborted"
        // echo that follow-up statements produce.
        throw e instanceof PostgresError && e.code === '25P02' && uncaughtError || e
      }

      if (!name) {
        prepare
          ? await sql`prepare transaction '${ sql.unsafe(prepare) }'`
          : await sql`commit`
      }

      return result

      function savepoint(name, fn) {
        if (name && Array.isArray(name.raw))
          return savepoint(sql => sql.apply(sql, arguments))

        arguments.length === 1 && (fn = name, name = null)
        return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
      }

      function handler(q) {
        // Remember the first query error so the catch above can prefer it.
        q.catch(e => uncaughtError || (uncaughtError = e))
        c.queue === full
          ? queries.push(q)
          : c.execute(q) || move(c, full)
      }
    }

    // Captures the connection BEGIN ran on and pins it for the transaction.
    function onexecute(c) {
      connection = c
      move(c, reserved)
      c.reserved = () => queries.length
        ? c.execute(queries.shift())
        : move(c, reserved)
    }
  }

  // Moves a connection between state queues; only `open` runs the idle timer.
  function move(c, queue) {
    c.queue.remove(c)
    queue.push(c)
    c.queue = queue
    queue === open
      ? c.idleTimer.start()
      : c.idleTimer.cancel()
    return c
  }

  // Explicitly-typed json parameter (3802 — presumably the jsonb oid).
  function json(x) {
    return new Parameter(x, 3802)
  }

  // Array parameter helper; called with loose arguments it collects them
  // into an array, and infers the element type unless given explicitly.
  function array(x, type) {
    if (!Array.isArray(x))
      return array(Array.from(arguments))

    return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
  }

  // Pool dispatch: prefer an open connection, then an unconnected one, then
  // pipeline onto a busy one; otherwise park the query until one frees up.
  function handler(query) {
    if (ending)
      return query.reject(Errors.connection('CONNECTION_ENDED', options, options))

    if (open.length)
      return go(open.shift(), query)

    if (closed.length)
      return connect(closed.shift(), query)

    busy.length
      ? go(busy.shift(), query)
      : queries.push(query)
  }

  function go(c, query) {
    return c.execute(query)
      ? move(c, busy)
      : move(c, full)
  }

  // Cancels a query: via a throwaway connection when it is already running
  // on the server, otherwise by rejecting it straight out of the local queue
  // with SQLSTATE 57014.
  function cancel(query) {
    return new Promise((resolve, reject) => {
      query.state
        ? query.active
          ? Connection(options).cancel(query.state, resolve, reject)
          : query.cancelled = { resolve, reject }
        : (
          queries.remove(query),
          query.cancelled = true,
          query.reject(Errors.generic('57014', 'canceling statement due to user request')),
          resolve()
        )
    })
  }

  // Graceful shutdown; with a timeout (seconds) remaining work is destroyed
  // once it elapses. Idempotent — returns the same promise on repeat calls.
  async function end({ timeout = null } = {}) {
    if (ending)
      return ending

    // Yield one microtask so already-scheduled work can still enqueue.
    await 1
    let timer
    return ending = Promise.race([
      new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
      Promise.all(connections.map(c => c.end()).concat(
        listen.sql ? listen.sql.end({ timeout: 0 }) : [],
        subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
      ))
    ]).then(() => clearTimeout(timer))
  }

  async function close() {
    await Promise.all(connections.map(c => c.end()))
  }

  // Hard shutdown: terminate every connection and reject all queued queries.
  async function destroy(resolve) {
    await Promise.all(connections.map(c => c.terminate()))
    while (queries.length)
      queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
    resolve()
  }

  function connect(c, query) {
    move(c, connecting)
    c.connect(query)
    return c
  }

  function onend(c) {
    move(c, ended)
  }

  // A connection became ready: hand it a fair share of the queued work,
  // splitting the backlog across connections that are still connecting.
  function onopen(c) {
    if (queries.length === 0)
      return move(c, open)

    let max = Math.ceil(queries.length / (connecting.length + 1))
      , ready = true

    while (ready && queries.length && max-- > 0) {
      const query = queries.shift()
      if (query.reserve)
        return query.reserve(c)

      ready = c.execute(query)
    }

    ready
      ? move(c, busy)
      : move(c, full)
  }

  // Connection dropped: notify observers and immediately retry queued work.
  function onclose(c, e) {
    move(c, closed)
    c.reserved = null
    c.onclose && (c.onclose(e), c.onclose = null)
    options.onclose && options.onclose(c.id)
    queries.length && connect(c, queries.shift())
  }
}
430
+
431
/**
 * Normalizes (url, options) / (options) input into the internal options
 * object. Precedence for most settings: explicit option > url query
 * parameter > PG* environment variable > built-in default.
 */
function parseOptions(a, b) {
  // Already-normalized options (marked by `shared`) pass through untouched.
  if (a && a.shared)
    return a

  const env = process.env // eslint-disable-line
      , o = (!a || typeof a === 'string' ? b : a) || {}
      , { url, multihost } = parseUrl(a)
      , query = [...url.searchParams].reduce((a, [b, c]) => (a[b] = c, a), {})
      , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
      , port = o.port || url.port || env.PGPORT || 5432
      , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()

  // Legacy aliases and query-string spellings (mutates o/query in place).
  o.no_prepare && (o.prepare = false)
  query.sslmode && (query.ssl = query.sslmode, delete query.sslmode)
  'timeout' in o && (console.log('The timeout option is deprecated, use idle_timeout instead'), o.idle_timeout = o.timeout) // eslint-disable-line
  query.sslrootcert === 'system' && (query.ssl = 'verify-full')

  // Settings coerced to numbers when they arrive as strings (env/query).
  const ints = ['idle_timeout', 'connect_timeout', 'max_lifetime', 'max_pipeline', 'backoff', 'keep_alive']
  const defaults = {
    max             : globalThis.Cloudflare ? 3 : 10,
    ssl             : false,
    sslnegotiation  : null,
    idle_timeout    : null,
    connect_timeout : 30,
    max_lifetime    : max_lifetime,
    max_pipeline    : 100,
    backoff         : backoff,
    keep_alive      : 60,
    prepare         : true,
    debug           : false,
    fetch_types     : true,
    publications    : 'alltables',
    target_session_attrs: null
  }

  return {
    // host/port are normalized to parallel arrays (multihost support).
    host            : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
    port            : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
    // A host containing '/' is treated as a unix socket directory.
    path            : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
    database        : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
    user            : user,
    pass            : o.pass || o.password || url.password || env.PGPASSWORD || '',
    ...Object.entries(defaults).reduce(
      (acc, [k, d]) => {
        // 'disable'/'false' in the query string mean boolean false.
        const value = k in o ? o[k] : k in query
          ? (query[k] === 'disable' || query[k] === 'false' ? false : query[k])
          : env['PG' + k.toUpperCase()] || d
        acc[k] = typeof value === 'string' && ints.includes(k)
          ? +value
          : value
        return acc
      },
      {}
    ),
    // Startup parameters sent to the server; query params that are not
    // recognized options fall through here.
    connection      : {
      application_name: env.PGAPPNAME || 'postgres.js',
      ...o.connection,
      ...Object.entries(query).reduce((acc, [k, v]) => (k in defaults || (acc[k] = v), acc), {})
    },
    types           : o.types || {},
    target_session_attrs: tsa(o, url, env),
    onnotice        : o.onnotice,
    onnotify        : o.onnotify,
    onclose         : o.onclose,
    onparameter     : o.onparameter,
    socket          : o.socket,
    transform       : parseTransform(o.transform || { undefined: undefined }),
    parameters      : {},
    shared          : { retries: 0, typeArrayMap: {} },
    ...mergeUserTypes(o.types)
  }
}
503
+
504
// Resolves target_session_attrs from options, the url query string or the
// PGTARGETSESSIONATTRS environment variable. Falsy values pass through;
// unknown settings throw.
function tsa(o, url, env) {
  const value = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
  const supported = ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby']

  if (value && !supported.includes(value))
    throw new Error('target_session_attrs ' + value + ' is not supported')

  return value
}
511
+
512
// Reconnect backoff in seconds: exponential (3^retries / 100) capped at 20,
// scaled by a random jitter factor in [0.5, 1).
function backoff(retries) {
  const cap = Math.min(3 ** retries / 100, 20)
  const jitter = 0.5 + Math.random() / 2
  return jitter * cap
}
515
+
516
// Default connection max lifetime: a random duration between 30 and 60
// minutes (returned in seconds), so pooled connections don't all expire at
// the same moment.
function max_lifetime() {
  const minutes = 30 + Math.random() * 30
  return 60 * minutes
}
519
+
520
// Normalizes the user-supplied `transform` option: each of column/value/row
// may be a bare function (used as `from`) or a { from, to } pair.
function parseTransform(x) {
  const pair = fn => ({
    from: typeof fn === 'function' ? fn : fn && fn.from,
    to: fn && fn.to
  })
  return {
    undefined: x.undefined,
    column: pair(x.column),
    value: pair(x.value),
    row: pair(x.row)
  }
}
537
+
538
// Extracts connection parts from a connection string. Multi-host strings
// ("a,b:5433") are not valid WHATWG URLs, so the raw host list is carved out
// by hand and only the first host is fed to `new URL` for everything else.
function parseUrl(url) {
  if (!url || typeof url !== 'string')
    return { url: { searchParams: new Map() } }

  const afterScheme = url.slice(url.indexOf('://') + 3)
  const authority = afterScheme.split(/[?/]/)[0]
  const host = decodeURIComponent(authority.slice(authority.indexOf('@') + 1))
  const parsed = new URL(url.replace(host, host.split(',')[0]))

  return {
    url: {
      username: decodeURIComponent(parsed.username),
      password: decodeURIComponent(parsed.password),
      host: parsed.host,
      hostname: parsed.hostname,
      port: parsed.port,
      pathname: parsed.pathname,
      searchParams: parsed.searchParams
    },
    multihost: host.indexOf(',') > -1 && host
  }
}
561
+
562
// Best-effort lookup of the local username, falling back to common
// environment variables when os.userInfo() throws (e.g. no passwd entry).
function osUsername() {
  try {
    return os.userInfo().username // eslint-disable-line
  } catch (_) {
    const env = process.env // eslint-disable-line
    return env.USERNAME || env.USER || env.LOGNAME
  }
}
@@ -0,0 +1,70 @@
1
+ import Stream from 'node:stream'
2
+
3
/**
 * Opens (or creates, when `oid` is falsy) a Postgres large object inside its
 * own transaction and resolves with a handle exposing read/write/seek/stream
 * helpers. The transaction is deliberately held open until `close()` is
 * called — that is what the inner unresolved Promise is for.
 *
 * `mode` defaults to 0x00020000 | 0x00040000 — presumably libpq's
 * INV_WRITE | INV_READ; verify against the PostgreSQL large object docs.
 */
export default function largeObject(sql, oid, mode = 0x00020000 | 0x00040000) {
  return new Promise(async(resolve, reject) => {
    await sql.begin(async sql => {
      let finish
      // Create a fresh large object when no oid was supplied.
      !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
      const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`

      const lo = {
        writable,
        readable,
        close     : () => sql`select lo_close(${ fd })`.then(finish),
        tell      : () => sql`select lo_tell64(${ fd })`,
        read      : (x) => sql`select loread(${ fd }, ${ x }) as data`,
        write     : (x) => sql`select lowrite(${ fd }, ${ x })`,
        truncate  : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
        seek      : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
        // Seeks to the end to measure size, then restores the position.
        size      : () => sql`
          select
            lo_lseek64(${ fd }, location, 0) as position,
            seek.size
          from (
            select
              lo_lseek64($1, 0, 2) as size,
              tell.location
            from (select lo_tell64($1) as location) tell
          ) seek
        `
      }

      resolve(lo)

      // Keeps the surrounding transaction open until close() calls finish.
      return new Promise(async r => finish = r)

      // Readable stream over the object, optionally bounded by start/end.
      async function readable({
        highWaterMark = 2048 * 8,
        start = 0,
        end = Infinity
      } = {}) {
        let max = end - start
        start && await lo.seek(start)
        return new Stream.Readable({
          highWaterMark,
          async read(size) {
            const l = size > max ? size - max : size
            max -= size
            const [{ data }] = await lo.read(l)
            this.push(data)
            if (data.length < size)
              this.push(null)
          }
        })
      }

      // Writable stream into the object, starting at `start` when given.
      async function writable({
        highWaterMark = 2048 * 8,
        start = 0
      } = {}) {
        start && await lo.seek(start)
        return new Stream.Writable({
          highWaterMark,
          write(chunk, encoding, callback) {
            lo.write(chunk).then(() => callback(), callback)
          }
        })
      }
    }).catch(reject)
  })
}