neopg 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
@@ -0,0 +1,53 @@
+ const PostgresError = module.exports.PostgresError = class PostgresError extends Error {
+   constructor(x) {
+     super(x.message)
+     this.name = this.constructor.name
+     Object.assign(this, x)
+   }
+ }
+
+ const Errors = module.exports.Errors = {
+   connection,
+   postgres,
+   generic,
+   notSupported
+ }
+
+ function connection(x, options, socket) {
+   const { host, port } = socket || options
+   const error = Object.assign(
+     new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
+     {
+       code: x,
+       errno: x,
+       address: options.path || host
+     }, options.path ? {} : { port: port }
+   )
+   Error.captureStackTrace(error, connection)
+   return error
+ }
+
+ function postgres(x) {
+   const error = new PostgresError(x)
+   Error.captureStackTrace(error, postgres)
+   return error
+ }
+
+ function generic(code, message) {
+   const error = Object.assign(new Error(code + ': ' + message), { code })
+   Error.captureStackTrace(error, generic)
+   return error
+ }
+
+ /* c8 ignore next 10 */
+ function notSupported(x) {
+   const error = Object.assign(
+     new Error(x + ' (B) is not supported'),
+     {
+       code: 'MESSAGE_NOT_SUPPORTED',
+       name: x
+     }
+   )
+   Error.captureStackTrace(error, notSupported)
+   return error
+ }
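
For reference, a minimal sketch of how the error helpers above behave. The './errors.js' path matches the require in the next hunk; the call sites and error payloads are hypothetical:

const { Errors, PostgresError } = require('./errors.js')

// generic() prefixes the message with the SQLSTATE code and attaches it as a property.
const cancelErr = Errors.generic('57014', 'canceling statement due to user request')
// cancelErr.message === '57014: canceling statement due to user request'
// cancelErr.code === '57014'

// postgres() wraps a raw backend error payload in PostgresError and copies its fields across.
const backendErr = Errors.postgres({ message: 'relation "foo" does not exist', code: '42P01' })
// backendErr instanceof PostgresError === true
// backendErr.code === '42P01'
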
@@ -0,0 +1,566 @@
+ const os = require('os')
+ const fs = require('fs')
+
+ const {
+   mergeUserTypes,
+   inferType,
+   Parameter,
+   Identifier,
+   Builder,
+   toPascal,
+   pascal,
+   toCamel,
+   camel,
+   toKebab,
+   kebab,
+   fromPascal,
+   fromCamel,
+   fromKebab
+ } = require('./types.js')
+
+ const Connection = require('./connection.js')
+ const { Query, CLOSE } = require('./query.js')
+ const Queue = require('./queue.js')
+ const { Errors, PostgresError } = require('./errors.js')
+ const Subscribe = require('./subscribe.js')
+ const largeObject = require('./large.js')
+
+ Object.assign(Postgres, {
+   PostgresError,
+   toPascal,
+   pascal,
+   toCamel,
+   camel,
+   toKebab,
+   kebab,
+   fromPascal,
+   fromCamel,
+   fromKebab,
+   BigInt: {
+     to: 20,
+     from: [20],
+     parse: x => BigInt(x), // eslint-disable-line
+     serialize: x => x.toString()
+   }
+ })
+
+ module.exports = Postgres
+
+ function Postgres(a, b) {
+   const options = parseOptions(a, b)
+       , subscribe = options.no_subscribe || Subscribe(Postgres, { ...options })
+
+   let ending = false
+
+   const queries = Queue()
+       , connecting = Queue()
+       , reserved = Queue()
+       , closed = Queue()
+       , ended = Queue()
+       , open = Queue()
+       , busy = Queue()
+       , full = Queue()
+       , queues = { connecting, reserved, closed, ended, open, busy, full }
+
+   const connections = [...Array(options.max)].map(() => Connection(options, queues, { onopen, onend, onclose }))
+
+   const sql = Sql(handler)
+
+   Object.assign(sql, {
+     get parameters() { return options.parameters },
+     largeObject: largeObject.bind(null, sql),
+     subscribe,
+     CLOSE,
+     END: CLOSE,
+     PostgresError,
+     options,
+     reserve,
+     listen,
+     begin,
+     close,
+     end
+   })
+
+   return sql
+
+   function Sql(handler) {
+     handler.debug = options.debug
+
+     Object.entries(options.types).reduce((acc, [name, type]) => {
+       acc[name] = (x) => new Parameter(x, type.to)
+       return acc
+     }, typed)
+
+     Object.assign(sql, {
+       types: typed,
+       typed,
+       unsafe,
+       notify,
+       array,
+       json,
+       file
+     })
+
+     return sql
+
+     function typed(value, type) {
+       return new Parameter(value, type)
+     }
+
+     function sql(strings, ...args) {
+       const query = strings && Array.isArray(strings.raw)
+         ? new Query(strings, args, handler, cancel)
+         : typeof strings === 'string' && !args.length
+         ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
+         : new Builder(strings, args)
+       return query
+     }
+
+     function unsafe(string, args = [], options = {}) {
+       arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+       const query = new Query([string], args, handler, cancel, {
+         prepare: false,
+         ...options,
+         simple: 'simple' in options ? options.simple : args.length === 0
+       })
+       return query
+     }
+
+     function file(path, args = [], options = {}) {
+       arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
+       const query = new Query([], args, (query) => {
+         fs.readFile(path, 'utf8', (err, string) => {
+           if (err)
+             return query.reject(err)
+
+           query.strings = [string]
+           handler(query)
+         })
+       }, cancel, {
+         ...options,
+         simple: 'simple' in options ? options.simple : args.length === 0
+       })
+       return query
+     }
+   }
+
+   async function listen(name, fn, onlisten) {
+     const listener = { fn, onlisten }
+
+     const sql = listen.sql || (listen.sql = Postgres({
+       ...options,
+       max: 1,
+       idle_timeout: null,
+       max_lifetime: null,
+       fetch_types: false,
+       onclose() {
+         Object.entries(listen.channels).forEach(([name, { listeners }]) => {
+           delete listen.channels[name]
+           Promise.all(listeners.map(l => listen(name, l.fn, l.onlisten).catch(() => { /* noop */ })))
+         })
+       },
+       onnotify(c, x) {
+         c in listen.channels && listen.channels[c].listeners.forEach(l => l.fn(x))
+       }
+     }))
+
+     const channels = listen.channels || (listen.channels = {})
+         , exists = name in channels
+
+     if (exists) {
+       channels[name].listeners.push(listener)
+       const result = await channels[name].result
+       listener.onlisten && listener.onlisten()
+       return { state: result.state, unlisten }
+     }
+
+     channels[name] = { result: sql`listen ${
+       sql.unsafe('"' + name.replace(/"/g, '""') + '"')
+     }`, listeners: [listener] }
+     const result = await channels[name].result
+     listener.onlisten && listener.onlisten()
+     return { state: result.state, unlisten }
+
+     async function unlisten() {
+       if (name in channels === false)
+         return
+
+       channels[name].listeners = channels[name].listeners.filter(x => x !== listener)
+       if (channels[name].listeners.length)
+         return
+
+       delete channels[name]
+       return sql`unlisten ${
+         sql.unsafe('"' + name.replace(/"/g, '""') + '"')
+       }`
+     }
+   }
+
+   async function notify(channel, payload) {
+     return await sql`select pg_notify(${ channel }, ${ '' + payload })`
+   }
+
+   async function reserve() {
+     const queue = Queue()
+     const c = open.length
+       ? open.shift()
+       : await new Promise((resolve, reject) => {
+         const query = { reserve: resolve, reject }
+         queries.push(query)
+         closed.length && connect(closed.shift(), query)
+       })
+
+     move(c, reserved)
+     c.reserved = () => queue.length
+       ? c.execute(queue.shift())
+       : move(c, reserved)
+     c.reserved.release = true
+
+     const sql = Sql(handler)
+     sql.release = () => {
+       c.reserved = null
+       onopen(c)
+     }
+
+     return sql
+
+     function handler(q) {
+       c.queue === full
+         ? queue.push(q)
+         : c.execute(q) || move(c, full)
+     }
+   }
+
+   async function begin(options, fn) {
+     !fn && (fn = options, options = '')
+     const queries = Queue()
+     let savepoints = 0
+       , connection
+       , prepare = null
+
+     try {
+       await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).execute()
+       return await Promise.race([
+         scope(connection, fn),
+         new Promise((_, reject) => connection.onclose = reject)
+       ])
+     } catch (error) {
+       throw error
+     }
+
+     async function scope(c, fn, name) {
+       const sql = Sql(handler)
+       sql.savepoint = savepoint
+       sql.prepare = x => prepare = x.replace(/[^a-z0-9$-_. ]/gi)
+       let uncaughtError
+         , result
+
+       name && await sql`savepoint ${ sql(name) }`
+       try {
+         result = await new Promise((resolve, reject) => {
+           const x = fn(sql)
+           Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
+         })
+
+         if (uncaughtError)
+           throw uncaughtError
+       } catch (e) {
+         await (name
+           ? sql`rollback to ${ sql(name) }`
+           : sql`rollback`
+         )
+         throw e instanceof PostgresError && e.code === '25P02' && uncaughtError || e
+       }
+
+       if (!name) {
+         prepare
+           ? await sql`prepare transaction '${ sql.unsafe(prepare) }'`
+           : await sql`commit`
+       }
+
+       return result
+
+       function savepoint(name, fn) {
+         if (name && Array.isArray(name.raw))
+           return savepoint(sql => sql.apply(sql, arguments))
+
+         arguments.length === 1 && (fn = name, name = null)
+         return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
+       }
+
+       function handler(q) {
+         q.catch(e => uncaughtError || (uncaughtError = e))
+         c.queue === full
+           ? queries.push(q)
+           : c.execute(q) || move(c, full)
+       }
+     }
+
+     function onexecute(c) {
+       connection = c
+       move(c, reserved)
+       c.reserved = () => queries.length
+         ? c.execute(queries.shift())
+         : move(c, reserved)
+     }
+   }
+
+   function move(c, queue) {
+     c.queue.remove(c)
+     queue.push(c)
+     c.queue = queue
+     queue === open
+       ? c.idleTimer.start()
+       : c.idleTimer.cancel()
+     return c
+   }
+
+   function json(x) {
+     return new Parameter(x, 3802)
+   }
+
+   function array(x, type) {
+     if (!Array.isArray(x))
+       return array(Array.from(arguments))
+
+     return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
+   }
+
+   function handler(query) {
+     if (ending)
+       return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
+
+     if (open.length)
+       return go(open.shift(), query)
+
+     if (closed.length)
+       return connect(closed.shift(), query)
+
+     busy.length
+       ? go(busy.shift(), query)
+       : queries.push(query)
+   }
+
+   function go(c, query) {
+     return c.execute(query)
+       ? move(c, busy)
+       : move(c, full)
+   }
+
+   function cancel(query) {
+     return new Promise((resolve, reject) => {
+       query.state
+         ? query.active
+           ? Connection(options).cancel(query.state, resolve, reject)
+           : query.cancelled = { resolve, reject }
+         : (
+           queries.remove(query),
+           query.cancelled = true,
+           query.reject(Errors.generic('57014', 'canceling statement due to user request')),
+           resolve()
+         )
+     })
+   }
+
+   async function end({ timeout = null } = {}) {
+     if (ending)
+       return ending
+
+     await 1
+     let timer
+     return ending = Promise.race([
+       new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
+       Promise.all(connections.map(c => c.end()).concat(
+         listen.sql ? listen.sql.end({ timeout: 0 }) : [],
+         subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
+       ))
+     ]).then(() => clearTimeout(timer))
+   }
+
+   async function close() {
+     await Promise.all(connections.map(c => c.end()))
+   }
+
+   async function destroy(resolve) {
+     await Promise.all(connections.map(c => c.terminate()))
+     while (queries.length)
+       queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
+     resolve()
+   }
+
+   function connect(c, query) {
+     move(c, connecting)
+     c.connect(query)
+     return c
+   }
+
+   function onend(c) {
+     move(c, ended)
+   }
+
+   function onopen(c) {
+     if (queries.length === 0)
+       return move(c, open)
+
+     let max = Math.ceil(queries.length / (connecting.length + 1))
+       , ready = true
+
+     while (ready && queries.length && max-- > 0) {
+       const query = queries.shift()
+       if (query.reserve)
+         return query.reserve(c)
+
+       ready = c.execute(query)
+     }
+
+     ready
+       ? move(c, busy)
+       : move(c, full)
+   }
+
+   function onclose(c, e) {
+     move(c, closed)
+     c.reserved = null
+     c.onclose && (c.onclose(e), c.onclose = null)
+     options.onclose && options.onclose(c.id)
+     queries.length && connect(c, queries.shift())
+   }
+ }
+
+ function parseOptions(a, b) {
+   if (a && a.shared)
+     return a
+
+   const env = process.env // eslint-disable-line
+       , o = (!a || typeof a === 'string' ? b : a) || {}
+       , { url, multihost } = parseUrl(a)
+       , query = [...url.searchParams].reduce((a, [b, c]) => (a[b] = c, a), {})
+       , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
+       , port = o.port || url.port || env.PGPORT || 5432
+       , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
+
+   o.no_prepare && (o.prepare = false)
+   query.sslmode && (query.ssl = query.sslmode, delete query.sslmode)
+   'timeout' in o && (console.log('The timeout option is deprecated, use idle_timeout instead'), o.idle_timeout = o.timeout) // eslint-disable-line
+   query.sslrootcert === 'system' && (query.ssl = 'verify-full')
+
+   const ints = ['idle_timeout', 'connect_timeout', 'max_lifetime', 'max_pipeline', 'backoff', 'keep_alive']
+   const defaults = {
+     max : 10,
+     ssl : false,
+     idle_timeout : null,
+     connect_timeout : 30,
+     max_lifetime : max_lifetime,
+     max_pipeline : 100,
+     backoff : backoff,
+     keep_alive : 60,
+     prepare : true,
+     debug : false,
+     fetch_types : true,
+     publications : 'alltables',
+     target_session_attrs: null
+   }
+
+   return {
+     host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
+     port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
+     path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
+     database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
+     user : user,
+     pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
+     ...Object.entries(defaults).reduce(
+       (acc, [k, d]) => {
+         const value = k in o ? o[k] : k in query
+           ? (query[k] === 'disable' || query[k] === 'false' ? false : query[k])
+           : env['PG' + k.toUpperCase()] || d
+         acc[k] = typeof value === 'string' && ints.includes(k)
+           ? +value
+           : value
+         return acc
+       },
+       {}
+     ),
+     connection : {
+       application_name: env.PGAPPNAME || 'postgres.js',
+       ...o.connection,
+       ...Object.entries(query).reduce((acc, [k, v]) => (k in defaults || (acc[k] = v), acc), {})
+     },
+     types : o.types || {},
+     target_session_attrs: tsa(o, url, env),
+     onnotice : o.onnotice,
+     onnotify : o.onnotify,
+     onclose : o.onclose,
+     onparameter : o.onparameter,
+     socket : o.socket,
+     transform : parseTransform(o.transform || { undefined: undefined }),
+     parameters : {},
+     shared : { retries: 0, typeArrayMap: {} },
+     ...mergeUserTypes(o.types)
+   }
+ }
+
+ function tsa(o, url, env) {
+   const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
+   if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
+     return x
+
+   throw new Error('target_session_attrs ' + x + ' is not supported')
+ }
+
+ function backoff(retries) {
+   return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
+ }
+
+ function max_lifetime() {
+   return 60 * (30 + Math.random() * 30)
+ }
+
+ function parseTransform(x) {
+   return {
+     undefined: x.undefined,
+     column: {
+       from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
+       to: x.column && x.column.to
+     },
+     value: {
+       from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
+       to: x.value && x.value.to
+     },
+     row: {
+       from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
+       to: x.row && x.row.to
+     }
+   }
+ }
+
+ function parseUrl(url) {
+   if (!url || typeof url !== 'string')
+     return { url: { searchParams: new Map() } }
+
+   let host = url
+   host = host.slice(host.indexOf('://') + 3).split(/[?/]/)[0]
+   host = decodeURIComponent(host.slice(host.indexOf('@') + 1))
+
+   const urlObj = new URL(url.replace(host, host.split(',')[0]))
+
+   return {
+     url: {
+       username: decodeURIComponent(urlObj.username),
+       password: decodeURIComponent(urlObj.password),
+       host: urlObj.host,
+       hostname: urlObj.hostname,
+       port: urlObj.port,
+       pathname: urlObj.pathname,
+       searchParams: urlObj.searchParams
+     },
+     multihost: host.indexOf(',') > -1 && host
+   }
+ }
+
+ function osUsername() {
+   try {
+     return os.userInfo().username // eslint-disable-line
+   } catch (_) {
+     return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
+   }
+ }
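
For reference, a minimal usage sketch of the Postgres factory exported above, assuming this hunk is the package's main index.js. The connection details are hypothetical, and awaiting a query assumes the Query objects from './query.js' (not shown in this diff) are thenable:

const postgres = require('./index.js')

// parseOptions() falls back to PGHOST, PGPORT, PGUSER, PGPASSWORD, PGDATABASE, etc. for anything omitted.
const sql = postgres({
  host: 'localhost',
  port: 5432,
  user: 'app',
  pass: 'secret',
  database: 'app',
  max: 10,           // pool size; one Connection object is created per slot up front
  idle_timeout: 30   // listed in `ints`, so a string value would be coerced to a number
})

async function main() {
  const result = await sql`select 1 as one`        // tagged template -> new Query(...)
  await sql.begin(async sql => {                   // runs the callback between begin and commit
    await sql`select 2 as two`
    await sql.savepoint(async sql => sql`select 3 as three`)
  })
  await sql.end({ timeout: 5 })                    // also ends the listen/subscribe helper pools
  return result
}

main().catch(console.error)
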
@@ -0,0 +1,70 @@
+ const Stream = require('stream')
+
+ module.exports = largeObject;function largeObject(sql, oid, mode = 0x00020000 | 0x00040000) {
+   return new Promise(async(resolve, reject) => {
+     await sql.begin(async sql => {
+       let finish
+       !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
+       const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
+
+       const lo = {
+         writable,
+         readable,
+         close : () => sql`select lo_close(${ fd })`.then(finish),
+         tell : () => sql`select lo_tell64(${ fd })`,
+         read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
+         write : (x) => sql`select lowrite(${ fd }, ${ x })`,
+         truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
+         seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
+         size : () => sql`
+           select
+             lo_lseek64(${ fd }, location, 0) as position,
+             seek.size
+           from (
+             select
+               lo_lseek64($1, 0, 2) as size,
+               tell.location
+             from (select lo_tell64($1) as location) tell
+           ) seek
+         `
+       }
+
+       resolve(lo)
+
+       return new Promise(async r => finish = r)
+
+       async function readable({
+         highWaterMark = 2048 * 8,
+         start = 0,
+         end = Infinity
+       } = {}) {
+         let max = end - start
+         start && await lo.seek(start)
+         return new Stream.Readable({
+           highWaterMark,
+           async read(size) {
+             const l = size > max ? size - max : size
+             max -= size
+             const [{ data }] = await lo.read(l)
+             this.push(data)
+             if (data.length < size)
+               this.push(null)
+           }
+         })
+       }
+
+       async function writable({
+         highWaterMark = 2048 * 8,
+         start = 0
+       } = {}) {
+         start && await lo.seek(start)
+         return new Stream.Writable({
+           highWaterMark,
+           write(chunk, encoding, callback) {
+             lo.write(chunk).then(() => callback(), callback)
+           }
+         })
+       }
+     }).catch(reject)
+   })
+ }
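
For reference, a minimal sketch of the large-object helper above, as exposed via sql.largeObject in the previous hunk. The function name, file path and oid handling are hypothetical, and the returned streams are assumed to behave as standard Node.js streams:

const fs = require('fs')

// Copy an existing large object (identified by oid) into a local file.
async function dumpLargeObject(sql, oid, path) {
  const lo = await sql.largeObject(oid)   // opens the object inside sql.begin(...)
  const readable = await lo.readable()    // Stream.Readable backed by loread()
  await new Promise((resolve, reject) =>
    readable
      .pipe(fs.createWriteStream(path))
      .on('finish', resolve)
      .on('error', reject)
  )
  await lo.close()                        // lo_close(), then resolves `finish` so the wrapping transaction can commit
}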