pogi 2.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.vscode/launch.json +35 -0
- package/CHANGELOG.md +277 -0
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/docs/API/PgDb.md +218 -0
- package/docs/API/PgSchema.md +91 -0
- package/docs/API/PgTable.md +365 -0
- package/docs/API/QueryOptions.md +77 -0
- package/docs/API/condition.md +133 -0
- package/docs/connection.md +91 -0
- package/docs/css/docs.css +164 -0
- package/docs/executingSqlFile.md +44 -0
- package/docs/faq.md +15 -0
- package/docs/functions.md +19 -0
- package/docs/generatingInterfaceForTables.md +35 -0
- package/docs/index.md +48 -0
- package/docs/logger.md +40 -0
- package/docs/mappingDatabaseTypes.md +89 -0
- package/docs/notification.md +19 -0
- package/docs/pitfalls.md +73 -0
- package/docs/streams.md +68 -0
- package/docs/transaction.md +65 -0
- package/lib/bin/generateInterface.d.ts +1 -0
- package/lib/bin/generateInterface.js +53 -0
- package/lib/bin/generateInterface.js.map +1 -0
- package/lib/connectionOptions.d.ts +25 -0
- package/lib/connectionOptions.js +3 -0
- package/lib/connectionOptions.js.map +1 -0
- package/lib/index.d.ts +6 -0
- package/lib/index.js +10 -0
- package/lib/index.js.map +1 -0
- package/lib/pgConverters.d.ts +10 -0
- package/lib/pgConverters.js +66 -0
- package/lib/pgConverters.js.map +1 -0
- package/lib/pgDb.d.ts +86 -0
- package/lib/pgDb.js +745 -0
- package/lib/pgDb.js.map +1 -0
- package/lib/pgDbLogger.d.ts +5 -0
- package/lib/pgDbLogger.js +3 -0
- package/lib/pgDbLogger.js.map +1 -0
- package/lib/pgDbOperators.d.ts +113 -0
- package/lib/pgDbOperators.js +44 -0
- package/lib/pgDbOperators.js.map +1 -0
- package/lib/pgSchema.d.ts +16 -0
- package/lib/pgSchema.js +16 -0
- package/lib/pgSchema.js.map +1 -0
- package/lib/pgTable.d.ts +131 -0
- package/lib/pgTable.js +322 -0
- package/lib/pgTable.js.map +1 -0
- package/lib/pgUtils.d.ts +31 -0
- package/lib/pgUtils.js +157 -0
- package/lib/pgUtils.js.map +1 -0
- package/lib/queryAble.d.ts +76 -0
- package/lib/queryAble.js +330 -0
- package/lib/queryAble.js.map +1 -0
- package/lib/queryWhere.d.ts +8 -0
- package/lib/queryWhere.js +249 -0
- package/lib/queryWhere.js.map +1 -0
- package/mkdocs.yml +25 -0
- package/package.json +65 -0
- package/spec/resources/init.sql +122 -0
- package/spec/resources/throw_exception.sql +5 -0
- package/spec/resources/tricky.sql +13 -0
- package/spec/run.js +5 -0
- package/spec/support/jasmine.json +9 -0
- package/src/bin/generateInterface.ts +54 -0
- package/src/connectionOptions.ts +42 -0
- package/src/index.ts +6 -0
- package/src/pgConverters.ts +55 -0
- package/src/pgDb.ts +820 -0
- package/src/pgDbLogger.ts +13 -0
- package/src/pgDbOperators.ts +62 -0
- package/src/pgSchema.ts +15 -0
- package/src/pgTable.ts +401 -0
- package/src/pgUtils.ts +176 -0
- package/src/queryAble.ts +393 -0
- package/src/queryWhere.ts +326 -0
- package/src/test/pgDbOperatorSpec.ts +492 -0
- package/src/test/pgDbSpec.ts +1339 -0
- package/src/test/pgServiceRestartTest.ts +1500 -0
- package/src/tsconfig.json +33 -0
- package/utils_sql/lower.sql +4 -0
package/src/pgUtils.ts
ADDED
@@ -0,0 +1,176 @@
import {QueryOptions, ResultFieldType, QueryAble} from "./queryAble";
import {FieldType} from "./pgDb";
import {PgDbLogger} from "./pgDbLogger";
import * as _ from 'lodash';

const util = require('util');
const NAMED_PARAMS_REGEXP = /(?:^|[^:]):(!?[a-zA-Z0-9_]+)/g; // do not convert "::type cast"
const ASC_DESC_REGEXP = /^([^" (]+)( asc| desc)?$/;

export let pgUtils = {

    logError(logger: PgDbLogger, options: { error?: string|Error, sql: string, params: any, connection }) {
        let { error, sql, params, connection } = options;
        logger.error(error, sql, util.inspect(logger.paramSanitizer ? logger.paramSanitizer(params) : params, false, null), connection ? connection.processID : null);
    },

    quoteField(f) {
        return f.indexOf('"') == -1 && f.indexOf('(') == -1 ? '"' + f + '"' : f;
    },

    processQueryFields(options: QueryOptions): string {
        let s = options && options.distinct ? ' DISTINCT ' : ' ';
        if (options && options.fields) {
            if (Array.isArray(options.fields)) {
                return s + options.fields.map(pgUtils.quoteField).join(', ');
            } else {
                return s + options.fields;
            }
        } else {
            return s + ' *';
        }
    },

    /**
     * :named -> $1 (not works with DDL (schema, table, column))
     * :!named -> "value" (for DDL (schema, table, column))
     * do not touch ::type cast
     */
    processNamedParams(sql: string, params: Object) {
        let sql2 = [];
        let params2 = [];

        let p = NAMED_PARAMS_REGEXP.exec(sql);
        let lastIndex = 0;
        while (p) {
            let ddl = false;
            let name = p[1];
            if (name[0] == '!') {
                name = name.slice(1);
                ddl = true;
            }

            if (!(name in params)) {
                throw new Error(`No ${p[1]} in params (keys: ${Object.keys(params)})`);
            }
            sql2.push(sql.slice(lastIndex, NAMED_PARAMS_REGEXP.lastIndex - p[1].length - 1));

            if (ddl) {
                sql2.push('"' + ('' + params[name]).replace(/"/g, '""') + '"');
            } else {
                params2.push(params[name]);
                sql2.push('$' + params2.length);
            }
            lastIndex = NAMED_PARAMS_REGEXP.lastIndex;
            p = NAMED_PARAMS_REGEXP.exec(sql);
        }
        sql2.push(sql.substr(lastIndex));

        return {
            sql: sql2.join(''),
            params: params2
        }
    },

    processQueryOptions(options: QueryOptions): string {
        options = options || {};
        let sql = '';

        if (options.groupBy) {
            if (Array.isArray(options.groupBy)) {
                sql += ' GROUP BY ' + options.groupBy.map(pgUtils.quoteField).join(',');
            } else {
                sql += ' GROUP BY ' + pgUtils.quoteField(options.groupBy);
            }
        }
        if (options.orderBy) {
            if (typeof options.orderBy == 'string') {
                sql += ' ORDER BY ' + pgUtils.quoteField(options.orderBy);
            }
            else if (Array.isArray(options.orderBy)) {
                let orderBy = options.orderBy.map(v =>
                    v[0] == '+' ? pgUtils.quoteField(v.substr(1, v.length - 1)) + ' asc' :
                    v[0] == '-' ? pgUtils.quoteField(v.substr(1, v.length - 1)) + ' desc' :
                    v.replace(ASC_DESC_REGEXP, '"$1"$2'));
                sql += ' ORDER BY ' + orderBy.join(',');
            } else {
                let orderBy = [];
                _.forEach(options.orderBy, (v, k) => orderBy.push(pgUtils.quoteField(k) + ' ' + v));
                sql += ' ORDER BY ' + orderBy.join(',');
            }
        }
        if (options.limit) {
            sql += util.format(' LIMIT %d', options.limit);
        }
        if (options.offset) {
            sql += util.format(' OFFSET %d', options.offset);
        }
        if (options.forUpdate) {
            sql += ' FOR UPDATE';
        }
        return sql;
    },

    /**
     * NOTE-DATE: there are 2 approaches to keep tz (the time correctly):
     * 1) use Date.toISOString() function, but then the $x placeholder should be TIMESTAMP WITH TIME ZONE $x
     * 2) use Date, and then no need to change the placeholder $x
     * lets use 2)
     */
    transformInsertUpdateParams(param: any, fieldType: FieldType) {
        return (param != null && fieldType == FieldType.JSON) ? JSON.stringify(param) :
            (param != null && fieldType == FieldType.TIME && !(param instanceof Date)) ? new Date(param) : param;
    },

    postProcessResult(res: any[], fields: ResultFieldType[], pgdbTypeParsers: { [oid: number]: (s:string) => any }) {
        if (res) {
            if (res[0] && !Array.isArray(res[0])) {
                if (Object.keys(res[0]).length != fields.length) {
                    throw Error("Name collision for the query, two or more fields have the same name.");
                }
            }
            pgUtils.convertTypes(res, fields, pgdbTypeParsers);
        }
    },

    convertTypes(res: any[], fields: ResultFieldType[], pgdbTypeParsers: { [oid: number]: (s:string) => any }) {
        let isArrayMode = Array.isArray(res[0]);
        fields.forEach((field, i) => {
            if (pgdbTypeParsers[field.dataTypeID]) {
                if (isArrayMode) {
                    res.forEach(e => e[i] = e[i] == null ? null : pgdbTypeParsers[field.dataTypeID](e[i]));
                } else {
                    res.forEach(e => e[field.name] = e[field.name] == null ? null : pgdbTypeParsers[field.dataTypeID](e[field.name]));
                }
            }
        });
    },

    createFunctionCaller(q: QueryAble, fn: { schema: string, name: string, return_single_row: boolean, return_single_value: boolean }) {
        return async (...args) => {
            let placeHolders = [];
            let params = [];
            args.forEach((arg) => {
                placeHolders.push('$' + (placeHolders.length + 1));
                params.push(arg);
            });
            let res = await q.query(`SELECT "${fn.schema}"."${fn.name}"(${placeHolders.join(',')})`, params);

            if (fn.return_single_value) {
                let keys = res[0] ? Object.keys(res[0]) : [];
                if (keys.length != 1) {
                    throw Error(`Return type error. schema: ${fn.schema} fn: ${fn.name} expected return type: single value, current value:` + JSON.stringify(res))
                }
                res = res.map((r) => r[keys[0]]);
            }
            if (fn.return_single_row) {
                if (res.length != 1) {
                    throw Error(`Return type error. schema: ${fn.schema} fn: ${fn.name} expected return type: single value, current value:` + JSON.stringify(res))
                }
                return res[0];
            } else {
                return res;
            }
        }
    }
};
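
Note (illustration, not part of the package diff): the following sketch shows the kind of transformation that pgUtils.processNamedParams and pgUtils.processQueryOptions above perform; the schema, table and values are made up for the example.

    // Hypothetical usage of the pgUtils module shown above.
    import { pgUtils } from "./pgUtils";

    // Named parameters (:id) become positional placeholders ($1, $2, ...);
    // :!name parameters are inlined as quoted identifiers; ::type casts are left untouched.
    const { sql, params } = pgUtils.processNamedParams(
        'SELECT * FROM :!schema.:!table WHERE id = :id AND created_at::date = :day',
        { schema: 'app', table: 'users', id: 42, day: '2020-01-01' }
    );
    // sql    === 'SELECT * FROM "app"."users" WHERE id = $1 AND created_at::date = $2'
    // params === [42, '2020-01-01']

    // Query options are rendered into a SQL suffix; a "+"/"-" prefix maps to asc/desc.
    const suffix = pgUtils.processQueryOptions({ orderBy: ['+name', '-created_at'], limit: 10, offset: 20 });
    // suffix === ' ORDER BY "name" asc,"created_at" desc LIMIT 10 OFFSET 20'
    console.log(sql, params, suffix);
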
package/src/queryAble.ts
ADDED
@@ -0,0 +1,393 @@
import { PgDbLogger } from "./pgDbLogger";
import { pgUtils } from "./pgUtils";
import * as stream from "stream";

const util = require('util');
const QueryStream = require('pg-query-stream');
const through = require('through');

export interface QueryOptions {
    limit?: number;
    offset?: number;
    orderBy?: string | string[] | { [fieldName: string]: 'asc' | 'desc' };//free text or column list
    groupBy?: string | string[];//free text or column list
    fields?: string | string[];//free text or column list
    logger?: PgDbLogger;
    forUpdate?: boolean;
    distinct?: boolean;
    skipUndefined?: boolean;
}

export interface SqlQueryOptions {
    logger?: PgDbLogger;
}

export interface ResultFieldType {
    name: string,
    tableID: number,
    columnID: number,
    dataTypeID: number,
    dataTypeSize: number,
    dataTypeModifier: number,
    format: string
}

export interface ResultType {
    command: 'SELECT' | 'UPDATE' | 'DELETE',
    rowCount: number,
    oid: number,
    rows: any[],
    fields: ResultFieldType[],
    _parsers: Function[][],
    RowCtor: Function[],
    rowsAsArray: boolean,
    _getTypeParser: Function[]
}

export interface PgRowResult {
    columns: string[],
    rows: any[]
}

let defaultLogger = {
    log: () => { },
    error: () => { }
};

export class QueryAble {
    db;
    schema;
    protected logger: PgDbLogger;

    constructor() {
    }

    setLogger(logger: PgDbLogger) {
        this.logger = logger;
    }

    getLogger(useConsoleAsDefault = false) {
        return this.logger || this.schema && this.schema.logger || this.db.logger || (useConsoleAsDefault ? console : defaultLogger);
    }

    /** alias to {@link query} */
    async run(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<any[]> {
        return this.query(sql, params, options);
    }

    /**
     * Params can be
     * 1) array, then sql should have $1 $2 for placeholders
     * 2) object, then sql should have:
     *    :example -> for params in statements (set/where), will be transformed to $1 $2 ...
     *    :!example -> for DDL names (schema, table, column), will be replaced in the query
     * e.g. query('select * from a.b where id=$1;',['the_stage_is_set']);
     * e.g. query('select * from :!schema.:!table where id=:id;',{schema:'a',table:'b', id:'the_stage_is_set'});
     */
    async query(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<any[]> {
        let connection = this.db.connection;
        let logger = (options && options.logger || this.getLogger(false));
        return this.internalQuery({ connection, sql, params, logger });
    }

    protected async internalQuery(options: { connection, sql: string, params?: any, logger?}): Promise<any[]>;
    protected async internalQuery(options: { connection, sql: string, params?: any, logger?, rowMode: true }): Promise<PgRowResult>;
    protected async internalQuery(options: { connection, sql: string, params?: any, logger?, rowMode?: boolean }): Promise<any[] | PgRowResult> {
        if (this.db.needToFixConnectionForListen()) {
            await this.db.runRestartConnectionForListen();
        }
        let { connection, sql, params, logger } = options;
        logger = logger || this.getLogger(false);

        try {
            if (params && !Array.isArray(params)) {
                let p = pgUtils.processNamedParams(sql, params);
                sql = p.sql;
                params = p.params;
            }

            if (connection) {
                logger.log('reused connection', sql, util.inspect(params, false, null), connection.processID);
                let res = await connection.query({ text: sql, values: params, rowMode: options?.rowMode ? 'array' : undefined });
                await this.checkAndFixOids(connection, res.fields);
                this.postProcessFields(res.rows, res.fields, logger);

                return options?.rowMode ? { columns: (res.fields || []).map(f => f.name), rows: res.rows || [] } : res.rows;
            } else {
                connection = await this.db.pool.connect();
                logger.log('new connection', sql, util.inspect(params, false, null), connection.processID);
                connection.on('error', (err: Error) => { });
                let res = await connection.query({ text: sql, values: params, rowMode: options?.rowMode ? 'array' : undefined });
                await this.checkAndFixOids(connection, res.fields);
                connection.release();
                connection = null;
                this.postProcessFields(res.rows, res.fields, logger);

                return options?.rowMode ? { columns: (res.fields || []).map(f => f.name), rows: res.rows || [] } : res.rows;
            }
        } catch (e) {
            pgUtils.logError(logger, { error: e, sql, params, connection });
            if (connection) {
                try {
                    //If any problem has happened in a dedicated connection, (wrong sql format or non-accessible postgres server)
                    //close the connection to be a free connection in the pool,
                    //but keep the db.connection member non - null to crash in all of the following commands
                    connection.release();
                } catch (e) {
                    logger.error('connection error', e.message);
                }
            }
            throw e;
        }
    }

    /**
     * Same as query but response is two array: columns and rows and rows are arrays also not objects
     * This is useful for queries which have colliding column names
     */
    async queryAsRows(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<PgRowResult> {
        let connection = this.db.connection;
        let logger = (options && options.logger || this.getLogger(false));
        return this.internalQuery({ connection, sql, params, logger, rowMode: true });
    }

    /**
     * If the callback function return true, the connection will be closed.
     */
    async queryWithOnCursorCallback(sql: string, params: any[] | {}, options: SqlQueryOptions, callback: (any) => any): Promise<void> {
        if (this.db.needToFixConnectionForListen()) {
            await this.db.runRestartConnectionForListen();
        }
        let connection = this.db.connection;
        let logger = this.getLogger(true);

        try {
            if (params && !Array.isArray(params)) {
                let p = pgUtils.processNamedParams(sql, params);
                sql = p.sql;
                params = p.params;
            }

            let queryInternal = async () => {
                this.getLogger(false).log(sql, util.inspect(params, false, null), connection.processID);
                let fieldsToFix: ResultFieldType[];
                let isFirst = true;

                let query = new QueryStream(sql, params);
                let stream = connection.query(query);
                await new Promise((resolve, reject) => {
                    query.handleError = (err: Error, connection) => {
                        reject(err);
                    };
                    stream.on('data', (res) => {
                        try {
                            let fields = stream._result && stream._result.fields || stream.cursor._result && stream.cursor._result.fields;
                            if (isFirst) {
                                if (this.hasUnknownOids(fields)) {
                                    fieldsToFix = fields;
                                    stream.destroy();
                                    return;
                                }
                                isFirst = false;
                            }
                            this.postProcessFields([res], fields, this.getLogger(false));

                            if (callback(res)) {
                                stream.destroy();
                            }
                        } catch (e) {
                            reject(e);
                        }
                    });

                    stream.on('close', resolve);
                    stream.on('error', reject);
                });
                if (fieldsToFix) {
                    await this.checkAndFixOids(connection, fieldsToFix);
                    query = new QueryStream(sql, params);
                    stream = connection.query(query);
                    await new Promise((resolve, reject) => {
                        query.handleError = (err: Error, connection) => {
                            reject(err);
                        };
                        stream.on('data', (res) => {
                            try {
                                let fields = stream._result && stream._result.fields || stream.cursor._result && stream.cursor._result.fields;
                                this.postProcessFields([res], fields, this.getLogger(false));

                                if (callback(res)) {
                                    stream.destroy();
                                }
                            } catch (e) {
                                reject(e);
                            }
                        });

                        stream.on('close', resolve);
                        stream.on('error', reject);
                    });
                }
            }

            if (connection) {
                await queryInternal();
            } else {
                connection = await this.db.pool.connect();
                logger.log('new connection', sql, util.inspect(params, false, null), connection.processID);
                connection.on('error', (err: Error) => { });
                await queryInternal();
                connection.release();
                connection = null;
            }
        } catch (e) {
            pgUtils.logError(logger, { error: e, sql, params, connection });
            if (connection) {
                try {
                    connection.release();
                } catch (e) {
                    logger.error('connection error', e.message);
                }
            }
            throw e;
        }
    }

    async queryAsStream(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<stream.Readable> {
        if (this.db.needToFixConnectionForListen()) {
            await this.db.runRestartConnectionForListen();
        }
        let connection = this.db.connection;
        let logger = (options && options.logger || this.getLogger(false));
        let pgStream;
        let queriable = this;
        let isFirst = true;
        let convertTypeFilter = through(function (data) {
            try {
                let fields = pgStream._result && pgStream._result.fields || pgStream.cursor._result && pgStream.cursor._result.fields;
                if (isFirst) {
                    if (queriable.hasUnknownOids(fields)) {
                        throw new Error('[337] Query returns fields with unknown oid.');
                    }
                    isFirst = false;
                }
                queriable.postProcessFields([data], fields, queriable.db.pgdbTypeParsers);

                this.emit('data', data);
            } catch (err) {
                this.emit('error', err);
            }
        });
        convertTypeFilter.on('error', (e) => {
            if (connection) {
                try {
                    connection.release();
                } catch (e) {
                    logger.error('connection error', e.message);
                }
            }
            connection = null;
            pgUtils.logError(logger, { error: e, sql, params, connection });
        });

        try {
            if (params && !Array.isArray(params)) {
                let p = pgUtils.processNamedParams(sql, params);
                sql = p.sql;
                params = p.params;
            }

            if (connection) {
                logger.log(sql, util.inspect(params, false, null), connection.processID);
                let query = new QueryStream(sql, params);
                query.handleError = (err: Error, connection) => {
                    convertTypeFilter.emit('error', err);
                };
                pgStream = connection.query(query);
                return pgStream.pipe(convertTypeFilter);
            } else {
                connection = await this.db.pool.connect();
                logger.log('new connection', sql, util.inspect(params, false, null), connection.processID);
                connection.on('error', (err: Error) => { });
                let query = new QueryStream(sql, params);
                query.handleError = (err: Error, connection) => {
                    convertTypeFilter.emit('error', err);
                };
                pgStream = connection.query(query);
                pgStream.on('close', () => {
                    if (connection) connection.release();
                    connection = null;
                });
                pgStream.on('error', (e) => {
                    pgUtils.logError(logger, { error: e, sql, params, connection });
                    if (connection) connection.release();
                    connection = null;
                });
                return pgStream.pipe(convertTypeFilter);
            }
        } catch (e) {
            pgUtils.logError(logger, { error: e, sql, params, connection });
            throw e;
        }
    }

    async queryOne(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<any> {
        let res = await this.query(sql, params, options);
        if (res.length > 1) {
            let logger = (options && options.logger || this.getLogger(false));
            let error = Error('More then one rows exists');
            pgUtils.logError(logger, { error, sql, params, connection: this.db.connection });
            throw error;
        }
        return res[0];
    }

    async queryFirst(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<any> {
        let res = await this.query(sql, params, options);
        return res[0];
    }

    /** @return one record's one field */
    async queryOneField(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<any> {
        let res = await this.query(sql, params, options);
        if (!res.length) {
            return null;
        }
        let fieldName = Object.keys(res[0])[0];
        if (res.length > 1) {
            let logger = (options && options.logger || this.getLogger(false));
            let error = Error('More then one field exists!');
            pgUtils.logError(logger, { error, sql, params, connection: this.db.connection });
            throw error;
        }
        return res.length == 1 ? res[0][fieldName] : null;
    }

    /** @return one column for the matching records */
    async queryOneColumn(sql: string, params?: any[] | {}, options?: SqlQueryOptions): Promise<any[]> {
        let res = await this.query(sql, params, options);
        if (!res.length) {
            return [];
        }
        let fieldName = Object.keys(res[0])[0];
        return res.map(r => r[fieldName]);
    }

    private postProcessFields(rows: any[], fields: ResultFieldType[], logger) {
        pgUtils.postProcessResult(rows, fields, this.db.pgdbTypeParsers);
        if (this.db.postProcessResult) this.db.postProcessResult(rows, fields, logger);
    }

    private async checkAndFixOids(connection, fields: ResultFieldType[]) {
        if (fields) {
            let oidList = fields.map(field => field.dataTypeID);
            return this.db.resetMissingParsers(connection, oidList);
        }
    }

    private hasUnknownOids(fields: ResultFieldType[]): boolean {
        let oidList = fields.map(field => field.dataTypeID);
        let unknownOids = oidList.filter(oid => !this.db.knownOids[oid]);
        return !!unknownOids.length;
    }
}
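
Note (illustration, not part of the package diff): a minimal sketch of how the QueryAble API above is typically used through PgDb; the connection string and the users table are assumptions made up for the example.

    import { PgDb } from "pogi";

    async function example() {
        // PgDb extends QueryAble, so the query helpers shown above are available on it.
        const pgdb = await PgDb.connect({ connectionString: 'postgres://localhost/test' });

        // Positional parameters: $1, $2, ...
        const rows = await pgdb.query('SELECT * FROM users WHERE age > $1', [30]);

        // Named parameters (:age) and DDL names (:!table), resolved by pgUtils.processNamedParams
        const sameRows = await pgdb.query('SELECT * FROM :!table WHERE age > :age', { table: 'users', age: 30 });

        // Convenience variants built on query()
        const one = await pgdb.queryOne('SELECT * FROM users WHERE id = $1', [1]);          // throws if more than one row
        const name = await pgdb.queryOneField('SELECT name FROM users WHERE id = $1', [1]); // single field of a single row
        const names = await pgdb.queryOneColumn('SELECT name FROM users');                  // one column for all rows

        // Row-mode result for queries with colliding column names
        const { columns, rows: rawRows } = await pgdb.queryAsRows('SELECT 1 AS a, 2 AS a');

        console.log(rows.length, sameRows.length, one, name, names, columns, rawRows);
    }

    example();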