@clickhouse/client 0.3.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +2 -2
- package/README.md +14 -0
- package/dist/client.d.ts +11 -26
- package/dist/client.js +11 -53
- package/dist/client.js.map +1 -1
- package/dist/config.d.ts +52 -0
- package/dist/config.js +80 -0
- package/dist/config.js.map +1 -0
- package/dist/connection/compression.d.ts +4 -3
- package/dist/connection/compression.js +7 -5
- package/dist/connection/compression.js.map +1 -1
- package/dist/connection/create_connection.d.ts +16 -0
- package/dist/connection/create_connection.js +42 -0
- package/dist/connection/create_connection.js.map +1 -0
- package/dist/connection/index.d.ts +1 -0
- package/dist/connection/index.js +1 -0
- package/dist/connection/index.js.map +1 -1
- package/dist/connection/node_base_connection.d.ts +23 -12
- package/dist/connection/node_base_connection.js +507 -227
- package/dist/connection/node_base_connection.js.map +1 -1
- package/dist/connection/node_custom_agent_connection.d.ts +8 -0
- package/dist/connection/node_custom_agent_connection.js +47 -0
- package/dist/connection/node_custom_agent_connection.js.map +1 -0
- package/dist/connection/node_http_connection.d.ts +1 -5
- package/dist/connection/node_http_connection.js +6 -5
- package/dist/connection/node_http_connection.js.map +1 -1
- package/dist/connection/node_https_connection.d.ts +3 -6
- package/dist/connection/node_https_connection.js +39 -19
- package/dist/connection/node_https_connection.js.map +1 -1
- package/dist/connection/stream.d.ts +5 -3
- package/dist/connection/stream.js +69 -5
- package/dist/connection/stream.js.map +1 -1
- package/dist/index.d.ts +6 -3
- package/dist/index.js +13 -4
- package/dist/index.js.map +1 -1
- package/dist/result_set.d.ts +41 -6
- package/dist/result_set.js +127 -52
- package/dist/result_set.js.map +1 -1
- package/dist/utils/encoder.d.ts +3 -2
- package/dist/utils/encoder.js +16 -3
- package/dist/utils/encoder.js.map +1 -1
- package/dist/utils/index.js.map +1 -1
- package/dist/utils/process.js +1 -2
- package/dist/utils/process.js.map +1 -1
- package/dist/utils/runtime.d.ts +6 -0
- package/dist/utils/runtime.js +65 -0
- package/dist/utils/runtime.js.map +1 -0
- package/dist/utils/stream.d.ts +1 -2
- package/dist/utils/stream.js +23 -9
- package/dist/utils/stream.js.map +1 -1
- package/dist/utils/user_agent.d.ts +4 -0
- package/dist/utils/user_agent.js +7 -31
- package/dist/utils/user_agent.js.map +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.js +1 -1
- package/dist/version.js.map +1 -1
- package/package.json +10 -3
|
@@ -25,7 +25,19 @@ class NodeBaseConnection {
|
|
|
25
25
|
writable: true,
|
|
26
26
|
value: agent
|
|
27
27
|
});
|
|
28
|
-
Object.defineProperty(this, "
|
|
28
|
+
Object.defineProperty(this, "defaultAuthHeader", {
|
|
29
|
+
enumerable: true,
|
|
30
|
+
configurable: true,
|
|
31
|
+
writable: true,
|
|
32
|
+
value: void 0
|
|
33
|
+
});
|
|
34
|
+
Object.defineProperty(this, "defaultHeaders", {
|
|
35
|
+
enumerable: true,
|
|
36
|
+
configurable: true,
|
|
37
|
+
writable: true,
|
|
38
|
+
value: void 0
|
|
39
|
+
});
|
|
40
|
+
Object.defineProperty(this, "jsonHandling", {
|
|
29
41
|
enumerable: true,
|
|
30
42
|
configurable: true,
|
|
31
43
|
writable: true,
|
|
@@ -49,225 +61,106 @@ class NodeBaseConnection {
|
|
|
49
61
|
writable: true,
|
|
50
62
|
value: void 0
|
|
51
63
|
});
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
64
|
+
if (params.auth.type === 'Credentials') {
|
|
65
|
+
this.defaultAuthHeader = `Basic ${Buffer.from(`${params.auth.username}:${params.auth.password}`).toString('base64')}`;
|
|
66
|
+
}
|
|
67
|
+
else if (params.auth.type === 'JWT') {
|
|
68
|
+
this.defaultAuthHeader = `Bearer ${params.auth.access_token}`;
|
|
69
|
+
}
|
|
70
|
+
else {
|
|
71
|
+
throw new Error(`Unknown auth type: ${params.auth.type}`);
|
|
72
|
+
}
|
|
73
|
+
this.defaultHeaders = {
|
|
74
|
+
// Node.js HTTP agent, for some reason, does not set this on its own when KeepAlive is enabled
|
|
59
75
|
Connection: this.params.keep_alive.enabled ? 'keep-alive' : 'close',
|
|
60
|
-
Authorization: `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`,
|
|
61
76
|
'User-Agent': (0, utils_1.getUserAgent)(this.params.application_id),
|
|
62
|
-
|
|
77
|
+
};
|
|
78
|
+
this.logger = params.log_writer;
|
|
79
|
+
this.idleSocketTTL = params.keep_alive.idle_socket_ttl;
|
|
80
|
+
this.jsonHandling = params.json ?? {
|
|
81
|
+
parse: JSON.parse,
|
|
82
|
+
stringify: JSON.stringify,
|
|
63
83
|
};
|
|
64
84
|
}
|
|
65
|
-
async
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
const decompressionResult = (0, compression_1.decompressResponse)(_response);
|
|
76
|
-
if ((0, compression_1.isDecompressionError)(decompressionResult)) {
|
|
77
|
-
return reject(decompressionResult.error);
|
|
78
|
-
}
|
|
79
|
-
if ((0, client_common_1.isSuccessfulResponse)(_response.statusCode)) {
|
|
80
|
-
return resolve({
|
|
81
|
-
stream: decompressionResult.response,
|
|
82
|
-
summary: params.parse_summary
|
|
83
|
-
? this.parseSummary(op, _response)
|
|
84
|
-
: undefined,
|
|
85
|
-
});
|
|
86
|
-
}
|
|
87
|
-
else {
|
|
88
|
-
reject((0, client_common_1.parseError)(await (0, utils_1.getAsText)(decompressionResult.response)));
|
|
89
|
-
}
|
|
90
|
-
};
|
|
91
|
-
function onAbort() {
|
|
92
|
-
// Prefer 'abort' event since it always triggered unlike 'error' and 'close'
|
|
93
|
-
// see the full sequence of events https://nodejs.org/api/http.html#httprequesturl-options-callback
|
|
94
|
-
removeRequestListeners();
|
|
95
|
-
request.once('error', function () {
|
|
96
|
-
/**
|
|
97
|
-
* catch "Error: ECONNRESET" error which shouldn't be reported to users.
|
|
98
|
-
* see the full sequence of events https://nodejs.org/api/http.html#httprequesturl-options-callback
|
|
99
|
-
* */
|
|
85
|
+
async ping(params) {
|
|
86
|
+
const query_id = this.getQueryId(params.query_id);
|
|
87
|
+
const { controller, controllerCleanup } = this.getAbortController(params);
|
|
88
|
+
let result;
|
|
89
|
+
try {
|
|
90
|
+
if (params.select) {
|
|
91
|
+
const searchParams = (0, client_common_1.toSearchParams)({
|
|
92
|
+
database: undefined,
|
|
93
|
+
query: PingQuery,
|
|
94
|
+
query_id,
|
|
100
95
|
});
|
|
101
|
-
|
|
96
|
+
result = await this.request({
|
|
97
|
+
method: 'GET',
|
|
98
|
+
url: (0, client_common_1.transformUrl)({ url: this.params.url, searchParams }),
|
|
99
|
+
query: PingQuery,
|
|
100
|
+
abort_signal: controller.signal,
|
|
101
|
+
headers: this.buildRequestHeaders(),
|
|
102
|
+
}, 'Ping');
|
|
102
103
|
}
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
104
|
+
else {
|
|
105
|
+
result = await this.request({
|
|
106
|
+
method: 'GET',
|
|
107
|
+
url: (0, client_common_1.transformUrl)({ url: this.params.url, pathname: '/ping' }),
|
|
108
|
+
abort_signal: controller.signal,
|
|
109
|
+
headers: this.buildRequestHeaders(),
|
|
110
|
+
query: 'ping',
|
|
111
|
+
}, 'Ping');
|
|
108
112
|
}
|
|
109
|
-
|
|
110
|
-
// if request.end() was called due to no data to send
|
|
111
|
-
if (request.writableEnded) {
|
|
112
|
-
return;
|
|
113
|
-
}
|
|
114
|
-
const bodyStream = (0, utils_1.isStream)(params.body)
|
|
115
|
-
? params.body
|
|
116
|
-
: stream_1.default.Readable.from([params.body]);
|
|
117
|
-
const callback = (err) => {
|
|
118
|
-
if (err) {
|
|
119
|
-
removeRequestListeners();
|
|
120
|
-
reject(err);
|
|
121
|
-
}
|
|
122
|
-
};
|
|
123
|
-
if (params.compress_request) {
|
|
124
|
-
stream_1.default.pipeline(bodyStream, zlib_1.default.createGzip(), request, callback);
|
|
125
|
-
}
|
|
126
|
-
else {
|
|
127
|
-
stream_1.default.pipeline(bodyStream, request, callback);
|
|
128
|
-
}
|
|
129
|
-
}
|
|
130
|
-
const onSocket = (socket) => {
|
|
131
|
-
if (this.params.keep_alive.enabled) {
|
|
132
|
-
const socketInfo = this.knownSockets.get(socket);
|
|
133
|
-
// It is the first time we encounter this socket,
|
|
134
|
-
// so it doesn't have the idle timeout handler attached to it
|
|
135
|
-
if (socketInfo === undefined) {
|
|
136
|
-
const socketId = crypto_1.default.randomUUID();
|
|
137
|
-
this.logger.trace({
|
|
138
|
-
message: `Using a fresh socket ${socketId}, setting up a new 'free' listener`,
|
|
139
|
-
});
|
|
140
|
-
this.knownSockets.set(socket, {
|
|
141
|
-
id: socketId,
|
|
142
|
-
idle_timeout_handle: undefined,
|
|
143
|
-
});
|
|
144
|
-
// When the request is complete and the socket is released,
|
|
145
|
-
// make sure that the socket is removed after `idleSocketTTL`.
|
|
146
|
-
socket.on('free', () => {
|
|
147
|
-
this.logger.trace({
|
|
148
|
-
message: `Socket ${socketId} was released`,
|
|
149
|
-
});
|
|
150
|
-
// Avoiding the built-in socket.timeout() method usage here,
|
|
151
|
-
// as we don't want to clash with the actual request timeout.
|
|
152
|
-
const idleTimeoutHandle = setTimeout(() => {
|
|
153
|
-
this.logger.trace({
|
|
154
|
-
message: `Removing socket ${socketId} after ${this.idleSocketTTL} ms of idle`,
|
|
155
|
-
});
|
|
156
|
-
this.knownSockets.delete(socket);
|
|
157
|
-
socket.destroy();
|
|
158
|
-
}, this.idleSocketTTL).unref();
|
|
159
|
-
this.knownSockets.set(socket, {
|
|
160
|
-
id: socketId,
|
|
161
|
-
idle_timeout_handle: idleTimeoutHandle,
|
|
162
|
-
});
|
|
163
|
-
});
|
|
164
|
-
const cleanup = () => {
|
|
165
|
-
const maybeSocketInfo = this.knownSockets.get(socket);
|
|
166
|
-
// clean up a possibly dangling idle timeout handle (preventing leaks)
|
|
167
|
-
if (maybeSocketInfo?.idle_timeout_handle) {
|
|
168
|
-
clearTimeout(maybeSocketInfo.idle_timeout_handle);
|
|
169
|
-
}
|
|
170
|
-
this.logger.trace({
|
|
171
|
-
message: `Socket ${socketId} was closed or ended, 'free' listener removed`,
|
|
172
|
-
});
|
|
173
|
-
};
|
|
174
|
-
socket.once('end', cleanup);
|
|
175
|
-
socket.once('close', cleanup);
|
|
176
|
-
}
|
|
177
|
-
else {
|
|
178
|
-
clearTimeout(socketInfo.idle_timeout_handle);
|
|
179
|
-
this.logger.trace({
|
|
180
|
-
message: `Reusing socket ${socketInfo.id}`,
|
|
181
|
-
});
|
|
182
|
-
this.knownSockets.set(socket, {
|
|
183
|
-
...socketInfo,
|
|
184
|
-
idle_timeout_handle: undefined,
|
|
185
|
-
});
|
|
186
|
-
}
|
|
187
|
-
}
|
|
188
|
-
// Socket is "prepared" with idle handlers, continue with our request
|
|
189
|
-
pipeStream();
|
|
190
|
-
// This is for request timeout only. Surprisingly, it is not always enough to set in the HTTP request.
|
|
191
|
-
// The socket won't be actually destroyed, and it will be returned to the pool.
|
|
192
|
-
socket.setTimeout(this.params.request_timeout, onTimeout);
|
|
193
|
-
};
|
|
194
|
-
function onTimeout() {
|
|
195
|
-
removeRequestListeners();
|
|
196
|
-
request.destroy();
|
|
197
|
-
reject(new Error('Timeout error.'));
|
|
198
|
-
}
|
|
199
|
-
function removeRequestListeners() {
|
|
200
|
-
if (request.socket !== null) {
|
|
201
|
-
request.socket.setTimeout(0); // reset previously set timeout
|
|
202
|
-
request.socket.removeListener('timeout', onTimeout);
|
|
203
|
-
}
|
|
204
|
-
request.removeListener('socket', onSocket);
|
|
205
|
-
request.removeListener('response', onResponse);
|
|
206
|
-
request.removeListener('error', onError);
|
|
207
|
-
request.removeListener('close', onClose);
|
|
208
|
-
if (params.abort_signal !== undefined) {
|
|
209
|
-
request.removeListener('abort', onAbort);
|
|
210
|
-
}
|
|
211
|
-
}
|
|
212
|
-
request.on('socket', onSocket);
|
|
213
|
-
request.on('response', onResponse);
|
|
214
|
-
request.on('error', onError);
|
|
215
|
-
request.on('close', onClose);
|
|
216
|
-
if (params.abort_signal !== undefined) {
|
|
217
|
-
params.abort_signal.addEventListener('abort', onAbort, { once: true });
|
|
218
|
-
}
|
|
219
|
-
if (!params.body)
|
|
220
|
-
return request.end();
|
|
221
|
-
});
|
|
222
|
-
}
|
|
223
|
-
async ping() {
|
|
224
|
-
const abortController = new AbortController();
|
|
225
|
-
try {
|
|
226
|
-
const { stream } = await this.request({
|
|
227
|
-
method: 'GET',
|
|
228
|
-
url: (0, client_common_1.transformUrl)({ url: this.params.url, pathname: '/ping' }),
|
|
229
|
-
abort_signal: abortController.signal,
|
|
230
|
-
}, 'Ping');
|
|
231
|
-
await (0, stream_2.drainStream)(stream);
|
|
113
|
+
await (0, stream_2.drainStream)(result.stream);
|
|
232
114
|
return { success: true };
|
|
233
115
|
}
|
|
234
116
|
catch (error) {
|
|
235
117
|
// it is used to ensure that the outgoing request is terminated,
|
|
236
|
-
// and we don't get
|
|
237
|
-
|
|
118
|
+
// and we don't get unhandled error propagation later
|
|
119
|
+
controller.abort('Ping failed');
|
|
238
120
|
// not an error, as this might be semi-expected
|
|
239
121
|
this.logger.warn({
|
|
240
122
|
message: this.httpRequestErrorMessage('Ping'),
|
|
241
123
|
err: error,
|
|
124
|
+
args: {
|
|
125
|
+
query_id,
|
|
126
|
+
},
|
|
242
127
|
});
|
|
243
128
|
return {
|
|
244
129
|
success: false,
|
|
245
130
|
error: error, // should NOT be propagated to the user
|
|
246
131
|
};
|
|
247
132
|
}
|
|
133
|
+
finally {
|
|
134
|
+
controllerCleanup();
|
|
135
|
+
}
|
|
248
136
|
}
|
|
249
137
|
async query(params) {
|
|
250
138
|
const query_id = this.getQueryId(params.query_id);
|
|
251
139
|
const clickhouse_settings = (0, client_common_1.withHttpSettings)(params.clickhouse_settings, this.params.compression.decompress_response);
|
|
252
140
|
const searchParams = (0, client_common_1.toSearchParams)({
|
|
253
141
|
database: this.params.database,
|
|
254
|
-
clickhouse_settings,
|
|
255
142
|
query_params: params.query_params,
|
|
256
143
|
session_id: params.session_id,
|
|
144
|
+
clickhouse_settings,
|
|
257
145
|
query_id,
|
|
146
|
+
role: params.role,
|
|
258
147
|
});
|
|
259
|
-
const decompressResponse = clickhouse_settings.enable_http_compression === 1;
|
|
260
148
|
const { controller, controllerCleanup } = this.getAbortController(params);
|
|
149
|
+
// allows enforcing the compression via the settings even if the client instance has it disabled
|
|
150
|
+
const enableResponseCompression = clickhouse_settings.enable_http_compression === 1;
|
|
261
151
|
try {
|
|
262
|
-
const { stream } = await this.request({
|
|
152
|
+
const { response_headers, stream } = await this.request({
|
|
263
153
|
method: 'POST',
|
|
264
154
|
url: (0, client_common_1.transformUrl)({ url: this.params.url, searchParams }),
|
|
265
155
|
body: params.query,
|
|
266
156
|
abort_signal: controller.signal,
|
|
267
|
-
|
|
157
|
+
enable_response_compression: enableResponseCompression,
|
|
158
|
+
headers: this.buildRequestHeaders(params),
|
|
159
|
+
query: params.query,
|
|
268
160
|
}, 'Query');
|
|
269
161
|
return {
|
|
270
162
|
stream,
|
|
163
|
+
response_headers,
|
|
271
164
|
query_id,
|
|
272
165
|
};
|
|
273
166
|
}
|
|
@@ -280,7 +173,7 @@ class NodeBaseConnection {
|
|
|
280
173
|
search_params: searchParams,
|
|
281
174
|
err: err,
|
|
282
175
|
extra_args: {
|
|
283
|
-
decompress_response:
|
|
176
|
+
decompress_response: enableResponseCompression,
|
|
284
177
|
clickhouse_settings,
|
|
285
178
|
},
|
|
286
179
|
});
|
|
@@ -290,48 +183,6 @@ class NodeBaseConnection {
|
|
|
290
183
|
controllerCleanup();
|
|
291
184
|
}
|
|
292
185
|
}
|
|
293
|
-
async exec(params) {
|
|
294
|
-
const query_id = this.getQueryId(params.query_id);
|
|
295
|
-
const searchParams = (0, client_common_1.toSearchParams)({
|
|
296
|
-
database: this.params.database,
|
|
297
|
-
clickhouse_settings: params.clickhouse_settings,
|
|
298
|
-
query_params: params.query_params,
|
|
299
|
-
session_id: params.session_id,
|
|
300
|
-
query_id,
|
|
301
|
-
});
|
|
302
|
-
const { controller, controllerCleanup } = this.getAbortController(params);
|
|
303
|
-
try {
|
|
304
|
-
const { stream, summary } = await this.request({
|
|
305
|
-
method: 'POST',
|
|
306
|
-
url: (0, client_common_1.transformUrl)({ url: this.params.url, searchParams }),
|
|
307
|
-
body: params.query,
|
|
308
|
-
abort_signal: controller.signal,
|
|
309
|
-
parse_summary: true,
|
|
310
|
-
}, 'Exec');
|
|
311
|
-
return {
|
|
312
|
-
stream,
|
|
313
|
-
query_id,
|
|
314
|
-
summary,
|
|
315
|
-
};
|
|
316
|
-
}
|
|
317
|
-
catch (err) {
|
|
318
|
-
controller.abort('Exec HTTP request failed');
|
|
319
|
-
this.logRequestError({
|
|
320
|
-
op: 'Exec',
|
|
321
|
-
query_id: query_id,
|
|
322
|
-
query_params: params,
|
|
323
|
-
search_params: searchParams,
|
|
324
|
-
err: err,
|
|
325
|
-
extra_args: {
|
|
326
|
-
clickhouse_settings: params.clickhouse_settings ?? {},
|
|
327
|
-
},
|
|
328
|
-
});
|
|
329
|
-
throw err; // should be propagated to the user
|
|
330
|
-
}
|
|
331
|
-
finally {
|
|
332
|
-
controllerCleanup();
|
|
333
|
-
}
|
|
334
|
-
}
|
|
335
186
|
async insert(params) {
|
|
336
187
|
const query_id = this.getQueryId(params.query_id);
|
|
337
188
|
const searchParams = (0, client_common_1.toSearchParams)({
|
|
@@ -340,20 +191,23 @@ class NodeBaseConnection {
|
|
|
340
191
|
query_params: params.query_params,
|
|
341
192
|
query: params.query,
|
|
342
193
|
session_id: params.session_id,
|
|
194
|
+
role: params.role,
|
|
343
195
|
query_id,
|
|
344
196
|
});
|
|
345
197
|
const { controller, controllerCleanup } = this.getAbortController(params);
|
|
346
198
|
try {
|
|
347
|
-
const { stream, summary } = await this.request({
|
|
199
|
+
const { stream, summary, response_headers } = await this.request({
|
|
348
200
|
method: 'POST',
|
|
349
201
|
url: (0, client_common_1.transformUrl)({ url: this.params.url, searchParams }),
|
|
350
202
|
body: params.values,
|
|
351
203
|
abort_signal: controller.signal,
|
|
352
|
-
|
|
204
|
+
enable_request_compression: this.params.compression.compress_request,
|
|
353
205
|
parse_summary: true,
|
|
206
|
+
headers: this.buildRequestHeaders(params),
|
|
207
|
+
query: params.query,
|
|
354
208
|
}, 'Insert');
|
|
355
209
|
await (0, stream_2.drainStream)(stream);
|
|
356
|
-
return { query_id, summary };
|
|
210
|
+
return { query_id, summary, response_headers };
|
|
357
211
|
}
|
|
358
212
|
catch (err) {
|
|
359
213
|
controller.abort('Insert HTTP request failed');
|
|
@@ -373,11 +227,100 @@ class NodeBaseConnection {
|
|
|
373
227
|
controllerCleanup();
|
|
374
228
|
}
|
|
375
229
|
}
|
|
230
|
+
async exec(params) {
|
|
231
|
+
const query_id = this.getQueryId(params.query_id);
|
|
232
|
+
return this.runExec({
|
|
233
|
+
...params,
|
|
234
|
+
query_id,
|
|
235
|
+
op: 'Exec',
|
|
236
|
+
});
|
|
237
|
+
}
|
|
238
|
+
async command(params) {
|
|
239
|
+
const query_id = this.getQueryId(params.query_id);
|
|
240
|
+
const commandStartTime = Date.now();
|
|
241
|
+
this.logger.trace({
|
|
242
|
+
message: 'Command: operation started',
|
|
243
|
+
args: {
|
|
244
|
+
query: params.query,
|
|
245
|
+
query_id,
|
|
246
|
+
},
|
|
247
|
+
});
|
|
248
|
+
const { stream, summary, response_headers } = await this.runExec({
|
|
249
|
+
...params,
|
|
250
|
+
query_id,
|
|
251
|
+
op: 'Command',
|
|
252
|
+
});
|
|
253
|
+
const runExecDuration = Date.now() - commandStartTime;
|
|
254
|
+
this.logger.trace({
|
|
255
|
+
message: 'Command: runExec completed, starting stream drain',
|
|
256
|
+
args: {
|
|
257
|
+
query_id,
|
|
258
|
+
runExec_duration_ms: runExecDuration,
|
|
259
|
+
stream_state: {
|
|
260
|
+
readable: stream.readable,
|
|
261
|
+
readableEnded: stream.readableEnded,
|
|
262
|
+
readableLength: stream.readableLength,
|
|
263
|
+
},
|
|
264
|
+
},
|
|
265
|
+
});
|
|
266
|
+
// ignore the response stream and release the socket immediately
|
|
267
|
+
const drainStartTime = Date.now();
|
|
268
|
+
await (0, stream_2.drainStream)(stream, this.logger, query_id);
|
|
269
|
+
const drainDuration = Date.now() - drainStartTime;
|
|
270
|
+
const totalDuration = Date.now() - commandStartTime;
|
|
271
|
+
this.logger.trace({
|
|
272
|
+
message: 'Command: operation completed',
|
|
273
|
+
args: {
|
|
274
|
+
query_id,
|
|
275
|
+
drain_duration_ms: drainDuration,
|
|
276
|
+
total_duration_ms: totalDuration,
|
|
277
|
+
},
|
|
278
|
+
});
|
|
279
|
+
return { query_id, summary, response_headers };
|
|
280
|
+
}
|
|
376
281
|
async close() {
|
|
377
282
|
if (this.agent !== undefined && this.agent.destroy !== undefined) {
|
|
378
283
|
this.agent.destroy();
|
|
379
284
|
}
|
|
380
285
|
}
|
|
286
|
+
defaultHeadersWithOverride(params) {
|
|
287
|
+
return {
|
|
288
|
+
// Custom HTTP headers from the client configuration
|
|
289
|
+
...(this.params.http_headers ?? {}),
|
|
290
|
+
// Custom HTTP headers for this particular request; it will override the client configuration with the same keys
|
|
291
|
+
...(params?.http_headers ?? {}),
|
|
292
|
+
// Includes the `Connection` + `User-Agent` headers which we do not allow to override
|
|
293
|
+
// An appropriate `Authorization` header might be added later
|
|
294
|
+
// It is not always required - see the TLS headers in `node_https_connection.ts`
|
|
295
|
+
...this.defaultHeaders,
|
|
296
|
+
};
|
|
297
|
+
}
|
|
298
|
+
buildRequestHeaders(params) {
|
|
299
|
+
const headers = this.defaultHeadersWithOverride(params);
|
|
300
|
+
if ((0, client_common_1.isJWTAuth)(params?.auth)) {
|
|
301
|
+
return {
|
|
302
|
+
...headers,
|
|
303
|
+
Authorization: `Bearer ${params.auth.access_token}`,
|
|
304
|
+
};
|
|
305
|
+
}
|
|
306
|
+
if (this.params.set_basic_auth_header) {
|
|
307
|
+
if ((0, client_common_1.isCredentialsAuth)(params?.auth)) {
|
|
308
|
+
return {
|
|
309
|
+
...headers,
|
|
310
|
+
Authorization: `Basic ${Buffer.from(`${params.auth.username}:${params.auth.password}`).toString('base64')}`,
|
|
311
|
+
};
|
|
312
|
+
}
|
|
313
|
+
else {
|
|
314
|
+
return {
|
|
315
|
+
...headers,
|
|
316
|
+
Authorization: this.defaultAuthHeader,
|
|
317
|
+
};
|
|
318
|
+
}
|
|
319
|
+
}
|
|
320
|
+
return {
|
|
321
|
+
...headers,
|
|
322
|
+
};
|
|
323
|
+
}
|
|
381
324
|
getQueryId(query_id) {
|
|
382
325
|
return query_id || crypto_1.default.randomUUID();
|
|
383
326
|
}
|
|
@@ -434,7 +377,7 @@ class NodeBaseConnection {
|
|
|
434
377
|
const summaryHeader = response.headers['x-clickhouse-summary'];
|
|
435
378
|
if (typeof summaryHeader === 'string') {
|
|
436
379
|
try {
|
|
437
|
-
return
|
|
380
|
+
return this.jsonHandling.parse(summaryHeader);
|
|
438
381
|
}
|
|
439
382
|
catch (err) {
|
|
440
383
|
this.logger.error({
|
|
@@ -447,6 +390,343 @@ class NodeBaseConnection {
|
|
|
447
390
|
}
|
|
448
391
|
}
|
|
449
392
|
}
|
|
393
|
+
async runExec(params) {
|
|
394
|
+
const query_id = params.query_id;
|
|
395
|
+
const sendQueryInParams = params.values !== undefined;
|
|
396
|
+
const clickhouse_settings = (0, client_common_1.withHttpSettings)(params.clickhouse_settings, this.params.compression.decompress_response);
|
|
397
|
+
const toSearchParamsOptions = {
|
|
398
|
+
query: sendQueryInParams ? params.query : undefined,
|
|
399
|
+
database: this.params.database,
|
|
400
|
+
query_params: params.query_params,
|
|
401
|
+
session_id: params.session_id,
|
|
402
|
+
role: params.role,
|
|
403
|
+
clickhouse_settings,
|
|
404
|
+
query_id,
|
|
405
|
+
};
|
|
406
|
+
const searchParams = (0, client_common_1.toSearchParams)(toSearchParamsOptions);
|
|
407
|
+
const { controller, controllerCleanup } = this.getAbortController(params);
|
|
408
|
+
const tryDecompressResponseStream = params.op === 'Exec'
|
|
409
|
+
? // allows disabling stream decompression for the `Exec` operation only
|
|
410
|
+
(params.decompress_response_stream ??
|
|
411
|
+
this.params.compression.decompress_response)
|
|
412
|
+
: // there is nothing useful in the response stream for the `Command` operation,
|
|
413
|
+
// and it is immediately destroyed; never decompress it
|
|
414
|
+
false;
|
|
415
|
+
const ignoreErrorResponse = params.ignore_error_response ?? false;
|
|
416
|
+
try {
|
|
417
|
+
const { stream, summary, response_headers } = await this.request({
|
|
418
|
+
method: 'POST',
|
|
419
|
+
url: (0, client_common_1.transformUrl)({ url: this.params.url, searchParams }),
|
|
420
|
+
body: sendQueryInParams ? params.values : params.query,
|
|
421
|
+
abort_signal: controller.signal,
|
|
422
|
+
parse_summary: true,
|
|
423
|
+
enable_request_compression: this.params.compression.compress_request,
|
|
424
|
+
enable_response_compression: this.params.compression.decompress_response,
|
|
425
|
+
try_decompress_response_stream: tryDecompressResponseStream,
|
|
426
|
+
ignore_error_response: ignoreErrorResponse,
|
|
427
|
+
headers: this.buildRequestHeaders(params),
|
|
428
|
+
query: params.query,
|
|
429
|
+
}, params.op);
|
|
430
|
+
return {
|
|
431
|
+
stream,
|
|
432
|
+
query_id,
|
|
433
|
+
summary,
|
|
434
|
+
response_headers,
|
|
435
|
+
};
|
|
436
|
+
}
|
|
437
|
+
catch (err) {
|
|
438
|
+
controller.abort(`${params.op} HTTP request failed`);
|
|
439
|
+
this.logRequestError({
|
|
440
|
+
op: params.op,
|
|
441
|
+
query_id: query_id,
|
|
442
|
+
query_params: params,
|
|
443
|
+
search_params: searchParams,
|
|
444
|
+
err: err,
|
|
445
|
+
extra_args: {
|
|
446
|
+
clickhouse_settings: params.clickhouse_settings ?? {},
|
|
447
|
+
},
|
|
448
|
+
});
|
|
449
|
+
throw err; // should be propagated to the user
|
|
450
|
+
}
|
|
451
|
+
finally {
|
|
452
|
+
controllerCleanup();
|
|
453
|
+
}
|
|
454
|
+
}
|
|
455
|
+
async request(params, op) {
|
|
456
|
+
// allows the event loop to process the idle socket timers, if the CPU load is high
|
|
457
|
+
// otherwise, we can occasionally get an expired socket, see https://github.com/ClickHouse/clickhouse-js/issues/294
|
|
458
|
+
await (0, client_common_1.sleep)(0);
|
|
459
|
+
const currentStackTrace = this.params.capture_enhanced_stack_trace
|
|
460
|
+
? (0, client_common_1.getCurrentStackTrace)()
|
|
461
|
+
: undefined;
|
|
462
|
+
const logger = this.logger;
|
|
463
|
+
const requestTimeout = this.params.request_timeout;
|
|
464
|
+
return new Promise((resolve, reject) => {
|
|
465
|
+
const start = Date.now();
|
|
466
|
+
const request = this.createClientRequest(params);
|
|
467
|
+
function onError(e) {
|
|
468
|
+
removeRequestListeners();
|
|
469
|
+
const err = (0, client_common_1.enhanceStackTrace)(e, currentStackTrace);
|
|
470
|
+
reject(err);
|
|
471
|
+
}
|
|
472
|
+
let responseStream;
|
|
473
|
+
const onResponse = async (_response) => {
|
|
474
|
+
this.logResponse(op, request, params, _response, start);
|
|
475
|
+
const query_id = params.url.searchParams.get('query_id') ?? 'unknown';
|
|
476
|
+
const tryDecompressResponseStream = params.try_decompress_response_stream ?? true;
|
|
477
|
+
const ignoreErrorResponse = params.ignore_error_response ?? false;
|
|
478
|
+
// even if the stream decompression is disabled, we have to decompress it in case of an error
|
|
479
|
+
const isFailedResponse = !(0, client_common_1.isSuccessfulResponse)(_response.statusCode);
|
|
480
|
+
if (tryDecompressResponseStream ||
|
|
481
|
+
(isFailedResponse && !ignoreErrorResponse)) {
|
|
482
|
+
const decompressionResult = (0, compression_1.decompressResponse)(_response, this.logger);
|
|
483
|
+
if ((0, compression_1.isDecompressionError)(decompressionResult)) {
|
|
484
|
+
const err = (0, client_common_1.enhanceStackTrace)(decompressionResult.error, currentStackTrace);
|
|
485
|
+
return reject(err);
|
|
486
|
+
}
|
|
487
|
+
responseStream = decompressionResult.response;
|
|
488
|
+
}
|
|
489
|
+
else {
|
|
490
|
+
responseStream = _response;
|
|
491
|
+
}
|
|
492
|
+
logger.trace({
|
|
493
|
+
message: `${op}: response stream created`,
|
|
494
|
+
args: {
|
|
495
|
+
query_id,
|
|
496
|
+
operation: op,
|
|
497
|
+
stream_state: {
|
|
498
|
+
readable: responseStream.readable,
|
|
499
|
+
readableEnded: responseStream.readableEnded,
|
|
500
|
+
readableLength: responseStream.readableLength,
|
|
501
|
+
},
|
|
502
|
+
is_failed_response: isFailedResponse,
|
|
503
|
+
will_decompress: tryDecompressResponseStream,
|
|
504
|
+
},
|
|
505
|
+
});
|
|
506
|
+
if (isFailedResponse && !ignoreErrorResponse) {
|
|
507
|
+
try {
|
|
508
|
+
const errorMessage = await (0, utils_1.getAsText)(responseStream);
|
|
509
|
+
const err = (0, client_common_1.enhanceStackTrace)((0, client_common_1.parseError)(errorMessage), currentStackTrace);
|
|
510
|
+
reject(err);
|
|
511
|
+
}
|
|
512
|
+
catch (e) {
|
|
513
|
+
// If the ClickHouse response is malformed
|
|
514
|
+
const err = (0, client_common_1.enhanceStackTrace)(e, currentStackTrace);
|
|
515
|
+
reject(err);
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
else {
|
|
519
|
+
return resolve({
|
|
520
|
+
stream: responseStream,
|
|
521
|
+
summary: params.parse_summary
|
|
522
|
+
? this.parseSummary(op, _response)
|
|
523
|
+
: undefined,
|
|
524
|
+
response_headers: { ..._response.headers },
|
|
525
|
+
});
|
|
526
|
+
}
|
|
527
|
+
};
|
|
528
|
+
function onAbort() {
|
|
529
|
+
// Prefer 'abort' event since it always triggered unlike 'error' and 'close'
|
|
530
|
+
// see the full sequence of events https://nodejs.org/api/http.html#httprequesturl-options-callback
|
|
531
|
+
removeRequestListeners();
|
|
532
|
+
request.once('error', function () {
|
|
533
|
+
/**
|
|
534
|
+
* catch "Error: ECONNRESET" error which shouldn't be reported to users.
|
|
535
|
+
* see the full sequence of events https://nodejs.org/api/http.html#httprequesturl-options-callback
|
|
536
|
+
* */
|
|
537
|
+
});
|
|
538
|
+
const err = (0, client_common_1.enhanceStackTrace)(new Error('The user aborted a request.'), currentStackTrace);
|
|
539
|
+
reject(err);
|
|
540
|
+
}
|
|
541
|
+
function onClose() {
|
|
542
|
+
// Adapter uses 'close' event to clean up listeners after the successful response.
|
|
543
|
+
// It's necessary in order to handle 'abort' and 'timeout' events while response is streamed.
|
|
544
|
+
// It's always the last event, according to https://nodejs.org/docs/latest-v14.x/api/http.html#http_http_request_url_options_callback
|
|
545
|
+
removeRequestListeners();
|
|
546
|
+
}
|
|
547
|
+
function pipeStream() {
|
|
548
|
+
// if request.end() was called due to no data to send
|
|
549
|
+
if (request.writableEnded) {
|
|
550
|
+
return;
|
|
551
|
+
}
|
|
552
|
+
const bodyStream = (0, utils_1.isStream)(params.body)
|
|
553
|
+
? params.body
|
|
554
|
+
: stream_1.default.Readable.from([params.body]);
|
|
555
|
+
const callback = (e) => {
|
|
556
|
+
if (e) {
|
|
557
|
+
removeRequestListeners();
|
|
558
|
+
const err = (0, client_common_1.enhanceStackTrace)(e, currentStackTrace);
|
|
559
|
+
reject(err);
|
|
560
|
+
}
|
|
561
|
+
};
|
|
562
|
+
if (params.enable_request_compression) {
|
|
563
|
+
stream_1.default.pipeline(bodyStream, zlib_1.default.createGzip(), request, callback);
|
|
564
|
+
}
|
|
565
|
+
else {
|
|
566
|
+
stream_1.default.pipeline(bodyStream, request, callback);
|
|
567
|
+
}
|
|
568
|
+
}
|
|
569
|
+
// Runs once the request is assigned a socket. Performs idle-socket housekeeping
// for Keep-Alive connections, then starts streaming the body and arms the
// per-request timeout on the socket itself.
const onSocket = (socket) => {
    try {
        if (this.params.keep_alive.enabled &&
            this.params.keep_alive.idle_socket_ttl > 0) {
            // NOTE(review): knownSockets is keyed by the socket object itself -
            // presumably a (Weak)Map declared elsewhere in this class; confirm.
            const socketInfo = this.knownSockets.get(socket);
            // It is the first time we've encountered this socket,
            // so it doesn't have the idle timeout handler attached to it
            if (socketInfo === undefined) {
                const socketId = crypto_1.default.randomUUID();
                this.logger.trace({
                    message: `Using a fresh socket ${socketId}, setting up a new 'free' listener`,
                });
                this.knownSockets.set(socket, {
                    id: socketId,
                    idle_timeout_handle: undefined,
                });
                // When the request is complete and the socket is released,
                // make sure that the socket is removed after `idleSocketTTL`.
                socket.on('free', () => {
                    this.logger.trace({
                        message: `Socket ${socketId} was released`,
                    });
                    // Avoiding the built-in socket.timeout() method usage here,
                    // as we don't want to clash with the actual request timeout.
                    const idleTimeoutHandle = setTimeout(() => {
                        this.logger.trace({
                            message: `Removing socket ${socketId} after ${this.idleSocketTTL} ms of idle`,
                        });
                        this.knownSockets.delete(socket);
                        socket.destroy();
                    }, this.idleSocketTTL).unref(); // unref: do not keep the process alive for this timer
                    this.knownSockets.set(socket, {
                        id: socketId,
                        idle_timeout_handle: idleTimeoutHandle,
                    });
                });
                // Invoked when the socket ends or closes for any reason.
                const cleanup = () => {
                    const maybeSocketInfo = this.knownSockets.get(socket);
                    // clean up a possibly dangling idle timeout handle (preventing leaks)
                    if (maybeSocketInfo?.idle_timeout_handle) {
                        clearTimeout(maybeSocketInfo.idle_timeout_handle);
                    }
                    this.logger.trace({
                        message: `Socket ${socketId} was closed or ended, 'free' listener removed`,
                    });
                    // Warn the user: an unread response stream on a dying socket
                    // may surface as an uncaught ECONNRESET later.
                    if (responseStream && !responseStream.readableEnded) {
                        this.logger.warn({
                            message: `${op}: socket was closed or ended before the response was fully read. ` +
                                'This can potentially result in an uncaught ECONNRESET error! ' +
                                'Consider fully consuming, draining, or destroying the response stream.',
                            args: {
                                query: params.query,
                                query_id: params.url.searchParams.get('query_id') ?? 'unknown',
                            },
                        });
                    }
                };
                socket.once('end', cleanup);
                socket.once('close', cleanup);
            }
            else {
                // A previously seen socket is being reused for a new request:
                // cancel its pending idle teardown so it is not destroyed mid-request.
                clearTimeout(socketInfo.idle_timeout_handle);
                this.logger.trace({
                    message: `Reusing socket ${socketInfo.id}`,
                });
                this.knownSockets.set(socket, {
                    ...socketInfo,
                    idle_timeout_handle: undefined,
                });
            }
        }
    }
    catch (e) {
        // Housekeeping failures must not fail the request itself - log and continue.
        logger.error({
            message: 'An error occurred while housekeeping the idle sockets',
            err: e,
        });
    }
    // Socket is "prepared" with idle handlers, continue with our request
    pipeStream();
    // This is for request timeout only. Surprisingly, it is not always enough to set in the HTTP request.
    // The socket won't be destroyed, and it will be returned to the pool.
    socket.setTimeout(this.params.request_timeout, onTimeout);
};
|
|
653
|
+
// Fired by socket.setTimeout when the request exceeds its allotted time:
// logs a diagnostic snapshot, tears the request down, and rejects the promise.
function onTimeout() {
    const query_id = params.url.searchParams.get('query_id') ?? 'unknown';
    // Snapshot the socket state (if any) for the trace log.
    let socketState = undefined;
    if (request.socket) {
        socketState = {
            connecting: request.socket.connecting,
            pending: request.socket.pending,
            destroyed: request.socket.destroyed,
            readyState: request.socket.readyState,
        };
    }
    // Snapshot the response stream state (if a response was already received).
    let responseStreamState = undefined;
    if (responseStream) {
        responseStreamState = {
            readable: responseStream.readable,
            readableEnded: responseStream.readableEnded,
            readableLength: responseStream.readableLength,
        };
    }
    logger.trace({
        message: `${op}: timeout occurred`,
        args: {
            query_id,
            operation: op,
            timeout_ms: requestTimeout,
            socket_state: socketState,
            response_stream_state: responseStreamState,
            has_response_stream: responseStream !== undefined,
        },
    });
    const err = (0, client_common_1.enhanceStackTrace)(new Error('Timeout error.'), currentStackTrace);
    removeRequestListeners();
    try {
        request.destroy();
    }
    catch (destroyError) {
        // destroy() is not expected to throw; guard anyway so the rejection below still happens.
        logger.error({
            message: 'An error occurred while destroying the request',
            err: destroyError,
        });
    }
    reject(err);
}
|
|
694
|
+
// Detaches every listener this request attached, so a kept-alive socket
// can be safely handed back to the pool and reused by another request.
function removeRequestListeners() {
    const socket = request.socket;
    if (socket !== null) {
        socket.setTimeout(0); // reset previously set timeout
        socket.removeListener('timeout', onTimeout);
    }
    // Remove the request-level lifecycle handlers in one pass.
    for (const [event, listener] of [
        ['socket', onSocket],
        ['response', onResponse],
        ['error', onError],
        ['close', onClose],
    ]) {
        request.removeListener(event, listener);
    }
    if (params.abort_signal !== undefined) {
        request.removeListener('abort', onAbort);
    }
}
|
|
707
|
+
// Wire up the lifecycle handlers defined above before the request goes out.
request.on('socket', onSocket);
request.on('response', onResponse);
request.on('error', onError);
request.on('close', onClose);
if (params.abort_signal !== undefined) {
    // `once` guarantees the abort handler cannot fire more than once per request.
    params.abort_signal.addEventListener('abort', onAbort, {
        once: true,
    });
}
// No body to stream - finish the request immediately.
// NOTE(review): a falsy body such as '' also takes this path; presumably intentional - confirm.
if (!params.body) {
    try {
        return request.end();
    }
    catch (e) {
        // Best-effort: log instead of rejecting, so the 'error'/'close' handlers decide the outcome.
        this.logger.error({
            message: 'An error occurred while ending the request without body',
            err: e,
        });
    }
}
|
|
727
|
+
});
|
|
728
|
+
}
|
|
450
729
|
}
|
|
451
730
|
exports.NodeBaseConnection = NodeBaseConnection;
// Constant query text selecting a literal, so it touches no tables.
// NOTE(review): presumably used by the connection's ping() implementation - confirm against the class body.
const PingQuery = `SELECT 'ping'`;
|
|
452
732
|
//# sourceMappingURL=node_base_connection.js.map
|