@jsforce/jsforce-node 3.0.0-next.1 → 3.0.0-next.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/bulk.d.ts +38 -229
- package/lib/api/bulk.js +26 -720
- package/lib/api/bulk2.d.ts +324 -0
- package/lib/api/bulk2.js +800 -0
- package/lib/connection.d.ts +2 -1
- package/lib/http-api.js +38 -0
- package/lib/index.d.ts +1 -0
- package/lib/index.js +1 -0
- package/lib/request.js +73 -4
- package/lib/soap.js +11 -0
- package/lib/types/common.d.ts +6 -1
- package/lib/util/get-body-size.d.ts +4 -0
- package/lib/util/get-body-size.js +39 -0
- package/package.json +3 -1
package/lib/api/bulk2.js
ADDED
|
@@ -0,0 +1,800 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.IngestJobV2 = exports.QueryJobV2 = exports.BulkV2 = void 0;
|
|
7
|
+
const events_1 = require("events");
|
|
8
|
+
const stream_1 = require("stream");
|
|
9
|
+
const record_stream_1 = require("../record-stream");
|
|
10
|
+
const http_api_1 = __importDefault(require("../http-api"));
|
|
11
|
+
const jsforce_1 = require("../jsforce");
|
|
12
|
+
const logger_1 = require("../util/logger");
|
|
13
|
+
const stream_2 = require("../util/stream");
|
|
14
|
+
const is_1 = __importDefault(require("@sindresorhus/is"));
|
|
15
|
+
/**
 * Error raised when polling a Bulk V2 job exceeds the configured timeout.
 *
 * Carries the ID of the job being polled so callers can inspect or
 * clean it up after the failure.
 */
class JobPollingTimeoutError extends Error {
    jobId;
    /**
     * @param message Human-readable description of the timeout
     * @param jobId ID of the job that was being polled
     */
    constructor(message, jobId) {
        super(message);
        // NOTE: the runtime `name` is 'JobPollingTimeout' (no trailing "Error").
        this.name = 'JobPollingTimeout';
        this.jobId = jobId;
    }
}
|
|
26
|
+
/**
 * HTTP API client specialized for Bulk API 2.0 endpoints.
 *
 * Bulk V2 error responses arrive as a JSON array whose first element
 * carries `errorCode`/`message`, so the generic HttpApi error hooks
 * are overridden accordingly.
 */
class BulkApiV2 extends http_api_1.default {
    hasErrorInResponseBody(body) {
        if (!Array.isArray(body)) {
            return false;
        }
        const first = body[0];
        return typeof first === 'object' && 'errorCode' in first;
    }
    isSessionExpired(response) {
        return (response.statusCode === 401 && /INVALID_SESSION_ID/.test(response.body));
    }
    parseError(body) {
        const [{ errorCode, message }] = body;
        return { errorCode, message };
    }
}
|
|
42
|
+
/**
 * Bulk API 2.0 client, exposed on a connection as `connection.bulk2`.
 *
 * Provides factories for ingest and query jobs, plus convenience
 * helpers that create a job, wait for it to finish and collect results.
 */
class BulkV2 {
    connection;
    logger;
    /**
     * Polling interval in milliseconds
     *
     * Default: 1000 (1 second)
     */
    pollInterval = 1000;
    /**
     * Polling timeout in milliseconds
     *
     * Default: 30000 (30 seconds)
     */
    pollTimeout = 30000;
    constructor(connection) {
        this.connection = connection;
        this.logger = this.connection._logLevel
            ? (0, logger_1.getLogger)('bulk2').createInstance(this.connection._logLevel)
            : (0, logger_1.getLogger)('bulk2');
    }
    /**
     * Create an instance of an ingest job object.
     *
     * @param {NewIngestJobOptions} options object
     * @returns {IngestJobV2} An ingest job instance
     * @example
     * // Upsert records to the Account object.
     *
     * const job = connection.bulk2.createJob({
     *   operation: 'insert'
     *   object: 'Account',
     * });
     *
     * // create the job in the org
     * await job.open()
     *
     * // upload data
     * await job.uploadData(csvFile)
     *
     * // finished uploading data, mark it as ready for processing
     * await job.close()
     */
    createJob(options) {
        return new IngestJobV2(this.connection, {
            bodyParams: options,
            pollingOptions: this,
        });
    }
    /**
     * Get a job instance for an existing job by its ID.
     *
     * @param type 'ingest' (default) or 'query'
     * @param options object containing the job `id`
     * @returns {IngestJobV2 | QueryJobV2} a job instance bound to the given ID
     */
    job(type = 'ingest', options) {
        if (type === 'ingest') {
            return new IngestJobV2(this.connection, {
                id: options.id,
                pollingOptions: this,
            });
        }
        return new QueryJobV2(this.connection, {
            id: options.id,
            pollingOptions: this,
        });
    }
    /**
     * Create, upload, and start bulkload job
     *
     * @returns {Promise<IngestJobV2Results>} successful, failed and unprocessed records
     */
    async loadAndWaitForResults(options) {
        // Fall back to instance defaults without mutating the caller's
        // options object (the previous implementation wrote back into it).
        const pollTimeout = options.pollTimeout || this.pollTimeout;
        const pollInterval = options.pollInterval || this.pollInterval;
        const job = this.createJob({
            object: options.object,
            operation: options.operation,
        });
        try {
            await job.open();
            await job.uploadData(options.input);
            await job.close();
            await job.poll(pollInterval, pollTimeout);
            return await job.getAllResults();
        }
        catch (error) {
            const err = error;
            this.logger.error(`bulk load failed due to: ${err}`);
            // BUGFIX: JobPollingTimeoutError's constructor sets `name` to
            // 'JobPollingTimeout', so the previous comparison against
            // 'JobPollingTimeoutError' never matched and the job was deleted
            // even after a polling timeout. Keep the job alive on timeout.
            if (err.name !== 'JobPollingTimeout') {
                // fires off one last attempt to clean up and ignores the result | error
                job.delete().catch((ignored) => ignored);
            }
            throw err;
        }
    }
    /**
     * Execute bulk query and get a record stream.
     *
     * Default timeout: 10000ms
     *
     * @param soql SOQL query
     * @param BulkV2PollingOptions options object
     *
     * @returns {RecordStream} - Record stream, convertible to a CSV data stream
     */
    async query(soql, options) {
        const queryJob = new QueryJobV2(this.connection, {
            bodyParams: {
                query: soql,
                operation: options?.scanAll ? 'queryAll' : 'query',
                columnDelimiter: options?.columnDelimiter,
                lineEnding: options?.lineEnding,
            },
            pollingOptions: this,
        });
        const recordStream = new record_stream_1.Parsable();
        const dataStream = recordStream.stream('csv');
        try {
            await queryJob.open();
            await queryJob.poll(options?.pollInterval, options?.pollTimeout);
            const queryRecordsStream = await queryJob
                .result()
                .then((s) => s.stream());
            queryRecordsStream.pipe(dataStream);
        }
        catch (error) {
            const err = error;
            this.logger.error(`bulk query failed due to: ${err}`);
            // BUGFIX: compare against the actual runtime error name
            // ('JobPollingTimeout'); see loadAndWaitForResults above.
            if (err.name !== 'JobPollingTimeout') {
                // fires off one last attempt to clean up and ignores the result | error
                queryJob.delete().catch((ignored) => ignored);
            }
            throw err;
        }
        return recordStream;
    }
}
exports.BulkV2 = BulkV2;
|
|
177
|
+
/**
 * Class for Bulk API V2 Query Job.
 *
 * Emits: 'open', 'inProgress', 'jobComplete', 'error'. A default
 * 'error' listener stores the latest error on `this.error`.
 */
class QueryJobV2 extends events_1.EventEmitter {
    connection;
    logger;
    _id;
    bodyParams;
    pollingOptions;
    error;
    jobInfo;
    locator;
    constructor(conn, options) {
        super();
        this.connection = conn;
        this.logger = this.connection._logLevel
            ? (0, logger_1.getLogger)('bulk2:QueryJobV2').createInstance(this.connection._logLevel)
            : (0, logger_1.getLogger)('bulk2:QueryJobV2');
        if ('id' in options) {
            this._id = options.id;
        }
        else {
            this.bodyParams = options.bodyParams;
        }
        this.pollingOptions = options.pollingOptions;
        // default error handler to keep the latest error
        this.on('error', (error) => (this.error = error));
    }
    /**
     * Get the query job ID.
     *
     * @returns {string} query job Id.
     */
    get id() {
        return this.jobInfo ? this.jobInfo.id : this._id;
    }
    /**
     * Get the query job info.
     *
     * @returns {QueryJobInfoV2} query job information.
     * @throws if no job info has been fetched yet.
     */
    getInfo() {
        if (this.jobInfo) {
            return this.jobInfo;
        }
        throw new Error('No internal job info. Make sure to call `await job.check`.');
    }
    /**
     * Creates a query job
     *
     * @returns {Promise<QueryJobInfoV2>} job information.
     */
    async open() {
        if (!this.bodyParams) {
            throw new Error('Missing required body params to open a new query job.');
        }
        try {
            this.jobInfo = await this.createQueryRequest({
                method: 'POST',
                body: JSON.stringify(this.bodyParams),
                headers: {
                    'Content-Type': 'application/json; charset=utf-8',
                },
                responseType: 'application/json',
            });
            this.logger.debug(`Successfully created job ${this.id}`);
            this.emit('open', this.jobInfo);
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
        return this.jobInfo;
    }
    /**
     * Abort the job
     *
     * The 'aborted' event is emitted when the job successfully aborts.
     * @returns {Promise<QueryJobInfoV2>} job information.
     */
    async abort() {
        try {
            const state = 'Aborted';
            this.jobInfo = await this.createQueryRequest({
                method: 'PATCH',
                path: `/${this.id}`,
                body: JSON.stringify({ state }),
                headers: { 'Content-Type': 'application/json; charset=utf-8' },
                responseType: 'application/json',
            });
            this.logger.debug(`Successfully aborted job ${this.id}`);
            return this.jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Poll for the state of the processing for the job.
     *
     * @param interval Polling interval in milliseconds
     * @param timeout Polling timeout in milliseconds
     * @returns {Promise<void>} A promise that resolves when the job finished being processed.
     */
    async poll(interval = this.pollingOptions.pollInterval, timeout = this.pollingOptions.pollTimeout) {
        const jobId = this.id;
        const startTime = Date.now();
        const endTime = startTime + timeout;
        this.logger.debug(`Start polling for job status`);
        this.logger.debug(`Polling options: timeout:${timeout}ms | interval: ${interval}ms.`);
        if (timeout === 0) {
            throw new JobPollingTimeoutError(`Skipping polling because of timeout = 0ms. Job Id = ${jobId}`, jobId);
        }
        while (endTime > Date.now()) {
            try {
                const res = await this.check();
                switch (res.state) {
                    case 'Aborted':
                        throw new Error('Job has been aborted');
                    case 'UploadComplete':
                    case 'InProgress':
                        this.emit('inProgress', res);
                        await delay(interval);
                        break;
                    case 'Failed':
                        // unlike ingest jobs, the API doesn't return an error msg:
                        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/query_get_one_job.htm
                        this.logger.debug(res);
                        throw new Error('Query job failed to complete');
                    case 'JobComplete':
                        this.logger.debug(`Job ${this.id} was successfully processed.`);
                        this.emit('jobComplete');
                        return;
                }
            }
            catch (err) {
                this.emit('error', err);
                throw err;
            }
        }
        const timeoutError = new JobPollingTimeoutError(`Polling timed out after ${timeout}ms. Job Id = ${jobId}`, jobId);
        this.emit('error', timeoutError);
        throw timeoutError;
    }
    /**
     * Check the latest job status
     *
     * @returns {Promise<QueryJobInfoV2>} job information.
     */
    async check() {
        try {
            const jobInfo = await this.createQueryRequest({
                method: 'GET',
                path: `/${this.id}`,
                responseType: 'application/json',
            });
            this.jobInfo = jobInfo;
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Get the results for a query job as a record stream
     *
     * This method assumes the job finished being processed.
     * Pages are fetched sequentially by following the `Sforce-Locator`
     * header captured in `createQueryRequest` until the server reports
     * the final page (locator value is the literal string 'null').
     *
     * @returns {RecordStream} - Record stream, convertible to a CSV data stream
     */
    async result() {
        const resultStream = new record_stream_1.Parsable();
        const resultDataStream = resultStream.stream('csv');
        const resultsPath = `/${this.id}/results`;
        while (this.locator !== 'null') {
            const resPromise = this.createQueryRequest({
                method: 'GET',
                // BUGFIX: `resultsPath` already starts with '/', the previous
                // template prefixed another one, producing '//<id>/results'.
                path: this.locator
                    ? `${resultsPath}?locator=${this.locator}`
                    : resultsPath,
                headers: {
                    Accept: 'text/csv',
                },
            });
            resPromise.stream().pipe(resultDataStream);
            await resPromise;
        }
        return resultStream;
    }
    /**
     * Deletes a query job.
     */
    async delete() {
        return this.createQueryRequest({
            method: 'DELETE',
            path: `/${this.id}`,
        });
    }
    createQueryRequest(request) {
        const { path, responseType } = request;
        // Leading slash added for consistency with the ingest endpoint and
        // to resolve against the instance origin regardless of any path on
        // `instanceUrl`.
        const basePath = `/services/data/v${this.connection.version}/jobs/query`;
        const url = new URL(path ? basePath + path : basePath, this.connection.instanceUrl).toString();
        const httpApi = new BulkApiV2(this.connection, { responseType });
        // Capture the pagination locator from every response for `result()`.
        httpApi.on('response', (response) => {
            this.locator = response.headers['sforce-locator'];
            this.logger.debug(`sforce-locator: ${this.locator}`);
        });
        return httpApi.request({
            ...request,
            url,
        });
    }
}
exports.QueryJobV2 = QueryJobV2;
|
|
389
|
+
/**
|
|
390
|
+
* Class for Bulk API V2 Ingest Job
|
|
391
|
+
*/
|
|
392
|
+
/**
 * Class for Bulk API V2 Ingest Job
 *
 * Emits: 'open', 'close', 'aborted', 'inProgress', 'jobComplete',
 * 'error'. A default 'error' listener stores the latest error on
 * `this.error`.
 */
class IngestJobV2 extends events_1.EventEmitter {
    connection;
    logger;
    // Job ID supplied via options; superseded by jobInfo.id once known.
    _id;
    // Job creation parameters (only set when constructed without an id).
    bodyParams;
    // Writable used to upload record data (see JobDataV2).
    jobData;
    pollingOptions;
    // Caches for the three result endpoints (fetched at most once each).
    bulkJobSuccessfulResults;
    bulkJobFailedResults;
    bulkJobUnprocessedRecords;
    error;
    jobInfo;
    constructor(conn, options) {
        super();
        this.connection = conn;
        this.logger = this.connection._logLevel
            ? (0, logger_1.getLogger)('bulk2:IngestJobV2').createInstance(this.connection._logLevel)
            : (0, logger_1.getLogger)('bulk2:IngestJobV2');
        this.pollingOptions = options.pollingOptions;
        if ('id' in options) {
            this._id = options.id;
        }
        else {
            this.bodyParams = options.bodyParams;
        }
        this.jobData = new JobDataV2({
            createRequest: (request) => this.createIngestRequest(request),
            job: this,
        });
        // default error handler to keep the latest error
        this.on('error', (error) => (this.error = error));
    }
    /**
     * Get the ingest job ID.
     *
     * @returns {string} ingest job Id.
     */
    get id() {
        return this.jobInfo ? this.jobInfo.id : this._id;
    }
    /**
     * Get the ingest job info.
     *
     * @returns {JobInfoV2} ingest job information.
     * @throws if no job info has been fetched yet.
     */
    getInfo() {
        if (this.jobInfo) {
            return this.jobInfo;
        }
        throw new Error('No internal job info. Make sure to call `await job.check`.');
    }
    /**
     * Create a job representing a bulk operation in the org
     *
     * @returns {Promise<JobInfoV2>} job information.
     */
    async open() {
        if (!this.bodyParams) {
            throw new Error('Missing required body params to open a new ingest job.');
        }
        try {
            this.jobInfo = await this.createIngestRequest({
                method: 'POST',
                body: JSON.stringify(this.bodyParams),
                headers: {
                    'Content-Type': 'application/json; charset=utf-8',
                },
                responseType: 'application/json',
            });
            this.logger.debug(`Successfully created job ${this.id}`);
            this.emit('open');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
        return this.jobInfo;
    }
    /** Upload data for a job in CSV format
     *
     * @param input CSV as a string, or array of records or readable stream
     */
    async uploadData(input) {
        // `execute` kicks off the PUT request; `.result` resolves once the
        // server acknowledges the upload (or rejects on error).
        await this.jobData.execute(input).result;
        this.logger.debug(`Successfully uploaded data to job ${this.id}`);
    }
    /**
     * Close opened job
     *
     * This method will notify the org that the upload of job data is complete and is ready for processing.
     */
    async close() {
        try {
            const state = 'UploadComplete';
            this.jobInfo = await this.createIngestRequest({
                method: 'PATCH',
                path: `/${this.id}`,
                body: JSON.stringify({ state }),
                headers: { 'Content-Type': 'application/json; charset=utf-8' },
                responseType: 'application/json',
            });
            this.logger.debug(`Successfully closed job ${this.id}`);
            this.emit('close');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Set the status to abort
     *
     * Emits 'aborted' on success.
     */
    async abort() {
        try {
            const state = 'Aborted';
            this.jobInfo = await this.createIngestRequest({
                method: 'PATCH',
                path: `/${this.id}`,
                body: JSON.stringify({ state }),
                headers: { 'Content-Type': 'application/json; charset=utf-8' },
                responseType: 'application/json',
            });
            this.logger.debug(`Successfully aborted job ${this.id}`);
            this.emit('aborted');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Poll for the state of the processing for the job.
     *
     * This method will only throw after a timeout. To capture a
     * job failure while polling you must set a listener for the
     * `failed` event before calling it:
     *
     * job.on('failed', (err) => console.error(err))
     * await job.poll()
     *
     * @param interval Polling interval in milliseconds
     * @param timeout Polling timeout in milliseconds
     * @returns {Promise<void>} A promise that resolves when the job finishes successfully
     */
    async poll(interval = this.pollingOptions.pollInterval, timeout = this.pollingOptions.pollTimeout) {
        const jobId = this.id;
        const startTime = Date.now();
        const endTime = startTime + timeout;
        if (timeout === 0) {
            throw new JobPollingTimeoutError(`Skipping polling because of timeout = 0ms. Job Id = ${jobId}`, jobId);
        }
        this.logger.debug(`Start polling for job status`);
        this.logger.debug(`Polling options: timeout:${timeout}ms | interval: ${interval}ms.`);
        while (endTime > Date.now()) {
            try {
                const res = await this.check();
                switch (res.state) {
                    case 'Open':
                        throw new Error('Job is still open. Make sure close the job by `close` method on the job instance before polling.');
                    case 'Aborted':
                        throw new Error('Job has been aborted');
                    case 'UploadComplete':
                    case 'InProgress':
                        // still processing — notify listeners and wait one interval
                        this.emit('inProgress', res);
                        await delay(interval);
                        break;
                    case 'Failed':
                        this.logger.debug(res);
                        throw new Error(`Ingest job failed to complete due to: ${res.errorMessage}`);
                    case 'JobComplete':
                        this.logger.debug(`Job ${this.id} was successfully processed.`);
                        this.emit('jobComplete');
                        return;
                }
            }
            catch (err) {
                this.emit('error', err);
                throw err;
            }
        }
        const timeoutError = new JobPollingTimeoutError(`Polling timed out after ${timeout}ms. Job Id = ${jobId}`, jobId);
        this.emit('error', timeoutError);
        throw timeoutError;
    }
    /**
     * Check the latest batch status in server
     *
     * @returns {Promise<JobInfoV2>} job information.
     */
    async check() {
        try {
            const jobInfo = await this.createIngestRequest({
                method: 'GET',
                path: `/${this.id}`,
                responseType: 'application/json',
            });
            this.jobInfo = jobInfo;
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /** Return all record results
     *
     * This method will return successful, failed and unprocessed records
     *
     * @returns Promise<IngestJobV2Results>
     */
    async getAllResults() {
        // the three result endpoints are independent, so fetch in parallel
        const [successfulResults, failedResults, unprocessedRecords,] = await Promise.all([
            this.getSuccessfulResults(),
            this.getFailedResults(),
            this.getUnprocessedRecords(),
        ]);
        return { successfulResults, failedResults, unprocessedRecords };
    }
    /** Return successful results
     *
     * The order of records returned is not guaranteed to match the ordering of the uploaded data.
     *
     * @returns Promise<IngestJobV2SuccessfulResults>
     */
    async getSuccessfulResults() {
        // cached after the first fetch
        if (this.bulkJobSuccessfulResults) {
            return this.bulkJobSuccessfulResults;
        }
        const results = await this.createIngestRequest({
            method: 'GET',
            path: `/${this.id}/successfulResults`,
            responseType: 'text/csv',
        });
        this.bulkJobSuccessfulResults = results ?? [];
        return this.bulkJobSuccessfulResults;
    }
    /** Return failed results
     *
     * The order of records in the response is not guaranteed to match the ordering of records in the original job data.
     *
     * @returns Promise<IngestJobV2FailedResults>
     */
    async getFailedResults() {
        // cached after the first fetch
        if (this.bulkJobFailedResults) {
            return this.bulkJobFailedResults;
        }
        const results = await this.createIngestRequest({
            method: 'GET',
            path: `/${this.id}/failedResults`,
            responseType: 'text/csv',
        });
        this.bulkJobFailedResults = results ?? [];
        return this.bulkJobFailedResults;
    }
    /** Return unprocessed results
     *
     * The unprocessed records endpoint returns records as a CSV.
     * If the request helper is able to parse it, you get the records
     * as an array of objects.
     * If unable to parse it (bad CSV), you get the raw response as a string.
     *
     * The order of records in the response is not guaranteed to match the ordering of records in the original job data.
     *
     * @returns Promise<IngestJobV2UnprocessedRecords>
     */
    async getUnprocessedRecords() {
        // cached after the first fetch
        if (this.bulkJobUnprocessedRecords) {
            return this.bulkJobUnprocessedRecords;
        }
        const results = await this.createIngestRequest({
            method: 'GET',
            path: `/${this.id}/unprocessedrecords`,
            responseType: 'text/csv',
        });
        this.bulkJobUnprocessedRecords = results ?? [];
        return this.bulkJobUnprocessedRecords;
    }
    /**
     * Deletes an ingest job.
     */
    async delete() {
        return this.createIngestRequest({
            method: 'DELETE',
            path: `/${this.id}`,
        });
    }
    // Build the ingest endpoint URL and issue the request through BulkApiV2.
    createIngestRequest(request) {
        const { path, responseType } = request;
        const basePath = `/services/data/v${this.connection.version}/jobs/ingest`;
        const url = new URL(path ? basePath + path : basePath, this.connection.instanceUrl).toString();
        return new BulkApiV2(this.connection, { responseType }).request({
            ...request,
            url,
        });
    }
}
exports.IngestJobV2 = IngestJobV2;
|
|
687
|
+
/**
 * Object-mode Writable that serializes records to CSV and uploads them
 * to an ingest job's `/batches` endpoint.
 *
 * NOTE(review): the upload request is only created once the serialized
 * CSV stream first becomes readable, so no HTTP request is made for a
 * job whose data stream never receives any input.
 */
class JobDataV2 extends stream_1.Writable {
    job;
    // Serializable: accepts record objects, emits CSV (upload direction).
    uploadStream;
    // Parsable: accepts CSV, emits records (download direction).
    downloadStream;
    // Duplex combining the two directions; exposed via stream().
    dataStream;
    // Promise settled when the upload request responds or errors;
    // also serves as the "already executed" guard in execute().
    result;
    /**
     * @param options.createRequest factory producing the batch upload request
     * @param options.job owning IngestJobV2 instance
     */
    constructor(options) {
        super({ objectMode: true });
        const createRequest = options.createRequest;
        this.job = options.job;
        this.uploadStream = new record_stream_1.Serializable();
        this.downloadStream = new record_stream_1.Parsable();
        const converterOptions = { nullValue: '#N/A' };
        const uploadDataStream = this.uploadStream.stream('csv', converterOptions);
        const downloadDataStream = this.downloadStream.stream('csv', converterOptions);
        this.dataStream = (0, stream_2.concatStreamsAsDuplex)(uploadDataStream, downloadDataStream);
        // end the serializer when this Writable finishes receiving records
        this.on('finish', () => this.uploadStream.end());
        uploadDataStream.once('readable', () => {
            try {
                // pipe upload data to batch API request stream
                const req = createRequest({
                    method: 'PUT',
                    path: `/${this.job.id}/batches`,
                    headers: {
                        'Content-Type': 'text/csv',
                    },
                    responseType: 'application/json',
                });
                // await the response in the background and relay it as an event;
                // execute() converts these events into the `result` promise
                (async () => {
                    try {
                        const res = await req;
                        this.emit('response', res);
                    }
                    catch (err) {
                        this.emit('error', err);
                    }
                })();
                uploadDataStream.pipe(req.stream());
            }
            catch (err) {
                this.emit('error', err);
            }
        });
    }
    // Shape each record according to the job operation before serializing:
    // inserts drop Id, deletes keep only Id, everything else keeps both.
    // `type`/`attributes` metadata fields are always stripped.
    _write(record_, enc, cb) {
        const { Id, type, attributes, ...rrec } = record_;
        let record;
        switch (this.job.getInfo().operation) {
            case 'insert':
                record = rrec;
                break;
            case 'delete':
            case 'hardDelete':
                record = { Id };
                break;
            default:
                record = { Id, ...rrec };
        }
        this.uploadStream.write(record, enc, cb);
    }
    /**
     * Returns duplex stream which accepts CSV data input and batch result output
     */
    stream() {
        return this.dataStream;
    }
    /**
     * Execute batch operation
     *
     * @param input readable stream, array of records, or CSV string
     * @returns this (await `this.result` for the upload acknowledgement)
     * @throws if data was already uploaded to this job
     */
    execute(input) {
        if (this.result) {
            throw new Error('Data can only be uploaded to a job once.');
        }
        this.result = new Promise((resolve, reject) => {
            this.once('response', () => resolve());
            this.once('error', reject);
        });
        if (is_1.default.nodeStream(input)) {
            // if input has stream.Readable interface
            input.pipe(this.dataStream);
        }
        else {
            // deep-copy so the boolean-to-string normalization below does not
            // mutate the caller's records
            const recordData = structuredClone(input);
            if (Array.isArray(recordData)) {
                for (const record of recordData) {
                    for (const key of Object.keys(record)) {
                        if (typeof record[key] === 'boolean') {
                            record[key] = String(record[key]);
                        }
                    }
                    this.write(record);
                }
                this.end();
            }
            else if (typeof recordData === 'string') {
                // raw CSV: bypass the record serializer entirely
                this.dataStream.write(recordData, 'utf8');
                this.dataStream.end();
            }
        }
        return this;
    }
}
|
|
792
|
+
/**
 * Resolve after the given number of milliseconds.
 *
 * @param ms Delay duration in milliseconds
 * @returns {Promise<void>} promise fulfilled once the timer fires
 */
function delay(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
795
|
+
/*--------------------------------------------*/
/*
 * Register hook in connection instantiation for dynamically adding this API module features
 */
// Makes `connection.bulk2` available on every Connection instance.
(0, jsforce_1.registerModule)('bulk2', (conn) => new BulkV2(conn));
exports.default = BulkV2;
|