@jsforce/jsforce-node 0.0.1 → 3.0.0-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +54 -0
- package/index.d.ts +4 -0
- package/index.js +1 -0
- package/lib/VERSION.d.ts +2 -0
- package/lib/VERSION.js +3 -0
- package/lib/api/analytics/types.d.ts +509 -0
- package/lib/api/analytics/types.js +2 -0
- package/lib/api/analytics.d.ts +163 -0
- package/lib/api/analytics.js +342 -0
- package/lib/api/apex.d.ts +44 -0
- package/lib/api/apex.js +86 -0
- package/lib/api/bulk.d.ts +444 -0
- package/lib/api/bulk.js +1372 -0
- package/lib/api/chatter.d.ts +133 -0
- package/lib/api/chatter.js +248 -0
- package/lib/api/metadata/schema.d.ts +16117 -0
- package/lib/api/metadata/schema.js +9094 -0
- package/lib/api/metadata.d.ts +189 -0
- package/lib/api/metadata.js +406 -0
- package/lib/api/soap/schema.d.ts +3167 -0
- package/lib/api/soap/schema.js +1787 -0
- package/lib/api/soap.d.ts +76 -0
- package/lib/api/soap.js +155 -0
- package/lib/api/streaming/extension.d.ts +94 -0
- package/lib/api/streaming/extension.js +151 -0
- package/lib/api/streaming.d.ts +160 -0
- package/lib/api/streaming.js +252 -0
- package/lib/api/tooling.d.ts +284 -0
- package/lib/api/tooling.js +202 -0
- package/lib/api/wsdl/wsdl2schema.d.ts +1 -0
- package/lib/api/wsdl/wsdl2schema.js +354 -0
- package/lib/browser/canvas.d.ts +12 -0
- package/lib/browser/canvas.js +77 -0
- package/lib/browser/client.d.ts +82 -0
- package/lib/browser/client.js +244 -0
- package/lib/browser/jsonp.d.ts +12 -0
- package/lib/browser/jsonp.js +69 -0
- package/lib/browser/registry.d.ts +3 -0
- package/lib/browser/registry.js +5 -0
- package/lib/browser/request.d.ts +10 -0
- package/lib/browser/request.js +202 -0
- package/lib/cache.d.ts +74 -0
- package/lib/cache.js +159 -0
- package/lib/connection.d.ts +355 -0
- package/lib/connection.js +1153 -0
- package/lib/core.d.ts +17 -0
- package/lib/core.js +55 -0
- package/lib/csv.d.ts +23 -0
- package/lib/csv.js +35 -0
- package/lib/date.d.ts +82 -0
- package/lib/date.js +201 -0
- package/lib/http-api.d.ts +75 -0
- package/lib/http-api.js +257 -0
- package/lib/index.d.ts +12 -0
- package/lib/index.js +31 -0
- package/lib/jsforce.d.ts +26 -0
- package/lib/jsforce.js +67 -0
- package/lib/jwtOAuth2.d.ts +8 -0
- package/lib/jwtOAuth2.js +23 -0
- package/lib/oauth2.d.ts +92 -0
- package/lib/oauth2.js +245 -0
- package/lib/process.d.ts +157 -0
- package/lib/process.js +143 -0
- package/lib/query.d.ts +341 -0
- package/lib/query.js +817 -0
- package/lib/quick-action.d.ts +44 -0
- package/lib/quick-action.js +46 -0
- package/lib/record-reference.d.ts +46 -0
- package/lib/record-reference.js +65 -0
- package/lib/record-stream.d.ts +83 -0
- package/lib/record-stream.js +233 -0
- package/lib/registry/base.d.ts +43 -0
- package/lib/registry/base.js +96 -0
- package/lib/registry/empty.d.ts +7 -0
- package/lib/registry/empty.js +13 -0
- package/lib/registry/file.d.ts +11 -0
- package/lib/registry/file.js +51 -0
- package/lib/registry/index.d.ts +8 -0
- package/lib/registry/index.js +21 -0
- package/lib/registry/sfdx.d.ts +56 -0
- package/lib/registry/sfdx.js +133 -0
- package/lib/registry/types.d.ts +47 -0
- package/lib/registry/types.js +2 -0
- package/lib/request-helper.d.ts +23 -0
- package/lib/request-helper.js +102 -0
- package/lib/request.d.ts +11 -0
- package/lib/request.js +75 -0
- package/lib/session-refresh-delegate.d.ts +31 -0
- package/lib/session-refresh-delegate.js +69 -0
- package/lib/soap.d.ts +60 -0
- package/lib/soap.js +246 -0
- package/lib/sobject.d.ts +258 -0
- package/lib/sobject.js +376 -0
- package/lib/soql-builder.d.ts +25 -0
- package/lib/soql-builder.js +226 -0
- package/lib/transport.d.ts +63 -0
- package/lib/transport.js +175 -0
- package/lib/types/common.d.ts +560 -0
- package/lib/types/common.js +2 -0
- package/lib/types/index.d.ts +7 -0
- package/lib/types/index.js +23 -0
- package/lib/types/projection.d.ts +26 -0
- package/lib/types/projection.js +2 -0
- package/lib/types/record.d.ts +44 -0
- package/lib/types/record.js +2 -0
- package/lib/types/schema.d.ts +50 -0
- package/lib/types/schema.js +2 -0
- package/lib/types/soap.d.ts +43 -0
- package/lib/types/soap.js +2 -0
- package/lib/types/standard-schema.d.ts +16199 -0
- package/lib/types/standard-schema.js +2 -0
- package/lib/types/util.d.ts +7 -0
- package/lib/types/util.js +2 -0
- package/lib/util/formatter.d.ts +8 -0
- package/lib/util/formatter.js +24 -0
- package/lib/util/function.d.ts +32 -0
- package/lib/util/function.js +52 -0
- package/lib/util/logger.d.ts +29 -0
- package/lib/util/logger.js +102 -0
- package/lib/util/promise.d.ts +19 -0
- package/lib/util/promise.js +25 -0
- package/lib/util/stream.d.ts +12 -0
- package/lib/util/stream.js +88 -0
- package/package.json +260 -6
- package/typings/faye/index.d.ts +16 -0
- package/typings/index.d.ts +1 -0
package/lib/api/bulk.js
ADDED
|
@@ -0,0 +1,1372 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.IngestJobV2 = exports.QueryJobV2 = exports.BulkV2 = exports.Bulk = exports.Batch = exports.Job = void 0;
|
|
7
|
+
/**
|
|
8
|
+
* @file Manages Salesforce Bulk API related operations
|
|
9
|
+
* @author Shinichi Tomita <shinichi.tomita@gmail.com>
|
|
10
|
+
*/
|
|
11
|
+
const events_1 = require("events");
|
|
12
|
+
const stream_1 = require("stream");
|
|
13
|
+
const multistream_1 = __importDefault(require("multistream"));
|
|
14
|
+
const record_stream_1 = require("../record-stream");
|
|
15
|
+
const http_api_1 = __importDefault(require("../http-api"));
|
|
16
|
+
const jsforce_1 = require("../jsforce");
|
|
17
|
+
const stream_2 = require("../util/stream");
|
|
18
|
+
const function_1 = require("../util/function");
|
|
19
|
+
/**
 * Class for Bulk API Job
 *
 * Represents a single Bulk API (v1) job. All server calls go through the
 * owning Bulk instance (`this._bulk._request`) and use the XML job endpoints.
 * Events emitted: 'open', 'close', 'abort', 'error'.
 */
class Job extends events_1.EventEmitter {
    // sobject type being loaded; may be null when attached by job id
    // and is filled in later by check()
    type;
    // bulk operation name (insert / update / upsert / delete / hardDelete / query / queryAll)
    operation;
    // job options: extIdField, concurrencyMode, assignmentRuleId
    options;
    // server-assigned job id (null until opened; reset to null on close/abort)
    id;
    // last known job state ('Open', 'Closed', 'Aborted', or 'Unknown')
    state;
    // owning Bulk API facade (supplies transport and logger)
    _bulk;
    // batches registered in this job, keyed by batch id
    _batches;
    // cached promise of the latest jobInfo response
    _jobInfo;
    // latest error captured by the default 'error' listener
    _error;
    /**
     * @param bulk - owning Bulk instance
     * @param type - sobject type (nullable when attaching to an existing job)
     * @param operation - bulk operation (nullable when attaching to an existing job)
     * @param options - job creation options
     * @param jobId - existing job id to attach to, if any
     */
    constructor(bulk, type, operation, options, jobId) {
        super();
        this._bulk = bulk;
        this.type = type;
        this.operation = operation;
        this.options = options || {};
        this.id = jobId ?? null;
        // a job attached by id is assumed open; otherwise state is unknown
        // until open()/check() talks to the server
        this.state = this.id ? 'Open' : 'Unknown';
        this._batches = {};
        // default error handler to keep the latest error
        this.on('error', (error) => (this._error = error));
    }
    /**
     * Return latest jobInfo from cache
     */
    info() {
        // if cache is not available, check the latest
        if (!this._jobInfo) {
            this._jobInfo = this.check();
        }
        return this._jobInfo;
    }
    /**
     * Open new job and get jobinfo
     *
     * Returns the cached jobInfo promise when the open request was already
     * issued; otherwise POSTs an XML jobInfo document to /job.
     */
    open() {
        const bulk = this._bulk;
        const options = this.options;
        // if sobject type / operation is not provided
        if (!this.type || !this.operation) {
            throw new Error('type / operation is required to open a new job');
        }
        // if not requested opening job
        if (!this._jobInfo) {
            let operation = this.operation.toLowerCase();
            // normalize casing for the two camelCase operation names
            if (operation === 'harddelete') {
                operation = 'hardDelete';
            }
            if (operation === 'queryall') {
                operation = 'queryAll';
            }
            const body = `
<?xml version="1.0" encoding="UTF-8"?>
<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">
<operation>${operation}</operation>
<object>${this.type}</object>
${options.extIdField
                ? `<externalIdFieldName>${options.extIdField}</externalIdFieldName>`
                : ''}
${options.concurrencyMode
                ? `<concurrencyMode>${options.concurrencyMode}</concurrencyMode>`
                : ''}
${options.assignmentRuleId
                ? `<assignmentRuleId>${options.assignmentRuleId}</assignmentRuleId>`
                : ''}
<contentType>CSV</contentType>
</jobInfo>
`.trim();
            this._jobInfo = (async () => {
                try {
                    const res = await bulk._request({
                        method: 'POST',
                        path: '/job',
                        body,
                        headers: {
                            'Content-Type': 'application/xml; charset=utf-8',
                        },
                        responseType: 'application/xml',
                    });
                    // NOTE(review): 'open' is emitted before this.id/state are
                    // updated, so listeners observe the pre-open fields on `this`
                    this.emit('open', res.jobInfo);
                    this.id = res.jobInfo.id;
                    this.state = res.jobInfo.state;
                    return res.jobInfo;
                }
                catch (err) {
                    this.emit('error', err);
                    throw err;
                }
            })();
        }
        return this._jobInfo;
    }
    /**
     * Create a new batch instance in the job
     */
    createBatch() {
        const batch = new Batch(this);
        // register the batch once the server assigns it an id ('queue' event)
        batch.on('queue', () => {
            this._batches[batch.id] = batch;
        });
        return batch;
    }
    /**
     * Get a batch instance specified by given batch ID
     */
    batch(batchId) {
        let batch = this._batches[batchId];
        if (!batch) {
            batch = new Batch(this, batchId);
            this._batches[batchId] = batch;
        }
        return batch;
    }
    /**
     * Check the latest job status from server
     *
     * Refreshes the cached jobInfo promise and the local
     * id/type/operation/state fields from the server response.
     */
    check() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        this._jobInfo = (async () => {
            const jobId = await this.ready();
            const res = await bulk._request({
                method: 'GET',
                path: '/job/' + jobId,
                responseType: 'application/xml',
            });
            logger.debug(res.jobInfo);
            this.id = res.jobInfo.id;
            this.type = res.jobInfo.object;
            this.operation = res.jobInfo.operation;
            this.state = res.jobInfo.state;
            return res.jobInfo;
        })();
        return this._jobInfo;
    }
    /**
     * Wait till the job is assigned to server
     *
     * @returns Promise resolving to the job id (opens the job first if needed)
     */
    ready() {
        return this.id
            ? Promise.resolve(this.id)
            : this.open().then(({ id }) => id);
    }
    /**
     * List all registered batch info in job
     *
     * @returns array of batchInfo objects (a single result is wrapped in an array)
     */
    async list() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        const jobId = await this.ready();
        const res = await bulk._request({
            method: 'GET',
            path: '/job/' + jobId + '/batch',
            responseType: 'application/xml',
        });
        logger.debug(res.batchInfoList.batchInfo);
        // a single batchInfo arrives as a bare object; normalize to an array
        const batchInfoList = Array.isArray(res.batchInfoList.batchInfo)
            ? res.batchInfoList.batchInfo
            : [res.batchInfoList.batchInfo];
        return batchInfoList;
    }
    /**
     * Close opened job
     *
     * No-op when the job has no id. Clears the local id and emits 'close'
     * on success; emits 'error' and rethrows on failure.
     */
    async close() {
        if (!this.id) {
            return;
        }
        try {
            const jobInfo = await this._changeState('Closed');
            this.id = null;
            this.emit('close', jobInfo);
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Set the status to abort
     *
     * No-op when the job has no id. Clears the local id and emits 'abort'
     * on success; emits 'error' and rethrows on failure.
     */
    async abort() {
        if (!this.id) {
            return;
        }
        try {
            const jobInfo = await this._changeState('Aborted');
            this.id = null;
            this.emit('abort', jobInfo);
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * POST a state transition ('Closed' | 'Aborted') to the job resource
     * and refresh the cached jobInfo.
     *
     * @private
     */
    async _changeState(state) {
        const bulk = this._bulk;
        const logger = bulk._logger;
        this._jobInfo = (async () => {
            const jobId = await this.ready();
            const body = `
<?xml version="1.0" encoding="UTF-8"?>
<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">
<state>${state}</state>
</jobInfo>
`.trim();
            const res = await bulk._request({
                method: 'POST',
                path: '/job/' + jobId,
                body: body,
                headers: {
                    'Content-Type': 'application/xml; charset=utf-8',
                },
                responseType: 'application/xml',
            });
            logger.debug(res.jobInfo);
            this.state = res.jobInfo.state;
            return res.jobInfo;
        })();
        return this._jobInfo;
    }
}
exports.Job = Job;
|
|
253
|
+
/*--------------------------------------------*/
|
|
254
|
+
/**
 * Error thrown when batch-result polling exceeds its timeout.
 * Carries the job and batch ids so callers can decide on cleanup.
 */
class PollingTimeoutError extends Error {
    jobId;
    batchId;
    /**
     * @param message - human-readable description of the timeout
     * @param jobId - id of the job being polled
     * @param batchId - id of the batch being polled
     */
    constructor(message, jobId, batchId) {
        super(message);
        Object.assign(this, { name: 'PollingTimeout', jobId, batchId });
    }
}
|
|
267
|
+
/**
 * Error thrown when a Bulk V2 job poll exceeds its timeout.
 * Carries the job id so callers can decide on cleanup.
 */
class JobPollingTimeoutError extends Error {
    jobId;
    /**
     * @param message - human-readable description of the timeout
     * @param jobId - id of the job being polled
     */
    constructor(message, jobId) {
        super(message);
        Object.assign(this, { name: 'JobPollingTimeout', jobId });
    }
}
|
|
278
|
+
/*--------------------------------------------*/
|
|
279
|
+
/**
 * Batch (extends Writable)
 *
 * Writable object-mode stream representing one batch within a bulk Job.
 * Records written to it are serialized to CSV and uploaded to the batch
 * endpoint; results can then be polled and retrieved.
 * Events emitted: 'queue', 'progress', 'response', 'error'.
 */
class Batch extends stream_1.Writable {
    // owning Job instance
    job;
    // server-assigned batch id (undefined until the 'queue' event fires)
    id;
    // owning Bulk API facade (transport + logger)
    _bulk;
    // Serializable record stream feeding the CSV upload
    _uploadStream;
    // Parsable record stream consuming the CSV download
    _downloadStream;
    // duplex stream exposed via stream(): CSV upload in, results out
    _dataStream;
    // promise of the batch result, created lazily by execute()
    _result;
    // latest error captured by the default 'error' listener
    _error;
    /**
     * @param job - owning Job
     * @param id - existing batch id to attach to, if any
     */
    constructor(job, id) {
        super({ objectMode: true });
        this.job = job;
        this.id = id;
        this._bulk = job._bulk;
        // default error handler to keep the latest error
        this.on('error', (error) => (this._error = error));
        //
        // setup data streams
        //
        const converterOptions = { nullValue: '#N/A' };
        const uploadStream = (this._uploadStream = new record_stream_1.Serializable());
        const uploadDataStream = uploadStream.stream('csv', converterOptions);
        const downloadStream = (this._downloadStream = new record_stream_1.Parsable());
        const downloadDataStream = downloadStream.stream('csv', converterOptions);
        // end of this Writable ends the CSV serialization as well
        this.on('finish', () => uploadStream.end());
        // defer the HTTP request until the first chunk of CSV is available
        uploadDataStream.once('readable', async () => {
            try {
                // ensure the job is opened in server or job id is already assigned
                await this.job.ready();
                // pipe upload data to batch API request stream
                uploadDataStream.pipe(this._createRequestStream());
            }
            catch (err) {
                this.emit('error', err);
            }
        });
        // duplex data stream, opened access to API programmers by Batch#stream()
        this._dataStream = (0, stream_2.concatStreamsAsDuplex)(uploadDataStream, downloadDataStream);
    }
    /**
     * Connect batch API and create stream instance of request/response
     *
     * Side effect: when the POST response arrives, assigns this.id and
     * emits 'queue' (or 'error' on failure).
     *
     * @private
     */
    _createRequestStream() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        const req = bulk._request({
            method: 'POST',
            path: '/job/' + this.job.id + '/batch',
            headers: {
                'Content-Type': 'text/csv',
            },
            responseType: 'application/xml',
        });
        // observe the response out-of-band; the stream itself is returned below
        (async () => {
            try {
                const res = await req;
                logger.debug(res.batchInfo);
                this.id = res.batchInfo.id;
                this.emit('queue', res.batchInfo);
            }
            catch (err) {
                this.emit('error', err);
            }
        })();
        return req.stream();
    }
    /**
     * Implementation of Writable
     *
     * Shapes the record per operation before forwarding to the CSV
     * serializer: insert drops Id, delete/hardDelete keep only Id,
     * everything else keeps Id plus the remaining fields.
     * `type` and `attributes` are always stripped.
     */
    _write(record_, enc, cb) {
        const { Id, type, attributes, ...rrec } = record_;
        let record;
        switch (this.job.operation) {
            case 'insert':
                record = rrec;
                break;
            case 'delete':
            case 'hardDelete':
                record = { Id };
                break;
            default:
                record = { Id, ...rrec };
        }
        this._uploadStream.write(record, enc, cb);
    }
    /**
     * Returns duplex stream which accepts CSV data input and batch result output
     */
    stream() {
        return this._dataStream;
    }
    /**
     * Execute batch operation
     *
     * @param input - Readable stream, array of records, or CSV string
     * @returns this (for chaining); results resolve via then()/'response'
     * @throws when the batch was already executed
     */
    execute(input) {
        // if batch is already executed
        if (this._result) {
            throw new Error('Batch already executed.');
        }
        this._result = new Promise((resolve, reject) => {
            this.once('response', resolve);
            this.once('error', reject);
        });
        if ((0, function_1.isObject)(input) && 'pipe' in input && (0, function_1.isFunction)(input.pipe)) {
            // if input has stream.Readable interface
            input.pipe(this._dataStream);
        }
        else {
            if (Array.isArray(input)) {
                for (const record of input) {
                    // booleans must be stringified for the CSV serializer
                    for (const key of Object.keys(record)) {
                        if (typeof record[key] === 'boolean') {
                            record[key] = String(record[key]);
                        }
                    }
                    this.write(record);
                }
                this.end();
            }
            else if (typeof input === 'string') {
                // raw CSV text goes straight to the data stream
                this._dataStream.write(input, 'utf8');
                this._dataStream.end();
            }
        }
        // return Batch instance for chaining
        return this;
    }
    // aliases preserved for API compatibility
    run = this.execute;
    exec = this.execute;
    /**
     * Promise/A+ interface
     * Delegate to promise, return promise instance for batch result
     */
    then(onResolved, onReject) {
        if (!this._result) {
            this.execute();
        }
        return this._result.then(onResolved, onReject);
    }
    /**
     * Check the latest batch status in server
     *
     * @returns batchInfo from the server
     * @throws when the batch has no job id / batch id yet
     */
    async check() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        const jobId = this.job.id;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        const res = await bulk._request({
            method: 'GET',
            path: '/job/' + jobId + '/batch/' + batchId,
            responseType: 'application/xml',
        });
        logger.debug(res.batchInfo);
        return res.batchInfo;
    }
    /**
     * Polling the batch result and retrieve
     *
     * Results are delivered via the 'response' event (or 'error' on
     * failure/timeout), not via a returned promise.
     *
     * @param interval - polling interval in milliseconds
     * @param timeout - overall timeout in milliseconds
     */
    poll(interval, timeout) {
        const jobId = this.job.id;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        const startTime = new Date().getTime();
        const poll = async () => {
            const now = new Date().getTime();
            if (startTime + timeout < now) {
                const err = new PollingTimeoutError('Polling time out. Job Id = ' + jobId + ' , batch Id = ' + batchId, jobId, batchId);
                this.emit('error', err);
                return;
            }
            let res;
            try {
                res = await this.check();
            }
            catch (err) {
                this.emit('error', err);
                return;
            }
            if (res.state === 'Failed') {
                // a failed batch may still carry partial results
                if (parseInt(res.numberRecordsProcessed, 10) > 0) {
                    this.retrieve();
                }
                else {
                    this.emit('error', new Error(res.stateMessage));
                }
            }
            else if (res.state === 'Completed') {
                this.retrieve();
            }
            else {
                // still in progress: report and reschedule
                this.emit('progress', res);
                setTimeout(poll, interval);
            }
        };
        setTimeout(poll, interval);
    }
    /**
     * Retrieve batch result
     *
     * For query/queryAll jobs, resolves to { id, batchId, jobId } result
     * descriptors; for load jobs, resolves to { id, success, errors }
     * entries. Also emits 'response' with the same value.
     */
    async retrieve() {
        const bulk = this._bulk;
        const jobId = this.job.id;
        const job = this.job;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        try {
            const resp = await bulk._request({
                method: 'GET',
                path: '/job/' + jobId + '/batch/' + batchId + '/result',
            });
            let results;
            if (job.operation === 'query' || job.operation === 'queryAll') {
                const res = resp;
                const resultId = res['result-list'].result;
                // a single result id arrives as a bare value; normalize to array
                results = (Array.isArray(resultId)
                    ? resultId
                    : [resultId]).map((id) => ({ id, batchId, jobId }));
            }
            else {
                const res = resp;
                results = res.map((ret) => ({
                    id: ret.Id || null,
                    success: ret.Success === 'true',
                    errors: ret.Error ? [ret.Error] : [],
                }));
            }
            this.emit('response', results);
            return results;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Fetch query result as a record stream
     * @param {String} resultId - Result id
     * @returns {RecordStream} - Record stream, convertible to CSV data stream
     */
    result(resultId) {
        const jobId = this.job.id;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        const resultStream = new record_stream_1.Parsable();
        const resultDataStream = resultStream.stream('csv');
        this._bulk
            ._request({
            method: 'GET',
            path: '/job/' + jobId + '/batch/' + batchId + '/result/' + resultId,
            responseType: 'application/octet-stream',
        })
            .stream()
            .pipe(resultDataStream);
        return resultStream;
    }
}
exports.Batch = Batch;
|
|
553
|
+
/*--------------------------------------------*/
|
|
554
|
+
/**
 * HTTP transport adapter for the XML-based Bulk API (v1) endpoints.
 */
class BulkApi extends http_api_1.default {
    /** Attach the bulk session header derived from the connection's access token. */
    beforeSend(request) {
        const headers = Object.assign({}, request.headers);
        headers['X-SFDC-SESSION'] = this._conn.accessToken ?? '';
        request.headers = headers;
    }
    /** A 400 whose XML body reports InvalidSessionId means the session expired. */
    isSessionExpired(response) {
        if (response.statusCode !== 400) {
            return false;
        }
        return /<exceptionCode>InvalidSessionId<\/exceptionCode>/.test(response.body);
    }
    /** Bulk v1 error payloads carry an `error` object in the parsed body. */
    hasErrorInResponseBody(body) {
        return Boolean(body.error);
    }
    /** Normalize the v1 error payload to { errorCode, message }. */
    parseError(body) {
        const { exceptionCode, exceptionMessage } = body.error;
        return { errorCode: exceptionCode, message: exceptionMessage };
    }
}
|
|
578
|
+
/**
 * HTTP transport adapter for the JSON-based Bulk API v2 endpoints.
 */
class BulkApiV2 extends http_api_1.default {
    /** Bulk v2 error responses are arrays whose first element carries an errorCode. */
    hasErrorInResponseBody(body) {
        if (!Array.isArray(body)) {
            return false;
        }
        const [first] = body;
        return typeof first === 'object' && 'errorCode' in first;
    }
    /** A 401 whose body mentions INVALID_SESSION_ID means the token is stale. */
    isSessionExpired(response) {
        if (response.statusCode !== 401) {
            return false;
        }
        return /INVALID_SESSION_ID/.test(response.body);
    }
    /** Normalize the v2 error payload (first array element) to { errorCode, message }. */
    parseError(body) {
        const [{ errorCode, message }] = body;
        return { errorCode, message };
    }
}
|
|
594
|
+
/*--------------------------------------------*/
|
|
595
|
+
/**
 * Class for Bulk API
 *
 * Facade over the Bulk API (v1): creates jobs/batches and routes all
 * HTTP traffic through the /services/async endpoint via BulkApi.
 *
 * @class
 */
class Bulk {
    // underlying Connection (supplies instanceUrl, version, accessToken)
    _conn;
    // logger borrowed from the connection
    _logger;
    /**
     * Polling interval in milliseconds
     */
    pollInterval = 1000;
    /**
     * Polling timeout in milliseconds
     * @type {Number}
     */
    pollTimeout = 10000;
    /**
     * @param conn - established Connection instance
     */
    constructor(conn) {
        this._conn = conn;
        this._logger = conn._logger;
    }
    /**
     * Issue a request against the async (Bulk v1) endpoint.
     * Rewrites the relative `path` into a full /services/async/<version> URL.
     */
    _request(request_) {
        const conn = this._conn;
        const { path, responseType, ...rreq } = request_;
        const baseUrl = [conn.instanceUrl, 'services/async', conn.version].join('/');
        const request = {
            ...rreq,
            url: baseUrl + path,
        };
        return new BulkApi(this._conn, { responseType }).request(request);
    }
    /**
     * Create a job, queue a single batch with the given input, and execute it.
     *
     * @param type - sobject type
     * @param operation - bulk operation name
     * @param optionsOrInput - job options object, or the input itself
     *   (string / array / readable stream), in which case options are omitted
     * @param input - input records/CSV/stream when options were provided
     * @returns the executed Batch (thenable for the batch results)
     */
    load(type, operation, optionsOrInput, input) {
        let options = {};
        if (typeof optionsOrInput === 'string' ||
            Array.isArray(optionsOrInput) ||
            ((0, function_1.isObject)(optionsOrInput) &&
                'pipe' in optionsOrInput &&
                typeof optionsOrInput.pipe === 'function')) {
            // when options is not plain hash object, it is omitted
            input = optionsOrInput;
        }
        else {
            options = optionsOrInput;
        }
        const job = this.createJob(type, operation, options);
        const batch = job.createBatch();
        // close the job once the batch responds, or on any error other
        // than a polling timeout (the job may still complete in that case)
        const cleanup = () => job.close();
        const cleanupOnError = (err) => {
            if (err.name !== 'PollingTimeout') {
                cleanup();
            }
        };
        batch.on('response', cleanup);
        batch.on('error', cleanupOnError);
        // start polling as soon as the batch is queued on the server
        batch.on('queue', () => {
            batch?.poll(this.pollInterval, this.pollTimeout);
        });
        return batch.execute(input);
    }
    /**
     * Execute bulk query and get record stream
     *
     * @param soql - SOQL query; the FROM clause determines the sobject type
     * @returns Parsable record stream (emits 'error' on failure)
     */
    query(soql) {
        // strip parenthesized subqueries before locating the FROM sobject
        const m = soql.replace(/\([\s\S]+\)/g, '').match(/FROM\s+(\w+)/i);
        if (!m) {
            throw new Error('No sobject type found in query, maybe caused by invalid SOQL.');
        }
        const type = m[1];
        const recordStream = new record_stream_1.Parsable();
        const dataStream = recordStream.stream('csv');
        // fire-and-forget: errors are surfaced via the record stream
        (async () => {
            try {
                const results = await this.load(type, 'query', soql);
                // each result id becomes its own CSV stream; concatenate them
                const streams = results.map((result) => this.job(result.jobId)
                    .batch(result.batchId)
                    .result(result.id)
                    .stream());
                (0, multistream_1.default)(streams).pipe(dataStream);
            }
            catch (err) {
                recordStream.emit('error', err);
            }
        })();
        return recordStream;
    }
    /**
     * Create a new job instance
     */
    createJob(type, operation, options = {}) {
        return new Job(this, type, operation, options);
    }
    /**
     * Get a job instance specified by given job ID
     *
     * @param {String} jobId - Job ID
     * @returns {Bulk~Job}
     */
    job(jobId) {
        return new Job(this, null, null, null, jobId);
    }
}
exports.Bulk = Bulk;
|
|
703
|
+
/**
 * Facade over the Bulk API v2: creates ingest jobs and runs bulk queries.
 * Also serves as the default polling-options object passed to the job
 * classes (pollInterval / pollTimeout).
 */
class BulkV2 {
    // underlying Connection (private; set once in the constructor)
    #connection;
    /**
     * Polling interval in milliseconds
     */
    pollInterval = 1000;
    /**
     * Polling timeout in milliseconds
     * @type {Number}
     */
    pollTimeout = 10000;
    constructor(connection) {
        this.#connection = connection;
    }
    /**
     * Create an instance of an ingest job object.
     *
     * @params {NewIngestJobOptions} options object
     * @returns {IngestJobV2} An ingest job instance
     * @example
     * // Upsert records to the Account object.
     *
     * const job = connection.bulk2.createJob({
     *   operation: 'insert'
     *   object: 'Account',
     * });
     *
     * // create the job in the org
     * await job.open()
     *
     * // upload data
     * await job.uploadData(csvFile)
     *
     * // finished uploading data, mark it as ready for processing
     * await job.close()
     */
    createJob(options) {
        return new IngestJobV2({
            connection: this.#connection,
            jobInfo: options,
            // `this` doubles as the polling-options bag (pollInterval/pollTimeout)
            pollingOptions: this,
        });
    }
    /**
     * Get a ingest job instance specified by a given job ID
     *
     * @param options Options object with a job ID
     * @returns IngestJobV2 An ingest job
     */
    job(options) {
        return new IngestJobV2({
            connection: this.#connection,
            jobInfo: options,
            pollingOptions: this,
        });
    }
    /**
     * Create, upload, and start bulkload job
     *
     * On any failure other than a polling timeout, a best-effort delete of
     * the job is fired before the error is rethrown.
     */
    async loadAndWaitForResults(options) {
        // fall back to this instance's polling defaults when not provided
        if (!options.pollTimeout)
            options.pollTimeout = this.pollTimeout;
        if (!options.pollInterval)
            options.pollInterval = this.pollInterval;
        const job = this.createJob(options);
        try {
            await job.open();
            await job.uploadData(options.input);
            await job.close();
            await job.poll(options.pollInterval, options.pollTimeout);
            return await job.getAllResults();
        }
        catch (error) {
            const err = error;
            if (err.name !== 'JobPollingTimeoutError') {
                // fires off one last attempt to clean up and ignores the result | error
                job.delete().catch((ignored) => ignored);
            }
            throw err;
        }
    }
    /**
     * Execute bulk query and get records
     *
     * Default timeout: 10000ms
     *
     * @param soql SOQL query
     * @param BulkV2PollingOptions options object
     *
     * @returns Record[]
     */
    async query(soql, options) {
        const queryJob = new QueryJobV2({
            connection: this.#connection,
            // scanAll includes soft-deleted/archived rows via the queryAll operation
            operation: options?.scanAll ? 'queryAll' : 'query',
            query: soql,
            pollingOptions: this,
        });
        try {
            await queryJob.open();
            await queryJob.poll(options?.pollInterval, options?.pollTimeout);
            return await queryJob.getResults();
        }
        catch (error) {
            const err = error;
            if (err.name !== 'JobPollingTimeoutError') {
                // fires off one last attempt to clean up and ignores the result | error
                queryJob.delete().catch((ignored) => ignored);
            }
            throw err;
        }
    }
}
exports.BulkV2 = BulkV2;
|
|
817
|
+
/**
 * Class for Bulk API V2 query jobs.
 *
 * Wraps the `jobs/query` REST endpoints: create, abort, poll, fetch
 * results (paged via the `Sforce-Locator` header), and delete.
 */
class QueryJobV2 extends events_1.EventEmitter {
    // Connection used for all REST requests.
    #connection;
    // 'query' or 'queryAll' (the latter includes deleted/archived records).
    #operation;
    // SOQL query string submitted when the job is opened.
    #query;
    // Default pollInterval/pollTimeout supplier (the owning BulkV2 instance).
    #pollingOptions;
    // Accumulated records across result pages; cached once `finished` is set.
    #queryResults;
    // Latest error seen; kept by the default 'error' listener below.
    #error;
    // Latest job info returned by the server (undefined until `open()`).
    jobInfo;
    // Result-paging locator from the `sforce-locator` response header;
    // the string 'null' signals the last page — see getResults().
    locator;
    // True once all result pages have been fetched.
    finished = false;
    /**
     * @param options connection, operation, query, and pollingOptions.
     */
    constructor(options) {
        super();
        this.#connection = options.connection;
        this.#operation = options.operation;
        this.#query = options.query;
        this.#pollingOptions = options.pollingOptions;
        // default error handler to keep the latest error
        this.on('error', (error) => (this.#error = error));
    }
    /**
     * Creates a query job
     */
    async open() {
        try {
            this.jobInfo = await this.createQueryRequest({
                method: 'POST',
                path: '',
                body: JSON.stringify({
                    operation: this.#operation,
                    query: this.#query,
                }),
                headers: {
                    'Content-Type': 'application/json; charset=utf-8',
                },
                responseType: 'application/json',
            });
            this.emit('open');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Set the status to abort
     */
    async abort() {
        try {
            const state = 'Aborted';
            this.jobInfo = await this.createQueryRequest({
                method: 'PATCH',
                path: `/${this.jobInfo?.id}`,
                body: JSON.stringify({ state }),
                headers: { 'Content-Type': 'application/json; charset=utf-8' },
                responseType: 'application/json',
            });
            this.emit('aborted');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Poll for the state of the processing for the job.
     *
     * This method will only throw after a timeout. To capture a
     * job failure while polling you must set a listener for the
     * `failed` event before calling it:
     *
     * job.on('failed', (err) => console.error(err))
     * await job.poll()
     *
     * @param interval Polling interval in milliseconds
     * @param timeout Polling timeout in milliseconds
     * @returns {Promise<Record[]>} A promise that resolves to an array of records
     */
    async poll(interval = this.#pollingOptions.pollInterval, timeout = this.#pollingOptions.pollTimeout) {
        const jobId = getJobIdOrError(this.jobInfo);
        const startTime = Date.now();
        while (startTime + timeout > Date.now()) {
            try {
                const res = await this.check();
                switch (res.state) {
                    case 'Open':
                        throw new Error('Job has not been started');
                    case 'Aborted':
                        throw new Error('Job has been aborted');
                    case 'UploadComplete':
                    case 'InProgress':
                        // Still running — wait one interval, then re-check.
                        await delay(interval);
                        break;
                    case 'Failed':
                        // unlike ingest jobs, the API doesn't return an error msg:
                        // https://developer.salesforce.com/docs/atlas.en-us.api_asynch.meta/api_asynch/query_get_one_job.htm
                        // NOTE: emits 'failed' and returns without throwing — callers
                        // must listen for 'failed' to observe this outcome.
                        this.emit('failed', new Error('Query job failed to complete.'));
                        return;
                    case 'JobComplete':
                        this.emit('jobcomplete');
                        return;
                }
            }
            catch (err) {
                this.emit('error', err);
                throw err;
            }
        }
        const timeoutError = new JobPollingTimeoutError(`Polling timed out after ${timeout}ms. Job Id = ${jobId}`, jobId);
        this.emit('error', timeoutError);
        throw timeoutError;
    }
    /**
     * Check the latest batch status in server
     */
    async check() {
        try {
            const jobInfo = await this.createQueryRequest({
                method: 'GET',
                path: `/${getJobIdOrError(this.jobInfo)}`,
                responseType: 'application/json',
            });
            this.jobInfo = jobInfo;
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Issue an HTTP request through the connection's HTTP API, capturing
     * the `sforce-locator` response header into `this.locator` so that
     * getResults() can page through the result set.
     */
    request(request, options = {}) {
        // if request is simple string, regard it as url in GET method
        const request_ = typeof request === 'string' ? { method: 'GET', url: request } : request;
        const httpApi = new http_api_1.default(this.#connection, options);
        httpApi.on('response', (response) => {
            this.locator = response.headers['sforce-locator'];
        });
        return httpApi.request(request_);
    }
    /**
     * Build the results URL for the current job, appending the paging
     * locator as a query parameter when one has been captured.
     */
    getResultsUrl() {
        const url = `${this.#connection.instanceUrl}/services/data/v${this.#connection.version}/jobs/query/${getJobIdOrError(this.jobInfo)}/results`;
        return this.locator ? `${url}?locator=${this.locator}` : url;
    }
    /**
     * Get the results for a query job.
     *
     * @returns {Promise<Record[]>} A promise that resolves to an array of records
     */
    async getResults() {
        if (this.finished && this.#queryResults) {
            return this.#queryResults;
        }
        this.#queryResults = [];
        // The server sends the literal string 'null' in the sforce-locator
        // header on the final page; `request()` stores it into this.locator.
        while (this.locator !== 'null') {
            const nextResults = await this.request({
                method: 'GET',
                url: this.getResultsUrl(),
                headers: {
                    Accept: 'text/csv',
                },
            });
            this.#queryResults = this.#queryResults.concat(nextResults);
        }
        this.finished = true;
        return this.#queryResults;
    }
    /**
     * Deletes a query job.
     */
    async delete() {
        return this.createQueryRequest({
            method: 'DELETE',
            path: `/${getJobIdOrError(this.jobInfo)}`,
        });
    }
    /**
     * Send a request to the Bulk API V2 `jobs/query` endpoint, appending
     * `request.path` to the versioned base URL.
     */
    createQueryRequest(request) {
        const { path, responseType } = request;
        const baseUrl = [
            this.#connection.instanceUrl,
            'services/data',
            `v${this.#connection.version}`,
            'jobs/query',
        ].join('/');
        return new BulkApiV2(this.#connection, { responseType }).request({
            ...request,
            url: baseUrl + path,
        });
    }
}
exports.QueryJobV2 = QueryJobV2;
|
|
1006
|
+
/**
 * Class for Bulk API V2 Ingest Job
 *
 * Wraps the `jobs/ingest` REST endpoints: create, upload CSV data,
 * close/abort, poll for completion, fetch result sets, and delete.
 */
class IngestJobV2 extends events_1.EventEmitter {
    // Connection used for all REST requests.
    #connection;
    // Default pollInterval/pollTimeout supplier (the owning BulkV2 instance).
    #pollingOptions;
    // Writable wrapper that serializes records to CSV and uploads them.
    #jobData;
    // Caches for the three result endpoints (fetched at most once each).
    #bulkJobSuccessfulResults;
    #bulkJobFailedResults;
    #bulkJobUnprocessedRecords;
    // Latest error seen; kept by the default 'error' listener below.
    #error;
    // Latest job info; initially the options passed in, replaced by
    // server responses after open()/close()/abort()/check().
    jobInfo;
    /**
     * @param options connection, jobInfo (initial job options), and pollingOptions.
     */
    constructor(options) {
        super();
        this.#connection = options.connection;
        this.#pollingOptions = options.pollingOptions;
        this.jobInfo = options.jobInfo;
        this.#jobData = new JobDataV2({
            createRequest: (request) => this.createIngestRequest(request),
            job: this,
        });
        // default error handler to keep the latest error
        this.on('error', (error) => (this.#error = error));
    }
    /** Job ID from the latest job info (undefined until the job is opened). */
    get id() {
        return this.jobInfo.id;
    }
    /**
     * Create a job representing a bulk operation in the org
     */
    async open() {
        try {
            this.jobInfo = await this.createIngestRequest({
                method: 'POST',
                path: '',
                body: JSON.stringify({
                    assignmentRuleId: this.jobInfo?.assignmentRuleId,
                    externalIdFieldName: this.jobInfo?.externalIdFieldName,
                    object: this.jobInfo?.object,
                    operation: this.jobInfo?.operation,
                    lineEnding: this.jobInfo?.lineEnding,
                }),
                headers: {
                    'Content-Type': 'application/json; charset=utf-8',
                },
                responseType: 'application/json',
            });
            this.emit('open');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /** Upload data for a job in CSV format
     *
     * @param input CSV as a string, or array of records or readable stream
     */
    async uploadData(input) {
        await this.#jobData.execute(input);
    }
    /**
     * Fetch successful, failed, and unprocessed result sets in parallel.
     * Each set is served from its per-endpoint cache on repeat calls.
     */
    async getAllResults() {
        const [successfulResults, failedResults, unprocessedRecords,] = await Promise.all([
            this.getSuccessfulResults(),
            this.getFailedResults(),
            this.getUnprocessedRecords(),
        ]);
        return { successfulResults, failedResults, unprocessedRecords };
    }
    /**
     * Close opened job
     */
    async close() {
        try {
            // Setting state to UploadComplete tells the server that all data
            // has been uploaded and processing may begin.
            const state = 'UploadComplete';
            this.jobInfo = await this.createIngestRequest({
                method: 'PATCH',
                path: `/${this.jobInfo.id}`,
                body: JSON.stringify({ state }),
                headers: { 'Content-Type': 'application/json; charset=utf-8' },
                responseType: 'application/json',
            });
            this.emit('uploadcomplete');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Set the status to abort
     */
    async abort() {
        try {
            const state = 'Aborted';
            this.jobInfo = await this.createIngestRequest({
                method: 'PATCH',
                path: `/${this.jobInfo.id}`,
                body: JSON.stringify({ state }),
                headers: { 'Content-Type': 'application/json; charset=utf-8' },
                responseType: 'application/json',
            });
            this.emit('aborted');
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Poll for the state of the processing for the job.
     *
     * This method will only throw after a timeout. To capture a
     * job failure while polling you must set a listener for the
     * `failed` event before calling it:
     *
     * job.on('failed', (err) => console.error(err))
     * await job.poll()
     *
     * @param interval Polling interval in milliseconds
     * @param timeout Polling timeout in milliseconds
     * @returns {Promise<void>} A promise that resolves when the job finishes successfully
     */
    async poll(interval = this.#pollingOptions.pollInterval, timeout = this.#pollingOptions.pollTimeout) {
        const jobId = getJobIdOrError(this.jobInfo);
        const startTime = Date.now();
        while (startTime + timeout > Date.now()) {
            try {
                const res = await this.check();
                switch (res.state) {
                    case 'Open':
                        throw new Error('Job has not been started');
                    case 'Aborted':
                        throw new Error('Job has been aborted');
                    case 'UploadComplete':
                    case 'InProgress':
                        // Still running — wait one interval, then re-check.
                        await delay(interval);
                        break;
                    case 'Failed':
                        // NOTE: emits 'failed' and returns without throwing — callers
                        // must listen for 'failed' to observe this outcome.
                        this.emit('failed', new Error('Ingest job failed to complete.'));
                        return;
                    case 'JobComplete':
                        this.emit('jobcomplete');
                        return;
                }
            }
            catch (err) {
                this.emit('error', err);
                throw err;
            }
        }
        const timeoutError = new JobPollingTimeoutError(`Polling timed out after ${timeout}ms. Job Id = ${jobId}`, jobId);
        this.emit('error', timeoutError);
        throw timeoutError;
    }
    /**
     * Check the latest batch status in server
     */
    async check() {
        try {
            const jobInfo = await this.createIngestRequest({
                method: 'GET',
                path: `/${getJobIdOrError(this.jobInfo)}`,
                responseType: 'application/json',
            });
            this.jobInfo = jobInfo;
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Fetch the successfully processed records (cached after first call).
     */
    async getSuccessfulResults() {
        if (this.#bulkJobSuccessfulResults) {
            return this.#bulkJobSuccessfulResults;
        }
        const results = await this.createIngestRequest({
            method: 'GET',
            path: `/${getJobIdOrError(this.jobInfo)}/successfulResults`,
            responseType: 'text/csv',
        });
        this.#bulkJobSuccessfulResults = results ?? [];
        return this.#bulkJobSuccessfulResults;
    }
    /**
     * Fetch the records that failed processing (cached after first call).
     */
    async getFailedResults() {
        if (this.#bulkJobFailedResults) {
            return this.#bulkJobFailedResults;
        }
        const results = await this.createIngestRequest({
            method: 'GET',
            path: `/${getJobIdOrError(this.jobInfo)}/failedResults`,
            responseType: 'text/csv',
        });
        this.#bulkJobFailedResults = results ?? [];
        return this.#bulkJobFailedResults;
    }
    /**
     * Fetch the records the job did not process (cached after first call).
     */
    async getUnprocessedRecords() {
        if (this.#bulkJobUnprocessedRecords) {
            return this.#bulkJobUnprocessedRecords;
        }
        const results = await this.createIngestRequest({
            method: 'GET',
            path: `/${getJobIdOrError(this.jobInfo)}/unprocessedrecords`,
            responseType: 'text/csv',
        });
        this.#bulkJobUnprocessedRecords = results ?? [];
        return this.#bulkJobUnprocessedRecords;
    }
    /**
     * Deletes an ingest job.
     */
    async delete() {
        return this.createIngestRequest({
            method: 'DELETE',
            path: `/${getJobIdOrError(this.jobInfo)}`,
        });
    }
    /**
     * Send a request to the Bulk API V2 `jobs/ingest` endpoint, appending
     * `request.path` to the versioned base URL.
     */
    createIngestRequest(request) {
        const { path, responseType } = request;
        const baseUrl = [
            this.#connection.instanceUrl,
            'services/data',
            `v${this.#connection.version}`,
            'jobs/ingest',
        ].join('/');
        return new BulkApiV2(this.#connection, { responseType }).request({
            ...request,
            url: baseUrl + path,
        });
    }
}
exports.IngestJobV2 = IngestJobV2;
|
|
1242
|
+
/**
 * Writable stream that serializes records to CSV and uploads them as the
 * batch data of an ingest job (PUT /{jobId}/batches). Also exposes a
 * duplex stream and a thenable interface for the upload result.
 */
class JobDataV2 extends stream_1.Writable {
    // The owning IngestJobV2 (read for jobInfo.id and operation).
    #job;
    // Serializes written record objects into CSV.
    #uploadStream;
    // Parses CSV responses back into records.
    #downloadStream;
    // Duplex combining the upload CSV output and download CSV input.
    #dataStream;
    // Promise for the upload request's response; also guards against
    // execute() being called more than once.
    #result;
    /**
     * @param options createRequest (factory for the ingest HTTP request)
     *   and job (the owning IngestJobV2).
     */
    constructor(options) {
        super({ objectMode: true });
        const createRequest = options.createRequest;
        this.#job = options.job;
        this.#uploadStream = new record_stream_1.Serializable();
        this.#downloadStream = new record_stream_1.Parsable();
        const converterOptions = { nullValue: '#N/A' };
        const uploadDataStream = this.#uploadStream.stream('csv', converterOptions);
        const downloadDataStream = this.#downloadStream.stream('csv', converterOptions);
        this.#dataStream = (0, stream_2.concatStreamsAsDuplex)(uploadDataStream, downloadDataStream);
        this.on('finish', () => this.#uploadStream.end());
        // Defer creating the HTTP request until the first CSV bytes are
        // available, so no request is made for an unused job-data object.
        uploadDataStream.once('readable', () => {
            try {
                // pipe upload data to batch API request stream
                const req = createRequest({
                    method: 'PUT',
                    path: `/${this.#job.jobInfo?.id}/batches`,
                    headers: {
                        'Content-Type': 'text/csv',
                    },
                    responseType: 'application/json',
                });
                // Await the response out-of-band; surface outcome via events
                // ('response' resolves #result, 'error' rejects it).
                (async () => {
                    try {
                        const res = await req;
                        this.emit('response', res);
                    }
                    catch (err) {
                        this.emit('error', err);
                    }
                })();
                uploadDataStream.pipe(req.stream());
            }
            catch (err) {
                this.emit('error', err);
            }
        });
    }
    /**
     * Writable implementation: shape each record for the job's operation
     * before CSV serialization (insert drops Id; delete/hardDelete keep
     * only Id; everything else keeps Id plus the remaining fields).
     * `type` and `attributes` are always stripped.
     */
    _write(record_, enc, cb) {
        const { Id, type, attributes, ...rrec } = record_;
        let record;
        switch (this.#job.jobInfo.operation) {
            case 'insert':
                record = rrec;
                break;
            case 'delete':
            case 'hardDelete':
                record = { Id };
                break;
            default:
                record = { Id, ...rrec };
        }
        this.#uploadStream.write(record, enc, cb);
    }
    /**
     * Returns duplex stream which accepts CSV data input and batch result output
     */
    stream() {
        return this.#dataStream;
    }
    /**
     * Execute batch operation
     *
     * Accepts a readable stream, an array of records, or a CSV string.
     * May only be called once per instance.
     */
    execute(input) {
        if (this.#result) {
            throw new Error('Data can only be uploaded to a job once.');
        }
        this.#result = new Promise((resolve, reject) => {
            this.once('response', () => resolve());
            this.once('error', reject);
        });
        if ((0, function_1.isObject)(input) && 'pipe' in input && (0, function_1.isFunction)(input.pipe)) {
            // if input has stream.Readable interface
            input.pipe(this.#dataStream);
        }
        else {
            if (Array.isArray(input)) {
                for (const record of input) {
                    // Stringify booleans so CSV serialization emits 'true'/'false'.
                    // NOTE(review): this mutates the caller's record objects in place.
                    for (const key of Object.keys(record)) {
                        if (typeof record[key] === 'boolean') {
                            record[key] = String(record[key]);
                        }
                    }
                    this.write(record);
                }
                this.end();
            }
            else if (typeof input === 'string') {
                // Raw CSV string: bypass record serialization entirely.
                this.#dataStream.write(input, 'utf8');
                this.#dataStream.end();
            }
        }
        return this;
    }
    /**
     * Promise/A+ interface
     * Delegate to promise, return promise instance for batch result
     */
    then(onResolved, onReject) {
        if (this.#result === undefined) {
            this.execute();
        }
        return this.#result.then(onResolved, onReject);
    }
}
|
|
1356
|
+
/**
 * Return the job ID from a job-info object, or throw if the job has not
 * been opened yet (no ID assigned).
 *
 * @param jobInfo Job info object, possibly undefined
 * @returns The job ID
 * @throws {Error} When no job ID is present
 */
function getJobIdOrError(jobInfo) {
    if (jobInfo?.id === undefined) {
        throw new Error('No job id, maybe you need to call `job.open()` first.');
    }
    return jobInfo.id;
}
|
|
1363
|
+
/**
 * Resolve after roughly `ms` milliseconds (setTimeout-based sleep).
 *
 * @param ms Delay in milliseconds
 * @returns {Promise<void>}
 */
function delay(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
1366
|
+
/*--------------------------------------------*/
/*
 * Register hook in connection instantiation for dynamically adding this API module features
 */
// 'bulk' exposes the Bulk API v1 module, 'bulk2' the Bulk API v2 module;
// each connection gets its own lazily-constructed instance.
(0, jsforce_1.registerModule)('bulk', (conn) => new Bulk(conn));
(0, jsforce_1.registerModule)('bulk2', (conn) => new BulkV2(conn));
exports.default = Bulk;
|