@jsforce/jsforce-node 0.0.1 → 3.0.0-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +54 -0
- package/index.d.ts +4 -0
- package/index.js +1 -0
- package/lib/VERSION.d.ts +2 -0
- package/lib/VERSION.js +3 -0
- package/lib/api/analytics/types.d.ts +509 -0
- package/lib/api/analytics/types.js +2 -0
- package/lib/api/analytics.d.ts +163 -0
- package/lib/api/analytics.js +342 -0
- package/lib/api/apex.d.ts +44 -0
- package/lib/api/apex.js +86 -0
- package/lib/api/bulk.d.ts +253 -0
- package/lib/api/bulk.js +678 -0
- package/lib/api/bulk2.d.ts +324 -0
- package/lib/api/bulk2.js +800 -0
- package/lib/api/chatter.d.ts +133 -0
- package/lib/api/chatter.js +248 -0
- package/lib/api/metadata/schema.d.ts +16117 -0
- package/lib/api/metadata/schema.js +9094 -0
- package/lib/api/metadata.d.ts +189 -0
- package/lib/api/metadata.js +406 -0
- package/lib/api/soap/schema.d.ts +3167 -0
- package/lib/api/soap/schema.js +1787 -0
- package/lib/api/soap.d.ts +76 -0
- package/lib/api/soap.js +155 -0
- package/lib/api/streaming/extension.d.ts +94 -0
- package/lib/api/streaming/extension.js +151 -0
- package/lib/api/streaming.d.ts +160 -0
- package/lib/api/streaming.js +252 -0
- package/lib/api/tooling.d.ts +284 -0
- package/lib/api/tooling.js +202 -0
- package/lib/api/wsdl/wsdl2schema.d.ts +1 -0
- package/lib/api/wsdl/wsdl2schema.js +354 -0
- package/lib/browser/canvas.d.ts +12 -0
- package/lib/browser/canvas.js +77 -0
- package/lib/browser/client.d.ts +82 -0
- package/lib/browser/client.js +244 -0
- package/lib/browser/jsonp.d.ts +12 -0
- package/lib/browser/jsonp.js +69 -0
- package/lib/browser/registry.d.ts +3 -0
- package/lib/browser/registry.js +5 -0
- package/lib/browser/request.d.ts +10 -0
- package/lib/browser/request.js +202 -0
- package/lib/cache.d.ts +74 -0
- package/lib/cache.js +159 -0
- package/lib/connection.d.ts +356 -0
- package/lib/connection.js +1153 -0
- package/lib/core.d.ts +17 -0
- package/lib/core.js +55 -0
- package/lib/csv.d.ts +23 -0
- package/lib/csv.js +35 -0
- package/lib/date.d.ts +82 -0
- package/lib/date.js +201 -0
- package/lib/http-api.d.ts +75 -0
- package/lib/http-api.js +295 -0
- package/lib/index.d.ts +13 -0
- package/lib/index.js +32 -0
- package/lib/jsforce.d.ts +26 -0
- package/lib/jsforce.js +67 -0
- package/lib/jwtOAuth2.d.ts +8 -0
- package/lib/jwtOAuth2.js +23 -0
- package/lib/oauth2.d.ts +92 -0
- package/lib/oauth2.js +245 -0
- package/lib/process.d.ts +157 -0
- package/lib/process.js +143 -0
- package/lib/query.d.ts +341 -0
- package/lib/query.js +817 -0
- package/lib/quick-action.d.ts +44 -0
- package/lib/quick-action.js +46 -0
- package/lib/record-reference.d.ts +46 -0
- package/lib/record-reference.js +65 -0
- package/lib/record-stream.d.ts +83 -0
- package/lib/record-stream.js +233 -0
- package/lib/registry/base.d.ts +43 -0
- package/lib/registry/base.js +96 -0
- package/lib/registry/empty.d.ts +7 -0
- package/lib/registry/empty.js +13 -0
- package/lib/registry/file.d.ts +11 -0
- package/lib/registry/file.js +51 -0
- package/lib/registry/index.d.ts +8 -0
- package/lib/registry/index.js +21 -0
- package/lib/registry/sfdx.d.ts +56 -0
- package/lib/registry/sfdx.js +133 -0
- package/lib/registry/types.d.ts +47 -0
- package/lib/registry/types.js +2 -0
- package/lib/request-helper.d.ts +23 -0
- package/lib/request-helper.js +102 -0
- package/lib/request.d.ts +11 -0
- package/lib/request.js +75 -0
- package/lib/session-refresh-delegate.d.ts +31 -0
- package/lib/session-refresh-delegate.js +69 -0
- package/lib/soap.d.ts +60 -0
- package/lib/soap.js +257 -0
- package/lib/sobject.d.ts +258 -0
- package/lib/sobject.js +376 -0
- package/lib/soql-builder.d.ts +25 -0
- package/lib/soql-builder.js +226 -0
- package/lib/transport.d.ts +63 -0
- package/lib/transport.js +175 -0
- package/lib/types/common.d.ts +560 -0
- package/lib/types/common.js +2 -0
- package/lib/types/index.d.ts +7 -0
- package/lib/types/index.js +23 -0
- package/lib/types/projection.d.ts +26 -0
- package/lib/types/projection.js +2 -0
- package/lib/types/record.d.ts +44 -0
- package/lib/types/record.js +2 -0
- package/lib/types/schema.d.ts +50 -0
- package/lib/types/schema.js +2 -0
- package/lib/types/soap.d.ts +43 -0
- package/lib/types/soap.js +2 -0
- package/lib/types/standard-schema.d.ts +16199 -0
- package/lib/types/standard-schema.js +2 -0
- package/lib/types/util.d.ts +7 -0
- package/lib/types/util.js +2 -0
- package/lib/util/formatter.d.ts +8 -0
- package/lib/util/formatter.js +24 -0
- package/lib/util/function.d.ts +32 -0
- package/lib/util/function.js +52 -0
- package/lib/util/get-body-size.d.ts +4 -0
- package/lib/util/get-body-size.js +39 -0
- package/lib/util/logger.d.ts +29 -0
- package/lib/util/logger.js +102 -0
- package/lib/util/promise.d.ts +19 -0
- package/lib/util/promise.js +25 -0
- package/lib/util/stream.d.ts +12 -0
- package/lib/util/stream.js +88 -0
- package/package.json +262 -6
- package/typings/faye/index.d.ts +16 -0
- package/typings/index.d.ts +1 -0
package/lib/api/bulk.js
ADDED
@@ -0,0 +1,678 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Bulk = exports.Batch = exports.Job = void 0;
/**
 * @file Manages Salesforce Bulk API related operations
 * @author Shinichi Tomita <shinichi.tomita@gmail.com>
 */
const events_1 = require("events");
const stream_1 = require("stream");
const multistream_1 = __importDefault(require("multistream"));
const record_stream_1 = require("../record-stream");
const http_api_1 = __importDefault(require("../http-api"));
const jsforce_1 = require("../jsforce");
const stream_2 = require("../util/stream");
const is_1 = __importDefault(require("@sindresorhus/is"));
/**
 * Class for Bulk API Job
 */
class Job extends events_1.EventEmitter {
    type;
    operation;
    options;
    id;
    state;
    _bulk;
    _batches;
    _jobInfo;
    _error;
    /**
     *
     */
    constructor(bulk, type, operation, options, jobId) {
        super();
        this._bulk = bulk;
        this.type = type;
        this.operation = operation;
        this.options = options || {};
        this.id = jobId ?? null;
        this.state = this.id ? 'Open' : 'Unknown';
        this._batches = {};
        // default error handler to keep the latest error
        this.on('error', (error) => (this._error = error));
    }
    /**
     * Return latest jobInfo from cache
     */
    info() {
        // if cache is not available, check the latest
        if (!this._jobInfo) {
            this._jobInfo = this.check();
        }
        return this._jobInfo;
    }
    /**
     * Open new job and get jobinfo
     */
    open() {
        const bulk = this._bulk;
        const options = this.options;
        // if sobject type / operation is not provided
        if (!this.type || !this.operation) {
            throw new Error('type / operation is required to open a new job');
        }
        // if not requested opening job
        if (!this._jobInfo) {
            let operation = this.operation.toLowerCase();
            if (operation === 'harddelete') {
                operation = 'hardDelete';
            }
            if (operation === 'queryall') {
                operation = 'queryAll';
            }
            const body = `
<?xml version="1.0" encoding="UTF-8"?>
<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">
<operation>${operation}</operation>
<object>${this.type}</object>
${options.extIdField
                ? `<externalIdFieldName>${options.extIdField}</externalIdFieldName>`
                : ''}
${options.concurrencyMode
                ? `<concurrencyMode>${options.concurrencyMode}</concurrencyMode>`
                : ''}
${options.assignmentRuleId
                ? `<assignmentRuleId>${options.assignmentRuleId}</assignmentRuleId>`
                : ''}
<contentType>CSV</contentType>
</jobInfo>
`.trim();
            this._jobInfo = (async () => {
                try {
                    const res = await bulk._request({
                        method: 'POST',
                        path: '/job',
                        body,
                        headers: {
                            'Content-Type': 'application/xml; charset=utf-8',
                        },
                        responseType: 'application/xml',
                    });
                    this.emit('open', res.jobInfo);
                    this.id = res.jobInfo.id;
                    this.state = res.jobInfo.state;
                    return res.jobInfo;
                }
                catch (err) {
                    this.emit('error', err);
                    throw err;
                }
            })();
        }
        return this._jobInfo;
    }
    /**
     * Create a new batch instance in the job
     */
    createBatch() {
        const batch = new Batch(this);
        batch.on('queue', () => {
            this._batches[batch.id] = batch;
        });
        return batch;
    }
    /**
     * Get a batch instance specified by given batch ID
     */
    batch(batchId) {
        let batch = this._batches[batchId];
        if (!batch) {
            batch = new Batch(this, batchId);
            this._batches[batchId] = batch;
        }
        return batch;
    }
    /**
     * Check the latest job status from server
     */
    check() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        this._jobInfo = (async () => {
            const jobId = await this.ready();
            const res = await bulk._request({
                method: 'GET',
                path: '/job/' + jobId,
                responseType: 'application/xml',
            });
            logger.debug(res.jobInfo);
            this.id = res.jobInfo.id;
            this.type = res.jobInfo.object;
            this.operation = res.jobInfo.operation;
            this.state = res.jobInfo.state;
            return res.jobInfo;
        })();
        return this._jobInfo;
    }
    /**
     * Wait till the job is assigned to server
     */
    ready() {
        return this.id
            ? Promise.resolve(this.id)
            : this.open().then(({ id }) => id);
    }
    /**
     * List all registered batch info in job
     */
    async list() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        const jobId = await this.ready();
        const res = await bulk._request({
            method: 'GET',
            path: '/job/' + jobId + '/batch',
            responseType: 'application/xml',
        });
        logger.debug(res.batchInfoList.batchInfo);
        const batchInfoList = Array.isArray(res.batchInfoList.batchInfo)
            ? res.batchInfoList.batchInfo
            : [res.batchInfoList.batchInfo];
        return batchInfoList;
    }
    /**
     * Close opened job
     */
    async close() {
        if (!this.id) {
            return;
        }
        try {
            const jobInfo = await this._changeState('Closed');
            this.id = null;
            this.emit('close', jobInfo);
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Set the status to abort
     */
    async abort() {
        if (!this.id) {
            return;
        }
        try {
            const jobInfo = await this._changeState('Aborted');
            this.id = null;
            this.emit('abort', jobInfo);
            return jobInfo;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * @private
     */
    async _changeState(state) {
        const bulk = this._bulk;
        const logger = bulk._logger;
        this._jobInfo = (async () => {
            const jobId = await this.ready();
            const body = `
<?xml version="1.0" encoding="UTF-8"?>
<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">
<state>${state}</state>
</jobInfo>
`.trim();
            const res = await bulk._request({
                method: 'POST',
                path: '/job/' + jobId,
                body: body,
                headers: {
                    'Content-Type': 'application/xml; charset=utf-8',
                },
                responseType: 'application/xml',
            });
            logger.debug(res.jobInfo);
            this.state = res.jobInfo.state;
            return res.jobInfo;
        })();
        return this._jobInfo;
    }
}
exports.Job = Job;
/*--------------------------------------------*/
class PollingTimeoutError extends Error {
    jobId;
    batchId;
    /**
     *
     */
    constructor(message, jobId, batchId) {
        super(message);
        this.name = 'PollingTimeout';
        this.jobId = jobId;
        this.batchId = batchId;
    }
}
/*--------------------------------------------*/
/**
 * Batch (extends Writable)
 */
class Batch extends stream_1.Writable {
    job;
    id;
    _bulk;
    _uploadStream;
    _downloadStream;
    _dataStream;
    _result;
    _error;
    /**
     *
     */
    constructor(job, id) {
        super({ objectMode: true });
        this.job = job;
        this.id = id;
        this._bulk = job._bulk;
        // default error handler to keep the latest error
        this.on('error', (error) => (this._error = error));
        //
        // setup data streams
        //
        const converterOptions = { nullValue: '#N/A' };
        const uploadStream = (this._uploadStream = new record_stream_1.Serializable());
        const uploadDataStream = uploadStream.stream('csv', converterOptions);
        const downloadStream = (this._downloadStream = new record_stream_1.Parsable());
        const downloadDataStream = downloadStream.stream('csv', converterOptions);
        this.on('finish', () => uploadStream.end());
        uploadDataStream.once('readable', async () => {
            try {
                // ensure the job is opened in server or job id is already assigned
                await this.job.ready();
                // pipe upload data to batch API request stream
                uploadDataStream.pipe(this._createRequestStream());
            }
            catch (err) {
                this.emit('error', err);
            }
        });
        // duplex data stream, opened access to API programmers by Batch#stream()
        this._dataStream = (0, stream_2.concatStreamsAsDuplex)(uploadDataStream, downloadDataStream);
    }
    /**
     * Connect batch API and create stream instance of request/response
     *
     * @private
     */
    _createRequestStream() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        const req = bulk._request({
            method: 'POST',
            path: '/job/' + this.job.id + '/batch',
            headers: {
                'Content-Type': 'text/csv',
            },
            responseType: 'application/xml',
        });
        (async () => {
            try {
                const res = await req;
                logger.debug(res.batchInfo);
                this.id = res.batchInfo.id;
                this.emit('queue', res.batchInfo);
            }
            catch (err) {
                this.emit('error', err);
            }
        })();
        return req.stream();
    }
    /**
     * Implementation of Writable
     */
    _write(record_, enc, cb) {
        const { Id, type, attributes, ...rrec } = record_;
        let record;
        switch (this.job.operation) {
            case 'insert':
                record = rrec;
                break;
            case 'delete':
            case 'hardDelete':
                record = { Id };
                break;
            default:
                record = { Id, ...rrec };
        }
        this._uploadStream.write(record, enc, cb);
    }
    /**
     * Returns duplex stream which accepts CSV data input and batch result output
     */
    stream() {
        return this._dataStream;
    }
    /**
     * Execute batch operation
     */
    execute(input) {
        // if batch is already executed
        if (this._result) {
            throw new Error('Batch already executed.');
        }
        this._result = new Promise((resolve, reject) => {
            this.once('response', resolve);
            this.once('error', reject);
        });
        if (is_1.default.nodeStream(input)) {
            // if input has stream.Readable interface
            input.pipe(this._dataStream);
        }
        else {
            const recordData = structuredClone(input);
            if (Array.isArray(recordData)) {
                for (const record of recordData) {
                    for (const key of Object.keys(record)) {
                        if (typeof record[key] === 'boolean') {
                            record[key] = String(record[key]);
                        }
                    }
                    this.write(record);
                }
                this.end();
            }
            else if (typeof recordData === 'string') {
                this._dataStream.write(recordData, 'utf8');
                this._dataStream.end();
            }
        }
        // return Batch instance for chaining
        return this;
    }
    run = this.execute;
    exec = this.execute;
    /**
     * Promise/A+ interface
     * Delegate to promise, return promise instance for batch result
     */
    then(onResolved, onReject) {
        if (!this._result) {
            this.execute();
        }
        return this._result.then(onResolved, onReject);
    }
    /**
     * Check the latest batch status in server
     */
    async check() {
        const bulk = this._bulk;
        const logger = bulk._logger;
        const jobId = this.job.id;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        const res = await bulk._request({
            method: 'GET',
            path: '/job/' + jobId + '/batch/' + batchId,
            responseType: 'application/xml',
        });
        logger.debug(res.batchInfo);
        return res.batchInfo;
    }
    /**
     * Polling the batch result and retrieve
     */
    poll(interval, timeout) {
        const jobId = this.job.id;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        const startTime = new Date().getTime();
        const endTime = startTime + timeout;
        if (timeout === 0) {
            throw new PollingTimeoutError(`Skipping polling because of timeout = 0ms. Job Id = ${jobId} | Batch Id = ${batchId}`, jobId, batchId);
        }
        const poll = async () => {
            const now = new Date().getTime();
            if (endTime < now) {
                const err = new PollingTimeoutError('Polling time out. Job Id = ' + jobId + ' , batch Id = ' + batchId, jobId, batchId);
                this.emit('error', err);
                return;
            }
            let res;
            try {
                res = await this.check();
            }
            catch (err) {
                this.emit('error', err);
                return;
            }
            if (res.state === 'Failed') {
                if (parseInt(res.numberRecordsProcessed, 10) > 0) {
                    this.retrieve();
                }
                else {
                    this.emit('error', new Error(res.stateMessage));
                }
            }
            else if (res.state === 'Completed') {
                this.retrieve();
            }
            else {
                this.emit('inProgress', res);
                setTimeout(poll, interval);
            }
        };
        setTimeout(poll, interval);
    }
    /**
     * Retrieve batch result
     */
    async retrieve() {
        const bulk = this._bulk;
        const jobId = this.job.id;
        const job = this.job;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        try {
            const resp = await bulk._request({
                method: 'GET',
                path: '/job/' + jobId + '/batch/' + batchId + '/result',
            });
            let results;
            if (job.operation === 'query' || job.operation === 'queryAll') {
                const res = resp;
                const resultId = res['result-list'].result;
                results = (Array.isArray(resultId)
                    ? resultId
                    : [resultId]).map((id) => ({ id, batchId, jobId }));
            }
            else {
                const res = resp;
                results = res.map((ret) => ({
                    id: ret.Id || null,
                    success: ret.Success === 'true',
                    errors: ret.Error ? [ret.Error] : [],
                }));
            }
            this.emit('response', results);
            return results;
        }
        catch (err) {
            this.emit('error', err);
            throw err;
        }
    }
    /**
     * Fetch query batch result as a record stream
     *
     * @param {String} resultId - Result id
     * @returns {RecordStream} - Record stream, convertible to CSV data stream
     */
    result(resultId) {
        const jobId = this.job.id;
        const batchId = this.id;
        if (!jobId || !batchId) {
            throw new Error('Batch not started.');
        }
        const resultStream = new record_stream_1.Parsable();
        const resultDataStream = resultStream.stream('csv');
        this._bulk
            ._request({
            method: 'GET',
            path: '/job/' + jobId + '/batch/' + batchId + '/result/' + resultId,
            responseType: 'application/octet-stream',
        })
            .stream()
            .pipe(resultDataStream);
        return resultStream;
    }
}
exports.Batch = Batch;
/*--------------------------------------------*/
/**
 *
 */
class BulkApi extends http_api_1.default {
    beforeSend(request) {
        request.headers = {
            ...request.headers,
            'X-SFDC-SESSION': this._conn.accessToken ?? '',
        };
    }
    isSessionExpired(response) {
        return (response.statusCode === 400 &&
            /<exceptionCode>InvalidSessionId<\/exceptionCode>/.test(response.body));
    }
    hasErrorInResponseBody(body) {
        return !!body.error;
    }
    parseError(body) {
        return {
            errorCode: body.error.exceptionCode,
            message: body.error.exceptionMessage,
        };
    }
}
/*--------------------------------------------*/
/**
 * Class for Bulk API
 *
 * @class
 */
class Bulk {
    _conn;
    _logger;
    /**
     * Polling interval in milliseconds
     *
     * Default: 1000 (1 second)
     */
    pollInterval = 1000;
    /**
     * Polling timeout in milliseconds
     *
     * Default: 30000 (30 seconds)
     */
    pollTimeout = 30000;
    /**
     *
     */
    constructor(conn) {
        this._conn = conn;
        this._logger = conn._logger;
    }
    /**
     *
     */
    _request(request_) {
        const conn = this._conn;
        const { path, responseType, ...rreq } = request_;
        const baseUrl = [conn.instanceUrl, 'services/async', conn.version].join('/');
        const request = {
            ...rreq,
            url: baseUrl + path,
        };
        return new BulkApi(this._conn, { responseType }).request(request);
    }
    load(type, operation, optionsOrInput, input) {
        let options = {};
        if (typeof optionsOrInput === 'string' ||
            Array.isArray(optionsOrInput) ||
            is_1.default.nodeStream(optionsOrInput)) {
            // when options is not plain hash object, it is omitted
            input = optionsOrInput;
        }
        else {
            options = optionsOrInput;
        }
        const job = this.createJob(type, operation, options);
        const batch = job.createBatch();
        const cleanup = () => job.close();
        const cleanupOnError = (err) => {
            if (err.name !== 'PollingTimeout') {
                cleanup();
            }
        };
        batch.on('response', cleanup);
        batch.on('error', cleanupOnError);
        batch.on('queue', () => {
            batch?.poll(this.pollInterval, this.pollTimeout);
        });
        return batch.execute(input);
    }
    /**
     * Execute bulk query and get record stream
     */
    async query(soql) {
        const m = soql.replace(/\([\s\S]+\)/g, '').match(/FROM\s+(\w+)/i);
        if (!m) {
            throw new Error('No sobject type found in query, maybe caused by invalid SOQL.');
        }
        const type = m[1];
        const recordStream = new record_stream_1.Parsable();
        const dataStream = recordStream.stream('csv');
        const results = await this.load(type, 'query', soql);
        const streams = results.map((result) => this.job(result.jobId).batch(result.batchId).result(result.id).stream());
        (0, multistream_1.default)(streams).pipe(dataStream);
        return recordStream;
    }
    /**
     * Create a new job instance
     */
    createJob(type, operation, options = {}) {
        return new Job(this, type, operation, options);
    }
    /**
     * Get a job instance specified by given job ID
     *
     * @param {String} jobId - Job ID
     * @returns {Bulk~Job}
     */
    job(jobId) {
        return new Job(this, null, null, null, jobId);
    }
}
exports.Bulk = Bulk;
/*--------------------------------------------*/
/*
 * Register hook in connection instantiation for dynamically adding this API module features
 */
(0, jsforce_1.registerModule)('bulk', (conn) => new Bulk(conn));
exports.default = Bulk;