@heroku/applink 1.0.0-ea

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/CHANGELOG.md +16 -0
  2. package/LICENSE +29 -0
  3. package/README.md +62 -0
  4. package/SECURITY.md +7 -0
  5. package/TERMS_OF_USE.md +24 -0
  6. package/dist/add-ons/heroku-applink.d.ts +2 -0
  7. package/dist/add-ons/heroku-applink.js +54 -0
  8. package/dist/index.d.ts +268 -0
  9. package/dist/index.js +52 -0
  10. package/dist/sdk/bulk-api.d.ts +3 -0
  11. package/dist/sdk/bulk-api.js +422 -0
  12. package/dist/sdk/context.d.ts +6 -0
  13. package/dist/sdk/context.js +11 -0
  14. package/dist/sdk/data-api.d.ts +21 -0
  15. package/dist/sdk/data-api.js +283 -0
  16. package/dist/sdk/data-cloud-api.d.ts +10 -0
  17. package/dist/sdk/data-cloud-api.js +59 -0
  18. package/dist/sdk/invocation-event.d.ts +8 -0
  19. package/dist/sdk/invocation-event.js +12 -0
  20. package/dist/sdk/logger.d.ts +10 -0
  21. package/dist/sdk/logger.js +31 -0
  22. package/dist/sdk/org.d.ts +14 -0
  23. package/dist/sdk/org.js +45 -0
  24. package/dist/sdk/sub-request.d.ts +32 -0
  25. package/dist/sdk/sub-request.js +77 -0
  26. package/dist/sdk/unit-of-work.d.ts +22 -0
  27. package/dist/sdk/unit-of-work.js +47 -0
  28. package/dist/sdk/user.d.ts +6 -0
  29. package/dist/sdk/user.js +10 -0
  30. package/dist/utils/addon-config.d.ts +7 -0
  31. package/dist/utils/addon-config.js +39 -0
  32. package/dist/utils/base-logger.d.ts +3 -0
  33. package/dist/utils/base-logger.js +27 -0
  34. package/dist/utils/create-connections.d.ts +7 -0
  35. package/dist/utils/create-connections.js +15 -0
  36. package/dist/utils/maps.d.ts +1 -0
  37. package/dist/utils/maps.js +18 -0
  38. package/dist/utils/request.d.ts +7 -0
  39. package/dist/utils/request.js +20 -0
  40. package/package.json +86 -0
package/dist/sdk/bulk-api.js
@@ -0,0 +1,422 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createBulkApi = createBulkApi;
+ const create_connections_1 = require("../utils/create-connections");
+ const bulk_1 = require("jsforce/lib/api/bulk");
+ const sync_1 = require("csv-stringify/sync");
+ const csv_stringify_1 = require("csv-stringify");
+ const http_api_1 = require("jsforce/lib/http-api");
+ const sync_2 = require("csv-parse/sync");
+ const luxon_1 = require("luxon");
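+ // Bulk API 2.0 caps a single raw CSV ingest payload at 100 MB; these
+ // thresholds drive the batching done in splitDataTable below.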
+ const SIZE_1_MB = 1000000;
+ const SIZE_100_MB = 100 * SIZE_1_MB;
+ const CSV_OPTIONS = { delimiter: "," };
+ function createBulkApi(clientOptions) {
+     const connection = (0, create_connections_1.createConnection)(clientOptions);
+     const getDefaultPollingOptions = () => {
+         return {
+             pollInterval: connection.bulk2.pollInterval,
+             pollTimeout: connection.bulk2.pollTimeout,
+         };
+     };
+     const getIngestJob = (jobReference) => {
+         return new bulk_1.IngestJobV2({
+             connection: connection,
+             jobInfo: {
+                 id: jobReference.id,
+             },
+             pollingOptions: getDefaultPollingOptions(),
+         });
+     };
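+     // QueryJobV2 takes no job id at construction, so an existing job is
+     // rehydrated by patching the id onto jobInfo afterwards.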
+     const getQueryJob = (jobReference) => {
+         const job = new bulk_1.QueryJobV2({
+             connection: connection,
+             query: undefined,
+             operation: undefined,
+             pollingOptions: getDefaultPollingOptions(),
+         });
+         job.jobInfo = Object.assign({}, job.jobInfo, { id: jobReference.id });
+         return job;
+     };
+     const bulkApi = {
+         abort(jobReference) {
+             switch (jobReference.type) {
+                 case "ingestJob":
+                     return getIngestJob(jobReference).abort();
+                 case "queryJob":
+                     return getQueryJob(jobReference).abort();
+             }
+         },
+         delete(jobReference) {
+             switch (jobReference.type) {
+                 case "ingestJob":
+                     return getIngestJob(jobReference).delete();
+                 case "queryJob":
+                     return getQueryJob(jobReference).delete();
+             }
+         },
+         getFailedResults(jobReference) {
+             return fetchIngestResults({
+                 connection,
+                 jobReference,
+                 resultType: "failedResults",
+             });
+         },
+         getInfo(jobReference) {
+             switch (jobReference.type) {
+                 case "ingestJob":
+                     return getIngestJob(jobReference).check().then(toIngestJobInfo);
+                 case "queryJob":
+                     return getQueryJob(jobReference).check().then(toQueryJobInfo);
+             }
+         },
+         getMoreQueryResults(currentResult, getQueryJobResultsOptions) {
+             return fetchQueryResults({
+                 connection,
+                 getQueryJobResultsOptions,
+                 jobReference: currentResult.jobReference,
+                 locator: currentResult.locator,
+             });
+         },
+         getQueryResults(jobReference, getQueryJobResultsOptions) {
+             return fetchQueryResults({
+                 connection,
+                 jobReference,
+                 getQueryJobResultsOptions,
+             });
+         },
+         getSuccessfulResults(jobReference) {
+             return fetchIngestResults({
+                 connection,
+                 jobReference,
+                 resultType: "successfulResults",
+             });
+         },
+         getUnprocessedRecords(jobReference) {
+             return fetchIngestResults({
+                 connection,
+                 jobReference,
+                 resultType: "unprocessedrecords",
+             });
+         },
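+         // One job per CSV batch: open, stream rows in, close to start
+         // processing. A failed batch is reported with its unprocessed rows
+         // instead of aborting the remaining batches.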
+         async ingest(options) {
+             const results = [];
+             const { dataTable } = options;
+             for await (const ingestDataTablePayload of bulkApi.splitDataTable(dataTable)) {
+                 let job;
+                 try {
+                     job = connection.bulk2.createJob(options);
+                     await job.open();
+                     await streamDataTableIntoJob(job, ingestDataTablePayload);
+                     await job.close();
+                     results.push({ id: job.id, type: "ingestJob" });
+                 }
+                 catch (e) {
+                     if (e instanceof Error) {
+                         results.push({
+                             unprocessedRecords: ingestDataTablePayload,
+                             error: toClientError(e),
+                             jobReference: typeof job?.id === "string"
+                                 ? { id: job.id, type: "ingestJob" }
+                                 : undefined,
+                         });
+                     }
+                 }
+             }
+             return results;
+         },
+         async query(options) {
+             const url = new URL([
+                 connection.instanceUrl,
+                 "services/data",
+                 `v${connection.version}`,
+                 "jobs/query",
+             ].join("/"));
+             const apiClient = new BulkApiClient(connection);
+             const job = await apiClient.request({
+                 url: url.toString(),
+                 method: "POST",
+                 body: JSON.stringify({
+                     operation: options.operation ?? "query",
+                     query: options.soql,
+                 }),
+                 headers: {
+                     "Content-Type": "application/json; charset=utf-8",
+                 },
+             });
+             return {
+                 id: job.id,
+                 type: "queryJob",
+             };
+         },
+         createDataTableBuilder(columns) {
+             const rows = [];
+             function addArrayRow(row) {
+                 addRowWithExtractor(row, (array, columnName) => {
+                     return array[columns.indexOf(columnName)];
+                 });
+             }
+             function addMapRow(row) {
+                 addRowWithExtractor(row, (map, columnName) => map.get(columnName));
+             }
+             function addRowWithExtractor(row, fieldValueExtractor) {
+                 const mappedRow = columns.reduce((acc, column) => {
+                     const value = fieldValueExtractor(row, column);
+                     acc.set(column, value);
+                     return acc;
+                 }, new Map());
+                 rows.push(mappedRow);
+             }
+             return {
+                 addRow(row, fieldValueExtractor) {
+                     if (Array.isArray(row)) {
+                         addArrayRow(row);
+                     }
+                     else if (row instanceof Map) {
+                         addMapRow(row);
+                     }
+                     else {
+                         addRowWithExtractor(row, fieldValueExtractor);
+                     }
+                     return this;
+                 },
+                 addRows(rows, fieldValueExtractor) {
+                     rows.forEach((row) => {
+                         this.addRow(row, fieldValueExtractor);
+                     });
+                     return this;
+                 },
+                 build() {
+                     return Object.assign(rows, {
+                         columns,
+                     });
+                 },
+             };
+         },
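+         // Rows are measured by serialized CSV byte length (header included)
+         // so each emitted table stays under the 100 MB ingest limit.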
+         splitDataTable(dataTable) {
+             const columns = dataTable.columns;
+             const splitDataTables = [];
+             const columnsLine = (0, sync_1.stringify)([columns], CSV_OPTIONS);
+             const columnsSize = Buffer.byteLength(columnsLine);
+             let currentSize = columnsSize;
+             let dataTableBuilder = bulkApi.createDataTableBuilder(columns);
+             dataTable.forEach((row) => {
+                 const rowValues = dataTable.columns.map((column) => row.get(column));
+                 const rowLine = (0, sync_1.stringify)([rowValues], CSV_OPTIONS);
+                 const rowSize = Buffer.byteLength(rowLine);
+                 if (currentSize + rowSize < SIZE_100_MB) {
+                     currentSize += rowSize;
+                 }
+                 else {
+                     splitDataTables.push(dataTableBuilder.build());
+                     currentSize = columnsSize + rowSize;
+                     dataTableBuilder = bulkApi.createDataTableBuilder(columns);
+                 }
+                 dataTableBuilder.addRow(row);
+             });
+             splitDataTables.push(dataTableBuilder.build());
+             return splitDataTables;
+         },
+         formatDate(value) {
+             const dateTime = luxon_1.DateTime.fromJSDate(value).toUTC();
+             if (dateTime.isValid) {
+                 return dateTime.toISODate();
+             }
+             throw new Error(`Invalid Date`);
+         },
+         formatDateTime(value) {
+             const dateTime = luxon_1.DateTime.fromJSDate(value).toUTC();
+             if (dateTime.isValid) {
+                 return dateTime.toISO();
+             }
+             throw new Error(`Invalid DateTime`);
+         },
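+         // "#N/A" is the Bulk API sentinel for explicitly nulling a field.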
+         formatNullValue() {
+             return "#N/A";
+         },
+     };
+     return bulkApi;
+ }
+ function toClientError(error) {
+     if (isClientError(error)) {
+         return error;
+     }
+     return Object.assign(error, {
+         errorCode: "UNKNOWN",
+     });
+ }
+ function isClientError(error) {
+     return typeof error.errorCode === "string";
+ }
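+ // Pipes the table through csv-stringify into the job's upload stream; the
+ // promise settles when the upload finishes or the stringifier errors.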
+ async function streamDataTableIntoJob(job, dataTable) {
+     await new Promise((resolve, reject) => {
+         const stringifier = (0, csv_stringify_1.stringify)(CSV_OPTIONS);
+         stringifier.on("error", reject);
+         job.uploadData(stringifier).then(resolve, reject);
+         stringifier.write(dataTable.columns);
+         dataTable.forEach((row) => {
+             const rowValues = dataTable.columns.map((column) => row.get(column));
+             stringifier.write(rowValues);
+         });
+         stringifier.end();
+     });
+ }
+ function toIngestJobInfo(jobInfo) {
+     return {
+         ...toJobInfo(jobInfo),
+         jobType: "V2Ingest",
+         operation: jobInfo.operation,
+         state: jobInfo.state,
+     };
+ }
+ function toQueryJobInfo(jobInfo) {
+     return {
+         ...toJobInfo(jobInfo),
+         jobType: "V2Query",
+         operation: jobInfo.operation,
+         state: jobInfo.state,
+     };
+ }
+ function toJobInfo(jobInfo) {
+     if (jobInfo.jobType === "BigObjectIngest" || jobInfo.jobType === "Classic") {
+         throw new Error(`JobType "${jobInfo.jobType}" is not supported`);
+     }
+     return {
+         ...jobInfo,
+         apiVersion: parseInt(`${jobInfo.apiVersion}`, 10),
+         columnDelimiter: "COMMA",
+         concurrencyMode: "Parallel",
+         contentType: "CSV",
+         createdById: jobInfo.createdById,
+         createdDate: jobInfo.createdDate,
+         id: jobInfo.id,
+         lineEnding: "LF",
+         object: jobInfo.object,
+         operation: jobInfo.operation,
+         state: jobInfo.state,
+         systemModstamp: jobInfo.systemModstamp,
+         jobType: jobInfo.jobType,
+     };
+ }
+ function resultsToDataTable(results, responseColumns) {
+     const columns = convertToColumns(responseColumns);
+     const rows = results.map((result) => {
+         return columns.reduce((acc, column) => {
+             acc.set(column, result[column]);
+             return acc;
+         }, new Map());
+     });
+     const dataTable = Object.assign(rows, {
+         columns,
+     });
+     return dataTable;
+ }
+ function convertToColumns(columns) {
+     if (columns.length < 1) {
+         throw new Error("parsed data table has no columns");
+     }
+     const [first, ...rest] = columns;
+     return [first, ...rest];
+ }
+ function parseColumnsFromResponse(response) {
+     try {
+         const headerLine = response.body.substring(0, response.body.indexOf("\n"));
+         return (0, sync_2.parse)(headerLine, CSV_OPTIONS)[0];
+     }
+     catch (e) {
+         return [];
+     }
+ }
+ async function fetchIngestResults(options) {
+     const { connection, jobReference } = options;
+     const url = [
+         connection.instanceUrl,
+         "services/data",
+         `v${connection.version}`,
+         "jobs/ingest",
+         jobReference.id,
+         options.resultType,
+     ].join("/");
+     const api = new BulkApiClient(connection);
+     let columns = [];
+     api.once("response", (res) => {
+         columns = parseColumnsFromResponse(res);
+     });
+     const records = await api.request({
+         method: "GET",
+         url: url.toString(),
+         headers: {
+             Accept: "text/csv",
+         },
+     });
+     return resultsToDataTable(records, columns);
+ }
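+ // Query results are paginated: Salesforce returns an opaque "sforce-locator"
+ // header while more pages remain, and the literal string "null" once the
+ // result set is exhausted.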
+ async function fetchQueryResults(options) {
+     const { connection, jobReference } = options;
+     const url = new URL([
+         connection.instanceUrl,
+         "services/data",
+         `v${connection.version}`,
+         "jobs/query",
+         jobReference.id,
+         "results",
+     ].join("/"));
+     if (options.locator) {
+         url.searchParams.set("locator", options.locator);
+     }
+     if (options.getQueryJobResultsOptions?.maxRecords) {
+         url.searchParams.set("maxRecords", `${options.getQueryJobResultsOptions.maxRecords}`);
+     }
+     const api = new BulkApiClient(connection);
+     let columns = [];
+     let locator;
+     let numberOfRecords = 0;
+     api.once("response", (res) => {
+         columns = parseColumnsFromResponse(res);
+         if ("sforce-locator" in res.headers) {
+             const headerValue = res.headers["sforce-locator"];
+             if (headerValue && headerValue !== "null") {
+                 locator = headerValue;
+             }
+         }
+         if ("sforce-numberofrecords" in res.headers) {
+             const headerValue = res.headers["sforce-numberofrecords"];
+             if (headerValue && /^\d+$/.test(headerValue)) {
+                 numberOfRecords = parseInt(headerValue, 10);
+             }
+         }
+     });
+     const records = await api.request({
+         method: "GET",
+         url: url.toString(),
+         headers: {
+             Accept: "text/csv",
+         },
+     });
+     return {
+         locator,
+         numberOfRecords,
+         jobReference,
+         done: locator === undefined,
+         dataTable: resultsToDataTable(records, columns),
+     };
+ }
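+ // Thin HttpApi subclass adding Bulk API error handling: error bodies arrive
+ // as an array of { errorCode, message } objects.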
+ class BulkApiClient extends http_api_1.HttpApi {
+     constructor(connection) {
+         super(connection, {});
+     }
+     hasErrorInResponseBody(body) {
+         return (Array.isArray(body) &&
+             typeof body[0] === "object" &&
+             "errorCode" in body[0]);
+     }
+     isSessionExpired(response) {
+         return (response.statusCode === 401 && /INVALID_SESSION_ID/.test(response.body));
+     }
+     parseError(body) {
+         return {
+             errorCode: body[0].errorCode,
+             message: body[0].message,
+         };
+     }
+ }
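
Taken together, this file suggests a usage pattern along the following lines (inside an async function). This is a minimal sketch, not taken from the package's documentation: the dist import path and the shape of clientOptions (consumed by createConnection in utils/create-connections) are assumptions, and the object and field names are placeholders.

    // Sketch only: the dist path import and clientOptions shape are assumed.
    const { createBulkApi } = require("@heroku/applink/dist/sdk/bulk-api");
    const bulkApi = createBulkApi(clientOptions);

    // Rows may be arrays, Maps, or custom shapes with an extractor callback.
    const dataTable = bulkApi
        .createDataTableBuilder(["FirstName", "LastName"])
        .addRow(["Ada", "Lovelace"])
        .addRow(["Grace", "Hopper"])
        .build();

    // ingest() splits the table into sub-100 MB CSV batches, one job each.
    const ingestResults = await bulkApi.ingest({
        object: "Contact",
        operation: "insert",
        dataTable,
    });

    // Query jobs page through results via the locator until done is true.
    const jobRef = await bulkApi.query({ soql: "SELECT Id, Name FROM Account" });
    let page = await bulkApi.getQueryResults(jobRef, { maxRecords: 10000 });
    while (!page.done) {
        page = await bulkApi.getMoreQueryResults(page);
    }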
package/dist/sdk/context.d.ts
@@ -0,0 +1,6 @@
+ import { Context, Org } from "../index.js";
+ export declare class ContextImpl implements Context {
+     readonly id: string;
+     readonly org?: Org;
+     constructor(accessToken: string, apiVersion: string, id: string, namespace: string, orgId: string, orgDomainUrl: string, userId: string, username: string, orgType: "SalesforceOrg" | "DataCloudOrg" | "DatacloudOrg");
+ }
package/dist/sdk/context.js
@@ -0,0 +1,11 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ContextImpl = void 0;
+ const org_js_1 = require("./org.js");
+ class ContextImpl {
+     constructor(accessToken, apiVersion, id, namespace, orgId, orgDomainUrl, userId, username, orgType) {
+         this.id = id;
+         this.org = new org_js_1.OrgImpl(accessToken, apiVersion, namespace, orgId, orgDomainUrl, userId, username, orgType);
+     }
+ }
+ exports.ContextImpl = ContextImpl;
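
For reference, the constructor wiring above can be exercised directly; a sketch with placeholder values (in practice the SDK presumably builds the context from an incoming request):

    const { ContextImpl } = require("@heroku/applink/dist/sdk/context.js");
    // Arguments are positional; orgType selects the org flavor.
    const ctx = new ContextImpl(
        accessToken,                          // OAuth access token (placeholder)
        "62.0",                               // API version (placeholder)
        "request-id",                         // invocation id (placeholder)
        "",                                   // namespace
        "00Dxx0000000000EAA",                 // org id (placeholder)
        "https://example.my.salesforce.com",  // org domain URL (placeholder)
        "005xx000001X8UzAAK",                 // user id (placeholder)
        "user@example.com",                   // username (placeholder)
        "SalesforceOrg"
    );
    // ctx.org is an OrgImpl carrying the same credentials.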
package/dist/sdk/data-api.d.ts
@@ -0,0 +1,21 @@
+ import { DataApi, RecordForCreate, RecordForUpdate, RecordModificationResult, RecordQueryResult, ReferenceId, UnitOfWork } from "../index";
+ export declare class DataApiImpl implements DataApi {
+     readonly accessToken: string;
+     private readonly apiVersion;
+     private conn;
+     private readonly domainUrl;
+     constructor(accessToken: string, apiVersion: string, domainUrl: string);
+     private connect;
+     private promisifyRequests;
+     create(recordCreate: RecordForCreate): Promise<RecordModificationResult>;
+     query(soql: string): Promise<RecordQueryResult>;
+     queryMore(queryResult: RecordQueryResult): Promise<RecordQueryResult>;
+     update(recordUpdate: RecordForUpdate): Promise<RecordModificationResult>;
+     delete(type: string, id: string): Promise<RecordModificationResult>;
+     newUnitOfWork(): UnitOfWork;
+     commitUnitOfWork(unitOfWork: UnitOfWork): Promise<Map<ReferenceId, RecordModificationResult>>;
+     private validate_response;
+     private validate_record_response;
+     private validate_records_response;
+     private handle_bad_response;
+ }
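
The declaration above implies a CRUD-plus-unit-of-work surface. A sketch of how it might be used inside an async function; the { type, fields } record shape and the registerCreate method are assumptions carried over from Salesforce SDK conventions, not confirmed by this diff:

    // dataApi: a DataApiImpl(accessToken, apiVersion, domainUrl) instance.
    const result = await dataApi.query("SELECT Id, Name FROM Account LIMIT 5");

    // Assumed record shape: { type, fields }.
    const created = await dataApi.create({
        type: "Account",
        fields: { Name: "Acme" },
    });

    // Queue operations, then commit them in a single round trip; the returned
    // map is keyed by the ReferenceId handed out at registration time.
    const uow = dataApi.newUnitOfWork();
    const ref = uow.registerCreate({ type: "Contact", fields: { LastName: "Doe" } }); // assumed API
    const byRef = await dataApi.commitUnitOfWork(uow);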