zapier-platform-core 12.0.2 → 12.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "zapier-platform-core",
3
- "version": "12.0.2",
3
+ "version": "12.1.0",
4
4
  "description": "The core SDK for CLI apps in the Zapier Developer Platform.",
5
5
  "repository": "zapier/zapier-platform",
6
6
  "homepage": "https://platform.zapier.com/",
@@ -47,10 +47,11 @@
47
47
  "form-data": "4.0.0",
48
48
  "lodash": "4.17.21",
49
49
  "mime-types": "2.1.34",
50
+ "node-abort-controller": "3.0.1",
50
51
  "node-fetch": "2.6.7",
51
52
  "oauth-sign": "0.9.0",
52
53
  "semver": "7.3.5",
53
- "zapier-platform-schema": "12.0.2"
54
+ "zapier-platform-schema": "12.1.0"
54
55
  },
55
56
  "devDependencies": {
56
57
  "adm-zip": "0.5.5",
@@ -76,4 +76,4 @@ const logResponse = (resp) => {
76
76
  .catch(() => resp);
77
77
  };
78
78
 
79
- module.exports = logResponse;
79
+ module.exports = { logResponse, prepareRequestLog };
@@ -16,7 +16,7 @@ const oauth1SignRequest = require('../http-middlewares/before/oauth1-sign-reques
16
16
  const prepareRequest = require('../http-middlewares/before/prepare-request');
17
17
 
18
18
  // after middles
19
- const logResponse = require('../http-middlewares/after/log-response');
19
+ const { logResponse } = require('../http-middlewares/after/log-response');
20
20
  const prepareResponse = require('../http-middlewares/after/prepare-response');
21
21
  const throwForStaleAuth = require('../http-middlewares/after/throw-for-stale-auth');
22
22
  const throwForStatusMiddleware = require('../http-middlewares/after/throw-for-status');
@@ -1,11 +1,14 @@
1
1
  'use strict';
2
2
 
3
+ const { promisify } = require('util');
3
4
  const { Transform } = require('stream');
5
+ const { parse: querystringParse } = require('querystring');
4
6
 
5
7
  const _ = require('lodash');
8
+ const { AbortController } = require('node-abort-controller');
6
9
 
7
10
  const request = require('./request-client-internal');
8
- const { simpleTruncate, recurseReplace } = require('./data');
11
+ const { simpleTruncate, recurseReplace, truncateData } = require('./data');
9
12
  const {
10
13
  DEFAULT_LOGGING_HTTP_API_KEY,
11
14
  DEFAULT_LOGGING_HTTP_ENDPOINT,
@@ -18,12 +21,19 @@ const {
18
21
  recurseExtract,
19
22
  } = require('@zapier/secret-scrubber');
20
23
  // not really a public function, but it came from here originally
21
- const { isUrlWithSecrets } = require('@zapier/secret-scrubber/lib/convenience');
24
+ const {
25
+ isUrlWithSecrets,
26
+ isSensitiveKey,
27
+ } = require('@zapier/secret-scrubber/lib/convenience');
22
28
 
23
29
  // The payload size per request to stream logs. This should be slightly lower
24
30
  // than the limit (16 MB) on the server side.
25
31
  const LOG_STREAM_BYTES_LIMIT = 15 * 1024 * 1024;
26
32
 
33
+ const DEFAULT_LOGGER_TIMEOUT = 200;
34
+
35
+ const sleep = promisify(setTimeout);
36
+
27
37
  const isUrl = (url) => {
28
38
  try {
29
39
  // eslint-disable-next-line no-new
@@ -34,7 +44,8 @@ const isUrl = (url) => {
34
44
  }
35
45
  };
36
46
 
37
- const truncate = (str) => simpleTruncate(str, 3500, ' [...]');
47
+ const MAX_LENGTH = 3500;
48
+ const truncateString = (str) => simpleTruncate(str, MAX_LENGTH, ' [...]');
38
49
 
39
50
  const formatHeaders = (headers = {}) => {
40
51
  if (_.isEmpty(headers)) {
@@ -70,7 +81,7 @@ const httpDetailsLogMessage = (data) => {
70
81
  (result, value, key) => {
71
82
  result[key] = value;
72
83
  if (typeof value === 'string') {
73
- result[key] = truncate(value);
84
+ result[key] = truncateString(value);
74
85
  }
75
86
  return result;
76
87
  },
@@ -108,28 +119,61 @@ const toStdout = (event, msg, data) => {
108
119
  }
109
120
  };
110
121
 
122
+ // try to parse json; if successful, find secrets in it
123
+ const attemptFindSecretsInStr = (s) => {
124
+ let parsedRespContent;
125
+ try {
126
+ parsedRespContent = JSON.parse(s);
127
+ } catch {
128
+ return [];
129
+ }
130
+ return findSensitiveValues(parsedRespContent);
131
+ };
132
+
111
133
  const buildSensitiveValues = (event, data) => {
112
134
  const bundle = event.bundle || {};
113
135
  const authData = bundle.authData || {};
114
136
  // for the most part, we should censor all the values from authData
115
137
  // the exception is safe urls, which should be filtered out - we want those to be logged
138
+ // but, we _should_ censor-no-matter-what sensitive keys, even if their value is a safe url
139
+ // this covers the case where someone's password is a valid url ¯\_(ツ)_/¯
116
140
  const sensitiveAuthData = recurseExtract(authData, (key, value) => {
141
+ if (isSensitiveKey(key)) {
142
+ return true;
143
+ }
144
+
117
145
  if (isUrl(value) && !isUrlWithSecrets(value)) {
118
146
  return false;
119
147
  }
120
148
  return true;
121
149
  });
122
- return [
150
+
151
+ const result = [
123
152
  ...sensitiveAuthData,
124
153
  ...findSensitiveValues(process.env),
125
154
  ...findSensitiveValues(data),
126
155
  ];
156
+
157
+ // for our http logs (generated by prepareRequestLog), make sure that we try to parse the content to find any new strings
158
+ // (such as what comes back in the response during an auth refresh)
159
+
160
+ for (const prop of ['response_content', 'request_data']) {
161
+ if (data[prop]) {
162
+ result.push(...attemptFindSecretsInStr(data[prop]));
163
+ }
164
+ }
165
+ if (data.request_params) {
166
+ result.push(...findSensitiveValues(querystringParse(data.request_params)));
167
+ }
168
+ // unique - no point in duplicates
169
+ return [...new Set(result)];
127
170
  };
128
171
 
129
172
  class LogStream extends Transform {
130
173
  constructor(options) {
131
174
  super(options);
132
175
  this.bytesWritten = 0;
176
+ this.controller = new AbortController();
133
177
  this.request = this._newRequest(options.url, options.token);
134
178
  }
135
179
 
@@ -142,15 +186,20 @@ class LogStream extends Transform {
142
186
  'X-Token': token,
143
187
  },
144
188
  body: this,
189
+ signal: this.controller.signal,
145
190
  };
146
191
  return request(httpOptions).catch((err) => {
192
+ if (err.name === 'AbortError') {
193
+ return {
194
+ status: 200,
195
+ content: 'aborted',
196
+ };
197
+ }
198
+
147
199
  // Swallow logging errors. This will show up in AWS logs at least.
148
- console.error(
149
- 'Error making log request:',
150
- err,
151
- 'http options:',
152
- httpOptions
153
- );
200
+ // Don't need to log for AbortError because that happens when we abort
201
+ // on purpose.
202
+ console.error('Error making log request:', err);
154
203
  });
155
204
  }
156
205
 
@@ -159,6 +208,10 @@ class LogStream extends Transform {
159
208
  this.bytesWritten += Buffer.byteLength(chunk, encoding);
160
209
  callback();
161
210
  }
211
+
212
+ abort() {
213
+ this.controller.abort();
214
+ }
162
215
  }
163
216
 
164
217
  // Implements singleton for LogStream. The goal is for every sendLog() call we
@@ -185,17 +238,35 @@ class LogStreamFactory {
185
238
  return this._logStream;
186
239
  }
187
240
 
188
- async end() {
241
+ // Ends the logger and gets a response from the log server. Optionally takes
242
+ // timeoutToAbort to specify how many milliseconds we want to wait before
243
+ // force aborting the connection to the log server.
244
+ async end(timeoutToAbort = DEFAULT_LOGGER_TIMEOUT) {
189
245
  // Mark the factory as ended. This suggests that any logStream.write() that
190
246
  // follows should end() right away.
191
247
  this.ended = true;
248
+ let response;
192
249
 
193
250
  if (this._logStream) {
194
251
  this._logStream.end();
195
- const response = await this._logStream.request;
252
+
253
+ const clock =
254
+ timeoutToAbort > 0 ? sleep(timeoutToAbort) : Promise.resolve(undefined);
255
+ const responsePromise = this._logStream.request;
256
+
257
+ const result = await Promise.race([clock, responsePromise]);
258
+ const isTimeout = !result;
259
+ if (isTimeout) {
260
+ this._logStream.abort();
261
+ // Expect to get a `{content: 'aborted'}` response
262
+ response = await responsePromise;
263
+ } else {
264
+ response = result;
265
+ }
266
+
196
267
  this._logStream = null;
197
- return response;
198
268
  }
269
+ return response;
199
270
  }
200
271
  }
201
272
 
@@ -207,15 +278,21 @@ const sendLog = async (logStreamFactory, options, event, message, data) => {
207
278
  data.response_headers = unheader(data.response_headers);
208
279
 
209
280
  const sensitiveValues = buildSensitiveValues(event, data);
281
+
282
+ // data.input and data.output have the ability to grow unbounded; the following caps the size to a reasonable amount
283
+ if (data.log_type === 'bundle') {
284
+ data.input = truncateData(data.input, MAX_LENGTH);
285
+ data.output = truncateData(data.output, MAX_LENGTH);
286
+ }
210
287
  // scrub throws an error if there are no secrets
211
- const safeMessage = truncate(
288
+ const safeMessage = truncateString(
212
289
  sensitiveValues.length ? scrub(message, sensitiveValues) : message
213
290
  );
214
291
  const safeData = recurseReplace(
215
292
  sensitiveValues.length ? scrub(data, sensitiveValues) : data,
216
- truncate
293
+ truncateString
217
294
  );
218
- const unsafeData = recurseReplace(data, truncate);
295
+ const unsafeData = recurseReplace(data, truncateString);
219
296
  // Keep safe log keys uncensored
220
297
  Object.keys(safeData).forEach((key) => {
221
298
  if (SAFE_LOG_KEYS.includes(key)) {
@@ -251,7 +328,7 @@ const sendLog = async (logStreamFactory, options, event, message, data) => {
251
328
  // (bad) callback that is still running after the Lambda handler returns?
252
329
  // We need to make sure the bad callback ends the logger as well.
253
330
  // Otherwise, it will hang!
254
- logStreamFactory.end();
331
+ logStreamFactory.end(DEFAULT_LOGGER_TIMEOUT);
255
332
  }
256
333
  }
257
334
  };
@@ -288,8 +365,8 @@ const createLogger = (event, options) => {
288
365
  const logStreamFactory = new LogStreamFactory();
289
366
  const logger = sendLog.bind(undefined, logStreamFactory, options, event);
290
367
 
291
- logger.end = async () => {
292
- return logStreamFactory.end();
368
+ logger.end = async (timeoutToAbort = DEFAULT_LOGGER_TIMEOUT) => {
369
+ return logStreamFactory.end(timeoutToAbort);
293
370
  };
294
371
  return logger;
295
372
  };
package/src/tools/data.js CHANGED
@@ -172,6 +172,162 @@ const simpleTruncate = (string, length, suffix) => {
172
172
  return string;
173
173
  };
174
174
 
175
+ /**
176
+ * Adds an item to an object or array.
177
+ * If the parent is an object, the value will be set at the specified key.
178
+ * If the parent is an array, the value will be added to the end of the array (and the key will be ignored).
179
+ * Used by truncateData.
180
+ *
181
+ * @param {object | any[]} parent An object or array.
182
+ * @param {string} key The key to set the value at (objects only; ignored for arrays).
183
+ * @param {any} value The value to add.
184
+ */
185
+ const _addItem = (parent, key, value) => {
186
+ if (Array.isArray(parent)) {
187
+ parent.push(value);
188
+ } else {
189
+ parent[key] = value;
190
+ }
191
+ };
192
+
193
+ /**
194
+ * Examines an object entry or array entry (`item`) and determines its "cost" (how many characters will it take to add it to `parent`).
195
+ * If the item is a string, `availableSpace` is used to determine if the string should be truncated.
196
+ * If the item is an object or array, its entries / values are added to `queue`.
197
+ * Used by truncateData.
198
+ *
199
+ * @param {any[]} queue
200
+ * @param {object | any[]} parent
201
+ * @param {string} key
202
+ * @param {any} item
203
+ * @param {number} availableSpace
204
+ * @returns
205
+ */
206
+ const _processItem = (queue, parent, key, item, availableSpace) => {
207
+ let itemLength = 0;
208
+ let itemToAdd = item;
209
+ let wasTruncated = false;
210
+
211
+ itemLength += key.length; // array keys are empty strings, so this is a noop
212
+ itemLength += key.length ? 3 : 0; // objects get +2 for "" around the key and +1 for the :
213
+ itemLength += 1; // arrays and objects both have +1 for commas between entries
214
+ if (parent && typeof parent === 'object' && _.isEmpty(parent)) {
215
+ itemLength -= 1; // this is the first entry for an object or array; remove the count for a comma
216
+ }
217
+
218
+ if (typeof item === 'number' || typeof item === 'boolean' || item == null) {
219
+ itemLength += String(item).length;
220
+ } else if (typeof item === 'string') {
221
+ const overhead = itemLength + 2; // the minimum amount of space needed after truncation
222
+ if (item.length + overhead > availableSpace) {
223
+ // this string is going to push us over the edge; truncate it
224
+ itemToAdd = simpleTruncate(item, availableSpace - overhead, ' [...]');
225
+ wasTruncated = true;
226
+ }
227
+ itemLength += itemToAdd.length + 2; // 2 for quotes around the string value
228
+ } else if (typeof item === 'object') {
229
+ itemLength += 2; // '{}' or '[]'
230
+
231
+ let entries;
232
+ if (Array.isArray(item)) {
233
+ const newArr = [];
234
+ itemToAdd = newArr;
235
+ entries = item.map((subValue) => [newArr, '', subValue]);
236
+ } else {
237
+ const newObj = {};
238
+ itemToAdd = newObj;
239
+ entries = Object.entries(item).map(([subKey, subValue]) => [
240
+ newObj,
241
+ subKey,
242
+ subValue,
243
+ ]);
244
+ }
245
+ queue.unshift(...entries);
246
+ } else {
247
+ // JSON.stringify doesn't usually really do anything for any other typeofs
248
+ // we're just going to use `undefined` and hope for the best
249
+ itemLength += 'undefined'.length;
250
+ itemToAdd = undefined;
251
+ }
252
+ return [itemLength, itemToAdd, wasTruncated];
253
+ };
254
+
255
+ /**
256
+ * Takes a given `data` object or array and copies pieces of that data into `output` until its stringified length fits in `maxLength` characters.
257
+ *
258
+ * In general, output should track with `JSON.stringify(item).substring(0, maxLength)` (i.e. depth-first traversal of arrays and object entries), but in a JSON-aware way.
259
+ * If the item's initial stringified length is less than or equal to `maxLength`, the item is returned as-is.
260
+ * @param {object | any[]} data The JSON object or array to be truncated.
261
+ * @param {number} maxLength The maximum length of JSON.stringify(output). Note that this may not be the exact output length, but it serves as an upper bound. Minimum value is 40.
262
+ * @returns {object | any[]} The truncated object or array.
263
+ */
264
+ const truncateData = (data, maxLength) => {
265
+ if (!data || typeof data !== 'object') {
266
+ // the following code is only meant to work on objects and arrays
267
+ return data;
268
+ }
269
+ if (JSON.stringify(data).length <= maxLength) {
270
+ // no need to truncate
271
+ return data;
272
+ }
273
+
274
+ const root = Array.isArray(data) ? [] : {};
275
+ let length = 2; // '{}' or '[]'
276
+ let dataWasTruncated = false; // used during iteration to track if a string was truncated
277
+ const truncateMessageSize = 39; // the overhead required to add a message about truncating data
278
+
279
+ if (maxLength < 40) {
280
+ // adding the truncate message takes 39 characters, but the minimum output (i.e. just the message wrapped in an object or array)
281
+ // is 40 characters due to the overhead of the {} or [] characters (+2) minus the comma (-1)
282
+ throw new Error(`maxLength must be at least 40`);
283
+ }
284
+
285
+ const queue = Array.isArray(data)
286
+ ? data.map((value) => [root, '', value])
287
+ : Object.entries(data).map(([key, value]) => [root, key, value]);
288
+
289
+ // iterate over the queue
290
+ while (queue.length > 0) {
291
+ const [parent, key, item] = queue.shift();
292
+ const [itemLength, processedItem, itemWasTruncated] = _processItem(
293
+ queue,
294
+ parent,
295
+ key,
296
+ item,
297
+ maxLength - length - truncateMessageSize
298
+ );
299
+
300
+ if (itemWasTruncated) {
301
+ // if a string was truncated, we mark the total data as truncated for messaging purposes
302
+ dataWasTruncated = true;
303
+ }
304
+
305
+ if (length + itemLength + truncateMessageSize < maxLength) {
306
+ // we're still under the max length, add this and keep going
307
+ _addItem(parent, key, processedItem);
308
+ length += itemLength;
309
+ } else {
310
+ if (length + itemLength + truncateMessageSize === maxLength) {
311
+ // we can fit this item + the truncate message, so let's add it before we stop
312
+ _addItem(parent, key, processedItem);
313
+ }
314
+ dataWasTruncated = true;
315
+ break;
316
+ }
317
+ }
318
+
319
+ // we can hit the following even if we got through all the items in the queue in the case that any strings were truncated
320
+ if (dataWasTruncated) {
321
+ if (Array.isArray(root)) {
322
+ root.push('NOTE : This data has been truncated.');
323
+ } else {
324
+ root.NOTE = 'This data has been truncated.';
325
+ }
326
+ }
327
+
328
+ return root;
329
+ };
330
+
175
331
  const genId = () => parseInt(Math.random() * 100000000);
176
332
 
177
333
  module.exports = {
@@ -186,4 +342,5 @@ module.exports = {
186
342
  memoizedFindMapDeep,
187
343
  recurseReplace,
188
344
  simpleTruncate,
345
+ truncateData,
189
346
  };