zapier-platform-core 9.5.0 → 9.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "zapier-platform-core",
3
- "version": "9.5.0",
3
+ "version": "9.7.1",
4
4
  "description": "The core SDK for CLI apps in the Zapier Developer Platform.",
5
5
  "repository": "zapier/zapier-platform-core",
6
6
  "homepage": "https://zapier.com/",
@@ -41,16 +41,18 @@
41
41
  "bluebird": "3.5.5",
42
42
  "content-disposition": "0.5.3",
43
43
  "dotenv": "8.1.0",
44
- "form-data": "2.5.0",
44
+ "form-data": "4.0.0",
45
45
  "lodash": "4.17.15",
46
- "node-fetch": "2.6.0",
46
+ "mime-types": "2.1.34",
47
+ "node-fetch": "2.6.7",
47
48
  "oauth-sign": "0.9.0",
48
49
  "semver": "5.6.0",
49
- "zapier-platform-schema": "9.5.0"
50
+ "zapier-platform-schema": "9.7.1"
50
51
  },
51
52
  "devDependencies": {
52
53
  "adm-zip": "0.4.13",
53
54
  "aws-sdk": "2.238.1",
55
+ "dicer": "0.3.0",
54
56
  "fs-extra": "8.1.0",
55
57
  "mock-fs": "4.10.1"
56
58
  },
@@ -7,7 +7,7 @@ const {
7
7
  flattenPaths,
8
8
  getObjectType,
9
9
  isPlainObj,
10
- recurseReplace
10
+ recurseReplace,
11
11
  } = require('./data');
12
12
 
13
13
  const DEFAULT_BUNDLE = {
@@ -15,7 +15,7 @@ const DEFAULT_BUNDLE = {
15
15
  inputData: {},
16
16
  meta: {},
17
17
  subscribeData: {},
18
- targetUrl: ''
18
+ targetUrl: '',
19
19
  };
20
20
 
21
21
  const isCurlies = /{{.*?}}/g;
@@ -34,7 +34,7 @@ const recurseCleanFuncs = (obj, path) => {
34
34
  });
35
35
  } else if (isPlainObj(obj)) {
36
36
  const newObj = {};
37
- Object.keys(obj).forEach(key => {
37
+ Object.keys(obj).forEach((key) => {
38
38
  const value = obj[key];
39
39
  newObj[key] = recurseCleanFuncs(value, path.concat([key]));
40
40
  });
@@ -45,7 +45,7 @@ const recurseCleanFuncs = (obj, path) => {
45
45
 
46
46
  // Recurse a nested object replace all instances of keys->vals in the bank.
47
47
  const recurseReplaceBank = (obj, bank = {}) => {
48
- const replacer = out => {
48
+ const replacer = (out) => {
49
49
  if (!['string', 'number'].includes(typeof out)) {
50
50
  return out;
51
51
  }
@@ -56,7 +56,7 @@ const recurseReplaceBank = (obj, bank = {}) => {
56
56
  const originalValueStr = String(out);
57
57
  let maybeChangedString = originalValueStr;
58
58
 
59
- Object.keys(bank).forEach(key => {
59
+ Object.keys(bank).forEach((key) => {
60
60
  // Escape characters (ex. {{foo}} => \\{\\{foo\\}\\} )
61
61
  const escapedKey = key.replace(/[-[\]/{}()\\*+?.^$|]/g, '\\$&');
62
62
  const matchesKey = new RegExp(escapedKey, 'g');
@@ -106,12 +106,12 @@ const finalizeBundle = pipe(
106
106
  );
107
107
 
108
108
  // Takes a raw app and bundle and composes a bank of {{key}}->val
109
- const createBundleBank = (appRaw, event = {}, serializeFunc = x => x) => {
109
+ const createBundleBank = (appRaw, event = {}, serializeFunc = (x) => x) => {
110
110
  const bank = {
111
111
  bundle: finalizeBundle(event.bundle),
112
112
  process: {
113
- env: _.extend({}, process.env || {})
114
- }
113
+ env: _.extend({}, process.env || {}),
114
+ },
115
115
  };
116
116
 
117
117
  const options = { preserve: { 'bundle.inputData': true } };
@@ -123,7 +123,7 @@ const createBundleBank = (appRaw, event = {}, serializeFunc = x => x) => {
123
123
  }, {});
124
124
  };
125
125
 
126
- const maskOutput = output => _.pick(output, 'results', 'status');
126
+ const maskOutput = (output) => _.pick(output, 'results', 'status');
127
127
 
128
128
  // These normalize functions are called after the initial before middleware that
129
129
  // cleans the request. The reason is that we need to know why a value is empty
@@ -132,7 +132,7 @@ const maskOutput = output => _.pick(output, 'results', 'status');
132
132
  // an earlier Zap step? Or was it a null value? Each has different results depending
133
133
  // on how the partner has configued their integration.
134
134
  const normalizeEmptyRequestFields = (shouldCleanup, field, req) => {
135
- const handleEmpty = key => {
135
+ const handleEmpty = (key) => {
136
136
  const value = req[field][key] || '';
137
137
  const cleaned = value.replace(isCurlies, '');
138
138
 
@@ -152,11 +152,11 @@ const normalizeEmptyRequestFields = (shouldCleanup, field, req) => {
152
152
  });
153
153
  };
154
154
 
155
- const isEmptyQueryParam = value =>
155
+ const isEmptyQueryParam = (value) =>
156
156
  value === '' ||
157
157
  value === null ||
158
158
  value === undefined ||
159
- isCurlies.test(value);
159
+ (typeof value === 'string' && value.search(isCurlies) >= 0);
160
160
 
161
161
  const normalizeEmptyParamFields = normalizeEmptyRequestFields.bind(
162
162
  null,
@@ -165,7 +165,7 @@ const normalizeEmptyParamFields = normalizeEmptyRequestFields.bind(
165
165
  );
166
166
  const normalizeEmptyBodyFields = normalizeEmptyRequestFields.bind(
167
167
  null,
168
- v => isCurlies.test(v),
168
+ (v) => typeof v === 'string' && v.search(isCurlies) >= 0,
169
169
  'body'
170
170
  );
171
171
 
@@ -175,5 +175,5 @@ module.exports = {
175
175
  normalizeEmptyBodyFields,
176
176
  normalizeEmptyParamFields,
177
177
  recurseCleanFuncs,
178
- recurseReplaceBank
178
+ recurseReplaceBank,
179
179
  };
@@ -1,14 +1,18 @@
1
1
  'use strict';
2
2
 
3
- const _ = require('lodash');
3
+ const fs = require('fs');
4
+ const os = require('os');
4
5
  const path = require('path');
5
- const FormData = require('form-data');
6
+ const { pipeline } = require('stream');
7
+ const { promisify } = require('util');
8
+ const { randomBytes } = require('crypto');
9
+
10
+ const _ = require('lodash');
6
11
  const contentDisposition = require('content-disposition');
12
+ const FormData = require('form-data');
13
+ const mime = require('mime-types');
7
14
 
8
15
  const request = require('./request-client-internal');
9
- const ZapierPromise = require('./promise');
10
-
11
- const isPromise = obj => obj && typeof obj.then === 'function';
12
16
 
13
17
  const UPLOAD_MAX_SIZE = 1000 * 1000 * 150; // 150mb, in zapier backend too
14
18
 
@@ -19,33 +23,187 @@ const LENGTH_ERR_MESSAGE =
19
23
  const DEFAULT_FILE_NAME = 'unnamedfile';
20
24
  const DEFAULT_CONTENT_TYPE = 'application/octet-stream';
21
25
 
22
- const uploader = (
26
+ const streamPipeline = promisify(pipeline);
27
+
28
+ const filenameFromURL = (url) => {
29
+ try {
30
+ return decodeURIComponent(path.posix.basename(new URL(url).pathname));
31
+ } catch (error) {
32
+ return null;
33
+ }
34
+ };
35
+
36
+ const filenameFromHeader = (response) => {
37
+ const cd = response.headers.get('content-disposition');
38
+ let filename;
39
+ if (cd) {
40
+ try {
41
+ filename = contentDisposition.parse(cd).parameters.filename;
42
+ } catch (error) {
43
+ return null;
44
+ }
45
+ }
46
+ return filename || null;
47
+ };
48
+
49
+ const resolveRemoteStream = async (stream) => {
50
+ // Download to a temp file, get the file size, and create a readable stream
51
+ // from the temp file.
52
+ //
53
+ // The streamPipeline usage is taken from
54
+ // https://github.com/node-fetch/node-fetch#streams
55
+ const tmpFilePath = path.join(
56
+ os.tmpdir(),
57
+ 'stash-' + randomBytes(16).toString('hex')
58
+ );
59
+
60
+ try {
61
+ await streamPipeline(stream, fs.createWriteStream(tmpFilePath));
62
+ } catch (error) {
63
+ try {
64
+ fs.unlinkSync(tmpFilePath);
65
+ } catch (e) {
66
+ // File doesn't exist? Probably okay
67
+ }
68
+ throw error;
69
+ }
70
+
71
+ const length = fs.statSync(tmpFilePath).size;
72
+ const readStream = fs.createReadStream(tmpFilePath);
73
+
74
+ readStream.on('end', () => {
75
+ // Burn after reading
76
+ try {
77
+ fs.unlinkSync(tmpFilePath);
78
+ } catch (e) {
79
+ // TODO: We probably want to log warning here
80
+ }
81
+ });
82
+
83
+ return {
84
+ streamOrData: readStream,
85
+ length,
86
+ };
87
+ };
88
+
89
+ const resolveResponseToStream = async (response) => {
90
+ // Get filename from content-disposition header or URL
91
+ let filename =
92
+ filenameFromHeader(response) ||
93
+ filenameFromURL(response.url || _.get(response, ['request', 'url'])) ||
94
+ DEFAULT_FILE_NAME;
95
+
96
+ const contentType = response.headers.get('content-type');
97
+ if (contentType && !path.extname(filename)) {
98
+ const ext = mime.extension(contentType);
99
+ if (ext && ext !== 'bin') {
100
+ filename += '.' + ext;
101
+ }
102
+ }
103
+
104
+ if (response.body && typeof response.body.pipe === 'function') {
105
+ // streamable response created by z.request({ raw: true })
106
+ return {
107
+ ...(await resolveRemoteStream(response.body)),
108
+ contentType: contentType || DEFAULT_CONTENT_TYPE,
109
+ filename,
110
+ };
111
+ }
112
+
113
+ // regular response created by z.request({ raw: false })
114
+ return {
115
+ streamOrData: response.content,
116
+ length: Buffer.byteLength(response.content),
117
+ contentType: contentType || DEFAULT_CONTENT_TYPE,
118
+ filename,
119
+ };
120
+ };
121
+
122
+ const resolveStreamWithMeta = async (stream) => {
123
+ const isLocalFile = stream.path && fs.existsSync(stream.path);
124
+ if (isLocalFile) {
125
+ const filename = path.basename(stream.path);
126
+ return {
127
+ streamOrData: stream,
128
+ length: fs.statSync(stream.path).size,
129
+ contentType: mime.lookup(filename) || DEFAULT_CONTENT_TYPE,
130
+ filename,
131
+ };
132
+ }
133
+
134
+ return {
135
+ ...(await resolveRemoteStream(stream)),
136
+ contentType: DEFAULT_CONTENT_TYPE,
137
+ filename: DEFAULT_FILE_NAME,
138
+ };
139
+ };
140
+
141
+ // Returns an object with fields:
142
+ // * streamOrData: a readable stream, a string, or a Buffer
143
+ // * length: content length in bytes
144
+ // * contentType
145
+ // * filename
146
+ const resolveToBufferStringStream = async (responseOrData) => {
147
+ if (typeof responseOrData === 'string' || responseOrData instanceof String) {
148
+ // The .toString() call only makes a difference for the String object case.
149
+ // It converts a String object to a regular string.
150
+ const str = responseOrData.toString();
151
+ return {
152
+ streamOrData: str,
153
+ length: Buffer.byteLength(str),
154
+ contentType: 'text/plain',
155
+ filename: `${DEFAULT_FILE_NAME}.txt`,
156
+ };
157
+ } else if (Buffer.isBuffer(responseOrData)) {
158
+ return {
159
+ streamOrData: responseOrData,
160
+ length: responseOrData.length,
161
+ contentType: DEFAULT_CONTENT_TYPE,
162
+ filename: DEFAULT_FILE_NAME,
163
+ };
164
+ } else if (
165
+ (responseOrData.body && typeof responseOrData.body.pipe === 'function') ||
166
+ typeof responseOrData.content === 'string'
167
+ ) {
168
+ return resolveResponseToStream(responseOrData);
169
+ } else if (typeof responseOrData.pipe === 'function') {
170
+ return resolveStreamWithMeta(responseOrData);
171
+ }
172
+
173
+ throw new TypeError(
174
+ `z.stashFile() cannot stash type '${typeof responseOrData}'. ` +
175
+ 'Pass it a request, readable stream, string, or Buffer.'
176
+ );
177
+ };
178
+
179
+ const uploader = async (
23
180
  signedPostData,
24
181
  bufferStringStream,
25
182
  knownLength,
26
183
  filename,
27
184
  contentType
28
185
  ) => {
29
- const form = new FormData();
30
-
31
186
  if (knownLength && knownLength > UPLOAD_MAX_SIZE) {
32
- return ZapierPromise.reject(
33
- new Error(`${knownLength} is too big, ${UPLOAD_MAX_SIZE} is the max`)
34
- );
187
+ throw new Error(`${knownLength} is too big, ${UPLOAD_MAX_SIZE} is the max`);
35
188
  }
189
+ filename = path.basename(filename).replace('"', '');
36
190
 
37
- _.each(signedPostData.fields, (value, key) => {
38
- form.append(key, value);
39
- });
191
+ const fields = {
192
+ ...signedPostData.fields,
193
+ 'Content-Disposition': contentDisposition(filename),
194
+ 'Content-Type': contentType,
195
+ };
40
196
 
41
- filename = path.basename(filename || DEFAULT_FILE_NAME).replace('"', '');
197
+ const form = new FormData();
42
198
 
43
- form.append('Content-Disposition', contentDisposition(filename));
199
+ Object.entries(fields).forEach(([key, value]) => {
200
+ form.append(key, value);
201
+ });
44
202
 
45
203
  form.append('file', bufferStringStream, {
46
- contentType,
47
204
  knownLength,
48
- filename
205
+ contentType,
206
+ filename,
49
207
  });
50
208
 
51
209
  // Try to catch the missing length early, before upload to S3 fails.
@@ -56,120 +214,93 @@ const uploader = (
56
214
  }
57
215
 
58
216
  // Send to S3 with presigned request.
59
- return request({
217
+ const response = await request({
60
218
  url: signedPostData.url,
61
219
  method: 'POST',
62
- body: form
63
- }).then(res => {
64
- if (res.status === 204) {
65
- return `${signedPostData.url}${signedPostData.fields.key}`;
66
- }
67
- if (
68
- res.content.indexOf(
69
- 'You must provide the Content-Length HTTP header.'
70
- ) !== -1
71
- ) {
72
- throw new Error(LENGTH_ERR_MESSAGE);
73
- }
74
- throw new Error(`Got ${res.status} - ${res.content}`);
220
+ body: form,
75
221
  });
222
+
223
+ if (response.status === 204) {
224
+ return new URL(signedPostData.fields.key, signedPostData.url).href;
225
+ }
226
+
227
+ if (
228
+ response.content &&
229
+ response.content.includes &&
230
+ response.content.includes(
231
+ 'You must provide the Content-Length HTTP header.'
232
+ )
233
+ ) {
234
+ throw new Error(LENGTH_ERR_MESSAGE);
235
+ }
236
+
237
+ throw new Error(`Got ${response.status} - ${response.content}`);
76
238
  };
77
239
 
78
240
  // Designed to be some user provided function/api.
79
- const createFileStasher = input => {
241
+ const createFileStasher = (input) => {
80
242
  const rpc = _.get(input, '_zapier.rpc');
81
243
 
82
- return (bufferStringStream, knownLength, filename, contentType) => {
244
+ return async (requestOrData, knownLength, filename, contentType) => {
83
245
  // TODO: maybe this could be smart?
84
246
  // if it is already a public url, do we pass through? or upload?
85
247
  if (!rpc) {
86
- return ZapierPromise.reject(new Error('rpc is not available'));
248
+ throw new Error('rpc is not available');
87
249
  }
88
250
 
89
- const isRunningOnHydrator =
90
- _.get(input, '_zapier.event.method', '').indexOf('hydrators.') === 0;
91
- const isRunningOnCreate =
92
- _.get(input, '_zapier.event.method', '').indexOf('creates.') === 0;
251
+ const isRunningOnHydrator = _.get(
252
+ input,
253
+ '_zapier.event.method',
254
+ ''
255
+ ).startsWith('hydrators.');
256
+ const isRunningOnCreate = _.get(
257
+ input,
258
+ '_zapier.event.method',
259
+ ''
260
+ ).startsWith('creates.');
93
261
 
94
262
  if (!isRunningOnHydrator && !isRunningOnCreate) {
95
- return ZapierPromise.reject(
96
- new Error(
97
- 'Files can only be stashed within a create or hydration function/method.'
98
- )
263
+ throw new Error(
264
+ 'Files can only be stashed within a create or hydration function/method.'
99
265
  );
100
266
  }
101
267
 
102
- const fileContentType = contentType || DEFAULT_CONTENT_TYPE;
103
-
104
- return rpc('get_presigned_upload_post_data', fileContentType).then(
105
- result => {
106
- if (isPromise(bufferStringStream)) {
107
- return bufferStringStream.then(maybeResponse => {
108
- const isStreamed = _.get(maybeResponse, 'request.raw', false);
109
-
110
- const parseFinalResponse = response => {
111
- let newBufferStringStream = response;
112
- if (_.isString(response)) {
113
- newBufferStringStream = response;
114
- } else if (response) {
115
- if (Buffer.isBuffer(response)) {
116
- newBufferStringStream = response;
117
- } else if (Buffer.isBuffer(response.dataBuffer)) {
118
- newBufferStringStream = response.dataBuffer;
119
- } else if (
120
- response.body &&
121
- typeof response.body.pipe === 'function'
122
- ) {
123
- newBufferStringStream = response.body;
124
- } else {
125
- newBufferStringStream = response.content;
126
- }
127
-
128
- if (response.headers) {
129
- knownLength =
130
- knownLength || response.getHeader('content-length');
131
- const cd = response.getHeader('content-disposition');
132
- if (cd) {
133
- filename =
134
- filename ||
135
- contentDisposition.parse(cd).parameters.filename;
136
- }
137
- }
138
- } else {
139
- throw new Error(
140
- 'Cannot stash a Promise wrapped file of unknown type.'
141
- );
142
- }
143
-
144
- return uploader(
145
- result,
146
- newBufferStringStream,
147
- knownLength,
148
- filename,
149
- fileContentType
150
- );
151
- };
152
-
153
- if (isStreamed) {
154
- maybeResponse.throwForStatus();
155
- return maybeResponse.buffer().then(buffer => {
156
- maybeResponse.dataBuffer = buffer;
157
- return parseFinalResponse(maybeResponse);
158
- });
159
- } else {
160
- return parseFinalResponse(maybeResponse);
161
- }
162
- });
163
- } else {
164
- return uploader(
165
- result,
166
- bufferStringStream,
167
- knownLength,
168
- filename,
169
- fileContentType
170
- );
171
- }
172
- }
268
+ // requestOrData can be one of these:
269
+ // * string
270
+ // * Buffer
271
+ // * z.request() - a Promise of a regular response
272
+ // * z.request({ raw: true }) - a Promise of a "streamable" response
273
+ // * await z.request() - a regular response
274
+ // * await z.request({ raw: true }) - a streamable response
275
+ //
276
+ // After the following, requestOrData is resolved to responseOrData, which
277
+ // is either:
278
+ // - string
279
+ // - Buffer
280
+ // - a regular response
281
+ // - a streamable response
282
+ const [signedPostData, responseOrData] = await Promise.all([
283
+ rpc('get_presigned_upload_post_data'),
284
+ requestOrData,
285
+ ]);
286
+
287
+ if (responseOrData.throwForStatus) {
288
+ responseOrData.throwForStatus();
289
+ }
290
+
291
+ const {
292
+ streamOrData,
293
+ length,
294
+ contentType: _contentType,
295
+ filename: _filename,
296
+ } = await resolveToBufferStringStream(responseOrData);
297
+
298
+ return uploader(
299
+ signedPostData,
300
+ streamOrData,
301
+ knownLength || length,
302
+ filename || _filename,
303
+ contentType || _contentType
173
304
  );
174
305
  };
175
306
  };
@@ -2,14 +2,14 @@ const zlib = require('zlib');
2
2
  const _ = require('lodash');
3
3
  const constants = require('../constants');
4
4
 
5
- const createHttpPatch = event => {
6
- const createLogger = require('./create-logger');
7
- const logBuffer = [];
8
- const logger = createLogger(event, { logBuffer });
9
-
10
- const httpPatch = object => {
5
+ const createHttpPatch = (event) => {
6
+ const httpPatch = (object, logger) => {
11
7
  const originalRequest = object.request;
12
8
 
9
+ // Important not to reuse logger between calls, because we always destroy
10
+ // the logger at the end of a Lambda call.
11
+ object.zapierLogger = logger;
12
+
13
13
  // Avoids multiple patching and memory leaks (mostly when running tests locally)
14
14
  if (object.patchedByZapier) {
15
15
  return;
@@ -21,13 +21,15 @@ const createHttpPatch = event => {
21
21
  object.request = (options, callback) => {
22
22
  // `options` can be an object or a string. If options is a string, it is
23
23
  // automatically parsed with url.parse().
24
- // See https://nodejs.org/docs/latest-v6.x/api/http.html#http_http_request_options_callback
24
+ // See https://nodejs.org/docs/latest-v12.x/api/http.html#http_http_request_options_callback
25
25
  let requestUrl;
26
26
  if (typeof options === 'string') {
27
27
  requestUrl = options;
28
28
  } else if (typeof options.url === 'string') {
29
- // XXX: Somehow options.url is available for some requests although http.request doesn't really accept it.
30
- // Without this else-if, many HTTP requests don't work. Should take a deeper look at this weirdness.
29
+ // XXX: Somehow options.url is available for some requests although
30
+ // http.request doesn't really accept it. Without this else-if, many
31
+ // HTTP requests don't work. Should take a deeper look at this
32
+ // weirdness.
31
33
  requestUrl = options.url;
32
34
  } else {
33
35
  requestUrl =
@@ -49,10 +51,10 @@ const createHttpPatch = event => {
49
51
  }
50
52
 
51
53
  // Proxy the callback to get the response
52
- const newCallback = function(response) {
54
+ const newCallback = function (response) {
53
55
  const chunks = [];
54
56
 
55
- const sendToLogger = responseBody => {
57
+ const sendToLogger = (responseBody) => {
56
58
  // Prepare data for GL
57
59
  const logData = {
58
60
  log_type: 'http',
@@ -64,10 +66,10 @@ const createHttpPatch = event => {
64
66
  request_via_client: false,
65
67
  response_status_code: response.statusCode,
66
68
  response_headers: response.headers,
67
- response_content: responseBody
69
+ response_content: responseBody,
68
70
  };
69
71
 
70
- logger(
72
+ object.zapierLogger(
71
73
  `${logData.response_status_code} ${logData.request_method} ${logData.request_url}`,
72
74
  logData
73
75
  );
@@ -85,14 +87,14 @@ const createHttpPatch = event => {
85
87
  sendToLogger(responseBody);
86
88
  });
87
89
  } else {
88
- const responseBody = _.map(chunks, chunk => chunk.toString()).join(
89
- '\n'
90
- );
90
+ const responseBody = _.map(chunks, (chunk) =>
91
+ chunk.toString()
92
+ ).join('\n');
91
93
  sendToLogger(responseBody);
92
94
  }
93
95
  };
94
96
 
95
- response.on('data', chunk => chunks.push(chunk));
97
+ response.on('data', (chunk) => chunks.push(chunk));
96
98
  response.on('end', logResponse);
97
99
  response.on('error', logResponse);
98
100
 
@@ -22,7 +22,7 @@ const ZapierPromise = require('./promise');
22
22
  const RequestSchema = require('zapier-platform-schema/lib/schemas/RequestSchema');
23
23
  const FunctionSchema = require('zapier-platform-schema/lib/schemas/FunctionSchema');
24
24
 
25
- const isRequestOrFunction = obj => {
25
+ const isRequestOrFunction = (obj) => {
26
26
  return (
27
27
  RequestSchema.validate(obj).valid || FunctionSchema.validate(obj).valid
28
28
  );
@@ -34,7 +34,7 @@ const extendAppRaw = (base, extension) => {
34
34
  'perform',
35
35
  'performList',
36
36
  'performSubscribe',
37
- 'performUnsubscribe'
37
+ 'performUnsubscribe',
38
38
  ];
39
39
  const concatArrayAndOverrideKeys = (objValue, srcValue, key) => {
40
40
  if (Array.isArray(objValue) && Array.isArray(srcValue)) {
@@ -99,7 +99,7 @@ const getAppRawOverride = (rpc, appRawOverride) => {
99
99
 
100
100
  // Otherwise just get it via RPC
101
101
  rpc('get_definition_override')
102
- .then(fetchedOverride => {
102
+ .then((fetchedOverride) => {
103
103
  // "cache" it.
104
104
  fs.writeFileSync(hashPath, appRawOverride);
105
105
  fs.writeFileSync(overridePath, JSON.stringify(fetchedOverride));
@@ -108,7 +108,7 @@ const getAppRawOverride = (rpc, appRawOverride) => {
108
108
 
109
109
  resolve(fetchedOverride);
110
110
  })
111
- .catch(err => reject(err));
111
+ .catch((err) => reject(err));
112
112
  });
113
113
  };
114
114
 
@@ -118,8 +118,8 @@ const loadApp = (event, rpc, appRawOrPath) => {
118
118
  return new ZapierPromise((resolve, reject) => {
119
119
  if (event && event.appRawOverride) {
120
120
  return getAppRawOverride(rpc, event.appRawOverride)
121
- .then(appRawOverride => resolve(appRawOverride))
122
- .catch(err => reject(err));
121
+ .then((appRawOverride) => resolve(appRawOverride))
122
+ .catch((err) => reject(err));
123
123
  }
124
124
 
125
125
  if (_.isString(appRawOrPath)) {
@@ -130,7 +130,7 @@ const loadApp = (event, rpc, appRawOrPath) => {
130
130
  });
131
131
  };
132
132
 
133
- const createLambdaHandler = appRawOrPath => {
133
+ const createLambdaHandler = (appRawOrPath) => {
134
134
  const handler = (event, context, callback) => {
135
135
  // Wait for all async events to complete before callback returns.
136
136
  // This is not strictly necessary since this is the default now when
@@ -147,65 +147,61 @@ const createLambdaHandler = appRawOrPath => {
147
147
 
148
148
  environmentTools.cleanEnvironment();
149
149
 
150
+ // Copy bundle environment into process.env *before* creating the logger and
151
+ // loading app code, so that the logger gets the endpoint from process.env,
152
+ // and top level app code can get bundle environment vars via process.env.
153
+ environmentTools.applyEnvironment(event);
154
+
150
155
  // Create logger outside of domain, so we can use in both error and run callbacks.
151
156
  const logBuffer = [];
152
157
  const logger = createLogger(event, { logBuffer });
153
158
 
154
159
  let isCallbackCalled = false;
155
160
  const callbackOnce = (err, resp) => {
156
- if (!isCallbackCalled) {
157
- isCallbackCalled = true;
158
- callback(err, resp);
159
- }
161
+ logger.end().finally(() => {
162
+ if (!isCallbackCalled) {
163
+ isCallbackCalled = true;
164
+ callback(err, resp);
165
+ }
166
+ });
160
167
  };
161
168
 
162
169
  const logErrorAndCallbackOnce = (logMsg, logData, err) => {
163
- // Wait for logger to complete before callback. This isn't
164
- // strictly necessary because callbacksWaitsForEmptyLoop is
165
- // the default behavior with callbacks anyway, but don't want
166
- // to rely on that.
167
- logger(logMsg, logData).then(() => {
168
- // Check for `.message` in case someone did `throw "My Error"`
169
- if (
170
- !constants.IS_TESTING &&
171
- err &&
172
- !err.doNotContextify &&
173
- err.message
174
- ) {
175
- err.message += `\n\nConsole logs:\n${logBuffer
176
- .map(s => ` ${s.message}`)
177
- .join('')}`;
178
- }
179
- callbackOnce(err);
180
- });
170
+ logger(logMsg, logData);
171
+
172
+ // Check for `.message` in case someone did `throw "My Error"`
173
+ if (!constants.IS_TESTING && err && !err.doNotContextify && err.message) {
174
+ err.message += `\n\nConsole logs:\n${logBuffer
175
+ .map((s) => ` ${s.message}`)
176
+ .join('')}`;
177
+ }
178
+
179
+ callbackOnce(err);
181
180
  };
182
181
 
183
182
  const handlerDomain = domain.create();
184
183
 
185
- handlerDomain.on('error', err => {
186
- const logMsg = `Uncaught error: ${err}\n${(err && err.stack) ||
187
- '<stack>'}`;
184
+ handlerDomain.on('error', (err) => {
185
+ const logMsg = `Uncaught error: ${err}\n${
186
+ (err && err.stack) || '<stack>'
187
+ }`;
188
188
  const logData = { err, log_type: 'error' };
189
189
  logErrorAndCallbackOnce(logMsg, logData, err);
190
190
  });
191
191
 
192
192
  handlerDomain.run(() => {
193
- // Copy bundle environment into process.env *before* loading app code,
194
- // so that top level app code can get bundle environment vars via process.env.
195
- environmentTools.applyEnvironment(event);
196
-
197
193
  const rpc = createRpcClient(event);
198
194
 
199
195
  return loadApp(event, rpc, appRawOrPath)
200
- .then(appRaw => {
196
+ .then((appRaw) => {
201
197
  const app = createApp(appRaw);
202
198
 
203
199
  const { skipHttpPatch } = appRaw.flags || {};
204
200
  // Adds logging for _all_ kinds of http(s) requests, no matter the library
205
- if (!skipHttpPatch) {
201
+ if (!skipHttpPatch && !event.calledFromCli) {
206
202
  const httpPatch = createHttpPatch(event);
207
- httpPatch(require('http'));
208
- httpPatch(require('https')); // 'https' needs to be patched separately
203
+ httpPatch(require('http'), logger);
204
+ httpPatch(require('https'), logger); // 'https' needs to be patched separately
209
205
  }
210
206
 
211
207
  // TODO: Avoid calling prepareApp(appRaw) repeatedly here as createApp()
@@ -215,12 +211,13 @@ const createLambdaHandler = appRawOrPath => {
215
211
  const input = createInput(compiledApp, event, logger, logBuffer, rpc);
216
212
  return app(input);
217
213
  })
218
- .then(output => {
214
+ .then((output) => {
219
215
  callbackOnce(null, cleaner.maskOutput(output));
220
216
  })
221
- .catch(err => {
222
- const logMsg = `Unhandled error: ${err}\n${(err && err.stack) ||
223
- '<stack>'}`;
217
+ .catch((err) => {
218
+ const logMsg = `Unhandled error: ${err}\n${
219
+ (err && err.stack) || '<stack>'
220
+ }`;
224
221
  const logData = { err, log_type: 'error' };
225
222
  logErrorAndCallbackOnce(logMsg, logData, err);
226
223
  });
@@ -8,8 +8,11 @@ const semver = require('semver');
8
8
  const createLegacyScriptingRunner = (z, input) => {
9
9
  const app = _.get(input, '_zapier.app');
10
10
 
11
- let source =
12
- _.get(app, 'legacy.scriptingSource') || app.legacyScriptingSource;
11
+ // once we have node 14 everywhere, this can be:
12
+ // let source = _.get(app, 'legacy.scriptingSource') ?? app.legacyScriptingSource;
13
+ let source = _.get(app, 'legacy.scriptingSource');
14
+ source = source === undefined ? app.legacyScriptingSource : source;
15
+
13
16
  if (source === undefined) {
14
17
  // Don't initialize z.legacyScripting for a pure CLI app
15
18
  return null;
@@ -26,8 +29,8 @@ const createLegacyScriptingRunner = (z, input) => {
26
29
  let LegacyScriptingRunner, version;
27
30
  try {
28
31
  LegacyScriptingRunner = require('zapier-platform-legacy-scripting-runner');
29
- version = require('zapier-platform-legacy-scripting-runner/package.json')
30
- .version;
32
+ version =
33
+ require('zapier-platform-legacy-scripting-runner/package.json').version;
31
34
  } catch (e) {
32
35
  // Find it in cwd, in case we're developing legacy-scripting-runner itself
33
36
  const cwd = process.cwd();
@@ -1,16 +1,21 @@
1
1
  'use strict';
2
2
 
3
+ const { Transform } = require('stream');
4
+
3
5
  const _ = require('lodash');
4
6
 
5
7
  const request = require('./request-client-internal');
6
8
  const cleaner = require('./cleaner');
7
9
  const dataTools = require('./data');
8
10
  const hashing = require('./hashing');
9
- const ZapierPromise = require('./promise');
10
11
  const constants = require('../constants');
11
12
  const { unheader } = require('./http');
12
13
 
13
- const truncate = str => dataTools.simpleTruncate(str, 3500, ' [...]');
14
+ // The payload size per request to stream logs. This should be slighly lower
15
+ // than the limit (16 MB) on the server side.
16
+ const LOG_STREAM_BYTES_LIMIT = 15 * 1024 * 1024;
17
+
18
+ const truncate = (str) => dataTools.simpleTruncate(str, 3500, ' [...]');
14
19
 
15
20
  const formatHeaders = (headers = {}) => {
16
21
  if (_.isEmpty(headers)) {
@@ -28,7 +33,7 @@ const formatHeaders = (headers = {}) => {
28
33
  .join('\n');
29
34
  };
30
35
 
31
- const maybeStringify = d => {
36
+ const maybeStringify = (d) => {
32
37
  if (_.isPlainObject(d) || Array.isArray(d)) {
33
38
  return JSON.stringify(d);
34
39
  }
@@ -36,7 +41,7 @@ const maybeStringify = d => {
36
41
  };
37
42
 
38
43
  // format HTTP request details into string suitable for printing to stdout
39
- const httpDetailsLogMessage = data => {
44
+ const httpDetailsLogMessage = (data) => {
40
45
  if (data.log_type !== 'http') {
41
46
  return '';
42
47
  }
@@ -58,9 +63,9 @@ const httpDetailsLogMessage = data => {
58
63
  }
59
64
 
60
65
  return `\
61
- ${trimmedData.request_method || 'GET'} ${
62
- trimmedData.request_url
63
- }${trimmedData.request_params || ''}
66
+ ${trimmedData.request_method || 'GET'} ${trimmedData.request_url}${
67
+ trimmedData.request_params || ''
68
+ }
64
69
  ${formatHeaders(trimmedData.request_headers) || ''}
65
70
 
66
71
  ${maybeStringify(trimmedData.request_data) || ''}
@@ -93,12 +98,12 @@ const makeSensitiveBank = (event, data) => {
93
98
  const matcher = (key, value) => {
94
99
  if (_.isString(value)) {
95
100
  const lowerKey = key.toLowerCase();
96
- return _.some(constants.SENSITIVE_KEYS, k => lowerKey.indexOf(k) >= 0);
101
+ return _.some(constants.SENSITIVE_KEYS, (k) => lowerKey.indexOf(k) >= 0);
97
102
  }
98
103
  return false;
99
104
  };
100
105
 
101
- dataTools.recurseExtract(data, matcher).forEach(value => {
106
+ dataTools.recurseExtract(data, matcher).forEach((value) => {
102
107
  sensitiveValues.push(value);
103
108
  });
104
109
 
@@ -126,7 +131,75 @@ const makeSensitiveBank = (event, data) => {
126
131
  );
127
132
  };
128
133
 
129
- const sendLog = (options, event, message, data) => {
134
+ class LogStream extends Transform {
135
+ constructor(options) {
136
+ super(options);
137
+ this.bytesWritten = 0;
138
+ this.request = this._newRequest(options.url, options.token);
139
+ }
140
+
141
+ _newRequest(url, token) {
142
+ const httpOptions = {
143
+ url,
144
+ method: 'POST',
145
+ headers: {
146
+ 'Content-Type': 'application/x-ndjson',
147
+ 'X-Token': token,
148
+ },
149
+ body: this,
150
+ };
151
+ return request(httpOptions).catch((err) => {
152
+ // Swallow logging errors. This will show up in AWS logs at least.
153
+ console.error(
154
+ 'Error making log request:',
155
+ err,
156
+ 'http options:',
157
+ httpOptions
158
+ );
159
+ });
160
+ }
161
+
162
+ _transform(chunk, encoding, callback) {
163
+ this.push(chunk);
164
+ this.bytesWritten += Buffer.byteLength(chunk, encoding);
165
+ callback();
166
+ }
167
+ }
168
+
169
+ // Implements a singleton for LogStream. The goal is that every sendLog() call
170
+ // reuses the same request until the request body grows too big and exceeds
171
+ // LOG_STREAM_BYTES_LIMIT.
172
+ class LogStreamFactory {
173
+ constructor() {
174
+ this._logStream = null;
175
+ }
176
+
177
+ getOrCreate(url, token) {
178
+ if (this._logStream) {
179
+ if (this._logStream.bytesWritten < LOG_STREAM_BYTES_LIMIT) {
180
+ // Reuse the same request for efficiency
181
+ return this._logStream;
182
+ }
183
+
184
+ // End this one before creating another
185
+ this._logStream.end();
186
+ }
187
+
188
+ this._logStream = new LogStream({ url, token });
189
+ return this._logStream;
190
+ }
191
+
192
+ async end() {
193
+ if (this._logStream) {
194
+ this._logStream.end();
195
+ const response = await this._logStream.request;
196
+ this._logStream = null;
197
+ return response;
198
+ }
199
+ }
200
+ }
201
+
202
+ const sendLog = async (logStreamFactory, options, event, message, data) => {
130
203
  data = _.extend({}, data || {}, event.logExtra || {});
131
204
  data.log_type = data.log_type || 'console';
132
205
 
@@ -144,7 +217,7 @@ const sendLog = (options, event, message, data) => {
144
217
  const unsafeData = dataTools.recurseReplace(data, truncate);
145
218
 
146
219
  // Keep safe log keys uncensored
147
- Object.keys(safeData).forEach(key => {
220
+ Object.keys(safeData).forEach((key) => {
148
221
  if (constants.SAFE_LOG_KEYS.indexOf(key) !== -1) {
149
222
  safeData[key] = unsafeData[key];
150
223
  }
@@ -153,20 +226,6 @@ const sendLog = (options, event, message, data) => {
153
226
  safeData.request_headers = formatHeaders(safeData.request_headers);
154
227
  safeData.response_headers = formatHeaders(safeData.response_headers);
155
228
 
156
- const body = {
157
- message: safeMessage,
158
- data: safeData,
159
- token: options.token
160
- };
161
-
162
- const httpOptions = {
163
- url: options.endpoint,
164
- method: 'POST',
165
- headers: { 'Content-Type': 'application/json' },
166
- body: JSON.stringify(body),
167
- timeout: 3000
168
- };
169
-
170
229
  if (event.logToStdout) {
171
230
  toStdout(event, message, unsafeData);
172
231
  }
@@ -177,24 +236,36 @@ const sendLog = (options, event, message, data) => {
177
236
  }
178
237
 
179
238
  if (options.token) {
180
- return request(httpOptions).catch(err => {
181
- // Swallow logging errors.
182
- // This will show up in AWS logs at least:
183
- console.error(
184
- 'Error making log request:',
185
- err,
186
- 'http options:',
187
- httpOptions
188
- );
189
- });
190
- } else {
191
- return ZapierPromise.resolve();
239
+ const logStream = logStreamFactory.getOrCreate(
240
+ options.endpoint,
241
+ options.token
242
+ );
243
+ logStream.write(
244
+ // JSON Lines format: It's important that the serialized JSON object itself has
245
+ // no line breaks, and after an object it ends with a line break.
246
+ JSON.stringify({ message: safeMessage, data: safeData }) + '\n'
247
+ );
192
248
  }
193
249
  };
194
250
 
195
251
  /*
196
252
  Creates low level logging function that POSTs to endpoint (GL by default).
197
253
  Use internally; do not expose to devs.
254
+
255
+ Usage:
256
+
257
+ const logger = createLogger(event, options);
258
+
259
+ // These will reuse the same request to the log server
260
+ logger('log message here', { log_type: 'console' });
261
+ logger('another log', { log_type: 'console' });
262
+ logger('200 GET https://example.com', { log_type: 'http' });
263
+
264
+ // After an invocation, the Lambda handler MUST call logger.end() to close
265
+ // the log stream. Otherwise, it will hang!
266
+ logger.end().finally(() => {
267
+ // anything else you want to do to finish an invocation
268
+ });
198
269
  */
199
270
  const createLogger = (event, options) => {
200
271
  options = options || {};
@@ -205,10 +276,16 @@ const createLogger = (event, options) => {
205
276
  process.env.LOGGING_ENDPOINT || constants.DEFAULT_LOGGING_HTTP_ENDPOINT,
206
277
  apiKey:
207
278
  process.env.LOGGING_API_KEY || constants.DEFAULT_LOGGING_HTTP_API_KEY,
208
- token: process.env.LOGGING_TOKEN || event.token
279
+ token: process.env.LOGGING_TOKEN || event.token,
209
280
  });
210
281
 
211
- return sendLog.bind(undefined, options, event);
282
+ const logStreamFactory = new LogStreamFactory();
283
+ const logger = sendLog.bind(undefined, logStreamFactory, options, event);
284
+
285
+ logger.end = async () => {
286
+ return logStreamFactory.end();
287
+ };
288
+ return logger;
212
289
  };
213
290
 
214
291
  module.exports = createLogger;
@@ -1,5 +1,7 @@
1
1
  'use strict';
2
2
 
3
+ const { Writable } = require('stream');
4
+
3
5
  const fetch = require('node-fetch');
4
6
 
5
7
  // XXX: PatchedRequest is to get past node-fetch's check that forbids GET requests
@@ -7,10 +9,10 @@ const fetch = require('node-fetch');
7
9
  // https://github.com/node-fetch/node-fetch/blob/v2.6.0/src/request.js#L75-L78
8
10
  class PatchedRequest extends fetch.Request {
9
11
  constructor(url, opts) {
10
- const origMethod = (opts.method || 'GET').toUpperCase();
12
+ const origMethod = ((opts && opts.method) || 'GET').toUpperCase();
11
13
 
12
14
  const isGetWithBody =
13
- (origMethod === 'GET' || origMethod === 'HEAD') && opts.body;
15
+ (origMethod === 'GET' || origMethod === 'HEAD') && opts && opts.body;
14
16
  let newOpts = opts;
15
17
  if (isGetWithBody) {
16
18
  // Temporarily remove body to fool fetch.Request constructor
@@ -50,9 +52,31 @@ class PatchedRequest extends fetch.Request {
50
52
 
51
53
  const newFetch = (url, opts) => {
52
54
  const request = new PatchedRequest(url, opts);
55
+
53
56
  // fetch actually accepts a Request object as an argument. It'll clone the
54
57
  // request internally, that's why the PatchedRequest.body hack works.
55
- return fetch(request);
58
+ const responsePromise = fetch(request);
59
+
60
+ // node-fetch clones request.body and uses the cloned body internally. We need
61
+ // to make sure to consume the original body stream so its internal buffer is
62
+ // not filled up, which causes it to pause.
63
+ // See https://github.com/node-fetch/node-fetch/issues/151
64
+ //
65
+ // Exclude form-data object to be consistent with
66
+ // https://github.com/node-fetch/node-fetch/blob/v2.6.6/src/body.js#L403-L412
67
+ if (
68
+ request.body &&
69
+ typeof request.body.pipe === 'function' &&
70
+ typeof request.body.getBoundary !== 'function'
71
+ ) {
72
+ const nullStream = new Writable();
73
+ nullStream._write = function (chunk, encoding, done) {
74
+ done();
75
+ };
76
+ request.body.pipe(nullStream);
77
+ }
78
+
79
+ return responsePromise;
56
80
  };
57
81
 
58
82
  newFetch.Promise = require('./promise');
package/src/.DS_Store DELETED
Binary file
Binary file