@google-cloud/nodejs-common 2.0.0 → 2.0.2-alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@google-cloud/nodejs-common",
3
- "version": "2.0.0",
3
+ "version": "2.0.2-alpha",
4
4
  "description": "A NodeJs common library for solutions based on Cloud Functions",
5
5
  "author": "Google Inc.",
6
6
  "license": "Apache-2.0",
@@ -19,6 +19,7 @@
19
19
  'use strict';
20
20
 
21
21
  const {join} = require('path');
22
+ const { request } = require('gaxios');
22
23
  const {StorageFile} = require('./storage.js');
23
24
  /** Type definition of 'Cloud Storage Object' for Cloud Functions. */
24
25
  const {storage_v1: {Schema$Object: StorageEventData}} = require('googleapis');
@@ -74,12 +75,59 @@ let EventContext;
74
75
  */
75
76
  let CloudFunction;
76
77
 
78
/**
 * Tries to move the original file to another specific folder before running
 * the main function. Sometimes there are duplicated GCS trigger events for
 * Cloud Functions; to solve that, this function moves the file to the target
 * folder before invoking the main function.
 * After the file is successfully moved, invokes the main function with the
 * moved file.
 *
 * @param {!MainFunctionOfStorage} fn The main function to run on the file.
 * @param {!StorageEventData} file The Storage object that triggered the event.
 * @param {string} messageTag The tag that is used to mark logs.
 * @param {string=} processed The folder that the file will be moved to.
 * @return {!Promise<string>} The result of the main function.
 */
const moveAndProcessFile = async (fn, file, messageTag,
    processed = 'processed/') => {
  const fileName = file.name;
  try {
    const fileObj = new StorageFile(file.bucket, fileName);
    const [newFile] = await fileObj.getFile().move(join(processed, fileName));
    console.log(`${messageTag} move '${fileName}' to '${newFile.name}'`);
    const result = await fn({
      name: newFile.name,
      oldName: fileName,
      bucket: file.bucket,
      size: file.size,
      updated: file.updated,
    });
    const message =
        `${messageTag} completed. ${newFile.name} triggered the Cloud Functions.`;
    console.log(message);
    return result;
  } catch (error) {
    // A 'file#delete failed' error means the source file was already gone —
    // most likely a duplicated GCS trigger event whose twin moved it first.
    // Guard with `|| ''` so an error without a `message` property does not
    // itself throw a TypeError here.
    let message;
    if ((error.message || '').startsWith('file#delete failed with an error')) {
      message =
          `${messageTag} quit. Fail to move ${fileName}. Maybe duplicated.`;
    } else {
      message =
          `${messageTag} failed. ${fileName} got an error: ${error.message}`;
    }
    console.warn(message);
    throw new Error(message);
  }
};
123
+
77
124
  /**
78
125
  * Triggers the main function with the correct new coming file for once.
79
126
  * Detailed steps:
80
127
  * 1. Checks the coming file is in the proper folder. In case there are
81
128
  * different Cloud Functions with different purposes monitoring on the same
82
129
  * Storage Bucket, Cloud Functions can be distinguished with its own 'folder';
130
+ * It invokes the function `moveAndProcessFile` to achieve the following two things:
83
131
  * 2. Tries to move the original file to another specific folder before running the
84
132
  * main function. Sometimes there are duplicated GCS trigger events for Cloud
85
133
  * Functions, in order to solve that, the function will move the file to the
@@ -89,10 +137,10 @@ let CloudFunction;
89
137
  *
90
138
  * @param {!MainFunctionOfStorage} fn The main function to run on the file.
91
139
  * @param {string} folder The folder that the main function should check.
92
- * @param {string=} processed The folder that the file will be moved to.
140
+ * @param {string|undefined} processed The folder that the file will be moved to.
93
141
  * @return {!CloudFunction} The Cloud Functions that will be exported.
94
142
  */
95
- const validatedStorageTrigger = (fn, folder, processed = 'processed/') => {
143
+ const validatedStorageTrigger = (fn, folder, processed) => {
96
144
  /**
97
145
  * Returns the Cloud Function that can handle duplicated Storage triggers.
98
146
  * @type {!CloudFunction}
@@ -107,35 +155,33 @@ const validatedStorageTrigger = (fn, folder, processed = 'processed/') => {
107
155
  console.log(message);
108
156
  return message;
109
157
  }
110
- try {
111
- const fileObj = new StorageFile(file.bucket, fileName);
112
- const [newFile] = await fileObj.getFile().move(join(processed, fileName));
113
- console.log(`Event[${eventId}] move: '${fileName}' to '${newFile.name}'`);
114
- await fn({
115
- name: newFile.name,
116
- oldName: fileName,
117
- bucket: file.bucket,
118
- size: file.size,
119
- updated: file.updated,
120
- });
121
- const message =
122
- `Event[${eventId}] completed: ${newFile.name} triggered the Cloud Functions.`;
123
- console.log(message);
124
- return message;
125
- } catch (error) {
126
- let message;
127
- if (error.message.startsWith('file#delete failed with an error')) {
128
- message = `Quit event[${eventId}]: Fail to move ${fileName}. Maybe duplicated.`;
129
- } else {
130
- message = `Event[${eventId}] triggered: ${fileName} Cloud Functions got an error: ${error.message}`;
131
- }
132
- console.warn(message);
133
- return message;
134
- }
158
+ return moveAndProcessFile(fn, file, `Event[${eventId}]`, processed);
135
159
  };
136
160
  return handleFile;
137
161
  };
138
162
 
163
/**
 * Returns an Id token by using the Metadata Server for the given Cloud
 * Functions Url.
 * @see https://cloud.google.com/functions/docs/securing/function-identity#identity_tokens
 * @param {string} functionUrl The Url of the target function, used as the
 *     `audience` claim of the Id token.
 * @return {!Promise<string>} The Id token.
 * @throws {!Error} If the Metadata Server returns an empty token.
 */
const getIdTokenForFunction = async (functionUrl) => {
  const METADATA_SERVER_URL =
      'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/identity?audience=';
  // NOTE(review): the audience is concatenated unescaped; if a functionUrl
  // ever contains '&' or '#' it should be passed through encodeURIComponent.
  const tokenUrl = METADATA_SERVER_URL + functionUrl;
  const tokenResponse = await request({
    url: tokenUrl,
    headers: {'Metadata-Flavor': 'Google'},
    // The Metadata Server identity endpoint is a read-only GET endpoint;
    // POST is not part of its documented contract.
    method: 'GET',
    responseType: 'text',
  });
  const token = tokenResponse.data;
  if (!token) throw new Error('Fail to get ID token for ' + functionUrl);
  return token;
};
184
+
139
185
  /**
140
186
  * Cloud Functions has a specific folder to host deployed source code. The
141
187
  * path of the folder is stored in the environment variable named
@@ -178,6 +224,9 @@ module.exports = {
178
224
  MainFunctionOfStorage,
179
225
  EventContext,
180
226
  CloudFunction,
227
+ moveAndProcessFile,
181
228
  validatedStorageTrigger,
229
+ getIdTokenForFunction,
182
230
  convertEnvPathToAbsolute,
183
231
  };
232
+
@@ -124,9 +124,12 @@ class StorageFile {
124
124
  checkPoint = Math.max(start, end - possibleLineBreakRange + 1);
125
125
  }
126
126
  const content = await this.loadContent(checkPoint, end);
127
- const index = Buffer.from(content).lastIndexOf(LINE_BREAKER);
127
+ let i = 0;
128
+ while (content.charCodeAt(i) === 0xFFFD) i++;
129
+ const cleanedContent = i > 0 ? content.slice(i) : content;
130
+ const index = Buffer.from(cleanedContent).lastIndexOf(LINE_BREAKER);
128
131
  if (index >= 0) {
129
- return checkPoint + index;
132
+ return checkPoint + i + index;
130
133
  }
131
134
  if (checkPoint > start) {
132
135
  return this.getLastLineBreaker(
@@ -252,3 +255,4 @@ module.exports = {
252
255
  LINE_BREAKER,
253
256
  DEFAULT_SPLIT_SIZE,
254
257
  };
258
+