@aj-archipelago/cortex 1.1.26 → 1.1.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  import fs from "fs";
  import path from "path";
- import { BlobServiceClient } from "@azure/storage-blob";
+ import { generateBlobSASQueryParameters, StorageSharedKeyCredential, BlobServiceClient } from "@azure/storage-blob";
  import { v4 as uuidv4 } from "uuid";
  import Busboy from "busboy";
  import { PassThrough } from "stream";
@@ -53,6 +53,7 @@ function isBase64(str) {
  }
  }

+ const { SAS_TOKEN_LIFE_DAYS = 30 } = process.env;
  const GCP_SERVICE_ACCOUNT_KEY =
  process.env.GCP_SERVICE_ACCOUNT_KEY_BASE64 ||
  process.env.GCP_SERVICE_ACCOUNT_KEY ||
@@ -138,6 +139,7 @@ async function saveFileToBlob(chunkPath, requestId) {
  const { containerClient } = await getBlobClient();
  // Use the filename with a UUID as the blob name
  const blobName = `${requestId}/${uuidv4()}_${encodeURIComponent(path.basename(chunkPath))}`;
+ const sasToken = generateSASToken(containerClient, blobName);

  // Create a read stream for the chunk file
  const fileStream = fs.createReadStream(chunkPath);
@@ -147,10 +149,28 @@ async function saveFileToBlob(chunkPath, requestId) {
  await blockBlobClient.uploadStream(fileStream);

  // Return the full URI of the uploaded blob
- const blobUrl = blockBlobClient.url;
+ const blobUrl = `${blockBlobClient.url}?${sasToken}`;
  return blobUrl;
  }

+ const generateSASToken = (containerClient, blobName, expiryTimeSeconds =
+ parseInt(SAS_TOKEN_LIFE_DAYS) * 24 * 60 * 60
+ ) => {
+ const { accountName, accountKey } = containerClient.credential;
+ const sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);
+
+ const sasOptions = {
+ containerName: containerClient.containerName,
+ blobName: blobName,
+ permissions: "r", // Read permission
+ startsOn: new Date(),
+ expiresOn: new Date(new Date().valueOf() + expiryTimeSeconds * 1000)
+ };
+
+ const sasToken = generateBlobSASQueryParameters(sasOptions, sharedKeyCredential).toString();
+ return sasToken;
+ };
+
  //deletes blob that has the requestId
  async function deleteBlob(requestId) {
  if (!requestId) throw new Error("Missing requestId parameter");
@@ -271,8 +291,9 @@ async function uploadFile(context, requestId, body, saveToLocal, useGoogle, file
  await blockBlobClient.uploadStream(passThroughStream, undefined, undefined, options);

  const message = `File '${encodedFilename}' uploaded successfully.`;
- const url = blockBlobClient.url;
  context.log(message);
+ const sasToken = generateSASToken(containerClient, encodedFilename);
+ const url = `${blockBlobClient.url}?${sasToken}`;
  body = { message, url };
  }

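For context, the new helper signs blob URLs with a shared-key SAS token from @azure/storage-blob. A minimal standalone sketch of the same idea (the account, container, and blob names below are placeholders, and BlobSASPermissions.parse is used in place of the raw "r" string):

import { BlobSASPermissions, StorageSharedKeyCredential, generateBlobSASQueryParameters } from "@azure/storage-blob";

// Placeholder credentials -- in the package these are read from containerClient.credential.
const credential = new StorageSharedKeyCredential("myaccount", "<base64-account-key>");

// Sign a read-only SAS token valid for 30 days (the SAS_TOKEN_LIFE_DAYS default).
const sasToken = generateBlobSASQueryParameters({
  containerName: "mycontainer",
  blobName: "requestId/uuid_file.bin",
  permissions: BlobSASPermissions.parse("r"),
  startsOn: new Date(),
  expiresOn: new Date(Date.now() + 30 * 24 * 60 * 60 * 1000)
}, credential).toString();

// The signed URL is the blob URL plus the SAS query string, which is what saveFileToBlob now returns.
const signedUrl = `https://myaccount.blob.core.windows.net/mycontainer/requestId/uuid_file.bin?${sasToken}`;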
@@ -118,7 +118,8 @@ const ytdlDownload = async (url, filename, video = false) => {
  ? { filter: 'audioandvideo' } // audio and video
  : { quality: 'highestaudio' }; // audio only

- const videoStream = ytdl(url, videoOptions);
+ const encodedUrl = encodeURI(url);
+ const videoStream = ytdl(encodedUrl, videoOptions);
  let lastLoggedTime = Date.now();

  videoStream.on('error', (error) => {
@@ -9,7 +9,7 @@
  "version": "1.0.0",
  "dependencies": {
  "@azure/storage-blob": "^12.13.0",
- "@distube/ytdl-core": "^4.13.5",
+ "@distube/ytdl-core": "^4.14.3",
  "@google-cloud/storage": "^7.10.0",
  "axios": "^1.3.6",
  "busboy": "^1.6.0",
@@ -158,19 +158,19 @@
  }
  },
  "node_modules/@distube/ytdl-core": {
- "version": "4.13.5",
- "resolved": "https://registry.npmjs.org/@distube/ytdl-core/-/ytdl-core-4.13.5.tgz",
- "integrity": "sha512-g+4UJIR/auAJbia7iB0aWvaJDbs22P53NySWa47b1NT4xMTDJYguxHFArPrvRkcJrb/AgKjv/XoSZGghpL0CJA==",
+ "version": "4.14.3",
+ "resolved": "https://registry.npmjs.org/@distube/ytdl-core/-/ytdl-core-4.14.3.tgz",
+ "integrity": "sha512-z6i5EVGEuKhuvuRNyIqafBSs5aRA28HssnWehCRhEtYrxeFgXImfmpKTjUusHYU5vQt1swSVPVb2JCd22P0CPA==",
  "dependencies": {
  "http-cookie-agent": "^6.0.5",
  "m3u8stream": "^0.8.6",
  "miniget": "^4.2.3",
  "sax": "^1.4.1",
  "tough-cookie": "^4.1.4",
- "undici": "^6.19.2"
+ "undici": "five"
  },
  "engines": {
- "node": ">=16"
+ "node": ">=14.0"
  },
  "funding": {
  "url": "https://github.com/distubejs/ytdl-core?sponsor"
@@ -3073,16 +3073,16 @@
  }
  },
  "@distube/ytdl-core": {
- "version": "4.13.5",
- "resolved": "https://registry.npmjs.org/@distube/ytdl-core/-/ytdl-core-4.13.5.tgz",
- "integrity": "sha512-g+4UJIR/auAJbia7iB0aWvaJDbs22P53NySWa47b1NT4xMTDJYguxHFArPrvRkcJrb/AgKjv/XoSZGghpL0CJA==",
+ "version": "4.14.3",
+ "resolved": "https://registry.npmjs.org/@distube/ytdl-core/-/ytdl-core-4.14.3.tgz",
+ "integrity": "sha512-z6i5EVGEuKhuvuRNyIqafBSs5aRA28HssnWehCRhEtYrxeFgXImfmpKTjUusHYU5vQt1swSVPVb2JCd22P0CPA==",
  "requires": {
  "http-cookie-agent": "^6.0.5",
  "m3u8stream": "^0.8.6",
  "miniget": "^4.2.3",
  "sax": "^1.4.1",
  "tough-cookie": "^4.1.4",
- "undici": "^6.19.2"
+ "undici": "five"
  }
  },
  "@google-cloud/paginator": {
@@ -10,7 +10,7 @@
  },
  "dependencies": {
  "@azure/storage-blob": "^12.13.0",
- "@distube/ytdl-core": "^4.13.5",
+ "@distube/ytdl-core": "^4.14.3",
  "@google-cloud/storage": "^7.10.0",
  "axios": "^1.3.6",
  "busboy": "^1.6.0",
package/lib/logger.js CHANGED
@@ -19,11 +19,25 @@ const prodFormat = winston.format.combine(
  winston.format.simple()
  );

- const transports = process.env.NODE_ENV === 'production' ?
- new winston.transports.Console({ level: 'info', format: prodFormat }) :
- new winston.transports.Console({ level: 'debug', format: debugFormat });
+ const getTransport = () => {
+ switch (process.env.NODE_ENV) {
+ case 'production':
+ return new winston.transports.Console({ level: 'info', format: prodFormat });
+ case 'development':
+ return new winston.transports.Console({ level: 'verbose', format: debugFormat });
+ case 'debug':
+ return new winston.transports.Console({ level: 'debug', format: debugFormat });
+ default:
+ // Default to development settings if NODE_ENV is not set or unknown
+ console.warn(`Unknown NODE_ENV: ${process.env.NODE_ENV}. Defaulting to development settings.`);
+ return new winston.transports.Console({ level: 'verbose', format: debugFormat });
+ }
+ };

- const logger = winston.createLogger({ transports });
+ // Create the logger
+ const logger = winston.createLogger({
+ transports: [getTransport()]
+ });

  // Function to obscure sensitive URL parameters
  export const obscureUrlParams = url => {
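The logger now picks its console transport from NODE_ENV instead of a production/debug ternary, which is why the rest of this release moves noisy logger.debug calls to logger.verbose. A small usage sketch of the resulting level behavior, assuming the module default-exports the winston instance (the import path here is illustrative only):

import logger from "./lib/logger.js"; // path assumed for illustration

// NODE_ENV=production  -> level 'info'    (prod format)
// NODE_ENV=development -> level 'verbose' (debug format)
// NODE_ENV=debug       -> level 'debug'   (debug format)
// unset or unknown     -> warns, then falls back to the development settings
logger.info("emitted under all three configurations");
logger.verbose("emitted when NODE_ENV is development or debug");
logger.debug("emitted only when NODE_ENV is debug");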
@@ -277,7 +277,7 @@ const makeRequest = async (cortexRequest) => {
  ({ response, duration } = await selectedEndpoint.limiter.schedule({expiration: pathway.timeout * 1000 + 1000, id: `${requestId}_${uuidv4()}`}, () => requestWithMonitor(selectedEndpoint, url, data, axiosConfigObj)));

  if (!controller.signal?.aborted) {
- logger.debug(`<<< [${requestId}] received response for request ${index}`);
+ logger.verbose(`<<< [${requestId}] received response for request ${index}`);
  }
  }

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aj-archipelago/cortex",
- "version": "1.1.26",
+ "version": "1.1.28",
  "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
  "private": false,
  "repository": {
@@ -190,7 +190,7 @@ export default {
  indexCount++;
  const rowCount = data.length;
  if (rowCount === 0) {
- logger.debug(`Index ${indexCount} had no matching sources.`);
+ logger.verbose(`Index ${indexCount} had no matching sources.`);
  continue;
  }
  const proportion = rowCount / totalLength;
@@ -203,7 +203,7 @@
  let items = data.splice(0, slots);
  searchResults.push(...items);

- logger.debug(`Index ${indexCount} had ${rowCount} matching sources. ${items.length} forwarded to the LLM.`);
+ logger.verbose(`Index ${indexCount} had ${rowCount} matching sources. ${items.length} forwarded to the LLM.`);
  // Update remaining slots for next iteration
  remainingSlots -= slots;
  }
@@ -39,7 +39,7 @@ class AzureBingPlugin extends ModelPlugin {
  logRequestData(data, responseData, prompt) {
  this.logAIRequestFinished();

- logger.debug(`${this.parseResponse(responseData)}`);
+ logger.verbose(`${this.parseResponse(responseData)}`);

  prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
  }
@@ -47,8 +47,8 @@ class AzureTranslatePlugin extends ModelPlugin {
  logRequestData(data, responseData, prompt) {
  const modelInput = data[0].Text;

- logger.debug(`${modelInput}`);
- logger.debug(`${this.parseResponse(responseData)}`);
+ logger.verbose(`${modelInput}`);
+ logger.verbose(`${this.parseResponse(responseData)}`);

  prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
  }
@@ -183,7 +183,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
  if (system) {
  const { length, units } = this.getLength(system);
  logger.info(`[system messages sent containing ${length} ${units}]`);
- logger.debug(`${system}`);
+ logger.verbose(`${system}`);
  }

  if (messages && messages.length > 1) {
@@ -209,7 +209,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
  " ... " +
  words.slice(-20).join(" ");

- logger.debug(
+ logger.verbose(
  `message ${index + 1}: role: ${
  message.role
  }, ${units}: ${length}, content: "${preview}"`
@@ -225,7 +225,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
  : message.content;
  const { length, units } = this.getLength(content);
  logger.info(`[request sent containing ${length} ${units}]`);
- logger.debug(`${content}`);
+ logger.verbose(`${content}`);
  }

  if (stream) {
@@ -234,7 +234,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
  const responseText = this.parseResponse(responseData);
  const { length, units } = this.getLength(responseText);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseText}`);
+ logger.verbose(`${responseText}`);
  }

  prompt &&
@@ -186,26 +186,26 @@ class Gemini15ChatPlugin extends ModelPlugin {
  const { length, units } = this.getLength(messageContent);
  const preview = words.length < 41 ? messageContent : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

- logger.debug(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
+ logger.verbose(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
  });
  } else if (messages && messages.length === 1) {
- logger.debug(`${messages[0].parts[0].text}`);
+ logger.verbose(`${messages[0].parts[0].text}`);
  }

  // check if responseData is an array or string
  if (typeof responseData === 'string') {
  const { length, units } = this.getLength(responseData);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseData}`);
+ logger.verbose(`${responseData}`);
  } else if (Array.isArray(responseData)) {
  const { mergedResult, safetyRatings } = mergeResults(responseData);
  if (safetyRatings?.length) {
  logger.warn(`!!! response was blocked because the input or response potentially violates policies`);
- logger.debug(`Safety Ratings: ${JSON.stringify(safetyRatings, null, 2)}`);
+ logger.verbose(`Safety Ratings: ${JSON.stringify(safetyRatings, null, 2)}`);
  }
  const { length, units } = this.getLength(mergedResult);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${mergedResult}`);
+ logger.verbose(`${mergedResult}`);
  } else {
  logger.info(`[response received as an SSE stream]`);
  }
@@ -181,26 +181,26 @@ class GeminiChatPlugin extends ModelPlugin {
  const { length, units } = this.getLength(messageContent);
  const preview = words.length < 41 ? messageContent : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

- logger.debug(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
+ logger.verbose(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
  });
  } else if (messages && messages.length === 1) {
- logger.debug(`${messages[0].parts[0].text}`);
+ logger.verbose(`${messages[0].parts[0].text}`);
  }

  // check if responseData is an array or string
  if (typeof responseData === 'string') {
  const { length, units } = this.getLength(responseData);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseData}`);
+ logger.verbose(`${responseData}`);
  } else if (Array.isArray(responseData)) {
  const { mergedResult, safetyRatings } = mergeResults(responseData);
  if (safetyRatings?.length) {
  logger.warn(`!!! response was blocked because the input or response potentially violates policies`);
- logger.debug(`Safety Ratings: ${JSON.stringify(safetyRatings, null, 2)}`);
+ logger.verbose(`Safety Ratings: ${JSON.stringify(safetyRatings, null, 2)}`);
  }
  const { length, units } = this.getLength(mergedResult);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${mergedResult}`);
+ logger.verbose(`${mergedResult}`);
  } else {
  logger.info(`[response received as an SSE stream]`);
  }
@@ -55,13 +55,13 @@ class LocalModelPlugin extends ModelPlugin {
  //args.push("--temperature", requestParameters.temperature);

  try {
- logger.debug(`Running local model: ${executablePath}, ${args}`);
+ logger.verbose(`Running local model: ${executablePath}, ${args}`);
  const result = execFileSync(executablePath, args, { encoding: 'utf8' });
  // Remove only the first occurrence of requestParameters.prompt from the result
  // Could have used regex here but then would need to escape the prompt
  const parts = result.split(requestParameters.prompt, 2);
  const modifiedResult = parts[0] + parts[1];
- logger.debug(`Result: ${modifiedResult}`);
+ logger.verbose(`Result: ${modifiedResult}`);
  return this.filterFirstResponse(modifiedResult);
  } catch (error) {
  logger.error(`Error running local model: ${error}`);
@@ -250,13 +250,13 @@ class ModelPlugin {
  if (modelInput) {
  const { length, units } = this.getLength(modelInput);
  logger.info(`[request sent containing ${length} ${units}]`);
- logger.debug(`${modelInput}`);
+ logger.verbose(`${modelInput}`);
  }

  const responseText = JSON.stringify(responseData);
  const { length, units } = this.getLength(responseText);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseText}`);
+ logger.verbose(`${responseText}`);

  prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
  }
@@ -117,7 +117,7 @@ class OpenAIChatPlugin extends ModelPlugin {
  const { length, units } = this.getLength(content);
  const preview = words.length < 41 ? content : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

- logger.debug(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
+ logger.verbose(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
  totalLength += length;
  totalUnits = units;
  });
@@ -127,7 +127,7 @@ class OpenAIChatPlugin extends ModelPlugin {
  const content = Array.isArray(message.content) ? message.content.map(item => JSON.stringify(item)).join(', ') : message.content;
  const { length, units } = this.getLength(content);
  logger.info(`[request sent containing ${length} ${units}]`);
- logger.debug(`${content}`);
+ logger.verbose(`${content}`);
  }

  if (stream) {
@@ -136,7 +136,7 @@ class OpenAIChatPlugin extends ModelPlugin {
  const responseText = this.parseResponse(responseData);
  const { length, units } = this.getLength(responseText);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseText}`);
+ logger.verbose(`${responseText}`);
  }

  prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
@@ -110,7 +110,7 @@ class OpenAICompletionPlugin extends ModelPlugin {
  const { length, units } = this.getLength(modelInput);

  logger.info(`[request sent containing ${length} ${units}]`);
- logger.debug(`${modelInput}`);
+ logger.verbose(`${modelInput}`);

  if (stream) {
  logger.info(`[response received as an SSE stream]`);
@@ -118,7 +118,7 @@ class OpenAICompletionPlugin extends ModelPlugin {
  const responseText = this.parseResponse(responseData);
  const { length, units } = this.getLength(responseText);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseText}`);
+ logger.verbose(`${responseText}`);
  }

  prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
@@ -190,13 +190,13 @@ class PalmChatPlugin extends ModelPlugin {
  if (context) {
  const { length, units } = this.getLength(context);
  logger.info(`[chat request contains context information of length ${length} ${units}]`)
- logger.debug(`context: ${context}`);
+ logger.verbose(`context: ${context}`);
  }

  if (examples && examples.length) {
  logger.info(`[chat request contains ${examples.length} examples]`);
  examples.forEach((example, index) => {
- logger.debug(`example ${index + 1}: input: "${example.input.content}", output: "${example.output.content}"`);
+ logger.verbose(`example ${index + 1}: input: "${example.input.content}", output: "${example.output.content}"`);
  });
  }

@@ -207,10 +207,10 @@
  const { length, units } = this.getLength(message.content);
  const preview = words.length < 41 ? message.content : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

- logger.debug(`message ${index + 1}: author: ${message.author}, ${units}: ${length}, content: "${preview}"`);
+ logger.verbose(`message ${index + 1}: author: ${message.author}, ${units}: ${length}, content: "${preview}"`);
  });
  } else if (messages && messages.length === 1) {
- logger.debug(`${messages[0].content}`);
+ logger.verbose(`${messages[0].content}`);
  }

  const safetyAttributes = this.getSafetyAttributes(responseData);
@@ -218,7 +218,7 @@
  const responseText = this.parseResponse(responseData);
  const { length, units } = this.getLength(responseText);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseText}`);
+ logger.verbose(`${responseText}`);

  if (safetyAttributes) {
  logger.warn(`[response contains safety attributes: ${JSON.stringify(safetyAttributes, null, 2)}]`);
@@ -114,13 +114,13 @@ class PalmCompletionPlugin extends ModelPlugin {
  if (modelInput) {
  const { length, units } = this.getLength(modelInput);
  logger.info(`[request sent containing ${length} ${units}]`);
- logger.debug(`${modelInput}`);
+ logger.verbose(`${modelInput}`);
  }

  const responseText = this.parseResponse(responseData);
  const { length, units } = this.getLength(responseText);
  logger.info(`[response received containing ${length} ${units}]`);
- logger.debug(`${responseText}`);
+ logger.verbose(`${responseText}`);

  if (safetyAttributes) {
  logger.warn(`[response contains safety attributes: ${JSON.stringify(safetyAttributes, null, 2)}]`);