@aj-archipelago/cortex 1.1.25 → 1.1.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cortexRequest.js +42 -6
- package/lib/logger.js +18 -4
- package/lib/requestExecutor.js +4 -1
- package/package.json +1 -1
- package/pathways/rag_jarvis.js +2 -2
- package/server/plugins/azureBingPlugin.js +1 -1
- package/server/plugins/azureTranslatePlugin.js +2 -2
- package/server/plugins/claude3VertexPlugin.js +4 -4
- package/server/plugins/gemini15ChatPlugin.js +5 -5
- package/server/plugins/geminiChatPlugin.js +5 -5
- package/server/plugins/localModelPlugin.js +2 -2
- package/server/plugins/modelPlugin.js +2 -2
- package/server/plugins/neuralSpacePlugin.js +41 -38
- package/server/plugins/openAiChatPlugin.js +3 -3
- package/server/plugins/openAiCompletionPlugin.js +2 -2
- package/server/plugins/openAiWhisperPlugin.js +14 -10
- package/server/plugins/palmChatPlugin.js +5 -5
- package/server/plugins/palmCompletionPlugin.js +2 -2
package/lib/cortexRequest.js
CHANGED
@@ -1,12 +1,13 @@
 import { selectEndpoint } from './requestExecutor.js';

 class CortexRequest {
-    constructor( { url, urlSuffix, data, params, headers, auth, cache, model, pathwayResolver, selectedEndpoint, stream } = {}) {
+    constructor( { url, urlSuffix, data, params, headers, auth, cache, model, pathwayResolver, selectedEndpoint, stream, initCallback } = {}) {
         this._url = url || '';
         this._urlSuffix = urlSuffix || '';
         this._data = data || {};
         this._params = params || {};
         this._headers = headers || {};
+        this._addHeaders = {};
         this._auth = auth || {};
         this._cache = cache || {};
         this._model = model || '';
@@ -14,6 +15,7 @@ class CortexRequest {
         this._selectedEndpoint = selectedEndpoint || {};
         this._stream = stream || false;
         this._method = 'POST';
+        this._initCallback = initCallback || null;

         if (this._pathwayResolver) {
             this._model = this._pathwayResolver.model;
@@ -24,17 +26,22 @@ class CortexRequest {
         }
     }

+    initRequest() {
+        if (typeof this._initCallback === 'function') {
+            this._initCallback(this);
+        }
+    }
+
     selectNewEndpoint() {
         const sep = selectEndpoint(this._model);
         if (sep) {
             this._selectedEndpoint = sep;
             this._url = sep.url;
-            this._data = { ...this._data, ...sep.params };
-            this._headers = { ...this._headers, ...sep.headers };
+            this._data = { ...this._data, ...sep.data, ...sep.params };
             if (sep.auth) {
                 this._auth = { ...sep.auth };
             }
-            this.
+            this.initRequest();
         }
     }

@@ -74,9 +81,22 @@ class CortexRequest {
         this._data = value;
     }

+    // initCallback getter and setter
+    get initCallback() {
+        return this._initCallback;
+    }
+
+    set initCallback(value) {
+        if (typeof value !== 'function') {
+            throw new Error('initCallback must be a function');
+        }
+        this._initCallback = value;
+        this.initRequest();
+    }
+
     // params getter and setter
     get params() {
-        return this._params;
+        return {...this._params, ...this._selectedEndpoint.params};
     }

     set params(value) {
@@ -85,13 +105,29 @@ class CortexRequest {

     // headers getter and setter
     get headers() {
-        return { ...this._headers, ...this._auth };
+        return { ...this._headers, ...this._selectedEndpoint.headers, ...this._auth, ...this._addHeaders };
     }

     set headers(value) {
         this._headers = value;
     }

+    // addheaders getter and setter
+    get addHeaders() {
+        return this._addHeaders;
+    }
+
+    set addHeaders(value) {
+        // Create a new object to store the processed headers
+        this._addHeaders = {};
+
+        // Iterate over the input headers and convert keys to title case
+        for (const [key, val] of Object.entries(value)) {
+            const titleCaseKey = key.replace(/(^|-)./g, m => m.toUpperCase());
+            this._addHeaders[titleCaseKey] = val;
+        }
+    }
+
     // auth getter and setter
     get auth() {
         return this._auth;

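The initCallback hook and addHeaders setter are the core of this release: a plugin can register a function that rebuilds the request body and extra headers, and CortexRequest re-runs it whenever the request is (re)initialized, including after selectNewEndpoint() swaps endpoints. A minimal sketch of how a caller might use the new surface (the import path, export form, model name, and header values are illustrative assumptions, not taken from the package):

    import CortexRequest from './lib/cortexRequest.js';   // export form assumed for illustration

    const request = new CortexRequest({ model: 'example-model' });   // hypothetical model name

    // Assigning initCallback runs it once immediately (via initRequest), and it runs
    // again each time selectNewEndpoint() re-initializes the request before a retry.
    request.initCallback = (req) => {
        req.data = { rebuilt: true };                              // rebuild the request body
        req.addHeaders = { 'content-type': 'application/json' };  // extra per-request headers
    };

    // addHeaders title-cases incoming keys, so 'content-type' is stored as 'Content-Type',
    // and the headers getter merges it last, so it wins over base and endpoint headers.
    console.log(request.headers['Content-Type']);                  // -> 'application/json'
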
package/lib/logger.js
CHANGED
@@ -19,11 +19,25 @@ const prodFormat = winston.format.combine(
     winston.format.simple()
 );

-const
-
-
+const getTransport = () => {
+    switch (process.env.NODE_ENV) {
+        case 'production':
+            return new winston.transports.Console({ level: 'info', format: prodFormat });
+        case 'development':
+            return new winston.transports.Console({ level: 'verbose', format: debugFormat });
+        case 'debug':
+            return new winston.transports.Console({ level: 'debug', format: debugFormat });
+        default:
+            // Default to development settings if NODE_ENV is not set or unknown
+            console.warn(`Unknown NODE_ENV: ${process.env.NODE_ENV}. Defaulting to development settings.`);
+            return new winston.transports.Console({ level: 'verbose', format: debugFormat });
+    }
+};

-
+// Create the logger
+const logger = winston.createLogger({
+    transports: [getTransport()]
+});

 // Function to obscure sensitive URL parameters
 export const obscureUrlParams = url => {

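With this change the log level is driven entirely by NODE_ENV: 'production' logs at info with the production format, 'development' at verbose, 'debug' at debug, and any other (or missing) value falls back to verbose with a console warning. A quick way to see the effect, assuming logger.js default-exports the winston instance (the demo file name is hypothetical):

    // log-level-demo.js -- illustrative only
    import logger from './lib/logger.js';

    logger.info('visible in every NODE_ENV');
    logger.verbose('visible under development, debug, or an unset NODE_ENV');
    logger.debug('visible only under NODE_ENV=debug');

    // NODE_ENV=production node log-level-demo.js  -> only the info line
    // NODE_ENV=debug node log-level-demo.js       -> all three lines
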
package/lib/requestExecutor.js
CHANGED
@@ -277,7 +277,7 @@ const makeRequest = async (cortexRequest) => {
     ({ response, duration } = await selectedEndpoint.limiter.schedule({expiration: pathway.timeout * 1000 + 1000, id: `${requestId}_${uuidv4()}`}, () => requestWithMonitor(selectedEndpoint, url, data, axiosConfigObj)));

     if (!controller.signal?.aborted) {
-        logger.
+        logger.verbose(`<<< [${requestId}] received response for request ${index}`);
     }
 }

@@ -323,6 +323,8 @@ const makeRequest = async (cortexRequest) => {
         status !== 504) {
         return { response, duration };
     }
+    // set up for a retry by reinitializing the request
+    cortexRequest.initRequest();
 } else {
     // if there are multiple endpoints, retry everything by default
     // as it could be a temporary issue with one endpoint
@@ -331,6 +333,7 @@ const makeRequest = async (cortexRequest) => {
     if (status == 400) {
         return { response, duration };
     }
+    // set up for a retry by selecting a new endpoint, which will also reinitialize the request
     cortexRequest.selectNewEndpoint();
 }

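Both retry paths now re-run the request's init hook before another attempt: a same-endpoint retry calls initRequest() directly, and selectNewEndpoint() re-runs it after switching endpoints. One likely motivation is that bodies built from streams (for example FormData wrapping fs.createReadStream, as in the Whisper and NeuralSpace plugins below) are consumed by the first attempt, so each retry needs them rebuilt. A rough sketch of that loop shape, not the package's actual retry logic:

    // Sketch only: doPost is a caller-supplied transport function (e.g. an axios wrapper).
    async function sendWithRetries(cortexRequest, doPost, maxAttempts = 3) {
        let lastError;
        for (let attempt = 1; attempt <= maxAttempts; attempt++) {
            try {
                return await doPost(cortexRequest);
            } catch (err) {
                lastError = err;
                // Pick a (possibly different) endpoint; this also re-runs initRequest(),
                // so the init callback can rebuild FormData bodies and headers.
                cortexRequest.selectNewEndpoint();
            }
        }
        throw lastError;
    }
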
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aj-archipelago/cortex",
-  "version": "1.1.
+  "version": "1.1.27",
   "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
   "private": false,
   "repository": {

package/pathways/rag_jarvis.js
CHANGED
@@ -190,7 +190,7 @@ export default {
     indexCount++;
     const rowCount = data.length;
     if (rowCount === 0) {
-        logger.
+        logger.verbose(`Index ${indexCount} had no matching sources.`);
         continue;
     }
     const proportion = rowCount / totalLength;
@@ -203,7 +203,7 @@
     let items = data.splice(0, slots);
     searchResults.push(...items);

-    logger.
+    logger.verbose(`Index ${indexCount} had ${rowCount} matching sources. ${items.length} forwarded to the LLM.`);
     // Update remaining slots for next iteration
     remainingSlots -= slots;
 }

package/server/plugins/azureBingPlugin.js
CHANGED
@@ -39,7 +39,7 @@ class AzureBingPlugin extends ModelPlugin {
     logRequestData(data, responseData, prompt) {
         this.logAIRequestFinished();

-        logger.
+        logger.verbose(`${this.parseResponse(responseData)}`);

         prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
     }

package/server/plugins/azureTranslatePlugin.js
CHANGED
@@ -47,8 +47,8 @@ class AzureTranslatePlugin extends ModelPlugin {
     logRequestData(data, responseData, prompt) {
         const modelInput = data[0].Text;

-        logger.
-        logger.
+        logger.verbose(`${modelInput}`);
+        logger.verbose(`${this.parseResponse(responseData)}`);

         prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
     }

package/server/plugins/claude3VertexPlugin.js
CHANGED
@@ -183,7 +183,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
     if (system) {
         const { length, units } = this.getLength(system);
         logger.info(`[system messages sent containing ${length} ${units}]`);
-        logger.
+        logger.verbose(`${system}`);
     }

     if (messages && messages.length > 1) {
@@ -209,7 +209,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
         " ... " +
         words.slice(-20).join(" ");

-    logger.
+    logger.verbose(
         `message ${index + 1}: role: ${
             message.role
         }, ${units}: ${length}, content: "${preview}"`
@@ -225,7 +225,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
         : message.content;
     const { length, units } = this.getLength(content);
     logger.info(`[request sent containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${content}`);
 }

 if (stream) {
@@ -234,7 +234,7 @@ class Claude3VertexPlugin extends OpenAIVisionPlugin {
     const responseText = this.parseResponse(responseData);
     const { length, units } = this.getLength(responseText);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseText}`);
 }

 prompt &&

package/server/plugins/gemini15ChatPlugin.js
CHANGED
@@ -186,26 +186,26 @@ class Gemini15ChatPlugin extends ModelPlugin {
         const { length, units } = this.getLength(messageContent);
         const preview = words.length < 41 ? messageContent : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

-        logger.
+        logger.verbose(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
     });
 } else if (messages && messages.length === 1) {
-    logger.
+    logger.verbose(`${messages[0].parts[0].text}`);
 }

 // check if responseData is an array or string
 if (typeof responseData === 'string') {
     const { length, units } = this.getLength(responseData);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseData}`);
 } else if (Array.isArray(responseData)) {
     const { mergedResult, safetyRatings } = mergeResults(responseData);
     if (safetyRatings?.length) {
         logger.warn(`!!! response was blocked because the input or response potentially violates policies`);
-        logger.
+        logger.verbose(`Safety Ratings: ${JSON.stringify(safetyRatings, null, 2)}`);
     }
     const { length, units } = this.getLength(mergedResult);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${mergedResult}`);
 } else {
     logger.info(`[response received as an SSE stream]`);
 }

package/server/plugins/geminiChatPlugin.js
CHANGED
@@ -181,26 +181,26 @@ class GeminiChatPlugin extends ModelPlugin {
         const { length, units } = this.getLength(messageContent);
         const preview = words.length < 41 ? messageContent : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

-        logger.
+        logger.verbose(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
     });
 } else if (messages && messages.length === 1) {
-    logger.
+    logger.verbose(`${messages[0].parts[0].text}`);
 }

 // check if responseData is an array or string
 if (typeof responseData === 'string') {
     const { length, units } = this.getLength(responseData);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseData}`);
 } else if (Array.isArray(responseData)) {
     const { mergedResult, safetyRatings } = mergeResults(responseData);
     if (safetyRatings?.length) {
         logger.warn(`!!! response was blocked because the input or response potentially violates policies`);
-        logger.
+        logger.verbose(`Safety Ratings: ${JSON.stringify(safetyRatings, null, 2)}`);
     }
     const { length, units } = this.getLength(mergedResult);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${mergedResult}`);
 } else {
     logger.info(`[response received as an SSE stream]`);
 }

package/server/plugins/localModelPlugin.js
CHANGED
@@ -55,13 +55,13 @@ class LocalModelPlugin extends ModelPlugin {
     //args.push("--temperature", requestParameters.temperature);

     try {
-        logger.
+        logger.verbose(`Running local model: ${executablePath}, ${args}`);
         const result = execFileSync(executablePath, args, { encoding: 'utf8' });
         // Remove only the first occurrence of requestParameters.prompt from the result
         // Could have used regex here but then would need to escape the prompt
         const parts = result.split(requestParameters.prompt, 2);
         const modifiedResult = parts[0] + parts[1];
-        logger.
+        logger.verbose(`Result: ${modifiedResult}`);
         return this.filterFirstResponse(modifiedResult);
     } catch (error) {
         logger.error(`Error running local model: ${error}`);

package/server/plugins/modelPlugin.js
CHANGED
@@ -250,13 +250,13 @@ class ModelPlugin {
     if (modelInput) {
         const { length, units } = this.getLength(modelInput);
         logger.info(`[request sent containing ${length} ${units}]`);
-        logger.
+        logger.verbose(`${modelInput}`);
     }

     const responseText = JSON.stringify(responseData);
     const { length, units } = this.getLength(responseText);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseText}`);

     prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);
 }

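The plugin hunks above and below all apply the same split: one-line summaries (token or character counts, stream notices, safety warnings) stay at logger.info or logger.warn, while full prompts, responses, and safety-rating dumps move to logger.verbose, which the new transport only emits outside production. Condensed, the shape of a logRequestData implementation now looks roughly like this (simplified from the modelPlugin.js hunk; getLength is a stand-in for the plugin's own helper):

    import logger from './lib/logger.js';                      // export form assumed

    const getLength = (text) => ({ length: text.length, units: 'characters' });   // stand-in helper

    function logRequestData(modelInput, responseData) {
        if (modelInput) {
            const { length, units } = getLength(modelInput);
            logger.info(`[request sent containing ${length} ${units}]`);   // summary: every environment
            logger.verbose(`${modelInput}`);                               // full prompt: dev/debug only
        }
        const responseText = JSON.stringify(responseData);
        const { length, units } = getLength(responseText);
        logger.info(`[response received containing ${length} ${units}]`);
        logger.verbose(`${responseText}`);                                 // full response: dev/debug only
    }
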
package/server/plugins/neuralSpacePlugin.js
CHANGED
@@ -117,49 +117,52 @@ class NeuralSpacePlugin extends ModelPlugin {
     const cortexRequest = new CortexRequest({ pathwayResolver });
     cortexRequest.url = this.requestUrl();

-    const
-
-
-
-
-
-
-
-    //phrase/segment level
-    if ((responseFormat && !wordTimestamped) || maxLineWidth) {
-        configObj.speaker_diarization = {
-            // mode: "speakers",
-            // num_speakers: numSpeakers,
-            // overrides: {
-            //     clustering: {
-            //         threshold: clusteringThreshold,
-            //     },
-            // },
+    const nsInitCallback = (requestInstance) => {
+        const formData = new FormData();
+        formData.append("files", fs.createReadStream(chunk));
+        const configObj = {
+            file_transcription: {
+                mode: "advanced",
+            },
         };

-
-
-
-
-
-
-
-
+        //phrase/segment level
+        if ((responseFormat && !wordTimestamped) || maxLineWidth) {
+            configObj.speaker_diarization = {
+                // mode: "speakers",
+                // num_speakers: numSpeakers,
+                // overrides: {
+                //     clustering: {
+                //         threshold: clusteringThreshold,
+                //     },
+                // },
+            };
+
+            configObj.subtitles_guidelines = {
+                line_count: 1,
+            };
+        }
+
+        if (maxLineWidth) {
+            configObj.subtitles_guidelines = {
+                character_count: maxLineWidth,
+            };
+        }
+
+        if (language) {
+            configObj.file_transcription.language_id = language;
+        }
+        formData.append("config", JSON.stringify(configObj));
+
+        requestInstance.data = formData;
+        requestInstance.params = {};
+        requestInstance.addHeaders = {
+            ...formData.getHeaders(),
         };
-    }
-
-    if (language) {
-        configObj.file_transcription.language_id = language;
-    }
-    formData.append("config", JSON.stringify(configObj));
-
-    cortexRequest.data = formData;
-    cortexRequest.params = {};
-    cortexRequest.headers = {
-        ...cortexRequest.headers,
-        ...formData.getHeaders(),
     };

+    cortexRequest.initCallback = nsInitCallback;
+
     const result = await this.executeRequest(cortexRequest);

     const jobId = result?.data?.jobId;

package/server/plugins/openAiChatPlugin.js
CHANGED
@@ -117,7 +117,7 @@ class OpenAIChatPlugin extends ModelPlugin {
     const { length, units } = this.getLength(content);
     const preview = words.length < 41 ? content : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

-    logger.
+    logger.verbose(`message ${index + 1}: role: ${message.role}, ${units}: ${length}, content: "${preview}"`);
     totalLength += length;
     totalUnits = units;
 });
@@ -127,7 +127,7 @@ class OpenAIChatPlugin extends ModelPlugin {
     const content = Array.isArray(message.content) ? message.content.map(item => JSON.stringify(item)).join(', ') : message.content;
     const { length, units } = this.getLength(content);
     logger.info(`[request sent containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${content}`);
 }

 if (stream) {
@@ -136,7 +136,7 @@ class OpenAIChatPlugin extends ModelPlugin {
     const responseText = this.parseResponse(responseData);
     const { length, units } = this.getLength(responseText);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseText}`);
 }

 prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);

package/server/plugins/openAiCompletionPlugin.js
CHANGED
@@ -110,7 +110,7 @@ class OpenAICompletionPlugin extends ModelPlugin {
     const { length, units } = this.getLength(modelInput);

     logger.info(`[request sent containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${modelInput}`);

     if (stream) {
         logger.info(`[response received as an SSE stream]`);
@@ -118,7 +118,7 @@ class OpenAICompletionPlugin extends ModelPlugin {
     const responseText = this.parseResponse(responseData);
     const { length, units } = this.getLength(responseText);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseText}`);
 }

 prompt && prompt.debugInfo && (prompt.debugInfo += `\n${JSON.stringify(data)}`);

package/server/plugins/openAiWhisperPlugin.js
CHANGED
@@ -37,22 +37,26 @@ class OpenAIWhisperPlugin extends ModelPlugin {
     chunks.push(chunk);

     const { language, responseFormat } = parameters;
-    const params = {};
     const { modelPromptText } = this.getCompiledPrompt(text, parameters, prompt);
     const response_format = responseFormat || 'text';

-    const
-    formData.append('file', fs.createReadStream(chunk));
-    formData.append('model', cortexRequest.params.model);
-    formData.append('response_format', response_format);
-    language && formData.append('language', language);
-    modelPromptText && formData.append('prompt', modelPromptText);
+    const whisperInitCallback = (requestInstance) => {

-
-
-
+        const formData = new FormData();
+        formData.append('file', fs.createReadStream(chunk));
+        formData.append('model', requestInstance.params.model);
+        formData.append('response_format', response_format);
+        language && formData.append('language', language);
+        modelPromptText && formData.append('prompt', modelPromptText);

+        requestInstance.data = formData;
+        requestInstance.addHeaders = { ...formData.getHeaders() };
+
+    };
+
+    cortexRequest.initCallback = whisperInitCallback;
     return this.executeRequest(cortexRequest);
+
 } catch (err) {
     logger.error(`Error getting word timestamped data from api: ${err}`);
     throw err;

package/server/plugins/palmChatPlugin.js
CHANGED
@@ -190,13 +190,13 @@ class PalmChatPlugin extends ModelPlugin {
     if (context) {
         const { length, units } = this.getLength(context);
         logger.info(`[chat request contains context information of length ${length} ${units}]`)
-        logger.
+        logger.verbose(`context: ${context}`);
     }

     if (examples && examples.length) {
         logger.info(`[chat request contains ${examples.length} examples]`);
         examples.forEach((example, index) => {
-            logger.
+            logger.verbose(`example ${index + 1}: input: "${example.input.content}", output: "${example.output.content}"`);
         });
     }

@@ -207,10 +207,10 @@ class PalmChatPlugin extends ModelPlugin {
         const { length, units } = this.getLength(message.content);
         const preview = words.length < 41 ? message.content : words.slice(0, 20).join(" ") + " ... " + words.slice(-20).join(" ");

-        logger.
+        logger.verbose(`message ${index + 1}: author: ${message.author}, ${units}: ${length}, content: "${preview}"`);
     });
 } else if (messages && messages.length === 1) {
-    logger.
+    logger.verbose(`${messages[0].content}`);
 }

 const safetyAttributes = this.getSafetyAttributes(responseData);
@@ -218,7 +218,7 @@ class PalmChatPlugin extends ModelPlugin {
     const responseText = this.parseResponse(responseData);
     const { length, units } = this.getLength(responseText);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseText}`);

     if (safetyAttributes) {
         logger.warn(`[response contains safety attributes: ${JSON.stringify(safetyAttributes, null, 2)}]`);

package/server/plugins/palmCompletionPlugin.js
CHANGED
@@ -114,13 +114,13 @@ class PalmCompletionPlugin extends ModelPlugin {
     if (modelInput) {
         const { length, units } = this.getLength(modelInput);
         logger.info(`[request sent containing ${length} ${units}]`);
-        logger.
+        logger.verbose(`${modelInput}`);
     }

     const responseText = this.parseResponse(responseData);
     const { length, units } = this.getLength(responseText);
     logger.info(`[response received containing ${length} ${units}]`);
-    logger.
+    logger.verbose(`${responseText}`);

     if (safetyAttributes) {
         logger.warn(`[response contains safety attributes: ${JSON.stringify(safetyAttributes, null, 2)}]`);