@aj-archipelago/cortex 1.3.37 → 1.3.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/pathwayTools.js +39 -3
- package/package.json +1 -1
- package/pathways/chat_title.js +2 -1
- package/pathways/system/entity/sys_entity_agent.js +3 -6
- package/pathways/system/entity/tools/sys_tool_cognitive_search.js +18 -0
- package/server/pathwayResolver.js +3 -1
- package/server/plugins/openAiVisionPlugin.js +2 -24
package/lib/pathwayTools.js
CHANGED

@@ -97,11 +97,23 @@ const callTool = async (toolName, args, toolDefinitions, pathwayResolver) => {
                 // Extract and add each search result
                 parsedResult.value.forEach(result => {
                     if (result.searchResultId) {
+                        // Build content by concatenating headers and chunk if available
+                        let content = '';
+                        if (result.header_1) content += result.header_1 + '\n\n';
+                        if (result.header_2) content += result.header_2 + '\n\n';
+                        if (result.header_3) content += result.header_3 + '\n\n';
+                        if (result.chunk) content += result.chunk;
+
+                        // If no headers/chunk were found, fall back to existing content fields
+                        if (!content) {
+                            content = result.content || result.text || result.chunk || '';
+                        }
+
                         pathwayResolver.searchResults.push({
                             searchResultId: result.searchResultId,
-                            title: result.title || '',
+                            title: result.title || result.key || '',
                             url: result.url || '',
-                            content:
+                            content: content,
                             path: result.path || '',
                             wireid: result.wireid || '',
                             source: result.source || '',
@@ -120,6 +132,30 @@ const callTool = async (toolName, args, toolDefinitions, pathwayResolver) => {
     }
 }
 
+const addCitationsToResolver = (pathwayResolver, contentBuffer) => {
+    const regex = /:cd_source\[(.*?)\]/g;
+    let match;
+    const foundIds = [];
+    while ((match = regex.exec(contentBuffer)) !== null) {
+        // Ensure the capture group exists and is not empty
+        if (match[1] && match[1].trim()) {
+            foundIds.push(match[1].trim());
+        }
+    }
+
+    if (foundIds.length > 0) {
+        const {searchResults, tool} = pathwayResolver;
+        logger.info(`Found referenced searchResultIds: ${foundIds.join(', ')}`);
+
+        if (searchResults) {
+            const toolObj = typeof tool === 'string' ? JSON.parse(tool) : (tool || {});
+            toolObj.citations = searchResults
+                .filter(result => foundIds.includes(result.searchResultId));
+            pathwayResolver.tool = JSON.stringify(toolObj);
+        }
+    }
+}
+
 const gpt3Encode = (text) => {
     return encode(text);
 }
@@ -166,4 +202,4 @@ const say = async (requestId, message, maxMessageLength = Infinity, voiceRespons
     }
 };
 
-export { callPathway, gpt3Encode, gpt3Decode, say, callTool };
+export { callPathway, gpt3Encode, gpt3Decode, say, callTool, addCitationsToResolver };
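For orientation, the new addCitationsToResolver helper only touches two fields on whatever resolver-like object it is given: it reads searchResults and rewrites tool as a JSON string carrying a citations array. A minimal usage sketch, assuming the relative import path used elsewhere in the repo; the resolver object, IDs, and buffer text below are invented for illustration:

// Sketch only: resolver shape and buffer contents are illustrative, not from the package.
import { addCitationsToResolver } from '../lib/pathwayTools.js'; // adjust path to your setup

const resolver = {
    searchResults: [
        { searchResultId: 'sr-1', title: 'Doc A', url: 'https://example.com/a', content: '...' },
        { searchResultId: 'sr-2', title: 'Doc B', url: 'https://example.com/b', content: '...' },
    ],
    tool: JSON.stringify({ search: true }),
};

// Model output that cites one result via the :cd_source[...] directive.
const contentBuffer = 'Revenue grew last quarter :cd_source[sr-1].';

addCitationsToResolver(resolver, contentBuffer);

// resolver.tool now carries only the cited result in its citations array.
console.log(JSON.parse(resolver.tool).citations.map(c => c.searchResultId)); // ['sr-1']

This is the same call pattern the pathwayResolver.js and openAiVisionPlugin.js changes below adopt.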
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@aj-archipelago/cortex",
-  "version": "1.3.37",
+  "version": "1.3.39",
   "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
   "private": false,
   "repository": {
package/pathways/chat_title.js
CHANGED

@@ -16,10 +16,11 @@ export default {
         }),
     ],
     inputParameters: {
+        chatHistory: [{role: '', content: []}],
         title: '',
         text: '',
     },
-    model: 'oai-
+    model: 'oai-gpt41-nano',
     useInputChunking: false,
     temperature: 0,
     enableDuplicateRequests: false
package/pathways/system/entity/sys_entity_agent.js
CHANGED

@@ -269,7 +269,6 @@ export default {
         try {
             let currentMessages = JSON.parse(JSON.stringify(args.chatHistory));
 
-            // Run the initial prompt with streaming
             let response = await runAllPrompts({
                 ...args,
                 chatHistory: currentMessages,
@@ -279,16 +278,14 @@
 
             let toolCallback = pathwayResolver.pathway.toolCallback;
             while (response?.tool_calls) {
-                response = await toolCallback(args, response,
+                response = await toolCallback(args, response, pathwayResolver);
             }
 
-            // Return the final response
             return response;
 
         } catch (e) {
-
-            const chatResponse = await callPathway('sys_generator_quick', {...args, model: styleModel, stream: false},
-            resolver.tool = JSON.stringify({ search: false, title: args.title });
+            pathwayResolver.logError(e);
+            const chatResponse = await callPathway('sys_generator_quick', {...args, model: styleModel, stream: false}, pathwayResolver);
             return chatResponse;
         }
     }
package/pathways/system/entity/tools/sys_tool_cognitive_search.js
CHANGED

@@ -199,6 +199,24 @@
 
     const combinedResults = [];
 
+    // Add OData context and count information if present
+    if (parsedResponse["@odata.context"]) {
+        combinedResults.push({
+            searchResultId: getSearchResultId(),
+            key: "@odata.context",
+            content: parsedResponse["@odata.context"],
+            source_type: 'metadata'
+        });
+    }
+    if (parsedResponse["@odata.count"]) {
+        combinedResults.push({
+            searchResultId: getSearchResultId(),
+            key: "@odata.count",
+            content: parsedResponse["@odata.count"].toString(),
+            source_type: 'metadata'
+        });
+    }
+
     if (parsedResponse.value && Array.isArray(parsedResponse.value)) {
         // Filter out vector fields from each result before adding to combinedResults
         combinedResults.push(...parsedResponse.value.map(result => ({
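Because pathwayTools.js now falls back to result.key when a result has no title (see the first file above), these metadata rows would presumably surface with the OData key as their title. A rough sketch of the entries the tool prepends for a hypothetical paged query; the response values and the generated IDs shown are invented for illustration:

// Illustrative response shape; getSearchResultId() stands in for the tool's internal ID generator.
const parsedResponse = {
    "@odata.context": "https://example.search.windows.net/indexes('docs')/$metadata#docs",
    "@odata.count": 42,
    value: [ /* ordinary search hits */ ],
};

// combinedResults would then start with two metadata entries roughly like:
// { searchResultId: '<generated>', key: '@odata.context', content: "https://example...#docs", source_type: 'metadata' }
// { searchResultId: '<generated>', key: '@odata.count',   content: '42',                      source_type: 'metadata' }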
package/server/pathwayResolver.js
CHANGED

@@ -7,7 +7,7 @@ import { PathwayResponseParser } from './pathwayResponseParser.js';
 import { Prompt } from './prompt.js';
 import { getv, setv } from '../lib/keyValueStorageClient.js';
 import { requestState } from './requestState.js';
-import { callPathway } from '../lib/pathwayTools.js';
+import { callPathway, addCitationsToResolver } from '../lib/pathwayTools.js';
 import { publishRequestProgress } from '../lib/redisSubscription.js';
 import logger from '../lib/logger.js';
 // eslint-disable-next-line import/no-extraneous-dependencies
@@ -322,6 +322,8 @@ class PathwayResolver {
             await saveChangedMemory();
         }
 
+        addCitationsToResolver(this, data);
+
         return data;
     }
 
package/server/plugins/openAiVisionPlugin.js
CHANGED

@@ -1,7 +1,7 @@
 import OpenAIChatPlugin from './openAiChatPlugin.js';
 import logger from '../../lib/logger.js';
 import { requestState } from '../requestState.js';
-
+import { addCitationsToResolver } from '../../lib/pathwayTools.js';
 function safeJsonParse(content) {
     try {
         const parsedContent = JSON.parse(content);
@@ -289,29 +289,7 @@ class OpenAIVisionPlugin extends OpenAIChatPlugin {
                     break;
                 default: // Includes 'stop' and other normal finish reasons
                     // Look to see if we need to add citations to the response
-
-                    const regex = /:cd_source\[(.*?)\]/g;
-                    let match;
-                    const foundIds = [];
-                    while ((match = regex.exec(this.contentBuffer)) !== null) {
-                        // Ensure the capture group exists and is not empty
-                        if (match[1] && match[1].trim()) {
-                            foundIds.push(match[1].trim());
-                        }
-                    }
-
-                    if (foundIds.length > 0) {
-                        const {searchResults, tool} = pathwayResolver;
-                        logger.info(`Found referenced searchResultIds: ${foundIds.join(', ')}`);
-
-                        if (searchResults) {
-                            const toolObj = typeof tool === 'string' ? JSON.parse(tool) : (tool || {});
-                            toolObj.citations = searchResults
-                                .filter(result => foundIds.includes(result.searchResultId));
-                            pathwayResolver.tool = JSON.stringify(toolObj);
-                        }
-                    }
-                    }
+                    addCitationsToResolver(pathwayResolver, this.contentBuffer);
                     requestProgress.progress = 1;
                     // Clear buffers on finish
                     this.toolCallsBuffer = [];