@aj-archipelago/cortex 1.3.36 → 1.3.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -97,11 +97,23 @@ const callTool = async (toolName, args, toolDefinitions, pathwayResolver) => {
  // Extract and add each search result
  parsedResult.value.forEach(result => {
  if (result.searchResultId) {
+ // Build content by concatenating headers and chunk if available
+ let content = '';
+ if (result.header_1) content += result.header_1 + '\n\n';
+ if (result.header_2) content += result.header_2 + '\n\n';
+ if (result.header_3) content += result.header_3 + '\n\n';
+ if (result.chunk) content += result.chunk;
+
+ // If no headers/chunk were found, fall back to existing content fields
+ if (!content) {
+ content = result.content || result.text || result.chunk || '';
+ }
+
  pathwayResolver.searchResults.push({
  searchResultId: result.searchResultId,
- title: result.title || '',
+ title: result.title || result.key || '',
  url: result.url || '',
- content: result.content || '',
+ content: content,
  path: result.path || '',
  wireid: result.wireid || '',
  source: result.source || '',
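For reference, a minimal standalone sketch of the content-assembly behavior added above. The result fields (header_1, header_2, header_3, chunk, content, text) come from the diff; the helper name and sample values are illustrative only.

```js
// Illustrative only: mirrors the header/chunk concatenation added in callTool.
const buildSearchResultContent = (result) => {
  let content = '';
  if (result.header_1) content += result.header_1 + '\n\n';
  if (result.header_2) content += result.header_2 + '\n\n';
  if (result.header_3) content += result.header_3 + '\n\n';
  if (result.chunk) content += result.chunk;
  // Fall back to the older content fields when no headers/chunk are present
  return content || result.content || result.text || result.chunk || '';
};

// Example: a chunked index entry now yields "Policies\n\nRefunds\n\nFull text..."
console.log(buildSearchResultContent({ header_1: 'Policies', header_2: 'Refunds', chunk: 'Full text...' }));
```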
@@ -120,6 +132,30 @@ const callTool = async (toolName, args, toolDefinitions, pathwayResolver) => {
  }
  }
 
+ const addCitationsToResolver = (pathwayResolver, contentBuffer) => {
+ const regex = /:cd_source\[(.*?)\]/g;
+ let match;
+ const foundIds = [];
+ while ((match = regex.exec(contentBuffer)) !== null) {
+ // Ensure the capture group exists and is not empty
+ if (match[1] && match[1].trim()) {
+ foundIds.push(match[1].trim());
+ }
+ }
+
+ if (foundIds.length > 0) {
+ const {searchResults, tool} = pathwayResolver;
+ logger.info(`Found referenced searchResultIds: ${foundIds.join(', ')}`);
+
+ if (searchResults) {
+ const toolObj = typeof tool === 'string' ? JSON.parse(tool) : (tool || {});
+ toolObj.citations = searchResults
+ .filter(result => foundIds.includes(result.searchResultId));
+ pathwayResolver.tool = JSON.stringify(toolObj);
+ }
+ }
+ }
+
  const gpt3Encode = (text) => {
  return encode(text);
  }
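A quick usage sketch of the new addCitationsToResolver helper. Only the :cd_source[...] marker syntax and the searchResults/tool fields come from the diff; the resolver object below is a hand-rolled stand-in for a real PathwayResolver, and the helper is assumed to be in scope (it is exported from lib/pathwayTools.js per this diff).

```js
// Illustrative resolver stub; the real PathwayResolver carries much more state.
const resolver = {
  searchResults: [
    { searchResultId: 'sr-1', title: 'Doc A', url: 'https://example.com/a' },
    { searchResultId: 'sr-2', title: 'Doc B', url: 'https://example.com/b' },
  ],
  tool: JSON.stringify({ title: 'Chat about docs' }),
};

// Model output that references one of the search results via a :cd_source marker.
const contentBuffer = 'Doc A says the limit is 10 MB :cd_source[sr-1].';

addCitationsToResolver(resolver, contentBuffer);
// resolver.tool is now a JSON string whose citations array contains only sr-1.
console.log(JSON.parse(resolver.tool).citations.map(c => c.searchResultId)); // ['sr-1']
```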
@@ -166,4 +202,4 @@ const say = async (requestId, message, maxMessageLength = Infinity, voiceRespons
  }
  };
 
- export { callPathway, gpt3Encode, gpt3Decode, say, callTool };
+ export { callPathway, gpt3Encode, gpt3Decode, say, callTool, addCitationsToResolver };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aj-archipelago/cortex",
- "version": "1.3.36",
+ "version": "1.3.38",
  "description": "Cortex is a GraphQL API for AI. It provides a simple, extensible interface for using AI services from OpenAI, Azure and others.",
  "private": false,
  "repository": {
@@ -19,7 +19,7 @@ export default {
  title: '',
  text: '',
  },
- model: 'oai-gpt4o',
+ model: 'oai-gpt41-nano',
  useInputChunking: false,
  temperature: 0,
  enableDuplicateRequests: false
@@ -36,6 +36,10 @@ export default {
  timeout: 600,
 
  toolCallback: async (args, message, resolver) => {
+ if (!args || !message || !resolver) {
+ return;
+ }
+
  const { tool_calls } = message;
  const pathwayResolver = resolver;
  const { entityTools, entityToolsOpenAiFormat } = args;
@@ -170,11 +174,10 @@ export default {
 
  args.chatHistory = finalMessages;
 
- await pathwayResolver.promptAndParse({
+ return await pathwayResolver.promptAndParse({
  ...args,
  tools: entityToolsOpenAiFormat,
  tool_choice: "auto",
- stream: true
  });
  }
  },
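Returning the promptAndParse result here (instead of streaming it and discarding the return value) is what lets the entity pathway drive the tool loop seen in a later hunk. A minimal sketch of that caller-side loop, using only names that appear in this diff (runAllPrompts, args, entityToolsOpenAiFormat, pathwayResolver are the surrounding pathway's locals); the scaffolding is illustrative.

```js
// Illustrative only: keep resolving tool calls until the model answers normally.
let response = await runAllPrompts({ ...args, tools: entityToolsOpenAiFormat, tool_choice: "auto" });

let toolCallback = pathwayResolver.pathway.toolCallback;
while (response?.tool_calls) {
  // toolCallback executes the requested tools and returns the follow-up model response
  response = await toolCallback(args, response, pathwayResolver);
}
// response now holds the final, non-tool model answer
```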
@@ -183,7 +186,7 @@ export default {
  let pathwayResolver = resolver;
 
  // Load input parameters and information into args
- const { entityId, voiceResponse, aiMemorySelfModify } = { ...pathwayResolver.pathway.inputParameters, ...args };
+ const { entityId, voiceResponse, aiMemorySelfModify, stream } = { ...pathwayResolver.pathway.inputParameters, ...args };
 
  const entityConfig = loadEntityConfig(entityId);
  const { entityTools, entityToolsOpenAiFormat } = getToolsForEntity(entityConfig);
@@ -249,6 +252,8 @@ export default {
  // truncate the chat history in case there is really long content
  const truncatedChatHistory = resolver.modelExecutor.plugin.truncateMessagesToTargetLength(args.chatHistory, null, 1000);
 
+ const fetchTitleResponsePromise = callPathway('chat_title', {...args, chatHistory: truncatedChatHistory, stream: false});
+
  // Add the memory context to the chat history if applicable
  if (truncatedChatHistory.length > 1 && entityUseMemory) {
  const memoryContext = await callPathway('sys_read_memory', { ...args, chatHistory: truncatedChatHistory, section: 'memoryContext', priority: 0, recentHours: 0, stream: false }, resolver);
@@ -266,23 +271,26 @@ export default {
  try {
  let currentMessages = JSON.parse(JSON.stringify(args.chatHistory));
 
- // Run the initial prompt with streaming
- const response = await runAllPrompts({
+ const title = await fetchTitleResponsePromise;
+ pathwayResolver.tool = JSON.stringify({ title });
+
+ let response = await runAllPrompts({
  ...args,
  chatHistory: currentMessages,
  tools: entityToolsOpenAiFormat,
- tool_choice: "auto",
- stream: true
+ tool_choice: "auto"
  });
 
+ let toolCallback = pathwayResolver.pathway.toolCallback;
+ while (response?.tool_calls) {
+ response = await toolCallback(args, response, pathwayResolver);
+ }
 
- // Return the final response
  return response;
 
  } catch (e) {
- resolver.logError(e);
- const chatResponse = await callPathway('sys_generator_quick', {...args, model: styleModel, stream: false}, resolver);
- resolver.tool = JSON.stringify({ search: false, title: args.title });
+ pathwayResolver.logError(e);
+ const chatResponse = await callPathway('sys_generator_quick', {...args, model: styleModel, stream: false}, pathwayResolver);
  return chatResponse;
  }
  }
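A simplified sketch of the prefetch pattern used above: the chat_title pathway is started early, without await, so it runs concurrently with memory loading, and the promise is awaited just before the main prompt so the title can be stored on pathwayResolver.tool. The pathway name and argument shape are taken from the diff; the surrounding scaffolding is illustrative.

```js
// Illustrative only: start the cheap title call early, await it later.
const fetchTitleResponsePromise = callPathway('chat_title', {
  ...args,
  chatHistory: truncatedChatHistory,
  stream: false,
});

// ...memory reads and other async setup happen here, in parallel with the title call...

const title = await fetchTitleResponsePromise;
pathwayResolver.tool = JSON.stringify({ title }); // surfaced to clients as tool metadata
```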
@@ -199,6 +199,24 @@ export default {
 
  const combinedResults = [];
 
+ // Add OData context and count information if present
+ if (parsedResponse["@odata.context"]) {
+ combinedResults.push({
+ searchResultId: getSearchResultId(),
+ key: "@odata.context",
+ content: parsedResponse["@odata.context"],
+ source_type: 'metadata'
+ });
+ }
+ if (parsedResponse["@odata.count"]) {
+ combinedResults.push({
+ searchResultId: getSearchResultId(),
+ key: "@odata.count",
+ content: parsedResponse["@odata.count"].toString(),
+ source_type: 'metadata'
+ });
+ }
+
  if (parsedResponse.value && Array.isArray(parsedResponse.value)) {
  // Filter out vector fields from each result before adding to combinedResults
  combinedResults.push(...parsedResponse.value.map(result => ({
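For context, OData-style search responses carry metadata alongside the value array; a hedged sketch of what the new branch produces for such a payload. getSearchResultId is the ID helper already used in this file per the diff; the sample response below is invented. Note that a count of 0 is falsy, so the count entry is only emitted when at least one result matched.

```js
// Illustrative input resembling an OData search response.
const parsedResponse = {
  "@odata.context": "https://example.search.windows.net/indexes('docs')/$metadata#docs",
  "@odata.count": 42,
  value: [/* ...search hits... */],
};

// After the new branch runs, combinedResults starts with two metadata entries:
// { searchResultId: '...', key: '@odata.context', content: 'https://...', source_type: 'metadata' }
// { searchResultId: '...', key: '@odata.count', content: '42', source_type: 'metadata' }
```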
@@ -7,7 +7,7 @@ import { PathwayResponseParser } from './pathwayResponseParser.js';
  import { Prompt } from './prompt.js';
  import { getv, setv } from '../lib/keyValueStorageClient.js';
  import { requestState } from './requestState.js';
- import { callPathway } from '../lib/pathwayTools.js';
+ import { callPathway, addCitationsToResolver } from '../lib/pathwayTools.js';
  import { publishRequestProgress } from '../lib/redisSubscription.js';
  import logger from '../lib/logger.js';
  // eslint-disable-next-line import/no-extraneous-dependencies
@@ -322,6 +322,8 @@ class PathwayResolver {
  await saveChangedMemory();
  }
 
+ addCitationsToResolver(this, data);
+
  return data;
  }
 
@@ -1,7 +1,7 @@
  import OpenAIChatPlugin from './openAiChatPlugin.js';
  import logger from '../../lib/logger.js';
  import { requestState } from '../requestState.js';
-
+ import { addCitationsToResolver } from '../../lib/pathwayTools.js';
  function safeJsonParse(content) {
  try {
  const parsedContent = JSON.parse(content);
@@ -266,7 +266,7 @@ class OpenAIVisionPlugin extends OpenAIChatPlugin {
  switch (finishReason.toLowerCase()) {
  case 'tool_calls':
  // Process complete tool calls when we get the finish reason
- if (this.pathwayToolCallback && this.toolCallsBuffer.length > 0) {
+ if (this.pathwayToolCallback && this.toolCallsBuffer.length > 0 && pathwayResolver) {
  const toolMessage = {
  role: 'assistant',
  content: delta?.content || '',
@@ -289,29 +289,7 @@ class OpenAIVisionPlugin extends OpenAIChatPlugin {
  break;
  default: // Includes 'stop' and other normal finish reasons
  // Look to see if we need to add citations to the response
- if (pathwayResolver && this.contentBuffer) {
- const regex = /:cd_source\[(.*?)\]/g;
- let match;
- const foundIds = [];
- while ((match = regex.exec(this.contentBuffer)) !== null) {
- // Ensure the capture group exists and is not empty
- if (match[1] && match[1].trim()) {
- foundIds.push(match[1].trim());
- }
- }
-
- if (foundIds.length > 0) {
- const {searchResults, tool} = pathwayResolver;
- logger.info(`Found referenced searchResultIds: ${foundIds.join(', ')}`);
-
- if (searchResults) {
- const toolObj = typeof tool === 'string' ? JSON.parse(tool) : (tool || {});
- toolObj.citations = searchResults
- .filter(result => foundIds.includes(result.searchResultId));
- pathwayResolver.tool = JSON.stringify(toolObj);
- }
- }
- }
+ addCitationsToResolver(pathwayResolver, this.contentBuffer);
  requestProgress.progress = 1;
  // Clear buffers on finish
  this.toolCallsBuffer = [];