@reverbia/sdk 1.0.0-next.20251218151654 → 1.0.0-next.20251218195705

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -506,6 +506,8 @@ interface BaseSendMessageWithStorageArgs {
   files?: FileMetadata[];
   onData?: (chunk: string) => void;
   memoryContext?: string;
+  searchContext?: string;
+  sources?: SearchSource[];
 }
 interface BaseSendMessageSuccessResult {
   data: LlmapiChatCompletionResponse;
@@ -506,6 +506,8 @@ interface BaseSendMessageWithStorageArgs {
   files?: FileMetadata[];
   onData?: (chunk: string) => void;
   memoryContext?: string;
+  searchContext?: string;
+  sources?: SearchSource[];
 }
 interface BaseSendMessageSuccessResult {
   data: LlmapiChatCompletionResponse;
@@ -1315,7 +1315,8 @@ function useChat(options) {
     onData,
     runTools = true,
     headers,
-    memoryContext
+    memoryContext,
+    searchContext
   }) => {
     const messagesValidation = validateMessages(messages);
     if (!messagesValidation.valid) {
@@ -1341,6 +1342,22 @@ function useChat(options) {
       };
       messagesWithContext = [memorySystemMessage, ...messages];
     }
+    if (searchContext) {
+      const searchSystemMessage = {
+        role: "system",
+        content: [
+          {
+            type: "text",
+            text: "Here are the search results for the user's query. Use this information to respond to the user's request:"
+          },
+          {
+            type: "text",
+            text: searchContext
+          }
+        ]
+      };
+      messagesWithContext = [searchSystemMessage, ...messagesWithContext];
+    }
     let messagesWithToolContext = messagesWithContext;
     const shouldRunTools = runTools && tools && tools.length > 0;
     if (shouldRunTools) {
@@ -2223,7 +2240,9 @@ function useChatStorage(options) {
       onData: perRequestOnData,
       runTools,
       headers,
-      memoryContext
+      memoryContext,
+      searchContext,
+      sources
     } = args;
     let convId;
     try {
@@ -2289,7 +2308,8 @@ function useChatStorage(options) {
         onData: perRequestOnData,
         runTools,
         headers,
-        memoryContext
+        memoryContext,
+        searchContext
       });
       const responseDuration = (Date.now() - startTime) / 1e3;
       if (result.error || !result.data) {
@@ -2306,19 +2326,24 @@ function useChatStorage(options) {
         model: responseModel,
         usage: convertUsageToStored(abortedResult.data?.usage),
         responseDuration,
-        wasStopped: true
+        wasStopped: true,
+        sources
       });
       const completionData = abortedResult.data || {
         id: `aborted-${Date.now()}`,
         model: responseModel,
-        choices: [{
-          index: 0,
-          message: {
-            role: "assistant",
-            content: [{ type: "text", text: assistantContent2 }]
-          },
-          finish_reason: "stop"
-        }],
+        choices: [
+          {
+            index: 0,
+            message: {
+              role: "assistant",
+              content: [
+                { type: "text", text: assistantContent2 }
+              ]
+            },
+            finish_reason: "stop"
+          }
+        ],
         usage: void 0
       };
       return {
@@ -2349,7 +2374,8 @@ function useChatStorage(options) {
         content: assistantContent,
         model: responseData.model,
         usage: convertUsageToStored(responseData.usage),
-        responseDuration
+        responseDuration,
+        sources
       });
     } catch (err) {
       return {
@@ -2377,7 +2403,12 @@ function useChatStorage(options) {
   );
   const updateMessageEmbedding = (0, import_react2.useCallback)(
     async (uniqueId, vector, embeddingModel) => {
-      return updateMessageEmbeddingOp(storageCtx, uniqueId, vector, embeddingModel);
+      return updateMessageEmbeddingOp(
+        storageCtx,
+        uniqueId,
+        vector,
+        embeddingModel
+      );
     },
     [storageCtx]
   );
@@ -476,6 +476,11 @@ type SendMessageArgs = BaseSendMessageArgs & {
    * This is typically formatted memories from useMemoryStorage.
    */
   memoryContext?: string;
+  /**
+   * Search context to inject as a system message.
+   * This is typically formatted search results from useSearch.
+   */
+  searchContext?: string;
 };
 type SendMessageResult = {
   data: LlmapiChatCompletionResponse;
@@ -733,6 +738,8 @@ interface BaseSendMessageWithStorageArgs {
   files?: FileMetadata[];
   onData?: (chunk: string) => void;
   memoryContext?: string;
+  searchContext?: string;
+  sources?: SearchSource[];
 }
 interface BaseUseChatStorageResult {
   isLoading: boolean;
@@ -476,6 +476,11 @@ type SendMessageArgs = BaseSendMessageArgs & {
    * This is typically formatted memories from useMemoryStorage.
    */
   memoryContext?: string;
+  /**
+   * Search context to inject as a system message.
+   * This is typically formatted search results from useSearch.
+   */
+  searchContext?: string;
 };
 type SendMessageResult = {
   data: LlmapiChatCompletionResponse;
@@ -733,6 +738,8 @@ interface BaseSendMessageWithStorageArgs {
   files?: FileMetadata[];
   onData?: (chunk: string) => void;
   memoryContext?: string;
+  searchContext?: string;
+  sources?: SearchSource[];
 }
 interface BaseUseChatStorageResult {
   isLoading: boolean;
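
The declaration hunks above add two optional fields to the send-message argument types: searchContext, a pre-formatted string of search results (typically produced by useSearch), and sources, structured SearchSource entries that the storage hook saves alongside the assistant message. The sketch below shows how a caller might pass them; it assumes the hooks expose a sendMessage function matching BaseSendMessageWithStorageArgs and that SearchSource is exported, and every option shape not shown in this diff is a placeholder rather than the SDK's documented API.

import { useChatStorage } from "@reverbia/sdk";
import type { SearchSource } from "@reverbia/sdk"; // assumed export

function AskWithSearch({ searchContext, sources }: { searchContext: string; sources: SearchSource[] }) {
  // Assumed: the hook returns a sendMessage function typed by BaseSendMessageWithStorageArgs.
  const { sendMessage } = useChatStorage({ /* storage options not shown in this diff */ });

  const ask = async (question: string) =>
    sendMessage({
      messages: [{ role: "user", content: [{ type: "text", text: question }] }],
      searchContext, // injected by useChat as a leading system message
      sources        // persisted with the stored assistant message
    });

  // rendering omitted in this sketch
  return null;
}
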
@@ -1238,7 +1238,8 @@ function useChat(options) {
     onData,
     runTools = true,
     headers,
-    memoryContext
+    memoryContext,
+    searchContext
   }) => {
     const messagesValidation = validateMessages(messages);
     if (!messagesValidation.valid) {
@@ -1264,6 +1265,22 @@ function useChat(options) {
       };
       messagesWithContext = [memorySystemMessage, ...messages];
     }
+    if (searchContext) {
+      const searchSystemMessage = {
+        role: "system",
+        content: [
+          {
+            type: "text",
+            text: "Here are the search results for the user's query. Use this information to respond to the user's request:"
+          },
+          {
+            type: "text",
+            text: searchContext
+          }
+        ]
+      };
+      messagesWithContext = [searchSystemMessage, ...messagesWithContext];
+    }
     let messagesWithToolContext = messagesWithContext;
     const shouldRunTools = runTools && tools && tools.length > 0;
     if (shouldRunTools) {
@@ -2149,7 +2166,9 @@ function useChatStorage(options) {
       onData: perRequestOnData,
       runTools,
       headers,
-      memoryContext
+      memoryContext,
+      searchContext,
+      sources
     } = args;
     let convId;
     try {
@@ -2215,7 +2234,8 @@ function useChatStorage(options) {
         onData: perRequestOnData,
         runTools,
         headers,
-        memoryContext
+        memoryContext,
+        searchContext
       });
       const responseDuration = (Date.now() - startTime) / 1e3;
       if (result.error || !result.data) {
@@ -2232,19 +2252,24 @@ function useChatStorage(options) {
         model: responseModel,
         usage: convertUsageToStored(abortedResult.data?.usage),
         responseDuration,
-        wasStopped: true
+        wasStopped: true,
+        sources
      });
       const completionData = abortedResult.data || {
         id: `aborted-${Date.now()}`,
         model: responseModel,
-        choices: [{
-          index: 0,
-          message: {
-            role: "assistant",
-            content: [{ type: "text", text: assistantContent2 }]
-          },
-          finish_reason: "stop"
-        }],
+        choices: [
+          {
+            index: 0,
+            message: {
+              role: "assistant",
+              content: [
+                { type: "text", text: assistantContent2 }
+              ]
+            },
+            finish_reason: "stop"
+          }
+        ],
         usage: void 0
       };
       return {
@@ -2275,7 +2300,8 @@ function useChatStorage(options) {
         content: assistantContent,
         model: responseData.model,
         usage: convertUsageToStored(responseData.usage),
-        responseDuration
+        responseDuration,
+        sources
       });
     } catch (err) {
       return {
@@ -2303,7 +2329,12 @@ function useChatStorage(options) {
   );
   const updateMessageEmbedding = useCallback2(
     async (uniqueId, vector, embeddingModel) => {
-      return updateMessageEmbeddingOp(storageCtx, uniqueId, vector, embeddingModel);
+      return updateMessageEmbeddingOp(
+        storageCtx,
+        uniqueId,
+        vector,
+        embeddingModel
+      );
     },
     [storageCtx]
   );
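
For reference, the searchContext branch added to useChat in both builds above prepends a single system message ahead of any memory context, so the request that reaches the completion endpoint effectively starts with the search results. The following is a sketch of the resulting message array, reconstructed from the hunks above rather than taken verbatim from the SDK source:

// Effective ordering when both searchContext and memoryContext are set:
const messagesWithContext = [
  {
    role: "system",
    content: [
      { type: "text", text: "Here are the search results for the user's query. Use this information to respond to the user's request:" },
      { type: "text", text: searchContext } // the caller-supplied formatted results
    ]
  },
  // ...then the memory system message (when memoryContext is set), then the caller's messages
];
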
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@reverbia/sdk",
-  "version": "1.0.0-next.20251218151654",
+  "version": "1.0.0-next.20251218195705",
   "description": "",
   "main": "./dist/index.cjs",
   "module": "./dist/index.mjs",