veryfront 0.0.27 → 0.0.29

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. The information is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/dist/ai/index.js CHANGED
@@ -1709,7 +1709,7 @@ var BYTES_PER_MB = 1024 * 1024;
1709
1709
  // deno.json
1710
1710
  var deno_default = {
1711
1711
  name: "veryfront",
1712
- version: "0.0.27",
1712
+ version: "0.0.29",
1713
1713
  nodeModulesDir: "auto",
1714
1714
  exports: {
1715
1715
  ".": "./src/index.ts",
@@ -2388,6 +2388,7 @@ var AgentRuntime = class {
2388
2388
  }
2389
2389
  /**
2390
2390
  * Stream a response
2391
+ * Returns a ReadableStream compatible with Vercel AI SDK Data Stream Protocol
2391
2392
  */
2392
2393
  async stream(messages, _context, callbacks) {
2393
2394
  for (const msg of messages) {
@@ -2397,10 +2398,18 @@ var AgentRuntime = class {
2397
2398
  const systemPrompt = await this.resolveSystemPrompt();
2398
2399
  const { provider, model } = getProviderFromModel(this.config.model);
2399
2400
  const encoder = new TextEncoder();
2401
+ const messageId = `msg_${Date.now()}`;
2400
2402
  return new ReadableStream({
2401
2403
  start: async (controller) => {
2402
2404
  try {
2403
2405
  this.status = "streaming";
2406
+ const startEvent = JSON.stringify({
2407
+ type: "start",
2408
+ messageId
2409
+ });
2410
+ controller.enqueue(encoder.encode(`data: ${startEvent}
2411
+
2412
+ `));
2404
2413
  const response = await this.executeAgentLoopStreaming(
2405
2414
  provider,
2406
2415
  model,
@@ -2408,24 +2417,25 @@ var AgentRuntime = class {
2408
2417
  memoryMessages,
2409
2418
  controller,
2410
2419
  encoder,
2411
- callbacks
2420
+ callbacks,
2421
+ messageId
2412
2422
  );
2413
- const statusData = JSON.stringify({
2414
- type: "status",
2415
- status: "completed",
2423
+ const finishEvent = JSON.stringify({
2424
+ type: "finish",
2416
2425
  usage: response.usage
2417
2426
  });
2418
- controller.enqueue(encoder.encode(`data: ${statusData}
2427
+ controller.enqueue(encoder.encode(`data: ${finishEvent}
2419
2428
 
2420
2429
  `));
2430
+ controller.enqueue(encoder.encode("data: [DONE]\n\n"));
2421
2431
  controller.close();
2422
2432
  } catch (error) {
2423
2433
  this.status = "error";
2424
- const errorData = JSON.stringify({
2434
+ const errorEvent = JSON.stringify({
2425
2435
  type: "error",
2426
2436
  error: error instanceof Error ? error.message : String(error)
2427
2437
  });
2428
- controller.enqueue(encoder.encode(`data: ${errorData}
2438
+ controller.enqueue(encoder.encode(`data: ${errorEvent}
2429
2439
 
2430
2440
  `));
2431
2441
  controller.close();
@@ -2578,8 +2588,9 @@ var AgentRuntime = class {
2578
2588
  }
2579
2589
  /**
2580
2590
  * Execute agent loop with streaming
2591
+ * Uses Vercel AI SDK Data Stream Protocol format
2581
2592
  */
2582
- async executeAgentLoopStreaming(provider, model, systemPrompt, messages, controller, encoder, callbacks) {
2593
+ async executeAgentLoopStreaming(provider, model, systemPrompt, messages, controller, encoder, callbacks, messageId) {
2583
2594
  const capabilities = getPlatformCapabilities();
2584
2595
  const maxSteps = this.getMaxSteps(capabilities.maxAgentSteps);
2585
2596
  const toolCalls = [];
@@ -2656,11 +2667,11 @@ var AgentRuntime = class {
2656
2667
  switch (event.type) {
2657
2668
  case "content": {
2658
2669
  accumulatedText += event.content;
2659
- const chunkData = JSON.stringify({
2660
- type: "chunk",
2661
- content: event.content
2670
+ const textDeltaEvent = JSON.stringify({
2671
+ type: "text-delta",
2672
+ textDelta: event.content
2662
2673
  });
2663
- controller.enqueue(encoder.encode(`data: ${chunkData}
2674
+ controller.enqueue(encoder.encode(`data: ${textDeltaEvent}
2664
2675
 
2665
2676
  `));
2666
2677
  if (callbacks?.onChunk) {
@@ -2675,12 +2686,28 @@ var AgentRuntime = class {
2675
2686
  name: event.toolCall.name,
2676
2687
  arguments: ""
2677
2688
  });
2689
+ const toolStartEvent = JSON.stringify({
2690
+ type: "tool-call-streaming-start",
2691
+ toolCallId: event.toolCall.id,
2692
+ toolName: event.toolCall.name
2693
+ });
2694
+ controller.enqueue(encoder.encode(`data: ${toolStartEvent}
2695
+
2696
+ `));
2678
2697
  }
2679
2698
  break;
2680
2699
  case "tool_call_delta":
2681
2700
  if (event.id && streamToolCalls.has(event.id)) {
2682
2701
  const tc = streamToolCalls.get(event.id);
2683
2702
  tc.arguments += event.arguments;
2703
+ const toolDeltaEvent = JSON.stringify({
2704
+ type: "tool-call-delta",
2705
+ toolCallId: event.id,
2706
+ argsTextDelta: event.arguments
2707
+ });
2708
+ controller.enqueue(encoder.encode(`data: ${toolDeltaEvent}
2709
+
2710
+ `));
2684
2711
  }
2685
2712
  break;
2686
2713
  case "tool_call_complete":
@@ -2690,6 +2717,16 @@ var AgentRuntime = class {
2690
2717
  name: event.toolCall.name,
2691
2718
  arguments: event.toolCall.arguments
2692
2719
  });
2720
+ const { args } = parseStreamToolArgs(event.toolCall.arguments);
2721
+ const toolCallEvent = JSON.stringify({
2722
+ type: "tool-call",
2723
+ toolCallId: event.toolCall.id,
2724
+ toolName: event.toolCall.name,
2725
+ args
2726
+ });
2727
+ controller.enqueue(encoder.encode(`data: ${toolCallEvent}
2728
+
2729
+ `));
2693
2730
  }
2694
2731
  break;
2695
2732
  case "finish":
@@ -2788,15 +2825,13 @@ var AgentRuntime = class {
2788
2825
  if (callbacks?.onToolCall) {
2789
2826
  callbacks.onToolCall(toolCall);
2790
2827
  }
2791
- const toolCallData = JSON.stringify({
2792
- type: "tool_call",
2793
- toolCall: {
2794
- id: toolCall.id,
2795
- name: toolCall.name,
2796
- args: toolCall.args
2797
- }
2828
+ const toolCallEvent = JSON.stringify({
2829
+ type: "tool-call",
2830
+ toolCallId: toolCall.id,
2831
+ toolName: toolCall.name,
2832
+ args: toolCall.args
2798
2833
  });
2799
- controller.enqueue(encoder.encode(`data: ${toolCallData}
2834
+ controller.enqueue(encoder.encode(`data: ${toolCallEvent}
2800
2835
 
2801
2836
  `));
2802
2837
  const result = await executeTool(tc.name, toolCall.args, {
@@ -2806,15 +2841,12 @@ var AgentRuntime = class {
2806
2841
  toolCall.result = result;
2807
2842
  toolCall.executionTime = Date.now() - startTime;
2808
2843
  toolCalls.push(toolCall);
2809
- const toolResultData = JSON.stringify({
2810
- type: "tool_result",
2811
- toolCall: {
2812
- id: toolCall.id,
2813
- name: toolCall.name,
2814
- result
2815
- }
2844
+ const toolResultEvent = JSON.stringify({
2845
+ type: "tool-result",
2846
+ toolCallId: toolCall.id,
2847
+ result
2816
2848
  });
2817
- controller.enqueue(encoder.encode(`data: ${toolResultData}
2849
+ controller.enqueue(encoder.encode(`data: ${toolResultEvent}
2818
2850
 
2819
2851
  `));
2820
2852
  const toolResultMessage = {
@@ -3214,6 +3246,8 @@ function createAgentStreamResult(stream) {
3214
3246
  "Content-Type": "text/event-stream",
3215
3247
  "Cache-Control": "no-cache",
3216
3248
  "Connection": "keep-alive",
3249
+ // Required header for Vercel AI SDK Data Stream Protocol v1
3250
+ "x-vercel-ai-ui-message-stream": "v1",
3217
3251
  ...options?.headers
3218
3252
  }
3219
3253
  });
@@ -3274,7 +3308,9 @@ ${compatibility.warnings.join("\n")}`
3274
3308
  headers: {
3275
3309
  "Content-Type": "text/event-stream",
3276
3310
  "Cache-Control": "no-cache",
3277
- "Connection": "keep-alive"
3311
+ "Connection": "keep-alive",
3312
+ // Required header for Vercel AI SDK Data Stream Protocol v1
3313
+ "x-vercel-ai-ui-message-stream": "v1"
3278
3314
  }
3279
3315
  });
3280
3316
  },