@xsai-ext/telemetry 0.5.0-beta.1 → 0.5.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +13 -46
  2. package/package.json +3 -3
package/dist/index.js CHANGED
@@ -1,9 +1,9 @@
1
- import { embed as embed$1, clean, embedMany as embedMany$1, trampoline, resolveStepOptions, chat, responseJSON, InvalidResponseError, stepCountAtLeast, executeTool, shouldStop, determineStepType, RemoteAPIError, JSONParseError, DelayedPromise, objCamelToSnake } from 'xsai';
1
+ import { embed as embed$1, clean, embedMany as embedMany$1, trampoline, resolveStepOptions, chat, responseJSON, InvalidResponseError, stepCountAtLeast, executeTool, shouldStop, determineStepType, DelayedPromise, objCamelToSnake } from 'xsai';
2
2
  export * from 'xsai';
3
3
  import { trace, SpanStatusCode } from '@opentelemetry/api';
4
- import { EventSourceParserStream } from 'eventsource-parser/stream';
4
+ import { createControlledStream, errorControllers, closeControllers, EventSourceParserStream, JsonMessageTransformStream } from '@xsai/shared-stream';
5
5
 
6
- var version = "0.5.0-beta.1";
6
+ var version = "0.5.0-beta.2";
7
7
  var pkg = {
8
8
  version: version};
9
9
 
@@ -244,31 +244,6 @@ const generateText = async (options) => {
244
244
  }));
245
245
  };
246
246
 
247
- const parseChunk = (data) => {
248
- if (data.startsWith("{") && data.includes('"error":')) {
249
- throw new RemoteAPIError(`Error from server: ${data}`, {
250
- responseBody: data
251
- });
252
- }
253
- try {
254
- return JSON.parse(data);
255
- } catch (cause) {
256
- throw new JSONParseError(`Failed to parse stream chunk JSON: ${data}`, {
257
- cause,
258
- text: data
259
- });
260
- }
261
- };
262
- const transformChunk = () => {
263
- return new TransformStream({
264
- transform: async (chunk, controller) => {
265
- if (!chunk.data || chunk.data === "[DONE]")
266
- return;
267
- controller.enqueue(parseChunk(chunk.data));
268
- }
269
- });
270
- };
271
-
272
247
  const streamText = (options) => {
273
248
  const tracer = getTracer();
274
249
  const steps = [];
@@ -281,14 +256,11 @@ const streamText = (options) => {
281
256
  const resultMessages = new DelayedPromise();
282
257
  const resultUsage = new DelayedPromise();
283
258
  const resultTotalUsage = new DelayedPromise();
284
- let eventCtrl;
285
- let textCtrl;
286
- let reasoningTextCtrl;
287
- const eventStream = new ReadableStream({ start: (controller) => eventCtrl = controller });
288
- const textStream = new ReadableStream({ start: (controller) => textCtrl = controller });
289
- const reasoningTextStream = new ReadableStream({ start: (controller) => reasoningTextCtrl = controller });
259
+ const [eventStream, eventCtrl] = createControlledStream();
260
+ const [textStream, textCtrl] = createControlledStream();
261
+ const [reasoningTextStream, reasoningTextCtrl] = createControlledStream();
290
262
  const pushEvent = (stepEvent) => {
291
- eventCtrl?.enqueue(stepEvent);
263
+ eventCtrl.current?.enqueue(stepEvent);
292
264
  void options.onEvent?.(stepEvent);
293
265
  };
294
266
  const pushStep = (step) => {
@@ -333,23 +305,22 @@ const streamText = (options) => {
333
305
  let text = "";
334
306
  let reasoningText;
335
307
  const pushText = (content) => {
336
- textCtrl?.enqueue(content);
308
+ textCtrl.current?.enqueue(content);
337
309
  text += content;
338
310
  };
339
311
  const pushReasoningText = (reasoningContent) => {
340
312
  if (reasoningText == null)
341
313
  reasoningText = "";
342
- reasoningTextCtrl?.enqueue(reasoningContent);
314
+ reasoningTextCtrl.current?.enqueue(reasoningContent);
343
315
  reasoningText += reasoningContent;
344
316
  };
345
317
  const tool_calls = [];
346
318
  const toolCalls = [];
347
319
  const toolResults = [];
348
320
  let finishReason = "other";
349
- await stream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(transformChunk()).pipeTo(new WritableStream({
321
+ await stream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(new JsonMessageTransformStream()).pipeTo(new WritableStream({
350
322
  abort: (reason) => {
351
- eventCtrl?.error(reason);
352
- textCtrl?.error(reason);
323
+ errorControllers(reason, eventCtrl, textCtrl, reasoningTextCtrl);
353
324
  },
354
325
  close: () => {
355
326
  },
@@ -490,18 +461,14 @@ const streamText = (options) => {
490
461
  finalError ??= err;
491
462
  }
492
463
  if (finalError != null) {
493
- eventCtrl?.error(finalError);
494
- textCtrl?.error(finalError);
495
- reasoningTextCtrl?.error(finalError);
464
+ errorControllers(finalError, eventCtrl, textCtrl, reasoningTextCtrl);
496
465
  resultSteps.reject(finalError);
497
466
  resultMessages.reject(finalError);
498
467
  resultUsage.reject(finalError);
499
468
  resultTotalUsage.reject(finalError);
500
469
  return;
501
470
  }
502
- eventCtrl?.close();
503
- textCtrl?.close();
504
- reasoningTextCtrl?.close();
471
+ closeControllers(eventCtrl, textCtrl, reasoningTextCtrl);
505
472
  resultSteps.resolve(steps);
506
473
  resultMessages.resolve(messages);
507
474
  resultUsage.resolve(usage);
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@xsai-ext/telemetry",
3
3
  "type": "module",
4
- "version": "0.5.0-beta.1",
4
+ "version": "0.5.0-beta.2",
5
5
  "description": "extra-small AI SDK.",
6
6
  "author": "Moeru AI",
7
7
  "license": "MIT",
@@ -30,8 +30,8 @@
30
30
  ],
31
31
  "dependencies": {
32
32
  "@opentelemetry/api": "^1.9.0",
33
- "eventsource-parser": "^3.0.6",
34
- "xsai": "~0.5.0-beta.1"
33
+ "@xsai/shared-stream": "~0.5.0-beta.2",
34
+ "xsai": "~0.5.0-beta.2"
35
35
  },
36
36
  "devDependencies": {
37
37
  "@langfuse/otel": "^4.5.1",