observa-sdk 0.0.14 → 0.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -451,7 +451,7 @@ function extractProviderFromModel(model) {
  }
  if (typeof model === "string") {
  const parts = model.split("/");
- if (parts.length > 1) {
+ if (parts.length > 1 && parts[0]) {
  return parts[0].toLowerCase();
  }
  const modelLower = model.toLowerCase();
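
Why the guard helps: splitting a model id that starts with "/" yields an empty first segment, so the old check returned "" as the provider instead of falling through to the name-based detection that follows. Below is a minimal, hypothetical sketch of the guarded extraction (the function name is illustrative; the real extractProviderFromModel continues with additional heuristics not shown here):

// sketch.js -- illustrative only, not part of the package
function providerFromModelId(model) {
  const parts = model.split("/");
  // The added parts[0] check skips empty segments such as in "/gpt-4".
  if (parts.length > 1 && parts[0]) {
    return parts[0].toLowerCase();
  }
  return null; // stand-in for the fallback model-name heuristics
}

console.log(providerFromModelId("openai/gpt-4o")); // "openai"
console.log(providerFromModelId("/gpt-4"));        // null (previously "")
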
@@ -537,6 +537,37 @@ async function traceGenerateText(originalFn, args, options) {
  throw error;
  }
  }
+ function wrapReadableStream(stream, onComplete, onError) {
+ const [userStream, trackingStream] = stream.tee();
+ const decoder = new TextDecoder();
+ let firstTokenTime = null;
+ const streamStartTime = Date.now();
+ const chunks = [];
+ (async () => {
+ try {
+ const reader = trackingStream.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (firstTokenTime === null && value) {
+ firstTokenTime = Date.now();
+ }
+ const text = decoder.decode(value, { stream: true });
+ chunks.push(text);
+ }
+ const fullText = chunks.join("");
+ onComplete({
+ text: fullText,
+ timeToFirstToken: firstTokenTime ? firstTokenTime - streamStartTime : null,
+ streamingDuration: firstTokenTime ? Date.now() - firstTokenTime : null,
+ totalLatency: Date.now() - streamStartTime
+ });
+ } catch (error) {
+ onError(error);
+ }
+ })();
+ return userStream;
+ }
  async function traceStreamText(originalFn, args, options) {
  const startTime = Date.now();
  const requestParams = args[0] || {};
@@ -546,43 +577,46 @@ async function traceStreamText(originalFn, args, options) {
  try {
  const result = await originalFn(...args);
  if (result.textStream) {
- const wrappedStream = wrapStream(
- result.textStream,
- (fullResponse) => {
- recordTrace3(
+ const originalTextStream = result.textStream;
+ const isReadableStream = originalTextStream && typeof originalTextStream.getReader === "function";
+ if (isReadableStream) {
+ const wrappedStream = wrapReadableStream(
+ originalTextStream,
+ (fullResponse) => {
+ recordTrace3(
+ {
+ model: modelIdentifier,
+ prompt: requestParams.prompt || requestParams.messages || null,
+ messages: requestParams.messages || null
+ },
+ fullResponse,
+ startTime,
+ options,
+ fullResponse.timeToFirstToken,
+ fullResponse.streamingDuration,
+ provider
+ );
+ },
+ (err) => recordError3(
  {
  model: modelIdentifier,
- prompt: requestParams.prompt || requestParams.messages || null,
- messages: requestParams.messages || null
+ prompt: requestParams.prompt || requestParams.messages || null
  },
- fullResponse,
+ err,
  startTime,
- options,
- fullResponse.timeToFirstToken,
- fullResponse.streamingDuration,
- provider
- );
- },
- (err) => recordError3(
- {
- model: modelIdentifier,
- prompt: requestParams.prompt || requestParams.messages || null
- },
- err,
- startTime,
- options
- ),
- "vercel-ai"
- );
- const wrappedResult = Object.create(Object.getPrototypeOf(result));
- Object.assign(wrappedResult, result);
- Object.defineProperty(wrappedResult, "textStream", {
- value: wrappedStream,
- writable: true,
- enumerable: true,
- configurable: true
- });
- return wrappedResult;
+ options
+ )
+ );
+ const wrappedResult = Object.create(Object.getPrototypeOf(result));
+ Object.assign(wrappedResult, result);
+ Object.defineProperty(wrappedResult, "textStream", {
+ value: wrappedStream,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ return wrappedResult;
+ }
  }
  recordTrace3(
  {
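
The streaming change above replaces the previous wrapStream call with a tee-based helper: the ReadableStream handed back to the caller is left untouched, while a second branch is drained in the background to capture the full text, time to first token, and total latency. A minimal, self-contained sketch of that pattern follows (assumed names, runnable in Node 18+; not the SDK's actual API):

// tee-sketch.mjs -- illustrative only
const source = new ReadableStream({
  start(controller) {
    const encoder = new TextEncoder();
    controller.enqueue(encoder.encode("hello "));
    controller.enqueue(encoder.encode("world"));
    controller.close();
  }
});

// tee() gives two independent copies of the same stream.
const [userStream, trackingStream] = source.tee();

// Background branch: drain the tracking copy and collect metrics,
// analogous to what wrapReadableStream does via onComplete/onError.
(async () => {
  const decoder = new TextDecoder();
  const reader = trackingStream.getReader();
  const start = Date.now();
  let text = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true });
  }
  console.log("tracked:", text, "latency(ms):", Date.now() - start);
})();

// userStream is what the caller consumes; the background drain does not affect it.
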
package/dist/index.js CHANGED
@@ -431,7 +431,7 @@ function extractProviderFromModel(model) {
  }
  if (typeof model === "string") {
  const parts = model.split("/");
- if (parts.length > 1) {
+ if (parts.length > 1 && parts[0]) {
  return parts[0].toLowerCase();
  }
  const modelLower = model.toLowerCase();
@@ -517,6 +517,37 @@ async function traceGenerateText(originalFn, args, options) {
  throw error;
  }
  }
+ function wrapReadableStream(stream, onComplete, onError) {
+ const [userStream, trackingStream] = stream.tee();
+ const decoder = new TextDecoder();
+ let firstTokenTime = null;
+ const streamStartTime = Date.now();
+ const chunks = [];
+ (async () => {
+ try {
+ const reader = trackingStream.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ if (firstTokenTime === null && value) {
+ firstTokenTime = Date.now();
+ }
+ const text = decoder.decode(value, { stream: true });
+ chunks.push(text);
+ }
+ const fullText = chunks.join("");
+ onComplete({
+ text: fullText,
+ timeToFirstToken: firstTokenTime ? firstTokenTime - streamStartTime : null,
+ streamingDuration: firstTokenTime ? Date.now() - firstTokenTime : null,
+ totalLatency: Date.now() - streamStartTime
+ });
+ } catch (error) {
+ onError(error);
+ }
+ })();
+ return userStream;
+ }
  async function traceStreamText(originalFn, args, options) {
  const startTime = Date.now();
  const requestParams = args[0] || {};
@@ -526,43 +557,46 @@ async function traceStreamText(originalFn, args, options) {
  try {
  const result = await originalFn(...args);
  if (result.textStream) {
- const wrappedStream = wrapStream(
- result.textStream,
- (fullResponse) => {
- recordTrace3(
+ const originalTextStream = result.textStream;
+ const isReadableStream = originalTextStream && typeof originalTextStream.getReader === "function";
+ if (isReadableStream) {
+ const wrappedStream = wrapReadableStream(
+ originalTextStream,
+ (fullResponse) => {
+ recordTrace3(
+ {
+ model: modelIdentifier,
+ prompt: requestParams.prompt || requestParams.messages || null,
+ messages: requestParams.messages || null
+ },
+ fullResponse,
+ startTime,
+ options,
+ fullResponse.timeToFirstToken,
+ fullResponse.streamingDuration,
+ provider
+ );
+ },
+ (err) => recordError3(
  {
  model: modelIdentifier,
- prompt: requestParams.prompt || requestParams.messages || null,
- messages: requestParams.messages || null
+ prompt: requestParams.prompt || requestParams.messages || null
  },
- fullResponse,
+ err,
  startTime,
- options,
- fullResponse.timeToFirstToken,
- fullResponse.streamingDuration,
- provider
- );
- },
- (err) => recordError3(
- {
- model: modelIdentifier,
- prompt: requestParams.prompt || requestParams.messages || null
- },
- err,
- startTime,
- options
- ),
- "vercel-ai"
- );
- const wrappedResult = Object.create(Object.getPrototypeOf(result));
- Object.assign(wrappedResult, result);
- Object.defineProperty(wrappedResult, "textStream", {
- value: wrappedStream,
- writable: true,
- enumerable: true,
- configurable: true
- });
- return wrappedResult;
+ options
+ )
+ );
+ const wrappedResult = Object.create(Object.getPrototypeOf(result));
+ Object.assign(wrappedResult, result);
+ Object.defineProperty(wrappedResult, "textStream", {
+ value: wrappedStream,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ return wrappedResult;
+ }
  }
  recordTrace3(
  {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "observa-sdk",
- "version": "0.0.14",
+ "version": "0.0.15",
  "description": "Enterprise-grade observability SDK for AI applications. Track and monitor LLM interactions with zero friction.",
  "type": "module",
  "main": "./dist/index.cjs",