langwatch 0.1.7 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,13 +13,13 @@
  "format:check": "prettier --check \"{app,lib,components}**/*.{ts,tsx,mdx}\" --cache"
  },
  "dependencies": {
- "@ai-sdk/openai": "^1.0.8",
+ "@ai-sdk/openai": "^1.3.22",
  "@langchain/openai": "^0.1.3",
- "@opentelemetry/api-logs": "^0.56.0",
- "@opentelemetry/exporter-trace-otlp-http": "^0.56.0",
- "@opentelemetry/instrumentation": "^0.56.0",
- "@opentelemetry/sdk-logs": "^0.56.0",
- "@opentelemetry/sdk-trace-node": "^1.29.0",
+ "@opentelemetry/api-logs": "^0.202.0",
+ "@opentelemetry/exporter-trace-otlp-http": "^0.202.0",
+ "@opentelemetry/instrumentation": "^0.202.0",
+ "@opentelemetry/sdk-logs": "^0.202.0",
+ "@opentelemetry/sdk-trace-node": "^2.0.1",
  "@radix-ui/react-alert-dialog": "^1.0.5",
  "@radix-ui/react-dialog": "^1.0.5",
  "@radix-ui/react-dropdown-menu": "^2.0.6",
@@ -30,11 +30,11 @@
  "@radix-ui/react-slot": "^1.0.2",
  "@radix-ui/react-switch": "^1.0.3",
  "@radix-ui/react-tooltip": "^1.0.7",
- "@vercel/analytics": "^1.1.2",
+ "@vercel/analytics": "^1.5.0",
  "@vercel/kv": "^1.0.1",
  "@vercel/og": "^0.6.2",
- "@vercel/otel": "^1.10.0",
- "ai": "^4.0.14",
+ "@vercel/otel": "^1.12.0",
+ "ai": "^4.3.16",
  "class-variance-authority": "^0.7.0",
  "clsx": "^2.1.0",
  "d3-scale": "^4.0.2",
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "langwatch",
- "version": "0.1.7",
+ "version": "0.2.0",
  "description": "",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
@@ -19,13 +19,14 @@
  "scripts": {
  "prepare": "./copy-types.sh",
  "test": "vitest",
- "build": "npm run prepare && tsup && esbuild src/index.ts --bundle --platform=node --outfile=dist/index.js",
+ "prebuild": "npm run prepare",
+ "build": "tsup && esbuild src/index.ts --bundle --platform=node --outfile=dist/index.js",
  "prepublish": "npm run build"
  },
  "author": "",
  "license": "Apache-2.0",
  "engines": {
- "node": ">=18.0.0"
+ "node": ">=20.0.0"
  },
  "devDependencies": {
  "@ai-sdk/openai": "^0.0.18",
@@ -33,6 +34,7 @@
  "@types/debug": "^4.1.12",
  "@types/eslint__js": "^8.42.3",
  "@types/node": "^16.0.0",
+ "@typescript/native-preview": "^7.0.0-dev.20250610.1",
  "dotenv": "^16.4.5",
  "esbuild": "^0.21.5",
  "eslint": "^8.57.0",
@@ -44,8 +46,9 @@
  },
  "dependencies": {
  "@langchain/core": "^0.2.7",
- "@opentelemetry/otlp-transformer": "^0.56.0",
- "@opentelemetry/sdk-trace-base": "^1.30.1",
+ "@opentelemetry/api": "^1.9.0",
+ "@opentelemetry/otlp-transformer": "^0.202.0",
+ "@opentelemetry/sdk-trace-base": "^2.0.1",
  "ai": "^4.0.14",
  "eventemitter3": "^5.0.1",
  "javascript-stringify": "^2.1.0",
@@ -2,8 +2,10 @@ import {
  type ReadableSpan,
  type SpanExporter,
  } from "@opentelemetry/sdk-trace-base";
- import { createExportTraceServiceRequest } from "@opentelemetry/otlp-transformer";
-
+ import {
+ JsonTraceSerializer,
+ type ISerializer,
+ } from "@opentelemetry/otlp-transformer";
  import { type ExportResult, ExportResultCode } from "@opentelemetry/core";

  export class LangWatchExporter implements SpanExporter {
@@ -11,6 +13,7 @@ export class LangWatchExporter implements SpanExporter {
  private apiKey: string;
  private includeAllSpans: boolean;
  private debug: boolean;
+ private serializer: ISerializer<ReadableSpan[], unknown>;

  constructor(
  params: {
@@ -27,6 +30,7 @@ export class LangWatchExporter implements SpanExporter {
  this.apiKey = params.apiKey ?? process.env.LANGWATCH_API_KEY ?? "";
  this.includeAllSpans = params.includeAllSpans ?? false;
  this.debug = params.debug ?? false;
+ this.serializer = JsonTraceSerializer;

  if (!this.apiKey) {
  throw new Error("LANGWATCH_API_KEY is not set");
@@ -40,6 +44,7 @@ export class LangWatchExporter implements SpanExporter {
  const spans = allSpans.filter(
  (span) => this.includeAllSpans || this.isAiSdkSpan(span)
  );
+
  if (spans.length === 0) {
  resultCallback({ code: ExportResultCode.SUCCESS });
  return;
@@ -50,7 +55,7 @@ export class LangWatchExporter implements SpanExporter {

  let body;
  try {
- body = JSON.stringify(createExportTraceServiceRequest(spans));
+ body = this.serializer.serializeRequest(spans);
  } catch (error) {
  console.error("[LangWatchExporter] Failed to serialize spans:", error);
  resultCallback({ code: ExportResultCode.FAILED });
@@ -82,7 +87,7 @@ export class LangWatchExporter implements SpanExporter {
  }

  private isAiSdkSpan(span: ReadableSpan): boolean {
- return span.instrumentationLibrary.name === "ai";
+ return span.instrumentationScope?.name === "ai";
  }

  shutdown(): Promise<void> {
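The serialization change above replaces the manual `JSON.stringify(createExportTraceServiceRequest(spans))` call with the `JsonTraceSerializer` shipped by `@opentelemetry/otlp-transformer` ^0.202, and `isAiSdkSpan` follows the SDK 2.x rename of `span.instrumentationLibrary` to `span.instrumentationScope`. A minimal sketch of the new serialization path outside the exporter class (the `serializeSpans` helper is illustrative, not part of the package):

```ts
import {
  JsonTraceSerializer,
  type ISerializer,
} from "@opentelemetry/otlp-transformer";
import type { ReadableSpan } from "@opentelemetry/sdk-trace-base";

// Same serializer the exporter now keeps in `this.serializer`.
const serializer: ISerializer<ReadableSpan[], unknown> = JsonTraceSerializer;

function serializeSpans(spans: ReadableSpan[]) {
  // serializeRequest encodes the spans as an OTLP/JSON payload that can be
  // sent directly as the HTTP request body, replacing the old
  // createExportTraceServiceRequest(spans) + JSON.stringify(...) combination.
  return serializer.serializeRequest(spans);
}
```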
package/src/index.test.ts CHANGED
@@ -9,7 +9,7 @@ import {
  type SpyInstanceFn,
  } from "vitest";
  import { openai } from "@ai-sdk/openai";
- import { generateText, type CoreMessage } from "ai";
+ import { generateText, LanguageModel, type CoreMessage } from "ai";
  import "dotenv/config";
  import { version } from "../package.json";

@@ -369,14 +369,14 @@ describe("LangWatch tracer", () => {
  });

  const response = await generateText({
- model: model,
+ model: model as LanguageModel,
  messages: messages,
  });

  span.end({
  output: {
  type: "chat_messages",
- value: convertFromVercelAIMessages(response.responseMessages),
+ value: convertFromVercelAIMessages(response.response.messages),
  },
  metrics: {
  promptTokens: response.usage?.promptTokens,
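The last hunk tracks the `ai` ^4.x result shape: `generateText` no longer exposes `responseMessages` at the top level, so the test reads the generated messages from `response.messages` instead. A short sketch of that shape, assuming `ai` ^4.3 (the `runChat` helper is illustrative only):

```ts
import { generateText, type LanguageModel, type CoreMessage } from "ai";

// Illustrative helper: run a chat completion and pull out the fields the
// test maps into the LangWatch span.
async function runChat(model: LanguageModel, messages: CoreMessage[]) {
  const result = await generateText({ model, messages });

  return {
    // ai ^4.x: assistant/tool messages live under result.response.messages
    // (previously result.responseMessages).
    messages: result.response.messages,
    promptTokens: result.usage?.promptTokens,
  };
}
```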
package/src/index.ts CHANGED
@@ -34,6 +34,7 @@ import {
  type RESTEvaluation,
  type SpanInputOutput,
  type LLMModeTrace,
+ type ErrorCapture,
  } from "./types";
  import { camelToSnakeCaseNested, type Strict } from "./typeUtils";
  import {
@@ -56,6 +57,7 @@ export type {
  RAGSpan,
  SpanInputOutput,
  LLMModeTrace,
+ ErrorCapture,
  };

  export {
package/src/types.ts CHANGED
@@ -11,6 +11,7 @@ import {
  type Trace as ServerTrace,
  type RESTEvaluation as ServerRESTEvaluation,
  type LLMModeTrace as ServerLLMModeTrace,
+ type ErrorCapture as ServerErrorCapture,
  } from "./server/types/tracer";

  export type Trace = ServerTrace;
@@ -77,3 +78,5 @@ export type PendingRAGSpan = PendingSpan<RAGSpan>;
  export type RESTEvaluation = SnakeToCamelCaseNested<
  Omit<ServerRESTEvaluation, "error">
  > & { error?: ServerRESTEvaluation["error"] };
+
+ export type ErrorCapture = ServerErrorCapture;
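Beyond the type re-exports, the practical consequence of the OpenTelemetry bump is that the exporter now targets the SDK 2.x surface, where span processors are passed to the provider constructor rather than via `addSpanProcessor`. A rough wiring sketch under that assumption; the `langwatch` import location for `LangWatchExporter` is assumed here, and the constructor options mirror the ones visible in the diff:

```ts
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
import { SimpleSpanProcessor } from "@opentelemetry/sdk-trace-base";
import { LangWatchExporter } from "langwatch"; // assumed export location

const provider = new NodeTracerProvider({
  // SDK 2.x style: processors are configured up front on the provider.
  spanProcessors: [
    new SimpleSpanProcessor(
      new LangWatchExporter({
        // apiKey is optional; the exporter falls back to LANGWATCH_API_KEY.
        apiKey: process.env.LANGWATCH_API_KEY,
        // false (the default) keeps only spans from the "ai" instrumentation scope.
        includeAllSpans: false,
      })
    ),
  ],
});

provider.register();
```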