@mastra/arize 0.0.0-agent-error-handling-20251023180025

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,63 @@
+ # @mastra/arize
+
+ ## 0.0.0-agent-error-handling-20251023180025
+
+ ### Patch Changes
+
+ - Updated dependencies [[`f743dbb`](https://github.com/mastra-ai/mastra/commit/f743dbb8b40d1627b5c10c0e6fc154f4ebb6e394), [`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`149c99e`](https://github.com/mastra-ai/mastra/commit/149c99e55de947e9a4bdaa76bb83dca7f96b61f8), [`2c4438b`](https://github.com/mastra-ai/mastra/commit/2c4438b87817ab7eed818c7990fef010475af1a3)]:
+ - @mastra/core@0.0.0-agent-error-handling-20251023180025
+ - @mastra/otel-exporter@0.0.0-agent-error-handling-20251023180025
+
+ ## 0.0.0
+
+ ### Minor Changes
+
+ - feat(otel-exporter): Add customizable 'exporter' constructor parameter ([#8827](https://github.com/mastra-ai/mastra/pull/8827))
+
+ You can now pass an instantiated class that inherits from `TraceExporter` into `OtelExporter`.
+ This bypasses the default package detection: `OtelExporter` no longer instantiates a `TraceExporter`
+ automatically when one is passed to its constructor.
+
+ feat(arize): Initial release of @mastra/arize observability package
+
+ The `@mastra/arize` package exports an `ArizeExporter` class that can be used to easily send AI
+ traces from Mastra to Arize AX, Arize Phoenix, or any OpenInference-compatible collector.
+ It sends traces using `BatchSpanProcessor` over OTLP connections.
+ It leverages the `@mastra/otel-exporter` package, reusing `OtelExporter` for transmission and
+ span management.
+ See the README in `observability/arize/README.md` for more details.
+
+ ### Patch Changes
+
+ - fix(observability): Add ParentSpanContext to MastraSpans with parentage ([#9085](https://github.com/mastra-ai/mastra/pull/9085))
+
+ - Updated dependencies [[`c67ca32`](https://github.com/mastra-ai/mastra/commit/c67ca32e3c2cf69bfc146580770c720220ca44ac), [`efb5ed9`](https://github.com/mastra-ai/mastra/commit/efb5ed946ae7f410bc68c9430beb4b010afd25ec), [`ff1fe1d`](https://github.com/mastra-ai/mastra/commit/ff1fe1d344c665a5da099386e12adffa04f9a8c7), [`dbc9e12`](https://github.com/mastra-ai/mastra/commit/dbc9e1216ba575ba59ead4afb727a01215f7de4f), [`99e41b9`](https://github.com/mastra-ai/mastra/commit/99e41b94957cdd25137d3ac12e94e8b21aa01b68), [`c28833c`](https://github.com/mastra-ai/mastra/commit/c28833c5b6d8e10eeffd7f7d39129d53b8bca240), [`8ea07b4`](https://github.com/mastra-ai/mastra/commit/8ea07b4bdc73e4218437dbb6dcb0f4b23e745a44), [`ba201b8`](https://github.com/mastra-ai/mastra/commit/ba201b8f8feac4c72350f2dbd52c13c7297ba7b0), [`f053e89`](https://github.com/mastra-ai/mastra/commit/f053e89160dbd0bd3333fc3492f68231b5c7c349), [`4fc4136`](https://github.com/mastra-ai/mastra/commit/4fc413652866a8d2240694fddb2562e9edbb70df), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`d10baf5`](https://github.com/mastra-ai/mastra/commit/d10baf5a3c924f2a6654e23a3e318ed03f189b76), [`038c55a`](https://github.com/mastra-ai/mastra/commit/038c55a7090fc1b1513a966386d3072617f836ac), [`8bcab54`](https://github.com/mastra-ai/mastra/commit/8bcab543a13ff3fa4ba24da05076cd35f1797827), [`182f045`](https://github.com/mastra-ai/mastra/commit/182f0458f25bd70aa774e64fd923c8a483eddbf1), [`9a1a485`](https://github.com/mastra-ai/mastra/commit/9a1a4859b855e37239f652bf14b1ecd1029b8c4e), [`9257233`](https://github.com/mastra-ai/mastra/commit/9257233c4ffce09b2bedc2a9adbd70d7a83fa8e2), [`7620d2b`](https://github.com/mastra-ai/mastra/commit/7620d2bddeb4fae4c3c0a0b4e672969795fca11a), [`b2365f0`](https://github.com/mastra-ai/mastra/commit/b2365f038dd4c5f06400428b224af963f399ad50), [`0f1a4c9`](https://github.com/mastra-ai/mastra/commit/0f1a4c984fb4b104b2f0b63ba18c9fa77f567700), [`9029ba3`](https://github.com/mastra-ai/mastra/commit/9029ba34459c8859fed4c6b73efd8e2d0021e7ba), [`426cc56`](https://github.com/mastra-ai/mastra/commit/426cc561c85ae76a112ded2385532a91f9f9f074), [`00931fb`](https://github.com/mastra-ai/mastra/commit/00931fb1a21aa42c4fbc20c2c40dd62466b8fc8f), [`e473bfe`](https://github.com/mastra-ai/mastra/commit/e473bfe416c0b8e876973c2b6a6f13c394b7a93f), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`2db6160`](https://github.com/mastra-ai/mastra/commit/2db6160e2022ff8827c15d30157e684683b934b5), [`8aeea37`](https://github.com/mastra-ai/mastra/commit/8aeea37efdde347c635a67fed56794943b7f74ec), [`02fe153`](https://github.com/mastra-ai/mastra/commit/02fe15351d6021d214da48ec982a0e9e4150bcee), [`648e2ca`](https://github.com/mastra-ai/mastra/commit/648e2ca42da54838c6ccbdaadc6fadd808fa6b86), [`74567b3`](https://github.com/mastra-ai/mastra/commit/74567b3d237ae3915cd0bca3cf55fa0a64e4e4a4), [`b65c5e0`](https://github.com/mastra-ai/mastra/commit/b65c5e0fe6f3c390a9a8bbcf69304d972c3a4afb), [`15a1733`](https://github.com/mastra-ai/mastra/commit/15a1733074cee8bd37370e1af34cd818e89fa7ac), [`fc2a774`](https://github.com/mastra-ai/mastra/commit/fc2a77468981aaddc3e77f83f0c4ad4a4af140da), [`4e08933`](https://github.com/mastra-ai/mastra/commit/4e08933625464dfde178347af5b6278fcf34188e), [`10188d6`](https://github.com/mastra-ai/mastra/commit/10188d632a729010441f9c7e2a41eab60afccb23)]:
+ - @mastra/core@0.22.0
+ - @mastra/otel-exporter@0.2.0
+
+ ## 0.0.0-alpha.1
+
+ ### Minor Changes
+
+ - feat(otel-exporter): Add customizable 'exporter' constructor parameter ([#8827](https://github.com/mastra-ai/mastra/pull/8827))
+
+ You can now pass an instantiated class that inherits from `TraceExporter` into `OtelExporter`.
+ This bypasses the default package detection: `OtelExporter` no longer instantiates a `TraceExporter`
+ automatically when one is passed to its constructor.
+
+ feat(arize): Initial release of @mastra/arize observability package
+
+ The `@mastra/arize` package exports an `ArizeExporter` class that can be used to easily send AI
+ traces from Mastra to Arize AX, Arize Phoenix, or any OpenInference-compatible collector.
+ It sends traces using `BatchSpanProcessor` over OTLP connections.
+ It leverages the `@mastra/otel-exporter` package, reusing `OtelExporter` for transmission and
+ span management.
+ See the README in `observability/arize/README.md` for more details.
+
+ ### Patch Changes
+
+ - fix(observability): Add ParentSpanContext to MastraSpans with parentage ([#9085](https://github.com/mastra-ai/mastra/pull/9085))
+
+ - Updated dependencies [[`efb5ed9`](https://github.com/mastra-ai/mastra/commit/efb5ed946ae7f410bc68c9430beb4b010afd25ec), [`ff1fe1d`](https://github.com/mastra-ai/mastra/commit/ff1fe1d344c665a5da099386e12adffa04f9a8c7), [`8ea07b4`](https://github.com/mastra-ai/mastra/commit/8ea07b4bdc73e4218437dbb6dcb0f4b23e745a44), [`ba201b8`](https://github.com/mastra-ai/mastra/commit/ba201b8f8feac4c72350f2dbd52c13c7297ba7b0), [`4fc4136`](https://github.com/mastra-ai/mastra/commit/4fc413652866a8d2240694fddb2562e9edbb70df), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`d10baf5`](https://github.com/mastra-ai/mastra/commit/d10baf5a3c924f2a6654e23a3e318ed03f189b76), [`038c55a`](https://github.com/mastra-ai/mastra/commit/038c55a7090fc1b1513a966386d3072617f836ac), [`8bcab54`](https://github.com/mastra-ai/mastra/commit/8bcab543a13ff3fa4ba24da05076cd35f1797827), [`182f045`](https://github.com/mastra-ai/mastra/commit/182f0458f25bd70aa774e64fd923c8a483eddbf1), [`7620d2b`](https://github.com/mastra-ai/mastra/commit/7620d2bddeb4fae4c3c0a0b4e672969795fca11a), [`b2365f0`](https://github.com/mastra-ai/mastra/commit/b2365f038dd4c5f06400428b224af963f399ad50), [`9029ba3`](https://github.com/mastra-ai/mastra/commit/9029ba34459c8859fed4c6b73efd8e2d0021e7ba), [`426cc56`](https://github.com/mastra-ai/mastra/commit/426cc561c85ae76a112ded2385532a91f9f9f074), [`00931fb`](https://github.com/mastra-ai/mastra/commit/00931fb1a21aa42c4fbc20c2c40dd62466b8fc8f), [`e473bfe`](https://github.com/mastra-ai/mastra/commit/e473bfe416c0b8e876973c2b6a6f13c394b7a93f), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`648e2ca`](https://github.com/mastra-ai/mastra/commit/648e2ca42da54838c6ccbdaadc6fadd808fa6b86), [`b65c5e0`](https://github.com/mastra-ai/mastra/commit/b65c5e0fe6f3c390a9a8bbcf69304d972c3a4afb), [`10188d6`](https://github.com/mastra-ai/mastra/commit/10188d632a729010441f9c7e2a41eab60afccb23)]:
+ - @mastra/core@0.22.0-alpha.1
+ - @mastra/otel-exporter@0.2.0-alpha.0
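
The changelog entries above describe the new `exporter` constructor parameter on `OtelExporter`. A minimal sketch of what that wiring can look like, modeled on how `ArizeExporter` itself calls `super()` in `dist/index.js`; the collector URL is a placeholder, and the exact set of required `OtelExporterConfig` fields is assumed rather than confirmed here:

```typescript
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
import { OtelExporter } from '@mastra/otel-exporter';

const endpoint = 'https://your-collector.example.com/v1/traces'; // placeholder

// Passing a pre-instantiated exporter bypasses OtelExporter's default package
// detection, so it will not construct a TraceExporter on its own.
const exporter = new OtelExporter({
  exporter: new OTLPTraceExporter({ url: endpoint }),
  provider: {
    custom: {
      endpoint,
      protocol: 'http/protobuf',
    },
  },
});
```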
package/LICENSE.md ADDED
@@ -0,0 +1,15 @@
+ # Apache License 2.0
+
+ Copyright (c) 2025 Kepler Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,144 @@
+ # @mastra/arize - OpenTelemetry + OpenInference AI Tracing Exporter
+
+ Export Mastra AI traces to any OpenTelemetry observability platform that supports OpenInference, such as [Arize AX](https://arize.com/generative-ai/) or [Phoenix](https://phoenix.arize.com/).
+
+ For more information on OpenInference, see the [OpenInference Semantic Conventions](https://github.com/Arize-ai/openinference/tree/main/spec) specification.
+
+ ## Installation
+
+ ```bash
+ npm install @mastra/arize
+ ```
+
+ ## Configuration
+
+ You can add `ArizeExporter` to your Mastra configuration to export traces to Arize AX, Phoenix, or any other OpenTelemetry-compatible observability platform that supports OpenInference.
+
+ ### Phoenix
+
+ ```typescript
+ import { ArizeExporter } from '@mastra/arize';
+ import { Mastra } from '@mastra/core';
+
+ // required, ends in /v1/traces
+ const ENDPOINT = process.env.PHOENIX_ENDPOINT!;
+ // optional when using an unauthenticated Phoenix instance
+ const API_KEY = process.env.PHOENIX_API_KEY;
+ // optional, determines the project name in Phoenix
+ const PROJECT_NAME = process.env.PHOENIX_PROJECT_NAME || 'mastra-service';
+
+ const mastra = new Mastra({
+   ...,
+   observability: {
+     // Enables ArizeExporter for AI tracing
+     configs: {
+       arize: {
+         serviceName: PROJECT_NAME,
+         exporters: [
+           new ArizeExporter({
+             endpoint: ENDPOINT,
+             apiKey: API_KEY,
+             projectName: PROJECT_NAME,
+           }),
+         ],
+       },
+     },
+   },
+ });
+ ```
+
+ > [!TIP]
+ > You can use this exporter with either [self-hosted Phoenix](https://docs.arize.com/phoenix/deployment) or [Phoenix Cloud](https://app.phoenix.arize.com/login).
+ >
+ > To quickly verify functionality, you can try out a local in-memory Phoenix instance:
+ >
+ > ```bash
+ > docker run --pull=always -d --name arize-phoenix -p 6006:6006 -e PHOENIX_SQL_DATABASE_URL="sqlite:///:memory:" arizephoenix/phoenix:latest
+ > ```
+ >
+ > Configure your `ArizeExporter` endpoint to `http://localhost:6006/v1/traces` and run the default Mastra weather agent to see traces!
+
+ ### Arize AX
+
+ ```typescript
+ import { ArizeExporter } from '@mastra/arize';
+ import { Mastra } from '@mastra/core';
+
+ // required, Arize AX space destination for trace exports
+ const SPACE_ID = process.env.ARIZE_SPACE_ID!;
+ // required, Arize AX API key
+ const API_KEY = process.env.ARIZE_API_KEY!;
+ // optional, determines the project name in Arize AX
+ const PROJECT_NAME = process.env.ARIZE_PROJECT_NAME || 'mastra-service';
+
+ const mastra = new Mastra({
+   ...,
+   observability: {
+     configs: {
+       arize: {
+         serviceName: PROJECT_NAME,
+         exporters: [
+           new ArizeExporter({
+             apiKey: API_KEY,
+             spaceId: SPACE_ID,
+             projectName: PROJECT_NAME,
+           }),
+         ],
+       },
+     },
+   },
+ });
+ ```
+
+ > [!TIP]
+ > Need an Arize AX API key? [Get one here](https://app.arize.com/).
+
+ ## Optional Configuration
+
+ You can configure the `ArizeExporter` to tune the underlying OpenTelemetry `BatchSpanProcessor` or to add additional resource attributes to each span.
+
+ ```typescript
+ import { ArizeExporter } from '@mastra/arize';
+ import { Mastra } from '@mastra/core';
+
+ const mastra = new Mastra({
+   ...,
+   observability: {
+     configs: {
+       arize: {
+         serviceName: 'mastra-service',
+         exporters: [
+           new ArizeExporter({
+             // Required unless spaceId is set (Arize AX), in which case it defaults to the Arize AX endpoint
+             endpoint: 'https://your-collector.example.com/v1/traces',
+             // Required if using an authenticated endpoint
+             apiKey: 'your-api-key',
+             // Optional headers to be added to each OTLP request, in addition to authentication headers
+             headers: {
+               'x-api-key': process.env.API_KEY,
+             },
+             // Optional log level for debugging the exporter
+             logLevel: 'debug',
+             // Optional batch size for the underlying BatchSpanProcessor before spans are exported
+             batchSize: 512,
+             // Optional timeout for the underlying BatchSpanProcessor before spans are exported
+             timeout: 30000,
+             // Optional resource attributes to be added to each span
+             resourceAttributes: {
+               'custom.attribute': 'value',
+             },
+           }),
+         ],
+       },
+     },
+   },
+ });
+ ```
+
+ ## OpenInference Semantic Conventions
+
+ This exporter follows the [OpenInference Semantic Conventions](https://github.com/Arize-ai/openinference/tree/main/spec) for generative AI applications.
+
+ ## License
+
+ Apache 2.0
package/dist/ai-tracing.d.ts ADDED
@@ -0,0 +1,34 @@
+ import { OtelExporter } from '@mastra/otel-exporter';
+ import type { OtelExporterConfig } from '@mastra/otel-exporter';
+ export declare const ARIZE_AX_ENDPOINT = "https://otlp.arize.com/v1/traces";
+ export type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {
+     /**
+      * Required if sending traces to Arize AX
+      */
+     spaceId?: string;
+     /**
+      * Required if sending traces to Arize AX, or to any other collector that
+      * requires an Authorization header
+      */
+     apiKey?: string;
+     /**
+      * Collector endpoint destination for trace exports.
+      * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.
+      * Optional when sending traces to Arize AX.
+      */
+     endpoint?: string;
+     /**
+      * Optional project name to be added as a resource attribute using
+      * OpenInference Semantic Conventions
+      */
+     projectName?: string;
+     /**
+      * Optional headers to be added to each OTLP request
+      */
+     headers?: Record<string, string>;
+ };
+ export declare class ArizeExporter extends OtelExporter {
+     name: string;
+     constructor(config: ArizeExporterConfig);
+ }
+ //# sourceMappingURL=ai-tracing.d.ts.map
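
A short usage sketch of the two configurations this type supports, drawn from the field documentation above and the constructor logic visible in `dist/index.js` (all IDs, keys, and URLs are placeholders):

```typescript
import { ArizeExporter } from '@mastra/arize';

// Arize AX: spaceId plus apiKey; endpoint falls back to ARIZE_AX_ENDPOINT.
const axExporter = new ArizeExporter({
  spaceId: 'your-space-id',
  apiKey: 'your-arize-api-key',
  projectName: 'mastra-service',
});

// Phoenix or any other OpenInference-compatible collector: endpoint is required,
// and apiKey (when set) is sent as an `Authorization: Bearer` header.
const phoenixExporter = new ArizeExporter({
  endpoint: 'http://localhost:6006/v1/traces',
  apiKey: 'your-phoenix-api-key',
  projectName: 'mastra-service',
});
```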
package/dist/ai-tracing.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ai-tracing.d.ts","sourceRoot":"","sources":["../src/ai-tracing.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AACrD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAMhE,eAAO,MAAM,iBAAiB,qCAAqC,CAAC;AAEpE,MAAM,MAAM,mBAAmB,GAAG,IAAI,CAAC,kBAAkB,EAAE,UAAU,CAAC,GAAG;IACvE;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC,CAAC;AAEF,qBAAa,aAAc,SAAQ,YAAY;IAC7C,IAAI,SAAW;gBAEH,MAAM,EAAE,mBAAmB;CAsCxC"}
package/dist/gen-ai.d.ts ADDED
@@ -0,0 +1,20 @@
+ /**
+  * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to
+  * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.
+  * If parsing fails at any step, the original inputOutputString is returned unmodified.
+  *
+  * This conversion is best effort; it assumes a consistent shape for Mastra messages, and converts
+  * into the gen_ai input and output schemas as of October 20th, 2025.
+  *
+  * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages
+  * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json
+  * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages
+  * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json
+  *
+  * @param inputOutputString a jsonified string that contains messages adhering to what appears to be
+  * Mastra's message shape.
+  * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.
+  * If parsing fails at any step, the original inputOutputString is returned unmodified.
+  */
+ export declare const convertMastraMessagesToGenAIMessages: (inputOutputString: string) => string;
+ //# sourceMappingURL=gen-ai.d.ts.map
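
To make the contract above concrete, here is a small sketch of the conversion, based on the compiled implementation in `dist/index.js`; the message content is illustrative, and note that this function is not re-exported from the package root (only `ai-tracing` is), so the sketch assumes direct access to the module:

```typescript
// A stringified Mastra-shaped payload, as it appears on a MastraSpan attribute.
const mastraInput = JSON.stringify({
  messages: [
    { role: 'user', content: 'What is the weather in Paris?' },
    {
      role: 'assistant',
      content: [{ type: 'tool-call', toolCallId: 'call_1', toolName: 'weatherTool', input: { city: 'Paris' } }],
    },
  ],
});

const converted = convertMastraMessagesToGenAIMessages(mastraInput);
// converted is now a gen_ai.input.messages-shaped JSON string:
// [
//   { "role": "user", "parts": [{ "type": "text", "content": "What is the weather in Paris?" }] },
//   { "role": "assistant", "parts": [{ "type": "tool_call", "id": "call_1",
//       "name": "weatherTool", "arguments": "{\"city\":\"Paris\"}" }] }
// ]
```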
package/dist/gen-ai.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"gen-ai.d.ts","sourceRoot":"","sources":["../src/gen-ai.ts"],"names":[],"mappings":"AAsEA;;;;;;;;;;;;;;;;;GAiBG;AACH,eAAO,MAAM,oCAAoC,GAAI,mBAAmB,MAAM,KAAG,MAyEhF,CAAC"}
package/dist/index.cjs ADDED
@@ -0,0 +1,157 @@
1
+ 'use strict';
2
+
3
+ var openinferenceSemanticConventions = require('@arizeai/openinference-semantic-conventions');
4
+ var logger = require('@mastra/core/logger');
5
+ var otelExporter = require('@mastra/otel-exporter');
6
+ var openinferenceGenai = require('@arizeai/openinference-genai');
7
+ var exporterTraceOtlpProto = require('@opentelemetry/exporter-trace-otlp-proto');
8
+
9
+ // src/ai-tracing.ts
10
+
11
+ // src/gen-ai.ts
12
+ var isMastraMessagePart = (p) => {
13
+ return typeof p === "object" && p != null && "type" in p && (p.type === "text" || p.type === "tool-call" || p.type === "tool-result") && (p.type === "text" && "text" in p || p.type === "tool-call" && "toolCallId" in p && "toolName" in p && "input" in p || p.type === "tool-result" && "toolCallId" in p && "toolName" in p && "output" in p);
14
+ };
15
+ var isMastraMessage = (m) => {
16
+ return typeof m === "object" && m != null && "role" in m && "content" in m && (typeof m.content === "string" || Array.isArray(m.content) && m.content.every(isMastraMessagePart));
17
+ };
18
+ var convertMastraMessagesToGenAIMessages = (inputOutputString) => {
19
+ try {
20
+ const parsedIO = JSON.parse(inputOutputString);
21
+ if (typeof parsedIO !== "object" || parsedIO == null || !("messages" in parsedIO) && !("text" in parsedIO)) {
22
+ return inputOutputString;
23
+ }
24
+ if ("text" in parsedIO) {
25
+ return JSON.stringify([
26
+ {
27
+ role: "assistant",
28
+ parts: [{ type: "text", content: parsedIO.text }]
29
+ }
30
+ ]);
31
+ }
32
+ if (Array.isArray(parsedIO.messages)) {
33
+ return JSON.stringify(
34
+ parsedIO.messages.map((m) => {
35
+ if (!isMastraMessage(m)) {
36
+ return m;
37
+ }
38
+ const role = m.role;
39
+ let parts = [];
40
+ if (Array.isArray(m.content)) {
41
+ parts = m.content.map((c) => {
42
+ switch (c.type) {
43
+ case "text":
44
+ return {
45
+ type: "text",
46
+ content: c.text
47
+ };
48
+ case "tool-call":
49
+ return {
50
+ type: "tool_call",
51
+ id: c.toolCallId,
52
+ name: c.toolName,
53
+ arguments: JSON.stringify(c.input)
54
+ };
55
+ case "tool-result":
56
+ return {
57
+ type: "tool_call_response",
58
+ id: c.toolCallId,
59
+ name: c.toolName,
60
+ response: JSON.stringify(c.output.value)
61
+ };
62
+ default:
63
+ return c;
64
+ }
65
+ });
66
+ } else {
67
+ parts = [
68
+ {
69
+ type: "text",
70
+ content: m.content
71
+ }
72
+ ];
73
+ }
74
+ return {
75
+ role,
76
+ parts
77
+ };
78
+ })
79
+ );
80
+ }
81
+ return inputOutputString;
82
+ } catch {
83
+ return inputOutputString;
84
+ }
85
+ };
86
+
87
+ // src/openInferenceOTLPExporter.ts
88
+ var OpenInferenceOTLPTraceExporter = class extends exporterTraceOtlpProto.OTLPTraceExporter {
89
+ export(spans, resultCallback) {
90
+ const processedSpans = spans.map((span) => {
91
+ if (span.attributes?.["gen_ai.prompt"] && typeof span.attributes["gen_ai.prompt"] === "string") {
92
+ span.attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(
93
+ span.attributes["gen_ai.prompt"]
94
+ );
95
+ }
96
+ if (span.attributes?.["gen_ai.completion"] && typeof span.attributes["gen_ai.completion"] === "string") {
97
+ span.attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(
98
+ span.attributes["gen_ai.completion"]
99
+ );
100
+ }
101
+ const processedAttributes = openinferenceGenai.convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);
102
+ if (processedAttributes) {
103
+ span.attributes = processedAttributes;
104
+ }
105
+ return span;
106
+ });
107
+ super.export(processedSpans, resultCallback);
108
+ }
109
+ };
110
+
111
+ // src/ai-tracing.ts
112
+ var LOG_PREFIX = "[ArizeExporter]";
113
+ var ARIZE_AX_ENDPOINT = "https://otlp.arize.com/v1/traces";
114
+ var ArizeExporter = class extends otelExporter.OtelExporter {
115
+ name = "arize";
116
+ constructor(config) {
117
+ const logger$1 = new logger.ConsoleLogger({ level: config.logLevel ?? "warn" });
118
+ let endpoint = config.endpoint;
119
+ const headers = {
120
+ ...config.headers
121
+ };
122
+ if (config.spaceId) {
123
+ headers["space_id"] = config.spaceId;
124
+ headers["api_key"] = config.apiKey ?? "";
125
+ endpoint = config.endpoint || ARIZE_AX_ENDPOINT;
126
+ } else if (config.apiKey) {
127
+ headers["Authorization"] = `Bearer ${config.apiKey}`;
128
+ }
129
+ if (!endpoint) {
130
+ logger$1.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);
131
+ return;
132
+ }
133
+ super({
134
+ exporter: new OpenInferenceOTLPTraceExporter({
135
+ url: endpoint,
136
+ headers
137
+ }),
138
+ ...config,
139
+ resourceAttributes: {
140
+ [openinferenceSemanticConventions.SEMRESATTRS_PROJECT_NAME]: config.projectName,
141
+ ...config.resourceAttributes
142
+ },
143
+ provider: {
144
+ custom: {
145
+ endpoint: config.endpoint,
146
+ headers,
147
+ protocol: "http/protobuf"
148
+ }
149
+ }
150
+ });
151
+ }
152
+ };
153
+
154
+ exports.ARIZE_AX_ENDPOINT = ARIZE_AX_ENDPOINT;
155
+ exports.ArizeExporter = ArizeExporter;
156
+ //# sourceMappingURL=index.cjs.map
157
+ //# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/ai-tracing.ts"],"names":["OTLPTraceExporter","convertGenAISpanAttributesToOpenInferenceSpanAttributes","OtelExporter","logger","ConsoleLogger","SEMRESATTRS_PROJECT_NAME"],"mappings":";;;;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;AC1JO,IAAM,8BAAA,GAAN,cAA6CA,wCAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AAEvC,MAAA,IAAI,IAAA,CAAK,aAAa,eAAe,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,eAAe,CAAA,KAAM,QAAA,EAAU;AAC9F,QAAA,IAAA,CAAK,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA;AAAA,UACzC,IAAA,CAAK,WAAW,eAAe;AAAA,SACjC;AAAA,MACF;AAEA,MAAA,IAAI,IAAA,CAAK,aAAa,mBAAmB,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,mBAAmB,CAAA,KAAM,QAAA,EAAU;AACtG,QAAA,IAAA,CAAK,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA;AAAA,UAC1C,IAAA,CAAK,WAAW,mBAAmB;AAAA,SACrC;AAAA,MACF;AACA,MAAA,MAAM,mBAAA,GAAsBC,0EAAA,CAAwD,IAAA,CAAK,UAAU,CAAA;AAEnG,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAC,KAA+B,UAAA,GAAa,mBAAA;AAAA,MAC/C;AACA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;ACzBA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1
B,IAAM,aAAA,GAAN,cAA4BC,yBAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAMC,QAAA,GAAS,IAAIC,oBAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAAD,QAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAACE,yDAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,UAAU,MAAA,CAAO,QAAA;AAAA,UACjB,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.cjs","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n 
*\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n // convert Mastra input messages to GenAI messages if present\n if (span.attributes?.['gen_ai.prompt'] && typeof span.attributes['gen_ai.prompt'] === 'string') {\n span.attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.prompt'],\n );\n }\n // convert Mastra output messages to GenAI messages if present\n if (span.attributes?.['gen_ai.completion'] && typeof span.attributes['gen_ai.completion'] === 'string') {\n span.attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.completion'],\n );\n }\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);\n // only add processed attributes 
if conversion was successful\n if (processedAttributes) {\n (span as Mutable<ReadableSpan>).attributes = processedAttributes;\n }\n return span;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint: config.endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
package/dist/index.d.ts ADDED
@@ -0,0 +1,2 @@
+ export * from './ai-tracing.js';
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,iBAAiB,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,154 @@
1
+ import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';
2
+ import { ConsoleLogger } from '@mastra/core/logger';
3
+ import { OtelExporter } from '@mastra/otel-exporter';
4
+ import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';
5
+ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
6
+
7
+ // src/ai-tracing.ts
8
+
9
+ // src/gen-ai.ts
10
+ var isMastraMessagePart = (p) => {
11
+ return typeof p === "object" && p != null && "type" in p && (p.type === "text" || p.type === "tool-call" || p.type === "tool-result") && (p.type === "text" && "text" in p || p.type === "tool-call" && "toolCallId" in p && "toolName" in p && "input" in p || p.type === "tool-result" && "toolCallId" in p && "toolName" in p && "output" in p);
12
+ };
13
+ var isMastraMessage = (m) => {
14
+ return typeof m === "object" && m != null && "role" in m && "content" in m && (typeof m.content === "string" || Array.isArray(m.content) && m.content.every(isMastraMessagePart));
15
+ };
16
+ var convertMastraMessagesToGenAIMessages = (inputOutputString) => {
17
+ try {
18
+ const parsedIO = JSON.parse(inputOutputString);
19
+ if (typeof parsedIO !== "object" || parsedIO == null || !("messages" in parsedIO) && !("text" in parsedIO)) {
20
+ return inputOutputString;
21
+ }
22
+ if ("text" in parsedIO) {
23
+ return JSON.stringify([
24
+ {
25
+ role: "assistant",
26
+ parts: [{ type: "text", content: parsedIO.text }]
27
+ }
28
+ ]);
29
+ }
30
+ if (Array.isArray(parsedIO.messages)) {
31
+ return JSON.stringify(
32
+ parsedIO.messages.map((m) => {
33
+ if (!isMastraMessage(m)) {
34
+ return m;
35
+ }
36
+ const role = m.role;
37
+ let parts = [];
38
+ if (Array.isArray(m.content)) {
39
+ parts = m.content.map((c) => {
40
+ switch (c.type) {
41
+ case "text":
42
+ return {
43
+ type: "text",
44
+ content: c.text
45
+ };
46
+ case "tool-call":
47
+ return {
48
+ type: "tool_call",
49
+ id: c.toolCallId,
50
+ name: c.toolName,
51
+ arguments: JSON.stringify(c.input)
52
+ };
53
+ case "tool-result":
54
+ return {
55
+ type: "tool_call_response",
56
+ id: c.toolCallId,
57
+ name: c.toolName,
58
+ response: JSON.stringify(c.output.value)
59
+ };
60
+ default:
61
+ return c;
62
+ }
63
+ });
64
+ } else {
65
+ parts = [
66
+ {
67
+ type: "text",
68
+ content: m.content
69
+ }
70
+ ];
71
+ }
72
+ return {
73
+ role,
74
+ parts
75
+ };
76
+ })
77
+ );
78
+ }
79
+ return inputOutputString;
80
+ } catch {
81
+ return inputOutputString;
82
+ }
83
+ };
84
+
85
+ // src/openInferenceOTLPExporter.ts
86
+ var OpenInferenceOTLPTraceExporter = class extends OTLPTraceExporter {
87
+ export(spans, resultCallback) {
88
+ const processedSpans = spans.map((span) => {
89
+ if (span.attributes?.["gen_ai.prompt"] && typeof span.attributes["gen_ai.prompt"] === "string") {
90
+ span.attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(
91
+ span.attributes["gen_ai.prompt"]
92
+ );
93
+ }
94
+ if (span.attributes?.["gen_ai.completion"] && typeof span.attributes["gen_ai.completion"] === "string") {
95
+ span.attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(
96
+ span.attributes["gen_ai.completion"]
97
+ );
98
+ }
99
+ const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);
100
+ if (processedAttributes) {
101
+ span.attributes = processedAttributes;
102
+ }
103
+ return span;
104
+ });
105
+ super.export(processedSpans, resultCallback);
106
+ }
107
+ };
108
+
109
+ // src/ai-tracing.ts
110
+ var LOG_PREFIX = "[ArizeExporter]";
111
+ var ARIZE_AX_ENDPOINT = "https://otlp.arize.com/v1/traces";
112
+ var ArizeExporter = class extends OtelExporter {
113
+ name = "arize";
114
+ constructor(config) {
115
+ const logger = new ConsoleLogger({ level: config.logLevel ?? "warn" });
116
+ let endpoint = config.endpoint;
117
+ const headers = {
118
+ ...config.headers
119
+ };
120
+ if (config.spaceId) {
121
+ headers["space_id"] = config.spaceId;
122
+ headers["api_key"] = config.apiKey ?? "";
123
+ endpoint = config.endpoint || ARIZE_AX_ENDPOINT;
124
+ } else if (config.apiKey) {
125
+ headers["Authorization"] = `Bearer ${config.apiKey}`;
126
+ }
127
+ if (!endpoint) {
128
+ logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);
129
+ return;
130
+ }
131
+ super({
132
+ exporter: new OpenInferenceOTLPTraceExporter({
133
+ url: endpoint,
134
+ headers
135
+ }),
136
+ ...config,
137
+ resourceAttributes: {
138
+ [SEMRESATTRS_PROJECT_NAME]: config.projectName,
139
+ ...config.resourceAttributes
140
+ },
141
+ provider: {
142
+ custom: {
143
+ endpoint: config.endpoint,
144
+ headers,
145
+ protocol: "http/protobuf"
146
+ }
147
+ }
148
+ });
149
+ }
150
+ };
151
+
152
+ export { ARIZE_AX_ENDPOINT, ArizeExporter };
153
+ //# sourceMappingURL=index.js.map
154
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/ai-tracing.ts"],"names":[],"mappings":";;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;AC1JO,IAAM,8BAAA,GAAN,cAA6C,iBAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AAEvC,MAAA,IAAI,IAAA,CAAK,aAAa,eAAe,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,eAAe,CAAA,KAAM,QAAA,EAAU;AAC9F,QAAA,IAAA,CAAK,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA;AAAA,UACzC,IAAA,CAAK,WAAW,eAAe;AAAA,SACjC;AAAA,MACF;AAEA,MAAA,IAAI,IAAA,CAAK,aAAa,mBAAmB,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,mBAAmB,CAAA,KAAM,QAAA,EAAU;AACtG,QAAA,IAAA,CAAK,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA;AAAA,UAC1C,IAAA,CAAK,WAAW,mBAAmB;AAAA,SACrC;AAAA,MACF;AACA,MAAA,MAAM,mBAAA,GAAsB,uDAAA,CAAwD,IAAA,CAAK,UAAU,CAAA;AAEnG,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAC,KAA+B,UAAA,GAAa,mBAAA;AAAA,MAC/C;AACA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;ACzBA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4B,YAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAM,MAAA,GAAS,IAAI,aAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QA
AA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAAC,wBAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,UAAU,MAAA,CAAO,QAAA;AAAA,UACjB,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.js","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified 
string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n // convert Mastra input messages to GenAI messages if present\n if (span.attributes?.['gen_ai.prompt'] && typeof span.attributes['gen_ai.prompt'] === 'string') {\n span.attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.prompt'],\n );\n }\n // convert Mastra output messages to GenAI messages if present\n if (span.attributes?.['gen_ai.completion'] && typeof span.attributes['gen_ai.completion'] === 'string') {\n span.attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.completion'],\n );\n }\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);\n // only add processed attributes if conversion was successful\n if (processedAttributes) {\n (span as Mutable<ReadableSpan>).attributes = processedAttributes;\n }\n return span;\n });\n\n 
super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint: config.endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
package/dist/openInferenceOTLPExporter.d.ts ADDED
@@ -0,0 +1,7 @@
+ import type { ExportResult } from '@opentelemetry/core';
+ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
+ import type { ReadableSpan } from '@opentelemetry/sdk-trace-base';
+ export declare class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {
+     export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void): void;
+ }
+ //# sourceMappingURL=openInferenceOTLPExporter.d.ts.map
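
For reference, a sketch of the attribute rewrite this `export` override performs before handing spans to `OTLPTraceExporter.export`, based on the compiled implementation in `dist/index.js` (values are illustrative; the subsequent OpenInference attribute conversion is not shown):

```typescript
// Mastra-style attributes as they arrive on a ReadableSpan.
const before = {
  'gen_ai.prompt': '{"messages":[{"role":"user","content":"Hi"}]}',
  'gen_ai.completion': '{"text":"Hello!"}',
};

// After the override converts them (and before
// convertGenAISpanAttributesToOpenInferenceSpanAttributes runs):
const after = {
  ...before,
  'gen_ai.input.messages': '[{"role":"user","parts":[{"type":"text","content":"Hi"}]}]',
  'gen_ai.output.messages': '[{"role":"assistant","parts":[{"type":"text","content":"Hello!"}]}]',
};
```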
package/dist/openInferenceOTLPExporter.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"openInferenceOTLPExporter.d.ts","sourceRoot":"","sources":["../src/openInferenceOTLPExporter.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0CAA0C,CAAC;AAC7E,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAGlE,qBAAa,8BAA+B,SAAQ,iBAAiB;IACnE,MAAM,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE,cAAc,EAAE,CAAC,MAAM,EAAE,YAAY,KAAK,IAAI;CAwB7E"}
package/package.json ADDED
@@ -0,0 +1,65 @@
+ {
+   "name": "@mastra/arize",
+   "version": "0.0.0-agent-error-handling-20251023180025",
+   "description": "Arize observability provider for Mastra - includes AI tracing and future observability features",
+   "type": "module",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "files": [
+     "dist",
+     "CHANGELOG.md"
+   ],
+   "exports": {
+     ".": {
+       "import": {
+         "types": "./dist/index.d.ts",
+         "default": "./dist/index.js"
+       },
+       "require": {
+         "types": "./dist/index.d.ts",
+         "default": "./dist/index.cjs"
+       }
+     },
+     "./package.json": "./package.json"
+   },
+   "license": "Apache-2.0",
+   "dependencies": {
+     "@arizeai/openinference-genai": "0.1.0",
+     "@arizeai/openinference-semantic-conventions": "^2.1.2",
+     "@opentelemetry/exporter-trace-otlp-proto": "^0.205.0",
+     "@opentelemetry/resources": "^2.1.0",
+     "@opentelemetry/sdk-trace-base": "^2.1.0",
+     "@opentelemetry/core": "^2.1.0",
+     "@mastra/otel-exporter": "0.0.0-agent-error-handling-20251023180025"
+   },
+   "devDependencies": {
+     "@microsoft/api-extractor": "^7.52.8",
+     "@types/node": "^20.19.0",
+     "eslint": "^9.36.0",
+     "tsup": "^8.5.0",
+     "typescript": "^5.8.3",
+     "vitest": "^3.2.4",
+     "@internal/lint": "0.0.0-agent-error-handling-20251023180025",
+     "@internal/types-builder": "0.0.0-agent-error-handling-20251023180025",
+     "@mastra/core": "0.0.0-agent-error-handling-20251023180025"
+   },
+   "peerDependencies": {
+     "@mastra/core": "0.0.0-agent-error-handling-20251023180025"
+   },
+   "homepage": "https://mastra.ai",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/mastra-ai/mastra.git",
+     "directory": "observability/arize"
+   },
+   "bugs": {
+     "url": "https://github.com/mastra-ai/mastra/issues"
+   },
+   "scripts": {
+     "build": "tsup --silent --config tsup.config.ts",
+     "build:watch": "pnpm build --watch",
+     "test": "vitest run",
+     "test:watch": "vitest watch",
+     "lint": "eslint ."
+   }
+ }
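
The `exports` map above exposes both module formats; a quick consumption sketch, using only what the manifest and the `dist` output declare:

```typescript
// ESM: the "import" condition resolves to dist/index.js
import { ArizeExporter, ARIZE_AX_ENDPOINT } from '@mastra/arize';

// CJS consumers are routed to dist/index.cjs via the "require" condition:
//   const { ArizeExporter } = require('@mastra/arize');

console.log(ARIZE_AX_ENDPOINT); // "https://otlp.arize.com/v1/traces"
new ArizeExporter({ endpoint: 'http://localhost:6006/v1/traces' }); // placeholder endpoint
```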