@mastra/arize 1.0.0-beta.0 → 1.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,25 @@
  # @mastra/arize

+ ## 1.0.0-beta.2
+
+ ### Minor Changes
+
+ - Adds out of the box support for sessions in Arize AX and Phoenix. Additionally maps user ids and passed through metadata if available. ([#10731](https://github.com/mastra-ai/mastra/pull/10731))
+
+ ### Patch Changes
+
+ - Updated dependencies [[`ac0d2f4`](https://github.com/mastra-ai/mastra/commit/ac0d2f4ff8831f72c1c66c2be809706d17f65789), [`1a0d3fc`](https://github.com/mastra-ai/mastra/commit/1a0d3fc811482c9c376cdf79ee615c23bae9b2d6), [`85a628b`](https://github.com/mastra-ai/mastra/commit/85a628b1224a8f64cd82ea7f033774bf22df7a7e), [`c237233`](https://github.com/mastra-ai/mastra/commit/c23723399ccedf7f5744b3f40997b79246bfbe64), [`15f9e21`](https://github.com/mastra-ai/mastra/commit/15f9e216177201ea6e3f6d0bfb063fcc0953444f), [`ff94dea`](https://github.com/mastra-ai/mastra/commit/ff94dea935f4e34545c63bcb6c29804732698809), [`5b2ff46`](https://github.com/mastra-ai/mastra/commit/5b2ff4651df70c146523a7fca773f8eb0a2272f8), [`db41688`](https://github.com/mastra-ai/mastra/commit/db4168806d007417e2e60b4f68656dca4e5f40c9), [`5ca599d`](https://github.com/mastra-ai/mastra/commit/5ca599d0bb59a1595f19f58473fcd67cc71cef58), [`bff1145`](https://github.com/mastra-ai/mastra/commit/bff114556b3cbadad9b2768488708f8ad0e91475), [`5c8ca24`](https://github.com/mastra-ai/mastra/commit/5c8ca247094e0cc2cdbd7137822fb47241f86e77), [`e191844`](https://github.com/mastra-ai/mastra/commit/e1918444ca3f80e82feef1dad506cd4ec6e2875f), [`22553f1`](https://github.com/mastra-ai/mastra/commit/22553f11c63ee5e966a9c034a349822249584691), [`7237163`](https://github.com/mastra-ai/mastra/commit/72371635dbf96a87df4b073cc48fc655afbdce3d), [`2500740`](https://github.com/mastra-ai/mastra/commit/2500740ea23da067d6e50ec71c625ab3ce275e64), [`873ecbb`](https://github.com/mastra-ai/mastra/commit/873ecbb517586aa17d2f1e99283755b3ebb2863f), [`4f9bbe5`](https://github.com/mastra-ai/mastra/commit/4f9bbe5968f42c86f4930b8193de3c3c17e5bd36), [`02e51fe`](https://github.com/mastra-ai/mastra/commit/02e51feddb3d4155cfbcc42624fd0d0970d032c0), [`8f3fa3a`](https://github.com/mastra-ai/mastra/commit/8f3fa3a652bb77da092f913ec51ae46e3a7e27dc), [`cd29ad2`](https://github.com/mastra-ai/mastra/commit/cd29ad23a255534e8191f249593849ed29160886), [`bdf4d8c`](https://github.com/mastra-ai/mastra/commit/bdf4d8cdc656d8a2c21d81834bfa3bfa70f56c16), [`854e3da`](https://github.com/mastra-ai/mastra/commit/854e3dad5daac17a91a20986399d3a51f54bf68b), [`ce18d38`](https://github.com/mastra-ai/mastra/commit/ce18d38678c65870350d123955014a8432075fd9), [`cccf9c8`](https://github.com/mastra-ai/mastra/commit/cccf9c8b2d2dfc1a5e63919395b83d78c89682a0), [`61a5705`](https://github.com/mastra-ai/mastra/commit/61a570551278b6743e64243b3ce7d73de915ca8a), [`db70a48`](https://github.com/mastra-ai/mastra/commit/db70a48aeeeeb8e5f92007e8ede52c364ce15287), [`f0fdc14`](https://github.com/mastra-ai/mastra/commit/f0fdc14ee233d619266b3d2bbdeea7d25cfc6d13), [`db18bc9`](https://github.com/mastra-ai/mastra/commit/db18bc9c3825e2c1a0ad9a183cc9935f6691bfa1), [`9b37b56`](https://github.com/mastra-ai/mastra/commit/9b37b565e1f2a76c24f728945cc740c2b09be9da), [`41a23c3`](https://github.com/mastra-ai/mastra/commit/41a23c32f9877d71810f37e24930515df2ff7a0f), [`5d171ad`](https://github.com/mastra-ai/mastra/commit/5d171ad9ef340387276b77c2bb3e83e83332d729), [`f03ae60`](https://github.com/mastra-ai/mastra/commit/f03ae60500fe350c9d828621006cdafe1975fdd8), [`d1e74a0`](https://github.com/mastra-ai/mastra/commit/d1e74a0a293866dece31022047f5dbab65a304d0), [`39e7869`](https://github.com/mastra-ai/mastra/commit/39e7869bc7d0ee391077ce291474d8a84eedccff), [`5761926`](https://github.com/mastra-ai/mastra/commit/57619260c4a2cdd598763abbacd90de594c6bc76), [`c900fdd`](https://github.com/mastra-ai/mastra/commit/c900fdd504c41348efdffb205cfe80d48c38fa33), 
[`604a79f`](https://github.com/mastra-ai/mastra/commit/604a79fecf276e26a54a3fe01bb94e65315d2e0e), [`887f0b4`](https://github.com/mastra-ai/mastra/commit/887f0b4746cdbd7cb7d6b17ac9f82aeb58037ea5), [`2562143`](https://github.com/mastra-ai/mastra/commit/256214336b4faa78646c9c1776612393790d8784), [`ef11a61`](https://github.com/mastra-ai/mastra/commit/ef11a61920fa0ed08a5b7ceedd192875af119749)]:
+   - @mastra/core@1.0.0-beta.6
+   - @mastra/otel-exporter@1.0.0-beta.2
+
+ ## 1.0.0-beta.1
+
+ ### Patch Changes
+
+ - Updated dependencies [[`2319326`](https://github.com/mastra-ai/mastra/commit/2319326f8c64e503a09bbcf14be2dd65405445e0), [`d629361`](https://github.com/mastra-ai/mastra/commit/d629361a60f6565b5bfb11976fdaf7308af858e2), [`08c31c1`](https://github.com/mastra-ai/mastra/commit/08c31c188ebccd598acaf55e888b6397d01f7eae), [`fd3d338`](https://github.com/mastra-ai/mastra/commit/fd3d338a2c362174ed5b383f1f011ad9fb0302aa), [`c30400a`](https://github.com/mastra-ai/mastra/commit/c30400a49b994b1b97256fe785eb6c906fc2b232), [`69e0a87`](https://github.com/mastra-ai/mastra/commit/69e0a878896a2da9494945d86e056a5f8f05b851), [`01f8878`](https://github.com/mastra-ai/mastra/commit/01f88783de25e4de048c1c8aace43e26373c6ea5), [`4c77209`](https://github.com/mastra-ai/mastra/commit/4c77209e6c11678808b365d545845918c40045c8), [`d827d08`](https://github.com/mastra-ai/mastra/commit/d827d0808ffe1f3553a84e975806cc989b9735dd), [`23c10a1`](https://github.com/mastra-ai/mastra/commit/23c10a1efdd9a693c405511ab2dc8a1236603162), [`676ccc7`](https://github.com/mastra-ai/mastra/commit/676ccc7fe92468d2d45d39c31a87825c89fd1ea0), [`c10398d`](https://github.com/mastra-ai/mastra/commit/c10398d5b88f1d4af556f4267ff06f1d11e89179), [`00c2387`](https://github.com/mastra-ai/mastra/commit/00c2387f5f04a365316f851e58666ac43f8c4edf), [`ad6250d`](https://github.com/mastra-ai/mastra/commit/ad6250dbdaad927e29f74a27b83f6c468b50a705), [`3a73998`](https://github.com/mastra-ai/mastra/commit/3a73998fa4ebeb7f3dc9301afe78095fc63e7999), [`e16d553`](https://github.com/mastra-ai/mastra/commit/e16d55338403c7553531cc568125c63d53653dff), [`4d59f58`](https://github.com/mastra-ai/mastra/commit/4d59f58de2d90d6e2810a19d4518e38ddddb9038), [`e1bb9c9`](https://github.com/mastra-ai/mastra/commit/e1bb9c94b4eb68b019ae275981be3feb769b5365), [`351a11f`](https://github.com/mastra-ai/mastra/commit/351a11fcaf2ed1008977fa9b9a489fc422e51cd4)]:
+   - @mastra/core@1.0.0-beta.3
+   - @mastra/otel-exporter@1.0.0-beta.1
+
  ## 1.0.0-beta.0

  ### Major Changes
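The dist changes further down in this diff implement that mapping: before export, Mastra's `threadId` and `userId` span attributes are lifted onto the OpenInference `session.id` and `user.id` attributes. A minimal sketch of the effect, assuming those OpenInference attribute names (the helper below is illustrative, not part of the package's API):

```ts
// Illustrative only: mirrors the mapping added to dist/index.js in this release.
// Attribute names assume OpenInference's session.id / user.id conventions.
type SpanAttributes = Record<string, string | number | boolean | undefined>;

function mapMastraIdsToOpenInference(attributes: SpanAttributes): SpanAttributes {
  const result: SpanAttributes = { ...attributes };
  if (typeof result['threadId'] === 'string') {
    result['session.id'] = result['threadId']; // Mastra thread becomes the session in Arize AX / Phoenix
    delete result['threadId'];
  }
  if (typeof result['userId'] === 'string') {
    result['user.id'] = result['userId']; // user id is surfaced for per-user filtering
    delete result['userId'];
  }
  return result;
}

// A span tagged with Mastra ids is grouped by session and user after export:
console.log(mapMastraIdsToOpenInference({ threadId: 'thread-123', userId: 'user-456' }));
// { 'session.id': 'thread-123', 'user.id': 'user-456' }
```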
package/README.md CHANGED
@@ -135,6 +135,20 @@ const mastra = new Mastra({
  });
  ```

+ ### Custom metadata
+
+ Any custom span attributes that are not part of the standard Mastra/OpenInference fields are serialized into the OpenInference `metadata` payload and shown in Arize/Phoenix. An easy way to add them is through `tracingOptions.metadata`:
+
+ ```ts
+ await agent.generate(input, {
+   tracingOptions: {
+     metadata: {
+       companyId: 'acme-co',
+     },
+   },
+ });
+ ```
+
  ## OpenInference Semantic Conventions

  This exporter follows the [OpenInference Semantic Conventions](https://github.com/Arize-ai/openinference/tree/main/spec) for generative AI applications.
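The dist changes below show how this surfaces on the exported span: every attribute key that does not start with a reserved prefix (`gen_ai.`, `llm.`, `mastra`, `agent.`, and the others listed in the diff) is collected and serialized into a single JSON string stored on the OpenInference `metadata` attribute. A rough, standalone sketch of that collection step (the function name is illustrative, not the package's API):

```ts
// Sketch of the metadata collection added in dist/index.js below; the prefix
// list is copied from the diff, everything else here is illustrative.
const RESERVED_PREFIXES = [
  'gen_ai.', 'llm.', 'input.', 'output.', 'span.', 'mastra', 'agent.',
  'workflow.', 'mcp.', 'openinference.', 'retrieval.', 'reranker.',
  'embedding.', 'document.', 'tool', 'error.', 'http.', 'db.',
];
const RESERVED_KEYS = new Set(['threadId', 'userId', 'input', 'output', 'sessionId', 'metadata']);

function collectCustomMetadata(attributes: Record<string, unknown>): string | undefined {
  const entries: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(attributes)) {
    const reserved = RESERVED_PREFIXES.some(prefix => key.startsWith(prefix)) || RESERVED_KEYS.has(key);
    if (!reserved) entries[key] = value; // only custom keys end up in metadata
  }
  return Object.keys(entries).length > 0 ? JSON.stringify(entries) : undefined;
}

// The README example above would be exported as a metadata payload like:
console.log(collectCustomMetadata({ 'gen_ai.prompt': '...', companyId: 'acme-co' }));
// '{"companyId":"acme-co"}'
```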
package/dist/index.cjs CHANGED
@@ -88,21 +88,74 @@ var convertMastraMessagesToGenAIMessages = (inputOutputString) => {
  var OpenInferenceOTLPTraceExporter = class extends exporterTraceOtlpProto.OTLPTraceExporter {
    export(spans, resultCallback) {
      const processedSpans = spans.map((span) => {
-       if (span.attributes?.["gen_ai.prompt"] && typeof span.attributes["gen_ai.prompt"] === "string") {
-         span.attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(
-           span.attributes["gen_ai.prompt"]
-         );
+       const attributes = { ...span.attributes ?? {} };
+       const mutableSpan = span;
+       if (attributes["gen_ai.prompt"] && typeof attributes["gen_ai.prompt"] === "string") {
+         attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.prompt"]);
        }
-       if (span.attributes?.["gen_ai.completion"] && typeof span.attributes["gen_ai.completion"] === "string") {
-         span.attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(
-           span.attributes["gen_ai.completion"]
-         );
+       if (attributes["gen_ai.completion"] && typeof attributes["gen_ai.completion"] === "string") {
+         attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.completion"]);
        }
-       const processedAttributes = openinferenceGenai.convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);
+       const reservedPrefixes = [
+         "gen_ai.",
+         "llm.",
+         "input.",
+         "output.",
+         "span.",
+         "mastra",
+         "agent.",
+         "workflow.",
+         "mcp.",
+         "openinference.",
+         "retrieval.",
+         "reranker.",
+         "embedding.",
+         "document.",
+         "tool",
+         "error.",
+         "http.",
+         "db."
+       ];
+       const metadataEntries = {};
+       const reservedExact = /* @__PURE__ */ new Set(["input", "output", "sessionId", "metadata"]);
+       for (const [key, value] of Object.entries(attributes)) {
+         const isReserved = reservedPrefixes.some((prefix) => key.startsWith(prefix)) || key === "threadId" || key === "userId" || key === openinferenceSemanticConventions.SemanticConventions.SESSION_ID || key === openinferenceSemanticConventions.SemanticConventions.USER_ID || reservedExact.has(key);
+         if (!isReserved) {
+           metadataEntries[key] = value;
+         }
+       }
+       let metadataPayload;
+       if (Object.keys(metadataEntries).length > 0) {
+         try {
+           metadataPayload = JSON.stringify(metadataEntries);
+           attributes[openinferenceSemanticConventions.SemanticConventions.METADATA] = metadataPayload;
+         } catch {
+         }
+       }
+       const sessionId = typeof attributes["threadId"] === "string" ? attributes["threadId"] : void 0;
+       const userId = typeof attributes["userId"] === "string" ? attributes["userId"] : void 0;
+       if (sessionId) {
+         attributes[openinferenceSemanticConventions.SemanticConventions.SESSION_ID] = sessionId;
+         delete attributes["threadId"];
+       }
+       if (userId) {
+         attributes[openinferenceSemanticConventions.SemanticConventions.USER_ID] = userId;
+         delete attributes["userId"];
+       }
+       const processedAttributes = openinferenceGenai.convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);
        if (processedAttributes) {
-         span.attributes = processedAttributes;
+         if (sessionId) {
+           processedAttributes[openinferenceSemanticConventions.SemanticConventions.SESSION_ID] = sessionId;
+         }
+         if (userId) {
+           processedAttributes[openinferenceSemanticConventions.SemanticConventions.USER_ID] = userId;
+         }
+         if (metadataPayload) {
+           processedAttributes[openinferenceSemanticConventions.SemanticConventions.METADATA] = metadataPayload;
+         }
+         mutableSpan.attributes = processedAttributes;
        }
-       return span;
+       return mutableSpan;
      });
      super.export(processedSpans, resultCallback);
    }
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":["OTLPTraceExporter","convertGenAISpanAttributesToOpenInferenceSpanAttributes","OtelExporter","logger","ConsoleLogger","SEMRESATTRS_PROJECT_NAME"],"mappings":";;;;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;AC1JO,IAAM,8BAAA,GAAN,cAA6CA,wCAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AAEvC,MAAA,IAAI,IAAA,CAAK,aAAa,eAAe,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,eAAe,CAAA,KAAM,QAAA,EAAU;AAC9F,QAAA,IAAA,CAAK,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA;AAAA,UACzC,IAAA,CAAK,WAAW,eAAe;AAAA,SACjC;AAAA,MACF;AAEA,MAAA,IAAI,IAAA,CAAK,aAAa,mBAAmB,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,mBAAmB,CAAA,KAAM,QAAA,EAAU;AACtG,QAAA,IAAA,CAAK,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA;AAAA,UAC1C,IAAA,CAAK,WAAW,mBAAmB;AAAA,SACrC;AAAA,MACF;AACA,MAAA,MAAM,mBAAA,GAAsBC,0EAAA,CAAwD,IAAA,CAAK,UAAU,CAAA;AAEnG,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAC,KAA+B,UAAA,GAAa,mBAAA;AAAA,MAC/C;AACA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;ACzBA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,I
AAM,aAAA,GAAN,cAA4BC,yBAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAMC,QAAA,GAAS,IAAIC,oBAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAAD,QAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAACE,yDAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.cjs","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param 
inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n // convert Mastra input messages to GenAI messages if present\n if (span.attributes?.['gen_ai.prompt'] && typeof span.attributes['gen_ai.prompt'] === 'string') {\n span.attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.prompt'],\n );\n }\n // convert Mastra output messages to GenAI messages if present\n if (span.attributes?.['gen_ai.completion'] && typeof span.attributes['gen_ai.completion'] === 'string') {\n span.attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.completion'],\n );\n }\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);\n // only add processed attributes if conversion 
was successful\n if (processedAttributes) {\n (span as Mutable<ReadableSpan>).attributes = processedAttributes;\n }\n return span;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
+ {"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":["OTLPTraceExporter","SemanticConventions","convertGenAISpanAttributesToOpenInferenceSpanAttributes","OtelExporter","logger","ConsoleLogger","SEMRESATTRS_PROJECT_NAME"],"mappings":";;;;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;ACzJO,IAAM,8BAAA,GAAN,cAA6CA,wCAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AACvC,MAAA,MAAM,aAAa,EAAE,GAAI,IAAA,CAAK,UAAA,IAAc,EAAC,EAAG;AAChD,MAAA,MAAM,WAAA,GAAc,IAAA;AAEpB,MAAA,IAAI,WAAW,eAAe,CAAA,IAAK,OAAO,UAAA,CAAW,eAAe,MAAM,QAAA,EAAU;AAClF,QAAA,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,eAAe,CAAC,CAAA;AAAA,MACxG;AAEA,MAAA,IAAI,WAAW,mBAAmB,CAAA,IAAK,OAAO,UAAA,CAAW,mBAAmB,MAAM,QAAA,EAAU;AAC1F,QAAA,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,mBAAmB,CAAC,CAAA;AAAA,MAC7G;AAGA,MAAA,MAAM,gBAAA,GAAmB;AAAA,QACvB,SAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,SAAA;AAAA,QACA,OAAA;AAAA,QACA,QAAA;AAAA,QACA,QAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,gBAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,Q
ACA,OAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,kBAA2C,EAAC;AAClD,MAAA,MAAM,aAAA,uBAAoB,GAAA,CAAY,CAAC,SAAS,QAAA,EAAU,WAAA,EAAa,UAAU,CAAC,CAAA;AAClF,MAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,EAAG;AACrD,QAAA,MAAM,UAAA,GACJ,iBAAiB,IAAA,CAAK,CAAA,MAAA,KAAU,IAAI,UAAA,CAAW,MAAM,CAAC,CAAA,IACtD,GAAA,KAAQ,cACR,GAAA,KAAQ,QAAA,IACR,QAAQC,oDAAA,CAAoB,UAAA,IAC5B,QAAQA,oDAAA,CAAoB,OAAA,IAC5B,aAAA,CAAc,GAAA,CAAI,GAAG,CAAA;AACvB,QAAA,IAAI,CAAC,UAAA,EAAY;AACf,UAAA,eAAA,CAAgB,GAAG,CAAA,GAAI,KAAA;AAAA,QACzB;AAAA,MACF;AAEA,MAAA,IAAI,eAAA;AACJ,MAAA,IAAI,MAAA,CAAO,IAAA,CAAK,eAAe,CAAA,CAAE,SAAS,CAAA,EAAG;AAC3C,QAAA,IAAI;AACF,UAAA,eAAA,GAAkB,IAAA,CAAK,UAAU,eAAe,CAAA;AAChD,UAAA,UAAA,CAAWA,oDAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QAC7C,CAAA,CAAA,MAAQ;AAAA,QAER;AAAA,MACF;AAEA,MAAA,MAAM,SAAA,GAAY,OAAO,UAAA,CAAW,UAAU,MAAM,QAAA,GAAY,UAAA,CAAW,UAAU,CAAA,GAAe,MAAA;AACpG,MAAA,MAAM,MAAA,GAAS,OAAO,UAAA,CAAW,QAAQ,MAAM,QAAA,GAAY,UAAA,CAAW,QAAQ,CAAA,GAAe,MAAA;AAE7F,MAAA,IAAI,SAAA,EAAW;AACb,QAAA,UAAA,CAAWA,oDAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAC7C,QAAA,OAAO,WAAW,UAAU,CAAA;AAAA,MAC9B;AAEA,MAAA,IAAI,MAAA,EAAQ;AACV,QAAA,UAAA,CAAWA,oDAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAC1C,QAAA,OAAO,WAAW,QAAQ,CAAA;AAAA,MAC5B;AAEA,MAAA,MAAM,mBAAA,GAAsBC,2EAAwD,UAAU,CAAA;AAE9F,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAA,IAAI,SAAA,EAAW;AACb,UAAA,mBAAA,CAAoBD,oDAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAAA,QACxD;AACA,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,mBAAA,CAAoBA,oDAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAAA,QACrD;AACA,QAAA,IAAI,eAAA,EAAiB;AACnB,UAAA,mBAAA,CAAoBA,oDAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QACtD;AACA,QAAA,WAAA,CAAY,UAAA,GAAa,mBAAA;AAAA,MAC3B;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;AC9FA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4BE,yBAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAMC,QAAA,GAAS,IAAIC,oBAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAAD,QAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAACE,yDAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.cjs","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; 
toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n 
default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport { SemanticConventions } from '@arizeai/openinference-semantic-conventions';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n const attributes = { ...(span.attributes ?? {}) };\n const mutableSpan = span as Mutable<ReadableSpan>;\n\n if (attributes['gen_ai.prompt'] && typeof attributes['gen_ai.prompt'] === 'string') {\n attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.prompt']);\n }\n\n if (attributes['gen_ai.completion'] && typeof attributes['gen_ai.completion'] === 'string') {\n attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.completion']);\n }\n\n // Gather custom attributes into OpenInference metadata (flat best-effort)\n const reservedPrefixes = [\n 'gen_ai.',\n 'llm.',\n 'input.',\n 'output.',\n 'span.',\n 'mastra',\n 'agent.',\n 'workflow.',\n 'mcp.',\n 'openinference.',\n 'retrieval.',\n 'reranker.',\n 'embedding.',\n 'document.',\n 'tool',\n 'error.',\n 'http.',\n 'db.',\n ];\n const metadataEntries: Record<string, unknown> = {};\n const reservedExact = new Set<string>(['input', 'output', 'sessionId', 'metadata']);\n for (const [key, value] of Object.entries(attributes)) {\n const isReserved =\n reservedPrefixes.some(prefix => key.startsWith(prefix)) ||\n key === 'threadId' ||\n key === 'userId' ||\n key === SemanticConventions.SESSION_ID ||\n key === SemanticConventions.USER_ID ||\n reservedExact.has(key);\n if (!isReserved) {\n metadataEntries[key] = value;\n }\n }\n\n let metadataPayload: string | undefined;\n if (Object.keys(metadataEntries).length > 0) {\n try {\n metadataPayload = JSON.stringify(metadataEntries);\n attributes[SemanticConventions.METADATA] = metadataPayload;\n } catch {\n // best-effort only\n }\n }\n\n const sessionId = typeof attributes['threadId'] === 'string' ? (attributes['threadId'] as string) : undefined;\n const userId = typeof attributes['userId'] === 'string' ? 
(attributes['userId'] as string) : undefined;\n\n if (sessionId) {\n attributes[SemanticConventions.SESSION_ID] = sessionId;\n delete attributes['threadId'];\n }\n\n if (userId) {\n attributes[SemanticConventions.USER_ID] = userId;\n delete attributes['userId'];\n }\n\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);\n\n if (processedAttributes) {\n if (sessionId) {\n processedAttributes[SemanticConventions.SESSION_ID] = sessionId;\n }\n if (userId) {\n processedAttributes[SemanticConventions.USER_ID] = userId;\n }\n if (metadataPayload) {\n processedAttributes[SemanticConventions.METADATA] = metadataPayload;\n }\n mutableSpan.attributes = processedAttributes;\n }\n\n return mutableSpan;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
- import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';
+ import { SEMRESATTRS_PROJECT_NAME, SemanticConventions } from '@arizeai/openinference-semantic-conventions';
  import { ConsoleLogger } from '@mastra/core/logger';
  import { OtelExporter } from '@mastra/otel-exporter';
  import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';
@@ -86,21 +86,74 @@ var convertMastraMessagesToGenAIMessages = (inputOutputString) => {
  var OpenInferenceOTLPTraceExporter = class extends OTLPTraceExporter {
    export(spans, resultCallback) {
      const processedSpans = spans.map((span) => {
-       if (span.attributes?.["gen_ai.prompt"] && typeof span.attributes["gen_ai.prompt"] === "string") {
-         span.attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(
-           span.attributes["gen_ai.prompt"]
-         );
+       const attributes = { ...span.attributes ?? {} };
+       const mutableSpan = span;
+       if (attributes["gen_ai.prompt"] && typeof attributes["gen_ai.prompt"] === "string") {
+         attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.prompt"]);
        }
-       if (span.attributes?.["gen_ai.completion"] && typeof span.attributes["gen_ai.completion"] === "string") {
-         span.attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(
-           span.attributes["gen_ai.completion"]
-         );
+       if (attributes["gen_ai.completion"] && typeof attributes["gen_ai.completion"] === "string") {
+         attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.completion"]);
        }
-       const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);
+       const reservedPrefixes = [
+         "gen_ai.",
+         "llm.",
+         "input.",
+         "output.",
+         "span.",
+         "mastra",
+         "agent.",
+         "workflow.",
+         "mcp.",
+         "openinference.",
+         "retrieval.",
+         "reranker.",
+         "embedding.",
+         "document.",
+         "tool",
+         "error.",
+         "http.",
+         "db."
+       ];
+       const metadataEntries = {};
+       const reservedExact = /* @__PURE__ */ new Set(["input", "output", "sessionId", "metadata"]);
+       for (const [key, value] of Object.entries(attributes)) {
+         const isReserved = reservedPrefixes.some((prefix) => key.startsWith(prefix)) || key === "threadId" || key === "userId" || key === SemanticConventions.SESSION_ID || key === SemanticConventions.USER_ID || reservedExact.has(key);
+         if (!isReserved) {
+           metadataEntries[key] = value;
+         }
+       }
+       let metadataPayload;
+       if (Object.keys(metadataEntries).length > 0) {
+         try {
+           metadataPayload = JSON.stringify(metadataEntries);
+           attributes[SemanticConventions.METADATA] = metadataPayload;
+         } catch {
+         }
+       }
+       const sessionId = typeof attributes["threadId"] === "string" ? attributes["threadId"] : void 0;
+       const userId = typeof attributes["userId"] === "string" ? attributes["userId"] : void 0;
+       if (sessionId) {
+         attributes[SemanticConventions.SESSION_ID] = sessionId;
+         delete attributes["threadId"];
+       }
+       if (userId) {
+         attributes[SemanticConventions.USER_ID] = userId;
+         delete attributes["userId"];
+       }
+       const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);
        if (processedAttributes) {
-         span.attributes = processedAttributes;
+         if (sessionId) {
+           processedAttributes[SemanticConventions.SESSION_ID] = sessionId;
+         }
+         if (userId) {
+           processedAttributes[SemanticConventions.USER_ID] = userId;
+         }
+         if (metadataPayload) {
+           processedAttributes[SemanticConventions.METADATA] = metadataPayload;
+         }
+         mutableSpan.attributes = processedAttributes;
        }
-       return span;
+       return mutableSpan;
      });
      super.export(processedSpans, resultCallback);
    }
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":[],"mappings":";;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;AC1JO,IAAM,8BAAA,GAAN,cAA6C,iBAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AAEvC,MAAA,IAAI,IAAA,CAAK,aAAa,eAAe,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,eAAe,CAAA,KAAM,QAAA,EAAU;AAC9F,QAAA,IAAA,CAAK,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA;AAAA,UACzC,IAAA,CAAK,WAAW,eAAe;AAAA,SACjC;AAAA,MACF;AAEA,MAAA,IAAI,IAAA,CAAK,aAAa,mBAAmB,CAAA,IAAK,OAAO,IAAA,CAAK,UAAA,CAAW,mBAAmB,CAAA,KAAM,QAAA,EAAU;AACtG,QAAA,IAAA,CAAK,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA;AAAA,UAC1C,IAAA,CAAK,WAAW,mBAAmB;AAAA,SACrC;AAAA,MACF;AACA,MAAA,MAAM,mBAAA,GAAsB,uDAAA,CAAwD,IAAA,CAAK,UAAU,CAAA;AAEnG,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAC,KAA+B,UAAA,GAAa,mBAAA;AAAA,MAC/C;AACA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;ACzBA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4B,YAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAM,MAAA,GAAS,IAAI,aAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,
IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAAC,wBAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.js","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains 
messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n // convert Mastra input messages to GenAI messages if present\n if (span.attributes?.['gen_ai.prompt'] && typeof span.attributes['gen_ai.prompt'] === 'string') {\n span.attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.prompt'],\n );\n }\n // convert Mastra output messages to GenAI messages if present\n if (span.attributes?.['gen_ai.completion'] && typeof span.attributes['gen_ai.completion'] === 'string') {\n span.attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(\n span.attributes['gen_ai.completion'],\n );\n }\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(span.attributes);\n // only add processed attributes if conversion was successful\n if (processedAttributes) {\n (span as Mutable<ReadableSpan>).attributes = processedAttributes;\n }\n return span;\n });\n\n 
super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
1
+ {"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":[],"mappings":";;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;ACzJO,IAAM,8BAAA,GAAN,cAA6C,iBAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AACvC,MAAA,MAAM,aAAa,EAAE,GAAI,IAAA,CAAK,UAAA,IAAc,EAAC,EAAG;AAChD,MAAA,MAAM,WAAA,GAAc,IAAA;AAEpB,MAAA,IAAI,WAAW,eAAe,CAAA,IAAK,OAAO,UAAA,CAAW,eAAe,MAAM,QAAA,EAAU;AAClF,QAAA,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,eAAe,CAAC,CAAA;AAAA,MACxG;AAEA,MAAA,IAAI,WAAW,mBAAmB,CAAA,IAAK,OAAO,UAAA,CAAW,mBAAmB,MAAM,QAAA,EAAU;AAC1F,QAAA,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,mBAAmB,CAAC,CAAA;AAAA,MAC7G;AAGA,MAAA,MAAM,gBAAA,GAAmB;AAAA,QACvB,SAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,SAAA;AAAA,QACA,OAAA;AAAA,QACA,QAAA;AAAA,QACA,QAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,gBAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,OAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,kBAA2C,EAAC;AAClD,MAAA,MAAM,aAAA,uBAAoB,GAAA,CAAY,CAAC,SAAS,QAAA,EAAU,WAAA,EAAa,UAAU,CAAC,CAAA;AAClF,MAAA,KAAA,MAAW,CAAC,GAAA
,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,EAAG;AACrD,QAAA,MAAM,UAAA,GACJ,iBAAiB,IAAA,CAAK,CAAA,MAAA,KAAU,IAAI,UAAA,CAAW,MAAM,CAAC,CAAA,IACtD,GAAA,KAAQ,cACR,GAAA,KAAQ,QAAA,IACR,QAAQ,mBAAA,CAAoB,UAAA,IAC5B,QAAQ,mBAAA,CAAoB,OAAA,IAC5B,aAAA,CAAc,GAAA,CAAI,GAAG,CAAA;AACvB,QAAA,IAAI,CAAC,UAAA,EAAY;AACf,UAAA,eAAA,CAAgB,GAAG,CAAA,GAAI,KAAA;AAAA,QACzB;AAAA,MACF;AAEA,MAAA,IAAI,eAAA;AACJ,MAAA,IAAI,MAAA,CAAO,IAAA,CAAK,eAAe,CAAA,CAAE,SAAS,CAAA,EAAG;AAC3C,QAAA,IAAI;AACF,UAAA,eAAA,GAAkB,IAAA,CAAK,UAAU,eAAe,CAAA;AAChD,UAAA,UAAA,CAAW,mBAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QAC7C,CAAA,CAAA,MAAQ;AAAA,QAER;AAAA,MACF;AAEA,MAAA,MAAM,SAAA,GAAY,OAAO,UAAA,CAAW,UAAU,MAAM,QAAA,GAAY,UAAA,CAAW,UAAU,CAAA,GAAe,MAAA;AACpG,MAAA,MAAM,MAAA,GAAS,OAAO,UAAA,CAAW,QAAQ,MAAM,QAAA,GAAY,UAAA,CAAW,QAAQ,CAAA,GAAe,MAAA;AAE7F,MAAA,IAAI,SAAA,EAAW;AACb,QAAA,UAAA,CAAW,mBAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAC7C,QAAA,OAAO,WAAW,UAAU,CAAA;AAAA,MAC9B;AAEA,MAAA,IAAI,MAAA,EAAQ;AACV,QAAA,UAAA,CAAW,mBAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAC1C,QAAA,OAAO,WAAW,QAAQ,CAAA;AAAA,MAC5B;AAEA,MAAA,MAAM,mBAAA,GAAsB,wDAAwD,UAAU,CAAA;AAE9F,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAA,IAAI,SAAA,EAAW;AACb,UAAA,mBAAA,CAAoB,mBAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAAA,QACxD;AACA,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,mBAAA,CAAoB,mBAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAAA,QACrD;AACA,QAAA,IAAI,eAAA,EAAiB;AACnB,UAAA,mBAAA,CAAoB,mBAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QACtD;AACA,QAAA,WAAA,CAAY,UAAA,GAAa,mBAAA;AAAA,MAC3B;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;AC9FA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4B,YAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAM,MAAA,GAAS,IAAI,aAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAAC,wBAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.js","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: 
MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // 
we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport { SemanticConventions } from '@arizeai/openinference-semantic-conventions';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n const attributes = { ...(span.attributes ?? {}) };\n const mutableSpan = span as Mutable<ReadableSpan>;\n\n if (attributes['gen_ai.prompt'] && typeof attributes['gen_ai.prompt'] === 'string') {\n attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.prompt']);\n }\n\n if (attributes['gen_ai.completion'] && typeof attributes['gen_ai.completion'] === 'string') {\n attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.completion']);\n }\n\n // Gather custom attributes into OpenInference metadata (flat best-effort)\n const reservedPrefixes = [\n 'gen_ai.',\n 'llm.',\n 'input.',\n 'output.',\n 'span.',\n 'mastra',\n 'agent.',\n 'workflow.',\n 'mcp.',\n 'openinference.',\n 'retrieval.',\n 'reranker.',\n 'embedding.',\n 'document.',\n 'tool',\n 'error.',\n 'http.',\n 'db.',\n ];\n const metadataEntries: Record<string, unknown> = {};\n const reservedExact = new Set<string>(['input', 'output', 'sessionId', 'metadata']);\n for (const [key, value] of Object.entries(attributes)) {\n const isReserved =\n reservedPrefixes.some(prefix => key.startsWith(prefix)) ||\n key === 'threadId' ||\n key === 'userId' ||\n key === SemanticConventions.SESSION_ID ||\n key === SemanticConventions.USER_ID ||\n reservedExact.has(key);\n if (!isReserved) {\n metadataEntries[key] = value;\n }\n }\n\n let metadataPayload: string | undefined;\n if (Object.keys(metadataEntries).length > 0) {\n try {\n metadataPayload = JSON.stringify(metadataEntries);\n attributes[SemanticConventions.METADATA] = metadataPayload;\n } catch {\n // best-effort only\n }\n }\n\n const sessionId = typeof attributes['threadId'] === 'string' ? (attributes['threadId'] as string) : undefined;\n const userId = typeof attributes['userId'] === 'string' ? 
(attributes['userId'] as string) : undefined;\n\n if (sessionId) {\n attributes[SemanticConventions.SESSION_ID] = sessionId;\n delete attributes['threadId'];\n }\n\n if (userId) {\n attributes[SemanticConventions.USER_ID] = userId;\n delete attributes['userId'];\n }\n\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);\n\n if (processedAttributes) {\n if (sessionId) {\n processedAttributes[SemanticConventions.SESSION_ID] = sessionId;\n }\n if (userId) {\n processedAttributes[SemanticConventions.USER_ID] = userId;\n }\n if (metadataPayload) {\n processedAttributes[SemanticConventions.METADATA] = metadataPayload;\n }\n mutableSpan.attributes = processedAttributes;\n }\n\n return mutableSpan;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
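The rewritten `OpenInferenceOTLPTraceExporter` in the new map implements the beta.2 changelog entry: the `threadId` and `userId` span attributes are promoted to OpenInference `session.id` and `user.id`, and any remaining non-reserved attributes are folded into a JSON `metadata` attribute before the GenAI-to-OpenInference conversion runs. A rough before/after sketch of that attribute rewriting (attribute values are invented for illustration; the full reserved-prefix list is in the source above):

```ts
// Span attributes roughly as emitted by Mastra tracing (illustrative values):
const incoming = {
  'gen_ai.prompt': '{"messages":[{"role":"user","content":"hi"}]}',
  threadId: 'thread-123',  // promoted to session.id
  userId: 'user-456',      // promoted to user.id
  'customer.tier': 'pro',  // not reserved, so folded into metadata
};

// Roughly what the attributes look like after the exporter's rewriting step,
// just before convertGenAISpanAttributesToOpenInferenceSpanAttributes runs:
const rewritten = {
  'gen_ai.prompt': incoming['gen_ai.prompt'], // original attribute is left in place
  'gen_ai.input.messages': '[{"role":"user","parts":[{"type":"text","content":"hi"}]}]',
  'session.id': 'thread-123',          // SemanticConventions.SESSION_ID (threadId is deleted)
  'user.id': 'user-456',               // SemanticConventions.USER_ID (userId is deleted)
  metadata: '{"customer.tier":"pro"}', // SemanticConventions.METADATA
};
```

The session, user, and metadata values are re-applied after the OpenInference conversion, so they survive even if the converter rewrites the attribute map wholesale.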
@@ -1 +1 @@
1
- {"version":3,"file":"openInferenceOTLPExporter.d.ts","sourceRoot":"","sources":["../src/openInferenceOTLPExporter.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0CAA0C,CAAC;AAC7E,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAGlE,qBAAa,8BAA+B,SAAQ,iBAAiB;IACnE,MAAM,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE,cAAc,EAAE,CAAC,MAAM,EAAE,YAAY,KAAK,IAAI;CAwB7E"}
1
+ {"version":3,"file":"openInferenceOTLPExporter.d.ts","sourceRoot":"","sources":["../src/openInferenceOTLPExporter.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0CAA0C,CAAC;AAC7E,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAGlE,qBAAa,8BAA+B,SAAQ,iBAAiB;IACnE,MAAM,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE,cAAc,EAAE,CAAC,MAAM,EAAE,YAAY,KAAK,IAAI;CA4F7E"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mastra/arize",
3
- "version": "1.0.0-beta.0",
3
+ "version": "1.0.0-beta.2",
4
4
  "description": "Arize observability provider for Mastra - includes tracing and future observability features",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -26,22 +26,24 @@
26
26
  "dependencies": {
27
27
  "@arizeai/openinference-genai": "0.1.0",
28
28
  "@arizeai/openinference-semantic-conventions": "^2.1.2",
29
+ "@opentelemetry/core": "^2.1.0",
29
30
  "@opentelemetry/exporter-trace-otlp-proto": "^0.205.0",
30
31
  "@opentelemetry/resources": "^2.1.0",
31
32
  "@opentelemetry/sdk-trace-base": "^2.1.0",
32
- "@opentelemetry/core": "^2.1.0",
33
- "@mastra/otel-exporter": "1.0.0-beta.0"
33
+ "@mastra/otel-exporter": "1.0.0-beta.2"
34
34
  },
35
35
  "devDependencies": {
36
36
  "@microsoft/api-extractor": "^7.52.8",
37
- "@types/node": "^20.19.0",
37
+ "@types/node": "22.13.17",
38
+ "@vitest/coverage-v8": "4.0.12",
39
+ "@vitest/ui": "4.0.12",
38
40
  "eslint": "^9.36.0",
39
41
  "tsup": "^8.5.0",
40
42
  "typescript": "^5.8.3",
41
- "vitest": "^3.2.4",
43
+ "vitest": "4.0.12",
42
44
  "@internal/lint": "0.0.53",
43
45
  "@internal/types-builder": "0.0.28",
44
- "@mastra/core": "1.0.0-beta.0"
46
+ "@mastra/core": "1.0.0-beta.6"
45
47
  },
46
48
  "peerDependencies": {
47
49
  "@mastra/core": ">=1.0.0-0 <2.0.0-0"