@mastra/arize 1.0.0-beta.2 → 1.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +46 -0
- package/dist/index.cjs +48 -134
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +49 -135
- package/dist/index.js.map +1 -1
- package/dist/openInferenceOTLPExporter.d.ts.map +1 -1
- package/package.json +5 -4
- package/dist/gen-ai.d.ts +0 -20
- package/dist/gen-ai.d.ts.map +0 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,51 @@
  # @mastra/arize
  
+ ## 1.0.0-beta.4
+
+ ### Patch Changes
+
+ - Updated dependencies [[`72df8ae`](https://github.com/mastra-ai/mastra/commit/72df8ae595584cdd7747d5c39ffaca45e4507227), [`9198899`](https://github.com/mastra-ai/mastra/commit/91988995c427b185c33714b7f3be955367911324), [`653e65a`](https://github.com/mastra-ai/mastra/commit/653e65ae1f9502c2958a32f47a5a2df11e612a92), [`c6fd6fe`](https://github.com/mastra-ai/mastra/commit/c6fd6fedd09e9cf8004b03a80925f5e94826ad7e), [`0bed332`](https://github.com/mastra-ai/mastra/commit/0bed332843f627202c6520eaf671771313cd20f3)]:
+   - @mastra/core@1.0.0-beta.9
+   - @mastra/otel-exporter@1.0.0-beta.4
+
+ ## 1.0.0-beta.3
+
+ ### Patch Changes
+
+ - Updated OtelExporters, Bridge, and Arize packages to better implement GenAI v1.38.0 Otel Semantic Conventions. See: ([#10591](https://github.com/mastra-ai/mastra/pull/10591))
+   https://github.com/open-telemetry/semantic-conventions/blob/v1.38.0/docs/gen-ai/README.md
+
+ - feat(observability): Add tags support to OtelExporter, OtelBridge, and ArizeExporter ([#10843](https://github.com/mastra-ai/mastra/pull/10843))
+
+   This change adds support for the `tracingOptions.tags` feature to the OpenTelemetry-based exporters and bridge. Tags are now included as span attributes when present on root spans, following the same pattern as Braintrust and Langfuse exporters.
+
+   **Changes:**
+   - **OtelExporter**: Tags are now included as `mastra.tags` span attribute for root spans
+   - **OtelBridge**: Tags flow through the SpanConverter and are included in native OTEL spans as `mastra.tags`
+   - **ArizeExporter**: Tags are mapped to the native OpenInference `tag.tags` semantic convention
+
+   **Implementation Details:**
+   - Tags are only included on root spans (by design)
+   - Tags are stored as JSON-stringified arrays for maximum backend compatibility (many OTEL backends have limited native array support)
+   - Empty or undefined tag arrays are not included in span attributes
+
+   **Usage:**
+
+   ```typescript
+   const result = await agent.generate({
+     messages: [{ role: 'user', content: 'Hello' }],
+     tracingOptions: {
+       tags: ['production', 'experiment-v2'],
+     },
+   });
+   ```
+
+   Fixes #10771
+
+ - Updated dependencies [[`3076c67`](https://github.com/mastra-ai/mastra/commit/3076c6778b18988ae7d5c4c5c466366974b2d63f), [`85d7ee1`](https://github.com/mastra-ai/mastra/commit/85d7ee18ff4e14d625a8a30ec6656bb49804989b), [`c6c1092`](https://github.com/mastra-ai/mastra/commit/c6c1092f8fbf76109303f69e000e96fd1960c4ce), [`81dc110`](https://github.com/mastra-ai/mastra/commit/81dc11008d147cf5bdc8996ead1aa61dbdebb6fc), [`7aedb74`](https://github.com/mastra-ai/mastra/commit/7aedb74883adf66af38e270e4068fd42e7a37036), [`8f02d80`](https://github.com/mastra-ai/mastra/commit/8f02d800777397e4b45d7f1ad041988a8b0c6630), [`d7aad50`](https://github.com/mastra-ai/mastra/commit/d7aad501ce61646b76b4b511e558ac4eea9884d0), [`ce0a73a`](https://github.com/mastra-ai/mastra/commit/ce0a73abeaa75b10ca38f9e40a255a645d50ebfb), [`a02e542`](https://github.com/mastra-ai/mastra/commit/a02e542d23179bad250b044b17ff023caa61739f), [`a372c64`](https://github.com/mastra-ai/mastra/commit/a372c640ad1fd12e8f0613cebdc682fc156b4d95), [`db500e8`](https://github.com/mastra-ai/mastra/commit/db500e8b8c5b4df9bd3590a7da6e007c08f94945), [`8846867`](https://github.com/mastra-ai/mastra/commit/8846867ffa9a3746767618e314bebac08eb77d87), [`42a42cf`](https://github.com/mastra-ai/mastra/commit/42a42cf3132b9786feecbb8c13c583dce5b0e198), [`ae08bf0`](https://github.com/mastra-ai/mastra/commit/ae08bf0ebc6a4e4da992b711c4a389c32ba84cf4), [`21735a7`](https://github.com/mastra-ai/mastra/commit/21735a7ef306963554a69a89b44f06c3bcd85141), [`1d877b8`](https://github.com/mastra-ai/mastra/commit/1d877b8d7b536a251c1a7a18db7ddcf4f68d6f8b)]:
+   - @mastra/core@1.0.0-beta.7
+   - @mastra/otel-exporter@1.0.0-beta.3
+
  ## 1.0.0-beta.2
  
  ### Minor Changes
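To make the tags entry above concrete, here is a minimal, hedged sketch of constructing this package's `ArizeExporter` for the two destinations its config supports. The option behavior (spaceId/apiKey headers, the Arize AX default endpoint, Bearer auth for other collectors, `projectName` as an OpenInference resource attribute) comes from the `tracing.ts` source embedded in the source maps below; the Phoenix endpoint URL and environment-variable names are illustrative assumptions, not part of the package.

```typescript
import { ArizeExporter } from '@mastra/arize';

// Arize AX: spaceId/apiKey are sent as `space_id` / `api_key` headers and the
// endpoint defaults to https://otlp.arize.com/v1/traces.
export const arizeAxExporter = new ArizeExporter({
  spaceId: process.env.ARIZE_SPACE_ID, // assumed env var name
  apiKey: process.env.ARIZE_API_KEY,   // assumed env var name
  projectName: 'my-mastra-project',    // OpenInference project-name resource attribute
});

// Phoenix or any other OTLP collector: an explicit endpoint is required; apiKey,
// if set, is sent as an `Authorization: Bearer ...` header.
export const phoenixExporter = new ArizeExporter({
  endpoint: 'http://localhost:6006/v1/traces', // illustrative Phoenix endpoint
  apiKey: process.env.PHOENIX_API_KEY,
  projectName: 'my-mastra-project',
});
```

Tags passed via `tracingOptions.tags` (as in the usage snippet in the changelog) reach this exporter as a `mastra.tags` attribute on the root span and are re-emitted under the OpenInference `tag.tags` convention, as the `index.cjs` / `index.js` diffs below show.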
package/dist/index.cjs
CHANGED
@@ -5,155 +5,69 @@ var logger = require('@mastra/core/logger');
  var otelExporter = require('@mastra/otel-exporter');
  var openinferenceGenai = require('@arizeai/openinference-genai');
  var exporterTraceOtlpProto = require('@opentelemetry/exporter-trace-otlp-proto');
+ var incubating = require('@opentelemetry/semantic-conventions/incubating');
  
  // src/tracing.ts
- … [old lines 10–20 removed; content truncated in the package diff view]
- if (typeof parsedIO !== "object" || parsedIO == null || !("messages" in parsedIO) && !("text" in parsedIO)) {
- return inputOutputString;
- }
- if ("text" in parsedIO) {
- return JSON.stringify([
- {
- role: "assistant",
- parts: [{ type: "text", content: parsedIO.text }]
+ var MASTRA_GENERAL_PREFIX = "mastra.";
+ var MASTRA_METADATA_PREFIX = "mastra.metadata.";
+ function splitMastraAttributes(attributes) {
+ return Object.entries(attributes).reduce(
+ (acc, [key, value]) => {
+ if (key.startsWith(MASTRA_GENERAL_PREFIX)) {
+ if (key.startsWith(MASTRA_METADATA_PREFIX)) {
+ const strippedKey = key.slice(MASTRA_METADATA_PREFIX.length);
+ acc.mastraMetadata[strippedKey] = value;
+ } else {
+ acc.mastraOther[key] = value;
  }
- … [old lines 30–35 removed; content truncated in the package diff view]
- return m;
- }
- const role = m.role;
- let parts = [];
- if (Array.isArray(m.content)) {
- parts = m.content.map((c) => {
- switch (c.type) {
- case "text":
- return {
- type: "text",
- content: c.text
- };
- case "tool-call":
- return {
- type: "tool_call",
- id: c.toolCallId,
- name: c.toolName,
- arguments: JSON.stringify(c.input)
- };
- case "tool-result":
- return {
- type: "tool_call_response",
- id: c.toolCallId,
- name: c.toolName,
- response: JSON.stringify(c.output.value)
- };
- default:
- return c;
- }
- });
- } else {
- parts = [
- {
- type: "text",
- content: m.content
- }
- ];
- }
- return {
- role,
- parts
- };
- })
- );
+ }
+ return acc;
+ },
+ {
+ mastraMetadata: {},
+ mastraOther: {}
  }
- … [old lines 81–82 removed; content truncated in the package diff view]
- return inputOutputString;
- }
- };
- 
- // src/openInferenceOTLPExporter.ts
+ );
+ }
  var OpenInferenceOTLPTraceExporter = class extends exporterTraceOtlpProto.OTLPTraceExporter {
  export(spans, resultCallback) {
  const processedSpans = spans.map((span) => {
  const attributes = { ...span.attributes ?? {} };
  const mutableSpan = span;
- if (attributes["gen_ai.prompt"] && typeof attributes["gen_ai.prompt"] === "string") {
- attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.prompt"]);
- }
- if (attributes["gen_ai.completion"] && typeof attributes["gen_ai.completion"] === "string") {
- attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.completion"]);
- }
- const reservedPrefixes = [
- "gen_ai.",
- "llm.",
- "input.",
- "output.",
- "span.",
- "mastra",
- "agent.",
- "workflow.",
- "mcp.",
- "openinference.",
- "retrieval.",
- "reranker.",
- "embedding.",
- "document.",
- "tool",
- "error.",
- "http.",
- "db."
- ];
- const metadataEntries = {};
- const reservedExact = /* @__PURE__ */ new Set(["input", "output", "sessionId", "metadata"]);
- for (const [key, value] of Object.entries(attributes)) {
- const isReserved = reservedPrefixes.some((prefix) => key.startsWith(prefix)) || key === "threadId" || key === "userId" || key === openinferenceSemanticConventions.SemanticConventions.SESSION_ID || key === openinferenceSemanticConventions.SemanticConventions.USER_ID || reservedExact.has(key);
- if (!isReserved) {
- metadataEntries[key] = value;
- }
- }
- let metadataPayload;
- if (Object.keys(metadataEntries).length > 0) {
- try {
- metadataPayload = JSON.stringify(metadataEntries);
- attributes[openinferenceSemanticConventions.SemanticConventions.METADATA] = metadataPayload;
- } catch {
- }
- }
- const sessionId = typeof attributes["threadId"] === "string" ? attributes["threadId"] : void 0;
- const userId = typeof attributes["userId"] === "string" ? attributes["userId"] : void 0;
- if (sessionId) {
- attributes[openinferenceSemanticConventions.SemanticConventions.SESSION_ID] = sessionId;
- delete attributes["threadId"];
- }
- if (userId) {
- attributes[openinferenceSemanticConventions.SemanticConventions.USER_ID] = userId;
- delete attributes["userId"];
- }
+ const { mastraMetadata, mastraOther } = splitMastraAttributes(attributes);
  const processedAttributes = openinferenceGenai.convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);
  if (processedAttributes) {
- if (sessionId) {
- processedAttributes[openinferenceSemanticConventions.SemanticConventions.SESSION_ID] = sessionId;
+ const threadId = mastraMetadata["threadId"];
+ if (threadId) {
+ delete mastraMetadata["threadId"];
+ processedAttributes[openinferenceSemanticConventions.SESSION_ID] = threadId;
+ }
+ if (mastraOther["mastra.tags"]) {
+ processedAttributes[openinferenceSemanticConventions.TAG_TAGS] = mastraOther["mastra.tags"];
+ delete mastraOther["mastra.tags"];
  }
+ const userId = mastraMetadata["userId"];
  if (userId) {
- processedAttributes[openinferenceSemanticConventions.SemanticConventions.USER_ID] = userId;
+ delete mastraMetadata["userId"];
+ processedAttributes[openinferenceSemanticConventions.USER_ID] = userId;
+ }
+ if (Object.keys(mastraMetadata).length > 0) {
+ try {
+ processedAttributes[openinferenceSemanticConventions.METADATA] = JSON.stringify(mastraMetadata);
+ } catch {
+ }
+ }
+ const inputMessages = attributes[incubating.ATTR_GEN_AI_INPUT_MESSAGES];
+ if (inputMessages) {
+ processedAttributes[openinferenceSemanticConventions.INPUT_MIME_TYPE] = "application/json";
+ processedAttributes[openinferenceSemanticConventions.INPUT_VALUE] = inputMessages;
  }
- if (metadataPayload) {
- processedAttributes[openinferenceSemanticConventions.SemanticConventions.METADATA] = metadataPayload;
+ const outputMessages = attributes[incubating.ATTR_GEN_AI_OUTPUT_MESSAGES];
+ if (outputMessages) {
+ processedAttributes[openinferenceSemanticConventions.OUTPUT_MIME_TYPE] = "application/json";
+ processedAttributes[openinferenceSemanticConventions.OUTPUT_VALUE] = outputMessages;
  }
- mutableSpan.attributes = processedAttributes;
+ mutableSpan.attributes = { ...processedAttributes, ...mastraOther };
  }
  return mutableSpan;
  });
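For readers skimming the compiled diff above, the following standalone TypeScript restatement of the new `splitMastraAttributes` helper, together with a worked example, shows how a span's `mastra.*` attributes are partitioned before the OpenInference conversion. The helper body mirrors the compiled code above; the sample attribute keys and values are invented for illustration.

```typescript
type AttributeMap = Record<string, unknown>;

const MASTRA_GENERAL_PREFIX = 'mastra.';
const MASTRA_METADATA_PREFIX = 'mastra.metadata.';

function splitMastraAttributes(attributes: AttributeMap): {
  mastraMetadata: AttributeMap;
  mastraOther: AttributeMap;
} {
  return Object.entries(attributes).reduce(
    (acc, [key, value]) => {
      if (key.startsWith(MASTRA_GENERAL_PREFIX)) {
        if (key.startsWith(MASTRA_METADATA_PREFIX)) {
          // "mastra.metadata.*" keys are collected with the prefix stripped
          acc.mastraMetadata[key.slice(MASTRA_METADATA_PREFIX.length)] = value;
        } else {
          // other "mastra.*" keys (e.g. mastra.tags) keep their full name
          acc.mastraOther[key] = value;
        }
      }
      // non-mastra keys are left in `attributes` for the OpenInference conversion step
      return acc;
    },
    { mastraMetadata: {} as AttributeMap, mastraOther: {} as AttributeMap },
  );
}

// Example: given these span attributes...
const { mastraMetadata, mastraOther } = splitMastraAttributes({
  'mastra.metadata.threadId': 'thread-123',
  'mastra.metadata.userId': 'user-42',
  'mastra.metadata.experiment': 'v2',
  'mastra.tags': JSON.stringify(['production', 'experiment-v2']),
  'gen_ai.operation.name': 'chat',
});
// ...mastraMetadata is { threadId: 'thread-123', userId: 'user-42', experiment: 'v2' }
// and mastraOther is { 'mastra.tags': '["production","experiment-v2"]' }.
// The exporter then moves threadId into SESSION_ID, userId into USER_ID,
// mastra.tags into TAG_TAGS, and JSON-stringifies the remaining metadata into METADATA.
console.log(mastraMetadata, mastraOther);
```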
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
- [old source map, single line:]
{"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":["OTLPTraceExporter","SemanticConventions","convertGenAISpanAttributesToOpenInferenceSpanAttributes","OtelExporter","logger","ConsoleLogger","SEMRESATTRS_PROJECT_NAME"],"mappings":";;;;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;ACzJO,IAAM,8BAAA,GAAN,cAA6CA,wCAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AACvC,MAAA,MAAM,aAAa,EAAE,GAAI,IAAA,CAAK,UAAA,IAAc,EAAC,EAAG;AAChD,MAAA,MAAM,WAAA,GAAc,IAAA;AAEpB,MAAA,IAAI,WAAW,eAAe,CAAA,IAAK,OAAO,UAAA,CAAW,eAAe,MAAM,QAAA,EAAU;AAClF,QAAA,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,eAAe,CAAC,CAAA;AAAA,MACxG;AAEA,MAAA,IAAI,WAAW,mBAAmB,CAAA,IAAK,OAAO,UAAA,CAAW,mBAAmB,MAAM,QAAA,EAAU;AAC1F,QAAA,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,mBAAmB,CAAC,CAAA;AAAA,MAC7G;AAGA,MAAA,MAAM,gBAAA,GAAmB;AAAA,QACvB,SAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,SAAA;AAAA,QACA,OAAA;AAAA,QACA,QAAA;AAAA,QACA,QAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,gBAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QAC
A,OAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,kBAA2C,EAAC;AAClD,MAAA,MAAM,aAAA,uBAAoB,GAAA,CAAY,CAAC,SAAS,QAAA,EAAU,WAAA,EAAa,UAAU,CAAC,CAAA;AAClF,MAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,EAAG;AACrD,QAAA,MAAM,UAAA,GACJ,iBAAiB,IAAA,CAAK,CAAA,MAAA,KAAU,IAAI,UAAA,CAAW,MAAM,CAAC,CAAA,IACtD,GAAA,KAAQ,cACR,GAAA,KAAQ,QAAA,IACR,QAAQC,oDAAA,CAAoB,UAAA,IAC5B,QAAQA,oDAAA,CAAoB,OAAA,IAC5B,aAAA,CAAc,GAAA,CAAI,GAAG,CAAA;AACvB,QAAA,IAAI,CAAC,UAAA,EAAY;AACf,UAAA,eAAA,CAAgB,GAAG,CAAA,GAAI,KAAA;AAAA,QACzB;AAAA,MACF;AAEA,MAAA,IAAI,eAAA;AACJ,MAAA,IAAI,MAAA,CAAO,IAAA,CAAK,eAAe,CAAA,CAAE,SAAS,CAAA,EAAG;AAC3C,QAAA,IAAI;AACF,UAAA,eAAA,GAAkB,IAAA,CAAK,UAAU,eAAe,CAAA;AAChD,UAAA,UAAA,CAAWA,oDAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QAC7C,CAAA,CAAA,MAAQ;AAAA,QAER;AAAA,MACF;AAEA,MAAA,MAAM,SAAA,GAAY,OAAO,UAAA,CAAW,UAAU,MAAM,QAAA,GAAY,UAAA,CAAW,UAAU,CAAA,GAAe,MAAA;AACpG,MAAA,MAAM,MAAA,GAAS,OAAO,UAAA,CAAW,QAAQ,MAAM,QAAA,GAAY,UAAA,CAAW,QAAQ,CAAA,GAAe,MAAA;AAE7F,MAAA,IAAI,SAAA,EAAW;AACb,QAAA,UAAA,CAAWA,oDAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAC7C,QAAA,OAAO,WAAW,UAAU,CAAA;AAAA,MAC9B;AAEA,MAAA,IAAI,MAAA,EAAQ;AACV,QAAA,UAAA,CAAWA,oDAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAC1C,QAAA,OAAO,WAAW,QAAQ,CAAA;AAAA,MAC5B;AAEA,MAAA,MAAM,mBAAA,GAAsBC,2EAAwD,UAAU,CAAA;AAE9F,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAA,IAAI,SAAA,EAAW;AACb,UAAA,mBAAA,CAAoBD,oDAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAAA,QACxD;AACA,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,mBAAA,CAAoBA,oDAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAAA,QACrD;AACA,QAAA,IAAI,eAAA,EAAiB;AACnB,UAAA,mBAAA,CAAoBA,oDAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QACtD;AACA,QAAA,WAAA,CAAY,UAAA,GAAa,mBAAA;AAAA,MAC3B;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;AC9FA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4BE,yBAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAMC,QAAA,GAAS,IAAIC,oBAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAAD,QAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAACE,yDAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.cjs","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; 
toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n 
default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport { SemanticConventions } from '@arizeai/openinference-semantic-conventions';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n const attributes = { ...(span.attributes ?? {}) };\n const mutableSpan = span as Mutable<ReadableSpan>;\n\n if (attributes['gen_ai.prompt'] && typeof attributes['gen_ai.prompt'] === 'string') {\n attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.prompt']);\n }\n\n if (attributes['gen_ai.completion'] && typeof attributes['gen_ai.completion'] === 'string') {\n attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.completion']);\n }\n\n // Gather custom attributes into OpenInference metadata (flat best-effort)\n const reservedPrefixes = [\n 'gen_ai.',\n 'llm.',\n 'input.',\n 'output.',\n 'span.',\n 'mastra',\n 'agent.',\n 'workflow.',\n 'mcp.',\n 'openinference.',\n 'retrieval.',\n 'reranker.',\n 'embedding.',\n 'document.',\n 'tool',\n 'error.',\n 'http.',\n 'db.',\n ];\n const metadataEntries: Record<string, unknown> = {};\n const reservedExact = new Set<string>(['input', 'output', 'sessionId', 'metadata']);\n for (const [key, value] of Object.entries(attributes)) {\n const isReserved =\n reservedPrefixes.some(prefix => key.startsWith(prefix)) ||\n key === 'threadId' ||\n key === 'userId' ||\n key === SemanticConventions.SESSION_ID ||\n key === SemanticConventions.USER_ID ||\n reservedExact.has(key);\n if (!isReserved) {\n metadataEntries[key] = value;\n }\n }\n\n let metadataPayload: string | undefined;\n if (Object.keys(metadataEntries).length > 0) {\n try {\n metadataPayload = JSON.stringify(metadataEntries);\n attributes[SemanticConventions.METADATA] = metadataPayload;\n } catch {\n // best-effort only\n }\n }\n\n const sessionId = typeof attributes['threadId'] === 'string' ? (attributes['threadId'] as string) : undefined;\n const userId = typeof attributes['userId'] === 'string' ? 
(attributes['userId'] as string) : undefined;\n\n if (sessionId) {\n attributes[SemanticConventions.SESSION_ID] = sessionId;\n delete attributes['threadId'];\n }\n\n if (userId) {\n attributes[SemanticConventions.USER_ID] = userId;\n delete attributes['userId'];\n }\n\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);\n\n if (processedAttributes) {\n if (sessionId) {\n processedAttributes[SemanticConventions.SESSION_ID] = sessionId;\n }\n if (userId) {\n processedAttributes[SemanticConventions.USER_ID] = userId;\n }\n if (metadataPayload) {\n processedAttributes[SemanticConventions.METADATA] = metadataPayload;\n }\n mutableSpan.attributes = processedAttributes;\n }\n\n return mutableSpan;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
+ [new source map, single line:]
{"version":3,"sources":["../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":["OTLPTraceExporter","convertGenAISpanAttributesToOpenInferenceSpanAttributes","SESSION_ID","TAG_TAGS","USER_ID","METADATA","ATTR_GEN_AI_INPUT_MESSAGES","INPUT_MIME_TYPE","INPUT_VALUE","ATTR_GEN_AI_OUTPUT_MESSAGES","OUTPUT_MIME_TYPE","OUTPUT_VALUE","OtelExporter","logger","ConsoleLogger","SEMRESATTRS_PROJECT_NAME"],"mappings":";;;;;;;;;;AAoBA,IAAM,qBAAA,GAAwB,SAAA;AAC9B,IAAM,sBAAA,GAAyB,kBAAA;AAS/B,SAAS,sBAAsB,UAAA,EAG7B;AACA,EAAA,OAAO,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,CAAE,MAAA;AAAA,IAChC,CAAC,GAAA,EAAK,CAAC,GAAA,EAAK,KAAK,CAAA,KAAM;AACrB,MAAA,IAAI,GAAA,CAAI,UAAA,CAAW,qBAAqB,CAAA,EAAG;AACzC,QAAA,IAAI,GAAA,CAAI,UAAA,CAAW,sBAAsB,CAAA,EAAG;AAC1C,UAAA,MAAM,WAAA,GAAc,GAAA,CAAI,KAAA,CAAM,sBAAA,CAAuB,MAAM,CAAA;AAC3D,UAAA,GAAA,CAAI,cAAA,CAAe,WAAW,CAAA,GAAI,KAAA;AAAA,QACpC,CAAA,MAAO;AACL,UAAA,GAAA,CAAI,WAAA,CAAY,GAAG,CAAA,GAAI,KAAA;AAAA,QACzB;AAAA,MACF;AACA,MAAA,OAAO,GAAA;AAAA,IACT,CAAA;AAAA,IACA;AAAA,MACE,gBAAgB,EAAC;AAAA,MACjB,aAAa;AAAC;AAChB,GACF;AACF;AAEO,IAAM,8BAAA,GAAN,cAA6CA,wCAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AACvC,MAAA,MAAM,aAAa,EAAE,GAAI,IAAA,CAAK,UAAA,IAAc,EAAC,EAAG;AAChD,MAAA,MAAM,WAAA,GAAc,IAAA;AAEpB,MAAA,MAAM,EAAE,cAAA,EAAgB,WAAA,EAAY,GAAI,sBAAsB,UAAU,CAAA;AACxE,MAAA,MAAM,mBAAA,GAAsBC,2EAAwD,UAAU,CAAA;AAG9F,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAA,MAAM,QAAA,GAAW,eAAe,UAAU,CAAA;AAC1C,QAAA,IAAI,QAAA,EAAU;AACZ,UAAA,OAAO,eAAe,UAAU,CAAA;AAChC,UAAA,mBAAA,CAAoBC,2CAAU,CAAA,GAAI,QAAA;AAAA,QACpC;AAGA,QAAA,IAAI,WAAA,CAAY,aAAa,CAAA,EAAG;AAC9B,UAAA,mBAAA,CAAoBC,yCAAQ,CAAA,GAAI,WAAA,CAAY,aAAa,CAAA;AACzD,UAAA,OAAO,YAAY,aAAa,CAAA;AAAA,QAClC;AAEA,QAAA,MAAM,MAAA,GAAS,eAAe,QAAQ,CAAA;AACtC,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,OAAO,eAAe,QAAQ,CAAA;AAC9B,UAAA,mBAAA,CAAoBC,wCAAO,CAAA,GAAI,MAAA;AAAA,QACjC;AAGA,QAAA,IAAI,MAAA,CAAO,IAAA,CAAK,cAAc,CAAA,CAAE,SAAS,CAAA,EAAG;AAC1C,UAAA,IAAI;AACF,YAAA,mBAAA,CAAoBC,yCAAQ,CAAA,GAAI,IAAA,CAAK,SAAA,CAAU,cAAc,CAAA;AAAA,UAC/D,CAAA,CAAA,MAAQ;AAAA,UAER;AAAA,QACF;AAEA,QAAA,MAAM,aAAA,GAAgB,WAAWC,qCAA0B,CAAA;AAC3D,QAAA,IAAI,aAAA,EAAe;AACjB,UAAA,mBAAA,CAAoBC,gDAAe,CAAA,GAAI,kBAAA;AACvC,UAAA,mBAAA,CAAoBC,4CAAW,CAAA,GAAI,aAAA;AAAA,QACrC;AACA,QAAA,MAAM,cAAA,GAAiB,WAAWC,sCAA2B,CAAA;AAC7D,QAAA,IAAI,cAAA,EAAgB;AAClB,UAAA,mBAAA,CAAoBC,iDAAgB,CAAA,GAAI,kBAAA;AACxC,UAAA,mBAAA,CAAoBC,6CAAY,CAAA,GAAI,cAAA;AAAA,QACtC;AAEA,QAAA,WAAA,CAAY,UAAA,GAAa,EAAE,GAAG,mBAAA,EAAqB,GAAG,WAAA,EAAY;AAAA,MACpE;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;ACvGA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4BC,yBAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAMC,QAAA,GAAS,IAAIC,oBAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAAD,QAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAACE,yDAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG
,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.cjs","sourcesContent":["import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport {\n INPUT_MIME_TYPE,\n INPUT_VALUE,\n METADATA,\n OUTPUT_MIME_TYPE,\n OUTPUT_VALUE,\n SESSION_ID,\n TAG_TAGS,\n USER_ID,\n} from '@arizeai/openinference-semantic-conventions';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport {\n ATTR_GEN_AI_INPUT_MESSAGES,\n ATTR_GEN_AI_OUTPUT_MESSAGES,\n} from '@opentelemetry/semantic-conventions/incubating';\n\nconst MASTRA_GENERAL_PREFIX = 'mastra.';\nconst MASTRA_METADATA_PREFIX = 'mastra.metadata.';\n\n/**\n * Splits Mastra span attributes into two groups:\n * - `metadata`: keys starting with \"mastra.metadata.\" (prefix removed)\n * - `other`: all remaining keys starting with \"mastra.\"\n *\n * Any attributes not starting with \"mastra.\" are ignored entirely.\n */\nfunction splitMastraAttributes(attributes: Record<string, any>): {\n mastraMetadata: Record<string, any>;\n mastraOther: Record<string, any>;\n} {\n return Object.entries(attributes).reduce(\n (acc, [key, value]) => {\n if (key.startsWith(MASTRA_GENERAL_PREFIX)) {\n if (key.startsWith(MASTRA_METADATA_PREFIX)) {\n const strippedKey = key.slice(MASTRA_METADATA_PREFIX.length);\n acc.mastraMetadata[strippedKey] = value;\n } else {\n acc.mastraOther[key] = value;\n }\n }\n return acc;\n },\n {\n mastraMetadata: {} as Record<string, any>,\n mastraOther: {} as Record<string, any>,\n },\n );\n}\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n const attributes = { ...(span.attributes ?? 
{}) };\n const mutableSpan = span as Mutable<ReadableSpan>;\n\n const { mastraMetadata, mastraOther } = splitMastraAttributes(attributes);\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);\n\n // only add processed attributes if conversion was successful\n if (processedAttributes) {\n const threadId = mastraMetadata['threadId'];\n if (threadId) {\n delete mastraMetadata['threadId'];\n processedAttributes[SESSION_ID] = threadId;\n }\n\n // Map mastra.tags to OpenInference native tag.tags convention (tags are only on root spans)\n if (mastraOther['mastra.tags']) {\n processedAttributes[TAG_TAGS] = mastraOther['mastra.tags'];\n delete mastraOther['mastra.tags'];\n }\n\n const userId = mastraMetadata['userId'];\n if (userId) {\n delete mastraMetadata['userId'];\n processedAttributes[USER_ID] = userId;\n }\n\n // Gather custom metadata into OpenInference metadata (flat best-effort)\n if (Object.keys(mastraMetadata).length > 0) {\n try {\n processedAttributes[METADATA] = JSON.stringify(mastraMetadata);\n } catch {\n // best-effort only\n }\n }\n\n const inputMessages = attributes[ATTR_GEN_AI_INPUT_MESSAGES];\n if (inputMessages) {\n processedAttributes[INPUT_MIME_TYPE] = 'application/json';\n processedAttributes[INPUT_VALUE] = inputMessages;\n }\n const outputMessages = attributes[ATTR_GEN_AI_OUTPUT_MESSAGES];\n if (outputMessages) {\n processedAttributes[OUTPUT_MIME_TYPE] = 'application/json';\n processedAttributes[OUTPUT_VALUE] = outputMessages;\n }\n\n mutableSpan.attributes = { ...processedAttributes, ...mastraOther };\n }\n\n return mutableSpan;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? 
'';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
package/dist/index.js
CHANGED
@@ -1,157 +1,71 @@
- import { SEMRESATTRS_PROJECT_NAME,
+ import { SEMRESATTRS_PROJECT_NAME, SESSION_ID, TAG_TAGS, USER_ID, METADATA, INPUT_MIME_TYPE, INPUT_VALUE, OUTPUT_MIME_TYPE, OUTPUT_VALUE } from '@arizeai/openinference-semantic-conventions';
  import { ConsoleLogger } from '@mastra/core/logger';
  import { OtelExporter } from '@mastra/otel-exporter';
  import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';
  import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
+ import { ATTR_GEN_AI_INPUT_MESSAGES, ATTR_GEN_AI_OUTPUT_MESSAGES } from '@opentelemetry/semantic-conventions/incubating';
  
  // src/tracing.ts
- … [old lines 8–18 removed; content truncated in the package diff view]
- if (typeof parsedIO !== "object" || parsedIO == null || !("messages" in parsedIO) && !("text" in parsedIO)) {
- return inputOutputString;
- }
- if ("text" in parsedIO) {
- return JSON.stringify([
- {
- role: "assistant",
- parts: [{ type: "text", content: parsedIO.text }]
+ var MASTRA_GENERAL_PREFIX = "mastra.";
+ var MASTRA_METADATA_PREFIX = "mastra.metadata.";
+ function splitMastraAttributes(attributes) {
+ return Object.entries(attributes).reduce(
+ (acc, [key, value]) => {
+ if (key.startsWith(MASTRA_GENERAL_PREFIX)) {
+ if (key.startsWith(MASTRA_METADATA_PREFIX)) {
+ const strippedKey = key.slice(MASTRA_METADATA_PREFIX.length);
+ acc.mastraMetadata[strippedKey] = value;
+ } else {
+ acc.mastraOther[key] = value;
  }
- … [old lines 28–33 removed; content truncated in the package diff view]
- return m;
- }
- const role = m.role;
- let parts = [];
- if (Array.isArray(m.content)) {
- parts = m.content.map((c) => {
- switch (c.type) {
- case "text":
- return {
- type: "text",
- content: c.text
- };
- case "tool-call":
- return {
- type: "tool_call",
- id: c.toolCallId,
- name: c.toolName,
- arguments: JSON.stringify(c.input)
- };
- case "tool-result":
- return {
- type: "tool_call_response",
- id: c.toolCallId,
- name: c.toolName,
- response: JSON.stringify(c.output.value)
- };
- default:
- return c;
- }
- });
- } else {
- parts = [
- {
- type: "text",
- content: m.content
- }
- ];
- }
- return {
- role,
- parts
- };
- })
- );
+ }
+ return acc;
+ },
+ {
+ mastraMetadata: {},
+ mastraOther: {}
  }
- … [old lines 79–80 removed; content truncated in the package diff view]
- return inputOutputString;
- }
- };
- 
- // src/openInferenceOTLPExporter.ts
+ );
+ }
  var OpenInferenceOTLPTraceExporter = class extends OTLPTraceExporter {
  export(spans, resultCallback) {
  const processedSpans = spans.map((span) => {
  const attributes = { ...span.attributes ?? {} };
  const mutableSpan = span;
- if (attributes["gen_ai.prompt"] && typeof attributes["gen_ai.prompt"] === "string") {
- attributes["gen_ai.input.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.prompt"]);
- }
- if (attributes["gen_ai.completion"] && typeof attributes["gen_ai.completion"] === "string") {
- attributes["gen_ai.output.messages"] = convertMastraMessagesToGenAIMessages(attributes["gen_ai.completion"]);
- }
- const reservedPrefixes = [
- "gen_ai.",
- "llm.",
- "input.",
- "output.",
- "span.",
- "mastra",
- "agent.",
- "workflow.",
- "mcp.",
- "openinference.",
- "retrieval.",
- "reranker.",
- "embedding.",
- "document.",
- "tool",
- "error.",
- "http.",
- "db."
- ];
- const metadataEntries = {};
- const reservedExact = /* @__PURE__ */ new Set(["input", "output", "sessionId", "metadata"]);
- for (const [key, value] of Object.entries(attributes)) {
- const isReserved = reservedPrefixes.some((prefix) => key.startsWith(prefix)) || key === "threadId" || key === "userId" || key === SemanticConventions.SESSION_ID || key === SemanticConventions.USER_ID || reservedExact.has(key);
- if (!isReserved) {
- metadataEntries[key] = value;
- }
- }
- let metadataPayload;
- if (Object.keys(metadataEntries).length > 0) {
- try {
- metadataPayload = JSON.stringify(metadataEntries);
- attributes[SemanticConventions.METADATA] = metadataPayload;
- } catch {
- }
- }
- const sessionId = typeof attributes["threadId"] === "string" ? attributes["threadId"] : void 0;
- const userId = typeof attributes["userId"] === "string" ? attributes["userId"] : void 0;
- if (sessionId) {
- attributes[SemanticConventions.SESSION_ID] = sessionId;
- delete attributes["threadId"];
- }
- if (userId) {
- attributes[SemanticConventions.USER_ID] = userId;
- delete attributes["userId"];
- }
+ const { mastraMetadata, mastraOther } = splitMastraAttributes(attributes);
  const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);
  if (processedAttributes) {
- if (sessionId) {
- processedAttributes[SemanticConventions.SESSION_ID] = sessionId;
+ const threadId = mastraMetadata["threadId"];
+ if (threadId) {
+ delete mastraMetadata["threadId"];
+ processedAttributes[SESSION_ID] = threadId;
+ }
+ if (mastraOther["mastra.tags"]) {
+ processedAttributes[TAG_TAGS] = mastraOther["mastra.tags"];
+ delete mastraOther["mastra.tags"];
  }
+ const userId = mastraMetadata["userId"];
  if (userId) {
- processedAttributes[SemanticConventions.USER_ID] = userId;
+ delete mastraMetadata["userId"];
+ processedAttributes[USER_ID] = userId;
+ }
+ if (Object.keys(mastraMetadata).length > 0) {
+ try {
+ processedAttributes[METADATA] = JSON.stringify(mastraMetadata);
+ } catch {
+ }
+ }
+ const inputMessages = attributes[ATTR_GEN_AI_INPUT_MESSAGES];
+ if (inputMessages) {
+ processedAttributes[INPUT_MIME_TYPE] = "application/json";
+ processedAttributes[INPUT_VALUE] = inputMessages;
  }
- if (metadataPayload) {
- processedAttributes[SemanticConventions.METADATA] = metadataPayload;
+ const outputMessages = attributes[ATTR_GEN_AI_OUTPUT_MESSAGES];
+ if (outputMessages) {
+ processedAttributes[OUTPUT_MIME_TYPE] = "application/json";
+ processedAttributes[OUTPUT_VALUE] = outputMessages;
  }
- mutableSpan.attributes = processedAttributes;
+ mutableSpan.attributes = { ...processedAttributes, ...mastraOther };
  }
  return mutableSpan;
  });
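Since the ESM build above mirrors the CJS build, one before/after sketch covers both: given the attributes Mastra places on a root span, roughly these OpenInference attributes come out of the rewritten `export()`. The constant names are the real imports used by the new code; the sample span attributes, and the exact output of `convertGenAISpanAttributesToOpenInferenceSpanAttributes` (elided as a spread comment), are assumptions for illustration.

```typescript
import {
  INPUT_MIME_TYPE,
  INPUT_VALUE,
  METADATA,
  SESSION_ID,
  TAG_TAGS,
  USER_ID,
} from '@arizeai/openinference-semantic-conventions';

// Attributes as they might arrive on a Mastra root span (sample values):
const incoming = {
  'gen_ai.operation.name': 'chat',
  'gen_ai.input.messages': JSON.stringify([{ role: 'user', parts: [{ type: 'text', content: 'Hello' }] }]),
  'mastra.metadata.threadId': 'thread-123',
  'mastra.metadata.userId': 'user-42',
  'mastra.metadata.experiment': 'v2',
  'mastra.tags': JSON.stringify(['production', 'experiment-v2']),
};

// Roughly what the exporter writes back onto the span before calling super.export():
const outgoing = {
  // ...whatever convertGenAISpanAttributesToOpenInferenceSpanAttributes derived
  // from the gen_ai.* keys goes here, then:
  [SESSION_ID]: 'thread-123',                       // from mastra.metadata.threadId
  [USER_ID]: 'user-42',                             // from mastra.metadata.userId
  [TAG_TAGS]: incoming['mastra.tags'],              // JSON-stringified tag array
  [METADATA]: JSON.stringify({ experiment: 'v2' }), // remaining mastra.metadata.* keys
  [INPUT_MIME_TYPE]: 'application/json',
  [INPUT_VALUE]: incoming['gen_ai.input.messages'], // mirrored gen_ai.input.messages
};

console.log(outgoing);
```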
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- [old source map, single line:]
{"version":3,"sources":["../src/gen-ai.ts","../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":[],"mappings":";;;;;;;;;AAgDA,IAAM,mBAAA,GAAsB,CAAC,CAAA,KAAuC;AAClE,EAAA,OACE,OAAO,CAAA,KAAM,QAAA,IACb,CAAA,IAAK,IAAA,IACL,UAAU,CAAA,KACT,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,IAAA,KAAS,mBACzD,CAAA,CAAE,IAAA,KAAS,MAAA,IAAU,MAAA,IAAU,KAC9B,CAAA,CAAE,IAAA,KAAS,WAAA,IAAe,YAAA,IAAgB,KAAK,UAAA,IAAc,CAAA,IAAK,OAAA,IAAW,CAAA,IAC7E,EAAE,IAAA,KAAS,aAAA,IAAiB,gBAAgB,CAAA,IAAK,UAAA,IAAc,KAAK,QAAA,IAAY,CAAA,CAAA;AAEvF,CAAA;AAEA,IAAM,eAAA,GAAkB,CAAC,CAAA,KAAmC;AAC1D,EAAA,OACE,OAAO,MAAM,QAAA,IACb,CAAA,IAAK,QACL,MAAA,IAAU,CAAA,IACV,aAAa,CAAA,KACZ,OAAO,EAAE,OAAA,KAAY,QAAA,IAAa,MAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,IAAK,CAAA,CAAE,OAAA,CAAQ,KAAA,CAAM,mBAAmB,CAAA,CAAA;AAEtG,CAAA;AAoBO,IAAM,oCAAA,GAAuC,CAAC,iBAAA,KAAsC;AACzF,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,KAAA,CAAM,iBAAiB,CAAA;AAC7C,IAAA,IAAI,OAAO,QAAA,KAAa,QAAA,IAAY,QAAA,IAAY,IAAA,IAAS,EAAE,UAAA,IAAc,QAAA,CAAA,IAAa,EAAE,MAAA,IAAU,QAAA,CAAA,EAAY;AAE5G,MAAA,OAAO,iBAAA;AAAA,IACT;AAGA,IAAA,IAAI,UAAU,QAAA,EAAU;AACtB,MAAA,OAAO,KAAK,SAAA,CAAU;AAAA,QACpB;AAAA,UACE,IAAA,EAAM,WAAA;AAAA,UACN,KAAA,EAAO,CAAC,EAAE,IAAA,EAAM,QAAQ,OAAA,EAAS,QAAA,CAAS,MAAgB;AAAA;AAC5D,OACD,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,QAAA,CAAS,QAAQ,CAAA,EAAG;AACpC,MAAA,OAAO,IAAA,CAAK,SAAA;AAAA,QACT,QAAA,CAAS,QAAA,CAAuB,GAAA,CAAI,CAAA,CAAA,KAAK;AACxC,UAAA,IAAI,CAAC,eAAA,CAAgB,CAAC,CAAA,EAAG;AACvB,YAAA,OAAO,CAAA;AAAA,UACT;AACA,UAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AACf,UAAA,IAAI,QAA4B,EAAC;AACjC,UAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,OAAO,CAAA,EAAG;AAC5B,YAAA,KAAA,GAAQ,CAAA,CAAE,OAAA,CAAQ,GAAA,CAAI,CAAA,CAAA,KAAK;AACzB,cAAA,QAAQ,EAAE,IAAA;AAAM,gBACd,KAAK,MAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,MAAA;AAAA,oBACN,SAAS,CAAA,CAAE;AAAA,mBACb;AAAA,gBACF,KAAK,WAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,WAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,SAAA,EAAW,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,KAAK;AAAA,mBACnC;AAAA,gBACF,KAAK,aAAA;AACH,kBAAA,OAAO;AAAA,oBACL,IAAA,EAAM,oBAAA;AAAA,oBACN,IAAI,CAAA,CAAE,UAAA;AAAA,oBACN,MAAM,CAAA,CAAE,QAAA;AAAA,oBACR,QAAA,EAAU,IAAA,CAAK,SAAA,CAAU,CAAA,CAAE,OAAO,KAAK;AAAA,mBACzC;AAAA,gBACF;AACE,kBAAA,OAAO,CAAA;AAAA;AACX,YACF,CAAC,CAAA;AAAA,UACH,CAAA,MAAO;AACL,YAAA,KAAA,GAAQ;AAAA,cACN;AAAA,gBACE,IAAA,EAAM,MAAA;AAAA,gBACN,SAAS,CAAA,CAAE;AAAA;AACb,aACF;AAAA,UACF;AACA,UAAA,OAAO;AAAA,YACL,IAAA;AAAA,YACA;AAAA,WACF;AAAA,QACF,CAAC;AAAA,OACH;AAAA,IACF;AAEA,IAAA,OAAO,iBAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,iBAAA;AAAA,EACT;AACF,CAAA;;;ACzJO,IAAM,8BAAA,GAAN,cAA6C,iBAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AACvC,MAAA,MAAM,aAAa,EAAE,GAAI,IAAA,CAAK,UAAA,IAAc,EAAC,EAAG;AAChD,MAAA,MAAM,WAAA,GAAc,IAAA;AAEpB,MAAA,IAAI,WAAW,eAAe,CAAA,IAAK,OAAO,UAAA,CAAW,eAAe,MAAM,QAAA,EAAU;AAClF,QAAA,UAAA,CAAW,uBAAuB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,eAAe,CAAC,CAAA;AAAA,MACxG;AAEA,MAAA,IAAI,WAAW,mBAAmB,CAAA,IAAK,OAAO,UAAA,CAAW,mBAAmB,MAAM,QAAA,EAAU;AAC1F,QAAA,UAAA,CAAW,wBAAwB,CAAA,GAAI,oCAAA,CAAqC,UAAA,CAAW,mBAAmB,CAAC,CAAA;AAAA,MAC7G;AAGA,MAAA,MAAM,gBAAA,GAAmB;AAAA,QACvB,SAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,SAAA;AAAA,QACA,OAAA;AAAA,QACA,QAAA;AAAA,QACA,QAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,gBAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,YAAA;AAAA,QACA,WAAA;AAAA,QACA,MAAA;AAAA,QACA,QAAA;AAAA,QACA,OAAA;AAAA,QACA;AAAA,OACF;AACA,MAAA,MAAM,kBAA2C,EAAC;AAClD,MAAA,MAAM,aAAA,uBAAoB,GAAA,CAAY,CAAC,SAAS,QAAA,EAAU,WAAA,EAAa,UAAU,CAAC,CAAA;AAClF,MAAA,KAAA,MAAW,CAAC,GAAA,E
AAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,EAAG;AACrD,QAAA,MAAM,UAAA,GACJ,iBAAiB,IAAA,CAAK,CAAA,MAAA,KAAU,IAAI,UAAA,CAAW,MAAM,CAAC,CAAA,IACtD,GAAA,KAAQ,cACR,GAAA,KAAQ,QAAA,IACR,QAAQ,mBAAA,CAAoB,UAAA,IAC5B,QAAQ,mBAAA,CAAoB,OAAA,IAC5B,aAAA,CAAc,GAAA,CAAI,GAAG,CAAA;AACvB,QAAA,IAAI,CAAC,UAAA,EAAY;AACf,UAAA,eAAA,CAAgB,GAAG,CAAA,GAAI,KAAA;AAAA,QACzB;AAAA,MACF;AAEA,MAAA,IAAI,eAAA;AACJ,MAAA,IAAI,MAAA,CAAO,IAAA,CAAK,eAAe,CAAA,CAAE,SAAS,CAAA,EAAG;AAC3C,QAAA,IAAI;AACF,UAAA,eAAA,GAAkB,IAAA,CAAK,UAAU,eAAe,CAAA;AAChD,UAAA,UAAA,CAAW,mBAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QAC7C,CAAA,CAAA,MAAQ;AAAA,QAER;AAAA,MACF;AAEA,MAAA,MAAM,SAAA,GAAY,OAAO,UAAA,CAAW,UAAU,MAAM,QAAA,GAAY,UAAA,CAAW,UAAU,CAAA,GAAe,MAAA;AACpG,MAAA,MAAM,MAAA,GAAS,OAAO,UAAA,CAAW,QAAQ,MAAM,QAAA,GAAY,UAAA,CAAW,QAAQ,CAAA,GAAe,MAAA;AAE7F,MAAA,IAAI,SAAA,EAAW;AACb,QAAA,UAAA,CAAW,mBAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAC7C,QAAA,OAAO,WAAW,UAAU,CAAA;AAAA,MAC9B;AAEA,MAAA,IAAI,MAAA,EAAQ;AACV,QAAA,UAAA,CAAW,mBAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAC1C,QAAA,OAAO,WAAW,QAAQ,CAAA;AAAA,MAC5B;AAEA,MAAA,MAAM,mBAAA,GAAsB,wDAAwD,UAAU,CAAA;AAE9F,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAA,IAAI,SAAA,EAAW;AACb,UAAA,mBAAA,CAAoB,mBAAA,CAAoB,UAAU,CAAA,GAAI,SAAA;AAAA,QACxD;AACA,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,mBAAA,CAAoB,mBAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAAA,QACrD;AACA,QAAA,IAAI,eAAA,EAAiB;AACnB,UAAA,mBAAA,CAAoB,mBAAA,CAAoB,QAAQ,CAAA,GAAI,eAAA;AAAA,QACtD;AACA,QAAA,WAAA,CAAY,UAAA,GAAa,mBAAA;AAAA,MAC3B;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;AC9FA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4B,YAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAM,MAAA,GAAS,IAAI,aAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAAC,wBAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.js","sourcesContent":["/**\n * Type represenation of a gen_ai chat message part\n */\ntype GenAIMessagePart =\n | {\n type: 'text';\n content: string;\n }\n | {\n type: 'tool_call';\n id: string;\n name: string;\n arguments: string;\n }\n | {\n type: 'tool_call_response';\n id: string;\n name: string;\n response: string;\n };\n\n/**\n * Type representation of a gen_ai chat message\n */\ntype GenAIMessage = {\n role: string;\n parts: GenAIMessagePart[];\n};\n\n/**\n * Assumed type representation of a Mastra message content type\n */\ntype MastraMessagePart =\n | {\n type: 'text';\n text: string;\n }\n | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown }\n | { type: 'tool-result'; toolCallId: string; toolName: string; output: { value: unknown } };\n\n/**\n * Assumed type representation of a Mastra message\n */\ntype MastraMessage = {\n role: string;\n content: 
MastraMessagePart[];\n};\n\nconst isMastraMessagePart = (p: unknown): p is MastraMessagePart => {\n return (\n typeof p === 'object' &&\n p != null &&\n 'type' in p &&\n (p.type === 'text' || p.type === 'tool-call' || p.type === 'tool-result') &&\n ((p.type === 'text' && 'text' in p) ||\n (p.type === 'tool-call' && 'toolCallId' in p && 'toolName' in p && 'input' in p) ||\n (p.type === 'tool-result' && 'toolCallId' in p && 'toolName' in p && 'output' in p))\n );\n};\n\nconst isMastraMessage = (m: unknown): m is MastraMessage => {\n return (\n typeof m === 'object' &&\n m != null &&\n 'role' in m &&\n 'content' in m &&\n (typeof m.content === 'string' || (Array.isArray(m.content) && m.content.every(isMastraMessagePart)))\n );\n};\n\n/**\n * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to\n * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n *\n * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts\n * into the gen_ai input and output schemas as of October 20th, 2025.\n *\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json\n * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages\n * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json\n *\n * @param inputOutputString a jsonified string that contains messages adhering to what appears to be\n * Mastra's message shape.\n * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.\n * If parsing fails at any step, the original inputOutputString is returned unmodified.\n */\nexport const convertMastraMessagesToGenAIMessages = (inputOutputString: string): string => {\n try {\n const parsedIO = JSON.parse(inputOutputString) as unknown;\n if (typeof parsedIO !== 'object' || parsedIO == null || (!('messages' in parsedIO) && !('text' in parsedIO))) {\n // inputOutputString fails initial type guard, just return it\n return inputOutputString;\n }\n // if the IO simply contains a text string, return a single text message\n // formatted as a gen_ai assistant message, assuming its an assistant response\n if ('text' in parsedIO) {\n return JSON.stringify([\n {\n role: 'assistant',\n parts: [{ type: 'text', content: parsedIO.text as string }],\n } satisfies GenAIMessage,\n ]);\n }\n // if the IO contains messages, convert them to gen_ai messages\n if (Array.isArray(parsedIO.messages)) {\n return JSON.stringify(\n (parsedIO.messages as unknown[]).map(m => {\n if (!isMastraMessage(m)) {\n return m;\n }\n const role = m.role;\n let parts: GenAIMessagePart[] = [];\n if (Array.isArray(m.content)) {\n parts = m.content.map(c => {\n switch (c.type) {\n case 'text':\n return {\n type: 'text',\n content: c.text,\n };\n case 'tool-call':\n return {\n type: 'tool_call',\n id: c.toolCallId,\n name: c.toolName,\n arguments: JSON.stringify(c.input),\n };\n case 'tool-result':\n return {\n type: 'tool_call_response',\n id: c.toolCallId,\n name: c.toolName,\n response: JSON.stringify(c.output.value),\n };\n default:\n return c;\n }\n });\n } else {\n parts = [\n {\n type: 'text',\n content: m.content,\n },\n ];\n }\n return {\n role,\n parts,\n } satisfies GenAIMessage;\n }),\n );\n }\n // 
we've failed type-guards, just return original I/O string\n return inputOutputString;\n } catch {\n // silently fallback to original I/O string\n return inputOutputString;\n }\n};\n","import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from '@arizeai/openinference-genai/types';\nimport { SemanticConventions } from '@arizeai/openinference-semantic-conventions';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport { convertMastraMessagesToGenAIMessages } from './gen-ai';\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n const attributes = { ...(span.attributes ?? {}) };\n const mutableSpan = span as Mutable<ReadableSpan>;\n\n if (attributes['gen_ai.prompt'] && typeof attributes['gen_ai.prompt'] === 'string') {\n attributes['gen_ai.input.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.prompt']);\n }\n\n if (attributes['gen_ai.completion'] && typeof attributes['gen_ai.completion'] === 'string') {\n attributes['gen_ai.output.messages'] = convertMastraMessagesToGenAIMessages(attributes['gen_ai.completion']);\n }\n\n // Gather custom attributes into OpenInference metadata (flat best-effort)\n const reservedPrefixes = [\n 'gen_ai.',\n 'llm.',\n 'input.',\n 'output.',\n 'span.',\n 'mastra',\n 'agent.',\n 'workflow.',\n 'mcp.',\n 'openinference.',\n 'retrieval.',\n 'reranker.',\n 'embedding.',\n 'document.',\n 'tool',\n 'error.',\n 'http.',\n 'db.',\n ];\n const metadataEntries: Record<string, unknown> = {};\n const reservedExact = new Set<string>(['input', 'output', 'sessionId', 'metadata']);\n for (const [key, value] of Object.entries(attributes)) {\n const isReserved =\n reservedPrefixes.some(prefix => key.startsWith(prefix)) ||\n key === 'threadId' ||\n key === 'userId' ||\n key === SemanticConventions.SESSION_ID ||\n key === SemanticConventions.USER_ID ||\n reservedExact.has(key);\n if (!isReserved) {\n metadataEntries[key] = value;\n }\n }\n\n let metadataPayload: string | undefined;\n if (Object.keys(metadataEntries).length > 0) {\n try {\n metadataPayload = JSON.stringify(metadataEntries);\n attributes[SemanticConventions.METADATA] = metadataPayload;\n } catch {\n // best-effort only\n }\n }\n\n const sessionId = typeof attributes['threadId'] === 'string' ? (attributes['threadId'] as string) : undefined;\n const userId = typeof attributes['userId'] === 'string' ? 
(attributes['userId'] as string) : undefined;\n\n if (sessionId) {\n attributes[SemanticConventions.SESSION_ID] = sessionId;\n delete attributes['threadId'];\n }\n\n if (userId) {\n attributes[SemanticConventions.USER_ID] = userId;\n delete attributes['userId'];\n }\n\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);\n\n if (processedAttributes) {\n if (sessionId) {\n processedAttributes[SemanticConventions.SESSION_ID] = sessionId;\n }\n if (userId) {\n processedAttributes[SemanticConventions.USER_ID] = userId;\n }\n if (metadataPayload) {\n processedAttributes[SemanticConventions.METADATA] = metadataPayload;\n }\n mutableSpan.attributes = processedAttributes;\n }\n\n return mutableSpan;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
+
{"version":3,"sources":["../src/openInferenceOTLPExporter.ts","../src/tracing.ts"],"names":[],"mappings":";;;;;;;;AAoBA,IAAM,qBAAA,GAAwB,SAAA;AAC9B,IAAM,sBAAA,GAAyB,kBAAA;AAS/B,SAAS,sBAAsB,UAAA,EAG7B;AACA,EAAA,OAAO,MAAA,CAAO,OAAA,CAAQ,UAAU,CAAA,CAAE,MAAA;AAAA,IAChC,CAAC,GAAA,EAAK,CAAC,GAAA,EAAK,KAAK,CAAA,KAAM;AACrB,MAAA,IAAI,GAAA,CAAI,UAAA,CAAW,qBAAqB,CAAA,EAAG;AACzC,QAAA,IAAI,GAAA,CAAI,UAAA,CAAW,sBAAsB,CAAA,EAAG;AAC1C,UAAA,MAAM,WAAA,GAAc,GAAA,CAAI,KAAA,CAAM,sBAAA,CAAuB,MAAM,CAAA;AAC3D,UAAA,GAAA,CAAI,cAAA,CAAe,WAAW,CAAA,GAAI,KAAA;AAAA,QACpC,CAAA,MAAO;AACL,UAAA,GAAA,CAAI,WAAA,CAAY,GAAG,CAAA,GAAI,KAAA;AAAA,QACzB;AAAA,MACF;AACA,MAAA,OAAO,GAAA;AAAA,IACT,CAAA;AAAA,IACA;AAAA,MACE,gBAAgB,EAAC;AAAA,MACjB,aAAa;AAAC;AAChB,GACF;AACF;AAEO,IAAM,8BAAA,GAAN,cAA6C,iBAAA,CAAkB;AAAA,EACpE,MAAA,CAAO,OAAuB,cAAA,EAAgD;AAC5E,IAAA,MAAM,cAAA,GAAiB,KAAA,CAAM,GAAA,CAAI,CAAA,IAAA,KAAQ;AACvC,MAAA,MAAM,aAAa,EAAE,GAAI,IAAA,CAAK,UAAA,IAAc,EAAC,EAAG;AAChD,MAAA,MAAM,WAAA,GAAc,IAAA;AAEpB,MAAA,MAAM,EAAE,cAAA,EAAgB,WAAA,EAAY,GAAI,sBAAsB,UAAU,CAAA;AACxE,MAAA,MAAM,mBAAA,GAAsB,wDAAwD,UAAU,CAAA;AAG9F,MAAA,IAAI,mBAAA,EAAqB;AACvB,QAAA,MAAM,QAAA,GAAW,eAAe,UAAU,CAAA;AAC1C,QAAA,IAAI,QAAA,EAAU;AACZ,UAAA,OAAO,eAAe,UAAU,CAAA;AAChC,UAAA,mBAAA,CAAoB,UAAU,CAAA,GAAI,QAAA;AAAA,QACpC;AAGA,QAAA,IAAI,WAAA,CAAY,aAAa,CAAA,EAAG;AAC9B,UAAA,mBAAA,CAAoB,QAAQ,CAAA,GAAI,WAAA,CAAY,aAAa,CAAA;AACzD,UAAA,OAAO,YAAY,aAAa,CAAA;AAAA,QAClC;AAEA,QAAA,MAAM,MAAA,GAAS,eAAe,QAAQ,CAAA;AACtC,QAAA,IAAI,MAAA,EAAQ;AACV,UAAA,OAAO,eAAe,QAAQ,CAAA;AAC9B,UAAA,mBAAA,CAAoB,OAAO,CAAA,GAAI,MAAA;AAAA,QACjC;AAGA,QAAA,IAAI,MAAA,CAAO,IAAA,CAAK,cAAc,CAAA,CAAE,SAAS,CAAA,EAAG;AAC1C,UAAA,IAAI;AACF,YAAA,mBAAA,CAAoB,QAAQ,CAAA,GAAI,IAAA,CAAK,SAAA,CAAU,cAAc,CAAA;AAAA,UAC/D,CAAA,CAAA,MAAQ;AAAA,UAER;AAAA,QACF;AAEA,QAAA,MAAM,aAAA,GAAgB,WAAW,0BAA0B,CAAA;AAC3D,QAAA,IAAI,aAAA,EAAe;AACjB,UAAA,mBAAA,CAAoB,eAAe,CAAA,GAAI,kBAAA;AACvC,UAAA,mBAAA,CAAoB,WAAW,CAAA,GAAI,aAAA;AAAA,QACrC;AACA,QAAA,MAAM,cAAA,GAAiB,WAAW,2BAA2B,CAAA;AAC7D,QAAA,IAAI,cAAA,EAAgB;AAClB,UAAA,mBAAA,CAAoB,gBAAgB,CAAA,GAAI,kBAAA;AACxC,UAAA,mBAAA,CAAoB,YAAY,CAAA,GAAI,cAAA;AAAA,QACtC;AAEA,QAAA,WAAA,CAAY,UAAA,GAAa,EAAE,GAAG,mBAAA,EAAqB,GAAG,WAAA,EAAY;AAAA,MACpE;AAEA,MAAA,OAAO,WAAA;AAAA,IACT,CAAC,CAAA;AAED,IAAA,KAAA,CAAM,MAAA,CAAO,gBAAgB,cAAc,CAAA;AAAA,EAC7C;AACF,CAAA;;;ACvGA,IAAM,UAAA,GAAa,iBAAA;AAEZ,IAAM,iBAAA,GAAoB;AA6B1B,IAAM,aAAA,GAAN,cAA4B,YAAA,CAAa;AAAA,EAC9C,IAAA,GAAO,OAAA;AAAA,EAEP,YAAY,MAAA,EAA6B;AACvC,IAAA,MAAM,MAAA,GAAS,IAAI,aAAA,CAAc,EAAE,OAAO,MAAA,CAAO,QAAA,IAAY,QAAQ,CAAA;AACrE,IAAA,IAAI,WAA+B,MAAA,CAAO,QAAA;AAC1C,IAAA,MAAM,OAAA,GAAkC;AAAA,MACtC,GAAG,MAAA,CAAO;AAAA,KACZ;AACA,IAAA,IAAI,OAAO,OAAA,EAAS;AAElB,MAAA,OAAA,CAAQ,UAAU,IAAI,MAAA,CAAO,OAAA;AAC7B,MAAA,OAAA,CAAQ,SAAS,CAAA,GAAI,MAAA,CAAO,MAAA,IAAU,EAAA;AACtC,MAAA,QAAA,GAAW,OAAO,QAAA,IAAY,iBAAA;AAAA,IAChC,CAAA,MAAA,IAAW,OAAO,MAAA,EAAQ;AAExB,MAAA,OAAA,CAAQ,eAAe,CAAA,GAAI,CAAA,OAAA,EAAU,MAAA,CAAO,MAAM,CAAA,CAAA;AAAA,IACpD;AACA,IAAA,IAAI,CAAC,QAAA,EAAU;AACb,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,EAAG,UAAU,CAAA,2DAAA,CAA6D,CAAA;AACvF,MAAA;AAAA,IACF;AACA,IAAA,KAAA,CAAM;AAAA,MACJ,QAAA,EAAU,IAAI,8BAAA,CAA+B;AAAA,QAC3C,GAAA,EAAK,QAAA;AAAA,QACL;AAAA,OACD,CAAA;AAAA,MACD,GAAG,MAAA;AAAA,MACH,kBAAA,EAAoB;AAAA,QAClB,CAAC,wBAAwB,GAAG,MAAA,CAAO,WAAA;AAAA,QACnC,GAAG,MAAA,CAAO;AAAA,OACZ;AAAA,MACA,QAAA,EAAU;AAAA,QACR,MAAA,EAAQ;AAAA,UACN,QAAA;AAAA,UACA,OAAA;AAAA,UACA,QAAA,EAAU;AAAA;AACZ;AACF,KAC4B,CAAA;AAAA,EAChC;AACF","file":"index.js","sourcesContent":["import { convertGenAISpanAttributesToOpenInferenceSpanAttributes } from '@arizeai/openinference-genai';\nimport type { Mutable } from 
'@arizeai/openinference-genai/types';\nimport {\n INPUT_MIME_TYPE,\n INPUT_VALUE,\n METADATA,\n OUTPUT_MIME_TYPE,\n OUTPUT_VALUE,\n SESSION_ID,\n TAG_TAGS,\n USER_ID,\n} from '@arizeai/openinference-semantic-conventions';\nimport type { ExportResult } from '@opentelemetry/core';\nimport { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';\nimport type { ReadableSpan } from '@opentelemetry/sdk-trace-base';\nimport {\n ATTR_GEN_AI_INPUT_MESSAGES,\n ATTR_GEN_AI_OUTPUT_MESSAGES,\n} from '@opentelemetry/semantic-conventions/incubating';\n\nconst MASTRA_GENERAL_PREFIX = 'mastra.';\nconst MASTRA_METADATA_PREFIX = 'mastra.metadata.';\n\n/**\n * Splits Mastra span attributes into two groups:\n * - `metadata`: keys starting with \"mastra.metadata.\" (prefix removed)\n * - `other`: all remaining keys starting with \"mastra.\"\n *\n * Any attributes not starting with \"mastra.\" are ignored entirely.\n */\nfunction splitMastraAttributes(attributes: Record<string, any>): {\n mastraMetadata: Record<string, any>;\n mastraOther: Record<string, any>;\n} {\n return Object.entries(attributes).reduce(\n (acc, [key, value]) => {\n if (key.startsWith(MASTRA_GENERAL_PREFIX)) {\n if (key.startsWith(MASTRA_METADATA_PREFIX)) {\n const strippedKey = key.slice(MASTRA_METADATA_PREFIX.length);\n acc.mastraMetadata[strippedKey] = value;\n } else {\n acc.mastraOther[key] = value;\n }\n }\n return acc;\n },\n {\n mastraMetadata: {} as Record<string, any>,\n mastraOther: {} as Record<string, any>,\n },\n );\n}\n\nexport class OpenInferenceOTLPTraceExporter extends OTLPTraceExporter {\n export(spans: ReadableSpan[], resultCallback: (result: ExportResult) => void) {\n const processedSpans = spans.map(span => {\n const attributes = { ...(span.attributes ?? {}) };\n const mutableSpan = span as Mutable<ReadableSpan>;\n\n const { mastraMetadata, mastraOther } = splitMastraAttributes(attributes);\n const processedAttributes = convertGenAISpanAttributesToOpenInferenceSpanAttributes(attributes);\n\n // only add processed attributes if conversion was successful\n if (processedAttributes) {\n const threadId = mastraMetadata['threadId'];\n if (threadId) {\n delete mastraMetadata['threadId'];\n processedAttributes[SESSION_ID] = threadId;\n }\n\n // Map mastra.tags to OpenInference native tag.tags convention (tags are only on root spans)\n if (mastraOther['mastra.tags']) {\n processedAttributes[TAG_TAGS] = mastraOther['mastra.tags'];\n delete mastraOther['mastra.tags'];\n }\n\n const userId = mastraMetadata['userId'];\n if (userId) {\n delete mastraMetadata['userId'];\n processedAttributes[USER_ID] = userId;\n }\n\n // Gather custom metadata into OpenInference metadata (flat best-effort)\n if (Object.keys(mastraMetadata).length > 0) {\n try {\n processedAttributes[METADATA] = JSON.stringify(mastraMetadata);\n } catch {\n // best-effort only\n }\n }\n\n const inputMessages = attributes[ATTR_GEN_AI_INPUT_MESSAGES];\n if (inputMessages) {\n processedAttributes[INPUT_MIME_TYPE] = 'application/json';\n processedAttributes[INPUT_VALUE] = inputMessages;\n }\n const outputMessages = attributes[ATTR_GEN_AI_OUTPUT_MESSAGES];\n if (outputMessages) {\n processedAttributes[OUTPUT_MIME_TYPE] = 'application/json';\n processedAttributes[OUTPUT_VALUE] = outputMessages;\n }\n\n mutableSpan.attributes = { ...processedAttributes, ...mastraOther };\n }\n\n return mutableSpan;\n });\n\n super.export(processedSpans, resultCallback);\n }\n}\n","import { SEMRESATTRS_PROJECT_NAME } from '@arizeai/openinference-semantic-conventions';\nimport { 
ConsoleLogger } from '@mastra/core/logger';\nimport { OtelExporter } from '@mastra/otel-exporter';\nimport type { OtelExporterConfig } from '@mastra/otel-exporter';\n\nimport { OpenInferenceOTLPTraceExporter } from './openInferenceOTLPExporter.js';\n\nconst LOG_PREFIX = '[ArizeExporter]';\n\nexport const ARIZE_AX_ENDPOINT = 'https://otlp.arize.com/v1/traces';\n\nexport type ArizeExporterConfig = Omit<OtelExporterConfig, 'provider'> & {\n /**\n * Required if sending traces to Arize AX\n */\n spaceId?: string;\n /**\n * Required if sending traces to Arize AX, or to any other collector that\n * requires an Authorization header\n */\n apiKey?: string;\n /**\n * Collector endpoint destination for trace exports.\n * Required when sending traces to Phoenix, Phoenix Cloud, or other collectors.\n * Optional when sending traces to Arize AX.\n */\n endpoint?: string;\n /**\n * Optional project name to be added as a resource attribute using\n * OpenInference Semantic Conventions\n */\n projectName?: string;\n /**\n * Optional headers to be added to each OTLP request\n */\n headers?: Record<string, string>;\n};\n\nexport class ArizeExporter extends OtelExporter {\n name = 'arize';\n\n constructor(config: ArizeExporterConfig) {\n const logger = new ConsoleLogger({ level: config.logLevel ?? 'warn' });\n let endpoint: string | undefined = config.endpoint;\n const headers: Record<string, string> = {\n ...config.headers,\n };\n if (config.spaceId) {\n // arize ax header configuration\n headers['space_id'] = config.spaceId;\n headers['api_key'] = config.apiKey ?? '';\n endpoint = config.endpoint || ARIZE_AX_ENDPOINT;\n } else if (config.apiKey) {\n // standard otel header configuration\n headers['Authorization'] = `Bearer ${config.apiKey}`;\n }\n if (!endpoint) {\n logger.error(`${LOG_PREFIX} Endpoint is required in configuration. Disabling exporter.`);\n return;\n }\n super({\n exporter: new OpenInferenceOTLPTraceExporter({\n url: endpoint,\n headers,\n }),\n ...config,\n resourceAttributes: {\n [SEMRESATTRS_PROJECT_NAME]: config.projectName,\n ...config.resourceAttributes,\n },\n provider: {\n custom: {\n endpoint,\n headers,\n protocol: 'http/protobuf',\n },\n } satisfies OtelExporterConfig['provider'],\n } satisfies OtelExporterConfig);\n }\n}\n"]}
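For readers skimming the embedded `sourcesContent` above: the rewritten `OpenInferenceOTLPTraceExporter` now routes Mastra span attributes through OpenInference conventions via a `mastra.`-prefix split, instead of the previous reserved-prefix blocklist. The sketch below is a simplified, standalone restatement of that mapping; plain string keys stand in for the constants the package imports from `@arizeai/openinference-semantic-conventions` and `@opentelemetry/semantic-conventions/incubating`, and the merge with `convertGenAISpanAttributesToOpenInferenceSpanAttributes` is omitted. It is illustrative only, not the shipped implementation.

```typescript
// Simplified sketch of the Mastra-specific attribute mapping performed by the
// new exporter (see the sourcesContent above). Literal keys mirror the
// OpenInference / gen_ai conventions; the real code imports constants instead.
type Attrs = Record<string, unknown>;

export function mapMastraAttributes(attributes: Attrs): Attrs {
  const out: Attrs = {};
  const metadata: Attrs = {};

  for (const [key, value] of Object.entries(attributes)) {
    if (key.startsWith('mastra.metadata.')) {
      // mastra.metadata.* keys are collected into OpenInference metadata
      metadata[key.slice('mastra.metadata.'.length)] = value;
    } else if (key.startsWith('mastra.')) {
      // other mastra.* keys are carried through unchanged
      out[key] = value;
    }
  }

  // threadId / userId become session.id / user.id
  if (typeof metadata['threadId'] === 'string') {
    out['session.id'] = metadata['threadId'];
    delete metadata['threadId'];
  }
  if (typeof metadata['userId'] === 'string') {
    out['user.id'] = metadata['userId'];
    delete metadata['userId'];
  }

  // tags (present on root spans only) map to the native tag.tags convention
  if (out['mastra.tags']) {
    out['tag.tags'] = out['mastra.tags'];
    delete out['mastra.tags'];
  }

  // remaining metadata is JSON-stringified, best effort
  if (Object.keys(metadata).length > 0) {
    try {
      out['metadata'] = JSON.stringify(metadata);
    } catch {
      // best-effort only
    }
  }

  // gen_ai messages double as OpenInference input/output values
  if (attributes['gen_ai.input.messages']) {
    out['input.mime_type'] = 'application/json';
    out['input.value'] = attributes['gen_ai.input.messages'];
  }
  if (attributes['gen_ai.output.messages']) {
    out['output.mime_type'] = 'application/json';
    out['output.value'] = attributes['gen_ai.output.messages'];
  }

  return out;
}
```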
package/dist/openInferenceOTLPExporter.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"openInferenceOTLPExporter.d.ts","sourceRoot":"","sources":["../src/openInferenceOTLPExporter.ts"],"names":[],"mappings":"
+
{"version":3,"file":"openInferenceOTLPExporter.d.ts","sourceRoot":"","sources":["../src/openInferenceOTLPExporter.ts"],"names":[],"mappings":"AAYA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0CAA0C,CAAC;AAC7E,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,+BAA+B,CAAC;AAuClE,qBAAa,8BAA+B,SAAQ,iBAAiB;IACnE,MAAM,CAAC,KAAK,EAAE,YAAY,EAAE,EAAE,cAAc,EAAE,CAAC,MAAM,EAAE,YAAY,KAAK,IAAI;CAwD7E"}
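As a usage note, the `ArizeExporterConfig` embedded above supports two targets: Arize AX (via `spaceId`/`apiKey`, defaulting to `ARIZE_AX_ENDPOINT`) and any other OTLP collector such as Phoenix (via an explicit `endpoint`, with `apiKey` sent as a Bearer token). A minimal construction sketch follows; the environment variable names are hypothetical, and it assumes the class is re-exported from the package root.

```typescript
import { ArizeExporter } from '@mastra/arize';

// Arize AX: spaceId and apiKey are sent as space_id / api_key headers and the
// endpoint falls back to https://otlp.arize.com/v1/traces when omitted.
const axExporter = new ArizeExporter({
  spaceId: process.env.ARIZE_SPACE_ID!, // hypothetical env var name
  apiKey: process.env.ARIZE_API_KEY!,   // hypothetical env var name
  projectName: 'my-mastra-app',
});

// Phoenix (or any other collector): endpoint is required; apiKey, if set,
// is sent as an Authorization: Bearer header.
const phoenixExporter = new ArizeExporter({
  endpoint: 'http://localhost:6006/v1/traces', // hypothetical local Phoenix URL
  apiKey: process.env.PHOENIX_API_KEY,         // hypothetical env var name
  projectName: 'my-mastra-app',
});
```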
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/arize",
-  "version": "1.0.0-beta.
+  "version": "1.0.0-beta.4",
   "description": "Arize observability provider for Mastra - includes tracing and future observability features",
   "type": "module",
   "main": "dist/index.js",
@@ -30,7 +30,8 @@
     "@opentelemetry/exporter-trace-otlp-proto": "^0.205.0",
     "@opentelemetry/resources": "^2.1.0",
     "@opentelemetry/sdk-trace-base": "^2.1.0",
-    "@
+    "@opentelemetry/semantic-conventions": "1.38.0",
+    "@mastra/otel-exporter": "1.0.0-beta.4"
   },
   "devDependencies": {
     "@microsoft/api-extractor": "^7.52.8",
@@ -41,9 +42,9 @@
     "tsup": "^8.5.0",
     "typescript": "^5.8.3",
     "vitest": "4.0.12",
-    "@internal/lint": "0.0.53",
     "@internal/types-builder": "0.0.28",
-    "@
+    "@internal/lint": "0.0.53",
+    "@mastra/core": "1.0.0-beta.9"
   },
   "peerDependencies": {
     "@mastra/core": ">=1.0.0-0 <2.0.0-0"
package/dist/gen-ai.d.ts
DELETED
@@ -1,20 +0,0 @@
-/**
- * Convert an Input/Output string from a MastraSpan into a jsonified string that adheres to
- * OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.
- * If parsing fails at any step, the original inputOutputString is returned unmodified.
- *
- * This conversion is best effort; It assumes a consistent shape for mastra messages, and converts
- * into the gen_ai input and output schemas as of October 20th, 2025.
- *
- * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-input-messages
- * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-input-messages.json
- * @see https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-output-messages
- * @see https://opentelemetry.io/docs/specs/semconv/gen-ai/gen-ai-output-messages.json
- *
- * @param inputOutputString a jsonified string that contains messages adhering to what appears to be
- * Mastra's message shape.
- * @returns a jsonified string that contains messages adhering to the OpenTelemetry gen_ai.input.messages and gen_ai.output.messages schema.
- * If parsing fails at any step, the original inputOutputString is returned unmodified.
- */
-export declare const convertMastraMessagesToGenAIMessages: (inputOutputString: string) => string;
-//# sourceMappingURL=gen-ai.d.ts.map
package/dist/gen-ai.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"gen-ai.d.ts","sourceRoot":"","sources":["../src/gen-ai.ts"],"names":[],"mappings":"AAsEA;;;;;;;;;;;;;;;;;GAiBG;AACH,eAAO,MAAM,oCAAoC,GAAI,mBAAmB,MAAM,KAAG,MAyEhF,CAAC"}
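For context on these deletions: `gen-ai.d.ts` declared `convertMastraMessagesToGenAIMessages`, the best-effort converter (still visible in the old `sourcesContent` earlier in this diff) that re-shaped Mastra messages into the gen_ai message schema for `gen_ai.input.messages` / `gen_ai.output.messages`. The newer exporter reads those attributes directly, so the helper and its typings were dropped. Below is a minimal illustration of the shape change that helper handled, using the simplified text-only message types from the removed source; tool-call and tool-result parts are omitted for brevity.

```typescript
// Shapes taken from the removed src/gen-ai.ts (see the old sourcesContent above).
type MastraTextPart = { type: 'text'; text: string };
type MastraMessage = { role: string; content: string | MastraTextPart[] };

type GenAIMessage = {
  role: string;
  parts: { type: 'text'; content: string }[];
};

// Simplified restatement of the text-only path of the removed converter.
function toGenAIMessage(m: MastraMessage): GenAIMessage {
  const parts = Array.isArray(m.content)
    ? m.content.map(c => ({ type: 'text' as const, content: c.text }))
    : [{ type: 'text' as const, content: m.content }];
  return { role: m.role, parts };
}

// Example: a Mastra user message becomes a gen_ai.input.messages entry.
const genAI = toGenAIMessage({ role: 'user', content: 'Hello' });
// => { role: 'user', parts: [{ type: 'text', content: 'Hello' }] }
console.log(JSON.stringify([genAI]));
```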