@mastra/otel-exporter 0.0.0-dynamic-model-router-20251010230835 → 0.0.0-elated-armadillo-be37a1-20251219210627

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,11 +1,14 @@
- import { AITracingEventType, AISpanType } from '@mastra/core/ai-tracing';
- import { ConsoleLogger } from '@mastra/core/logger';
- import { diag, DiagConsoleLogger, DiagLogLevel, SpanKind, SpanStatusCode, TraceFlags } from '@opentelemetry/api';
- import { resourceFromAttributes } from '@opentelemetry/resources';
+ import { SpanType, TracingEventType } from '@mastra/core/observability';
+ import { BaseExporter } from '@mastra/observability';
+ import { TraceFlags, SpanKind, SpanStatusCode, diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';
  import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
+ import { readFileSync } from 'fs';
+ import { fileURLToPath } from 'url';
+ import { resourceFromAttributes } from '@opentelemetry/resources';
  import { ATTR_TELEMETRY_SDK_LANGUAGE, ATTR_TELEMETRY_SDK_VERSION, ATTR_TELEMETRY_SDK_NAME, ATTR_SERVICE_VERSION, ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions';
+ import { ATTR_GEN_AI_OPERATION_NAME, ATTR_GEN_AI_INPUT_MESSAGES, ATTR_GEN_AI_OUTPUT_MESSAGES, ATTR_GEN_AI_REQUEST_MODEL, ATTR_GEN_AI_PROVIDER_NAME, ATTR_GEN_AI_AGENT_ID, ATTR_GEN_AI_AGENT_NAME, ATTR_GEN_AI_REQUEST_TEMPERATURE, ATTR_GEN_AI_REQUEST_MAX_TOKENS, ATTR_GEN_AI_REQUEST_TOP_P, ATTR_GEN_AI_REQUEST_TOP_K, ATTR_GEN_AI_REQUEST_PRESENCE_PENALTY, ATTR_GEN_AI_REQUEST_FREQUENCY_PENALTY, ATTR_GEN_AI_REQUEST_STOP_SEQUENCES, ATTR_GEN_AI_REQUEST_SEED, ATTR_GEN_AI_RESPONSE_FINISH_REASONS, ATTR_GEN_AI_RESPONSE_MODEL, ATTR_GEN_AI_RESPONSE_ID, ATTR_SERVER_ADDRESS, ATTR_SERVER_PORT, ATTR_GEN_AI_TOOL_NAME, ATTR_GEN_AI_TOOL_DESCRIPTION, ATTR_GEN_AI_CONVERSATION_ID, ATTR_GEN_AI_SYSTEM_INSTRUCTIONS, ATTR_ERROR_TYPE, ATTR_ERROR_MESSAGE, ATTR_GEN_AI_USAGE_INPUT_TOKENS, ATTR_GEN_AI_USAGE_OUTPUT_TOKENS } from '@opentelemetry/semantic-conventions/incubating';

- // src/ai-tracing.ts
+ // src/tracing.ts

  // src/loadExporter.ts
  var OTLPHttpExporter;
@@ -203,400 +206,498 @@ function resolveCustomConfig(config) {
  protocol: config.protocol || "http/json"
  };
  }
- var MastraReadableSpan = class {
- name;
- kind;
- spanContext;
- parentSpanId;
- startTime;
- endTime;
- status;
- attributes;
- links;
- events;
- duration;
- ended;
- resource;
- instrumentationLibrary;
- instrumentationScope;
- droppedAttributesCount = 0;
- droppedEventsCount = 0;
- droppedLinksCount = 0;
- constructor(aiSpan, attributes, kind, parentSpanId, resource, instrumentationLibrary) {
- this.name = aiSpan.name;
- this.kind = kind;
- this.attributes = attributes;
- this.parentSpanId = parentSpanId;
- this.links = [];
- this.events = [];
- this.startTime = this.dateToHrTime(aiSpan.startTime);
- this.endTime = aiSpan.endTime ? this.dateToHrTime(aiSpan.endTime) : this.startTime;
- this.ended = !!aiSpan.endTime;
- if (aiSpan.endTime) {
- const durationMs = aiSpan.endTime.getTime() - aiSpan.startTime.getTime();
- this.duration = [Math.floor(durationMs / 1e3), durationMs % 1e3 * 1e6];
- } else {
- this.duration = [0, 0];
+
+ // src/gen-ai-messages.ts
+ var isMastraMessagePart = (p) => {
+ return typeof p === "object" && p != null && "type" in p && (p.type === "text" || p.type === "tool-call" || p.type === "tool-result") && (p.type === "text" && "text" in p || p.type === "tool-call" && "toolCallId" in p && "toolName" in p && "input" in p || p.type === "tool-result" && "toolCallId" in p && "toolName" in p && "output" in p);
+ };
+ var isMastraMessage = (m) => {
+ return typeof m === "object" && m != null && "role" in m && "content" in m && (typeof m.content === "string" || Array.isArray(m.content) && m.content.every(isMastraMessagePart));
+ };
+ var convertMastraMessagesToGenAIMessages = (inputOutputString) => {
+ try {
+ const parsedIO = JSON.parse(inputOutputString);
+ if (typeof parsedIO !== "object" || parsedIO == null || !("messages" in parsedIO) && !("text" in parsedIO)) {
+ return inputOutputString;
  }
- if (aiSpan.errorInfo) {
- this.status = {
- code: SpanStatusCode.ERROR,
- message: aiSpan.errorInfo.message
- };
- this.events.push({
- name: "exception",
- attributes: {
- "exception.message": aiSpan.errorInfo.message,
- "exception.type": "Error",
- ...aiSpan.errorInfo.details?.stack && {
- "exception.stacktrace": aiSpan.errorInfo.details.stack
+ if ("text" in parsedIO) {
+ return JSON.stringify([
+ {
+ role: "assistant",
+ parts: [{ type: "text", content: parsedIO.text }]
+ }
+ ]);
+ }
+ if (Array.isArray(parsedIO.messages)) {
+ return JSON.stringify(
+ parsedIO.messages.map((m) => {
+ if (!isMastraMessage(m)) {
+ return m;
  }
- },
- time: this.startTime,
- droppedAttributesCount: 0
- });
- } else if (aiSpan.endTime) {
- this.status = { code: SpanStatusCode.OK };
- } else {
- this.status = { code: SpanStatusCode.UNSET };
- }
- if (aiSpan.isEvent) {
- this.events.push({
- name: "instant_event",
- attributes: {},
- time: this.startTime,
- droppedAttributesCount: 0
- });
+ const role = m.role;
+ let parts = [];
+ if (Array.isArray(m.content)) {
+ parts = m.content.map((c) => {
+ switch (c.type) {
+ case "text":
+ return {
+ type: "text",
+ content: c.text
+ };
+ case "tool-call":
+ return {
+ type: "tool_call",
+ id: c.toolCallId,
+ name: c.toolName,
+ arguments: JSON.stringify(c.input)
+ };
+ case "tool-result":
+ return {
+ type: "tool_call_response",
+ id: c.toolCallId,
+ name: c.toolName,
+ response: JSON.stringify(c.output.value)
+ };
+ default:
+ return c;
+ }
+ });
+ } else {
+ parts = [
+ {
+ type: "text",
+ content: m.content
+ }
+ ];
+ }
+ return {
+ role,
+ parts
+ };
+ })
+ );
  }
- this.spanContext = () => ({
- traceId: aiSpan.traceId,
- spanId: aiSpan.id,
- traceFlags: TraceFlags.SAMPLED,
- isRemote: false
- });
- this.resource = resource || {};
- this.instrumentationLibrary = instrumentationLibrary || {
- name: "@mastra/otel",
- version: "1.0.0"
- };
- this.instrumentationScope = this.instrumentationLibrary;
- }
- /**
- * Convert JavaScript Date to hrtime format
- */
- dateToHrTime(date) {
- const ms = date.getTime();
- const seconds = Math.floor(ms / 1e3);
- const nanoseconds = ms % 1e3 * 1e6;
- return [seconds, nanoseconds];
+ return inputOutputString;
+ } catch {
+ return inputOutputString;
  }
  };

- // src/span-converter.ts
- var SPAN_KIND_MAPPING = {
- // LLM operations are CLIENT spans (calling external AI services)
- [AISpanType.LLM_GENERATION]: SpanKind.CLIENT,
- [AISpanType.LLM_CHUNK]: SpanKind.CLIENT,
- // MCP tool calls are CLIENT (external service calls)
- [AISpanType.MCP_TOOL_CALL]: SpanKind.CLIENT,
- // Root spans for agent/workflow are SERVER (entry points)
- [AISpanType.AGENT_RUN]: SpanKind.SERVER,
- [AISpanType.WORKFLOW_RUN]: SpanKind.SERVER
- };
- var SpanConverter = class {
- resource;
- instrumentationLibrary;
- constructor(resource) {
- this.resource = resource;
- this.instrumentationLibrary = {
- name: "@mastra/otel",
- version: "1.0.0"
- };
+ // src/gen-ai-semantics.ts
+ function formatUsageMetrics(usage) {
+ if (!usage) return {};
+ const metrics = {};
+ if (usage.inputTokens !== void 0) {
+ metrics[ATTR_GEN_AI_USAGE_INPUT_TOKENS] = usage.inputTokens;
  }
- /**
- * Convert a Mastra AI span to an OpenTelemetry ReadableSpan
- * This preserves Mastra's trace and span IDs
- */
- convertSpan(aiSpan) {
- const spanKind = this.getSpanKind(aiSpan);
- const attributes = this.buildAttributes(aiSpan);
- const spanName = this.buildSpanName(aiSpan);
- const otelSpan = { ...aiSpan, name: spanName };
- return new MastraReadableSpan(
- otelSpan,
- attributes,
- spanKind,
- aiSpan.parentSpanId,
- // Use the parentSpanId from the Mastra span directly
- this.resource,
- this.instrumentationLibrary
- );
+ if (usage.outputTokens !== void 0) {
+ metrics[ATTR_GEN_AI_USAGE_OUTPUT_TOKENS] = usage.outputTokens;
  }
- /**
- * Get the appropriate SpanKind based on span type and context
- */
- getSpanKind(aiSpan) {
- if (aiSpan.isRootSpan) {
- if (aiSpan.type === AISpanType.AGENT_RUN || aiSpan.type === AISpanType.WORKFLOW_RUN) {
- return SpanKind.SERVER;
- }
+ if (usage.outputDetails?.reasoning !== void 0) {
+ metrics["gen_ai.usage.reasoning_tokens"] = usage.outputDetails.reasoning;
+ }
+ if (usage.inputDetails?.cacheRead !== void 0) {
+ metrics["gen_ai.usage.cached_input_tokens"] = usage.inputDetails.cacheRead;
+ }
+ if (usage.inputDetails?.cacheWrite !== void 0) {
+ metrics["gen_ai.usage.cache_write_tokens"] = usage.inputDetails.cacheWrite;
+ }
+ if (usage.inputDetails?.audio !== void 0) {
+ metrics["gen_ai.usage.audio_input_tokens"] = usage.inputDetails.audio;
+ }
+ if (usage.outputDetails?.audio !== void 0) {
+ metrics["gen_ai.usage.audio_output_tokens"] = usage.outputDetails.audio;
+ }
+ return metrics;
+ }
+ function getOperationName(span) {
+ switch (span.type) {
+ case SpanType.MODEL_GENERATION:
+ return "chat";
+ case SpanType.TOOL_CALL:
+ case SpanType.MCP_TOOL_CALL:
+ return "execute_tool";
+ case SpanType.AGENT_RUN:
+ return "invoke_agent";
+ case SpanType.WORKFLOW_RUN:
+ return "invoke_workflow";
+ default:
+ return span.type.toLowerCase();
+ }
+ }
+ function sanitizeSpanName(name) {
+ return name.replace(/[^\p{L}\p{N}._ -]/gu, "");
+ }
+ function getSpanIdentifier(span) {
+ switch (span.type) {
+ case SpanType.MODEL_GENERATION: {
+ const attrs = span.attributes;
+ return attrs?.model ?? "unknown";
  }
- return SPAN_KIND_MAPPING[aiSpan.type] || SpanKind.INTERNAL;
+ case SpanType.TOOL_CALL:
+ case SpanType.MCP_TOOL_CALL: {
+ const attrs = span.attributes;
+ return attrs?.toolId ?? "unknown";
+ }
+ case SpanType.AGENT_RUN: {
+ const attrs = span.attributes;
+ return attrs?.agentName ?? attrs?.agentId ?? "unknown";
+ }
+ case SpanType.WORKFLOW_RUN: {
+ const attrs = span.attributes;
+ return attrs?.workflowId ?? "unknown";
+ }
+ default:
+ return null;
  }
- /**
- * Build OTEL-compliant span name based on span type and attributes
- */
- buildSpanName(aiSpan) {
- switch (aiSpan.type) {
- case AISpanType.LLM_GENERATION: {
- const attrs = aiSpan.attributes;
- const operation = attrs?.resultType === "tool_selection" ? "tool_selection" : "chat";
- const model = attrs?.model || "unknown";
- return `${operation} ${model}`;
- }
- case AISpanType.TOOL_CALL:
- case AISpanType.MCP_TOOL_CALL: {
- const toolAttrs = aiSpan.attributes;
- const toolName = toolAttrs?.toolId || "unknown";
- return `tool.execute ${toolName}`;
- }
- case AISpanType.AGENT_RUN: {
- const agentAttrs = aiSpan.attributes;
- const agentId = agentAttrs?.agentId || "unknown";
- return `agent.${agentId}`;
- }
- case AISpanType.WORKFLOW_RUN: {
- const workflowAttrs = aiSpan.attributes;
- const workflowId = workflowAttrs?.workflowId || "unknown";
- return `workflow.${workflowId}`;
- }
- case AISpanType.WORKFLOW_STEP:
- return aiSpan.name;
- default:
- return aiSpan.name;
+ }
+ function getSpanName(span) {
+ const identifier = getSpanIdentifier(span);
+ if (identifier) {
+ const operation = getOperationName(span);
+ return `${operation} ${identifier}`;
+ }
+ return sanitizeSpanName(span.name);
+ }
+ function getAttributes(span) {
+ const attributes = {};
+ const spanType = span.type.toLowerCase();
+ attributes[ATTR_GEN_AI_OPERATION_NAME] = getOperationName(span);
+ attributes["mastra.span.type"] = span.type;
+ if (span.input !== void 0) {
+ const inputStr = typeof span.input === "string" ? span.input : JSON.stringify(span.input);
+ if (span.type === SpanType.MODEL_GENERATION) {
+ attributes[ATTR_GEN_AI_INPUT_MESSAGES] = convertMastraMessagesToGenAIMessages(inputStr);
+ } else if (span.type === SpanType.TOOL_CALL || span.type === SpanType.MCP_TOOL_CALL) {
+ attributes["gen_ai.tool.call.arguments"] = inputStr;
+ } else {
+ attributes[`mastra.${spanType}.input`] = inputStr;
  }
  }
- /**
- * Build OpenTelemetry attributes from Mastra AI span
- * Following OTEL Semantic Conventions for GenAI
- */
- buildAttributes(aiSpan) {
- const attributes = {};
- attributes["gen_ai.operation.name"] = this.getOperationName(aiSpan);
- attributes["span.kind"] = this.getSpanKindString(aiSpan);
- attributes["mastra.span.type"] = aiSpan.type;
- attributes["mastra.trace_id"] = aiSpan.traceId;
- attributes["mastra.span_id"] = aiSpan.id;
- if (aiSpan.parentSpanId) {
- attributes["mastra.parent_span_id"] = aiSpan.parentSpanId;
- }
- if (aiSpan.input !== void 0) {
- const inputStr = typeof aiSpan.input === "string" ? aiSpan.input : JSON.stringify(aiSpan.input);
- attributes["input"] = inputStr;
- if (aiSpan.type === AISpanType.LLM_GENERATION) {
- attributes["gen_ai.prompt"] = inputStr;
- } else if (aiSpan.type === AISpanType.TOOL_CALL || aiSpan.type === AISpanType.MCP_TOOL_CALL) {
- attributes["gen_ai.tool.input"] = inputStr;
- }
+ if (span.output !== void 0) {
+ const outputStr = typeof span.output === "string" ? span.output : JSON.stringify(span.output);
+ if (span.type === SpanType.MODEL_GENERATION) {
+ attributes[ATTR_GEN_AI_OUTPUT_MESSAGES] = convertMastraMessagesToGenAIMessages(outputStr);
+ } else if (span.type === SpanType.TOOL_CALL || span.type === SpanType.MCP_TOOL_CALL) {
+ attributes["gen_ai.tool.call.result"] = outputStr;
+ } else {
+ attributes[`mastra.${spanType}.output`] = outputStr;
  }
- if (aiSpan.output !== void 0) {
- const outputStr = typeof aiSpan.output === "string" ? aiSpan.output : JSON.stringify(aiSpan.output);
- attributes["output"] = outputStr;
- if (aiSpan.type === AISpanType.LLM_GENERATION) {
- attributes["gen_ai.completion"] = outputStr;
- } else if (aiSpan.type === AISpanType.TOOL_CALL || aiSpan.type === AISpanType.MCP_TOOL_CALL) {
- attributes["gen_ai.tool.output"] = outputStr;
- }
+ }
+ if (span.type === SpanType.MODEL_GENERATION && span.attributes) {
+ const modelAttrs = span.attributes;
+ if (modelAttrs.model) {
+ attributes[ATTR_GEN_AI_REQUEST_MODEL] = modelAttrs.model;
  }
- if (aiSpan.type === AISpanType.LLM_GENERATION && aiSpan.attributes) {
- const llmAttrs = aiSpan.attributes;
- if (llmAttrs.model) {
- attributes["gen_ai.request.model"] = llmAttrs.model;
+ if (modelAttrs.provider) {
+ attributes[ATTR_GEN_AI_PROVIDER_NAME] = normalizeProvider(modelAttrs.provider);
+ }
+ if (modelAttrs.agentId) {
+ attributes[ATTR_GEN_AI_AGENT_ID] = modelAttrs.agentId;
+ }
+ if (modelAttrs.agentName) {
+ attributes[ATTR_GEN_AI_AGENT_NAME] = modelAttrs.agentName;
+ }
+ Object.assign(attributes, formatUsageMetrics(modelAttrs.usage));
+ if (modelAttrs.parameters) {
+ if (modelAttrs.parameters.temperature !== void 0) {
+ attributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] = modelAttrs.parameters.temperature;
  }
- if (llmAttrs.provider) {
- attributes["gen_ai.system"] = llmAttrs.provider;
+ if (modelAttrs.parameters.maxOutputTokens !== void 0) {
+ attributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] = modelAttrs.parameters.maxOutputTokens;
  }
- if (llmAttrs.usage) {
- const inputTokens = llmAttrs.usage.inputTokens ?? llmAttrs.usage.promptTokens;
- const outputTokens = llmAttrs.usage.outputTokens ?? llmAttrs.usage.completionTokens;
- if (inputTokens !== void 0) {
- attributes["gen_ai.usage.input_tokens"] = inputTokens;
- }
- if (outputTokens !== void 0) {
- attributes["gen_ai.usage.output_tokens"] = outputTokens;
- }
- if (llmAttrs.usage.totalTokens !== void 0) {
- attributes["gen_ai.usage.total_tokens"] = llmAttrs.usage.totalTokens;
- }
- if (llmAttrs.usage.reasoningTokens !== void 0) {
- attributes["gen_ai.usage.reasoning_tokens"] = llmAttrs.usage.reasoningTokens;
- }
- if (llmAttrs.usage.cachedInputTokens !== void 0) {
- attributes["gen_ai.usage.cached_input_tokens"] = llmAttrs.usage.cachedInputTokens;
- }
+ if (modelAttrs.parameters.topP !== void 0) {
+ attributes[ATTR_GEN_AI_REQUEST_TOP_P] = modelAttrs.parameters.topP;
  }
- if (llmAttrs.parameters) {
- if (llmAttrs.parameters.temperature !== void 0) {
- attributes["gen_ai.request.temperature"] = llmAttrs.parameters.temperature;
- }
- if (llmAttrs.parameters.maxOutputTokens !== void 0) {
- attributes["gen_ai.request.max_tokens"] = llmAttrs.parameters.maxOutputTokens;
- }
- if (llmAttrs.parameters.topP !== void 0) {
- attributes["gen_ai.request.top_p"] = llmAttrs.parameters.topP;
- }
- if (llmAttrs.parameters.topK !== void 0) {
- attributes["gen_ai.request.top_k"] = llmAttrs.parameters.topK;
- }
- if (llmAttrs.parameters.presencePenalty !== void 0) {
- attributes["gen_ai.request.presence_penalty"] = llmAttrs.parameters.presencePenalty;
- }
- if (llmAttrs.parameters.frequencyPenalty !== void 0) {
- attributes["gen_ai.request.frequency_penalty"] = llmAttrs.parameters.frequencyPenalty;
- }
- if (llmAttrs.parameters.stopSequences) {
- attributes["gen_ai.request.stop_sequences"] = JSON.stringify(llmAttrs.parameters.stopSequences);
- }
+ if (modelAttrs.parameters.topK !== void 0) {
+ attributes[ATTR_GEN_AI_REQUEST_TOP_K] = modelAttrs.parameters.topK;
  }
- if (llmAttrs.finishReason) {
- attributes["gen_ai.response.finish_reasons"] = llmAttrs.finishReason;
+ if (modelAttrs.parameters.presencePenalty !== void 0) {
+ attributes[ATTR_GEN_AI_REQUEST_PRESENCE_PENALTY] = modelAttrs.parameters.presencePenalty;
  }
- }
- if ((aiSpan.type === AISpanType.TOOL_CALL || aiSpan.type === AISpanType.MCP_TOOL_CALL) && aiSpan.attributes) {
- const toolAttrs = aiSpan.attributes;
- if (toolAttrs.toolId) {
- attributes["gen_ai.tool.name"] = toolAttrs.toolId;
+ if (modelAttrs.parameters.frequencyPenalty !== void 0) {
+ attributes[ATTR_GEN_AI_REQUEST_FREQUENCY_PENALTY] = modelAttrs.parameters.frequencyPenalty;
  }
- if (aiSpan.type === AISpanType.MCP_TOOL_CALL) {
- const mcpAttrs = toolAttrs;
- if (mcpAttrs.mcpServer) {
- attributes["mcp.server"] = mcpAttrs.mcpServer;
- }
- if (mcpAttrs.serverVersion) {
- attributes["mcp.server.version"] = mcpAttrs.serverVersion;
- }
- } else {
- if (toolAttrs.toolDescription) {
- attributes["gen_ai.tool.description"] = toolAttrs.toolDescription;
- }
+ if (modelAttrs.parameters.stopSequences) {
+ attributes[ATTR_GEN_AI_REQUEST_STOP_SEQUENCES] = JSON.stringify(modelAttrs.parameters.stopSequences);
  }
- if (toolAttrs.success !== void 0) {
- attributes["gen_ai.tool.success"] = toolAttrs.success;
+ if (modelAttrs.parameters.seed) {
+ attributes[ATTR_GEN_AI_REQUEST_SEED] = modelAttrs.parameters.seed;
  }
  }
- if (aiSpan.type === AISpanType.AGENT_RUN && aiSpan.attributes) {
- const agentAttrs = aiSpan.attributes;
- if (agentAttrs.agentId) {
- attributes["agent.id"] = agentAttrs.agentId;
- }
- if (agentAttrs.maxSteps) {
- attributes["agent.max_steps"] = agentAttrs.maxSteps;
- }
- if (agentAttrs.availableTools) {
- attributes["agent.available_tools"] = JSON.stringify(agentAttrs.availableTools);
- }
+ if (modelAttrs.finishReason) {
+ attributes[ATTR_GEN_AI_RESPONSE_FINISH_REASONS] = JSON.stringify([modelAttrs.finishReason]);
  }
- if (aiSpan.type === AISpanType.WORKFLOW_RUN && aiSpan.attributes) {
- const workflowAttrs = aiSpan.attributes;
- if (workflowAttrs.workflowId) {
- attributes["workflow.id"] = workflowAttrs.workflowId;
- }
- if (workflowAttrs.status) {
- attributes["workflow.status"] = workflowAttrs.status;
- }
+ if (modelAttrs.responseModel) {
+ attributes[ATTR_GEN_AI_RESPONSE_MODEL] = modelAttrs.responseModel;
+ }
+ if (modelAttrs.responseId) {
+ attributes[ATTR_GEN_AI_RESPONSE_ID] = modelAttrs.responseId;
+ }
+ if (modelAttrs.serverAddress) {
+ attributes[ATTR_SERVER_ADDRESS] = modelAttrs.serverAddress;
+ }
+ if (modelAttrs.serverPort !== void 0) {
+ attributes[ATTR_SERVER_PORT] = modelAttrs.serverPort;
+ }
+ }
+ if ((span.type === SpanType.TOOL_CALL || span.type === SpanType.MCP_TOOL_CALL) && span.attributes) {
+ const toolAttrs = span.attributes;
+ if (toolAttrs.toolId) {
+ attributes[ATTR_GEN_AI_TOOL_NAME] = toolAttrs.toolId;
  }
- if (aiSpan.errorInfo) {
- attributes["error"] = true;
- attributes["error.type"] = aiSpan.errorInfo.id || "unknown";
- attributes["error.message"] = aiSpan.errorInfo.message;
- if (aiSpan.errorInfo.domain) {
- attributes["error.domain"] = aiSpan.errorInfo.domain;
+ if (span.type === SpanType.MCP_TOOL_CALL) {
+ const mcpAttrs = toolAttrs;
+ if (mcpAttrs.mcpServer) {
+ attributes[ATTR_SERVER_ADDRESS] = mcpAttrs.mcpServer;
  }
- if (aiSpan.errorInfo.category) {
- attributes["error.category"] = aiSpan.errorInfo.category;
+ } else {
+ if (toolAttrs.toolDescription) {
+ attributes[ATTR_GEN_AI_TOOL_DESCRIPTION] = toolAttrs.toolDescription;
  }
  }
- if (aiSpan.metadata) {
- Object.entries(aiSpan.metadata).forEach(([key, value]) => {
- if (!attributes[key]) {
- if (value === null || value === void 0) {
- return;
- }
- if (typeof value === "object") {
- attributes[key] = JSON.stringify(value);
- } else {
- attributes[key] = value;
- }
- }
- });
+ }
+ if (span.type === SpanType.AGENT_RUN && span.attributes) {
+ const agentAttrs = span.attributes;
+ if (agentAttrs.agentId) {
+ attributes[ATTR_GEN_AI_AGENT_ID] = agentAttrs.agentId;
+ }
+ if (agentAttrs.agentName) {
+ attributes[ATTR_GEN_AI_AGENT_NAME] = agentAttrs.agentName;
+ }
+ if (agentAttrs.conversationId) {
+ attributes[ATTR_GEN_AI_CONVERSATION_ID] = agentAttrs.conversationId;
  }
- if (aiSpan.startTime) {
- attributes["mastra.start_time"] = aiSpan.startTime.toISOString();
+ if (agentAttrs.maxSteps) {
+ attributes[`mastra.${spanType}.max_steps`] = agentAttrs.maxSteps;
  }
- if (aiSpan.endTime) {
- attributes["mastra.end_time"] = aiSpan.endTime.toISOString();
- const duration = aiSpan.endTime.getTime() - aiSpan.startTime.getTime();
- attributes["mastra.duration_ms"] = duration;
+ if (agentAttrs.availableTools) {
+ attributes[`gen_ai.tool.definitions`] = JSON.stringify(agentAttrs.availableTools);
+ }
+ attributes[ATTR_GEN_AI_SYSTEM_INSTRUCTIONS] = agentAttrs.instructions;
+ }
+ if (span.errorInfo) {
+ attributes[ATTR_ERROR_TYPE] = span.errorInfo.id || "unknown";
+ attributes[ATTR_ERROR_MESSAGE] = span.errorInfo.message;
+ if (span.errorInfo.domain) {
+ attributes["error.domain"] = span.errorInfo.domain;
+ }
+ if (span.errorInfo.category) {
+ attributes["error.category"] = span.errorInfo.category;
+ }
+ }
+ return attributes;
+ }
+ var PROVIDER_ALIASES = {
+ anthropic: ["anthropic", "claude"],
+ "aws.bedrock": ["awsbedrock", "bedrock", "amazonbedrock"],
+ "azure.ai.inference": ["azureaiinference", "azureinference"],
+ "azure.ai.openai": ["azureaiopenai", "azureopenai", "msopenai", "microsoftopenai"],
+ cohere: ["cohere"],
+ deepseek: ["deepseek"],
+ "gcp.gemini": ["gcpgemini", "gemini"],
+ "gcp.gen_ai": ["gcpgenai", "googlegenai", "googleai"],
+ "gcp.vertex_ai": ["gcpvertexai", "vertexai"],
+ groq: ["groq"],
+ "ibm.watsonx.ai": ["ibmwatsonxai", "watsonx", "watsonxai"],
+ mistral_ai: ["mistral", "mistralai"],
+ openai: ["openai", "oai"],
+ perplexity: ["perplexity", "pplx"],
+ x_ai: ["xai", "x-ai", "x_ai", "x.com ai"]
+ };
+ function normalizeProviderString(input) {
+ return input.toLowerCase().replace(/[^a-z0-9]/g, "");
+ }
+ function normalizeProvider(providerName) {
+ const normalized = normalizeProviderString(providerName);
+ for (const [canonical, aliases] of Object.entries(PROVIDER_ALIASES)) {
+ for (const alias of aliases) {
+ if (normalized === alias) {
+ return canonical;
+ }
  }
- return attributes;
  }
+ return providerName.toLowerCase();
+ }
+
+ // src/span-converter.ts
+ var SpanConverter = class {
+ constructor(params) {
+ this.params = params;
+ this.format = params.format;
+ }
+ resource;
+ scope;
+ initPromise;
+ format;
  /**
- * Get the operation name based on span type for gen_ai.operation.name
+ * Lazily initialize resource & scope on first use.
+ * Subsequent calls reuse the same promise (no races).
  */
- getOperationName(aiSpan) {
- switch (aiSpan.type) {
- case AISpanType.LLM_GENERATION: {
- const attrs = aiSpan.attributes;
- return attrs?.resultType === "tool_selection" ? "tool_selection" : "chat";
- }
- case AISpanType.TOOL_CALL:
- case AISpanType.MCP_TOOL_CALL:
- return "tool.execute";
- case AISpanType.AGENT_RUN:
- return "agent.run";
- case AISpanType.WORKFLOW_RUN:
- return "workflow.run";
- default:
- return aiSpan.type.replace(/_/g, ".");
+ async initIfNeeded() {
+ if (this.initPromise) {
+ return this.initPromise;
  }
+ this.initPromise = (async () => {
+ const packageVersion = await getPackageVersion(this.params.packageName) ?? "unknown";
+ const serviceVersion = await getPackageVersion("@mastra/core") ?? "unknown";
+ let resource = resourceFromAttributes({
+ [ATTR_SERVICE_NAME]: this.params.serviceName || "mastra-service",
+ [ATTR_SERVICE_VERSION]: serviceVersion,
+ [ATTR_TELEMETRY_SDK_NAME]: this.params.packageName,
+ [ATTR_TELEMETRY_SDK_VERSION]: packageVersion,
+ [ATTR_TELEMETRY_SDK_LANGUAGE]: "nodejs"
+ });
+ if (this.params.config?.resourceAttributes) {
+ resource = resource.merge(
+ // Duplicate attributes from config will override defaults above
+ resourceFromAttributes(this.params.config.resourceAttributes)
+ );
+ }
+ this.resource = resource;
+ this.scope = {
+ name: this.params.packageName,
+ version: packageVersion
+ };
+ })();
+ return this.initPromise;
  }
  /**
- * Get span kind as string for attribute
+ * Convert a Mastra Span to an OpenTelemetry ReadableSpan
  */
- getSpanKindString(aiSpan) {
- const kind = this.getSpanKind(aiSpan);
- switch (kind) {
- case SpanKind.SERVER:
- return "server";
- case SpanKind.CLIENT:
- return "client";
- case SpanKind.INTERNAL:
- return "internal";
- case SpanKind.PRODUCER:
- return "producer";
- case SpanKind.CONSUMER:
- return "consumer";
- default:
- return "internal";
+ async convertSpan(span) {
+ await this.initIfNeeded();
+ if (!this.resource || !this.scope) {
+ throw new Error("SpanConverter not initialized correctly");
+ }
+ const name = getSpanName(span);
+ const kind = getSpanKind(span.type);
+ const attributes = getAttributes(span);
+ if (span.metadata) {
+ for (const [k, v] of Object.entries(span.metadata)) {
+ if (v === null || v === void 0) {
+ continue;
+ }
+ attributes[`mastra.metadata.${k}`] = typeof v === "object" ? JSON.stringify(v) : v;
+ }
+ }
+ if (span.isRootSpan && span.tags?.length) {
+ attributes["mastra.tags"] = JSON.stringify(span.tags);
  }
+ const startTime = dateToHrTime(span.startTime);
+ const endTime = span.endTime ? dateToHrTime(span.endTime) : startTime;
+ const duration = computeDuration(span.startTime, span.endTime);
+ const { status, events } = buildStatusAndEvents(span, startTime);
+ const spanContext = {
+ traceId: span.traceId,
+ spanId: span.id,
+ traceFlags: TraceFlags.SAMPLED,
+ isRemote: false
+ };
+ const parentSpanContext = span.parentSpanId ? {
+ traceId: span.traceId,
+ spanId: span.parentSpanId,
+ traceFlags: TraceFlags.SAMPLED,
+ isRemote: false
+ } : void 0;
+ const links = [];
+ const readable = {
+ name,
+ kind,
+ spanContext: () => spanContext,
+ parentSpanContext,
+ startTime,
+ endTime,
+ status,
+ attributes,
+ links,
+ events,
+ duration,
+ ended: !!span.endTime,
+ resource: this.resource,
+ instrumentationScope: this.scope,
+ droppedAttributesCount: 0,
+ droppedEventsCount: 0,
+ droppedLinksCount: 0
+ };
+ return readable;
  }
  };
+ async function getPackageVersion(pkgName) {
+ try {
+ const manifestUrl = new URL(await import.meta.resolve(`${pkgName}/package.json`));
+ const path = fileURLToPath(manifestUrl);
+ const pkgJson = JSON.parse(readFileSync(path, "utf8"));
+ return pkgJson.version;
+ } catch {
+ return void 0;
+ }
+ }
+ function getSpanKind(type) {
+ switch (type) {
+ case SpanType.MODEL_GENERATION:
+ case SpanType.MCP_TOOL_CALL:
+ return SpanKind.CLIENT;
+ default:
+ return SpanKind.INTERNAL;
+ }
+ }
+ function dateToHrTime(date) {
+ const ms = date.getTime();
+ const seconds = Math.floor(ms / 1e3);
+ const nanoseconds = ms % 1e3 * 1e6;
+ return [seconds, nanoseconds];
+ }
+ function computeDuration(start, end) {
+ if (!end) return [0, 0];
+ const diffMs = end.getTime() - start.getTime();
+ return [Math.floor(diffMs / 1e3), diffMs % 1e3 * 1e6];
+ }
+ function buildStatusAndEvents(span, defaultTime) {
+ const events = [];
+ if (span.errorInfo) {
+ const status = {
+ code: SpanStatusCode.ERROR,
+ message: span.errorInfo.message
+ };
+ events.push({
+ name: "exception",
+ attributes: {
+ "exception.message": span.errorInfo.message,
+ "exception.type": "Error",
+ ...span.errorInfo.details?.stack && {
+ "exception.stacktrace": span.errorInfo.details.stack
+ }
+ },
+ time: defaultTime,
+ droppedAttributesCount: 0
+ });
+ return { status, events };
+ }
+ if (span.endTime) {
+ return {
+ status: { code: SpanStatusCode.OK },
+ events
+ };
+ }
+ return {
+ status: { code: SpanStatusCode.UNSET },
+ events
+ };
+ }

- // src/ai-tracing.ts
- var OtelExporter = class {
+ // src/tracing.ts
+ var OtelExporter = class extends BaseExporter {
  config;
- tracingConfig;
+ observabilityConfig;
  spanConverter;
  processor;
  exporter;
  isSetup = false;
- isDisabled = false;
- logger;
  name = "opentelemetry";
  constructor(config) {
+ super(config);
  this.config = config;
- this.spanConverter = new SpanConverter();
- this.logger = new ConsoleLogger({ level: config.logLevel ?? "warn" });
  if (config.logLevel === "debug") {
  diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
  }
@@ -604,11 +705,11 @@ var OtelExporter = class {
  /**
  * Initialize with tracing configuration
  */
- init(config) {
- this.tracingConfig = config;
+ init(options) {
+ this.observabilityConfig = options.config;
  }
  async setupExporter() {
- if (this.isSetup) return;
+ if (this.isSetup || this.exporter) return;
  if (!this.config.provider) {
  this.logger.error(
  '[OtelExporter] Provider configuration is required. Use the "custom" provider for generic endpoints.'
@@ -623,6 +724,10 @@ var OtelExporter = class {
  this.isSetup = true;
  return;
  }
+ if (this.config.exporter) {
+ this.exporter = this.config.exporter;
+ return;
+ }
  const endpoint = resolved.endpoint;
  const headers = resolved.headers;
  const protocol = resolved.protocol;
@@ -676,15 +781,15 @@ var OtelExporter = class {
  this.isSetup = true;
  return;
  }
- const resource = resourceFromAttributes({
- [ATTR_SERVICE_NAME]: this.tracingConfig?.serviceName || "mastra-service",
- [ATTR_SERVICE_VERSION]: "1.0.0",
- // Add telemetry SDK information
- [ATTR_TELEMETRY_SDK_NAME]: "@mastra/otel-exporter",
- [ATTR_TELEMETRY_SDK_VERSION]: "1.0.0",
- [ATTR_TELEMETRY_SDK_LANGUAGE]: "nodejs"
+ }
+ async setupProcessor() {
+ if (this.processor || this.isSetup) return;
+ this.spanConverter = new SpanConverter({
+ packageName: "@mastra/otel-exporter",
+ serviceName: this.observabilityConfig?.serviceName,
+ config: this.config,
+ format: "GenAI_v1_38_0"
  });
- this.spanConverter = new SpanConverter(resource);
  this.processor = new BatchSpanProcessor(this.exporter, {
  maxExportBatchSize: this.config.batchSize || 512,
  // Default batch size
@@ -698,13 +803,15 @@ var OtelExporter = class {
  this.logger.debug(
  `[OtelExporter] Using BatchSpanProcessor (batch size: ${this.config.batchSize || 512}, delay: 5s)`
  );
+ }
+ async setup() {
+ if (this.isSetup) return;
+ await this.setupExporter();
+ await this.setupProcessor();
  this.isSetup = true;
  }
- async exportEvent(event) {
- if (this.isDisabled) {
- return;
- }
- if (event.type !== AITracingEventType.SPAN_ENDED) {
+ async _exportTracingEvent(event) {
+ if (event.type !== TracingEventType.SPAN_ENDED) {
  return;
  }
  const span = event.exportedSpan;
@@ -712,15 +819,15 @@ var OtelExporter = class {
  }
  async exportSpan(span) {
  if (!this.isSetup) {
- await this.setupExporter();
+ await this.setup();
  }
  if (this.isDisabled || !this.processor) {
  return;
  }
  try {
- const readableSpan = this.spanConverter.convertSpan(span);
+ const otelSpan = await this.spanConverter.convertSpan(span);
  await new Promise((resolve) => {
- this.processor.onEnd(readableSpan);
+ this.processor.onEnd(otelSpan);
  resolve();
  });
  this.logger.debug(
@@ -737,6 +844,6 @@ var OtelExporter = class {
  }
  };

- export { OtelExporter };
+ export { OtelExporter, SpanConverter, getSpanKind };
  //# sourceMappingURL=index.js.map
  //# sourceMappingURL=index.js.map
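A minimal usage sketch (not part of the package or of this diff) of the conventions the rewritten converter applies, based only on the exports and imports visible above; the example span names are invented:

// Sketch: assumes the exports listed in this diff (OtelExporter, SpanConverter, getSpanKind)
// and the SpanType enum imported from '@mastra/core/observability'.
import { SpanType } from '@mastra/core/observability';
import { SpanKind } from '@opentelemetry/api';
import { getSpanKind } from '@mastra/otel-exporter';

// Model generations and MCP tool calls are exported as CLIENT spans;
// everything else (agent runs, workflow runs, in-process tool calls) is INTERNAL.
console.log(getSpanKind(SpanType.MODEL_GENERATION) === SpanKind.CLIENT); // true
console.log(getSpanKind(SpanType.AGENT_RUN) === SpanKind.INTERNAL); // true

// Span names now follow "<gen_ai.operation.name> <identifier>", e.g. "chat gpt-4o-mini"
// for a model span or "invoke_agent support-agent" for an agent span (invented names).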