@mastra/otel-exporter 0.0.0-dynamic-model-router-20251010230835 → 0.0.0-extract-tool-ui-inp-playground-ui-20251023135343

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,89 @@
  # @mastra/otel-exporter
 
- ## 0.0.0-dynamic-model-router-20251010230835
+ ## 0.0.0-extract-tool-ui-inp-playground-ui-20251023135343
+
+ ### Minor Changes
+
+ - Rename LLM span types and attributes to use Model prefix ([#9105](https://github.com/mastra-ai/mastra/pull/9105))
+
+ BREAKING CHANGE: This release renames AI tracing span types and attribute interfaces to use the "Model" prefix instead of "LLM":
+ - `AISpanType.LLM_GENERATION` → `AISpanType.MODEL_GENERATION`
+ - `AISpanType.LLM_STEP` → `AISpanType.MODEL_STEP`
+ - `AISpanType.LLM_CHUNK` → `AISpanType.MODEL_CHUNK`
+ - `LLMGenerationAttributes` → `ModelGenerationAttributes`
+ - `LLMStepAttributes` → `ModelStepAttributes`
+ - `LLMChunkAttributes` → `ModelChunkAttributes`
+ - `InternalSpans.LLM` → `InternalSpans.MODEL`
+
+ This change better reflects that these span types apply to all AI models, not just Large Language Models.
+
+ Migration guide (see the sketch below):
+ - Update all imports: `import { ModelGenerationAttributes } from '@mastra/core/ai-tracing'`
+ - Update span type references: `AISpanType.MODEL_GENERATION`
+ - Update InternalSpans usage: `InternalSpans.MODEL`
+
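A minimal migration sketch for the rename, assuming the renamed exports behave as described above; the helper names below are illustrative, not part of the package:

```ts
import { AISpanType, InternalSpans } from '@mastra/core/ai-tracing';
import type { ModelGenerationAttributes } from '@mastra/core/ai-tracing';

// Span type references: AISpanType.LLM_GENERATION -> AISpanType.MODEL_GENERATION
function isModelGeneration(span: { type: string; attributes?: ModelGenerationAttributes }): boolean {
  return span.type === AISpanType.MODEL_GENERATION;
}

// Attribute interfaces: LLMGenerationAttributes -> ModelGenerationAttributes
function modelName(attrs?: ModelGenerationAttributes): string {
  return attrs?.model ?? 'unknown';
}

// InternalSpans usage: InternalSpans.LLM -> InternalSpans.MODEL
const internalSpanFilter = InternalSpans.MODEL;
```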
+ ### Patch Changes
+
+ - Updated dependencies [[`f743dbb`](https://github.com/mastra-ai/mastra/commit/f743dbb8b40d1627b5c10c0e6fc154f4ebb6e394), [`6c049d9`](https://github.com/mastra-ai/mastra/commit/6c049d94063fdcbd5b81c4912a2bf82a92c9cc0b), [`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`2c4438b`](https://github.com/mastra-ai/mastra/commit/2c4438b87817ab7eed818c7990fef010475af1a3)]:
+ - @mastra/core@0.0.0-extract-tool-ui-inp-playground-ui-20251023135343
+
+ ## 0.2.0
+
+ ### Minor Changes
+
+ - feat(otel-exporter): Add customizable 'exporter' constructor parameter ([#8827](https://github.com/mastra-ai/mastra/pull/8827))
+
+ You can now pass an instance of a class that extends `TraceExporter` into `OtelExporter`.
+ This bypasses the default package detection: when an exporter is supplied to the `OtelExporter`
+ constructor, one is no longer instantiated automatically (see the sketch below).
+
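A hedged usage sketch for the new parameter, assuming the package's public `OtelExporter` export and the standard OTLP HTTP trace exporter; the endpoint and header values are placeholders:

```ts
import { OtelExporter } from '@mastra/otel-exporter';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';

// Handing in a pre-configured exporter makes OtelExporter use it as-is instead
// of detecting an exporter package and constructing one from provider settings.
const otel = new OtelExporter({
  exporter: new OTLPTraceExporter({
    url: 'https://collector.example.com/v1/traces', // placeholder endpoint
    headers: { 'x-api-key': '<your-api-key>' }, // placeholder header
  }),
  // other options (provider, batchSize, ...) as described in the package README
});
```

The resulting instance can then be registered as an AI tracing exporter in your Mastra observability configuration, as described in the package README.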
+ feat(arize): Initial release of @mastra/arize observability package
+
+ The `@mastra/arize` package exports an `ArizeExporter` class that can be used to easily send AI
+ traces from Mastra to Arize AX, Arize Phoenix, or any OpenInference-compatible collector.
+ It sends traces using `BatchSpanProcessor` over OTLP connections.
+ It leverages the `@mastra/otel-exporter` package, reusing `OtelExporter` for transmission and
+ span management.
+ See the README in `observability/arize/README.md` for more details.
+
+ ### Patch Changes
+
+ - fix(observability): Add ParentSpanContext to MastraSpans with parentage ([#9085](https://github.com/mastra-ai/mastra/pull/9085))
+
+ - Update peerdeps to 0.23.0-0 ([#9043](https://github.com/mastra-ai/mastra/pull/9043))
+
+ - Updated dependencies [[`c67ca32`](https://github.com/mastra-ai/mastra/commit/c67ca32e3c2cf69bfc146580770c720220ca44ac), [`efb5ed9`](https://github.com/mastra-ai/mastra/commit/efb5ed946ae7f410bc68c9430beb4b010afd25ec), [`dbc9e12`](https://github.com/mastra-ai/mastra/commit/dbc9e1216ba575ba59ead4afb727a01215f7de4f), [`99e41b9`](https://github.com/mastra-ai/mastra/commit/99e41b94957cdd25137d3ac12e94e8b21aa01b68), [`c28833c`](https://github.com/mastra-ai/mastra/commit/c28833c5b6d8e10eeffd7f7d39129d53b8bca240), [`8ea07b4`](https://github.com/mastra-ai/mastra/commit/8ea07b4bdc73e4218437dbb6dcb0f4b23e745a44), [`ba201b8`](https://github.com/mastra-ai/mastra/commit/ba201b8f8feac4c72350f2dbd52c13c7297ba7b0), [`f053e89`](https://github.com/mastra-ai/mastra/commit/f053e89160dbd0bd3333fc3492f68231b5c7c349), [`4fc4136`](https://github.com/mastra-ai/mastra/commit/4fc413652866a8d2240694fddb2562e9edbb70df), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`d10baf5`](https://github.com/mastra-ai/mastra/commit/d10baf5a3c924f2a6654e23a3e318ed03f189b76), [`038c55a`](https://github.com/mastra-ai/mastra/commit/038c55a7090fc1b1513a966386d3072617f836ac), [`182f045`](https://github.com/mastra-ai/mastra/commit/182f0458f25bd70aa774e64fd923c8a483eddbf1), [`9a1a485`](https://github.com/mastra-ai/mastra/commit/9a1a4859b855e37239f652bf14b1ecd1029b8c4e), [`9257233`](https://github.com/mastra-ai/mastra/commit/9257233c4ffce09b2bedc2a9adbd70d7a83fa8e2), [`7620d2b`](https://github.com/mastra-ai/mastra/commit/7620d2bddeb4fae4c3c0a0b4e672969795fca11a), [`b2365f0`](https://github.com/mastra-ai/mastra/commit/b2365f038dd4c5f06400428b224af963f399ad50), [`0f1a4c9`](https://github.com/mastra-ai/mastra/commit/0f1a4c984fb4b104b2f0b63ba18c9fa77f567700), [`9029ba3`](https://github.com/mastra-ai/mastra/commit/9029ba34459c8859fed4c6b73efd8e2d0021e7ba), [`426cc56`](https://github.com/mastra-ai/mastra/commit/426cc561c85ae76a112ded2385532a91f9f9f074), [`00931fb`](https://github.com/mastra-ai/mastra/commit/00931fb1a21aa42c4fbc20c2c40dd62466b8fc8f), [`e473bfe`](https://github.com/mastra-ai/mastra/commit/e473bfe416c0b8e876973c2b6a6f13c394b7a93f), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`2db6160`](https://github.com/mastra-ai/mastra/commit/2db6160e2022ff8827c15d30157e684683b934b5), [`8aeea37`](https://github.com/mastra-ai/mastra/commit/8aeea37efdde347c635a67fed56794943b7f74ec), [`02fe153`](https://github.com/mastra-ai/mastra/commit/02fe15351d6021d214da48ec982a0e9e4150bcee), [`648e2ca`](https://github.com/mastra-ai/mastra/commit/648e2ca42da54838c6ccbdaadc6fadd808fa6b86), [`74567b3`](https://github.com/mastra-ai/mastra/commit/74567b3d237ae3915cd0bca3cf55fa0a64e4e4a4), [`b65c5e0`](https://github.com/mastra-ai/mastra/commit/b65c5e0fe6f3c390a9a8bbcf69304d972c3a4afb), [`15a1733`](https://github.com/mastra-ai/mastra/commit/15a1733074cee8bd37370e1af34cd818e89fa7ac), [`fc2a774`](https://github.com/mastra-ai/mastra/commit/fc2a77468981aaddc3e77f83f0c4ad4a4af140da), [`4e08933`](https://github.com/mastra-ai/mastra/commit/4e08933625464dfde178347af5b6278fcf34188e)]:
+ - @mastra/core@0.22.0
+
+ ## 0.2.0-alpha.0
+
+ ### Minor Changes
+
+ - feat(otel-exporter): Add customizable 'exporter' constructor parameter ([#8827](https://github.com/mastra-ai/mastra/pull/8827))
+
+ You can now pass an instance of a class that extends `TraceExporter` into `OtelExporter`.
+ This bypasses the default package detection: when an exporter is supplied to the `OtelExporter`
+ constructor, one is no longer instantiated automatically.
+
+ feat(arize): Initial release of @mastra/arize observability package
+
+ The `@mastra/arize` package exports an `ArizeExporter` class that can be used to easily send AI
+ traces from Mastra to Arize AX, Arize Phoenix, or any OpenInference-compatible collector.
+ It sends traces using `BatchSpanProcessor` over OTLP connections.
+ It leverages the `@mastra/otel-exporter` package, reusing `OtelExporter` for transmission and
+ span management.
+ See the README in `observability/arize/README.md` for more details.
+
+ ### Patch Changes
+
+ - fix(observability): Add ParentSpanContext to MastraSpans with parentage ([#9085](https://github.com/mastra-ai/mastra/pull/9085))
+
+ - Update peerdeps to 0.23.0-0 ([#9043](https://github.com/mastra-ai/mastra/pull/9043))
+
+ - Updated dependencies [[`efb5ed9`](https://github.com/mastra-ai/mastra/commit/efb5ed946ae7f410bc68c9430beb4b010afd25ec), [`8ea07b4`](https://github.com/mastra-ai/mastra/commit/8ea07b4bdc73e4218437dbb6dcb0f4b23e745a44), [`ba201b8`](https://github.com/mastra-ai/mastra/commit/ba201b8f8feac4c72350f2dbd52c13c7297ba7b0), [`4fc4136`](https://github.com/mastra-ai/mastra/commit/4fc413652866a8d2240694fddb2562e9edbb70df), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`d10baf5`](https://github.com/mastra-ai/mastra/commit/d10baf5a3c924f2a6654e23a3e318ed03f189b76), [`038c55a`](https://github.com/mastra-ai/mastra/commit/038c55a7090fc1b1513a966386d3072617f836ac), [`182f045`](https://github.com/mastra-ai/mastra/commit/182f0458f25bd70aa774e64fd923c8a483eddbf1), [`7620d2b`](https://github.com/mastra-ai/mastra/commit/7620d2bddeb4fae4c3c0a0b4e672969795fca11a), [`b2365f0`](https://github.com/mastra-ai/mastra/commit/b2365f038dd4c5f06400428b224af963f399ad50), [`9029ba3`](https://github.com/mastra-ai/mastra/commit/9029ba34459c8859fed4c6b73efd8e2d0021e7ba), [`426cc56`](https://github.com/mastra-ai/mastra/commit/426cc561c85ae76a112ded2385532a91f9f9f074), [`00931fb`](https://github.com/mastra-ai/mastra/commit/00931fb1a21aa42c4fbc20c2c40dd62466b8fc8f), [`e473bfe`](https://github.com/mastra-ai/mastra/commit/e473bfe416c0b8e876973c2b6a6f13c394b7a93f), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`648e2ca`](https://github.com/mastra-ai/mastra/commit/648e2ca42da54838c6ccbdaadc6fadd808fa6b86), [`b65c5e0`](https://github.com/mastra-ai/mastra/commit/b65c5e0fe6f3c390a9a8bbcf69304d972c3a4afb)]:
+ - @mastra/core@0.22.0-alpha.1
+
+ ## 0.1.0
 
  ### Minor Changes
 
@@ -12,14 +95,31 @@
 
  - Added tracing of input & output processors (this includes using structuredOutput) ([#8623](https://github.com/mastra-ai/mastra/pull/8623))
 
+ - Add `resourceAttributes` to `OtelExporterConfig` so that attributes like `deployment.environment` can be set in the new OpenTelemetry exporter; a config sketch follows this section. ([#8700](https://github.com/mastra-ai/mastra/pull/8700))
+
  - Update peer dependencies to match core package version bump (0.21.0) ([#8557](https://github.com/mastra-ai/mastra/pull/8557))
 
  - Update peer dependencies to match core package version bump (0.21.0) ([#8626](https://github.com/mastra-ai/mastra/pull/8626))
 
  - Update peer dependencies to match core package version bump (0.21.0) ([#8686](https://github.com/mastra-ai/mastra/pull/8686))
 
- - Updated dependencies [[`1ed9670`](https://github.com/mastra-ai/mastra/commit/1ed9670d3ca50cb60dc2e517738c5eef3968ed27), [`b5a66b7`](https://github.com/mastra-ai/mastra/commit/b5a66b748a14fc8b3f63b04642ddb9621fbcc9e0), [`37a2314`](https://github.com/mastra-ai/mastra/commit/37a23148e0e5a3b40d4f9f098b194671a8a49faf), [`7b1ef57`](https://github.com/mastra-ai/mastra/commit/7b1ef57fc071c2aa2a2e32905b18cd88719c5a39), [`ee68e82`](https://github.com/mastra-ai/mastra/commit/ee68e8289ea4408d29849e899bc6e78b3bd4e843), [`228228b`](https://github.com/mastra-ai/mastra/commit/228228b0b1de9291cb8887587f5cea1a8757ebad), [`ea33930`](https://github.com/mastra-ai/mastra/commit/ea339301e82d6318257720d811b043014ee44064), [`b5a66b7`](https://github.com/mastra-ai/mastra/commit/b5a66b748a14fc8b3f63b04642ddb9621fbcc9e0), [`135d6f2`](https://github.com/mastra-ai/mastra/commit/135d6f22a326ed1dffff858700669dff09d2c9eb), [`59d036d`](https://github.com/mastra-ai/mastra/commit/59d036d4c2706b430b0e3f1f1e0ee853ce16ca04)]:
- - @mastra/core@0.0.0-dynamic-model-router-20251010230835
+ - Updated dependencies [[`1ed9670`](https://github.com/mastra-ai/mastra/commit/1ed9670d3ca50cb60dc2e517738c5eef3968ed27), [`b5a66b7`](https://github.com/mastra-ai/mastra/commit/b5a66b748a14fc8b3f63b04642ddb9621fbcc9e0), [`f59fc1e`](https://github.com/mastra-ai/mastra/commit/f59fc1e406b8912e692f6bff6cfd4754cc8d165c), [`158381d`](https://github.com/mastra-ai/mastra/commit/158381d39335be934b81ef8a1947bccace492c25), [`a1799bc`](https://github.com/mastra-ai/mastra/commit/a1799bcc1b5a1cdc188f2ac0165f17a1c4ac6f7b), [`6ff6094`](https://github.com/mastra-ai/mastra/commit/6ff60946f4ecfebdeef6e21d2b230c2204f2c9b8), [`fb703b9`](https://github.com/mastra-ai/mastra/commit/fb703b9634eeaff1a6eb2b5531ce0f9e8fb04727), [`37a2314`](https://github.com/mastra-ai/mastra/commit/37a23148e0e5a3b40d4f9f098b194671a8a49faf), [`7b1ef57`](https://github.com/mastra-ai/mastra/commit/7b1ef57fc071c2aa2a2e32905b18cd88719c5a39), [`05a9dee`](https://github.com/mastra-ai/mastra/commit/05a9dee3d355694d28847bfffb6289657fcf7dfa), [`e3c1077`](https://github.com/mastra-ai/mastra/commit/e3c107763aedd1643d3def5df450c235da9ff76c), [`1908ca0`](https://github.com/mastra-ai/mastra/commit/1908ca0521f90e43779cc29ab590173ca560443c), [`1bccdb3`](https://github.com/mastra-ai/mastra/commit/1bccdb33eb90cbeba2dc5ece1c2561fb774b26b6), [`5ef944a`](https://github.com/mastra-ai/mastra/commit/5ef944a3721d93105675cac2b2311432ff8cc393), [`d6b186f`](https://github.com/mastra-ai/mastra/commit/d6b186fb08f1caf1b86f73d3a5ee88fb999ca3be), [`ee68e82`](https://github.com/mastra-ai/mastra/commit/ee68e8289ea4408d29849e899bc6e78b3bd4e843), [`228228b`](https://github.com/mastra-ai/mastra/commit/228228b0b1de9291cb8887587f5cea1a8757ebad), [`ea33930`](https://github.com/mastra-ai/mastra/commit/ea339301e82d6318257720d811b043014ee44064), [`65493b3`](https://github.com/mastra-ai/mastra/commit/65493b31c36f6fdb78f9679f7e1ecf0c250aa5ee), [`a998b8f`](https://github.com/mastra-ai/mastra/commit/a998b8f858091c2ec47683e60766cf12d03001e4), [`b5a66b7`](https://github.com/mastra-ai/mastra/commit/b5a66b748a14fc8b3f63b04642ddb9621fbcc9e0), [`8a37bdd`](https://github.com/mastra-ai/mastra/commit/8a37bddb6d8614a32c5b70303d583d80c620ea61), [`135d6f2`](https://github.com/mastra-ai/mastra/commit/135d6f22a326ed1dffff858700669dff09d2c9eb)]:
+ - @mastra/core@0.21.0
+
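A hedged config sketch for the `resourceAttributes` option added above; attribute keys and values are examples only, and the rest of the exporter configuration is omitted:

```ts
import { OtelExporter } from '@mastra/otel-exporter';

const otel = new OtelExporter({
  // Merged into the OTEL resource; keys that duplicate the exporter's built-in
  // defaults (service.name, telemetry.sdk.*) override them.
  resourceAttributes: {
    'deployment.environment': 'production',
    'service.namespace': 'my-team', // example value
  },
  // provider / exporter settings omitted; see the package README
});
```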
+ ## 0.1.0-alpha.1
+
+ ### Minor Changes
+
+ - Update peer dependencies to match core package version bump (0.21.0) ([#8623](https://github.com/mastra-ai/mastra/pull/8623))
+
+ ### Patch Changes
+
+ - Added tracing of input & output processors (this includes using structuredOutput) ([#8623](https://github.com/mastra-ai/mastra/pull/8623))
+
+ - Add `resourceAttributes` to `OtelExporterConfig` so that attributes like `deployment.environment` can be set in the new OpenTelemetry exporter. ([#8700](https://github.com/mastra-ai/mastra/pull/8700))
+
+ - Updated dependencies [[`1ed9670`](https://github.com/mastra-ai/mastra/commit/1ed9670d3ca50cb60dc2e517738c5eef3968ed27), [`158381d`](https://github.com/mastra-ai/mastra/commit/158381d39335be934b81ef8a1947bccace492c25), [`fb703b9`](https://github.com/mastra-ai/mastra/commit/fb703b9634eeaff1a6eb2b5531ce0f9e8fb04727), [`37a2314`](https://github.com/mastra-ai/mastra/commit/37a23148e0e5a3b40d4f9f098b194671a8a49faf), [`05a9dee`](https://github.com/mastra-ai/mastra/commit/05a9dee3d355694d28847bfffb6289657fcf7dfa), [`e3c1077`](https://github.com/mastra-ai/mastra/commit/e3c107763aedd1643d3def5df450c235da9ff76c), [`1bccdb3`](https://github.com/mastra-ai/mastra/commit/1bccdb33eb90cbeba2dc5ece1c2561fb774b26b6), [`5ef944a`](https://github.com/mastra-ai/mastra/commit/5ef944a3721d93105675cac2b2311432ff8cc393), [`d6b186f`](https://github.com/mastra-ai/mastra/commit/d6b186fb08f1caf1b86f73d3a5ee88fb999ca3be), [`65493b3`](https://github.com/mastra-ai/mastra/commit/65493b31c36f6fdb78f9679f7e1ecf0c250aa5ee), [`a998b8f`](https://github.com/mastra-ai/mastra/commit/a998b8f858091c2ec47683e60766cf12d03001e4), [`8a37bdd`](https://github.com/mastra-ai/mastra/commit/8a37bddb6d8614a32c5b70303d583d80c620ea61)]:
+ - @mastra/core@0.21.0-alpha.1
 
  ## 0.0.4-alpha.0
 
package/README.md CHANGED
@@ -382,8 +382,8 @@ The exporter maps Mastra's AI tracing data to OTEL-compliant attributes:
  - `gen_ai.request.top_p` - Top-p sampling parameter
  - `gen_ai.request.top_k` - Top-k sampling parameter
  - `gen_ai.response.finish_reasons` - Reason for completion
- - `gen_ai.prompt` - Input prompt (for LLM spans)
- - `gen_ai.completion` - Model output (for LLM spans)
+ - `gen_ai.prompt` - Input prompt (for Model spans)
+ - `gen_ai.completion` - Model output (for Model spans)
 
  #### Tool Attributes
 
package/dist/ai-tracing.d.ts CHANGED
@@ -19,6 +19,8 @@ export declare class OtelExporter implements AITracingExporter {
  */
  init(config: TracingConfig): void;
  private setupExporter;
+ private setupProcessor;
+ private setup;
  exportEvent(event: AITracingEvent): Promise<void>;
  private exportSpan;
  shutdown(): Promise<void>;
package/dist/ai-tracing.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ai-tracing.d.ts","sourceRoot":"","sources":["../src/ai-tracing.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,KAAK,EAAE,iBAAiB,EAAE,cAAc,EAAqB,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAiBnH,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAC;AAErD,qBAAa,YAAa,YAAW,iBAAiB;IACpD,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,SAAS,CAAC,CAAqB;IACvC,OAAO,CAAC,QAAQ,CAAC,CAAe;IAChC,OAAO,CAAC,OAAO,CAAkB;IACjC,OAAO,CAAC,UAAU,CAAkB;IACpC,OAAO,CAAC,MAAM,CAAgB;IAE9B,IAAI,SAAmB;gBAEX,MAAM,EAAE,kBAAkB;IAWtC;;OAEG;IACH,IAAI,CAAC,MAAM,EAAE,aAAa,GAAG,IAAI;YAInB,aAAa;IAiHrB,WAAW,CAAC,KAAK,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;YAgBzC,UAAU;IA8BlB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAMhC"}
+ {"version":3,"file":"ai-tracing.d.ts","sourceRoot":"","sources":["../src/ai-tracing.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,KAAK,EAAE,iBAAiB,EAAE,cAAc,EAAqB,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAiBnH,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAC;AAErD,qBAAa,YAAa,YAAW,iBAAiB;IACpD,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,SAAS,CAAC,CAAqB;IACvC,OAAO,CAAC,QAAQ,CAAC,CAAe;IAChC,OAAO,CAAC,OAAO,CAAkB;IACjC,OAAO,CAAC,UAAU,CAAkB;IACpC,OAAO,CAAC,MAAM,CAAgB;IAE9B,IAAI,SAAmB;gBAEX,MAAM,EAAE,kBAAkB;IAWtC;;OAEG;IACH,IAAI,CAAC,MAAM,EAAE,aAAa,GAAG,IAAI;YAInB,aAAa;YA6Fb,cAAc;YAqCd,KAAK;IAOb,WAAW,CAAC,KAAK,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;YAgBzC,UAAU;IA8BlB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAMhC"}
package/dist/index.cjs CHANGED
@@ -209,6 +209,7 @@ var MastraReadableSpan = class {
  name;
  kind;
  spanContext;
+ parentSpanContext;
  parentSpanId;
  startTime;
  endTime;
@@ -276,6 +277,14 @@ var MastraReadableSpan = class {
  traceFlags: api.TraceFlags.SAMPLED,
  isRemote: false
  });
+ if (parentSpanId) {
+ this.parentSpanContext = {
+ traceId: aiSpan.traceId,
+ spanId: parentSpanId,
+ traceFlags: api.TraceFlags.SAMPLED,
+ isRemote: false
+ };
+ }
  this.resource = resource || {};
  this.instrumentationLibrary = instrumentationLibrary || {
  name: "@mastra/otel",
@@ -296,9 +305,9 @@ var MastraReadableSpan = class {
 
  // src/span-converter.ts
  var SPAN_KIND_MAPPING = {
- // LLM operations are CLIENT spans (calling external AI services)
- [aiTracing.AISpanType.LLM_GENERATION]: api.SpanKind.CLIENT,
- [aiTracing.AISpanType.LLM_CHUNK]: api.SpanKind.CLIENT,
+ // Model operations are CLIENT spans (calling external AI services)
+ [aiTracing.AISpanType.MODEL_GENERATION]: api.SpanKind.CLIENT,
+ [aiTracing.AISpanType.MODEL_CHUNK]: api.SpanKind.CLIENT,
  // MCP tool calls are CLIENT (external service calls)
  [aiTracing.AISpanType.MCP_TOOL_CALL]: api.SpanKind.CLIENT,
  // Root spans for agent/workflow are SERVER (entry points)
@@ -350,7 +359,7 @@ var SpanConverter = class {
  */
  buildSpanName(aiSpan) {
  switch (aiSpan.type) {
- case aiTracing.AISpanType.LLM_GENERATION: {
+ case aiTracing.AISpanType.MODEL_GENERATION: {
  const attrs = aiSpan.attributes;
  const operation = attrs?.resultType === "tool_selection" ? "tool_selection" : "chat";
  const model = attrs?.model || "unknown";
@@ -395,7 +404,7 @@ var SpanConverter = class {
  if (aiSpan.input !== void 0) {
  const inputStr = typeof aiSpan.input === "string" ? aiSpan.input : JSON.stringify(aiSpan.input);
  attributes["input"] = inputStr;
- if (aiSpan.type === aiTracing.AISpanType.LLM_GENERATION) {
+ if (aiSpan.type === aiTracing.AISpanType.MODEL_GENERATION) {
  attributes["gen_ai.prompt"] = inputStr;
  } else if (aiSpan.type === aiTracing.AISpanType.TOOL_CALL || aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) {
  attributes["gen_ai.tool.input"] = inputStr;
@@ -404,64 +413,64 @@ var SpanConverter = class {
  }
  if (aiSpan.output !== void 0) {
  const outputStr = typeof aiSpan.output === "string" ? aiSpan.output : JSON.stringify(aiSpan.output);
  attributes["output"] = outputStr;
- if (aiSpan.type === aiTracing.AISpanType.LLM_GENERATION) {
+ if (aiSpan.type === aiTracing.AISpanType.MODEL_GENERATION) {
  attributes["gen_ai.completion"] = outputStr;
  } else if (aiSpan.type === aiTracing.AISpanType.TOOL_CALL || aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) {
  attributes["gen_ai.tool.output"] = outputStr;
  }
- if (aiSpan.type === aiTracing.AISpanType.LLM_GENERATION && aiSpan.attributes) {
- const llmAttrs = aiSpan.attributes;
- if (llmAttrs.model) {
- attributes["gen_ai.request.model"] = llmAttrs.model;
+ if (aiSpan.type === aiTracing.AISpanType.MODEL_GENERATION && aiSpan.attributes) {
+ const modelAttrs = aiSpan.attributes;
+ if (modelAttrs.model) {
+ attributes["gen_ai.request.model"] = modelAttrs.model;
  }
- if (llmAttrs.provider) {
- attributes["gen_ai.system"] = llmAttrs.provider;
+ if (modelAttrs.provider) {
+ attributes["gen_ai.system"] = modelAttrs.provider;
  }
- if (llmAttrs.usage) {
- const inputTokens = llmAttrs.usage.inputTokens ?? llmAttrs.usage.promptTokens;
- const outputTokens = llmAttrs.usage.outputTokens ?? llmAttrs.usage.completionTokens;
+ if (modelAttrs.usage) {
+ const inputTokens = modelAttrs.usage.inputTokens ?? modelAttrs.usage.promptTokens;
+ const outputTokens = modelAttrs.usage.outputTokens ?? modelAttrs.usage.completionTokens;
  if (inputTokens !== void 0) {
  attributes["gen_ai.usage.input_tokens"] = inputTokens;
  }
  if (outputTokens !== void 0) {
  attributes["gen_ai.usage.output_tokens"] = outputTokens;
  }
- if (llmAttrs.usage.totalTokens !== void 0) {
- attributes["gen_ai.usage.total_tokens"] = llmAttrs.usage.totalTokens;
+ if (modelAttrs.usage.totalTokens !== void 0) {
+ attributes["gen_ai.usage.total_tokens"] = modelAttrs.usage.totalTokens;
  }
- if (llmAttrs.usage.reasoningTokens !== void 0) {
- attributes["gen_ai.usage.reasoning_tokens"] = llmAttrs.usage.reasoningTokens;
+ if (modelAttrs.usage.reasoningTokens !== void 0) {
+ attributes["gen_ai.usage.reasoning_tokens"] = modelAttrs.usage.reasoningTokens;
  }
- if (llmAttrs.usage.cachedInputTokens !== void 0) {
- attributes["gen_ai.usage.cached_input_tokens"] = llmAttrs.usage.cachedInputTokens;
+ if (modelAttrs.usage.cachedInputTokens !== void 0) {
+ attributes["gen_ai.usage.cached_input_tokens"] = modelAttrs.usage.cachedInputTokens;
  }
  }
- if (llmAttrs.parameters) {
- if (llmAttrs.parameters.temperature !== void 0) {
- attributes["gen_ai.request.temperature"] = llmAttrs.parameters.temperature;
+ if (modelAttrs.parameters) {
+ if (modelAttrs.parameters.temperature !== void 0) {
+ attributes["gen_ai.request.temperature"] = modelAttrs.parameters.temperature;
  }
- if (llmAttrs.parameters.maxOutputTokens !== void 0) {
- attributes["gen_ai.request.max_tokens"] = llmAttrs.parameters.maxOutputTokens;
+ if (modelAttrs.parameters.maxOutputTokens !== void 0) {
+ attributes["gen_ai.request.max_tokens"] = modelAttrs.parameters.maxOutputTokens;
  }
- if (llmAttrs.parameters.topP !== void 0) {
- attributes["gen_ai.request.top_p"] = llmAttrs.parameters.topP;
+ if (modelAttrs.parameters.topP !== void 0) {
+ attributes["gen_ai.request.top_p"] = modelAttrs.parameters.topP;
  }
- if (llmAttrs.parameters.topK !== void 0) {
- attributes["gen_ai.request.top_k"] = llmAttrs.parameters.topK;
+ if (modelAttrs.parameters.topK !== void 0) {
+ attributes["gen_ai.request.top_k"] = modelAttrs.parameters.topK;
  }
- if (llmAttrs.parameters.presencePenalty !== void 0) {
- attributes["gen_ai.request.presence_penalty"] = llmAttrs.parameters.presencePenalty;
+ if (modelAttrs.parameters.presencePenalty !== void 0) {
+ attributes["gen_ai.request.presence_penalty"] = modelAttrs.parameters.presencePenalty;
  }
- if (llmAttrs.parameters.frequencyPenalty !== void 0) {
- attributes["gen_ai.request.frequency_penalty"] = llmAttrs.parameters.frequencyPenalty;
+ if (modelAttrs.parameters.frequencyPenalty !== void 0) {
+ attributes["gen_ai.request.frequency_penalty"] = modelAttrs.parameters.frequencyPenalty;
  }
- if (llmAttrs.parameters.stopSequences) {
- attributes["gen_ai.request.stop_sequences"] = JSON.stringify(llmAttrs.parameters.stopSequences);
+ if (modelAttrs.parameters.stopSequences) {
+ attributes["gen_ai.request.stop_sequences"] = JSON.stringify(modelAttrs.parameters.stopSequences);
  }
  }
- if (llmAttrs.finishReason) {
- attributes["gen_ai.response.finish_reasons"] = llmAttrs.finishReason;
+ if (modelAttrs.finishReason) {
+ attributes["gen_ai.response.finish_reasons"] = modelAttrs.finishReason;
  }
  }
  if ((aiSpan.type === aiTracing.AISpanType.TOOL_CALL || aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) && aiSpan.attributes) {
@@ -490,6 +499,7 @@ var SpanConverter = class {
  const agentAttrs = aiSpan.attributes;
  if (agentAttrs.agentId) {
  attributes["agent.id"] = agentAttrs.agentId;
+ attributes["gen_ai.agent.id"] = agentAttrs.agentId;
  }
  if (agentAttrs.maxSteps) {
  attributes["agent.max_steps"] = agentAttrs.maxSteps;
@@ -547,7 +557,7 @@ var SpanConverter = class {
  */
  getOperationName(aiSpan) {
  switch (aiSpan.type) {
- case aiTracing.AISpanType.LLM_GENERATION: {
+ case aiTracing.AISpanType.MODEL_GENERATION: {
  const attrs = aiSpan.attributes;
  return attrs?.resultType === "tool_selection" ? "tool_selection" : "chat";
  }
@@ -610,7 +620,7 @@ var OtelExporter = class {
  this.tracingConfig = config;
  }
  async setupExporter() {
- if (this.isSetup) return;
+ if (this.isSetup || this.exporter) return;
  if (!this.config.provider) {
  this.logger.error(
  '[OtelExporter] Provider configuration is required. Use the "custom" provider for generic endpoints.'
@@ -625,6 +635,10 @@ var OtelExporter = class {
  this.isSetup = true;
  return;
  }
+ if (this.config.exporter) {
+ this.exporter = this.config.exporter;
+ return;
+ }
  const endpoint = resolved.endpoint;
  const headers = resolved.headers;
  const protocol = resolved.protocol;
@@ -678,7 +692,10 @@ var OtelExporter = class {
  this.isSetup = true;
  return;
  }
- const resource = resources.resourceFromAttributes({
+ }
+ async setupProcessor() {
+ if (this.processor || this.isSetup) return;
+ let resource = resources.resourceFromAttributes({
  [semanticConventions.ATTR_SERVICE_NAME]: this.tracingConfig?.serviceName || "mastra-service",
  [semanticConventions.ATTR_SERVICE_VERSION]: "1.0.0",
  // Add telemetry SDK information
@@ -686,6 +703,12 @@ var OtelExporter = class {
  [semanticConventions.ATTR_TELEMETRY_SDK_VERSION]: "1.0.0",
  [semanticConventions.ATTR_TELEMETRY_SDK_LANGUAGE]: "nodejs"
  });
+ if (this.config.resourceAttributes) {
+ resource = resource.merge(
+ // Duplicate attributes from config will override defaults above
+ resources.resourceFromAttributes(this.config.resourceAttributes)
+ );
+ }
  this.spanConverter = new SpanConverter(resource);
  this.processor = new sdkTraceBase.BatchSpanProcessor(this.exporter, {
  maxExportBatchSize: this.config.batchSize || 512,
@@ -700,6 +723,11 @@ var OtelExporter = class {
  this.logger.debug(
  `[OtelExporter] Using BatchSpanProcessor (batch size: ${this.config.batchSize || 512}, delay: 5s)`
  );
+ }
+ async setup() {
+ if (this.isSetup) return;
+ await this.setupExporter();
+ await this.setupProcessor();
  this.isSetup = true;
  }
  async exportEvent(event) {
@@ -714,7 +742,7 @@ var OtelExporter = class {
  }
  async exportSpan(span) {
  if (!this.isSetup) {
- await this.setupExporter();
+ await this.setup();
  }
  if (this.isDisabled || !this.processor) {
  return;