@mastra/otel-exporter 0.0.0-usechat-duplicate-20251016110554 → 0.0.0-vnext-20251104230439

package/CHANGELOG.md CHANGED
@@ -1,11 +1,98 @@
1
1
  # @mastra/otel-exporter
2
2
 
3
- ## 0.0.0-usechat-duplicate-20251016110554
3
+ ## 0.0.0-vnext-20251104230439
4
+
5
+ ### Major Changes
6
+
7
+ - Mark as stable ([`83d5942`](https://github.com/mastra-ai/mastra/commit/83d5942669ce7bba4a6ca4fd4da697a10eb5ebdc))
8
+
9
+ - Moved ai-tracing code into `@mastra/observability` ([#9661](https://github.com/mastra-ai/mastra/pull/9661))
10
+
11
+ ### Minor Changes
12
+
13
+ - Rename LLM span types and attributes to use Model prefix ([#9105](https://github.com/mastra-ai/mastra/pull/9105))
14
+
15
+ BREAKING CHANGE: This release renames AI tracing span types and attribute interfaces to use the "Model" prefix instead of "LLM":
16
+ - `AISpanType.LLM_GENERATION` → `AISpanType.MODEL_GENERATION`
17
+ - `AISpanType.LLM_STEP` → `AISpanType.MODEL_STEP`
18
+ - `AISpanType.LLM_CHUNK` → `AISpanType.MODEL_CHUNK`
19
+ - `LLMGenerationAttributes` → `ModelGenerationAttributes`
20
+ - `LLMStepAttributes` → `ModelStepAttributes`
21
+ - `LLMChunkAttributes` → `ModelChunkAttributes`
22
+ - `InternalSpans.LLM` → `InternalSpans.MODEL`
23
+
24
+ This change better reflects that these span types apply to all AI models, not just Large Language Models.
25
+
26
+ Migration guide:
27
+ - Update all imports: `import { ModelGenerationAttributes } from '@mastra/core/ai-tracing'`
28
+ - Update span type references: `AISpanType.MODEL_GENERATION`
29
+ - Update InternalSpans usage: `InternalSpans.MODEL`
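For orientation, a minimal sketch of what the rename looks like in user code, assuming the import path given in the migration guide above; the `span` value is a stand-in rather than a real Mastra object:

```typescript
// Old names (pre-1.0): AISpanType.LLM_GENERATION, LLMGenerationAttributes, InternalSpans.LLM
// New names:           AISpanType.MODEL_GENERATION, ModelGenerationAttributes, InternalSpans.MODEL

import { AISpanType, InternalSpans } from '@mastra/core/ai-tracing';
import type { ModelGenerationAttributes } from '@mastra/core/ai-tracing';

// Stand-in span for illustration; real spans come from Mastra's AI tracing.
declare const span: { type: AISpanType; attributes?: ModelGenerationAttributes };

if (span.type === AISpanType.MODEL_GENERATION) {
  // Attributes are now typed by ModelGenerationAttributes (formerly LLMGenerationAttributes).
  console.log(span.attributes?.model);
}

// Internal span filtering uses the renamed enum member.
const internalKind = InternalSpans.MODEL; // was InternalSpans.LLM
void internalKind;
```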
4
30
 
5
31
  ### Patch Changes
6
32
 
7
- - Updated dependencies [[`ca85c93`](https://github.com/mastra-ai/mastra/commit/ca85c932b232e6ad820c811ec176d98e68c59b0a), [`a1d40f8`](https://github.com/mastra-ai/mastra/commit/a1d40f88d4ce42c4508774ad22e38ac582157af2), [`01c4a25`](https://github.com/mastra-ai/mastra/commit/01c4a2506c514d5e861c004d3d2fb3791c6391f3)]:
8
- - @mastra/core@0.0.0-usechat-duplicate-20251016110554
33
+ - Fixed import issues in exporters. ([#9316](https://github.com/mastra-ai/mastra/pull/9316))
34
+
35
+ - Update peer dependencies to match core package version bump (1.0.0) ([#9237](https://github.com/mastra-ai/mastra/pull/9237))
36
+
37
+ - Updated dependencies [[`f743dbb`](https://github.com/mastra-ai/mastra/commit/f743dbb8b40d1627b5c10c0e6fc154f4ebb6e394), [`fec5129`](https://github.com/mastra-ai/mastra/commit/fec5129de7fc64423ea03661a56cef31dc747a0d), [`0e8ed46`](https://github.com/mastra-ai/mastra/commit/0e8ed467c54d6901a6a365f270ec15d6faadb36c), [`6c049d9`](https://github.com/mastra-ai/mastra/commit/6c049d94063fdcbd5b81c4912a2bf82a92c9cc0b), [`3443770`](https://github.com/mastra-ai/mastra/commit/3443770662df8eb24c9df3589b2792d78cfcb811), [`f0a07e0`](https://github.com/mastra-ai/mastra/commit/f0a07e0111b3307c5fabfa4094c5c2cfb734fbe6), [`1521d71`](https://github.com/mastra-ai/mastra/commit/1521d716e5daedc74690c983fbd961123c56756b), [`9e1911d`](https://github.com/mastra-ai/mastra/commit/9e1911db2b4db85e0e768c3f15e0d61e319869f6), [`ebac155`](https://github.com/mastra-ai/mastra/commit/ebac15564a590117db7078233f927a7e28a85106), [`5948e6a`](https://github.com/mastra-ai/mastra/commit/5948e6a5146c83666ba3f294b2be576c82a513fb), [`83b08dc`](https://github.com/mastra-ai/mastra/commit/83b08dcf1bfcc915efab23c09207df90fa247908), [`8940859`](https://github.com/mastra-ai/mastra/commit/89408593658199b4ad67f7b65e888f344e64a442), [`f0f8f12`](https://github.com/mastra-ai/mastra/commit/f0f8f125c308f2d0fd36942ef652fd852df7522f), [`e629310`](https://github.com/mastra-ai/mastra/commit/e629310f1a73fa236d49ec7a1d1cceb6229dc7cc), [`4c6b492`](https://github.com/mastra-ai/mastra/commit/4c6b492c4dd591c6a592520c1f6855d6e936d71f), [`dff01d8`](https://github.com/mastra-ai/mastra/commit/dff01d81ce1f4e4087cfac20fa868e6db138dd14), [`9d819d5`](https://github.com/mastra-ai/mastra/commit/9d819d54b61481639f4008e4694791bddf187edd), [`71c8d6c`](https://github.com/mastra-ai/mastra/commit/71c8d6c161253207b2b9588bdadb7eed604f7253), [`6179a9b`](https://github.com/mastra-ai/mastra/commit/6179a9ba36ffac326de3cc3c43cdc8028d37c251), [`00f4921`](https://github.com/mastra-ai/mastra/commit/00f4921dd2c91a1e5446799599ef7116a8214a1a), [`7051bf3`](https://github.com/mastra-ai/mastra/commit/7051bf38b3b122a069008f861f7bfc004a6d9f6e), [`a8f1494`](https://github.com/mastra-ai/mastra/commit/a8f1494f4bbdc2770bcf327d4c7d869e332183f1), [`0793497`](https://github.com/mastra-ai/mastra/commit/079349753620c40246ffd673e3f9d7d9820beff3), [`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`a854ede`](https://github.com/mastra-ai/mastra/commit/a854ede62bf5ac0945a624ac48913dd69c73aabf), [`c576fc0`](https://github.com/mastra-ai/mastra/commit/c576fc0b100b2085afded91a37c97a0ea0ec09c7), [`3defc80`](https://github.com/mastra-ai/mastra/commit/3defc80cf2b88a1b7fc1cc4ddcb91e982a614609), [`16153fe`](https://github.com/mastra-ai/mastra/commit/16153fe7eb13c99401f48e6ca32707c965ee28b9), [`9f4a683`](https://github.com/mastra-ai/mastra/commit/9f4a6833e88b52574665c028fd5508ad5c2f6004), [`bc94344`](https://github.com/mastra-ai/mastra/commit/bc943444a1342d8a662151b7bce1df7dae32f59c), [`57d157f`](https://github.com/mastra-ai/mastra/commit/57d157f0b163a95c3e6c9eae31bdb11d1bfc64f9), [`2a90c55`](https://github.com/mastra-ai/mastra/commit/2a90c55a86a9210697d5adaab5ee94584b079adc), [`96d35f6`](https://github.com/mastra-ai/mastra/commit/96d35f61376bc2b1bf148648a2c1985bd51bef55), [`5cbe88a`](https://github.com/mastra-ai/mastra/commit/5cbe88aefbd9f933bca669fd371ea36bf939ac6d), [`a1bd7b8`](https://github.com/mastra-ai/mastra/commit/a1bd7b8571db16b94eb01588f451a74758c96d65), [`d78b38d`](https://github.com/mastra-ai/mastra/commit/d78b38d898fce285260d3bbb4befade54331617f), 
[`0633100`](https://github.com/mastra-ai/mastra/commit/0633100a911ad22f5256471bdf753da21c104742), [`c710c16`](https://github.com/mastra-ai/mastra/commit/c710c1652dccfdc4111c8412bca7a6bb1d48b441), [`cfae733`](https://github.com/mastra-ai/mastra/commit/cfae73394f4920635e6c919c8e95ff9a0788e2e5), [`e3dfda7`](https://github.com/mastra-ai/mastra/commit/e3dfda7b11bf3b8c4bb55637028befb5f387fc74), [`844ea5d`](https://github.com/mastra-ai/mastra/commit/844ea5dc0c248961e7bf73629ae7dcff503e853c), [`f0f8f12`](https://github.com/mastra-ai/mastra/commit/f0f8f125c308f2d0fd36942ef652fd852df7522f), [`0d7618b`](https://github.com/mastra-ai/mastra/commit/0d7618bc650bf2800934b243eca5648f4aeed9c2), [`7b763e5`](https://github.com/mastra-ai/mastra/commit/7b763e52fc3eaf699c2a99f2adf418dd46e4e9a5), [`d36cfbb`](https://github.com/mastra-ai/mastra/commit/d36cfbbb6565ba5f827883cc9bb648eb14befdc1), [`3697853`](https://github.com/mastra-ai/mastra/commit/3697853deeb72017d90e0f38a93c1e29221aeca0), [`a534e95`](https://github.com/mastra-ai/mastra/commit/a534e9591f83b3cc1ebff99c67edf4cda7bf81d3), [`9d0e7fe`](https://github.com/mastra-ai/mastra/commit/9d0e7feca8ed98de959f53476ee1456073673348), [`53d927c`](https://github.com/mastra-ai/mastra/commit/53d927cc6f03bff33655b7e2b788da445a08731d), [`22f64bc`](https://github.com/mastra-ai/mastra/commit/22f64bc1d37149480b58bf2fefe35b79a1e3e7d5), [`83d5942`](https://github.com/mastra-ai/mastra/commit/83d5942669ce7bba4a6ca4fd4da697a10eb5ebdc), [`bda6370`](https://github.com/mastra-ai/mastra/commit/bda637009360649aaf579919e7873e33553c273e), [`d7acd8e`](https://github.com/mastra-ai/mastra/commit/d7acd8e987b5d7eff4fd98b0906c17c06a2e83d5), [`c7f1f7d`](https://github.com/mastra-ai/mastra/commit/c7f1f7d24f61f247f018cc2d1f33bf63212959a7), [`0bddc6d`](https://github.com/mastra-ai/mastra/commit/0bddc6d8dbd6f6008c0cba2e4960a2da75a55af1), [`735d8c1`](https://github.com/mastra-ai/mastra/commit/735d8c1c0d19fbc09e6f8b66cf41bc7655993838), [`acf322e`](https://github.com/mastra-ai/mastra/commit/acf322e0f1fd0189684cf529d91c694bea918a45), [`c942802`](https://github.com/mastra-ai/mastra/commit/c942802a477a925b01859a7b8688d4355715caaa), [`a0c8c1b`](https://github.com/mastra-ai/mastra/commit/a0c8c1b87d4fee252aebda73e8637fbe01d761c9), [`cc34739`](https://github.com/mastra-ai/mastra/commit/cc34739c34b6266a91bea561119240a7acf47887), [`c218bd3`](https://github.com/mastra-ai/mastra/commit/c218bd3759e32423735b04843a09404572631014), [`2c4438b`](https://github.com/mastra-ai/mastra/commit/2c4438b87817ab7eed818c7990fef010475af1a3), [`2b8893c`](https://github.com/mastra-ai/mastra/commit/2b8893cb108ef9acb72ee7835cd625610d2c1a4a), [`8e5c75b`](https://github.com/mastra-ai/mastra/commit/8e5c75bdb1d08a42d45309a4c72def4b6890230f), [`fa8409b`](https://github.com/mastra-ai/mastra/commit/fa8409bc39cfd8ba6643b9db5269b90b22e2a2f7), [`173c535`](https://github.com/mastra-ai/mastra/commit/173c535c0645b0da404fe09f003778f0b0d4e019)]:
38
+ - @mastra/core@0.0.0-vnext-20251104230439
39
+ - @mastra/observability@0.0.0-vnext-20251104230439
40
+
41
+ ## 0.2.0
42
+
43
+ ### Minor Changes
44
+
45
+ - feat(otel-exporter): Add customizable 'exporter' constructor parameter ([#8827](https://github.com/mastra-ai/mastra/pull/8827))
46
+
47
+ You can now pass an instantiated class that inherits from `TraceExporter` into `OtelExporter`.
48
+ Doing so bypasses the default package detection, so `OtelExporter` no longer instantiates a `TraceExporter`
49
+ automatically when one is supplied to its constructor.
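For illustration, a rough sketch of the new parameter using `OTLPTraceExporter` from `@opentelemetry/exporter-trace-otlp-http` as the custom exporter; the endpoint and header are placeholders, and `OtelExporterConfig` may still require other fields (such as `provider`) depending on your setup:

```typescript
import { OtelExporter } from '@mastra/otel-exporter';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';

// Instantiate any OTEL-compatible trace exporter yourself.
const customExporter = new OTLPTraceExporter({
  url: 'https://collector.example.com/v1/traces', // placeholder endpoint
  headers: { 'x-api-key': process.env.COLLECTOR_API_KEY ?? '' },
});

// Passing it in skips the default package detection, so OtelExporter
// will not instantiate a TraceExporter of its own.
const exporter = new OtelExporter({
  exporter: customExporter,
});
void exporter;
```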
50
+
51
+ feat(arize): Initial release of @mastra/arize observability package
52
+
53
+ The `@mastra/arize` package exports an `ArizeExporter` class for sending AI
54
+ traces from Mastra to Arize AX, Arize Phoenix, or any OpenInference-compatible collector.
55
+ It sends traces using a `BatchSpanProcessor` over OTLP connections.
56
+ It leverages the `@mastra/otel-exporter` package, reusing `OtelExporter` for transmission and
57
+ span management.
58
+ See the README in `observability/arize/README.md` for more details.
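To make the relationship concrete, a hypothetical sketch of the Arize side; the `ArizeExporter` option names here are invented placeholders, so defer to `observability/arize/README.md` for the real configuration:

```typescript
import { ArizeExporter } from '@mastra/arize';

// Option names below are placeholders, not the documented API; the actual
// configuration (collector endpoint, API key, space/project identifiers, ...)
// is described in observability/arize/README.md.
const arizeExporter = new ArizeExporter({
  endpoint: 'https://phoenix.example.com/v1/traces', // e.g. an Arize Phoenix collector
  apiKey: process.env.ARIZE_API_KEY,
});

// Under the hood it reuses OtelExporter from @mastra/otel-exporter, so it is
// registered with Mastra the same way as the examples in the README diff below.
export default arizeExporter;
```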
59
+
60
+ ### Patch Changes
61
+
62
+ - fix(observability): Add ParentSpanContext to MastraSpans with parentage ([#9085](https://github.com/mastra-ai/mastra/pull/9085))
63
+
64
+ - Update peerdeps to 0.23.0-0 ([#9043](https://github.com/mastra-ai/mastra/pull/9043))
65
+
66
+ - Updated dependencies [[`c67ca32`](https://github.com/mastra-ai/mastra/commit/c67ca32e3c2cf69bfc146580770c720220ca44ac), [`efb5ed9`](https://github.com/mastra-ai/mastra/commit/efb5ed946ae7f410bc68c9430beb4b010afd25ec), [`dbc9e12`](https://github.com/mastra-ai/mastra/commit/dbc9e1216ba575ba59ead4afb727a01215f7de4f), [`99e41b9`](https://github.com/mastra-ai/mastra/commit/99e41b94957cdd25137d3ac12e94e8b21aa01b68), [`c28833c`](https://github.com/mastra-ai/mastra/commit/c28833c5b6d8e10eeffd7f7d39129d53b8bca240), [`8ea07b4`](https://github.com/mastra-ai/mastra/commit/8ea07b4bdc73e4218437dbb6dcb0f4b23e745a44), [`ba201b8`](https://github.com/mastra-ai/mastra/commit/ba201b8f8feac4c72350f2dbd52c13c7297ba7b0), [`f053e89`](https://github.com/mastra-ai/mastra/commit/f053e89160dbd0bd3333fc3492f68231b5c7c349), [`4fc4136`](https://github.com/mastra-ai/mastra/commit/4fc413652866a8d2240694fddb2562e9edbb70df), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`d10baf5`](https://github.com/mastra-ai/mastra/commit/d10baf5a3c924f2a6654e23a3e318ed03f189b76), [`038c55a`](https://github.com/mastra-ai/mastra/commit/038c55a7090fc1b1513a966386d3072617f836ac), [`182f045`](https://github.com/mastra-ai/mastra/commit/182f0458f25bd70aa774e64fd923c8a483eddbf1), [`9a1a485`](https://github.com/mastra-ai/mastra/commit/9a1a4859b855e37239f652bf14b1ecd1029b8c4e), [`9257233`](https://github.com/mastra-ai/mastra/commit/9257233c4ffce09b2bedc2a9adbd70d7a83fa8e2), [`7620d2b`](https://github.com/mastra-ai/mastra/commit/7620d2bddeb4fae4c3c0a0b4e672969795fca11a), [`b2365f0`](https://github.com/mastra-ai/mastra/commit/b2365f038dd4c5f06400428b224af963f399ad50), [`0f1a4c9`](https://github.com/mastra-ai/mastra/commit/0f1a4c984fb4b104b2f0b63ba18c9fa77f567700), [`9029ba3`](https://github.com/mastra-ai/mastra/commit/9029ba34459c8859fed4c6b73efd8e2d0021e7ba), [`426cc56`](https://github.com/mastra-ai/mastra/commit/426cc561c85ae76a112ded2385532a91f9f9f074), [`00931fb`](https://github.com/mastra-ai/mastra/commit/00931fb1a21aa42c4fbc20c2c40dd62466b8fc8f), [`e473bfe`](https://github.com/mastra-ai/mastra/commit/e473bfe416c0b8e876973c2b6a6f13c394b7a93f), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`2db6160`](https://github.com/mastra-ai/mastra/commit/2db6160e2022ff8827c15d30157e684683b934b5), [`8aeea37`](https://github.com/mastra-ai/mastra/commit/8aeea37efdde347c635a67fed56794943b7f74ec), [`02fe153`](https://github.com/mastra-ai/mastra/commit/02fe15351d6021d214da48ec982a0e9e4150bcee), [`648e2ca`](https://github.com/mastra-ai/mastra/commit/648e2ca42da54838c6ccbdaadc6fadd808fa6b86), [`74567b3`](https://github.com/mastra-ai/mastra/commit/74567b3d237ae3915cd0bca3cf55fa0a64e4e4a4), [`b65c5e0`](https://github.com/mastra-ai/mastra/commit/b65c5e0fe6f3c390a9a8bbcf69304d972c3a4afb), [`15a1733`](https://github.com/mastra-ai/mastra/commit/15a1733074cee8bd37370e1af34cd818e89fa7ac), [`fc2a774`](https://github.com/mastra-ai/mastra/commit/fc2a77468981aaddc3e77f83f0c4ad4a4af140da), [`4e08933`](https://github.com/mastra-ai/mastra/commit/4e08933625464dfde178347af5b6278fcf34188e)]:
67
+ - @mastra/core@0.22.0
68
+
69
+ ## 0.2.0-alpha.0
70
+
71
+ ### Minor Changes
72
+
73
+ - feat(otel-exporter): Add customizable 'exporter' constructor parameter ([#8827](https://github.com/mastra-ai/mastra/pull/8827))
74
+
75
+ You can now pass an instantiated class that inherits from `TraceExporter` into `OtelExporter`.
76
+ Doing so bypasses the default package detection, so `OtelExporter` no longer instantiates a `TraceExporter`
77
+ automatically when one is supplied to its constructor.
78
+
79
+ feat(arize): Initial release of @mastra/arize observability package
80
+
81
+ The `@mastra/arize` package exports an `ArizeExporter` class for sending AI
81
+ traces from Mastra to Arize AX, Arize Phoenix, or any OpenInference-compatible collector.
82
+ It sends traces using a `BatchSpanProcessor` over OTLP connections.
83
+ It leverages the `@mastra/otel-exporter` package, reusing `OtelExporter` for transmission and
84
+ span management.
85
+ See the README in `observability/arize/README.md` for more details.
87
+
88
+ ### Patch Changes
89
+
90
+ - fix(observability): Add ParentSpanContext to MastraSpans with parentage ([#9085](https://github.com/mastra-ai/mastra/pull/9085))
91
+
92
+ - Update peerdeps to 0.23.0-0 ([#9043](https://github.com/mastra-ai/mastra/pull/9043))
93
+
94
+ - Updated dependencies [[`efb5ed9`](https://github.com/mastra-ai/mastra/commit/efb5ed946ae7f410bc68c9430beb4b010afd25ec), [`8ea07b4`](https://github.com/mastra-ai/mastra/commit/8ea07b4bdc73e4218437dbb6dcb0f4b23e745a44), [`ba201b8`](https://github.com/mastra-ai/mastra/commit/ba201b8f8feac4c72350f2dbd52c13c7297ba7b0), [`4fc4136`](https://github.com/mastra-ai/mastra/commit/4fc413652866a8d2240694fddb2562e9edbb70df), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`d10baf5`](https://github.com/mastra-ai/mastra/commit/d10baf5a3c924f2a6654e23a3e318ed03f189b76), [`038c55a`](https://github.com/mastra-ai/mastra/commit/038c55a7090fc1b1513a966386d3072617f836ac), [`182f045`](https://github.com/mastra-ai/mastra/commit/182f0458f25bd70aa774e64fd923c8a483eddbf1), [`7620d2b`](https://github.com/mastra-ai/mastra/commit/7620d2bddeb4fae4c3c0a0b4e672969795fca11a), [`b2365f0`](https://github.com/mastra-ai/mastra/commit/b2365f038dd4c5f06400428b224af963f399ad50), [`9029ba3`](https://github.com/mastra-ai/mastra/commit/9029ba34459c8859fed4c6b73efd8e2d0021e7ba), [`426cc56`](https://github.com/mastra-ai/mastra/commit/426cc561c85ae76a112ded2385532a91f9f9f074), [`00931fb`](https://github.com/mastra-ai/mastra/commit/00931fb1a21aa42c4fbc20c2c40dd62466b8fc8f), [`e473bfe`](https://github.com/mastra-ai/mastra/commit/e473bfe416c0b8e876973c2b6a6f13c394b7a93f), [`b78e04d`](https://github.com/mastra-ai/mastra/commit/b78e04d935a16ecb1e59c5c96e564903527edddd), [`648e2ca`](https://github.com/mastra-ai/mastra/commit/648e2ca42da54838c6ccbdaadc6fadd808fa6b86), [`b65c5e0`](https://github.com/mastra-ai/mastra/commit/b65c5e0fe6f3c390a9a8bbcf69304d972c3a4afb)]:
95
+ - @mastra/core@0.22.0-alpha.1
9
96
 
10
97
  ## 0.1.0
11
98
 
package/README.md CHANGED
@@ -19,7 +19,7 @@ npm install @mastra/otel-exporter @opentelemetry/exporter-trace-otlp-grpc @grpc/
19
19
 
20
20
  ```typescript
21
21
  import { OtelExporter } from '@mastra/otel-exporter';
22
- import { Mastra } from '@mastra/core';
22
+ import { Mastra } from '@mastra/core/mastra';
23
23
 
24
24
  const mastra = new Mastra({
25
25
  ...,
@@ -58,7 +58,7 @@ npm install @mastra/otel-exporter @opentelemetry/exporter-trace-otlp-proto
58
58
 
59
59
  ```typescript
60
60
  import { OtelExporter } from '@mastra/otel-exporter';
61
- import { Mastra } from '@mastra/core';
61
+ import { Mastra } from '@mastra/core/mastra';
62
62
 
63
63
  const mastra = new Mastra({
64
64
  ...,
@@ -95,7 +95,7 @@ npm install @mastra/otel-exporter @opentelemetry/exporter-trace-otlp-proto
95
95
 
96
96
  ```typescript
97
97
  import { OtelExporter } from '@mastra/otel-exporter';
98
- import { Mastra } from '@mastra/core';
98
+ import { Mastra } from '@mastra/core/mastra';
99
99
 
100
100
  const mastra = new Mastra({
101
101
  ...,
@@ -132,7 +132,7 @@ npm install @mastra/otel-exporter @opentelemetry/exporter-trace-otlp-http
132
132
 
133
133
  ```typescript
134
134
  import { OtelExporter } from '@mastra/otel-exporter';
135
- import { Mastra } from '@mastra/core';
135
+ import { Mastra } from '@mastra/core/mastra';
136
136
 
137
137
  const mastra = new Mastra({
138
138
  ...,
@@ -169,7 +169,7 @@ npm install @mastra/otel-exporter @opentelemetry/exporter-trace-otlp-proto
169
169
 
170
170
  ```typescript
171
171
  import { OtelExporter } from '@mastra/otel-exporter';
172
- import { Mastra } from '@mastra/core';
172
+ import { Mastra } from '@mastra/core/mastra';
173
173
 
174
174
  const mastra = new Mastra({
175
175
  ...,
@@ -208,7 +208,7 @@ npm install @mastra/otel-exporter @opentelemetry/exporter-zipkin
208
208
 
209
209
  ```typescript
210
210
  import { OtelExporter } from '@mastra/otel-exporter';
211
- import { Mastra } from '@mastra/core';
211
+ import { Mastra } from '@mastra/core/mastra';
212
212
 
213
213
  const mastra = new Mastra({
214
214
  ...,
@@ -258,7 +258,7 @@ Most providers recommend HTTP/Protobuf for production use.
258
258
 
259
259
  ```typescript
260
260
  import { OtelExporter } from '@mastra/otel-exporter';
261
- import { Mastra } from '@mastra/core';
261
+ import { Mastra } from '@mastra/core/mastra';
262
262
 
263
263
  const mastra = new Mastra({
264
264
  ...,
@@ -382,8 +382,8 @@ The exporter maps Mastra's AI tracing data to OTEL-compliant attributes:
382
382
  - `gen_ai.request.top_p` - Top-p sampling parameter
383
383
  - `gen_ai.request.top_k` - Top-k sampling parameter
384
384
  - `gen_ai.response.finish_reasons` - Reason for completion
385
- - `gen_ai.prompt` - Input prompt (for LLM spans)
386
- - `gen_ai.completion` - Model output (for LLM spans)
385
+ - `gen_ai.prompt` - Input prompt (for Model spans)
386
+ - `gen_ai.completion` - Model output (for Model spans)
387
387
 
388
388
  #### Tool Attributes
389
389
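For a concrete sense of this mapping, here is a hypothetical attribute payload for a single `MODEL_GENERATION` span, assembled from the keys listed above and in the span converter; all values are invented:

```typescript
// Hypothetical OTEL attributes for one MODEL_GENERATION span; keys follow the
// mapping documented above, values are made up for illustration.
export const exampleModelSpanAttributes = {
  'gen_ai.system': 'openai',
  'gen_ai.request.model': 'gpt-4o-mini',
  'gen_ai.request.temperature': 0.2,
  'gen_ai.request.top_p': 0.95,
  'gen_ai.usage.input_tokens': 812,
  'gen_ai.usage.output_tokens': 143,
  'gen_ai.usage.total_tokens': 955,
  'gen_ai.response.finish_reasons': 'stop',
  'gen_ai.prompt': '{"messages":[{"role":"user","content":"..."}]}',
  'gen_ai.completion': 'Hello! How can I help?',
};
```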
 
package/dist/ai-tracing.d.ts CHANGED
@@ -1,25 +1,26 @@
1
1
  /**
2
2
  * OpenTelemetry AI Tracing Exporter for Mastra
3
3
  */
4
- import type { AITracingExporter, AITracingEvent, TracingConfig } from '@mastra/core/ai-tracing';
4
+ import type { AITracingEvent, InitExporterOptions } from '@mastra/core/observability';
5
+ import { BaseExporter } from '@mastra/observability';
5
6
  import type { OtelExporterConfig } from './types.js';
6
- export declare class OtelExporter implements AITracingExporter {
7
+ export declare class OtelExporter extends BaseExporter {
7
8
  private config;
8
9
  private tracingConfig?;
9
10
  private spanConverter;
10
11
  private processor?;
11
12
  private exporter?;
12
13
  private isSetup;
13
- private isDisabled;
14
- private logger;
15
14
  name: string;
16
15
  constructor(config: OtelExporterConfig);
17
16
  /**
18
17
  * Initialize with tracing configuration
19
18
  */
20
- init(config: TracingConfig): void;
19
+ init(options: InitExporterOptions): void;
21
20
  private setupExporter;
22
- exportEvent(event: AITracingEvent): Promise<void>;
21
+ private setupProcessor;
22
+ private setup;
23
+ protected _exportEvent(event: AITracingEvent): Promise<void>;
23
24
  private exportSpan;
24
25
  shutdown(): Promise<void>;
25
26
  }
package/dist/ai-tracing.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"ai-tracing.d.ts","sourceRoot":"","sources":["../src/ai-tracing.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,KAAK,EAAE,iBAAiB,EAAE,cAAc,EAAqB,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAiBnH,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAC;AAErD,qBAAa,YAAa,YAAW,iBAAiB;IACpD,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,SAAS,CAAC,CAAqB;IACvC,OAAO,CAAC,QAAQ,CAAC,CAAe;IAChC,OAAO,CAAC,OAAO,CAAkB;IACjC,OAAO,CAAC,UAAU,CAAkB;IACpC,OAAO,CAAC,MAAM,CAAgB;IAE9B,IAAI,SAAmB;gBAEX,MAAM,EAAE,kBAAkB;IAWtC;;OAEG;IACH,IAAI,CAAC,MAAM,EAAE,aAAa,GAAG,IAAI;YAInB,aAAa;IAwHrB,WAAW,CAAC,KAAK,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;YAgBzC,UAAU;IA8BlB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAMhC"}
1
+ {"version":3,"file":"ai-tracing.d.ts","sourceRoot":"","sources":["../src/ai-tracing.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,cAAc,EAAqB,mBAAmB,EAAiB,MAAM,4BAA4B,CAAC;AAExH,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAgBrD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAC;AAErD,qBAAa,YAAa,SAAQ,YAAY;IAC5C,OAAO,CAAC,MAAM,CAAqB;IACnC,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,SAAS,CAAC,CAAqB;IACvC,OAAO,CAAC,QAAQ,CAAC,CAAe;IAChC,OAAO,CAAC,OAAO,CAAkB;IAEjC,IAAI,SAAmB;gBAEX,MAAM,EAAE,kBAAkB;IAYtC;;OAEG;IACH,IAAI,CAAC,OAAO,EAAE,mBAAmB;YAInB,aAAa;YA6Fb,cAAc;YAqCd,KAAK;cAOH,YAAY,CAAC,KAAK,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;YAWpD,UAAU;IA8BlB,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAMhC"}
package/dist/index.cjs CHANGED
@@ -1,7 +1,7 @@
1
1
  'use strict';
2
2
 
3
- var aiTracing = require('@mastra/core/ai-tracing');
4
- var logger = require('@mastra/core/logger');
3
+ var observability$1 = require('@mastra/core/observability');
4
+ var observability = require('@mastra/observability');
5
5
  var api = require('@opentelemetry/api');
6
6
  var resources = require('@opentelemetry/resources');
7
7
  var sdkTraceBase = require('@opentelemetry/sdk-trace-base');
@@ -209,6 +209,7 @@ var MastraReadableSpan = class {
209
209
  name;
210
210
  kind;
211
211
  spanContext;
212
+ parentSpanContext;
212
213
  parentSpanId;
213
214
  startTime;
214
215
  endTime;
@@ -276,6 +277,14 @@ var MastraReadableSpan = class {
276
277
  traceFlags: api.TraceFlags.SAMPLED,
277
278
  isRemote: false
278
279
  });
280
+ if (parentSpanId) {
281
+ this.parentSpanContext = {
282
+ traceId: aiSpan.traceId,
283
+ spanId: parentSpanId,
284
+ traceFlags: api.TraceFlags.SAMPLED,
285
+ isRemote: false
286
+ };
287
+ }
279
288
  this.resource = resource || {};
280
289
  this.instrumentationLibrary = instrumentationLibrary || {
281
290
  name: "@mastra/otel",
@@ -296,14 +305,14 @@ var MastraReadableSpan = class {
296
305
 
297
306
  // src/span-converter.ts
298
307
  var SPAN_KIND_MAPPING = {
299
- // LLM operations are CLIENT spans (calling external AI services)
300
- [aiTracing.AISpanType.LLM_GENERATION]: api.SpanKind.CLIENT,
301
- [aiTracing.AISpanType.LLM_CHUNK]: api.SpanKind.CLIENT,
308
+ // Model operations are CLIENT spans (calling external AI services)
309
+ [observability$1.AISpanType.MODEL_GENERATION]: api.SpanKind.CLIENT,
310
+ [observability$1.AISpanType.MODEL_CHUNK]: api.SpanKind.CLIENT,
302
311
  // MCP tool calls are CLIENT (external service calls)
303
- [aiTracing.AISpanType.MCP_TOOL_CALL]: api.SpanKind.CLIENT,
312
+ [observability$1.AISpanType.MCP_TOOL_CALL]: api.SpanKind.CLIENT,
304
313
  // Root spans for agent/workflow are SERVER (entry points)
305
- [aiTracing.AISpanType.AGENT_RUN]: api.SpanKind.SERVER,
306
- [aiTracing.AISpanType.WORKFLOW_RUN]: api.SpanKind.SERVER
314
+ [observability$1.AISpanType.AGENT_RUN]: api.SpanKind.SERVER,
315
+ [observability$1.AISpanType.WORKFLOW_RUN]: api.SpanKind.SERVER
307
316
  };
308
317
  var SpanConverter = class {
309
318
  resource;
@@ -339,7 +348,7 @@ var SpanConverter = class {
339
348
  */
340
349
  getSpanKind(aiSpan) {
341
350
  if (aiSpan.isRootSpan) {
342
- if (aiSpan.type === aiTracing.AISpanType.AGENT_RUN || aiSpan.type === aiTracing.AISpanType.WORKFLOW_RUN) {
351
+ if (aiSpan.type === observability$1.AISpanType.AGENT_RUN || aiSpan.type === observability$1.AISpanType.WORKFLOW_RUN) {
343
352
  return api.SpanKind.SERVER;
344
353
  }
345
354
  }
@@ -350,29 +359,29 @@ var SpanConverter = class {
350
359
  */
351
360
  buildSpanName(aiSpan) {
352
361
  switch (aiSpan.type) {
353
- case aiTracing.AISpanType.LLM_GENERATION: {
362
+ case observability$1.AISpanType.MODEL_GENERATION: {
354
363
  const attrs = aiSpan.attributes;
355
364
  const operation = attrs?.resultType === "tool_selection" ? "tool_selection" : "chat";
356
365
  const model = attrs?.model || "unknown";
357
366
  return `${operation} ${model}`;
358
367
  }
359
- case aiTracing.AISpanType.TOOL_CALL:
360
- case aiTracing.AISpanType.MCP_TOOL_CALL: {
368
+ case observability$1.AISpanType.TOOL_CALL:
369
+ case observability$1.AISpanType.MCP_TOOL_CALL: {
361
370
  const toolAttrs = aiSpan.attributes;
362
371
  const toolName = toolAttrs?.toolId || "unknown";
363
372
  return `tool.execute ${toolName}`;
364
373
  }
365
- case aiTracing.AISpanType.AGENT_RUN: {
374
+ case observability$1.AISpanType.AGENT_RUN: {
366
375
  const agentAttrs = aiSpan.attributes;
367
376
  const agentId = agentAttrs?.agentId || "unknown";
368
377
  return `agent.${agentId}`;
369
378
  }
370
- case aiTracing.AISpanType.WORKFLOW_RUN: {
379
+ case observability$1.AISpanType.WORKFLOW_RUN: {
371
380
  const workflowAttrs = aiSpan.attributes;
372
381
  const workflowId = workflowAttrs?.workflowId || "unknown";
373
382
  return `workflow.${workflowId}`;
374
383
  }
375
- case aiTracing.AISpanType.WORKFLOW_STEP:
384
+ case observability$1.AISpanType.WORKFLOW_STEP:
376
385
  return aiSpan.name;
377
386
  default:
378
387
  return aiSpan.name;
@@ -395,81 +404,81 @@ var SpanConverter = class {
395
404
  if (aiSpan.input !== void 0) {
396
405
  const inputStr = typeof aiSpan.input === "string" ? aiSpan.input : JSON.stringify(aiSpan.input);
397
406
  attributes["input"] = inputStr;
398
- if (aiSpan.type === aiTracing.AISpanType.LLM_GENERATION) {
407
+ if (aiSpan.type === observability$1.AISpanType.MODEL_GENERATION) {
399
408
  attributes["gen_ai.prompt"] = inputStr;
400
- } else if (aiSpan.type === aiTracing.AISpanType.TOOL_CALL || aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) {
409
+ } else if (aiSpan.type === observability$1.AISpanType.TOOL_CALL || aiSpan.type === observability$1.AISpanType.MCP_TOOL_CALL) {
401
410
  attributes["gen_ai.tool.input"] = inputStr;
402
411
  }
403
412
  }
404
413
  if (aiSpan.output !== void 0) {
405
414
  const outputStr = typeof aiSpan.output === "string" ? aiSpan.output : JSON.stringify(aiSpan.output);
406
415
  attributes["output"] = outputStr;
407
- if (aiSpan.type === aiTracing.AISpanType.LLM_GENERATION) {
416
+ if (aiSpan.type === observability$1.AISpanType.MODEL_GENERATION) {
408
417
  attributes["gen_ai.completion"] = outputStr;
409
- } else if (aiSpan.type === aiTracing.AISpanType.TOOL_CALL || aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) {
418
+ } else if (aiSpan.type === observability$1.AISpanType.TOOL_CALL || aiSpan.type === observability$1.AISpanType.MCP_TOOL_CALL) {
410
419
  attributes["gen_ai.tool.output"] = outputStr;
411
420
  }
412
421
  }
413
- if (aiSpan.type === aiTracing.AISpanType.LLM_GENERATION && aiSpan.attributes) {
414
- const llmAttrs = aiSpan.attributes;
415
- if (llmAttrs.model) {
416
- attributes["gen_ai.request.model"] = llmAttrs.model;
422
+ if (aiSpan.type === observability$1.AISpanType.MODEL_GENERATION && aiSpan.attributes) {
423
+ const modelAttrs = aiSpan.attributes;
424
+ if (modelAttrs.model) {
425
+ attributes["gen_ai.request.model"] = modelAttrs.model;
417
426
  }
418
- if (llmAttrs.provider) {
419
- attributes["gen_ai.system"] = llmAttrs.provider;
427
+ if (modelAttrs.provider) {
428
+ attributes["gen_ai.system"] = modelAttrs.provider;
420
429
  }
421
- if (llmAttrs.usage) {
422
- const inputTokens = llmAttrs.usage.inputTokens ?? llmAttrs.usage.promptTokens;
423
- const outputTokens = llmAttrs.usage.outputTokens ?? llmAttrs.usage.completionTokens;
430
+ if (modelAttrs.usage) {
431
+ const inputTokens = modelAttrs.usage.inputTokens ?? modelAttrs.usage.promptTokens;
432
+ const outputTokens = modelAttrs.usage.outputTokens ?? modelAttrs.usage.completionTokens;
424
433
  if (inputTokens !== void 0) {
425
434
  attributes["gen_ai.usage.input_tokens"] = inputTokens;
426
435
  }
427
436
  if (outputTokens !== void 0) {
428
437
  attributes["gen_ai.usage.output_tokens"] = outputTokens;
429
438
  }
430
- if (llmAttrs.usage.totalTokens !== void 0) {
431
- attributes["gen_ai.usage.total_tokens"] = llmAttrs.usage.totalTokens;
439
+ if (modelAttrs.usage.totalTokens !== void 0) {
440
+ attributes["gen_ai.usage.total_tokens"] = modelAttrs.usage.totalTokens;
432
441
  }
433
- if (llmAttrs.usage.reasoningTokens !== void 0) {
434
- attributes["gen_ai.usage.reasoning_tokens"] = llmAttrs.usage.reasoningTokens;
442
+ if (modelAttrs.usage.reasoningTokens !== void 0) {
443
+ attributes["gen_ai.usage.reasoning_tokens"] = modelAttrs.usage.reasoningTokens;
435
444
  }
436
- if (llmAttrs.usage.cachedInputTokens !== void 0) {
437
- attributes["gen_ai.usage.cached_input_tokens"] = llmAttrs.usage.cachedInputTokens;
445
+ if (modelAttrs.usage.cachedInputTokens !== void 0) {
446
+ attributes["gen_ai.usage.cached_input_tokens"] = modelAttrs.usage.cachedInputTokens;
438
447
  }
439
448
  }
440
- if (llmAttrs.parameters) {
441
- if (llmAttrs.parameters.temperature !== void 0) {
442
- attributes["gen_ai.request.temperature"] = llmAttrs.parameters.temperature;
449
+ if (modelAttrs.parameters) {
450
+ if (modelAttrs.parameters.temperature !== void 0) {
451
+ attributes["gen_ai.request.temperature"] = modelAttrs.parameters.temperature;
443
452
  }
444
- if (llmAttrs.parameters.maxOutputTokens !== void 0) {
445
- attributes["gen_ai.request.max_tokens"] = llmAttrs.parameters.maxOutputTokens;
453
+ if (modelAttrs.parameters.maxOutputTokens !== void 0) {
454
+ attributes["gen_ai.request.max_tokens"] = modelAttrs.parameters.maxOutputTokens;
446
455
  }
447
- if (llmAttrs.parameters.topP !== void 0) {
448
- attributes["gen_ai.request.top_p"] = llmAttrs.parameters.topP;
456
+ if (modelAttrs.parameters.topP !== void 0) {
457
+ attributes["gen_ai.request.top_p"] = modelAttrs.parameters.topP;
449
458
  }
450
- if (llmAttrs.parameters.topK !== void 0) {
451
- attributes["gen_ai.request.top_k"] = llmAttrs.parameters.topK;
459
+ if (modelAttrs.parameters.topK !== void 0) {
460
+ attributes["gen_ai.request.top_k"] = modelAttrs.parameters.topK;
452
461
  }
453
- if (llmAttrs.parameters.presencePenalty !== void 0) {
454
- attributes["gen_ai.request.presence_penalty"] = llmAttrs.parameters.presencePenalty;
462
+ if (modelAttrs.parameters.presencePenalty !== void 0) {
463
+ attributes["gen_ai.request.presence_penalty"] = modelAttrs.parameters.presencePenalty;
455
464
  }
456
- if (llmAttrs.parameters.frequencyPenalty !== void 0) {
457
- attributes["gen_ai.request.frequency_penalty"] = llmAttrs.parameters.frequencyPenalty;
465
+ if (modelAttrs.parameters.frequencyPenalty !== void 0) {
466
+ attributes["gen_ai.request.frequency_penalty"] = modelAttrs.parameters.frequencyPenalty;
458
467
  }
459
- if (llmAttrs.parameters.stopSequences) {
460
- attributes["gen_ai.request.stop_sequences"] = JSON.stringify(llmAttrs.parameters.stopSequences);
468
+ if (modelAttrs.parameters.stopSequences) {
469
+ attributes["gen_ai.request.stop_sequences"] = JSON.stringify(modelAttrs.parameters.stopSequences);
461
470
  }
462
471
  }
463
- if (llmAttrs.finishReason) {
464
- attributes["gen_ai.response.finish_reasons"] = llmAttrs.finishReason;
472
+ if (modelAttrs.finishReason) {
473
+ attributes["gen_ai.response.finish_reasons"] = modelAttrs.finishReason;
465
474
  }
466
475
  }
467
- if ((aiSpan.type === aiTracing.AISpanType.TOOL_CALL || aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) && aiSpan.attributes) {
476
+ if ((aiSpan.type === observability$1.AISpanType.TOOL_CALL || aiSpan.type === observability$1.AISpanType.MCP_TOOL_CALL) && aiSpan.attributes) {
468
477
  const toolAttrs = aiSpan.attributes;
469
478
  if (toolAttrs.toolId) {
470
479
  attributes["gen_ai.tool.name"] = toolAttrs.toolId;
471
480
  }
472
- if (aiSpan.type === aiTracing.AISpanType.MCP_TOOL_CALL) {
481
+ if (aiSpan.type === observability$1.AISpanType.MCP_TOOL_CALL) {
473
482
  const mcpAttrs = toolAttrs;
474
483
  if (mcpAttrs.mcpServer) {
475
484
  attributes["mcp.server"] = mcpAttrs.mcpServer;
@@ -486,10 +495,11 @@ var SpanConverter = class {
486
495
  attributes["gen_ai.tool.success"] = toolAttrs.success;
487
496
  }
488
497
  }
489
- if (aiSpan.type === aiTracing.AISpanType.AGENT_RUN && aiSpan.attributes) {
498
+ if (aiSpan.type === observability$1.AISpanType.AGENT_RUN && aiSpan.attributes) {
490
499
  const agentAttrs = aiSpan.attributes;
491
500
  if (agentAttrs.agentId) {
492
501
  attributes["agent.id"] = agentAttrs.agentId;
502
+ attributes["gen_ai.agent.id"] = agentAttrs.agentId;
493
503
  }
494
504
  if (agentAttrs.maxSteps) {
495
505
  attributes["agent.max_steps"] = agentAttrs.maxSteps;
@@ -498,7 +508,7 @@ var SpanConverter = class {
498
508
  attributes["agent.available_tools"] = JSON.stringify(agentAttrs.availableTools);
499
509
  }
500
510
  }
501
- if (aiSpan.type === aiTracing.AISpanType.WORKFLOW_RUN && aiSpan.attributes) {
511
+ if (aiSpan.type === observability$1.AISpanType.WORKFLOW_RUN && aiSpan.attributes) {
502
512
  const workflowAttrs = aiSpan.attributes;
503
513
  if (workflowAttrs.workflowId) {
504
514
  attributes["workflow.id"] = workflowAttrs.workflowId;
@@ -547,16 +557,16 @@ var SpanConverter = class {
547
557
  */
548
558
  getOperationName(aiSpan) {
549
559
  switch (aiSpan.type) {
550
- case aiTracing.AISpanType.LLM_GENERATION: {
560
+ case observability$1.AISpanType.MODEL_GENERATION: {
551
561
  const attrs = aiSpan.attributes;
552
562
  return attrs?.resultType === "tool_selection" ? "tool_selection" : "chat";
553
563
  }
554
- case aiTracing.AISpanType.TOOL_CALL:
555
- case aiTracing.AISpanType.MCP_TOOL_CALL:
564
+ case observability$1.AISpanType.TOOL_CALL:
565
+ case observability$1.AISpanType.MCP_TOOL_CALL:
556
566
  return "tool.execute";
557
- case aiTracing.AISpanType.AGENT_RUN:
567
+ case observability$1.AISpanType.AGENT_RUN:
558
568
  return "agent.run";
559
- case aiTracing.AISpanType.WORKFLOW_RUN:
569
+ case observability$1.AISpanType.WORKFLOW_RUN:
560
570
  return "workflow.run";
561
571
  default:
562
572
  return aiSpan.type.replace(/_/g, ".");
@@ -585,20 +595,18 @@ var SpanConverter = class {
585
595
  };
586
596
 
587
597
  // src/ai-tracing.ts
588
- var OtelExporter = class {
598
+ var OtelExporter = class extends observability.BaseExporter {
589
599
  config;
590
600
  tracingConfig;
591
601
  spanConverter;
592
602
  processor;
593
603
  exporter;
594
604
  isSetup = false;
595
- isDisabled = false;
596
- logger;
597
605
  name = "opentelemetry";
598
606
  constructor(config) {
607
+ super(config);
599
608
  this.config = config;
600
609
  this.spanConverter = new SpanConverter();
601
- this.logger = new logger.ConsoleLogger({ level: config.logLevel ?? "warn" });
602
610
  if (config.logLevel === "debug") {
603
611
  api.diag.setLogger(new api.DiagConsoleLogger(), api.DiagLogLevel.DEBUG);
604
612
  }
@@ -606,11 +614,11 @@ var OtelExporter = class {
606
614
  /**
607
615
  * Initialize with tracing configuration
608
616
  */
609
- init(config) {
610
- this.tracingConfig = config;
617
+ init(options) {
618
+ this.tracingConfig = options.config;
611
619
  }
612
620
  async setupExporter() {
613
- if (this.isSetup) return;
621
+ if (this.isSetup || this.exporter) return;
614
622
  if (!this.config.provider) {
615
623
  this.logger.error(
616
624
  '[OtelExporter] Provider configuration is required. Use the "custom" provider for generic endpoints.'
@@ -625,6 +633,10 @@ var OtelExporter = class {
625
633
  this.isSetup = true;
626
634
  return;
627
635
  }
636
+ if (this.config.exporter) {
637
+ this.exporter = this.config.exporter;
638
+ return;
639
+ }
628
640
  const endpoint = resolved.endpoint;
629
641
  const headers = resolved.headers;
630
642
  const protocol = resolved.protocol;
@@ -678,6 +690,9 @@ var OtelExporter = class {
678
690
  this.isSetup = true;
679
691
  return;
680
692
  }
693
+ }
694
+ async setupProcessor() {
695
+ if (this.processor || this.isSetup) return;
681
696
  let resource = resources.resourceFromAttributes({
682
697
  [semanticConventions.ATTR_SERVICE_NAME]: this.tracingConfig?.serviceName || "mastra-service",
683
698
  [semanticConventions.ATTR_SERVICE_VERSION]: "1.0.0",
@@ -706,13 +721,15 @@ var OtelExporter = class {
706
721
  this.logger.debug(
707
722
  `[OtelExporter] Using BatchSpanProcessor (batch size: ${this.config.batchSize || 512}, delay: 5s)`
708
723
  );
724
+ }
725
+ async setup() {
726
+ if (this.isSetup) return;
727
+ await this.setupExporter();
728
+ await this.setupProcessor();
709
729
  this.isSetup = true;
710
730
  }
711
- async exportEvent(event) {
712
- if (this.isDisabled) {
713
- return;
714
- }
715
- if (event.type !== aiTracing.AITracingEventType.SPAN_ENDED) {
731
+ async _exportEvent(event) {
732
+ if (event.type !== observability$1.AITracingEventType.SPAN_ENDED) {
716
733
  return;
717
734
  }
718
735
  const span = event.exportedSpan;
@@ -720,7 +737,7 @@ var OtelExporter = class {
720
737
  }
721
738
  async exportSpan(span) {
722
739
  if (!this.isSetup) {
723
- await this.setupExporter();
740
+ await this.setup();
724
741
  }
725
742
  if (this.isDisabled || !this.processor) {
726
743
  return;