lmnr 0.4.8__py3-none-any.whl → 0.4.9__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
@@ -1,261 +1,262 @@
- import json
-
- import pytest
- from openai import OpenAI, AsyncOpenAI
- from opentelemetry.semconv_ai import SpanAttributes
- from lmnr.traceloop_sdk import Traceloop
- from lmnr.traceloop_sdk.decorators import workflow, task, aworkflow, atask
-
-
- @pytest.fixture
- def openai_client():
- return OpenAI()
-
-
- @pytest.fixture
- def async_openai_client():
- return AsyncOpenAI()
-
-
- @pytest.mark.vcr
- def test_simple_workflow(exporter, openai_client):
- @task(name="something_creator", version=2)
- def create_something(what: str, subject: str):
- Traceloop.set_prompt(
- "Tell me a {what} about {subject}", {"what": what, "subject": subject}, 5
- )
- completion = openai_client.chat.completions.create(
- model="gpt-3.5-turbo",
- messages=[{"role": "user", "content": f"Tell me a {what} about {subject}"}],
- )
- return completion.choices[0].message.content
-
- @workflow(name="pirate_joke_generator", version=1)
- def joke_workflow():
- return create_something("joke", subject="OpenTelemetry")
-
- joke = joke_workflow()
-
- spans = exporter.get_finished_spans()
- assert [span.name for span in spans] == [
- "openai.chat",
- "something_creator.task",
- "pirate_joke_generator.workflow",
- ]
- open_ai_span = next(span for span in spans if span.name == "openai.chat")
- assert (
- open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.content"]
- == "Tell me a joke about OpenTelemetry"
- )
- assert open_ai_span.attributes.get(f"{SpanAttributes.LLM_COMPLETIONS}.0.content")
- assert (
- open_ai_span.attributes.get("traceloop.prompt.template")
- == "Tell me a {what} about {subject}"
- )
- assert (
- open_ai_span.attributes.get("traceloop.prompt.template_variables.what")
- == "joke"
- )
- assert (
- open_ai_span.attributes.get("traceloop.prompt.template_variables.subject")
- == "OpenTelemetry"
- )
- assert open_ai_span.attributes.get("traceloop.prompt.version") == 5
-
- workflow_span = next(
- span for span in spans if span.name == "pirate_joke_generator.workflow"
- )
- task_span = next(span for span in spans if span.name == "something_creator.task")
- assert json.loads(task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_INPUT]) == {
- "args": ["joke"],
- "kwargs": {"subject": "OpenTelemetry"},
- }
-
- assert (
- json.loads(task_span.attributes.get(SpanAttributes.TRACELOOP_ENTITY_OUTPUT))
- == joke
- )
- assert task_span.parent.span_id == workflow_span.context.span_id
- assert (
- workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME]
- == "pirate_joke_generator"
- )
- assert workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 1
- assert task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 2
-
-
- @pytest.mark.vcr
- @pytest.mark.asyncio
- async def test_simple_aworkflow(exporter, async_openai_client):
- @atask(name="something_creator", version=2)
- async def create_something(what: str, subject: str):
- Traceloop.set_prompt(
- "Tell me a {what} about {subject}", {"what": what, "subject": subject}, 5
- )
- completion = await async_openai_client.chat.completions.create(
- model="gpt-3.5-turbo",
- messages=[{"role": "user", "content": f"Tell me a {what} about {subject}"}],
- )
- return completion.choices[0].message.content
-
- @aworkflow(name="pirate_joke_generator", version=1)
- async def joke_workflow():
- return await create_something("joke", subject="OpenTelemetry")
-
- joke = await joke_workflow()
-
- spans = exporter.get_finished_spans()
- assert [span.name for span in spans] == [
- "openai.chat",
- "something_creator.task",
- "pirate_joke_generator.workflow",
- ]
- open_ai_span = next(span for span in spans if span.name == "openai.chat")
- assert (
- open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.content"]
- == "Tell me a joke about OpenTelemetry"
- )
- assert open_ai_span.attributes.get(f"{SpanAttributes.LLM_COMPLETIONS}.0.content")
- assert (
- open_ai_span.attributes.get("traceloop.prompt.template")
- == "Tell me a {what} about {subject}"
- )
- assert (
- open_ai_span.attributes.get("traceloop.prompt.template_variables.what")
- == "joke"
- )
- assert (
- open_ai_span.attributes.get("traceloop.prompt.template_variables.subject")
- == "OpenTelemetry"
- )
- assert open_ai_span.attributes.get("traceloop.prompt.version") == 5
-
- workflow_span = next(
- span for span in spans if span.name == "pirate_joke_generator.workflow"
- )
- task_span = next(span for span in spans if span.name == "something_creator.task")
- assert json.loads(task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_INPUT]) == {
- "args": ["joke"],
- "kwargs": {"subject": "OpenTelemetry"},
- }
-
- assert (
- json.loads(task_span.attributes.get(SpanAttributes.TRACELOOP_ENTITY_OUTPUT))
- == joke
- )
- assert task_span.parent.span_id == workflow_span.context.span_id
- assert (
- workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME]
- == "pirate_joke_generator"
- )
- assert workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 1
- assert task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 2
-
-
- @pytest.mark.vcr
- def test_streaming_workflow(exporter, openai_client):
-
- @task(name="pirate_joke_generator")
- def joke_task():
- response_stream = openai_client.chat.completions.create(
- model="gpt-3.5-turbo",
- messages=[
- {"role": "user", "content": "Tell me a joke about OpenTelemetry"}
- ],
- stream=True,
- )
- for chunk in response_stream:
- yield chunk
-
- @task(name="joke_runner")
- def joke_runner():
- res = joke_task()
- return res
-
- @workflow(name="joke_manager")
- def joke_workflow():
- res = joke_runner()
- for chunk in res:
- pass
-
- joke_workflow()
-
- spans = exporter.get_finished_spans()
- assert set([span.name for span in spans]) == set(
- [
- "openai.chat",
- "pirate_joke_generator.task",
- "joke_runner.task",
- "joke_manager.workflow",
- ]
- )
- generator_span = next(
- span for span in spans if span.name == "pirate_joke_generator.task"
- )
- runner_span = next(span for span in spans if span.name == "joke_runner.task")
- manager_span = next(span for span in spans if span.name == "joke_manager.workflow")
- openai_span = next(span for span in spans if span.name == "openai.chat")
-
- assert openai_span.parent.span_id == generator_span.context.span_id
- assert generator_span.parent.span_id == runner_span.context.span_id
- assert runner_span.parent.span_id == manager_span.context.span_id
- assert openai_span.end_time <= manager_span.end_time
-
-
- def test_unrelated_entities(exporter):
- @workflow(name="workflow_1")
- def workflow_1():
- return
-
- @task(name="task_1")
- def task_1():
- return
-
- workflow_1()
- task_1()
-
- spans = exporter.get_finished_spans()
- assert [span.name for span in spans] == ["workflow_1.workflow", "task_1.task"]
-
- workflow_1_span = spans[0]
- task_1_span = spans[1]
-
- assert (
- workflow_1_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME] == "workflow_1"
- )
- assert workflow_1_span.attributes[SpanAttributes.TRACELOOP_SPAN_KIND] == "workflow"
-
- assert task_1_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME] == "task_1"
- assert task_1_span.attributes[SpanAttributes.TRACELOOP_SPAN_KIND] == "task"
- assert task_1_span.parent is None
-
-
- def test_unserializable_workflow(exporter):
- @task(name="unserializable_task")
- def unserializable_task(obj: object):
- return object()
-
- @workflow(name="unserializable_workflow")
- def unserializable_workflow(obj: object):
- return unserializable_task(obj)
-
- unserializable_task(object())
-
- spans = exporter.get_finished_spans()
- assert [span.name for span in spans] == ["unserializable_task.task"]
-
-
- @pytest.mark.asyncio
- async def test_unserializable_async_workflow(exporter):
- @atask(name="unserializable_task")
- async def unserializable_task(obj: object):
- return object()
-
- @aworkflow(name="unserializable_workflow")
- async def unserializable_workflow(obj: object):
- return await unserializable_task(obj)
-
- await unserializable_task(object())
-
- spans = exporter.get_finished_spans()
- assert [span.name for span in spans] == ["unserializable_task.task"]
+ # import json
+
+ # import pytest
+ # from openai import OpenAI, AsyncOpenAI
+ # from opentelemetry.semconv_ai import SpanAttributes
+ # from lmnr.traceloop_sdk import Traceloop
+ # from lmnr.traceloop_sdk.decorators import workflow, task, aworkflow, atask
+
+
+ # @pytest.fixture
+ # def openai_client():
+ # return OpenAI()
+
+
+ # @pytest.fixture
+ # def async_openai_client():
+ # return AsyncOpenAI()
+
+
+ # # commented out because of the version parameter in task decorator
+ # # @pytest.mark.vcr
+ # # def test_simple_workflow(exporter, openai_client):
+ # # @task(name="something_creator", version=2)
+ # # def create_something(what: str, subject: str):
+ # # Traceloop.set_prompt(
+ # # "Tell me a {what} about {subject}", {"what": what, "subject": subject}, 5
+ # # )
+ # # completion = openai_client.chat.completions.create(
+ # # model="gpt-3.5-turbo",
+ # # messages=[{"role": "user", "content": f"Tell me a {what} about {subject}"}],
+ # # )
+ # # return completion.choices[0].message.content
+
+ # # @workflow(name="pirate_joke_generator", version=1)
+ # # def joke_workflow():
+ # # return create_something("joke", subject="OpenTelemetry")
+
+ # # joke = joke_workflow()
+
+ # # spans = exporter.get_finished_spans()
+ # # assert [span.name for span in spans] == [
+ # # "openai.chat",
+ # # "something_creator.task",
+ # # "pirate_joke_generator.workflow",
+ # # ]
+ # # open_ai_span = next(span for span in spans if span.name == "openai.chat")
+ # # assert (
+ # # open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.content"]
+ # # == "Tell me a joke about OpenTelemetry"
+ # # )
+ # # assert open_ai_span.attributes.get(f"{SpanAttributes.LLM_COMPLETIONS}.0.content")
+ # # assert (
+ # # open_ai_span.attributes.get("traceloop.prompt.template")
+ # # == "Tell me a {what} about {subject}"
+ # # )
+ # # assert (
+ # # open_ai_span.attributes.get("traceloop.prompt.template_variables.what")
+ # # == "joke"
+ # # )
+ # # assert (
+ # # open_ai_span.attributes.get("traceloop.prompt.template_variables.subject")
+ # # == "OpenTelemetry"
+ # # )
+ # # assert open_ai_span.attributes.get("traceloop.prompt.version") == 5
+
+ # # workflow_span = next(
+ # # span for span in spans if span.name == "pirate_joke_generator.workflow"
+ # # )
+ # # task_span = next(span for span in spans if span.name == "something_creator.task")
+ # # assert json.loads(task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_INPUT]) == {
+ # # "args": ["joke"],
+ # # "kwargs": {"subject": "OpenTelemetry"},
+ # # }
+
+ # # assert (
+ # # json.loads(task_span.attributes.get(SpanAttributes.TRACELOOP_ENTITY_OUTPUT))
+ # # == joke
+ # # )
+ # # assert task_span.parent.span_id == workflow_span.context.span_id
+ # # assert (
+ # # workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME]
+ # # == "pirate_joke_generator"
+ # # )
+ # # assert workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 1
+ # # assert task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 2
+
+
+ # @pytest.mark.vcr
+ # @pytest.mark.asyncio
+ # async def test_simple_aworkflow(exporter, async_openai_client):
+ # @atask(name="something_creator", version=2)
+ # async def create_something(what: str, subject: str):
+ # Traceloop.set_prompt(
+ # "Tell me a {what} about {subject}", {"what": what, "subject": subject}, 5
+ # )
+ # completion = await async_openai_client.chat.completions.create(
+ # model="gpt-3.5-turbo",
+ # messages=[{"role": "user", "content": f"Tell me a {what} about {subject}"}],
+ # )
+ # return completion.choices[0].message.content
+
+ # @aworkflow(name="pirate_joke_generator", version=1)
+ # async def joke_workflow():
+ # return await create_something("joke", subject="OpenTelemetry")
+
+ # joke = await joke_workflow()
+
+ # spans = exporter.get_finished_spans()
+ # assert [span.name for span in spans] == [
+ # "openai.chat",
+ # "something_creator.task",
+ # "pirate_joke_generator.workflow",
+ # ]
+ # open_ai_span = next(span for span in spans if span.name == "openai.chat")
+ # assert (
+ # open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.content"]
+ # == "Tell me a joke about OpenTelemetry"
+ # )
+ # assert open_ai_span.attributes.get(f"{SpanAttributes.LLM_COMPLETIONS}.0.content")
+ # assert (
+ # open_ai_span.attributes.get("traceloop.prompt.template")
+ # == "Tell me a {what} about {subject}"
+ # )
+ # assert (
+ # open_ai_span.attributes.get("traceloop.prompt.template_variables.what")
+ # == "joke"
+ # )
+ # assert (
+ # open_ai_span.attributes.get("traceloop.prompt.template_variables.subject")
+ # == "OpenTelemetry"
+ # )
+ # assert open_ai_span.attributes.get("traceloop.prompt.version") == 5
+
+ # workflow_span = next(
+ # span for span in spans if span.name == "pirate_joke_generator.workflow"
+ # )
+ # task_span = next(span for span in spans if span.name == "something_creator.task")
+ # assert json.loads(task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_INPUT]) == {
+ # "args": ["joke"],
+ # "kwargs": {"subject": "OpenTelemetry"},
+ # }
+
+ # assert (
+ # json.loads(task_span.attributes.get(SpanAttributes.TRACELOOP_ENTITY_OUTPUT))
+ # == joke
+ # )
+ # assert task_span.parent.span_id == workflow_span.context.span_id
+ # assert (
+ # workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME]
+ # == "pirate_joke_generator"
+ # )
+ # assert workflow_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 1
+ # assert task_span.attributes[SpanAttributes.TRACELOOP_ENTITY_VERSION] == 2
+
+
+ # @pytest.mark.vcr
+ # def test_streaming_workflow(exporter, openai_client):
+
+ # @task(name="pirate_joke_generator")
+ # def joke_task():
+ # response_stream = openai_client.chat.completions.create(
+ # model="gpt-3.5-turbo",
+ # messages=[
+ # {"role": "user", "content": "Tell me a joke about OpenTelemetry"}
+ # ],
+ # stream=True,
+ # )
+ # for chunk in response_stream:
+ # yield chunk
+
+ # @task(name="joke_runner")
+ # def joke_runner():
+ # res = joke_task()
+ # return res
+
+ # @workflow(name="joke_manager")
+ # def joke_workflow():
+ # res = joke_runner()
+ # for chunk in res:
+ # pass
+
+ # joke_workflow()
+
+ # spans = exporter.get_finished_spans()
+ # assert set([span.name for span in spans]) == set(
+ # [
+ # "openai.chat",
+ # "pirate_joke_generator.task",
+ # "joke_runner.task",
+ # "joke_manager.workflow",
+ # ]
+ # )
+ # generator_span = next(
+ # span for span in spans if span.name == "pirate_joke_generator.task"
+ # )
+ # runner_span = next(span for span in spans if span.name == "joke_runner.task")
+ # manager_span = next(span for span in spans if span.name == "joke_manager.workflow")
+ # openai_span = next(span for span in spans if span.name == "openai.chat")
+
+ # assert openai_span.parent.span_id == generator_span.context.span_id
+ # assert generator_span.parent.span_id == runner_span.context.span_id
+ # assert runner_span.parent.span_id == manager_span.context.span_id
+ # assert openai_span.end_time <= manager_span.end_time
+
+
+ # def test_unrelated_entities(exporter):
+ # @workflow(name="workflow_1")
+ # def workflow_1():
+ # return
+
+ # @task(name="task_1")
+ # def task_1():
+ # return
+
+ # workflow_1()
+ # task_1()
+
+ # spans = exporter.get_finished_spans()
+ # assert [span.name for span in spans] == ["workflow_1.workflow", "task_1.task"]
+
+ # workflow_1_span = spans[0]
+ # task_1_span = spans[1]
+
+ # assert (
+ # workflow_1_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME] == "workflow_1"
+ # )
+ # assert workflow_1_span.attributes[SpanAttributes.TRACELOOP_SPAN_KIND] == "workflow"
+
+ # assert task_1_span.attributes[SpanAttributes.TRACELOOP_ENTITY_NAME] == "task_1"
+ # assert task_1_span.attributes[SpanAttributes.TRACELOOP_SPAN_KIND] == "task"
+ # assert task_1_span.parent is None
+
+
+ # def test_unserializable_workflow(exporter):
+ # @task(name="unserializable_task")
+ # def unserializable_task(obj: object):
+ # return object()
+
+ # @workflow(name="unserializable_workflow")
+ # def unserializable_workflow(obj: object):
+ # return unserializable_task(obj)
+
+ # unserializable_task(object())
+
+ # spans = exporter.get_finished_spans()
+ # assert [span.name for span in spans] == ["unserializable_task.task"]
+
+
+ # @pytest.mark.asyncio
+ # async def test_unserializable_async_workflow(exporter):
+ # @atask(name="unserializable_task")
+ # async def unserializable_task(obj: object):
+ # return object()
+
+ # @aworkflow(name="unserializable_workflow")
+ # async def unserializable_workflow(obj: object):
+ # return await unserializable_task(obj)
+
+ # await unserializable_task(object())
+
+ # spans = exporter.get_finished_spans()
+ # assert [span.name for span in spans] == ["unserializable_task.task"]
@@ -1,2 +1 @@
  from lmnr.traceloop_sdk.tracing.context_manager import get_tracer
- from lmnr.traceloop_sdk.tracing.tracing import set_workflow_name
@@ -63,9 +63,9 @@ class TracerWrapper(object):
  def __new__(
  cls,
  disable_batch=False,
- processor: SpanProcessor = None,
- propagator: TextMapPropagator = None,
- exporter: SpanExporter = None,
+ processor: Optional[SpanProcessor] = None,
+ propagator: Optional[TextMapPropagator] = None,
+ exporter: Optional[SpanExporter] = None,
  should_enrich_metrics: bool = True,
  instruments: Optional[Set[Instruments]] = None,
  ) -> "TracerWrapper":
@@ -342,14 +342,6 @@ class TracerWrapper(object):
  self.flush()

  def _span_processor_on_start(self, span, parent_context):
- workflow_name = get_value("workflow_name")
- if workflow_name is not None:
- span.set_attribute(SpanAttributes.TRACELOOP_WORKFLOW_NAME, workflow_name)
-
- entity_path = get_value("entity_path")
- if entity_path is not None:
- span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_PATH, entity_path)
-
  association_properties = get_value("association_properties")
  if association_properties is not None:
  _set_association_properties_attributes(span, association_properties)
@@ -444,10 +436,23 @@ class TracerWrapper(object):
  def set_association_properties(properties: dict) -> None:
  attach(set_value("association_properties", properties))

- # Attach association properties to the current span, if it's a workflow or a task
+ # TODO: When called inside observe decorator, this actually sets the properties on the parent span, not the current one
+ # Then, processor's on_start will assign this to current span
+ span = trace.get_current_span()
+ _set_association_properties_attributes(span, properties)
+
+
+ def update_association_properties(properties: dict) -> None:
+ """Only adds or updates properties that are not already present"""
+ association_properties = get_value("association_properties") or {}
+ association_properties.update(properties)
+
+ attach(set_value("association_properties", association_properties))
+
+ # TODO: When called inside observe decorator, this actually sets the properties on the parent span, not the current one
+ # Then, processor's on_start will assign this to current span
  span = trace.get_current_span()
- if get_value("workflow_name") is not None or get_value("entity_name") is not None:
- _set_association_properties_attributes(span, properties)
+ _set_association_properties_attributes(span, properties)


  def _set_association_properties_attributes(span, properties: dict) -> None:
@@ -457,22 +462,6 @@ def _set_association_properties_attributes(span, properties: dict) -> None:
  )


- def set_workflow_name(workflow_name: str) -> None:
- attach(set_value("workflow_name", workflow_name))
-
-
- def set_entity_path(entity_path: str) -> None:
- attach(set_value("entity_path", entity_path))
-
-
- def get_chained_entity_path(entity_name: str) -> str:
- parent = get_value("entity_path")
- if parent is None:
- return entity_name
- else:
- return f"{parent}.{entity_name}"
-
-
  def set_managed_prompt_tracing_context(
  key: str,
  version: int,
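
Note on the association-properties hunks above: in 0.4.9, set_association_properties now always copies the properties onto the current span (previously it only did so inside a workflow or task context), and a new update_association_properties helper merges keys into the existing context instead of replacing it. A minimal usage sketch follows; the import path is an assumption, since this diff does not show file names.

# Illustrative only; import path assumed, not confirmed by this diff.
from lmnr.traceloop_sdk.tracing.tracing import (
    set_association_properties,
    update_association_properties,
)

set_association_properties({"user_id": "u-123"})      # replaces the whole mapping held in the context
update_association_properties({"session_id": "s-1"})  # merges into the mapping already attached
# The context now carries {"user_id": "u-123", "session_id": "s-1"}, and each call
# also writes the given properties onto the current span as attributes.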