divi 0.0.1.dev28__py3-none-any.whl → 0.0.1.dev47__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- divi/__init__.py +3 -2
- divi/decorators/collect.py +34 -0
- divi/decorators/obs_openai.py +3 -2
- divi/decorators/observable.py +18 -51
- divi/decorators/observe.py +47 -0
- divi/evaluation/evaluate.py +28 -24
- divi/evaluation/evaluator.py +27 -14
- divi/evaluation/prompts.py +12 -7
- divi/proto/trace/v1/trace.proto +3 -0
- divi/proto/trace/v1/trace_pb2.py +3 -3
- divi/proto/trace/v1/trace_pb2.pyi +2 -0
- divi/services/datapark/datapark.py +21 -3
- divi/services/init.py +3 -1
- divi/session/session.py +1 -1
- divi/session/setup.py +2 -2
- divi/signals/__init__.py +2 -2
- divi/signals/span.py +83 -0
- divi/signals/trace.py +79 -0
- {divi-0.0.1.dev28.dist-info → divi-0.0.1.dev47.dist-info}/METADATA +1 -1
- {divi-0.0.1.dev28.dist-info → divi-0.0.1.dev47.dist-info}/RECORD +22 -20
- divi/signals/trace/__init__.py +0 -3
- divi/signals/trace/trace.py +0 -151
- {divi-0.0.1.dev28.dist-info → divi-0.0.1.dev47.dist-info}/WHEEL +0 -0
- {divi-0.0.1.dev28.dist-info → divi-0.0.1.dev47.dist-info}/licenses/LICENSE +0 -0
divi/__init__.py
CHANGED
@@ -5,6 +5,7 @@ from .decorators import obs_openai, observable
 from .evaluation import Evaluator, Score
 from .services import Auth, Core, DataPark
 from .session import Session
+from .signals import Kind

 name: str = "divi"

@@ -14,5 +15,5 @@ _auth: Optional[Auth] = None
 _datapark: Optional[DataPark] = None
 _evaluator: Optional[Evaluator] = None

-__version__ = "0.0.1.dev28"
-__all__ = ["proto", "obs_openai", "observable", "Score"]
+__version__ = "0.0.1.dev47"
+__all__ = ["proto", "obs_openai", "observable", "Score", "Kind"]
divi/decorators/collect.py
ADDED
@@ -0,0 +1,34 @@
+from typing import Any
+
+from google.protobuf.message import Error
+from openai.types.chat import ChatCompletion
+from typing_extensions import Dict
+
+import divi
+from divi.evaluation.evaluator import EvaluationScore
+from divi.signals.span import Span
+
+
+def collect(span: Span, input: Dict[str, Any], result: Any):
+    if not divi._datapark or span.trace_id is None:
+        raise Error("divi._datapark or span.trace_id is None")
+    # TODO: collect inputs and outputs for SPAN_KIND_FUNCTION
+
+    # collect inputs and outputs for SPAN_KIND_LLM
+    if isinstance(result, ChatCompletion):
+        divi._datapark.create_chat_completion(
+            span_id=span.span_id,
+            trace_id=span.trace_id,
+            inputs=input,
+            completion=result,
+        )
+
+    # collect inputs and outputs for SPAN_KIND_EVALUATION
+    if isinstance(result, list) and all(
+        isinstance(x, EvaluationScore) for x in result
+    ):
+        divi._datapark.create_scores(
+            span_id=span.span_id,
+            trace_id=span.trace_id,
+            scores=result,
+        )
divi/decorators/obs_openai.py
CHANGED
@@ -6,6 +6,7 @@ from typing_extensions import Optional

 from divi.decorators.observable import observable
 from divi.evaluation.scores import Score
+from divi.signals.span import Kind
 from divi.utils import is_async

 if TYPE_CHECKING:
@@ -21,7 +22,7 @@ def _get_observable_create(
 ) -> Callable:
     @functools.wraps(create)
     def observable_create(*args, stream: bool = False, **kwargs):
-        decorator = observable(kind=
+        decorator = observable(kind=Kind.llm, name=name, scores=scores)
         return decorator(create)(*args, stream=stream, **kwargs)

     # TODO Async Observable Create
@@ -30,7 +31,7 @@ def _get_observable_create(

 def obs_openai(
     client: C,
-    name: Optional[str] = "
+    name: Optional[str] = "Agent",
     scores: Optional[list[Score]] = None,
 ) -> C:
     """Make OpenAI client observable."""
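For reference, a minimal usage sketch of the updated obs_openai wrapper; the model name and message content are illustrative, and an OPENAI_API_KEY is assumed to be configured in the environment:

from openai import OpenAI

from divi import obs_openai

# Wrap the client; "Agent" is now the default observation name.
client = obs_openai(OpenAI(), name="Agent")

# chat.completions.create now goes through observable(kind=Kind.llm, ...),
# so the call is recorded as an LLM span once divi's services are initialized.
completion = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative model name
    messages=[{"role": "user", "content": "Say hello."}],
)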
divi/decorators/observable.py
CHANGED
@@ -1,4 +1,3 @@
-import contextvars
 import functools
 from typing import (
     Any,
@@ -14,26 +13,15 @@ from typing import (
     runtime_checkable,
 )

-from
-
-import divi
+from divi.decorators.observe import observe
 from divi.evaluation.evaluate import evaluate_scores
 from divi.evaluation.scores import Score
-from divi.proto.trace.v1.trace_pb2 import ScopeSpans
 from divi.session import SessionExtra
-from divi.
-from divi.signals.trace import Span
-from divi.utils import extract_flattened_inputs
+from divi.signals.span import Kind, Span

 R = TypeVar("R", covariant=True)
 P = ParamSpec("P")

-# ContextVar to store the extra information
-# from the Session and parent Span
-_SESSION_EXTRA = contextvars.ContextVar[Optional[SessionExtra]](
-    "_SESSION_EXTRA", default=None
-)
-

 @runtime_checkable
 class WithSessionExtra(Protocol, Generic[P, R]):
@@ -51,7 +39,7 @@ def observable(func: Callable[P, R]) -> WithSessionExtra[P, R]: ...

 @overload
 def observable(
-    kind:
+    kind: Kind = Kind.function,
     *,
     name: Optional[str] = None,
     scores: Optional[list[Score]] = None,
@@ -64,7 +52,7 @@ def observable(
 ) -> Union[Callable, Callable[[Callable], Callable]]:
     """Observable decorator factory."""

-    kind = kwargs.pop("kind",
+    kind = kwargs.pop("kind", Kind.function)
     name = kwargs.pop("name", None)
     metadata = kwargs.pop("metadata", None)
     scores: list[Score] = kwargs.pop("scores", None)
@@ -74,44 +62,23 @@ def observable(
     def wrapper(
         *args, session_extra: Optional[SessionExtra] = None, **kwargs
     ):
+        # 1. init the span
         span = Span(
             kind=kind, name=name or func.__name__, metadata=metadata
         )
-
-        #
-
-
-
-
-
-
-
-        #
-
-
-            raise ValueError("Trace not found in session context.")
-        # TODO: collect inputs and outputs for SPAN_KIND_FUNCTION
-        inputs = extract_flattened_inputs(func, *args, **kwargs)
-        # create the span if it is the root span
-        if divi._datapark and span.trace_id:
-            divi._datapark.create_spans(
-                span.trace_id, ScopeSpans(spans=[span.signal])
-            )
-        # end the trace if it is the root span
-        if divi._datapark and not span.parent_span_id:
-            trace.end()
-        # create the chat completion if it is a chat completion
-        if divi._datapark and isinstance(result, ChatCompletion):
-            divi._datapark.create_chat_completion(
-                span_id=span.span_id,
-                trace_id=trace.trace_id,
-                inputs=inputs,
-                completion=result,
-            )
-        # evaluate the scores if they are provided
-        if scores is not None and scores.__len__() > 0:
-            evaluate_scores(inputs, outputs=result, scores=scores)
+
+        # 2. observe the function
+        result = observe(
+            *args,
+            func=func,
+            span=span,
+            session_extra=session_extra,
+            **kwargs,
+        )
+
+        # 3. evaluate the scores if they are provided
+        messages = kwargs.get("messages", [])
+        evaluate_scores(messages, outputs=result, scores=scores)

         return result

divi/decorators/observe.py
ADDED
@@ -0,0 +1,47 @@
+import contextvars
+from typing import (
+    Callable,
+    Optional,
+)
+
+from divi.decorators.collect import collect
+from divi.session import SessionExtra
+from divi.session.setup import setup
+from divi.signals.span import Span
+from divi.utils import extract_flattened_inputs
+
+# ContextVar to store the extra information
+# from the Session and parent Span
+_SESSION_EXTRA = contextvars.ContextVar[Optional[SessionExtra]](
+    "_SESSION_EXTRA", default=None
+)
+
+
+def observe(
+    *args,
+    func: Callable,
+    span: Span,
+    session_extra: Optional[SessionExtra] = None,
+    **kwargs,
+):
+    session_extra = setup(span, _SESSION_EXTRA.get() or session_extra)
+    # set current context
+    token = _SESSION_EXTRA.set(session_extra)
+    # execute the function
+    span.start()
+    result = func(*args, **kwargs)
+    span.end()
+    # recover parent context
+    _SESSION_EXTRA.reset(token)
+
+    # get the trace to collect data
+    trace = session_extra.get("trace")
+    # end the trace if it is the root span
+    if trace and not span.parent_span_id:
+        trace.end()
+
+    # collect inputs and outputs
+    inputs = extract_flattened_inputs(func, *args, **kwargs)
+    collect(span, inputs, result)
+
+    return result
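A minimal sketch of how the refactored pieces fit together; the decorated function and its arguments are hypothetical:

from divi import observable
from divi.signals import Kind

@observable(kind=Kind.function, name="summarize")
def summarize(text: str) -> str:
    return text[:100]

# observable() builds the Span, then observe() resolves the session context,
# starts/ends the span around the call, ends the root trace, and hands the
# flattened inputs plus the result to collect().
result = summarize("some long document text ...")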
divi/evaluation/evaluate.py
CHANGED
@@ -1,14 +1,18 @@
-import copy
 import os
-from typing import
+from typing import Optional

-from openai.types.chat import
+from openai.types.chat import (
+    ChatCompletion,
+    ChatCompletionMessageParam,
+)
 from typing_extensions import List

 import divi
+from divi.decorators.observe import observe
 from divi.evaluation import Evaluator
 from divi.evaluation.evaluator import EvaluatorConfig
 from divi.evaluation.scores import Score
+from divi.signals.span import Kind, Span

 OPENAI_API_KEY = "OPENAI_API_KEY"
 OPENAI_BASE_URL = "OPENAI_BASE_URL"
@@ -29,29 +33,29 @@ def init_evaluator(config: Optional[EvaluatorConfig] = None):


 def evaluate_scores(
-
-    outputs: ChatCompletion,
-    scores: List[Score],
+    messages: Optional[List[ChatCompletionMessageParam]],
+    outputs: Optional[ChatCompletion],
+    scores: Optional[List[Score]],
     config: Optional[EvaluatorConfig] = None,
 ):
+    if messages is None or scores is None or scores.__len__() == 0:
+        return
     if not divi._evaluator:
         divi._evaluator = init_evaluator(config)

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # TODO: collect all evaluation scores and link them to span
-    print(evaluation_scores)
+    if isinstance(outputs, ChatCompletion):
+        output_message = outputs.choices[0].message.content
+        if not output_message:
+            return
+
+        evaluation_span = Span(kind=Kind.evaluation, name="Evaluation")
+        observe(
+            func=divi._evaluator.evaluate,
+            span=evaluation_span,
+            target=output_message,
+            conversation="\n".join(
+                f"{m.get('role', 'unknown')}: {m.get('content')}"
+                for m in messages
+            ),
+            scores=scores,
+        )
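The new guard clause makes the helper safe to call unconditionally from the decorator. A small sketch of that contract (message content is illustrative):

from divi.evaluation.evaluate import evaluate_scores

# Each of these returns immediately: no messages, no scores, or an empty
# score list means there is nothing to evaluate.
evaluate_scores(messages=None, outputs=None, scores=None)
evaluate_scores(
    messages=[{"role": "user", "content": "Hi"}],
    outputs=None,
    scores=[],
)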
divi/evaluation/evaluator.py
CHANGED
@@ -50,34 +50,42 @@ class Evaluator:
     )

     @staticmethod
-    def generate_prompt(conversation: str, score: Score) -> str:
+    def generate_prompt(target: str, conversation: str, score: Score) -> str:
         return PROMPT_TEMPLATE.format(
-            requirements=PRESET_PROMPT[score.value],
+            requirements=PRESET_PROMPT[score.value],
+            target=target,
+            conversation=conversation,
         )

     def _sync_evaluate_once(
-        self, conversation: str, score: Score
+        self, target: str, conversation: str, score: Score
     ) -> Optional[EvaluationResult]:
-        prompt = self.generate_prompt(conversation, score)
+        prompt = self.generate_prompt(target, conversation, score)
         response = self.sync_client.beta.chat.completions.parse(
             model=self.config.model,
             messages=[{"role": "user", "content": prompt}],
             temperature=self.config.temperature,
             response_format=EvaluationResult,
         )
-
+        result = response.choices[0].message.parsed
+        if result is not None:
+            result.name = score
+        return result

     async def _async_evaluate_once(
-        self, conversation: str, score: Score
+        self, target: str, conversation: str, score: Score
     ) -> Optional[EvaluationResult]:
-        prompt = self.generate_prompt(conversation, score)
+        prompt = self.generate_prompt(target, conversation, score)
         response = await self.async_client.beta.chat.completions.parse(
             model=self.config.model,
             messages=[{"role": "user", "content": prompt}],
             temperature=self.config.temperature,
             response_format=EvaluationResult,
         )
-
+        result = response.choices[0].message.parsed
+        if result is not None:
+            result.name = score
+        return result

     def _aggregate_result(
         self, name: Score, evaluations: List[EvaluationResult]
@@ -115,13 +123,15 @@ class Evaluator:
         return aggregated_results

     def evaluate_sync(
-        self, conversation: str, scores: list[Score], n_rounds: int
+        self, target: str, conversation: str, scores: list[Score], n_rounds: int
     ) -> List[EvaluationScore]:
         with concurrent.futures.ThreadPoolExecutor(
             max_workers=self.config.max_concurrency
         ) as executor:
             futures = [
-                executor.submit(
+                executor.submit(
+                    self._sync_evaluate_once, target, conversation, score
+                )
                 for _ in range(n_rounds)
                 for score in scores
             ]
@@ -133,13 +143,15 @@ class Evaluator:
         )

     async def evaluate_async(
-        self, conversation: str, scores: list[Score], n_rounds: int
+        self, target: str, conversation: str, scores: list[Score], n_rounds: int
     ) -> List[EvaluationScore]:
         semaphore = asyncio.Semaphore(self.config.max_concurrency)

         async def sem_task(score):
             async with semaphore:
-                return await self._async_evaluate_once(
+                return await self._async_evaluate_once(
+                    target, conversation, score
+                )

         tasks = [sem_task(score) for _ in range(n_rounds) for score in scores]
         evaluations = await asyncio.gather(*tasks)
@@ -149,6 +161,7 @@ class Evaluator:

     def evaluate(
         self,
+        target: str,
         conversation: str,
         scores: list[Score],
         n_rounds: int = 5,
@@ -156,6 +169,6 @@ class Evaluator:
     ) -> List[EvaluationScore]:
         if mode == "async":
             return asyncio.run(
-                self.evaluate_async(conversation, scores, n_rounds)
+                self.evaluate_async(target, conversation, scores, n_rounds)
             )
-        return self.evaluate_sync(conversation, scores, n_rounds)
+        return self.evaluate_sync(target, conversation, scores, n_rounds)
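A sketch of the updated call shape. It assumes Score is an enum (so list(Score) yields all preset criteria) and that init_evaluator picks up OPENAI_API_KEY / OPENAI_BASE_URL from the environment; the strings are illustrative:

from divi.evaluation.evaluate import init_evaluator
from divi.evaluation.scores import Score

evaluator = init_evaluator()
results = evaluator.evaluate(
    target="Paris is the capital of France.",  # the response under test
    conversation="user: What is the capital of France?",
    scores=list(Score),  # assumed Enum; evaluate every preset criterion
    n_rounds=3,
)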
divi/evaluation/prompts.py
CHANGED
@@ -1,14 +1,19 @@
 PROMPT_TEMPLATE = (
-    "
+    "The *requirements* of the evaluation task is: {requirements}\n\n"
+    "Below is the *context* of the conversation (for reference only):\n"
+    "{conversation}\n\n"
+    "Now, in view of both the requirements and the context, evaluate the assistant’s response:\n"
+    "{target}\n\n"
     "Please perform step-by-step reasoning to reach your judgment.\n\n"
     "Strictly output your answer in the following JSON format:\n"
-
-    "
-    "
-    "
+    "{{\n"
+    '    "judgment": bool, # true if the response meets all requirements\n'
+    '    "reasoning": "string" # concise explanation, hitting only the key points\n'
+    "}}\n"
+    "Do not output anything else."
 )

 PRESET_PROMPT = {
-    "task_completion": "
-    "instruction_adherence": "
+    "task_completion": "Assess whether the assistant response fulfills the user's task requirements.",
+    "instruction_adherence": "Assess whether the assistant response strictly follows every instruction given by the user, without omissions, deviations, or hallucinations.",
 }
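How the reworked template is filled in (the conversation and target strings are illustrative):

from divi.evaluation.prompts import PRESET_PROMPT, PROMPT_TEMPLATE

prompt = PROMPT_TEMPLATE.format(
    requirements=PRESET_PROMPT["task_completion"],
    conversation="user: What is 2 + 2?",
    target="2 + 2 equals 4.",
)
# The escaped {{ }} braces render as a literal JSON skeleton, so the model is
# instructed to answer with only {"judgment": ..., "reasoning": ...}.
print(prompt)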
divi/proto/trace/v1/trace.proto
CHANGED
divi/proto/trace/v1/trace_pb2.py
CHANGED
@@ -25,7 +25,7 @@ _sym_db = _symbol_database.Default()
 from divi.proto.common.v1 import common_pb2 as divi_dot_proto_dot_common_dot_v1_dot_common__pb2


-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x64ivi/proto/trace/v1/trace.proto\x12\x13\x64ivi.proto.trace.v1\x1a!divi/proto/common/v1/common.proto\"6\n\nScopeSpans\x12(\n\x05spans\x18\x02 \x03(\x0b\x32\x19.divi.proto.trace.v1.Span\"\
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x64ivi/proto/trace/v1/trace.proto\x12\x13\x64ivi.proto.trace.v1\x1a!divi/proto/common/v1/common.proto\"6\n\nScopeSpans\x12(\n\x05spans\x18\x02 \x03(\x0b\x32\x19.divi.proto.trace.v1.Span\"\xbe\x02\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x16\n\x0eparent_span_id\x18\x03 \x01(\x0c\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x30\n\x04kind\x18\x05 \x01(\x0e\x32\".divi.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x06 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x07 \x01(\x06\x12\x30\n\x08metadata\x18\x08 \x03(\x0b\x32\x1e.divi.proto.common.v1.KeyValue\"O\n\x08SpanKind\x12\x16\n\x12SPAN_KIND_FUNCTION\x10\x00\x12\x11\n\rSPAN_KIND_LLM\x10\x01\x12\x18\n\x14SPAN_KIND_EVALUATION\x10\x02\x42\rZ\x0bservices/pbb\x06proto3')

 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -36,7 +36,7 @@ if not _descriptor._USE_C_DESCRIPTORS:
   _globals['_SCOPESPANS']._serialized_start=91
   _globals['_SCOPESPANS']._serialized_end=145
   _globals['_SPAN']._serialized_start=148
-  _globals['_SPAN']._serialized_end=
+  _globals['_SPAN']._serialized_end=466
   _globals['_SPAN_SPANKIND']._serialized_start=387
-  _globals['_SPAN_SPANKIND']._serialized_end=
+  _globals['_SPAN_SPANKIND']._serialized_end=466
 # @@protoc_insertion_point(module_scope)
divi/proto/trace/v1/trace_pb2.pyi
CHANGED
@@ -19,8 +19,10 @@ class Span(_message.Message):
         __slots__ = ()
         SPAN_KIND_FUNCTION: _ClassVar[Span.SpanKind]
         SPAN_KIND_LLM: _ClassVar[Span.SpanKind]
+        SPAN_KIND_EVALUATION: _ClassVar[Span.SpanKind]
     SPAN_KIND_FUNCTION: Span.SpanKind
     SPAN_KIND_LLM: Span.SpanKind
+    SPAN_KIND_EVALUATION: Span.SpanKind
     TRACE_ID_FIELD_NUMBER: _ClassVar[int]
     SPAN_ID_FIELD_NUMBER: _ClassVar[int]
     PARENT_SPAN_ID_FIELD_NUMBER: _ClassVar[int]
divi/services/datapark/datapark.py
CHANGED
@@ -4,13 +4,14 @@ from google.protobuf.json_format import MessageToDict
 from openai import NotGiven
 from openai.types.chat import ChatCompletion
 from pydantic import UUID4
-from typing_extensions import Mapping
+from typing_extensions import List, Mapping

 import divi
+from divi.evaluation.evaluator import EvaluationScore
 from divi.proto.trace.v1.trace_pb2 import ScopeSpans
 from divi.services.service import Service
 from divi.session.session import SessionSignal
-from divi.signals.trace
+from divi.signals.trace import TraceSignal


 class DataPark(Service):
@@ -58,6 +59,8 @@ class DataPark(Service):
         completion: ChatCompletion,
     ) -> None:
         hex_span_id = span_id.hex()
+        str_trace_id = str(trace_id)
+
         self.post_concurrent(
             {
                 "/api/v1/chat/completions/input": {
@@ -66,8 +69,23 @@ class DataPark(Service):
                 },
                 "/api/v1/chat/completions": {
                     "span_id": hex_span_id,
-                    "trace_id":
+                    "trace_id": str_trace_id,
                     "data": completion.model_dump(),
                 },
             }
         )
+
+    def create_scores(
+        self,
+        span_id: bytes,
+        trace_id: UUID4,
+        scores: List[EvaluationScore],
+    ) -> None:
+        self.post(
+            "/api/v1/chat/completions/scores",
+            payload={
+                "span_id": span_id.hex(),
+                "trace_id": str(trace_id),
+                "data": [score.model_dump() for score in scores],
+            },
+        )
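A sketch of the new create_scores call; the ids are made up, and the score list would normally come from Evaluator.evaluate:

import os
from uuid import uuid4

import divi

if divi._datapark:  # set by divi.services.init()
    divi._datapark.create_scores(
        span_id=os.urandom(8),  # FixedString(8) span id
        trace_id=uuid4(),
        scores=[],              # list[EvaluationScore] from Evaluator.evaluate
    )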
divi/services/init.py
CHANGED
@@ -7,7 +7,9 @@ from divi.services.datapark import init as init_datapark

 def init():
     if not divi._auth:
-        divi._auth = init_auth(
+        divi._auth = init_auth(
+            api_key="divi-aa31aef9-bb4c-4a98-aaad-7e12bdacec83"
+        )
     if not divi._datapark:
         divi._datapark = init_datapark()
     # TODO - Uncomment this when the core service is ready
divi/session/session.py
CHANGED
divi/session/setup.py
CHANGED
@@ -3,8 +3,8 @@ from typing_extensions import Optional
 import divi
 from divi.services import init as init_services
 from divi.session import Session, SessionExtra
-from divi.signals.
-from divi.signals.trace
+from divi.signals.span import Span
+from divi.signals.trace import Trace


 def init_session(name: Optional[str] = None) -> Session:
divi/signals/__init__.py
CHANGED
@@ -1,3 +1,3 @@
-from .
+from .span import Kind

-__all__ = ["
+__all__ = ["Kind"]
divi/signals/span.py
ADDED
@@ -0,0 +1,83 @@
+import os
+import time
+from enum import Enum
+from typing import Any, Mapping, Optional
+
+from pydantic import UUID4
+
+import divi
+from divi.proto.common.v1.common_pb2 import KeyValue
+from divi.proto.trace.v1.trace_pb2 import ScopeSpans
+from divi.proto.trace.v1.trace_pb2 import Span as SpanProto
+
+
+class Kind(int, Enum):
+    """Enum for the kind of span."""
+
+    function = SpanProto.SpanKind.SPAN_KIND_FUNCTION
+    llm = SpanProto.SpanKind.SPAN_KIND_LLM
+    evaluation = SpanProto.SpanKind.SPAN_KIND_EVALUATION
+
+
+class Span:
+    def __init__(
+        self,
+        kind: Kind = Kind.function,
+        name: Optional[str] = None,
+        metadata: Optional[Mapping[str, Any]] = None,
+    ):
+        # span_id is a FixedString(8)
+        self.span_id: bytes = self._generate_span_id()
+        self.name = name
+        self.kind = kind
+        self.metadata = metadata
+        self.start_time_unix_nano: int | None = None
+        self.end_time_unix_nano: int | None = None
+
+        self.trace_id: UUID4 | None = None
+        self.parent_span_id: bytes | None = None
+
+    @property
+    def signal(self) -> SpanProto:
+        signal: SpanProto = SpanProto(
+            name=self.name,
+            span_id=self.span_id,
+            kind=SpanProto.SpanKind.Name(self.kind),
+            start_time_unix_nano=self.start_time_unix_nano,
+            end_time_unix_nano=self.end_time_unix_nano,
+            trace_id=self.trace_id.bytes if self.trace_id else None,
+            parent_span_id=self.parent_span_id,
+        )
+        signal.metadata.extend(
+            KeyValue(key=k, value=v)
+            for k, v in (self.metadata or dict()).items()
+        )
+        return signal
+
+    @classmethod
+    def _generate_span_id(cls) -> bytes:
+        return os.urandom(8)
+
+    def start(self):
+        """Start the span by recording the current time in nanoseconds."""
+        self.start_time_unix_nano = time.time_ns()
+        self.upsert_span()
+
+    def end(self):
+        """End the span by recording the end time in nanoseconds."""
+        if self.start_time_unix_nano is None:
+            raise ValueError("Span must be started before ending.")
+        self.end_time_unix_nano = time.time_ns()
+        self.upsert_span()
+
+    def _add_node(self, trace_id: UUID4, parent_id: Optional[bytes] = None):
+        """Add node for obs tree."""
+        self.trace_id = trace_id
+        self.parent_span_id = parent_id
+
+    def upsert_span(self):
+        """Upsert span with datapark."""
+        if divi._datapark and self.trace_id:
+            divi._datapark.create_spans(
+                self.trace_id, ScopeSpans(spans=[self.signal])
+            )
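Lifecycle sketch for the new Span; the trace/parent wiring below is normally done by divi.session.setup and is shown by hand only for illustration:

from uuid import uuid4

from divi.signals.span import Kind, Span

span = Span(kind=Kind.llm, name="chat")
span._add_node(trace_id=uuid4(), parent_id=None)  # attach to a trace
span.start()  # records start_time_unix_nano and upserts via DataPark when initialized
# ... the observed work runs here ...
span.end()    # records end_time_unix_nano and upserts the finished span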
divi/signals/trace.py
ADDED
@@ -0,0 +1,79 @@
+from datetime import UTC, datetime
+from typing import Optional
+from uuid import uuid4
+
+from pydantic import UUID4
+from typing_extensions import TypedDict
+
+import divi
+
+
+class NullTime(TypedDict, total=False):
+    """Null time"""
+
+    Time: str
+    """Time in iso format"""
+    Valid: bool
+    """Valid"""
+
+
+class TraceSignal(TypedDict, total=False):
+    """Trace request"""
+
+    id: str
+    """Trace ID UUID4"""
+    start_time: str
+    """Start time in iso format"""
+    end_time: NullTime
+    """End time in iso format"""
+    name: Optional[str]
+
+
+class Trace:
+    def __init__(self, session_id: UUID4, name: Optional[str] = None):
+        self.trace_id: UUID4 = uuid4()
+        self.start_time: str | None = None
+        self.end_time: str | None = None
+        self.name: Optional[str] = name
+        self.session_id: UUID4 = session_id
+
+        self.start()
+
+    @property
+    def signal(self) -> TraceSignal:
+        if self.start_time is None:
+            raise ValueError("Trace must be started.")
+        signal = TraceSignal(
+            id=str(self.trace_id),
+            start_time=self.start_time,
+            name=self.name,
+        )
+        if self.end_time is not None:
+            signal["end_time"] = NullTime(
+                Time=self.end_time,
+                Valid=True,
+            )
+        return signal
+
+    @staticmethod
+    def unix_nano_to_iso(unix_nano: int) -> str:
+        return datetime.utcfromtimestamp(unix_nano / 1e9).isoformat()
+
+    def start(self):
+        """Start the trace by recording the current time in nanoseconds."""
+        self.start_time = datetime.now(UTC).isoformat()
+        self.upsert_trace()
+
+    def end(self):
+        """End the trace by recording the end time in nanoseconds."""
+        if self.start_time is None:
+            raise ValueError("Span must be started before ending.")
+        self.end_time = datetime.now(UTC).isoformat()
+        self.upsert_trace()
+
+    def upsert_trace(self):
+        """Upsert trace with datapark."""
+        if divi._datapark:
+            divi._datapark.upsert_traces(
+                session_id=self.session_id, traces=[self.signal]
+            )
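A sketch of the trace signal payload; the session id is made up, and no DataPark upload happens unless divi has been initialized:

from uuid import uuid4

from divi.signals.trace import Trace

trace = Trace(session_id=uuid4(), name="demo")  # start() runs inside __init__
trace.end()
print(trace.signal)
# -> {"id": "<uuid4>", "start_time": "<iso>", "name": "demo",
#     "end_time": {"Time": "<iso>", "Valid": True}}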
{divi-0.0.1.dev28.dist-info → divi-0.0.1.dev47.dist-info}/RECORD
CHANGED
@@ -1,12 +1,14 @@
-divi/__init__.py,sha256=
+divi/__init__.py,sha256=e3T9Znmiwfihr_X4Ly0Z0yoW23xGNQowPz6lCfXcXPI,519
 divi/utils.py,sha256=fXkjoyo_Lh8AZliKICOP460m0czUcNQjcEcceJbaOVA,1439
 divi/decorators/__init__.py,sha256=HkyWdC1ctTsVFucCWCkj57JB4NmwONus1d2S2dUbvs4,110
-divi/decorators/
-divi/decorators/
+divi/decorators/collect.py,sha256=5iUxAnbHYx4ISkFg64IK_4miGdrWgbOXLJxKz8lGIv8,1074
+divi/decorators/obs_openai.py,sha256=ouw3GYDFg6S27tcUzY0dIqz8JX_JM8IOXttzo7HK7nk,1359
+divi/decorators/observable.py,sha256=isUS3P_07wbZBj2UcRAoYNDceQTIn6zdein3-PWVsi8,2289
+divi/decorators/observe.py,sha256=I2RVsp2WQep6iTLSxkAlMP8wiRsSYiiYrxR2hJzPxcI,1211
 divi/evaluation/__init__.py,sha256=3qMHWu_zBh6FJa6-1dZZEWiAblQZurn5doa0OjGvDGs,93
-divi/evaluation/evaluate.py,sha256=
-divi/evaluation/evaluator.py,sha256=
-divi/evaluation/prompts.py,sha256=
+divi/evaluation/evaluate.py,sha256=lVMCw5vHGa5sJvUyhVDZ9m3Sgl4baCjWhw2OKazhvgM,1861
+divi/evaluation/evaluator.py,sha256=ulTyfSg2JXxzCCL7hRsn-EBb9UKcpQFA6rVT42mouVQ,5819
+divi/evaluation/prompts.py,sha256=qiv7TljwV8NTy0iLS2GEWIDFFNXhHKUlgVb-WoZhm4Q,970
 divi/evaluation/scores.py,sha256=ZgSxfve-ZivX3WU4TGcgPOSpUQVMbG5a15IQNPeq_bQ,173
 divi/proto/common/v1/common.proto,sha256=Rx8wr0_tOtQ1NseTMnsav4ApD1MDALzQDBA2IvLRTU0,1775
 divi/proto/common/v1/common_pb2.py,sha256=br61OHQVAi6SI3baFcb5xJv2Xd-AZ04A19xeSjLNMXo,2442
@@ -18,12 +20,12 @@ divi/proto/core/health/v1/health_service_pb2_grpc.py,sha256=YmlO94d-G71YBW1XZDSb
 divi/proto/metric/v1/metric.proto,sha256=YHRMLUW-MtakHuibR3PJ0s2w5KgV12kc4737iHw0DTk,585
 divi/proto/metric/v1/metric_pb2.py,sha256=uvBhyy8QpaES3Jl82yVfsGazW5654XpRnsdGlpVgIRE,1974
 divi/proto/metric/v1/metric_pb2.pyi,sha256=S7ipsojkD7QZAYefDE4b3PO99Yzc6mOdtSLxH3-b67A,1304
-divi/proto/trace/v1/trace.proto,sha256=
-divi/proto/trace/v1/trace_pb2.py,sha256=
-divi/proto/trace/v1/trace_pb2.pyi,sha256=
+divi/proto/trace/v1/trace.proto,sha256=tPRIgBZB5KOKj7AoD3NoDZvLwoiJkbLiLqW53Ah-2-0,1367
+divi/proto/trace/v1/trace_pb2.py,sha256=zMuQO5mN2xl11USHkhi0lLwBAPlYXRU_UG1r0Uu3mJg,2369
+divi/proto/trace/v1/trace_pb2.pyi,sha256=k4dHYKAusH4I-XSW9KP3maogSWdRL7hVy8HCHhqFWzM,2231
 divi/services/__init__.py,sha256=TcVJ_gKxyPIcwhT9GgttqHeyk0icW44uE285KmUiyh4,185
 divi/services/finish.py,sha256=XKPKGJ5cWd5H95G_VpIOlOZOLrcf9StoTs7ayRic2jY,173
-divi/services/init.py,sha256=
+divi/services/init.py,sha256=dwXXXbf1-V0iAHZOETiv527TZQ07-waMIR5cSiU3QjI,509
 divi/services/service.py,sha256=539MhcYfMvsVGjDdu0UtYSZnL2cloaPeYeOSMl2eUy8,1532
 divi/services/auth/__init__.py,sha256=PIQ9rQ0jcRqcy03a3BOY7wbzwluIRG_4kI_H4J4mRFk,74
 divi/services/auth/auth.py,sha256=eRcE6Kq8jbBr6YL93HCGDIoga90SoZf3ogOAKeza9WY,445
@@ -34,16 +36,16 @@ divi/services/core/core.py,sha256=PRwPtLgrgmCrejUfKf7HJNrAhGS0paFNZ7JwDToEUAk,12
 divi/services/core/finish.py,sha256=dIGQpVXcJY4-tKe7A1_VV3yoSHNCDPfOlUltvzvk6VI,231
 divi/services/core/init.py,sha256=e7-fgpOPglBXyEoPkgOAnpJk2ApdFbo7LPupxOb8N-w,1966
 divi/services/datapark/__init__.py,sha256=GbV1mwHE07yutgOlCIYHykSEL5KJ-ApgLutGMzu2eUE,86
-divi/services/datapark/datapark.py,sha256=
+divi/services/datapark/datapark.py,sha256=f-qE2kmkLAniIj9mOP3nCbI3A3qkfIUnoVekwQ5w0QE,2781
 divi/services/datapark/init.py,sha256=C32f9t3eLsxcYNqEyheh6nW455G2oR0YhhdqBcbN3ec,92
 divi/session/__init__.py,sha256=6lYemv21VQCIHx-xIdi7BxXcPNxVdvE60--8ArReUew,82
-divi/session/session.py,sha256=
-divi/session/setup.py,sha256=
+divi/session/session.py,sha256=QxtEezI447PbtKG2U6cxL1ACae55e8nFfTufAY8pEYI,811
+divi/session/setup.py,sha256=SHNzCuvOzlrlBJj34_jbzhfa6SXX3oaXrcG8bN0-Xvo,1398
 divi/session/teardown.py,sha256=YiBz_3yCiljMFEofZ60VmRL5sb8WA5GT7EYF8nFznZ4,133
-divi/signals/__init__.py,sha256=
-divi/signals/
-divi/signals/trace
-divi-0.0.1.
-divi-0.0.1.
-divi-0.0.1.
-divi-0.0.1.
+divi/signals/__init__.py,sha256=wfSkkCwkRsFP4aLj8aGHk_k6Y50P5yN44WWlO3XyW18,43
+divi/signals/span.py,sha256=FQWql6ivAeXGk1HPZCsCjL5mXW6S6Nn9SmOiKH4aXik,2629
+divi/signals/trace.py,sha256=IoYeTfd6x_Xmxcp4HbFSEne0d48hol4ng2Mb_AO8hZw,2144
+divi-0.0.1.dev47.dist-info/METADATA,sha256=3QEVpc6O2YUEyMtFJ8kJn3rd8y_xMHGc67ObLg20vfs,497
+divi-0.0.1.dev47.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+divi-0.0.1.dev47.dist-info/licenses/LICENSE,sha256=5OJuZ4wMMEV0DgF0tofhAlS_KLkaUsZwwwDS2U_GwQ0,1063
+divi-0.0.1.dev47.dist-info/RECORD,,
divi/signals/trace/__init__.py
DELETED
divi/signals/trace/trace.py
DELETED
@@ -1,151 +0,0 @@
-import os
-import time
-from datetime import UTC, datetime
-from typing import Any, Mapping, Optional
-from uuid import uuid4
-
-from pydantic import UUID4
-from typing_extensions import TypedDict
-
-import divi
-from divi.proto.common.v1.common_pb2 import KeyValue
-from divi.proto.trace.v1.trace_pb2 import Span as SpanProto
-
-
-class NullTime(TypedDict, total=False):
-    """Null time"""
-
-    Time: str
-    """Time in iso format"""
-    Valid: bool
-    """Valid"""
-
-
-class TraceSignal(TypedDict, total=False):
-    """Trace request"""
-
-    id: str
-    """Trace ID UUID4"""
-    start_time: str
-    """Start time in iso format"""
-    end_time: NullTime
-    """End time in iso format"""
-    name: Optional[str]
-
-
-class Trace:
-    def __init__(self, session_id: UUID4, name: Optional[str] = None):
-        self.trace_id: UUID4 = uuid4()
-        self.start_time: str | None = None
-        self.end_time: str | None = None
-        self.name: Optional[str] = name
-        self.session_id: UUID4 = session_id
-
-        self.start()
-
-    @property
-    def signal(self) -> TraceSignal:
-        if self.start_time is None:
-            raise ValueError("Trace must be started.")
-        signal = TraceSignal(
-            id=str(self.trace_id),
-            start_time=self.start_time,
-            name=self.name,
-        )
-        if self.end_time is not None:
-            signal["end_time"] = NullTime(
-                Time=self.end_time,
-                Valid=True,
-            )
-        return signal
-
-    @staticmethod
-    def unix_nano_to_iso(unix_nano: int) -> str:
-        return datetime.utcfromtimestamp(unix_nano / 1e9).isoformat()
-
-    def start(self):
-        """Start the trace by recording the current time in nanoseconds."""
-        self.start_time = datetime.now(UTC).isoformat()
-        self.upsert_trace()
-
-    def end(self):
-        """End the trace by recording the end time in nanoseconds."""
-        if self.start_time is None:
-            raise ValueError("Span must be started before ending.")
-        self.end_time = datetime.now(UTC).isoformat()
-        self.upsert_trace()
-
-    def upsert_trace(self):
-        """Upsert trace with datapark."""
-        if divi._datapark:
-            divi._datapark.upsert_traces(
-                session_id=self.session_id, traces=[self.signal]
-            )
-
-
-class Span:
-    KIND_MAP = {
-        "function": SpanProto.SpanKind.SPAN_KIND_FUNCTION,
-        "llm": SpanProto.SpanKind.SPAN_KIND_LLM,
-    }
-
-    def __init__(
-        self,
-        kind: str = "function",
-        name: Optional[str] = None,
-        metadata: Optional[Mapping[str, Any]] = None,
-    ):
-        # span_id is a FixedString(8)
-        self.span_id: bytes = self._generate_span_id()
-        self.name = name
-        self.kind = kind
-        self.metadata = metadata
-        self.start_time_unix_nano: int | None = None
-        self.end_time_unix_nano: int | None = None
-
-        self.trace_id: UUID4 | None = None
-        self.parent_span_id: bytes | None = None
-
-    @property
-    def signal(self) -> SpanProto:
-        signal: SpanProto = SpanProto(
-            name=self.name,
-            span_id=self.span_id,
-            kind=self._get_kind(self.kind),
-            start_time_unix_nano=self.start_time_unix_nano,
-            end_time_unix_nano=self.end_time_unix_nano,
-            trace_id=self.trace_id.bytes if self.trace_id else None,
-            parent_span_id=self.parent_span_id,
-        )
-        signal.metadata.extend(
-            KeyValue(key=k, value=v)
-            for k, v in (self.metadata or dict()).items()
-        )
-        return signal
-
-    @classmethod
-    def _get_kind(cls, kind: str) -> SpanProto.SpanKind:
-        if (k := cls.KIND_MAP.get(kind)) is None:
-            raise ValueError(
-                f"Unknown kind: {kind}. Now allowed: {cls.KIND_MAP.keys()}"
-            )
-        return k
-
-    @classmethod
-    def _generate_span_id(cls) -> bytes:
-        return os.urandom(8)
-
-    def start(self):
-        """Start the span by recording the current time in nanoseconds."""
-        self.start_time_unix_nano = time.time_ns()
-
-    def end(self):
-        """End the span by recording the end time in nanoseconds."""
-        if self.start_time_unix_nano is None:
-            raise ValueError("Span must be started before ending.")
-        self.end_time_unix_nano = time.time_ns()
-
-    def _add_node(self, trace_id: UUID4, parent_id: Optional[bytes] = None):
-        """Add node for obs tree."""
-        self.trace_id = trace_id
-        self.parent_span_id = parent_id
|
File without changes
|