agenta 0.17.0__py3-none-any.whl → 0.17.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


agenta/__init__.py CHANGED
@@ -13,12 +13,15 @@ from .sdk.types import (
     FileInputURL,
     BinaryParam,
 )
+
 from .sdk.tracing.llm_tracing import Tracing
 from .sdk.decorators.tracing import instrument
 from .sdk.decorators.llm_entrypoint import entrypoint, app
 from .sdk.agenta_init import Config, AgentaSingleton, init
 from .sdk.utils.helper.openai_cost import calculate_token_usage
 from .sdk.client import Agenta
+from .sdk.tracing import callbacks
+
 
 config = PreInitObject("agenta.config", Config)
 DEFAULT_AGENTA_SINGLETON_INSTANCE = AgentaSingleton()
agenta/client/backend/types/llm_tokens.py CHANGED
@@ -33,6 +33,6 @@ class LlmTokens(pydantic.BaseModel):
         return super().dict(**kwargs_with_defaults)
 
     class Config:
-        frozen = True
+        frozen = False
         smart_union = True
         json_encoders = {dt.datetime: serialize_datetime}
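
The switch to frozen = False matters because the tracing code in this release starts mutating LlmTokens instances in place when it accumulates token counts (see the _update_span_tokens helper further down). A minimal, self-contained sketch of that behaviour, assuming pydantic v1-style models; the default field values below are illustrative, not taken from the package:

# Sketch (not part of the package): why LlmTokens must be mutable.
# Field names match Tracing._update_span_tokens; defaults are assumed.
import pydantic

class LlmTokens(pydantic.BaseModel):
    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0

    class Config:
        frozen = False  # with frozen = True the in-place update below raises a TypeError

tokens = LlmTokens(prompt_tokens=12, completion_tokens=5, total_tokens=17)
tokens.prompt_tokens += 8  # cumulative update, as done when a child span rolls up into its parent
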
agenta/sdk/__init__.py CHANGED
@@ -13,13 +13,13 @@ from .types import (
     FileInputURL,
     BinaryParam,
 )
+
 from .tracing.llm_tracing import Tracing
 from .decorators.tracing import instrument
 from .decorators.llm_entrypoint import entrypoint, app
 from .agenta_init import Config, AgentaSingleton, init
 from .utils.helper.openai_cost import calculate_token_usage
 
-
 config = PreInitObject("agenta.config", Config)
 DEFAULT_AGENTA_SINGLETON_INSTANCE = AgentaSingleton()
 tracing = DEFAULT_AGENTA_SINGLETON_INSTANCE.tracing  # type: ignore
agenta/sdk/decorators/llm_entrypoint.py CHANGED
@@ -201,7 +201,9 @@ class entrypoint(BaseDecorator):
         if isinstance(result, Dict):
             return FuncResponse(**result, latency=round(latency, 4))
         if isinstance(result, str):
-            return FuncResponse(message=result, latency=round(latency, 4))  # type: ignore
+            return FuncResponse(
+                message=result, usage=None, cost=None, latency=round(latency, 4)
+            )
         if isinstance(result, int) or isinstance(result, float):
             return FuncResponse(
                 message=str(result),
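
The hunk above affects entrypoint functions that return a plain string: the string is now wrapped into a FuncResponse with usage and cost explicitly set to None. A hedged sketch of such an app; the function name and prompt are illustrative, not taken from the package:

# Sketch (illustrative only): a str return value becomes
# FuncResponse(message=..., usage=None, cost=None, latency=...).
import agenta as ag

@ag.entrypoint
def generate(prompt: str) -> str:
    return f"echo: {prompt}"
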
agenta/sdk/decorators/tracing.py CHANGED
@@ -1,5 +1,6 @@
 # Stdlib Imports
 import inspect
+import traceback
 from functools import wraps
 from typing import Any, Callable, Optional
 
@@ -59,6 +60,9 @@ class instrument(BaseDecorator):
                 self.tracing.update_span_status(span=span, value="OK")
             except Exception as e:
                 result = str(e)
+                self.tracing.set_span_attribute(
+                    {"traceback_exception": traceback.format_exc()}
+                )
                 self.tracing.update_span_status(span=span, value="ERROR")
             finally:
                 self.tracing.end_span(
@@ -87,6 +91,9 @@ class instrument(BaseDecorator):
                 self.tracing.update_span_status(span=span, value="OK")
             except Exception as e:
                 result = str(e)
+                self.tracing.set_span_attribute(
+                    {"traceback_exception": traceback.format_exc()}
+                )
                 self.tracing.update_span_status(span=span, value="ERROR")
             finally:
                 self.tracing.end_span(
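
With these two hunks (one for the async wrapper, one for the sync wrapper), a function decorated with the instrument decorator that raises now records the full traceback on its span in addition to marking the span status as ERROR. A hedged usage sketch; the function and its logic are illustrative, and ag.init() configuration is omitted:

# Sketch (illustrative only): if flaky_step raises, the decorator stores
# traceback.format_exc() under "traceback_exception" on the active span
# and sets the span status to ERROR.
import agenta as ag

@ag.instrument()
def flaky_step(x: int) -> int:
    if x < 0:
        raise ValueError("negative input")
    return x * 2
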
agenta/sdk/tracing/__init__.py ADDED
File without changes (new empty module)
agenta/sdk/tracing/callbacks.py ADDED
@@ -0,0 +1,157 @@
+import agenta as ag
+
+
+def litellm_handler():
+    try:
+        from litellm.utils import ModelResponse
+        from litellm.integrations.custom_logger import (
+            CustomLogger as LitellmCustomLogger,
+        )
+    except ImportError as exc:
+        raise ImportError(
+            "The litellm SDK is not installed. Please install it using `pip install litellm`."
+        ) from exc
+    except Exception as exc:
+        raise Exception(
+            "Unexpected error occurred when importing litellm: {}".format(exc)
+        ) from exc
+
+    class LitellmHandler(LitellmCustomLogger):
+        """This handler is responsible for instrumenting certain events when using litellm to call LLMs.
+
+        Args:
+            LitellmCustomLogger (object): custom logger that allows us to override the events to capture.
+        """
+
+        @property
+        def _trace(self):
+            return ag.tracing
+
+        def log_pre_api_call(self, model, messages, kwargs):
+            call_type = kwargs.get("call_type")
+            span_kind = (
+                "llm" if call_type in ["completion", "acompletion"] else "embedding"
+            )
+            self._trace.start_span(
+                name=f"{span_kind}_call",
+                input={"messages": kwargs["messages"]},
+                spankind=span_kind,
+            )
+            self._trace.set_span_attribute(
+                {
+                    "model_config": {
+                        "model": kwargs.get("model"),
+                        **kwargs.get(
+                            "optional_params"
+                        ),  # model-specific params passed in
+                    },
+                }
+            )
+
+        def log_stream_event(self, kwargs, response_obj, start_time, end_time):
+            self._trace.update_span_status(span=self._trace.active_span, value="OK")
+            self._trace.end_span(
+                outputs={
+                    "message": kwargs.get(
+                        "complete_streaming_response"
+                    ),  # the complete streamed response (only set if `completion(..stream=True)`)
+                    "usage": response_obj.usage.dict(),  # litellm calculates usage
+                    "cost": kwargs.get(
+                        "response_cost"
+                    ),  # litellm calculates response cost
+                },
+            )
+
+        def log_success_event(
+            self, kwargs, response_obj: ModelResponse, start_time, end_time
+        ):
+            self._trace.update_span_status(span=self._trace.active_span, value="OK")
+            self._trace.end_span(
+                outputs={
+                    "message": response_obj.choices[0].message.content,
+                    "usage": response_obj.usage.dict(),  # litellm calculates usage
+                    "cost": kwargs.get(
+                        "response_cost"
+                    ),  # litellm calculates response cost
+                },
+            )
+
+        def log_failure_event(
+            self, kwargs, response_obj: ModelResponse, start_time, end_time
+        ):
+            self._trace.update_span_status(span=self._trace.active_span, value="ERROR")
+            self._trace.set_span_attribute(
+                {
+                    "traceback_exception": kwargs[
+                        "traceback_exception"
+                    ],  # the traceback generated via `traceback.format_exc()`
+                    "call_end_time": kwargs[
+                        "end_time"
+                    ],  # datetime object of when call was completed
+                },
+            )
+            self._trace.end_span(
+                outputs={
+                    "message": kwargs["exception"],  # the Exception raised
+                    "usage": response_obj.usage.dict(),  # litellm calculates usage
+                    "cost": kwargs.get(
+                        "response_cost"
+                    ),  # litellm calculates response cost
+                },
+            )
+
+        async def async_log_stream_event(
+            self, kwargs, response_obj, start_time, end_time
+        ):
+            self._trace.update_span_status(span=self._trace.active_span, value="OK")
+            self._trace.end_span(
+                outputs={
+                    "message": kwargs.get(
+                        "complete_streaming_response"
+                    ),  # the complete streamed response (only set if `completion(..stream=True)`)
+                    "usage": response_obj.usage.dict(),  # litellm calculates usage
+                    "cost": kwargs.get(
+                        "response_cost"
+                    ),  # litellm calculates response cost
+                },
+            )
+
+        async def async_log_success_event(
+            self, kwargs, response_obj, start_time, end_time
+        ):
+            self._trace.update_span_status(span=self._trace.active_span, value="OK")
+            self._trace.end_span(
+                outputs={
+                    "message": response_obj.choices[0].message.content,
+                    "usage": response_obj.usage.dict(),  # litellm calculates usage
+                    "cost": kwargs.get(
+                        "response_cost"
+                    ),  # litellm calculates response cost
+                },
+            )
+
+        async def async_log_failure_event(
+            self, kwargs, response_obj, start_time, end_time
+        ):
+            self._trace.update_span_status(span=self._trace.active_span, value="ERROR")
+            self._trace.set_span_attribute(
+                {
+                    "traceback_exception": kwargs[
+                        "traceback_exception"
+                    ],  # the traceback generated via `traceback.format_exc()`
+                    "call_end_time": kwargs[
+                        "end_time"
+                    ],  # datetime object of when call was completed
+                },
+            )
+            self._trace.end_span(
+                outputs={
+                    "message": kwargs["exception"],  # the Exception raised
+                    "usage": response_obj.usage.dict(),  # litellm calculates usage
+                    "cost": kwargs.get(
+                        "response_cost"
+                    ),  # litellm calculates response cost
+                },
+            )
+
+    return LitellmHandler()
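
The new module is re-exported as agenta.callbacks through the __init__.py change shown earlier. A hedged sketch of how the handler is meant to be wired into litellm, assuming litellm is installed and Agenta credentials are configured; the model and message are placeholders:

# Sketch (illustrative only): register the Agenta handler so litellm calls are traced.
import agenta as ag
import litellm

ag.init()  # assumes app_id/api_key/host are provided via environment or arguments
litellm.callbacks = [ag.callbacks.litellm_handler()]

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
)
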
agenta/sdk/tracing/llm_tracing.py CHANGED
@@ -1,16 +1,21 @@
 import os
 from threading import Lock
 from datetime import datetime, timezone
-from typing import Optional, Dict, Any, List, Union
+from typing import Optional, Dict, Any, List
 
 from agenta.sdk.tracing.logger import llm_logger
 from agenta.sdk.tracing.tasks_manager import TaskQueue
 from agenta.client.backend.client import AsyncAgentaApi
 from agenta.client.backend.client import AsyncObservabilityClient
-from agenta.client.backend.types.create_span import CreateSpan, SpanKind, SpanStatusCode
+from agenta.client.backend.types.create_span import (
+    CreateSpan,
+    LlmTokens,
+    SpanStatusCode,
+)
 
 from bson.objectid import ObjectId
 
+
 VARIANT_TRACKING_FEATURE_FLAG = False
 
 
@@ -92,6 +97,13 @@ class Tracing(metaclass=SingletonMeta):
         self,
         attributes: Dict[str, Any] = {},
     ):
+        """
+        Set attributes for the active span.
+
+        Args:
+            attributes (Dict[str, Any], optional): A dictionary of attributes to set. Defaults to {}.
+        """
+
         if (
             self.active_span is None
         ):  # This is the case where entrypoint wants to save the trace information but the parent span has not been initialized yet
@@ -99,7 +111,7 @@ class Tracing(metaclass=SingletonMeta):
                 self.trace_config_cache[key] = value
         else:
             for key, value in attributes.items():
-                self.active_span.attributes[key] = value
+                self.active_span.attributes[key] = value  # type: ignore
 
     def set_trace_tags(self, tags: List[str]):
         self.tags.extend(tags)
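
set_span_attribute, now documented above, is the supported way to attach extra metadata to whichever span is currently active (or to the trace-level cache when no span has been opened yet). A small hedged sketch; the decorated function and attribute keys are illustrative:

# Sketch (illustrative only): attach custom attributes to the currently active span.
import agenta as ag

@ag.instrument()
def retrieve(query: str) -> list:
    ag.tracing.set_span_attribute({"query_length": len(query), "retriever": "bm25"})
    return []
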
@@ -150,11 +162,12 @@
         )
         if VARIANT_TRACKING_FEATURE_FLAG:
             # TODO: we should get the variant_id and variant_name (and environment) from the config object
-            span.variant_id = config.variant_id
-            span.variant_name = (config.variant_name,)
+            span.variant_id = config.variant_id  # type: ignore
+            span.variant_name = (config.variant_name,)  # type: ignore
 
         else:
-            span.parent_span_id = self.active_span.id
+            span.parent_span_id = self.active_span.id  # type: ignore
+
         self.span_dict[span.id] = span
         self.active_span = span
 
@@ -164,28 +177,111 @@
     def update_span_status(self, span: CreateSpan, value: str):
         span.status = value
 
+    def _update_span_cost(self, span: CreateSpan, cost: Optional[float]):
+        if cost is not None and isinstance(cost, float):
+            if span.cost is None:
+                span.cost = cost
+            else:
+                span.cost += cost
+
+    def _update_span_tokens(self, span: CreateSpan, tokens: Optional[dict]):
+        if isinstance(tokens, LlmTokens):
+            tokens = tokens.dict()
+        if tokens is not None and isinstance(tokens, dict):
+            if span.tokens is None:
+                span.tokens = LlmTokens(**tokens)
+            else:
+                span.tokens.prompt_tokens += tokens["prompt_tokens"]
+                span.tokens.completion_tokens += tokens["completion_tokens"]
+                span.tokens.total_tokens += tokens["total_tokens"]
+
     def end_span(self, outputs: Dict[str, Any]):
         """
         Ends the active span, if it is a parent span, ends the trace too.
+
+        Args:
+            outputs (Dict[str, Any]): A dictionary containing the outputs of the span.
+            It should have the following keys:
+                - "message" (str): The message output of the span.
+                - "cost" (Optional[Any]): The cost of the span.
+                - "usage" (Optional[Any]): The number of tokens used in the span.
+
+        Raises:
+            ValueError: If there is no active span to end.
+
+        Returns:
+            None
         """
+
         if self.active_span is None:
             raise ValueError("There is no active span to end.")
+
         self.active_span.end_time = datetime.now(timezone.utc)
         self.active_span.outputs = [outputs.get("message", "")]
-        self.active_span.cost = outputs.get("cost", None)
-        self.active_span.tokens = outputs.get("usage", None)
+        if self.active_span.spankind in [
+            "LLM",
+            "RETRIEVER",
+        ]:  # TODO: Remove this whole part. Setting the cost should be done through set_span_attribute
+            self._update_span_cost(self.active_span, outputs.get("cost", None))
+            self._update_span_tokens(self.active_span, outputs.get("usage", None))
 
         # Push span to list of recorded spans
         self.pending_spans.append(self.active_span)
-        self.llm_logger.info(
-            f"Pushed {self.active_span.spankind} span {self.active_span.id} to recorded spans."
-        )
-        if self.active_span.parent_span_id is None:
+
+        active_span_parent_id = self.active_span.parent_span_id
+        if (
+            self.active_span.status == SpanStatusCode.ERROR.value
+            and active_span_parent_id is not None
+        ):
+            self.record_exception_and_end_trace(span_parent_id=active_span_parent_id)
+
+        if active_span_parent_id is None:
             self.end_trace(parent_span=self.active_span)
+
         else:
-            self.active_span = self.span_dict[self.active_span.parent_span_id]
+            parent_span = self.span_dict[active_span_parent_id]
+            self._update_span_cost(parent_span, self.active_span.cost)
+            self._update_span_tokens(parent_span, self.active_span.tokens)
+            self.active_span = parent_span
+
+    def record_exception_and_end_trace(self, span_parent_id: str):
+        """
+        Record an exception and end the trace.
+
+        Args:
+            span_parent_id (str): The ID of the parent span.
+
+        Returns:
+            None
+        """
+
+        parent_span = self.span_dict.get(span_parent_id)
+        if parent_span is not None:
+            # Update parent span of active span
+            parent_span.outputs = self.active_span.outputs  # type: ignore
+            parent_span.status = "ERROR"
+            parent_span.end_time = datetime.now(timezone.utc)
+
+            # Push parent span to list of recorded spans and end trace
+            self.pending_spans.append(parent_span)
+            self.end_trace(parent_span=parent_span)
+
+        # TODO: improve exception logic here.
 
     def end_trace(self, parent_span: CreateSpan):
+        """
+        Ends the active trace and sends the recorded spans for processing.
+
+        Args:
+            parent_span (CreateSpan): The parent span of the trace.
+
+        Raises:
+            RuntimeError: If there is no active trace to end.
+
+        Returns:
+            None
+        """
+
         if self.api_key == "":
             return
 
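Taken together, the new helpers change what happens when a nested span ends: instead of overwriting the span's cost and tokens with whatever was passed to end_span, the values are accumulated, and the child's totals are added onto the parent span before it becomes active again. A simplified, self-contained sketch of that roll-up; plain dicts stand in for CreateSpan/LlmTokens, so this is not the SDK code itself:

# Simplified sketch of the roll-up done by _update_span_cost/_update_span_tokens:
# the child's cost and token counts are added onto the parent when the child span ends.
def roll_up(parent: dict, child: dict) -> None:
    if child.get("cost") is not None:
        parent["cost"] = (parent.get("cost") or 0.0) + child["cost"]
    for key in ("prompt_tokens", "completion_tokens", "total_tokens"):
        parent.setdefault("tokens", {}).setdefault(key, 0)
        parent["tokens"][key] += child.get("tokens", {}).get(key, 0)

parent_span = {"cost": None, "tokens": {}}
child_span = {"cost": 0.0004, "tokens": {"prompt_tokens": 12, "completion_tokens": 5, "total_tokens": 17}}
roll_up(parent_span, child_span)
# parent_span now holds cost 0.0004 and the summed token counts
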
agenta-0.17.0.dist-info/METADATA → agenta-0.17.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: agenta
-Version: 0.17.0
+Version: 0.17.2
 Summary: The SDK for agenta is an open-source LLMOps platform.
 Home-page: https://agenta.ai
 Keywords: LLMOps,LLM,evaluation,prompt engineering
agenta-0.17.0.dist-info/RECORD → agenta-0.17.2.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-agenta/__init__.py,sha256=O7kq034_S58b2yzrtR_m3Qd-0t7FAh9q7Z7_HsoD-9Y,796
+agenta/__init__.py,sha256=geaQThRuZzebZncauJW2SZzKqIT7-ZkIJJf-zZlWHCs,833
 agenta/cli/evaluation_commands.py,sha256=fs6492tprPId9p8eGO02Xy-NCBm2RZNJLZWcUxugwd8,474
 agenta/cli/helper.py,sha256=vRxHyeNaltzNIGrfU2vO0H28_rXDzx9QqIZ_S-W6zL4,6212
 agenta/cli/main.py,sha256=Wz0ODhoeKK3Qg_CFUhu6D909szk05tc8ZVBB6H1-w7k,9763
@@ -85,7 +85,7 @@ agenta/client/backend/types/image.py,sha256=p7Vmp7HlMV3YyXe8SFdXYJjCbPNIypW6NfVG
 agenta/client/backend/types/invite_request.py,sha256=1nJTUHspzw2WYpUSd4UUtRnjDHM-dqDBvYewgw-hCQE,993
 agenta/client/backend/types/list_api_keys_response.py,sha256=ZNh7jKwHEMKNp8OV5WJ5XxtKn39DwqK1f8vlFKl54x4,1097
 agenta/client/backend/types/llm_run_rate_limit.py,sha256=mfT4lTczPxrJvd8ZCOAjPvw58QoM151p_uZT0PWNOJ4,1045
-agenta/client/backend/types/llm_tokens.py,sha256=PzJZM5RdXCHGDOkzCN2jQc3iAvCdr0iazpsrNUOE6kk,1069
+agenta/client/backend/types/llm_tokens.py,sha256=J236Fgmz5TeFO0MQA1ZA1QozvR6d3kt8aEUyWkq3jLI,1070
 agenta/client/backend/types/new_human_evaluation.py,sha256=lIgMjVccSp22RRfMGGLH4-yKjMtJeQvjhlwX9EtAxmY,1150
 agenta/client/backend/types/new_testset.py,sha256=9NOC1-f_UZASy4ptzidLNcRU6Odq609ayvSQxEva-40,1009
 agenta/client/backend/types/organization.py,sha256=vJf6Gbz8WCnqabPQmt_t_gfrWPpuvTXgTxKCJKJsrmc,1218
@@ -126,16 +126,18 @@ agenta/docker/docker-assets/entrypoint.sh,sha256=29XK8VQjQsx4hN2j-4JDy-6kQb5y4LC
 agenta/docker/docker-assets/lambda_function.py,sha256=h4UZSSfqwpfsCgERv6frqwm_4JrYu9rLz3I-LxCfeEg,83
 agenta/docker/docker-assets/main.py,sha256=7MI-21n81U7N7A0GxebNi0cmGWtJKcR2sPB6FcH2QfA,251
 agenta/docker/docker_utils.py,sha256=5uHMCzXkCvIsDdEiwbnnn97KkzsFbBvyMwogCsv_Z5U,3509
-agenta/sdk/__init__.py,sha256=oHgl-qoEyi3d2VI_Kv-rIMSx9zgs6b5MP62PLq5GqYI,762
+agenta/sdk/__init__.py,sha256=cF0de6DiH-NZWEm0XvPN8_TeC1whPBnDf1WYYE1qK2g,762
 agenta/sdk/agenta_init.py,sha256=j7qwyDtXfLozWpnayJHPz2aQOzHSGvHo9V6s0FXeUe8,9937
 agenta/sdk/client.py,sha256=trKyBOYFZRk0v5Eptxvh87yPf50Y9CqY6Qgv4Fy-VH4,2142
 agenta/sdk/context.py,sha256=q-PxL05-I84puunUAs9LGsffEXcYhDxhQxjuOz2vK90,901
 agenta/sdk/decorators/base.py,sha256=9aNdX5h8a2mFweuhdO-BQPwXGKY9ONPIdLRhSGAGMfY,217
-agenta/sdk/decorators/llm_entrypoint.py,sha256=YM7idjP3kWyZzcEBsBKOMU88zNQ1hq5S_c18FvvTX7c,22709
-agenta/sdk/decorators/tracing.py,sha256=bC-YlPQUrHBEqvhLJxr63N0qlo1jvrbt7ro2AMGXXZw,3160
+agenta/sdk/decorators/llm_entrypoint.py,sha256=o1kD14dfXLV3p1OyzgCUA6mIoyDV_YuW__kfXaAKW_I,22754
+agenta/sdk/decorators/tracing.py,sha256=_6lbnhoJgz6q6OkMDNWoLstuv-ZvTz9ExcetW2sTiEY,3447
 agenta/sdk/router.py,sha256=0sbajvn5C7t18anH6yNo7-oYxldHnYfwcbmQnIXBePw,269
+agenta/sdk/tracing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+agenta/sdk/tracing/callbacks.py,sha256=E5cA6dzut90WSQ38Zy2y1XkiP6wRQ1j1CGcEo5oCsWc,6312
 agenta/sdk/tracing/context_manager.py,sha256=HskDaiORoOhjeN375gm05wYnieQzh5UnoIsnSAHkAyc,252
-agenta/sdk/tracing/llm_tracing.py,sha256=PmMYQ5N8atYut85Rk2hZ1jmvSF80Duuy6Clf7URcTCA,8193
+agenta/sdk/tracing/llm_tracing.py,sha256=fHtc1tKgdR2CDnFMrfDIxSvyjwFW3Qk2RE8P-y_tCM0,11361
 agenta/sdk/tracing/logger.py,sha256=GfH7V-jBHcn7h5dbdrnkDMe_ml3wkXFBeoQiqR4KVRc,474
 agenta/sdk/tracing/tasks_manager.py,sha256=ROrWIaqS2J2HHiJtRWiHKlLY8CCsqToP5VeXu7mamck,3748
 agenta/sdk/types.py,sha256=KMnQUOdjaHSWctDLIiMHnk0o3c-C47Vm4Mn2kIZ88YI,5740
@@ -157,7 +159,7 @@ agenta/templates/simple_prompt/app.py,sha256=kODgF6lhzsaJPdgL5b21bUki6jkvqjWZzWR
 agenta/templates/simple_prompt/env.example,sha256=g9AE5bYcGPpxawXMJ96gh8oenEPCHTabsiOnfQo3c5k,70
 agenta/templates/simple_prompt/requirements.txt,sha256=ywRglRy7pPkw8bljmMEJJ4aOOQKrt9FGKULZ-DGkoBU,23
 agenta/templates/simple_prompt/template.toml,sha256=DQBtRrF4GU8LBEXOZ-GGuINXMQDKGTEG5y37tnvIUIE,60
-agenta-0.17.0.dist-info/METADATA,sha256=wrmoUAYhdoykpo9vPctSciIt-vUuNuWRYCYcsLO-uzs,26458
-agenta-0.17.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-agenta-0.17.0.dist-info/entry_points.txt,sha256=PDiu8_8AsL7ibU9v4iNoOKR1S7F2rdxjlEprjM9QOgo,46
-agenta-0.17.0.dist-info/RECORD,,
+agenta-0.17.2.dist-info/METADATA,sha256=reCqIluLOXQpWV3p1-chE2o6c7am67d7VT73v_WF2C0,26458
+agenta-0.17.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+agenta-0.17.2.dist-info/entry_points.txt,sha256=PDiu8_8AsL7ibU9v4iNoOKR1S7F2rdxjlEprjM9QOgo,46
+agenta-0.17.2.dist-info/RECORD,,