opik 1.9.26__py3-none-any.whl → 1.9.41__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (181)
  1. opik/__init__.py +10 -3
  2. opik/api_objects/dataset/rest_operations.py +2 -0
  3. opik/api_objects/experiment/experiment.py +31 -5
  4. opik/api_objects/experiment/helpers.py +34 -10
  5. opik/api_objects/local_recording.py +8 -3
  6. opik/api_objects/opik_client.py +218 -46
  7. opik/api_objects/opik_query_language.py +9 -0
  8. opik/api_objects/prompt/__init__.py +11 -3
  9. opik/api_objects/prompt/base_prompt.py +69 -0
  10. opik/api_objects/prompt/base_prompt_template.py +29 -0
  11. opik/api_objects/prompt/chat/__init__.py +1 -0
  12. opik/api_objects/prompt/chat/chat_prompt.py +193 -0
  13. opik/api_objects/prompt/chat/chat_prompt_template.py +350 -0
  14. opik/api_objects/prompt/{chat_content_renderer_registry.py → chat/content_renderer_registry.py} +31 -34
  15. opik/api_objects/prompt/client.py +101 -30
  16. opik/api_objects/prompt/text/__init__.py +1 -0
  17. opik/api_objects/prompt/{prompt.py → text/prompt.py} +55 -32
  18. opik/api_objects/prompt/{prompt_template.py → text/prompt_template.py} +8 -5
  19. opik/cli/export.py +6 -2
  20. opik/config.py +0 -5
  21. opik/decorator/base_track_decorator.py +37 -40
  22. opik/evaluation/__init__.py +13 -2
  23. opik/evaluation/engine/engine.py +195 -223
  24. opik/evaluation/engine/helpers.py +8 -7
  25. opik/evaluation/engine/metrics_evaluator.py +237 -0
  26. opik/evaluation/evaluation_result.py +35 -1
  27. opik/evaluation/evaluator.py +309 -23
  28. opik/evaluation/models/litellm/util.py +78 -6
  29. opik/evaluation/report.py +14 -2
  30. opik/evaluation/rest_operations.py +6 -9
  31. opik/evaluation/test_case.py +2 -2
  32. opik/evaluation/types.py +9 -1
  33. opik/exceptions.py +17 -0
  34. opik/id_helpers.py +18 -0
  35. opik/integrations/adk/helpers.py +16 -7
  36. opik/integrations/adk/legacy_opik_tracer.py +7 -4
  37. opik/integrations/adk/opik_tracer.py +3 -1
  38. opik/integrations/adk/patchers/adk_otel_tracer/opik_adk_otel_tracer.py +7 -3
  39. opik/integrations/dspy/callback.py +1 -4
  40. opik/integrations/haystack/opik_connector.py +2 -2
  41. opik/integrations/haystack/opik_tracer.py +2 -4
  42. opik/integrations/langchain/opik_tracer.py +1 -4
  43. opik/integrations/llama_index/callback.py +2 -4
  44. opik/integrations/openai/agents/opik_tracing_processor.py +1 -2
  45. opik/integrations/openai/opik_tracker.py +1 -1
  46. opik/opik_context.py +7 -7
  47. opik/rest_api/__init__.py +127 -11
  48. opik/rest_api/dashboards/client.py +65 -2
  49. opik/rest_api/dashboards/raw_client.py +82 -0
  50. opik/rest_api/datasets/client.py +538 -2
  51. opik/rest_api/datasets/raw_client.py +1347 -441
  52. opik/rest_api/experiments/client.py +30 -2
  53. opik/rest_api/experiments/raw_client.py +26 -0
  54. opik/rest_api/optimizations/client.py +302 -0
  55. opik/rest_api/optimizations/raw_client.py +463 -0
  56. opik/rest_api/optimizations/types/optimization_update_status.py +3 -1
  57. opik/rest_api/prompts/__init__.py +2 -2
  58. opik/rest_api/prompts/client.py +34 -4
  59. opik/rest_api/prompts/raw_client.py +32 -2
  60. opik/rest_api/prompts/types/__init__.py +3 -1
  61. opik/rest_api/prompts/types/create_prompt_version_detail_template_structure.py +5 -0
  62. opik/rest_api/prompts/types/prompt_write_template_structure.py +5 -0
  63. opik/rest_api/traces/client.py +6 -6
  64. opik/rest_api/traces/raw_client.py +4 -4
  65. opik/rest_api/types/__init__.py +125 -11
  66. opik/rest_api/types/aggregation_data.py +1 -0
  67. opik/rest_api/types/automation_rule_evaluator.py +23 -1
  68. opik/rest_api/types/automation_rule_evaluator_llm_as_judge.py +2 -0
  69. opik/rest_api/types/automation_rule_evaluator_llm_as_judge_public.py +2 -0
  70. opik/rest_api/types/automation_rule_evaluator_llm_as_judge_write.py +2 -0
  71. opik/rest_api/types/{automation_rule_evaluator_object_public.py → automation_rule_evaluator_object_object_public.py} +32 -10
  72. opik/rest_api/types/automation_rule_evaluator_page_public.py +2 -2
  73. opik/rest_api/types/automation_rule_evaluator_public.py +23 -1
  74. opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge.py +22 -0
  75. opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_public.py +22 -0
  76. opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_write.py +22 -0
  77. opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge.py +2 -0
  78. opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_public.py +2 -0
  79. opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_write.py +2 -0
  80. opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python.py +2 -0
  81. opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_public.py +2 -0
  82. opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_write.py +2 -0
  83. opik/rest_api/types/automation_rule_evaluator_update.py +23 -1
  84. opik/rest_api/types/automation_rule_evaluator_update_llm_as_judge.py +2 -0
  85. opik/rest_api/types/automation_rule_evaluator_update_span_llm_as_judge.py +22 -0
  86. opik/rest_api/types/automation_rule_evaluator_update_trace_thread_llm_as_judge.py +2 -0
  87. opik/rest_api/types/automation_rule_evaluator_update_trace_thread_user_defined_metric_python.py +2 -0
  88. opik/rest_api/types/automation_rule_evaluator_update_user_defined_metric_python.py +2 -0
  89. opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python.py +2 -0
  90. opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_public.py +2 -0
  91. opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_write.py +2 -0
  92. opik/rest_api/types/automation_rule_evaluator_write.py +23 -1
  93. opik/rest_api/types/dashboard_page_public.py +1 -0
  94. opik/rest_api/types/dataset.py +4 -0
  95. opik/rest_api/types/dataset_item.py +1 -0
  96. opik/rest_api/types/dataset_item_compare.py +1 -0
  97. opik/rest_api/types/dataset_item_page_compare.py +1 -0
  98. opik/rest_api/types/dataset_item_page_public.py +1 -0
  99. opik/rest_api/types/dataset_item_public.py +1 -0
  100. opik/rest_api/types/dataset_public.py +4 -0
  101. opik/rest_api/types/dataset_public_status.py +5 -0
  102. opik/rest_api/types/dataset_status.py +5 -0
  103. opik/rest_api/types/dataset_version_diff.py +22 -0
  104. opik/rest_api/types/dataset_version_diff_stats.py +24 -0
  105. opik/rest_api/types/dataset_version_page_public.py +23 -0
  106. opik/rest_api/types/dataset_version_public.py +54 -0
  107. opik/rest_api/types/dataset_version_summary.py +41 -0
  108. opik/rest_api/types/dataset_version_summary_public.py +41 -0
  109. opik/rest_api/types/experiment.py +2 -0
  110. opik/rest_api/types/experiment_public.py +2 -0
  111. opik/rest_api/types/experiment_score.py +20 -0
  112. opik/rest_api/types/experiment_score_public.py +20 -0
  113. opik/rest_api/types/experiment_score_write.py +20 -0
  114. opik/rest_api/types/feedback_score_public.py +4 -0
  115. opik/rest_api/types/group_content_with_aggregations.py +1 -0
  116. opik/rest_api/types/optimization.py +2 -0
  117. opik/rest_api/types/optimization_public.py +2 -0
  118. opik/rest_api/types/optimization_public_status.py +3 -1
  119. opik/rest_api/types/optimization_status.py +3 -1
  120. opik/rest_api/types/optimization_studio_config.py +27 -0
  121. opik/rest_api/types/optimization_studio_config_public.py +27 -0
  122. opik/rest_api/types/optimization_studio_config_write.py +27 -0
  123. opik/rest_api/types/optimization_studio_log.py +22 -0
  124. opik/rest_api/types/optimization_write.py +2 -0
  125. opik/rest_api/types/optimization_write_status.py +3 -1
  126. opik/rest_api/types/prompt.py +6 -0
  127. opik/rest_api/types/prompt_detail.py +6 -0
  128. opik/rest_api/types/prompt_detail_template_structure.py +5 -0
  129. opik/rest_api/types/prompt_public.py +6 -0
  130. opik/rest_api/types/prompt_public_template_structure.py +5 -0
  131. opik/rest_api/types/prompt_template_structure.py +5 -0
  132. opik/rest_api/types/prompt_version.py +2 -0
  133. opik/rest_api/types/prompt_version_detail.py +2 -0
  134. opik/rest_api/types/prompt_version_detail_template_structure.py +5 -0
  135. opik/rest_api/types/prompt_version_public.py +2 -0
  136. opik/rest_api/types/prompt_version_public_template_structure.py +5 -0
  137. opik/rest_api/types/prompt_version_template_structure.py +5 -0
  138. opik/rest_api/types/score_name.py +1 -0
  139. opik/rest_api/types/service_toggles_config.py +5 -0
  140. opik/rest_api/types/span_filter.py +23 -0
  141. opik/rest_api/types/span_filter_operator.py +21 -0
  142. opik/rest_api/types/span_filter_write.py +23 -0
  143. opik/rest_api/types/span_filter_write_operator.py +21 -0
  144. opik/rest_api/types/span_llm_as_judge_code.py +27 -0
  145. opik/rest_api/types/span_llm_as_judge_code_public.py +27 -0
  146. opik/rest_api/types/span_llm_as_judge_code_write.py +27 -0
  147. opik/rest_api/types/studio_evaluation.py +20 -0
  148. opik/rest_api/types/studio_evaluation_public.py +20 -0
  149. opik/rest_api/types/studio_evaluation_write.py +20 -0
  150. opik/rest_api/types/studio_llm_model.py +21 -0
  151. opik/rest_api/types/studio_llm_model_public.py +21 -0
  152. opik/rest_api/types/studio_llm_model_write.py +21 -0
  153. opik/rest_api/types/studio_message.py +20 -0
  154. opik/rest_api/types/studio_message_public.py +20 -0
  155. opik/rest_api/types/studio_message_write.py +20 -0
  156. opik/rest_api/types/studio_metric.py +21 -0
  157. opik/rest_api/types/studio_metric_public.py +21 -0
  158. opik/rest_api/types/studio_metric_write.py +21 -0
  159. opik/rest_api/types/studio_optimizer.py +21 -0
  160. opik/rest_api/types/studio_optimizer_public.py +21 -0
  161. opik/rest_api/types/studio_optimizer_write.py +21 -0
  162. opik/rest_api/types/studio_prompt.py +20 -0
  163. opik/rest_api/types/studio_prompt_public.py +20 -0
  164. opik/rest_api/types/studio_prompt_write.py +20 -0
  165. opik/rest_api/types/trace.py +6 -0
  166. opik/rest_api/types/trace_public.py +6 -0
  167. opik/rest_api/types/trace_thread_filter_write.py +23 -0
  168. opik/rest_api/types/trace_thread_filter_write_operator.py +21 -0
  169. opik/rest_api/types/value_entry.py +2 -0
  170. opik/rest_api/types/value_entry_compare.py +2 -0
  171. opik/rest_api/types/value_entry_experiment_item_bulk_write_view.py +2 -0
  172. opik/rest_api/types/value_entry_public.py +2 -0
  173. opik/synchronization.py +5 -6
  174. opik/{decorator/tracing_runtime_config.py → tracing_runtime_config.py} +6 -7
  175. {opik-1.9.26.dist-info → opik-1.9.41.dist-info}/METADATA +4 -3
  176. {opik-1.9.26.dist-info → opik-1.9.41.dist-info}/RECORD +180 -120
  177. opik/api_objects/prompt/chat_prompt_template.py +0 -200
  178. {opik-1.9.26.dist-info → opik-1.9.41.dist-info}/WHEEL +0 -0
  179. {opik-1.9.26.dist-info → opik-1.9.41.dist-info}/entry_points.txt +0 -0
  180. {opik-1.9.26.dist-info → opik-1.9.41.dist-info}/licenses/LICENSE +0 -0
  181. {opik-1.9.26.dist-info → opik-1.9.41.dist-info}/top_level.txt +0 -0
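The most significant functional change visible in this diff is dataset versioning support. The largest file in the list, what appears to be opik/rest_api/datasets/raw_client.py (shown below), gains compare_dataset_versions, create_version_tag, delete_version_tag, list_dataset_versions, create_dataset_version, restore_dataset_version and update_dataset_version, plus 404/409 handling via the newly imported ConflictError. The following sketch is illustrative only: the method names, parameters and routes are taken from the diff below, while the OpikApi construction and the assumption that a higher-level client.datasets wrapper mirrors these raw methods are not confirmed by this diff.

# Hypothetical usage sketch of the new dataset versioning endpoints.
# Assumptions (not from this diff): OpikApi is exported from opik.rest_api,
# its constructor arguments are as shown, and client.datasets exposes wrapper
# methods matching the RawDatasetsClient signatures in the diff below.
from opik.rest_api import OpikApi

client = OpikApi(base_url="https://www.comet.com/opik/api", api_key="...")  # assumed configuration
dataset_id = "..."  # id of an existing dataset

# Snapshot the current draft items as an immutable version.
client.datasets.create_dataset_version(
    id=dataset_id,
    change_description="baseline before prompt rewrite",
    tags=["baseline"],
)

# List versions (newest first) and compare the latest committed version
# with the current draft state.
versions = client.datasets.list_dataset_versions(id=dataset_id, page=1, size=10)
diff = client.datasets.compare_dataset_versions(id=dataset_id)

# Tag a version by its hash, then restore the draft from a hash or tag.
client.datasets.create_version_tag(id=dataset_id, version_hash="<version-hash>", tag="v1.0")
restored = client.datasets.restore_dataset_version(id=dataset_id, version_ref="v1.0")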
@@ -12,6 +12,7 @@ from ..core.pydantic_utilities import parse_obj_as
  from ..core.request_options import RequestOptions
  from ..core.serialization import convert_and_respect_annotation_metadata
  from ..errors.bad_request_error import BadRequestError
+ from ..errors.conflict_error import ConflictError
  from ..errors.not_found_error import NotFoundError
  from ..types.dataset_expansion_response import DatasetExpansionResponse
  from ..types.dataset_item_filter import DatasetItemFilter
@@ -23,6 +24,9 @@ from ..types.dataset_item_write import DatasetItemWrite
  from ..types.dataset_item_write_source import DatasetItemWriteSource
  from ..types.dataset_page_public import DatasetPagePublic
  from ..types.dataset_public import DatasetPublic
+ from ..types.dataset_version_diff import DatasetVersionDiff
+ from ..types.dataset_version_page_public import DatasetVersionPagePublic
+ from ..types.dataset_version_public import DatasetVersionPublic
  from ..types.json_node import JsonNode
  from ..types.page_columns import PageColumns
  from ..types.project_stats_public import ProjectStatsPublic
@@ -1041,6 +1045,7 @@ class RawDatasetsClient:
  *,
  page: typing.Optional[int] = None,
  size: typing.Optional[int] = None,
+ version: typing.Optional[str] = None,
  filters: typing.Optional[str] = None,
  truncate: typing.Optional[bool] = None,
  request_options: typing.Optional[RequestOptions] = None,
@@ -1056,6 +1061,8 @@ class RawDatasetsClient:

  size : typing.Optional[int]

+ version : typing.Optional[str]
+
  filters : typing.Optional[str]

  truncate : typing.Optional[bool]
@@ -1074,6 +1081,7 @@ class RawDatasetsClient:
  params={
  "page": page,
  "size": size,
+ "version": version,
  "filters": filters,
  "truncate": truncate,
  },
@@ -1201,54 +1209,81 @@ class RawDatasetsClient:

  yield stream()

+ def compare_dataset_versions(
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[DatasetVersionDiff]:
+ """
+ Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.

- class AsyncRawDatasetsClient:
- def __init__(self, *, client_wrapper: AsyncClientWrapper):
- self._client_wrapper = client_wrapper
+ Parameters
+ ----------
+ id : str

- async def batch_update_dataset_items(
- self,
- *,
- update: DatasetItemUpdate,
- ids: typing.Optional[typing.Sequence[str]] = OMIT,
- filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
- merge_tags: typing.Optional[bool] = OMIT,
- request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[None]:
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[DatasetVersionDiff]
+ Diff computed successfully
  """
- Update multiple dataset items
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetVersionDiff,
+ parse_obj_as(
+ type_=DatasetVersionDiff, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ def create_version_tag(
+ self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[None]:
+ """
+ Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')

  Parameters
  ----------
- update : DatasetItemUpdate
-
- ids : typing.Optional[typing.Sequence[str]]
- List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
+ version_hash : str

- filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
+ id : str

- merge_tags : typing.Optional[bool]
- If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
+ tag : str

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[None]
+ HttpResponse[None]
  """
- _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/items/batch",
- method="PATCH",
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
+ method="POST",
  json={
- "ids": ids,
- "filters": convert_and_respect_annotation_metadata(
- object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
- ),
- "update": convert_and_respect_annotation_metadata(
- object_=update, annotation=DatasetItemUpdate, direction="write"
- ),
- "merge_tags": merge_tags,
+ "tag": tag,
  },
  headers={
  "content-type": "application/json",
@@ -1258,7 +1293,7 @@ class AsyncRawDatasetsClient:
  )
  try:
  if 200 <= _response.status_code < 300:
- return AsyncHttpResponse(response=_response, data=None)
+ return HttpResponse(response=_response, data=None)
  if _response.status_code == 400:
  raise BadRequestError(
  headers=dict(_response.headers),
@@ -1270,124 +1305,134 @@ class AsyncRawDatasetsClient:
  ),
  ),
  )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def find_datasets(
+ def list_dataset_versions(
  self,
+ id: str,
  *,
  page: typing.Optional[int] = None,
  size: typing.Optional[int] = None,
- with_experiments_only: typing.Optional[bool] = None,
- with_optimizations_only: typing.Optional[bool] = None,
- prompt_id: typing.Optional[str] = None,
- name: typing.Optional[str] = None,
- sorting: typing.Optional[str] = None,
- filters: typing.Optional[str] = None,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[DatasetPagePublic]:
+ ) -> HttpResponse[DatasetVersionPagePublic]:
  """
- Find datasets
+ Get paginated list of versions for a dataset, ordered by creation time (newest first)

  Parameters
  ----------
+ id : str
+
  page : typing.Optional[int]

  size : typing.Optional[int]

- with_experiments_only : typing.Optional[bool]
-
- with_optimizations_only : typing.Optional[bool]
-
- prompt_id : typing.Optional[str]
-
- name : typing.Optional[str]
-
- sorting : typing.Optional[str]
-
- filters : typing.Optional[str]
-
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[DatasetPagePublic]
- Dataset resource
+ HttpResponse[DatasetVersionPagePublic]
+ Dataset versions
  """
- _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets",
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
  method="GET",
  params={
  "page": page,
  "size": size,
- "with_experiments_only": with_experiments_only,
- "with_optimizations_only": with_optimizations_only,
- "prompt_id": prompt_id,
- "name": name,
- "sorting": sorting,
- "filters": filters,
  },
  request_options=request_options,
  )
  try:
  if 200 <= _response.status_code < 300:
  _data = typing.cast(
- DatasetPagePublic,
+ DatasetVersionPagePublic,
  parse_obj_as(
- type_=DatasetPagePublic, # type: ignore
+ type_=DatasetVersionPagePublic, # type: ignore
  object_=_response.json(),
  ),
  )
- return AsyncHttpResponse(response=_response, data=_data)
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def create_dataset(
+ def create_dataset_version(
  self,
+ id: str,
  *,
- name: str,
- id: typing.Optional[str] = OMIT,
- visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
  tags: typing.Optional[typing.Sequence[str]] = OMIT,
- description: typing.Optional[str] = OMIT,
+ change_description: typing.Optional[str] = OMIT,
+ metadata: typing.Optional[typing.Dict[str, str]] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[None]:
+ ) -> HttpResponse[None]:
  """
- Create dataset
+ Create a new immutable version of the dataset by snapshotting the current state

  Parameters
  ----------
- name : str
-
- id : typing.Optional[str]
-
- visibility : typing.Optional[DatasetWriteVisibility]
+ id : str

  tags : typing.Optional[typing.Sequence[str]]
+ Optional list of tags for this version

- description : typing.Optional[str]
+ change_description : typing.Optional[str]
+ Optional description of changes in this version
+
+ metadata : typing.Optional[typing.Dict[str, str]]
+ Optional user-defined metadata

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[None]
+ HttpResponse[None]
  """
- _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets",
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
  method="POST",
  json={
- "id": id,
- "name": name,
- "visibility": visibility,
  "tags": tags,
- "description": description,
+ "change_description": change_description,
+ "metadata": metadata,
  },
  headers={
  "content-type": "application/json",
@@ -1397,112 +1442,111 @@ class AsyncRawDatasetsClient:
  )
  try:
  if 200 <= _response.status_code < 300:
- return AsyncHttpResponse(response=_response, data=None)
+ return HttpResponse(response=_response, data=None)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def create_or_update_dataset_items(
- self,
- *,
- items: typing.Sequence[DatasetItemWrite],
- dataset_name: typing.Optional[str] = OMIT,
- dataset_id: typing.Optional[str] = OMIT,
- request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[None]:
+ def delete_version_tag(
+ self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[None]:
  """
- Create/update dataset items based on dataset item id
+ Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.

  Parameters
  ----------
- items : typing.Sequence[DatasetItemWrite]
+ version_hash : str

- dataset_name : typing.Optional[str]
- If null, dataset_id must be provided
+ tag : str

- dataset_id : typing.Optional[str]
- If null, dataset_name must be provided
+ id : str

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[None]
+ HttpResponse[None]
  """
- _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/items",
- method="PUT",
- json={
- "dataset_name": dataset_name,
- "dataset_id": dataset_id,
- "items": convert_and_respect_annotation_metadata(
- object_=items, annotation=typing.Sequence[DatasetItemWrite], direction="write"
- ),
- },
- headers={
- "content-type": "application/json",
- },
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
+ method="DELETE",
  request_options=request_options,
- omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
- return AsyncHttpResponse(response=_response, data=None)
+ return HttpResponse(response=_response, data=None)
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def create_dataset_items_from_csv(
- self,
- *,
- file: typing.Dict[str, typing.Optional[typing.Any]],
- dataset_id: str,
- request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[None]:
+ def restore_dataset_version(
+ self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> HttpResponse[DatasetVersionPublic]:
  """
- Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.
+ Restores the dataset to a previous version state. All draft items are replaced with items from the specified version. If the version is not the latest, a new version snapshot is created. If the version is the latest, only draft items are replaced (revert functionality).

  Parameters
  ----------
- file : typing.Dict[str, typing.Optional[typing.Any]]
+ id : str

- dataset_id : str
+ version_ref : str
+ Version hash or tag to restore from

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[None]
+ HttpResponse[DatasetVersionPublic]
+ Version restored successfully
  """
- _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/items/from-csv",
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
  method="POST",
- data={
- "file": file,
- "dataset_id": dataset_id,
+ json={
+ "version_ref": version_ref,
+ },
+ headers={
+ "content-type": "application/json",
  },
- files={},
  request_options=request_options,
  omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
- return AsyncHttpResponse(response=_response, data=None)
- if _response.status_code == 400:
- raise BadRequestError(
- headers=dict(_response.headers),
- body=typing.cast(
- typing.Optional[typing.Any],
- parse_obj_as(
- type_=typing.Optional[typing.Any], # type: ignore
- object_=_response.json(),
- ),
+ _data = typing.cast(
+ DatasetVersionPublic,
+ parse_obj_as(
+ type_=DatasetVersionPublic, # type: ignore
+ object_=_response.json(),
  ),
  )
+ return HttpResponse(response=_response, data=_data)
  if _response.status_code == 404:
  raise NotFoundError(
  headers=dict(_response.headers),
@@ -1519,41 +1563,44 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def create_dataset_items_from_spans(
+ def update_dataset_version(
  self,
- dataset_id: str,
+ version_hash: str,
+ id: str,
  *,
- span_ids: typing.Sequence[str],
- enrichment_options: SpanEnrichmentOptions,
+ change_description: typing.Optional[str] = OMIT,
+ tags_to_add: typing.Optional[typing.Sequence[str]] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[None]:
+ ) -> HttpResponse[DatasetVersionPublic]:
  """
- Create dataset items from spans with enriched metadata
+ Update a dataset version's change_description and/or add new tags

  Parameters
  ----------
- dataset_id : str
+ version_hash : str

- span_ids : typing.Sequence[str]
- Set of span IDs to add to the dataset
+ id : str

- enrichment_options : SpanEnrichmentOptions
+ change_description : typing.Optional[str]
+ Optional description of changes in this version
+
+ tags_to_add : typing.Optional[typing.Sequence[str]]
+ Optional list of tags to add to this version

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[None]
+ HttpResponse[DatasetVersionPublic]
+ Version updated successfully
  """
- _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
- method="POST",
+ _response = self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}",
+ method="PATCH",
  json={
- "span_ids": span_ids,
- "enrichment_options": convert_and_respect_annotation_metadata(
- object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
- ),
+ "change_description": change_description,
+ "tags_to_add": tags_to_add,
  },
  headers={
  "content-type": "application/json",
@@ -1563,31 +1610,80 @@ class AsyncRawDatasetsClient:
  )
  try:
  if 200 <= _response.status_code < 300:
- return AsyncHttpResponse(response=_response, data=None)
+ _data = typing.cast(
+ DatasetVersionPublic,
+ parse_obj_as(
+ type_=DatasetVersionPublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return HttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def create_dataset_items_from_traces(
+
+ class AsyncRawDatasetsClient:
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ async def batch_update_dataset_items(
  self,
- dataset_id: str,
  *,
- trace_ids: typing.Sequence[str],
- enrichment_options: TraceEnrichmentOptions,
+ update: DatasetItemUpdate,
+ ids: typing.Optional[typing.Sequence[str]] = OMIT,
+ filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
+ merge_tags: typing.Optional[bool] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> AsyncHttpResponse[None]:
  """
- Create dataset items from traces with enriched metadata
+ Update multiple dataset items

  Parameters
  ----------
- dataset_id : str
+ update : DatasetItemUpdate

- trace_ids : typing.Sequence[str]
- Set of trace IDs to add to the dataset
+ ids : typing.Optional[typing.Sequence[str]]
+ List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.

- enrichment_options : TraceEnrichmentOptions
+ filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
+
+ merge_tags : typing.Optional[bool]
+ If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -1597,13 +1693,17 @@ class AsyncRawDatasetsClient:
  AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-traces",
- method="POST",
+ "v1/private/datasets/items/batch",
+ method="PATCH",
  json={
- "trace_ids": trace_ids,
- "enrichment_options": convert_and_respect_annotation_metadata(
- object_=enrichment_options, annotation=TraceEnrichmentOptions, direction="write"
+ "ids": ids,
+ "filters": convert_and_respect_annotation_metadata(
+ object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
  ),
+ "update": convert_and_respect_annotation_metadata(
+ object_=update, annotation=DatasetItemUpdate, direction="write"
+ ),
+ "merge_tags": merge_tags,
  },
  headers={
  "content-type": "application/json",
@@ -1614,40 +1714,85 @@ class AsyncRawDatasetsClient:
  try:
  if 200 <= _response.status_code < 300:
  return AsyncHttpResponse(response=_response, data=None)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def get_dataset_by_id(
- self, id: str, *, request_options: typing.Optional[RequestOptions] = None
- ) -> AsyncHttpResponse[DatasetPublic]:
+ async def find_datasets(
+ self,
+ *,
+ page: typing.Optional[int] = None,
+ size: typing.Optional[int] = None,
+ with_experiments_only: typing.Optional[bool] = None,
+ with_optimizations_only: typing.Optional[bool] = None,
+ prompt_id: typing.Optional[str] = None,
+ name: typing.Optional[str] = None,
+ sorting: typing.Optional[str] = None,
+ filters: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[DatasetPagePublic]:
  """
- Get dataset by id
+ Find datasets

  Parameters
  ----------
- id : str
+ page : typing.Optional[int]
+
+ size : typing.Optional[int]
+
+ with_experiments_only : typing.Optional[bool]
+
+ with_optimizations_only : typing.Optional[bool]
+
+ prompt_id : typing.Optional[str]
+
+ name : typing.Optional[str]
+
+ sorting : typing.Optional[str]
+
+ filters : typing.Optional[str]

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[DatasetPublic]
+ AsyncHttpResponse[DatasetPagePublic]
  Dataset resource
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}",
+ "v1/private/datasets",
  method="GET",
+ params={
+ "page": page,
+ "size": size,
+ "with_experiments_only": with_experiments_only,
+ "with_optimizations_only": with_optimizations_only,
+ "prompt_id": prompt_id,
+ "name": name,
+ "sorting": sorting,
+ "filters": filters,
+ },
  request_options=request_options,
  )
  try:
  if 200 <= _response.status_code < 300:
  _data = typing.cast(
- DatasetPublic,
+ DatasetPagePublic,
  parse_obj_as(
- type_=DatasetPublic, # type: ignore
+ type_=DatasetPagePublic, # type: ignore
  object_=_response.json(),
  ),
  )
@@ -1657,31 +1802,31 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def update_dataset(
+ async def create_dataset(
  self,
- id: str,
  *,
  name: str,
- description: typing.Optional[str] = OMIT,
- visibility: typing.Optional[DatasetUpdateVisibility] = OMIT,
+ id: typing.Optional[str] = OMIT,
+ visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
  tags: typing.Optional[typing.Sequence[str]] = OMIT,
+ description: typing.Optional[str] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> AsyncHttpResponse[None]:
  """
- Update dataset by id
+ Create dataset

  Parameters
  ----------
- id : str
-
  name : str

- description : typing.Optional[str]
+ id : typing.Optional[str]

- visibility : typing.Optional[DatasetUpdateVisibility]
+ visibility : typing.Optional[DatasetWriteVisibility]

  tags : typing.Optional[typing.Sequence[str]]

+ description : typing.Optional[str]
+
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -1690,13 +1835,14 @@ class AsyncRawDatasetsClient:
  AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}",
- method="PUT",
+ "v1/private/datasets",
+ method="POST",
  json={
+ "id": id,
  "name": name,
- "description": description,
  "visibility": visibility,
  "tags": tags,
+ "description": description,
  },
  headers={
  "content-type": "application/json",
@@ -1712,15 +1858,26 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def delete_dataset(
- self, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ async def create_or_update_dataset_items(
+ self,
+ *,
+ items: typing.Sequence[DatasetItemWrite],
+ dataset_name: typing.Optional[str] = OMIT,
+ dataset_id: typing.Optional[str] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
  ) -> AsyncHttpResponse[None]:
  """
- Delete dataset by id
+ Create/update dataset items based on dataset item id

  Parameters
  ----------
- id : str
+ items : typing.Sequence[DatasetItemWrite]
+
+ dataset_name : typing.Optional[str]
+ If null, dataset_id must be provided
+
+ dataset_id : typing.Optional[str]
+ If null, dataset_name must be provided

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -1730,9 +1887,20 @@ class AsyncRawDatasetsClient:
  AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}",
- method="DELETE",
+ "v1/private/datasets/items",
+ method="PUT",
+ json={
+ "dataset_name": dataset_name,
+ "dataset_id": dataset_id,
+ "items": convert_and_respect_annotation_metadata(
+ object_=items, annotation=typing.Sequence[DatasetItemWrite], direction="write"
+ ),
+ },
+ headers={
+ "content-type": "application/json",
+ },
  request_options=request_options,
+ omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
@@ -1742,15 +1910,21 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def delete_dataset_by_name(
- self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
+ async def create_dataset_items_from_csv(
+ self,
+ *,
+ file: typing.Dict[str, typing.Optional[typing.Any]],
+ dataset_id: str,
+ request_options: typing.Optional[RequestOptions] = None,
  ) -> AsyncHttpResponse[None]:
  """
- Delete dataset by name
+ Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.

  Parameters
  ----------
- dataset_name : str
+ file : typing.Dict[str, typing.Optional[typing.Any]]
+
+ dataset_id : str

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -1760,34 +1934,65 @@ class AsyncRawDatasetsClient:
  AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/delete",
+ "v1/private/datasets/items/from-csv",
  method="POST",
- json={
- "dataset_name": dataset_name,
- },
- headers={
- "content-type": "application/json",
+ data={
+ "file": file,
+ "dataset_id": dataset_id,
  },
+ files={},
  request_options=request_options,
  omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
  return AsyncHttpResponse(response=_response, data=None)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def delete_dataset_items(
- self, *, item_ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
+ async def create_dataset_items_from_spans(
+ self,
+ dataset_id: str,
+ *,
+ span_ids: typing.Sequence[str],
+ enrichment_options: SpanEnrichmentOptions,
+ request_options: typing.Optional[RequestOptions] = None,
  ) -> AsyncHttpResponse[None]:
  """
- Delete dataset items
+ Create dataset items from spans with enriched metadata

  Parameters
  ----------
- item_ids : typing.Sequence[str]
+ dataset_id : str
+
+ span_ids : typing.Sequence[str]
+ Set of span IDs to add to the dataset
+
+ enrichment_options : SpanEnrichmentOptions

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -1797,10 +2002,13 @@ class AsyncRawDatasetsClient:
  AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/items/delete",
+ f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
  method="POST",
  json={
- "item_ids": item_ids,
+ "span_ids": span_ids,
+ "enrichment_options": convert_and_respect_annotation_metadata(
+ object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
+ ),
  },
  headers={
  "content-type": "application/json",
@@ -1816,15 +2024,25 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def delete_datasets_batch(
- self, *, ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
+ async def create_dataset_items_from_traces(
+ self,
+ dataset_id: str,
+ *,
+ trace_ids: typing.Sequence[str],
+ enrichment_options: TraceEnrichmentOptions,
+ request_options: typing.Optional[RequestOptions] = None,
  ) -> AsyncHttpResponse[None]:
  """
- Delete datasets batch
+ Create dataset items from traces with enriched metadata

  Parameters
  ----------
- ids : typing.Sequence[str]
+ dataset_id : str
+
+ trace_ids : typing.Sequence[str]
+ Set of trace IDs to add to the dataset
+
+ enrichment_options : TraceEnrichmentOptions

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -1834,10 +2052,13 @@ class AsyncRawDatasetsClient:
  AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/delete-batch",
+ f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-traces",
  method="POST",
  json={
- "ids": ids,
+ "trace_ids": trace_ids,
+ "enrichment_options": convert_and_respect_annotation_metadata(
+ object_=enrichment_options, annotation=TraceEnrichmentOptions, direction="write"
+ ),
  },
  headers={
  "content-type": "application/json",
@@ -1853,69 +2074,35 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def expand_dataset(
- self,
- id: str,
- *,
- model: str,
- sample_count: typing.Optional[int] = OMIT,
- preserve_fields: typing.Optional[typing.Sequence[str]] = OMIT,
- variation_instructions: typing.Optional[str] = OMIT,
- custom_prompt: typing.Optional[str] = OMIT,
- request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[DatasetExpansionResponse]:
+ async def get_dataset_by_id(
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[DatasetPublic]:
  """
- Generate synthetic dataset samples using LLM based on existing data patterns
+ Get dataset by id

  Parameters
  ----------
  id : str

- model : str
- The model to use for synthetic data generation
-
- sample_count : typing.Optional[int]
- Number of synthetic samples to generate
-
- preserve_fields : typing.Optional[typing.Sequence[str]]
- Fields to preserve patterns from original data
-
- variation_instructions : typing.Optional[str]
- Additional instructions for data variation
-
- custom_prompt : typing.Optional[str]
- Custom prompt to use for generation instead of auto-generated one
-
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[DatasetExpansionResponse]
- Generated synthetic samples
+ AsyncHttpResponse[DatasetPublic]
+ Dataset resource
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}/expansions",
- method="POST",
- json={
- "model": model,
- "sample_count": sample_count,
- "preserve_fields": preserve_fields,
- "variation_instructions": variation_instructions,
- "custom_prompt": custom_prompt,
- },
- headers={
- "content-type": "application/json",
- },
+ f"v1/private/datasets/{jsonable_encoder(id)}",
+ method="GET",
  request_options=request_options,
- omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
  _data = typing.cast(
- DatasetExpansionResponse,
+ DatasetPublic,
  parse_obj_as(
- type_=DatasetExpansionResponse, # type: ignore
+ type_=DatasetPublic, # type: ignore
  object_=_response.json(),
  ),
  )
@@ -1925,82 +2112,96 @@ class AsyncRawDatasetsClient:
1925
2112
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1926
2113
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1927
2114
 
- async def find_dataset_items_with_experiment_items(
+ async def update_dataset(
  self,
  id: str,
  *,
- experiment_ids: str,
- page: typing.Optional[int] = None,
- size: typing.Optional[int] = None,
- filters: typing.Optional[str] = None,
- sorting: typing.Optional[str] = None,
- search: typing.Optional[str] = None,
- truncate: typing.Optional[bool] = None,
+ name: str,
+ description: typing.Optional[str] = OMIT,
+ visibility: typing.Optional[DatasetUpdateVisibility] = OMIT,
+ tags: typing.Optional[typing.Sequence[str]] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[DatasetItemPageCompare]:
+ ) -> AsyncHttpResponse[None]:
  """
- Find dataset items with experiment items
+ Update dataset by id

  Parameters
  ----------
  id : str

- experiment_ids : str
+ name : str

- page : typing.Optional[int]
+ description : typing.Optional[str]

- size : typing.Optional[int]
+ visibility : typing.Optional[DatasetUpdateVisibility]

- filters : typing.Optional[str]
+ tags : typing.Optional[typing.Sequence[str]]

- sorting : typing.Optional[str]
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.

- search : typing.Optional[str]
+ Returns
+ -------
+ AsyncHttpResponse[None]
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}",
+ method="PUT",
+ json={
+ "name": name,
+ "description": description,
+ "visibility": visibility,
+ "tags": tags,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return AsyncHttpResponse(response=_response, data=None)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- truncate : typing.Optional[bool]
+ async def delete_dataset(
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[None]:
+ """
+ Delete dataset by id
+
+ Parameters
+ ----------
+ id : str

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[DatasetItemPageCompare]
- Dataset item resource
+ AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items",
- method="GET",
- params={
- "page": page,
- "size": size,
- "experiment_ids": experiment_ids,
- "filters": filters,
- "sorting": sorting,
- "search": search,
- "truncate": truncate,
- },
+ f"v1/private/datasets/{jsonable_encoder(id)}",
+ method="DELETE",
  request_options=request_options,
  )
  try:
  if 200 <= _response.status_code < 300:
- _data = typing.cast(
- DatasetItemPageCompare,
- parse_obj_as(
- type_=DatasetItemPageCompare, # type: ignore
- object_=_response.json(),
- ),
- )
- return AsyncHttpResponse(response=_response, data=_data)
+ return AsyncHttpResponse(response=_response, data=None)
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def get_dataset_by_identifier(
+ async def delete_dataset_by_name(
  self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
- ) -> AsyncHttpResponse[DatasetPublic]:
+ ) -> AsyncHttpResponse[None]:
  """
- Get dataset by name
+ Delete dataset by name

  Parameters
  ----------
@@ -2011,11 +2212,10 @@ class AsyncRawDatasetsClient:

  Returns
  -------
- AsyncHttpResponse[DatasetPublic]
- Dataset resource
+ AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- "v1/private/datasets/retrieve",
+ "v1/private/datasets/delete",
  method="POST",
  json={
  "dataset_name": dataset_name,
@@ -2028,156 +2228,136 @@ class AsyncRawDatasetsClient:
  )
  try:
  if 200 <= _response.status_code < 300:
- _data = typing.cast(
- DatasetPublic,
- parse_obj_as(
- type_=DatasetPublic, # type: ignore
- object_=_response.json(),
- ),
- )
- return AsyncHttpResponse(response=_response, data=_data)
+ return AsyncHttpResponse(response=_response, data=None)
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def get_dataset_experiment_items_stats(
- self,
- id: str,
- *,
- experiment_ids: str,
- filters: typing.Optional[str] = None,
- request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[ProjectStatsPublic]:
+ async def delete_dataset_items(
+ self, *, item_ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[None]:
  """
- Get experiment items stats for dataset
+ Delete dataset items

  Parameters
  ----------
- id : str
-
- experiment_ids : str
-
- filters : typing.Optional[str]
+ item_ids : typing.Sequence[str]

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[ProjectStatsPublic]
- Experiment items stats resource
+ AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/stats",
- method="GET",
- params={
- "experiment_ids": experiment_ids,
- "filters": filters,
+ "v1/private/datasets/items/delete",
+ method="POST",
+ json={
+ "item_ids": item_ids,
+ },
+ headers={
+ "content-type": "application/json",
  },
  request_options=request_options,
+ omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
- _data = typing.cast(
- ProjectStatsPublic,
- parse_obj_as(
- type_=ProjectStatsPublic, # type: ignore
- object_=_response.json(),
- ),
- )
- return AsyncHttpResponse(response=_response, data=_data)
+ return AsyncHttpResponse(response=_response, data=None)
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def get_dataset_item_by_id(
- self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
- ) -> AsyncHttpResponse[DatasetItemPublic]:
+ async def delete_datasets_batch(
+ self, *, ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[None]:
  """
- Get dataset item by id
+ Delete datasets batch

  Parameters
  ----------
- item_id : str
+ ids : typing.Sequence[str]

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[DatasetItemPublic]
- Dataset item resource
+ AsyncHttpResponse[None]
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
- method="GET",
+ "v1/private/datasets/delete-batch",
+ method="POST",
+ json={
+ "ids": ids,
+ },
+ headers={
+ "content-type": "application/json",
+ },
  request_options=request_options,
+ omit=OMIT,
  )
  try:
  if 200 <= _response.status_code < 300:
- _data = typing.cast(
- DatasetItemPublic,
- parse_obj_as(
- type_=DatasetItemPublic, # type: ignore
- object_=_response.json(),
- ),
- )
- return AsyncHttpResponse(response=_response, data=_data)
+ return AsyncHttpResponse(response=_response, data=None)
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def patch_dataset_item(
+ async def expand_dataset(
  self,
- item_id: str,
+ id: str,
  *,
- source: DatasetItemWriteSource,
- data: JsonNode,
- id: typing.Optional[str] = OMIT,
- trace_id: typing.Optional[str] = OMIT,
- span_id: typing.Optional[str] = OMIT,
- tags: typing.Optional[typing.Sequence[str]] = OMIT,
+ model: str,
+ sample_count: typing.Optional[int] = OMIT,
+ preserve_fields: typing.Optional[typing.Sequence[str]] = OMIT,
+ variation_instructions: typing.Optional[str] = OMIT,
+ custom_prompt: typing.Optional[str] = OMIT,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[None]:
+ ) -> AsyncHttpResponse[DatasetExpansionResponse]:
  """
- Partially update dataset item by id. Only provided fields will be updated.
+ Generate synthetic dataset samples using LLM based on existing data patterns

  Parameters
  ----------
- item_id : str
-
- source : DatasetItemWriteSource
+ id : str

- data : JsonNode
+ model : str
+ The model to use for synthetic data generation

- id : typing.Optional[str]
+ sample_count : typing.Optional[int]
+ Number of synthetic samples to generate

- trace_id : typing.Optional[str]
+ preserve_fields : typing.Optional[typing.Sequence[str]]
+ Fields to preserve patterns from original data

- span_id : typing.Optional[str]
+ variation_instructions : typing.Optional[str]
+ Additional instructions for data variation

- tags : typing.Optional[typing.Sequence[str]]
+ custom_prompt : typing.Optional[str]
+ Custom prompt to use for generation instead of auto-generated one

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[None]
+ AsyncHttpResponse[DatasetExpansionResponse]
+ Generated synthetic samples
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
- method="PATCH",
+ f"v1/private/datasets/{jsonable_encoder(id)}/expansions",
+ method="POST",
  json={
- "id": id,
- "trace_id": trace_id,
- "span_id": span_id,
- "source": source,
- "data": data,
- "tags": tags,
+ "model": model,
+ "sample_count": sample_count,
+ "preserve_fields": preserve_fields,
+ "variation_instructions": variation_instructions,
+ "custom_prompt": custom_prompt,
  },
  headers={
  "content-type": "application/json",
@@ -2187,46 +2367,51 @@ class AsyncRawDatasetsClient:
  )
  try:
  if 200 <= _response.status_code < 300:
- return AsyncHttpResponse(response=_response, data=None)
- if _response.status_code == 404:
- raise NotFoundError(
- headers=dict(_response.headers),
- body=typing.cast(
- typing.Optional[typing.Any],
- parse_obj_as(
- type_=typing.Optional[typing.Any], # type: ignore
- object_=_response.json(),
- ),
+ _data = typing.cast(
+ DatasetExpansionResponse,
+ parse_obj_as(
+ type_=DatasetExpansionResponse, # type: ignore
+ object_=_response.json(),
  ),
  )
+ return AsyncHttpResponse(response=_response, data=_data)
  _response_json = _response.json()
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
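
A minimal usage sketch for the expand_dataset method shown above. It assumes `datasets` is an already-configured AsyncRawDatasetsClient instance and the id/model values are placeholders, not values taken from this diff:

    # Hypothetical identifiers; only `model` is required besides the dataset id.
    response = await datasets.expand_dataset(
        id="<dataset-id>",
        model="gpt-4o-mini",
        sample_count=5,
        variation_instructions="Paraphrase the inputs, keep the labels.",
    )
    samples = response.data  # DatasetExpansionResponse with the generated samples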

- async def get_dataset_items(
+ async def find_dataset_items_with_experiment_items(
  self,
  id: str,
  *,
+ experiment_ids: str,
  page: typing.Optional[int] = None,
  size: typing.Optional[int] = None,
  filters: typing.Optional[str] = None,
+ sorting: typing.Optional[str] = None,
+ search: typing.Optional[str] = None,
  truncate: typing.Optional[bool] = None,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[DatasetItemPagePublic]:
+ ) -> AsyncHttpResponse[DatasetItemPageCompare]:
  """
- Get dataset items
+ Find dataset items with experiment items

  Parameters
  ----------
  id : str

+ experiment_ids : str
+
  page : typing.Optional[int]

  size : typing.Optional[int]

  filters : typing.Optional[str]

+ sorting : typing.Optional[str]
+
+ search : typing.Optional[str]
+
  truncate : typing.Optional[bool]

  request_options : typing.Optional[RequestOptions]
@@ -2234,16 +2419,19 @@ class AsyncRawDatasetsClient:

  Returns
  -------
- AsyncHttpResponse[DatasetItemPagePublic]
- Dataset items resource
+ AsyncHttpResponse[DatasetItemPageCompare]
+ Dataset item resource
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}/items",
+ f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items",
  method="GET",
  params={
  "page": page,
  "size": size,
+ "experiment_ids": experiment_ids,
  "filters": filters,
+ "sorting": sorting,
+ "search": search,
  "truncate": truncate,
  },
  request_options=request_options,
@@ -2251,9 +2439,9 @@ class AsyncRawDatasetsClient:
  try:
  if 200 <= _response.status_code < 300:
  _data = typing.cast(
- DatasetItemPagePublic,
+ DatasetItemPageCompare,
  parse_obj_as(
- type_=DatasetItemPagePublic, # type: ignore
+ type_=DatasetItemPageCompare, # type: ignore
  object_=_response.json(),
  ),
  )
@@ -2263,44 +2451,93 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- async def get_dataset_items_output_columns(
+ async def get_dataset_by_identifier(
+ self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[DatasetPublic]:
+ """
+ Get dataset by name
+
+ Parameters
+ ----------
+ dataset_name : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[DatasetPublic]
+ Dataset resource
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "v1/private/datasets/retrieve",
+ method="POST",
+ json={
+ "dataset_name": dataset_name,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetPublic,
+ parse_obj_as(
+ type_=DatasetPublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ async def get_dataset_experiment_items_stats(
  self,
  id: str,
  *,
- experiment_ids: typing.Optional[str] = None,
+ experiment_ids: str,
+ filters: typing.Optional[str] = None,
  request_options: typing.Optional[RequestOptions] = None,
- ) -> AsyncHttpResponse[PageColumns]:
+ ) -> AsyncHttpResponse[ProjectStatsPublic]:
  """
- Get dataset items output columns
+ Get experiment items stats for dataset

  Parameters
  ----------
  id : str

- experiment_ids : typing.Optional[str]
+ experiment_ids : str
+
+ filters : typing.Optional[str]

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

  Returns
  -------
- AsyncHttpResponse[PageColumns]
- Dataset item output columns
+ AsyncHttpResponse[ProjectStatsPublic]
+ Experiment items stats resource
  """
  _response = await self._client_wrapper.httpx_client.request(
- f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/output/columns",
+ f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/stats",
  method="GET",
  params={
  "experiment_ids": experiment_ids,
+ "filters": filters,
  },
  request_options=request_options,
  )
  try:
  if 200 <= _response.status_code < 300:
  _data = typing.cast(
- PageColumns,
+ ProjectStatsPublic,
  parse_obj_as(
- type_=PageColumns, # type: ignore
+ type_=ProjectStatsPublic, # type: ignore
  object_=_response.json(),
  ),
  )
@@ -2310,32 +2547,254 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

- @contextlib.asynccontextmanager
- async def stream_dataset_items(
- self,
- *,
- dataset_name: str,
- last_retrieved_id: typing.Optional[str] = OMIT,
- steam_limit: typing.Optional[int] = OMIT,
- request_options: typing.Optional[RequestOptions] = None,
- ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]:
+ async def get_dataset_item_by_id(
+ self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[DatasetItemPublic]:
  """
- Stream dataset items
+ Get dataset item by id

  Parameters
  ----------
- dataset_name : str
-
- last_retrieved_id : typing.Optional[str]
-
- steam_limit : typing.Optional[int]
+ item_id : str

  request_options : typing.Optional[RequestOptions]
- Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
+ Request-specific configuration.

  Returns
  -------
- typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
+ AsyncHttpResponse[DatasetItemPublic]
+ Dataset item resource
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetItemPublic,
+ parse_obj_as(
+ type_=DatasetItemPublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ async def patch_dataset_item(
+ self,
+ item_id: str,
+ *,
+ source: DatasetItemWriteSource,
+ data: JsonNode,
+ id: typing.Optional[str] = OMIT,
+ trace_id: typing.Optional[str] = OMIT,
+ span_id: typing.Optional[str] = OMIT,
+ tags: typing.Optional[typing.Sequence[str]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[None]:
+ """
+ Partially update dataset item by id. Only provided fields will be updated.
+
+ Parameters
+ ----------
+ item_id : str
+
+ source : DatasetItemWriteSource
+
+ data : JsonNode
+
+ id : typing.Optional[str]
+
+ trace_id : typing.Optional[str]
+
+ span_id : typing.Optional[str]
+
+ tags : typing.Optional[typing.Sequence[str]]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[None]
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
+ method="PATCH",
+ json={
+ "id": id,
+ "trace_id": trace_id,
+ "span_id": span_id,
+ "source": source,
+ "data": data,
+ "tags": tags,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return AsyncHttpResponse(response=_response, data=None)
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ async def get_dataset_items(
+ self,
+ id: str,
+ *,
+ page: typing.Optional[int] = None,
+ size: typing.Optional[int] = None,
+ version: typing.Optional[str] = None,
+ filters: typing.Optional[str] = None,
+ truncate: typing.Optional[bool] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[DatasetItemPagePublic]:
+ """
+ Get dataset items
+
+ Parameters
+ ----------
+ id : str
+
+ page : typing.Optional[int]
+
+ size : typing.Optional[int]
+
+ version : typing.Optional[str]
+
+ filters : typing.Optional[str]
+
+ truncate : typing.Optional[bool]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[DatasetItemPagePublic]
+ Dataset items resource
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/items",
+ method="GET",
+ params={
+ "page": page,
+ "size": size,
+ "version": version,
+ "filters": filters,
+ "truncate": truncate,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetItemPagePublic,
+ parse_obj_as(
+ type_=DatasetItemPagePublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
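
The new `version` query parameter on get_dataset_items lets a caller read items as they were in a specific dataset version rather than the current draft. A minimal sketch, assuming `datasets` is an already-configured AsyncRawDatasetsClient and the identifiers are placeholders (the diff does not document the accepted format of `version`):

    page_response = await datasets.get_dataset_items(
        id="<dataset-id>",
        version="<version-hash-or-tag>",  # presumably a version hash or tag; omit to read the current draft
        page=1,
        size=50,
    )
    items_page = page_response.data  # DatasetItemPagePublic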
+
+ async def get_dataset_items_output_columns(
+ self,
+ id: str,
+ *,
+ experiment_ids: typing.Optional[str] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[PageColumns]:
+ """
+ Get dataset items output columns
+
+ Parameters
+ ----------
+ id : str
+
+ experiment_ids : typing.Optional[str]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[PageColumns]
+ Dataset item output columns
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/output/columns",
+ method="GET",
+ params={
+ "experiment_ids": experiment_ids,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ PageColumns,
+ parse_obj_as(
+ type_=PageColumns, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ @contextlib.asynccontextmanager
+ async def stream_dataset_items(
+ self,
+ *,
+ dataset_name: str,
+ last_retrieved_id: typing.Optional[str] = OMIT,
+ steam_limit: typing.Optional[int] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]:
+ """
+ Stream dataset items
+
+ Parameters
+ ----------
+ dataset_name : str
+
+ last_retrieved_id : typing.Optional[str]
+
+ steam_limit : typing.Optional[int]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
+
+ Returns
+ -------
+ typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
  Dataset items stream or error during process
  """
  async with self._client_wrapper.httpx_client.stream(
@@ -2370,3 +2829,450 @@ class AsyncRawDatasetsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

  yield await stream()
+
+ async def compare_dataset_versions(
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[DatasetVersionDiff]:
+ """
+ Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
+
+ Parameters
+ ----------
+ id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[DatasetVersionDiff]
+ Diff computed successfully
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetVersionDiff,
+ parse_obj_as(
+ type_=DatasetVersionDiff, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
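
A sketch of inspecting the draft-versus-latest-version diff before committing a new version, assuming `datasets` is an already-configured AsyncRawDatasetsClient and the id is a placeholder:

    diff_response = await datasets.compare_dataset_versions(id="<dataset-id>")
    # DatasetVersionDiff summarizing additions, modifications, deletions and unchanged items
    diff = diff_response.data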
+
+ async def create_version_tag(
+ self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[None]:
+ """
+ Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')
+
+ Parameters
+ ----------
+ version_hash : str
+
+ id : str
+
+ tag : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[None]
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
+ method="POST",
+ json={
+ "tag": tag,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return AsyncHttpResponse(response=_response, data=None)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ async def list_dataset_versions(
+ self,
+ id: str,
+ *,
+ page: typing.Optional[int] = None,
+ size: typing.Optional[int] = None,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[DatasetVersionPagePublic]:
+ """
+ Get paginated list of versions for a dataset, ordered by creation time (newest first)
+
+ Parameters
+ ----------
+ id : str
+
+ page : typing.Optional[int]
+
+ size : typing.Optional[int]
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[DatasetVersionPagePublic]
+ Dataset versions
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
+ method="GET",
+ params={
+ "page": page,
+ "size": size,
+ },
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetVersionPagePublic,
+ parse_obj_as(
+ type_=DatasetVersionPagePublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
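
Tagging and listing versions are plain calls; a minimal sketch under the same assumptions (`datasets` is an already-configured AsyncRawDatasetsClient, identifiers are placeholders):

    # Tag a committed version so it can be referenced by name later.
    await datasets.create_version_tag(
        version_hash="<version-hash>",
        id="<dataset-id>",
        tag="baseline",
    )
    # Page through the version history, newest first per the docstring above.
    versions_response = await datasets.list_dataset_versions(id="<dataset-id>", page=1, size=20)
    versions_page = versions_response.data  # DatasetVersionPagePublic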
+
+ async def create_dataset_version(
+ self,
+ id: str,
+ *,
+ tags: typing.Optional[typing.Sequence[str]] = OMIT,
+ change_description: typing.Optional[str] = OMIT,
+ metadata: typing.Optional[typing.Dict[str, str]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[None]:
+ """
+ Create a new immutable version of the dataset by snapshotting the current state
+
+ Parameters
+ ----------
+ id : str
+
+ tags : typing.Optional[typing.Sequence[str]]
+ Optional list of tags for this version
+
+ change_description : typing.Optional[str]
+ Optional description of changes in this version
+
+ metadata : typing.Optional[typing.Dict[str, str]]
+ Optional user-defined metadata
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[None]
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
+ method="POST",
+ json={
+ "tags": tags,
+ "change_description": change_description,
+ "metadata": metadata,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return AsyncHttpResponse(response=_response, data=None)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
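
A sketch of committing the current draft as an immutable version, with optional tags, a change description, and user-defined metadata (same assumptions as the sketches above; all values are placeholders):

    await datasets.create_dataset_version(
        id="<dataset-id>",
        tags=["baseline"],
        change_description="Initial labelled split",
        metadata={"source": "manual-review"},  # hypothetical metadata keys
    )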
+
+ async def delete_version_tag(
+ self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[None]:
+ """
+ Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.
+
+ Parameters
+ ----------
+ version_hash : str
+
+ tag : str
+
+ id : str
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[None]
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
+ method="DELETE",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return AsyncHttpResponse(response=_response, data=None)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ async def restore_dataset_version(
+ self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[DatasetVersionPublic]:
+ """
+ Restores the dataset to a previous version state. All draft items are replaced with items from the specified version. If the version is not the latest, a new version snapshot is created. If the version is the latest, only draft items are replaced (revert functionality).
+
+ Parameters
+ ----------
+ id : str
+
+ version_ref : str
+ Version hash or tag to restore from
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[DatasetVersionPublic]
+ Version restored successfully
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
+ method="POST",
+ json={
+ "version_ref": version_ref,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetVersionPublic,
+ parse_obj_as(
+ type_=DatasetVersionPublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
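
Restoring rolls the draft back to a given version by hash or tag, as the docstring above describes. A minimal sketch under the same assumptions:

    restored = await datasets.restore_dataset_version(
        id="<dataset-id>",
        version_ref="baseline",  # version hash or tag, per the docstring above
    )
    version_info = restored.data  # DatasetVersionPublic describing the restored version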
+
+ async def update_dataset_version(
+ self,
+ version_hash: str,
+ id: str,
+ *,
+ change_description: typing.Optional[str] = OMIT,
+ tags_to_add: typing.Optional[typing.Sequence[str]] = OMIT,
+ request_options: typing.Optional[RequestOptions] = None,
+ ) -> AsyncHttpResponse[DatasetVersionPublic]:
+ """
+ Update a dataset version's change_description and/or add new tags
+
+ Parameters
+ ----------
+ version_hash : str
+
+ id : str
+
+ change_description : typing.Optional[str]
+ Optional description of changes in this version
+
+ tags_to_add : typing.Optional[typing.Sequence[str]]
+ Optional list of tags to add to this version
+
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[DatasetVersionPublic]
+ Version updated successfully
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}",
+ method="PATCH",
+ json={
+ "change_description": change_description,
+ "tags_to_add": tags_to_add,
+ },
+ headers={
+ "content-type": "application/json",
+ },
+ request_options=request_options,
+ omit=OMIT,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ _data = typing.cast(
+ DatasetVersionPublic,
+ parse_obj_as(
+ type_=DatasetVersionPublic, # type: ignore
+ object_=_response.json(),
+ ),
+ )
+ return AsyncHttpResponse(response=_response, data=_data)
+ if _response.status_code == 400:
+ raise BadRequestError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 404:
+ raise NotFoundError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ if _response.status_code == 409:
+ raise ConflictError(
+ headers=dict(_response.headers),
+ body=typing.cast(
+ typing.Optional[typing.Any],
+ parse_obj_as(
+ type_=typing.Optional[typing.Any], # type: ignore
+ object_=_response.json(),
+ ),
+ ),
+ )
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)