opik 1.9.26__py3-none-any.whl → 1.9.39__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. opik/__init__.py +10 -3
  2. opik/api_objects/dataset/rest_operations.py +2 -0
  3. opik/api_objects/experiment/experiment.py +31 -5
  4. opik/api_objects/experiment/helpers.py +34 -10
  5. opik/api_objects/local_recording.py +8 -3
  6. opik/api_objects/opik_client.py +218 -46
  7. opik/api_objects/opik_query_language.py +9 -0
  8. opik/api_objects/prompt/__init__.py +11 -3
  9. opik/api_objects/prompt/base_prompt.py +69 -0
  10. opik/api_objects/prompt/base_prompt_template.py +29 -0
  11. opik/api_objects/prompt/chat/__init__.py +1 -0
  12. opik/api_objects/prompt/chat/chat_prompt.py +193 -0
  13. opik/api_objects/prompt/chat/chat_prompt_template.py +350 -0
  14. opik/api_objects/prompt/{chat_content_renderer_registry.py → chat/content_renderer_registry.py} +31 -34
  15. opik/api_objects/prompt/client.py +101 -30
  16. opik/api_objects/prompt/text/__init__.py +1 -0
  17. opik/api_objects/prompt/{prompt.py → text/prompt.py} +55 -32
  18. opik/api_objects/prompt/{prompt_template.py → text/prompt_template.py} +8 -5
  19. opik/cli/export.py +6 -2
  20. opik/config.py +0 -5
  21. opik/decorator/base_track_decorator.py +37 -40
  22. opik/evaluation/__init__.py +13 -2
  23. opik/evaluation/engine/engine.py +195 -223
  24. opik/evaluation/engine/helpers.py +8 -7
  25. opik/evaluation/engine/metrics_evaluator.py +237 -0
  26. opik/evaluation/evaluation_result.py +35 -1
  27. opik/evaluation/evaluator.py +309 -23
  28. opik/evaluation/models/litellm/util.py +78 -6
  29. opik/evaluation/report.py +14 -2
  30. opik/evaluation/rest_operations.py +6 -9
  31. opik/evaluation/test_case.py +2 -2
  32. opik/evaluation/types.py +9 -1
  33. opik/exceptions.py +17 -0
  34. opik/id_helpers.py +18 -0
  35. opik/integrations/adk/helpers.py +16 -7
  36. opik/integrations/adk/legacy_opik_tracer.py +7 -4
  37. opik/integrations/adk/opik_tracer.py +3 -1
  38. opik/integrations/adk/patchers/adk_otel_tracer/opik_adk_otel_tracer.py +7 -3
  39. opik/integrations/dspy/callback.py +1 -4
  40. opik/integrations/haystack/opik_connector.py +2 -2
  41. opik/integrations/haystack/opik_tracer.py +2 -4
  42. opik/integrations/langchain/opik_tracer.py +1 -4
  43. opik/integrations/llama_index/callback.py +2 -4
  44. opik/integrations/openai/agents/opik_tracing_processor.py +1 -2
  45. opik/integrations/openai/opik_tracker.py +1 -1
  46. opik/opik_context.py +7 -7
  47. opik/rest_api/__init__.py +123 -11
  48. opik/rest_api/dashboards/client.py +65 -2
  49. opik/rest_api/dashboards/raw_client.py +82 -0
  50. opik/rest_api/datasets/client.py +441 -2
  51. opik/rest_api/datasets/raw_client.py +1225 -505
  52. opik/rest_api/experiments/client.py +30 -2
  53. opik/rest_api/experiments/raw_client.py +26 -0
  54. opik/rest_api/optimizations/client.py +302 -0
  55. opik/rest_api/optimizations/raw_client.py +463 -0
  56. opik/rest_api/optimizations/types/optimization_update_status.py +3 -1
  57. opik/rest_api/prompts/__init__.py +2 -2
  58. opik/rest_api/prompts/client.py +34 -4
  59. opik/rest_api/prompts/raw_client.py +32 -2
  60. opik/rest_api/prompts/types/__init__.py +3 -1
  61. opik/rest_api/prompts/types/create_prompt_version_detail_template_structure.py +5 -0
  62. opik/rest_api/prompts/types/prompt_write_template_structure.py +5 -0
  63. opik/rest_api/traces/client.py +6 -6
  64. opik/rest_api/traces/raw_client.py +4 -4
  65. opik/rest_api/types/__init__.py +121 -11
  66. opik/rest_api/types/aggregation_data.py +1 -0
  67. opik/rest_api/types/automation_rule_evaluator.py +23 -1
  68. opik/rest_api/types/automation_rule_evaluator_llm_as_judge.py +2 -0
  69. opik/rest_api/types/automation_rule_evaluator_llm_as_judge_public.py +2 -0
  70. opik/rest_api/types/automation_rule_evaluator_llm_as_judge_write.py +2 -0
  71. opik/rest_api/types/{automation_rule_evaluator_object_public.py → automation_rule_evaluator_object_object_public.py} +32 -10
  72. opik/rest_api/types/automation_rule_evaluator_page_public.py +2 -2
  73. opik/rest_api/types/automation_rule_evaluator_public.py +23 -1
  74. opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge.py +22 -0
  75. opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_public.py +22 -0
  76. opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_write.py +22 -0
  77. opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge.py +2 -0
  78. opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_public.py +2 -0
  79. opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_write.py +2 -0
  80. opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python.py +2 -0
  81. opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_public.py +2 -0
  82. opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_write.py +2 -0
  83. opik/rest_api/types/automation_rule_evaluator_update.py +23 -1
  84. opik/rest_api/types/automation_rule_evaluator_update_llm_as_judge.py +2 -0
  85. opik/rest_api/types/automation_rule_evaluator_update_span_llm_as_judge.py +22 -0
  86. opik/rest_api/types/automation_rule_evaluator_update_trace_thread_llm_as_judge.py +2 -0
  87. opik/rest_api/types/automation_rule_evaluator_update_trace_thread_user_defined_metric_python.py +2 -0
  88. opik/rest_api/types/automation_rule_evaluator_update_user_defined_metric_python.py +2 -0
  89. opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python.py +2 -0
  90. opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_public.py +2 -0
  91. opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_write.py +2 -0
  92. opik/rest_api/types/automation_rule_evaluator_write.py +23 -1
  93. opik/rest_api/types/dashboard_page_public.py +1 -0
  94. opik/rest_api/types/dataset.py +2 -0
  95. opik/rest_api/types/dataset_item.py +1 -0
  96. opik/rest_api/types/dataset_item_compare.py +1 -0
  97. opik/rest_api/types/dataset_item_page_compare.py +1 -0
  98. opik/rest_api/types/dataset_item_page_public.py +1 -0
  99. opik/rest_api/types/dataset_item_public.py +1 -0
  100. opik/rest_api/types/dataset_public.py +2 -0
  101. opik/rest_api/types/dataset_public_status.py +5 -0
  102. opik/rest_api/types/dataset_status.py +5 -0
  103. opik/rest_api/types/dataset_version_diff.py +22 -0
  104. opik/rest_api/types/dataset_version_diff_stats.py +24 -0
  105. opik/rest_api/types/dataset_version_page_public.py +23 -0
  106. opik/rest_api/types/dataset_version_public.py +49 -0
  107. opik/rest_api/types/experiment.py +2 -0
  108. opik/rest_api/types/experiment_public.py +2 -0
  109. opik/rest_api/types/experiment_score.py +20 -0
  110. opik/rest_api/types/experiment_score_public.py +20 -0
  111. opik/rest_api/types/experiment_score_write.py +20 -0
  112. opik/rest_api/types/feedback_score_public.py +4 -0
  113. opik/rest_api/types/optimization.py +2 -0
  114. opik/rest_api/types/optimization_public.py +2 -0
  115. opik/rest_api/types/optimization_public_status.py +3 -1
  116. opik/rest_api/types/optimization_status.py +3 -1
  117. opik/rest_api/types/optimization_studio_config.py +27 -0
  118. opik/rest_api/types/optimization_studio_config_public.py +27 -0
  119. opik/rest_api/types/optimization_studio_config_write.py +27 -0
  120. opik/rest_api/types/optimization_studio_log.py +22 -0
  121. opik/rest_api/types/optimization_write.py +2 -0
  122. opik/rest_api/types/optimization_write_status.py +3 -1
  123. opik/rest_api/types/prompt.py +6 -0
  124. opik/rest_api/types/prompt_detail.py +6 -0
  125. opik/rest_api/types/prompt_detail_template_structure.py +5 -0
  126. opik/rest_api/types/prompt_public.py +6 -0
  127. opik/rest_api/types/prompt_public_template_structure.py +5 -0
  128. opik/rest_api/types/prompt_template_structure.py +5 -0
  129. opik/rest_api/types/prompt_version.py +2 -0
  130. opik/rest_api/types/prompt_version_detail.py +2 -0
  131. opik/rest_api/types/prompt_version_detail_template_structure.py +5 -0
  132. opik/rest_api/types/prompt_version_public.py +2 -0
  133. opik/rest_api/types/prompt_version_public_template_structure.py +5 -0
  134. opik/rest_api/types/prompt_version_template_structure.py +5 -0
  135. opik/rest_api/types/score_name.py +1 -0
  136. opik/rest_api/types/service_toggles_config.py +5 -0
  137. opik/rest_api/types/span_filter.py +23 -0
  138. opik/rest_api/types/span_filter_operator.py +21 -0
  139. opik/rest_api/types/span_filter_write.py +23 -0
  140. opik/rest_api/types/span_filter_write_operator.py +21 -0
  141. opik/rest_api/types/span_llm_as_judge_code.py +27 -0
  142. opik/rest_api/types/span_llm_as_judge_code_public.py +27 -0
  143. opik/rest_api/types/span_llm_as_judge_code_write.py +27 -0
  144. opik/rest_api/types/studio_evaluation.py +20 -0
  145. opik/rest_api/types/studio_evaluation_public.py +20 -0
  146. opik/rest_api/types/studio_evaluation_write.py +20 -0
  147. opik/rest_api/types/studio_llm_model.py +21 -0
  148. opik/rest_api/types/studio_llm_model_public.py +21 -0
  149. opik/rest_api/types/studio_llm_model_write.py +21 -0
  150. opik/rest_api/types/studio_message.py +20 -0
  151. opik/rest_api/types/studio_message_public.py +20 -0
  152. opik/rest_api/types/studio_message_write.py +20 -0
  153. opik/rest_api/types/studio_metric.py +21 -0
  154. opik/rest_api/types/studio_metric_public.py +21 -0
  155. opik/rest_api/types/studio_metric_write.py +21 -0
  156. opik/rest_api/types/studio_optimizer.py +21 -0
  157. opik/rest_api/types/studio_optimizer_public.py +21 -0
  158. opik/rest_api/types/studio_optimizer_write.py +21 -0
  159. opik/rest_api/types/studio_prompt.py +20 -0
  160. opik/rest_api/types/studio_prompt_public.py +20 -0
  161. opik/rest_api/types/studio_prompt_write.py +20 -0
  162. opik/rest_api/types/trace.py +6 -0
  163. opik/rest_api/types/trace_public.py +6 -0
  164. opik/rest_api/types/trace_thread_filter_write.py +23 -0
  165. opik/rest_api/types/trace_thread_filter_write_operator.py +21 -0
  166. opik/rest_api/types/value_entry.py +2 -0
  167. opik/rest_api/types/value_entry_compare.py +2 -0
  168. opik/rest_api/types/value_entry_experiment_item_bulk_write_view.py +2 -0
  169. opik/rest_api/types/value_entry_public.py +2 -0
  170. opik/synchronization.py +5 -6
  171. opik/{decorator/tracing_runtime_config.py → tracing_runtime_config.py} +6 -7
  172. {opik-1.9.26.dist-info → opik-1.9.39.dist-info}/METADATA +2 -1
  173. {opik-1.9.26.dist-info → opik-1.9.39.dist-info}/RECORD +177 -119
  174. opik/api_objects/prompt/chat_prompt_template.py +0 -200
  175. {opik-1.9.26.dist-info → opik-1.9.39.dist-info}/WHEEL +0 -0
  176. {opik-1.9.26.dist-info → opik-1.9.39.dist-info}/entry_points.txt +0 -0
  177. {opik-1.9.26.dist-info → opik-1.9.39.dist-info}/licenses/LICENSE +0 -0
  178. {opik-1.9.26.dist-info → opik-1.9.39.dist-info}/top_level.txt +0 -0
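The bulk of this release is the new dataset-versioning surface in the generated REST client: opik/rest_api/datasets/raw_client.py (entry 51) gains create_dataset_version, list_dataset_versions, compare_dataset_versions, create_version_tag, delete_version_tag, and restore_dataset_version, and its diff follows below. The sketch here shows how those endpoints might be called; it assumes the generated OpikApi entry point in opik.rest_api exposes a datasets sub-client with the same method names and parameters as the raw client in the diff, and the dataset id, version hash, and tag values are placeholders.

# Hedged sketch of the dataset-versioning endpoints added in 1.9.39.
# Assumptions: OpikApi is the generated entry point in opik.rest_api and its
# `datasets` sub-client mirrors the RawDatasetsClient methods shown in the
# diff below; the dataset id, version hash, and tags are placeholders.
from opik.rest_api import OpikApi

client = OpikApi()  # base URL / API key come from its usual configuration
dataset_id = "0190a1b2-..."  # placeholder dataset id

# Snapshot the current draft items as an immutable version.
client.datasets.create_dataset_version(
    dataset_id,
    tag="baseline",
    change_description="Initial curated split",
    metadata={"owner": "eval-team"},
)

# Page through existing versions (newest first, per the endpoint docstring).
versions = client.datasets.list_dataset_versions(dataset_id, page=1, size=10)

# Compare the latest committed version with the current draft state.
diff = client.datasets.compare_dataset_versions(dataset_id)

# Tag a specific version hash, then restore the dataset to that version.
client.datasets.create_version_tag("abc123def", dataset_id, tag="v1.0")
restored = client.datasets.restore_dataset_version(dataset_id, version_ref="v1.0")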
@@ -12,6 +12,7 @@ from ..core.pydantic_utilities import parse_obj_as
 from ..core.request_options import RequestOptions
 from ..core.serialization import convert_and_respect_annotation_metadata
 from ..errors.bad_request_error import BadRequestError
+from ..errors.conflict_error import ConflictError
 from ..errors.not_found_error import NotFoundError
 from ..types.dataset_expansion_response import DatasetExpansionResponse
 from ..types.dataset_item_filter import DatasetItemFilter
@@ -23,6 +24,9 @@ from ..types.dataset_item_write import DatasetItemWrite
 from ..types.dataset_item_write_source import DatasetItemWriteSource
 from ..types.dataset_page_public import DatasetPagePublic
 from ..types.dataset_public import DatasetPublic
+from ..types.dataset_version_diff import DatasetVersionDiff
+from ..types.dataset_version_page_public import DatasetVersionPagePublic
+from ..types.dataset_version_public import DatasetVersionPublic
 from ..types.json_node import JsonNode
 from ..types.page_columns import PageColumns
 from ..types.project_stats_public import ProjectStatsPublic
@@ -1041,6 +1045,7 @@ class RawDatasetsClient:
         *,
         page: typing.Optional[int] = None,
         size: typing.Optional[int] = None,
+        version: typing.Optional[str] = None,
         filters: typing.Optional[str] = None,
         truncate: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
@@ -1056,6 +1061,8 @@ class RawDatasetsClient:
 
         size : typing.Optional[int]
 
+        version : typing.Optional[str]
+
         filters : typing.Optional[str]
 
         truncate : typing.Optional[bool]
@@ -1074,6 +1081,7 @@ class RawDatasetsClient:
             params={
                 "page": page,
                 "size": size,
+                "version": version,
                 "filters": filters,
                 "truncate": truncate,
             },
@@ -1201,54 +1209,81 @@ class RawDatasetsClient:
1201
1209
 
1202
1210
  yield stream()
1203
1211
 
1212
+ def compare_dataset_versions(
1213
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
1214
+ ) -> HttpResponse[DatasetVersionDiff]:
1215
+ """
1216
+ Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
1204
1217
 
1205
- class AsyncRawDatasetsClient:
1206
- def __init__(self, *, client_wrapper: AsyncClientWrapper):
1207
- self._client_wrapper = client_wrapper
1218
+ Parameters
1219
+ ----------
1220
+ id : str
1208
1221
 
1209
- async def batch_update_dataset_items(
1210
- self,
1211
- *,
1212
- update: DatasetItemUpdate,
1213
- ids: typing.Optional[typing.Sequence[str]] = OMIT,
1214
- filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
1215
- merge_tags: typing.Optional[bool] = OMIT,
1216
- request_options: typing.Optional[RequestOptions] = None,
1217
- ) -> AsyncHttpResponse[None]:
1222
+ request_options : typing.Optional[RequestOptions]
1223
+ Request-specific configuration.
1224
+
1225
+ Returns
1226
+ -------
1227
+ HttpResponse[DatasetVersionDiff]
1228
+ Diff computed successfully
1218
1229
  """
1219
- Update multiple dataset items
1230
+ _response = self._client_wrapper.httpx_client.request(
1231
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
1232
+ method="GET",
1233
+ request_options=request_options,
1234
+ )
1235
+ try:
1236
+ if 200 <= _response.status_code < 300:
1237
+ _data = typing.cast(
1238
+ DatasetVersionDiff,
1239
+ parse_obj_as(
1240
+ type_=DatasetVersionDiff, # type: ignore
1241
+ object_=_response.json(),
1242
+ ),
1243
+ )
1244
+ return HttpResponse(response=_response, data=_data)
1245
+ if _response.status_code == 404:
1246
+ raise NotFoundError(
1247
+ headers=dict(_response.headers),
1248
+ body=typing.cast(
1249
+ typing.Optional[typing.Any],
1250
+ parse_obj_as(
1251
+ type_=typing.Optional[typing.Any], # type: ignore
1252
+ object_=_response.json(),
1253
+ ),
1254
+ ),
1255
+ )
1256
+ _response_json = _response.json()
1257
+ except JSONDecodeError:
1258
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1259
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1260
+
1261
+ def create_version_tag(
1262
+ self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
1263
+ ) -> HttpResponse[None]:
1264
+ """
1265
+ Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')
1220
1266
 
1221
1267
  Parameters
1222
1268
  ----------
1223
- update : DatasetItemUpdate
1224
-
1225
- ids : typing.Optional[typing.Sequence[str]]
1226
- List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
1269
+ version_hash : str
1227
1270
 
1228
- filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
1271
+ id : str
1229
1272
 
1230
- merge_tags : typing.Optional[bool]
1231
- If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
1273
+ tag : str
1232
1274
 
1233
1275
  request_options : typing.Optional[RequestOptions]
1234
1276
  Request-specific configuration.
1235
1277
 
1236
1278
  Returns
1237
1279
  -------
1238
- AsyncHttpResponse[None]
1280
+ HttpResponse[None]
1239
1281
  """
1240
- _response = await self._client_wrapper.httpx_client.request(
1241
- "v1/private/datasets/items/batch",
1242
- method="PATCH",
1282
+ _response = self._client_wrapper.httpx_client.request(
1283
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
1284
+ method="POST",
1243
1285
  json={
1244
- "ids": ids,
1245
- "filters": convert_and_respect_annotation_metadata(
1246
- object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
1247
- ),
1248
- "update": convert_and_respect_annotation_metadata(
1249
- object_=update, annotation=DatasetItemUpdate, direction="write"
1250
- ),
1251
- "merge_tags": merge_tags,
1286
+ "tag": tag,
1252
1287
  },
1253
1288
  headers={
1254
1289
  "content-type": "application/json",
@@ -1258,7 +1293,7 @@ class AsyncRawDatasetsClient:
1258
1293
  )
1259
1294
  try:
1260
1295
  if 200 <= _response.status_code < 300:
1261
- return AsyncHttpResponse(response=_response, data=None)
1296
+ return HttpResponse(response=_response, data=None)
1262
1297
  if _response.status_code == 400:
1263
1298
  raise BadRequestError(
1264
1299
  headers=dict(_response.headers),
@@ -1270,124 +1305,134 @@ class AsyncRawDatasetsClient:
1270
1305
  ),
1271
1306
  ),
1272
1307
  )
1308
+ if _response.status_code == 404:
1309
+ raise NotFoundError(
1310
+ headers=dict(_response.headers),
1311
+ body=typing.cast(
1312
+ typing.Optional[typing.Any],
1313
+ parse_obj_as(
1314
+ type_=typing.Optional[typing.Any], # type: ignore
1315
+ object_=_response.json(),
1316
+ ),
1317
+ ),
1318
+ )
1319
+ if _response.status_code == 409:
1320
+ raise ConflictError(
1321
+ headers=dict(_response.headers),
1322
+ body=typing.cast(
1323
+ typing.Optional[typing.Any],
1324
+ parse_obj_as(
1325
+ type_=typing.Optional[typing.Any], # type: ignore
1326
+ object_=_response.json(),
1327
+ ),
1328
+ ),
1329
+ )
1273
1330
  _response_json = _response.json()
1274
1331
  except JSONDecodeError:
1275
1332
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1276
1333
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1277
1334
 
1278
- async def find_datasets(
1335
+ def list_dataset_versions(
1279
1336
  self,
1337
+ id: str,
1280
1338
  *,
1281
1339
  page: typing.Optional[int] = None,
1282
1340
  size: typing.Optional[int] = None,
1283
- with_experiments_only: typing.Optional[bool] = None,
1284
- with_optimizations_only: typing.Optional[bool] = None,
1285
- prompt_id: typing.Optional[str] = None,
1286
- name: typing.Optional[str] = None,
1287
- sorting: typing.Optional[str] = None,
1288
- filters: typing.Optional[str] = None,
1289
1341
  request_options: typing.Optional[RequestOptions] = None,
1290
- ) -> AsyncHttpResponse[DatasetPagePublic]:
1342
+ ) -> HttpResponse[DatasetVersionPagePublic]:
1291
1343
  """
1292
- Find datasets
1344
+ Get paginated list of versions for a dataset, ordered by creation time (newest first)
1293
1345
 
1294
1346
  Parameters
1295
1347
  ----------
1348
+ id : str
1349
+
1296
1350
  page : typing.Optional[int]
1297
1351
 
1298
1352
  size : typing.Optional[int]
1299
1353
 
1300
- with_experiments_only : typing.Optional[bool]
1301
-
1302
- with_optimizations_only : typing.Optional[bool]
1303
-
1304
- prompt_id : typing.Optional[str]
1305
-
1306
- name : typing.Optional[str]
1307
-
1308
- sorting : typing.Optional[str]
1309
-
1310
- filters : typing.Optional[str]
1311
-
1312
1354
  request_options : typing.Optional[RequestOptions]
1313
1355
  Request-specific configuration.
1314
1356
 
1315
1357
  Returns
1316
1358
  -------
1317
- AsyncHttpResponse[DatasetPagePublic]
1318
- Dataset resource
1359
+ HttpResponse[DatasetVersionPagePublic]
1360
+ Dataset versions
1319
1361
  """
1320
- _response = await self._client_wrapper.httpx_client.request(
1321
- "v1/private/datasets",
1362
+ _response = self._client_wrapper.httpx_client.request(
1363
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
1322
1364
  method="GET",
1323
1365
  params={
1324
1366
  "page": page,
1325
1367
  "size": size,
1326
- "with_experiments_only": with_experiments_only,
1327
- "with_optimizations_only": with_optimizations_only,
1328
- "prompt_id": prompt_id,
1329
- "name": name,
1330
- "sorting": sorting,
1331
- "filters": filters,
1332
1368
  },
1333
1369
  request_options=request_options,
1334
1370
  )
1335
1371
  try:
1336
1372
  if 200 <= _response.status_code < 300:
1337
1373
  _data = typing.cast(
1338
- DatasetPagePublic,
1374
+ DatasetVersionPagePublic,
1339
1375
  parse_obj_as(
1340
- type_=DatasetPagePublic, # type: ignore
1376
+ type_=DatasetVersionPagePublic, # type: ignore
1341
1377
  object_=_response.json(),
1342
1378
  ),
1343
1379
  )
1344
- return AsyncHttpResponse(response=_response, data=_data)
1380
+ return HttpResponse(response=_response, data=_data)
1381
+ if _response.status_code == 400:
1382
+ raise BadRequestError(
1383
+ headers=dict(_response.headers),
1384
+ body=typing.cast(
1385
+ typing.Optional[typing.Any],
1386
+ parse_obj_as(
1387
+ type_=typing.Optional[typing.Any], # type: ignore
1388
+ object_=_response.json(),
1389
+ ),
1390
+ ),
1391
+ )
1345
1392
  _response_json = _response.json()
1346
1393
  except JSONDecodeError:
1347
1394
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1348
1395
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1349
1396
 
1350
- async def create_dataset(
1397
+ def create_dataset_version(
1351
1398
  self,
1399
+ id: str,
1352
1400
  *,
1353
- name: str,
1354
- id: typing.Optional[str] = OMIT,
1355
- visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
1356
- tags: typing.Optional[typing.Sequence[str]] = OMIT,
1357
- description: typing.Optional[str] = OMIT,
1401
+ tag: typing.Optional[str] = OMIT,
1402
+ change_description: typing.Optional[str] = OMIT,
1403
+ metadata: typing.Optional[typing.Dict[str, str]] = OMIT,
1358
1404
  request_options: typing.Optional[RequestOptions] = None,
1359
- ) -> AsyncHttpResponse[None]:
1405
+ ) -> HttpResponse[None]:
1360
1406
  """
1361
- Create dataset
1407
+ Create a new immutable version of the dataset by snapshotting the current state
1362
1408
 
1363
1409
  Parameters
1364
1410
  ----------
1365
- name : str
1366
-
1367
- id : typing.Optional[str]
1411
+ id : str
1368
1412
 
1369
- visibility : typing.Optional[DatasetWriteVisibility]
1413
+ tag : typing.Optional[str]
1414
+ Optional tag for this version
1370
1415
 
1371
- tags : typing.Optional[typing.Sequence[str]]
1416
+ change_description : typing.Optional[str]
1417
+ Optional description of changes in this version
1372
1418
 
1373
- description : typing.Optional[str]
1419
+ metadata : typing.Optional[typing.Dict[str, str]]
1420
+ Optional user-defined metadata
1374
1421
 
1375
1422
  request_options : typing.Optional[RequestOptions]
1376
1423
  Request-specific configuration.
1377
1424
 
1378
1425
  Returns
1379
1426
  -------
1380
- AsyncHttpResponse[None]
1427
+ HttpResponse[None]
1381
1428
  """
1382
- _response = await self._client_wrapper.httpx_client.request(
1383
- "v1/private/datasets",
1429
+ _response = self._client_wrapper.httpx_client.request(
1430
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
1384
1431
  method="POST",
1385
1432
  json={
1386
- "id": id,
1387
- "name": name,
1388
- "visibility": visibility,
1389
- "tags": tags,
1390
- "description": description,
1433
+ "tag": tag,
1434
+ "change_description": change_description,
1435
+ "metadata": metadata,
1391
1436
  },
1392
1437
  headers={
1393
1438
  "content-type": "application/json",
@@ -1397,112 +1442,111 @@ class AsyncRawDatasetsClient:
1397
1442
  )
1398
1443
  try:
1399
1444
  if 200 <= _response.status_code < 300:
1400
- return AsyncHttpResponse(response=_response, data=None)
1445
+ return HttpResponse(response=_response, data=None)
1446
+ if _response.status_code == 400:
1447
+ raise BadRequestError(
1448
+ headers=dict(_response.headers),
1449
+ body=typing.cast(
1450
+ typing.Optional[typing.Any],
1451
+ parse_obj_as(
1452
+ type_=typing.Optional[typing.Any], # type: ignore
1453
+ object_=_response.json(),
1454
+ ),
1455
+ ),
1456
+ )
1457
+ if _response.status_code == 409:
1458
+ raise ConflictError(
1459
+ headers=dict(_response.headers),
1460
+ body=typing.cast(
1461
+ typing.Optional[typing.Any],
1462
+ parse_obj_as(
1463
+ type_=typing.Optional[typing.Any], # type: ignore
1464
+ object_=_response.json(),
1465
+ ),
1466
+ ),
1467
+ )
1401
1468
  _response_json = _response.json()
1402
1469
  except JSONDecodeError:
1403
1470
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1404
1471
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1405
1472
 
1406
- async def create_or_update_dataset_items(
1407
- self,
1408
- *,
1409
- items: typing.Sequence[DatasetItemWrite],
1410
- dataset_name: typing.Optional[str] = OMIT,
1411
- dataset_id: typing.Optional[str] = OMIT,
1412
- request_options: typing.Optional[RequestOptions] = None,
1413
- ) -> AsyncHttpResponse[None]:
1473
+ def delete_version_tag(
1474
+ self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
1475
+ ) -> HttpResponse[None]:
1414
1476
  """
1415
- Create/update dataset items based on dataset item id
1477
+ Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.
1416
1478
 
1417
1479
  Parameters
1418
1480
  ----------
1419
- items : typing.Sequence[DatasetItemWrite]
1481
+ version_hash : str
1420
1482
 
1421
- dataset_name : typing.Optional[str]
1422
- If null, dataset_id must be provided
1483
+ tag : str
1423
1484
 
1424
- dataset_id : typing.Optional[str]
1425
- If null, dataset_name must be provided
1485
+ id : str
1426
1486
 
1427
1487
  request_options : typing.Optional[RequestOptions]
1428
1488
  Request-specific configuration.
1429
1489
 
1430
1490
  Returns
1431
1491
  -------
1432
- AsyncHttpResponse[None]
1492
+ HttpResponse[None]
1433
1493
  """
1434
- _response = await self._client_wrapper.httpx_client.request(
1435
- "v1/private/datasets/items",
1436
- method="PUT",
1437
- json={
1438
- "dataset_name": dataset_name,
1439
- "dataset_id": dataset_id,
1440
- "items": convert_and_respect_annotation_metadata(
1441
- object_=items, annotation=typing.Sequence[DatasetItemWrite], direction="write"
1442
- ),
1443
- },
1444
- headers={
1445
- "content-type": "application/json",
1446
- },
1494
+ _response = self._client_wrapper.httpx_client.request(
1495
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
1496
+ method="DELETE",
1447
1497
  request_options=request_options,
1448
- omit=OMIT,
1449
1498
  )
1450
1499
  try:
1451
1500
  if 200 <= _response.status_code < 300:
1452
- return AsyncHttpResponse(response=_response, data=None)
1501
+ return HttpResponse(response=_response, data=None)
1453
1502
  _response_json = _response.json()
1454
1503
  except JSONDecodeError:
1455
1504
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1456
1505
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1457
1506
 
1458
- async def create_dataset_items_from_csv(
1459
- self,
1460
- *,
1461
- file: typing.Dict[str, typing.Optional[typing.Any]],
1462
- dataset_id: str,
1463
- request_options: typing.Optional[RequestOptions] = None,
1464
- ) -> AsyncHttpResponse[None]:
1507
+ def restore_dataset_version(
1508
+ self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
1509
+ ) -> HttpResponse[DatasetVersionPublic]:
1465
1510
  """
1466
- Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.
1511
+ Restores the dataset to a previous version state. All draft items are replaced with items from the specified version. If the version is not the latest, a new version snapshot is created. If the version is the latest, only draft items are replaced (revert functionality).
1467
1512
 
1468
1513
  Parameters
1469
1514
  ----------
1470
- file : typing.Dict[str, typing.Optional[typing.Any]]
1515
+ id : str
1471
1516
 
1472
- dataset_id : str
1517
+ version_ref : str
1518
+ Version hash or tag to restore from
1473
1519
 
1474
1520
  request_options : typing.Optional[RequestOptions]
1475
1521
  Request-specific configuration.
1476
1522
 
1477
1523
  Returns
1478
1524
  -------
1479
- AsyncHttpResponse[None]
1525
+ HttpResponse[DatasetVersionPublic]
1526
+ Version restored successfully
1480
1527
  """
1481
- _response = await self._client_wrapper.httpx_client.request(
1482
- "v1/private/datasets/items/from-csv",
1528
+ _response = self._client_wrapper.httpx_client.request(
1529
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
1483
1530
  method="POST",
1484
- data={
1485
- "file": file,
1486
- "dataset_id": dataset_id,
1531
+ json={
1532
+ "version_ref": version_ref,
1487
1533
  },
1488
- files={},
1489
- request_options=request_options,
1490
- omit=OMIT,
1534
+ headers={
1535
+ "content-type": "application/json",
1536
+ },
1537
+ request_options=request_options,
1538
+ omit=OMIT,
1491
1539
  )
1492
1540
  try:
1493
1541
  if 200 <= _response.status_code < 300:
1494
- return AsyncHttpResponse(response=_response, data=None)
1495
- if _response.status_code == 400:
1496
- raise BadRequestError(
1497
- headers=dict(_response.headers),
1498
- body=typing.cast(
1499
- typing.Optional[typing.Any],
1500
- parse_obj_as(
1501
- type_=typing.Optional[typing.Any], # type: ignore
1502
- object_=_response.json(),
1503
- ),
1542
+ _data = typing.cast(
1543
+ DatasetVersionPublic,
1544
+ parse_obj_as(
1545
+ type_=DatasetVersionPublic, # type: ignore
1546
+ object_=_response.json(),
1504
1547
  ),
1505
1548
  )
1549
+ return HttpResponse(response=_response, data=_data)
1506
1550
  if _response.status_code == 404:
1507
1551
  raise NotFoundError(
1508
1552
  headers=dict(_response.headers),
@@ -1519,25 +1563,34 @@ class AsyncRawDatasetsClient:
1519
1563
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1520
1564
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1521
1565
 
1522
- async def create_dataset_items_from_spans(
1566
+
1567
+ class AsyncRawDatasetsClient:
1568
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
1569
+ self._client_wrapper = client_wrapper
1570
+
1571
+ async def batch_update_dataset_items(
1523
1572
  self,
1524
- dataset_id: str,
1525
1573
  *,
1526
- span_ids: typing.Sequence[str],
1527
- enrichment_options: SpanEnrichmentOptions,
1574
+ update: DatasetItemUpdate,
1575
+ ids: typing.Optional[typing.Sequence[str]] = OMIT,
1576
+ filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
1577
+ merge_tags: typing.Optional[bool] = OMIT,
1528
1578
  request_options: typing.Optional[RequestOptions] = None,
1529
1579
  ) -> AsyncHttpResponse[None]:
1530
1580
  """
1531
- Create dataset items from spans with enriched metadata
1581
+ Update multiple dataset items
1532
1582
 
1533
1583
  Parameters
1534
1584
  ----------
1535
- dataset_id : str
1585
+ update : DatasetItemUpdate
1536
1586
 
1537
- span_ids : typing.Sequence[str]
1538
- Set of span IDs to add to the dataset
1587
+ ids : typing.Optional[typing.Sequence[str]]
1588
+ List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
1539
1589
 
1540
- enrichment_options : SpanEnrichmentOptions
1590
+ filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
1591
+
1592
+ merge_tags : typing.Optional[bool]
1593
+ If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
1541
1594
 
1542
1595
  request_options : typing.Optional[RequestOptions]
1543
1596
  Request-specific configuration.
@@ -1547,13 +1600,17 @@ class AsyncRawDatasetsClient:
1547
1600
  AsyncHttpResponse[None]
1548
1601
  """
1549
1602
  _response = await self._client_wrapper.httpx_client.request(
1550
- f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
1551
- method="POST",
1603
+ "v1/private/datasets/items/batch",
1604
+ method="PATCH",
1552
1605
  json={
1553
- "span_ids": span_ids,
1554
- "enrichment_options": convert_and_respect_annotation_metadata(
1555
- object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
1606
+ "ids": ids,
1607
+ "filters": convert_and_respect_annotation_metadata(
1608
+ object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
1556
1609
  ),
1610
+ "update": convert_and_respect_annotation_metadata(
1611
+ object_=update, annotation=DatasetItemUpdate, direction="write"
1612
+ ),
1613
+ "merge_tags": merge_tags,
1557
1614
  },
1558
1615
  headers={
1559
1616
  "content-type": "application/json",
@@ -1564,90 +1621,85 @@ class AsyncRawDatasetsClient:
1564
1621
  try:
1565
1622
  if 200 <= _response.status_code < 300:
1566
1623
  return AsyncHttpResponse(response=_response, data=None)
1624
+ if _response.status_code == 400:
1625
+ raise BadRequestError(
1626
+ headers=dict(_response.headers),
1627
+ body=typing.cast(
1628
+ typing.Optional[typing.Any],
1629
+ parse_obj_as(
1630
+ type_=typing.Optional[typing.Any], # type: ignore
1631
+ object_=_response.json(),
1632
+ ),
1633
+ ),
1634
+ )
1567
1635
  _response_json = _response.json()
1568
1636
  except JSONDecodeError:
1569
1637
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1570
1638
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1571
1639
 
1572
- async def create_dataset_items_from_traces(
1640
+ async def find_datasets(
1573
1641
  self,
1574
- dataset_id: str,
1575
1642
  *,
1576
- trace_ids: typing.Sequence[str],
1577
- enrichment_options: TraceEnrichmentOptions,
1643
+ page: typing.Optional[int] = None,
1644
+ size: typing.Optional[int] = None,
1645
+ with_experiments_only: typing.Optional[bool] = None,
1646
+ with_optimizations_only: typing.Optional[bool] = None,
1647
+ prompt_id: typing.Optional[str] = None,
1648
+ name: typing.Optional[str] = None,
1649
+ sorting: typing.Optional[str] = None,
1650
+ filters: typing.Optional[str] = None,
1578
1651
  request_options: typing.Optional[RequestOptions] = None,
1579
- ) -> AsyncHttpResponse[None]:
1652
+ ) -> AsyncHttpResponse[DatasetPagePublic]:
1580
1653
  """
1581
- Create dataset items from traces with enriched metadata
1654
+ Find datasets
1582
1655
 
1583
1656
  Parameters
1584
1657
  ----------
1585
- dataset_id : str
1658
+ page : typing.Optional[int]
1586
1659
 
1587
- trace_ids : typing.Sequence[str]
1588
- Set of trace IDs to add to the dataset
1660
+ size : typing.Optional[int]
1589
1661
 
1590
- enrichment_options : TraceEnrichmentOptions
1662
+ with_experiments_only : typing.Optional[bool]
1591
1663
 
1592
- request_options : typing.Optional[RequestOptions]
1593
- Request-specific configuration.
1664
+ with_optimizations_only : typing.Optional[bool]
1594
1665
 
1595
- Returns
1596
- -------
1597
- AsyncHttpResponse[None]
1598
- """
1599
- _response = await self._client_wrapper.httpx_client.request(
1600
- f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-traces",
1601
- method="POST",
1602
- json={
1603
- "trace_ids": trace_ids,
1604
- "enrichment_options": convert_and_respect_annotation_metadata(
1605
- object_=enrichment_options, annotation=TraceEnrichmentOptions, direction="write"
1606
- ),
1607
- },
1608
- headers={
1609
- "content-type": "application/json",
1610
- },
1611
- request_options=request_options,
1612
- omit=OMIT,
1613
- )
1614
- try:
1615
- if 200 <= _response.status_code < 300:
1616
- return AsyncHttpResponse(response=_response, data=None)
1617
- _response_json = _response.json()
1618
- except JSONDecodeError:
1619
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1620
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1666
+ prompt_id : typing.Optional[str]
1621
1667
 
1622
- async def get_dataset_by_id(
1623
- self, id: str, *, request_options: typing.Optional[RequestOptions] = None
1624
- ) -> AsyncHttpResponse[DatasetPublic]:
1625
- """
1626
- Get dataset by id
1668
+ name : typing.Optional[str]
1627
1669
 
1628
- Parameters
1629
- ----------
1630
- id : str
1670
+ sorting : typing.Optional[str]
1671
+
1672
+ filters : typing.Optional[str]
1631
1673
 
1632
1674
  request_options : typing.Optional[RequestOptions]
1633
1675
  Request-specific configuration.
1634
1676
 
1635
1677
  Returns
1636
1678
  -------
1637
- AsyncHttpResponse[DatasetPublic]
1679
+ AsyncHttpResponse[DatasetPagePublic]
1638
1680
  Dataset resource
1639
1681
  """
1640
1682
  _response = await self._client_wrapper.httpx_client.request(
1641
- f"v1/private/datasets/{jsonable_encoder(id)}",
1683
+ "v1/private/datasets",
1642
1684
  method="GET",
1685
+ params={
1686
+ "page": page,
1687
+ "size": size,
1688
+ "with_experiments_only": with_experiments_only,
1689
+ "with_optimizations_only": with_optimizations_only,
1690
+ "prompt_id": prompt_id,
1691
+ "name": name,
1692
+ "sorting": sorting,
1693
+ "filters": filters,
1694
+ },
1643
1695
  request_options=request_options,
1644
1696
  )
1645
1697
  try:
1646
1698
  if 200 <= _response.status_code < 300:
1647
1699
  _data = typing.cast(
1648
- DatasetPublic,
1700
+ DatasetPagePublic,
1649
1701
  parse_obj_as(
1650
- type_=DatasetPublic, # type: ignore
1702
+ type_=DatasetPagePublic, # type: ignore
1651
1703
  object_=_response.json(),
1652
1704
  ),
1653
1705
  )
@@ -1657,31 +1709,31 @@ class AsyncRawDatasetsClient:
1657
1709
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1658
1710
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1659
1711
 
1660
- async def update_dataset(
1712
+ async def create_dataset(
1661
1713
  self,
1662
- id: str,
1663
1714
  *,
1664
1715
  name: str,
1665
- description: typing.Optional[str] = OMIT,
1666
- visibility: typing.Optional[DatasetUpdateVisibility] = OMIT,
1716
+ id: typing.Optional[str] = OMIT,
1717
+ visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
1667
1718
  tags: typing.Optional[typing.Sequence[str]] = OMIT,
1719
+ description: typing.Optional[str] = OMIT,
1668
1720
  request_options: typing.Optional[RequestOptions] = None,
1669
1721
  ) -> AsyncHttpResponse[None]:
1670
1722
  """
1671
- Update dataset by id
1723
+ Create dataset
1672
1724
 
1673
1725
  Parameters
1674
1726
  ----------
1675
- id : str
1676
-
1677
1727
  name : str
1678
1728
 
1679
- description : typing.Optional[str]
1729
+ id : typing.Optional[str]
1680
1730
 
1681
- visibility : typing.Optional[DatasetUpdateVisibility]
1731
+ visibility : typing.Optional[DatasetWriteVisibility]
1682
1732
 
1683
1733
  tags : typing.Optional[typing.Sequence[str]]
1684
1734
 
1735
+ description : typing.Optional[str]
1736
+
1685
1737
  request_options : typing.Optional[RequestOptions]
1686
1738
  Request-specific configuration.
1687
1739
 
@@ -1690,13 +1742,14 @@ class AsyncRawDatasetsClient:
1690
1742
  AsyncHttpResponse[None]
1691
1743
  """
1692
1744
  _response = await self._client_wrapper.httpx_client.request(
1693
- f"v1/private/datasets/{jsonable_encoder(id)}",
1694
- method="PUT",
1745
+ "v1/private/datasets",
1746
+ method="POST",
1695
1747
  json={
1748
+ "id": id,
1696
1749
  "name": name,
1697
- "description": description,
1698
1750
  "visibility": visibility,
1699
1751
  "tags": tags,
1752
+ "description": description,
1700
1753
  },
1701
1754
  headers={
1702
1755
  "content-type": "application/json",
@@ -1712,45 +1765,26 @@ class AsyncRawDatasetsClient:
1712
1765
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1713
1766
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1714
1767
 
1715
- async def delete_dataset(
1716
- self, id: str, *, request_options: typing.Optional[RequestOptions] = None
1768
+ async def create_or_update_dataset_items(
1769
+ self,
1770
+ *,
1771
+ items: typing.Sequence[DatasetItemWrite],
1772
+ dataset_name: typing.Optional[str] = OMIT,
1773
+ dataset_id: typing.Optional[str] = OMIT,
1774
+ request_options: typing.Optional[RequestOptions] = None,
1717
1775
  ) -> AsyncHttpResponse[None]:
1718
1776
  """
1719
- Delete dataset by id
1777
+ Create/update dataset items based on dataset item id
1720
1778
 
1721
1779
  Parameters
1722
1780
  ----------
1723
- id : str
1724
-
1725
- request_options : typing.Optional[RequestOptions]
1726
- Request-specific configuration.
1727
-
1728
- Returns
1729
- -------
1730
- AsyncHttpResponse[None]
1731
- """
1732
- _response = await self._client_wrapper.httpx_client.request(
1733
- f"v1/private/datasets/{jsonable_encoder(id)}",
1734
- method="DELETE",
1735
- request_options=request_options,
1736
- )
1737
- try:
1738
- if 200 <= _response.status_code < 300:
1739
- return AsyncHttpResponse(response=_response, data=None)
1740
- _response_json = _response.json()
1741
- except JSONDecodeError:
1742
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1743
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1781
+ items : typing.Sequence[DatasetItemWrite]
1744
1782
 
1745
- async def delete_dataset_by_name(
1746
- self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
1747
- ) -> AsyncHttpResponse[None]:
1748
- """
1749
- Delete dataset by name
1783
+ dataset_name : typing.Optional[str]
1784
+ If null, dataset_id must be provided
1750
1785
 
1751
- Parameters
1752
- ----------
1753
- dataset_name : str
1786
+ dataset_id : typing.Optional[str]
1787
+ If null, dataset_name must be provided
1754
1788
 
1755
1789
  request_options : typing.Optional[RequestOptions]
1756
1790
  Request-specific configuration.
@@ -1760,10 +1794,14 @@ class AsyncRawDatasetsClient:
1760
1794
  AsyncHttpResponse[None]
1761
1795
  """
1762
1796
  _response = await self._client_wrapper.httpx_client.request(
1763
- "v1/private/datasets/delete",
1764
- method="POST",
1797
+ "v1/private/datasets/items",
1798
+ method="PUT",
1765
1799
  json={
1766
1800
  "dataset_name": dataset_name,
1801
+ "dataset_id": dataset_id,
1802
+ "items": convert_and_respect_annotation_metadata(
1803
+ object_=items, annotation=typing.Sequence[DatasetItemWrite], direction="write"
1804
+ ),
1767
1805
  },
1768
1806
  headers={
1769
1807
  "content-type": "application/json",
@@ -1779,15 +1817,21 @@ class AsyncRawDatasetsClient:
1779
1817
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1780
1818
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1781
1819
 
1782
- async def delete_dataset_items(
1783
- self, *, item_ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
1820
+ async def create_dataset_items_from_csv(
1821
+ self,
1822
+ *,
1823
+ file: typing.Dict[str, typing.Optional[typing.Any]],
1824
+ dataset_id: str,
1825
+ request_options: typing.Optional[RequestOptions] = None,
1784
1826
  ) -> AsyncHttpResponse[None]:
1785
1827
  """
1786
- Delete dataset items
1828
+ Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.
1787
1829
 
1788
1830
  Parameters
1789
1831
  ----------
1790
- item_ids : typing.Sequence[str]
1832
+ file : typing.Dict[str, typing.Optional[typing.Any]]
1833
+
1834
+ dataset_id : str
1791
1835
 
1792
1836
  request_options : typing.Optional[RequestOptions]
1793
1837
  Request-specific configuration.
@@ -1797,112 +1841,705 @@ class AsyncRawDatasetsClient:
1797
1841
  AsyncHttpResponse[None]
1798
1842
  """
1799
1843
  _response = await self._client_wrapper.httpx_client.request(
1800
- "v1/private/datasets/items/delete",
1844
+ "v1/private/datasets/items/from-csv",
1801
1845
  method="POST",
1802
- json={
1803
- "item_ids": item_ids,
1846
+ data={
1847
+ "file": file,
1848
+ "dataset_id": dataset_id,
1804
1849
  },
1805
- headers={
1806
- "content-type": "application/json",
1850
+ files={},
1851
+ request_options=request_options,
1852
+ omit=OMIT,
1853
+ )
1854
+ try:
1855
+ if 200 <= _response.status_code < 300:
1856
+ return AsyncHttpResponse(response=_response, data=None)
1857
+ if _response.status_code == 400:
1858
+ raise BadRequestError(
1859
+ headers=dict(_response.headers),
1860
+ body=typing.cast(
1861
+ typing.Optional[typing.Any],
1862
+ parse_obj_as(
1863
+ type_=typing.Optional[typing.Any], # type: ignore
1864
+ object_=_response.json(),
1865
+ ),
1866
+ ),
1867
+ )
1868
+ if _response.status_code == 404:
1869
+ raise NotFoundError(
1870
+ headers=dict(_response.headers),
1871
+ body=typing.cast(
1872
+ typing.Optional[typing.Any],
1873
+ parse_obj_as(
1874
+ type_=typing.Optional[typing.Any], # type: ignore
1875
+ object_=_response.json(),
1876
+ ),
1877
+ ),
1878
+ )
1879
+ _response_json = _response.json()
1880
+ except JSONDecodeError:
1881
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1882
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1883
+
1884
+ async def create_dataset_items_from_spans(
1885
+ self,
1886
+ dataset_id: str,
1887
+ *,
1888
+ span_ids: typing.Sequence[str],
1889
+ enrichment_options: SpanEnrichmentOptions,
1890
+ request_options: typing.Optional[RequestOptions] = None,
1891
+ ) -> AsyncHttpResponse[None]:
1892
+ """
1893
+ Create dataset items from spans with enriched metadata
1894
+
1895
+ Parameters
1896
+ ----------
1897
+ dataset_id : str
1898
+
1899
+ span_ids : typing.Sequence[str]
1900
+ Set of span IDs to add to the dataset
1901
+
1902
+ enrichment_options : SpanEnrichmentOptions
1903
+
1904
+ request_options : typing.Optional[RequestOptions]
1905
+ Request-specific configuration.
1906
+
1907
+ Returns
1908
+ -------
1909
+ AsyncHttpResponse[None]
1910
+ """
1911
+ _response = await self._client_wrapper.httpx_client.request(
1912
+ f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
1913
+ method="POST",
1914
+ json={
1915
+ "span_ids": span_ids,
1916
+ "enrichment_options": convert_and_respect_annotation_metadata(
1917
+ object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
1918
+ ),
1919
+ },
1920
+ headers={
1921
+ "content-type": "application/json",
1922
+ },
1923
+ request_options=request_options,
1924
+ omit=OMIT,
1925
+ )
1926
+ try:
1927
+ if 200 <= _response.status_code < 300:
1928
+ return AsyncHttpResponse(response=_response, data=None)
1929
+ _response_json = _response.json()
1930
+ except JSONDecodeError:
1931
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1932
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1933
+
1934
+ async def create_dataset_items_from_traces(
1935
+ self,
1936
+ dataset_id: str,
1937
+ *,
1938
+ trace_ids: typing.Sequence[str],
1939
+ enrichment_options: TraceEnrichmentOptions,
1940
+ request_options: typing.Optional[RequestOptions] = None,
1941
+ ) -> AsyncHttpResponse[None]:
1942
+ """
1943
+ Create dataset items from traces with enriched metadata
1944
+
1945
+ Parameters
1946
+ ----------
1947
+ dataset_id : str
1948
+
1949
+ trace_ids : typing.Sequence[str]
1950
+ Set of trace IDs to add to the dataset
1951
+
1952
+ enrichment_options : TraceEnrichmentOptions
1953
+
1954
+ request_options : typing.Optional[RequestOptions]
1955
+ Request-specific configuration.
1956
+
1957
+ Returns
1958
+ -------
1959
+ AsyncHttpResponse[None]
1960
+ """
1961
+ _response = await self._client_wrapper.httpx_client.request(
1962
+ f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-traces",
1963
+ method="POST",
1964
+ json={
1965
+ "trace_ids": trace_ids,
1966
+ "enrichment_options": convert_and_respect_annotation_metadata(
1967
+ object_=enrichment_options, annotation=TraceEnrichmentOptions, direction="write"
1968
+ ),
1969
+ },
1970
+ headers={
1971
+ "content-type": "application/json",
1972
+ },
1973
+ request_options=request_options,
1974
+ omit=OMIT,
1975
+ )
1976
+ try:
1977
+ if 200 <= _response.status_code < 300:
1978
+ return AsyncHttpResponse(response=_response, data=None)
1979
+ _response_json = _response.json()
1980
+ except JSONDecodeError:
1981
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1982
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1983
+
1984
+ async def get_dataset_by_id(
1985
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
1986
+ ) -> AsyncHttpResponse[DatasetPublic]:
1987
+ """
1988
+ Get dataset by id
1989
+
1990
+ Parameters
1991
+ ----------
1992
+ id : str
1993
+
1994
+ request_options : typing.Optional[RequestOptions]
1995
+ Request-specific configuration.
1996
+
1997
+ Returns
1998
+ -------
1999
+ AsyncHttpResponse[DatasetPublic]
2000
+ Dataset resource
2001
+ """
2002
+ _response = await self._client_wrapper.httpx_client.request(
2003
+ f"v1/private/datasets/{jsonable_encoder(id)}",
2004
+ method="GET",
2005
+ request_options=request_options,
2006
+ )
2007
+ try:
2008
+ if 200 <= _response.status_code < 300:
2009
+ _data = typing.cast(
2010
+ DatasetPublic,
2011
+ parse_obj_as(
2012
+ type_=DatasetPublic, # type: ignore
2013
+ object_=_response.json(),
2014
+ ),
2015
+ )
2016
+ return AsyncHttpResponse(response=_response, data=_data)
2017
+ _response_json = _response.json()
2018
+ except JSONDecodeError:
2019
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2020
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2021
+
2022
+ async def update_dataset(
2023
+ self,
2024
+ id: str,
2025
+ *,
2026
+ name: str,
2027
+ description: typing.Optional[str] = OMIT,
2028
+ visibility: typing.Optional[DatasetUpdateVisibility] = OMIT,
2029
+ tags: typing.Optional[typing.Sequence[str]] = OMIT,
2030
+ request_options: typing.Optional[RequestOptions] = None,
2031
+ ) -> AsyncHttpResponse[None]:
2032
+ """
2033
+ Update dataset by id
2034
+
2035
+ Parameters
2036
+ ----------
2037
+ id : str
2038
+
2039
+ name : str
2040
+
2041
+ description : typing.Optional[str]
2042
+
2043
+ visibility : typing.Optional[DatasetUpdateVisibility]
2044
+
2045
+ tags : typing.Optional[typing.Sequence[str]]
2046
+
2047
+ request_options : typing.Optional[RequestOptions]
2048
+ Request-specific configuration.
2049
+
2050
+ Returns
2051
+ -------
2052
+ AsyncHttpResponse[None]
2053
+ """
2054
+ _response = await self._client_wrapper.httpx_client.request(
2055
+ f"v1/private/datasets/{jsonable_encoder(id)}",
2056
+ method="PUT",
2057
+ json={
2058
+ "name": name,
2059
+ "description": description,
2060
+ "visibility": visibility,
2061
+ "tags": tags,
2062
+ },
2063
+ headers={
2064
+ "content-type": "application/json",
2065
+ },
2066
+ request_options=request_options,
2067
+ omit=OMIT,
2068
+ )
2069
+ try:
2070
+ if 200 <= _response.status_code < 300:
2071
+ return AsyncHttpResponse(response=_response, data=None)
2072
+ _response_json = _response.json()
2073
+ except JSONDecodeError:
2074
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2075
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2076
+
2077
+ async def delete_dataset(
2078
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
2079
+ ) -> AsyncHttpResponse[None]:
2080
+ """
2081
+ Delete dataset by id
2082
+
2083
+ Parameters
2084
+ ----------
2085
+ id : str
2086
+
2087
+ request_options : typing.Optional[RequestOptions]
2088
+ Request-specific configuration.
2089
+
2090
+ Returns
2091
+ -------
2092
+ AsyncHttpResponse[None]
2093
+ """
2094
+ _response = await self._client_wrapper.httpx_client.request(
2095
+ f"v1/private/datasets/{jsonable_encoder(id)}",
2096
+ method="DELETE",
2097
+ request_options=request_options,
2098
+ )
2099
+ try:
2100
+ if 200 <= _response.status_code < 300:
2101
+ return AsyncHttpResponse(response=_response, data=None)
2102
+ _response_json = _response.json()
2103
+ except JSONDecodeError:
2104
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2105
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2106
+
2107
+ async def delete_dataset_by_name(
2108
+ self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
2109
+ ) -> AsyncHttpResponse[None]:
2110
+ """
2111
+ Delete dataset by name
2112
+
2113
+ Parameters
2114
+ ----------
2115
+ dataset_name : str
2116
+
2117
+ request_options : typing.Optional[RequestOptions]
2118
+ Request-specific configuration.
2119
+
2120
+ Returns
2121
+ -------
2122
+ AsyncHttpResponse[None]
2123
+ """
2124
+ _response = await self._client_wrapper.httpx_client.request(
2125
+ "v1/private/datasets/delete",
2126
+ method="POST",
2127
+ json={
2128
+ "dataset_name": dataset_name,
2129
+ },
2130
+ headers={
2131
+ "content-type": "application/json",
2132
+ },
2133
+ request_options=request_options,
2134
+ omit=OMIT,
2135
+ )
2136
+ try:
2137
+ if 200 <= _response.status_code < 300:
2138
+ return AsyncHttpResponse(response=_response, data=None)
2139
+ _response_json = _response.json()
2140
+ except JSONDecodeError:
2141
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2142
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2143
+
2144
+ async def delete_dataset_items(
2145
+ self, *, item_ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
2146
+ ) -> AsyncHttpResponse[None]:
2147
+ """
2148
+ Delete dataset items
2149
+
2150
+ Parameters
2151
+ ----------
2152
+ item_ids : typing.Sequence[str]
2153
+
2154
+ request_options : typing.Optional[RequestOptions]
2155
+ Request-specific configuration.
2156
+
2157
+ Returns
2158
+ -------
2159
+ AsyncHttpResponse[None]
2160
+ """
2161
+ _response = await self._client_wrapper.httpx_client.request(
2162
+ "v1/private/datasets/items/delete",
2163
+ method="POST",
2164
+ json={
2165
+ "item_ids": item_ids,
2166
+ },
2167
+ headers={
2168
+ "content-type": "application/json",
2169
+ },
2170
+ request_options=request_options,
2171
+ omit=OMIT,
2172
+ )
2173
+ try:
2174
+ if 200 <= _response.status_code < 300:
2175
+ return AsyncHttpResponse(response=_response, data=None)
2176
+ _response_json = _response.json()
2177
+ except JSONDecodeError:
2178
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2179
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2180
+
2181
+ async def delete_datasets_batch(
2182
+ self, *, ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
2183
+ ) -> AsyncHttpResponse[None]:
2184
+ """
2185
+ Delete datasets batch
2186
+
2187
+ Parameters
2188
+ ----------
2189
+ ids : typing.Sequence[str]
2190
+
2191
+ request_options : typing.Optional[RequestOptions]
2192
+ Request-specific configuration.
2193
+
2194
+ Returns
2195
+ -------
2196
+ AsyncHttpResponse[None]
2197
+ """
2198
+ _response = await self._client_wrapper.httpx_client.request(
2199
+ "v1/private/datasets/delete-batch",
2200
+ method="POST",
2201
+ json={
2202
+ "ids": ids,
2203
+ },
2204
+ headers={
2205
+ "content-type": "application/json",
2206
+ },
2207
+ request_options=request_options,
2208
+ omit=OMIT,
2209
+ )
2210
+ try:
2211
+ if 200 <= _response.status_code < 300:
2212
+ return AsyncHttpResponse(response=_response, data=None)
2213
+ _response_json = _response.json()
2214
+ except JSONDecodeError:
2215
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2216
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2217
+
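The delete helpers above (by name, by item ids, and as a batch) each return AsyncHttpResponse[None] and raise ApiError on non-2xx responses. A minimal usage sketch follows; it assumes a `datasets` argument that is already an AsyncRawDatasetsClient instance (how the surrounding SDK constructs one is not shown in this diff), and the dataset name and item ids are placeholders.

import typing

async def purge_dataset(datasets, dataset_name: str, stale_item_ids: typing.Sequence[str]) -> None:
    # Remove individual items first, then drop the dataset itself by name.
    await datasets.delete_dataset_items(item_ids=stale_item_ids)
    await datasets.delete_dataset_by_name(dataset_name=dataset_name)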
2218
+ async def expand_dataset(
2219
+ self,
2220
+ id: str,
2221
+ *,
2222
+ model: str,
2223
+ sample_count: typing.Optional[int] = OMIT,
2224
+ preserve_fields: typing.Optional[typing.Sequence[str]] = OMIT,
2225
+ variation_instructions: typing.Optional[str] = OMIT,
2226
+ custom_prompt: typing.Optional[str] = OMIT,
2227
+ request_options: typing.Optional[RequestOptions] = None,
2228
+ ) -> AsyncHttpResponse[DatasetExpansionResponse]:
2229
+ """
2230
+ Generate synthetic dataset samples with an LLM, based on patterns in the existing data
2231
+
2232
+ Parameters
2233
+ ----------
2234
+ id : str
2235
+
2236
+ model : str
2237
+ The model to use for synthetic data generation
2238
+
2239
+ sample_count : typing.Optional[int]
2240
+ Number of synthetic samples to generate
2241
+
2242
+ preserve_fields : typing.Optional[typing.Sequence[str]]
2243
+ Fields to preserve patterns from original data
2244
+
2245
+ variation_instructions : typing.Optional[str]
2246
+ Additional instructions for data variation
2247
+
2248
+ custom_prompt : typing.Optional[str]
2249
+ Custom prompt to use for generation instead of auto-generated one
2250
+
2251
+ request_options : typing.Optional[RequestOptions]
2252
+ Request-specific configuration.
2253
+
2254
+ Returns
2255
+ -------
2256
+ AsyncHttpResponse[DatasetExpansionResponse]
2257
+ Generated synthetic samples
2258
+ """
2259
+ _response = await self._client_wrapper.httpx_client.request(
2260
+ f"v1/private/datasets/{jsonable_encoder(id)}/expansions",
2261
+ method="POST",
2262
+ json={
2263
+ "model": model,
2264
+ "sample_count": sample_count,
2265
+ "preserve_fields": preserve_fields,
2266
+ "variation_instructions": variation_instructions,
2267
+ "custom_prompt": custom_prompt,
2268
+ },
2269
+ headers={
2270
+ "content-type": "application/json",
2271
+ },
2272
+ request_options=request_options,
2273
+ omit=OMIT,
2274
+ )
2275
+ try:
2276
+ if 200 <= _response.status_code < 300:
2277
+ _data = typing.cast(
2278
+ DatasetExpansionResponse,
2279
+ parse_obj_as(
2280
+ type_=DatasetExpansionResponse, # type: ignore
2281
+ object_=_response.json(),
2282
+ ),
2283
+ )
2284
+ return AsyncHttpResponse(response=_response, data=_data)
2285
+ _response_json = _response.json()
2286
+ except JSONDecodeError:
2287
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2288
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2289
+
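A sketch of calling the new expand_dataset endpoint, under the same assumption that `datasets` is an AsyncRawDatasetsClient instance; the model name, sample count, and instructions are illustrative values only.

async def grow_dataset(datasets, dataset_id: str):
    # Ask the backend to synthesize 20 extra samples from the existing data patterns.
    response = await datasets.expand_dataset(
        dataset_id,
        model="gpt-4o",  # placeholder model name
        sample_count=20,
        preserve_fields=["input"],
        variation_instructions="Vary the phrasing but keep the intent.",
    )
    return response.data  # DatasetExpansionResponse with the generated samples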
2290
+ async def find_dataset_items_with_experiment_items(
2291
+ self,
2292
+ id: str,
2293
+ *,
2294
+ experiment_ids: str,
2295
+ page: typing.Optional[int] = None,
2296
+ size: typing.Optional[int] = None,
2297
+ filters: typing.Optional[str] = None,
2298
+ sorting: typing.Optional[str] = None,
2299
+ search: typing.Optional[str] = None,
2300
+ truncate: typing.Optional[bool] = None,
2301
+ request_options: typing.Optional[RequestOptions] = None,
2302
+ ) -> AsyncHttpResponse[DatasetItemPageCompare]:
2303
+ """
2304
+ Find dataset items with experiment items
2305
+
2306
+ Parameters
2307
+ ----------
2308
+ id : str
2309
+
2310
+ experiment_ids : str
2311
+
2312
+ page : typing.Optional[int]
2313
+
2314
+ size : typing.Optional[int]
2315
+
2316
+ filters : typing.Optional[str]
2317
+
2318
+ sorting : typing.Optional[str]
2319
+
2320
+ search : typing.Optional[str]
2321
+
2322
+ truncate : typing.Optional[bool]
2323
+
2324
+ request_options : typing.Optional[RequestOptions]
2325
+ Request-specific configuration.
2326
+
2327
+ Returns
2328
+ -------
2329
+ AsyncHttpResponse[DatasetItemPageCompare]
2330
+ Dataset item resource
2331
+ """
2332
+ _response = await self._client_wrapper.httpx_client.request(
2333
+ f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items",
2334
+ method="GET",
2335
+ params={
2336
+ "page": page,
2337
+ "size": size,
2338
+ "experiment_ids": experiment_ids,
2339
+ "filters": filters,
2340
+ "sorting": sorting,
2341
+ "search": search,
2342
+ "truncate": truncate,
2343
+ },
2344
+ request_options=request_options,
2345
+ )
2346
+ try:
2347
+ if 200 <= _response.status_code < 300:
2348
+ _data = typing.cast(
2349
+ DatasetItemPageCompare,
2350
+ parse_obj_as(
2351
+ type_=DatasetItemPageCompare, # type: ignore
2352
+ object_=_response.json(),
2353
+ ),
2354
+ )
2355
+ return AsyncHttpResponse(response=_response, data=_data)
2356
+ _response_json = _response.json()
2357
+ except JSONDecodeError:
2358
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2359
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2360
+
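A possible call to find_dataset_items_with_experiment_items, again assuming a `datasets` AsyncRawDatasetsClient instance. Note that experiment_ids is a single string here; the exact encoding the server expects (for example a JSON-encoded list of ids) is not shown in this diff.

async def first_comparison_page(datasets, dataset_id: str, experiment_ids: str):
    response = await datasets.find_dataset_items_with_experiment_items(
        dataset_id,
        experiment_ids=experiment_ids,
        page=1,
        size=50,
        truncate=True,
    )
    return response.data  # DatasetItemPageCompare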
2361
+ async def get_dataset_by_identifier(
2362
+ self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
2363
+ ) -> AsyncHttpResponse[DatasetPublic]:
2364
+ """
2365
+ Get dataset by name
2366
+
2367
+ Parameters
2368
+ ----------
2369
+ dataset_name : str
2370
+
2371
+ request_options : typing.Optional[RequestOptions]
2372
+ Request-specific configuration.
2373
+
2374
+ Returns
2375
+ -------
2376
+ AsyncHttpResponse[DatasetPublic]
2377
+ Dataset resource
2378
+ """
2379
+ _response = await self._client_wrapper.httpx_client.request(
2380
+ "v1/private/datasets/retrieve",
2381
+ method="POST",
2382
+ json={
2383
+ "dataset_name": dataset_name,
2384
+ },
2385
+ headers={
2386
+ "content-type": "application/json",
2387
+ },
2388
+ request_options=request_options,
2389
+ omit=OMIT,
2390
+ )
2391
+ try:
2392
+ if 200 <= _response.status_code < 300:
2393
+ _data = typing.cast(
2394
+ DatasetPublic,
2395
+ parse_obj_as(
2396
+ type_=DatasetPublic, # type: ignore
2397
+ object_=_response.json(),
2398
+ ),
2399
+ )
2400
+ return AsyncHttpResponse(response=_response, data=_data)
2401
+ _response_json = _response.json()
2402
+ except JSONDecodeError:
2403
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2404
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2405
+
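A small sketch of looking a dataset up by name with get_dataset_by_identifier; it assumes `datasets` is an AsyncRawDatasetsClient instance and that DatasetPublic exposes an `id` field, which this diff does not show.

async def dataset_id_for_name(datasets, dataset_name: str) -> str:
    response = await datasets.get_dataset_by_identifier(dataset_name=dataset_name)
    return response.data.id  # assumes DatasetPublic carries the dataset id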
2406
+ async def get_dataset_experiment_items_stats(
2407
+ self,
2408
+ id: str,
2409
+ *,
2410
+ experiment_ids: str,
2411
+ filters: typing.Optional[str] = None,
2412
+ request_options: typing.Optional[RequestOptions] = None,
2413
+ ) -> AsyncHttpResponse[ProjectStatsPublic]:
2414
+ """
2415
+ Get experiment items stats for dataset
2416
+
2417
+ Parameters
2418
+ ----------
2419
+ id : str
2420
+
2421
+ experiment_ids : str
2422
+
2423
+ filters : typing.Optional[str]
2424
+
2425
+ request_options : typing.Optional[RequestOptions]
2426
+ Request-specific configuration.
2427
+
2428
+ Returns
2429
+ -------
2430
+ AsyncHttpResponse[ProjectStatsPublic]
2431
+ Experiment items stats resource
2432
+ """
2433
+ _response = await self._client_wrapper.httpx_client.request(
2434
+ f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/stats",
2435
+ method="GET",
2436
+ params={
2437
+ "experiment_ids": experiment_ids,
2438
+ "filters": filters,
1807
2439
  },
1808
2440
  request_options=request_options,
1809
- omit=OMIT,
1810
2441
  )
1811
2442
  try:
1812
2443
  if 200 <= _response.status_code < 300:
1813
- return AsyncHttpResponse(response=_response, data=None)
2444
+ _data = typing.cast(
2445
+ ProjectStatsPublic,
2446
+ parse_obj_as(
2447
+ type_=ProjectStatsPublic, # type: ignore
2448
+ object_=_response.json(),
2449
+ ),
2450
+ )
2451
+ return AsyncHttpResponse(response=_response, data=_data)
1814
2452
  _response_json = _response.json()
1815
2453
  except JSONDecodeError:
1816
2454
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1817
2455
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1818
2456
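A brief sketch of the stats endpoint above, with the usual assumption that `datasets` is an AsyncRawDatasetsClient instance and experiment_ids is already encoded as the server expects.

async def experiment_stats(datasets, dataset_id: str, experiment_ids: str):
    response = await datasets.get_dataset_experiment_items_stats(
        dataset_id,
        experiment_ids=experiment_ids,
    )
    return response.data  # ProjectStatsPublic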
 
1819
- async def delete_datasets_batch(
1820
- self, *, ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
1821
- ) -> AsyncHttpResponse[None]:
2457
+ async def get_dataset_item_by_id(
2458
+ self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
2459
+ ) -> AsyncHttpResponse[DatasetItemPublic]:
1822
2460
  """
1823
- Delete datasets batch
2461
+ Get dataset item by id
1824
2462
 
1825
2463
  Parameters
1826
2464
  ----------
1827
- ids : typing.Sequence[str]
2465
+ item_id : str
1828
2466
 
1829
2467
  request_options : typing.Optional[RequestOptions]
1830
2468
  Request-specific configuration.
1831
2469
 
1832
2470
  Returns
1833
2471
  -------
1834
- AsyncHttpResponse[None]
2472
+ AsyncHttpResponse[DatasetItemPublic]
2473
+ Dataset item resource
1835
2474
  """
1836
2475
  _response = await self._client_wrapper.httpx_client.request(
1837
- "v1/private/datasets/delete-batch",
1838
- method="POST",
1839
- json={
1840
- "ids": ids,
1841
- },
1842
- headers={
1843
- "content-type": "application/json",
1844
- },
2476
+ f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
2477
+ method="GET",
1845
2478
  request_options=request_options,
1846
- omit=OMIT,
1847
2479
  )
1848
2480
  try:
1849
2481
  if 200 <= _response.status_code < 300:
1850
- return AsyncHttpResponse(response=_response, data=None)
2482
+ _data = typing.cast(
2483
+ DatasetItemPublic,
2484
+ parse_obj_as(
2485
+ type_=DatasetItemPublic, # type: ignore
2486
+ object_=_response.json(),
2487
+ ),
2488
+ )
2489
+ return AsyncHttpResponse(response=_response, data=_data)
1851
2490
  _response_json = _response.json()
1852
2491
  except JSONDecodeError:
1853
2492
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1854
2493
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1855
2494
 
1856
- async def expand_dataset(
2495
+ async def patch_dataset_item(
1857
2496
  self,
1858
- id: str,
2497
+ item_id: str,
1859
2498
  *,
1860
- model: str,
1861
- sample_count: typing.Optional[int] = OMIT,
1862
- preserve_fields: typing.Optional[typing.Sequence[str]] = OMIT,
1863
- variation_instructions: typing.Optional[str] = OMIT,
1864
- custom_prompt: typing.Optional[str] = OMIT,
2499
+ source: DatasetItemWriteSource,
2500
+ data: JsonNode,
2501
+ id: typing.Optional[str] = OMIT,
2502
+ trace_id: typing.Optional[str] = OMIT,
2503
+ span_id: typing.Optional[str] = OMIT,
2504
+ tags: typing.Optional[typing.Sequence[str]] = OMIT,
1865
2505
  request_options: typing.Optional[RequestOptions] = None,
1866
- ) -> AsyncHttpResponse[DatasetExpansionResponse]:
2506
+ ) -> AsyncHttpResponse[None]:
1867
2507
  """
1868
- Generate synthetic dataset samples using LLM based on existing data patterns
2508
+ Partially update dataset item by id. Only provided fields will be updated.
1869
2509
 
1870
2510
  Parameters
1871
2511
  ----------
1872
- id : str
2512
+ item_id : str
1873
2513
 
1874
- model : str
1875
- The model to use for synthetic data generation
2514
+ source : DatasetItemWriteSource
1876
2515
 
1877
- sample_count : typing.Optional[int]
1878
- Number of synthetic samples to generate
2516
+ data : JsonNode
1879
2517
 
1880
- preserve_fields : typing.Optional[typing.Sequence[str]]
1881
- Fields to preserve patterns from original data
2518
+ id : typing.Optional[str]
1882
2519
 
1883
- variation_instructions : typing.Optional[str]
1884
- Additional instructions for data variation
2520
+ trace_id : typing.Optional[str]
1885
2521
 
1886
- custom_prompt : typing.Optional[str]
1887
- Custom prompt to use for generation instead of auto-generated one
2522
+ span_id : typing.Optional[str]
2523
+
2524
+ tags : typing.Optional[typing.Sequence[str]]
1888
2525
 
1889
2526
  request_options : typing.Optional[RequestOptions]
1890
2527
  Request-specific configuration.
1891
2528
 
1892
2529
  Returns
1893
2530
  -------
1894
- AsyncHttpResponse[DatasetExpansionResponse]
1895
- Generated synthetic samples
2531
+ AsyncHttpResponse[None]
1896
2532
  """
1897
2533
  _response = await self._client_wrapper.httpx_client.request(
1898
- f"v1/private/datasets/{jsonable_encoder(id)}/expansions",
1899
- method="POST",
2534
+ f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
2535
+ method="PATCH",
1900
2536
  json={
1901
- "model": model,
1902
- "sample_count": sample_count,
1903
- "preserve_fields": preserve_fields,
1904
- "variation_instructions": variation_instructions,
1905
- "custom_prompt": custom_prompt,
2537
+ "id": id,
2538
+ "trace_id": trace_id,
2539
+ "span_id": span_id,
2540
+ "source": source,
2541
+ "data": data,
2542
+ "tags": tags,
1906
2543
  },
1907
2544
  headers={
1908
2545
  "content-type": "application/json",
@@ -1912,50 +2549,48 @@ class AsyncRawDatasetsClient:
1912
2549
  )
1913
2550
  try:
1914
2551
  if 200 <= _response.status_code < 300:
1915
- _data = typing.cast(
1916
- DatasetExpansionResponse,
1917
- parse_obj_as(
1918
- type_=DatasetExpansionResponse, # type: ignore
1919
- object_=_response.json(),
2552
+ return AsyncHttpResponse(response=_response, data=None)
2553
+ if _response.status_code == 404:
2554
+ raise NotFoundError(
2555
+ headers=dict(_response.headers),
2556
+ body=typing.cast(
2557
+ typing.Optional[typing.Any],
2558
+ parse_obj_as(
2559
+ type_=typing.Optional[typing.Any], # type: ignore
2560
+ object_=_response.json(),
2561
+ ),
1920
2562
  ),
1921
2563
  )
1922
- return AsyncHttpResponse(response=_response, data=_data)
1923
2564
  _response_json = _response.json()
1924
2565
  except JSONDecodeError:
1925
2566
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1926
2567
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1927
2568
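A sketch of the new PATCH helper for a single dataset item. It assumes `datasets` is an AsyncRawDatasetsClient instance, that "sdk" is a valid DatasetItemWriteSource literal, and that the data payload shown is illustrative.

async def retag_item(datasets, item_id: str, tags):
    # Only the provided fields are updated; a missing item surfaces as NotFoundError (HTTP 404).
    await datasets.patch_dataset_item(
        item_id,
        source="sdk",  # assumed DatasetItemWriteSource value
        data={"input": "What is Opik?"},  # placeholder item payload
        tags=tags,
    )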
 
1928
- async def find_dataset_items_with_experiment_items(
2569
+ async def get_dataset_items(
1929
2570
  self,
1930
2571
  id: str,
1931
2572
  *,
1932
- experiment_ids: str,
1933
2573
  page: typing.Optional[int] = None,
1934
2574
  size: typing.Optional[int] = None,
2575
+ version: typing.Optional[str] = None,
1935
2576
  filters: typing.Optional[str] = None,
1936
- sorting: typing.Optional[str] = None,
1937
- search: typing.Optional[str] = None,
1938
2577
  truncate: typing.Optional[bool] = None,
1939
2578
  request_options: typing.Optional[RequestOptions] = None,
1940
- ) -> AsyncHttpResponse[DatasetItemPageCompare]:
2579
+ ) -> AsyncHttpResponse[DatasetItemPagePublic]:
1941
2580
  """
1942
- Find dataset items with experiment items
2581
+ Get dataset items
1943
2582
 
1944
2583
  Parameters
1945
2584
  ----------
1946
2585
  id : str
1947
2586
 
1948
- experiment_ids : str
1949
-
1950
2587
  page : typing.Optional[int]
1951
2588
 
1952
2589
  size : typing.Optional[int]
1953
2590
 
1954
- filters : typing.Optional[str]
1955
-
1956
- sorting : typing.Optional[str]
2591
+ version : typing.Optional[str]
1957
2592
 
1958
- search : typing.Optional[str]
2593
+ filters : typing.Optional[str]
1959
2594
 
1960
2595
  truncate : typing.Optional[bool]
1961
2596
 
@@ -1964,19 +2599,17 @@ class AsyncRawDatasetsClient:
1964
2599
 
1965
2600
  Returns
1966
2601
  -------
1967
- AsyncHttpResponse[DatasetItemPageCompare]
1968
- Dataset item resource
2602
+ AsyncHttpResponse[DatasetItemPagePublic]
2603
+ Dataset items resource
1969
2604
  """
1970
2605
  _response = await self._client_wrapper.httpx_client.request(
1971
- f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items",
2606
+ f"v1/private/datasets/{jsonable_encoder(id)}/items",
1972
2607
  method="GET",
1973
2608
  params={
1974
2609
  "page": page,
1975
2610
  "size": size,
1976
- "experiment_ids": experiment_ids,
2611
+ "version": version,
1977
2612
  "filters": filters,
1978
- "sorting": sorting,
1979
- "search": search,
1980
2613
  "truncate": truncate,
1981
2614
  },
1982
2615
  request_options=request_options,
@@ -1984,9 +2617,9 @@ class AsyncRawDatasetsClient:
1984
2617
  try:
1985
2618
  if 200 <= _response.status_code < 300:
1986
2619
  _data = typing.cast(
1987
- DatasetItemPageCompare,
2620
+ DatasetItemPagePublic,
1988
2621
  parse_obj_as(
1989
- type_=DatasetItemPageCompare, # type: ignore
2622
+ type_=DatasetItemPagePublic, # type: ignore
1990
2623
  object_=_response.json(),
1991
2624
  ),
1992
2625
  )
@@ -1996,42 +2629,44 @@ class AsyncRawDatasetsClient:
1996
2629
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
1997
2630
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
1998
2631
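The reworked get_dataset_items above adds a `version` query parameter. A sketch of reading a page of items pinned to a specific version follows, assuming `datasets` is an AsyncRawDatasetsClient instance; whether the parameter accepts tags as well as version hashes is an assumption.

async def items_at_version(datasets, dataset_id: str, version_ref: str):
    # Passing version=None would read the current draft instead.
    response = await datasets.get_dataset_items(
        dataset_id,
        page=1,
        size=100,
        version=version_ref,
    )
    return response.data  # DatasetItemPagePublic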
 
1999
- async def get_dataset_by_identifier(
2000
- self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
2001
- ) -> AsyncHttpResponse[DatasetPublic]:
2632
+ async def get_dataset_items_output_columns(
2633
+ self,
2634
+ id: str,
2635
+ *,
2636
+ experiment_ids: typing.Optional[str] = None,
2637
+ request_options: typing.Optional[RequestOptions] = None,
2638
+ ) -> AsyncHttpResponse[PageColumns]:
2002
2639
  """
2003
- Get dataset by name
2640
+ Get dataset items output columns
2004
2641
 
2005
2642
  Parameters
2006
2643
  ----------
2007
- dataset_name : str
2644
+ id : str
2645
+
2646
+ experiment_ids : typing.Optional[str]
2008
2647
 
2009
2648
  request_options : typing.Optional[RequestOptions]
2010
2649
  Request-specific configuration.
2011
2650
 
2012
2651
  Returns
2013
2652
  -------
2014
- AsyncHttpResponse[DatasetPublic]
2015
- Dataset resource
2653
+ AsyncHttpResponse[PageColumns]
2654
+ Dataset item output columns
2016
2655
  """
2017
2656
  _response = await self._client_wrapper.httpx_client.request(
2018
- "v1/private/datasets/retrieve",
2019
- method="POST",
2020
- json={
2021
- "dataset_name": dataset_name,
2022
- },
2023
- headers={
2024
- "content-type": "application/json",
2657
+ f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/output/columns",
2658
+ method="GET",
2659
+ params={
2660
+ "experiment_ids": experiment_ids,
2025
2661
  },
2026
2662
  request_options=request_options,
2027
- omit=OMIT,
2028
2663
  )
2029
2664
  try:
2030
2665
  if 200 <= _response.status_code < 300:
2031
2666
  _data = typing.cast(
2032
- DatasetPublic,
2667
+ PageColumns,
2033
2668
  parse_obj_as(
2034
- type_=DatasetPublic, # type: ignore
2669
+ type_=PageColumns, # type: ignore
2035
2670
  object_=_response.json(),
2036
2671
  ),
2037
2672
  )
@@ -2041,125 +2676,129 @@ class AsyncRawDatasetsClient:
2041
2676
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2042
2677
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2043
2678
 
2044
- async def get_dataset_experiment_items_stats(
2679
+ @contextlib.asynccontextmanager
2680
+ async def stream_dataset_items(
2045
2681
  self,
2046
- id: str,
2047
2682
  *,
2048
- experiment_ids: str,
2049
- filters: typing.Optional[str] = None,
2683
+ dataset_name: str,
2684
+ last_retrieved_id: typing.Optional[str] = OMIT,
2685
+ steam_limit: typing.Optional[int] = OMIT,
2050
2686
  request_options: typing.Optional[RequestOptions] = None,
2051
- ) -> AsyncHttpResponse[ProjectStatsPublic]:
2687
+ ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]:
2052
2688
  """
2053
- Get experiment items stats for dataset
2689
+ Stream dataset items
2054
2690
 
2055
2691
  Parameters
2056
2692
  ----------
2057
- id : str
2693
+ dataset_name : str
2058
2694
 
2059
- experiment_ids : str
2695
+ last_retrieved_id : typing.Optional[str]
2060
2696
 
2061
- filters : typing.Optional[str]
2697
+ steam_limit : typing.Optional[int]
2062
2698
 
2063
2699
  request_options : typing.Optional[RequestOptions]
2064
- Request-specific configuration.
2700
+ Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
2065
2701
 
2066
2702
  Returns
2067
2703
  -------
2068
- AsyncHttpResponse[ProjectStatsPublic]
2069
- Experiment items stats resource
2704
+ typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
2705
+ Dataset items stream, or an error raised during processing
2070
2706
  """
2071
- _response = await self._client_wrapper.httpx_client.request(
2072
- f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/stats",
2073
- method="GET",
2074
- params={
2075
- "experiment_ids": experiment_ids,
2076
- "filters": filters,
2707
+ async with self._client_wrapper.httpx_client.stream(
2708
+ "v1/private/datasets/items/stream",
2709
+ method="POST",
2710
+ json={
2711
+ "dataset_name": dataset_name,
2712
+ "last_retrieved_id": last_retrieved_id,
2713
+ "steam_limit": steam_limit,
2714
+ },
2715
+ headers={
2716
+ "content-type": "application/json",
2077
2717
  },
2078
2718
  request_options=request_options,
2079
- )
2080
- try:
2081
- if 200 <= _response.status_code < 300:
2082
- _data = typing.cast(
2083
- ProjectStatsPublic,
2084
- parse_obj_as(
2085
- type_=ProjectStatsPublic, # type: ignore
2086
- object_=_response.json(),
2087
- ),
2088
- )
2089
- return AsyncHttpResponse(response=_response, data=_data)
2090
- _response_json = _response.json()
2091
- except JSONDecodeError:
2092
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2093
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2719
+ omit=OMIT,
2720
+ ) as _response:
2721
+
2722
+ async def stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]:
2723
+ try:
2724
+ if 200 <= _response.status_code < 300:
2725
+ _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None
2726
+ return AsyncHttpResponse(
2727
+ response=_response,
2728
+ data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)),
2729
+ )
2730
+ await _response.aread()
2731
+ _response_json = _response.json()
2732
+ except JSONDecodeError:
2733
+ raise ApiError(
2734
+ status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
2735
+ )
2736
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2094
2737
 
2095
- async def get_dataset_item_by_id(
2096
- self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
2097
- ) -> AsyncHttpResponse[DatasetItemPublic]:
2738
+ yield await stream()
2739
+
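stream_dataset_items is an async context manager whose response data is an async iterator of raw bytes. The sketch below drains the stream into a buffer, assuming `datasets` is an AsyncRawDatasetsClient instance; note the keyword really is spelled steam_limit in the generated signature above.

async def download_items(datasets, dataset_name: str) -> bytes:
    # The body typically arrives as JSON lines; here we just collect the raw bytes.
    buffer = bytearray()
    async with datasets.stream_dataset_items(dataset_name=dataset_name, steam_limit=500) as response:
        async for chunk in response.data:
            buffer.extend(chunk)
    return bytes(buffer)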
2740
+ async def compare_dataset_versions(
2741
+ self, id: str, *, request_options: typing.Optional[RequestOptions] = None
2742
+ ) -> AsyncHttpResponse[DatasetVersionDiff]:
2098
2743
  """
2099
- Get dataset item by id
2744
+ Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
2100
2745
 
2101
2746
  Parameters
2102
2747
  ----------
2103
- item_id : str
2748
+ id : str
2104
2749
 
2105
2750
  request_options : typing.Optional[RequestOptions]
2106
2751
  Request-specific configuration.
2107
2752
 
2108
2753
  Returns
2109
2754
  -------
2110
- AsyncHttpResponse[DatasetItemPublic]
2111
- Dataset item resource
2755
+ AsyncHttpResponse[DatasetVersionDiff]
2756
+ Diff computed successfully
2112
2757
  """
2113
2758
  _response = await self._client_wrapper.httpx_client.request(
2114
- f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
2759
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
2115
2760
  method="GET",
2116
2761
  request_options=request_options,
2117
2762
  )
2118
2763
  try:
2119
2764
  if 200 <= _response.status_code < 300:
2120
2765
  _data = typing.cast(
2121
- DatasetItemPublic,
2766
+ DatasetVersionDiff,
2122
2767
  parse_obj_as(
2123
- type_=DatasetItemPublic, # type: ignore
2768
+ type_=DatasetVersionDiff, # type: ignore
2124
2769
  object_=_response.json(),
2125
2770
  ),
2126
2771
  )
2127
2772
  return AsyncHttpResponse(response=_response, data=_data)
2773
+ if _response.status_code == 404:
2774
+ raise NotFoundError(
2775
+ headers=dict(_response.headers),
2776
+ body=typing.cast(
2777
+ typing.Optional[typing.Any],
2778
+ parse_obj_as(
2779
+ type_=typing.Optional[typing.Any], # type: ignore
2780
+ object_=_response.json(),
2781
+ ),
2782
+ ),
2783
+ )
2128
2784
  _response_json = _response.json()
2129
2785
  except JSONDecodeError:
2130
2786
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2131
2787
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2132
2788
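A sketch of the new version-diff endpoint, assuming `datasets` is an AsyncRawDatasetsClient instance.

async def draft_changes(datasets, dataset_id: str):
    # Compares the latest committed version with the current draft; per the added
    # handler above, a 404 from the server is raised as NotFoundError.
    response = await datasets.compare_dataset_versions(dataset_id)
    return response.data  # DatasetVersionDiff with additions, modifications, deletions, unchanged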
 
2133
- async def patch_dataset_item(
2134
- self,
2135
- item_id: str,
2136
- *,
2137
- source: DatasetItemWriteSource,
2138
- data: JsonNode,
2139
- id: typing.Optional[str] = OMIT,
2140
- trace_id: typing.Optional[str] = OMIT,
2141
- span_id: typing.Optional[str] = OMIT,
2142
- tags: typing.Optional[typing.Sequence[str]] = OMIT,
2143
- request_options: typing.Optional[RequestOptions] = None,
2789
+ async def create_version_tag(
2790
+ self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
2144
2791
  ) -> AsyncHttpResponse[None]:
2145
2792
  """
2146
- Partially update dataset item by id. Only provided fields will be updated.
2793
+ Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')
2147
2794
 
2148
2795
  Parameters
2149
2796
  ----------
2150
- item_id : str
2151
-
2152
- source : DatasetItemWriteSource
2153
-
2154
- data : JsonNode
2155
-
2156
- id : typing.Optional[str]
2157
-
2158
- trace_id : typing.Optional[str]
2797
+ version_hash : str
2159
2798
 
2160
- span_id : typing.Optional[str]
2799
+ id : str
2161
2800
 
2162
- tags : typing.Optional[typing.Sequence[str]]
2801
+ tag : str
2163
2802
 
2164
2803
  request_options : typing.Optional[RequestOptions]
2165
2804
  Request-specific configuration.
@@ -2169,15 +2808,10 @@ class AsyncRawDatasetsClient:
2169
2808
  AsyncHttpResponse[None]
2170
2809
  """
2171
2810
  _response = await self._client_wrapper.httpx_client.request(
2172
- f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
2173
- method="PATCH",
2811
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
2812
+ method="POST",
2174
2813
  json={
2175
- "id": id,
2176
- "trace_id": trace_id,
2177
- "span_id": span_id,
2178
- "source": source,
2179
- "data": data,
2180
- "tags": tags,
2814
+ "tag": tag,
2181
2815
  },
2182
2816
  headers={
2183
2817
  "content-type": "application/json",
@@ -2188,6 +2822,17 @@ class AsyncRawDatasetsClient:
2188
2822
  try:
2189
2823
  if 200 <= _response.status_code < 300:
2190
2824
  return AsyncHttpResponse(response=_response, data=None)
2825
+ if _response.status_code == 400:
2826
+ raise BadRequestError(
2827
+ headers=dict(_response.headers),
2828
+ body=typing.cast(
2829
+ typing.Optional[typing.Any],
2830
+ parse_obj_as(
2831
+ type_=typing.Optional[typing.Any], # type: ignore
2832
+ object_=_response.json(),
2833
+ ),
2834
+ ),
2835
+ )
2191
2836
  if _response.status_code == 404:
2192
2837
  raise NotFoundError(
2193
2838
  headers=dict(_response.headers),
@@ -2199,23 +2844,32 @@ class AsyncRawDatasetsClient:
2199
2844
  ),
2200
2845
  ),
2201
2846
  )
2847
+ if _response.status_code == 409:
2848
+ raise ConflictError(
2849
+ headers=dict(_response.headers),
2850
+ body=typing.cast(
2851
+ typing.Optional[typing.Any],
2852
+ parse_obj_as(
2853
+ type_=typing.Optional[typing.Any], # type: ignore
2854
+ object_=_response.json(),
2855
+ ),
2856
+ ),
2857
+ )
2202
2858
  _response_json = _response.json()
2203
2859
  except JSONDecodeError:
2204
2860
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2205
2861
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2206
2862
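A sketch of tagging a version, assuming `datasets` is an AsyncRawDatasetsClient instance; the tag value is taken from the docstring's examples.

async def mark_baseline(datasets, dataset_id: str, version_hash: str) -> None:
    # Positional order follows the signature above: version_hash first, then the dataset id.
    # 400/404/409 responses are raised as BadRequestError / NotFoundError / ConflictError.
    await datasets.create_version_tag(version_hash, dataset_id, tag="baseline")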
 
2207
- async def get_dataset_items(
2863
+ async def list_dataset_versions(
2208
2864
  self,
2209
2865
  id: str,
2210
2866
  *,
2211
2867
  page: typing.Optional[int] = None,
2212
2868
  size: typing.Optional[int] = None,
2213
- filters: typing.Optional[str] = None,
2214
- truncate: typing.Optional[bool] = None,
2215
2869
  request_options: typing.Optional[RequestOptions] = None,
2216
- ) -> AsyncHttpResponse[DatasetItemPagePublic]:
2870
+ ) -> AsyncHttpResponse[DatasetVersionPagePublic]:
2217
2871
  """
2218
- Get dataset items
2872
+ Get paginated list of versions for a dataset, ordered by creation time (newest first)
2219
2873
 
2220
2874
  Parameters
2221
2875
  ----------
@@ -2225,148 +2879,214 @@ class AsyncRawDatasetsClient:
2225
2879
 
2226
2880
  size : typing.Optional[int]
2227
2881
 
2228
- filters : typing.Optional[str]
2229
-
2230
- truncate : typing.Optional[bool]
2231
-
2232
2882
  request_options : typing.Optional[RequestOptions]
2233
2883
  Request-specific configuration.
2234
2884
 
2235
2885
  Returns
2236
2886
  -------
2237
- AsyncHttpResponse[DatasetItemPagePublic]
2238
- Dataset items resource
2887
+ AsyncHttpResponse[DatasetVersionPagePublic]
2888
+ Dataset versions
2239
2889
  """
2240
2890
  _response = await self._client_wrapper.httpx_client.request(
2241
- f"v1/private/datasets/{jsonable_encoder(id)}/items",
2891
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
2242
2892
  method="GET",
2243
2893
  params={
2244
2894
  "page": page,
2245
2895
  "size": size,
2246
- "filters": filters,
2247
- "truncate": truncate,
2248
2896
  },
2249
2897
  request_options=request_options,
2250
2898
  )
2251
2899
  try:
2252
2900
  if 200 <= _response.status_code < 300:
2253
2901
  _data = typing.cast(
2254
- DatasetItemPagePublic,
2902
+ DatasetVersionPagePublic,
2255
2903
  parse_obj_as(
2256
- type_=DatasetItemPagePublic, # type: ignore
2904
+ type_=DatasetVersionPagePublic, # type: ignore
2257
2905
  object_=_response.json(),
2258
2906
  ),
2259
2907
  )
2260
2908
  return AsyncHttpResponse(response=_response, data=_data)
2909
+ if _response.status_code == 400:
2910
+ raise BadRequestError(
2911
+ headers=dict(_response.headers),
2912
+ body=typing.cast(
2913
+ typing.Optional[typing.Any],
2914
+ parse_obj_as(
2915
+ type_=typing.Optional[typing.Any], # type: ignore
2916
+ object_=_response.json(),
2917
+ ),
2918
+ ),
2919
+ )
2261
2920
  _response_json = _response.json()
2262
2921
  except JSONDecodeError:
2263
2922
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2264
2923
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2265
2924
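A sketch of paging through a dataset's versions, assuming `datasets` is an AsyncRawDatasetsClient instance.

async def latest_versions(datasets, dataset_id: str):
    # Versions are returned newest first, per the docstring above.
    response = await datasets.list_dataset_versions(dataset_id, page=1, size=10)
    return response.data  # DatasetVersionPagePublic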
 
2266
- async def get_dataset_items_output_columns(
2925
+ async def create_dataset_version(
2267
2926
  self,
2268
2927
  id: str,
2269
2928
  *,
2270
- experiment_ids: typing.Optional[str] = None,
2929
+ tag: typing.Optional[str] = OMIT,
2930
+ change_description: typing.Optional[str] = OMIT,
2931
+ metadata: typing.Optional[typing.Dict[str, str]] = OMIT,
2271
2932
  request_options: typing.Optional[RequestOptions] = None,
2272
- ) -> AsyncHttpResponse[PageColumns]:
2933
+ ) -> AsyncHttpResponse[None]:
2273
2934
  """
2274
- Get dataset items output columns
2935
+ Create a new immutable version of the dataset by snapshotting the current state
2275
2936
 
2276
2937
  Parameters
2277
2938
  ----------
2278
2939
  id : str
2279
2940
 
2280
- experiment_ids : typing.Optional[str]
2941
+ tag : typing.Optional[str]
2942
+ Optional tag for this version
2943
+
2944
+ change_description : typing.Optional[str]
2945
+ Optional description of changes in this version
2946
+
2947
+ metadata : typing.Optional[typing.Dict[str, str]]
2948
+ Optional user-defined metadata
2281
2949
 
2282
2950
  request_options : typing.Optional[RequestOptions]
2283
2951
  Request-specific configuration.
2284
2952
 
2285
2953
  Returns
2286
2954
  -------
2287
- AsyncHttpResponse[PageColumns]
2288
- Dataset item output columns
2955
+ AsyncHttpResponse[None]
2289
2956
  """
2290
2957
  _response = await self._client_wrapper.httpx_client.request(
2291
- f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/output/columns",
2292
- method="GET",
2293
- params={
2294
- "experiment_ids": experiment_ids,
2958
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions",
2959
+ method="POST",
2960
+ json={
2961
+ "tag": tag,
2962
+ "change_description": change_description,
2963
+ "metadata": metadata,
2964
+ },
2965
+ headers={
2966
+ "content-type": "application/json",
2295
2967
  },
2296
2968
  request_options=request_options,
2969
+ omit=OMIT,
2297
2970
  )
2298
2971
  try:
2299
2972
  if 200 <= _response.status_code < 300:
2300
- _data = typing.cast(
2301
- PageColumns,
2302
- parse_obj_as(
2303
- type_=PageColumns, # type: ignore
2304
- object_=_response.json(),
2973
+ return AsyncHttpResponse(response=_response, data=None)
2974
+ if _response.status_code == 400:
2975
+ raise BadRequestError(
2976
+ headers=dict(_response.headers),
2977
+ body=typing.cast(
2978
+ typing.Optional[typing.Any],
2979
+ parse_obj_as(
2980
+ type_=typing.Optional[typing.Any], # type: ignore
2981
+ object_=_response.json(),
2982
+ ),
2983
+ ),
2984
+ )
2985
+ if _response.status_code == 409:
2986
+ raise ConflictError(
2987
+ headers=dict(_response.headers),
2988
+ body=typing.cast(
2989
+ typing.Optional[typing.Any],
2990
+ parse_obj_as(
2991
+ type_=typing.Optional[typing.Any], # type: ignore
2992
+ object_=_response.json(),
2993
+ ),
2305
2994
  ),
2306
2995
  )
2307
- return AsyncHttpResponse(response=_response, data=_data)
2308
2996
  _response_json = _response.json()
2309
2997
  except JSONDecodeError:
2310
2998
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
2311
2999
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2312
3000
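A sketch of committing the current draft as an immutable version, assuming `datasets` is an AsyncRawDatasetsClient instance; the tag, description, and metadata values are placeholders.

async def commit_snapshot(datasets, dataset_id: str) -> None:
    # Snapshots the current draft; 400 and 409 responses are raised as
    # BadRequestError and ConflictError, per the handlers added above.
    await datasets.create_dataset_version(
        dataset_id,
        tag="v1.0",
        change_description="Nightly refresh",
        metadata={"source": "cron"},
    )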
 
2313
- @contextlib.asynccontextmanager
2314
- async def stream_dataset_items(
2315
- self,
2316
- *,
2317
- dataset_name: str,
2318
- last_retrieved_id: typing.Optional[str] = OMIT,
2319
- steam_limit: typing.Optional[int] = OMIT,
2320
- request_options: typing.Optional[RequestOptions] = None,
2321
- ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]:
3001
+ async def delete_version_tag(
3002
+ self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
3003
+ ) -> AsyncHttpResponse[None]:
2322
3004
  """
2323
- Stream dataset items
3005
+ Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.
2324
3006
 
2325
3007
  Parameters
2326
3008
  ----------
2327
- dataset_name : str
3009
+ version_hash : str
2328
3010
 
2329
- last_retrieved_id : typing.Optional[str]
3011
+ tag : str
2330
3012
 
2331
- steam_limit : typing.Optional[int]
3013
+ id : str
2332
3014
 
2333
3015
  request_options : typing.Optional[RequestOptions]
2334
- Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
3016
+ Request-specific configuration.
2335
3017
 
2336
3018
  Returns
2337
3019
  -------
2338
- typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
2339
- Dataset items stream or error during process
3020
+ AsyncHttpResponse[None]
2340
3021
  """
2341
- async with self._client_wrapper.httpx_client.stream(
2342
- "v1/private/datasets/items/stream",
3022
+ _response = await self._client_wrapper.httpx_client.request(
3023
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
3024
+ method="DELETE",
3025
+ request_options=request_options,
3026
+ )
3027
+ try:
3028
+ if 200 <= _response.status_code < 300:
3029
+ return AsyncHttpResponse(response=_response, data=None)
3030
+ _response_json = _response.json()
3031
+ except JSONDecodeError:
3032
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
3033
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
3034
+
3035
+ async def restore_dataset_version(
3036
+ self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
3037
+ ) -> AsyncHttpResponse[DatasetVersionPublic]:
3038
+ """
3039
+ Restores the dataset to a previous version state. All draft items are replaced with items from the specified version. If the version is not the latest, a new version snapshot is created. If the version is the latest, only draft items are replaced (revert functionality).
3040
+
3041
+ Parameters
3042
+ ----------
3043
+ id : str
3044
+
3045
+ version_ref : str
3046
+ Version hash or tag to restore from
3047
+
3048
+ request_options : typing.Optional[RequestOptions]
3049
+ Request-specific configuration.
3050
+
3051
+ Returns
3052
+ -------
3053
+ AsyncHttpResponse[DatasetVersionPublic]
3054
+ Version restored successfully
3055
+ """
3056
+ _response = await self._client_wrapper.httpx_client.request(
3057
+ f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
2343
3058
  method="POST",
2344
3059
  json={
2345
- "dataset_name": dataset_name,
2346
- "last_retrieved_id": last_retrieved_id,
2347
- "steam_limit": steam_limit,
3060
+ "version_ref": version_ref,
2348
3061
  },
2349
3062
  headers={
2350
3063
  "content-type": "application/json",
2351
3064
  },
2352
3065
  request_options=request_options,
2353
3066
  omit=OMIT,
2354
- ) as _response:
2355
-
2356
- async def stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]:
2357
- try:
2358
- if 200 <= _response.status_code < 300:
2359
- _chunk_size = request_options.get("chunk_size", None) if request_options is not None else None
2360
- return AsyncHttpResponse(
2361
- response=_response,
2362
- data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)),
2363
- )
2364
- await _response.aread()
2365
- _response_json = _response.json()
2366
- except JSONDecodeError:
2367
- raise ApiError(
2368
- status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
2369
- )
2370
- raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
2371
-
2372
- yield await stream()
3067
+ )
3068
+ try:
3069
+ if 200 <= _response.status_code < 300:
3070
+ _data = typing.cast(
3071
+ DatasetVersionPublic,
3072
+ parse_obj_as(
3073
+ type_=DatasetVersionPublic, # type: ignore
3074
+ object_=_response.json(),
3075
+ ),
3076
+ )
3077
+ return AsyncHttpResponse(response=_response, data=_data)
3078
+ if _response.status_code == 404:
3079
+ raise NotFoundError(
3080
+ headers=dict(_response.headers),
3081
+ body=typing.cast(
3082
+ typing.Optional[typing.Any],
3083
+ parse_obj_as(
3084
+ type_=typing.Optional[typing.Any], # type: ignore
3085
+ object_=_response.json(),
3086
+ ),
3087
+ ),
3088
+ )
3089
+ _response_json = _response.json()
3090
+ except JSONDecodeError:
3091
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
3092
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
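
A closing sketch of the restore endpoint, assuming `datasets` is an AsyncRawDatasetsClient instance.

async def roll_back(datasets, dataset_id: str, version_ref: str):
    # version_ref may be a version hash or a tag, per the docstring above; restoring a
    # non-latest version creates a new snapshot, while restoring the latest only resets the draft.
    response = await datasets.restore_dataset_version(dataset_id, version_ref=version_ref)
    return response.data  # DatasetVersionPublic describing the restored version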