opik 1.9.5__py3-none-any.whl → 1.9.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opik/__init__.py +10 -3
- opik/anonymizer/__init__.py +5 -0
- opik/anonymizer/anonymizer.py +12 -0
- opik/anonymizer/factory.py +80 -0
- opik/anonymizer/recursive_anonymizer.py +64 -0
- opik/anonymizer/rules.py +56 -0
- opik/anonymizer/rules_anonymizer.py +35 -0
- opik/api_objects/dataset/rest_operations.py +5 -0
- opik/api_objects/experiment/experiment.py +46 -49
- opik/api_objects/experiment/helpers.py +34 -10
- opik/api_objects/local_recording.py +8 -3
- opik/api_objects/opik_client.py +230 -48
- opik/api_objects/opik_query_language.py +9 -0
- opik/api_objects/prompt/__init__.py +11 -3
- opik/api_objects/prompt/base_prompt.py +69 -0
- opik/api_objects/prompt/base_prompt_template.py +29 -0
- opik/api_objects/prompt/chat/__init__.py +1 -0
- opik/api_objects/prompt/chat/chat_prompt.py +193 -0
- opik/api_objects/prompt/chat/chat_prompt_template.py +350 -0
- opik/api_objects/prompt/{chat_content_renderer_registry.py → chat/content_renderer_registry.py} +37 -35
- opik/api_objects/prompt/client.py +101 -30
- opik/api_objects/prompt/text/__init__.py +1 -0
- opik/api_objects/prompt/text/prompt.py +174 -0
- opik/api_objects/prompt/{prompt_template.py → text/prompt_template.py} +10 -6
- opik/api_objects/prompt/types.py +1 -1
- opik/cli/export.py +6 -2
- opik/cli/usage_report/charts.py +39 -10
- opik/cli/usage_report/cli.py +164 -45
- opik/cli/usage_report/pdf.py +14 -1
- opik/config.py +0 -5
- opik/decorator/base_track_decorator.py +37 -40
- opik/decorator/context_manager/span_context_manager.py +9 -0
- opik/decorator/context_manager/trace_context_manager.py +5 -0
- opik/dict_utils.py +3 -3
- opik/evaluation/__init__.py +13 -2
- opik/evaluation/engine/engine.py +195 -223
- opik/evaluation/engine/helpers.py +8 -7
- opik/evaluation/engine/metrics_evaluator.py +237 -0
- opik/evaluation/evaluation_result.py +35 -1
- opik/evaluation/evaluator.py +318 -30
- opik/evaluation/models/litellm/util.py +78 -6
- opik/evaluation/models/model_capabilities.py +33 -0
- opik/evaluation/report.py +14 -2
- opik/evaluation/rest_operations.py +36 -33
- opik/evaluation/test_case.py +2 -2
- opik/evaluation/types.py +9 -1
- opik/exceptions.py +17 -0
- opik/hooks/__init__.py +17 -1
- opik/hooks/anonymizer_hook.py +36 -0
- opik/id_helpers.py +18 -0
- opik/integrations/adk/helpers.py +16 -7
- opik/integrations/adk/legacy_opik_tracer.py +7 -4
- opik/integrations/adk/opik_tracer.py +3 -1
- opik/integrations/adk/patchers/adk_otel_tracer/opik_adk_otel_tracer.py +7 -3
- opik/integrations/adk/recursive_callback_injector.py +1 -6
- opik/integrations/dspy/callback.py +1 -4
- opik/integrations/haystack/opik_connector.py +2 -2
- opik/integrations/haystack/opik_tracer.py +2 -4
- opik/integrations/langchain/opik_tracer.py +273 -82
- opik/integrations/llama_index/callback.py +110 -108
- opik/integrations/openai/agents/opik_tracing_processor.py +1 -2
- opik/integrations/openai/opik_tracker.py +1 -1
- opik/message_processing/batching/batchers.py +11 -7
- opik/message_processing/encoder_helpers.py +79 -0
- opik/message_processing/messages.py +25 -1
- opik/message_processing/online_message_processor.py +23 -8
- opik/opik_context.py +7 -7
- opik/rest_api/__init__.py +188 -12
- opik/rest_api/client.py +3 -0
- opik/rest_api/dashboards/__init__.py +4 -0
- opik/rest_api/dashboards/client.py +462 -0
- opik/rest_api/dashboards/raw_client.py +648 -0
- opik/rest_api/datasets/client.py +893 -89
- opik/rest_api/datasets/raw_client.py +1328 -87
- opik/rest_api/experiments/client.py +30 -2
- opik/rest_api/experiments/raw_client.py +26 -0
- opik/rest_api/feedback_definitions/types/find_feedback_definitions_request_type.py +1 -1
- opik/rest_api/optimizations/client.py +302 -0
- opik/rest_api/optimizations/raw_client.py +463 -0
- opik/rest_api/optimizations/types/optimization_update_status.py +3 -1
- opik/rest_api/prompts/__init__.py +2 -2
- opik/rest_api/prompts/client.py +34 -4
- opik/rest_api/prompts/raw_client.py +32 -2
- opik/rest_api/prompts/types/__init__.py +3 -1
- opik/rest_api/prompts/types/create_prompt_version_detail_template_structure.py +5 -0
- opik/rest_api/prompts/types/prompt_write_template_structure.py +5 -0
- opik/rest_api/spans/__init__.py +0 -2
- opik/rest_api/spans/client.py +148 -64
- opik/rest_api/spans/raw_client.py +210 -83
- opik/rest_api/spans/types/__init__.py +0 -2
- opik/rest_api/traces/client.py +241 -73
- opik/rest_api/traces/raw_client.py +344 -90
- opik/rest_api/types/__init__.py +200 -15
- opik/rest_api/types/aggregation_data.py +1 -0
- opik/rest_api/types/alert_trigger_config_public_type.py +6 -1
- opik/rest_api/types/alert_trigger_config_type.py +6 -1
- opik/rest_api/types/alert_trigger_config_write_type.py +6 -1
- opik/rest_api/types/automation_rule_evaluator.py +23 -1
- opik/rest_api/types/automation_rule_evaluator_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_llm_as_judge_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_llm_as_judge_write.py +2 -0
- opik/rest_api/types/{automation_rule_evaluator_object_public.py → automation_rule_evaluator_object_object_public.py} +32 -10
- opik/rest_api/types/automation_rule_evaluator_page_public.py +2 -2
- opik/rest_api/types/automation_rule_evaluator_public.py +23 -1
- opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_public.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_write.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update.py +23 -1
- opik/rest_api/types/automation_rule_evaluator_update_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update_span_llm_as_judge.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_update_trace_thread_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update_trace_thread_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_write.py +23 -1
- opik/rest_api/types/boolean_feedback_definition.py +25 -0
- opik/rest_api/types/boolean_feedback_definition_create.py +20 -0
- opik/rest_api/types/boolean_feedback_definition_public.py +25 -0
- opik/rest_api/types/boolean_feedback_definition_update.py +20 -0
- opik/rest_api/types/boolean_feedback_detail.py +29 -0
- opik/rest_api/types/boolean_feedback_detail_create.py +29 -0
- opik/rest_api/types/boolean_feedback_detail_public.py +29 -0
- opik/rest_api/types/boolean_feedback_detail_update.py +29 -0
- opik/rest_api/types/dashboard_page_public.py +24 -0
- opik/rest_api/types/dashboard_public.py +30 -0
- opik/rest_api/types/dataset.py +2 -0
- opik/rest_api/types/dataset_item.py +2 -0
- opik/rest_api/types/dataset_item_compare.py +2 -0
- opik/rest_api/types/dataset_item_filter.py +23 -0
- opik/rest_api/types/dataset_item_filter_operator.py +21 -0
- opik/rest_api/types/dataset_item_page_compare.py +1 -0
- opik/rest_api/types/dataset_item_page_public.py +1 -0
- opik/rest_api/types/dataset_item_public.py +2 -0
- opik/rest_api/types/dataset_item_update.py +39 -0
- opik/rest_api/types/dataset_item_write.py +1 -0
- opik/rest_api/types/dataset_public.py +2 -0
- opik/rest_api/types/dataset_public_status.py +5 -0
- opik/rest_api/types/dataset_status.py +5 -0
- opik/rest_api/types/dataset_version_diff.py +22 -0
- opik/rest_api/types/dataset_version_diff_stats.py +24 -0
- opik/rest_api/types/dataset_version_page_public.py +23 -0
- opik/rest_api/types/dataset_version_public.py +49 -0
- opik/rest_api/types/experiment.py +2 -0
- opik/rest_api/types/experiment_public.py +2 -0
- opik/rest_api/types/experiment_score.py +20 -0
- opik/rest_api/types/experiment_score_public.py +20 -0
- opik/rest_api/types/experiment_score_write.py +20 -0
- opik/rest_api/types/feedback.py +20 -1
- opik/rest_api/types/feedback_create.py +16 -1
- opik/rest_api/types/feedback_object_public.py +22 -1
- opik/rest_api/types/feedback_public.py +20 -1
- opik/rest_api/types/feedback_score_public.py +4 -0
- opik/rest_api/types/feedback_update.py +16 -1
- opik/rest_api/types/image_url.py +20 -0
- opik/rest_api/types/image_url_public.py +20 -0
- opik/rest_api/types/image_url_write.py +20 -0
- opik/rest_api/types/llm_as_judge_message.py +5 -1
- opik/rest_api/types/llm_as_judge_message_content.py +24 -0
- opik/rest_api/types/llm_as_judge_message_content_public.py +24 -0
- opik/rest_api/types/llm_as_judge_message_content_write.py +24 -0
- opik/rest_api/types/llm_as_judge_message_public.py +5 -1
- opik/rest_api/types/llm_as_judge_message_write.py +5 -1
- opik/rest_api/types/llm_as_judge_model_parameters.py +2 -0
- opik/rest_api/types/llm_as_judge_model_parameters_public.py +2 -0
- opik/rest_api/types/llm_as_judge_model_parameters_write.py +2 -0
- opik/rest_api/types/optimization.py +2 -0
- opik/rest_api/types/optimization_public.py +2 -0
- opik/rest_api/types/optimization_public_status.py +3 -1
- opik/rest_api/types/optimization_status.py +3 -1
- opik/rest_api/types/optimization_studio_config.py +27 -0
- opik/rest_api/types/optimization_studio_config_public.py +27 -0
- opik/rest_api/types/optimization_studio_config_write.py +27 -0
- opik/rest_api/types/optimization_studio_log.py +22 -0
- opik/rest_api/types/optimization_write.py +2 -0
- opik/rest_api/types/optimization_write_status.py +3 -1
- opik/rest_api/types/prompt.py +6 -0
- opik/rest_api/types/prompt_detail.py +6 -0
- opik/rest_api/types/prompt_detail_template_structure.py +5 -0
- opik/rest_api/types/prompt_public.py +6 -0
- opik/rest_api/types/prompt_public_template_structure.py +5 -0
- opik/rest_api/types/prompt_template_structure.py +5 -0
- opik/rest_api/types/prompt_version.py +2 -0
- opik/rest_api/types/prompt_version_detail.py +2 -0
- opik/rest_api/types/prompt_version_detail_template_structure.py +5 -0
- opik/rest_api/types/prompt_version_public.py +2 -0
- opik/rest_api/types/prompt_version_public_template_structure.py +5 -0
- opik/rest_api/types/prompt_version_template_structure.py +5 -0
- opik/rest_api/types/score_name.py +1 -0
- opik/rest_api/types/service_toggles_config.py +6 -0
- opik/rest_api/types/span_enrichment_options.py +31 -0
- opik/rest_api/types/span_filter.py +23 -0
- opik/rest_api/types/span_filter_operator.py +21 -0
- opik/rest_api/types/span_filter_write.py +23 -0
- opik/rest_api/types/span_filter_write_operator.py +21 -0
- opik/rest_api/types/span_llm_as_judge_code.py +27 -0
- opik/rest_api/types/span_llm_as_judge_code_public.py +27 -0
- opik/rest_api/types/span_llm_as_judge_code_write.py +27 -0
- opik/rest_api/types/span_update.py +46 -0
- opik/rest_api/types/studio_evaluation.py +20 -0
- opik/rest_api/types/studio_evaluation_public.py +20 -0
- opik/rest_api/types/studio_evaluation_write.py +20 -0
- opik/rest_api/types/studio_llm_model.py +21 -0
- opik/rest_api/types/studio_llm_model_public.py +21 -0
- opik/rest_api/types/studio_llm_model_write.py +21 -0
- opik/rest_api/types/studio_message.py +20 -0
- opik/rest_api/types/studio_message_public.py +20 -0
- opik/rest_api/types/studio_message_write.py +20 -0
- opik/rest_api/types/studio_metric.py +21 -0
- opik/rest_api/types/studio_metric_public.py +21 -0
- opik/rest_api/types/studio_metric_write.py +21 -0
- opik/rest_api/types/studio_optimizer.py +21 -0
- opik/rest_api/types/studio_optimizer_public.py +21 -0
- opik/rest_api/types/studio_optimizer_write.py +21 -0
- opik/rest_api/types/studio_prompt.py +20 -0
- opik/rest_api/types/studio_prompt_public.py +20 -0
- opik/rest_api/types/studio_prompt_write.py +20 -0
- opik/rest_api/types/trace.py +6 -0
- opik/rest_api/types/trace_public.py +6 -0
- opik/rest_api/types/trace_thread_filter_write.py +23 -0
- opik/rest_api/types/trace_thread_filter_write_operator.py +21 -0
- opik/rest_api/types/trace_thread_update.py +19 -0
- opik/rest_api/types/trace_update.py +39 -0
- opik/rest_api/types/value_entry.py +2 -0
- opik/rest_api/types/value_entry_compare.py +2 -0
- opik/rest_api/types/value_entry_experiment_item_bulk_write_view.py +2 -0
- opik/rest_api/types/value_entry_public.py +2 -0
- opik/rest_api/types/video_url.py +19 -0
- opik/rest_api/types/video_url_public.py +19 -0
- opik/rest_api/types/video_url_write.py +19 -0
- opik/synchronization.py +5 -6
- opik/{decorator/tracing_runtime_config.py → tracing_runtime_config.py} +6 -7
- {opik-1.9.5.dist-info → opik-1.9.39.dist-info}/METADATA +5 -4
- {opik-1.9.5.dist-info → opik-1.9.39.dist-info}/RECORD +246 -151
- opik/api_objects/prompt/chat_prompt_template.py +0 -164
- opik/api_objects/prompt/prompt.py +0 -131
- /opik/rest_api/{spans/types → types}/span_update_type.py +0 -0
- {opik-1.9.5.dist-info → opik-1.9.39.dist-info}/WHEEL +0 -0
- {opik-1.9.5.dist-info → opik-1.9.39.dist-info}/entry_points.txt +0 -0
- {opik-1.9.5.dist-info → opik-1.9.39.dist-info}/licenses/LICENSE +0 -0
- {opik-1.9.5.dist-info → opik-1.9.39.dist-info}/top_level.txt +0 -0
|
@@ -11,15 +11,26 @@ from ..core.jsonable_encoder import jsonable_encoder
|
|
|
11
11
|
from ..core.pydantic_utilities import parse_obj_as
|
|
12
12
|
from ..core.request_options import RequestOptions
|
|
13
13
|
from ..core.serialization import convert_and_respect_annotation_metadata
|
|
14
|
+
from ..errors.bad_request_error import BadRequestError
|
|
15
|
+
from ..errors.conflict_error import ConflictError
|
|
16
|
+
from ..errors.not_found_error import NotFoundError
|
|
14
17
|
from ..types.dataset_expansion_response import DatasetExpansionResponse
|
|
18
|
+
from ..types.dataset_item_filter import DatasetItemFilter
|
|
15
19
|
from ..types.dataset_item_page_compare import DatasetItemPageCompare
|
|
16
20
|
from ..types.dataset_item_page_public import DatasetItemPagePublic
|
|
17
21
|
from ..types.dataset_item_public import DatasetItemPublic
|
|
22
|
+
from ..types.dataset_item_update import DatasetItemUpdate
|
|
18
23
|
from ..types.dataset_item_write import DatasetItemWrite
|
|
24
|
+
from ..types.dataset_item_write_source import DatasetItemWriteSource
|
|
19
25
|
from ..types.dataset_page_public import DatasetPagePublic
|
|
20
26
|
from ..types.dataset_public import DatasetPublic
|
|
27
|
+
from ..types.dataset_version_diff import DatasetVersionDiff
|
|
28
|
+
from ..types.dataset_version_page_public import DatasetVersionPagePublic
|
|
29
|
+
from ..types.dataset_version_public import DatasetVersionPublic
|
|
30
|
+
from ..types.json_node import JsonNode
|
|
21
31
|
from ..types.page_columns import PageColumns
|
|
22
32
|
from ..types.project_stats_public import ProjectStatsPublic
|
|
33
|
+
from ..types.span_enrichment_options import SpanEnrichmentOptions
|
|
23
34
|
from ..types.trace_enrichment_options import TraceEnrichmentOptions
|
|
24
35
|
from .types.dataset_update_visibility import DatasetUpdateVisibility
|
|
25
36
|
from .types.dataset_write_visibility import DatasetWriteVisibility
|
|
@@ -32,6 +43,75 @@ class RawDatasetsClient:
|
|
|
32
43
|
def __init__(self, *, client_wrapper: SyncClientWrapper):
|
|
33
44
|
self._client_wrapper = client_wrapper
|
|
34
45
|
|
|
46
|
+
def batch_update_dataset_items(
|
|
47
|
+
self,
|
|
48
|
+
*,
|
|
49
|
+
update: DatasetItemUpdate,
|
|
50
|
+
ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
51
|
+
filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
|
|
52
|
+
merge_tags: typing.Optional[bool] = OMIT,
|
|
53
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
54
|
+
) -> HttpResponse[None]:
|
|
55
|
+
"""
|
|
56
|
+
Update multiple dataset items
|
|
57
|
+
|
|
58
|
+
Parameters
|
|
59
|
+
----------
|
|
60
|
+
update : DatasetItemUpdate
|
|
61
|
+
|
|
62
|
+
ids : typing.Optional[typing.Sequence[str]]
|
|
63
|
+
List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
|
|
64
|
+
|
|
65
|
+
filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
|
|
66
|
+
|
|
67
|
+
merge_tags : typing.Optional[bool]
|
|
68
|
+
If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
|
|
69
|
+
|
|
70
|
+
request_options : typing.Optional[RequestOptions]
|
|
71
|
+
Request-specific configuration.
|
|
72
|
+
|
|
73
|
+
Returns
|
|
74
|
+
-------
|
|
75
|
+
HttpResponse[None]
|
|
76
|
+
"""
|
|
77
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
78
|
+
"v1/private/datasets/items/batch",
|
|
79
|
+
method="PATCH",
|
|
80
|
+
json={
|
|
81
|
+
"ids": ids,
|
|
82
|
+
"filters": convert_and_respect_annotation_metadata(
|
|
83
|
+
object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
|
|
84
|
+
),
|
|
85
|
+
"update": convert_and_respect_annotation_metadata(
|
|
86
|
+
object_=update, annotation=DatasetItemUpdate, direction="write"
|
|
87
|
+
),
|
|
88
|
+
"merge_tags": merge_tags,
|
|
89
|
+
},
|
|
90
|
+
headers={
|
|
91
|
+
"content-type": "application/json",
|
|
92
|
+
},
|
|
93
|
+
request_options=request_options,
|
|
94
|
+
omit=OMIT,
|
|
95
|
+
)
|
|
96
|
+
try:
|
|
97
|
+
if 200 <= _response.status_code < 300:
|
|
98
|
+
return HttpResponse(response=_response, data=None)
|
|
99
|
+
if _response.status_code == 400:
|
|
100
|
+
raise BadRequestError(
|
|
101
|
+
headers=dict(_response.headers),
|
|
102
|
+
body=typing.cast(
|
|
103
|
+
typing.Optional[typing.Any],
|
|
104
|
+
parse_obj_as(
|
|
105
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
106
|
+
object_=_response.json(),
|
|
107
|
+
),
|
|
108
|
+
),
|
|
109
|
+
)
|
|
110
|
+
_response_json = _response.json()
|
|
111
|
+
except JSONDecodeError:
|
|
112
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
113
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
114
|
+
|
|
35
115
|
def find_datasets(
|
|
36
116
|
self,
|
|
37
117
|
*,
|
|
@@ -212,6 +292,120 @@ class RawDatasetsClient:
|
|
|
212
292
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
213
293
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
214
294
|
|
|
295
|
+
def create_dataset_items_from_csv(
|
|
296
|
+
self,
|
|
297
|
+
*,
|
|
298
|
+
file: typing.Dict[str, typing.Optional[typing.Any]],
|
|
299
|
+
dataset_id: str,
|
|
300
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
301
|
+
) -> HttpResponse[None]:
|
|
302
|
+
"""
|
|
303
|
+
Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.
|
|
304
|
+
|
|
305
|
+
Parameters
|
|
306
|
+
----------
|
|
307
|
+
file : typing.Dict[str, typing.Optional[typing.Any]]
|
|
308
|
+
|
|
309
|
+
dataset_id : str
|
|
310
|
+
|
|
311
|
+
request_options : typing.Optional[RequestOptions]
|
|
312
|
+
Request-specific configuration.
|
|
313
|
+
|
|
314
|
+
Returns
|
|
315
|
+
-------
|
|
316
|
+
HttpResponse[None]
|
|
317
|
+
"""
|
|
318
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
319
|
+
"v1/private/datasets/items/from-csv",
|
|
320
|
+
method="POST",
|
|
321
|
+
data={
|
|
322
|
+
"file": file,
|
|
323
|
+
"dataset_id": dataset_id,
|
|
324
|
+
},
|
|
325
|
+
files={},
|
|
326
|
+
request_options=request_options,
|
|
327
|
+
omit=OMIT,
|
|
328
|
+
)
|
|
329
|
+
try:
|
|
330
|
+
if 200 <= _response.status_code < 300:
|
|
331
|
+
return HttpResponse(response=_response, data=None)
|
|
332
|
+
if _response.status_code == 400:
|
|
333
|
+
raise BadRequestError(
|
|
334
|
+
headers=dict(_response.headers),
|
|
335
|
+
body=typing.cast(
|
|
336
|
+
typing.Optional[typing.Any],
|
|
337
|
+
parse_obj_as(
|
|
338
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
339
|
+
object_=_response.json(),
|
|
340
|
+
),
|
|
341
|
+
),
|
|
342
|
+
)
|
|
343
|
+
if _response.status_code == 404:
|
|
344
|
+
raise NotFoundError(
|
|
345
|
+
headers=dict(_response.headers),
|
|
346
|
+
body=typing.cast(
|
|
347
|
+
typing.Optional[typing.Any],
|
|
348
|
+
parse_obj_as(
|
|
349
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
350
|
+
object_=_response.json(),
|
|
351
|
+
),
|
|
352
|
+
),
|
|
353
|
+
)
|
|
354
|
+
_response_json = _response.json()
|
|
355
|
+
except JSONDecodeError:
|
|
356
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
357
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
358
|
+
|
|
359
|
+
def create_dataset_items_from_spans(
|
|
360
|
+
self,
|
|
361
|
+
dataset_id: str,
|
|
362
|
+
*,
|
|
363
|
+
span_ids: typing.Sequence[str],
|
|
364
|
+
enrichment_options: SpanEnrichmentOptions,
|
|
365
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
366
|
+
) -> HttpResponse[None]:
|
|
367
|
+
"""
|
|
368
|
+
Create dataset items from spans with enriched metadata
|
|
369
|
+
|
|
370
|
+
Parameters
|
|
371
|
+
----------
|
|
372
|
+
dataset_id : str
|
|
373
|
+
|
|
374
|
+
span_ids : typing.Sequence[str]
|
|
375
|
+
Set of span IDs to add to the dataset
|
|
376
|
+
|
|
377
|
+
enrichment_options : SpanEnrichmentOptions
|
|
378
|
+
|
|
379
|
+
request_options : typing.Optional[RequestOptions]
|
|
380
|
+
Request-specific configuration.
|
|
381
|
+
|
|
382
|
+
Returns
|
|
383
|
+
-------
|
|
384
|
+
HttpResponse[None]
|
|
385
|
+
"""
|
|
386
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
387
|
+
f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
|
|
388
|
+
method="POST",
|
|
389
|
+
json={
|
|
390
|
+
"span_ids": span_ids,
|
|
391
|
+
"enrichment_options": convert_and_respect_annotation_metadata(
|
|
392
|
+
object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
|
|
393
|
+
),
|
|
394
|
+
},
|
|
395
|
+
headers={
|
|
396
|
+
"content-type": "application/json",
|
|
397
|
+
},
|
|
398
|
+
request_options=request_options,
|
|
399
|
+
omit=OMIT,
|
|
400
|
+
)
|
|
401
|
+
try:
|
|
402
|
+
if 200 <= _response.status_code < 300:
|
|
403
|
+
return HttpResponse(response=_response, data=None)
|
|
404
|
+
_response_json = _response.json()
|
|
405
|
+
except JSONDecodeError:
|
|
406
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
407
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
408
|
+
|
|
215
409
|
def create_dataset_items_from_traces(
|
|
216
410
|
self,
|
|
217
411
|
dataset_id: str,
|
|
@@ -771,12 +965,87 @@ class RawDatasetsClient:
|
|
|
771
965
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
772
966
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
773
967
|
|
|
968
|
+
def patch_dataset_item(
|
|
969
|
+
self,
|
|
970
|
+
item_id: str,
|
|
971
|
+
*,
|
|
972
|
+
source: DatasetItemWriteSource,
|
|
973
|
+
data: JsonNode,
|
|
974
|
+
id: typing.Optional[str] = OMIT,
|
|
975
|
+
trace_id: typing.Optional[str] = OMIT,
|
|
976
|
+
span_id: typing.Optional[str] = OMIT,
|
|
977
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
978
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
979
|
+
) -> HttpResponse[None]:
|
|
980
|
+
"""
|
|
981
|
+
Partially update dataset item by id. Only provided fields will be updated.
|
|
982
|
+
|
|
983
|
+
Parameters
|
|
984
|
+
----------
|
|
985
|
+
item_id : str
|
|
986
|
+
|
|
987
|
+
source : DatasetItemWriteSource
|
|
988
|
+
|
|
989
|
+
data : JsonNode
|
|
990
|
+
|
|
991
|
+
id : typing.Optional[str]
|
|
992
|
+
|
|
993
|
+
trace_id : typing.Optional[str]
|
|
994
|
+
|
|
995
|
+
span_id : typing.Optional[str]
|
|
996
|
+
|
|
997
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
998
|
+
|
|
999
|
+
request_options : typing.Optional[RequestOptions]
|
|
1000
|
+
Request-specific configuration.
|
|
1001
|
+
|
|
1002
|
+
Returns
|
|
1003
|
+
-------
|
|
1004
|
+
HttpResponse[None]
|
|
1005
|
+
"""
|
|
1006
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1007
|
+
f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
|
|
1008
|
+
method="PATCH",
|
|
1009
|
+
json={
|
|
1010
|
+
"id": id,
|
|
1011
|
+
"trace_id": trace_id,
|
|
1012
|
+
"span_id": span_id,
|
|
1013
|
+
"source": source,
|
|
1014
|
+
"data": data,
|
|
1015
|
+
"tags": tags,
|
|
1016
|
+
},
|
|
1017
|
+
headers={
|
|
1018
|
+
"content-type": "application/json",
|
|
1019
|
+
},
|
|
1020
|
+
request_options=request_options,
|
|
1021
|
+
omit=OMIT,
|
|
1022
|
+
)
|
|
1023
|
+
try:
|
|
1024
|
+
if 200 <= _response.status_code < 300:
|
|
1025
|
+
return HttpResponse(response=_response, data=None)
|
|
1026
|
+
if _response.status_code == 404:
|
|
1027
|
+
raise NotFoundError(
|
|
1028
|
+
headers=dict(_response.headers),
|
|
1029
|
+
body=typing.cast(
|
|
1030
|
+
typing.Optional[typing.Any],
|
|
1031
|
+
parse_obj_as(
|
|
1032
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1033
|
+
object_=_response.json(),
|
|
1034
|
+
),
|
|
1035
|
+
),
|
|
1036
|
+
)
|
|
1037
|
+
_response_json = _response.json()
|
|
1038
|
+
except JSONDecodeError:
|
|
1039
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1040
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1041
|
+
|
|
774
1042
|
def get_dataset_items(
|
|
775
1043
|
self,
|
|
776
1044
|
id: str,
|
|
777
1045
|
*,
|
|
778
1046
|
page: typing.Optional[int] = None,
|
|
779
1047
|
size: typing.Optional[int] = None,
|
|
1048
|
+
version: typing.Optional[str] = None,
|
|
780
1049
|
filters: typing.Optional[str] = None,
|
|
781
1050
|
truncate: typing.Optional[bool] = None,
|
|
782
1051
|
request_options: typing.Optional[RequestOptions] = None,
|
|
@@ -792,6 +1061,8 @@ class RawDatasetsClient:
|
|
|
792
1061
|
|
|
793
1062
|
size : typing.Optional[int]
|
|
794
1063
|
|
|
1064
|
+
version : typing.Optional[str]
|
|
1065
|
+
|
|
795
1066
|
filters : typing.Optional[str]
|
|
796
1067
|
|
|
797
1068
|
truncate : typing.Optional[bool]
|
|
@@ -810,6 +1081,7 @@ class RawDatasetsClient:
|
|
|
810
1081
|
params={
|
|
811
1082
|
"page": page,
|
|
812
1083
|
"size": size,
|
|
1084
|
+
"version": version,
|
|
813
1085
|
"filters": filters,
|
|
814
1086
|
"truncate": truncate,
|
|
815
1087
|
},
|
|
@@ -937,124 +1209,547 @@ class RawDatasetsClient:
|
|
|
937
1209
|
|
|
938
1210
|
yield stream()
|
|
939
1211
|
|
|
940
|
-
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
self._client_wrapper = client_wrapper
|
|
944
|
-
|
|
945
|
-
async def find_datasets(
|
|
946
|
-
self,
|
|
947
|
-
*,
|
|
948
|
-
page: typing.Optional[int] = None,
|
|
949
|
-
size: typing.Optional[int] = None,
|
|
950
|
-
with_experiments_only: typing.Optional[bool] = None,
|
|
951
|
-
with_optimizations_only: typing.Optional[bool] = None,
|
|
952
|
-
prompt_id: typing.Optional[str] = None,
|
|
953
|
-
name: typing.Optional[str] = None,
|
|
954
|
-
sorting: typing.Optional[str] = None,
|
|
955
|
-
filters: typing.Optional[str] = None,
|
|
956
|
-
request_options: typing.Optional[RequestOptions] = None,
|
|
957
|
-
) -> AsyncHttpResponse[DatasetPagePublic]:
|
|
1212
|
+
def compare_dataset_versions(
|
|
1213
|
+
self, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1214
|
+
) -> HttpResponse[DatasetVersionDiff]:
|
|
958
1215
|
"""
|
|
959
|
-
|
|
1216
|
+
Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
|
|
960
1217
|
|
|
961
1218
|
Parameters
|
|
962
1219
|
----------
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
size : typing.Optional[int]
|
|
966
|
-
|
|
967
|
-
with_experiments_only : typing.Optional[bool]
|
|
968
|
-
|
|
969
|
-
with_optimizations_only : typing.Optional[bool]
|
|
970
|
-
|
|
971
|
-
prompt_id : typing.Optional[str]
|
|
972
|
-
|
|
973
|
-
name : typing.Optional[str]
|
|
974
|
-
|
|
975
|
-
sorting : typing.Optional[str]
|
|
976
|
-
|
|
977
|
-
filters : typing.Optional[str]
|
|
1220
|
+
id : str
|
|
978
1221
|
|
|
979
1222
|
request_options : typing.Optional[RequestOptions]
|
|
980
1223
|
Request-specific configuration.
|
|
981
1224
|
|
|
982
1225
|
Returns
|
|
983
1226
|
-------
|
|
984
|
-
|
|
985
|
-
|
|
1227
|
+
HttpResponse[DatasetVersionDiff]
|
|
1228
|
+
Diff computed successfully
|
|
986
1229
|
"""
|
|
987
|
-
_response =
|
|
988
|
-
"v1/private/datasets",
|
|
1230
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1231
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
|
|
989
1232
|
method="GET",
|
|
990
|
-
params={
|
|
991
|
-
"page": page,
|
|
992
|
-
"size": size,
|
|
993
|
-
"with_experiments_only": with_experiments_only,
|
|
994
|
-
"with_optimizations_only": with_optimizations_only,
|
|
995
|
-
"prompt_id": prompt_id,
|
|
996
|
-
"name": name,
|
|
997
|
-
"sorting": sorting,
|
|
998
|
-
"filters": filters,
|
|
999
|
-
},
|
|
1000
1233
|
request_options=request_options,
|
|
1001
1234
|
)
|
|
1002
1235
|
try:
|
|
1003
1236
|
if 200 <= _response.status_code < 300:
|
|
1004
1237
|
_data = typing.cast(
|
|
1005
|
-
|
|
1238
|
+
DatasetVersionDiff,
|
|
1006
1239
|
parse_obj_as(
|
|
1007
|
-
type_=
|
|
1240
|
+
type_=DatasetVersionDiff, # type: ignore
|
|
1008
1241
|
object_=_response.json(),
|
|
1009
1242
|
),
|
|
1010
1243
|
)
|
|
1011
|
-
return
|
|
1244
|
+
return HttpResponse(response=_response, data=_data)
|
|
1245
|
+
if _response.status_code == 404:
|
|
1246
|
+
raise NotFoundError(
|
|
1247
|
+
headers=dict(_response.headers),
|
|
1248
|
+
body=typing.cast(
|
|
1249
|
+
typing.Optional[typing.Any],
|
|
1250
|
+
parse_obj_as(
|
|
1251
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1252
|
+
object_=_response.json(),
|
|
1253
|
+
),
|
|
1254
|
+
),
|
|
1255
|
+
)
|
|
1012
1256
|
_response_json = _response.json()
|
|
1013
1257
|
except JSONDecodeError:
|
|
1014
1258
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1015
1259
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1016
1260
|
|
|
1017
|
-
|
|
1018
|
-
self,
|
|
1019
|
-
|
|
1020
|
-
name: str,
|
|
1021
|
-
id: typing.Optional[str] = OMIT,
|
|
1022
|
-
visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
|
|
1023
|
-
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1024
|
-
description: typing.Optional[str] = OMIT,
|
|
1025
|
-
request_options: typing.Optional[RequestOptions] = None,
|
|
1026
|
-
) -> AsyncHttpResponse[None]:
|
|
1261
|
+
def create_version_tag(
|
|
1262
|
+
self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
|
|
1263
|
+
) -> HttpResponse[None]:
|
|
1027
1264
|
"""
|
|
1028
|
-
|
|
1265
|
+
Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')
|
|
1029
1266
|
|
|
1030
1267
|
Parameters
|
|
1031
1268
|
----------
|
|
1032
|
-
|
|
1033
|
-
|
|
1034
|
-
id : typing.Optional[str]
|
|
1035
|
-
|
|
1036
|
-
visibility : typing.Optional[DatasetWriteVisibility]
|
|
1269
|
+
version_hash : str
|
|
1037
1270
|
|
|
1038
|
-
|
|
1271
|
+
id : str
|
|
1039
1272
|
|
|
1040
|
-
|
|
1273
|
+
tag : str
|
|
1041
1274
|
|
|
1042
1275
|
request_options : typing.Optional[RequestOptions]
|
|
1043
1276
|
Request-specific configuration.
|
|
1044
1277
|
|
|
1045
1278
|
Returns
|
|
1046
1279
|
-------
|
|
1047
|
-
|
|
1280
|
+
HttpResponse[None]
|
|
1048
1281
|
"""
|
|
1049
|
-
_response =
|
|
1050
|
-
"v1/private/datasets",
|
|
1282
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1283
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
|
|
1051
1284
|
method="POST",
|
|
1052
1285
|
json={
|
|
1053
|
-
"
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
"
|
|
1057
|
-
|
|
1286
|
+
"tag": tag,
|
|
1287
|
+
},
|
|
1288
|
+
headers={
|
|
1289
|
+
"content-type": "application/json",
|
|
1290
|
+
},
|
|
1291
|
+
request_options=request_options,
|
|
1292
|
+
omit=OMIT,
|
|
1293
|
+
)
|
|
1294
|
+
try:
|
|
1295
|
+
if 200 <= _response.status_code < 300:
|
|
1296
|
+
return HttpResponse(response=_response, data=None)
|
|
1297
|
+
if _response.status_code == 400:
|
|
1298
|
+
raise BadRequestError(
|
|
1299
|
+
headers=dict(_response.headers),
|
|
1300
|
+
body=typing.cast(
|
|
1301
|
+
typing.Optional[typing.Any],
|
|
1302
|
+
parse_obj_as(
|
|
1303
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1304
|
+
object_=_response.json(),
|
|
1305
|
+
),
|
|
1306
|
+
),
|
|
1307
|
+
)
|
|
1308
|
+
if _response.status_code == 404:
|
|
1309
|
+
raise NotFoundError(
|
|
1310
|
+
headers=dict(_response.headers),
|
|
1311
|
+
body=typing.cast(
|
|
1312
|
+
typing.Optional[typing.Any],
|
|
1313
|
+
parse_obj_as(
|
|
1314
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1315
|
+
object_=_response.json(),
|
|
1316
|
+
),
|
|
1317
|
+
),
|
|
1318
|
+
)
|
|
1319
|
+
if _response.status_code == 409:
|
|
1320
|
+
raise ConflictError(
|
|
1321
|
+
headers=dict(_response.headers),
|
|
1322
|
+
body=typing.cast(
|
|
1323
|
+
typing.Optional[typing.Any],
|
|
1324
|
+
parse_obj_as(
|
|
1325
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1326
|
+
object_=_response.json(),
|
|
1327
|
+
),
|
|
1328
|
+
),
|
|
1329
|
+
)
|
|
1330
|
+
_response_json = _response.json()
|
|
1331
|
+
except JSONDecodeError:
|
|
1332
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1333
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1334
|
+
|
|
1335
|
+
def list_dataset_versions(
|
|
1336
|
+
self,
|
|
1337
|
+
id: str,
|
|
1338
|
+
*,
|
|
1339
|
+
page: typing.Optional[int] = None,
|
|
1340
|
+
size: typing.Optional[int] = None,
|
|
1341
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1342
|
+
) -> HttpResponse[DatasetVersionPagePublic]:
|
|
1343
|
+
"""
|
|
1344
|
+
Get paginated list of versions for a dataset, ordered by creation time (newest first)
|
|
1345
|
+
|
|
1346
|
+
Parameters
|
|
1347
|
+
----------
|
|
1348
|
+
id : str
|
|
1349
|
+
|
|
1350
|
+
page : typing.Optional[int]
|
|
1351
|
+
|
|
1352
|
+
size : typing.Optional[int]
|
|
1353
|
+
|
|
1354
|
+
request_options : typing.Optional[RequestOptions]
|
|
1355
|
+
Request-specific configuration.
|
|
1356
|
+
|
|
1357
|
+
Returns
|
|
1358
|
+
-------
|
|
1359
|
+
HttpResponse[DatasetVersionPagePublic]
|
|
1360
|
+
Dataset versions
|
|
1361
|
+
"""
|
|
1362
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1363
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions",
|
|
1364
|
+
method="GET",
|
|
1365
|
+
params={
|
|
1366
|
+
"page": page,
|
|
1367
|
+
"size": size,
|
|
1368
|
+
},
|
|
1369
|
+
request_options=request_options,
|
|
1370
|
+
)
|
|
1371
|
+
try:
|
|
1372
|
+
if 200 <= _response.status_code < 300:
|
|
1373
|
+
_data = typing.cast(
|
|
1374
|
+
DatasetVersionPagePublic,
|
|
1375
|
+
parse_obj_as(
|
|
1376
|
+
type_=DatasetVersionPagePublic, # type: ignore
|
|
1377
|
+
object_=_response.json(),
|
|
1378
|
+
),
|
|
1379
|
+
)
|
|
1380
|
+
return HttpResponse(response=_response, data=_data)
|
|
1381
|
+
if _response.status_code == 400:
|
|
1382
|
+
raise BadRequestError(
|
|
1383
|
+
headers=dict(_response.headers),
|
|
1384
|
+
body=typing.cast(
|
|
1385
|
+
typing.Optional[typing.Any],
|
|
1386
|
+
parse_obj_as(
|
|
1387
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1388
|
+
object_=_response.json(),
|
|
1389
|
+
),
|
|
1390
|
+
),
|
|
1391
|
+
)
|
|
1392
|
+
_response_json = _response.json()
|
|
1393
|
+
except JSONDecodeError:
|
|
1394
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1395
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1396
|
+
|
|
1397
|
+
def create_dataset_version(
|
|
1398
|
+
self,
|
|
1399
|
+
id: str,
|
|
1400
|
+
*,
|
|
1401
|
+
tag: typing.Optional[str] = OMIT,
|
|
1402
|
+
change_description: typing.Optional[str] = OMIT,
|
|
1403
|
+
metadata: typing.Optional[typing.Dict[str, str]] = OMIT,
|
|
1404
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1405
|
+
) -> HttpResponse[None]:
|
|
1406
|
+
"""
|
|
1407
|
+
Create a new immutable version of the dataset by snapshotting the current state
|
|
1408
|
+
|
|
1409
|
+
Parameters
|
|
1410
|
+
----------
|
|
1411
|
+
id : str
|
|
1412
|
+
|
|
1413
|
+
tag : typing.Optional[str]
|
|
1414
|
+
Optional tag for this version
|
|
1415
|
+
|
|
1416
|
+
change_description : typing.Optional[str]
|
|
1417
|
+
Optional description of changes in this version
|
|
1418
|
+
|
|
1419
|
+
metadata : typing.Optional[typing.Dict[str, str]]
|
|
1420
|
+
Optional user-defined metadata
|
|
1421
|
+
|
|
1422
|
+
request_options : typing.Optional[RequestOptions]
|
|
1423
|
+
Request-specific configuration.
|
|
1424
|
+
|
|
1425
|
+
Returns
|
|
1426
|
+
-------
|
|
1427
|
+
HttpResponse[None]
|
|
1428
|
+
"""
|
|
1429
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1430
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions",
|
|
1431
|
+
method="POST",
|
|
1432
|
+
json={
|
|
1433
|
+
"tag": tag,
|
|
1434
|
+
"change_description": change_description,
|
|
1435
|
+
"metadata": metadata,
|
|
1436
|
+
},
|
|
1437
|
+
headers={
|
|
1438
|
+
"content-type": "application/json",
|
|
1439
|
+
},
|
|
1440
|
+
request_options=request_options,
|
|
1441
|
+
omit=OMIT,
|
|
1442
|
+
)
|
|
1443
|
+
try:
|
|
1444
|
+
if 200 <= _response.status_code < 300:
|
|
1445
|
+
return HttpResponse(response=_response, data=None)
|
|
1446
|
+
if _response.status_code == 400:
|
|
1447
|
+
raise BadRequestError(
|
|
1448
|
+
headers=dict(_response.headers),
|
|
1449
|
+
body=typing.cast(
|
|
1450
|
+
typing.Optional[typing.Any],
|
|
1451
|
+
parse_obj_as(
|
|
1452
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1453
|
+
object_=_response.json(),
|
|
1454
|
+
),
|
|
1455
|
+
),
|
|
1456
|
+
)
|
|
1457
|
+
if _response.status_code == 409:
|
|
1458
|
+
raise ConflictError(
|
|
1459
|
+
headers=dict(_response.headers),
|
|
1460
|
+
body=typing.cast(
|
|
1461
|
+
typing.Optional[typing.Any],
|
|
1462
|
+
parse_obj_as(
|
|
1463
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1464
|
+
object_=_response.json(),
|
|
1465
|
+
),
|
|
1466
|
+
),
|
|
1467
|
+
)
|
|
1468
|
+
_response_json = _response.json()
|
|
1469
|
+
except JSONDecodeError:
|
|
1470
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1471
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1472
|
+
|
|
1473
|
+
def delete_version_tag(
|
|
1474
|
+
self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1475
|
+
) -> HttpResponse[None]:
|
|
1476
|
+
"""
|
|
1477
|
+
Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.
|
|
1478
|
+
|
|
1479
|
+
Parameters
|
|
1480
|
+
----------
|
|
1481
|
+
version_hash : str
|
|
1482
|
+
|
|
1483
|
+
tag : str
|
|
1484
|
+
|
|
1485
|
+
id : str
|
|
1486
|
+
|
|
1487
|
+
request_options : typing.Optional[RequestOptions]
|
|
1488
|
+
Request-specific configuration.
|
|
1489
|
+
|
|
1490
|
+
Returns
|
|
1491
|
+
-------
|
|
1492
|
+
HttpResponse[None]
|
|
1493
|
+
"""
|
|
1494
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1495
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
|
|
1496
|
+
method="DELETE",
|
|
1497
|
+
request_options=request_options,
|
|
1498
|
+
)
|
|
1499
|
+
try:
|
|
1500
|
+
if 200 <= _response.status_code < 300:
|
|
1501
|
+
return HttpResponse(response=_response, data=None)
|
|
1502
|
+
_response_json = _response.json()
|
|
1503
|
+
except JSONDecodeError:
|
|
1504
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1505
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1506
|
+
|
|
1507
|
+
def restore_dataset_version(
|
|
1508
|
+
self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
|
|
1509
|
+
) -> HttpResponse[DatasetVersionPublic]:
|
|
1510
|
+
"""
|
|
1511
|
+
Restores the dataset to a previous version state. All draft items are replaced with items from the specified version. If the version is not the latest, a new version snapshot is created. If the version is the latest, only draft items are replaced (revert functionality).
|
|
1512
|
+
|
|
1513
|
+
Parameters
|
|
1514
|
+
----------
|
|
1515
|
+
id : str
|
|
1516
|
+
|
|
1517
|
+
version_ref : str
|
|
1518
|
+
Version hash or tag to restore from
|
|
1519
|
+
|
|
1520
|
+
request_options : typing.Optional[RequestOptions]
|
|
1521
|
+
Request-specific configuration.
|
|
1522
|
+
|
|
1523
|
+
Returns
|
|
1524
|
+
-------
|
|
1525
|
+
HttpResponse[DatasetVersionPublic]
|
|
1526
|
+
Version restored successfully
|
|
1527
|
+
"""
|
|
1528
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1529
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
|
|
1530
|
+
method="POST",
|
|
1531
|
+
json={
|
|
1532
|
+
"version_ref": version_ref,
|
|
1533
|
+
},
|
|
1534
|
+
headers={
|
|
1535
|
+
"content-type": "application/json",
|
|
1536
|
+
},
|
|
1537
|
+
request_options=request_options,
|
|
1538
|
+
omit=OMIT,
|
|
1539
|
+
)
|
|
1540
|
+
try:
|
|
1541
|
+
if 200 <= _response.status_code < 300:
|
|
1542
|
+
_data = typing.cast(
|
|
1543
|
+
DatasetVersionPublic,
|
|
1544
|
+
parse_obj_as(
|
|
1545
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
1546
|
+
object_=_response.json(),
|
|
1547
|
+
),
|
|
1548
|
+
)
|
|
1549
|
+
return HttpResponse(response=_response, data=_data)
|
|
1550
|
+
if _response.status_code == 404:
|
|
1551
|
+
raise NotFoundError(
|
|
1552
|
+
headers=dict(_response.headers),
|
|
1553
|
+
body=typing.cast(
|
|
1554
|
+
typing.Optional[typing.Any],
|
|
1555
|
+
parse_obj_as(
|
|
1556
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1557
|
+
object_=_response.json(),
|
|
1558
|
+
),
|
|
1559
|
+
),
|
|
1560
|
+
)
|
|
1561
|
+
_response_json = _response.json()
|
|
1562
|
+
except JSONDecodeError:
|
|
1563
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1564
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1565
|
+
|
|
1566
|
+
|
|
1567
|
+
class AsyncRawDatasetsClient:
|
|
1568
|
+
def __init__(self, *, client_wrapper: AsyncClientWrapper):
|
|
1569
|
+
self._client_wrapper = client_wrapper
|
|
1570
|
+
|
|
1571
|
+
async def batch_update_dataset_items(
|
|
1572
|
+
self,
|
|
1573
|
+
*,
|
|
1574
|
+
update: DatasetItemUpdate,
|
|
1575
|
+
ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1576
|
+
filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
|
|
1577
|
+
merge_tags: typing.Optional[bool] = OMIT,
|
|
1578
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1579
|
+
) -> AsyncHttpResponse[None]:
|
|
1580
|
+
"""
|
|
1581
|
+
Update multiple dataset items
|
|
1582
|
+
|
|
1583
|
+
Parameters
|
|
1584
|
+
----------
|
|
1585
|
+
update : DatasetItemUpdate
|
|
1586
|
+
|
|
1587
|
+
ids : typing.Optional[typing.Sequence[str]]
|
|
1588
|
+
List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
|
|
1589
|
+
|
|
1590
|
+
filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
|
|
1591
|
+
|
|
1592
|
+
merge_tags : typing.Optional[bool]
|
|
1593
|
+
If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
|
|
1594
|
+
|
|
1595
|
+
request_options : typing.Optional[RequestOptions]
|
|
1596
|
+
Request-specific configuration.
|
|
1597
|
+
|
|
1598
|
+
Returns
|
|
1599
|
+
-------
|
|
1600
|
+
AsyncHttpResponse[None]
|
|
1601
|
+
"""
|
|
1602
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1603
|
+
"v1/private/datasets/items/batch",
|
|
1604
|
+
method="PATCH",
|
|
1605
|
+
json={
|
|
1606
|
+
"ids": ids,
|
|
1607
|
+
"filters": convert_and_respect_annotation_metadata(
|
|
1608
|
+
object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
|
|
1609
|
+
),
|
|
1610
|
+
"update": convert_and_respect_annotation_metadata(
|
|
1611
|
+
object_=update, annotation=DatasetItemUpdate, direction="write"
|
|
1612
|
+
),
|
|
1613
|
+
"merge_tags": merge_tags,
|
|
1614
|
+
},
|
|
1615
|
+
headers={
|
|
1616
|
+
"content-type": "application/json",
|
|
1617
|
+
},
|
|
1618
|
+
request_options=request_options,
|
|
1619
|
+
omit=OMIT,
|
|
1620
|
+
)
|
|
1621
|
+
try:
|
|
1622
|
+
if 200 <= _response.status_code < 300:
|
|
1623
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
1624
|
+
if _response.status_code == 400:
|
|
1625
|
+
raise BadRequestError(
|
|
1626
|
+
headers=dict(_response.headers),
|
|
1627
|
+
body=typing.cast(
|
|
1628
|
+
typing.Optional[typing.Any],
|
|
1629
|
+
parse_obj_as(
|
|
1630
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1631
|
+
object_=_response.json(),
|
|
1632
|
+
),
|
|
1633
|
+
),
|
|
1634
|
+
)
|
|
1635
|
+
_response_json = _response.json()
|
|
1636
|
+
except JSONDecodeError:
|
|
1637
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1638
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1639
|
+
|
|
1640
|
+
async def find_datasets(
|
|
1641
|
+
self,
|
|
1642
|
+
*,
|
|
1643
|
+
page: typing.Optional[int] = None,
|
|
1644
|
+
size: typing.Optional[int] = None,
|
|
1645
|
+
with_experiments_only: typing.Optional[bool] = None,
|
|
1646
|
+
with_optimizations_only: typing.Optional[bool] = None,
|
|
1647
|
+
prompt_id: typing.Optional[str] = None,
|
|
1648
|
+
name: typing.Optional[str] = None,
|
|
1649
|
+
sorting: typing.Optional[str] = None,
|
|
1650
|
+
filters: typing.Optional[str] = None,
|
|
1651
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1652
|
+
) -> AsyncHttpResponse[DatasetPagePublic]:
|
|
1653
|
+
"""
|
|
1654
|
+
Find datasets
|
|
1655
|
+
|
|
1656
|
+
Parameters
|
|
1657
|
+
----------
|
|
1658
|
+
page : typing.Optional[int]
|
|
1659
|
+
|
|
1660
|
+
size : typing.Optional[int]
|
|
1661
|
+
|
|
1662
|
+
with_experiments_only : typing.Optional[bool]
|
|
1663
|
+
|
|
1664
|
+
with_optimizations_only : typing.Optional[bool]
|
|
1665
|
+
|
|
1666
|
+
prompt_id : typing.Optional[str]
|
|
1667
|
+
|
|
1668
|
+
name : typing.Optional[str]
|
|
1669
|
+
|
|
1670
|
+
sorting : typing.Optional[str]
|
|
1671
|
+
|
|
1672
|
+
filters : typing.Optional[str]
|
|
1673
|
+
|
|
1674
|
+
request_options : typing.Optional[RequestOptions]
|
|
1675
|
+
Request-specific configuration.
|
|
1676
|
+
|
|
1677
|
+
Returns
|
|
1678
|
+
-------
|
|
1679
|
+
AsyncHttpResponse[DatasetPagePublic]
|
|
1680
|
+
Dataset resource
|
|
1681
|
+
"""
|
|
1682
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1683
|
+
"v1/private/datasets",
|
|
1684
|
+
method="GET",
|
|
1685
|
+
params={
|
|
1686
|
+
"page": page,
|
|
1687
|
+
"size": size,
|
|
1688
|
+
"with_experiments_only": with_experiments_only,
|
|
1689
|
+
"with_optimizations_only": with_optimizations_only,
|
|
1690
|
+
"prompt_id": prompt_id,
|
|
1691
|
+
"name": name,
|
|
1692
|
+
"sorting": sorting,
|
|
1693
|
+
"filters": filters,
|
|
1694
|
+
},
|
|
1695
|
+
request_options=request_options,
|
|
1696
|
+
)
|
|
1697
|
+
try:
|
|
1698
|
+
if 200 <= _response.status_code < 300:
|
|
1699
|
+
_data = typing.cast(
|
|
1700
|
+
DatasetPagePublic,
|
|
1701
|
+
parse_obj_as(
|
|
1702
|
+
type_=DatasetPagePublic, # type: ignore
|
|
1703
|
+
object_=_response.json(),
|
|
1704
|
+
),
|
|
1705
|
+
)
|
|
1706
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
1707
|
+
_response_json = _response.json()
|
|
1708
|
+
except JSONDecodeError:
|
|
1709
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1710
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1711
|
+
|
|
1712
|
+
async def create_dataset(
|
|
1713
|
+
self,
|
|
1714
|
+
*,
|
|
1715
|
+
name: str,
|
|
1716
|
+
id: typing.Optional[str] = OMIT,
|
|
1717
|
+
visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
|
|
1718
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1719
|
+
description: typing.Optional[str] = OMIT,
|
|
1720
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1721
|
+
) -> AsyncHttpResponse[None]:
|
|
1722
|
+
"""
|
|
1723
|
+
Create dataset
|
|
1724
|
+
|
|
1725
|
+
Parameters
|
|
1726
|
+
----------
|
|
1727
|
+
name : str
|
|
1728
|
+
|
|
1729
|
+
id : typing.Optional[str]
|
|
1730
|
+
|
|
1731
|
+
visibility : typing.Optional[DatasetWriteVisibility]
|
|
1732
|
+
|
|
1733
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
1734
|
+
|
|
1735
|
+
description : typing.Optional[str]
|
|
1736
|
+
|
|
1737
|
+
request_options : typing.Optional[RequestOptions]
|
|
1738
|
+
Request-specific configuration.
|
|
1739
|
+
|
|
1740
|
+
Returns
|
|
1741
|
+
-------
|
|
1742
|
+
AsyncHttpResponse[None]
|
|
1743
|
+
"""
|
|
1744
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1745
|
+
"v1/private/datasets",
|
|
1746
|
+
method="POST",
|
|
1747
|
+
json={
|
|
1748
|
+
"id": id,
|
|
1749
|
+
"name": name,
|
|
1750
|
+
"visibility": visibility,
|
|
1751
|
+
"tags": tags,
|
|
1752
|
+
"description": description,
|
|
1058
1753
|
},
|
|
1059
1754
|
headers={
|
|
1060
1755
|
"content-type": "application/json",
|
|
@@ -1122,6 +1817,120 @@ class AsyncRawDatasetsClient:
|
|
|
1122
1817
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1123
1818
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1124
1819
|
|
|
1820
|
+
async def create_dataset_items_from_csv(
|
|
1821
|
+
self,
|
|
1822
|
+
*,
|
|
1823
|
+
file: typing.Dict[str, typing.Optional[typing.Any]],
|
|
1824
|
+
dataset_id: str,
|
|
1825
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1826
|
+
) -> AsyncHttpResponse[None]:
|
|
1827
|
+
"""
|
|
1828
|
+
Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.
|
|
1829
|
+
|
|
1830
|
+
Parameters
|
|
1831
|
+
----------
|
|
1832
|
+
file : typing.Dict[str, typing.Optional[typing.Any]]
|
|
1833
|
+
|
|
1834
|
+
dataset_id : str
|
|
1835
|
+
|
|
1836
|
+
request_options : typing.Optional[RequestOptions]
|
|
1837
|
+
Request-specific configuration.
|
|
1838
|
+
|
|
1839
|
+
Returns
|
|
1840
|
+
-------
|
|
1841
|
+
AsyncHttpResponse[None]
|
|
1842
|
+
"""
|
|
1843
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1844
|
+
"v1/private/datasets/items/from-csv",
|
|
1845
|
+
method="POST",
|
|
1846
|
+
data={
|
|
1847
|
+
"file": file,
|
|
1848
|
+
"dataset_id": dataset_id,
|
|
1849
|
+
},
|
|
1850
|
+
files={},
|
|
1851
|
+
request_options=request_options,
|
|
1852
|
+
omit=OMIT,
|
|
1853
|
+
)
|
|
1854
|
+
try:
|
|
1855
|
+
if 200 <= _response.status_code < 300:
|
|
1856
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
1857
|
+
if _response.status_code == 400:
|
|
1858
|
+
raise BadRequestError(
|
|
1859
|
+
headers=dict(_response.headers),
|
|
1860
|
+
body=typing.cast(
|
|
1861
|
+
typing.Optional[typing.Any],
|
|
1862
|
+
parse_obj_as(
|
|
1863
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1864
|
+
object_=_response.json(),
|
|
1865
|
+
),
|
|
1866
|
+
),
|
|
1867
|
+
)
|
|
1868
|
+
if _response.status_code == 404:
|
|
1869
|
+
raise NotFoundError(
|
|
1870
|
+
headers=dict(_response.headers),
|
|
1871
|
+
body=typing.cast(
|
|
1872
|
+
typing.Optional[typing.Any],
|
|
1873
|
+
parse_obj_as(
|
|
1874
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1875
|
+
object_=_response.json(),
|
|
1876
|
+
),
|
|
1877
|
+
),
|
|
1878
|
+
)
|
|
1879
|
+
_response_json = _response.json()
|
|
1880
|
+
except JSONDecodeError:
|
|
1881
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1882
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1883
|
+
|
|
1884
|
+
async def create_dataset_items_from_spans(
|
|
1885
|
+
self,
|
|
1886
|
+
dataset_id: str,
|
|
1887
|
+
*,
|
|
1888
|
+
span_ids: typing.Sequence[str],
|
|
1889
|
+
enrichment_options: SpanEnrichmentOptions,
|
|
1890
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1891
|
+
) -> AsyncHttpResponse[None]:
|
|
1892
|
+
"""
|
|
1893
|
+
Create dataset items from spans with enriched metadata
|
|
1894
|
+
|
|
1895
|
+
Parameters
|
|
1896
|
+
----------
|
|
1897
|
+
dataset_id : str
|
|
1898
|
+
|
|
1899
|
+
span_ids : typing.Sequence[str]
|
|
1900
|
+
Set of span IDs to add to the dataset
|
|
1901
|
+
|
|
1902
|
+
enrichment_options : SpanEnrichmentOptions
|
|
1903
|
+
|
|
1904
|
+
request_options : typing.Optional[RequestOptions]
|
|
1905
|
+
Request-specific configuration.
|
|
1906
|
+
|
|
1907
|
+
Returns
|
|
1908
|
+
-------
|
|
1909
|
+
AsyncHttpResponse[None]
|
|
1910
|
+
"""
|
|
1911
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1912
|
+
f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
|
|
1913
|
+
method="POST",
|
|
1914
|
+
json={
|
|
1915
|
+
"span_ids": span_ids,
|
|
1916
|
+
"enrichment_options": convert_and_respect_annotation_metadata(
|
|
1917
|
+
object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
|
|
1918
|
+
),
|
|
1919
|
+
},
|
|
1920
|
+
headers={
|
|
1921
|
+
"content-type": "application/json",
|
|
1922
|
+
},
|
|
1923
|
+
request_options=request_options,
|
|
1924
|
+
omit=OMIT,
|
|
1925
|
+
)
|
|
1926
|
+
try:
|
|
1927
|
+
if 200 <= _response.status_code < 300:
|
|
1928
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
1929
|
+
_response_json = _response.json()
|
|
1930
|
+
except JSONDecodeError:
|
|
1931
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1932
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1933
|
+
|
|
1125
1934
|
async def create_dataset_items_from_traces(
|
|
1126
1935
|
self,
|
|
1127
1936
|
dataset_id: str,
|
|
@@ -1649,35 +2458,109 @@ class AsyncRawDatasetsClient:
|
|
|
1649
2458
|
self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1650
2459
|
) -> AsyncHttpResponse[DatasetItemPublic]:
|
|
1651
2460
|
"""
|
|
1652
|
-
Get dataset item by id
|
|
2461
|
+
Get dataset item by id
|
|
2462
|
+
|
|
2463
|
+
Parameters
|
|
2464
|
+
----------
|
|
2465
|
+
item_id : str
|
|
2466
|
+
|
|
2467
|
+
request_options : typing.Optional[RequestOptions]
|
|
2468
|
+
Request-specific configuration.
|
|
2469
|
+
|
|
2470
|
+
Returns
|
|
2471
|
+
-------
|
|
2472
|
+
AsyncHttpResponse[DatasetItemPublic]
|
|
2473
|
+
Dataset item resource
|
|
2474
|
+
"""
|
|
2475
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2476
|
+
f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
|
|
2477
|
+
method="GET",
|
|
2478
|
+
request_options=request_options,
|
|
2479
|
+
)
|
|
2480
|
+
try:
|
|
2481
|
+
if 200 <= _response.status_code < 300:
|
|
2482
|
+
_data = typing.cast(
|
|
2483
|
+
DatasetItemPublic,
|
|
2484
|
+
parse_obj_as(
|
|
2485
|
+
type_=DatasetItemPublic, # type: ignore
|
|
2486
|
+
object_=_response.json(),
|
|
2487
|
+
),
|
|
2488
|
+
)
|
|
2489
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
2490
|
+
_response_json = _response.json()
|
|
2491
|
+
except JSONDecodeError:
|
|
2492
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
2493
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
2494
|
+
|
|
2495
|
+
async def patch_dataset_item(
|
|
2496
|
+
self,
|
|
2497
|
+
item_id: str,
|
|
2498
|
+
*,
|
|
2499
|
+
source: DatasetItemWriteSource,
|
|
2500
|
+
data: JsonNode,
|
|
2501
|
+
id: typing.Optional[str] = OMIT,
|
|
2502
|
+
trace_id: typing.Optional[str] = OMIT,
|
|
2503
|
+
span_id: typing.Optional[str] = OMIT,
|
|
2504
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
2505
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
2506
|
+
) -> AsyncHttpResponse[None]:
|
|
2507
|
+
"""
|
|
2508
|
+
Partially update dataset item by id. Only provided fields will be updated.
|
|
1653
2509
|
|
|
1654
2510
|
Parameters
|
|
1655
2511
|
----------
|
|
1656
2512
|
item_id : str
|
|
1657
2513
|
|
|
2514
|
+
source : DatasetItemWriteSource
|
|
2515
|
+
|
|
2516
|
+
data : JsonNode
|
|
2517
|
+
|
|
2518
|
+
id : typing.Optional[str]
|
|
2519
|
+
|
|
2520
|
+
trace_id : typing.Optional[str]
|
|
2521
|
+
|
|
2522
|
+
span_id : typing.Optional[str]
|
|
2523
|
+
|
|
2524
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
2525
|
+
|
|
1658
2526
|
request_options : typing.Optional[RequestOptions]
|
|
1659
2527
|
Request-specific configuration.
|
|
1660
2528
|
|
|
1661
2529
|
Returns
|
|
1662
2530
|
-------
|
|
1663
|
-
AsyncHttpResponse[
|
|
1664
|
-
Dataset item resource
|
|
2531
|
+
AsyncHttpResponse[None]
|
|
1665
2532
|
"""
|
|
1666
2533
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1667
2534
|
f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
|
|
1668
|
-
method="
|
|
2535
|
+
method="PATCH",
|
|
2536
|
+
json={
|
|
2537
|
+
"id": id,
|
|
2538
|
+
"trace_id": trace_id,
|
|
2539
|
+
"span_id": span_id,
|
|
2540
|
+
"source": source,
|
|
2541
|
+
"data": data,
|
|
2542
|
+
"tags": tags,
|
|
2543
|
+
},
|
|
2544
|
+
headers={
|
|
2545
|
+
"content-type": "application/json",
|
|
2546
|
+
},
|
|
1669
2547
|
request_options=request_options,
|
|
2548
|
+
omit=OMIT,
|
|
1670
2549
|
)
|
|
1671
2550
|
try:
|
|
1672
2551
|
if 200 <= _response.status_code < 300:
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
2552
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
2553
|
+
if _response.status_code == 404:
|
|
2554
|
+
raise NotFoundError(
|
|
2555
|
+
headers=dict(_response.headers),
|
|
2556
|
+
body=typing.cast(
|
|
2557
|
+
typing.Optional[typing.Any],
|
|
2558
|
+
parse_obj_as(
|
|
2559
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
2560
|
+
object_=_response.json(),
|
|
2561
|
+
),
|
|
1678
2562
|
),
|
|
1679
2563
|
)
|
|
1680
|
-
return AsyncHttpResponse(response=_response, data=_data)
|
|
1681
2564
|
_response_json = _response.json()
|
|
1682
2565
|
except JSONDecodeError:
|
|
1683
2566
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
@@ -1689,6 +2572,7 @@ class AsyncRawDatasetsClient:
|
|
|
1689
2572
|
*,
|
|
1690
2573
|
page: typing.Optional[int] = None,
|
|
1691
2574
|
size: typing.Optional[int] = None,
|
|
2575
|
+
version: typing.Optional[str] = None,
|
|
1692
2576
|
filters: typing.Optional[str] = None,
|
|
1693
2577
|
truncate: typing.Optional[bool] = None,
|
|
1694
2578
|
request_options: typing.Optional[RequestOptions] = None,
|
|
@@ -1704,6 +2588,8 @@ class AsyncRawDatasetsClient:
|
|
|
1704
2588
|
|
|
1705
2589
|
size : typing.Optional[int]
|
|
1706
2590
|
|
|
2591
|
+
version : typing.Optional[str]
|
|
2592
|
+
|
|
1707
2593
|
filters : typing.Optional[str]
|
|
1708
2594
|
|
|
1709
2595
|
truncate : typing.Optional[bool]
|
|
@@ -1722,6 +2608,7 @@ class AsyncRawDatasetsClient:
|
|
|
1722
2608
|
params={
|
|
1723
2609
|
"page": page,
|
|
1724
2610
|
"size": size,
|
|
2611
|
+
"version": version,
|
|
1725
2612
|
"filters": filters,
|
|
1726
2613
|
"truncate": truncate,
|
|
1727
2614
|
},
|
|
@@ -1849,3 +2736,357 @@ class AsyncRawDatasetsClient:
|
|
|
1849
2736
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1850
2737
|
|
|
1851
2738
|
yield await stream()
|
|
2739
|
+
|
|
2740
|
+
async def compare_dataset_versions(
|
|
2741
|
+
self, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
2742
|
+
) -> AsyncHttpResponse[DatasetVersionDiff]:
|
|
2743
|
+
"""
|
|
2744
|
+
Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
|
|
2745
|
+
|
|
2746
|
+
Parameters
|
|
2747
|
+
----------
|
|
2748
|
+
id : str
|
|
2749
|
+
|
|
2750
|
+
request_options : typing.Optional[RequestOptions]
|
|
2751
|
+
Request-specific configuration.
|
|
2752
|
+
|
|
2753
|
+
Returns
|
|
2754
|
+
-------
|
|
2755
|
+
AsyncHttpResponse[DatasetVersionDiff]
|
|
2756
|
+
Diff computed successfully
|
|
2757
|
+
"""
|
|
2758
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2759
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
|
|
2760
|
+
method="GET",
|
|
2761
|
+
request_options=request_options,
|
|
2762
|
+
)
|
|
2763
|
+
try:
|
|
2764
|
+
if 200 <= _response.status_code < 300:
|
|
2765
|
+
_data = typing.cast(
|
|
2766
|
+
DatasetVersionDiff,
|
|
2767
|
+
parse_obj_as(
|
|
2768
|
+
type_=DatasetVersionDiff, # type: ignore
|
|
2769
|
+
object_=_response.json(),
|
|
2770
|
+
),
|
|
2771
|
+
)
|
|
2772
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
2773
|
+
if _response.status_code == 404:
|
|
2774
|
+
raise NotFoundError(
|
|
2775
|
+
headers=dict(_response.headers),
|
|
2776
|
+
body=typing.cast(
|
|
2777
|
+
typing.Optional[typing.Any],
|
|
2778
|
+
parse_obj_as(
|
|
2779
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
2780
|
+
object_=_response.json(),
|
|
2781
|
+
),
|
|
2782
|
+
),
|
|
2783
|
+
)
|
|
2784
|
+
_response_json = _response.json()
|
|
2785
|
+
except JSONDecodeError:
|
|
2786
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
2787
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
2788
|
+
|
|
2789
|
+
async def create_version_tag(
    self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[None]:
    """
    Attach a tag to one specific dataset version so it can be referenced by
    name later (e.g. 'baseline', 'v1.0', 'production').

    Parameters
    ----------
    version_hash : str
        Hash identifying the dataset version to tag.

    id : str
        Dataset identifier.

    tag : str
        Tag name to attach to the version.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    http_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
        method="POST",
        json={
            "tag": tag,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    )
    # Known error statuses and the typed exceptions they map to.
    error_types: typing.Dict[int, typing.Any] = {
        400: BadRequestError,
        404: NotFoundError,
        409: ConflictError,
    }
    try:
        if 200 <= http_response.status_code < 300:
            return AsyncHttpResponse(response=http_response, data=None)
        error_cls = error_types.get(http_response.status_code)
        if error_cls is not None:
            raise error_cls(
                headers=dict(http_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=http_response.json(),
                    ),
                ),
            )
        decoded_body = http_response.json()
    except JSONDecodeError:
        # Body was not valid JSON: surface the raw text instead.
        raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=http_response.text)
    raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=decoded_body)
|
|
2862
|
+
|
|
2863
|
+
async def list_dataset_versions(
    self,
    id: str,
    *,
    page: typing.Optional[int] = None,
    size: typing.Optional[int] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[DatasetVersionPagePublic]:
    """
    Fetch a paginated list of versions for a dataset, ordered by creation
    time with the newest first.

    Parameters
    ----------
    id : str
        Dataset identifier.

    page : typing.Optional[int]
        Page number to fetch.

    size : typing.Optional[int]
        Number of versions per page.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetVersionPagePublic]
        Dataset versions
    """
    http_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/versions",
        method="GET",
        params={
            "page": page,
            "size": size,
        },
        request_options=request_options,
    )
    try:
        if 200 <= http_response.status_code < 300:
            # Success: deserialize the page payload into the public model.
            parsed_page = typing.cast(
                DatasetVersionPagePublic,
                parse_obj_as(
                    type_=DatasetVersionPagePublic,  # type: ignore
                    object_=http_response.json(),
                ),
            )
            return AsyncHttpResponse(response=http_response, data=parsed_page)
        if http_response.status_code == 400:
            raise BadRequestError(
                headers=dict(http_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=http_response.json(),
                    ),
                ),
            )
        decoded_body = http_response.json()
    except JSONDecodeError:
        # Body was not valid JSON: surface the raw text instead.
        raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=http_response.text)
    raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=decoded_body)
|
|
2924
|
+
|
|
2925
|
+
async def create_dataset_version(
    self,
    id: str,
    *,
    tag: typing.Optional[str] = OMIT,
    change_description: typing.Optional[str] = OMIT,
    metadata: typing.Optional[typing.Dict[str, str]] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Snapshot the dataset's current state as a new immutable version.

    Parameters
    ----------
    id : str
        Dataset identifier.

    tag : typing.Optional[str]
        Optional tag for this version

    change_description : typing.Optional[str]
        Optional description of changes in this version

    metadata : typing.Optional[typing.Dict[str, str]]
        Optional user-defined metadata

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    http_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/versions",
        method="POST",
        json={
            "tag": tag,
            "change_description": change_description,
            "metadata": metadata,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    )
    # Known error statuses and the typed exceptions they map to.
    error_types: typing.Dict[int, typing.Any] = {
        400: BadRequestError,
        409: ConflictError,
    }
    try:
        if 200 <= http_response.status_code < 300:
            return AsyncHttpResponse(response=http_response, data=None)
        error_cls = error_types.get(http_response.status_code)
        if error_cls is not None:
            raise error_cls(
                headers=dict(http_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=http_response.json(),
                    ),
                ),
            )
        decoded_body = http_response.json()
    except JSONDecodeError:
        # Body was not valid JSON: surface the raw text instead.
        raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=http_response.text)
    raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=decoded_body)
|
|
3000
|
+
|
|
3001
|
+
async def delete_version_tag(
    self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[None]:
    """
    Remove a tag from a dataset version. Only the tag reference is removed;
    the version itself is kept.

    Parameters
    ----------
    version_hash : str
        Hash identifying the dataset version the tag is attached to.

    tag : str
        Tag name to remove.

    id : str
        Dataset identifier.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    http_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
        method="DELETE",
        request_options=request_options,
    )
    try:
        if 200 <= http_response.status_code < 300:
            return AsyncHttpResponse(response=http_response, data=None)
        decoded_body = http_response.json()
    except JSONDecodeError:
        # Body was not valid JSON: surface the raw text instead.
        raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=http_response.text)
    raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=decoded_body)
|
|
3034
|
+
|
|
3035
|
+
async def restore_dataset_version(
    self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[DatasetVersionPublic]:
    """
    Restore the dataset to a previous version's state. All draft items are
    replaced with the items from the referenced version; when that version is
    not the latest, a new version snapshot is created, and when it is the
    latest, only the draft items are replaced (revert).

    Parameters
    ----------
    id : str
        Dataset identifier.

    version_ref : str
        Version hash or tag to restore from

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetVersionPublic]
        Version restored successfully
    """
    http_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
        method="POST",
        json={
            "version_ref": version_ref,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= http_response.status_code < 300:
            # Success: deserialize the restored-version payload.
            restored = typing.cast(
                DatasetVersionPublic,
                parse_obj_as(
                    type_=DatasetVersionPublic,  # type: ignore
                    object_=http_response.json(),
                ),
            )
            return AsyncHttpResponse(response=http_response, data=restored)
        if http_response.status_code == 404:
            raise NotFoundError(
                headers=dict(http_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=http_response.json(),
                    ),
                ),
            )
        decoded_body = http_response.json()
    except JSONDecodeError:
        # Body was not valid JSON: surface the raw text instead.
        raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=http_response.text)
    raise ApiError(status_code=http_response.status_code, headers=dict(http_response.headers), body=decoded_body)
|