opik 1.8.39__py3-none-any.whl → 1.9.71__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- opik/__init__.py +19 -3
- opik/anonymizer/__init__.py +5 -0
- opik/anonymizer/anonymizer.py +12 -0
- opik/anonymizer/factory.py +80 -0
- opik/anonymizer/recursive_anonymizer.py +64 -0
- opik/anonymizer/rules.py +56 -0
- opik/anonymizer/rules_anonymizer.py +35 -0
- opik/api_objects/attachment/attachment_context.py +36 -0
- opik/api_objects/attachment/attachments_extractor.py +153 -0
- opik/api_objects/attachment/client.py +1 -0
- opik/api_objects/attachment/converters.py +2 -0
- opik/api_objects/attachment/decoder.py +18 -0
- opik/api_objects/attachment/decoder_base64.py +83 -0
- opik/api_objects/attachment/decoder_helpers.py +137 -0
- opik/api_objects/data_helpers.py +79 -0
- opik/api_objects/dataset/dataset.py +64 -4
- opik/api_objects/dataset/rest_operations.py +11 -2
- opik/api_objects/experiment/experiment.py +57 -57
- opik/api_objects/experiment/experiment_item.py +2 -1
- opik/api_objects/experiment/experiments_client.py +64 -0
- opik/api_objects/experiment/helpers.py +35 -11
- opik/api_objects/experiment/rest_operations.py +65 -5
- opik/api_objects/helpers.py +8 -5
- opik/api_objects/local_recording.py +81 -0
- opik/api_objects/opik_client.py +600 -108
- opik/api_objects/opik_query_language.py +39 -5
- opik/api_objects/prompt/__init__.py +12 -2
- opik/api_objects/prompt/base_prompt.py +69 -0
- opik/api_objects/prompt/base_prompt_template.py +29 -0
- opik/api_objects/prompt/chat/__init__.py +1 -0
- opik/api_objects/prompt/chat/chat_prompt.py +210 -0
- opik/api_objects/prompt/chat/chat_prompt_template.py +350 -0
- opik/api_objects/prompt/chat/content_renderer_registry.py +203 -0
- opik/api_objects/prompt/client.py +189 -47
- opik/api_objects/prompt/text/__init__.py +1 -0
- opik/api_objects/prompt/text/prompt.py +174 -0
- opik/api_objects/prompt/{prompt_template.py → text/prompt_template.py} +10 -6
- opik/api_objects/prompt/types.py +23 -0
- opik/api_objects/search_helpers.py +89 -0
- opik/api_objects/span/span_data.py +35 -25
- opik/api_objects/threads/threads_client.py +39 -5
- opik/api_objects/trace/trace_client.py +52 -2
- opik/api_objects/trace/trace_data.py +15 -24
- opik/api_objects/validation_helpers.py +3 -3
- opik/cli/__init__.py +5 -0
- opik/cli/__main__.py +6 -0
- opik/cli/configure.py +66 -0
- opik/cli/exports/__init__.py +131 -0
- opik/cli/exports/dataset.py +278 -0
- opik/cli/exports/experiment.py +784 -0
- opik/cli/exports/project.py +685 -0
- opik/cli/exports/prompt.py +578 -0
- opik/cli/exports/utils.py +406 -0
- opik/cli/harbor.py +39 -0
- opik/cli/healthcheck.py +21 -0
- opik/cli/imports/__init__.py +439 -0
- opik/cli/imports/dataset.py +143 -0
- opik/cli/imports/experiment.py +1192 -0
- opik/cli/imports/project.py +262 -0
- opik/cli/imports/prompt.py +177 -0
- opik/cli/imports/utils.py +280 -0
- opik/cli/main.py +49 -0
- opik/cli/proxy.py +93 -0
- opik/cli/usage_report/__init__.py +16 -0
- opik/cli/usage_report/charts.py +783 -0
- opik/cli/usage_report/cli.py +274 -0
- opik/cli/usage_report/constants.py +9 -0
- opik/cli/usage_report/extraction.py +749 -0
- opik/cli/usage_report/pdf.py +244 -0
- opik/cli/usage_report/statistics.py +78 -0
- opik/cli/usage_report/utils.py +235 -0
- opik/config.py +13 -7
- opik/configurator/configure.py +17 -0
- opik/datetime_helpers.py +12 -0
- opik/decorator/arguments_helpers.py +9 -1
- opik/decorator/base_track_decorator.py +205 -133
- opik/decorator/context_manager/span_context_manager.py +123 -0
- opik/decorator/context_manager/trace_context_manager.py +84 -0
- opik/decorator/opik_args/__init__.py +13 -0
- opik/decorator/opik_args/api_classes.py +71 -0
- opik/decorator/opik_args/helpers.py +120 -0
- opik/decorator/span_creation_handler.py +25 -6
- opik/dict_utils.py +3 -3
- opik/evaluation/__init__.py +13 -2
- opik/evaluation/engine/engine.py +272 -75
- opik/evaluation/engine/evaluation_tasks_executor.py +6 -3
- opik/evaluation/engine/helpers.py +31 -6
- opik/evaluation/engine/metrics_evaluator.py +237 -0
- opik/evaluation/evaluation_result.py +168 -2
- opik/evaluation/evaluator.py +533 -62
- opik/evaluation/metrics/__init__.py +103 -4
- opik/evaluation/metrics/aggregated_metric.py +35 -6
- opik/evaluation/metrics/base_metric.py +1 -1
- opik/evaluation/metrics/conversation/__init__.py +48 -0
- opik/evaluation/metrics/conversation/conversation_thread_metric.py +56 -2
- opik/evaluation/metrics/conversation/g_eval_wrappers.py +19 -0
- opik/evaluation/metrics/conversation/helpers.py +14 -15
- opik/evaluation/metrics/conversation/heuristics/__init__.py +14 -0
- opik/evaluation/metrics/conversation/heuristics/degeneration/__init__.py +3 -0
- opik/evaluation/metrics/conversation/heuristics/degeneration/metric.py +189 -0
- opik/evaluation/metrics/conversation/heuristics/degeneration/phrases.py +12 -0
- opik/evaluation/metrics/conversation/heuristics/knowledge_retention/__init__.py +3 -0
- opik/evaluation/metrics/conversation/heuristics/knowledge_retention/metric.py +172 -0
- opik/evaluation/metrics/conversation/llm_judges/__init__.py +32 -0
- opik/evaluation/metrics/conversation/{conversational_coherence → llm_judges/conversational_coherence}/metric.py +22 -17
- opik/evaluation/metrics/conversation/{conversational_coherence → llm_judges/conversational_coherence}/templates.py +1 -1
- opik/evaluation/metrics/conversation/llm_judges/g_eval_wrappers.py +442 -0
- opik/evaluation/metrics/conversation/{session_completeness → llm_judges/session_completeness}/metric.py +13 -7
- opik/evaluation/metrics/conversation/{session_completeness → llm_judges/session_completeness}/templates.py +1 -1
- opik/evaluation/metrics/conversation/llm_judges/user_frustration/__init__.py +0 -0
- opik/evaluation/metrics/conversation/{user_frustration → llm_judges/user_frustration}/metric.py +21 -14
- opik/evaluation/metrics/conversation/{user_frustration → llm_judges/user_frustration}/templates.py +1 -1
- opik/evaluation/metrics/conversation/types.py +4 -5
- opik/evaluation/metrics/conversation_types.py +9 -0
- opik/evaluation/metrics/heuristics/bertscore.py +107 -0
- opik/evaluation/metrics/heuristics/bleu.py +35 -15
- opik/evaluation/metrics/heuristics/chrf.py +127 -0
- opik/evaluation/metrics/heuristics/contains.py +47 -11
- opik/evaluation/metrics/heuristics/distribution_metrics.py +331 -0
- opik/evaluation/metrics/heuristics/gleu.py +113 -0
- opik/evaluation/metrics/heuristics/language_adherence.py +123 -0
- opik/evaluation/metrics/heuristics/meteor.py +119 -0
- opik/evaluation/metrics/heuristics/prompt_injection.py +150 -0
- opik/evaluation/metrics/heuristics/readability.py +129 -0
- opik/evaluation/metrics/heuristics/rouge.py +26 -9
- opik/evaluation/metrics/heuristics/spearman.py +88 -0
- opik/evaluation/metrics/heuristics/tone.py +155 -0
- opik/evaluation/metrics/heuristics/vader_sentiment.py +77 -0
- opik/evaluation/metrics/llm_judges/answer_relevance/metric.py +20 -5
- opik/evaluation/metrics/llm_judges/context_precision/metric.py +20 -6
- opik/evaluation/metrics/llm_judges/context_recall/metric.py +20 -6
- opik/evaluation/metrics/llm_judges/g_eval/__init__.py +5 -0
- opik/evaluation/metrics/llm_judges/g_eval/metric.py +219 -68
- opik/evaluation/metrics/llm_judges/g_eval/parser.py +102 -52
- opik/evaluation/metrics/llm_judges/g_eval/presets.py +209 -0
- opik/evaluation/metrics/llm_judges/g_eval_presets/__init__.py +36 -0
- opik/evaluation/metrics/llm_judges/g_eval_presets/agent_assessment.py +77 -0
- opik/evaluation/metrics/llm_judges/g_eval_presets/bias_classifier.py +181 -0
- opik/evaluation/metrics/llm_judges/g_eval_presets/compliance_risk.py +41 -0
- opik/evaluation/metrics/llm_judges/g_eval_presets/prompt_uncertainty.py +41 -0
- opik/evaluation/metrics/llm_judges/g_eval_presets/qa_suite.py +146 -0
- opik/evaluation/metrics/llm_judges/hallucination/metric.py +16 -3
- opik/evaluation/metrics/llm_judges/llm_juries/__init__.py +3 -0
- opik/evaluation/metrics/llm_judges/llm_juries/metric.py +76 -0
- opik/evaluation/metrics/llm_judges/moderation/metric.py +16 -4
- opik/evaluation/metrics/llm_judges/structure_output_compliance/__init__.py +0 -0
- opik/evaluation/metrics/llm_judges/structure_output_compliance/metric.py +144 -0
- opik/evaluation/metrics/llm_judges/structure_output_compliance/parser.py +79 -0
- opik/evaluation/metrics/llm_judges/structure_output_compliance/schema.py +15 -0
- opik/evaluation/metrics/llm_judges/structure_output_compliance/template.py +50 -0
- opik/evaluation/metrics/llm_judges/syc_eval/__init__.py +0 -0
- opik/evaluation/metrics/llm_judges/syc_eval/metric.py +252 -0
- opik/evaluation/metrics/llm_judges/syc_eval/parser.py +82 -0
- opik/evaluation/metrics/llm_judges/syc_eval/template.py +155 -0
- opik/evaluation/metrics/llm_judges/trajectory_accuracy/metric.py +20 -5
- opik/evaluation/metrics/llm_judges/usefulness/metric.py +16 -4
- opik/evaluation/metrics/ragas_metric.py +43 -23
- opik/evaluation/models/__init__.py +8 -0
- opik/evaluation/models/base_model.py +107 -1
- opik/evaluation/models/langchain/langchain_chat_model.py +15 -7
- opik/evaluation/models/langchain/message_converters.py +97 -15
- opik/evaluation/models/litellm/litellm_chat_model.py +156 -29
- opik/evaluation/models/litellm/util.py +125 -0
- opik/evaluation/models/litellm/warning_filters.py +16 -4
- opik/evaluation/models/model_capabilities.py +187 -0
- opik/evaluation/models/models_factory.py +25 -3
- opik/evaluation/preprocessing.py +92 -0
- opik/evaluation/report.py +70 -12
- opik/evaluation/rest_operations.py +49 -45
- opik/evaluation/samplers/__init__.py +4 -0
- opik/evaluation/samplers/base_dataset_sampler.py +40 -0
- opik/evaluation/samplers/random_dataset_sampler.py +48 -0
- opik/evaluation/score_statistics.py +66 -0
- opik/evaluation/scorers/__init__.py +4 -0
- opik/evaluation/scorers/scorer_function.py +55 -0
- opik/evaluation/scorers/scorer_wrapper_metric.py +130 -0
- opik/evaluation/test_case.py +3 -2
- opik/evaluation/test_result.py +1 -0
- opik/evaluation/threads/evaluator.py +31 -3
- opik/evaluation/threads/helpers.py +3 -2
- opik/evaluation/types.py +9 -1
- opik/exceptions.py +33 -0
- opik/file_upload/file_uploader.py +13 -0
- opik/file_upload/upload_options.py +2 -0
- opik/hooks/__init__.py +23 -0
- opik/hooks/anonymizer_hook.py +36 -0
- opik/hooks/httpx_client_hook.py +112 -0
- opik/httpx_client.py +12 -9
- opik/id_helpers.py +18 -0
- opik/integrations/adk/graph/subgraph_edges_builders.py +1 -2
- opik/integrations/adk/helpers.py +16 -7
- opik/integrations/adk/legacy_opik_tracer.py +7 -4
- opik/integrations/adk/opik_tracer.py +14 -1
- opik/integrations/adk/patchers/adk_otel_tracer/opik_adk_otel_tracer.py +7 -3
- opik/integrations/adk/recursive_callback_injector.py +4 -7
- opik/integrations/bedrock/converse/__init__.py +0 -0
- opik/integrations/bedrock/converse/chunks_aggregator.py +188 -0
- opik/integrations/bedrock/{converse_decorator.py → converse/converse_decorator.py} +4 -3
- opik/integrations/bedrock/invoke_agent_decorator.py +5 -4
- opik/integrations/bedrock/invoke_model/__init__.py +0 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/__init__.py +78 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/api.py +45 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/base.py +23 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/claude.py +121 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/format_detector.py +107 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/llama.py +108 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/mistral.py +118 -0
- opik/integrations/bedrock/invoke_model/chunks_aggregator/nova.py +99 -0
- opik/integrations/bedrock/invoke_model/invoke_model_decorator.py +178 -0
- opik/integrations/bedrock/invoke_model/response_types.py +34 -0
- opik/integrations/bedrock/invoke_model/stream_wrappers.py +122 -0
- opik/integrations/bedrock/invoke_model/usage_converters.py +87 -0
- opik/integrations/bedrock/invoke_model/usage_extraction.py +108 -0
- opik/integrations/bedrock/opik_tracker.py +42 -4
- opik/integrations/bedrock/types.py +19 -0
- opik/integrations/crewai/crewai_decorator.py +8 -51
- opik/integrations/crewai/opik_tracker.py +31 -10
- opik/integrations/crewai/patchers/__init__.py +5 -0
- opik/integrations/crewai/patchers/flow.py +118 -0
- opik/integrations/crewai/patchers/litellm_completion.py +30 -0
- opik/integrations/crewai/patchers/llm_client.py +207 -0
- opik/integrations/dspy/callback.py +80 -17
- opik/integrations/dspy/parsers.py +168 -0
- opik/integrations/harbor/__init__.py +17 -0
- opik/integrations/harbor/experiment_service.py +269 -0
- opik/integrations/harbor/opik_tracker.py +528 -0
- opik/integrations/haystack/opik_connector.py +2 -2
- opik/integrations/haystack/opik_tracer.py +3 -7
- opik/integrations/langchain/__init__.py +3 -1
- opik/integrations/langchain/helpers.py +96 -0
- opik/integrations/langchain/langgraph_async_context_bridge.py +131 -0
- opik/integrations/langchain/langgraph_tracer_injector.py +88 -0
- opik/integrations/langchain/opik_encoder_extension.py +1 -1
- opik/integrations/langchain/opik_tracer.py +474 -229
- opik/integrations/litellm/__init__.py +5 -0
- opik/integrations/litellm/completion_chunks_aggregator.py +115 -0
- opik/integrations/litellm/litellm_completion_decorator.py +242 -0
- opik/integrations/litellm/opik_tracker.py +43 -0
- opik/integrations/litellm/stream_patchers.py +151 -0
- opik/integrations/llama_index/callback.py +146 -107
- opik/integrations/openai/agents/opik_tracing_processor.py +1 -2
- opik/integrations/openai/openai_chat_completions_decorator.py +2 -16
- opik/integrations/openai/opik_tracker.py +1 -1
- opik/integrations/sagemaker/auth.py +5 -1
- opik/llm_usage/google_usage.py +3 -1
- opik/llm_usage/opik_usage.py +7 -8
- opik/llm_usage/opik_usage_factory.py +4 -2
- opik/logging_messages.py +6 -0
- opik/message_processing/batching/base_batcher.py +14 -21
- opik/message_processing/batching/batch_manager.py +22 -10
- opik/message_processing/batching/batch_manager_constuctors.py +10 -0
- opik/message_processing/batching/batchers.py +59 -27
- opik/message_processing/batching/flushing_thread.py +0 -3
- opik/message_processing/emulation/__init__.py +0 -0
- opik/message_processing/emulation/emulator_message_processor.py +578 -0
- opik/message_processing/emulation/local_emulator_message_processor.py +140 -0
- opik/message_processing/emulation/models.py +162 -0
- opik/message_processing/encoder_helpers.py +79 -0
- opik/message_processing/messages.py +56 -1
- opik/message_processing/preprocessing/__init__.py +0 -0
- opik/message_processing/preprocessing/attachments_preprocessor.py +70 -0
- opik/message_processing/preprocessing/batching_preprocessor.py +53 -0
- opik/message_processing/preprocessing/constants.py +1 -0
- opik/message_processing/preprocessing/file_upload_preprocessor.py +38 -0
- opik/message_processing/preprocessing/preprocessor.py +36 -0
- opik/message_processing/processors/__init__.py +0 -0
- opik/message_processing/processors/attachments_extraction_processor.py +146 -0
- opik/message_processing/processors/message_processors.py +92 -0
- opik/message_processing/processors/message_processors_chain.py +96 -0
- opik/message_processing/{message_processors.py → processors/online_message_processor.py} +85 -29
- opik/message_processing/queue_consumer.py +9 -3
- opik/message_processing/streamer.py +71 -33
- opik/message_processing/streamer_constructors.py +43 -10
- opik/opik_context.py +16 -4
- opik/plugins/pytest/hooks.py +5 -3
- opik/rest_api/__init__.py +346 -15
- opik/rest_api/alerts/__init__.py +7 -0
- opik/rest_api/alerts/client.py +667 -0
- opik/rest_api/alerts/raw_client.py +1015 -0
- opik/rest_api/alerts/types/__init__.py +7 -0
- opik/rest_api/alerts/types/get_webhook_examples_request_alert_type.py +5 -0
- opik/rest_api/annotation_queues/__init__.py +4 -0
- opik/rest_api/annotation_queues/client.py +668 -0
- opik/rest_api/annotation_queues/raw_client.py +1019 -0
- opik/rest_api/automation_rule_evaluators/client.py +34 -2
- opik/rest_api/automation_rule_evaluators/raw_client.py +24 -0
- opik/rest_api/client.py +15 -0
- opik/rest_api/dashboards/__init__.py +4 -0
- opik/rest_api/dashboards/client.py +462 -0
- opik/rest_api/dashboards/raw_client.py +648 -0
- opik/rest_api/datasets/client.py +1310 -44
- opik/rest_api/datasets/raw_client.py +2269 -358
- opik/rest_api/experiments/__init__.py +2 -2
- opik/rest_api/experiments/client.py +191 -5
- opik/rest_api/experiments/raw_client.py +301 -7
- opik/rest_api/experiments/types/__init__.py +4 -1
- opik/rest_api/experiments/types/experiment_update_status.py +5 -0
- opik/rest_api/experiments/types/experiment_update_type.py +5 -0
- opik/rest_api/experiments/types/experiment_write_status.py +5 -0
- opik/rest_api/feedback_definitions/types/find_feedback_definitions_request_type.py +1 -1
- opik/rest_api/llm_provider_key/client.py +20 -0
- opik/rest_api/llm_provider_key/raw_client.py +20 -0
- opik/rest_api/llm_provider_key/types/provider_api_key_write_provider.py +1 -1
- opik/rest_api/manual_evaluation/__init__.py +4 -0
- opik/rest_api/manual_evaluation/client.py +347 -0
- opik/rest_api/manual_evaluation/raw_client.py +543 -0
- opik/rest_api/optimizations/client.py +145 -9
- opik/rest_api/optimizations/raw_client.py +237 -13
- opik/rest_api/optimizations/types/optimization_update_status.py +3 -1
- opik/rest_api/prompts/__init__.py +2 -2
- opik/rest_api/prompts/client.py +227 -6
- opik/rest_api/prompts/raw_client.py +331 -2
- opik/rest_api/prompts/types/__init__.py +3 -1
- opik/rest_api/prompts/types/create_prompt_version_detail_template_structure.py +5 -0
- opik/rest_api/prompts/types/prompt_write_template_structure.py +5 -0
- opik/rest_api/spans/__init__.py +0 -2
- opik/rest_api/spans/client.py +238 -76
- opik/rest_api/spans/raw_client.py +307 -95
- opik/rest_api/spans/types/__init__.py +0 -2
- opik/rest_api/traces/client.py +572 -161
- opik/rest_api/traces/raw_client.py +736 -229
- opik/rest_api/types/__init__.py +352 -17
- opik/rest_api/types/aggregation_data.py +1 -0
- opik/rest_api/types/alert.py +33 -0
- opik/rest_api/types/alert_alert_type.py +5 -0
- opik/rest_api/types/alert_page_public.py +24 -0
- opik/rest_api/types/alert_public.py +33 -0
- opik/rest_api/types/alert_public_alert_type.py +5 -0
- opik/rest_api/types/alert_trigger.py +27 -0
- opik/rest_api/types/alert_trigger_config.py +28 -0
- opik/rest_api/types/alert_trigger_config_public.py +28 -0
- opik/rest_api/types/alert_trigger_config_public_type.py +10 -0
- opik/rest_api/types/alert_trigger_config_type.py +10 -0
- opik/rest_api/types/alert_trigger_config_write.py +22 -0
- opik/rest_api/types/alert_trigger_config_write_type.py +10 -0
- opik/rest_api/types/alert_trigger_event_type.py +19 -0
- opik/rest_api/types/alert_trigger_public.py +27 -0
- opik/rest_api/types/alert_trigger_public_event_type.py +19 -0
- opik/rest_api/types/alert_trigger_write.py +23 -0
- opik/rest_api/types/alert_trigger_write_event_type.py +19 -0
- opik/rest_api/types/alert_write.py +28 -0
- opik/rest_api/types/alert_write_alert_type.py +5 -0
- opik/rest_api/types/annotation_queue.py +42 -0
- opik/rest_api/types/annotation_queue_batch.py +27 -0
- opik/rest_api/types/annotation_queue_item_ids.py +19 -0
- opik/rest_api/types/annotation_queue_page_public.py +28 -0
- opik/rest_api/types/annotation_queue_public.py +38 -0
- opik/rest_api/types/annotation_queue_public_scope.py +5 -0
- opik/rest_api/types/annotation_queue_reviewer.py +20 -0
- opik/rest_api/types/annotation_queue_reviewer_public.py +20 -0
- opik/rest_api/types/annotation_queue_scope.py +5 -0
- opik/rest_api/types/annotation_queue_write.py +31 -0
- opik/rest_api/types/annotation_queue_write_scope.py +5 -0
- opik/rest_api/types/audio_url.py +19 -0
- opik/rest_api/types/audio_url_public.py +19 -0
- opik/rest_api/types/audio_url_write.py +19 -0
- opik/rest_api/types/automation_rule_evaluator.py +62 -2
- opik/rest_api/types/automation_rule_evaluator_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_llm_as_judge_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_llm_as_judge_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_object_object_public.py +155 -0
- opik/rest_api/types/automation_rule_evaluator_page_public.py +3 -2
- opik/rest_api/types/automation_rule_evaluator_public.py +57 -2
- opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_public.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_llm_as_judge_write.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_user_defined_metric_python.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_user_defined_metric_python_public.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_span_user_defined_metric_python_write.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_llm_as_judge_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_trace_thread_user_defined_metric_python_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update.py +51 -1
- opik/rest_api/types/automation_rule_evaluator_update_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update_span_llm_as_judge.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_update_span_user_defined_metric_python.py +22 -0
- opik/rest_api/types/automation_rule_evaluator_update_trace_thread_llm_as_judge.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update_trace_thread_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_update_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_public.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_user_defined_metric_python_write.py +2 -0
- opik/rest_api/types/automation_rule_evaluator_write.py +51 -1
- opik/rest_api/types/boolean_feedback_definition.py +25 -0
- opik/rest_api/types/boolean_feedback_definition_create.py +20 -0
- opik/rest_api/types/boolean_feedback_definition_public.py +25 -0
- opik/rest_api/types/boolean_feedback_definition_update.py +20 -0
- opik/rest_api/types/boolean_feedback_detail.py +29 -0
- opik/rest_api/types/boolean_feedback_detail_create.py +29 -0
- opik/rest_api/types/boolean_feedback_detail_public.py +29 -0
- opik/rest_api/types/boolean_feedback_detail_update.py +29 -0
- opik/rest_api/types/dashboard_page_public.py +24 -0
- opik/rest_api/types/dashboard_public.py +30 -0
- opik/rest_api/types/dataset.py +4 -0
- opik/rest_api/types/dataset_expansion.py +42 -0
- opik/rest_api/types/dataset_expansion_response.py +39 -0
- opik/rest_api/types/dataset_item.py +2 -0
- opik/rest_api/types/dataset_item_changes_public.py +5 -0
- opik/rest_api/types/dataset_item_compare.py +2 -0
- opik/rest_api/types/dataset_item_filter.py +27 -0
- opik/rest_api/types/dataset_item_filter_operator.py +21 -0
- opik/rest_api/types/dataset_item_page_compare.py +5 -0
- opik/rest_api/types/dataset_item_page_public.py +5 -0
- opik/rest_api/types/dataset_item_public.py +2 -0
- opik/rest_api/types/dataset_item_update.py +39 -0
- opik/rest_api/types/dataset_item_write.py +1 -0
- opik/rest_api/types/dataset_public.py +4 -0
- opik/rest_api/types/dataset_public_status.py +5 -0
- opik/rest_api/types/dataset_status.py +5 -0
- opik/rest_api/types/dataset_version_diff.py +22 -0
- opik/rest_api/types/dataset_version_diff_stats.py +24 -0
- opik/rest_api/types/dataset_version_page_public.py +23 -0
- opik/rest_api/types/dataset_version_public.py +59 -0
- opik/rest_api/types/dataset_version_summary.py +46 -0
- opik/rest_api/types/dataset_version_summary_public.py +46 -0
- opik/rest_api/types/experiment.py +7 -2
- opik/rest_api/types/experiment_group_response.py +2 -0
- opik/rest_api/types/experiment_public.py +7 -2
- opik/rest_api/types/experiment_public_status.py +5 -0
- opik/rest_api/types/experiment_score.py +20 -0
- opik/rest_api/types/experiment_score_public.py +20 -0
- opik/rest_api/types/experiment_score_write.py +20 -0
- opik/rest_api/types/experiment_status.py +5 -0
- opik/rest_api/types/feedback.py +25 -1
- opik/rest_api/types/feedback_create.py +20 -1
- opik/rest_api/types/feedback_object_public.py +27 -1
- opik/rest_api/types/feedback_public.py +25 -1
- opik/rest_api/types/feedback_score_batch_item.py +2 -1
- opik/rest_api/types/feedback_score_batch_item_thread.py +2 -1
- opik/rest_api/types/feedback_score_public.py +4 -0
- opik/rest_api/types/feedback_update.py +20 -1
- opik/rest_api/types/group_content_with_aggregations.py +1 -0
- opik/rest_api/types/group_detail.py +19 -0
- opik/rest_api/types/group_details.py +20 -0
- opik/rest_api/types/guardrail.py +1 -0
- opik/rest_api/types/guardrail_write.py +1 -0
- opik/rest_api/types/ids_holder.py +19 -0
- opik/rest_api/types/image_url.py +20 -0
- opik/rest_api/types/image_url_public.py +20 -0
- opik/rest_api/types/image_url_write.py +20 -0
- opik/rest_api/types/llm_as_judge_message.py +5 -1
- opik/rest_api/types/llm_as_judge_message_content.py +26 -0
- opik/rest_api/types/llm_as_judge_message_content_public.py +26 -0
- opik/rest_api/types/llm_as_judge_message_content_write.py +26 -0
- opik/rest_api/types/llm_as_judge_message_public.py +5 -1
- opik/rest_api/types/llm_as_judge_message_write.py +5 -1
- opik/rest_api/types/llm_as_judge_model_parameters.py +3 -0
- opik/rest_api/types/llm_as_judge_model_parameters_public.py +3 -0
- opik/rest_api/types/llm_as_judge_model_parameters_write.py +3 -0
- opik/rest_api/types/manual_evaluation_request.py +38 -0
- opik/rest_api/types/manual_evaluation_request_entity_type.py +5 -0
- opik/rest_api/types/manual_evaluation_response.py +27 -0
- opik/rest_api/types/optimization.py +4 -2
- opik/rest_api/types/optimization_public.py +4 -2
- opik/rest_api/types/optimization_public_status.py +3 -1
- opik/rest_api/types/optimization_status.py +3 -1
- opik/rest_api/types/optimization_studio_config.py +27 -0
- opik/rest_api/types/optimization_studio_config_public.py +27 -0
- opik/rest_api/types/optimization_studio_config_write.py +27 -0
- opik/rest_api/types/optimization_studio_log.py +22 -0
- opik/rest_api/types/optimization_write.py +4 -2
- opik/rest_api/types/optimization_write_status.py +3 -1
- opik/rest_api/types/project.py +1 -0
- opik/rest_api/types/project_detailed.py +1 -0
- opik/rest_api/types/project_reference.py +31 -0
- opik/rest_api/types/project_reference_public.py +31 -0
- opik/rest_api/types/project_stats_summary_item.py +1 -0
- opik/rest_api/types/prompt.py +6 -0
- opik/rest_api/types/prompt_detail.py +6 -0
- opik/rest_api/types/prompt_detail_template_structure.py +5 -0
- opik/rest_api/types/prompt_public.py +6 -0
- opik/rest_api/types/prompt_public_template_structure.py +5 -0
- opik/rest_api/types/prompt_template_structure.py +5 -0
- opik/rest_api/types/prompt_version.py +3 -0
- opik/rest_api/types/prompt_version_detail.py +3 -0
- opik/rest_api/types/prompt_version_detail_template_structure.py +5 -0
- opik/rest_api/types/prompt_version_link.py +1 -0
- opik/rest_api/types/prompt_version_link_public.py +1 -0
- opik/rest_api/types/prompt_version_page_public.py +5 -0
- opik/rest_api/types/prompt_version_public.py +3 -0
- opik/rest_api/types/prompt_version_public_template_structure.py +5 -0
- opik/rest_api/types/prompt_version_template_structure.py +5 -0
- opik/rest_api/types/prompt_version_update.py +33 -0
- opik/rest_api/types/provider_api_key.py +9 -0
- opik/rest_api/types/provider_api_key_provider.py +1 -1
- opik/rest_api/types/provider_api_key_public.py +9 -0
- opik/rest_api/types/provider_api_key_public_provider.py +1 -1
- opik/rest_api/types/score_name.py +1 -0
- opik/rest_api/types/service_toggles_config.py +18 -0
- opik/rest_api/types/span.py +1 -2
- opik/rest_api/types/span_enrichment_options.py +31 -0
- opik/rest_api/types/span_experiment_item_bulk_write_view.py +1 -2
- opik/rest_api/types/span_filter.py +23 -0
- opik/rest_api/types/span_filter_operator.py +21 -0
- opik/rest_api/types/span_filter_write.py +23 -0
- opik/rest_api/types/span_filter_write_operator.py +21 -0
- opik/rest_api/types/span_llm_as_judge_code.py +27 -0
- opik/rest_api/types/span_llm_as_judge_code_public.py +27 -0
- opik/rest_api/types/span_llm_as_judge_code_write.py +27 -0
- opik/rest_api/types/span_public.py +1 -2
- opik/rest_api/types/span_update.py +46 -0
- opik/rest_api/types/span_user_defined_metric_python_code.py +20 -0
- opik/rest_api/types/span_user_defined_metric_python_code_public.py +20 -0
- opik/rest_api/types/span_user_defined_metric_python_code_write.py +20 -0
- opik/rest_api/types/span_write.py +1 -2
- opik/rest_api/types/studio_evaluation.py +20 -0
- opik/rest_api/types/studio_evaluation_public.py +20 -0
- opik/rest_api/types/studio_evaluation_write.py +20 -0
- opik/rest_api/types/studio_llm_model.py +21 -0
- opik/rest_api/types/studio_llm_model_public.py +21 -0
- opik/rest_api/types/studio_llm_model_write.py +21 -0
- opik/rest_api/types/studio_message.py +20 -0
- opik/rest_api/types/studio_message_public.py +20 -0
- opik/rest_api/types/studio_message_write.py +20 -0
- opik/rest_api/types/studio_metric.py +21 -0
- opik/rest_api/types/studio_metric_public.py +21 -0
- opik/rest_api/types/studio_metric_write.py +21 -0
- opik/rest_api/types/studio_optimizer.py +21 -0
- opik/rest_api/types/studio_optimizer_public.py +21 -0
- opik/rest_api/types/studio_optimizer_write.py +21 -0
- opik/rest_api/types/studio_prompt.py +20 -0
- opik/rest_api/types/studio_prompt_public.py +20 -0
- opik/rest_api/types/studio_prompt_write.py +20 -0
- opik/rest_api/types/trace.py +11 -2
- opik/rest_api/types/trace_enrichment_options.py +32 -0
- opik/rest_api/types/trace_experiment_item_bulk_write_view.py +1 -2
- opik/rest_api/types/trace_filter.py +23 -0
- opik/rest_api/types/trace_filter_operator.py +21 -0
- opik/rest_api/types/trace_filter_write.py +23 -0
- opik/rest_api/types/trace_filter_write_operator.py +21 -0
- opik/rest_api/types/trace_public.py +11 -2
- opik/rest_api/types/trace_thread_filter_write.py +23 -0
- opik/rest_api/types/trace_thread_filter_write_operator.py +21 -0
- opik/rest_api/types/trace_thread_identifier.py +1 -0
- opik/rest_api/types/trace_update.py +39 -0
- opik/rest_api/types/trace_write.py +1 -2
- opik/rest_api/types/value_entry.py +2 -0
- opik/rest_api/types/value_entry_compare.py +2 -0
- opik/rest_api/types/value_entry_experiment_item_bulk_write_view.py +2 -0
- opik/rest_api/types/value_entry_public.py +2 -0
- opik/rest_api/types/video_url.py +19 -0
- opik/rest_api/types/video_url_public.py +19 -0
- opik/rest_api/types/video_url_write.py +19 -0
- opik/rest_api/types/webhook.py +28 -0
- opik/rest_api/types/webhook_examples.py +19 -0
- opik/rest_api/types/webhook_public.py +28 -0
- opik/rest_api/types/webhook_test_result.py +23 -0
- opik/rest_api/types/webhook_test_result_status.py +5 -0
- opik/rest_api/types/webhook_write.py +23 -0
- opik/rest_api/types/welcome_wizard_tracking.py +22 -0
- opik/rest_api/types/workspace_configuration.py +5 -0
- opik/rest_api/welcome_wizard/__init__.py +4 -0
- opik/rest_api/welcome_wizard/client.py +195 -0
- opik/rest_api/welcome_wizard/raw_client.py +208 -0
- opik/rest_api/workspaces/client.py +14 -2
- opik/rest_api/workspaces/raw_client.py +10 -0
- opik/s3_httpx_client.py +14 -1
- opik/simulation/__init__.py +6 -0
- opik/simulation/simulated_user.py +99 -0
- opik/simulation/simulator.py +108 -0
- opik/synchronization.py +5 -6
- opik/{decorator/tracing_runtime_config.py → tracing_runtime_config.py} +6 -7
- opik/types.py +36 -0
- opik/validation/chat_prompt_messages.py +241 -0
- opik/validation/feedback_score.py +3 -3
- opik/validation/validator.py +28 -0
- opik-1.9.71.dist-info/METADATA +370 -0
- opik-1.9.71.dist-info/RECORD +1110 -0
- opik/api_objects/prompt/prompt.py +0 -112
- opik/cli.py +0 -193
- opik/hooks.py +0 -13
- opik/integrations/bedrock/chunks_aggregator.py +0 -55
- opik/integrations/bedrock/helpers.py +0 -8
- opik/rest_api/types/automation_rule_evaluator_object_public.py +0 -100
- opik/rest_api/types/json_node_experiment_item_bulk_write_view.py +0 -5
- opik-1.8.39.dist-info/METADATA +0 -339
- opik-1.8.39.dist-info/RECORD +0 -790
- /opik/{evaluation/metrics/conversation/conversational_coherence → decorator/context_manager}/__init__.py +0 -0
- /opik/evaluation/metrics/conversation/{session_completeness → llm_judges/conversational_coherence}/__init__.py +0 -0
- /opik/evaluation/metrics/conversation/{conversational_coherence → llm_judges/conversational_coherence}/schema.py +0 -0
- /opik/evaluation/metrics/conversation/{user_frustration → llm_judges/session_completeness}/__init__.py +0 -0
- /opik/evaluation/metrics/conversation/{session_completeness → llm_judges/session_completeness}/schema.py +0 -0
- /opik/evaluation/metrics/conversation/{user_frustration → llm_judges/user_frustration}/schema.py +0 -0
- /opik/integrations/bedrock/{stream_wrappers.py → converse/stream_wrappers.py} +0 -0
- /opik/rest_api/{spans/types → types}/span_update_type.py +0 -0
- {opik-1.8.39.dist-info → opik-1.9.71.dist-info}/WHEEL +0 -0
- {opik-1.8.39.dist-info → opik-1.9.71.dist-info}/entry_points.txt +0 -0
- {opik-1.8.39.dist-info → opik-1.9.71.dist-info}/licenses/LICENSE +0 -0
- {opik-1.8.39.dist-info → opik-1.9.71.dist-info}/top_level.txt +0 -0
|
@@ -11,13 +11,28 @@ from ..core.jsonable_encoder import jsonable_encoder
|
|
|
11
11
|
from ..core.pydantic_utilities import parse_obj_as
|
|
12
12
|
from ..core.request_options import RequestOptions
|
|
13
13
|
from ..core.serialization import convert_and_respect_annotation_metadata
|
|
14
|
+
from ..errors.bad_request_error import BadRequestError
|
|
15
|
+
from ..errors.conflict_error import ConflictError
|
|
16
|
+
from ..errors.not_found_error import NotFoundError
|
|
17
|
+
from ..types.dataset_expansion_response import DatasetExpansionResponse
|
|
18
|
+
from ..types.dataset_item_changes_public import DatasetItemChangesPublic
|
|
19
|
+
from ..types.dataset_item_filter import DatasetItemFilter
|
|
14
20
|
from ..types.dataset_item_page_compare import DatasetItemPageCompare
|
|
15
21
|
from ..types.dataset_item_page_public import DatasetItemPagePublic
|
|
16
22
|
from ..types.dataset_item_public import DatasetItemPublic
|
|
23
|
+
from ..types.dataset_item_update import DatasetItemUpdate
|
|
17
24
|
from ..types.dataset_item_write import DatasetItemWrite
|
|
25
|
+
from ..types.dataset_item_write_source import DatasetItemWriteSource
|
|
18
26
|
from ..types.dataset_page_public import DatasetPagePublic
|
|
19
27
|
from ..types.dataset_public import DatasetPublic
|
|
28
|
+
from ..types.dataset_version_diff import DatasetVersionDiff
|
|
29
|
+
from ..types.dataset_version_page_public import DatasetVersionPagePublic
|
|
30
|
+
from ..types.dataset_version_public import DatasetVersionPublic
|
|
31
|
+
from ..types.json_node import JsonNode
|
|
20
32
|
from ..types.page_columns import PageColumns
|
|
33
|
+
from ..types.project_stats_public import ProjectStatsPublic
|
|
34
|
+
from ..types.span_enrichment_options import SpanEnrichmentOptions
|
|
35
|
+
from ..types.trace_enrichment_options import TraceEnrichmentOptions
|
|
21
36
|
from .types.dataset_update_visibility import DatasetUpdateVisibility
|
|
22
37
|
from .types.dataset_write_visibility import DatasetWriteVisibility
|
|
23
38
|
|
|
@@ -29,6 +44,175 @@ class RawDatasetsClient:
|
|
|
29
44
|
def __init__(self, *, client_wrapper: SyncClientWrapper):
|
|
30
45
|
self._client_wrapper = client_wrapper
|
|
31
46
|
|
|
47
|
+
def apply_dataset_item_changes(
|
|
48
|
+
self,
|
|
49
|
+
id: str,
|
|
50
|
+
*,
|
|
51
|
+
request: DatasetItemChangesPublic,
|
|
52
|
+
override: typing.Optional[bool] = None,
|
|
53
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
54
|
+
) -> HttpResponse[DatasetVersionPublic]:
|
|
55
|
+
"""
|
|
56
|
+
Apply delta changes (add, edit, delete) to a dataset version with conflict detection.
|
|
57
|
+
|
|
58
|
+
This endpoint:
|
|
59
|
+
- Creates a new version with the applied changes
|
|
60
|
+
- Validates that baseVersion matches the latest version (unless override=true)
|
|
61
|
+
- Returns 409 Conflict if baseVersion is stale and override is not set
|
|
62
|
+
|
|
63
|
+
Use `override=true` query parameter to force version creation even with stale baseVersion.
|
|
64
|
+
|
|
65
|
+
Parameters
|
|
66
|
+
----------
|
|
67
|
+
id : str
|
|
68
|
+
|
|
69
|
+
request : DatasetItemChangesPublic
|
|
70
|
+
|
|
71
|
+
override : typing.Optional[bool]
|
|
72
|
+
|
|
73
|
+
request_options : typing.Optional[RequestOptions]
|
|
74
|
+
Request-specific configuration.
|
|
75
|
+
|
|
76
|
+
Returns
|
|
77
|
+
-------
|
|
78
|
+
HttpResponse[DatasetVersionPublic]
|
|
79
|
+
Version created successfully
|
|
80
|
+
"""
|
|
81
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
82
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/items/changes",
|
|
83
|
+
method="POST",
|
|
84
|
+
params={
|
|
85
|
+
"override": override,
|
|
86
|
+
},
|
|
87
|
+
json=request,
|
|
88
|
+
headers={
|
|
89
|
+
"content-type": "application/json",
|
|
90
|
+
},
|
|
91
|
+
request_options=request_options,
|
|
92
|
+
omit=OMIT,
|
|
93
|
+
)
|
|
94
|
+
try:
|
|
95
|
+
if 200 <= _response.status_code < 300:
|
|
96
|
+
_data = typing.cast(
|
|
97
|
+
DatasetVersionPublic,
|
|
98
|
+
parse_obj_as(
|
|
99
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
100
|
+
object_=_response.json(),
|
|
101
|
+
),
|
|
102
|
+
)
|
|
103
|
+
return HttpResponse(response=_response, data=_data)
|
|
104
|
+
if _response.status_code == 400:
|
|
105
|
+
raise BadRequestError(
|
|
106
|
+
headers=dict(_response.headers),
|
|
107
|
+
body=typing.cast(
|
|
108
|
+
typing.Optional[typing.Any],
|
|
109
|
+
parse_obj_as(
|
|
110
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
111
|
+
object_=_response.json(),
|
|
112
|
+
),
|
|
113
|
+
),
|
|
114
|
+
)
|
|
115
|
+
if _response.status_code == 404:
|
|
116
|
+
raise NotFoundError(
|
|
117
|
+
headers=dict(_response.headers),
|
|
118
|
+
body=typing.cast(
|
|
119
|
+
typing.Optional[typing.Any],
|
|
120
|
+
parse_obj_as(
|
|
121
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
122
|
+
object_=_response.json(),
|
|
123
|
+
),
|
|
124
|
+
),
|
|
125
|
+
)
|
|
126
|
+
if _response.status_code == 409:
|
|
127
|
+
raise ConflictError(
|
|
128
|
+
headers=dict(_response.headers),
|
|
129
|
+
body=typing.cast(
|
|
130
|
+
typing.Optional[typing.Any],
|
|
131
|
+
parse_obj_as(
|
|
132
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
133
|
+
object_=_response.json(),
|
|
134
|
+
),
|
|
135
|
+
),
|
|
136
|
+
)
|
|
137
|
+
_response_json = _response.json()
|
|
138
|
+
except JSONDecodeError:
|
|
139
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
140
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
141
|
+
|
|
142
|
+
def batch_update_dataset_items(
|
|
143
|
+
self,
|
|
144
|
+
*,
|
|
145
|
+
update: DatasetItemUpdate,
|
|
146
|
+
ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
147
|
+
filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
|
|
148
|
+
dataset_id: typing.Optional[str] = OMIT,
|
|
149
|
+
merge_tags: typing.Optional[bool] = OMIT,
|
|
150
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
151
|
+
) -> HttpResponse[None]:
|
|
152
|
+
"""
|
|
153
|
+
Update multiple dataset items
|
|
154
|
+
|
|
155
|
+
Parameters
|
|
156
|
+
----------
|
|
157
|
+
update : DatasetItemUpdate
|
|
158
|
+
|
|
159
|
+
ids : typing.Optional[typing.Sequence[str]]
|
|
160
|
+
List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
|
|
161
|
+
|
|
162
|
+
filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
|
|
163
|
+
|
|
164
|
+
dataset_id : typing.Optional[str]
|
|
165
|
+
Dataset ID. Required when using 'filters', optional when using 'ids'.
|
|
166
|
+
|
|
167
|
+
merge_tags : typing.Optional[bool]
|
|
168
|
+
If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
|
|
169
|
+
|
|
170
|
+
request_options : typing.Optional[RequestOptions]
|
|
171
|
+
Request-specific configuration.
|
|
172
|
+
|
|
173
|
+
Returns
|
|
174
|
+
-------
|
|
175
|
+
HttpResponse[None]
|
|
176
|
+
"""
|
|
177
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
178
|
+
"v1/private/datasets/items/batch",
|
|
179
|
+
method="PATCH",
|
|
180
|
+
json={
|
|
181
|
+
"ids": ids,
|
|
182
|
+
"filters": convert_and_respect_annotation_metadata(
|
|
183
|
+
object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
|
|
184
|
+
),
|
|
185
|
+
"dataset_id": dataset_id,
|
|
186
|
+
"update": convert_and_respect_annotation_metadata(
|
|
187
|
+
object_=update, annotation=DatasetItemUpdate, direction="write"
|
|
188
|
+
),
|
|
189
|
+
"merge_tags": merge_tags,
|
|
190
|
+
},
|
|
191
|
+
headers={
|
|
192
|
+
"content-type": "application/json",
|
|
193
|
+
},
|
|
194
|
+
request_options=request_options,
|
|
195
|
+
omit=OMIT,
|
|
196
|
+
)
|
|
197
|
+
try:
|
|
198
|
+
if 200 <= _response.status_code < 300:
|
|
199
|
+
return HttpResponse(response=_response, data=None)
|
|
200
|
+
if _response.status_code == 400:
|
|
201
|
+
raise BadRequestError(
|
|
202
|
+
headers=dict(_response.headers),
|
|
203
|
+
body=typing.cast(
|
|
204
|
+
typing.Optional[typing.Any],
|
|
205
|
+
parse_obj_as(
|
|
206
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
207
|
+
object_=_response.json(),
|
|
208
|
+
),
|
|
209
|
+
),
|
|
210
|
+
)
|
|
211
|
+
_response_json = _response.json()
|
|
212
|
+
except JSONDecodeError:
|
|
213
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
214
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
215
|
+
|
|
32
216
|
def find_datasets(
|
|
33
217
|
self,
|
|
34
218
|
*,
|
|
@@ -209,6 +393,170 @@ class RawDatasetsClient:
|
|
|
209
393
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
210
394
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
211
395
|
|
|
396
|
+
def create_dataset_items_from_csv(
|
|
397
|
+
self,
|
|
398
|
+
*,
|
|
399
|
+
file: typing.Dict[str, typing.Optional[typing.Any]],
|
|
400
|
+
dataset_id: str,
|
|
401
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
402
|
+
) -> HttpResponse[None]:
|
|
403
|
+
"""
|
|
404
|
+
Create dataset items from uploaded CSV file. CSV should have headers in the first row. Processing happens asynchronously in batches.
|
|
405
|
+
|
|
406
|
+
Parameters
|
|
407
|
+
----------
|
|
408
|
+
file : typing.Dict[str, typing.Optional[typing.Any]]
|
|
409
|
+
|
|
410
|
+
dataset_id : str
|
|
411
|
+
|
|
412
|
+
request_options : typing.Optional[RequestOptions]
|
|
413
|
+
Request-specific configuration.
|
|
414
|
+
|
|
415
|
+
Returns
|
|
416
|
+
-------
|
|
417
|
+
HttpResponse[None]
|
|
418
|
+
"""
|
|
419
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
420
|
+
"v1/private/datasets/items/from-csv",
|
|
421
|
+
method="POST",
|
|
422
|
+
data={
|
|
423
|
+
"file": file,
|
|
424
|
+
"dataset_id": dataset_id,
|
|
425
|
+
},
|
|
426
|
+
files={},
|
|
427
|
+
request_options=request_options,
|
|
428
|
+
omit=OMIT,
|
|
429
|
+
)
|
|
430
|
+
try:
|
|
431
|
+
if 200 <= _response.status_code < 300:
|
|
432
|
+
return HttpResponse(response=_response, data=None)
|
|
433
|
+
if _response.status_code == 400:
|
|
434
|
+
raise BadRequestError(
|
|
435
|
+
headers=dict(_response.headers),
|
|
436
|
+
body=typing.cast(
|
|
437
|
+
typing.Optional[typing.Any],
|
|
438
|
+
parse_obj_as(
|
|
439
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
440
|
+
object_=_response.json(),
|
|
441
|
+
),
|
|
442
|
+
),
|
|
443
|
+
)
|
|
444
|
+
if _response.status_code == 404:
|
|
445
|
+
raise NotFoundError(
|
|
446
|
+
headers=dict(_response.headers),
|
|
447
|
+
body=typing.cast(
|
|
448
|
+
typing.Optional[typing.Any],
|
|
449
|
+
parse_obj_as(
|
|
450
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
451
|
+
object_=_response.json(),
|
|
452
|
+
),
|
|
453
|
+
),
|
|
454
|
+
)
|
|
455
|
+
_response_json = _response.json()
|
|
456
|
+
except JSONDecodeError:
|
|
457
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
458
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
459
|
+
|
|
460
|
+
def create_dataset_items_from_spans(
|
|
461
|
+
self,
|
|
462
|
+
dataset_id: str,
|
|
463
|
+
*,
|
|
464
|
+
span_ids: typing.Sequence[str],
|
|
465
|
+
enrichment_options: SpanEnrichmentOptions,
|
|
466
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
467
|
+
) -> HttpResponse[None]:
|
|
468
|
+
"""
|
|
469
|
+
Create dataset items from spans with enriched metadata
|
|
470
|
+
|
|
471
|
+
Parameters
|
|
472
|
+
----------
|
|
473
|
+
dataset_id : str
|
|
474
|
+
|
|
475
|
+
span_ids : typing.Sequence[str]
|
|
476
|
+
Set of span IDs to add to the dataset
|
|
477
|
+
|
|
478
|
+
enrichment_options : SpanEnrichmentOptions
|
|
479
|
+
|
|
480
|
+
request_options : typing.Optional[RequestOptions]
|
|
481
|
+
Request-specific configuration.
|
|
482
|
+
|
|
483
|
+
Returns
|
|
484
|
+
-------
|
|
485
|
+
HttpResponse[None]
|
|
486
|
+
"""
|
|
487
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
488
|
+
f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
|
|
489
|
+
method="POST",
|
|
490
|
+
json={
|
|
491
|
+
"span_ids": span_ids,
|
|
492
|
+
"enrichment_options": convert_and_respect_annotation_metadata(
|
|
493
|
+
object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
|
|
494
|
+
),
|
|
495
|
+
},
|
|
496
|
+
headers={
|
|
497
|
+
"content-type": "application/json",
|
|
498
|
+
},
|
|
499
|
+
request_options=request_options,
|
|
500
|
+
omit=OMIT,
|
|
501
|
+
)
|
|
502
|
+
try:
|
|
503
|
+
if 200 <= _response.status_code < 300:
|
|
504
|
+
return HttpResponse(response=_response, data=None)
|
|
505
|
+
_response_json = _response.json()
|
|
506
|
+
except JSONDecodeError:
|
|
507
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
508
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
509
|
+
|
|
510
|
+
def create_dataset_items_from_traces(
|
|
511
|
+
self,
|
|
512
|
+
dataset_id: str,
|
|
513
|
+
*,
|
|
514
|
+
trace_ids: typing.Sequence[str],
|
|
515
|
+
enrichment_options: TraceEnrichmentOptions,
|
|
516
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
517
|
+
) -> HttpResponse[None]:
|
|
518
|
+
"""
|
|
519
|
+
Create dataset items from traces with enriched metadata
|
|
520
|
+
|
|
521
|
+
Parameters
|
|
522
|
+
----------
|
|
523
|
+
dataset_id : str
|
|
524
|
+
|
|
525
|
+
trace_ids : typing.Sequence[str]
|
|
526
|
+
Set of trace IDs to add to the dataset
|
|
527
|
+
|
|
528
|
+
enrichment_options : TraceEnrichmentOptions
|
|
529
|
+
|
|
530
|
+
request_options : typing.Optional[RequestOptions]
|
|
531
|
+
Request-specific configuration.
|
|
532
|
+
|
|
533
|
+
Returns
|
|
534
|
+
-------
|
|
535
|
+
HttpResponse[None]
|
|
536
|
+
"""
|
|
537
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
538
|
+
f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-traces",
|
|
539
|
+
method="POST",
|
|
540
|
+
json={
|
|
541
|
+
"trace_ids": trace_ids,
|
|
542
|
+
"enrichment_options": convert_and_respect_annotation_metadata(
|
|
543
|
+
object_=enrichment_options, annotation=TraceEnrichmentOptions, direction="write"
|
|
544
|
+
),
|
|
545
|
+
},
|
|
546
|
+
headers={
|
|
547
|
+
"content-type": "application/json",
|
|
548
|
+
},
|
|
549
|
+
request_options=request_options,
|
|
550
|
+
omit=OMIT,
|
|
551
|
+
)
|
|
552
|
+
try:
|
|
553
|
+
if 200 <= _response.status_code < 300:
|
|
554
|
+
return HttpResponse(response=_response, data=None)
|
|
555
|
+
_response_json = _response.json()
|
|
556
|
+
except JSONDecodeError:
|
|
557
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
558
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
559
|
+
|
|
212
560
|
def get_dataset_by_id(
|
|
213
561
|
self, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
214
562
|
) -> HttpResponse[DatasetPublic]:
|
|
@@ -368,14 +716,30 @@ class RawDatasetsClient:
|
|
|
368
716
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
369
717
|
|
|
370
718
|
def delete_dataset_items(
|
|
371
|
-
self,
|
|
719
|
+
self,
|
|
720
|
+
*,
|
|
721
|
+
item_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
722
|
+
dataset_id: typing.Optional[str] = OMIT,
|
|
723
|
+
filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
|
|
724
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
372
725
|
) -> HttpResponse[None]:
|
|
373
726
|
"""
|
|
374
|
-
Delete dataset items
|
|
727
|
+
Delete dataset items using one of two modes:
|
|
728
|
+
1. **Delete by IDs**: Provide 'item_ids' to delete specific items by their IDs
|
|
729
|
+
2. **Delete by filters**: Provide 'dataset_id' with optional 'filters' to delete items matching criteria
|
|
730
|
+
|
|
731
|
+
When using filters, an empty 'filters' array will delete all items in the specified dataset.
|
|
375
732
|
|
|
376
733
|
Parameters
|
|
377
734
|
----------
|
|
378
|
-
item_ids : typing.Sequence[str]
|
|
735
|
+
item_ids : typing.Optional[typing.Sequence[str]]
|
|
736
|
+
List of dataset item IDs to delete (max 1000). Use this to delete specific items by their IDs. Mutually exclusive with 'dataset_id' and 'filters'.
|
|
737
|
+
|
|
738
|
+
dataset_id : typing.Optional[str]
|
|
739
|
+
Dataset ID to scope the deletion. Required when using 'filters'. Mutually exclusive with 'item_ids'.
|
|
740
|
+
|
|
741
|
+
filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
|
|
742
|
+
Filters to select dataset items to delete within the specified dataset. Must be used with 'dataset_id'. Mutually exclusive with 'item_ids'. Empty array means 'delete all items in the dataset'.
|
|
379
743
|
|
|
380
744
|
request_options : typing.Optional[RequestOptions]
|
|
381
745
|
Request-specific configuration.
|
|
@@ -389,6 +753,10 @@ class RawDatasetsClient:
|
|
|
389
753
|
method="POST",
|
|
390
754
|
json={
|
|
391
755
|
"item_ids": item_ids,
|
|
756
|
+
"dataset_id": dataset_id,
|
|
757
|
+
"filters": convert_and_respect_annotation_metadata(
|
|
758
|
+
object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
|
|
759
|
+
),
|
|
392
760
|
},
|
|
393
761
|
headers={
|
|
394
762
|
"content-type": "application/json",
|
|
@@ -399,6 +767,17 @@ class RawDatasetsClient:
|
|
|
399
767
|
try:
|
|
400
768
|
if 200 <= _response.status_code < 300:
|
|
401
769
|
return HttpResponse(response=_response, data=None)
|
|
770
|
+
if _response.status_code == 400:
|
|
771
|
+
raise BadRequestError(
|
|
772
|
+
headers=dict(_response.headers),
|
|
773
|
+
body=typing.cast(
|
|
774
|
+
typing.Optional[typing.Any],
|
|
775
|
+
parse_obj_as(
|
|
776
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
777
|
+
object_=_response.json(),
|
|
778
|
+
),
|
|
779
|
+
),
|
|
780
|
+
)
|
|
402
781
|
_response_json = _response.json()
|
|
403
782
|
except JSONDecodeError:
|
|
404
783
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
@@ -441,60 +820,69 @@ class RawDatasetsClient:
|
|
|
441
820
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
442
821
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
443
822
|
|
|
444
|
-
def
|
|
823
|
+
def expand_dataset(
|
|
445
824
|
self,
|
|
446
825
|
id: str,
|
|
447
826
|
*,
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
827
|
+
model: str,
|
|
828
|
+
sample_count: typing.Optional[int] = OMIT,
|
|
829
|
+
preserve_fields: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
830
|
+
variation_instructions: typing.Optional[str] = OMIT,
|
|
831
|
+
custom_prompt: typing.Optional[str] = OMIT,
|
|
453
832
|
request_options: typing.Optional[RequestOptions] = None,
|
|
454
|
-
) -> HttpResponse[
|
|
833
|
+
) -> HttpResponse[DatasetExpansionResponse]:
|
|
455
834
|
"""
|
|
456
|
-
|
|
835
|
+
Generate synthetic dataset samples using LLM based on existing data patterns
|
|
457
836
|
|
|
458
837
|
Parameters
|
|
459
838
|
----------
|
|
460
839
|
id : str
|
|
461
840
|
|
|
462
|
-
|
|
841
|
+
model : str
|
|
842
|
+
The model to use for synthetic data generation
|
|
463
843
|
|
|
464
|
-
|
|
844
|
+
sample_count : typing.Optional[int]
|
|
845
|
+
Number of synthetic samples to generate
|
|
465
846
|
|
|
466
|
-
|
|
847
|
+
preserve_fields : typing.Optional[typing.Sequence[str]]
|
|
848
|
+
Fields to preserve patterns from original data
|
|
467
849
|
|
|
468
|
-
|
|
850
|
+
variation_instructions : typing.Optional[str]
|
|
851
|
+
Additional instructions for data variation
|
|
469
852
|
|
|
470
|
-
|
|
853
|
+
custom_prompt : typing.Optional[str]
|
|
854
|
+
Custom prompt to use for generation instead of auto-generated one
|
|
471
855
|
|
|
472
856
|
request_options : typing.Optional[RequestOptions]
|
|
473
857
|
Request-specific configuration.
|
|
474
858
|
|
|
475
859
|
Returns
|
|
476
860
|
-------
|
|
477
|
-
HttpResponse[
|
|
478
|
-
|
|
861
|
+
HttpResponse[DatasetExpansionResponse]
|
|
862
|
+
Generated synthetic samples
|
|
479
863
|
"""
|
|
480
864
|
_response = self._client_wrapper.httpx_client.request(
|
|
481
|
-
f"v1/private/datasets/{jsonable_encoder(id)}/
|
|
482
|
-
method="
|
|
483
|
-
|
|
484
|
-
"
|
|
485
|
-
"
|
|
486
|
-
"
|
|
487
|
-
"
|
|
488
|
-
"
|
|
865
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/expansions",
|
|
866
|
+
method="POST",
|
|
867
|
+
json={
|
|
868
|
+
"model": model,
|
|
869
|
+
"sample_count": sample_count,
|
|
870
|
+
"preserve_fields": preserve_fields,
|
|
871
|
+
"variation_instructions": variation_instructions,
|
|
872
|
+
"custom_prompt": custom_prompt,
|
|
873
|
+
},
|
|
874
|
+
headers={
|
|
875
|
+
"content-type": "application/json",
|
|
489
876
|
},
|
|
490
877
|
request_options=request_options,
|
|
878
|
+
omit=OMIT,
|
|
491
879
|
)
|
|
492
880
|
try:
|
|
493
881
|
if 200 <= _response.status_code < 300:
|
|
494
882
|
_data = typing.cast(
|
|
495
|
-
|
|
883
|
+
DatasetExpansionResponse,
|
|
496
884
|
parse_obj_as(
|
|
497
|
-
type_=
|
|
885
|
+
type_=DatasetExpansionResponse, # type: ignore
|
|
498
886
|
object_=_response.json(),
|
|
499
887
|
),
|
|
500
888
|
)
|
|
@@ -504,15 +892,86 @@ class RawDatasetsClient:
|
|
|
504
892
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
505
893
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
506
894
|
|
|
507
|
-
def
|
|
508
|
-
self,
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
895
|
+
def find_dataset_items_with_experiment_items(
|
|
896
|
+
self,
|
|
897
|
+
id: str,
|
|
898
|
+
*,
|
|
899
|
+
experiment_ids: str,
|
|
900
|
+
page: typing.Optional[int] = None,
|
|
901
|
+
size: typing.Optional[int] = None,
|
|
902
|
+
filters: typing.Optional[str] = None,
|
|
903
|
+
sorting: typing.Optional[str] = None,
|
|
904
|
+
search: typing.Optional[str] = None,
|
|
905
|
+
truncate: typing.Optional[bool] = None,
|
|
906
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
907
|
+
) -> HttpResponse[DatasetItemPageCompare]:
|
|
908
|
+
"""
|
|
909
|
+
Find dataset items with experiment items
|
|
910
|
+
|
|
911
|
+
Parameters
|
|
912
|
+
----------
|
|
913
|
+
id : str
|
|
914
|
+
|
|
915
|
+
experiment_ids : str
|
|
916
|
+
|
|
917
|
+
page : typing.Optional[int]
|
|
918
|
+
|
|
919
|
+
size : typing.Optional[int]
|
|
920
|
+
|
|
921
|
+
filters : typing.Optional[str]
|
|
922
|
+
|
|
923
|
+
sorting : typing.Optional[str]
|
|
924
|
+
|
|
925
|
+
search : typing.Optional[str]
|
|
926
|
+
|
|
927
|
+
truncate : typing.Optional[bool]
|
|
928
|
+
|
|
929
|
+
request_options : typing.Optional[RequestOptions]
|
|
930
|
+
Request-specific configuration.
|
|
931
|
+
|
|
932
|
+
Returns
|
|
933
|
+
-------
|
|
934
|
+
HttpResponse[DatasetItemPageCompare]
|
|
935
|
+
Dataset item resource
|
|
936
|
+
"""
|
|
937
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
938
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items",
|
|
939
|
+
method="GET",
|
|
940
|
+
params={
|
|
941
|
+
"page": page,
|
|
942
|
+
"size": size,
|
|
943
|
+
"experiment_ids": experiment_ids,
|
|
944
|
+
"filters": filters,
|
|
945
|
+
"sorting": sorting,
|
|
946
|
+
"search": search,
|
|
947
|
+
"truncate": truncate,
|
|
948
|
+
},
|
|
949
|
+
request_options=request_options,
|
|
950
|
+
)
|
|
951
|
+
try:
|
|
952
|
+
if 200 <= _response.status_code < 300:
|
|
953
|
+
_data = typing.cast(
|
|
954
|
+
DatasetItemPageCompare,
|
|
955
|
+
parse_obj_as(
|
|
956
|
+
type_=DatasetItemPageCompare, # type: ignore
|
|
957
|
+
object_=_response.json(),
|
|
958
|
+
),
|
|
959
|
+
)
|
|
960
|
+
return HttpResponse(response=_response, data=_data)
|
|
961
|
+
_response_json = _response.json()
|
|
962
|
+
except JSONDecodeError:
|
|
963
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
964
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
965
|
+
|
|
966
|
+
def get_dataset_by_identifier(
|
|
967
|
+
self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
|
|
968
|
+
) -> HttpResponse[DatasetPublic]:
|
|
969
|
+
"""
|
|
970
|
+
Get dataset by name
|
|
971
|
+
|
|
972
|
+
Parameters
|
|
973
|
+
----------
|
|
974
|
+
dataset_name : str
|
|
516
975
|
|
|
517
976
|
request_options : typing.Optional[RequestOptions]
|
|
518
977
|
Request-specific configuration.
|
|
@@ -549,6 +1008,57 @@ class RawDatasetsClient:
|
|
|
549
1008
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
550
1009
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
551
1010
|
|
|
1011
|
+
def get_dataset_experiment_items_stats(
|
|
1012
|
+
self,
|
|
1013
|
+
id: str,
|
|
1014
|
+
*,
|
|
1015
|
+
experiment_ids: str,
|
|
1016
|
+
filters: typing.Optional[str] = None,
|
|
1017
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1018
|
+
) -> HttpResponse[ProjectStatsPublic]:
|
|
1019
|
+
"""
|
|
1020
|
+
Get experiment items stats for dataset
|
|
1021
|
+
|
|
1022
|
+
Parameters
|
|
1023
|
+
----------
|
|
1024
|
+
id : str
|
|
1025
|
+
|
|
1026
|
+
experiment_ids : str
|
|
1027
|
+
|
|
1028
|
+
filters : typing.Optional[str]
|
|
1029
|
+
|
|
1030
|
+
request_options : typing.Optional[RequestOptions]
|
|
1031
|
+
Request-specific configuration.
|
|
1032
|
+
|
|
1033
|
+
Returns
|
|
1034
|
+
-------
|
|
1035
|
+
HttpResponse[ProjectStatsPublic]
|
|
1036
|
+
Experiment items stats resource
|
|
1037
|
+
"""
|
|
1038
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1039
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/stats",
|
|
1040
|
+
method="GET",
|
|
1041
|
+
params={
|
|
1042
|
+
"experiment_ids": experiment_ids,
|
|
1043
|
+
"filters": filters,
|
|
1044
|
+
},
|
|
1045
|
+
request_options=request_options,
|
|
1046
|
+
)
|
|
1047
|
+
try:
|
|
1048
|
+
if 200 <= _response.status_code < 300:
|
|
1049
|
+
_data = typing.cast(
|
|
1050
|
+
ProjectStatsPublic,
|
|
1051
|
+
parse_obj_as(
|
|
1052
|
+
type_=ProjectStatsPublic, # type: ignore
|
|
1053
|
+
object_=_response.json(),
|
|
1054
|
+
),
|
|
1055
|
+
)
|
|
1056
|
+
return HttpResponse(response=_response, data=_data)
|
|
1057
|
+
_response_json = _response.json()
|
|
1058
|
+
except JSONDecodeError:
|
|
1059
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1060
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1061
|
+
|
|
552
1062
|
def get_dataset_item_by_id(
|
|
553
1063
|
self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
554
1064
|
) -> HttpResponse[DatasetItemPublic]:
|
|
@@ -587,12 +1097,88 @@ class RawDatasetsClient:
|
|
|
587
1097
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
588
1098
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
589
1099
|
|
|
1100
|
+
def patch_dataset_item(
|
|
1101
|
+
self,
|
|
1102
|
+
item_id: str,
|
|
1103
|
+
*,
|
|
1104
|
+
source: DatasetItemWriteSource,
|
|
1105
|
+
data: JsonNode,
|
|
1106
|
+
id: typing.Optional[str] = OMIT,
|
|
1107
|
+
trace_id: typing.Optional[str] = OMIT,
|
|
1108
|
+
span_id: typing.Optional[str] = OMIT,
|
|
1109
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1110
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1111
|
+
) -> HttpResponse[None]:
|
|
1112
|
+
"""
|
|
1113
|
+
Partially update dataset item by id. Only provided fields will be updated.
|
|
1114
|
+
|
|
1115
|
+
Parameters
|
|
1116
|
+
----------
|
|
1117
|
+
item_id : str
|
|
1118
|
+
|
|
1119
|
+
source : DatasetItemWriteSource
|
|
1120
|
+
|
|
1121
|
+
data : JsonNode
|
|
1122
|
+
|
|
1123
|
+
id : typing.Optional[str]
|
|
1124
|
+
|
|
1125
|
+
trace_id : typing.Optional[str]
|
|
1126
|
+
|
|
1127
|
+
span_id : typing.Optional[str]
|
|
1128
|
+
|
|
1129
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
1130
|
+
|
|
1131
|
+
request_options : typing.Optional[RequestOptions]
|
|
1132
|
+
Request-specific configuration.
|
|
1133
|
+
|
|
1134
|
+
Returns
|
|
1135
|
+
-------
|
|
1136
|
+
HttpResponse[None]
|
|
1137
|
+
"""
|
|
1138
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1139
|
+
f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
|
|
1140
|
+
method="PATCH",
|
|
1141
|
+
json={
|
|
1142
|
+
"id": id,
|
|
1143
|
+
"trace_id": trace_id,
|
|
1144
|
+
"span_id": span_id,
|
|
1145
|
+
"source": source,
|
|
1146
|
+
"data": data,
|
|
1147
|
+
"tags": tags,
|
|
1148
|
+
},
|
|
1149
|
+
headers={
|
|
1150
|
+
"content-type": "application/json",
|
|
1151
|
+
},
|
|
1152
|
+
request_options=request_options,
|
|
1153
|
+
omit=OMIT,
|
|
1154
|
+
)
|
|
1155
|
+
try:
|
|
1156
|
+
if 200 <= _response.status_code < 300:
|
|
1157
|
+
return HttpResponse(response=_response, data=None)
|
|
1158
|
+
if _response.status_code == 404:
|
|
1159
|
+
raise NotFoundError(
|
|
1160
|
+
headers=dict(_response.headers),
|
|
1161
|
+
body=typing.cast(
|
|
1162
|
+
typing.Optional[typing.Any],
|
|
1163
|
+
parse_obj_as(
|
|
1164
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1165
|
+
object_=_response.json(),
|
|
1166
|
+
),
|
|
1167
|
+
),
|
|
1168
|
+
)
|
|
1169
|
+
_response_json = _response.json()
|
|
1170
|
+
except JSONDecodeError:
|
|
1171
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1172
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1173
|
+
|
|
590
1174
|
def get_dataset_items(
|
|
591
1175
|
self,
|
|
592
1176
|
id: str,
|
|
593
1177
|
*,
|
|
594
1178
|
page: typing.Optional[int] = None,
|
|
595
1179
|
size: typing.Optional[int] = None,
|
|
1180
|
+
version: typing.Optional[str] = None,
|
|
1181
|
+
filters: typing.Optional[str] = None,
|
|
596
1182
|
truncate: typing.Optional[bool] = None,
|
|
597
1183
|
request_options: typing.Optional[RequestOptions] = None,
|
|
598
1184
|
) -> HttpResponse[DatasetItemPagePublic]:
|
|
@@ -607,6 +1193,10 @@ class RawDatasetsClient:
|
|
|
607
1193
|
|
|
608
1194
|
size : typing.Optional[int]
|
|
609
1195
|
|
|
1196
|
+
version : typing.Optional[str]
|
|
1197
|
+
|
|
1198
|
+
filters : typing.Optional[str]
|
|
1199
|
+
|
|
610
1200
|
truncate : typing.Optional[bool]
|
|
611
1201
|
|
|
612
1202
|
request_options : typing.Optional[RequestOptions]
|
|
@@ -623,6 +1213,8 @@ class RawDatasetsClient:
|
|
|
623
1213
|
params={
|
|
624
1214
|
"page": page,
|
|
625
1215
|
"size": size,
|
|
1216
|
+
"version": version,
|
|
1217
|
+
"filters": filters,
|
|
626
1218
|
"truncate": truncate,
|
|
627
1219
|
},
|
|
628
1220
|
request_options=request_options,
|
|
@@ -749,124 +1341,81 @@ class RawDatasetsClient:
|
|
|
749
1341
|
|
|
750
1342
|
yield stream()
|
|
751
1343
|
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
self._client_wrapper = client_wrapper
|
|
756
|
-
|
|
757
|
-
async def find_datasets(
|
|
758
|
-
self,
|
|
759
|
-
*,
|
|
760
|
-
page: typing.Optional[int] = None,
|
|
761
|
-
size: typing.Optional[int] = None,
|
|
762
|
-
with_experiments_only: typing.Optional[bool] = None,
|
|
763
|
-
with_optimizations_only: typing.Optional[bool] = None,
|
|
764
|
-
prompt_id: typing.Optional[str] = None,
|
|
765
|
-
name: typing.Optional[str] = None,
|
|
766
|
-
sorting: typing.Optional[str] = None,
|
|
767
|
-
filters: typing.Optional[str] = None,
|
|
768
|
-
request_options: typing.Optional[RequestOptions] = None,
|
|
769
|
-
) -> AsyncHttpResponse[DatasetPagePublic]:
|
|
1344
|
+
def compare_dataset_versions(
|
|
1345
|
+
self, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1346
|
+
) -> HttpResponse[DatasetVersionDiff]:
|
|
770
1347
|
"""
|
|
771
|
-
|
|
1348
|
+
Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
|
|
772
1349
|
|
|
773
1350
|
Parameters
|
|
774
1351
|
----------
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
size : typing.Optional[int]
|
|
778
|
-
|
|
779
|
-
with_experiments_only : typing.Optional[bool]
|
|
780
|
-
|
|
781
|
-
with_optimizations_only : typing.Optional[bool]
|
|
782
|
-
|
|
783
|
-
prompt_id : typing.Optional[str]
|
|
784
|
-
|
|
785
|
-
name : typing.Optional[str]
|
|
786
|
-
|
|
787
|
-
sorting : typing.Optional[str]
|
|
788
|
-
|
|
789
|
-
filters : typing.Optional[str]
|
|
1352
|
+
id : str
|
|
790
1353
|
|
|
791
1354
|
request_options : typing.Optional[RequestOptions]
|
|
792
1355
|
Request-specific configuration.
|
|
793
1356
|
|
|
794
1357
|
Returns
|
|
795
1358
|
-------
|
|
796
|
-
|
|
797
|
-
|
|
1359
|
+
HttpResponse[DatasetVersionDiff]
|
|
1360
|
+
Diff computed successfully
|
|
798
1361
|
"""
|
|
799
|
-
_response =
|
|
800
|
-
"v1/private/datasets",
|
|
1362
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1363
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
|
|
801
1364
|
method="GET",
|
|
802
|
-
params={
|
|
803
|
-
"page": page,
|
|
804
|
-
"size": size,
|
|
805
|
-
"with_experiments_only": with_experiments_only,
|
|
806
|
-
"with_optimizations_only": with_optimizations_only,
|
|
807
|
-
"prompt_id": prompt_id,
|
|
808
|
-
"name": name,
|
|
809
|
-
"sorting": sorting,
|
|
810
|
-
"filters": filters,
|
|
811
|
-
},
|
|
812
1365
|
request_options=request_options,
|
|
813
1366
|
)
|
|
814
1367
|
try:
|
|
815
1368
|
if 200 <= _response.status_code < 300:
|
|
816
1369
|
_data = typing.cast(
|
|
817
|
-
|
|
1370
|
+
DatasetVersionDiff,
|
|
818
1371
|
parse_obj_as(
|
|
819
|
-
type_=
|
|
1372
|
+
type_=DatasetVersionDiff, # type: ignore
|
|
820
1373
|
object_=_response.json(),
|
|
821
1374
|
),
|
|
822
1375
|
)
|
|
823
|
-
return
|
|
1376
|
+
return HttpResponse(response=_response, data=_data)
|
|
1377
|
+
if _response.status_code == 404:
|
|
1378
|
+
raise NotFoundError(
|
|
1379
|
+
headers=dict(_response.headers),
|
|
1380
|
+
body=typing.cast(
|
|
1381
|
+
typing.Optional[typing.Any],
|
|
1382
|
+
parse_obj_as(
|
|
1383
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1384
|
+
object_=_response.json(),
|
|
1385
|
+
),
|
|
1386
|
+
),
|
|
1387
|
+
)
|
|
824
1388
|
_response_json = _response.json()
|
|
825
1389
|
except JSONDecodeError:
|
|
826
1390
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
827
1391
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
828
1392
|
|
|
829
|
-
|
|
830
|
-
self,
|
|
831
|
-
|
|
832
|
-
name: str,
|
|
833
|
-
id: typing.Optional[str] = OMIT,
|
|
834
|
-
visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
|
|
835
|
-
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
836
|
-
description: typing.Optional[str] = OMIT,
|
|
837
|
-
request_options: typing.Optional[RequestOptions] = None,
|
|
838
|
-
) -> AsyncHttpResponse[None]:
|
|
1393
|
+
def create_version_tag(
|
|
1394
|
+
self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
|
|
1395
|
+
) -> HttpResponse[None]:
|
|
839
1396
|
"""
|
|
840
|
-
|
|
1397
|
+
Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')
|
|
841
1398
|
|
|
842
1399
|
Parameters
|
|
843
1400
|
----------
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
id : typing.Optional[str]
|
|
847
|
-
|
|
848
|
-
visibility : typing.Optional[DatasetWriteVisibility]
|
|
1401
|
+
version_hash : str
|
|
849
1402
|
|
|
850
|
-
|
|
1403
|
+
id : str
|
|
851
1404
|
|
|
852
|
-
|
|
1405
|
+
tag : str
|
|
853
1406
|
|
|
854
1407
|
request_options : typing.Optional[RequestOptions]
|
|
855
1408
|
Request-specific configuration.
|
|
856
1409
|
|
|
857
1410
|
Returns
|
|
858
1411
|
-------
|
|
859
|
-
|
|
1412
|
+
HttpResponse[None]
|
|
860
1413
|
"""
|
|
861
|
-
_response =
|
|
862
|
-
"v1/private/datasets",
|
|
1414
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1415
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
|
|
863
1416
|
method="POST",
|
|
864
1417
|
json={
|
|
865
|
-
"
|
|
866
|
-
"name": name,
|
|
867
|
-
"visibility": visibility,
|
|
868
|
-
"tags": tags,
|
|
869
|
-
"description": description,
|
|
1418
|
+
"tag": tag,
|
|
870
1419
|
},
|
|
871
1420
|
headers={
|
|
872
1421
|
"content-type": "application/json",
|
|
@@ -876,142 +1425,238 @@ class AsyncRawDatasetsClient:
|
|
|
876
1425
|
)
|
|
877
1426
|
try:
|
|
878
1427
|
if 200 <= _response.status_code < 300:
|
|
879
|
-
return
|
|
1428
|
+
return HttpResponse(response=_response, data=None)
|
|
1429
|
+
if _response.status_code == 400:
|
|
1430
|
+
raise BadRequestError(
|
|
1431
|
+
headers=dict(_response.headers),
|
|
1432
|
+
body=typing.cast(
|
|
1433
|
+
typing.Optional[typing.Any],
|
|
1434
|
+
parse_obj_as(
|
|
1435
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1436
|
+
object_=_response.json(),
|
|
1437
|
+
),
|
|
1438
|
+
),
|
|
1439
|
+
)
|
|
1440
|
+
if _response.status_code == 404:
|
|
1441
|
+
raise NotFoundError(
|
|
1442
|
+
headers=dict(_response.headers),
|
|
1443
|
+
body=typing.cast(
|
|
1444
|
+
typing.Optional[typing.Any],
|
|
1445
|
+
parse_obj_as(
|
|
1446
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1447
|
+
object_=_response.json(),
|
|
1448
|
+
),
|
|
1449
|
+
),
|
|
1450
|
+
)
|
|
1451
|
+
if _response.status_code == 409:
|
|
1452
|
+
raise ConflictError(
|
|
1453
|
+
headers=dict(_response.headers),
|
|
1454
|
+
body=typing.cast(
|
|
1455
|
+
typing.Optional[typing.Any],
|
|
1456
|
+
parse_obj_as(
|
|
1457
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1458
|
+
object_=_response.json(),
|
|
1459
|
+
),
|
|
1460
|
+
),
|
|
1461
|
+
)
|
|
880
1462
|
_response_json = _response.json()
|
|
881
1463
|
except JSONDecodeError:
|
|
882
1464
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
883
1465
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
884
1466
|
|
|
885
|
-
|
|
1467
|
+
def delete_version_tag(
|
|
1468
|
+
self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1469
|
+
) -> HttpResponse[None]:
|
|
1470
|
+
"""
|
|
1471
|
+
Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.
|
|
1472
|
+
|
|
1473
|
+
Parameters
|
|
1474
|
+
----------
|
|
1475
|
+
version_hash : str
|
|
1476
|
+
|
|
1477
|
+
tag : str
|
|
1478
|
+
|
|
1479
|
+
id : str
|
|
1480
|
+
|
|
1481
|
+
request_options : typing.Optional[RequestOptions]
|
|
1482
|
+
Request-specific configuration.
|
|
1483
|
+
|
|
1484
|
+
Returns
|
|
1485
|
+
-------
|
|
1486
|
+
HttpResponse[None]
|
|
1487
|
+
"""
|
|
1488
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1489
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
|
|
1490
|
+
method="DELETE",
|
|
1491
|
+
request_options=request_options,
|
|
1492
|
+
)
|
|
1493
|
+
try:
|
|
1494
|
+
if 200 <= _response.status_code < 300:
|
|
1495
|
+
return HttpResponse(response=_response, data=None)
|
|
1496
|
+
_response_json = _response.json()
|
|
1497
|
+
except JSONDecodeError:
|
|
1498
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1499
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1500
|
+
|
|
1501
|
+
def list_dataset_versions(
|
|
886
1502
|
self,
|
|
1503
|
+
id: str,
|
|
887
1504
|
*,
|
|
888
|
-
|
|
889
|
-
|
|
890
|
-
dataset_id: typing.Optional[str] = OMIT,
|
|
1505
|
+
page: typing.Optional[int] = None,
|
|
1506
|
+
size: typing.Optional[int] = None,
|
|
891
1507
|
request_options: typing.Optional[RequestOptions] = None,
|
|
892
|
-
) ->
|
|
1508
|
+
) -> HttpResponse[DatasetVersionPagePublic]:
|
|
893
1509
|
"""
|
|
894
|
-
|
|
1510
|
+
Get paginated list of versions for a dataset, ordered by creation time (newest first)
|
|
895
1511
|
|
|
896
1512
|
Parameters
|
|
897
1513
|
----------
|
|
898
|
-
|
|
1514
|
+
id : str
|
|
899
1515
|
|
|
900
|
-
|
|
901
|
-
If null, dataset_id must be provided
|
|
1516
|
+
page : typing.Optional[int]
|
|
902
1517
|
|
|
903
|
-
|
|
904
|
-
If null, dataset_name must be provided
|
|
1518
|
+
size : typing.Optional[int]
|
|
905
1519
|
|
|
906
1520
|
request_options : typing.Optional[RequestOptions]
|
|
907
1521
|
Request-specific configuration.
|
|
908
1522
|
|
|
909
1523
|
Returns
|
|
910
1524
|
-------
|
|
911
|
-
|
|
1525
|
+
HttpResponse[DatasetVersionPagePublic]
|
|
1526
|
+
Dataset versions
|
|
912
1527
|
"""
|
|
913
|
-
_response =
|
|
914
|
-
"v1/private/datasets/
|
|
915
|
-
method="
|
|
916
|
-
|
|
917
|
-
"
|
|
918
|
-
"
|
|
919
|
-
"items": convert_and_respect_annotation_metadata(
|
|
920
|
-
object_=items, annotation=typing.Sequence[DatasetItemWrite], direction="write"
|
|
921
|
-
),
|
|
922
|
-
},
|
|
923
|
-
headers={
|
|
924
|
-
"content-type": "application/json",
|
|
1528
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1529
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions",
|
|
1530
|
+
method="GET",
|
|
1531
|
+
params={
|
|
1532
|
+
"page": page,
|
|
1533
|
+
"size": size,
|
|
925
1534
|
},
|
|
926
1535
|
request_options=request_options,
|
|
927
|
-
omit=OMIT,
|
|
928
1536
|
)
|
|
929
1537
|
try:
|
|
930
1538
|
if 200 <= _response.status_code < 300:
|
|
931
|
-
|
|
1539
|
+
_data = typing.cast(
|
|
1540
|
+
DatasetVersionPagePublic,
|
|
1541
|
+
parse_obj_as(
|
|
1542
|
+
type_=DatasetVersionPagePublic, # type: ignore
|
|
1543
|
+
object_=_response.json(),
|
|
1544
|
+
),
|
|
1545
|
+
)
|
|
1546
|
+
return HttpResponse(response=_response, data=_data)
|
|
1547
|
+
if _response.status_code == 400:
|
|
1548
|
+
raise BadRequestError(
|
|
1549
|
+
headers=dict(_response.headers),
|
|
1550
|
+
body=typing.cast(
|
|
1551
|
+
typing.Optional[typing.Any],
|
|
1552
|
+
parse_obj_as(
|
|
1553
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1554
|
+
object_=_response.json(),
|
|
1555
|
+
),
|
|
1556
|
+
),
|
|
1557
|
+
)
|
|
932
1558
|
_response_json = _response.json()
|
|
933
1559
|
except JSONDecodeError:
|
|
934
1560
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
935
1561
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
936
1562
|
|
|
937
|
-
|
|
938
|
-
self, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
939
|
-
) ->
|
|
1563
|
+
def restore_dataset_version(
|
|
1564
|
+
self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
|
|
1565
|
+
) -> HttpResponse[DatasetVersionPublic]:
|
|
940
1566
|
"""
|
|
941
|
-
|
|
1567
|
+
Restores the dataset to a previous version state by creating a new version with items copied from the specified version. If the version is already the latest, returns it as-is (no-op).
|
|
942
1568
|
|
|
943
1569
|
Parameters
|
|
944
1570
|
----------
|
|
945
1571
|
id : str
|
|
946
1572
|
|
|
1573
|
+
version_ref : str
|
|
1574
|
+
Version hash or tag to restore from
|
|
1575
|
+
|
|
947
1576
|
request_options : typing.Optional[RequestOptions]
|
|
948
1577
|
Request-specific configuration.
|
|
949
1578
|
|
|
950
1579
|
Returns
|
|
951
1580
|
-------
|
|
952
|
-
|
|
953
|
-
|
|
1581
|
+
HttpResponse[DatasetVersionPublic]
|
|
1582
|
+
Version restored successfully
|
|
954
1583
|
"""
|
|
955
|
-
_response =
|
|
956
|
-
f"v1/private/datasets/{jsonable_encoder(id)}",
|
|
957
|
-
method="
|
|
1584
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1585
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
|
|
1586
|
+
method="POST",
|
|
1587
|
+
json={
|
|
1588
|
+
"version_ref": version_ref,
|
|
1589
|
+
},
|
|
1590
|
+
headers={
|
|
1591
|
+
"content-type": "application/json",
|
|
1592
|
+
},
|
|
958
1593
|
request_options=request_options,
|
|
1594
|
+
omit=OMIT,
|
|
959
1595
|
)
|
|
960
1596
|
try:
|
|
961
1597
|
if 200 <= _response.status_code < 300:
|
|
962
1598
|
_data = typing.cast(
|
|
963
|
-
|
|
1599
|
+
DatasetVersionPublic,
|
|
964
1600
|
parse_obj_as(
|
|
965
|
-
type_=
|
|
1601
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
966
1602
|
object_=_response.json(),
|
|
967
1603
|
),
|
|
968
1604
|
)
|
|
969
|
-
return
|
|
1605
|
+
return HttpResponse(response=_response, data=_data)
|
|
1606
|
+
if _response.status_code == 404:
|
|
1607
|
+
raise NotFoundError(
|
|
1608
|
+
headers=dict(_response.headers),
|
|
1609
|
+
body=typing.cast(
|
|
1610
|
+
typing.Optional[typing.Any],
|
|
1611
|
+
parse_obj_as(
|
|
1612
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1613
|
+
object_=_response.json(),
|
|
1614
|
+
),
|
|
1615
|
+
),
|
|
1616
|
+
)
|
|
970
1617
|
_response_json = _response.json()
|
|
971
1618
|
except JSONDecodeError:
|
|
972
1619
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
973
1620
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
974
1621
|
|
|
975
|
-
|
|
1622
|
+
def update_dataset_version(
|
|
976
1623
|
self,
|
|
1624
|
+
version_hash: str,
|
|
977
1625
|
id: str,
|
|
978
1626
|
*,
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
visibility: typing.Optional[DatasetUpdateVisibility] = OMIT,
|
|
982
|
-
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1627
|
+
change_description: typing.Optional[str] = OMIT,
|
|
1628
|
+
tags_to_add: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
983
1629
|
request_options: typing.Optional[RequestOptions] = None,
|
|
984
|
-
) ->
|
|
1630
|
+
) -> HttpResponse[DatasetVersionPublic]:
|
|
985
1631
|
"""
|
|
986
|
-
Update dataset
|
|
1632
|
+
Update a dataset version's change_description and/or add new tags
|
|
987
1633
|
|
|
988
1634
|
Parameters
|
|
989
1635
|
----------
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
name : str
|
|
1636
|
+
version_hash : str
|
|
993
1637
|
|
|
994
|
-
|
|
1638
|
+
id : str
|
|
995
1639
|
|
|
996
|
-
|
|
1640
|
+
change_description : typing.Optional[str]
|
|
1641
|
+
Optional description of changes in this version
|
|
997
1642
|
|
|
998
|
-
|
|
1643
|
+
tags_to_add : typing.Optional[typing.Sequence[str]]
|
|
1644
|
+
Optional list of tags to add to this version
|
|
999
1645
|
|
|
1000
1646
|
request_options : typing.Optional[RequestOptions]
|
|
1001
1647
|
Request-specific configuration.
|
|
1002
1648
|
|
|
1003
1649
|
Returns
|
|
1004
1650
|
-------
|
|
1005
|
-
|
|
1651
|
+
HttpResponse[DatasetVersionPublic]
|
|
1652
|
+
Version updated successfully
|
|
1006
1653
|
"""
|
|
1007
|
-
_response =
|
|
1008
|
-
f"v1/private/datasets/{jsonable_encoder(id)}",
|
|
1009
|
-
method="
|
|
1654
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1655
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}",
|
|
1656
|
+
method="PATCH",
|
|
1010
1657
|
json={
|
|
1011
|
-
"
|
|
1012
|
-
"
|
|
1013
|
-
"visibility": visibility,
|
|
1014
|
-
"tags": tags,
|
|
1658
|
+
"change_description": change_description,
|
|
1659
|
+
"tags_to_add": tags_to_add,
|
|
1015
1660
|
},
|
|
1016
1661
|
headers={
|
|
1017
1662
|
"content-type": "application/json",
|
|
@@ -1021,51 +1666,179 @@ class AsyncRawDatasetsClient:
|
|
|
1021
1666
|
)
|
|
1022
1667
|
try:
|
|
1023
1668
|
if 200 <= _response.status_code < 300:
|
|
1024
|
-
|
|
1669
|
+
_data = typing.cast(
|
|
1670
|
+
DatasetVersionPublic,
|
|
1671
|
+
parse_obj_as(
|
|
1672
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
1673
|
+
object_=_response.json(),
|
|
1674
|
+
),
|
|
1675
|
+
)
|
|
1676
|
+
return HttpResponse(response=_response, data=_data)
|
|
1677
|
+
if _response.status_code == 400:
|
|
1678
|
+
raise BadRequestError(
|
|
1679
|
+
headers=dict(_response.headers),
|
|
1680
|
+
body=typing.cast(
|
|
1681
|
+
typing.Optional[typing.Any],
|
|
1682
|
+
parse_obj_as(
|
|
1683
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1684
|
+
object_=_response.json(),
|
|
1685
|
+
),
|
|
1686
|
+
),
|
|
1687
|
+
)
|
|
1688
|
+
if _response.status_code == 404:
|
|
1689
|
+
raise NotFoundError(
|
|
1690
|
+
headers=dict(_response.headers),
|
|
1691
|
+
body=typing.cast(
|
|
1692
|
+
typing.Optional[typing.Any],
|
|
1693
|
+
parse_obj_as(
|
|
1694
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1695
|
+
object_=_response.json(),
|
|
1696
|
+
),
|
|
1697
|
+
),
|
|
1698
|
+
)
|
|
1699
|
+
if _response.status_code == 409:
|
|
1700
|
+
raise ConflictError(
|
|
1701
|
+
headers=dict(_response.headers),
|
|
1702
|
+
body=typing.cast(
|
|
1703
|
+
typing.Optional[typing.Any],
|
|
1704
|
+
parse_obj_as(
|
|
1705
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1706
|
+
object_=_response.json(),
|
|
1707
|
+
),
|
|
1708
|
+
),
|
|
1709
|
+
)
|
|
1025
1710
|
_response_json = _response.json()
|
|
1026
1711
|
except JSONDecodeError:
|
|
1027
1712
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1028
1713
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1029
1714
|
|
|
1030
|
-
|
|
1031
|
-
|
|
1032
|
-
|
|
1715
|
+
|
|
1716
|
+
class AsyncRawDatasetsClient:
|
|
1717
|
+
def __init__(self, *, client_wrapper: AsyncClientWrapper):
|
|
1718
|
+
self._client_wrapper = client_wrapper
|
|
1719
|
+
|
|
1720
|
+
async def apply_dataset_item_changes(
|
|
1721
|
+
self,
|
|
1722
|
+
id: str,
|
|
1723
|
+
*,
|
|
1724
|
+
request: DatasetItemChangesPublic,
|
|
1725
|
+
override: typing.Optional[bool] = None,
|
|
1726
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1727
|
+
) -> AsyncHttpResponse[DatasetVersionPublic]:
|
|
1033
1728
|
"""
|
|
1034
|
-
|
|
1729
|
+
Apply delta changes (add, edit, delete) to a dataset version with conflict detection.
|
|
1730
|
+
|
|
1731
|
+
This endpoint:
|
|
1732
|
+
- Creates a new version with the applied changes
|
|
1733
|
+
- Validates that baseVersion matches the latest version (unless override=true)
|
|
1734
|
+
- Returns 409 Conflict if baseVersion is stale and override is not set
|
|
1735
|
+
|
|
1736
|
+
Use `override=true` query parameter to force version creation even with stale baseVersion.
|
|
1035
1737
|
|
|
1036
1738
|
Parameters
|
|
1037
1739
|
----------
|
|
1038
1740
|
id : str
|
|
1039
1741
|
|
|
1742
|
+
request : DatasetItemChangesPublic
|
|
1743
|
+
|
|
1744
|
+
override : typing.Optional[bool]
|
|
1745
|
+
|
|
1040
1746
|
request_options : typing.Optional[RequestOptions]
|
|
1041
1747
|
Request-specific configuration.
|
|
1042
1748
|
|
|
1043
1749
|
Returns
|
|
1044
1750
|
-------
|
|
1045
|
-
AsyncHttpResponse[
|
|
1751
|
+
AsyncHttpResponse[DatasetVersionPublic]
|
|
1752
|
+
Version created successfully
|
|
1046
1753
|
"""
|
|
1047
1754
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1048
|
-
f"v1/private/datasets/{jsonable_encoder(id)}",
|
|
1049
|
-
method="
|
|
1755
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/items/changes",
|
|
1756
|
+
method="POST",
|
|
1757
|
+
params={
|
|
1758
|
+
"override": override,
|
|
1759
|
+
},
|
|
1760
|
+
json=request,
|
|
1761
|
+
headers={
|
|
1762
|
+
"content-type": "application/json",
|
|
1763
|
+
},
|
|
1050
1764
|
request_options=request_options,
|
|
1765
|
+
omit=OMIT,
|
|
1051
1766
|
)
|
|
1052
1767
|
try:
|
|
1053
1768
|
if 200 <= _response.status_code < 300:
|
|
1054
|
-
|
|
1769
|
+
_data = typing.cast(
|
|
1770
|
+
DatasetVersionPublic,
|
|
1771
|
+
parse_obj_as(
|
|
1772
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
1773
|
+
object_=_response.json(),
|
|
1774
|
+
),
|
|
1775
|
+
)
|
|
1776
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
1777
|
+
if _response.status_code == 400:
|
|
1778
|
+
raise BadRequestError(
|
|
1779
|
+
headers=dict(_response.headers),
|
|
1780
|
+
body=typing.cast(
|
|
1781
|
+
typing.Optional[typing.Any],
|
|
1782
|
+
parse_obj_as(
|
|
1783
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1784
|
+
object_=_response.json(),
|
|
1785
|
+
),
|
|
1786
|
+
),
|
|
1787
|
+
)
|
|
1788
|
+
if _response.status_code == 404:
|
|
1789
|
+
raise NotFoundError(
|
|
1790
|
+
headers=dict(_response.headers),
|
|
1791
|
+
body=typing.cast(
|
|
1792
|
+
typing.Optional[typing.Any],
|
|
1793
|
+
parse_obj_as(
|
|
1794
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1795
|
+
object_=_response.json(),
|
|
1796
|
+
),
|
|
1797
|
+
),
|
|
1798
|
+
)
|
|
1799
|
+
if _response.status_code == 409:
|
|
1800
|
+
raise ConflictError(
|
|
1801
|
+
headers=dict(_response.headers),
|
|
1802
|
+
body=typing.cast(
|
|
1803
|
+
typing.Optional[typing.Any],
|
|
1804
|
+
parse_obj_as(
|
|
1805
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1806
|
+
object_=_response.json(),
|
|
1807
|
+
),
|
|
1808
|
+
),
|
|
1809
|
+
)
|
|
1055
1810
|
_response_json = _response.json()
|
|
1056
1811
|
except JSONDecodeError:
|
|
1057
1812
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1058
1813
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1059
1814
|
|
|
1060
|
-
async def
|
|
1061
|
-
self,
|
|
1815
|
+
async def batch_update_dataset_items(
|
|
1816
|
+
self,
|
|
1817
|
+
*,
|
|
1818
|
+
update: DatasetItemUpdate,
|
|
1819
|
+
ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1820
|
+
filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
|
|
1821
|
+
dataset_id: typing.Optional[str] = OMIT,
|
|
1822
|
+
merge_tags: typing.Optional[bool] = OMIT,
|
|
1823
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1062
1824
|
) -> AsyncHttpResponse[None]:
|
|
1063
1825
|
"""
|
|
1064
|
-
|
|
1826
|
+
Update multiple dataset items
|
|
1065
1827
|
|
|
1066
1828
|
Parameters
|
|
1067
1829
|
----------
|
|
1068
|
-
|
|
1830
|
+
update : DatasetItemUpdate
|
|
1831
|
+
|
|
1832
|
+
ids : typing.Optional[typing.Sequence[str]]
|
|
1833
|
+
List of dataset item IDs to update (max 1000). Mutually exclusive with 'filters'.
|
|
1834
|
+
|
|
1835
|
+
filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
|
|
1836
|
+
|
|
1837
|
+
dataset_id : typing.Optional[str]
|
|
1838
|
+
Dataset ID. Required when using 'filters', optional when using 'ids'.
|
|
1839
|
+
|
|
1840
|
+
merge_tags : typing.Optional[bool]
|
|
1841
|
+
If true, merge tags with existing tags instead of replacing them. Default: false. When using 'filters', this is automatically set to true.
|
|
1069
1842
|
|
|
1070
1843
|
request_options : typing.Optional[RequestOptions]
|
|
1071
1844
|
Request-specific configuration.
|
|
@@ -1075,10 +1848,18 @@ class AsyncRawDatasetsClient:
|
|
|
1075
1848
|
AsyncHttpResponse[None]
|
|
1076
1849
|
"""
|
|
1077
1850
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1078
|
-
"v1/private/datasets/
|
|
1079
|
-
method="
|
|
1851
|
+
"v1/private/datasets/items/batch",
|
|
1852
|
+
method="PATCH",
|
|
1080
1853
|
json={
|
|
1081
|
-
"
|
|
1854
|
+
"ids": ids,
|
|
1855
|
+
"filters": convert_and_respect_annotation_metadata(
|
|
1856
|
+
object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
|
|
1857
|
+
),
|
|
1858
|
+
"dataset_id": dataset_id,
|
|
1859
|
+
"update": convert_and_respect_annotation_metadata(
|
|
1860
|
+
object_=update, annotation=DatasetItemUpdate, direction="write"
|
|
1861
|
+
),
|
|
1862
|
+
"merge_tags": merge_tags,
|
|
1082
1863
|
},
|
|
1083
1864
|
headers={
|
|
1084
1865
|
"content-type": "application/json",
|
|
@@ -1089,171 +1870,1228 @@ class AsyncRawDatasetsClient:
|
|
|
1089
1870
|
try:
|
|
1090
1871
|
if 200 <= _response.status_code < 300:
|
|
1091
1872
|
return AsyncHttpResponse(response=_response, data=None)
|
|
1873
|
+
if _response.status_code == 400:
|
|
1874
|
+
raise BadRequestError(
|
|
1875
|
+
headers=dict(_response.headers),
|
|
1876
|
+
body=typing.cast(
|
|
1877
|
+
typing.Optional[typing.Any],
|
|
1878
|
+
parse_obj_as(
|
|
1879
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1880
|
+
object_=_response.json(),
|
|
1881
|
+
),
|
|
1882
|
+
),
|
|
1883
|
+
)
|
|
1092
1884
|
_response_json = _response.json()
|
|
1093
1885
|
except JSONDecodeError:
|
|
1094
1886
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1095
1887
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1096
1888
|
|
|
1097
|
-
async def
|
|
1098
|
-
self,
|
|
1099
|
-
|
|
1889
|
+
async def find_datasets(
|
|
1890
|
+
self,
|
|
1891
|
+
*,
|
|
1892
|
+
page: typing.Optional[int] = None,
|
|
1893
|
+
size: typing.Optional[int] = None,
|
|
1894
|
+
with_experiments_only: typing.Optional[bool] = None,
|
|
1895
|
+
with_optimizations_only: typing.Optional[bool] = None,
|
|
1896
|
+
prompt_id: typing.Optional[str] = None,
|
|
1897
|
+
name: typing.Optional[str] = None,
|
|
1898
|
+
sorting: typing.Optional[str] = None,
|
|
1899
|
+
filters: typing.Optional[str] = None,
|
|
1900
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1901
|
+
) -> AsyncHttpResponse[DatasetPagePublic]:
|
|
1100
1902
|
"""
|
|
1101
|
-
|
|
1903
|
+
Find datasets
|
|
1102
1904
|
|
|
1103
1905
|
Parameters
|
|
1104
1906
|
----------
|
|
1105
|
-
|
|
1907
|
+
page : typing.Optional[int]
|
|
1908
|
+
|
|
1909
|
+
size : typing.Optional[int]
|
|
1910
|
+
|
|
1911
|
+
with_experiments_only : typing.Optional[bool]
|
|
1912
|
+
|
|
1913
|
+
with_optimizations_only : typing.Optional[bool]
|
|
1914
|
+
|
|
1915
|
+
prompt_id : typing.Optional[str]
|
|
1916
|
+
|
|
1917
|
+
name : typing.Optional[str]
|
|
1918
|
+
|
|
1919
|
+
sorting : typing.Optional[str]
|
|
1920
|
+
|
|
1921
|
+
filters : typing.Optional[str]
|
|
1106
1922
|
|
|
1107
1923
|
request_options : typing.Optional[RequestOptions]
|
|
1108
1924
|
Request-specific configuration.
|
|
1109
1925
|
|
|
1110
1926
|
Returns
|
|
1111
1927
|
-------
|
|
1112
|
-
AsyncHttpResponse[
|
|
1928
|
+
AsyncHttpResponse[DatasetPagePublic]
|
|
1929
|
+
Dataset resource
|
|
1113
1930
|
"""
|
|
1114
1931
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1115
|
-
"v1/private/datasets
|
|
1116
|
-
method="
|
|
1117
|
-
|
|
1118
|
-
"
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
"
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
|
|
1932
|
+
"v1/private/datasets",
|
|
1933
|
+
method="GET",
|
|
1934
|
+
params={
|
|
1935
|
+
"page": page,
|
|
1936
|
+
"size": size,
|
|
1937
|
+
"with_experiments_only": with_experiments_only,
|
|
1938
|
+
"with_optimizations_only": with_optimizations_only,
|
|
1939
|
+
"prompt_id": prompt_id,
|
|
1940
|
+
"name": name,
|
|
1941
|
+
"sorting": sorting,
|
|
1942
|
+
"filters": filters,
|
|
1943
|
+
},
|
|
1944
|
+
request_options=request_options,
|
|
1945
|
+
)
|
|
1946
|
+
try:
|
|
1947
|
+
if 200 <= _response.status_code < 300:
|
|
1948
|
+
_data = typing.cast(
|
|
1949
|
+
DatasetPagePublic,
|
|
1950
|
+
parse_obj_as(
|
|
1951
|
+
type_=DatasetPagePublic, # type: ignore
|
|
1952
|
+
object_=_response.json(),
|
|
1953
|
+
),
|
|
1954
|
+
)
|
|
1955
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
1956
|
+
_response_json = _response.json()
|
|
1957
|
+
except JSONDecodeError:
|
|
1958
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1959
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1960
|
+
|
|
1961
|
+
async def create_dataset(
|
|
1962
|
+
self,
|
|
1963
|
+
*,
|
|
1964
|
+
name: str,
|
|
1965
|
+
id: typing.Optional[str] = OMIT,
|
|
1966
|
+
visibility: typing.Optional[DatasetWriteVisibility] = OMIT,
|
|
1967
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1968
|
+
description: typing.Optional[str] = OMIT,
|
|
1969
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1970
|
+
) -> AsyncHttpResponse[None]:
|
|
1971
|
+
"""
|
|
1972
|
+
Create dataset
|
|
1973
|
+
|
|
1974
|
+
Parameters
|
|
1975
|
+
----------
|
|
1976
|
+
name : str
|
|
1977
|
+
|
|
1978
|
+
id : typing.Optional[str]
|
|
1979
|
+
|
|
1980
|
+
visibility : typing.Optional[DatasetWriteVisibility]
|
|
1981
|
+
|
|
1982
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
1983
|
+
|
|
1984
|
+
description : typing.Optional[str]
|
|
1985
|
+
|
|
1986
|
+
request_options : typing.Optional[RequestOptions]
|
|
1987
|
+
Request-specific configuration.
|
|
1988
|
+
|
|
1989
|
+
Returns
|
|
1990
|
+
-------
|
|
1991
|
+
AsyncHttpResponse[None]
|
|
1992
|
+
"""
|
|
1993
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1994
|
+
"v1/private/datasets",
|
|
1995
|
+
method="POST",
|
|
1996
|
+
json={
|
|
1997
|
+
"id": id,
|
|
1998
|
+
"name": name,
|
|
1999
|
+
"visibility": visibility,
|
|
2000
|
+
"tags": tags,
|
|
2001
|
+
"description": description,
|
|
2002
|
+
},
|
|
2003
|
+
headers={
|
|
2004
|
+
"content-type": "application/json",
|
|
2005
|
+
},
|
|
2006
|
+
request_options=request_options,
|
|
2007
|
+
omit=OMIT,
|
|
2008
|
+
)
|
|
2009
|
+
try:
|
|
2010
|
+
if 200 <= _response.status_code < 300:
|
|
2011
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
2012
|
+
_response_json = _response.json()
|
|
2013
|
+
except JSONDecodeError:
|
|
2014
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
2015
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
2016
|
+
|
|
2017
|
+
async def create_or_update_dataset_items(
    self,
    *,
    items: typing.Sequence[DatasetItemWrite],
    dataset_name: typing.Optional[str] = OMIT,
    dataset_id: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Create or update dataset items, matching existing items by their item id.

    Parameters
    ----------
    items : typing.Sequence[DatasetItemWrite]

    dataset_name : typing.Optional[str]
        If null, dataset_id must be provided

    dataset_id : typing.Optional[str]
        If null, dataset_name must be provided

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    # Serialize the typed items before building the request payload.
    serialized_items = convert_and_respect_annotation_metadata(
        object_=items, annotation=typing.Sequence[DatasetItemWrite], direction="write"
    )
    payload = {
        "dataset_name": dataset_name,
        "dataset_id": dataset_id,
        "items": serialized_items,
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        "v1/private/datasets/items",
        method="PUT",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    # Success carries no body; any other status is surfaced as ApiError.
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2068
|
+
|
|
2069
|
+
async def create_dataset_items_from_csv(
    self,
    *,
    file: typing.Dict[str, typing.Optional[typing.Any]],
    dataset_id: str,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Create dataset items from an uploaded CSV file. The first CSV row must
    contain the headers; the server processes rows asynchronously in batches.

    Parameters
    ----------
    file : typing.Dict[str, typing.Optional[typing.Any]]

    dataset_id : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        "v1/private/datasets/items/from-csv",
        method="POST",
        data={"file": file, "dataset_id": dataset_id},
        files={},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            return AsyncHttpResponse(response=raw_response, data=None)
        # Map the documented error statuses to their dedicated exception types.
        if raw_response.status_code == 400:
            raise BadRequestError(
                headers=dict(raw_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw_response.json(),
                    ),
                ),
            )
        if raw_response.status_code == 404:
            raise NotFoundError(
                headers=dict(raw_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw_response.json(),
                    ),
                ),
            )
        error_body = raw_response.json()
    except JSONDecodeError:
        # Non-JSON body: fall back to the raw response text.
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2132
|
+
|
|
2133
|
+
async def create_dataset_items_from_spans(
    self,
    dataset_id: str,
    *,
    span_ids: typing.Sequence[str],
    enrichment_options: SpanEnrichmentOptions,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Create dataset items from the given spans, enriched with metadata.

    Parameters
    ----------
    dataset_id : str

    span_ids : typing.Sequence[str]
        Set of span IDs to add to the dataset

    enrichment_options : SpanEnrichmentOptions

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    payload = {
        "span_ids": span_ids,
        "enrichment_options": convert_and_respect_annotation_metadata(
            object_=enrichment_options, annotation=SpanEnrichmentOptions, direction="write"
        ),
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-spans",
        method="POST",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2182
|
+
|
|
2183
|
+
async def create_dataset_items_from_traces(
    self,
    dataset_id: str,
    *,
    trace_ids: typing.Sequence[str],
    enrichment_options: TraceEnrichmentOptions,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Create dataset items from the given traces, enriched with metadata.

    Parameters
    ----------
    dataset_id : str

    trace_ids : typing.Sequence[str]
        Set of trace IDs to add to the dataset

    enrichment_options : TraceEnrichmentOptions

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    payload = {
        "trace_ids": trace_ids,
        "enrichment_options": convert_and_respect_annotation_metadata(
            object_=enrichment_options, annotation=TraceEnrichmentOptions, direction="write"
        ),
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(dataset_id)}/items/from-traces",
        method="POST",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2232
|
+
|
|
2233
|
+
async def get_dataset_by_id(
    self, id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[DatasetPublic]:
    """
    Fetch a single dataset by its id.

    Parameters
    ----------
    id : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetPublic]
        Dataset resource
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}",
        method="GET",
        request_options=request_options,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            # Parse the 2xx body inside the try so a malformed JSON body
            # still surfaces as an ApiError rather than a raw decode error.
            parsed = typing.cast(
                DatasetPublic,
                parse_obj_as(
                    type_=DatasetPublic,  # type: ignore
                    object_=raw_response.json(),
                ),
            )
            return AsyncHttpResponse(response=raw_response, data=parsed)
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2270
|
+
|
|
2271
|
+
async def update_dataset(
    self,
    id: str,
    *,
    name: str,
    description: typing.Optional[str] = OMIT,
    visibility: typing.Optional[DatasetUpdateVisibility] = OMIT,
    tags: typing.Optional[typing.Sequence[str]] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Update an existing dataset, addressed by id.

    Parameters
    ----------
    id : str

    name : str

    description : typing.Optional[str]

    visibility : typing.Optional[DatasetUpdateVisibility]

    tags : typing.Optional[typing.Sequence[str]]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    payload = {
        "name": name,
        "description": description,
        "visibility": visibility,
        "tags": tags,
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}",
        method="PUT",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2325
|
+
|
|
2326
|
+
async def delete_dataset(
    self, id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[None]:
    """
    Delete a dataset, addressed by id.

    Parameters
    ----------
    id : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}",
        method="DELETE",
        request_options=request_options,
    )
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2355
|
+
|
|
2356
|
+
async def delete_dataset_by_name(
    self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[None]:
    """
    Delete a dataset, addressed by its name.

    Parameters
    ----------
    dataset_name : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        "v1/private/datasets/delete",
        method="POST",
        json={"dataset_name": dataset_name},
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2392
|
+
|
|
2393
|
+
async def delete_dataset_items(
    self,
    *,
    item_ids: typing.Optional[typing.Sequence[str]] = OMIT,
    dataset_id: typing.Optional[str] = OMIT,
    filters: typing.Optional[typing.Sequence[DatasetItemFilter]] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Delete dataset items using one of two modes:
    1. **Delete by IDs**: Provide 'item_ids' to delete specific items by their IDs
    2. **Delete by filters**: Provide 'dataset_id' with optional 'filters' to delete items matching criteria

    When using filters, an empty 'filters' array will delete all items in the specified dataset.

    Parameters
    ----------
    item_ids : typing.Optional[typing.Sequence[str]]
        List of dataset item IDs to delete (max 1000). Use this to delete specific items by their IDs. Mutually exclusive with 'dataset_id' and 'filters'.

    dataset_id : typing.Optional[str]
        Dataset ID to scope the deletion. Required when using 'filters'. Mutually exclusive with 'item_ids'.

    filters : typing.Optional[typing.Sequence[DatasetItemFilter]]
        Filters to select dataset items to delete within the specified dataset. Must be used with 'dataset_id'. Mutually exclusive with 'item_ids'. Empty array means 'delete all items in the dataset'.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    payload = {
        "item_ids": item_ids,
        "dataset_id": dataset_id,
        "filters": convert_and_respect_annotation_metadata(
            object_=filters, annotation=typing.Sequence[DatasetItemFilter], direction="write"
        ),
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        "v1/private/datasets/items/delete",
        method="POST",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            return AsyncHttpResponse(response=raw_response, data=None)
        # The server validates the mutually-exclusive modes and answers 400
        # on conflicting input; surface that as the dedicated exception.
        if raw_response.status_code == 400:
            raise BadRequestError(
                headers=dict(raw_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw_response.json(),
                    ),
                ),
            )
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2460
|
+
|
|
2461
|
+
async def delete_datasets_batch(
    self, *, ids: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[None]:
    """
    Delete several datasets in one call.

    Parameters
    ----------
    ids : typing.Sequence[str]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        "v1/private/datasets/delete-batch",
        method="POST",
        json={"ids": ids},
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    if 200 <= raw_response.status_code < 300:
        return AsyncHttpResponse(response=raw_response, data=None)
    try:
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2497
|
+
|
|
2498
|
+
async def expand_dataset(
    self,
    id: str,
    *,
    model: str,
    sample_count: typing.Optional[int] = OMIT,
    preserve_fields: typing.Optional[typing.Sequence[str]] = OMIT,
    variation_instructions: typing.Optional[str] = OMIT,
    custom_prompt: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[DatasetExpansionResponse]:
    """
    Generate synthetic dataset samples with an LLM, based on the existing data patterns.

    Parameters
    ----------
    id : str

    model : str
        The model to use for synthetic data generation

    sample_count : typing.Optional[int]
        Number of synthetic samples to generate

    preserve_fields : typing.Optional[typing.Sequence[str]]
        Fields to preserve patterns from original data

    variation_instructions : typing.Optional[str]
        Additional instructions for data variation

    custom_prompt : typing.Optional[str]
        Custom prompt to use for generation instead of auto-generated one

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetExpansionResponse]
        Generated synthetic samples
    """
    payload = {
        "model": model,
        "sample_count": sample_count,
        "preserve_fields": preserve_fields,
        "variation_instructions": variation_instructions,
        "custom_prompt": custom_prompt,
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/expansions",
        method="POST",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            parsed = typing.cast(
                DatasetExpansionResponse,
                parse_obj_as(
                    type_=DatasetExpansionResponse,  # type: ignore
                    object_=raw_response.json(),
                ),
            )
            return AsyncHttpResponse(response=raw_response, data=parsed)
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2569
|
+
|
|
2570
|
+
async def find_dataset_items_with_experiment_items(
    self,
    id: str,
    *,
    experiment_ids: str,
    page: typing.Optional[int] = None,
    size: typing.Optional[int] = None,
    filters: typing.Optional[str] = None,
    sorting: typing.Optional[str] = None,
    search: typing.Optional[str] = None,
    truncate: typing.Optional[bool] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[DatasetItemPageCompare]:
    """
    List dataset items joined with their experiment items for comparison.

    Parameters
    ----------
    id : str

    experiment_ids : str

    page : typing.Optional[int]

    size : typing.Optional[int]

    filters : typing.Optional[str]

    sorting : typing.Optional[str]

    search : typing.Optional[str]

    truncate : typing.Optional[bool]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetItemPageCompare]
        Dataset item resource
    """
    query_params = {
        "page": page,
        "size": size,
        "experiment_ids": experiment_ids,
        "filters": filters,
        "sorting": sorting,
        "search": search,
        "truncate": truncate,
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items",
        method="GET",
        params=query_params,
        request_options=request_options,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            parsed = typing.cast(
                DatasetItemPageCompare,
                parse_obj_as(
                    type_=DatasetItemPageCompare,  # type: ignore
                    object_=raw_response.json(),
                ),
            )
            return AsyncHttpResponse(response=raw_response, data=parsed)
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2640
|
+
|
|
2641
|
+
async def get_dataset_by_identifier(
    self, *, dataset_name: str, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[DatasetPublic]:
    """
    Fetch a dataset by its name.

    Parameters
    ----------
    dataset_name : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetPublic]
        Dataset resource
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        "v1/private/datasets/retrieve",
        method="POST",
        json={"dataset_name": dataset_name},
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            parsed = typing.cast(
                DatasetPublic,
                parse_obj_as(
                    type_=DatasetPublic,  # type: ignore
                    object_=raw_response.json(),
                ),
            )
            return AsyncHttpResponse(response=raw_response, data=parsed)
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2685
|
+
|
|
2686
|
+
async def get_dataset_experiment_items_stats(
    self,
    id: str,
    *,
    experiment_ids: str,
    filters: typing.Optional[str] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[ProjectStatsPublic]:
    """
    Fetch experiment-items statistics for a dataset.

    Parameters
    ----------
    id : str

    experiment_ids : str

    filters : typing.Optional[str]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[ProjectStatsPublic]
        Experiment items stats resource
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/stats",
        method="GET",
        params={"experiment_ids": experiment_ids, "filters": filters},
        request_options=request_options,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            parsed = typing.cast(
                ProjectStatsPublic,
                parse_obj_as(
                    type_=ProjectStatsPublic,  # type: ignore
                    object_=raw_response.json(),
                ),
            )
            return AsyncHttpResponse(response=raw_response, data=parsed)
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2736
|
+
|
|
2737
|
+
async def get_dataset_item_by_id(
    self, item_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[DatasetItemPublic]:
    """
    Fetch a single dataset item by its id.

    Parameters
    ----------
    item_id : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[DatasetItemPublic]
        Dataset item resource
    """
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
        method="GET",
        request_options=request_options,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            parsed = typing.cast(
                DatasetItemPublic,
                parse_obj_as(
                    type_=DatasetItemPublic,  # type: ignore
                    object_=raw_response.json(),
                ),
            )
            return AsyncHttpResponse(response=raw_response, data=parsed)
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2774
|
+
|
|
2775
|
+
async def patch_dataset_item(
    self,
    item_id: str,
    *,
    source: DatasetItemWriteSource,
    data: JsonNode,
    id: typing.Optional[str] = OMIT,
    trace_id: typing.Optional[str] = OMIT,
    span_id: typing.Optional[str] = OMIT,
    tags: typing.Optional[typing.Sequence[str]] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[None]:
    """
    Partially update dataset item by id. Only provided fields will be updated.

    Parameters
    ----------
    item_id : str

    source : DatasetItemWriteSource

    data : JsonNode

    id : typing.Optional[str]

    trace_id : typing.Optional[str]

    span_id : typing.Optional[str]

    tags : typing.Optional[typing.Sequence[str]]

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[None]
    """
    payload = {
        "id": id,
        "trace_id": trace_id,
        "span_id": span_id,
        "source": source,
        "data": data,
        "tags": tags,
    }
    raw_response = await self._client_wrapper.httpx_client.request(
        f"v1/private/datasets/items/{jsonable_encoder(item_id)}",
        method="PATCH",
        json=payload,
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw_response.status_code < 300:
            return AsyncHttpResponse(response=raw_response, data=None)
        # 404 means the addressed dataset item does not exist.
        if raw_response.status_code == 404:
            raise NotFoundError(
                headers=dict(raw_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw_response.json(),
                    ),
                ),
            )
        error_body = raw_response.json()
    except JSONDecodeError:
        raise ApiError(
            status_code=raw_response.status_code,
            headers=dict(raw_response.headers),
            body=raw_response.text,
        )
    raise ApiError(
        status_code=raw_response.status_code,
        headers=dict(raw_response.headers),
        body=error_body,
    )
|
|
2848
|
+
|
|
2849
|
+
async def get_dataset_items(
|
|
2850
|
+
self,
|
|
2851
|
+
id: str,
|
|
2852
|
+
*,
|
|
2853
|
+
page: typing.Optional[int] = None,
|
|
2854
|
+
size: typing.Optional[int] = None,
|
|
2855
|
+
version: typing.Optional[str] = None,
|
|
2856
|
+
filters: typing.Optional[str] = None,
|
|
2857
|
+
truncate: typing.Optional[bool] = None,
|
|
2858
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
2859
|
+
) -> AsyncHttpResponse[DatasetItemPagePublic]:
|
|
2860
|
+
"""
|
|
2861
|
+
Get dataset items
|
|
2862
|
+
|
|
2863
|
+
Parameters
|
|
2864
|
+
----------
|
|
2865
|
+
id : str
|
|
2866
|
+
|
|
2867
|
+
page : typing.Optional[int]
|
|
2868
|
+
|
|
2869
|
+
size : typing.Optional[int]
|
|
2870
|
+
|
|
2871
|
+
version : typing.Optional[str]
|
|
2872
|
+
|
|
2873
|
+
filters : typing.Optional[str]
|
|
2874
|
+
|
|
2875
|
+
truncate : typing.Optional[bool]
|
|
2876
|
+
|
|
2877
|
+
request_options : typing.Optional[RequestOptions]
|
|
2878
|
+
Request-specific configuration.
|
|
2879
|
+
|
|
2880
|
+
Returns
|
|
2881
|
+
-------
|
|
2882
|
+
AsyncHttpResponse[DatasetItemPagePublic]
|
|
2883
|
+
Dataset items resource
|
|
2884
|
+
"""
|
|
2885
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2886
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/items",
|
|
2887
|
+
method="GET",
|
|
2888
|
+
params={
|
|
2889
|
+
"page": page,
|
|
2890
|
+
"size": size,
|
|
2891
|
+
"version": version,
|
|
2892
|
+
"filters": filters,
|
|
2893
|
+
"truncate": truncate,
|
|
2894
|
+
},
|
|
2895
|
+
request_options=request_options,
|
|
2896
|
+
)
|
|
2897
|
+
try:
|
|
2898
|
+
if 200 <= _response.status_code < 300:
|
|
2899
|
+
_data = typing.cast(
|
|
2900
|
+
DatasetItemPagePublic,
|
|
2901
|
+
parse_obj_as(
|
|
2902
|
+
type_=DatasetItemPagePublic, # type: ignore
|
|
2903
|
+
object_=_response.json(),
|
|
2904
|
+
),
|
|
2905
|
+
)
|
|
2906
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
2907
|
+
_response_json = _response.json()
|
|
2908
|
+
except JSONDecodeError:
|
|
2909
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
2910
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
2911
|
+
|
|
2912
|
+
async def get_dataset_items_output_columns(
|
|
2913
|
+
self,
|
|
2914
|
+
id: str,
|
|
2915
|
+
*,
|
|
2916
|
+
experiment_ids: typing.Optional[str] = None,
|
|
2917
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
2918
|
+
) -> AsyncHttpResponse[PageColumns]:
|
|
2919
|
+
"""
|
|
2920
|
+
Get dataset items output columns
|
|
2921
|
+
|
|
2922
|
+
Parameters
|
|
2923
|
+
----------
|
|
2924
|
+
id : str
|
|
2925
|
+
|
|
2926
|
+
experiment_ids : typing.Optional[str]
|
|
2927
|
+
|
|
2928
|
+
request_options : typing.Optional[RequestOptions]
|
|
2929
|
+
Request-specific configuration.
|
|
2930
|
+
|
|
2931
|
+
Returns
|
|
2932
|
+
-------
|
|
2933
|
+
AsyncHttpResponse[PageColumns]
|
|
2934
|
+
Dataset item output columns
|
|
2935
|
+
"""
|
|
2936
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2937
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/items/experiments/items/output/columns",
|
|
2938
|
+
method="GET",
|
|
2939
|
+
params={
|
|
2940
|
+
"experiment_ids": experiment_ids,
|
|
2941
|
+
},
|
|
2942
|
+
request_options=request_options,
|
|
1125
2943
|
)
|
|
1126
2944
|
try:
|
|
1127
2945
|
if 200 <= _response.status_code < 300:
|
|
1128
|
-
|
|
2946
|
+
_data = typing.cast(
|
|
2947
|
+
PageColumns,
|
|
2948
|
+
parse_obj_as(
|
|
2949
|
+
type_=PageColumns, # type: ignore
|
|
2950
|
+
object_=_response.json(),
|
|
2951
|
+
),
|
|
2952
|
+
)
|
|
2953
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
1129
2954
|
_response_json = _response.json()
|
|
1130
2955
|
except JSONDecodeError:
|
|
1131
2956
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1132
2957
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1133
2958
|
|
|
1134
|
-
|
|
1135
|
-
|
|
1136
|
-
|
|
2959
|
+
@contextlib.asynccontextmanager
|
|
2960
|
+
async def stream_dataset_items(
|
|
2961
|
+
self,
|
|
2962
|
+
*,
|
|
2963
|
+
dataset_name: str,
|
|
2964
|
+
last_retrieved_id: typing.Optional[str] = OMIT,
|
|
2965
|
+
steam_limit: typing.Optional[int] = OMIT,
|
|
2966
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
2967
|
+
) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]:
|
|
1137
2968
|
"""
|
|
1138
|
-
|
|
2969
|
+
Stream dataset items
|
|
1139
2970
|
|
|
1140
2971
|
Parameters
|
|
1141
2972
|
----------
|
|
1142
|
-
|
|
2973
|
+
dataset_name : str
|
|
2974
|
+
|
|
2975
|
+
last_retrieved_id : typing.Optional[str]
|
|
2976
|
+
|
|
2977
|
+
steam_limit : typing.Optional[int]
|
|
1143
2978
|
|
|
1144
2979
|
request_options : typing.Optional[RequestOptions]
|
|
1145
|
-
Request-specific configuration.
|
|
2980
|
+
Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
|
|
1146
2981
|
|
|
1147
2982
|
Returns
|
|
1148
2983
|
-------
|
|
1149
|
-
AsyncHttpResponse[
|
|
2984
|
+
typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
|
|
2985
|
+
Dataset items stream or error during process
|
|
1150
2986
|
"""
|
|
1151
|
-
|
|
1152
|
-
"v1/private/datasets/
|
|
2987
|
+
async with self._client_wrapper.httpx_client.stream(
|
|
2988
|
+
"v1/private/datasets/items/stream",
|
|
1153
2989
|
method="POST",
|
|
1154
2990
|
json={
|
|
1155
|
-
"
|
|
2991
|
+
"dataset_name": dataset_name,
|
|
2992
|
+
"last_retrieved_id": last_retrieved_id,
|
|
2993
|
+
"steam_limit": steam_limit,
|
|
1156
2994
|
},
|
|
1157
2995
|
headers={
|
|
1158
2996
|
"content-type": "application/json",
|
|
1159
2997
|
},
|
|
1160
2998
|
request_options=request_options,
|
|
1161
2999
|
omit=OMIT,
|
|
1162
|
-
)
|
|
1163
|
-
try:
|
|
1164
|
-
if 200 <= _response.status_code < 300:
|
|
1165
|
-
return AsyncHttpResponse(response=_response, data=None)
|
|
1166
|
-
_response_json = _response.json()
|
|
1167
|
-
except JSONDecodeError:
|
|
1168
|
-
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1169
|
-
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
3000
|
+
) as _response:
|
|
1170
3001
|
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
3002
|
+
async def stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]:
|
|
3003
|
+
try:
|
|
3004
|
+
if 200 <= _response.status_code < 300:
|
|
3005
|
+
_chunk_size = request_options.get("chunk_size", None) if request_options is not None else None
|
|
3006
|
+
return AsyncHttpResponse(
|
|
3007
|
+
response=_response,
|
|
3008
|
+
data=(_chunk async for _chunk in _response.aiter_bytes(chunk_size=_chunk_size)),
|
|
3009
|
+
)
|
|
3010
|
+
await _response.aread()
|
|
3011
|
+
_response_json = _response.json()
|
|
3012
|
+
except JSONDecodeError:
|
|
3013
|
+
raise ApiError(
|
|
3014
|
+
status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
|
|
3015
|
+
)
|
|
3016
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
3017
|
+
|
|
3018
|
+
yield await stream()
|
|
3019
|
+
|
|
3020
|
+
async def compare_dataset_versions(
|
|
3021
|
+
self, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
3022
|
+
) -> AsyncHttpResponse[DatasetVersionDiff]:
|
|
1182
3023
|
"""
|
|
1183
|
-
|
|
3024
|
+
Compare the latest committed dataset version with the current draft state. This endpoint provides insights into changes made since the last version was committed. The comparison calculates additions, modifications, deletions, and unchanged items between the latest version snapshot and current draft.
|
|
1184
3025
|
|
|
1185
3026
|
Parameters
|
|
1186
3027
|
----------
|
|
1187
3028
|
id : str
|
|
1188
3029
|
|
|
1189
|
-
experiment_ids : str
|
|
1190
|
-
|
|
1191
|
-
page : typing.Optional[int]
|
|
1192
|
-
|
|
1193
|
-
size : typing.Optional[int]
|
|
1194
|
-
|
|
1195
|
-
filters : typing.Optional[str]
|
|
1196
|
-
|
|
1197
|
-
truncate : typing.Optional[bool]
|
|
1198
|
-
|
|
1199
3030
|
request_options : typing.Optional[RequestOptions]
|
|
1200
3031
|
Request-specific configuration.
|
|
1201
3032
|
|
|
1202
3033
|
Returns
|
|
1203
3034
|
-------
|
|
1204
|
-
AsyncHttpResponse[
|
|
1205
|
-
|
|
3035
|
+
AsyncHttpResponse[DatasetVersionDiff]
|
|
3036
|
+
Diff computed successfully
|
|
1206
3037
|
"""
|
|
1207
3038
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1208
|
-
f"v1/private/datasets/{jsonable_encoder(id)}/
|
|
3039
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/diff",
|
|
1209
3040
|
method="GET",
|
|
1210
|
-
params={
|
|
1211
|
-
"page": page,
|
|
1212
|
-
"size": size,
|
|
1213
|
-
"experiment_ids": experiment_ids,
|
|
1214
|
-
"filters": filters,
|
|
1215
|
-
"truncate": truncate,
|
|
1216
|
-
},
|
|
1217
3041
|
request_options=request_options,
|
|
1218
3042
|
)
|
|
1219
3043
|
try:
|
|
1220
3044
|
if 200 <= _response.status_code < 300:
|
|
1221
3045
|
_data = typing.cast(
|
|
1222
|
-
|
|
3046
|
+
DatasetVersionDiff,
|
|
1223
3047
|
parse_obj_as(
|
|
1224
|
-
type_=
|
|
3048
|
+
type_=DatasetVersionDiff, # type: ignore
|
|
1225
3049
|
object_=_response.json(),
|
|
1226
3050
|
),
|
|
1227
3051
|
)
|
|
1228
3052
|
return AsyncHttpResponse(response=_response, data=_data)
|
|
3053
|
+
if _response.status_code == 404:
|
|
3054
|
+
raise NotFoundError(
|
|
3055
|
+
headers=dict(_response.headers),
|
|
3056
|
+
body=typing.cast(
|
|
3057
|
+
typing.Optional[typing.Any],
|
|
3058
|
+
parse_obj_as(
|
|
3059
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3060
|
+
object_=_response.json(),
|
|
3061
|
+
),
|
|
3062
|
+
),
|
|
3063
|
+
)
|
|
1229
3064
|
_response_json = _response.json()
|
|
1230
3065
|
except JSONDecodeError:
|
|
1231
3066
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1232
3067
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1233
3068
|
|
|
1234
|
-
async def
|
|
1235
|
-
self, *,
|
|
1236
|
-
) -> AsyncHttpResponse[
|
|
3069
|
+
async def create_version_tag(
|
|
3070
|
+
self, version_hash: str, id: str, *, tag: str, request_options: typing.Optional[RequestOptions] = None
|
|
3071
|
+
) -> AsyncHttpResponse[None]:
|
|
1237
3072
|
"""
|
|
1238
|
-
|
|
3073
|
+
Add a tag to a specific dataset version for easy reference (e.g., 'baseline', 'v1.0', 'production')
|
|
1239
3074
|
|
|
1240
3075
|
Parameters
|
|
1241
3076
|
----------
|
|
1242
|
-
|
|
3077
|
+
version_hash : str
|
|
3078
|
+
|
|
3079
|
+
id : str
|
|
3080
|
+
|
|
3081
|
+
tag : str
|
|
1243
3082
|
|
|
1244
3083
|
request_options : typing.Optional[RequestOptions]
|
|
1245
3084
|
Request-specific configuration.
|
|
1246
3085
|
|
|
1247
3086
|
Returns
|
|
1248
3087
|
-------
|
|
1249
|
-
AsyncHttpResponse[
|
|
1250
|
-
Dataset resource
|
|
3088
|
+
AsyncHttpResponse[None]
|
|
1251
3089
|
"""
|
|
1252
3090
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1253
|
-
"v1/private/datasets/
|
|
3091
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}/tags",
|
|
1254
3092
|
method="POST",
|
|
1255
3093
|
json={
|
|
1256
|
-
"
|
|
3094
|
+
"tag": tag,
|
|
1257
3095
|
},
|
|
1258
3096
|
headers={
|
|
1259
3097
|
"content-type": "application/json",
|
|
@@ -1263,68 +3101,89 @@ class AsyncRawDatasetsClient:
|
|
|
1263
3101
|
)
|
|
1264
3102
|
try:
|
|
1265
3103
|
if 200 <= _response.status_code < 300:
|
|
1266
|
-
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
3104
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
3105
|
+
if _response.status_code == 400:
|
|
3106
|
+
raise BadRequestError(
|
|
3107
|
+
headers=dict(_response.headers),
|
|
3108
|
+
body=typing.cast(
|
|
3109
|
+
typing.Optional[typing.Any],
|
|
3110
|
+
parse_obj_as(
|
|
3111
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3112
|
+
object_=_response.json(),
|
|
3113
|
+
),
|
|
3114
|
+
),
|
|
3115
|
+
)
|
|
3116
|
+
if _response.status_code == 404:
|
|
3117
|
+
raise NotFoundError(
|
|
3118
|
+
headers=dict(_response.headers),
|
|
3119
|
+
body=typing.cast(
|
|
3120
|
+
typing.Optional[typing.Any],
|
|
3121
|
+
parse_obj_as(
|
|
3122
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3123
|
+
object_=_response.json(),
|
|
3124
|
+
),
|
|
3125
|
+
),
|
|
3126
|
+
)
|
|
3127
|
+
if _response.status_code == 409:
|
|
3128
|
+
raise ConflictError(
|
|
3129
|
+
headers=dict(_response.headers),
|
|
3130
|
+
body=typing.cast(
|
|
3131
|
+
typing.Optional[typing.Any],
|
|
3132
|
+
parse_obj_as(
|
|
3133
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3134
|
+
object_=_response.json(),
|
|
3135
|
+
),
|
|
1271
3136
|
),
|
|
1272
3137
|
)
|
|
1273
|
-
return AsyncHttpResponse(response=_response, data=_data)
|
|
1274
3138
|
_response_json = _response.json()
|
|
1275
3139
|
except JSONDecodeError:
|
|
1276
3140
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1277
3141
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1278
3142
|
|
|
1279
|
-
async def
|
|
1280
|
-
self,
|
|
1281
|
-
) -> AsyncHttpResponse[
|
|
3143
|
+
async def delete_version_tag(
|
|
3144
|
+
self, version_hash: str, tag: str, id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
3145
|
+
) -> AsyncHttpResponse[None]:
|
|
1282
3146
|
"""
|
|
1283
|
-
|
|
3147
|
+
Remove a tag from a dataset version. The version itself is not deleted, only the tag reference.
|
|
1284
3148
|
|
|
1285
3149
|
Parameters
|
|
1286
3150
|
----------
|
|
1287
|
-
|
|
3151
|
+
version_hash : str
|
|
3152
|
+
|
|
3153
|
+
tag : str
|
|
3154
|
+
|
|
3155
|
+
id : str
|
|
1288
3156
|
|
|
1289
3157
|
request_options : typing.Optional[RequestOptions]
|
|
1290
3158
|
Request-specific configuration.
|
|
1291
3159
|
|
|
1292
3160
|
Returns
|
|
1293
3161
|
-------
|
|
1294
|
-
AsyncHttpResponse[
|
|
1295
|
-
Dataset item resource
|
|
3162
|
+
AsyncHttpResponse[None]
|
|
1296
3163
|
"""
|
|
1297
3164
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1298
|
-
f"v1/private/datasets/
|
|
1299
|
-
method="
|
|
3165
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/{jsonable_encoder(version_hash)}/tags/{jsonable_encoder(tag)}",
|
|
3166
|
+
method="DELETE",
|
|
1300
3167
|
request_options=request_options,
|
|
1301
3168
|
)
|
|
1302
3169
|
try:
|
|
1303
3170
|
if 200 <= _response.status_code < 300:
|
|
1304
|
-
|
|
1305
|
-
DatasetItemPublic,
|
|
1306
|
-
parse_obj_as(
|
|
1307
|
-
type_=DatasetItemPublic, # type: ignore
|
|
1308
|
-
object_=_response.json(),
|
|
1309
|
-
),
|
|
1310
|
-
)
|
|
1311
|
-
return AsyncHttpResponse(response=_response, data=_data)
|
|
3171
|
+
return AsyncHttpResponse(response=_response, data=None)
|
|
1312
3172
|
_response_json = _response.json()
|
|
1313
3173
|
except JSONDecodeError:
|
|
1314
3174
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1315
3175
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1316
3176
|
|
|
1317
|
-
async def
|
|
3177
|
+
async def list_dataset_versions(
|
|
1318
3178
|
self,
|
|
1319
3179
|
id: str,
|
|
1320
3180
|
*,
|
|
1321
3181
|
page: typing.Optional[int] = None,
|
|
1322
3182
|
size: typing.Optional[int] = None,
|
|
1323
|
-
truncate: typing.Optional[bool] = None,
|
|
1324
3183
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1325
|
-
) -> AsyncHttpResponse[
|
|
3184
|
+
) -> AsyncHttpResponse[DatasetVersionPagePublic]:
|
|
1326
3185
|
"""
|
|
1327
|
-
Get dataset
|
|
3186
|
+
Get paginated list of versions for a dataset, ordered by creation time (newest first)
|
|
1328
3187
|
|
|
1329
3188
|
Parameters
|
|
1330
3189
|
----------
|
|
@@ -1334,145 +3193,197 @@ class AsyncRawDatasetsClient:
|
|
|
1334
3193
|
|
|
1335
3194
|
size : typing.Optional[int]
|
|
1336
3195
|
|
|
1337
|
-
truncate : typing.Optional[bool]
|
|
1338
|
-
|
|
1339
3196
|
request_options : typing.Optional[RequestOptions]
|
|
1340
3197
|
Request-specific configuration.
|
|
1341
3198
|
|
|
1342
3199
|
Returns
|
|
1343
3200
|
-------
|
|
1344
|
-
AsyncHttpResponse[
|
|
1345
|
-
Dataset
|
|
3201
|
+
AsyncHttpResponse[DatasetVersionPagePublic]
|
|
3202
|
+
Dataset versions
|
|
1346
3203
|
"""
|
|
1347
3204
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1348
|
-
f"v1/private/datasets/{jsonable_encoder(id)}/
|
|
3205
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions",
|
|
1349
3206
|
method="GET",
|
|
1350
3207
|
params={
|
|
1351
3208
|
"page": page,
|
|
1352
3209
|
"size": size,
|
|
1353
|
-
"truncate": truncate,
|
|
1354
3210
|
},
|
|
1355
3211
|
request_options=request_options,
|
|
1356
3212
|
)
|
|
1357
3213
|
try:
|
|
1358
3214
|
if 200 <= _response.status_code < 300:
|
|
1359
3215
|
_data = typing.cast(
|
|
1360
|
-
|
|
3216
|
+
DatasetVersionPagePublic,
|
|
1361
3217
|
parse_obj_as(
|
|
1362
|
-
type_=
|
|
3218
|
+
type_=DatasetVersionPagePublic, # type: ignore
|
|
1363
3219
|
object_=_response.json(),
|
|
1364
3220
|
),
|
|
1365
3221
|
)
|
|
1366
3222
|
return AsyncHttpResponse(response=_response, data=_data)
|
|
3223
|
+
if _response.status_code == 400:
|
|
3224
|
+
raise BadRequestError(
|
|
3225
|
+
headers=dict(_response.headers),
|
|
3226
|
+
body=typing.cast(
|
|
3227
|
+
typing.Optional[typing.Any],
|
|
3228
|
+
parse_obj_as(
|
|
3229
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3230
|
+
object_=_response.json(),
|
|
3231
|
+
),
|
|
3232
|
+
),
|
|
3233
|
+
)
|
|
1367
3234
|
_response_json = _response.json()
|
|
1368
3235
|
except JSONDecodeError:
|
|
1369
3236
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1370
3237
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1371
3238
|
|
|
1372
|
-
async def
|
|
1373
|
-
self,
|
|
1374
|
-
|
|
1375
|
-
*,
|
|
1376
|
-
experiment_ids: typing.Optional[str] = None,
|
|
1377
|
-
request_options: typing.Optional[RequestOptions] = None,
|
|
1378
|
-
) -> AsyncHttpResponse[PageColumns]:
|
|
3239
|
+
async def restore_dataset_version(
|
|
3240
|
+
self, id: str, *, version_ref: str, request_options: typing.Optional[RequestOptions] = None
|
|
3241
|
+
) -> AsyncHttpResponse[DatasetVersionPublic]:
|
|
1379
3242
|
"""
|
|
1380
|
-
|
|
3243
|
+
Restores the dataset to a previous version state by creating a new version with items copied from the specified version. If the version is already the latest, returns it as-is (no-op).
|
|
1381
3244
|
|
|
1382
3245
|
Parameters
|
|
1383
3246
|
----------
|
|
1384
3247
|
id : str
|
|
1385
3248
|
|
|
1386
|
-
|
|
3249
|
+
version_ref : str
|
|
3250
|
+
Version hash or tag to restore from
|
|
1387
3251
|
|
|
1388
3252
|
request_options : typing.Optional[RequestOptions]
|
|
1389
3253
|
Request-specific configuration.
|
|
1390
3254
|
|
|
1391
3255
|
Returns
|
|
1392
3256
|
-------
|
|
1393
|
-
AsyncHttpResponse[
|
|
1394
|
-
|
|
3257
|
+
AsyncHttpResponse[DatasetVersionPublic]
|
|
3258
|
+
Version restored successfully
|
|
1395
3259
|
"""
|
|
1396
3260
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1397
|
-
f"v1/private/datasets/{jsonable_encoder(id)}/
|
|
1398
|
-
method="
|
|
1399
|
-
|
|
1400
|
-
"
|
|
3261
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/restore",
|
|
3262
|
+
method="POST",
|
|
3263
|
+
json={
|
|
3264
|
+
"version_ref": version_ref,
|
|
3265
|
+
},
|
|
3266
|
+
headers={
|
|
3267
|
+
"content-type": "application/json",
|
|
1401
3268
|
},
|
|
1402
3269
|
request_options=request_options,
|
|
3270
|
+
omit=OMIT,
|
|
1403
3271
|
)
|
|
1404
3272
|
try:
|
|
1405
3273
|
if 200 <= _response.status_code < 300:
|
|
1406
3274
|
_data = typing.cast(
|
|
1407
|
-
|
|
3275
|
+
DatasetVersionPublic,
|
|
1408
3276
|
parse_obj_as(
|
|
1409
|
-
type_=
|
|
3277
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
1410
3278
|
object_=_response.json(),
|
|
1411
3279
|
),
|
|
1412
3280
|
)
|
|
1413
3281
|
return AsyncHttpResponse(response=_response, data=_data)
|
|
3282
|
+
if _response.status_code == 404:
|
|
3283
|
+
raise NotFoundError(
|
|
3284
|
+
headers=dict(_response.headers),
|
|
3285
|
+
body=typing.cast(
|
|
3286
|
+
typing.Optional[typing.Any],
|
|
3287
|
+
parse_obj_as(
|
|
3288
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3289
|
+
object_=_response.json(),
|
|
3290
|
+
),
|
|
3291
|
+
),
|
|
3292
|
+
)
|
|
1414
3293
|
_response_json = _response.json()
|
|
1415
3294
|
except JSONDecodeError:
|
|
1416
3295
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
1417
3296
|
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|
|
1418
3297
|
|
|
1419
|
-
|
|
1420
|
-
async def stream_dataset_items(
|
|
3298
|
+
async def update_dataset_version(
|
|
1421
3299
|
self,
|
|
3300
|
+
version_hash: str,
|
|
3301
|
+
id: str,
|
|
1422
3302
|
*,
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
steam_limit: typing.Optional[int] = OMIT,
|
|
3303
|
+
change_description: typing.Optional[str] = OMIT,
|
|
3304
|
+
tags_to_add: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1426
3305
|
request_options: typing.Optional[RequestOptions] = None,
|
|
1427
|
-
) ->
|
|
3306
|
+
) -> AsyncHttpResponse[DatasetVersionPublic]:
|
|
1428
3307
|
"""
|
|
1429
|
-
|
|
3308
|
+
Update a dataset version's change_description and/or add new tags
|
|
1430
3309
|
|
|
1431
3310
|
Parameters
|
|
1432
3311
|
----------
|
|
1433
|
-
|
|
3312
|
+
version_hash : str
|
|
1434
3313
|
|
|
1435
|
-
|
|
3314
|
+
id : str
|
|
1436
3315
|
|
|
1437
|
-
|
|
3316
|
+
change_description : typing.Optional[str]
|
|
3317
|
+
Optional description of changes in this version
|
|
3318
|
+
|
|
3319
|
+
tags_to_add : typing.Optional[typing.Sequence[str]]
|
|
3320
|
+
Optional list of tags to add to this version
|
|
1438
3321
|
|
|
1439
3322
|
request_options : typing.Optional[RequestOptions]
|
|
1440
|
-
Request-specific configuration.
|
|
3323
|
+
Request-specific configuration.
|
|
1441
3324
|
|
|
1442
3325
|
Returns
|
|
1443
3326
|
-------
|
|
1444
|
-
|
|
1445
|
-
|
|
3327
|
+
AsyncHttpResponse[DatasetVersionPublic]
|
|
3328
|
+
Version updated successfully
|
|
1446
3329
|
"""
|
|
1447
|
-
|
|
1448
|
-
"v1/private/datasets/
|
|
1449
|
-
method="
|
|
3330
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
3331
|
+
f"v1/private/datasets/{jsonable_encoder(id)}/versions/hash/{jsonable_encoder(version_hash)}",
|
|
3332
|
+
method="PATCH",
|
|
1450
3333
|
json={
|
|
1451
|
-
"
|
|
1452
|
-
"
|
|
1453
|
-
"steam_limit": steam_limit,
|
|
3334
|
+
"change_description": change_description,
|
|
3335
|
+
"tags_to_add": tags_to_add,
|
|
1454
3336
|
},
|
|
1455
3337
|
headers={
|
|
1456
3338
|
"content-type": "application/json",
|
|
1457
3339
|
},
|
|
1458
3340
|
request_options=request_options,
|
|
1459
3341
|
omit=OMIT,
|
|
1460
|
-
)
|
|
1461
|
-
|
|
1462
|
-
|
|
1463
|
-
|
|
1464
|
-
|
|
1465
|
-
|
|
1466
|
-
|
|
1467
|
-
|
|
1468
|
-
|
|
1469
|
-
|
|
1470
|
-
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
|
|
1474
|
-
|
|
1475
|
-
|
|
1476
|
-
|
|
1477
|
-
|
|
1478
|
-
|
|
3342
|
+
)
|
|
3343
|
+
try:
|
|
3344
|
+
if 200 <= _response.status_code < 300:
|
|
3345
|
+
_data = typing.cast(
|
|
3346
|
+
DatasetVersionPublic,
|
|
3347
|
+
parse_obj_as(
|
|
3348
|
+
type_=DatasetVersionPublic, # type: ignore
|
|
3349
|
+
object_=_response.json(),
|
|
3350
|
+
),
|
|
3351
|
+
)
|
|
3352
|
+
return AsyncHttpResponse(response=_response, data=_data)
|
|
3353
|
+
if _response.status_code == 400:
|
|
3354
|
+
raise BadRequestError(
|
|
3355
|
+
headers=dict(_response.headers),
|
|
3356
|
+
body=typing.cast(
|
|
3357
|
+
typing.Optional[typing.Any],
|
|
3358
|
+
parse_obj_as(
|
|
3359
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3360
|
+
object_=_response.json(),
|
|
3361
|
+
),
|
|
3362
|
+
),
|
|
3363
|
+
)
|
|
3364
|
+
if _response.status_code == 404:
|
|
3365
|
+
raise NotFoundError(
|
|
3366
|
+
headers=dict(_response.headers),
|
|
3367
|
+
body=typing.cast(
|
|
3368
|
+
typing.Optional[typing.Any],
|
|
3369
|
+
parse_obj_as(
|
|
3370
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3371
|
+
object_=_response.json(),
|
|
3372
|
+
),
|
|
3373
|
+
),
|
|
3374
|
+
)
|
|
3375
|
+
if _response.status_code == 409:
|
|
3376
|
+
raise ConflictError(
|
|
3377
|
+
headers=dict(_response.headers),
|
|
3378
|
+
body=typing.cast(
|
|
3379
|
+
typing.Optional[typing.Any],
|
|
3380
|
+
parse_obj_as(
|
|
3381
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
3382
|
+
object_=_response.json(),
|
|
3383
|
+
),
|
|
3384
|
+
),
|
|
3385
|
+
)
|
|
3386
|
+
_response_json = _response.json()
|
|
3387
|
+
except JSONDecodeError:
|
|
3388
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
|
|
3389
|
+
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
|