label-studio-sdk 1.0.20__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of label-studio-sdk might be problematic.
- label_studio_sdk/__init__.py +345 -180
- label_studio_sdk/actions/__init__.py +4 -0
- label_studio_sdk/actions/client.py +46 -10
- label_studio_sdk/actions/types/__init__.py +4 -0
- label_studio_sdk/actions/types/actions_create_request_filters.py +2 -2
- label_studio_sdk/actions/types/actions_create_request_filters_items_item.py +2 -2
- label_studio_sdk/actions/types/actions_create_request_id.py +7 -7
- label_studio_sdk/actions/types/actions_create_request_selected_items_excluded.py +2 -2
- label_studio_sdk/actions/types/actions_create_request_selected_items_included.py +2 -2
- label_studio_sdk/actions/types/actions_list_response_item.py +25 -0
- label_studio_sdk/actions/types/actions_list_response_item_dialog.py +22 -0
- label_studio_sdk/annotations/__init__.py +2 -2
- label_studio_sdk/annotations/client.py +379 -243
- label_studio_sdk/annotations/types/__init__.py +4 -2
- label_studio_sdk/annotations/types/annotation_bulk_serializer_with_selected_items_request_last_action.py +7 -0
- label_studio_sdk/annotations/types/annotations_create_bulk_response_item.py +8 -5
- label_studio_sdk/base_client.py +32 -24
- label_studio_sdk/billing/__init__.py +2 -0
- label_studio_sdk/billing/client.py +112 -0
- label_studio_sdk/comments/client.py +378 -140
- label_studio_sdk/converter/README.md +207 -0
- label_studio_sdk/converter/imports/coco.py +132 -23
- label_studio_sdk/core/__init__.py +4 -0
- label_studio_sdk/core/unchecked_base_model.py +305 -0
- label_studio_sdk/environment.py +1 -1
- label_studio_sdk/errors/__init__.py +10 -1
- label_studio_sdk/errors/forbidden_error.py +9 -0
- label_studio_sdk/errors/method_not_allowed_error.py +9 -0
- label_studio_sdk/export_storage/__init__.py +1 -24
- label_studio_sdk/export_storage/azure/__init__.py +0 -3
- label_studio_sdk/export_storage/azure/client.py +231 -273
- label_studio_sdk/export_storage/client.py +5 -5
- label_studio_sdk/export_storage/gcs/__init__.py +0 -3
- label_studio_sdk/export_storage/gcs/client.py +231 -273
- label_studio_sdk/export_storage/local/__init__.py +0 -3
- label_studio_sdk/export_storage/local/client.py +211 -253
- label_studio_sdk/export_storage/redis/__init__.py +0 -3
- label_studio_sdk/export_storage/redis/client.py +239 -281
- label_studio_sdk/export_storage/s3/__init__.py +0 -3
- label_studio_sdk/export_storage/s3/client.py +254 -296
- label_studio_sdk/export_storage/s3s/client.py +694 -210
- label_studio_sdk/export_storage/types/export_storage_list_types_response_item.py +2 -2
- label_studio_sdk/files/client.py +52 -71
- label_studio_sdk/import_storage/__init__.py +1 -24
- label_studio_sdk/import_storage/azure/__init__.py +0 -3
- label_studio_sdk/import_storage/azure/client.py +249 -299
- label_studio_sdk/import_storage/client.py +5 -5
- label_studio_sdk/import_storage/gcs/__init__.py +0 -3
- label_studio_sdk/import_storage/gcs/client.py +249 -299
- label_studio_sdk/import_storage/local/__init__.py +0 -3
- label_studio_sdk/import_storage/local/client.py +211 -257
- label_studio_sdk/import_storage/redis/__init__.py +0 -3
- label_studio_sdk/import_storage/redis/client.py +239 -285
- label_studio_sdk/import_storage/s3/__init__.py +0 -3
- label_studio_sdk/import_storage/s3/client.py +274 -324
- label_studio_sdk/import_storage/s3s/client.py +728 -434
- label_studio_sdk/import_storage/types/import_storage_list_types_response_item.py +2 -2
- label_studio_sdk/jwt_settings/client.py +56 -58
- label_studio_sdk/label_interface/control_tags.py +48 -8
- label_studio_sdk/label_interface/interface.py +261 -56
- label_studio_sdk/ml/__init__.py +2 -16
- label_studio_sdk/ml/client.py +196 -179
- label_studio_sdk/ml/types/__init__.py +2 -12
- label_studio_sdk/ml/types/ml_list_model_versions_response.py +20 -0
- label_studio_sdk/model_providers/__init__.py +3 -0
- label_studio_sdk/model_providers/client.py +280 -228
- label_studio_sdk/model_providers/types/__init__.py +5 -0
- label_studio_sdk/{prompts/types/prompts_batch_predictions_response.py → model_providers/types/model_providers_list_model_provider_choices_response.py} +3 -3
- label_studio_sdk/organizations/__init__.py +5 -0
- label_studio_sdk/organizations/client.py +331 -0
- label_studio_sdk/organizations/members/__init__.py +2 -0
- label_studio_sdk/organizations/members/client.py +636 -0
- label_studio_sdk/predictions/client.py +29 -77
- label_studio_sdk/projects/__init__.py +18 -9
- label_studio_sdk/projects/client.py +905 -414
- label_studio_sdk/projects/exports/__init__.py +2 -2
- label_studio_sdk/projects/exports/client.py +336 -396
- label_studio_sdk/projects/exports/client_ext.py +30 -30
- label_studio_sdk/projects/exports/types/__init__.py +1 -2
- label_studio_sdk/projects/exports/types/exports_convert_response.py +5 -9
- label_studio_sdk/projects/pauses/client.py +114 -105
- label_studio_sdk/projects/stats/__init__.py +5 -0
- label_studio_sdk/projects/stats/client.py +175 -0
- label_studio_sdk/projects/stats/types/__init__.py +8 -0
- label_studio_sdk/projects/stats/types/stats_iaa_response.py +44 -0
- label_studio_sdk/projects/stats/types/stats_iaa_response_common_tasks.py +7 -0
- label_studio_sdk/projects/stats/types/stats_iaa_response_iaa.py +5 -0
- label_studio_sdk/{types/base_task_file_upload.py → projects/stats/types/stats_iaa_response_std.py} +1 -1
- label_studio_sdk/projects/types/__init__.py +10 -6
- label_studio_sdk/projects/types/lse_project_create_request_sampling.py +7 -0
- label_studio_sdk/projects/types/lse_project_create_request_skip_queue.py +7 -0
- label_studio_sdk/projects/types/patched_lse_project_update_request_sampling.py +7 -0
- label_studio_sdk/projects/types/patched_lse_project_update_request_skip_queue.py +7 -0
- label_studio_sdk/{prompts/types/prompts_batch_failed_predictions_response.py → projects/types/projects_duplicate_response.py} +8 -5
- label_studio_sdk/projects/types/projects_import_tasks_response.py +2 -2
- label_studio_sdk/projects/types/projects_list_request_filter.py +1 -1
- label_studio_sdk/prompts/__init__.py +4 -10
- label_studio_sdk/prompts/client.py +511 -442
- label_studio_sdk/prompts/indicators/__init__.py +3 -0
- label_studio_sdk/prompts/indicators/client.py +47 -49
- label_studio_sdk/prompts/indicators/types/__init__.py +5 -0
- label_studio_sdk/{types/key_indicator_value.py → prompts/indicators/types/indicators_list_response_item.py} +3 -3
- label_studio_sdk/prompts/runs/client.py +113 -135
- label_studio_sdk/prompts/types/__init__.py +2 -12
- label_studio_sdk/prompts/types/prompts_compatible_projects_request_project_type.py +7 -0
- label_studio_sdk/prompts/versions/client.py +372 -312
- label_studio_sdk/tasks/__init__.py +2 -2
- label_studio_sdk/tasks/client.py +514 -213
- label_studio_sdk/tasks/types/__init__.py +1 -2
- label_studio_sdk/tokens/client.py +160 -152
- label_studio_sdk/tokens/client_ext.py +3 -3
- label_studio_sdk/types/__init__.py +276 -142
- label_studio_sdk/{webhooks/types/webhooks_update_request_actions_item.py → types/actions_enum.py} +4 -1
- label_studio_sdk/types/all_roles_project_list.py +197 -0
- label_studio_sdk/types/all_roles_project_list_sampling.py +7 -0
- label_studio_sdk/types/all_roles_project_list_skip_queue.py +7 -0
- label_studio_sdk/types/annotated_enum.py +5 -0
- label_studio_sdk/types/annotation.py +24 -10
- label_studio_sdk/types/annotation_last_action.py +3 -15
- label_studio_sdk/types/{annotations_dm_field.py → annotation_request.py} +21 -30
- label_studio_sdk/types/annotation_request_last_action.py +7 -0
- label_studio_sdk/types/assignment_settings.py +31 -0
- label_studio_sdk/types/assignment_settings_label_stream_task_distribution.py +7 -0
- label_studio_sdk/types/assignment_settings_request.py +32 -0
- label_studio_sdk/types/assignment_settings_request_label_stream_task_distribution.py +7 -0
- label_studio_sdk/types/associated_project.py +30 -0
- label_studio_sdk/types/auth_method_enum.py +5 -0
- label_studio_sdk/types/azure_blob_export_storage.py +8 -12
- label_studio_sdk/types/azure_blob_import_storage.py +8 -12
- label_studio_sdk/types/{prompt_associated_projects_item_id.py → batch_failed_predictions.py} +4 -4
- label_studio_sdk/types/{access_token_response.py → batch_predictions.py} +6 -8
- label_studio_sdk/types/billing_checks.py +39 -0
- label_studio_sdk/types/billing_flags.py +44 -0
- label_studio_sdk/types/billing_info_response.py +22 -0
- label_studio_sdk/types/blank_enum.py +5 -0
- label_studio_sdk/types/{key_indicators_item_extra_kpis_item.py → blueprint_list.py} +12 -6
- label_studio_sdk/types/budget_reset_period_enum.py +5 -0
- label_studio_sdk/types/child_filter.py +44 -0
- label_studio_sdk/types/comment.py +39 -14
- label_studio_sdk/types/comment_request.py +32 -0
- label_studio_sdk/types/comment_serializer_with_expanded_user.py +53 -0
- label_studio_sdk/types/converted_format.py +5 -5
- label_studio_sdk/types/{api_token_response.py → converted_format_request.py} +8 -15
- label_studio_sdk/types/count_limit.py +22 -0
- label_studio_sdk/types/custom_scripts_editable_by_enum.py +5 -0
- label_studio_sdk/types/default_role_enum.py +5 -0
- label_studio_sdk/types/edition_enum.py +5 -0
- label_studio_sdk/types/export.py +7 -7
- label_studio_sdk/types/file_upload.py +5 -5
- label_studio_sdk/types/filter.py +9 -6
- label_studio_sdk/types/filter_group.py +3 -3
- label_studio_sdk/types/finished_enum.py +5 -0
- label_studio_sdk/types/gcs_export_storage.py +8 -12
- label_studio_sdk/types/gcs_import_storage.py +8 -12
- label_studio_sdk/types/{rotate_token_response.py → hotkeys.py} +5 -8
- label_studio_sdk/types/{base_task.py → import_api_request.py} +11 -34
- label_studio_sdk/types/inference_run_cost_estimate.py +2 -2
- label_studio_sdk/types/label_stream_task_distribution_enum.py +5 -0
- label_studio_sdk/types/{annotations_dm_field_last_action.py → last_action_enum.py} +1 -1
- label_studio_sdk/types/local_files_export_storage.py +8 -12
- label_studio_sdk/types/local_files_import_storage.py +8 -12
- label_studio_sdk/types/{annotation_filter_options.py → lse_annotation_filter_options.py} +12 -2
- label_studio_sdk/types/lse_annotation_filter_options_request.py +42 -0
- label_studio_sdk/types/lse_annotation_filter_options_request_reviewed.py +7 -0
- label_studio_sdk/types/lse_annotation_filter_options_reviewed.py +7 -0
- label_studio_sdk/types/{export_snapshot.py → lse_export_create.py} +11 -11
- label_studio_sdk/types/lse_fields.py +49 -0
- label_studio_sdk/types/lse_fields_onboarding_state.py +8 -0
- label_studio_sdk/types/lse_fields_trial_role.py +8 -0
- label_studio_sdk/types/lse_key_indicator_value.py +35 -0
- label_studio_sdk/types/lse_organization.py +57 -0
- label_studio_sdk/types/lse_organization_custom_scripts_editable_by.py +7 -0
- label_studio_sdk/types/{key_indicators_item_additional_kpis_item.py → lse_organization_member_list.py} +12 -7
- label_studio_sdk/types/lse_project_create.py +196 -0
- label_studio_sdk/types/lse_project_create_sampling.py +7 -0
- label_studio_sdk/types/lse_project_create_skip_queue.py +7 -0
- label_studio_sdk/types/lse_project_update.py +215 -0
- label_studio_sdk/types/lse_project_update_sampling.py +7 -0
- label_studio_sdk/types/lse_project_update_skip_queue.py +7 -0
- label_studio_sdk/types/lse_s3export_storage.py +134 -0
- label_studio_sdk/{import_storage/s3/types/s3create_response.py → types/lse_s3export_storage_request.py} +47 -21
- label_studio_sdk/{import_storage/s3/types/s3update_response.py → types/lse_s3import_storage.py} +60 -21
- label_studio_sdk/types/{s3s_import_storage.py → lse_s3import_storage_request.py} +32 -21
- label_studio_sdk/types/lse_task.py +117 -0
- label_studio_sdk/types/{data_manager_task_serializer_drafts_item.py → lse_task_drafts_item.py} +2 -2
- label_studio_sdk/types/lse_task_filter_options.py +63 -0
- label_studio_sdk/types/lse_task_filter_options_annotated.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_finished.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_request.py +63 -0
- label_studio_sdk/types/lse_task_filter_options_request_annotated.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_request_finished.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_request_reviewed.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_request_skipped.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_reviewed.py +7 -0
- label_studio_sdk/types/lse_task_filter_options_skipped.py +7 -0
- label_studio_sdk/types/{data_manager_task_serializer_predictions_item.py → lse_task_predictions_item.py} +4 -5
- label_studio_sdk/types/lse_task_serializer_for_annotators.py +54 -0
- label_studio_sdk/types/lse_task_serializer_for_annotators_drafts_item.py +22 -0
- label_studio_sdk/types/lse_task_serializer_for_annotators_predictions_item.py +28 -0
- label_studio_sdk/types/lse_task_serializer_for_reviewers.py +117 -0
- label_studio_sdk/types/lse_task_serializer_for_reviewers_drafts_item.py +22 -0
- label_studio_sdk/types/lse_task_serializer_for_reviewers_predictions_item.py +28 -0
- label_studio_sdk/types/lse_user.py +49 -0
- label_studio_sdk/types/{base_user.py → lse_user_api.py} +17 -6
- label_studio_sdk/types/lse_user_organization_member_list.py +48 -0
- label_studio_sdk/types/lseapi_token_create.py +21 -0
- label_studio_sdk/types/lseapi_token_list.py +21 -0
- label_studio_sdk/types/lsejwt_settings.py +32 -0
- label_studio_sdk/types/maybe_expanded_comment.py +7 -0
- label_studio_sdk/types/ml_backend.py +16 -17
- label_studio_sdk/types/mode_enum.py +5 -0
- label_studio_sdk/types/model_interface.py +44 -0
- label_studio_sdk/types/model_interface_request.py +40 -0
- label_studio_sdk/types/model_interface_serializer_get.py +45 -0
- label_studio_sdk/types/model_provider_connection.py +48 -17
- label_studio_sdk/types/model_provider_connection_budget_reset_period.py +3 -1
- label_studio_sdk/types/model_provider_connection_request.py +71 -0
- label_studio_sdk/types/model_run.py +40 -0
- label_studio_sdk/types/{inference_run_status.py → model_run_status_enum.py} +1 -1
- label_studio_sdk/types/null_enum.py +3 -0
- label_studio_sdk/types/onboarding_state_enum.py +7 -0
- label_studio_sdk/types/organization_billing.py +20 -0
- label_studio_sdk/types/organization_id.py +28 -0
- label_studio_sdk/types/organization_invite.py +20 -0
- label_studio_sdk/types/organization_member.py +37 -0
- label_studio_sdk/types/organization_membership.py +24 -0
- label_studio_sdk/{projects/types/projects_list_response.py → types/paginated_all_roles_project_list_list.py} +5 -5
- label_studio_sdk/types/paginated_lse_organization_member_list_list.py +23 -0
- label_studio_sdk/types/{jwt_settings_response.py → paginated_role_based_task_list.py} +11 -9
- label_studio_sdk/types/pause.py +55 -14
- label_studio_sdk/types/pause_request.py +41 -0
- label_studio_sdk/types/prediction.py +7 -11
- label_studio_sdk/types/prediction_request.py +56 -0
- label_studio_sdk/types/project.py +32 -39
- label_studio_sdk/types/project_import.py +12 -13
- label_studio_sdk/types/project_label_config.py +2 -2
- label_studio_sdk/types/project_label_config_request.py +22 -0
- label_studio_sdk/types/project_sampling.py +3 -3
- label_studio_sdk/types/project_skip_queue.py +3 -1
- label_studio_sdk/types/project_subset_enum.py +5 -0
- label_studio_sdk/types/prompts_status_enum.py +16 -0
- label_studio_sdk/types/{prompt_version_provider.py → provider_enum.py} +1 -1
- label_studio_sdk/types/reason_enum.py +7 -0
- label_studio_sdk/types/redis_export_storage.py +8 -12
- label_studio_sdk/types/redis_import_storage.py +8 -12
- label_studio_sdk/types/refined_prompt_response.py +5 -6
- label_studio_sdk/types/requeue_rejected_tasks_mode_enum.py +5 -0
- label_studio_sdk/types/review_criteria_enum.py +5 -0
- label_studio_sdk/types/review_settings.py +80 -0
- label_studio_sdk/types/review_settings_request.py +80 -0
- label_studio_sdk/types/review_settings_request_requeue_rejected_tasks_mode.py +8 -0
- label_studio_sdk/types/review_settings_request_review_criteria.py +7 -0
- label_studio_sdk/types/review_settings_requeue_rejected_tasks_mode.py +8 -0
- label_studio_sdk/types/review_settings_review_criteria.py +7 -0
- label_studio_sdk/types/reviewed_enum.py +5 -0
- label_studio_sdk/types/role9e7enum.py +5 -0
- label_studio_sdk/types/role_based_task.py +8 -0
- label_studio_sdk/types/s3export_storage.py +8 -12
- label_studio_sdk/types/s3import_storage.py +8 -12
- label_studio_sdk/types/sampling_enum.py +7 -0
- label_studio_sdk/types/scope_enum.py +5 -0
- label_studio_sdk/types/selected_items_request.py +23 -0
- label_studio_sdk/types/serialization_option.py +2 -6
- label_studio_sdk/types/serialization_option_request.py +22 -0
- label_studio_sdk/types/serialization_options.py +17 -5
- label_studio_sdk/types/serialization_options_request.py +47 -0
- label_studio_sdk/types/skill_name_enum.py +5 -0
- label_studio_sdk/types/skip_queue_enum.py +5 -0
- label_studio_sdk/types/skipped_enum.py +5 -0
- label_studio_sdk/types/state_enum.py +5 -0
- label_studio_sdk/types/status7bf_enum.py +5 -0
- label_studio_sdk/types/{azure_blob_import_storage_status.py → status_c5a_enum.py} +2 -2
- label_studio_sdk/types/third_party_model_version.py +65 -0
- label_studio_sdk/types/third_party_model_version_request.py +54 -0
- label_studio_sdk/types/token_refresh_response.py +19 -0
- label_studio_sdk/types/token_rotate_response.py +19 -0
- label_studio_sdk/types/trial_role_enum.py +16 -0
- label_studio_sdk/types/user_simple.py +8 -5
- label_studio_sdk/types/user_simple_request.py +28 -0
- label_studio_sdk/types/version_response.py +49 -0
- label_studio_sdk/types/view.py +8 -15
- label_studio_sdk/types/webhook.py +9 -13
- label_studio_sdk/types/webhook_serializer_for_update.py +15 -13
- label_studio_sdk/types/workspace.py +14 -34
- label_studio_sdk/types/workspace_member_create.py +27 -0
- label_studio_sdk/types/workspace_member_list.py +24 -0
- label_studio_sdk/users/client.py +604 -87
- label_studio_sdk/users/types/users_get_token_response.py +4 -11
- label_studio_sdk/users/types/users_reset_token_response.py +4 -11
- label_studio_sdk/versions/__init__.py +0 -3
- label_studio_sdk/versions/client.py +14 -14
- label_studio_sdk/views/client.py +227 -141
- label_studio_sdk/views/types/views_create_request_data.py +2 -2
- label_studio_sdk/views/types/views_create_request_data_filters.py +2 -2
- label_studio_sdk/views/types/views_create_request_data_filters_items_item.py +2 -2
- label_studio_sdk/views/types/views_update_request_data.py +2 -2
- label_studio_sdk/views/types/views_update_request_data_filters.py +2 -2
- label_studio_sdk/views/types/views_update_request_data_filters_items_item.py +2 -2
- label_studio_sdk/webhooks/__init__.py +36 -2
- label_studio_sdk/webhooks/client.py +173 -367
- label_studio_sdk/webhooks/types/__init__.py +34 -2
- label_studio_sdk/webhooks/types/webhooks_info_response.py +80 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_annotation_created.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_annotation_updated.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_annotations_created.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_annotations_deleted.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_label_link_created.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_label_link_deleted.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_label_link_updated.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_project_created.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_project_deleted.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_project_updated.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_review_created.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_review_updated.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_reviews_deleted.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_tasks_created.py +24 -0
- label_studio_sdk/webhooks/types/webhooks_info_response_tasks_deleted.py +24 -0
- label_studio_sdk/workspaces/__init__.py +1 -2
- label_studio_sdk/workspaces/client.py +97 -117
- label_studio_sdk/workspaces/members/__init__.py +0 -3
- label_studio_sdk/workspaces/members/client.py +65 -81
- {label_studio_sdk-1.0.20.dist-info → label_studio_sdk-2.0.1.dist-info}/METADATA +73 -25
- label_studio_sdk-2.0.1.dist-info/RECORD +435 -0
- {label_studio_sdk-1.0.20.dist-info → label_studio_sdk-2.0.1.dist-info}/WHEEL +1 -1
- label_studio_sdk/annotations/types/annotations_create_bulk_request_selected_items.py +0 -34
- label_studio_sdk/export_storage/azure/types/__init__.py +0 -6
- label_studio_sdk/export_storage/azure/types/azure_create_response.py +0 -57
- label_studio_sdk/export_storage/azure/types/azure_update_response.py +0 -57
- label_studio_sdk/export_storage/gcs/types/__init__.py +0 -6
- label_studio_sdk/export_storage/gcs/types/gcs_create_response.py +0 -57
- label_studio_sdk/export_storage/gcs/types/gcs_update_response.py +0 -57
- label_studio_sdk/export_storage/local/types/__init__.py +0 -6
- label_studio_sdk/export_storage/local/types/local_create_response.py +0 -47
- label_studio_sdk/export_storage/local/types/local_update_response.py +0 -47
- label_studio_sdk/export_storage/redis/types/__init__.py +0 -6
- label_studio_sdk/export_storage/redis/types/redis_create_response.py +0 -62
- label_studio_sdk/export_storage/redis/types/redis_update_response.py +0 -62
- label_studio_sdk/export_storage/s3/types/__init__.py +0 -6
- label_studio_sdk/export_storage/s3/types/s3create_response.py +0 -81
- label_studio_sdk/export_storage/s3/types/s3update_response.py +0 -81
- label_studio_sdk/import_storage/azure/types/__init__.py +0 -6
- label_studio_sdk/import_storage/azure/types/azure_create_response.py +0 -72
- label_studio_sdk/import_storage/azure/types/azure_update_response.py +0 -72
- label_studio_sdk/import_storage/gcs/types/__init__.py +0 -6
- label_studio_sdk/import_storage/gcs/types/gcs_create_response.py +0 -72
- label_studio_sdk/import_storage/gcs/types/gcs_update_response.py +0 -72
- label_studio_sdk/import_storage/local/types/__init__.py +0 -6
- label_studio_sdk/import_storage/local/types/local_create_response.py +0 -47
- label_studio_sdk/import_storage/local/types/local_update_response.py +0 -47
- label_studio_sdk/import_storage/redis/types/__init__.py +0 -6
- label_studio_sdk/import_storage/redis/types/redis_create_response.py +0 -62
- label_studio_sdk/import_storage/redis/types/redis_update_response.py +0 -62
- label_studio_sdk/import_storage/s3/types/__init__.py +0 -6
- label_studio_sdk/ml/types/ml_create_response.py +0 -68
- label_studio_sdk/ml/types/ml_create_response_auth_method.py +0 -5
- label_studio_sdk/ml/types/ml_update_response.py +0 -68
- label_studio_sdk/ml/types/ml_update_response_auth_method.py +0 -5
- label_studio_sdk/projects/exports/types/exports_list_formats_response_item.py +0 -44
- label_studio_sdk/projects/types/projects_create_response.py +0 -91
- label_studio_sdk/projects/types/projects_update_response.py +0 -96
- label_studio_sdk/prompts/types/prompts_batch_failed_predictions_request_failed_predictions_item.py +0 -32
- label_studio_sdk/prompts/types/prompts_batch_predictions_request_results_item.py +0 -59
- label_studio_sdk/tasks/types/tasks_list_response.py +0 -38
- label_studio_sdk/types/annotation_completed_by.py +0 -6
- label_studio_sdk/types/azure_blob_export_storage_status.py +0 -7
- label_studio_sdk/types/base_task_updated_by.py +0 -7
- label_studio_sdk/types/comment_created_by.py +0 -5
- label_studio_sdk/types/converted_format_status.py +0 -5
- label_studio_sdk/types/data_manager_task_serializer.py +0 -118
- label_studio_sdk/types/data_manager_task_serializer_annotators_item.py +0 -5
- label_studio_sdk/types/data_manager_task_serializer_comment_authors_item.py +0 -5
- label_studio_sdk/types/data_manager_task_serializer_predictions_item_model_run.py +0 -5
- label_studio_sdk/types/export_format.py +0 -25
- label_studio_sdk/types/export_snapshot_status.py +0 -5
- label_studio_sdk/types/export_status.py +0 -5
- label_studio_sdk/types/gcs_export_storage_status.py +0 -7
- label_studio_sdk/types/gcs_import_storage_status.py +0 -7
- label_studio_sdk/types/inference_run.py +0 -34
- label_studio_sdk/types/inference_run_created_by.py +0 -5
- label_studio_sdk/types/inference_run_organization.py +0 -5
- label_studio_sdk/types/inference_run_project_subset.py +0 -5
- label_studio_sdk/types/key_indicators.py +0 -6
- label_studio_sdk/types/key_indicators_item.py +0 -41
- label_studio_sdk/types/local_files_export_storage_status.py +0 -7
- label_studio_sdk/types/local_files_import_storage_status.py +0 -7
- label_studio_sdk/types/ml_backend_auth_method.py +0 -5
- label_studio_sdk/types/ml_backend_state.py +0 -5
- label_studio_sdk/types/model_provider_connection_created_by.py +0 -5
- label_studio_sdk/types/model_provider_connection_organization.py +0 -5
- label_studio_sdk/types/model_provider_connection_provider.py +0 -7
- label_studio_sdk/types/model_provider_connection_scope.py +0 -5
- label_studio_sdk/types/pause_paused_by.py +0 -5
- label_studio_sdk/types/project_import_status.py +0 -5
- label_studio_sdk/types/prompt.py +0 -71
- label_studio_sdk/types/prompt_associated_projects_item.py +0 -6
- label_studio_sdk/types/prompt_created_by.py +0 -5
- label_studio_sdk/types/prompt_organization.py +0 -5
- label_studio_sdk/types/prompt_version.py +0 -32
- label_studio_sdk/types/prompt_version_created_by.py +0 -5
- label_studio_sdk/types/prompt_version_organization.py +0 -5
- label_studio_sdk/types/redis_export_storage_status.py +0 -7
- label_studio_sdk/types/redis_import_storage_status.py +0 -7
- label_studio_sdk/types/refined_prompt_response_refinement_status.py +0 -7
- label_studio_sdk/types/s3export_storage_status.py +0 -7
- label_studio_sdk/types/s3import_storage_status.py +0 -7
- label_studio_sdk/types/s3s_export_storage.py +0 -73
- label_studio_sdk/types/s3s_import_storage_status.py +0 -7
- label_studio_sdk/types/task.py +0 -156
- label_studio_sdk/types/task_annotators_item.py +0 -5
- label_studio_sdk/types/task_comment_authors_item.py +0 -5
- label_studio_sdk/types/task_filter_options.py +0 -39
- label_studio_sdk/types/webhook_actions_item.py +0 -21
- label_studio_sdk/types/webhook_serializer_for_update_actions_item.py +0 -21
- label_studio_sdk/versions/types/__init__.py +0 -6
- label_studio_sdk/versions/types/versions_get_response.py +0 -73
- label_studio_sdk/versions/types/versions_get_response_edition.py +0 -5
- label_studio_sdk/workspaces/members/types/__init__.py +0 -6
- label_studio_sdk/workspaces/members/types/members_create_response.py +0 -22
- label_studio_sdk/workspaces/members/types/members_list_response_item.py +0 -22
- label_studio_sdk-1.0.20.dist-info/RECORD +0 -374
- {label_studio_sdk-1.0.20.dist-info → label_studio_sdk-2.0.1.dist-info}/LICENSE +0 -0
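The expanded hunks below are from label_studio_sdk/prompts/versions/client.py (the VersionsClient / AsyncVersionsClient module); note that several removed lines are truncated in the rendered diff and appear cut off below. For callers, the visible changes in these hunks are that the prompt-versions methods now return `ThirdPartyModelVersion` instead of `PromptVersion`, responses are parsed with `construct_type` from the new `core.unchecked_base_model` module rather than `parse_obj_as`, and `create` now requires `title`, `prompt`, and `provider_model_id` (all were `typing.Optional[...] = OMIT` in 1.0.20). A minimal sketch of 2.0.1-style calls, taken directly from the docstring examples in the hunks below (argument values are placeholders):

```python
from label_studio_sdk import LabelStudio

client = LabelStudio(api_key="YOUR_API_KEY")

# 2.0.1: prompt_id is a path argument; title, prompt, and provider_model_id are required.
# The call returns a ThirdPartyModelVersion rather than the old PromptVersion model.
version = client.prompts.versions.create(
    prompt_id=1,
    title="title",
    prompt="prompt",
    provider_model_id="provider_model_id",
)

# 2.0.1: list() takes both a path argument (prompt_id_) and a query parameter (prompt_id),
# plus an optional "ordering" query parameter.
versions = client.prompts.versions.list(
    prompt_id_=1,
    prompt_id=1,
)
```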
@@ -3,16 +3,12 @@
 import typing
 from ...core.client_wrapper import SyncClientWrapper
 from ...core.request_options import RequestOptions
-from ...types.prompt_version import PromptVersion
 from ...core.jsonable_encoder import jsonable_encoder
-from ...core.pydantic_utilities import parse_obj_as
 from json.decoder import JSONDecodeError
 from ...core.api_error import ApiError
-from ...types.
-from ...
-
-from ...types.prompt_version_organization import PromptVersionOrganization
-from ...core.serialization import convert_and_respect_annotation_metadata
+from ...types.third_party_model_version import ThirdPartyModelVersion
+from ...core.unchecked_base_model import construct_type
+from ...types.provider_enum import ProviderEnum
 from ...types.inference_run_cost_estimate import InferenceRunCostEstimate
 from ...types.refined_prompt_response import RefinedPromptResponse
 from ...core.client_wrapper import AsyncClientWrapper
@@ -25,21 +21,72 @@ class VersionsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    def
+    def get_default_version_name(self, id: int, *, request_options: typing.Optional[RequestOptions] = None) -> None:
         """
-        Get
+        Get default prompt version name
 
         Parameters
         ----------
         id : int
-            Prompt ID
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        None
+
+        Examples
+        --------
+        from label_studio_sdk import LabelStudio
+
+        client = LabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+        client.prompts.versions.get_default_version_name(
+            id=1,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/get-default-version-name",
+            method="GET",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def list(
+        self,
+        prompt_id_: int,
+        *,
+        prompt_id: int,
+        ordering: typing.Optional[str] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.List[ThirdPartyModelVersion]:
+        """
+        List all versions of a prompt.
+
+        Parameters
+        ----------
+        prompt_id_ : int
+
+        prompt_id : int
+            A unique integer value identifying the model ID to list versions for.
+
+        ordering : typing.Optional[str]
+            Which field to use when ordering the results.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        typing.List[ThirdPartyModelVersion]
 
 
         Examples
@@ -50,20 +97,25 @@ class VersionsClient:
             api_key="YOUR_API_KEY",
         )
         client.prompts.versions.list(
-
+            prompt_id_=1,
+            prompt_id=1,
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id_)}/versions",
             method="GET",
+            params={
+                "ordering": ordering,
+                "prompt_id": prompt_id,
+            },
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-                    typing.List[
-
-                        type_=typing.List[
+                    typing.List[ThirdPartyModelVersion],
+                    construct_type(
+                        type_=typing.List[ThirdPartyModelVersion],  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -74,54 +126,57 @@ class VersionsClient:
 
     def create(
         self,
-
+        prompt_id: int,
         *,
-        title:
+        title: str,
+        prompt: str,
+        provider_model_id: str,
         parent_model: typing.Optional[int] = OMIT,
+        provider: typing.Optional[ProviderEnum] = OMIT,
         model_provider_connection: typing.Optional[int] = OMIT,
-
-        provider: typing.Optional[PromptVersionProvider] = OMIT,
-        provider_model_id: typing.Optional[str] = OMIT,
-        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
-        created_at: typing.Optional[dt.datetime] = OMIT,
-        updated_at: typing.Optional[dt.datetime] = OMIT,
-        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        organization: typing.Optional[int] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> ThirdPartyModelVersion:
         """
         Create a new version of a prompt.
 
         Parameters
         ----------
-
-            Prompt ID
-
-        title : typing.Optional[str]
-
-        parent_model : typing.Optional[int]
+        prompt_id : int
 
-
+        title : str
+            Model name
 
-        prompt :
+        prompt : str
+            Prompt to execute
 
-
+        provider_model_id : str
+            The model ID to use within the given provider, e.g. gpt-3.5
 
-
+        parent_model : typing.Optional[int]
+            Parent model interface ID
 
-
+        provider : typing.Optional[ProviderEnum]
+            The model provider to use e.g. OpenAI
 
-
+            * `OpenAI` - OpenAI
+            * `AzureOpenAI` - AzureOpenAI
+            * `AzureAIFoundry` - AzureAIFoundry
+            * `VertexAI` - VertexAI
+            * `Gemini` - Gemini
+            * `Anthropic` - Anthropic
+            * `Custom` - Custom
 
-
+        model_provider_connection : typing.Optional[int]
 
-        organization : typing.Optional[
+        organization : typing.Optional[int]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        ThirdPartyModelVersion
 
 
         Examples
@@ -132,27 +187,23 @@ class VersionsClient:
             api_key="YOUR_API_KEY",
         )
         client.prompts.versions.create(
-
+            prompt_id=1,
+            title="title",
+            prompt="prompt",
+            provider_model_id="provider_model_id",
        )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions",
             method="POST",
             json={
-                "title": title,
                 "parent_model": parent_model,
-                "
+                "title": title,
                 "prompt": prompt,
                 "provider": provider,
                 "provider_model_id": provider_model_id,
-                "
-
-                ),
-                "created_at": created_at,
-                "updated_at": updated_at,
-                "organization": convert_and_respect_annotation_metadata(
-                    object_=organization, annotation=PromptVersionOrganization, direction="write"
-                ),
+                "model_provider_connection": model_provider_connection,
+                "organization": organization,
             },
             request_options=request_options,
             omit=OMIT,
@@ -160,9 +211,9 @@ class VersionsClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
-
-                    type_=
+                    ThirdPartyModelVersion,
+                    construct_type(
+                        type_=ThirdPartyModelVersion,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -172,25 +223,23 @@ class VersionsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     def get(
-        self,
-    ) ->
+        self, prompt_id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> ThirdPartyModelVersion:
         """
-
+        Retrieve a specific prompt of a model.
 
         Parameters
         ----------
-
-            Prompt ID
+        prompt_id : int
 
         version_id : int
-            Prompt Version ID
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        ThirdPartyModelVersion
 
 
         Examples
@@ -201,21 +250,21 @@ class VersionsClient:
             api_key="YOUR_API_KEY",
         )
         client.prompts.versions.get(
-
+            prompt_id=1,
             version_id=1,
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}",
             method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
-
-                    type_=
+                    ThirdPartyModelVersion,
+                    construct_type(
+                        type_=ThirdPartyModelVersion,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -224,17 +273,17 @@ class VersionsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def delete(
+    def delete(
+        self, prompt_id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> None:
         """
-        Delete a prompt version by ID
+        Delete a prompt version by ID
 
         Parameters
         ----------
-
-            Prompt ID
+        prompt_id : int
 
         version_id : int
-            Prompt Version ID
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -251,12 +300,12 @@ class VersionsClient:
             api_key="YOUR_API_KEY",
         )
         client.prompts.versions.delete(
-
+            prompt_id=1,
             version_id=1,
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}",
             method="DELETE",
             request_options=request_options,
         )
@@ -270,58 +319,60 @@ class VersionsClient:
 
     def update(
         self,
-
+        prompt_id: int,
         version_id: int,
         *,
-        title: typing.Optional[str] = OMIT,
         parent_model: typing.Optional[int] = OMIT,
-
+        title: typing.Optional[str] = OMIT,
         prompt: typing.Optional[str] = OMIT,
-        provider: typing.Optional[
+        provider: typing.Optional[ProviderEnum] = OMIT,
         provider_model_id: typing.Optional[str] = OMIT,
-
-
-        updated_at: typing.Optional[dt.datetime] = OMIT,
-        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        model_provider_connection: typing.Optional[int] = OMIT,
+        organization: typing.Optional[int] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> ThirdPartyModelVersion:
         """
-        Update a prompt version by ID.
+        Update a specific prompt version by ID.
 
         Parameters
         ----------
-
-            Prompt ID
+        prompt_id : int
 
         version_id : int
-            Prompt Version ID
-
-        title : typing.Optional[str]
 
         parent_model : typing.Optional[int]
+            Parent model interface ID
 
-
+        title : typing.Optional[str]
+            Model name
 
         prompt : typing.Optional[str]
+            Prompt to execute
 
-        provider : typing.Optional[
-
-        provider_model_id : typing.Optional[str]
+        provider : typing.Optional[ProviderEnum]
+            The model provider to use e.g. OpenAI
 
-
+            * `OpenAI` - OpenAI
+            * `AzureOpenAI` - AzureOpenAI
+            * `AzureAIFoundry` - AzureAIFoundry
+            * `VertexAI` - VertexAI
+            * `Gemini` - Gemini
+            * `Anthropic` - Anthropic
+            * `Custom` - Custom
 
-
+        provider_model_id : typing.Optional[str]
+            The model ID to use within the given provider, e.g. gpt-3.5
 
-
+        model_provider_connection : typing.Optional[int]
 
-        organization : typing.Optional[
+        organization : typing.Optional[int]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        ThirdPartyModelVersion
 
 
         Examples
@@ -332,28 +383,24 @@ class VersionsClient:
             api_key="YOUR_API_KEY",
         )
         client.prompts.versions.update(
-
+            prompt_id=1,
             version_id=1,
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}",
             method="PATCH",
             json={
-                "title": title,
                 "parent_model": parent_model,
-                "
+                "title": title,
                 "prompt": prompt,
                 "provider": provider,
                 "provider_model_id": provider_model_id,
-                "
-
-
-
-                "
-                "organization": convert_and_respect_annotation_metadata(
-                    object_=organization, annotation=PromptVersionOrganization, direction="write"
-                ),
+                "model_provider_connection": model_provider_connection,
+                "organization": organization,
+            },
+            headers={
+                "content-type": "application/json",
             },
             request_options=request_options,
             omit=OMIT,
@@ -361,9 +408,9 @@ class VersionsClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
-
-                    type_=
+                    ThirdPartyModelVersion,
+                    construct_type(
+                        type_=ThirdPartyModelVersion,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -373,30 +420,16 @@ class VersionsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     def cost_estimate(
-        self,
-        prompt_id: int,
-        version_id: int,
-        *,
-        project_id: int,
-        project_subset: int,
-        request_options: typing.Optional[RequestOptions] = None,
+        self, prompt_id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
     ) -> InferenceRunCostEstimate:
         """
-        Get
+        Get an estimate of the cost for making an inference run on the selected Prompt Version and Project/ProjectSubset
 
         Parameters
         ----------
         prompt_id : int
-            Prompt ID
 
         version_id : int
-            Prompt Version ID
-
-        project_id : int
-            ID of the project to get an estimate for running on
-
-        project_subset : int
-            Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT')
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -404,7 +437,7 @@ class VersionsClient:
         Returns
         -------
         InferenceRunCostEstimate
-
+            Cost estimate response
 
         Examples
         --------
@@ -416,24 +449,18 @@ class VersionsClient:
         client.prompts.versions.cost_estimate(
             prompt_id=1,
             version_id=1,
-            project_id=1,
-            project_subset=1,
         )
         """
         _response = self._client_wrapper.httpx_client.request(
             f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate",
-            method="
-            params={
-                "project_id": project_id,
-                "project_subset": project_subset,
-            },
+            method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
                     InferenceRunCostEstimate,
-
+                    construct_type(
                         type_=InferenceRunCostEstimate,  # type: ignore
                         object_=_response.json(),
                     ),
@@ -448,7 +475,7 @@ class VersionsClient:
         prompt_id: int,
         version_id: int,
         *,
-        refinement_job_id: str,
+        refinement_job_id: typing.Optional[str] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> RefinedPromptResponse:
         """
@@ -457,12 +484,10 @@ class VersionsClient:
         Parameters
         ----------
         prompt_id : int
-            Prompt ID
 
         version_id : int
-            Prompt Version ID
 
-        refinement_job_id : str
+        refinement_job_id : typing.Optional[str]
             Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint
 
         request_options : typing.Optional[RequestOptions]
@@ -471,7 +496,7 @@ class VersionsClient:
         Returns
         -------
         RefinedPromptResponse
-
+            Refined prompt response
 
         Examples
         --------
@@ -483,7 +508,6 @@ class VersionsClient:
         client.prompts.versions.get_refined_prompt(
             prompt_id=1,
             version_id=1,
-            refinement_job_id="refinement_job_id",
         )
         """
         _response = self._client_wrapper.httpx_client.request(
@@ -498,7 +522,7 @@ class VersionsClient:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
                     RefinedPromptResponse,
-
+                    construct_type(
                         type_=RefinedPromptResponse,  # type: ignore
                         object_=_response.json(),
                     ),
@@ -513,10 +537,10 @@ class VersionsClient:
         prompt_id: int,
         version_id: int,
         *,
+        teacher_model_provider_connection_id: int,
+        teacher_model_name: str,
+        project_id: int,
         async_: typing.Optional[bool] = None,
-        teacher_model_provider_connection_id: typing.Optional[int] = OMIT,
-        teacher_model_name: typing.Optional[str] = OMIT,
-        project_id: typing.Optional[int] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> RefinedPromptResponse:
         """
@@ -525,30 +549,28 @@ class VersionsClient:
         Parameters
         ----------
         prompt_id : int
-            Prompt ID
 
         version_id : int
-            Base Prompt Version ID
 
-
-            Run the refinement job asynchronously
-
-        teacher_model_provider_connection_id : typing.Optional[int]
+        teacher_model_provider_connection_id : int
             Model Provider Connection ID to use to refine the prompt
 
-        teacher_model_name :
+        teacher_model_name : str
             Name of the model to use to refine the prompt
 
-        project_id :
+        project_id : int
             Project ID to target the refined prompt for
 
+        async_ : typing.Optional[bool]
+            Whether to run the refinement asynchronously
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
         RefinedPromptResponse
-
+            Refined prompt response
 
         Examples
         --------
@@ -560,6 +582,9 @@ class VersionsClient:
         client.prompts.versions.refine_prompt(
             prompt_id=1,
             version_id=1,
+            teacher_model_provider_connection_id=1,
+            teacher_model_name="teacher_model_name",
+            project_id=1,
         )
         """
         _response = self._client_wrapper.httpx_client.request(
@@ -583,7 +608,7 @@ class VersionsClient:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
                     RefinedPromptResponse,
-
+                    construct_type(
                         type_=RefinedPromptResponse,  # type: ignore
                         object_=_response.json(),
                     ),
@@ -598,23 +623,82 @@ class AsyncVersionsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    async def
+    async def get_default_version_name(
         self, id: int, *, request_options: typing.Optional[RequestOptions] = None
-    ) ->
+    ) -> None:
         """
-        Get
+        Get default prompt version name
 
         Parameters
         ----------
         id : int
-            Prompt ID
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        None
+
+        Examples
+        --------
+        import asyncio
+
+        from label_studio_sdk import AsyncLabelStudio
+
+        client = AsyncLabelStudio(
+            api_key="YOUR_API_KEY",
+        )
+
+
+        async def main() -> None:
+            await client.prompts.versions.get_default_version_name(
+                id=1,
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"api/prompts/{jsonable_encoder(id)}/get-default-version-name",
+            method="GET",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def list(
+        self,
+        prompt_id_: int,
+        *,
+        prompt_id: int,
+        ordering: typing.Optional[str] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.List[ThirdPartyModelVersion]:
+        """
+        List all versions of a prompt.
+
+        Parameters
+        ----------
+        prompt_id_ : int
+
+        prompt_id : int
+            A unique integer value identifying the model ID to list versions for.
+
+        ordering : typing.Optional[str]
+            Which field to use when ordering the results.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        typing.List[ThirdPartyModelVersion]
 
 
         Examples
@@ -630,23 +714,28 @@ class AsyncVersionsClient:
 
         async def main() -> None:
             await client.prompts.versions.list(
-
+                prompt_id_=1,
+                prompt_id=1,
             )
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id_)}/versions",
             method="GET",
+            params={
+                "ordering": ordering,
+                "prompt_id": prompt_id,
+            },
             request_options=request_options,
         )
        try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-                    typing.List[
-
-                        type_=typing.List[
+                    typing.List[ThirdPartyModelVersion],
+                    construct_type(
+                        type_=typing.List[ThirdPartyModelVersion],  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -657,54 +746,57 @@ class AsyncVersionsClient:
 
     async def create(
         self,
-
+        prompt_id: int,
         *,
-        title:
+        title: str,
+        prompt: str,
+        provider_model_id: str,
         parent_model: typing.Optional[int] = OMIT,
+        provider: typing.Optional[ProviderEnum] = OMIT,
         model_provider_connection: typing.Optional[int] = OMIT,
-
-        provider: typing.Optional[PromptVersionProvider] = OMIT,
-        provider_model_id: typing.Optional[str] = OMIT,
-        created_by: typing.Optional[PromptVersionCreatedBy] = OMIT,
-        created_at: typing.Optional[dt.datetime] = OMIT,
-        updated_at: typing.Optional[dt.datetime] = OMIT,
-        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        organization: typing.Optional[int] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> ThirdPartyModelVersion:
         """
         Create a new version of a prompt.
 
         Parameters
         ----------
-
-            Prompt ID
-
-        title : typing.Optional[str]
-
-        parent_model : typing.Optional[int]
+        prompt_id : int
 
-
+        title : str
+            Model name
 
-        prompt :
+        prompt : str
+            Prompt to execute
 
-
+        provider_model_id : str
+            The model ID to use within the given provider, e.g. gpt-3.5
 
-
+        parent_model : typing.Optional[int]
+            Parent model interface ID
 
-
+        provider : typing.Optional[ProviderEnum]
+            The model provider to use e.g. OpenAI
 
-
+            * `OpenAI` - OpenAI
+            * `AzureOpenAI` - AzureOpenAI
+            * `AzureAIFoundry` - AzureAIFoundry
+            * `VertexAI` - VertexAI
+            * `Gemini` - Gemini
+            * `Anthropic` - Anthropic
+            * `Custom` - Custom
 
-
+        model_provider_connection : typing.Optional[int]
 
-        organization : typing.Optional[
+        organization : typing.Optional[int]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        ThirdPartyModelVersion
 
 
         Examples
@@ -720,30 +812,26 @@ class AsyncVersionsClient:
 
         async def main() -> None:
             await client.prompts.versions.create(
-
+                prompt_id=1,
+                title="title",
+                prompt="prompt",
+                provider_model_id="provider_model_id",
             )
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions",
             method="POST",
             json={
-                "title": title,
                 "parent_model": parent_model,
-                "
+                "title": title,
                 "prompt": prompt,
                 "provider": provider,
                 "provider_model_id": provider_model_id,
-                "
-
-                ),
-                "created_at": created_at,
-                "updated_at": updated_at,
-                "organization": convert_and_respect_annotation_metadata(
-                    object_=organization, annotation=PromptVersionOrganization, direction="write"
-                ),
+                "model_provider_connection": model_provider_connection,
+                "organization": organization,
             },
             request_options=request_options,
             omit=OMIT,
@@ -751,9 +839,9 @@ class AsyncVersionsClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
-
-                    type_=
+                    ThirdPartyModelVersion,
+                    construct_type(
+                        type_=ThirdPartyModelVersion,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
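The rewritten `create` now requires `title`, `prompt`, and `provider_model_id`, drops the old `created_by`/`created_at`/`updated_at` fields, and returns a `ThirdPartyModelVersion`. A hedged sketch of the new call, mirroring the docstring example above (the field values and provider choice are illustrative):

```python
import asyncio

from label_studio_sdk.client import AsyncLabelStudio  # assumed import path

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    version = await client.prompts.versions.create(
        prompt_id=1,
        title="My prompt version",        # required: model name
        prompt="Classify the sentiment.",  # required: prompt to execute
        provider_model_id="gpt-3.5",       # required: model ID within the provider
        provider="OpenAI",                 # optional ProviderEnum value
    )
    print(version)  # ThirdPartyModelVersion


asyncio.run(main())
```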
@@ -763,25 +851,23 @@ class AsyncVersionsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def get(
-        self,
-    ) ->
+        self, prompt_id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> ThirdPartyModelVersion:
         """
-
+        Retrieve a specific prompt of a model.
 
         Parameters
         ----------
-
-            Prompt ID
+        prompt_id : int
 
         version_id : int
-            Prompt Version ID
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        ThirdPartyModelVersion
 
 
         Examples
@@ -797,7 +883,7 @@ class AsyncVersionsClient:
 
         async def main() -> None:
             await client.prompts.versions.get(
-
+                prompt_id=1,
                 version_id=1,
             )
 
@@ -805,16 +891,16 @@ class AsyncVersionsClient:
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}",
             method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
-
-                    type_=
+                    ThirdPartyModelVersion,
+                    construct_type(
+                        type_=ThirdPartyModelVersion,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -824,18 +910,16 @@ class AsyncVersionsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def delete(
-        self,
+        self, prompt_id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
     ) -> None:
         """
-        Delete a prompt version by ID
+        Delete a prompt version by ID
 
         Parameters
         ----------
-
-            Prompt ID
+        prompt_id : int
 
         version_id : int
-            Prompt Version ID
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -857,7 +941,7 @@ class AsyncVersionsClient:
 
         async def main() -> None:
             await client.prompts.versions.delete(
-
+                prompt_id=1,
                 version_id=1,
             )
 
@@ -865,7 +949,7 @@ class AsyncVersionsClient:
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}",
             method="DELETE",
             request_options=request_options,
         )
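`get` and `delete` now both take `prompt_id` and `version_id` instead of a single `id`, addressing the same `/versions/{version_id}` path. A short sketch of both calls, under the same client assumptions as above (the `title` attribute access is an assumption based on the fields shown in `create`):

```python
import asyncio

from label_studio_sdk.client import AsyncLabelStudio  # assumed import path

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # GET /api/prompts/{prompt_id}/versions/{version_id}
    version = await client.prompts.versions.get(prompt_id=1, version_id=1)
    print(version.title)  # assumed ThirdPartyModelVersion field

    # DELETE /api/prompts/{prompt_id}/versions/{version_id}
    await client.prompts.versions.delete(prompt_id=1, version_id=1)


asyncio.run(main())
```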
@@ -879,58 +963,60 @@ class AsyncVersionsClient:
 
     async def update(
         self,
-
+        prompt_id: int,
         version_id: int,
         *,
-        title: typing.Optional[str] = OMIT,
         parent_model: typing.Optional[int] = OMIT,
-
+        title: typing.Optional[str] = OMIT,
         prompt: typing.Optional[str] = OMIT,
-        provider: typing.Optional[
+        provider: typing.Optional[ProviderEnum] = OMIT,
         provider_model_id: typing.Optional[str] = OMIT,
-
-
-        updated_at: typing.Optional[dt.datetime] = OMIT,
-        organization: typing.Optional[PromptVersionOrganization] = OMIT,
+        model_provider_connection: typing.Optional[int] = OMIT,
+        organization: typing.Optional[int] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> ThirdPartyModelVersion:
         """
-        Update a prompt version by ID.
+        Update a specific prompt version by ID.
 
         Parameters
         ----------
-
-            Prompt ID
+        prompt_id : int
 
         version_id : int
-            Prompt Version ID
-
-        title : typing.Optional[str]
 
         parent_model : typing.Optional[int]
+            Parent model interface ID
 
-
+        title : typing.Optional[str]
+            Model name
 
         prompt : typing.Optional[str]
+            Prompt to execute
 
-        provider : typing.Optional[
-
-        provider_model_id : typing.Optional[str]
+        provider : typing.Optional[ProviderEnum]
+            The model provider to use e.g. OpenAI
 
-
+            * `OpenAI` - OpenAI
+            * `AzureOpenAI` - AzureOpenAI
+            * `AzureAIFoundry` - AzureAIFoundry
+            * `VertexAI` - VertexAI
+            * `Gemini` - Gemini
+            * `Anthropic` - Anthropic
+            * `Custom` - Custom
 
-
+        provider_model_id : typing.Optional[str]
+            The model ID to use within the given provider, e.g. gpt-3.5
 
-
+        model_provider_connection : typing.Optional[int]
 
-        organization : typing.Optional[
+        organization : typing.Optional[int]
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        ThirdPartyModelVersion
 
 
         Examples
@@ -946,7 +1032,7 @@ class AsyncVersionsClient:
 
         async def main() -> None:
             await client.prompts.versions.update(
-
+                prompt_id=1,
                 version_id=1,
             )
 
@@ -954,23 +1040,19 @@ class AsyncVersionsClient:
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"api/prompts/{jsonable_encoder(
+            f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}",
             method="PATCH",
             json={
-                "title": title,
                 "parent_model": parent_model,
-                "
+                "title": title,
                 "prompt": prompt,
                 "provider": provider,
                 "provider_model_id": provider_model_id,
-                "
-
-
-
-                "
-                "organization": convert_and_respect_annotation_metadata(
-                    object_=organization, annotation=PromptVersionOrganization, direction="write"
-                ),
+                "model_provider_connection": model_provider_connection,
+                "organization": organization,
+            },
+            headers={
+                "content-type": "application/json",
             },
             request_options=request_options,
             omit=OMIT,
@@ -978,9 +1060,9 @@ class AsyncVersionsClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
-
-                    type_=
+                    ThirdPartyModelVersion,
+                    construct_type(
+                        type_=ThirdPartyModelVersion,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
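`update` follows the same pattern: the path is built from `prompt_id` and `version_id`, every body field is optional, and the PATCH JSON now carries `model_provider_connection` plus a plain integer `organization`. A sketch, same assumptions as above (field values are illustrative):

```python
import asyncio

from label_studio_sdk.client import AsyncLabelStudio  # assumed import path

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # Only the fields you pass are included in the PATCH body; omitted fields stay unchanged.
    updated = await client.prompts.versions.update(
        prompt_id=1,
        version_id=1,
        title="Renamed version",
        provider_model_id="gpt-4o",  # illustrative model ID
    )
    print(updated)  # ThirdPartyModelVersion


asyncio.run(main())
```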
@@ -990,30 +1072,16 @@ class AsyncVersionsClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def cost_estimate(
-        self,
-        prompt_id: int,
-        version_id: int,
-        *,
-        project_id: int,
-        project_subset: int,
-        request_options: typing.Optional[RequestOptions] = None,
+        self, prompt_id: int, version_id: int, *, request_options: typing.Optional[RequestOptions] = None
     ) -> InferenceRunCostEstimate:
         """
-        Get
+        Get an estimate of the cost for making an inference run on the selected Prompt Version and Project/ProjectSubset
 
         Parameters
         ----------
         prompt_id : int
-            Prompt ID
 
         version_id : int
-            Prompt Version ID
-
-        project_id : int
-            ID of the project to get an estimate for running on
-
-        project_subset : int
-            Subset of the project to get an estimate for running on (e.g. 'All', 'Sample', or 'HasGT')
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -1021,7 +1089,7 @@ class AsyncVersionsClient:
         Returns
         -------
         InferenceRunCostEstimate
-
+            Cost estimate response
 
         Examples
         --------
@@ -1038,8 +1106,6 @@ class AsyncVersionsClient:
             await client.prompts.versions.cost_estimate(
                 prompt_id=1,
                 version_id=1,
-                project_id=1,
-                project_subset=1,
             )
 
 
@@ -1047,18 +1113,14 @@ class AsyncVersionsClient:
         """
         _response = await self._client_wrapper.httpx_client.request(
             f"api/prompts/{jsonable_encoder(prompt_id)}/versions/{jsonable_encoder(version_id)}/cost-estimate",
-            method="
-            params={
-                "project_id": project_id,
-                "project_subset": project_subset,
-            },
+            method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
                     InferenceRunCostEstimate,
-
+                    construct_type(
                         type_=InferenceRunCostEstimate,  # type: ignore
                         object_=_response.json(),
                     ),
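`cost_estimate` no longer accepts `project_id`/`project_subset`; the estimate is keyed only by the prompt version, and the request is a plain GET with no query parameters. A sketch under the same client assumption:

```python
import asyncio

from label_studio_sdk.client import AsyncLabelStudio  # assumed import path

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    # GET /api/prompts/{prompt_id}/versions/{version_id}/cost-estimate
    estimate = await client.prompts.versions.cost_estimate(prompt_id=1, version_id=1)
    print(estimate)  # InferenceRunCostEstimate


asyncio.run(main())
```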
@@ -1073,7 +1135,7 @@ class AsyncVersionsClient:
         prompt_id: int,
         version_id: int,
         *,
-        refinement_job_id: str,
+        refinement_job_id: typing.Optional[str] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> RefinedPromptResponse:
         """
@@ -1082,12 +1144,10 @@ class AsyncVersionsClient:
         Parameters
         ----------
         prompt_id : int
-            Prompt ID
 
         version_id : int
-            Prompt Version ID
 
-        refinement_job_id : str
+        refinement_job_id : typing.Optional[str]
             Refinement Job ID acquired from the `POST /api/prompts/{prompt_id}/versions/{version_id}/refine` endpoint
 
         request_options : typing.Optional[RequestOptions]
@@ -1096,7 +1156,7 @@ class AsyncVersionsClient:
         Returns
         -------
         RefinedPromptResponse
-
+            Refined prompt response
 
         Examples
         --------
@@ -1113,7 +1173,6 @@ class AsyncVersionsClient:
             await client.prompts.versions.get_refined_prompt(
                 prompt_id=1,
                 version_id=1,
-                refinement_job_id="refinement_job_id",
             )
 
 
@@ -1131,7 +1190,7 @@ class AsyncVersionsClient:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
                     RefinedPromptResponse,
-
+                    construct_type(
                         type_=RefinedPromptResponse,  # type: ignore
                         object_=_response.json(),
                     ),
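`refinement_job_id` is now optional when polling `get_refined_prompt`. A sketch that still passes it explicitly (the job ID string is a placeholder for the value returned by the refine endpoint):

```python
import asyncio

from label_studio_sdk.client import AsyncLabelStudio  # assumed import path

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    refined = await client.prompts.versions.get_refined_prompt(
        prompt_id=1,
        version_id=1,
        refinement_job_id="JOB_ID",  # placeholder; returned by the refine endpoint
    )
    print(refined)  # RefinedPromptResponse


asyncio.run(main())
```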
@@ -1146,10 +1205,10 @@ class AsyncVersionsClient:
         prompt_id: int,
         version_id: int,
         *,
+        teacher_model_provider_connection_id: int,
+        teacher_model_name: str,
+        project_id: int,
         async_: typing.Optional[bool] = None,
-        teacher_model_provider_connection_id: typing.Optional[int] = OMIT,
-        teacher_model_name: typing.Optional[str] = OMIT,
-        project_id: typing.Optional[int] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> RefinedPromptResponse:
         """
@@ -1158,30 +1217,28 @@ class AsyncVersionsClient:
         Parameters
         ----------
         prompt_id : int
-            Prompt ID
 
         version_id : int
-            Base Prompt Version ID
-
-        async_ : typing.Optional[bool]
-            Run the refinement job asynchronously
 
-        teacher_model_provider_connection_id :
+        teacher_model_provider_connection_id : int
             Model Provider Connection ID to use to refine the prompt
 
-        teacher_model_name :
+        teacher_model_name : str
             Name of the model to use to refine the prompt
 
-        project_id :
+        project_id : int
             Project ID to target the refined prompt for
 
+        async_ : typing.Optional[bool]
+            Whether to run the refinement asynchronously
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
         RefinedPromptResponse
-
+            Refined prompt response
 
         Examples
         --------
@@ -1198,6 +1255,9 @@ class AsyncVersionsClient:
             await client.prompts.versions.refine_prompt(
                 prompt_id=1,
                 version_id=1,
+                teacher_model_provider_connection_id=1,
+                teacher_model_name="teacher_model_name",
+                project_id=1,
             )
 
 
@@ -1224,7 +1284,7 @@ class AsyncVersionsClient:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
                     RefinedPromptResponse,
-
+                    construct_type(
                         type_=RefinedPromptResponse,  # type: ignore
                         object_=_response.json(),
                     ),
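`refine_prompt` now makes the teacher model connection, teacher model name, and target project mandatory, with `async_` left optional. A final sketch under the same assumptions (the teacher model name is illustrative):

```python
import asyncio

from label_studio_sdk.client import AsyncLabelStudio  # assumed import path

client = AsyncLabelStudio(api_key="YOUR_API_KEY")


async def main() -> None:
    refined = await client.prompts.versions.refine_prompt(
        prompt_id=1,
        version_id=1,
        teacher_model_provider_connection_id=1,
        teacher_model_name="gpt-4o",  # illustrative teacher model
        project_id=1,
        async_=True,  # run the refinement job asynchronously
    )
    print(refined)  # RefinedPromptResponse


asyncio.run(main())
```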