mirascope 2.0.0__py3-none-any.whl → 2.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mirascope/__init__.py +2 -11
- mirascope/graphs/__init__.py +22 -0
- mirascope/graphs/finite_state_machine.py +625 -0
- mirascope/llm/__init__.py +15 -96
- mirascope/llm/agents/__init__.py +15 -0
- mirascope/llm/agents/agent.py +97 -0
- mirascope/llm/agents/agent_template.py +45 -0
- mirascope/llm/agents/decorator.py +176 -0
- mirascope/llm/calls/__init__.py +1 -2
- mirascope/llm/calls/base_call.py +33 -0
- mirascope/llm/calls/calls.py +58 -84
- mirascope/llm/calls/decorator.py +120 -140
- mirascope/llm/clients/__init__.py +34 -0
- mirascope/llm/clients/_missing_import_stubs.py +47 -0
- mirascope/llm/clients/anthropic/__init__.py +25 -0
- mirascope/llm/{providers/openai/completions → clients/anthropic}/_utils/__init__.py +0 -2
- mirascope/llm/{providers → clients}/anthropic/_utils/decode.py +22 -66
- mirascope/llm/clients/anthropic/_utils/encode.py +243 -0
- mirascope/llm/clients/anthropic/clients.py +819 -0
- mirascope/llm/clients/anthropic/model_ids.py +8 -0
- mirascope/llm/{providers → clients}/base/__init__.py +5 -4
- mirascope/llm/{providers → clients}/base/_utils.py +17 -78
- mirascope/llm/{providers/base/base_provider.py → clients/base/client.py} +145 -468
- mirascope/llm/{models → clients/base}/params.py +37 -16
- mirascope/llm/clients/google/__init__.py +20 -0
- mirascope/llm/{providers/openai/responses → clients/google}/_utils/__init__.py +0 -2
- mirascope/llm/{providers → clients}/google/_utils/decode.py +22 -98
- mirascope/llm/{providers → clients}/google/_utils/encode.py +46 -168
- mirascope/llm/clients/google/clients.py +853 -0
- mirascope/llm/clients/google/model_ids.py +15 -0
- mirascope/llm/clients/openai/__init__.py +25 -0
- mirascope/llm/clients/openai/completions/__init__.py +28 -0
- mirascope/llm/{providers/google → clients/openai/completions}/_utils/__init__.py +0 -4
- mirascope/llm/{providers → clients}/openai/completions/_utils/decode.py +9 -74
- mirascope/llm/{providers → clients}/openai/completions/_utils/encode.py +52 -70
- mirascope/llm/clients/openai/completions/_utils/model_features.py +81 -0
- mirascope/llm/clients/openai/completions/clients.py +833 -0
- mirascope/llm/clients/openai/completions/model_ids.py +8 -0
- mirascope/llm/clients/openai/responses/__init__.py +26 -0
- mirascope/llm/clients/openai/responses/_utils/__init__.py +13 -0
- mirascope/llm/{providers → clients}/openai/responses/_utils/decode.py +14 -80
- mirascope/llm/{providers → clients}/openai/responses/_utils/encode.py +41 -92
- mirascope/llm/clients/openai/responses/_utils/model_features.py +87 -0
- mirascope/llm/clients/openai/responses/clients.py +832 -0
- mirascope/llm/clients/openai/responses/model_ids.py +8 -0
- mirascope/llm/clients/openai/shared/__init__.py +7 -0
- mirascope/llm/clients/openai/shared/_utils.py +55 -0
- mirascope/llm/clients/providers.py +175 -0
- mirascope/llm/content/__init__.py +2 -3
- mirascope/llm/content/tool_call.py +0 -6
- mirascope/llm/content/tool_output.py +5 -22
- mirascope/llm/context/_utils.py +6 -19
- mirascope/llm/exceptions.py +43 -298
- mirascope/llm/formatting/__init__.py +2 -19
- mirascope/llm/formatting/_utils.py +74 -0
- mirascope/llm/formatting/format.py +30 -219
- mirascope/llm/formatting/from_call_args.py +2 -2
- mirascope/llm/formatting/partial.py +7 -80
- mirascope/llm/formatting/types.py +64 -21
- mirascope/llm/mcp/__init__.py +2 -2
- mirascope/llm/mcp/client.py +118 -0
- mirascope/llm/messages/__init__.py +0 -3
- mirascope/llm/messages/message.py +5 -13
- mirascope/llm/models/__init__.py +2 -7
- mirascope/llm/models/models.py +139 -315
- mirascope/llm/prompts/__init__.py +12 -13
- mirascope/llm/prompts/_utils.py +43 -14
- mirascope/llm/prompts/decorator.py +204 -144
- mirascope/llm/prompts/protocols.py +59 -25
- mirascope/llm/responses/__init__.py +1 -9
- mirascope/llm/responses/_utils.py +12 -102
- mirascope/llm/responses/base_response.py +6 -18
- mirascope/llm/responses/base_stream_response.py +50 -173
- mirascope/llm/responses/finish_reason.py +0 -1
- mirascope/llm/responses/response.py +13 -34
- mirascope/llm/responses/root_response.py +29 -100
- mirascope/llm/responses/stream_response.py +31 -40
- mirascope/llm/tools/__init__.py +2 -9
- mirascope/llm/tools/_utils.py +3 -12
- mirascope/llm/tools/decorator.py +16 -25
- mirascope/llm/tools/protocols.py +4 -4
- mirascope/llm/tools/tool_schema.py +19 -87
- mirascope/llm/tools/toolkit.py +27 -35
- mirascope/llm/tools/tools.py +41 -135
- {mirascope-2.0.0.dist-info → mirascope-2.0.0a1.dist-info}/METADATA +13 -90
- mirascope-2.0.0a1.dist-info/RECORD +102 -0
- {mirascope-2.0.0.dist-info → mirascope-2.0.0a1.dist-info}/WHEEL +1 -1
- {mirascope-2.0.0.dist-info → mirascope-2.0.0a1.dist-info}/licenses/LICENSE +1 -1
- mirascope/_stubs.py +0 -363
- mirascope/api/__init__.py +0 -14
- mirascope/api/_generated/README.md +0 -207
- mirascope/api/_generated/__init__.py +0 -440
- mirascope/api/_generated/annotations/__init__.py +0 -33
- mirascope/api/_generated/annotations/client.py +0 -506
- mirascope/api/_generated/annotations/raw_client.py +0 -1414
- mirascope/api/_generated/annotations/types/__init__.py +0 -31
- mirascope/api/_generated/annotations/types/annotations_create_request_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_create_response.py +0 -48
- mirascope/api/_generated/annotations/types/annotations_create_response_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_get_response.py +0 -48
- mirascope/api/_generated/annotations/types/annotations_get_response_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_list_request_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_list_response.py +0 -21
- mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item.py +0 -50
- mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_update_request_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_update_response.py +0 -48
- mirascope/api/_generated/annotations/types/annotations_update_response_label.py +0 -5
- mirascope/api/_generated/api_keys/__init__.py +0 -17
- mirascope/api/_generated/api_keys/client.py +0 -530
- mirascope/api/_generated/api_keys/raw_client.py +0 -1236
- mirascope/api/_generated/api_keys/types/__init__.py +0 -15
- mirascope/api/_generated/api_keys/types/api_keys_create_response.py +0 -28
- mirascope/api/_generated/api_keys/types/api_keys_get_response.py +0 -27
- mirascope/api/_generated/api_keys/types/api_keys_list_all_for_org_response_item.py +0 -40
- mirascope/api/_generated/api_keys/types/api_keys_list_response_item.py +0 -27
- mirascope/api/_generated/client.py +0 -211
- mirascope/api/_generated/core/__init__.py +0 -52
- mirascope/api/_generated/core/api_error.py +0 -23
- mirascope/api/_generated/core/client_wrapper.py +0 -46
- mirascope/api/_generated/core/datetime_utils.py +0 -28
- mirascope/api/_generated/core/file.py +0 -67
- mirascope/api/_generated/core/force_multipart.py +0 -16
- mirascope/api/_generated/core/http_client.py +0 -543
- mirascope/api/_generated/core/http_response.py +0 -55
- mirascope/api/_generated/core/jsonable_encoder.py +0 -100
- mirascope/api/_generated/core/pydantic_utilities.py +0 -255
- mirascope/api/_generated/core/query_encoder.py +0 -58
- mirascope/api/_generated/core/remove_none_from_dict.py +0 -11
- mirascope/api/_generated/core/request_options.py +0 -35
- mirascope/api/_generated/core/serialization.py +0 -276
- mirascope/api/_generated/docs/__init__.py +0 -4
- mirascope/api/_generated/docs/client.py +0 -91
- mirascope/api/_generated/docs/raw_client.py +0 -178
- mirascope/api/_generated/environment.py +0 -9
- mirascope/api/_generated/environments/__init__.py +0 -23
- mirascope/api/_generated/environments/client.py +0 -649
- mirascope/api/_generated/environments/raw_client.py +0 -1567
- mirascope/api/_generated/environments/types/__init__.py +0 -25
- mirascope/api/_generated/environments/types/environments_create_response.py +0 -24
- mirascope/api/_generated/environments/types/environments_get_analytics_response.py +0 -60
- mirascope/api/_generated/environments/types/environments_get_analytics_response_top_functions_item.py +0 -24
- mirascope/api/_generated/environments/types/environments_get_analytics_response_top_models_item.py +0 -22
- mirascope/api/_generated/environments/types/environments_get_response.py +0 -24
- mirascope/api/_generated/environments/types/environments_list_response_item.py +0 -24
- mirascope/api/_generated/environments/types/environments_update_response.py +0 -24
- mirascope/api/_generated/errors/__init__.py +0 -25
- mirascope/api/_generated/errors/bad_request_error.py +0 -14
- mirascope/api/_generated/errors/conflict_error.py +0 -14
- mirascope/api/_generated/errors/forbidden_error.py +0 -11
- mirascope/api/_generated/errors/internal_server_error.py +0 -10
- mirascope/api/_generated/errors/not_found_error.py +0 -11
- mirascope/api/_generated/errors/payment_required_error.py +0 -15
- mirascope/api/_generated/errors/service_unavailable_error.py +0 -14
- mirascope/api/_generated/errors/too_many_requests_error.py +0 -15
- mirascope/api/_generated/errors/unauthorized_error.py +0 -11
- mirascope/api/_generated/functions/__init__.py +0 -39
- mirascope/api/_generated/functions/client.py +0 -647
- mirascope/api/_generated/functions/raw_client.py +0 -1890
- mirascope/api/_generated/functions/types/__init__.py +0 -53
- mirascope/api/_generated/functions/types/functions_create_request_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_create_response.py +0 -37
- mirascope/api/_generated/functions/types/functions_create_response_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_find_by_hash_response.py +0 -39
- mirascope/api/_generated/functions/types/functions_find_by_hash_response_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_get_by_env_response.py +0 -53
- mirascope/api/_generated/functions/types/functions_get_by_env_response_dependencies_value.py +0 -22
- mirascope/api/_generated/functions/types/functions_get_response.py +0 -37
- mirascope/api/_generated/functions/types/functions_get_response_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_list_by_env_response.py +0 -25
- mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item.py +0 -56
- mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item_dependencies_value.py +0 -22
- mirascope/api/_generated/functions/types/functions_list_response.py +0 -21
- mirascope/api/_generated/functions/types/functions_list_response_functions_item.py +0 -41
- mirascope/api/_generated/functions/types/functions_list_response_functions_item_dependencies_value.py +0 -20
- mirascope/api/_generated/health/__init__.py +0 -7
- mirascope/api/_generated/health/client.py +0 -92
- mirascope/api/_generated/health/raw_client.py +0 -175
- mirascope/api/_generated/health/types/__init__.py +0 -8
- mirascope/api/_generated/health/types/health_check_response.py +0 -22
- mirascope/api/_generated/health/types/health_check_response_status.py +0 -5
- mirascope/api/_generated/organization_invitations/__init__.py +0 -33
- mirascope/api/_generated/organization_invitations/client.py +0 -546
- mirascope/api/_generated/organization_invitations/raw_client.py +0 -1519
- mirascope/api/_generated/organization_invitations/types/__init__.py +0 -53
- mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response.py +0 -34
- mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_request_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response.py +0 -48
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_status.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response.py +0 -48
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_status.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item.py +0 -48
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_status.py +0 -7
- mirascope/api/_generated/organization_memberships/__init__.py +0 -19
- mirascope/api/_generated/organization_memberships/client.py +0 -302
- mirascope/api/_generated/organization_memberships/raw_client.py +0 -736
- mirascope/api/_generated/organization_memberships/types/__init__.py +0 -27
- mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item.py +0 -33
- mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item_role.py +0 -7
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_request_role.py +0 -7
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response.py +0 -31
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response_role.py +0 -7
- mirascope/api/_generated/organizations/__init__.py +0 -51
- mirascope/api/_generated/organizations/client.py +0 -869
- mirascope/api/_generated/organizations/raw_client.py +0 -2593
- mirascope/api/_generated/organizations/types/__init__.py +0 -71
- mirascope/api/_generated/organizations/types/organizations_create_payment_intent_response.py +0 -24
- mirascope/api/_generated/organizations/types/organizations_create_response.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_create_response_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_get_response.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_get_response_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_list_response_item.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_list_response_item_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_request_target_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response.py +0 -47
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item.py +0 -33
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item_resource.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_router_balance_response.py +0 -24
- mirascope/api/_generated/organizations/types/organizations_subscription_response.py +0 -53
- mirascope/api/_generated/organizations/types/organizations_subscription_response_current_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_subscription_response_payment_method.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change.py +0 -34
- mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change_target_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_update_response.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_update_response_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_update_subscription_request_target_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_update_subscription_response.py +0 -35
- mirascope/api/_generated/project_memberships/__init__.py +0 -25
- mirascope/api/_generated/project_memberships/client.py +0 -437
- mirascope/api/_generated/project_memberships/raw_client.py +0 -1039
- mirascope/api/_generated/project_memberships/types/__init__.py +0 -29
- mirascope/api/_generated/project_memberships/types/project_memberships_create_request_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_create_response.py +0 -35
- mirascope/api/_generated/project_memberships/types/project_memberships_create_response_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item.py +0 -33
- mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_update_request_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_update_response.py +0 -35
- mirascope/api/_generated/project_memberships/types/project_memberships_update_response_role.py +0 -7
- mirascope/api/_generated/projects/__init__.py +0 -7
- mirascope/api/_generated/projects/client.py +0 -428
- mirascope/api/_generated/projects/raw_client.py +0 -1302
- mirascope/api/_generated/projects/types/__init__.py +0 -10
- mirascope/api/_generated/projects/types/projects_create_response.py +0 -25
- mirascope/api/_generated/projects/types/projects_get_response.py +0 -25
- mirascope/api/_generated/projects/types/projects_list_response_item.py +0 -25
- mirascope/api/_generated/projects/types/projects_update_response.py +0 -25
- mirascope/api/_generated/reference.md +0 -4915
- mirascope/api/_generated/tags/__init__.py +0 -19
- mirascope/api/_generated/tags/client.py +0 -504
- mirascope/api/_generated/tags/raw_client.py +0 -1288
- mirascope/api/_generated/tags/types/__init__.py +0 -17
- mirascope/api/_generated/tags/types/tags_create_response.py +0 -41
- mirascope/api/_generated/tags/types/tags_get_response.py +0 -41
- mirascope/api/_generated/tags/types/tags_list_response.py +0 -23
- mirascope/api/_generated/tags/types/tags_list_response_tags_item.py +0 -41
- mirascope/api/_generated/tags/types/tags_update_response.py +0 -41
- mirascope/api/_generated/token_cost/__init__.py +0 -7
- mirascope/api/_generated/token_cost/client.py +0 -160
- mirascope/api/_generated/token_cost/raw_client.py +0 -264
- mirascope/api/_generated/token_cost/types/__init__.py +0 -8
- mirascope/api/_generated/token_cost/types/token_cost_calculate_request_usage.py +0 -54
- mirascope/api/_generated/token_cost/types/token_cost_calculate_response.py +0 -52
- mirascope/api/_generated/traces/__init__.py +0 -97
- mirascope/api/_generated/traces/client.py +0 -1103
- mirascope/api/_generated/traces/raw_client.py +0 -2322
- mirascope/api/_generated/traces/types/__init__.py +0 -155
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item.py +0 -29
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource.py +0 -27
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item.py +0 -23
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value.py +0 -38
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_array_value.py +0 -19
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value_values_item.py +0 -20
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item.py +0 -29
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope.py +0 -31
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item.py +0 -23
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value.py +0 -38
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_array_value.py +0 -19
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value_values_item.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item.py +0 -48
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item.py +0 -23
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value.py +0 -38
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_array_value.py +0 -19
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value.py +0 -24
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value_values_item.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_status.py +0 -20
- mirascope/api/_generated/traces/types/traces_create_response.py +0 -24
- mirascope/api/_generated/traces/types/traces_create_response_partial_success.py +0 -22
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response.py +0 -60
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response_top_functions_item.py +0 -24
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response_top_models_item.py +0 -22
- mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response.py +0 -33
- mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response_spans_item.py +0 -88
- mirascope/api/_generated/traces/types/traces_get_trace_detail_response.py +0 -33
- mirascope/api/_generated/traces/types/traces_get_trace_detail_response_spans_item.py +0 -88
- mirascope/api/_generated/traces/types/traces_list_by_function_hash_response.py +0 -25
- mirascope/api/_generated/traces/types/traces_list_by_function_hash_response_traces_item.py +0 -44
- mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item_operator.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_by.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_order.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_by_env_response.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_by_env_response_spans_item.py +0 -50
- mirascope/api/_generated/traces/types/traces_search_request_attribute_filters_item.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_request_attribute_filters_item_operator.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_request_sort_by.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_request_sort_order.py +0 -5
- mirascope/api/_generated/traces/types/traces_search_response.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_response_spans_item.py +0 -50
- mirascope/api/_generated/types/__init__.py +0 -85
- mirascope/api/_generated/types/already_exists_error.py +0 -22
- mirascope/api/_generated/types/already_exists_error_tag.py +0 -5
- mirascope/api/_generated/types/bad_request_error_body.py +0 -50
- mirascope/api/_generated/types/click_house_error.py +0 -22
- mirascope/api/_generated/types/database_error.py +0 -22
- mirascope/api/_generated/types/database_error_tag.py +0 -5
- mirascope/api/_generated/types/date.py +0 -3
- mirascope/api/_generated/types/http_api_decode_error.py +0 -27
- mirascope/api/_generated/types/http_api_decode_error_tag.py +0 -5
- mirascope/api/_generated/types/immutable_resource_error.py +0 -22
- mirascope/api/_generated/types/internal_server_error_body.py +0 -49
- mirascope/api/_generated/types/issue.py +0 -38
- mirascope/api/_generated/types/issue_tag.py +0 -10
- mirascope/api/_generated/types/not_found_error_body.py +0 -22
- mirascope/api/_generated/types/not_found_error_tag.py +0 -5
- mirascope/api/_generated/types/number_from_string.py +0 -3
- mirascope/api/_generated/types/permission_denied_error.py +0 -22
- mirascope/api/_generated/types/permission_denied_error_tag.py +0 -5
- mirascope/api/_generated/types/plan_limit_exceeded_error.py +0 -32
- mirascope/api/_generated/types/plan_limit_exceeded_error_tag.py +0 -7
- mirascope/api/_generated/types/pricing_unavailable_error.py +0 -23
- mirascope/api/_generated/types/property_key.py +0 -7
- mirascope/api/_generated/types/property_key_key.py +0 -25
- mirascope/api/_generated/types/property_key_key_tag.py +0 -5
- mirascope/api/_generated/types/rate_limit_error.py +0 -31
- mirascope/api/_generated/types/rate_limit_error_tag.py +0 -5
- mirascope/api/_generated/types/service_unavailable_error_body.py +0 -24
- mirascope/api/_generated/types/service_unavailable_error_tag.py +0 -7
- mirascope/api/_generated/types/stripe_error.py +0 -20
- mirascope/api/_generated/types/subscription_past_due_error.py +0 -31
- mirascope/api/_generated/types/subscription_past_due_error_tag.py +0 -7
- mirascope/api/_generated/types/unauthorized_error_body.py +0 -21
- mirascope/api/_generated/types/unauthorized_error_tag.py +0 -5
- mirascope/api/client.py +0 -255
- mirascope/api/settings.py +0 -99
- mirascope/llm/formatting/output_parser.py +0 -178
- mirascope/llm/formatting/primitives.py +0 -192
- mirascope/llm/mcp/mcp_client.py +0 -130
- mirascope/llm/messages/_utils.py +0 -34
- mirascope/llm/models/thinking_config.py +0 -61
- mirascope/llm/prompts/prompts.py +0 -487
- mirascope/llm/providers/__init__.py +0 -62
- mirascope/llm/providers/anthropic/__init__.py +0 -11
- mirascope/llm/providers/anthropic/_utils/__init__.py +0 -27
- mirascope/llm/providers/anthropic/_utils/beta_decode.py +0 -282
- mirascope/llm/providers/anthropic/_utils/beta_encode.py +0 -266
- mirascope/llm/providers/anthropic/_utils/encode.py +0 -418
- mirascope/llm/providers/anthropic/_utils/errors.py +0 -46
- mirascope/llm/providers/anthropic/beta_provider.py +0 -374
- mirascope/llm/providers/anthropic/model_id.py +0 -23
- mirascope/llm/providers/anthropic/model_info.py +0 -87
- mirascope/llm/providers/anthropic/provider.py +0 -479
- mirascope/llm/providers/google/__init__.py +0 -6
- mirascope/llm/providers/google/_utils/errors.py +0 -50
- mirascope/llm/providers/google/model_id.py +0 -22
- mirascope/llm/providers/google/model_info.py +0 -63
- mirascope/llm/providers/google/provider.py +0 -492
- mirascope/llm/providers/mirascope/__init__.py +0 -5
- mirascope/llm/providers/mirascope/_utils.py +0 -73
- mirascope/llm/providers/mirascope/provider.py +0 -349
- mirascope/llm/providers/mlx/__init__.py +0 -9
- mirascope/llm/providers/mlx/_utils.py +0 -141
- mirascope/llm/providers/mlx/encoding/__init__.py +0 -8
- mirascope/llm/providers/mlx/encoding/base.py +0 -72
- mirascope/llm/providers/mlx/encoding/transformers.py +0 -150
- mirascope/llm/providers/mlx/mlx.py +0 -254
- mirascope/llm/providers/mlx/model_id.py +0 -17
- mirascope/llm/providers/mlx/provider.py +0 -452
- mirascope/llm/providers/model_id.py +0 -16
- mirascope/llm/providers/ollama/__init__.py +0 -7
- mirascope/llm/providers/ollama/provider.py +0 -71
- mirascope/llm/providers/openai/__init__.py +0 -15
- mirascope/llm/providers/openai/_utils/__init__.py +0 -5
- mirascope/llm/providers/openai/_utils/errors.py +0 -46
- mirascope/llm/providers/openai/completions/__init__.py +0 -7
- mirascope/llm/providers/openai/completions/base_provider.py +0 -542
- mirascope/llm/providers/openai/completions/provider.py +0 -22
- mirascope/llm/providers/openai/model_id.py +0 -31
- mirascope/llm/providers/openai/model_info.py +0 -303
- mirascope/llm/providers/openai/provider.py +0 -441
- mirascope/llm/providers/openai/responses/__init__.py +0 -5
- mirascope/llm/providers/openai/responses/provider.py +0 -513
- mirascope/llm/providers/provider_id.py +0 -24
- mirascope/llm/providers/provider_registry.py +0 -299
- mirascope/llm/providers/together/__init__.py +0 -7
- mirascope/llm/providers/together/provider.py +0 -40
- mirascope/llm/responses/usage.py +0 -95
- mirascope/ops/__init__.py +0 -111
- mirascope/ops/_internal/__init__.py +0 -5
- mirascope/ops/_internal/closure.py +0 -1169
- mirascope/ops/_internal/configuration.py +0 -177
- mirascope/ops/_internal/context.py +0 -76
- mirascope/ops/_internal/exporters/__init__.py +0 -26
- mirascope/ops/_internal/exporters/exporters.py +0 -395
- mirascope/ops/_internal/exporters/processors.py +0 -104
- mirascope/ops/_internal/exporters/types.py +0 -165
- mirascope/ops/_internal/exporters/utils.py +0 -29
- mirascope/ops/_internal/instrumentation/__init__.py +0 -8
- mirascope/ops/_internal/instrumentation/llm/__init__.py +0 -8
- mirascope/ops/_internal/instrumentation/llm/common.py +0 -530
- mirascope/ops/_internal/instrumentation/llm/cost.py +0 -190
- mirascope/ops/_internal/instrumentation/llm/encode.py +0 -238
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/__init__.py +0 -38
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_input_messages.py +0 -31
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_output_messages.py +0 -38
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_system_instructions.py +0 -18
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/shared.py +0 -100
- mirascope/ops/_internal/instrumentation/llm/llm.py +0 -161
- mirascope/ops/_internal/instrumentation/llm/model.py +0 -1798
- mirascope/ops/_internal/instrumentation/llm/response.py +0 -521
- mirascope/ops/_internal/instrumentation/llm/serialize.py +0 -300
- mirascope/ops/_internal/propagation.py +0 -198
- mirascope/ops/_internal/protocols.py +0 -133
- mirascope/ops/_internal/session.py +0 -139
- mirascope/ops/_internal/spans.py +0 -232
- mirascope/ops/_internal/traced_calls.py +0 -375
- mirascope/ops/_internal/traced_functions.py +0 -523
- mirascope/ops/_internal/tracing.py +0 -353
- mirascope/ops/_internal/types.py +0 -13
- mirascope/ops/_internal/utils.py +0 -123
- mirascope/ops/_internal/versioned_calls.py +0 -512
- mirascope/ops/_internal/versioned_functions.py +0 -357
- mirascope/ops/_internal/versioning.py +0 -303
- mirascope/ops/exceptions.py +0 -21
- mirascope-2.0.0.dist-info/RECORD +0 -423
- /mirascope/llm/{providers → clients}/base/kwargs.py +0 -0
- /mirascope/llm/{providers → clients}/google/message.py +0 -0
@@ -0,0 +1,15 @@
+"""Google registered LLM models."""
+
+from typing import Literal, TypeAlias
+
+GoogleModelId: TypeAlias = (
+    Literal[
+        "gemini-2.5-pro",
+        "gemini-2.5-flash",
+        "gemini-2.5-flash-lite",
+        "gemini-2.0-flash",
+        "gemini-2.0-flash-lite",
+    ]
+    | str
+)
+"""The Google model ids registered with Mirascope."""
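The hunk above appears to be the new mirascope/llm/clients/google/model_ids.py from the file list: `GoogleModelId` is a `Literal` of the registered Gemini names widened with `| str`, so known ids get autocompletion and narrowing while unlisted ids still type-check. A minimal sketch of how such an alias behaves; the `call_google` helper below is hypothetical and only illustrates the pattern:

```python
# Sketch only: a Literal-plus-str alias accepts both registered and arbitrary ids.
from typing import Literal, TypeAlias

GoogleModelId: TypeAlias = Literal["gemini-2.5-pro", "gemini-2.5-flash"] | str


def call_google(model_id: GoogleModelId) -> str:
    # Hypothetical helper; the real clients live elsewhere in this package.
    return f"requesting {model_id}"


call_google("gemini-2.5-pro")       # registered literal, autocompleted
call_google("gemini-exp-unlisted")  # still valid thanks to the "| str" escape hatch
```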
@@ -0,0 +1,25 @@
+"""OpenAI client implementation."""
+
+from .completions import (
+    OpenAICompletionsClient,
+    OpenAICompletionsModelId,
+    client as completions_client,
+    get_client as get_completions_client,
+)
+from .responses import (
+    OpenAIResponsesClient,
+    OpenAIResponsesModelId,
+    client as responses_client,
+    get_client as get_responses_client,
+)
+
+__all__ = [
+    "OpenAICompletionsClient",
+    "OpenAICompletionsModelId",
+    "OpenAIResponsesClient",
+    "OpenAIResponsesModelId",
+    "completions_client",
+    "get_completions_client",
+    "get_responses_client",
+    "responses_client",
+]
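Judging by the file list, the hunk above is the new mirascope/llm/clients/openai/__init__.py, which re-exports the Chat Completions and Responses flavors side by side. Only the import surface is visible in this diff, so the sketch below assumes nothing beyond the names in `__all__`:

```python
# Sketch: the names the new OpenAI client package exposes, per the __all__ above.
from mirascope.llm.clients.openai import (
    OpenAICompletionsClient,  # Chat Completions flavor
    OpenAIResponsesClient,    # Responses flavor
    completions_client,       # aliased from .completions (client)
    get_completions_client,   # aliased from .completions (get_client)
    responses_client,         # aliased from .responses (client)
    get_responses_client,     # aliased from .responses (get_client)
)
```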
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .clients import OpenAICompletionsClient, client, get_client
+    from .model_ids import OpenAICompletionsModelId
+else:
+    try:
+        from .clients import OpenAICompletionsClient, client, get_client
+        from .model_ids import OpenAICompletionsModelId
+    except ImportError:  # pragma: no cover
+        from ..._missing_import_stubs import (
+            create_client_stub,
+            create_import_error_stub,
+        )
+
+        OpenAICompletionsClient = create_client_stub(
+            "openai", "OpenAICompletionsClient"
+        )
+        OpenAICompletionsModelId = str
+        client = create_import_error_stub("openai", "OpenAICompletionsClient")
+        get_client = create_import_error_stub("openai", "OpenAICompletionsClient")
+
+__all__ = [
+    "OpenAICompletionsClient",
+    "OpenAICompletionsModelId",
+    "client",
+    "get_client",
+]
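The hunk above, apparently mirascope/llm/clients/openai/completions/__init__.py, uses a standard optional-dependency pattern: the real imports live under `TYPE_CHECKING` for static analysis, the same imports are retried at runtime, and stub factories from `_missing_import_stubs` are substituted when the `openai` package is absent so the module still imports cleanly. A self-contained sketch of that pattern under assumed names; the stub helper below is illustrative, not Mirascope's actual `create_import_error_stub`:

```python
# Sketch of the optional-dependency import pattern (helper body is an assumption).
from typing import TYPE_CHECKING


def create_import_error_stub(extra: str, name: str):
    """Return a callable that raises a helpful error when the extra is missing."""

    def _stub(*args: object, **kwargs: object) -> None:
        raise ImportError(f"{name} requires the '{extra}' extra to be installed.")

    return _stub


if TYPE_CHECKING:
    from some_optional_sdk import RealClient  # seen only by type checkers
else:
    try:
        from some_optional_sdk import RealClient
    except ImportError:  # SDK absent: importing this module still succeeds
        RealClient = create_import_error_stub("some_optional_sdk", "RealClient")
```

The apparent payoff is that importing the package never fails outright when the optional SDK is missing; calling the stubbed names surfaces an actionable install hint instead.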
@@ -1,17 +1,13 @@
-from ...base._utils import get_include_thoughts
 from .decode import (
     decode_async_stream,
     decode_response,
     decode_stream,
 )
 from .encode import encode_request
-from .errors import GOOGLE_ERROR_MAP
 
 __all__ = [
-    "GOOGLE_ERROR_MAP",
     "decode_async_stream",
     "decode_response",
     "decode_stream",
     "encode_request",
-    "get_include_thoughts",
 ]
@@ -4,7 +4,6 @@ from typing import Literal
 
 from openai import AsyncStream, Stream
 from openai.types import chat as openai_types
-from openai.types.completion_usage import CompletionUsage
 
 from .....content import (
     AssistantContentPart,
@@ -24,10 +23,8 @@ from .....responses import (
     FinishReason,
     FinishReasonChunk,
     RawStreamEventChunk,
-    Usage,
-    UsageDeltaChunk,
 )
-from
+from ..model_ids import OpenAICompletionsModelId
 
 OPENAI_FINISH_REASON_MAP = {
     "length": FinishReason.MAX_TOKENS,
@@ -35,40 +32,11 @@ OPENAI_FINISH_REASON_MAP = {
 }
 
 
-def _decode_usage(
-    usage: CompletionUsage | None,
-) -> Usage | None:
-    """Convert OpenAI CompletionUsage to Mirascope Usage."""
-    if usage is None:  # pragma: no cover
-        return None
-
-    return Usage(
-        input_tokens=usage.prompt_tokens,
-        output_tokens=usage.completion_tokens,
-        cache_read_tokens=(
-            usage.prompt_tokens_details.cached_tokens
-            if usage.prompt_tokens_details
-            else None
-        )
-        or 0,
-        cache_write_tokens=0,
-        reasoning_tokens=(
-            usage.completion_tokens_details.reasoning_tokens
-            if usage.completion_tokens_details
-            else None
-        )
-        or 0,
-        raw=usage,
-    )
-
-
 def decode_response(
     response: openai_types.ChatCompletion,
-    model_id:
-
-
-) -> tuple[AssistantMessage, FinishReason | None, Usage | None]:
-    """Convert OpenAI ChatCompletion to mirascope AssistantMessage and usage."""
+    model_id: OpenAICompletionsModelId,
+) -> tuple[AssistantMessage, FinishReason | None]:
+    """Convert OpenAI ChatCompletion to mirascope AssistantMessage."""
     choice = response.choices[0]
     message = choice.message
     refused = False
@@ -101,14 +69,12 @@ def decode_response(
 
     assistant_message = AssistantMessage(
         content=parts,
-
+        provider="openai:completions",
         model_id=model_id,
-        provider_model_name=provider_model_name or model_name(model_id, "completions"),
         raw_message=message.model_dump(exclude_none=True),
     )
 
-
-    return assistant_message, finish_reason, usage
+    return assistant_message, finish_reason
 
 
 class _OpenAIChunkProcessor:
@@ -117,33 +83,12 @@ class _OpenAIChunkProcessor:
     def __init__(self) -> None:
         self.current_content_type: Literal["text", "tool_call"] | None = None
         self.current_tool_index: int | None = None
-        self.current_tool_id: str | None = None
         self.refusal_encountered = False
 
     def process_chunk(self, chunk: openai_types.ChatCompletionChunk) -> ChunkIterator:
         """Process a single OpenAI chunk and yield the appropriate content chunks."""
         yield RawStreamEventChunk(raw_stream_event=chunk)
 
-        if chunk.usage:
-            usage = chunk.usage
-            yield UsageDeltaChunk(
-                input_tokens=usage.prompt_tokens,
-                output_tokens=usage.completion_tokens,
-                cache_read_tokens=(
-                    usage.prompt_tokens_details.cached_tokens
-                    if usage.prompt_tokens_details
-                    else None
-                )
-                or 0,
-                cache_write_tokens=0,
-                reasoning_tokens=(
-                    usage.completion_tokens_details.reasoning_tokens
-                    if usage.completion_tokens_details
-                    else None
-                )
-                or 0,
-            )
-
         choice = chunk.choices[0] if chunk.choices else None
         if not choice:
             return  # pragma: no cover
@@ -181,9 +126,7 @@ class _OpenAIChunkProcessor:
             self.current_tool_index is not None
             and self.current_tool_index < index
         ):
-
-                raise RuntimeError("No current_tool_id for ToolCallChunk")
-            yield ToolCallEndChunk(id=self.current_tool_id)
+            yield ToolCallEndChunk()
             self.current_tool_index = None
 
         if self.current_tool_index is None:
@@ -204,23 +147,15 @@ class _OpenAIChunkProcessor:
                 id=tool_id,
                 name=name,
             )
-            self.current_tool_id = tool_id
 
         if tool_call_delta.function and tool_call_delta.function.arguments:
-
-                raise RuntimeError("No current_tool_id for ToolCallChunk")
-            yield ToolCallChunk(
-                id=self.current_tool_id,
-                delta=tool_call_delta.function.arguments,
-            )
+            yield ToolCallChunk(delta=tool_call_delta.function.arguments)
 
         if choice.finish_reason:
             if self.current_content_type == "text":
                 yield TextEndChunk()
            elif self.current_content_type == "tool_call":
-
-                raise RuntimeError("No current_tool_id for ToolCallChunk")
-                yield ToolCallEndChunk(id=self.current_tool_id)
+                yield ToolCallEndChunk()
            elif self.current_content_type is not None:  # pragma: no cover
                raise NotImplementedError()
 
@@ -1,29 +1,29 @@
 """OpenAI completions message encoding and request preparation."""
 
-from __future__ import annotations
-
 from collections.abc import Sequence
 from functools import lru_cache
-from typing import
+from typing import TypedDict, cast
 
 from openai import Omit
 from openai.types import chat as openai_types, shared_params as shared_openai_types
 from openai.types.shared_params.response_format_json_schema import JSONSchema
 
-from .....exceptions import
-
-
-from .....tools import FORMAT_TOOL_NAME, AnyToolSchema, BaseToolkit
-from ....base import _utils as _base_utils
-from ...model_id import OpenAIModelId, model_name
-from ...model_info import (
-    MODELS_WITHOUT_AUDIO_SUPPORT,
-    MODELS_WITHOUT_JSON_OBJECT_SUPPORT,
-    MODELS_WITHOUT_JSON_SCHEMA_SUPPORT,
+from .....exceptions import (
+    FeatureNotSupportedError,
+    FormattingModeNotSupportedError,
 )
-
-
-
+from .....formatting import (
+    Format,
+    FormattableT,
+    _utils as _formatting_utils,
+    resolve_format,
+)
+from .....messages import AssistantMessage, Message, UserMessage
+from .....tools import FORMAT_TOOL_NAME, BaseToolkit, ToolSchema
+from ....base import Params, _utils as _base_utils
+from ...shared import _utils as _shared_utils
+from ..model_ids import OpenAICompletionsModelId
+from .model_features import MODEL_FEATURES
 
 
 class ChatCompletionCreateKwargs(TypedDict, total=False):
@@ -49,7 +49,7 @@ class ChatCompletionCreateKwargs(TypedDict, total=False):
 
 def _encode_user_message(
     message: UserMessage,
-    model_id:
+    model_id: OpenAICompletionsModelId,
 ) -> list[openai_types.ChatCompletionMessageParam]:
     """Convert Mirascope `UserMessage` to a list of OpenAI `ChatCompletionMessageParam`.
 
@@ -98,11 +98,11 @@ def _encode_user_message(
             )
             current_content.append(content)
         elif part.type == "audio":
-
-            if
+            model_status = MODEL_FEATURES.get(model_id)
+            if model_status == "no_audio_support":
                 raise FeatureNotSupportedError(
                     feature="Audio inputs",
-
+                    provider="openai:completions",
                     message=f"Model '{model_id}' does not support audio inputs.",
                 )
 
@@ -111,7 +111,7 @@ def _encode_user_message(
             if audio_format not in ("wav", "mp3"):
                 raise FeatureNotSupportedError(
                     feature=f"Audio format: {audio_format}",
-
+                    provider="openai:completions",
                     message="OpenAI only supports 'wav' and 'mp3' audio formats.",
                 )  # pragma: no cover
             audio_content = openai_types.ChatCompletionContentPartInputAudioParam(
@@ -127,7 +127,7 @@ def _encode_user_message(
             result.append(
                 openai_types.ChatCompletionToolMessageParam(
                     role="tool",
-                    content=str(part.
+                    content=str(part.value),
                     tool_call_id=part.id,
                 )
             )
@@ -141,16 +141,15 @@ def _encode_user_message(
 
 
 def _encode_assistant_message(
-    message: AssistantMessage, model_id:
+    message: AssistantMessage, model_id: OpenAICompletionsModelId, encode_thoughts: bool
 ) -> openai_types.ChatCompletionAssistantMessageParam:
     """Convert Mirascope `AssistantMessage` to OpenAI `ChatCompletionAssistantMessageParam`."""
 
     if (
-        message.
-        and message.
-        == model_name(model_id=model_id, api_mode="completions")
+        message.provider == "openai:completions"
+        and message.model_id == model_id
         and message.raw_message
-        and not
+        and not encode_thoughts
     ):
         return cast(
             openai_types.ChatCompletionAssistantMessageParam, message.raw_message
@@ -174,7 +173,7 @@ def _encode_assistant_message(
                 )
             )
         elif part.type == "thought":
-            if
+            if encode_thoughts:
                 text_params.append(
                     openai_types.ChatCompletionContentPartTextParam(
                         text="**Thinking:** " + part.thought, type="text"
@@ -189,7 +188,7 @@ def _encode_assistant_message(
     elif text_params:
         content = text_params
 
-    message_params
+    message_params = {
         "role": "assistant",
         "content": content,
     }
@@ -200,7 +199,7 @@ def _encode_assistant_message(
 
 
 def _encode_message(
-    message: Message, model_id:
+    message: Message, model_id: OpenAICompletionsModelId, encode_thoughts: bool
 ) -> list[openai_types.ChatCompletionMessageParam]:
     """Convert a Mirascope `Message` to OpenAI `ChatCompletionMessageParam` format.
 
@@ -221,29 +220,26 @@ def _encode_message(
     elif message.role == "user":
         return _encode_user_message(message, model_id)
     elif message.role == "assistant":
-        return [_encode_assistant_message(message, model_id,
+        return [_encode_assistant_message(message, model_id, encode_thoughts)]
     else:
         raise ValueError(f"Unsupported role: {message.role}")  # pragma: no cover
 
 
 @lru_cache(maxsize=128)
 def _convert_tool_to_tool_param(
-    tool:
+    tool: ToolSchema,
 ) -> openai_types.ChatCompletionToolParam:
     """Convert a single Mirascope `Tool` to OpenAI ChatCompletionToolParam with caching."""
     schema_dict = tool.parameters.model_dump(by_alias=True, exclude_none=True)
     schema_dict["type"] = "object"
-
-    strict = True if tool.strict is None else tool.strict
-    if strict:
-        _base_utils.ensure_all_properties_required(schema_dict)
+    _shared_utils._ensure_additional_properties_false(schema_dict)
     return openai_types.ChatCompletionToolParam(
         type="function",
         function={
             "name": tool.name,
             "description": tool.description,
             "parameters": schema_dict,
-            "strict": strict,
+            "strict": tool.strict,
         },
     )
 
@@ -261,7 +257,7 @@ def _create_strict_response_format(
     """
     schema = format.schema.copy()
 
-
+    _shared_utils._ensure_additional_properties_false(schema)
 
     json_schema = JSONSchema(
         name=format.name,
@@ -278,36 +274,24 @@
 
 def encode_request(
     *,
-    model_id:
+    model_id: OpenAICompletionsModelId,
     messages: Sequence[Message],
-    tools: Sequence[
-    format: type[FormattableT]
-    | Format[FormattableT]
-    | OutputParser[FormattableT]
-    | None,
+    tools: Sequence[ToolSchema] | BaseToolkit | None,
+    format: type[FormattableT] | Format[FormattableT] | None,
     params: Params,
 ) -> tuple[Sequence[Message], Format[FormattableT] | None, ChatCompletionCreateKwargs]:
     """Prepares a request for the `OpenAI.chat.completions.create` method."""
-    if model_id.endswith(":responses"):
-        raise FeatureNotSupportedError(
-            feature="responses API",
-            provider_id="openai:completions",
-            model_id=model_id,
-            message=f"Can't use completions client for responses model: {model_id}",
-        )
-    base_model_name = model_name(model_id, None)
-
     kwargs: ChatCompletionCreateKwargs = ChatCompletionCreateKwargs(
         {
-            "model":
+            "model": model_id,
         }
     )
-
+    encode_thoughts = False
 
     with _base_utils.ensure_all_params_accessed(
         params=params,
-
-        unsupported_params=["top_k"],
+        provider="openai:completions",
+        unsupported_params=["top_k", "thinking"],
     ) as param_accessor:
         if param_accessor.temperature is not None:
             kwargs["temperature"] = param_accessor.temperature
@@ -320,24 +304,24 @@
             kwargs["seed"] = param_accessor.seed
         if param_accessor.stop_sequences is not None:
             kwargs["stop"] = param_accessor.stop_sequences
-        if param_accessor.
-
-            if thinking.get("encode_thoughts_as_text"):
-                encode_thoughts_as_text = True
+        if param_accessor.encode_thoughts_as_text is not None:
+            encode_thoughts = True
 
         tools = tools.tools if isinstance(tools, BaseToolkit) else tools or []
 
         openai_tools = [_convert_tool_to_tool_param(tool) for tool in tools]
 
-        model_supports_strict =
+        model_supports_strict = (
+            model_id not in _shared_utils.MODELS_WITHOUT_JSON_SCHEMA_SUPPORT
+        )
        default_mode = "strict" if model_supports_strict else "tool"
        format = resolve_format(format, default_mode=default_mode)
        if format is not None:
            if format.mode == "strict":
                if not model_supports_strict:
-                    raise
-
-
+                    raise FormattingModeNotSupportedError(
+                        formatting_mode="strict",
+                        provider="openai:completions",
                         model_id=model_id,
                     )
                kwargs["response_format"] = _create_strict_response_format(format)
@@ -350,11 +334,11 @@
                 "function": {"name": FORMAT_TOOL_NAME},
             }
             kwargs["parallel_tool_calls"] = False
-            format_tool_schema =
+            format_tool_schema = _formatting_utils.create_tool_schema(format)
             openai_tools.append(_convert_tool_to_tool_param(format_tool_schema))
         elif (
             format.mode == "json"
-            and
+            and model_id not in _shared_utils.MODELS_WITHOUT_JSON_OBJECT_SUPPORT
         ):
             kwargs["response_format"] = {"type": "json_object"}
 
@@ -368,9 +352,7 @@
 
     encoded_messages: list[openai_types.ChatCompletionMessageParam] = []
     for message in messages:
-        encoded_messages.extend(
-            _encode_message(message, model_id, encode_thoughts_as_text)
-        )
+        encoded_messages.extend(_encode_message(message, model_id, encode_thoughts))
     kwargs["messages"] = encoded_messages
 
     return messages, format, kwargs
@@ -0,0 +1,81 @@
+"""OpenAI ChatCompletions models categorized by audio support.
+
+This file is auto-generated by scripts/update_openai_completions_model_features.py
+Run that script to update this map when OpenAI releases new models.
+"""
+
+from typing import Literal
+
+AudioFeatureSupport = Literal["supports_audio", "no_audio_support", "unavailable"]
+
+MODEL_FEATURES: dict[str, AudioFeatureSupport] = {
+    "chatgpt-4o-latest": "no_audio_support",
+    "codex-mini-latest": "unavailable",
+    "gpt-3.5-turbo": "no_audio_support",
+    "gpt-3.5-turbo-0125": "no_audio_support",
+    "gpt-3.5-turbo-0301": "unavailable",
+    "gpt-3.5-turbo-0613": "unavailable",
+    "gpt-3.5-turbo-1106": "no_audio_support",
+    "gpt-3.5-turbo-16k": "no_audio_support",
+    "gpt-3.5-turbo-16k-0613": "unavailable",
+    "gpt-4": "no_audio_support",
+    "gpt-4-0125-preview": "no_audio_support",
+    "gpt-4-0314": "unavailable",
+    "gpt-4-0613": "no_audio_support",
+    "gpt-4-1106-preview": "no_audio_support",
+    "gpt-4-32k": "unavailable",
+    "gpt-4-32k-0314": "unavailable",
+    "gpt-4-32k-0613": "unavailable",
+    "gpt-4-turbo": "no_audio_support",
+    "gpt-4-turbo-2024-04-09": "no_audio_support",
+    "gpt-4-turbo-preview": "no_audio_support",
+    "gpt-4-vision-preview": "unavailable",
+    "gpt-4.1": "no_audio_support",
+    "gpt-4.1-2025-04-14": "no_audio_support",
+    "gpt-4.1-mini": "no_audio_support",
+    "gpt-4.1-mini-2025-04-14": "no_audio_support",
+    "gpt-4.1-nano": "no_audio_support",
+    "gpt-4.1-nano-2025-04-14": "no_audio_support",
+    "gpt-4o": "no_audio_support",
+    "gpt-4o-2024-05-13": "no_audio_support",
+    "gpt-4o-2024-08-06": "no_audio_support",
+    "gpt-4o-2024-11-20": "no_audio_support",
+    "gpt-4o-audio-preview": "supports_audio",
+    "gpt-4o-audio-preview-2024-10-01": "supports_audio",
+    "gpt-4o-audio-preview-2024-12-17": "supports_audio",
+    "gpt-4o-audio-preview-2025-06-03": "supports_audio",
+    "gpt-4o-mini": "no_audio_support",
+    "gpt-4o-mini-2024-07-18": "no_audio_support",
+    "gpt-4o-mini-audio-preview": "supports_audio",
+    "gpt-4o-mini-audio-preview-2024-12-17": "supports_audio",
+    "gpt-4o-mini-search-preview": "no_audio_support",
+    "gpt-4o-mini-search-preview-2025-03-11": "no_audio_support",
+    "gpt-4o-search-preview": "no_audio_support",
+    "gpt-4o-search-preview-2025-03-11": "no_audio_support",
+    "gpt-5": "no_audio_support",
+    "gpt-5-2025-08-07": "no_audio_support",
+    "gpt-5-chat-latest": "no_audio_support",
+    "gpt-5-mini": "no_audio_support",
+    "gpt-5-mini-2025-08-07": "no_audio_support",
+    "gpt-5-nano": "no_audio_support",
+    "gpt-5-nano-2025-08-07": "no_audio_support",
+    "o1": "no_audio_support",
+    "o1-2024-12-17": "no_audio_support",
+    "o1-mini": "no_audio_support",
+    "o1-mini-2024-09-12": "no_audio_support",
+    "o1-preview": "unavailable",
+    "o1-preview-2024-09-12": "unavailable",
+    "o3": "no_audio_support",
+    "o3-2025-04-16": "no_audio_support",
+    "o3-mini": "no_audio_support",
+    "o3-mini-2025-01-31": "no_audio_support",
+    "o4-mini": "no_audio_support",
+    "o4-mini-2025-04-16": "no_audio_support",
+}
+"""Map of model IDs to their status.
+
+- "supports_audio": model exists and supports audio inputs
+- "no_audio_support": model exists but does not support audio inputs
+- "unavailable": model has been deprecated or removed
+- If model not in map: assume it exists and supports audio (optimistic default)
+"""