mirascope 2.0.0a5__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mirascope/__init__.py +10 -1
- mirascope/_stubs.py +363 -0
- mirascope/api/__init__.py +8 -0
- mirascope/api/_generated/__init__.py +285 -2
- mirascope/api/_generated/annotations/__init__.py +33 -0
- mirascope/api/_generated/annotations/client.py +506 -0
- mirascope/api/_generated/annotations/raw_client.py +1414 -0
- mirascope/api/_generated/annotations/types/__init__.py +31 -0
- mirascope/api/_generated/annotations/types/annotations_create_request_label.py +5 -0
- mirascope/api/_generated/annotations/types/annotations_create_response.py +48 -0
- mirascope/api/_generated/annotations/types/annotations_create_response_label.py +5 -0
- mirascope/api/_generated/annotations/types/annotations_get_response.py +48 -0
- mirascope/api/_generated/annotations/types/annotations_get_response_label.py +5 -0
- mirascope/api/_generated/annotations/types/annotations_list_request_label.py +5 -0
- mirascope/api/_generated/annotations/types/annotations_list_response.py +21 -0
- mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item.py +50 -0
- mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item_label.py +5 -0
- mirascope/api/_generated/annotations/types/annotations_update_request_label.py +5 -0
- mirascope/api/_generated/annotations/types/annotations_update_response.py +48 -0
- mirascope/api/_generated/annotations/types/annotations_update_response_label.py +5 -0
- mirascope/api/_generated/api_keys/__init__.py +12 -2
- mirascope/api/_generated/api_keys/client.py +77 -0
- mirascope/api/_generated/api_keys/raw_client.py +422 -39
- mirascope/api/_generated/api_keys/types/__init__.py +7 -1
- mirascope/api/_generated/api_keys/types/api_keys_create_response.py +4 -12
- mirascope/api/_generated/api_keys/types/api_keys_get_response.py +4 -12
- mirascope/api/_generated/api_keys/types/api_keys_list_all_for_org_response_item.py +40 -0
- mirascope/api/_generated/api_keys/types/api_keys_list_response_item.py +4 -12
- mirascope/api/_generated/client.py +42 -0
- mirascope/api/_generated/core/client_wrapper.py +2 -14
- mirascope/api/_generated/core/datetime_utils.py +1 -3
- mirascope/api/_generated/core/file.py +2 -5
- mirascope/api/_generated/core/http_client.py +36 -112
- mirascope/api/_generated/core/jsonable_encoder.py +1 -3
- mirascope/api/_generated/core/pydantic_utilities.py +19 -74
- mirascope/api/_generated/core/query_encoder.py +1 -3
- mirascope/api/_generated/core/serialization.py +4 -10
- mirascope/api/_generated/docs/client.py +2 -6
- mirascope/api/_generated/docs/raw_client.py +51 -5
- mirascope/api/_generated/environment.py +3 -3
- mirascope/api/_generated/environments/__init__.py +6 -0
- mirascope/api/_generated/environments/client.py +117 -0
- mirascope/api/_generated/environments/raw_client.py +530 -51
- mirascope/api/_generated/environments/types/__init__.py +10 -0
- mirascope/api/_generated/environments/types/environments_create_response.py +1 -3
- mirascope/api/_generated/environments/types/environments_get_analytics_response.py +60 -0
- mirascope/api/_generated/environments/types/environments_get_analytics_response_top_functions_item.py +24 -0
- mirascope/api/_generated/environments/types/environments_get_analytics_response_top_models_item.py +22 -0
- mirascope/api/_generated/environments/types/environments_get_response.py +1 -3
- mirascope/api/_generated/environments/types/environments_list_response_item.py +1 -3
- mirascope/api/_generated/environments/types/environments_update_response.py +1 -3
- mirascope/api/_generated/errors/__init__.py +8 -0
- mirascope/api/_generated/errors/bad_request_error.py +1 -2
- mirascope/api/_generated/errors/conflict_error.py +1 -2
- mirascope/api/_generated/errors/forbidden_error.py +1 -5
- mirascope/api/_generated/errors/internal_server_error.py +1 -6
- mirascope/api/_generated/errors/not_found_error.py +1 -5
- mirascope/api/_generated/errors/payment_required_error.py +15 -0
- mirascope/api/_generated/errors/service_unavailable_error.py +14 -0
- mirascope/api/_generated/errors/too_many_requests_error.py +15 -0
- mirascope/api/_generated/errors/unauthorized_error.py +11 -0
- mirascope/api/_generated/functions/__init__.py +39 -0
- mirascope/api/_generated/functions/client.py +647 -0
- mirascope/api/_generated/functions/raw_client.py +1890 -0
- mirascope/api/_generated/functions/types/__init__.py +53 -0
- mirascope/api/_generated/functions/types/functions_create_request_dependencies_value.py +20 -0
- mirascope/api/_generated/functions/types/functions_create_response.py +37 -0
- mirascope/api/_generated/functions/types/functions_create_response_dependencies_value.py +20 -0
- mirascope/api/_generated/functions/types/functions_find_by_hash_response.py +39 -0
- mirascope/api/_generated/functions/types/functions_find_by_hash_response_dependencies_value.py +20 -0
- mirascope/api/_generated/functions/types/functions_get_by_env_response.py +53 -0
- mirascope/api/_generated/functions/types/functions_get_by_env_response_dependencies_value.py +22 -0
- mirascope/api/_generated/functions/types/functions_get_response.py +37 -0
- mirascope/api/_generated/functions/types/functions_get_response_dependencies_value.py +20 -0
- mirascope/api/_generated/functions/types/functions_list_by_env_response.py +25 -0
- mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item.py +56 -0
- mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item_dependencies_value.py +22 -0
- mirascope/api/_generated/functions/types/functions_list_response.py +21 -0
- mirascope/api/_generated/functions/types/functions_list_response_functions_item.py +41 -0
- mirascope/api/_generated/functions/types/functions_list_response_functions_item_dependencies_value.py +20 -0
- mirascope/api/_generated/health/client.py +2 -6
- mirascope/api/_generated/health/raw_client.py +51 -5
- mirascope/api/_generated/health/types/health_check_response.py +1 -3
- mirascope/api/_generated/organization_invitations/__init__.py +33 -0
- mirascope/api/_generated/organization_invitations/client.py +546 -0
- mirascope/api/_generated/organization_invitations/raw_client.py +1519 -0
- mirascope/api/_generated/organization_invitations/types/__init__.py +53 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response.py +34 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response_role.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_request_role.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response.py +48 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_role.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_status.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response.py +48 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_role.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_status.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item.py +48 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_role.py +7 -0
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_status.py +7 -0
- mirascope/api/_generated/organization_memberships/__init__.py +19 -0
- mirascope/api/_generated/organization_memberships/client.py +302 -0
- mirascope/api/_generated/organization_memberships/raw_client.py +736 -0
- mirascope/api/_generated/organization_memberships/types/__init__.py +27 -0
- mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item.py +33 -0
- mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item_role.py +7 -0
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_request_role.py +7 -0
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response.py +31 -0
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response_role.py +7 -0
- mirascope/api/_generated/organizations/__init__.py +26 -0
- mirascope/api/_generated/organizations/client.py +465 -0
- mirascope/api/_generated/organizations/raw_client.py +1799 -108
- mirascope/api/_generated/organizations/types/__init__.py +48 -0
- mirascope/api/_generated/organizations/types/organizations_create_payment_intent_response.py +24 -0
- mirascope/api/_generated/organizations/types/organizations_create_response.py +4 -3
- mirascope/api/_generated/organizations/types/organizations_create_response_role.py +1 -3
- mirascope/api/_generated/organizations/types/organizations_get_response.py +4 -3
- mirascope/api/_generated/organizations/types/organizations_get_response_role.py +1 -3
- mirascope/api/_generated/organizations/types/organizations_list_response_item.py +4 -3
- mirascope/api/_generated/organizations/types/organizations_list_response_item_role.py +1 -3
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_request_target_plan.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response.py +47 -0
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item.py +33 -0
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item_resource.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_router_balance_response.py +24 -0
- mirascope/api/_generated/organizations/types/organizations_subscription_response.py +53 -0
- mirascope/api/_generated/organizations/types/organizations_subscription_response_current_plan.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_subscription_response_payment_method.py +26 -0
- mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change.py +34 -0
- mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change_target_plan.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_update_response.py +4 -3
- mirascope/api/_generated/organizations/types/organizations_update_response_role.py +1 -3
- mirascope/api/_generated/organizations/types/organizations_update_subscription_request_target_plan.py +7 -0
- mirascope/api/_generated/organizations/types/organizations_update_subscription_response.py +35 -0
- mirascope/api/_generated/project_memberships/__init__.py +25 -0
- mirascope/api/_generated/project_memberships/client.py +437 -0
- mirascope/api/_generated/project_memberships/raw_client.py +1039 -0
- mirascope/api/_generated/project_memberships/types/__init__.py +29 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_create_request_role.py +7 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_create_response.py +35 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_create_response_role.py +7 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item.py +33 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item_role.py +7 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_update_request_role.py +7 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_update_response.py +35 -0
- mirascope/api/_generated/project_memberships/types/project_memberships_update_response_role.py +7 -0
- mirascope/api/_generated/projects/__init__.py +2 -12
- mirascope/api/_generated/projects/client.py +17 -71
- mirascope/api/_generated/projects/raw_client.py +295 -51
- mirascope/api/_generated/projects/types/__init__.py +1 -6
- mirascope/api/_generated/projects/types/projects_create_response.py +3 -9
- mirascope/api/_generated/projects/types/projects_get_response.py +3 -9
- mirascope/api/_generated/projects/types/projects_list_response_item.py +3 -9
- mirascope/api/_generated/projects/types/projects_update_response.py +3 -9
- mirascope/api/_generated/reference.md +3619 -182
- mirascope/api/_generated/tags/__init__.py +19 -0
- mirascope/api/_generated/tags/client.py +504 -0
- mirascope/api/_generated/tags/raw_client.py +1288 -0
- mirascope/api/_generated/tags/types/__init__.py +17 -0
- mirascope/api/_generated/tags/types/tags_create_response.py +41 -0
- mirascope/api/_generated/tags/types/tags_get_response.py +41 -0
- mirascope/api/_generated/tags/types/tags_list_response.py +23 -0
- mirascope/api/_generated/tags/types/tags_list_response_tags_item.py +41 -0
- mirascope/api/_generated/tags/types/tags_update_response.py +41 -0
- mirascope/api/_generated/token_cost/__init__.py +7 -0
- mirascope/api/_generated/token_cost/client.py +160 -0
- mirascope/api/_generated/token_cost/raw_client.py +264 -0
- mirascope/api/_generated/token_cost/types/__init__.py +8 -0
- mirascope/api/_generated/token_cost/types/token_cost_calculate_request_usage.py +54 -0
- mirascope/api/_generated/token_cost/types/token_cost_calculate_response.py +52 -0
- mirascope/api/_generated/traces/__init__.py +42 -0
- mirascope/api/_generated/traces/client.py +941 -0
- mirascope/api/_generated/traces/raw_client.py +2177 -23
- mirascope/api/_generated/traces/types/__init__.py +60 -0
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item.py +4 -11
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item.py +1 -3
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value.py +8 -24
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_array_value.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value.py +3 -9
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value_values_item.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item.py +3 -9
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope.py +4 -8
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value.py +8 -24
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_array_value.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value.py +3 -9
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value_values_item.py +1 -3
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item.py +6 -18
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item.py +3 -9
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value.py +8 -24
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_array_value.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value_values_item.py +1 -3
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_status.py +2 -6
- mirascope/api/_generated/traces/types/traces_create_response.py +2 -5
- mirascope/api/_generated/traces/types/traces_create_response_partial_success.py +3 -9
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response.py +60 -0
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response_top_functions_item.py +24 -0
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response_top_models_item.py +22 -0
- mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response.py +33 -0
- mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response_spans_item.py +88 -0
- mirascope/api/_generated/traces/types/traces_get_trace_detail_response.py +33 -0
- mirascope/api/_generated/traces/types/traces_get_trace_detail_response_spans_item.py +88 -0
- mirascope/api/_generated/traces/types/traces_list_by_function_hash_response.py +25 -0
- mirascope/api/_generated/traces/types/traces_list_by_function_hash_response_traces_item.py +44 -0
- mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item.py +26 -0
- mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item_operator.py +7 -0
- mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_by.py +7 -0
- mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_order.py +7 -0
- mirascope/api/_generated/traces/types/traces_search_by_env_response.py +26 -0
- mirascope/api/_generated/traces/types/traces_search_by_env_response_spans_item.py +50 -0
- mirascope/api/_generated/traces/types/traces_search_request_attribute_filters_item.py +26 -0
- mirascope/api/_generated/traces/types/traces_search_request_attribute_filters_item_operator.py +7 -0
- mirascope/api/_generated/traces/types/traces_search_request_sort_by.py +7 -0
- mirascope/api/_generated/traces/types/traces_search_request_sort_order.py +5 -0
- mirascope/api/_generated/traces/types/traces_search_response.py +26 -0
- mirascope/api/_generated/traces/types/traces_search_response_spans_item.py +50 -0
- mirascope/api/_generated/types/__init__.py +48 -0
- mirascope/api/_generated/types/already_exists_error.py +1 -3
- mirascope/api/_generated/types/bad_request_error_body.py +50 -0
- mirascope/api/_generated/types/click_house_error.py +22 -0
- mirascope/api/_generated/types/database_error.py +1 -3
- mirascope/api/_generated/types/date.py +3 -0
- mirascope/api/_generated/types/http_api_decode_error.py +1 -3
- mirascope/api/_generated/types/immutable_resource_error.py +22 -0
- mirascope/api/_generated/types/internal_server_error_body.py +49 -0
- mirascope/api/_generated/types/issue.py +1 -3
- mirascope/api/_generated/types/issue_tag.py +1 -8
- mirascope/api/_generated/types/not_found_error_body.py +1 -3
- mirascope/api/_generated/types/number_from_string.py +3 -0
- mirascope/api/_generated/types/permission_denied_error.py +1 -3
- mirascope/api/_generated/types/permission_denied_error_tag.py +1 -3
- mirascope/api/_generated/types/plan_limit_exceeded_error.py +32 -0
- mirascope/api/_generated/types/plan_limit_exceeded_error_tag.py +7 -0
- mirascope/api/_generated/types/pricing_unavailable_error.py +23 -0
- mirascope/api/_generated/types/property_key_key.py +1 -3
- mirascope/api/_generated/types/rate_limit_error.py +31 -0
- mirascope/api/_generated/types/rate_limit_error_tag.py +5 -0
- mirascope/api/_generated/types/service_unavailable_error_body.py +24 -0
- mirascope/api/_generated/types/service_unavailable_error_tag.py +7 -0
- mirascope/api/_generated/types/stripe_error.py +20 -0
- mirascope/api/_generated/types/subscription_past_due_error.py +31 -0
- mirascope/api/_generated/types/subscription_past_due_error_tag.py +7 -0
- mirascope/api/_generated/types/unauthorized_error_body.py +21 -0
- mirascope/api/_generated/types/unauthorized_error_tag.py +5 -0
- mirascope/api/settings.py +19 -1
- mirascope/llm/__init__.py +55 -8
- mirascope/llm/calls/__init__.py +2 -1
- mirascope/llm/calls/calls.py +3 -1
- mirascope/llm/calls/decorator.py +21 -7
- mirascope/llm/content/tool_call.py +6 -0
- mirascope/llm/content/tool_output.py +22 -5
- mirascope/llm/exceptions.py +284 -71
- mirascope/llm/formatting/__init__.py +19 -2
- mirascope/llm/formatting/format.py +219 -30
- mirascope/llm/formatting/output_parser.py +178 -0
- mirascope/llm/formatting/partial.py +80 -7
- mirascope/llm/formatting/primitives.py +192 -0
- mirascope/llm/formatting/types.py +21 -64
- mirascope/llm/mcp/__init__.py +2 -2
- mirascope/llm/mcp/mcp_client.py +130 -0
- mirascope/llm/messages/__init__.py +3 -0
- mirascope/llm/messages/_utils.py +34 -0
- mirascope/llm/models/__init__.py +5 -0
- mirascope/llm/models/models.py +137 -69
- mirascope/llm/{providers/base → models}/params.py +16 -37
- mirascope/llm/models/thinking_config.py +61 -0
- mirascope/llm/prompts/_utils.py +0 -32
- mirascope/llm/prompts/decorator.py +16 -5
- mirascope/llm/prompts/prompts.py +131 -68
- mirascope/llm/providers/__init__.py +18 -2
- mirascope/llm/providers/anthropic/__init__.py +3 -21
- mirascope/llm/providers/anthropic/_utils/__init__.py +2 -0
- mirascope/llm/providers/anthropic/_utils/beta_decode.py +22 -11
- mirascope/llm/providers/anthropic/_utils/beta_encode.py +75 -25
- mirascope/llm/providers/anthropic/_utils/decode.py +22 -11
- mirascope/llm/providers/anthropic/_utils/encode.py +82 -20
- mirascope/llm/providers/anthropic/_utils/errors.py +2 -2
- mirascope/llm/providers/anthropic/beta_provider.py +64 -18
- mirascope/llm/providers/anthropic/provider.py +91 -33
- mirascope/llm/providers/base/__init__.py +0 -2
- mirascope/llm/providers/base/_utils.py +55 -11
- mirascope/llm/providers/base/base_provider.py +116 -37
- mirascope/llm/providers/google/__init__.py +2 -17
- mirascope/llm/providers/google/_utils/__init__.py +2 -0
- mirascope/llm/providers/google/_utils/decode.py +37 -15
- mirascope/llm/providers/google/_utils/encode.py +127 -19
- mirascope/llm/providers/google/_utils/errors.py +3 -2
- mirascope/llm/providers/google/model_info.py +1 -0
- mirascope/llm/providers/google/provider.py +68 -19
- mirascope/llm/providers/mirascope/__init__.py +5 -0
- mirascope/llm/providers/mirascope/_utils.py +73 -0
- mirascope/llm/providers/mirascope/provider.py +349 -0
- mirascope/llm/providers/mlx/__init__.py +2 -17
- mirascope/llm/providers/mlx/_utils.py +8 -3
- mirascope/llm/providers/mlx/encoding/base.py +5 -2
- mirascope/llm/providers/mlx/encoding/transformers.py +5 -2
- mirascope/llm/providers/mlx/mlx.py +23 -6
- mirascope/llm/providers/mlx/provider.py +42 -13
- mirascope/llm/providers/ollama/__init__.py +1 -13
- mirascope/llm/providers/openai/_utils/errors.py +2 -2
- mirascope/llm/providers/openai/completions/__init__.py +2 -20
- mirascope/llm/providers/openai/completions/_utils/decode.py +14 -3
- mirascope/llm/providers/openai/completions/_utils/encode.py +35 -28
- mirascope/llm/providers/openai/completions/base_provider.py +40 -11
- mirascope/llm/providers/openai/provider.py +40 -10
- mirascope/llm/providers/openai/responses/__init__.py +1 -17
- mirascope/llm/providers/openai/responses/_utils/__init__.py +2 -0
- mirascope/llm/providers/openai/responses/_utils/decode.py +21 -8
- mirascope/llm/providers/openai/responses/_utils/encode.py +59 -19
- mirascope/llm/providers/openai/responses/provider.py +56 -18
- mirascope/llm/providers/provider_id.py +1 -0
- mirascope/llm/providers/provider_registry.py +96 -19
- mirascope/llm/providers/together/__init__.py +1 -13
- mirascope/llm/responses/__init__.py +6 -1
- mirascope/llm/responses/_utils.py +102 -12
- mirascope/llm/responses/base_response.py +5 -2
- mirascope/llm/responses/base_stream_response.py +139 -45
- mirascope/llm/responses/response.py +2 -1
- mirascope/llm/responses/root_response.py +89 -17
- mirascope/llm/responses/stream_response.py +6 -9
- mirascope/llm/tools/decorator.py +17 -8
- mirascope/llm/tools/tool_schema.py +43 -10
- mirascope/llm/tools/toolkit.py +35 -27
- mirascope/llm/tools/tools.py +123 -30
- mirascope/ops/__init__.py +64 -109
- mirascope/ops/_internal/configuration.py +82 -31
- mirascope/ops/_internal/exporters/exporters.py +64 -11
- mirascope/ops/_internal/instrumentation/llm/common.py +530 -0
- mirascope/ops/_internal/instrumentation/llm/cost.py +190 -0
- mirascope/ops/_internal/instrumentation/llm/encode.py +1 -1
- mirascope/ops/_internal/instrumentation/llm/llm.py +116 -1243
- mirascope/ops/_internal/instrumentation/llm/model.py +1798 -0
- mirascope/ops/_internal/instrumentation/llm/response.py +521 -0
- mirascope/ops/_internal/instrumentation/llm/serialize.py +300 -0
- mirascope/ops/_internal/protocols.py +83 -1
- mirascope/ops/_internal/traced_calls.py +4 -0
- mirascope/ops/_internal/traced_functions.py +141 -12
- mirascope/ops/_internal/tracing.py +78 -1
- mirascope/ops/_internal/utils.py +52 -4
- mirascope/ops/_internal/versioned_functions.py +54 -43
- {mirascope-2.0.0a5.dist-info → mirascope-2.0.1.dist-info}/METADATA +14 -13
- mirascope-2.0.1.dist-info/RECORD +423 -0
- {mirascope-2.0.0a5.dist-info → mirascope-2.0.1.dist-info}/licenses/LICENSE +1 -1
- mirascope/llm/formatting/_utils.py +0 -78
- mirascope/llm/mcp/client.py +0 -118
- mirascope/llm/providers/_missing_import_stubs.py +0 -49
- mirascope-2.0.0a5.dist-info/RECORD +0 -265
- {mirascope-2.0.0a5.dist-info → mirascope-2.0.1.dist-info}/WHEEL +0 -0
mirascope/llm/prompts/prompts.py
CHANGED
@@ -1,11 +1,14 @@
 """Concrete Prompt classes for generating messages with tools and formatting."""

+from collections.abc import Sequence
 from dataclasses import dataclass
 from typing import Generic, overload

 from ..context import Context, DepsT
-from ..formatting import Format, FormattableT
+from ..formatting import Format, FormattableT, OutputParser
+from ..messages import Message, promote_to_messages
 from ..models import Model
+from ..providers import ModelId
 from ..responses import (
     AsyncContextResponse,
     AsyncContextStreamResponse,
@@ -23,7 +26,6 @@ from ..tools import (
     Toolkit,
 )
 from ..types import P
-from . import _utils
 from .protocols import (
     AsyncContextMessageTemplate,
     AsyncMessageTemplate,
@@ -49,60 +51,86 @@ class Prompt(Generic[P, FormattableT]):
     toolkit: Toolkit
     """The toolkit containing this prompt's tools."""

-    format:
+    format: (
+        type[FormattableT] | Format[FormattableT] | OutputParser[FormattableT] | None
+    )
     """The response format for the generated response."""

+    def messages(self, *args: P.args, **kwargs: P.kwargs) -> Sequence[Message]:
+        """Return the `Messages` from invoking this prompt."""
+        return promote_to_messages(self.fn(*args, **kwargs))
+
     @overload
     def __call__(
-        self: "Prompt[P, None]",
+        self: "Prompt[P, None]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> Response: ...

     @overload
     def __call__(
-        self: "Prompt[P, FormattableT]",
+        self: "Prompt[P, FormattableT]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> Response[FormattableT]: ...

     def __call__(
-        self, model: Model, *args: P.args, **kwargs: P.kwargs
+        self, model: Model | ModelId, *args: P.args, **kwargs: P.kwargs
     ) -> Response | Response[FormattableT]:
         """Generates a response using the provided model."""
         return self.call(model, *args, **kwargs)

     @overload
     def call(
-        self: "Prompt[P, None]",
+        self: "Prompt[P, None]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> Response: ...

     @overload
     def call(
-        self: "Prompt[P, FormattableT]",
+        self: "Prompt[P, FormattableT]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> Response[FormattableT]: ...

     def call(
-        self, model: Model, *args: P.args, **kwargs: P.kwargs
+        self, model: Model | ModelId, *args: P.args, **kwargs: P.kwargs
     ) -> Response | Response[FormattableT]:
         """Generates a response using the provided model."""
-
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = self.messages(*args, **kwargs)
+        return model.call(messages, tools=self.toolkit, format=self.format)

     @overload
     def stream(
-        self: "Prompt[P, None]",
+        self: "Prompt[P, None]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> StreamResponse: ...

     @overload
     def stream(
-        self: "Prompt[P, FormattableT]",
+        self: "Prompt[P, FormattableT]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> StreamResponse[FormattableT]: ...

     def stream(
-        self, model: Model, *args: P.args, **kwargs: P.kwargs
+        self, model: Model | ModelId, *args: P.args, **kwargs: P.kwargs
     ) -> StreamResponse | StreamResponse[FormattableT]:
         """Generates a streaming response using the provided model."""
-
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = self.messages(*args, **kwargs)
+        return model.stream(messages, tools=self.toolkit, format=self.format)


 @dataclass
@@ -122,72 +150,87 @@ class AsyncPrompt(Generic[P, FormattableT]):
     toolkit: AsyncToolkit
     """The toolkit containing this prompt's async tools."""

-    format:
+    format: (
+        type[FormattableT] | Format[FormattableT] | OutputParser[FormattableT] | None
+    )
     """The response format for the generated response."""

+    async def messages(self, *args: P.args, **kwargs: P.kwargs) -> Sequence[Message]:
+        """Return the `Messages` from invoking this prompt."""
+        return promote_to_messages(await self.fn(*args, **kwargs))
+
     @overload
     async def __call__(
-        self: "AsyncPrompt[P, None]",
+        self: "AsyncPrompt[P, None]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> AsyncResponse: ...

     @overload
     async def __call__(
         self: "AsyncPrompt[P, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> AsyncResponse[FormattableT]: ...

     async def __call__(
-        self, model: Model, *args: P.args, **kwargs: P.kwargs
+        self, model: Model | ModelId, *args: P.args, **kwargs: P.kwargs
     ) -> AsyncResponse | AsyncResponse[FormattableT]:
         """Generates a response using the provided model asynchronously."""
         return await self.call(model, *args, **kwargs)

     @overload
     async def call(
-        self: "AsyncPrompt[P, None]",
+        self: "AsyncPrompt[P, None]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> AsyncResponse: ...

     @overload
     async def call(
         self: "AsyncPrompt[P, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> AsyncResponse[FormattableT]: ...

     async def call(
-        self, model: Model, *args: P.args, **kwargs: P.kwargs
+        self, model: Model | ModelId, *args: P.args, **kwargs: P.kwargs
     ) -> AsyncResponse | AsyncResponse[FormattableT]:
         """Generates a response using the provided model asynchronously."""
-
-
-
-
-        )
+        if isinstance(model, str):
+            model = Model(model)
+        messages = await self.messages(*args, **kwargs)
+        return await model.call_async(messages, tools=self.toolkit, format=self.format)

     @overload
     async def stream(
-        self: "AsyncPrompt[P, None]",
+        self: "AsyncPrompt[P, None]",
+        model: Model | ModelId,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> AsyncStreamResponse: ...

     @overload
     async def stream(
         self: "AsyncPrompt[P, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> AsyncStreamResponse[FormattableT]: ...

     async def stream(
-        self, model: Model, *args: P.args, **kwargs: P.kwargs
+        self, model: Model | ModelId, *args: P.args, **kwargs: P.kwargs
     ) -> AsyncStreamResponse | AsyncStreamResponse[FormattableT]:
         """Generates a streaming response using the provided model asynchronously."""
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = await self.messages(*args, **kwargs)
         return await model.stream_async(
-            messages
+            messages, tools=self.toolkit, format=self.format
         )


@@ -209,13 +252,21 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):
     toolkit: ContextToolkit[DepsT]
     """The toolkit containing this prompt's context-aware tools."""

-    format:
+    format: (
+        type[FormattableT] | Format[FormattableT] | OutputParser[FormattableT] | None
+    )
     """The response format for the generated response."""

+    def messages(
+        self, ctx: Context[DepsT], *args: P.args, **kwargs: P.kwargs
+    ) -> Sequence[Message]:
+        """Return the `Messages` from invoking this prompt."""
+        return promote_to_messages(self.fn(ctx, *args, **kwargs))
+
     @overload
     def __call__(
         self: "ContextPrompt[P, DepsT, None]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -224,7 +275,7 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     def __call__(
         self: "ContextPrompt[P, DepsT, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -232,7 +283,7 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):

     def __call__(
         self,
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -243,7 +294,7 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     def call(
         self: "ContextPrompt[P, DepsT, None]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -252,7 +303,7 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     def call(
         self: "ContextPrompt[P, DepsT, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -260,22 +311,23 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):

     def call(
         self,
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> ContextResponse[DepsT, None] | ContextResponse[DepsT, FormattableT]:
         """Generates a response using the provided model."""
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = self.messages(ctx, *args, **kwargs)
         return model.context_call(
-            ctx=ctx,
+            messages, ctx=ctx, tools=self.toolkit, format=self.format
         )

     @overload
     def stream(
         self: "ContextPrompt[P, DepsT, None]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -284,7 +336,7 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     def stream(
         self: "ContextPrompt[P, DepsT, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -292,7 +344,7 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):

     def stream(
         self,
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -300,10 +352,11 @@ class ContextPrompt(Generic[P, DepsT, FormattableT]):
         ContextStreamResponse[DepsT, None] | ContextStreamResponse[DepsT, FormattableT]
     ):
         """Generates a streaming response using the provided model."""
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = self.messages(ctx, *args, **kwargs)
         return model.context_stream(
-            ctx=ctx,
+            messages, ctx=ctx, tools=self.toolkit, format=self.format
         )


@@ -325,13 +378,21 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):
     toolkit: AsyncContextToolkit[DepsT]
     """The toolkit containing this prompt's async context-aware tools."""

-    format:
+    format: (
+        type[FormattableT] | Format[FormattableT] | OutputParser[FormattableT] | None
+    )
     """The response format for the generated response."""

+    async def messages(
+        self, ctx: Context[DepsT], *args: P.args, **kwargs: P.kwargs
+    ) -> Sequence[Message]:
+        """Return the `Messages` from invoking this prompt."""
+        return promote_to_messages(await self.fn(ctx, *args, **kwargs))
+
     @overload
     async def __call__(
         self: "AsyncContextPrompt[P, DepsT, None]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -340,7 +401,7 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     async def __call__(
         self: "AsyncContextPrompt[P, DepsT, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -348,7 +409,7 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):

     async def __call__(
         self,
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -359,7 +420,7 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     async def call(
         self: "AsyncContextPrompt[P, DepsT, None]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -368,7 +429,7 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     async def call(
         self: "AsyncContextPrompt[P, DepsT, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -376,22 +437,23 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):

     async def call(
         self,
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
     ) -> AsyncContextResponse[DepsT, None] | AsyncContextResponse[DepsT, FormattableT]:
         """Generates a response using the provided model asynchronously."""
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = await self.messages(ctx, *args, **kwargs)
         return await model.context_call_async(
-            ctx=ctx,
+            messages, ctx=ctx, tools=self.toolkit, format=self.format
         )

     @overload
     async def stream(
         self: "AsyncContextPrompt[P, DepsT, None]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -400,7 +462,7 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):
     @overload
     async def stream(
         self: "AsyncContextPrompt[P, DepsT, FormattableT]",
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -408,7 +470,7 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):

     async def stream(
         self,
-        model: Model,
+        model: Model | ModelId,
         ctx: Context[DepsT],
         *args: P.args,
         **kwargs: P.kwargs,
@@ -417,8 +479,9 @@ class AsyncContextPrompt(Generic[P, DepsT, FormattableT]):
         | AsyncContextStreamResponse[DepsT, FormattableT]
     ):
         """Generates a streaming response using the provided model asynchronously."""
-
-
+        if isinstance(model, str):
+            model = Model(model)
+        messages = await self.messages(ctx, *args, **kwargs)
         return await model.context_stream_async(
-            ctx=ctx,
+            messages, ctx=ctx, tools=self.toolkit, format=self.format
         )
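Note on the change above: every call/stream entry point on the four prompt classes now accepts Model | ModelId and promotes a bare id string with Model(model) before building the message sequence via the new messages() helper. The following is a minimal, self-contained sketch of that promotion pattern only; FakeModel, call_prompt, and the "example:model-id" string are illustrative stand-ins, not mirascope's actual classes or model ids.

from dataclasses import dataclass

ModelId = str  # stand-in for mirascope's provider-qualified model-id alias


@dataclass
class FakeModel:
    model_id: ModelId

    def call(self, messages: list[str]) -> str:
        # Pretend to send the messages to the named model.
        return f"[{self.model_id}] {' / '.join(messages)}"


def call_prompt(model: FakeModel | ModelId, *messages: str) -> str:
    # Same normalization the diff adds: promote a bare id string to a model object.
    if isinstance(model, str):
        model = FakeModel(model)
    return model.call(list(messages))


print(call_prompt("example:model-id", "Hello there"))          # bare id accepted
print(call_prompt(FakeModel("example:model-id"), "Hello again"))  # object still accepted

The same normalization is what lets Prompt.call, Prompt.stream, and their async and context variants take either form with identical results.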

mirascope/llm/providers/__init__.py
CHANGED
@@ -1,11 +1,27 @@
 """Interfaces for LLM providers."""

+from ..._stubs import stub_module_if_missing
+
+# Stub modules for missing optional dependencies BEFORE importing
+# This must happen before any imports from these modules
+# Note: We only stub top-level provider modules, not their submodules.
+# The _StubModule will automatically handle nested attribute access.
+stub_module_if_missing("mirascope.llm.providers.anthropic", "anthropic")
+stub_module_if_missing("mirascope.llm.providers.google", "google")
+stub_module_if_missing("mirascope.llm.providers.mlx", "mlx")
+stub_module_if_missing("mirascope.llm.providers.openai", "openai")
+stub_module_if_missing("mirascope.llm.providers.together", "openai")
+stub_module_if_missing("mirascope.llm.providers.ollama", "openai")
+
+# Now imports work regardless of which packages are installed
+# ruff: noqa: E402
 from .anthropic import (
     AnthropicModelId,
     AnthropicProvider,
 )
-from .base import BaseProvider,
+from .base import BaseProvider, Provider
 from .google import GoogleModelId, GoogleProvider
+from .mirascope import MirascopeProvider
 from .mlx import MLXModelId, MLXProvider
 from .model_id import ModelId
 from .ollama import OllamaProvider
@@ -32,11 +48,11 @@ __all__ = [
     "GoogleProvider",
     "MLXModelId",
     "MLXProvider",
+    "MirascopeProvider",
     "ModelId",
     "OllamaProvider",
     "OpenAIModelId",
     "OpenAIProvider",
-    "Params",
     "Provider",
     "ProviderId",
     "TogetherProvider",
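Note on the change above: the stub_module_if_missing calls replace the per-provider try/except import fallbacks (compare the anthropic __init__ diff below). The helper itself lives in mirascope/_stubs.py, which this diff does not expand, so the following is only a sketch of the general technique, assuming a find_spec check plus a placeholder module registered in sys.modules; it is an illustration, not mirascope's implementation.

import importlib.util
import sys
import types


def stub_module_if_missing(module_path: str, required_package: str) -> None:
    """Install a placeholder module when an optional dependency is absent (sketch)."""
    if importlib.util.find_spec(required_package) is not None:
        return  # real dependency is installed; let the normal import proceed

    class _Placeholder:
        """Stands in for any attribute of the stubbed module."""

        def __init__(self, qualname: str) -> None:
            self._qualname = qualname

        def __getattr__(self, name: str) -> "_Placeholder":
            # Nested attribute access keeps returning placeholders.
            return _Placeholder(f"{self._qualname}.{name}")

        def __call__(self, *args: object, **kwargs: object) -> None:
            raise ImportError(
                f"`{required_package}` must be installed to use {self._qualname}"
            )

    class _StubModule(types.ModuleType):
        def __getattr__(self, name: str) -> _Placeholder:
            # `from <module_path> import X` succeeds and yields a placeholder;
            # only actually using X raises a helpful error.
            return _Placeholder(f"{module_path}.{name}")

    sys.modules[module_path] = _StubModule(module_path)

With a stub registered, `from .anthropic import AnthropicProvider` still succeeds when the `anthropic` package is not installed, and an ImportError naming the missing dependency is raised only when the placeholder is used.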

mirascope/llm/providers/anthropic/__init__.py
CHANGED
@@ -1,26 +1,8 @@
 """Anthropic client implementation."""

-from
-
-
-    from .beta_provider import AnthropicBetaProvider
-    from .model_id import AnthropicModelId
-    from .provider import AnthropicProvider
-else:
-    try:
-        from .beta_provider import AnthropicBetaProvider
-        from .model_id import AnthropicModelId
-        from .provider import AnthropicProvider
-    except ImportError:  # pragma: no cover
-        from .._missing_import_stubs import (
-            create_provider_stub,
-        )
-
-        AnthropicBetaProvider = create_provider_stub(
-            "anthropic", "AnthropicBetaProvider"
-        )
-        AnthropicProvider = create_provider_stub("anthropic", "AnthropicProvider")
-        AnthropicModelId = str
+from .beta_provider import AnthropicBetaProvider
+from .model_id import AnthropicModelId
+from .provider import AnthropicProvider

 __all__ = [
     "AnthropicBetaProvider",

mirascope/llm/providers/anthropic/_utils/__init__.py
CHANGED
@@ -1,5 +1,6 @@
 """Shared Anthropic utilities."""

+from ...base._utils import get_include_thoughts
 from .decode import decode_async_stream, decode_response, decode_stream
 from .encode import (
     DEFAULT_FORMAT_MODE,
@@ -21,5 +22,6 @@ __all__ = [
     "decode_stream",
     "encode_image_mime_type",
     "encode_request",
+    "get_include_thoughts",
     "process_params",
 ]

mirascope/llm/providers/anthropic/_utils/beta_decode.py
CHANGED
@@ -74,10 +74,15 @@ def _decode_beta_assistant_content(content: BetaContentBlock) -> AssistantConten
 def beta_decode_response(
     response: ParsedBetaMessage[Any],
     model_id: str,
+    *,
+    include_thoughts: bool,
 ) -> tuple[AssistantMessage, FinishReason | None, Usage]:
     """Convert Beta message to mirascope AssistantMessage and usage."""
+    content = [_decode_beta_assistant_content(part) for part in response.content]
+    if not include_thoughts:
+        content = [part for part in content if part.type != "thought"]
     assistant_message = AssistantMessage(
-        content=
+        content=content,
         provider_id="anthropic",
         model_id=model_id,
         provider_model_name=model_name(model_id),
@@ -108,10 +113,11 @@ BetaContentBlockParam: TypeAlias = (
 class _BetaChunkProcessor:
     """Processes Beta stream events and maintains state across events."""

-    def __init__(self) -> None:
+    def __init__(self, *, include_thoughts: bool) -> None:
         self.current_block_param: BetaContentBlockParam | None = None
         self.accumulated_tool_json: str = ""
         self.accumulated_blocks: list[BetaContentBlockParam] = []
+        self.include_thoughts = include_thoughts

     def process_event(self, event: BetaRawMessageStreamEvent) -> ChunkIterator:
         """Process a single Beta event and yield the appropriate content chunks."""
@@ -144,7 +150,8 @@ class _BetaChunkProcessor:
                     "thinking": "",
                     "signature": "",
                 }
-
+                if self.include_thoughts:
+                    yield ThoughtStartChunk()
             elif content_block.type == "redacted_thinking":  # pragma: no cover
                 self.current_block_param = {
                     "type": "redacted_thinking",
@@ -174,14 +181,17 @@ class _BetaChunkProcessor:
                        f"Received input_json_delta for {self.current_block_param['type']} block"
                    )
                self.accumulated_tool_json += delta.partial_json
-                yield ToolCallChunk(
+                yield ToolCallChunk(
+                    id=self.current_block_param["id"], delta=delta.partial_json
+                )
            elif delta.type == "thinking_delta":
                if self.current_block_param["type"] != "thinking":  # pragma: no cover
                    raise RuntimeError(
                        f"Received thinking_delta for {self.current_block_param['type']} block"
                    )
                self.current_block_param["thinking"] += delta.thinking
-
+                if self.include_thoughts:
+                    yield ThoughtChunk(delta=delta.thinking)
            elif delta.type == "signature_delta":
                if self.current_block_param["type"] != "thinking":  # pragma: no cover
                    raise RuntimeError(
@@ -211,9 +221,10 @@ class _BetaChunkProcessor:
                    if self.accumulated_tool_json
                    else {}
                )
-                yield ToolCallEndChunk()
+                yield ToolCallEndChunk(id=self.current_block_param["id"])
            elif block_type == "thinking":
-
+                if self.include_thoughts:
+                    yield ThoughtEndChunk()
            else:
                raise NotImplementedError

@@ -249,10 +260,10 @@ class _BetaChunkProcessor:


 def beta_decode_stream(
-    beta_stream_manager: BetaMessageStreamManager[Any],
+    beta_stream_manager: BetaMessageStreamManager[Any], *, include_thoughts: bool
 ) -> ChunkIterator:
     """Returns a ChunkIterator converted from a Beta MessageStreamManager."""
-    processor = _BetaChunkProcessor()
+    processor = _BetaChunkProcessor(include_thoughts=include_thoughts)
     with beta_stream_manager as stream:
         for event in stream._raw_stream:  # pyright: ignore[reportPrivateUsage]
             yield from processor.process_event(event)
@@ -260,10 +271,10 @@ def beta_decode_stream(


 async def beta_decode_async_stream(
-    beta_stream_manager: BetaAsyncMessageStreamManager[Any],
+    beta_stream_manager: BetaAsyncMessageStreamManager[Any], *, include_thoughts: bool
 ) -> AsyncChunkIterator:
     """Returns an AsyncChunkIterator converted from a Beta MessageStreamManager."""
-    processor = _BetaChunkProcessor()
+    processor = _BetaChunkProcessor(include_thoughts=include_thoughts)
     async with beta_stream_manager as stream:
         async for event in stream._raw_stream:  # pyright: ignore[reportPrivateUsage]
             for item in processor.process_event(event):