mirascope 2.0.0__py3-none-any.whl → 2.0.0a1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- mirascope/__init__.py +2 -11
- mirascope/graphs/__init__.py +22 -0
- mirascope/graphs/finite_state_machine.py +625 -0
- mirascope/llm/__init__.py +15 -96
- mirascope/llm/agents/__init__.py +15 -0
- mirascope/llm/agents/agent.py +97 -0
- mirascope/llm/agents/agent_template.py +45 -0
- mirascope/llm/agents/decorator.py +176 -0
- mirascope/llm/calls/__init__.py +1 -2
- mirascope/llm/calls/base_call.py +33 -0
- mirascope/llm/calls/calls.py +58 -84
- mirascope/llm/calls/decorator.py +120 -140
- mirascope/llm/clients/__init__.py +34 -0
- mirascope/llm/clients/_missing_import_stubs.py +47 -0
- mirascope/llm/clients/anthropic/__init__.py +25 -0
- mirascope/llm/{providers/openai/completions → clients/anthropic}/_utils/__init__.py +0 -2
- mirascope/llm/{providers → clients}/anthropic/_utils/decode.py +22 -66
- mirascope/llm/clients/anthropic/_utils/encode.py +243 -0
- mirascope/llm/clients/anthropic/clients.py +819 -0
- mirascope/llm/clients/anthropic/model_ids.py +8 -0
- mirascope/llm/{providers → clients}/base/__init__.py +5 -4
- mirascope/llm/{providers → clients}/base/_utils.py +17 -78
- mirascope/llm/{providers/base/base_provider.py → clients/base/client.py} +145 -468
- mirascope/llm/{models → clients/base}/params.py +37 -16
- mirascope/llm/clients/google/__init__.py +20 -0
- mirascope/llm/{providers/openai/responses → clients/google}/_utils/__init__.py +0 -2
- mirascope/llm/{providers → clients}/google/_utils/decode.py +22 -98
- mirascope/llm/{providers → clients}/google/_utils/encode.py +46 -168
- mirascope/llm/clients/google/clients.py +853 -0
- mirascope/llm/clients/google/model_ids.py +15 -0
- mirascope/llm/clients/openai/__init__.py +25 -0
- mirascope/llm/clients/openai/completions/__init__.py +28 -0
- mirascope/llm/{providers/google → clients/openai/completions}/_utils/__init__.py +0 -4
- mirascope/llm/{providers → clients}/openai/completions/_utils/decode.py +9 -74
- mirascope/llm/{providers → clients}/openai/completions/_utils/encode.py +52 -70
- mirascope/llm/clients/openai/completions/_utils/model_features.py +81 -0
- mirascope/llm/clients/openai/completions/clients.py +833 -0
- mirascope/llm/clients/openai/completions/model_ids.py +8 -0
- mirascope/llm/clients/openai/responses/__init__.py +26 -0
- mirascope/llm/clients/openai/responses/_utils/__init__.py +13 -0
- mirascope/llm/{providers → clients}/openai/responses/_utils/decode.py +14 -80
- mirascope/llm/{providers → clients}/openai/responses/_utils/encode.py +41 -92
- mirascope/llm/clients/openai/responses/_utils/model_features.py +87 -0
- mirascope/llm/clients/openai/responses/clients.py +832 -0
- mirascope/llm/clients/openai/responses/model_ids.py +8 -0
- mirascope/llm/clients/openai/shared/__init__.py +7 -0
- mirascope/llm/clients/openai/shared/_utils.py +55 -0
- mirascope/llm/clients/providers.py +175 -0
- mirascope/llm/content/__init__.py +2 -3
- mirascope/llm/content/tool_call.py +0 -6
- mirascope/llm/content/tool_output.py +5 -22
- mirascope/llm/context/_utils.py +6 -19
- mirascope/llm/exceptions.py +43 -298
- mirascope/llm/formatting/__init__.py +2 -19
- mirascope/llm/formatting/_utils.py +74 -0
- mirascope/llm/formatting/format.py +30 -219
- mirascope/llm/formatting/from_call_args.py +2 -2
- mirascope/llm/formatting/partial.py +7 -80
- mirascope/llm/formatting/types.py +64 -21
- mirascope/llm/mcp/__init__.py +2 -2
- mirascope/llm/mcp/client.py +118 -0
- mirascope/llm/messages/__init__.py +0 -3
- mirascope/llm/messages/message.py +5 -13
- mirascope/llm/models/__init__.py +2 -7
- mirascope/llm/models/models.py +139 -315
- mirascope/llm/prompts/__init__.py +12 -13
- mirascope/llm/prompts/_utils.py +43 -14
- mirascope/llm/prompts/decorator.py +204 -144
- mirascope/llm/prompts/protocols.py +59 -25
- mirascope/llm/responses/__init__.py +1 -9
- mirascope/llm/responses/_utils.py +12 -102
- mirascope/llm/responses/base_response.py +6 -18
- mirascope/llm/responses/base_stream_response.py +50 -173
- mirascope/llm/responses/finish_reason.py +0 -1
- mirascope/llm/responses/response.py +13 -34
- mirascope/llm/responses/root_response.py +29 -100
- mirascope/llm/responses/stream_response.py +31 -40
- mirascope/llm/tools/__init__.py +2 -9
- mirascope/llm/tools/_utils.py +3 -12
- mirascope/llm/tools/decorator.py +16 -25
- mirascope/llm/tools/protocols.py +4 -4
- mirascope/llm/tools/tool_schema.py +19 -87
- mirascope/llm/tools/toolkit.py +27 -35
- mirascope/llm/tools/tools.py +41 -135
- {mirascope-2.0.0.dist-info → mirascope-2.0.0a1.dist-info}/METADATA +13 -90
- mirascope-2.0.0a1.dist-info/RECORD +102 -0
- {mirascope-2.0.0.dist-info → mirascope-2.0.0a1.dist-info}/WHEEL +1 -1
- {mirascope-2.0.0.dist-info → mirascope-2.0.0a1.dist-info}/licenses/LICENSE +1 -1
- mirascope/_stubs.py +0 -363
- mirascope/api/__init__.py +0 -14
- mirascope/api/_generated/README.md +0 -207
- mirascope/api/_generated/__init__.py +0 -440
- mirascope/api/_generated/annotations/__init__.py +0 -33
- mirascope/api/_generated/annotations/client.py +0 -506
- mirascope/api/_generated/annotations/raw_client.py +0 -1414
- mirascope/api/_generated/annotations/types/__init__.py +0 -31
- mirascope/api/_generated/annotations/types/annotations_create_request_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_create_response.py +0 -48
- mirascope/api/_generated/annotations/types/annotations_create_response_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_get_response.py +0 -48
- mirascope/api/_generated/annotations/types/annotations_get_response_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_list_request_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_list_response.py +0 -21
- mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item.py +0 -50
- mirascope/api/_generated/annotations/types/annotations_list_response_annotations_item_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_update_request_label.py +0 -5
- mirascope/api/_generated/annotations/types/annotations_update_response.py +0 -48
- mirascope/api/_generated/annotations/types/annotations_update_response_label.py +0 -5
- mirascope/api/_generated/api_keys/__init__.py +0 -17
- mirascope/api/_generated/api_keys/client.py +0 -530
- mirascope/api/_generated/api_keys/raw_client.py +0 -1236
- mirascope/api/_generated/api_keys/types/__init__.py +0 -15
- mirascope/api/_generated/api_keys/types/api_keys_create_response.py +0 -28
- mirascope/api/_generated/api_keys/types/api_keys_get_response.py +0 -27
- mirascope/api/_generated/api_keys/types/api_keys_list_all_for_org_response_item.py +0 -40
- mirascope/api/_generated/api_keys/types/api_keys_list_response_item.py +0 -27
- mirascope/api/_generated/client.py +0 -211
- mirascope/api/_generated/core/__init__.py +0 -52
- mirascope/api/_generated/core/api_error.py +0 -23
- mirascope/api/_generated/core/client_wrapper.py +0 -46
- mirascope/api/_generated/core/datetime_utils.py +0 -28
- mirascope/api/_generated/core/file.py +0 -67
- mirascope/api/_generated/core/force_multipart.py +0 -16
- mirascope/api/_generated/core/http_client.py +0 -543
- mirascope/api/_generated/core/http_response.py +0 -55
- mirascope/api/_generated/core/jsonable_encoder.py +0 -100
- mirascope/api/_generated/core/pydantic_utilities.py +0 -255
- mirascope/api/_generated/core/query_encoder.py +0 -58
- mirascope/api/_generated/core/remove_none_from_dict.py +0 -11
- mirascope/api/_generated/core/request_options.py +0 -35
- mirascope/api/_generated/core/serialization.py +0 -276
- mirascope/api/_generated/docs/__init__.py +0 -4
- mirascope/api/_generated/docs/client.py +0 -91
- mirascope/api/_generated/docs/raw_client.py +0 -178
- mirascope/api/_generated/environment.py +0 -9
- mirascope/api/_generated/environments/__init__.py +0 -23
- mirascope/api/_generated/environments/client.py +0 -649
- mirascope/api/_generated/environments/raw_client.py +0 -1567
- mirascope/api/_generated/environments/types/__init__.py +0 -25
- mirascope/api/_generated/environments/types/environments_create_response.py +0 -24
- mirascope/api/_generated/environments/types/environments_get_analytics_response.py +0 -60
- mirascope/api/_generated/environments/types/environments_get_analytics_response_top_functions_item.py +0 -24
- mirascope/api/_generated/environments/types/environments_get_analytics_response_top_models_item.py +0 -22
- mirascope/api/_generated/environments/types/environments_get_response.py +0 -24
- mirascope/api/_generated/environments/types/environments_list_response_item.py +0 -24
- mirascope/api/_generated/environments/types/environments_update_response.py +0 -24
- mirascope/api/_generated/errors/__init__.py +0 -25
- mirascope/api/_generated/errors/bad_request_error.py +0 -14
- mirascope/api/_generated/errors/conflict_error.py +0 -14
- mirascope/api/_generated/errors/forbidden_error.py +0 -11
- mirascope/api/_generated/errors/internal_server_error.py +0 -10
- mirascope/api/_generated/errors/not_found_error.py +0 -11
- mirascope/api/_generated/errors/payment_required_error.py +0 -15
- mirascope/api/_generated/errors/service_unavailable_error.py +0 -14
- mirascope/api/_generated/errors/too_many_requests_error.py +0 -15
- mirascope/api/_generated/errors/unauthorized_error.py +0 -11
- mirascope/api/_generated/functions/__init__.py +0 -39
- mirascope/api/_generated/functions/client.py +0 -647
- mirascope/api/_generated/functions/raw_client.py +0 -1890
- mirascope/api/_generated/functions/types/__init__.py +0 -53
- mirascope/api/_generated/functions/types/functions_create_request_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_create_response.py +0 -37
- mirascope/api/_generated/functions/types/functions_create_response_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_find_by_hash_response.py +0 -39
- mirascope/api/_generated/functions/types/functions_find_by_hash_response_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_get_by_env_response.py +0 -53
- mirascope/api/_generated/functions/types/functions_get_by_env_response_dependencies_value.py +0 -22
- mirascope/api/_generated/functions/types/functions_get_response.py +0 -37
- mirascope/api/_generated/functions/types/functions_get_response_dependencies_value.py +0 -20
- mirascope/api/_generated/functions/types/functions_list_by_env_response.py +0 -25
- mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item.py +0 -56
- mirascope/api/_generated/functions/types/functions_list_by_env_response_functions_item_dependencies_value.py +0 -22
- mirascope/api/_generated/functions/types/functions_list_response.py +0 -21
- mirascope/api/_generated/functions/types/functions_list_response_functions_item.py +0 -41
- mirascope/api/_generated/functions/types/functions_list_response_functions_item_dependencies_value.py +0 -20
- mirascope/api/_generated/health/__init__.py +0 -7
- mirascope/api/_generated/health/client.py +0 -92
- mirascope/api/_generated/health/raw_client.py +0 -175
- mirascope/api/_generated/health/types/__init__.py +0 -8
- mirascope/api/_generated/health/types/health_check_response.py +0 -22
- mirascope/api/_generated/health/types/health_check_response_status.py +0 -5
- mirascope/api/_generated/organization_invitations/__init__.py +0 -33
- mirascope/api/_generated/organization_invitations/client.py +0 -546
- mirascope/api/_generated/organization_invitations/raw_client.py +0 -1519
- mirascope/api/_generated/organization_invitations/types/__init__.py +0 -53
- mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response.py +0 -34
- mirascope/api/_generated/organization_invitations/types/organization_invitations_accept_response_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_request_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response.py +0 -48
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_create_response_status.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response.py +0 -48
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_get_response_status.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item.py +0 -48
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_role.py +0 -7
- mirascope/api/_generated/organization_invitations/types/organization_invitations_list_response_item_status.py +0 -7
- mirascope/api/_generated/organization_memberships/__init__.py +0 -19
- mirascope/api/_generated/organization_memberships/client.py +0 -302
- mirascope/api/_generated/organization_memberships/raw_client.py +0 -736
- mirascope/api/_generated/organization_memberships/types/__init__.py +0 -27
- mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item.py +0 -33
- mirascope/api/_generated/organization_memberships/types/organization_memberships_list_response_item_role.py +0 -7
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_request_role.py +0 -7
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response.py +0 -31
- mirascope/api/_generated/organization_memberships/types/organization_memberships_update_response_role.py +0 -7
- mirascope/api/_generated/organizations/__init__.py +0 -51
- mirascope/api/_generated/organizations/client.py +0 -869
- mirascope/api/_generated/organizations/raw_client.py +0 -2593
- mirascope/api/_generated/organizations/types/__init__.py +0 -71
- mirascope/api/_generated/organizations/types/organizations_create_payment_intent_response.py +0 -24
- mirascope/api/_generated/organizations/types/organizations_create_response.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_create_response_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_get_response.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_get_response_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_list_response_item.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_list_response_item_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_request_target_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response.py +0 -47
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item.py +0 -33
- mirascope/api/_generated/organizations/types/organizations_preview_subscription_change_response_validation_errors_item_resource.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_router_balance_response.py +0 -24
- mirascope/api/_generated/organizations/types/organizations_subscription_response.py +0 -53
- mirascope/api/_generated/organizations/types/organizations_subscription_response_current_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_subscription_response_payment_method.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change.py +0 -34
- mirascope/api/_generated/organizations/types/organizations_subscription_response_scheduled_change_target_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_update_response.py +0 -26
- mirascope/api/_generated/organizations/types/organizations_update_response_role.py +0 -5
- mirascope/api/_generated/organizations/types/organizations_update_subscription_request_target_plan.py +0 -7
- mirascope/api/_generated/organizations/types/organizations_update_subscription_response.py +0 -35
- mirascope/api/_generated/project_memberships/__init__.py +0 -25
- mirascope/api/_generated/project_memberships/client.py +0 -437
- mirascope/api/_generated/project_memberships/raw_client.py +0 -1039
- mirascope/api/_generated/project_memberships/types/__init__.py +0 -29
- mirascope/api/_generated/project_memberships/types/project_memberships_create_request_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_create_response.py +0 -35
- mirascope/api/_generated/project_memberships/types/project_memberships_create_response_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item.py +0 -33
- mirascope/api/_generated/project_memberships/types/project_memberships_list_response_item_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_update_request_role.py +0 -7
- mirascope/api/_generated/project_memberships/types/project_memberships_update_response.py +0 -35
- mirascope/api/_generated/project_memberships/types/project_memberships_update_response_role.py +0 -7
- mirascope/api/_generated/projects/__init__.py +0 -7
- mirascope/api/_generated/projects/client.py +0 -428
- mirascope/api/_generated/projects/raw_client.py +0 -1302
- mirascope/api/_generated/projects/types/__init__.py +0 -10
- mirascope/api/_generated/projects/types/projects_create_response.py +0 -25
- mirascope/api/_generated/projects/types/projects_get_response.py +0 -25
- mirascope/api/_generated/projects/types/projects_list_response_item.py +0 -25
- mirascope/api/_generated/projects/types/projects_update_response.py +0 -25
- mirascope/api/_generated/reference.md +0 -4915
- mirascope/api/_generated/tags/__init__.py +0 -19
- mirascope/api/_generated/tags/client.py +0 -504
- mirascope/api/_generated/tags/raw_client.py +0 -1288
- mirascope/api/_generated/tags/types/__init__.py +0 -17
- mirascope/api/_generated/tags/types/tags_create_response.py +0 -41
- mirascope/api/_generated/tags/types/tags_get_response.py +0 -41
- mirascope/api/_generated/tags/types/tags_list_response.py +0 -23
- mirascope/api/_generated/tags/types/tags_list_response_tags_item.py +0 -41
- mirascope/api/_generated/tags/types/tags_update_response.py +0 -41
- mirascope/api/_generated/token_cost/__init__.py +0 -7
- mirascope/api/_generated/token_cost/client.py +0 -160
- mirascope/api/_generated/token_cost/raw_client.py +0 -264
- mirascope/api/_generated/token_cost/types/__init__.py +0 -8
- mirascope/api/_generated/token_cost/types/token_cost_calculate_request_usage.py +0 -54
- mirascope/api/_generated/token_cost/types/token_cost_calculate_response.py +0 -52
- mirascope/api/_generated/traces/__init__.py +0 -97
- mirascope/api/_generated/traces/client.py +0 -1103
- mirascope/api/_generated/traces/raw_client.py +0 -2322
- mirascope/api/_generated/traces/types/__init__.py +0 -155
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item.py +0 -29
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource.py +0 -27
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item.py +0 -23
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value.py +0 -38
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_array_value.py +0 -19
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value_values_item.py +0 -20
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item.py +0 -29
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope.py +0 -31
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item.py +0 -23
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value.py +0 -38
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_array_value.py +0 -19
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value_values_item.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item.py +0 -48
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item.py +0 -23
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value.py +0 -38
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_array_value.py +0 -19
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value.py +0 -24
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value_values_item.py +0 -22
- mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_status.py +0 -20
- mirascope/api/_generated/traces/types/traces_create_response.py +0 -24
- mirascope/api/_generated/traces/types/traces_create_response_partial_success.py +0 -22
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response.py +0 -60
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response_top_functions_item.py +0 -24
- mirascope/api/_generated/traces/types/traces_get_analytics_summary_response_top_models_item.py +0 -22
- mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response.py +0 -33
- mirascope/api/_generated/traces/types/traces_get_trace_detail_by_env_response_spans_item.py +0 -88
- mirascope/api/_generated/traces/types/traces_get_trace_detail_response.py +0 -33
- mirascope/api/_generated/traces/types/traces_get_trace_detail_response_spans_item.py +0 -88
- mirascope/api/_generated/traces/types/traces_list_by_function_hash_response.py +0 -25
- mirascope/api/_generated/traces/types/traces_list_by_function_hash_response_traces_item.py +0 -44
- mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_by_env_request_attribute_filters_item_operator.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_by.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_by_env_request_sort_order.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_by_env_response.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_by_env_response_spans_item.py +0 -50
- mirascope/api/_generated/traces/types/traces_search_request_attribute_filters_item.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_request_attribute_filters_item_operator.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_request_sort_by.py +0 -7
- mirascope/api/_generated/traces/types/traces_search_request_sort_order.py +0 -5
- mirascope/api/_generated/traces/types/traces_search_response.py +0 -26
- mirascope/api/_generated/traces/types/traces_search_response_spans_item.py +0 -50
- mirascope/api/_generated/types/__init__.py +0 -85
- mirascope/api/_generated/types/already_exists_error.py +0 -22
- mirascope/api/_generated/types/already_exists_error_tag.py +0 -5
- mirascope/api/_generated/types/bad_request_error_body.py +0 -50
- mirascope/api/_generated/types/click_house_error.py +0 -22
- mirascope/api/_generated/types/database_error.py +0 -22
- mirascope/api/_generated/types/database_error_tag.py +0 -5
- mirascope/api/_generated/types/date.py +0 -3
- mirascope/api/_generated/types/http_api_decode_error.py +0 -27
- mirascope/api/_generated/types/http_api_decode_error_tag.py +0 -5
- mirascope/api/_generated/types/immutable_resource_error.py +0 -22
- mirascope/api/_generated/types/internal_server_error_body.py +0 -49
- mirascope/api/_generated/types/issue.py +0 -38
- mirascope/api/_generated/types/issue_tag.py +0 -10
- mirascope/api/_generated/types/not_found_error_body.py +0 -22
- mirascope/api/_generated/types/not_found_error_tag.py +0 -5
- mirascope/api/_generated/types/number_from_string.py +0 -3
- mirascope/api/_generated/types/permission_denied_error.py +0 -22
- mirascope/api/_generated/types/permission_denied_error_tag.py +0 -5
- mirascope/api/_generated/types/plan_limit_exceeded_error.py +0 -32
- mirascope/api/_generated/types/plan_limit_exceeded_error_tag.py +0 -7
- mirascope/api/_generated/types/pricing_unavailable_error.py +0 -23
- mirascope/api/_generated/types/property_key.py +0 -7
- mirascope/api/_generated/types/property_key_key.py +0 -25
- mirascope/api/_generated/types/property_key_key_tag.py +0 -5
- mirascope/api/_generated/types/rate_limit_error.py +0 -31
- mirascope/api/_generated/types/rate_limit_error_tag.py +0 -5
- mirascope/api/_generated/types/service_unavailable_error_body.py +0 -24
- mirascope/api/_generated/types/service_unavailable_error_tag.py +0 -7
- mirascope/api/_generated/types/stripe_error.py +0 -20
- mirascope/api/_generated/types/subscription_past_due_error.py +0 -31
- mirascope/api/_generated/types/subscription_past_due_error_tag.py +0 -7
- mirascope/api/_generated/types/unauthorized_error_body.py +0 -21
- mirascope/api/_generated/types/unauthorized_error_tag.py +0 -5
- mirascope/api/client.py +0 -255
- mirascope/api/settings.py +0 -99
- mirascope/llm/formatting/output_parser.py +0 -178
- mirascope/llm/formatting/primitives.py +0 -192
- mirascope/llm/mcp/mcp_client.py +0 -130
- mirascope/llm/messages/_utils.py +0 -34
- mirascope/llm/models/thinking_config.py +0 -61
- mirascope/llm/prompts/prompts.py +0 -487
- mirascope/llm/providers/__init__.py +0 -62
- mirascope/llm/providers/anthropic/__init__.py +0 -11
- mirascope/llm/providers/anthropic/_utils/__init__.py +0 -27
- mirascope/llm/providers/anthropic/_utils/beta_decode.py +0 -282
- mirascope/llm/providers/anthropic/_utils/beta_encode.py +0 -266
- mirascope/llm/providers/anthropic/_utils/encode.py +0 -418
- mirascope/llm/providers/anthropic/_utils/errors.py +0 -46
- mirascope/llm/providers/anthropic/beta_provider.py +0 -374
- mirascope/llm/providers/anthropic/model_id.py +0 -23
- mirascope/llm/providers/anthropic/model_info.py +0 -87
- mirascope/llm/providers/anthropic/provider.py +0 -479
- mirascope/llm/providers/google/__init__.py +0 -6
- mirascope/llm/providers/google/_utils/errors.py +0 -50
- mirascope/llm/providers/google/model_id.py +0 -22
- mirascope/llm/providers/google/model_info.py +0 -63
- mirascope/llm/providers/google/provider.py +0 -492
- mirascope/llm/providers/mirascope/__init__.py +0 -5
- mirascope/llm/providers/mirascope/_utils.py +0 -73
- mirascope/llm/providers/mirascope/provider.py +0 -349
- mirascope/llm/providers/mlx/__init__.py +0 -9
- mirascope/llm/providers/mlx/_utils.py +0 -141
- mirascope/llm/providers/mlx/encoding/__init__.py +0 -8
- mirascope/llm/providers/mlx/encoding/base.py +0 -72
- mirascope/llm/providers/mlx/encoding/transformers.py +0 -150
- mirascope/llm/providers/mlx/mlx.py +0 -254
- mirascope/llm/providers/mlx/model_id.py +0 -17
- mirascope/llm/providers/mlx/provider.py +0 -452
- mirascope/llm/providers/model_id.py +0 -16
- mirascope/llm/providers/ollama/__init__.py +0 -7
- mirascope/llm/providers/ollama/provider.py +0 -71
- mirascope/llm/providers/openai/__init__.py +0 -15
- mirascope/llm/providers/openai/_utils/__init__.py +0 -5
- mirascope/llm/providers/openai/_utils/errors.py +0 -46
- mirascope/llm/providers/openai/completions/__init__.py +0 -7
- mirascope/llm/providers/openai/completions/base_provider.py +0 -542
- mirascope/llm/providers/openai/completions/provider.py +0 -22
- mirascope/llm/providers/openai/model_id.py +0 -31
- mirascope/llm/providers/openai/model_info.py +0 -303
- mirascope/llm/providers/openai/provider.py +0 -441
- mirascope/llm/providers/openai/responses/__init__.py +0 -5
- mirascope/llm/providers/openai/responses/provider.py +0 -513
- mirascope/llm/providers/provider_id.py +0 -24
- mirascope/llm/providers/provider_registry.py +0 -299
- mirascope/llm/providers/together/__init__.py +0 -7
- mirascope/llm/providers/together/provider.py +0 -40
- mirascope/llm/responses/usage.py +0 -95
- mirascope/ops/__init__.py +0 -111
- mirascope/ops/_internal/__init__.py +0 -5
- mirascope/ops/_internal/closure.py +0 -1169
- mirascope/ops/_internal/configuration.py +0 -177
- mirascope/ops/_internal/context.py +0 -76
- mirascope/ops/_internal/exporters/__init__.py +0 -26
- mirascope/ops/_internal/exporters/exporters.py +0 -395
- mirascope/ops/_internal/exporters/processors.py +0 -104
- mirascope/ops/_internal/exporters/types.py +0 -165
- mirascope/ops/_internal/exporters/utils.py +0 -29
- mirascope/ops/_internal/instrumentation/__init__.py +0 -8
- mirascope/ops/_internal/instrumentation/llm/__init__.py +0 -8
- mirascope/ops/_internal/instrumentation/llm/common.py +0 -530
- mirascope/ops/_internal/instrumentation/llm/cost.py +0 -190
- mirascope/ops/_internal/instrumentation/llm/encode.py +0 -238
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/__init__.py +0 -38
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_input_messages.py +0 -31
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_output_messages.py +0 -38
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_system_instructions.py +0 -18
- mirascope/ops/_internal/instrumentation/llm/gen_ai_types/shared.py +0 -100
- mirascope/ops/_internal/instrumentation/llm/llm.py +0 -161
- mirascope/ops/_internal/instrumentation/llm/model.py +0 -1798
- mirascope/ops/_internal/instrumentation/llm/response.py +0 -521
- mirascope/ops/_internal/instrumentation/llm/serialize.py +0 -300
- mirascope/ops/_internal/propagation.py +0 -198
- mirascope/ops/_internal/protocols.py +0 -133
- mirascope/ops/_internal/session.py +0 -139
- mirascope/ops/_internal/spans.py +0 -232
- mirascope/ops/_internal/traced_calls.py +0 -375
- mirascope/ops/_internal/traced_functions.py +0 -523
- mirascope/ops/_internal/tracing.py +0 -353
- mirascope/ops/_internal/types.py +0 -13
- mirascope/ops/_internal/utils.py +0 -123
- mirascope/ops/_internal/versioned_calls.py +0 -512
- mirascope/ops/_internal/versioned_functions.py +0 -357
- mirascope/ops/_internal/versioning.py +0 -303
- mirascope/ops/exceptions.py +0 -21
- mirascope-2.0.0.dist-info/RECORD +0 -423
- /mirascope/llm/{providers → clients}/base/kwargs.py +0 -0
- /mirascope/llm/{providers → clients}/google/message.py +0 -0
mirascope/llm/calls/calls.py CHANGED
@@ -1,11 +1,10 @@
 """The Call module for generating responses using LLMs."""

 from dataclasses import dataclass
-from typing import Generic,
+from typing import Generic, overload

 from ..context import Context, DepsT
 from ..formatting import FormattableT
-from ..models import Model, use_model
 from ..prompts import (
     AsyncContextPrompt,
     AsyncPrompt,
@@ -22,39 +21,19 @@ from ..responses import (
     Response,
     StreamResponse,
 )
+from ..tools import (
+    AsyncContextToolkit,
+    AsyncToolkit,
+    ContextToolkit,
+    Toolkit,
+)
 from ..types import P
-
-CallT = TypeVar("CallT", bound="BaseCall")
-
-
-@dataclass
-class BaseCall:
-    """Base class for all Call types with shared model functionality."""
-
-    default_model: Model
-    """The default model that will be used if no model is set in context."""
-
-    @property
-    def model(self) -> Model:
-        """The model used for generating responses. May be overwritten via `with llm.model(...)`."""
-        return use_model(self.default_model)
+from .base_call import BaseCall


 @dataclass
-class Call(BaseCall, Generic[P, FormattableT]):
-    """A
-
-    Created by decorating a `MessageTemplate` with `llm.call`. The decorated function
-    becomes directly callable to generate responses, with the `Model` bundled in.
-
-    A `Call` is essentially: `MessageTemplate` + tools + format + `Model`.
-    It can be invoked directly: `call(*args, **kwargs)` (no model argument needed).
-
-    The model can be overridden at runtime using `with llm.model(...)` context manager.
-    """
-
-    prompt: Prompt[P, FormattableT]
-    """The underlying Prompt instance that generates messages with tools and format."""
+class Call(BaseCall[P, Prompt, Toolkit, FormattableT], Generic[P, FormattableT]):
+    """A class for generating responses using LLMs."""

     @overload
     def __call__(
@@ -84,7 +63,10 @@ class Call(BaseCall, Generic[P, FormattableT]):
         self, *args: P.args, **kwargs: P.kwargs
     ) -> Response | Response[FormattableT]:
         """Generates a response using the LLM."""
-
+        messages = self.fn(*args, **kwargs)
+        return self.model.call(
+            messages=messages, tools=self.toolkit, format=self.format
+        )

     @overload
     def stream(
@@ -100,24 +82,18 @@ class Call(BaseCall, Generic[P, FormattableT]):
         self, *args: P.args, **kwargs: P.kwargs
     ) -> StreamResponse | StreamResponse[FormattableT]:
         """Generates a streaming response using the LLM."""
-
+        messages = self.fn(*args, **kwargs)
+        return self.model.stream(
+            messages=messages, tools=self.toolkit, format=self.format
+        )


 @dataclass
-class AsyncCall(
-
-
-
-
-
-    An `AsyncCall` is essentially: async `MessageTemplate` + tools + format + `Model`.
-    It can be invoked directly: `await call(*args, **kwargs)` (no model argument needed).
-
-    The model can be overridden at runtime using `with llm.model(...)` context manager.
-    """
-
-    prompt: AsyncPrompt[P, FormattableT]
-    """The underlying AsyncPrompt instance that generates messages with tools and format."""
+class AsyncCall(
+    BaseCall[P, AsyncPrompt, AsyncToolkit, FormattableT],
+    Generic[P, FormattableT],
+):
+    """A class for generating responses using LLMs asynchronously."""

     @overload
     async def __call__(
@@ -132,7 +108,7 @@ class AsyncCall(BaseCall, Generic[P, FormattableT]):
     async def __call__(
         self, *args: P.args, **kwargs: P.kwargs
     ) -> AsyncResponse | AsyncResponse[FormattableT]:
-        """Generates a
+        """Generates a Asyncresponse using the LLM asynchronously."""
         return await self.call(*args, **kwargs)

     @overload
@@ -149,7 +125,10 @@ class AsyncCall(BaseCall, Generic[P, FormattableT]):
         self, *args: P.args, **kwargs: P.kwargs
     ) -> AsyncResponse | AsyncResponse[FormattableT]:
         """Generates a response using the LLM asynchronously."""
-
+        messages = await self.fn(*args, **kwargs)
+        return await self.model.call_async(
+            messages=messages, tools=self.toolkit, format=self.format
+        )

     @overload
     async def stream(
@@ -165,25 +144,18 @@ class AsyncCall(BaseCall, Generic[P, FormattableT]):
         self, *args: P.args, **kwargs: P.kwargs
     ) -> AsyncStreamResponse[FormattableT] | AsyncStreamResponse:
         """Generates a streaming response using the LLM asynchronously."""
-
+        messages = await self.fn(*args, **kwargs)
+        return await self.model.stream_async(
+            messages=messages, tools=self.toolkit, format=self.format
+        )


 @dataclass
-class ContextCall(
-
-
-
-
-    responses with context dependencies, with the `Model` bundled in.
-
-    A `ContextCall` is essentially: `ContextMessageTemplate` + tools + format + `Model`.
-    It can be invoked directly: `call(ctx, *args, **kwargs)` (no model argument needed).
-
-    The model can be overridden at runtime using `with llm.model(...)` context manager.
-    """
-
-    prompt: ContextPrompt[P, DepsT, FormattableT]
-    """The underlying ContextPrompt instance that generates messages with tools and format."""
+class ContextCall(
+    BaseCall[P, ContextPrompt, ContextToolkit[DepsT], FormattableT],
+    Generic[P, DepsT, FormattableT],
+):
+    """A class for generating responses using LLMs."""

     @overload
     def __call__(
@@ -227,7 +199,10 @@ class ContextCall(BaseCall, Generic[P, DepsT, FormattableT]):
         self, ctx: Context[DepsT], *args: P.args, **kwargs: P.kwargs
     ) -> ContextResponse[DepsT, None] | ContextResponse[DepsT, FormattableT]:
         """Generates a response using the LLM."""
-
+        messages = self.fn(ctx, *args, **kwargs)
+        return self.model.context_call(
+            ctx=ctx, messages=messages, tools=self.toolkit, format=self.format
+        )

     @overload
     def stream(
@@ -251,25 +226,18 @@ class ContextCall(BaseCall, Generic[P, DepsT, FormattableT]):
         ContextStreamResponse[DepsT, None] | ContextStreamResponse[DepsT, FormattableT]
     ):
         """Generates a streaming response using the LLM."""
-
+        messages = self.fn(ctx, *args, **kwargs)
+        return self.model.context_stream(
+            ctx=ctx, messages=messages, tools=self.toolkit, format=self.format
+        )


 @dataclass
-class AsyncContextCall(
-
-
-
-
-    responses asynchronously with context dependencies, with the `Model` bundled in.
-
-    An `AsyncContextCall` is essentially: async `ContextMessageTemplate` + tools + format + `Model`.
-    It can be invoked directly: `await call(ctx, *args, **kwargs)` (no model argument needed).
-
-    The model can be overridden at runtime using `with llm.model(...)` context manager.
-    """
-
-    prompt: AsyncContextPrompt[P, DepsT, FormattableT]
-    """The underlying AsyncContextPrompt instance that generates messages with tools and format."""
+class AsyncContextCall(
+    BaseCall[P, AsyncContextPrompt, AsyncContextToolkit[DepsT], FormattableT],
+    Generic[P, DepsT, FormattableT],
+):
+    """A class for generating responses using LLMs asynchronously."""

     @overload
     async def __call__(
@@ -313,7 +281,10 @@ class AsyncContextCall(BaseCall, Generic[P, DepsT, FormattableT]):
         self, ctx: Context[DepsT], *args: P.args, **kwargs: P.kwargs
    ) -> AsyncContextResponse[DepsT, None] | AsyncContextResponse[DepsT, FormattableT]:
         """Generates a response using the LLM asynchronously."""
-
+        messages = await self.fn(ctx, *args, **kwargs)
+        return await self.model.context_call_async(
+            ctx=ctx, messages=messages, tools=self.toolkit, format=self.format
+        )

     @overload
     async def stream(
@@ -338,4 +309,7 @@ class AsyncContextCall(BaseCall, Generic[P, DepsT, FormattableT]):
         | AsyncContextStreamResponse[DepsT, FormattableT]
     ):
         """Generates a streaming response using the LLM asynchronously."""
-
+        messages = await self.fn(ctx, *args, **kwargs)
+        return await self.model.context_stream_async(
+            ctx=ctx, messages=messages, tools=self.toolkit, format=self.format
+        )
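The four Call variants above no longer carry their own `prompt` field or model-resolution logic; they now inherit from the new `BaseCall` in `base_call.py` (added in this release, +33 lines, not included in this diff) and forward the prompt output straight to the bundled model. As rough orientation only, here is a hypothetical sketch of what that shared base might look like, inferred from the generic parameters `BaseCall[P, Prompt, Toolkit, FormattableT]`, the constructor keywords used in `decorator.py` below (`fn`, `default_model`, `format`, `toolkit`), and the `model` property removed from the old in-file `BaseCall`; the real field names and types may differ.

```python
# Hypothetical sketch of base_call.py -- the file is not part of this diff, so
# everything here is inferred from the hunks above and may not match the release.
from dataclasses import dataclass
from typing import Generic, TypeVar

from ..formatting import Format, FormattableT
from ..models import Model, use_model  # use_model assumed to survive from 2.0.0
from ..types import P

PromptT = TypeVar("PromptT")
ToolkitT = TypeVar("ToolkitT")


@dataclass
class BaseCall(Generic[P, PromptT, ToolkitT, FormattableT]):
    fn: PromptT  # the decorated prompt, built via `prompt(fn)` in decorator.py
    default_model: Model  # the model bundled at decoration time
    format: type[FormattableT] | Format[FormattableT] | None
    toolkit: ToolkitT

    @property
    def model(self) -> Model:
        # 2.0.0 resolved a `with llm.model(...)` override here via use_model;
        # assuming the alpha keeps equivalent behavior.
        return use_model(self.default_model)
```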
mirascope/llm/calls/decorator.py CHANGED
@@ -4,24 +4,29 @@ from __future__ import annotations

 from collections.abc import Sequence
 from dataclasses import dataclass
-from typing import
+from typing import Generic, Literal, cast, overload
 from typing_extensions import Unpack

+from ..clients import (
+    AnthropicModelId,
+    GoogleModelId,
+    ModelId,
+    OpenAICompletionsModelId,
+    OpenAIResponsesModelId,
+    Params,
+    Provider,
+)
 from ..context import DepsT
-from ..formatting import Format, FormattableT
+from ..formatting import Format, FormattableT
 from ..models import Model
 from ..prompts import (
-
-
-
-
-
-
-    MessageTemplate,
-    Prompt,
-    _utils,
+    AsyncContextPromptable,
+    AsyncPromptable,
+    ContextPromptable,
+    Promptable,
+    _utils as _prompt_utils,
+    prompt,
 )
-from ..providers import ModelId
 from ..tools import (
     AsyncContextTool,
     AsyncContextToolkit,
@@ -36,40 +41,19 @@ from ..tools import (
 from ..types import P
 from .calls import AsyncCall, AsyncContextCall, Call, ContextCall

-if TYPE_CHECKING:
-    from ..models import Params
-

 @dataclass(kw_only=True)
 class CallDecorator(Generic[ToolT, FormattableT]):
-    """
-
-    Takes a raw prompt function that returns message content and wraps it with tools,
-    format, and a model to create a `Call` that can be invoked directly without needing
-    to pass a model argument.
-
-    The decorator automatically detects whether the function is async or context-aware
-    and creates the appropriate `Call` variant (`Call`, `AsyncCall`, `ContextCall`, or `AsyncContextCall`).
-
-    Conceptually: `CallDecorator` = `PromptDecorator` + `Model`
-    Result: `Call` = `MessageTemplate` + tools + format + `Model`
-    """
+    """A decorator for converting prompts to calls."""

     model: Model
-    """The default model to use with this call. May be overridden."""
-
     tools: Sequence[ToolT] | None
-
-
-    format: (
-        type[FormattableT] | Format[FormattableT] | OutputParser[FormattableT] | None
-    )
-    """The structured output format off the prompt, if any."""
+    format: type[FormattableT] | Format[FormattableT] | None

     @overload
     def __call__(
         self: CallDecorator[AsyncTool | AsyncContextTool[DepsT], FormattableT],
-        fn:
+        fn: AsyncContextPromptable[P, DepsT],
     ) -> AsyncContextCall[P, DepsT, FormattableT]:
         """Decorate an async context prompt into an AsyncContextCall."""
         ...
@@ -77,31 +61,31 @@ class CallDecorator(Generic[ToolT, FormattableT]):
     @overload
     def __call__(
         self: CallDecorator[Tool | ContextTool[DepsT], FormattableT],
-        fn:
+        fn: ContextPromptable[P, DepsT],
     ) -> ContextCall[P, DepsT, FormattableT]:
         """Decorate a context prompt into a ContextCall."""
         ...

     @overload
     def __call__(
-        self: CallDecorator[AsyncTool, FormattableT], fn:
+        self: CallDecorator[AsyncTool, FormattableT], fn: AsyncPromptable[P]
     ) -> AsyncCall[P, FormattableT]:
         """Decorate an async prompt into an AsyncCall."""
         ...

     @overload
     def __call__(
-        self: CallDecorator[Tool, FormattableT], fn:
+        self: CallDecorator[Tool, FormattableT], fn: Promptable[P]
     ) -> Call[P, FormattableT]:
         """Decorate a prompt into a Call."""
         ...

     def __call__(
         self,
-        fn:
-
-
-
+        fn: ContextPromptable[P, DepsT]
+        | AsyncContextPromptable[P, DepsT]
+        | Promptable[P]
+        | AsyncPromptable[P],
     ) -> (
         ContextCall[P, DepsT, FormattableT]
         | AsyncContextCall[P, DepsT, FormattableT]
@@ -109,131 +93,123 @@ class CallDecorator(Generic[ToolT, FormattableT]):
         | AsyncCall[P, FormattableT]
     ):
         """Decorates a prompt into a Call or ContextCall."""
-        is_context =
-        is_async =
+        is_context = _prompt_utils.is_context_promptable(fn)
+        is_async = _prompt_utils.is_async_promptable(fn)

         if is_context and is_async:
             tools = cast(
                 Sequence[AsyncTool | AsyncContextTool[DepsT]] | None, self.tools
             )
-            prompt = AsyncContextPrompt(
-                fn=fn,
-                toolkit=AsyncContextToolkit(tools=tools),
-                format=self.format,
-            )
             return AsyncContextCall(
-
+                fn=prompt(fn),
                 default_model=self.model,
+                format=self.format,
+                toolkit=AsyncContextToolkit(tools=tools),
             )
         elif is_context:
             tools = cast(Sequence[Tool | ContextTool[DepsT]] | None, self.tools)
-            prompt = ContextPrompt(
-                fn=fn,
-                toolkit=ContextToolkit(tools=tools),
-                format=self.format,
-            )
             return ContextCall(
-
+                fn=prompt(fn),
                 default_model=self.model,
+                format=self.format,
+                toolkit=ContextToolkit(tools=tools),
             )
         elif is_async:
             tools = cast(Sequence[AsyncTool] | None, self.tools)
-            prompt = AsyncPrompt(
-                fn=fn, toolkit=AsyncToolkit(tools=tools), format=self.format
-            )
             return AsyncCall(
-
+                fn=prompt(fn),
                 default_model=self.model,
+                format=self.format,
+                toolkit=AsyncToolkit(tools=tools),
             )
         else:
             tools = cast(Sequence[Tool] | None, self.tools)
-            prompt = Prompt(fn=fn, toolkit=Toolkit(tools=tools), format=self.format)
             return Call(
-
+                fn=prompt(fn),
                 default_model=self.model,
+                format=self.format,
+                toolkit=Toolkit(tools=tools),
             )


 @overload
 def call(
-    model: ModelId,
     *,
-
-
-
-
-    | None = None,
+    provider: Literal["anthropic"],
+    model_id: AnthropicModelId,
+    tools: list[ToolT] | None = None,
+    format: type[FormattableT] | Format[FormattableT] | None = None,
     **params: Unpack[Params],
 ) -> CallDecorator[ToolT, FormattableT]:
-    """
+    """Decorate a prompt into a Call using Anthropic models."""
+    ...

-
-
+
+@overload
+def call(
+    *,
+    provider: Literal["google"],
+    model_id: GoogleModelId,
+    tools: list[ToolT] | None = None,
+    format: type[FormattableT] | Format[FormattableT] | None = None,
+    **params: Unpack[Params],
+) -> CallDecorator[ToolT, FormattableT]:
+    """Decorate a prompt into a Call using Google models."""
     ...


 @overload
 def call(
-    model: Model,
     *,
-
-
-
-
-
+    provider: Literal["openai:completions"],
+    model_id: OpenAICompletionsModelId,
+    tools: list[ToolT] | None = None,
+    format: type[FormattableT] | Format[FormattableT] | None = None,
+    **params: Unpack[Params],
 ) -> CallDecorator[ToolT, FormattableT]:
-    """
+    """Decorate a prompt into a Call using OpenAI models."""
+    ...

-
-
+
+@overload
+def call(
+    *,
+    provider: Literal["openai:responses", "openai"],
+    model_id: OpenAIResponsesModelId,
+    tools: list[ToolT] | None = None,
+    format: type[FormattableT] | Format[FormattableT] | None = None,
+    **params: Unpack[Params],
+) -> CallDecorator[ToolT, FormattableT]:
+    """Decorate a prompt into a Call using OpenAI models (Responses API)."""
+    ...
+
+
+@overload
+def call(
+    *,
+    provider: Provider,
+    model_id: ModelId,
+    tools: list[ToolT] | None = None,
+    format: type[FormattableT] | Format[FormattableT] | None = None,
+    **params: Unpack[Params],
+) -> CallDecorator[ToolT, FormattableT]:
+    """Decorate a prompt into a Call using a generic provider and model."""
     ...


 def call(
-    model: ModelId | Model,
     *,
-
-
-
-
-    | None = None,
+    provider: Provider,
+    model_id: ModelId,
+    tools: list[ToolT] | None = None,
+    format: type[FormattableT] | Format[FormattableT] | None = None,
     **params: Unpack[Params],
 ) -> CallDecorator[ToolT, FormattableT]:
-    """
-
-
-
-
-    directly to generate LLM responses without needing to pass a model argument.
-
-    The decorator automatically detects the function type:
-    - If the first parameter is named `'ctx'` with type `llm.Context[T]` (or a subclass thereof),
-      creates a `ContextCall`
-    - If the function is async, creates an `AsyncCall` or `AsyncContextCall`
-    - Otherwise, creates a regular `Call`
-
-    The model specified in the decorator can be overridden at runtime using the
-    `llm.model()` context manager. When overridden, the context model completely
-    replaces the decorated model, including all parameters.
-
-    Conceptual flow:
-    - `MessageTemplate`: raw function returning content
-    - `@llm.prompt`: `MessageTemplate` → `Prompt`
-      Includes tools and format, if applicable. Can be called by providing a `Model`.
-    - `@llm.call`: `MessageTemplate` → `Call`. Includes a model, tools, and format. The
-      model may be created on the fly from a model identifier and optional params, or
-      provided outright.
-
-    Args:
-        model: A model ID string (e.g., "openai/gpt-4") or a `Model` instance
-        tools: Optional `Sequence` of tools to make available to the LLM
-        format: Optional response format class (`BaseModel`) or Format instance
-        **params: Additional call parameters (temperature, max_tokens, etc.)
-            Only available when passing a model ID string
-
-    Returns:
-        A `CallDecorator` that converts prompt functions into `Call` variants
-        (`Call`, `AsyncCall`, `ContextCall`, or `AsyncContextCall`)
+    """Returns a decorator for turning prompt template functions into generations.
+
+    This decorator creates a `Call` or `ContextCall` that can be used with prompt functions.
+    If the first parameter is typed as `llm.Context[T]`, it creates a ContextCall.
+    Otherwise, it creates a regular Call.

     Example:

@@ -241,12 +217,15 @@ def call(
         ```python
         from mirascope import llm

-        @llm.call(
-
-
+        @llm.call(
+            provider="openai:completions",
+            model_id="gpt-4o-mini",
+        )
+        def answer_question(question: str) -> str:
+            return f"Answer this question: {question}"

-        response: llm.Response =
-        print(response
+        response: llm.Response = answer_question("What is the capital of France?")
+        print(response)
         ```

     Example:
@@ -257,19 +236,20 @@ def call(
         from mirascope import llm

         @dataclass
-        class
-
-
-
-
-
-
-
-
-
-
+        class Personality:
+            vibe: str
+
+        @llm.call(
+            provider="openai:completions",
+            model_id="gpt-4o-mini",
+        )
+        def answer_question(ctx: llm.Context[Personality], question: str) -> str:
+            return f"Your vibe is {ctx.deps.vibe}. Answer this question: {question}"
+
+        ctx = llm.Context(deps=Personality(vibe="snarky"))
+        response = answer_question(ctx, "What is the capital of France?")
+        print(response)
         ```
     """
-
-    model = Model(model, **params)
+    model = Model(provider=provider, model_id=model_id, **params)
     return CallDecorator(model=model, tools=tools, format=format)
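The decorator's public signature changes from `call(model: ModelId | Model, ...)` to keyword-only `provider` and `model_id` arguments, with one overload per provider. The docstring examples above use the OpenAI completions provider; as an additional illustration, the same pattern routed through the Anthropic overload would look roughly like this (the model id string is illustrative, not taken from this diff — valid values are whatever `AnthropicModelId` defines in this release):

```python
from mirascope import llm


# Same decorator pattern as the docstring examples above, but through the
# Anthropic overload shown in decorator.py. The model id is illustrative.
@llm.call(
    provider="anthropic",
    model_id="claude-sonnet-4-20250514",
)
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book."


response: llm.Response = recommend_book("fantasy")
print(response)
```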
mirascope/llm/clients/__init__.py ADDED
@@ -0,0 +1,34 @@
+"""Client interfaces for LLM providers."""
+
+from .anthropic import (
+    AnthropicClient,
+    AnthropicModelId,
+)
+from .base import BaseClient, ClientT, Params
+from .google import GoogleClient, GoogleModelId
+from .openai import (
+    OpenAICompletionsClient,
+    OpenAICompletionsModelId,
+    OpenAIResponsesClient,
+    OpenAIResponsesModelId,
+)
+from .providers import PROVIDERS, ModelId, Provider, client, get_client
+
+__all__ = [
+    "PROVIDERS",
+    "AnthropicClient",
+    "AnthropicModelId",
+    "BaseClient",
+    "ClientT",
+    "GoogleClient",
+    "GoogleModelId",
+    "ModelId",
+    "OpenAICompletionsClient",
+    "OpenAICompletionsModelId",
+    "OpenAIResponsesClient",
+    "OpenAIResponsesModelId",
+    "Params",
+    "Provider",
+    "client",
+    "get_client",
+]
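The new `mirascope.llm.clients` package replaces the old `mirascope.llm.providers` package (note the `providers → clients` renames in the file list above) and is where the `Provider`, `ModelId`, and `Params` types consumed by `llm.call` now live, alongside per-provider clients and the `client`/`get_client` helpers. A minimal sketch of the new import surface follows; the `Model(provider=..., model_id=...)` construction mirrors the line added in `decorator.py`, the model id string is illustrative, and the `client`/`get_client` signatures are not shown in this diff.

```python
# Minimal sketch of the new surface; names come from the __all__ above, and the
# Model(provider=..., model_id=...) call matches the construction in decorator.py.
# The model id string is illustrative only.
from mirascope.llm.clients import Provider
from mirascope.llm.models import Model

provider: Provider = "anthropic"
model = Model(provider=provider, model_id="claude-sonnet-4-20250514")
```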