llama-stack-client 0.4.4__tar.gz → 0.5.0rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_stack_client-0.5.0rc1/.release-please-manifest.json +3 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/CHANGELOG.md +18 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/PKG-INFO +4 -3
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/README.md +3 -2
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/pyproject.toml +1 -1
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/requirements-dev.lock +12 -14
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_base_client.py +139 -13
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_compat.py +3 -3
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_models.py +16 -1
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_types.py +9 -0
- llama_stack_client-0.5.0rc1/src/llama_stack_client/_utils/_json.py +41 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_version.py +1 -1
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_client.py +185 -2
- llama_stack_client-0.5.0rc1/tests/test_utils/test_json.py +132 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/uv.lock +185 -158
- llama_stack_client-0.4.4/.release-please-manifest.json +0 -3
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/.gitignore +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/CODE_OF_CONDUCT.md +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/CONTRIBUTING.md +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/LICENSE +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/SECURITY.md +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/api.md +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/bin/check-release-environment +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/bin/publish-pypi +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/examples/.keep +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/examples/README.md +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/examples/interactive_agent_cli.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/release-please-config.json +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_client.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_constants.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_exceptions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_qs.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_resource.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_streaming.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_compat.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_datetime_parse.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_logs.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_proxy.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_reflection.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_resources_proxy.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_streams.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_sync.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_transform.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_typing.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_utils/_utils.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_wrappers.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/.keep +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/agent.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/client_tool.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/event_logger.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/event_synthesizer.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/react/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/react/agent.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/react/prompts.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/react/tool_parser.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/tool_parser.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/turn_events.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/agents/types.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/common/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/common/utils.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/configure.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/constants.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/datasets/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/datasets/datasets.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/datasets/list.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/datasets/register.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/datasets/unregister.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval/eval.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval/run_benchmark.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval/run_scoring.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval/utils.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval_tasks/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval_tasks/eval_tasks.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/eval_tasks/list.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/inference/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/inference/inference.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/inspect/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/inspect/inspect.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/inspect/version.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/llama_stack_client.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/models/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/models/models.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/post_training/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/post_training/post_training.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/providers/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/providers/inspect.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/providers/list.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/providers/providers.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/scoring_functions/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/scoring_functions/list.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/scoring_functions/scoring_functions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/shields/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/shields/shields.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/toolgroups/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/toolgroups/toolgroups.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/vector_dbs/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/cli/vector_dbs/vector_dbs.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/inference/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/inference/event_logger.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/inference/utils.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/inline/inline.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/stream_printer.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/lib/tools/mcp_oauth.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/pagination.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/py.typed +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/admin.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/alpha.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/benchmarks.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/eval/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/eval/eval.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/eval/jobs.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/inference.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/post_training/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/post_training/job.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/alpha/post_training/post_training.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/batches.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/beta/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/beta/beta.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/beta/datasets.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/chat/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/chat/chat.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/chat/completions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/completions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/conversations/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/conversations/conversations.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/conversations/items.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/embeddings.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/inspect.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/models/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/models/models.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/models/openai.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/moderations.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/prompts/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/prompts/prompts.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/prompts/versions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/providers.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/responses/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/responses/input_items.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/responses/responses.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/routes.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/safety.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/scoring.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/scoring_functions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/shields.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/tool_runtime.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/toolgroups.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/tools.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/vector_io.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/vector_stores/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/vector_stores/file_batches.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/vector_stores/files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/resources/vector_stores/vector_stores.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/admin_list_routes_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/benchmark.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/benchmark_config_param.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/benchmark_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/benchmark_register_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/eval/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/eval_evaluate_rows_alpha_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/eval_evaluate_rows_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/eval_run_eval_alpha_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/eval_run_eval_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/evaluate_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/inference_rerank_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/inference_rerank_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/job.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/list_benchmarks_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/list_post_training_jobs_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/job_artifacts_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/job_artifacts_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/job_cancel_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/job_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/job_status_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training/job_status_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training_job.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training_preference_optimize_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/alpha/post_training_supervised_fine_tune_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/batch_cancel_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/batch_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/batch_create_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/batch_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/batch_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/batch_retrieve_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_appendrows_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_iterrows_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_iterrows_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_register_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_register_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/dataset_retrieve_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/beta/list_datasets_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat/completion_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat/completion_create_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat/completion_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat/completion_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat/completion_retrieve_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/chat_completion_chunk.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/completion_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/completion_create_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversation_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversation_delete_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversation_object.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversation_update_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/item_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/item_create_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/item_delete_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/item_get_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/item_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/conversations/item_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/create_embeddings_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/create_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/delete_file_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/embedding_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/file.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/file_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/file_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_files_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_models_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_prompts_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_scoring_functions_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_shields_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_tool_groups_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/list_vector_stores_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/model.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/model_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/model_register_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/model_register_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/model_retrieve_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/models/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/moderation_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompt.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompt_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompt_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompt_retrieve_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompt_set_default_version_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompt_update_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/prompts/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/provider_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/query_chunks_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/response_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/response_delete_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/response_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/response_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/response_object.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/response_object_stream.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/responses/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/responses/input_item_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/responses/input_item_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/route_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/route_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/run_shield_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/safety_run_shield_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_fn.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_fn_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_fn_params_param.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_function_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_function_register_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_score_batch_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_score_batch_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_score_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/scoring_score_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/health_info.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/interleaved_content.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/interleaved_content_item.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/list_providers_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/list_routes_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/param_type.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/provider_info.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/route_info.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/safety_violation.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/sampling_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/scoring_result.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/system_message.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared/version_info.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared_params/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared_params/sampling_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shared_params/system_message.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shield.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shield_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/shield_register_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_def.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_group.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_invocation_result.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_runtime_invoke_tool_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_runtime_list_tools_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/tool_runtime_list_tools_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/toolgroup_list_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/toolgroup_register_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_io_insert_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_io_query_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store_delete_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store_search_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store_search_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_store_update_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_batch_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_batch_list_files_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_content_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_content_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_create_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_delete_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_list_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/file_update_params.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/list_vector_store_files_in_batch_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/vector_store_file.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/types/vector_stores/vector_store_file_batches.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/eval/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/eval/test_jobs.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/post_training/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/post_training/test_job.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/test_admin.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/test_benchmarks.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/test_eval.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/test_inference.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/alpha/test_post_training.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/beta/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/beta/test_datasets.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/chat/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/chat/test_completions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/conversations/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/conversations/test_items.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/models/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/models/test_openai.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/prompts/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/prompts/test_versions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/responses/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/responses/test_input_items.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_batches.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_completions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_conversations.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_embeddings.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_inspect.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_models.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_moderations.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_prompts.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_providers.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_responses.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_routes.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_safety.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_scoring.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_scoring_functions.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_shields.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_tool_runtime.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_toolgroups.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_tools.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_vector_io.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/test_vector_stores.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/vector_stores/__init__.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/vector_stores/test_file_batches.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/api_resources/vector_stores/test_files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/conftest.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/integration/test_agent_responses_e2e.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/integration/test_agent_turn_step_events.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/lib/agents/test_agent_responses.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/sample_file.txt +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_deepcopy.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_extract_files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_files.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_models.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_qs.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_required_args.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_response.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_streaming.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_transform.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_utils/test_datetime_parse.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_utils/test_proxy.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/test_utils/test_typing.py +0 -0
- {llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/tests/utils.py +0 -0
{llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/CHANGELOG.md
RENAMED
@@ -1,5 +1,23 @@
 # Changelog
 
+## 0.5.0-alpha.1 (2026-02-04)
+
+Full Changelog: [v0.4.0-alpha.15...v0.5.0-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.15...v0.5.0-alpha.1)
+
+### Features
+
+* **client:** add custom JSON encoder for extended type support ([ab99ad0](https://github.com/llamastack/llama-stack-client-python/commit/ab99ad06208995a0c6b8424d03023f6045c57cab))
+* **client:** add support for binary request streaming ([d17dede](https://github.com/llamastack/llama-stack-client-python/commit/d17dede18fa45e3433bea4923d4b280331257975))
+
+
+### Chores
+
+* **ci:** upgrade `actions/github-script` ([b6d410f](https://github.com/llamastack/llama-stack-client-python/commit/b6d410fbaa61ade006142c755b8abff2639aa461))
+* **internal:** codegen related update ([a176b2e](https://github.com/llamastack/llama-stack-client-python/commit/a176b2e9501b6855ba31f420ea23f1e94170e7aa))
+* **internal:** codegen related update ([4cf153d](https://github.com/llamastack/llama-stack-client-python/commit/4cf153ddfbe68ce5966ec1d199e3c6fb69c1abe0))
+* **internal:** update `actions/checkout` version ([527e428](https://github.com/llamastack/llama-stack-client-python/commit/527e4289cbd3f696cfd3f4b288784683819317be))
+* **internal:** version bump ([580d0ff](https://github.com/llamastack/llama-stack-client-python/commit/580d0ffc4b0540294cf42bd28d6dd3254586133f))
+
 ## 0.4.0-alpha.15 (2026-01-06)
 
 Full Changelog: [v0.4.0-alpha.14...v0.4.0-alpha.15](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.14...v0.4.0-alpha.15)
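The two feature entries above map to concrete changes later in this diff: a new `src/llama_stack_client/_utils/_json.py` module (the custom JSON encoder) and a `content` parameter threaded through `_base_client.py` (binary request streaming). A minimal usage sketch for the latter follows; the endpoint path, chunk generator, and `cast_to=httpx.Response` are illustrative assumptions, not part of the released package.

```python
# Hedged sketch: streaming a raw binary body through the new `content=` parameter.
# Only the `post(path, *, cast_to=..., content=...)` signature and the rule that
# `content` cannot be combined with `body`/`files` come from the hunks below;
# the endpoint "/v1/files/raw" and the base URL are placeholders.
import httpx
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL


def chunks():
    # Yield the file in 64 KiB pieces so httpx can stream the request body.
    with open("report.pdf", "rb") as f:
        while piece := f.read(64 * 1024):
            yield piece


resp = client.post(
    "/v1/files/raw",          # hypothetical endpoint, for illustration only
    cast_to=httpx.Response,   # hand back the raw httpx response
    content=chunks(),         # iterable of bytes -> streamed, non-JSON body
)
```

Passing `content` together with `body` or `files` raises `TypeError`, as enforced in the `_build_request` and `post`/`put`/`patch`/`delete` hunks further down.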
{llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: llama_stack_client
-Version: 0.4.4
+Version: 0.5.0rc1
 Summary: The official Python library for the llama-stack-client API
 Project-URL: Homepage, https://github.com/llamastack/llama-stack-client-python
 Project-URL: Repository, https://github.com/llamastack/llama-stack-client-python
@@ -61,7 +61,8 @@ You can find more example apps with client SDKs to talk with the Llama Stack ser
 ## Installation
 
 ```sh
-
+# install from PyPI
+pip install '--pre llama_stack_client'
 ```
 
 ## Usage
@@ -146,7 +147,7 @@ You can enable this by installing `aiohttp`:
 
 ```sh
 # install from PyPI
-pip install --pre llama_stack_client[aiohttp]
+pip install '--pre llama_stack_client[aiohttp]'
 ```
 
 Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:
{llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/README.md
RENAMED
@@ -21,7 +21,8 @@ You can find more example apps with client SDKs to talk with the Llama Stack ser
 ## Installation
 
 ```sh
-
+# install from PyPI
+pip install '--pre llama_stack_client'
 ```
 
 ## Usage
@@ -106,7 +107,7 @@ You can enable this by installing `aiohttp`:
 
 ```sh
 # install from PyPI
-pip install --pre llama_stack_client[aiohttp]
+pip install '--pre llama_stack_client[aiohttp]'
 ```
 
 Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`:
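For readers following the README snippet, instantiating the aiohttp-backed client looks roughly like the sketch below. Only `http_client=DefaultAioHttpClient()` is quoted from the README diff; the async client name, import path, and base URL are assumptions about the package's public surface.

```python
# Hedged sketch of the aiohttp-backed async client the README refers to.
import asyncio

from llama_stack_client import AsyncLlamaStackClient, DefaultAioHttpClient  # assumed exports


async def main() -> None:
    async with AsyncLlamaStackClient(
        base_url="http://localhost:8321",      # placeholder server URL
        http_client=DefaultAioHttpClient(),    # requires the [aiohttp] extra
    ) as client:
        models = await client.models.list()
        print(models)


asyncio.run(main())
```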
{llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/requirements-dev.lock
RENAMED
@@ -7,7 +7,7 @@ anyio==4.12.0
     # via
     #   httpx
     #   llama-stack-client
-black==
+black==26.1.0
 certifi==2025.11.12
     # via
     #   httpcore
@@ -33,7 +33,7 @@ distro==1.9.0
     # via llama-stack-client
 execnet==2.1.2
     # via pytest-xdist
-filelock==3.20.
+filelock==3.20.3
     # via virtualenv
 fire==0.7.1
     # via llama-stack-client
@@ -45,7 +45,7 @@ httpx==0.28.1
     # via
     #   llama-stack-client
     #   respx
-identify==2.6.
+identify==2.6.16
     # via pre-commit
 idna==3.11
     # via
@@ -68,15 +68,15 @@ nodeenv==1.9.1
     # via
     #   pre-commit
     #   pyright
-numpy==2.
+numpy==2.4.1
     # via pandas
 packaging==25.0
     # via
     #   black
     #   pytest
-pandas==
+pandas==3.0.0
     # via llama-stack-client
-pathspec==0.
+pathspec==1.0.3
     # via
     #   black
     #   mypy
@@ -108,10 +108,8 @@ pytest-asyncio==1.3.0
 pytest-xdist==3.8.0
 python-dateutil==2.9.0.post0
     # via pandas
-pytokens==0.
+pytokens==0.4.0
     # via black
-pytz==2025.2
-    # via pandas
 pyyaml==6.0.3
     # via
     #   pre-commit
@@ -126,7 +124,7 @@ six==1.17.0
     # via python-dateutil
 sniffio==1.3.1
     # via llama-stack-client
-termcolor==3.
+termcolor==3.3.0
     # via
     #   fire
     #   llama-stack-client
@@ -145,13 +143,13 @@ typing-extensions==4.15.0
     #   typing-inspection
 typing-inspection==0.4.2
     # via pydantic
-tzdata==2025.3
+tzdata==2025.3 ; sys_platform == 'emscripten' or sys_platform == 'win32'
     # via pandas
-urllib3==2.6.
+urllib3==2.6.3
     # via requests
-virtualenv==20.
+virtualenv==20.36.1
     # via pre-commit
-wcwidth==0.
+wcwidth==0.3.1
     # via prompt-toolkit
 zipp==3.23.0
     # via importlib-metadata
{llama_stack_client-0.4.4 → llama_stack_client-0.5.0rc1}/src/llama_stack_client/_base_client.py
RENAMED
@@ -15,6 +15,7 @@ import asyncio
 import inspect
 import logging
 import platform
+import warnings
 import email.utils
 from types import TracebackType
 from random import random
@@ -57,9 +58,11 @@ from ._types import (
     ResponseT,
     AnyMapping,
     PostParser,
+    BinaryTypes,
     RequestFiles,
     HttpxSendArgs,
     RequestOptions,
+    AsyncBinaryTypes,
     HttpxRequestFiles,
     ModelBuilderProtocol,
     not_given,
@@ -89,6 +92,7 @@ from ._exceptions import (
     APIConnectionError,
     APIResponseValidationError,
 )
+from ._utils._json import openapi_dumps
 
 log: logging.Logger = logging.getLogger(__name__)
 
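`openapi_dumps` lives in the new `src/llama_stack_client/_utils/_json.py` (+41 lines), whose body is not reproduced in this diff view. As a rough idea of what an encoder with "extended type support" typically covers, a sketch might look like the following; the exact set of handled types and the base64 treatment of bytes are assumptions, not the shipped implementation.

```python
# Illustrative sketch only -- the real _utils/_json.py is not part of this
# diff view, so the supported types and bytes handling here are assumptions.
import json
import uuid
import base64
import datetime
import decimal
from pathlib import PurePath
from typing import Any


class _ExtendedEncoder(json.JSONEncoder):
    def default(self, o: Any) -> Any:
        if isinstance(o, (datetime.datetime, datetime.date, datetime.time)):
            return o.isoformat()
        if isinstance(o, uuid.UUID):
            return str(o)
        if isinstance(o, decimal.Decimal):
            return str(o)
        if isinstance(o, (bytes, bytearray)):
            return base64.b64encode(bytes(o)).decode("ascii")
        if isinstance(o, PurePath):
            return str(o)
        if isinstance(o, set):
            return list(o)
        return super().default(o)


def openapi_dumps(value: Any) -> str:
    """Serialize a request body, tolerating types plain json.dumps rejects."""
    return json.dumps(value, cls=_ExtendedEncoder)
```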
@@ -483,8 +487,19 @@ class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]):
         retries_taken: int = 0,
     ) -> httpx.Request:
         if log.isEnabledFor(logging.DEBUG):
-            log.debug(
-
+            log.debug(
+                "Request options: %s",
+                model_dump(
+                    options,
+                    exclude_unset=True,
+                    # Pydantic v1 can't dump every type we support in content, so we exclude it for now.
+                    exclude={
+                        "content",
+                    }
+                    if PYDANTIC_V1
+                    else {},
+                ),
+            )
         kwargs: dict[str, Any] = {}
 
         json_data = options.json_data
@@ -538,10 +553,18 @@
         is_body_allowed = options.method.lower() != "get"
 
         if is_body_allowed:
-            if isinstance(json_data, bytes):
+            if options.content is not None and json_data is not None:
+                raise TypeError("Passing both `content` and `json_data` is not supported")
+            if options.content is not None and files is not None:
+                raise TypeError("Passing both `content` and `files` is not supported")
+            if options.content is not None:
+                kwargs["content"] = options.content
+            elif isinstance(json_data, bytes):
                 kwargs["content"] = json_data
-
-
+            elif not files:
+                # Don't set content when JSON is sent as multipart/form-data,
+                # since httpx's content param overrides other body arguments
+                kwargs["content"] = openapi_dumps(json_data) if is_given(json_data) and json_data is not None else None
             kwargs["files"] = files
         else:
             headers.pop("Content-Type", None)
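Read as a whole, the branch above gives the request body sources a strict precedence. The sketch below restates it outside the class purely for orientation; it is not the shipped code, and the standalone parameters stand in for `options.content`, `options.json_data`, the prepared `files`, and the SDK-internal `is_given`/`openapi_dumps` helpers.

```python
# Simplified restatement of the body-selection logic, for orientation only.
def pick_body_kwargs(content, json_data, files, *, is_given, openapi_dumps):
    kwargs = {}
    if content is not None and json_data is not None:
        raise TypeError("Passing both `content` and `json_data` is not supported")
    if content is not None and files is not None:
        raise TypeError("Passing both `content` and `files` is not supported")

    if content is not None:
        kwargs["content"] = content       # 1. explicit binary body wins outright
    elif isinstance(json_data, bytes):
        kwargs["content"] = json_data     # 2. pre-encoded JSON bytes pass through
    elif not files:
        # 3. plain JSON is serialized client-side so extended types survive;
        #    skipped for multipart so httpx can build the form body itself
        kwargs["content"] = openapi_dumps(json_data) if is_given(json_data) and json_data is not None else None
    kwargs["files"] = files
    return kwargs
```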
@@ -1204,6 +1227,7 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: BinaryTypes | None = None,
         options: RequestOptions = {},
         files: RequestFiles | None = None,
         stream: Literal[False] = False,
@@ -1216,6 +1240,7 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: BinaryTypes | None = None,
         options: RequestOptions = {},
         files: RequestFiles | None = None,
         stream: Literal[True],
@@ -1229,6 +1254,7 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: BinaryTypes | None = None,
         options: RequestOptions = {},
         files: RequestFiles | None = None,
         stream: bool,
@@ -1241,13 +1267,25 @@
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: BinaryTypes | None = None,
         options: RequestOptions = {},
         files: RequestFiles | None = None,
         stream: bool = False,
         stream_cls: type[_StreamT] | None = None,
     ) -> ResponseT | _StreamT:
+        if body is not None and content is not None:
+            raise TypeError("Passing both `body` and `content` is not supported")
+        if files is not None and content is not None:
+            raise TypeError("Passing both `files` and `content` is not supported")
+        if isinstance(body, bytes):
+            warnings.warn(
+                "Passing raw bytes as `body` is deprecated and will be removed in a future version. "
+                "Please pass raw bytes via the `content` parameter instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
         opts = FinalRequestOptions.construct(
-            method="post", url=path, json_data=body, files=to_httpx_files(files), **options
+            method="post", url=path, json_data=body, content=content, files=to_httpx_files(files), **options
         )
         return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
 
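The same `content` plumbing and deprecation shim are repeated for `patch`, `put`, and `delete` below. For callers who were already smuggling raw bytes through `body`, the migration is mechanical; in the sketch below the path is a placeholder and `client` is the instance from the earlier example.

```python
# Hedged migration sketch; "/v1/example" is a placeholder path.
import httpx

# Before: raw bytes passed via `body=` still work, but now emit
#   DeprecationWarning: Passing raw bytes as `body` is deprecated ...
client.post("/v1/example", cast_to=httpx.Response, body=b"\x00\x01")

# After: raw bytes (or an iterable of bytes) go through `content=`.
client.post("/v1/example", cast_to=httpx.Response, content=b"\x00\x01")

# Supplying both is rejected before any request is sent:
#   TypeError: Passing both `body` and `content` is not supported
```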
@@ -1257,11 +1295,23 @@
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: BinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
     ) -> ResponseT:
+        if body is not None and content is not None:
+            raise TypeError("Passing both `body` and `content` is not supported")
+        if files is not None and content is not None:
+            raise TypeError("Passing both `files` and `content` is not supported")
+        if isinstance(body, bytes):
+            warnings.warn(
+                "Passing raw bytes as `body` is deprecated and will be removed in a future version. "
+                "Please pass raw bytes via the `content` parameter instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
         opts = FinalRequestOptions.construct(
-            method="patch", url=path, json_data=body, files=to_httpx_files(files), **options
+            method="patch", url=path, json_data=body, content=content, files=to_httpx_files(files), **options
         )
         return self.request(cast_to, opts)
 
@@ -1271,11 +1321,23 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
|
|
|
1271
1321
|
*,
|
|
1272
1322
|
cast_to: Type[ResponseT],
|
|
1273
1323
|
body: Body | None = None,
|
|
1324
|
+
content: BinaryTypes | None = None,
|
|
1274
1325
|
files: RequestFiles | None = None,
|
|
1275
1326
|
options: RequestOptions = {},
|
|
1276
1327
|
) -> ResponseT:
|
|
1328
|
+
if body is not None and content is not None:
|
|
1329
|
+
raise TypeError("Passing both `body` and `content` is not supported")
|
|
1330
|
+
if files is not None and content is not None:
|
|
1331
|
+
raise TypeError("Passing both `files` and `content` is not supported")
|
|
1332
|
+
if isinstance(body, bytes):
|
|
1333
|
+
warnings.warn(
|
|
1334
|
+
"Passing raw bytes as `body` is deprecated and will be removed in a future version. "
|
|
1335
|
+
"Please pass raw bytes via the `content` parameter instead.",
|
|
1336
|
+
DeprecationWarning,
|
|
1337
|
+
stacklevel=2,
|
|
1338
|
+
)
|
|
1277
1339
|
opts = FinalRequestOptions.construct(
|
|
1278
|
-
method="put", url=path, json_data=body, files=to_httpx_files(files), **options
|
|
1340
|
+
method="put", url=path, json_data=body, content=content, files=to_httpx_files(files), **options
|
|
1279
1341
|
)
|
|
1280
1342
|
return self.request(cast_to, opts)
|
|
1281
1343
|
|
|
@@ -1285,9 +1347,19 @@ class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]):
|
|
|
1285
1347
|
*,
|
|
1286
1348
|
cast_to: Type[ResponseT],
|
|
1287
1349
|
body: Body | None = None,
|
|
1350
|
+
content: BinaryTypes | None = None,
|
|
1288
1351
|
options: RequestOptions = {},
|
|
1289
1352
|
) -> ResponseT:
|
|
1290
|
-
|
|
1353
|
+
if body is not None and content is not None:
|
|
1354
|
+
raise TypeError("Passing both `body` and `content` is not supported")
|
|
1355
|
+
if isinstance(body, bytes):
|
|
1356
|
+
warnings.warn(
|
|
1357
|
+
"Passing raw bytes as `body` is deprecated and will be removed in a future version. "
|
|
1358
|
+
"Please pass raw bytes via the `content` parameter instead.",
|
|
1359
|
+
DeprecationWarning,
|
|
1360
|
+
stacklevel=2,
|
|
1361
|
+
)
|
|
1362
|
+
opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, content=content, **options)
|
|
1291
1363
|
return self.request(cast_to, opts)
|
|
1292
1364
|
|
|
1293
1365
|
def get_api_list(
|
|
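Illustration (editor's sketch, not part of the released diff): how the new keyword-only `content` parameter on the synchronous client verbs is intended to be used in place of raw-bytes `body`. The endpoint path, cast target, and server URL below are hypothetical placeholders.

    import httpx

    from llama_stack_client import LlamaStackClient

    client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder server
    payload = b"raw request body"

    # Deprecated: raw bytes passed as `body` now emit a DeprecationWarning.
    client.post("/hypothetical/raw-upload", cast_to=httpx.Response, body=payload)

    # Preferred: raw bytes go through the new `content` parameter instead.
    client.post("/hypothetical/raw-upload", cast_to=httpx.Response, content=payload)

    # Combining `content` with `body` or with `files` raises TypeError.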
@@ -1727,6 +1799,7 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
         stream: Literal[False] = False,
@@ -1739,6 +1812,7 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
         stream: Literal[True],
@@ -1752,6 +1826,7 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
         stream: bool,
@@ -1764,13 +1839,25 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
         stream: bool = False,
         stream_cls: type[_AsyncStreamT] | None = None,
     ) -> ResponseT | _AsyncStreamT:
+        if body is not None and content is not None:
+            raise TypeError("Passing both `body` and `content` is not supported")
+        if files is not None and content is not None:
+            raise TypeError("Passing both `files` and `content` is not supported")
+        if isinstance(body, bytes):
+            warnings.warn(
+                "Passing raw bytes as `body` is deprecated and will be removed in a future version. "
+                "Please pass raw bytes via the `content` parameter instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
         opts = FinalRequestOptions.construct(
-            method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options
+            method="post", url=path, json_data=body, content=content, files=await async_to_httpx_files(files), **options
         )
         return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)

@@ -1780,11 +1867,28 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
     ) -> ResponseT:
+        if body is not None and content is not None:
+            raise TypeError("Passing both `body` and `content` is not supported")
+        if files is not None and content is not None:
+            raise TypeError("Passing both `files` and `content` is not supported")
+        if isinstance(body, bytes):
+            warnings.warn(
+                "Passing raw bytes as `body` is deprecated and will be removed in a future version. "
+                "Please pass raw bytes via the `content` parameter instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
         opts = FinalRequestOptions.construct(
-            method="patch", url=path, json_data=body, files=await async_to_httpx_files(files), **options
+            method="patch",
+            url=path,
+            json_data=body,
+            content=content,
+            files=await async_to_httpx_files(files),
+            **options,
         )
         return await self.request(cast_to, opts)

@@ -1794,11 +1898,23 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         files: RequestFiles | None = None,
         options: RequestOptions = {},
     ) -> ResponseT:
+        if body is not None and content is not None:
+            raise TypeError("Passing both `body` and `content` is not supported")
+        if files is not None and content is not None:
+            raise TypeError("Passing both `files` and `content` is not supported")
+        if isinstance(body, bytes):
+            warnings.warn(
+                "Passing raw bytes as `body` is deprecated and will be removed in a future version. "
+                "Please pass raw bytes via the `content` parameter instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
         opts = FinalRequestOptions.construct(
-            method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options
+            method="put", url=path, json_data=body, content=content, files=await async_to_httpx_files(files), **options
         )
         return await self.request(cast_to, opts)

@@ -1808,9 +1924,19 @@ class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]):
         *,
         cast_to: Type[ResponseT],
         body: Body | None = None,
+        content: AsyncBinaryTypes | None = None,
         options: RequestOptions = {},
     ) -> ResponseT:
-        opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options)
+        if body is not None and content is not None:
+            raise TypeError("Passing both `body` and `content` is not supported")
+        if isinstance(body, bytes):
+            warnings.warn(
+                "Passing raw bytes as `body` is deprecated and will be removed in a future version. "
+                "Please pass raw bytes via the `content` parameter instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, content=content, **options)
         return await self.request(cast_to, opts)

     def get_api_list(
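Illustration (editor's sketch, not part of the released diff): the asynchronous variants accept `AsyncBinaryTypes`, so a request body can be streamed from an async iterator of byte chunks; the endpoint path and server URL are placeholders.

    import asyncio
    from typing import AsyncIterator

    import httpx

    from llama_stack_client import AsyncLlamaStackClient


    async def chunks() -> AsyncIterator[bytes]:
        # An async generator satisfies AsyncIterable[bytes], so httpx streams
        # the body instead of buffering it in memory first.
        yield b"first chunk"
        yield b"second chunk"


    async def main() -> None:
        client = AsyncLlamaStackClient(base_url="http://localhost:8321")  # placeholder server
        await client.post("/hypothetical/raw-upload", cast_to=httpx.Response, content=chunks())


    asyncio.run(main())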
@@ -145,6 +145,7 @@ def model_dump(
     exclude_defaults: bool = False,
     warnings: bool = True,
     mode: Literal["json", "python"] = "python",
+    by_alias: bool | None = None,
 ) -> dict[str, Any]:
     if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
         return model.model_dump(
@@ -154,13 +155,12 @@ def model_dump(
             exclude_defaults=exclude_defaults,
             # warnings are not supported in Pydantic v1
             warnings=True if PYDANTIC_V1 else warnings,
+            by_alias=by_alias,
         )
     return cast(
         "dict[str, Any]",
         model.dict(  # pyright: ignore[reportDeprecated, reportUnnecessaryCast]
-            exclude=exclude,
-            exclude_unset=exclude_unset,
-            exclude_defaults=exclude_defaults,
+            exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, by_alias=bool(by_alias)
         ),
     )

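Illustration (editor's sketch): what the new `by_alias` passthrough in `model_dump` changes. The `Item` model is a made-up example, and `_compat` is an internal module.

    import pydantic

    from llama_stack_client._compat import model_dump


    class Item(pydantic.BaseModel):
        item_id: str = pydantic.Field(alias="itemId")


    item = Item(itemId="abc")
    print(model_dump(item, by_alias=False))  # {'item_id': 'abc'}  (field names)
    print(model_dump(item, by_alias=True))   # {'itemId': 'abc'}   (wire aliases)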
@@ -9,7 +9,20 @@ from __future__ import annotations
 import os
 import inspect
 import weakref
-from typing import
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Type,
+    Union,
+    Generic,
+    TypeVar,
+    Callable,
+    Iterable,
+    Optional,
+    AsyncIterable,
+    cast,
+)
 from datetime import date, datetime
 from typing_extensions import (
     List,
@@ -793,6 +806,7 @@ class FinalRequestOptionsInput(TypedDict, total=False):
     timeout: float | Timeout | None
     files: HttpxRequestFiles | None
     idempotency_key: str
+    content: Union[bytes, bytearray, IO[bytes], Iterable[bytes], AsyncIterable[bytes], None]
     json_data: Body
     extra_json: AnyMapping
     follow_redirects: bool
@@ -811,6 +825,7 @@ class FinalRequestOptions(pydantic.BaseModel):
     post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven()
     follow_redirects: Union[bool, None] = None

+    content: Union[bytes, bytearray, IO[bytes], Iterable[bytes], AsyncIterable[bytes], None] = None
     # It should be noted that we cannot use `json` here as that would override
     # a BaseModel method in an incompatible fashion.
     json_data: Union[Body, None] = None
@@ -19,9 +19,11 @@ from typing import (
     Mapping,
     TypeVar,
     Callable,
+    Iterable,
     Iterator,
     Optional,
     Sequence,
+    AsyncIterable,
 )
 from typing_extensions import (
     Set,
@@ -62,6 +64,13 @@ if TYPE_CHECKING:
 else:
     Base64FileInput = Union[IO[bytes], PathLike]
     FileContent = Union[IO[bytes], bytes, PathLike]  # PathLike is not subscriptable in Python 3.8.
+
+
+# Used for sending raw binary data / streaming data in request bodies
+# e.g. for file uploads without multipart encoding
+BinaryTypes = Union[bytes, bytearray, IO[bytes], Iterable[bytes]]
+AsyncBinaryTypes = Union[bytes, bytearray, IO[bytes], AsyncIterable[bytes]]
+
 FileTypes = Union[
     # file (or bytes)
     FileContent,
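Illustration (editor's sketch): values that satisfy the new aliases. `BinaryTypes` covers synchronous byte sources and `AsyncBinaryTypes` their async equivalents; `_types` is an internal module and the variable names below are placeholders.

    import io
    from typing import Iterator

    from llama_stack_client._types import BinaryTypes

    in_memory: BinaryTypes = b"already in memory"
    mutable: BinaryTypes = bytearray(b"mutable buffer")
    file_like: BinaryTypes = io.BytesIO(b"file-like object")  # IO[bytes]


    def chunked() -> Iterator[bytes]:
        yield b"part 1"
        yield b"part 2"


    streamed: BinaryTypes = chunked()  # any Iterable[bytes], e.g. a generator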
@@ -0,0 +1,41 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+import json
+from typing import Any
+from datetime import datetime
+from typing_extensions import override
+
+import pydantic
+
+from .._compat import model_dump
+
+
+def openapi_dumps(obj: Any) -> bytes:
+    """
+    Serialize an object to UTF-8 encoded JSON bytes.
+
+    Extends the standard json.dumps with support for additional types
+    commonly used in the SDK, such as `datetime`, `pydantic.BaseModel`, etc.
+    """
+    return json.dumps(
+        obj,
+        cls=_CustomEncoder,
+        # Uses the same defaults as httpx's JSON serialization
+        ensure_ascii=False,
+        separators=(",", ":"),
+        allow_nan=False,
+    ).encode()
+
+
+class _CustomEncoder(json.JSONEncoder):
+    @override
+    def default(self, o: Any) -> Any:
+        if isinstance(o, datetime):
+            return o.isoformat()
+        if isinstance(o, pydantic.BaseModel):
+            return model_dump(o, exclude_unset=True, mode="json", by_alias=True)
+        return super().default(o)