llama-stack-api 0.4.4__py3-none-any.whl → 0.5.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. llama_stack_api/__init__.py +175 -20
  2. llama_stack_api/agents/__init__.py +38 -0
  3. llama_stack_api/agents/api.py +52 -0
  4. llama_stack_api/agents/fastapi_routes.py +268 -0
  5. llama_stack_api/agents/models.py +181 -0
  6. llama_stack_api/common/errors.py +15 -0
  7. llama_stack_api/connectors/__init__.py +38 -0
  8. llama_stack_api/connectors/api.py +50 -0
  9. llama_stack_api/connectors/fastapi_routes.py +103 -0
  10. llama_stack_api/connectors/models.py +103 -0
  11. llama_stack_api/conversations/__init__.py +61 -0
  12. llama_stack_api/conversations/api.py +44 -0
  13. llama_stack_api/conversations/fastapi_routes.py +177 -0
  14. llama_stack_api/conversations/models.py +245 -0
  15. llama_stack_api/datasetio/__init__.py +34 -0
  16. llama_stack_api/datasetio/api.py +42 -0
  17. llama_stack_api/datasetio/fastapi_routes.py +94 -0
  18. llama_stack_api/datasetio/models.py +48 -0
  19. llama_stack_api/eval/__init__.py +55 -0
  20. llama_stack_api/eval/api.py +51 -0
  21. llama_stack_api/eval/compat.py +300 -0
  22. llama_stack_api/eval/fastapi_routes.py +126 -0
  23. llama_stack_api/eval/models.py +141 -0
  24. llama_stack_api/inference/__init__.py +207 -0
  25. llama_stack_api/inference/api.py +93 -0
  26. llama_stack_api/inference/fastapi_routes.py +243 -0
  27. llama_stack_api/inference/models.py +1035 -0
  28. llama_stack_api/models/__init__.py +47 -0
  29. llama_stack_api/models/api.py +38 -0
  30. llama_stack_api/models/fastapi_routes.py +104 -0
  31. llama_stack_api/{models.py → models/models.py} +65 -79
  32. llama_stack_api/openai_responses.py +32 -6
  33. llama_stack_api/post_training/__init__.py +73 -0
  34. llama_stack_api/post_training/api.py +36 -0
  35. llama_stack_api/post_training/fastapi_routes.py +116 -0
  36. llama_stack_api/{post_training.py → post_training/models.py} +55 -86
  37. llama_stack_api/prompts/__init__.py +47 -0
  38. llama_stack_api/prompts/api.py +44 -0
  39. llama_stack_api/prompts/fastapi_routes.py +163 -0
  40. llama_stack_api/prompts/models.py +177 -0
  41. llama_stack_api/resource.py +0 -1
  42. llama_stack_api/safety/__init__.py +37 -0
  43. llama_stack_api/safety/api.py +29 -0
  44. llama_stack_api/safety/datatypes.py +83 -0
  45. llama_stack_api/safety/fastapi_routes.py +55 -0
  46. llama_stack_api/safety/models.py +38 -0
  47. llama_stack_api/schema_utils.py +47 -4
  48. llama_stack_api/scoring/__init__.py +66 -0
  49. llama_stack_api/scoring/api.py +35 -0
  50. llama_stack_api/scoring/fastapi_routes.py +67 -0
  51. llama_stack_api/scoring/models.py +81 -0
  52. llama_stack_api/scoring_functions/__init__.py +50 -0
  53. llama_stack_api/scoring_functions/api.py +39 -0
  54. llama_stack_api/scoring_functions/fastapi_routes.py +108 -0
  55. llama_stack_api/{scoring_functions.py → scoring_functions/models.py} +67 -64
  56. llama_stack_api/shields/__init__.py +41 -0
  57. llama_stack_api/shields/api.py +39 -0
  58. llama_stack_api/shields/fastapi_routes.py +104 -0
  59. llama_stack_api/shields/models.py +74 -0
  60. llama_stack_api/validators.py +46 -0
  61. llama_stack_api/vector_io/__init__.py +88 -0
  62. llama_stack_api/vector_io/api.py +234 -0
  63. llama_stack_api/vector_io/fastapi_routes.py +447 -0
  64. llama_stack_api/{vector_io.py → vector_io/models.py} +99 -377
  65. {llama_stack_api-0.4.4.dist-info → llama_stack_api-0.5.0rc1.dist-info}/METADATA +1 -1
  66. llama_stack_api-0.5.0rc1.dist-info/RECORD +115 -0
  67. llama_stack_api/agents.py +0 -173
  68. llama_stack_api/connectors.py +0 -146
  69. llama_stack_api/conversations.py +0 -270
  70. llama_stack_api/datasetio.py +0 -55
  71. llama_stack_api/eval.py +0 -137
  72. llama_stack_api/inference.py +0 -1169
  73. llama_stack_api/prompts.py +0 -203
  74. llama_stack_api/safety.py +0 -132
  75. llama_stack_api/scoring.py +0 -93
  76. llama_stack_api/shields.py +0 -93
  77. llama_stack_api-0.4.4.dist-info/RECORD +0 -70
  78. {llama_stack_api-0.4.4.dist-info → llama_stack_api-0.5.0rc1.dist-info}/WHEEL +0 -0
  79. {llama_stack_api-0.4.4.dist-info → llama_stack_api-0.5.0rc1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,103 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """Connectors API models.
8
+
9
+ This module contains the Pydantic models for the Connectors API.
10
+ """
11
+
12
+ from enum import StrEnum
13
+
14
+ from pydantic import BaseModel, Field
15
+
16
+ from llama_stack_api.schema_utils import json_schema_type
17
+ from llama_stack_api.tools import ToolDef
18
+
19
+
20
@json_schema_type
class ConnectorType(StrEnum):
    """Type of connector.

    ``MCP`` is currently the only defined connector type.
    """

    MCP = "mcp"
25
+
26
+
27
class CommonConnectorFields(BaseModel):
    """Common fields for all connectors.

    Shared base for :class:`Connector` and :class:`ConnectorInput`.
    """

    # Defaults to MCP, the only connector type currently defined.
    connector_type: ConnectorType = Field(default=ConnectorType.MCP)
    connector_id: str = Field(..., description="Identifier for the connector")
    url: str = Field(..., description="URL of the connector")
    server_label: str | None = Field(default=None, description="Label of the server")
34
+
35
+
36
@json_schema_type
class Connector(CommonConnectorFields):
    """A connector registered in Llama Stack.

    Adds optional server metadata fields on top of the common connector
    fields.
    """

    # populate_by_name allows construction by field name even when aliases
    # are defined; no aliases are declared on these fields, so this is
    # presumably future-proofing — confirm before removing.
    model_config = {"populate_by_name": True}
    server_name: str | None = Field(default=None, description="Name of the server")
    server_description: str | None = Field(default=None, description="Description of the server")
    server_version: str | None = Field(default=None, description="Version of the server")
44
+
45
+
46
@json_schema_type
class ConnectorInput(CommonConnectorFields):
    """Input for creating a connector.

    Field-for-field identical to :class:`CommonConnectorFields`; kept as a
    distinct class, presumably so the create API gets its own schema name.
    """
49
+
50
+
51
+ # Path parameter models (single field for create_path_dependency)
52
+
53
+
54
@json_schema_type
class GetConnectorRequest(BaseModel):
    """Request model for getting a connector by ID.

    Single-field model suitable for ``create_path_dependency``.
    """

    connector_id: str = Field(..., description="Identifier for the connector")
59
+
60
+
61
@json_schema_type
class ListConnectorToolsRequest(BaseModel):
    """Request model for listing tools from a connector.

    Single-field model suitable for ``create_path_dependency``.
    """

    connector_id: str = Field(..., description="Identifier for the connector")
66
+
67
+
68
@json_schema_type
class GetConnectorToolRequest(BaseModel):
    """Request model for getting a tool from a connector.

    Carries the two path parameters that identify a tool.
    """

    connector_id: str = Field(..., description="Identifier for the connector")
    tool_name: str = Field(..., description="Name of the tool")
74
+
75
+
76
+ # Response models
77
+
78
+
79
@json_schema_type
class ListConnectorsResponse(BaseModel):
    """Response containing a list of configured connectors."""

    # List envelope; no pagination fields are defined for this response.
    data: list[Connector]
84
+
85
+
86
@json_schema_type
class ListToolsResponse(BaseModel):
    """Response containing a list of tools."""

    # List envelope; no pagination fields are defined for this response.
    data: list[ToolDef]
91
+
92
+
93
+ __all__ = [
94
+ "ConnectorType",
95
+ "CommonConnectorFields",
96
+ "Connector",
97
+ "ConnectorInput",
98
+ "GetConnectorRequest",
99
+ "ListConnectorsResponse",
100
+ "ListConnectorToolsRequest",
101
+ "ListToolsResponse",
102
+ "GetConnectorToolRequest",
103
+ ]
@@ -0,0 +1,61 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """Conversations API protocol and models.
8
+
9
+ This module contains the Conversations protocol definition.
10
+ Pydantic models are defined in llama_stack_api.conversations.models.
11
+ The FastAPI router is defined in llama_stack_api.conversations.fastapi_routes.
12
+ """
13
+
14
+ # Import fastapi_routes for router factory access
15
+ from . import fastapi_routes
16
+
17
+ # Import protocol for re-export
18
+ from .api import Conversations
19
+
20
+ # Import models for re-export
21
+ from .models import (
22
+ AddItemsRequest,
23
+ Conversation,
24
+ ConversationDeletedResource,
25
+ ConversationItem,
26
+ ConversationItemCreateRequest,
27
+ ConversationItemDeletedResource,
28
+ ConversationItemInclude,
29
+ ConversationItemList,
30
+ ConversationMessage,
31
+ CreateConversationRequest,
32
+ DeleteConversationRequest,
33
+ DeleteItemRequest,
34
+ GetConversationRequest,
35
+ ListItemsRequest,
36
+ Metadata,
37
+ RetrieveItemRequest,
38
+ UpdateConversationRequest,
39
+ )
40
+
41
+ __all__ = [
42
+ "Conversations",
43
+ "Conversation",
44
+ "ConversationMessage",
45
+ "ConversationItem",
46
+ "ConversationDeletedResource",
47
+ "ConversationItemCreateRequest",
48
+ "ConversationItemInclude",
49
+ "ConversationItemList",
50
+ "ConversationItemDeletedResource",
51
+ "Metadata",
52
+ "CreateConversationRequest",
53
+ "GetConversationRequest",
54
+ "UpdateConversationRequest",
55
+ "DeleteConversationRequest",
56
+ "AddItemsRequest",
57
+ "RetrieveItemRequest",
58
+ "ListItemsRequest",
59
+ "DeleteItemRequest",
60
+ "fastapi_routes",
61
+ ]
@@ -0,0 +1,44 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ from typing import Protocol, runtime_checkable
8
+
9
+ from .models import (
10
+ AddItemsRequest,
11
+ Conversation,
12
+ ConversationDeletedResource,
13
+ ConversationItem,
14
+ ConversationItemDeletedResource,
15
+ ConversationItemList,
16
+ CreateConversationRequest,
17
+ DeleteConversationRequest,
18
+ DeleteItemRequest,
19
+ GetConversationRequest,
20
+ ListItemsRequest,
21
+ RetrieveItemRequest,
22
+ UpdateConversationRequest,
23
+ )
24
+
25
+
26
@runtime_checkable
class Conversations(Protocol):
    """Protocol for conversation management operations.

    Implementations provide CRUD over conversations and their items; request
    and response models live in ``llama_stack_api.conversations.models``.

    NOTE(review): naming is inconsistent — the delete methods carry an
    ``openai_`` prefix while the others do not, and item retrieval is
    ``retrieve`` rather than ``retrieve_item``. Renaming would break
    implementers, so the inconsistency is documented rather than changed.
    """

    async def create_conversation(self, request: CreateConversationRequest) -> Conversation:
        """Create a new conversation."""
        ...

    async def get_conversation(self, request: GetConversationRequest) -> Conversation:
        """Fetch a conversation by ID."""
        ...

    async def update_conversation(self, conversation_id: str, request: UpdateConversationRequest) -> Conversation:
        """Replace a conversation's metadata."""
        ...

    async def openai_delete_conversation(self, request: DeleteConversationRequest) -> ConversationDeletedResource:
        """Delete a conversation."""
        ...

    async def add_items(self, conversation_id: str, request: AddItemsRequest) -> ConversationItemList:
        """Append items to a conversation."""
        ...

    async def retrieve(self, request: RetrieveItemRequest) -> ConversationItem:
        """Fetch a single conversation item."""
        ...

    async def list_items(self, request: ListItemsRequest) -> ConversationItemList:
        """List items in a conversation with optional pagination/filtering."""
        ...

    async def openai_delete_conversation_item(self, request: DeleteItemRequest) -> ConversationItemDeletedResource:
        """Delete a single conversation item."""
        ...
@@ -0,0 +1,177 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """FastAPI router for the Conversations API.
8
+
9
+ This module defines the FastAPI router for the Conversations API using standard
10
+ FastAPI route decorators.
11
+ """
12
+
13
+ from typing import Annotated, Literal
14
+
15
+ from fastapi import APIRouter, Body, Depends, Path
16
+ from pydantic import BaseModel
17
+
18
+ from llama_stack_api.router_utils import create_path_dependency, create_query_dependency, standard_responses
19
+ from llama_stack_api.version import LLAMA_STACK_API_V1
20
+
21
+ from .api import Conversations
22
+ from .models import (
23
+ AddItemsRequest,
24
+ Conversation,
25
+ ConversationDeletedResource,
26
+ ConversationItem,
27
+ ConversationItemDeletedResource,
28
+ ConversationItemInclude,
29
+ ConversationItemList,
30
+ CreateConversationRequest,
31
+ DeleteConversationRequest,
32
+ DeleteItemRequest,
33
+ GetConversationRequest,
34
+ ListItemsRequest,
35
+ RetrieveItemRequest,
36
+ UpdateConversationRequest,
37
+ )
38
+
39
+
40
class _ListItemsQueryParams(BaseModel):
    """Query parameters for list_items endpoint (excludes conversation_id path param).

    This is a subset of ListItemsRequest that only includes query parameters,
    excluding the conversation_id which is a path parameter.
    """

    # Item ID to list items after (pagination cursor).
    after: str | None = None
    # Additional output data to include in the response.
    include: list[ConversationItemInclude] | None = None
    # Maximum number of items to return; None lets the implementation default apply.
    limit: int | None = None
    # Sort order for returned items.
    order: Literal["asc", "desc"] | None = None
51
+
52
+
53
# Dependency functions for request models.
# These FastAPI dependencies construct the request models directly from path
# parameters (create_path_dependency) or query parameters
# (create_query_dependency), so route handlers receive validated models.
get_conversation_request = create_path_dependency(GetConversationRequest)
delete_conversation_request = create_path_dependency(DeleteConversationRequest)
get_list_items_query_params = create_query_dependency(_ListItemsQueryParams)
57
+
58
+
59
def create_router(impl: Conversations) -> APIRouter:
    """Create a FastAPI router for the Conversations API.

    Wraps the given ``Conversations`` implementation with HTTP routes under
    the v1 prefix. Each handler adapts path/query/body inputs into the
    corresponding request model and delegates to ``impl``.
    """
    router = APIRouter(
        prefix=f"/{LLAMA_STACK_API_V1}",
        tags=["Conversations"],
        responses=standard_responses,
    )

    @router.post(
        "/conversations",
        response_model=Conversation,
        summary="Create a conversation.",
        description="Create a conversation.",
        responses={200: {"description": "The created conversation object."}},
    )
    async def create_conversation(
        request: Annotated[CreateConversationRequest, Body(...)],
    ) -> Conversation:
        return await impl.create_conversation(request)

    @router.get(
        "/conversations/{conversation_id}",
        response_model=Conversation,
        summary="Retrieve a conversation.",
        description="Get a conversation with the given ID.",
        responses={200: {"description": "The conversation object."}},
    )
    async def get_conversation(
        request: Annotated[GetConversationRequest, Depends(get_conversation_request)],
    ) -> Conversation:
        return await impl.get_conversation(request)

    @router.post(
        "/conversations/{conversation_id}",
        response_model=Conversation,
        summary="Update a conversation.",
        description="Update a conversation's metadata with the given ID.",
        responses={200: {"description": "The updated conversation object."}},
    )
    async def update_conversation(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        request: Annotated[UpdateConversationRequest, Body(...)],
    ) -> Conversation:
        return await impl.update_conversation(conversation_id, request)

    @router.delete(
        "/conversations/{conversation_id}",
        response_model=ConversationDeletedResource,
        summary="Delete a conversation.",
        description="Delete a conversation with the given ID.",
        responses={200: {"description": "The deleted conversation resource."}},
    )
    async def delete_conversation(
        request: Annotated[DeleteConversationRequest, Depends(delete_conversation_request)],
    ) -> ConversationDeletedResource:
        # Protocol method keeps the OpenAI-compat name.
        return await impl.openai_delete_conversation(request)

    @router.post(
        "/conversations/{conversation_id}/items",
        response_model=ConversationItemList,
        summary="Create items.",
        description="Create items in the conversation.",
        responses={200: {"description": "List of created items."}},
    )
    async def add_items(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        request: Annotated[AddItemsRequest, Body(...)],
    ) -> ConversationItemList:
        return await impl.add_items(conversation_id, request)

    @router.get(
        "/conversations/{conversation_id}/items/{item_id}",
        response_model=ConversationItem,
        summary="Retrieve an item.",
        description="Retrieve a conversation item.",
        responses={200: {"description": "The conversation item."}},
    )
    async def retrieve_item(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        item_id: Annotated[str, Path(description="The item identifier.")],
    ) -> ConversationItem:
        # Two path params are folded into a single request model here rather
        # than via a dependency factory.
        request = RetrieveItemRequest(conversation_id=conversation_id, item_id=item_id)
        return await impl.retrieve(request)

    @router.get(
        "/conversations/{conversation_id}/items",
        response_model=ConversationItemList,
        summary="List items.",
        description="List items in the conversation.",
        responses={200: {"description": "List of conversation items."}},
    )
    async def list_items(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        query_params: Annotated[_ListItemsQueryParams, Depends(get_list_items_query_params)],
    ) -> ConversationItemList:
        # Merge the path param with the validated query params into the
        # full request model expected by the implementation.
        request = ListItemsRequest(
            conversation_id=conversation_id,
            after=query_params.after,
            include=query_params.include,
            limit=query_params.limit,
            order=query_params.order,
        )
        return await impl.list_items(request)

    @router.delete(
        "/conversations/{conversation_id}/items/{item_id}",
        response_model=ConversationItemDeletedResource,
        summary="Delete an item.",
        description="Delete a conversation item.",
        responses={200: {"description": "The deleted item resource."}},
    )
    async def delete_item(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        item_id: Annotated[str, Path(description="The item identifier.")],
    ) -> ConversationItemDeletedResource:
        request = DeleteItemRequest(conversation_id=conversation_id, item_id=item_id)
        return await impl.openai_delete_conversation_item(request)

    return router
@@ -0,0 +1,245 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """Pydantic models for Conversations API requests and responses.
8
+
9
+ This module defines the request and response models for the Conversations API
10
+ using Pydantic with Field descriptions for OpenAPI schema generation.
11
+ """
12
+
13
+ from enum import StrEnum
14
+ from typing import Annotated, Literal
15
+
16
+ from pydantic import BaseModel, Field
17
+
18
+ from llama_stack_api.openai_responses import (
19
+ OpenAIResponseInputFunctionToolCallOutput,
20
+ OpenAIResponseMCPApprovalRequest,
21
+ OpenAIResponseMCPApprovalResponse,
22
+ OpenAIResponseMessage,
23
+ OpenAIResponseOutputMessageFileSearchToolCall,
24
+ OpenAIResponseOutputMessageFunctionToolCall,
25
+ OpenAIResponseOutputMessageMCPCall,
26
+ OpenAIResponseOutputMessageMCPListTools,
27
+ OpenAIResponseOutputMessageWebSearchToolCall,
28
+ )
29
+ from llama_stack_api.schema_utils import json_schema_type, register_schema
30
+
31
+ Metadata = dict[str, str]
32
+
33
+
34
@json_schema_type
class Conversation(BaseModel):
    """OpenAI-compatible conversation object."""

    id: str = Field(..., description="The unique ID of the conversation.")
    object: Literal["conversation"] = Field(
        default="conversation", description="The object type, which is always conversation."
    )
    created_at: int = Field(
        ..., description="The time at which the conversation was created, measured in seconds since the Unix epoch."
    )
    metadata: Metadata | None = Field(
        default=None,
        description="Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.",
    )
    # NOTE(review): typed list[dict] here, while CreateConversationRequest uses
    # list[ConversationItem] — confirm whether the looser type is intentional.
    items: list[dict] | None = Field(
        default=None,
        description="Initial items to include in the conversation context. You may add up to 20 items at a time.",
    )
53
+
54
+
55
@json_schema_type
class ConversationMessage(BaseModel):
    """OpenAI-compatible message item for conversations."""

    id: str = Field(..., description="unique identifier for this message")
    content: list[dict] = Field(..., description="message content")
    role: str = Field(..., description="message role")
    status: str = Field(..., description="message status")
    # Both discriminator-style literals are fixed to "message".
    type: Literal["message"] = "message"
    object: Literal["message"] = "message"
65
+
66
+
67
# Discriminated union of every item type that can appear in a conversation.
# Pydantic selects the concrete variant via each model's "type" field.
# Fix: OpenAIResponseOutputMessageMCPCall and OpenAIResponseOutputMessageMCPListTools
# were previously listed twice in the union; the duplicates are removed.
ConversationItem = Annotated[
    OpenAIResponseMessage
    | OpenAIResponseOutputMessageWebSearchToolCall
    | OpenAIResponseOutputMessageFileSearchToolCall
    | OpenAIResponseOutputMessageFunctionToolCall
    | OpenAIResponseInputFunctionToolCallOutput
    | OpenAIResponseMCPApprovalRequest
    | OpenAIResponseMCPApprovalResponse
    | OpenAIResponseOutputMessageMCPCall
    | OpenAIResponseOutputMessageMCPListTools,
    Field(discriminator="type"),
]
register_schema(ConversationItem, name="ConversationItem")
82
+
83
+
84
@json_schema_type
class ConversationDeletedResource(BaseModel):
    """Response for deleted conversation."""

    id: str = Field(..., description="The deleted conversation identifier")
    object: str = Field(default="conversation.deleted", description="Object type")
    # Always True in practice; the default mirrors the OpenAI-style deleted resource.
    deleted: bool = Field(default=True, description="Whether the object was deleted")
91
+
92
+
93
@json_schema_type
class ConversationItemCreateRequest(BaseModel):
    """Request body for creating conversation items.

    NOTE(review): field-for-field identical to AddItemsRequest — confirm
    whether both are needed or one should be deprecated.
    """

    # max_length enforces the documented 20-item-per-call limit.
    items: list[ConversationItem] = Field(
        ...,
        description="Items to include in the conversation context. You may add up to 20 items at a time.",
        max_length=20,
    )
102
+
103
+
104
class ConversationItemInclude(StrEnum):
    """Specify additional output data to include in the model response.

    String values appear to mirror the OpenAI API's ``include`` options —
    TODO confirm against the upstream spec.
    """

    web_search_call_action_sources = "web_search_call.action.sources"
    code_interpreter_call_outputs = "code_interpreter_call.outputs"
    computer_call_output_output_image_url = "computer_call_output.output.image_url"
    file_search_call_results = "file_search_call.results"
    message_input_image_image_url = "message.input_image.image_url"
    message_output_text_logprobs = "message.output_text.logprobs"
    reasoning_encrypted_content = "reasoning.encrypted_content"
114
+
115
+
116
@json_schema_type
class ConversationItemList(BaseModel):
    """List of conversation items with pagination."""

    object: str = Field(default="list", description="Object type")
    data: list[ConversationItem] = Field(..., description="List of conversation items")
    # Cursor-style pagination markers.
    first_id: str | None = Field(default=None, description="The ID of the first item in the list")
    last_id: str | None = Field(default=None, description="The ID of the last item in the list")
    has_more: bool = Field(default=False, description="Whether there are more items available")
125
+
126
+
127
@json_schema_type
class ConversationItemDeletedResource(BaseModel):
    """Response for deleted conversation item."""

    id: str = Field(..., description="The deleted item identifier")
    object: str = Field(default="conversation.item.deleted", description="Object type")
    # Always True in practice; mirrors the OpenAI-style deleted resource.
    deleted: bool = Field(default=True, description="Whether the object was deleted")
134
+
135
+
136
+ # Request models for each endpoint
137
+
138
+
139
@json_schema_type
class CreateConversationRequest(BaseModel):
    """Request model for creating a conversation.

    NOTE(review): unlike AddItemsRequest, ``items`` here has no
    ``max_length=20`` constraint even though the API documents a 20-item
    limit — confirm whether the limit is enforced server-side.
    """

    items: list[ConversationItem] | None = Field(
        default=None,
        description="Initial items to include in the conversation context.",
    )
    metadata: Metadata | None = Field(
        default=None,
        description="Set of key-value pairs that can be attached to an object.",
    )
151
+
152
+
153
@json_schema_type
class GetConversationRequest(BaseModel):
    """Request model for getting a conversation by ID.

    Single-field model suitable for ``create_path_dependency``.
    """

    conversation_id: str = Field(..., description="The conversation identifier.")
158
+
159
+
160
@json_schema_type
class UpdateConversationRequest(BaseModel):
    """Request model for updating a conversation's metadata.

    ``metadata`` is required (not optional) — an update always supplies the
    full replacement mapping.
    """

    metadata: Metadata = Field(
        ...,
        description="Set of key-value pairs that can be attached to an object.",
    )
168
+
169
+
170
@json_schema_type
class DeleteConversationRequest(BaseModel):
    """Request model for deleting a conversation.

    Single-field model suitable for ``create_path_dependency``.
    """

    conversation_id: str = Field(..., description="The conversation identifier.")
175
+
176
+
177
@json_schema_type
class AddItemsRequest(BaseModel):
    """Request model for adding items to a conversation.

    NOTE(review): field-for-field identical to ConversationItemCreateRequest.
    """

    # max_length enforces the documented 20-item-per-call limit.
    items: list[ConversationItem] = Field(
        ...,
        description="Items to include in the conversation context. You may add up to 20 items at a time.",
        max_length=20,
    )
186
+
187
+
188
@json_schema_type
class RetrieveItemRequest(BaseModel):
    """Request model for retrieving a conversation item.

    Carries the two path parameters that identify an item.
    """

    conversation_id: str = Field(..., description="The conversation identifier.")
    item_id: str = Field(..., description="The item identifier.")
194
+
195
+
196
@json_schema_type
class ListItemsRequest(BaseModel):
    """Request model for listing items in a conversation."""

    conversation_id: str = Field(..., description="The conversation identifier.")
    after: str | None = Field(
        default=None,
        description="An item ID to list items after, used in pagination.",
    )
    include: list[ConversationItemInclude] | None = Field(
        default=None,
        description="Specify additional output data to include in the response.",
    )
    # NOTE(review): the 1-100 range is only documented, not enforced with
    # ge/le constraints — confirm whether validation happens server-side.
    limit: int | None = Field(
        default=None,
        description="A limit on the number of objects to be returned (1-100, default 20).",
    )
    order: Literal["asc", "desc"] | None = Field(
        default=None,
        description="The order to return items in (asc or desc, default desc).",
    )
217
+
218
+
219
@json_schema_type
class DeleteItemRequest(BaseModel):
    """Request model for deleting a conversation item.

    Carries the two path parameters that identify an item.
    """

    conversation_id: str = Field(..., description="The conversation identifier.")
    item_id: str = Field(..., description="The item identifier.")
225
+
226
+
227
+ __all__ = [
228
+ "Metadata",
229
+ "Conversation",
230
+ "ConversationMessage",
231
+ "ConversationItem",
232
+ "ConversationDeletedResource",
233
+ "ConversationItemCreateRequest",
234
+ "ConversationItemInclude",
235
+ "ConversationItemList",
236
+ "ConversationItemDeletedResource",
237
+ "CreateConversationRequest",
238
+ "GetConversationRequest",
239
+ "UpdateConversationRequest",
240
+ "DeleteConversationRequest",
241
+ "AddItemsRequest",
242
+ "RetrieveItemRequest",
243
+ "ListItemsRequest",
244
+ "DeleteItemRequest",
245
+ ]
@@ -0,0 +1,34 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """DatasetIO API protocol and models.
8
+
9
+ This module contains the DatasetIO protocol definition.
10
+ Pydantic models are defined in llama_stack_api.datasetio.models.
11
+ The FastAPI router is defined in llama_stack_api.datasetio.fastapi_routes.
12
+ """
13
+
14
+ # Import fastapi_routes for router factory access
15
+ from . import fastapi_routes
16
+
17
+ # Import protocol for FastAPI router
18
+ from .api import DatasetIO, DatasetStore
19
+
20
+ # Import models for re-export
21
+ from .models import (
22
+ AppendRowsRequest,
23
+ IterRowsRequest,
24
+ PaginatedResponse,
25
+ )
26
+
27
+ __all__ = [
28
+ "DatasetIO",
29
+ "DatasetStore",
30
+ "AppendRowsRequest",
31
+ "IterRowsRequest",
32
+ "PaginatedResponse",
33
+ "fastapi_routes",
34
+ ]