llama-stack-api 0.4.3__py3-none-any.whl → 0.5.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (117)
  1. llama_stack_api/__init__.py +1100 -0
  2. llama_stack_api/admin/__init__.py +45 -0
  3. llama_stack_api/admin/api.py +72 -0
  4. llama_stack_api/admin/fastapi_routes.py +117 -0
  5. llama_stack_api/admin/models.py +113 -0
  6. llama_stack_api/agents/__init__.py +38 -0
  7. llama_stack_api/agents/api.py +52 -0
  8. llama_stack_api/agents/fastapi_routes.py +268 -0
  9. llama_stack_api/agents/models.py +181 -0
  10. llama_stack_api/batches/__init__.py +40 -0
  11. llama_stack_api/batches/api.py +53 -0
  12. llama_stack_api/batches/fastapi_routes.py +113 -0
  13. llama_stack_api/batches/models.py +78 -0
  14. llama_stack_api/benchmarks/__init__.py +43 -0
  15. llama_stack_api/benchmarks/api.py +39 -0
  16. llama_stack_api/benchmarks/fastapi_routes.py +109 -0
  17. llama_stack_api/benchmarks/models.py +109 -0
  18. llama_stack_api/common/__init__.py +5 -0
  19. llama_stack_api/common/content_types.py +101 -0
  20. llama_stack_api/common/errors.py +110 -0
  21. llama_stack_api/common/job_types.py +38 -0
  22. llama_stack_api/common/responses.py +77 -0
  23. llama_stack_api/common/training_types.py +47 -0
  24. llama_stack_api/common/type_system.py +146 -0
  25. llama_stack_api/connectors/__init__.py +38 -0
  26. llama_stack_api/connectors/api.py +50 -0
  27. llama_stack_api/connectors/fastapi_routes.py +103 -0
  28. llama_stack_api/connectors/models.py +103 -0
  29. llama_stack_api/conversations/__init__.py +61 -0
  30. llama_stack_api/conversations/api.py +44 -0
  31. llama_stack_api/conversations/fastapi_routes.py +177 -0
  32. llama_stack_api/conversations/models.py +245 -0
  33. llama_stack_api/datasetio/__init__.py +34 -0
  34. llama_stack_api/datasetio/api.py +42 -0
  35. llama_stack_api/datasetio/fastapi_routes.py +94 -0
  36. llama_stack_api/datasetio/models.py +48 -0
  37. llama_stack_api/datasets/__init__.py +61 -0
  38. llama_stack_api/datasets/api.py +35 -0
  39. llama_stack_api/datasets/fastapi_routes.py +104 -0
  40. llama_stack_api/datasets/models.py +152 -0
  41. llama_stack_api/datatypes.py +373 -0
  42. llama_stack_api/eval/__init__.py +55 -0
  43. llama_stack_api/eval/api.py +51 -0
  44. llama_stack_api/eval/compat.py +300 -0
  45. llama_stack_api/eval/fastapi_routes.py +126 -0
  46. llama_stack_api/eval/models.py +141 -0
  47. llama_stack_api/file_processors/__init__.py +27 -0
  48. llama_stack_api/file_processors/api.py +64 -0
  49. llama_stack_api/file_processors/fastapi_routes.py +78 -0
  50. llama_stack_api/file_processors/models.py +42 -0
  51. llama_stack_api/files/__init__.py +35 -0
  52. llama_stack_api/files/api.py +51 -0
  53. llama_stack_api/files/fastapi_routes.py +124 -0
  54. llama_stack_api/files/models.py +107 -0
  55. llama_stack_api/inference/__init__.py +207 -0
  56. llama_stack_api/inference/api.py +93 -0
  57. llama_stack_api/inference/fastapi_routes.py +243 -0
  58. llama_stack_api/inference/models.py +1035 -0
  59. llama_stack_api/inspect_api/__init__.py +37 -0
  60. llama_stack_api/inspect_api/api.py +25 -0
  61. llama_stack_api/inspect_api/fastapi_routes.py +76 -0
  62. llama_stack_api/inspect_api/models.py +28 -0
  63. llama_stack_api/internal/__init__.py +9 -0
  64. llama_stack_api/internal/kvstore.py +28 -0
  65. llama_stack_api/internal/sqlstore.py +81 -0
  66. llama_stack_api/models/__init__.py +47 -0
  67. llama_stack_api/models/api.py +38 -0
  68. llama_stack_api/models/fastapi_routes.py +104 -0
  69. llama_stack_api/models/models.py +157 -0
  70. llama_stack_api/openai_responses.py +1494 -0
  71. llama_stack_api/post_training/__init__.py +73 -0
  72. llama_stack_api/post_training/api.py +36 -0
  73. llama_stack_api/post_training/fastapi_routes.py +116 -0
  74. llama_stack_api/post_training/models.py +339 -0
  75. llama_stack_api/prompts/__init__.py +47 -0
  76. llama_stack_api/prompts/api.py +44 -0
  77. llama_stack_api/prompts/fastapi_routes.py +163 -0
  78. llama_stack_api/prompts/models.py +177 -0
  79. llama_stack_api/providers/__init__.py +33 -0
  80. llama_stack_api/providers/api.py +16 -0
  81. llama_stack_api/providers/fastapi_routes.py +57 -0
  82. llama_stack_api/providers/models.py +24 -0
  83. llama_stack_api/rag_tool.py +168 -0
  84. llama_stack_api/resource.py +36 -0
  85. llama_stack_api/router_utils.py +160 -0
  86. llama_stack_api/safety/__init__.py +37 -0
  87. llama_stack_api/safety/api.py +29 -0
  88. llama_stack_api/safety/datatypes.py +83 -0
  89. llama_stack_api/safety/fastapi_routes.py +55 -0
  90. llama_stack_api/safety/models.py +38 -0
  91. llama_stack_api/schema_utils.py +251 -0
  92. llama_stack_api/scoring/__init__.py +66 -0
  93. llama_stack_api/scoring/api.py +35 -0
  94. llama_stack_api/scoring/fastapi_routes.py +67 -0
  95. llama_stack_api/scoring/models.py +81 -0
  96. llama_stack_api/scoring_functions/__init__.py +50 -0
  97. llama_stack_api/scoring_functions/api.py +39 -0
  98. llama_stack_api/scoring_functions/fastapi_routes.py +108 -0
  99. llama_stack_api/scoring_functions/models.py +214 -0
  100. llama_stack_api/shields/__init__.py +41 -0
  101. llama_stack_api/shields/api.py +39 -0
  102. llama_stack_api/shields/fastapi_routes.py +104 -0
  103. llama_stack_api/shields/models.py +74 -0
  104. llama_stack_api/tools.py +226 -0
  105. llama_stack_api/validators.py +46 -0
  106. llama_stack_api/vector_io/__init__.py +88 -0
  107. llama_stack_api/vector_io/api.py +234 -0
  108. llama_stack_api/vector_io/fastapi_routes.py +447 -0
  109. llama_stack_api/vector_io/models.py +663 -0
  110. llama_stack_api/vector_stores.py +53 -0
  111. llama_stack_api/version.py +9 -0
  112. {llama_stack_api-0.4.3.dist-info → llama_stack_api-0.5.0rc1.dist-info}/METADATA +1 -1
  113. llama_stack_api-0.5.0rc1.dist-info/RECORD +115 -0
  114. llama_stack_api-0.5.0rc1.dist-info/top_level.txt +1 -0
  115. llama_stack_api-0.4.3.dist-info/RECORD +0 -4
  116. llama_stack_api-0.4.3.dist-info/top_level.txt +0 -1
  117. {llama_stack_api-0.4.3.dist-info → llama_stack_api-0.5.0rc1.dist-info}/WHEEL +0 -0
@@ -0,0 +1,177 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """FastAPI router for the Conversations API.
8
+
9
+ This module defines the FastAPI router for the Conversations API using standard
10
+ FastAPI route decorators.
11
+ """
12
+
13
+ from typing import Annotated, Literal
14
+
15
+ from fastapi import APIRouter, Body, Depends, Path
16
+ from pydantic import BaseModel
17
+
18
+ from llama_stack_api.router_utils import create_path_dependency, create_query_dependency, standard_responses
19
+ from llama_stack_api.version import LLAMA_STACK_API_V1
20
+
21
+ from .api import Conversations
22
+ from .models import (
23
+ AddItemsRequest,
24
+ Conversation,
25
+ ConversationDeletedResource,
26
+ ConversationItem,
27
+ ConversationItemDeletedResource,
28
+ ConversationItemInclude,
29
+ ConversationItemList,
30
+ CreateConversationRequest,
31
+ DeleteConversationRequest,
32
+ DeleteItemRequest,
33
+ GetConversationRequest,
34
+ ListItemsRequest,
35
+ RetrieveItemRequest,
36
+ UpdateConversationRequest,
37
+ )
38
+
39
+
40
class _ListItemsQueryParams(BaseModel):
    """Query parameters for list_items endpoint (excludes conversation_id path param).

    This is a subset of ListItemsRequest that only includes query parameters,
    excluding the conversation_id which is a path parameter.
    """

    # Pagination cursor: return items after this item ID.
    after: str | None = None
    # Extra output data to include in the response (see ConversationItemInclude).
    include: list[ConversationItemInclude] | None = None
    # Maximum number of items to return; server default applies when None.
    limit: int | None = None
    # Sort order of returned items; server default applies when None.
    order: Literal["asc", "desc"] | None = None
51
+
52
+
53
# Dependency functions for request models.
# create_path_dependency/create_query_dependency (router_utils) produce FastAPI
# dependency callables that assemble the given pydantic model from the request,
# used below via Depends(...).
get_conversation_request = create_path_dependency(GetConversationRequest)
delete_conversation_request = create_path_dependency(DeleteConversationRequest)
get_list_items_query_params = create_query_dependency(_ListItemsQueryParams)
57
+
58
+
59
def create_router(impl: Conversations) -> APIRouter:
    """Create a FastAPI router for the Conversations API."""
    # All routes are versioned under /{LLAMA_STACK_API_V1} and share the
    # standard error responses defined in router_utils.
    router = APIRouter(
        prefix=f"/{LLAMA_STACK_API_V1}",
        tags=["Conversations"],
        responses=standard_responses,
    )

    @router.post(
        "/conversations",
        response_model=Conversation,
        summary="Create a conversation.",
        description="Create a conversation.",
        responses={200: {"description": "The created conversation object."}},
    )
    async def create_conversation(
        request: Annotated[CreateConversationRequest, Body(...)],
    ) -> Conversation:
        return await impl.create_conversation(request)

    @router.get(
        "/conversations/{conversation_id}",
        response_model=Conversation,
        summary="Retrieve a conversation.",
        description="Get a conversation with the given ID.",
        responses={200: {"description": "The conversation object."}},
    )
    async def get_conversation(
        # The path parameter is turned into a GetConversationRequest by the
        # dependency built above via create_path_dependency.
        request: Annotated[GetConversationRequest, Depends(get_conversation_request)],
    ) -> Conversation:
        return await impl.get_conversation(request)

    @router.post(
        "/conversations/{conversation_id}",
        response_model=Conversation,
        summary="Update a conversation.",
        description="Update a conversation's metadata with the given ID.",
        responses={200: {"description": "The updated conversation object."}},
    )
    async def update_conversation(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        request: Annotated[UpdateConversationRequest, Body(...)],
    ) -> Conversation:
        # Unlike get/delete, path param and body are passed separately because
        # the update payload (metadata) comes from the request body.
        return await impl.update_conversation(conversation_id, request)

    @router.delete(
        "/conversations/{conversation_id}",
        response_model=ConversationDeletedResource,
        summary="Delete a conversation.",
        description="Delete a conversation with the given ID.",
        responses={200: {"description": "The deleted conversation resource."}},
    )
    async def delete_conversation(
        request: Annotated[DeleteConversationRequest, Depends(delete_conversation_request)],
    ) -> ConversationDeletedResource:
        # NOTE: the protocol method uses the OpenAI-compatible name.
        return await impl.openai_delete_conversation(request)

    @router.post(
        "/conversations/{conversation_id}/items",
        response_model=ConversationItemList,
        summary="Create items.",
        description="Create items in the conversation.",
        responses={200: {"description": "List of created items."}},
    )
    async def add_items(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        request: Annotated[AddItemsRequest, Body(...)],
    ) -> ConversationItemList:
        return await impl.add_items(conversation_id, request)

    @router.get(
        "/conversations/{conversation_id}/items/{item_id}",
        response_model=ConversationItem,
        summary="Retrieve an item.",
        description="Retrieve a conversation item.",
        responses={200: {"description": "The conversation item."}},
    )
    async def retrieve_item(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        item_id: Annotated[str, Path(description="The item identifier.")],
    ) -> ConversationItem:
        # Two path params, so the request model is assembled inline rather than
        # via a create_path_dependency helper.
        request = RetrieveItemRequest(conversation_id=conversation_id, item_id=item_id)
        return await impl.retrieve(request)

    @router.get(
        "/conversations/{conversation_id}/items",
        response_model=ConversationItemList,
        summary="List items.",
        description="List items in the conversation.",
        responses={200: {"description": "List of conversation items."}},
    )
    async def list_items(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        query_params: Annotated[_ListItemsQueryParams, Depends(get_list_items_query_params)],
    ) -> ConversationItemList:
        # Merge the path parameter with the query parameters into the
        # protocol-level ListItemsRequest.
        request = ListItemsRequest(
            conversation_id=conversation_id,
            after=query_params.after,
            include=query_params.include,
            limit=query_params.limit,
            order=query_params.order,
        )
        return await impl.list_items(request)

    @router.delete(
        "/conversations/{conversation_id}/items/{item_id}",
        response_model=ConversationItemDeletedResource,
        summary="Delete an item.",
        description="Delete a conversation item.",
        responses={200: {"description": "The deleted item resource."}},
    )
    async def delete_item(
        conversation_id: Annotated[str, Path(description="The conversation identifier.")],
        item_id: Annotated[str, Path(description="The item identifier.")],
    ) -> ConversationItemDeletedResource:
        request = DeleteItemRequest(conversation_id=conversation_id, item_id=item_id)
        # NOTE: the protocol method uses the OpenAI-compatible name.
        return await impl.openai_delete_conversation_item(request)

    return router
@@ -0,0 +1,245 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """Pydantic models for Conversations API requests and responses.
8
+
9
+ This module defines the request and response models for the Conversations API
10
+ using Pydantic with Field descriptions for OpenAPI schema generation.
11
+ """
12
+
13
+ from enum import StrEnum
14
+ from typing import Annotated, Literal
15
+
16
+ from pydantic import BaseModel, Field
17
+
18
+ from llama_stack_api.openai_responses import (
19
+ OpenAIResponseInputFunctionToolCallOutput,
20
+ OpenAIResponseMCPApprovalRequest,
21
+ OpenAIResponseMCPApprovalResponse,
22
+ OpenAIResponseMessage,
23
+ OpenAIResponseOutputMessageFileSearchToolCall,
24
+ OpenAIResponseOutputMessageFunctionToolCall,
25
+ OpenAIResponseOutputMessageMCPCall,
26
+ OpenAIResponseOutputMessageMCPListTools,
27
+ OpenAIResponseOutputMessageWebSearchToolCall,
28
+ )
29
+ from llama_stack_api.schema_utils import json_schema_type, register_schema
30
+
31
# Conversation metadata is a flat string-to-string mapping.
Metadata = dict[str, str]
32
+
33
+
34
@json_schema_type
class Conversation(BaseModel):
    """OpenAI-compatible conversation object."""

    id: str = Field(..., description="The unique ID of the conversation.")
    # Constant discriminator; always the literal string "conversation".
    object: Literal["conversation"] = Field(
        default="conversation", description="The object type, which is always conversation."
    )
    # Unix epoch seconds.
    created_at: int = Field(
        ..., description="The time at which the conversation was created, measured in seconds since the Unix epoch."
    )
    metadata: Metadata | None = Field(
        default=None,
        description="Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.",
    )
    # NOTE(review): typed as plain dicts here while the request models use the
    # ConversationItem union — presumably lenient on output; confirm intent.
    items: list[dict] | None = Field(
        default=None,
        description="Initial items to include in the conversation context. You may add up to 20 items at a time.",
    )
53
+
54
+
55
@json_schema_type
class ConversationMessage(BaseModel):
    """OpenAI-compatible message item for conversations."""

    id: str = Field(..., description="unique identifier for this message")
    content: list[dict] = Field(..., description="message content")
    role: str = Field(..., description="message role")
    status: str = Field(..., description="message status")
    # Constant discriminator fields.
    type: Literal["message"] = "message"
    object: Literal["message"] = "message"
65
+
66
+
67
# Tagged union over every item shape a conversation can hold; pydantic selects
# the concrete model from the "type" discriminator field.
ConversationItem = Annotated[
    OpenAIResponseMessage
    | OpenAIResponseOutputMessageWebSearchToolCall
    | OpenAIResponseOutputMessageFileSearchToolCall
    | OpenAIResponseOutputMessageFunctionToolCall
    | OpenAIResponseInputFunctionToolCallOutput
    | OpenAIResponseMCPApprovalRequest
    | OpenAIResponseMCPApprovalResponse
    # Each member listed exactly once: the original repeated the two MCP models,
    # which Python's `|` union deduplicates anyway, so behavior is unchanged.
    | OpenAIResponseOutputMessageMCPCall
    | OpenAIResponseOutputMessageMCPListTools,
    Field(discriminator="type"),
]
register_schema(ConversationItem, name="ConversationItem")
82
+
83
+
84
@json_schema_type
class ConversationDeletedResource(BaseModel):
    """Response for deleted conversation."""

    id: str = Field(..., description="The deleted conversation identifier")
    # Constant marker distinguishing this resource type in responses.
    object: str = Field(default="conversation.deleted", description="Object type")
    deleted: bool = Field(default=True, description="Whether the object was deleted")
91
+
92
+
93
@json_schema_type
class ConversationItemCreateRequest(BaseModel):
    """Request body for creating conversation items."""

    # max_length enforces the 20-item batch cap mentioned in the description.
    items: list[ConversationItem] = Field(
        ...,
        description="Items to include in the conversation context. You may add up to 20 items at a time.",
        max_length=20,
    )
101
+ )
102
+
103
+
104
class ConversationItemInclude(StrEnum):
    """Specify additional output data to include in the model response."""

    # Values are the wire-format strings accepted by the `include` query
    # parameter (dotted-path option names; presumably mirroring the OpenAI
    # API's include options — verify against upstream).
    web_search_call_action_sources = "web_search_call.action.sources"
    code_interpreter_call_outputs = "code_interpreter_call.outputs"
    computer_call_output_output_image_url = "computer_call_output.output.image_url"
    file_search_call_results = "file_search_call.results"
    message_input_image_image_url = "message.input_image.image_url"
    message_output_text_logprobs = "message.output_text.logprobs"
    reasoning_encrypted_content = "reasoning.encrypted_content"
114
+
115
+
116
@json_schema_type
class ConversationItemList(BaseModel):
    """List of conversation items with pagination."""

    object: str = Field(default="list", description="Object type")
    data: list[ConversationItem] = Field(..., description="List of conversation items")
    # Cursor-style pagination metadata: first/last IDs of this page plus a
    # has_more flag for fetching subsequent pages.
    first_id: str | None = Field(default=None, description="The ID of the first item in the list")
    last_id: str | None = Field(default=None, description="The ID of the last item in the list")
    has_more: bool = Field(default=False, description="Whether there are more items available")
125
+
126
+
127
@json_schema_type
class ConversationItemDeletedResource(BaseModel):
    """Response for deleted conversation item."""

    id: str = Field(..., description="The deleted item identifier")
    # Constant marker distinguishing this resource type in responses.
    object: str = Field(default="conversation.item.deleted", description="Object type")
    deleted: bool = Field(default=True, description="Whether the object was deleted")
134
+
135
+
136
# Request models for each endpoint


@json_schema_type
class CreateConversationRequest(BaseModel):
    """Request model for creating a conversation."""

    items: list[ConversationItem] | None = Field(
        default=None,
        description="Initial items to include in the conversation context.",
    )
    metadata: Metadata | None = Field(
        default=None,
        description="Set of key-value pairs that can be attached to an object.",
    )
151
+
152
+
153
@json_schema_type
class GetConversationRequest(BaseModel):
    """Request model for getting a conversation by ID."""

    # Populated from the path parameter via create_path_dependency.
    conversation_id: str = Field(..., description="The conversation identifier.")
158
+
159
+
160
@json_schema_type
class UpdateConversationRequest(BaseModel):
    """Request model for updating a conversation's metadata."""

    # Required (not optional): an update must carry a metadata payload.
    metadata: Metadata = Field(
        ...,
        description="Set of key-value pairs that can be attached to an object.",
    )
168
+
169
+
170
@json_schema_type
class DeleteConversationRequest(BaseModel):
    """Request model for deleting a conversation."""

    # Populated from the path parameter via create_path_dependency.
    conversation_id: str = Field(..., description="The conversation identifier.")
175
+
176
+
177
@json_schema_type
class AddItemsRequest(BaseModel):
    """Request model for adding items to a conversation."""

    # Same shape as ConversationItemCreateRequest; max_length enforces the
    # 20-item batch cap.
    items: list[ConversationItem] = Field(
        ...,
        description="Items to include in the conversation context. You may add up to 20 items at a time.",
        max_length=20,
    )
186
+
187
+
188
@json_schema_type
class RetrieveItemRequest(BaseModel):
    """Request model for retrieving a conversation item."""

    # Both fields come from path parameters on the items/{item_id} route.
    conversation_id: str = Field(..., description="The conversation identifier.")
    item_id: str = Field(..., description="The item identifier.")
194
+
195
+
196
@json_schema_type
class ListItemsRequest(BaseModel):
    """Request model for listing items in a conversation."""

    # Path parameter; the remaining fields are query parameters (see
    # _ListItemsQueryParams in fastapi_routes).
    conversation_id: str = Field(..., description="The conversation identifier.")
    after: str | None = Field(
        default=None,
        description="An item ID to list items after, used in pagination.",
    )
    include: list[ConversationItemInclude] | None = Field(
        default=None,
        description="Specify additional output data to include in the response.",
    )
    limit: int | None = Field(
        default=None,
        description="A limit on the number of objects to be returned (1-100, default 20).",
    )
    order: Literal["asc", "desc"] | None = Field(
        default=None,
        description="The order to return items in (asc or desc, default desc).",
    )
217
+
218
+
219
@json_schema_type
class DeleteItemRequest(BaseModel):
    """Request model for deleting a conversation item."""

    # Both fields come from path parameters on the items/{item_id} route.
    conversation_id: str = Field(..., description="The conversation identifier.")
    item_id: str = Field(..., description="The item identifier.")
225
+
226
+
227
# Explicit public API of this module.
__all__ = [
    "Metadata",
    "Conversation",
    "ConversationMessage",
    "ConversationItem",
    "ConversationDeletedResource",
    "ConversationItemCreateRequest",
    "ConversationItemInclude",
    "ConversationItemList",
    "ConversationItemDeletedResource",
    "CreateConversationRequest",
    "GetConversationRequest",
    "UpdateConversationRequest",
    "DeleteConversationRequest",
    "AddItemsRequest",
    "RetrieveItemRequest",
    "ListItemsRequest",
    "DeleteItemRequest",
]
@@ -0,0 +1,34 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """DatasetIO API protocol and models.
8
+
9
+ This module contains the DatasetIO protocol definition.
10
+ Pydantic models are defined in llama_stack_api.datasetio.models.
11
+ The FastAPI router is defined in llama_stack_api.datasetio.fastapi_routes.
12
+ """
13
+
14
+ # Import fastapi_routes for router factory access
15
+ from . import fastapi_routes
16
+
17
+ # Import protocol for FastAPI router
18
+ from .api import DatasetIO, DatasetStore
19
+
20
+ # Import models for re-export
21
+ from .models import (
22
+ AppendRowsRequest,
23
+ IterRowsRequest,
24
+ PaginatedResponse,
25
+ )
26
+
27
# Public re-exports of the datasetio package (protocols, request models, and
# the fastapi_routes submodule for router-factory access).
__all__ = [
    "DatasetIO",
    "DatasetStore",
    "AppendRowsRequest",
    "IterRowsRequest",
    "PaginatedResponse",
    "fastapi_routes",
]
@@ -0,0 +1,42 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """DatasetIO API protocol definition.
8
+
9
+ This module contains the DatasetIO protocol definition.
10
+ Pydantic models are defined in llama_stack_api.datasetio.models.
11
+ The FastAPI router is defined in llama_stack_api.datasetio.fastapi_routes.
12
+ """
13
+
14
+ from typing import Protocol, runtime_checkable
15
+
16
+ from llama_stack_api.datasets import Dataset
17
+
18
+ from .models import (
19
+ AppendRowsRequest,
20
+ IterRowsRequest,
21
+ PaginatedResponse,
22
+ )
23
+
24
+
25
class DatasetStore(Protocol):
    """Structural interface for resolving a registered Dataset by its ID."""

    def get_dataset(self, dataset_id: str) -> Dataset: ...
27
+
28
+
29
# runtime_checkable allows isinstance() checks against this protocol.
@runtime_checkable
class DatasetIO(Protocol):
    """Protocol for dataset I/O operations.

    The DatasetIO API provides operations for reading and writing data to datasets.
    This includes iterating over rows and appending new rows to existing datasets.
    """

    # keeping for aligning with inference/safety, but this is not used
    dataset_store: DatasetStore

    # Paginated read: see fastapi_routes.iterrows for the HTTP-level contract.
    async def iterrows(self, request: IterRowsRequest) -> PaginatedResponse: ...

    # Append-only write; returns nothing (HTTP layer responds 204).
    async def append_rows(self, request: AppendRowsRequest) -> None: ...
@@ -0,0 +1,94 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """FastAPI router for the DatasetIO API.
8
+
9
+ This module defines the FastAPI router for the DatasetIO API using standard
10
+ FastAPI route decorators.
11
+ """
12
+
13
+ from typing import Annotated
14
+
15
+ from fastapi import APIRouter, Body, Path, Query
16
+
17
+ from llama_stack_api.common.responses import PaginatedResponse
18
+ from llama_stack_api.router_utils import standard_responses
19
+ from llama_stack_api.version import LLAMA_STACK_API_V1BETA
20
+
21
+ from .api import DatasetIO
22
+ from .models import (
23
+ AppendRowsRequest,
24
+ IterRowsRequest,
25
+ )
26
+
27
+
28
def create_router(impl: DatasetIO) -> APIRouter:
    """Build the versioned FastAPI router exposing the DatasetIO API.

    Args:
        impl: The DatasetIO implementation instance

    Returns:
        APIRouter configured for the DatasetIO API
    """
    # Routes live under the v1beta prefix and share the standard error responses.
    router = APIRouter(
        prefix=f"/{LLAMA_STACK_API_V1BETA}",
        tags=["DatasetIO"],
        responses=standard_responses,
    )

    @router.get(
        "/datasetio/iterrows/{dataset_id:path}",
        response_model=PaginatedResponse,
        summary="Get a paginated list of rows from a dataset.",
        description="""Get a paginated list of rows from a dataset.

Uses offset-based pagination where:
- start_index: The starting index (0-based). If None, starts from beginning.
- limit: Number of items to return. If None or -1, returns all items.

The response includes:
- data: List of items for the current page.
- has_more: Whether there are more items available after this set.""",
        responses={
            200: {"description": "A PaginatedResponse containing the rows."},
        },
    )
    async def iterrows(
        dataset_id: Annotated[str, Path(description="The ID of the dataset to get the rows from.")],
        start_index: Annotated[
            int | None, Query(description="Index into dataset for the first row to get. Get all rows if None.")
        ] = None,
        limit: Annotated[int | None, Query(description="The number of rows to get.")] = None,
    ) -> PaginatedResponse:
        # Fold path and query parameters into the protocol-level request model.
        page_request = IterRowsRequest(
            dataset_id=dataset_id,
            start_index=start_index,
            limit=limit,
        )
        return await impl.iterrows(page_request)

    @router.post(
        "/datasetio/append-rows/{dataset_id:path}",
        status_code=204,
        summary="Append rows to a dataset.",
        description="Append rows to a dataset.",
        responses={
            204: {"description": "Rows were successfully appended."},
        },
    )
    async def append_rows(
        dataset_id: Annotated[str, Path(description="The ID of the dataset to append the rows to.")],
        request: Annotated[AppendRowsRequest, Body(...)],
    ) -> None:
        # The path parameter is authoritative: rebuild the request so any
        # dataset_id carried in the body is discarded in favor of the URL's.
        normalized = AppendRowsRequest(dataset_id=dataset_id, rows=request.rows)
        await impl.append_rows(normalized)

    return router
@@ -0,0 +1,48 @@
1
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
2
+ # All rights reserved.
3
+ #
4
+ # This source code is licensed under the terms described in the LICENSE file in
5
+ # the root directory of this source tree.
6
+
7
+ """Pydantic models for DatasetIO API requests and responses.
8
+
9
+ This module defines the request and response models for the DatasetIO API
10
+ using Pydantic with Field descriptions for OpenAPI schema generation.
11
+ """
12
+
13
+ from typing import Any
14
+
15
+ from pydantic import BaseModel, Field
16
+
17
+ from llama_stack_api.common.responses import PaginatedResponse
18
+ from llama_stack_api.schema_utils import json_schema_type
19
+
20
+
21
@json_schema_type
class IterRowsRequest(BaseModel):
    """Request model for iterating over rows in a dataset."""

    dataset_id: str = Field(..., description="The ID of the dataset to get the rows from.")
    # 0-based offset into the dataset; None means start from the beginning.
    start_index: int | None = Field(
        default=None,
        description="Index into dataset for the first row to get. Get all rows if None.",
    )
    limit: int | None = Field(
        default=None,
        description="The number of rows to get.",
    )
34
+
35
+
36
@json_schema_type
class AppendRowsRequest(BaseModel):
    """Request model for appending rows to a dataset."""

    # NOTE(review): when used as an HTTP body, this field is overridden by the
    # path parameter in fastapi_routes.append_rows — the body's value is ignored.
    dataset_id: str = Field(..., description="The ID of the dataset to append the rows to.")
    rows: list[dict[str, Any]] = Field(..., description="The rows to append to the dataset.")
42
+
43
+
44
# Explicit public API of this module (PaginatedResponse re-exported from
# llama_stack_api.common.responses).
__all__ = [
    "AppendRowsRequest",
    "IterRowsRequest",
    "PaginatedResponse",
]