latitude-sdk 1.0.2__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- latitude_sdk/client/payloads.py +6 -0
- latitude_sdk/client/router.py +11 -0
- latitude_sdk/sdk/latitude.py +1 -1
- latitude_sdk/sdk/prompts.py +63 -56
- latitude_sdk/sdk/types.py +97 -45
- {latitude_sdk-1.0.2.dist-info → latitude_sdk-1.1.0.dist-info}/METADATA +23 -1
- {latitude_sdk-1.0.2.dist-info → latitude_sdk-1.1.0.dist-info}/RECORD +8 -8
- {latitude_sdk-1.0.2.dist-info → latitude_sdk-1.1.0.dist-info}/WHEEL +0 -0
latitude_sdk/client/payloads.py
CHANGED
@@ -23,6 +23,10 @@ class GetPromptRequestParams(PromptRequestParams, Model):
     path: str
 
 
+class GetAllPromptRequestParams(PromptRequestParams, Model):
+    pass
+
+
 class GetOrCreatePromptRequestParams(PromptRequestParams, Model):
     pass
 
@@ -90,6 +94,7 @@ class CreateEvaluationResultRequestBody(Model):
 
 RequestParams = Union[
     GetPromptRequestParams,
+    GetAllPromptRequestParams,
     GetOrCreatePromptRequestParams,
     RunPromptRequestParams,
     ChatPromptRequestParams,
@@ -111,6 +116,7 @@ RequestBody = Union[
 
 class RequestHandler(StrEnum):
     GetPrompt = "GET_PROMPT"
+    GetAllPrompts = "GET_ALL_PROMPTS"
     GetOrCreatePrompt = "GET_OR_CREATE_PROMPT"
     RunPrompt = "RUN_PROMPT"
     ChatPrompt = "CHAT_PROMPT"
latitude_sdk/client/router.py
CHANGED
@@ -4,6 +4,7 @@ from latitude_sdk.client.payloads import (
     ChatPromptRequestParams,
     CreateEvaluationResultRequestParams,
     CreateLogRequestParams,
+    GetAllPromptRequestParams,
     GetOrCreatePromptRequestParams,
     GetPromptRequestParams,
     RequestHandler,
@@ -36,6 +37,14 @@ class Router:
                 version_uuid=params.version_uuid,
             ).prompt(params.path)
 
+        if handler == RequestHandler.GetAllPrompts:
+            assert isinstance(params, GetAllPromptRequestParams)
+
+            return "GET", self.prompts(
+                project_id=params.project_id,
+                version_uuid=params.version_uuid,
+            ).all_prompts
+
         elif handler == RequestHandler.GetOrCreatePrompt:
             assert isinstance(params, GetOrCreatePromptRequestParams)
 
@@ -94,6 +103,7 @@ class Router:
 
     class Prompts(Model):
         prompt: Callable[[str], str]
+        all_prompts: str
         get_or_create: str
         run: str
         logs: str
@@ -102,6 +112,7 @@ class Router:
         base_url = f"{self.commits_url(project_id, version_uuid)}/documents"
 
         return self.Prompts(
+            all_prompts=f"{base_url}",
            prompt=lambda path: f"{base_url}/{path}",
            get_or_create=f"{base_url}/get-or-create",
            run=f"{base_url}/run",
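
For orientation, a minimal sketch of the route the new `GetAllPrompts` handler resolves to. Only the `{commits_url}/documents` suffix and the `("GET", …)` pair come from the diff above; the gateway host and the `projects/{id}/versions/{uuid}` path shape are assumptions for illustration, not taken from this diff.

```python
# Illustrative sketch only: mirrors the Router change above, not the SDK's internals.
from typing import Tuple


def resolve_get_all_prompts(gateway: str, project_id: int, version_uuid: str = "live") -> Tuple[str, str]:
    # Assumed path shape for commits_url; the diff only shows f"{commits_url}/documents".
    commits_url = f"{gateway}/projects/{project_id}/versions/{version_uuid}"
    # The new `all_prompts` route is the documents collection itself, requested with GET.
    return "GET", f"{commits_url}/documents"


print(resolve_get_all_prompts("https://gateway.example", 1))
```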
latitude_sdk/sdk/latitude.py
CHANGED
latitude_sdk/sdk/prompts.py
CHANGED
@@ -1,5 +1,5 @@
 import asyncio
-from typing import Any, AsyncGenerator,
+from typing import Any, AsyncGenerator, List, Optional, Sequence, Union
 
 from promptl_ai import Adapter, Message, MessageLike, Promptl, ToolMessage, ToolResultContent
 from promptl_ai.bindings.types import _Message
@@ -9,6 +9,7 @@ from latitude_sdk.client import (
     ChatPromptRequestParams,
     Client,
     ClientEvent,
+    GetAllPromptRequestParams,
     GetOrCreatePromptRequestBody,
     GetOrCreatePromptRequestParams,
     GetPromptRequestParams,
@@ -18,12 +19,8 @@ from latitude_sdk.client import (
 )
 from latitude_sdk.sdk.errors import ApiError, ApiErrorCodes
 from latitude_sdk.sdk.types import (
-    ChainEventCompleted,
-    ChainEventError,
     ChainEvents,
-
-    ChainEventStepCompleted,
-    FinishedEvent,
+    FinishedResult,
     OnStep,
     OnToolCall,
     OnToolCallDetails,
@@ -33,8 +30,11 @@ from latitude_sdk.sdk.types import (
     StreamCallbacks,
     StreamEvents,
     StreamTypes,
+    ToolCall,
     ToolResult,
+    _LatitudeEvent,
 )
+from latitude_sdk.util import Adapter as AdapterUtil
 from latitude_sdk.util import Model
 
 _PROVIDER_TO_ADAPTER = {
@@ -69,6 +69,13 @@ class GetPromptResult(Prompt, Model):
     pass
 
 
+_GetAllPromptResults = AdapterUtil[List[GetPromptResult]](List[GetPromptResult])
+
+
+class GetAllPromptOptions(PromptOptions, Model):
+    pass
+
+
 class GetOrCreatePromptOptions(PromptOptions, Model):
     prompt: Optional[str] = None
 
@@ -79,36 +86,36 @@ class GetOrCreatePromptResult(Prompt, Model):
 
 class RunPromptOptions(StreamCallbacks, PromptOptions, Model):
     custom_identifier: Optional[str] = None
-    parameters: Optional[
-    tools: Optional[
+    parameters: Optional[dict[str, Any]] = None
+    tools: Optional[dict[str, OnToolCall]] = None
     stream: Optional[bool] = None
 
 
-class RunPromptResult(
+class RunPromptResult(FinishedResult, Model):
     pass
 
 
 class ChatPromptOptions(StreamCallbacks, Model):
-    tools: Optional[
+    tools: Optional[dict[str, OnToolCall]] = None
     stream: Optional[bool] = None
 
 
-class ChatPromptResult(
+class ChatPromptResult(FinishedResult, Model):
     pass
 
 
 class RenderPromptOptions(Model):
-    parameters: Optional[
+    parameters: Optional[dict[str, Any]] = None
     adapter: Optional[Adapter] = None
 
 
 class RenderPromptResult(Model):
-    messages:
-    config:
+    messages: list[MessageLike]
+    config: dict[str, Any]
 
 
 class RenderChainOptions(Model):
-    parameters: Optional[
+    parameters: Optional[dict[str, Any]] = None
     adapter: Optional[Adapter] = None
 
 
@@ -137,32 +144,27 @@ class Prompts:
 
     async def _handle_stream(
         self, stream: AsyncGenerator[ClientEvent, Any], on_event: Optional[StreamCallbacks.OnEvent]
-    ) ->
+    ) -> FinishedResult:
         uuid = None
-        conversation:
+        conversation: list[Message] = []
         response = None
+        tool_requests: list[ToolCall] = []
 
         async for stream_event in stream:
             event = None
 
             if stream_event.event == str(StreamEvents.Latitude):
-
-
-
-                    event = ChainEventStep.model_validate_json(stream_event.data)
-                    conversation.extend(event.messages)
+                event = _LatitudeEvent.validate_json(stream_event.data)
+                conversation = event.messages
+                uuid = event.uuid
 
-
-                    event = ChainEventStepCompleted.model_validate_json(stream_event.data)
-
-                elif type == str(ChainEvents.Completed):
-                    event = ChainEventCompleted.model_validate_json(stream_event.data)
-                    uuid = event.uuid
-                    conversation.extend(event.messages or [])
+                if event.type == ChainEvents.ProviderCompleted:
                     response = event.response
 
-                elif type ==
-
+                elif event.type == ChainEvents.ToolsRequested:
+                    tool_requests = event.tools
+
+                elif event.type == ChainEvents.ChainError:
                     raise ApiError(
                         status=400,
                         code=ApiErrorCodes.AIRunError,
@@ -170,14 +172,6 @@ class Prompts:
                         response=stream_event.data,
                     )
 
-                else:
-                    raise ApiError(
-                        status=500,
-                        code=ApiErrorCodes.InternalServerError,
-                        message=f"Unknown latitude event: {type}",
-                        response=stream_event.data,
-                    )
-
             elif stream_event.event == str(StreamEvents.Provider):
                 event = stream_event.json()
                 event["event"] = StreamEvents.Provider
@@ -201,8 +195,7 @@ class Prompts:
                 response="Stream ended without a chain-complete event. Missing uuid or response.",
             )
 
-
-        return FinishedEvent(uuid=uuid, conversation=conversation, response=response)
+        return FinishedResult(uuid=uuid, conversation=conversation, response=response, tool_requests=tool_requests)
 
     @staticmethod
     def _pause_tool_execution() -> Any:
@@ -210,9 +203,9 @@ class Prompts:
 
     @staticmethod
     async def _wrap_tool_handler(
-        handler: OnToolCall, arguments:
+        handler: OnToolCall, arguments: dict[str, Any], details: OnToolCallDetails
     ) -> ToolResult:
-        tool_result:
+        tool_result: dict[str, Any] = {"id": details.id, "name": details.name}
 
         try:
             result = await handler(arguments, details)
@@ -225,10 +218,10 @@ class Prompts:
             return ToolResult(**tool_result, result=str(exception), is_error=True)
 
     async def _handle_tool_calls(
-        self, result:
-    ) -> Optional[
+        self, result: FinishedResult, options: Union[RunPromptOptions, ChatPromptOptions]
+    ) -> Optional[FinishedResult]:
         # Seems Python cannot infer the type
-        assert result.response.type == StreamTypes.Text and result.
+        assert result.response.type == StreamTypes.Text and result.tool_requests is not None
 
         if not options.tools:
             raise ApiError(
@@ -238,7 +231,7 @@ class Prompts:
                 response="Tools not supplied",
             )
 
-        for tool_call in result.
+        for tool_call in result.tool_requests:
             if tool_call.name not in options.tools:
                 raise ApiError(
                     status=400,
@@ -258,10 +251,10 @@ class Prompts:
                         conversation_uuid=result.uuid,
                         messages=result.conversation,
                         pause_execution=self._pause_tool_execution,
-                        requested_tool_calls=result.
+                        requested_tool_calls=result.tool_requests,
                     ),
                 )
-                for tool_call in result.
+                for tool_call in result.tool_requests
             ],
             return_exceptions=False,
         )
@@ -282,7 +275,7 @@ class Prompts:
 
         next_result = await self.chat(result.uuid, tool_messages, ChatPromptOptions(**dict(options)))
 
-        return
+        return FinishedResult(**dict(next_result)) if next_result else None
 
     async def get(self, path: str, options: Optional[GetPromptOptions] = None) -> GetPromptResult:
         options = GetPromptOptions(**{**dict(self._options), **dict(options or {})})
@@ -299,6 +292,20 @@ class Prompts:
         ) as response:
             return GetPromptResult.model_validate_json(response.content)
 
+    async def get_all(self, options: Optional[GetAllPromptOptions] = None) -> List[GetPromptResult]:
+        options = GetAllPromptOptions(**{**dict(self._options), **dict(options or {})})
+        self._ensure_prompt_options(options)
+        assert options.project_id is not None
+
+        async with self._client.request(
+            handler=RequestHandler.GetAllPrompts,
+            params=GetAllPromptRequestParams(
+                project_id=options.project_id,
+                version_uuid=options.version_uuid,
+            ),
+        ) as response:
+            return _GetAllPromptResults.validate_json(response.content)
+
     async def get_or_create(
         self, path: str, options: Optional[GetOrCreatePromptOptions] = None
     ) -> GetOrCreatePromptResult:
@@ -343,7 +350,7 @@ class Prompts:
             else:
                 result = RunPromptResult.model_validate_json(response.content)
 
-                if options.tools and result.response.type == StreamTypes.Text and result.
+                if options.tools and result.response.type == StreamTypes.Text and result.tool_requests:
                     try:
                         # NOTE: The last sdk.chat called will already call on_finished
                         final_result = await self._handle_tool_calls(result, options)
@@ -352,7 +359,7 @@ class Prompts:
                         pass
 
                 if options.on_finished:
-                    options.on_finished(
+                    options.on_finished(FinishedResult(**dict(result)))
 
                 return RunPromptResult(**dict(result))
 
@@ -395,7 +402,7 @@ class Prompts:
             else:
                 result = ChatPromptResult.model_validate_json(response.content)
 
-                if options.tools and result.response.type == StreamTypes.Text and result.
+                if options.tools and result.response.type == StreamTypes.Text and result.tool_requests:
                     try:
                         # NOTE: The last sdk.chat called will already call on_finished
                         final_result = await self._handle_tool_calls(result, options)
@@ -404,7 +411,7 @@ class Prompts:
                         pass
 
                 if options.on_finished:
-                    options.on_finished(
+                    options.on_finished(FinishedResult(**dict(result)))
 
                 return ChatPromptResult(**dict(result))
 
@@ -424,8 +431,8 @@ class Prompts:
 
         return None
 
-    def _adapt_prompt_config(self, config:
-        adapted_config:
+    def _adapt_prompt_config(self, config: dict[str, Any], adapter: Adapter) -> dict[str, Any]:
+        adapted_config: dict[str, Any] = {}
 
         # NOTE: Should we delete attributes not supported by the provider?
         for attr, value in config.items():
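
A hedged usage sketch of the new `get_all` method. The `prompts.get_all()` call, its `GetAllPromptOptions` argument, and the `List[GetPromptResult]` return type come from the diff above; the top-level `Latitude`/`LatitudeOptions` imports, the `sdk.prompts` attribute, the API key, and the project id are placeholders assumed for illustration.

```python
import asyncio

from latitude_sdk import Latitude, LatitudeOptions  # assumed top-level exports


async def main() -> None:
    # Placeholder credentials and project id, substitute real values.
    sdk = Latitude("my-api-key", LatitudeOptions(project_id=1, version_uuid="live"))

    # New in 1.1.0: list every prompt in the project version with a single call.
    prompts = await sdk.prompts.get_all()
    for prompt in prompts:
        print(prompt.path, prompt.provider)


asyncio.run(main())
```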
latitude_sdk/sdk/types.py
CHANGED
@@ -1,10 +1,10 @@
 from datetime import datetime
-from typing import Any, Callable,
+from typing import Any, Callable, Literal, Optional, Protocol, Sequence, Union, runtime_checkable
 
 from promptl_ai import Message, MessageLike
 
 from latitude_sdk.sdk.errors import ApiError
-from latitude_sdk.util import Field, Model, StrEnum
+from latitude_sdk.util import Adapter, Field, Model, StrEnum
 
 
 class DbErrorRef(Model):
@@ -19,14 +19,27 @@ class Providers(StrEnum):
     Mistral = "mistral"
     Azure = "azure"
     Google = "google"
+    GoogleVertex = "google_vertex"
+    AnthropicVertex = "anthropic_vertex"
     Custom = "custom"
 
 
+class ParameterType(StrEnum):
+    Text = "text"
+    File = "file"
+    Image = "image"
+
+
+class PromptParameter(Model):
+    type: ParameterType
+
+
 class Prompt(Model):
     uuid: str
     path: str
     content: str
-    config:
+    config: dict[str, Any]
+    parameters: dict[str, PromptParameter]
     provider: Optional[Providers] = None
 
 
@@ -49,7 +62,7 @@ class FinishReason(StrEnum):
 class ToolCall(Model):
     id: str
     name: str
-    arguments:
+    arguments: dict[str, Any]
 
 
 class ToolResult(Model):
@@ -67,7 +80,7 @@ class StreamTypes(StrEnum):
 class ChainTextResponse(Model):
     type: Literal[StreamTypes.Text] = Field(default=StreamTypes.Text, alias=str("streamType"))
     text: str
-    tool_calls:
+    tool_calls: list[ToolCall] = Field(alias=str("toolCalls"))
     usage: ModelUsage
 
 
@@ -89,66 +102,105 @@ class ChainError(Model):
 class StreamEvents(StrEnum):
     Latitude = "latitude-event"
     Provider = "provider-event"
-    Finished = "finished-event"
 
 
-ProviderEvent =
+ProviderEvent = dict[str, Any]
 
 
 class ChainEvents(StrEnum):
-
-
-
-
+    ChainStarted = "chain-started"
+    StepStarted = "step-started"
+    ProviderStarted = "provider-started"
+    ProviderCompleted = "provider-completed"
+    ToolsStarted = "tools-started"
+    ToolCompleted = "tool-completed"
+    StepCompleted = "step-completed"
+    ChainCompleted = "chain-completed"
+    ChainError = "chain-error"
+    ToolsRequested = "tools-requested"
+
+
+class GenericChainEvent(Model):
+    event: Literal[StreamEvents.Latitude] = StreamEvents.Latitude
+    messages: list[Message]
+    uuid: str
 
 
-class
-
-    type: Literal[ChainEvents.Step] = ChainEvents.Step
-    uuid: Optional[str] = None
-    is_last_step: bool = Field(alias=str("isLastStep"))
-    config: Dict[str, Any]
-    messages: List[Message]
+class ChainEventChainStarted(GenericChainEvent):
+    type: Literal[ChainEvents.ChainStarted] = ChainEvents.ChainStarted
 
 
-class
-
-    type: Literal[ChainEvents.StepCompleted] = ChainEvents.StepCompleted
-    uuid: Optional[str] = None
-    response: ChainResponse
+class ChainEventStepStarted(GenericChainEvent):
+    type: Literal[ChainEvents.StepStarted] = ChainEvents.StepStarted
 
 
-class
-
-
-
+class ChainEventProviderStarted(GenericChainEvent):
+    type: Literal[ChainEvents.ProviderStarted] = ChainEvents.ProviderStarted
+    config: dict[str, Any]
+
+
+class ChainEventProviderCompleted(GenericChainEvent):
+    type: Literal[ChainEvents.ProviderCompleted] = ChainEvents.ProviderCompleted
+    provider_log_uuid: str = Field(alias=str("providerLogUuid"))
+    token_usage: ModelUsage = Field(alias=str("tokenUsage"))
     finish_reason: FinishReason = Field(alias=str("finishReason"))
-    config: Dict[str, Any]
-    messages: Optional[List[Message]] = None
-    object: Optional[Any] = None
     response: ChainResponse
 
 
-class
-
-
+class ChainEventToolsStarted(GenericChainEvent):
+    type: Literal[ChainEvents.ToolsStarted] = ChainEvents.ToolsStarted
+    tools: list[ToolCall]
+
+
+class ChainEventToolCompleted(GenericChainEvent):
+    type: Literal[ChainEvents.ToolCompleted] = ChainEvents.ToolCompleted
+
+
+class ChainEventStepCompleted(GenericChainEvent):
+    type: Literal[ChainEvents.StepCompleted] = ChainEvents.StepCompleted
+
+
+class ChainEventChainCompleted(GenericChainEvent):
+    type: Literal[ChainEvents.ChainCompleted] = ChainEvents.ChainCompleted
+    token_usage: ModelUsage = Field(alias=str("tokenUsage"))
+    finish_reason: FinishReason = Field(alias=str("finishReason"))
+
+
+class ChainEventChainError(GenericChainEvent):
+    type: Literal[ChainEvents.ChainError] = ChainEvents.ChainError
     error: ChainError
 
 
-
+class ChainEventToolsRequested(GenericChainEvent):
+    type: Literal[ChainEvents.ToolsRequested] = ChainEvents.ToolsRequested
+    tools: list[ToolCall]
+
 
+ChainEvent = Union[
+    ChainEventChainStarted,
+    ChainEventStepStarted,
+    ChainEventProviderStarted,
+    ChainEventProviderCompleted,
+    ChainEventToolsStarted,
+    ChainEventToolCompleted,
+    ChainEventStepCompleted,
+    ChainEventChainCompleted,
+    ChainEventChainError,
+    ChainEventToolsRequested,
+]
 
 LatitudeEvent = ChainEvent
+_LatitudeEvent = Adapter[LatitudeEvent](LatitudeEvent)
 
 
-class
-    event: Literal[StreamEvents.Finished] = StreamEvents.Finished
+class FinishedResult(Model):
     uuid: str
-    conversation:
+    conversation: list[Message]
     response: ChainResponse
+    tool_requests: list[ToolCall] = Field(alias=str("toolRequests"))
 
 
-StreamEvent = Union[ProviderEvent, LatitudeEvent
+StreamEvent = Union[ProviderEvent, LatitudeEvent]
 
 
 class LogSources(StrEnum):
@@ -166,7 +218,7 @@ class Log(Model):
     commit_id: int = Field(alias=str("commitId"))
     resolved_content: str = Field(alias=str("resolvedContent"))
     content_hash: str = Field(alias=str("contentHash"))
-    parameters:
+    parameters: dict[str, Any]
     custom_identifier: Optional[str] = Field(default=None, alias=str("customIdentifier"))
     duration: Optional[int] = None
     created_at: datetime = Field(alias=str("createdAt"))
@@ -204,7 +256,7 @@ class StreamCallbacks(Model):
 
     @runtime_checkable
     class OnFinished(Protocol):
-        def __call__(self,
+        def __call__(self, result: FinishedResult): ...
 
     on_finished: Optional[OnFinished] = None
 
@@ -219,27 +271,27 @@ class OnToolCallDetails(Model):
     id: str
     name: str
     conversation_uuid: str
-    messages:
+    messages: list[Message]
     pause_execution: Callable[[], ToolResult]
-    requested_tool_calls:
+    requested_tool_calls: list[ToolCall]
 
 
 @runtime_checkable
 class OnToolCall(Protocol):
-    async def __call__(self, arguments:
+    async def __call__(self, arguments: dict[str, Any], details: OnToolCallDetails) -> Any: ...
 
 
 @runtime_checkable
 class OnStep(Protocol):
     async def __call__(
-        self, messages:
+        self, messages: list[MessageLike], config: dict[str, Any]
     ) -> Union[str, MessageLike, Sequence[MessageLike]]: ...
 
 
 class SdkOptions(Model):
     project_id: Optional[int] = None
     version_uuid: Optional[str] = None
-    tools: Optional[
+    tools: Optional[dict[str, OnToolCall]] = None
 
 
 class GatewayOptions(Model):
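
As a reading aid for the reworked event model, a minimal sketch of consuming the new discriminated `ChainEvent` union the way the updated `_handle_stream` does, by branching on `event.type`. The event classes and their fields come from the diff above; the `describe` helper itself is hypothetical.

```python
from typing import Optional

from latitude_sdk.sdk.types import (
    ChainEvent,
    ChainEventChainError,
    ChainEventProviderCompleted,
    ChainEventToolsRequested,
    ChainEvents,
)


def describe(event: ChainEvent) -> Optional[str]:
    # Every variant inherits `uuid` and `messages` from GenericChainEvent.
    if event.type == ChainEvents.ProviderCompleted:
        assert isinstance(event, ChainEventProviderCompleted)
        return f"provider finished ({event.finish_reason}); response ready for {event.uuid}"
    if event.type == ChainEvents.ToolsRequested:
        assert isinstance(event, ChainEventToolsRequested)
        return f"{len(event.tools)} tool call(s) requested"
    if event.type == ChainEvents.ChainError:
        assert isinstance(event, ChainEventChainError)
        return f"chain failed: {event.error}"
    return None  # other lifecycle events carry no data this sketch needs
```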
{latitude_sdk-1.0.2.dist-info → latitude_sdk-1.1.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: latitude-sdk
-Version: 1.0.2
+Version: 1.1.0
 Summary: Latitude SDK for Python
 Project-URL: repository, https://github.com/latitude-dev/latitude-llm/tree/main/packages/sdks/python
 Project-URL: homepage, https://github.com/latitude-dev/latitude-llm/tree/main/packages/sdks/python#readme
@@ -60,6 +60,28 @@ Requires uv `0.5.10` or higher.
 - Build package: `uv build`
 - Publish package: `uv publish`
 
+## Run only one test
+
+```python
+import pytest
+
+@pytest.mark.only
+async def my_test(self):
+    # ... your code
+```
+
+And then run the tests with the marker `only`:
+
+```sh
+uv run scripts/test.py -m only
+```
+
+Other way is all in line:
+
+```python
+uv run scripts/test.py <test_path>::<test_case>::<test_name>
+```
+
 ## License
 
 The SDK is licensed under the [LGPL-3.0 License](https://opensource.org/licenses/LGPL-3.0) - read the [LICENSE](/LICENSE) file for details.
{latitude_sdk-1.0.2.dist-info → latitude_sdk-1.1.0.dist-info}/RECORD
CHANGED
@@ -2,19 +2,19 @@ latitude_sdk/__init__.py,sha256=-AbNXLmzDZeGbRdDIOpNjdCbacOvLBflSJwQtLlZfgk,19
 latitude_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 latitude_sdk/client/__init__.py,sha256=d8CnNB8UoGwcftiIeeC0twdg01qNvfpj-v7O40I7IiE,68
 latitude_sdk/client/client.py,sha256=Oc4COkVFR1vFewVKZzUIvztJi_yTxeSMoyeML-ivVsY,4389
-latitude_sdk/client/payloads.py,sha256=
-latitude_sdk/client/router.py,sha256=
+latitude_sdk/client/payloads.py,sha256=GCMX9my5lj9bzTQNbgs3jcr9M0b-axt5g9xqey2bnSs,2860
+latitude_sdk/client/router.py,sha256=CiJnhXDJaqMh7H_bG4bOX-dlrELcpHVH_8l0ZRz7UZo,4414
 latitude_sdk/env/__init__.py,sha256=66of5veJ-u1aNI025L65Rrj321AjrYevMqomTMYIrPQ,19
 latitude_sdk/env/env.py,sha256=MnXexPOHE6aXcAszrDCbW7hzACUv4YtU1bfxpYwvHNw,455
 latitude_sdk/sdk/__init__.py,sha256=C9LlIjfnrS7KOK3-ruXKmbT77nSQMm23nZ6-t8sO8ME,137
 latitude_sdk/sdk/errors.py,sha256=9GlGdDE8LGy3dE2Ry_BipBg-tDbQx7LWXJfSnTJSSBE,1747
 latitude_sdk/sdk/evaluations.py,sha256=fDGtAWjdPG9OuKLit6u-jufVleC1EnshRplK6RN8iyg,2277
-latitude_sdk/sdk/latitude.py,sha256=
+latitude_sdk/sdk/latitude.py,sha256=bfamm58iaUWgUCzi_llgQCRTNmdJ2haFOIkR6yozub4,2928
 latitude_sdk/sdk/logs.py,sha256=CyHkRJvPl_p7wTSvR9bgxEI5akS0Tjc9FeQRb2C2vMg,1997
-latitude_sdk/sdk/prompts.py,sha256=
-latitude_sdk/sdk/types.py,sha256=
+latitude_sdk/sdk/prompts.py,sha256=VaBZr58aQtREWFlpZnKD8tWRFgGwy8Q7TP0fLUElC48,16853
+latitude_sdk/sdk/types.py,sha256=_VMTl2BEpdIfKLZexKGALzd6ml4ayYQf6_yly088BXo,8371
 latitude_sdk/util/__init__.py,sha256=alIDGBnxWH4JvP-UW-7N99seBBi0r1GV1h8f1ERFBec,21
 latitude_sdk/util/utils.py,sha256=hMOmF-u1QaDgOwXN6ME6n4TaQ70yZKLvijDUqNCMwXI,2844
-latitude_sdk-1.0.
-latitude_sdk-1.0.
-latitude_sdk-1.0.
+latitude_sdk-1.1.0.dist-info/METADATA,sha256=pQzBicd7e2sXPGJvVehmaA8KHBL38cxHOSvxlVfkqbc,2368
+latitude_sdk-1.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+latitude_sdk-1.1.0.dist-info/RECORD,,
{latitude_sdk-1.0.2.dist-info → latitude_sdk-1.1.0.dist-info}/WHEEL
File without changes