livellm 1.1.0__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
livellm/livellm.py CHANGED
@@ -1,6 +1,8 @@
  """LiveLLM Client - Python client for the LiveLLM Proxy and Realtime APIs."""
+ import asyncio
  import httpx
  import json
+ import warnings
  from typing import List, Optional, AsyncIterator, Union
  from .models.common import Settings, SuccessResponse
  from .models.agent.agent import AgentRequest, AgentResponse
@@ -164,6 +166,7 @@ class LivellmClient:
          Should be called when you're done using the client.
          """
          for config in self.settings:
+             config: Settings = config
              await self.delete_config(config.uid)
          await self.client.aclose()

@@ -175,6 +178,32 @@ class LivellmClient:
          """Async context manager exit."""
          await self.cleanup()

+     def __del__(self):
+         """
+         Destructor to clean up resources when the client is garbage collected.
+         This will close the HTTP client and attempt to delete configs if cleanup wasn't called.
+         Note: It's recommended to use the async context manager or call cleanup() explicitly.
+         """
+         # Warn user if cleanup wasn't called
+         if self.settings:
+             warnings.warn(
+                 "LivellmClient is being garbage collected without explicit cleanup. "
+                 "Provider configs may not be deleted from the server. "
+                 "Consider using 'async with' or calling 'await client.cleanup()' explicitly.",
+                 ResourceWarning,
+                 stacklevel=2
+             )
+
+         # Close the httpx client synchronously
+         # httpx.AsyncClient stores a sync Transport that needs cleanup
+         try:
+             with httpx.Client(base_url=self.base_url) as client:
+                 for config in self.settings:
+                     config: Settings = config
+                     client.delete(f"providers/config/{config.uid}", headers=self.headers)
+         except Exception:
+             # Silently fail - we're in a destructor
+             pass

      async def agent_run(
          self,
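
A practical note on the new destructor: CPython ignores `ResourceWarning` by default, so the warning emitted by `__del__` will usually stay invisible unless the caller opts in. Below is a hypothetical development-time sketch of how it could be surfaced; the `from livellm import LivellmClient` path and the `ping()` call are assumed from the package README, and the warning itself only fires when provider configs are still registered (`self.settings` is non-empty):

```python
# Hypothetical development-time sketch (assumes livellm 1.1.1 is installed and a
# LiveLLM server is reachable); it makes the ResourceWarning from
# LivellmClient.__del__ visible on the console.
import asyncio
import gc
import warnings

from livellm import LivellmClient  # import path assumed, not shown in this diff

async def main() -> None:
    client = LivellmClient(base_url="http://localhost:8000")
    await client.ping()
    # Intentionally skip `await client.cleanup()` so the destructor path runs.

warnings.simplefilter("always", ResourceWarning)  # ResourceWarning is filtered out by default
asyncio.run(main())
gc.collect()  # encourage prompt collection so __del__ (and its warning) runs here
```
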
livellm/models/agent/chat.py CHANGED
@@ -1,18 +1,17 @@
  # models for chat messages
- from pydantic import BaseModel, Field, model_validator
+ from pydantic import BaseModel, Field, model_validator, field_serializer
  from enum import Enum
- from typing import Optional
+ from typing import Optional, Union

- class MessageRole(Enum):
+ class MessageRole(str, Enum):
      USER = "user"
      MODEL = "model"
      SYSTEM = "system"


  class Message(BaseModel):
-     role: MessageRole = Field(..., description="The role of the message")
-
-
+     role: Union[MessageRole, str] = Field(..., description="The role of the message")
+
  class TextMessage(Message):
      content: str = Field(..., description="The content of the message")
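
The `str` mixin on `MessageRole`, together with the widened `Union[MessageRole, str]` annotation, is what lets callers pass either an enum member or a plain string for `role`. A standalone sketch of the effect (pydantic v2 assumed, models re-declared here rather than imported from the package):

```python
# Standalone sketch mirroring the models shown above (not imported from livellm):
# both an enum member and a plain string serialize to the same JSON payload.
from enum import Enum
from typing import Union
from pydantic import BaseModel, Field

class MessageRole(str, Enum):
    USER = "user"
    MODEL = "model"
    SYSTEM = "system"

class Message(BaseModel):
    role: Union[MessageRole, str] = Field(..., description="The role of the message")

class TextMessage(Message):
    content: str = Field(..., description="The content of the message")

print(TextMessage(role=MessageRole.USER, content="Hello").model_dump_json())
print(TextMessage(role="user", content="Hello").model_dump_json())
# Both print: {"role":"user","content":"Hello"}
```
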
livellm/models/agent/tools.py CHANGED
@@ -3,7 +3,7 @@ from pydantic import BaseModel, Field, field_validator
  from typing import Literal
  from enum import Enum

- class ToolKind(Enum):
+ class ToolKind(str, Enum):
      WEB_SEARCH = "web_search"
      MCP_STREAMABLE_SERVER = "mcp_streamable_server"

livellm/models/audio/speak.py CHANGED
@@ -6,7 +6,7 @@ from ..common import BaseRequest
  SpeakStreamResponse: TypeAlias = Tuple[AsyncIterator[bytes], str, int]


- class SpeakMimeType(Enum):
+ class SpeakMimeType(str, Enum):
      PCM = "audio/pcm"
      WAV = "audio/wav"
      MP3 = "audio/mpeg"
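
The same `str` mixin is applied to `ToolKind` and `SpeakMimeType` above. A stdlib-only sketch of what that changes for callers, with the member values copied from the diff (this is an illustration, not code from the package):

```python
# With the str mixin, members compare equal to their raw string values and
# json.dumps emits the value instead of raising TypeError.
import json
from enum import Enum

class ToolKind(str, Enum):
    WEB_SEARCH = "web_search"
    MCP_STREAMABLE_SERVER = "mcp_streamable_server"

class SpeakMimeType(str, Enum):
    PCM = "audio/pcm"
    WAV = "audio/wav"
    MP3 = "audio/mpeg"

print(ToolKind.WEB_SEARCH == "web_search")               # True (False with a plain Enum)
print(SpeakMimeType("audio/mpeg") is SpeakMimeType.MP3)  # lookup by value still works
print(json.dumps({"mime_type": SpeakMimeType.MP3}))      # {"mime_type": "audio/mpeg"}
```
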
{livellm-1.1.0.dist-info → livellm-1.1.1.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: livellm
- Version: 1.1.0
+ Version: 1.1.1
  Summary: Python client for the LiveLLM Server
  Project-URL: Homepage, https://github.com/qalby-tech/livellm-client-py
  Project-URL: Repository, https://github.com/qalby-tech/livellm-client-py
@@ -39,18 +39,19 @@ Python client library for the LiveLLM Server - a unified proxy for AI agent, aud
  - 🛠️ **Agent tools** - Web search and MCP server integration
  - 🎙️ **Audio services** - Text-to-speech and transcription
  - ⚡ **Fallback strategies** - Sequential and parallel fallback handling
- - 📦 **Context manager support** - Automatic cleanup with async context managers
+ - 📦 **Smart resource management** - Automatic cleanup via GC, context managers, or manual control
+ - 🧹 **Memory safe** - No resource leaks with multiple cleanup strategies

  ## Installation

  ```bash
- pip install livellm-client
+ pip install livellm
  ```

  Or with development dependencies:

  ```bash
- pip install livellm-client[testing]
+ pip install livellm[testing]
  ```

  ## Quick Start
@@ -181,6 +182,18 @@ print(f"Output: {response.output}")
  print(f"Tokens used - Input: {response.usage.input_tokens}, Output: {response.usage.output_tokens}")
  ```

+ **Note:** You can use either `MessageRole` enum or string values for the `role` parameter:
+
+ ```python
+ # Using enum (recommended for type safety)
+ TextMessage(role=MessageRole.USER, content="Hello")
+
+ # Using string (more convenient)
+ TextMessage(role="user", content="Hello")
+
+ # Both work identically and serialize correctly
+ ```
+
  #### Streaming Agent Response

  ```python
@@ -417,9 +430,32 @@ fallback_request = AudioFallbackRequest(
  audio = await client.speak(fallback_request)
  ```

- ## Context Manager Support
+ ## Resource Management
+
+ The client provides multiple ways to manage resources and cleanup:
+
+ ### 1. Automatic Cleanup (Garbage Collection)
+
+ The client automatically cleans up when garbage collected:
+
+ ```python
+ async def main():
+     client = LivellmClient(base_url="http://localhost:8000")
+
+     # Use client...
+     response = await client.ping()
+
+     # No explicit cleanup needed - handled automatically when object is destroyed
+     # Note: Provider configs are deleted synchronously from the server
+
+ asyncio.run(main())
+ ```
+
+ **Note**: While automatic cleanup works, it shows a `ResourceWarning` if configs exist to encourage explicit cleanup for immediate resource release.

- The client supports async context managers for automatic cleanup:
+ ### 2. Context Manager (Recommended)
+
+ Use async context managers for guaranteed cleanup:

  ```python
  async with LivellmClient(base_url="http://localhost:8000") as client:
@@ -433,17 +469,32 @@ async with LivellmClient(base_url="http://localhost:8000") as client:
      # Automatically cleans up configs and closes HTTP client
  ```

- Or manually:
+ ### 3. Manual Cleanup
+
+ Explicitly call cleanup in a try/finally block:

  ```python
  client = LivellmClient(base_url="http://localhost:8000")
  try:
      # Use client...
-     pass
+     response = await client.ping()
  finally:
      await client.cleanup()
  ```

+ ### Cleanup Behavior
+
+ The `cleanup()` method:
+ - Deletes all provider configs created by the client
+ - Closes the HTTP client connection
+ - Is idempotent (safe to call multiple times)
+
+ The `__del__()` destructor (automatic cleanup):
+ - Triggers when the object is garbage collected
+ - Synchronously deletes provider configs from the server
+ - Closes the HTTP client connection
+ - Shows a `ResourceWarning` if configs exist (to encourage explicit cleanup)
+
  ## API Reference

  ### Client Methods
@@ -468,8 +519,9 @@ finally:
  - `transcribe_json(request: TranscribeRequest | TranscribeFallbackRequest) -> TranscribeResponse` - JSON transcription

  #### Cleanup
- - `cleanup() -> None` - Clean up resources and close client
+ - `cleanup() -> None` - Clean up resources and close client (async)
  - `__aenter__() / __aexit__()` - Async context manager support
+ - `__del__()` - Automatic cleanup when garbage collected (sync)

  ### Models
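
The "Cleanup Behavior" notes above describe `cleanup()` as idempotent. A hypothetical sketch of a manual-cleanup flow that leans on that documented guarantee (assumes a LiveLLM server at `localhost:8000`; the import path is assumed):

```python
# Hypothetical manual-cleanup sketch relying on the documented idempotency:
# calling cleanup() again after an earlier cleanup is stated to be safe.
import asyncio
from livellm import LivellmClient  # import path assumed

async def main() -> None:
    client = LivellmClient(base_url="http://localhost:8000")
    try:
        await client.ping()
        await client.cleanup()   # release provider configs as soon as work is done
    finally:
        await client.cleanup()   # repeat call is safe per the README's idempotency note

asyncio.run(main())
```
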
{livellm-1.1.0.dist-info → livellm-1.1.1.dist-info}/RECORD RENAMED
@@ -1,17 +1,17 @@
  livellm/__init__.py,sha256=JG_0-UCfQI_3D0Y2PzobZLS5OhJwK76i8t81ye0KpfY,279
- livellm/livellm.py,sha256=0C4LpQy3EOzxNQ6ltIZqStquYuV1WoKcJSFMVtkI4Sk,8635
+ livellm/livellm.py,sha256=4S51uZtNk3YUZ4Wx6Arqbnu4jWkL8rkn67ICskY8WGg,9927
  livellm/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  livellm/models/__init__.py,sha256=JBUd1GkeDexLSdjSOcUet78snu0NNxnhU7mBN3BhqIA,1199
  livellm/models/common.py,sha256=YqRwP6ChWbRdoen4MU6RO4u6HeM0mQJbXiiRV4DuauM,1740
  livellm/models/fallback.py,sha256=AybOrNEP_5JwgForVTWGlK39PWvvapjj4UP7sx3e5qU,1144
  livellm/models/agent/__init__.py,sha256=KVm6AgQoWEaoq47QAG4Ou4NimoXOTkjXC-0-gnMRLZ8,476
  livellm/models/agent/agent.py,sha256=2qCh-SsHPhrL7-phv0HpojNgixXg42FhVW8tOv8K7z8,980
- livellm/models/agent/chat.py,sha256=whDuFo8ddR6dPwKo0mMZS7LzCBkL3sVb4lr7tVXjg-M,983
- livellm/models/agent/tools.py,sha256=gHyVUjK6HzXB6Sd64TIM6pLjmTKSGe-fOEH9ELs5dgg,1398
+ livellm/models/agent/chat.py,sha256=zGfeEHx0luwq23pqWF1megcuEDUl6IhV4keLJeZry_A,1028
+ livellm/models/agent/tools.py,sha256=wVWfx6_jxL3IcmX_Nt_PonZ3RQLtpfqJnszHz32BQiU,1403
  livellm/models/audio/__init__.py,sha256=sz2NxCOfFGVvp-XQUsdgOR_TYBO1Wb-8LLXaZDEiAZk,282
- livellm/models/audio/speak.py,sha256=4cJhXonImeohL2Fltc2ub_aCGeTAoew6Hnz-myrrR8k,1001
+ livellm/models/audio/speak.py,sha256=KvENOE_Lf8AWBhzCMqu1dqGYv4WqaLf7fuWz8OYfJo8,1006
  livellm/models/audio/transcribe.py,sha256=0XtK_f5cYPO4VMD0lh6tYYQQPFaj4g3N2eK7nzuEjKY,2111
- livellm-1.1.0.dist-info/METADATA,sha256=RuCnzRRlAF5VhAmpyalSqhx2aWYrJlSdPWAmt92GFIo,15207
- livellm-1.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- livellm-1.1.0.dist-info/licenses/LICENSE,sha256=yapGO2C_00ymEx6TADdbU8Oyc1bWOrZY-fjP-agmFL4,1071
- livellm-1.1.0.dist-info/RECORD,,
+ livellm-1.1.1.dist-info/METADATA,sha256=LKhvr1FkbJYli1s4J5Ve5isAp_IV7mA25ZOcv_BfQ6s,16896
+ livellm-1.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ livellm-1.1.1.dist-info/licenses/LICENSE,sha256=yapGO2C_00ymEx6TADdbU8Oyc1bWOrZY-fjP-agmFL4,1071
+ livellm-1.1.1.dist-info/RECORD,,