ccs-llmconnector 1.1.1__py3-none-any.whl → 1.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ccs_llmconnector-1.1.1.dist-info → ccs_llmconnector-1.1.4.dist-info}/METADATA +1 -1
- ccs_llmconnector-1.1.4.dist-info/RECORD +16 -0
- {ccs_llmconnector-1.1.1.dist-info → ccs_llmconnector-1.1.4.dist-info}/WHEEL +1 -1
- llmconnector/__init__.py +23 -21
- llmconnector/anthropic_client.py +266 -266
- llmconnector/client.py +566 -301
- llmconnector/client_cli.py +42 -42
- llmconnector/gemini_client.py +390 -57
- llmconnector/grok_client.py +270 -270
- llmconnector/openai_client.py +407 -263
- llmconnector/types.py +66 -48
- llmconnector/utils.py +77 -77
- ccs_llmconnector-1.1.1.dist-info/RECORD +0 -16
- {ccs_llmconnector-1.1.1.dist-info → ccs_llmconnector-1.1.4.dist-info}/entry_points.txt +0 -0
- {ccs_llmconnector-1.1.1.dist-info → ccs_llmconnector-1.1.4.dist-info}/licenses/LICENSE +0 -0
- {ccs_llmconnector-1.1.1.dist-info → ccs_llmconnector-1.1.4.dist-info}/top_level.txt +0 -0
llmconnector/types.py
CHANGED
|
@@ -1,49 +1,67 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
from
|
|
4
|
-
from
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, Optional, Sequence, TypedDict, Union
|
|
6
|
+
|
|
7
|
+
# Public type aliases used throughout the connector package.
ImageInput = Union[str, Path]   # an image given either as a path-like string or a Path
EmbeddingVector = list[float]   # one embedding: a flat list of floats
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class Message(TypedDict):
    """One chat turn: who spoke (``role``) and the plain-text body (``content``).

    Role values (e.g. "system", "user", "assistant") are not validated here;
    see ``normalize_messages`` for the validation rules applied downstream.
    """

    role: str     # speaker identifier; must be a non-empty string when normalized
    content: str  # message text; ``None`` inputs are coerced to "" during normalization
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# An ordered chat transcript: any sequence of Message dicts.
MessageSequence = Sequence[Message]
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class RequestOptions(TypedDict, total=False):
    """Optional per-request tuning knobs accepted by the client layer.

    ``total=False``: callers may supply any subset of these keys.
    """

    request_id: str               # caller-supplied id, surfaced in retry log lines
    timeout_s: float              # per-request timeout in seconds
    max_retries: int              # extra attempts after the first failure
    retry_backoff_s: float        # base delay for exponential retry backoff
    reasoning_effort: str         # provider-specific reasoning hint — TODO confirm accepted values
    max_tokens: int               # cap on generated tokens
    images: Sequence[ImageInput]  # image attachments (path strings or Path objects)
    messages: MessageSequence     # explicit chat history, merged ahead of a bare prompt
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass(frozen=True)
class TokenUsage:
    """Immutable token accounting for one LLM call.

    Every field defaults to ``None`` for providers that do not report
    the corresponding count.
    """

    input_tokens: int | None = None   # tokens consumed by the request/prompt
    output_tokens: int | None = None  # tokens produced in the completion
    total_tokens: int | None = None   # provider-reported total (may differ from the sum)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@dataclass(frozen=True)
class LLMResponse:
    """Immutable, provider-agnostic result of a single completion call."""

    text: str                        # the generated text (always present)
    usage: TokenUsage | None = None  # token accounting, when the provider reports it
    provider: str | None = None      # identifier of the backing provider
    model: str | None = None         # model name used for this call
    raw: Any | None = None           # untouched provider response for debugging
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def normalize_messages(
    *,
    prompt: Optional[str],
    messages: Optional[MessageSequence],
) -> list[Message]:
    """Merge an optional message history and an optional bare prompt into one list.

    Each supplied message is validated and copied: the role must be a
    non-empty string, and a ``None`` content is coerced to ``""``.  When
    *prompt* is a non-empty string it is appended as a trailing ``user``
    message.  Both arguments may be empty/``None``, yielding ``[]``.

    Raises:
        ValueError: if a message role is missing/empty, or a content value
            is neither ``None`` nor a string.
    """
    normalized: list[Message] = []
    for entry in messages or ():
        role = entry.get("role")
        body = entry.get("content")
        if not (isinstance(role, str) and role):
            raise ValueError("message role must be a non-empty string")
        body = "" if body is None else body
        if not isinstance(body, str):
            raise ValueError("message content must be a string")
        normalized.append({"role": role, "content": body})

    if prompt:
        normalized.append({"role": "user", "content": prompt})

    return normalized
|
llmconnector/utils.py
CHANGED
|
@@ -1,78 +1,78 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import asyncio
|
|
4
|
-
import logging
|
|
5
|
-
import time
|
|
6
|
-
from typing import Awaitable, Callable, TypeVar
|
|
7
|
-
|
|
8
|
-
T = TypeVar("T")
|
|
9
|
-
logger = logging.getLogger(__name__)
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
def clamp_retries(max_retries: int | None) -> int:
|
|
13
|
-
if max_retries is None:
|
|
14
|
-
return 0
|
|
15
|
-
if max_retries < 0:
|
|
16
|
-
return 0
|
|
17
|
-
return max_retries
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
def compute_delay(attempt: int, retry_backoff_s: float) -> float:
|
|
21
|
-
return retry_backoff_s * (2**attempt)
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
def run_with_retries(
|
|
25
|
-
*,
|
|
26
|
-
func: Callable[[], T],
|
|
27
|
-
max_retries: int,
|
|
28
|
-
retry_backoff_s: float,
|
|
29
|
-
request_id: str | None = None,
|
|
30
|
-
) -> T:
|
|
31
|
-
attempt = 0
|
|
32
|
-
while True:
|
|
33
|
-
try:
|
|
34
|
-
return func()
|
|
35
|
-
except Exception as exc:
|
|
36
|
-
if attempt >= max_retries:
|
|
37
|
-
raise
|
|
38
|
-
delay = compute_delay(attempt, retry_backoff_s)
|
|
39
|
-
logger.warning(
|
|
40
|
-
"Retrying LLM request: attempt=%d delay=%.2fs request_id=%s error=%s",
|
|
41
|
-
attempt + 1,
|
|
42
|
-
delay,
|
|
43
|
-
request_id,
|
|
44
|
-
exc,
|
|
45
|
-
)
|
|
46
|
-
time.sleep(delay)
|
|
47
|
-
attempt += 1
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
async def run_with_retries_async(
|
|
51
|
-
*,
|
|
52
|
-
func: Callable[[], Awaitable[T]],
|
|
53
|
-
max_retries: int,
|
|
54
|
-
retry_backoff_s: float,
|
|
55
|
-
request_id: str | None = None,
|
|
56
|
-
) -> T:
|
|
57
|
-
attempt = 0
|
|
58
|
-
while True:
|
|
59
|
-
try:
|
|
60
|
-
return await func()
|
|
61
|
-
except Exception as exc:
|
|
62
|
-
if attempt >= max_retries:
|
|
63
|
-
raise
|
|
64
|
-
delay = compute_delay(attempt, retry_backoff_s)
|
|
65
|
-
logger.warning(
|
|
66
|
-
"Retrying LLM request (async): attempt=%d delay=%.2fs request_id=%s error=%s",
|
|
67
|
-
attempt + 1,
|
|
68
|
-
delay,
|
|
69
|
-
request_id,
|
|
70
|
-
exc,
|
|
71
|
-
)
|
|
72
|
-
await asyncio.sleep(delay)
|
|
73
|
-
attempt += 1
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
async def run_sync_in_thread(func: Callable[[], T]) -> T:
|
|
77
|
-
loop = asyncio.get_running_loop()
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import logging
|
|
5
|
+
import time
|
|
6
|
+
from typing import Awaitable, Callable, TypeVar
|
|
7
|
+
|
|
8
|
+
T = TypeVar("T")  # generic result type threaded through the retry helpers
logger = logging.getLogger(__name__)  # module-scoped logger for retry warnings
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def clamp_retries(max_retries: int | None) -> int:
    """Normalize a retry budget: ``None`` and negative values both mean 0."""
    # `or 0` maps None to 0 (0 itself is unaffected); max() floors negatives.
    return max(max_retries or 0, 0)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def compute_delay(attempt: int, retry_backoff_s: float) -> float:
    """Exponential backoff: the base delay doubles once per completed attempt."""
    return retry_backoff_s * 2.0 ** attempt
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def run_with_retries(
    *,
    func: Callable[[], T],
    max_retries: int,
    retry_backoff_s: float,
    request_id: str | None = None,
) -> T:
    """Call ``func``, retrying on any exception up to ``max_retries`` extra times.

    Before retry ``n + 1`` this sleeps for ``compute_delay(n, retry_backoff_s)``
    seconds (exponential backoff).  Once the budget is spent the last exception
    propagates unchanged.  ``request_id`` only decorates the warning log line.
    """
    failures = 0
    while True:
        try:
            return func()
        except Exception as exc:  # deliberately broad: every failure is retryable
            if failures >= max_retries:
                raise
            pause = compute_delay(failures, retry_backoff_s)
            logger.warning(
                "Retrying LLM request: attempt=%d delay=%.2fs request_id=%s error=%s",
                failures + 1,
                pause,
                request_id,
                exc,
            )
            time.sleep(pause)
            failures += 1
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
async def run_with_retries_async(
    *,
    func: Callable[[], Awaitable[T]],
    max_retries: int,
    retry_backoff_s: float,
    request_id: str | None = None,
) -> T:
    """Await ``func()``, retrying on any exception up to ``max_retries`` extra times.

    Async twin of ``run_with_retries``: identical backoff schedule
    (``compute_delay``) but waits with ``asyncio.sleep`` so the event loop
    stays free.  The last exception propagates once the budget is spent.
    """
    failures = 0
    while True:
        try:
            return await func()
        except Exception as exc:  # deliberately broad: every failure is retryable
            if failures >= max_retries:
                raise
            pause = compute_delay(failures, retry_backoff_s)
            logger.warning(
                "Retrying LLM request (async): attempt=%d delay=%.2fs request_id=%s error=%s",
                failures + 1,
                pause,
                request_id,
                exc,
            )
            await asyncio.sleep(pause)
            failures += 1
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
async def run_sync_in_thread(func: Callable[[], T]) -> T:
    """Run a blocking callable on the default executor without blocking the loop."""
    return await asyncio.get_running_loop().run_in_executor(None, func)
|
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
ccs_llmconnector-1.1.1.dist-info/licenses/LICENSE,sha256=rPcz2YmBB9VUWZTLJcRO_B4jKDpqmGRYi2eSI-unysg,1083
|
|
2
|
-
llmconnector/__init__.py,sha256=w68d7BCkhjjqMaOG9VDkPoYR4PNbv8NJqGmAV_Vyn6g,1358
|
|
3
|
-
llmconnector/anthropic_client.py,sha256=hGFZlUQ4yAs_H8StxWzHAYmqwgNT6lGI4yW8UmKhtm8,13260
|
|
4
|
-
llmconnector/client.py,sha256=2rhDpcBKKbQ1kGp7-Bk8ci_VVe9DEEvBf9nH4hKwg3w,13849
|
|
5
|
-
llmconnector/client_cli.py,sha256=ojLPNJ14lFycsLWtEMtdGpV033v2-23qPMuWqFlnySA,11463
|
|
6
|
-
llmconnector/gemini_client.py,sha256=_XPy6C5WQeuP_GY2eb3dy4DMFX9Oj8DRhb9u7XA5TfI,19155
|
|
7
|
-
llmconnector/grok_client.py,sha256=uicZI28-vw7go677zZB5MTkw5aqWp2UP_GaguNUU99o,11254
|
|
8
|
-
llmconnector/openai_client.py,sha256=wvrEFzltg8PctM2DcNZ_7qmRPpF-CB8WQek6iYoRvR4,11051
|
|
9
|
-
llmconnector/py.typed,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
|
|
10
|
-
llmconnector/types.py,sha256=JhXKjKncdUB2yPd-bqjOUnl-4Wgs6GIleDMChmwPW8w,1287
|
|
11
|
-
llmconnector/utils.py,sha256=Tw6wQLh7Cjk6igLJ-MnYGSSPJBUGl5-7R1xB0Aj7o8Y,2047
|
|
12
|
-
ccs_llmconnector-1.1.1.dist-info/METADATA,sha256=k4esAN7SCw583miw_IqV9sehyt0GGcgGylpLLPAL35s,17001
|
|
13
|
-
ccs_llmconnector-1.1.1.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
|
|
14
|
-
ccs_llmconnector-1.1.1.dist-info/entry_points.txt,sha256=eFvLY3nHAG_QhaKlemhhK7echfezW0KiMdSNMZOStLc,60
|
|
15
|
-
ccs_llmconnector-1.1.1.dist-info/top_level.txt,sha256=Doer7TAUsN8UXQfPHPNsuBXVNCz2uV-Q0v4t4fwv_MM,13
|
|
16
|
-
ccs_llmconnector-1.1.1.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|