davidkhala.ai 0.0.3__tar.gz → 0.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/PKG-INFO +4 -4
  2. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/agent/langgraph.py +2 -4
  3. davidkhala_ai-0.0.5/davidkhala/ai/api/__init__.py +47 -0
  4. davidkhala_ai-0.0.5/davidkhala/ai/api/openrouter.py +60 -0
  5. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/api/siliconflow.py +5 -2
  6. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/model.py +1 -1
  7. davidkhala_ai-0.0.5/davidkhala/ai/openai/__init__.py +63 -0
  8. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/pyproject.toml +3 -3
  9. davidkhala_ai-0.0.3/davidkhala/ai/agent/dify.py +0 -0
  10. davidkhala_ai-0.0.3/davidkhala/ai/api/__init__.py +0 -65
  11. davidkhala_ai-0.0.3/davidkhala/ai/api/open.py +0 -35
  12. davidkhala_ai-0.0.3/davidkhala/ai/openai/__init__.py +0 -57
  13. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/.gitignore +0 -0
  14. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/README.md +0 -0
  15. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/__init__.py +0 -0
  16. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/agent/__init__.py +0 -0
  17. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/ali/__init__.py +0 -0
  18. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/ali/dashscope.py +0 -0
  19. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/google/__init__.py +0 -0
  20. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/google/adk.py +0 -0
  21. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/google/gemini.py +0 -0
  22. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/openai/azure.py +0 -0
  23. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/openai/native.py +0 -0
  24. {davidkhala_ai-0.0.3 → davidkhala_ai-0.0.5}/davidkhala/ai/opik.py +0 -0
@@ -1,12 +1,14 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: davidkhala.ai
3
- Version: 0.0.3
3
+ Version: 0.0.5
4
4
  Summary: misc AI modules
5
5
  Requires-Python: >=3.13
6
6
  Provides-Extra: ali
7
7
  Requires-Dist: dashscope; extra == 'ali'
8
8
  Provides-Extra: api
9
- Requires-Dist: requests; extra == 'api'
9
+ Requires-Dist: davidkhala-utils[http-request]; extra == 'api'
10
+ Provides-Extra: azure
11
+ Requires-Dist: openai; extra == 'azure'
10
12
  Provides-Extra: google
11
13
  Requires-Dist: google-adk; extra == 'google'
12
14
  Requires-Dist: google-genai; extra == 'google'
@@ -14,7 +16,5 @@ Provides-Extra: langchain
14
16
  Requires-Dist: langchain; extra == 'langchain'
15
17
  Requires-Dist: langchain-openai; extra == 'langchain'
16
18
  Requires-Dist: langgraph; extra == 'langchain'
17
- Provides-Extra: openai
18
- Requires-Dist: openai; extra == 'openai'
19
19
  Provides-Extra: telemetry
20
20
  Requires-Dist: opik; (python_version < '3.14') and extra == 'telemetry'
@@ -1,8 +1,6 @@
1
1
  from langchain_openai import ChatOpenAI
2
2
  from langgraph.prebuilt import create_react_agent
3
3
 
4
- from davidkhala.ai.api.open import Leaderboard
5
-
6
4
 
7
5
  class Agent:
8
6
 
@@ -18,10 +16,10 @@ class Agent:
18
16
 
19
17
 
20
18
  class OpenRouterModel:
21
- def __init__(self, api_key, leaderboard: Leaderboard = None):
19
+ def __init__(self, api_key, leaderboard: dict = None):
22
20
  self.api_key = api_key
23
21
 
24
- if leaderboard is not None:
22
+ if leaderboard:
25
23
  self.headers = {
26
24
  "HTTP-Referer": leaderboard['url'],
27
25
  "X-Title": leaderboard['name'],
@@ -0,0 +1,47 @@
1
+ import datetime
2
+ from abc import abstractmethod
3
+
4
+ from davidkhala.http_request import Request
5
+
6
+ from davidkhala.ai.model import AbstractClient
7
+
8
+
9
+ class API(AbstractClient):
10
+ def __init__(self, api_key: str, base_url: str):
11
+ self.api_key = api_key
12
+ self.base_url = base_url + '/v1'
13
+ self._ = Request({"bearer": api_key})
14
+
15
+ @property
16
+ @abstractmethod
17
+ def free_models(self) -> list[str]:
18
+ ...
19
+
20
+ def chat(self, *user_prompt: str, **kwargs):
21
+ messages = [
22
+ *self.messages,
23
+ *[{
24
+ "role": "user",
25
+ "content": _
26
+ } for _ in user_prompt],
27
+ ]
28
+
29
+ json = {
30
+ "messages": messages,
31
+ **kwargs,
32
+ }
33
+
34
+ response = self._.request(f"{self.base_url}/chat/completions", "POST", json=json)
35
+
36
+ return {
37
+ "data": list(map(lambda x: x['message']['content'], response['choices'])),
38
+ "meta": {
39
+ "usage": response['usage'],
40
+ "created": datetime.datetime.fromtimestamp(response['created'])
41
+ },
42
+ 'model': response['model'],
43
+ }
44
+
45
+ def list_models(self):
46
+ response = self._.request(f"{self.base_url}/models", "GET")
47
+ return response['data']
@@ -0,0 +1,60 @@
1
+ import time
2
+
3
+ import requests
4
+ from davidkhala.http_request import default_on_response
5
+ from requests import Response
6
+
7
+ from davidkhala.ai.api import API
8
+
9
+
10
+ class OpenRouter(API):
11
+ @property
12
+ def free_models(self) -> list[str]:
13
+ return list(
14
+ map(lambda model: model['id'],
15
+ filter(lambda model: model['id'].endswith(':free'), self.list_models())
16
+ )
17
+ )
18
+
19
+ @staticmethod
20
+ def on_response(response: requests.Response):
21
+ r = default_on_response(response)
22
+ # openrouter special error on response.ok
23
+ err = r.get('error')
24
+ if err:
25
+ derived_response = Response()
26
+ derived_response.status_code = err['code']
27
+ derived_response.reason = err['message']
28
+ derived_response.metadata = err.get("metadata")
29
+
30
+ derived_response.raise_for_status()
31
+ return r
32
+
33
+ def __init__(self, api_key: str, *models: str, **kwargs):
34
+
35
+ super().__init__(api_key, 'https://openrouter.ai/api')
36
+
37
+ if 'leaderboard' in kwargs and type(kwargs['leaderboard']) is dict:
38
+ self._.options["headers"]["HTTP-Referer"] = kwargs['leaderboard'][
39
+ 'url'] # Site URL for rankings on openrouter.ai.
40
+ self._.options["headers"]["X-Title"] = kwargs['leaderboard'][
41
+ 'name'] # Site title for rankings on openrouter.ai.
42
+ self.models = models
43
+
44
+ self._.on_response = OpenRouter.on_response
45
+
46
+ def chat(self, *user_prompt: str, **kwargs):
47
+ if self.models:
48
+ kwargs["models"] = self.models
49
+ else:
50
+ kwargs["model"] = self.model
51
+
52
+ try:
53
+ r = super().chat(*user_prompt, **kwargs)
54
+ except requests.HTTPError as e:
55
+ if e.response.status_code == 429 and kwargs.get('retry'):
56
+ time.sleep(1)
57
+ return self.chat(*user_prompt, **kwargs)
58
+ if self.models:
59
+ assert r['model'] in self.models
60
+ return r
@@ -32,6 +32,9 @@ class SiliconFlow(API):
32
32
  'Kwai-Kolors/Kolors'
33
33
  ]
34
34
 
35
- def __init__(self, api_key: str, model: str):
35
+ def __init__(self, api_key: str):
36
36
  super().__init__(api_key, 'https://api.siliconflow.cn')
37
- self.model = model
37
+ self._.options['timeout'] = 50
38
+ def chat(self, *user_prompt: str, **kwargs):
39
+ kwargs['model'] = self.model
40
+ return super().chat(*user_prompt, **kwargs)
@@ -16,7 +16,7 @@ class AbstractClient(ABC):
16
16
  def as_embeddings(self, model: str):
17
17
  self.model = model
18
18
 
19
- def chat(self, user_prompt: str, **kwargs):
19
+ def chat(self, *user_prompt, **kwargs):
20
20
  ...
21
21
 
22
22
  def encode(self, *_input: str) -> List[List[float]]:
@@ -0,0 +1,63 @@
1
+ import runpy
2
+ from typing import Union, Literal, List
3
+
4
+ from openai import OpenAI, AsyncOpenAI
5
+
6
+ from davidkhala.ai.model import AbstractClient
7
+
8
+
9
+ class Client(AbstractClient):
10
+ client: OpenAI
11
+ encoding_format: Literal["float", "base64"] = "float"
12
+ n = 1
13
+
14
+ def connect(self):
15
+ self.client.models.list()
16
+
17
+ def encode(self, *_input: str) -> List[List[float]]:
18
+ response = self.client.embeddings.create(
19
+ model=self.model,
20
+ input=list(_input),
21
+ encoding_format=self.encoding_format
22
+ )
23
+ return [item.embedding for item in response.data]
24
+
25
+ def chat(self, *user_prompt):
26
+
27
+ messages = [
28
+ *self.messages,
29
+ ]
30
+ for prompt in user_prompt:
31
+ message = {
32
+ "role": "user"
33
+ }
34
+ if type(prompt) == str:
35
+ message['content'] = prompt
36
+ elif type(prompt) == dict:
37
+ message['content'] = [
38
+ {"type": "text", "text": prompt['text']},
39
+ {
40
+ "type": "image_url",
41
+ "image_url": {
42
+ "url": prompt['image_url'],
43
+ }
44
+ },
45
+ ]
46
+ messages.append(message)
47
+ response = self.client.chat.completions.create(
48
+ model=self.model,
49
+ messages=messages,
50
+ n=self.n
51
+ )
52
+ contents = [choice.message.content for choice in response.choices]
53
+ assert len(contents) == self.n
54
+ return contents
55
+
56
+ def disconnect(self):
57
+ self.client.close()
58
+
59
+
60
+ def with_opik(instance: Union[OpenAI, AsyncOpenAI]):
61
+ from opik.integrations.openai import track_openai
62
+ runpy.run_path('../opik.py')
63
+ return track_openai(instance)
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "davidkhala.ai"
3
- version = "0.0.3"
3
+ version = "0.0.5"
4
4
  description = "misc AI modules"
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.13"
@@ -14,12 +14,12 @@ google = [
14
14
  "google-genai", "google-adk"
15
15
  ]
16
16
  api = [
17
- "requests",
17
+ "davidkhala.utils[http_request]",
18
18
  ]
19
19
  ali = [
20
20
  "dashscope"
21
21
  ]
22
- openai = [
22
+ azure = [
23
23
  "openai"
24
24
  ]
25
25
  telemetry =[
File without changes
@@ -1,65 +0,0 @@
1
- import datetime
2
- from abc import abstractmethod, ABC
3
-
4
- import requests
5
-
6
- # TODO Think openrouter as exceptional case
7
- class API(ABC):
8
- def __init__(self, api_key: str, base_url: str):
9
- self.base_url = base_url+'/v1'
10
- self.model = None
11
- self.headers = {
12
- "Authorization": f"Bearer {api_key}",
13
- }
14
- @property
15
- @abstractmethod
16
- def free_models(self)->list[str]:
17
- ...
18
-
19
- def pre_request(self, headers: dict, data: dict):
20
- data["model"] = self.model
21
- def chat(self, prompt, system_prompt: str = None):
22
-
23
-
24
- messages = [
25
- {
26
- "role": "user",
27
- "content": prompt
28
- }
29
- ]
30
- if system_prompt is not None:
31
- messages.append({
32
- "role": "system",
33
- "content": system_prompt
34
- })
35
- json = {
36
- "messages": messages
37
- }
38
- self.pre_request(self.headers, json)
39
- # timeout=50 to cater siliconflow
40
- response = requests.post(f"{self.base_url}/chat/completions", headers=self.headers, json=json, timeout=50)
41
- parsed_response = API.parse(response)
42
-
43
-
44
- return {
45
- "data": list(map(lambda x: x['message']['content'], parsed_response['choices'])),
46
- "meta": {
47
- "usage": parsed_response['usage'],
48
- "created": datetime.datetime.fromtimestamp(parsed_response['created'])
49
- }
50
- }
51
- @staticmethod
52
- def parse(response):
53
- parsed_response = response.json()
54
-
55
- match parsed_response:
56
- case dict():
57
- err = parsed_response.get('error')
58
- if err is not None:
59
- raise Exception(err)
60
- case str():
61
- raise Exception(parsed_response)
62
- return parsed_response
63
- def list_models(self):
64
- response = requests.get(f"{self.base_url}/models", headers=self.headers)
65
- return API.parse(response)['data']
@@ -1,35 +0,0 @@
1
- from typing import TypedDict, Optional
2
-
3
- from davidkhala.ai.api import API
4
-
5
-
6
- class Leaderboard(TypedDict):
7
- url:Optional[str]
8
- name:Optional[str]
9
-
10
- class OpenRouter(API):
11
- @property
12
- def free_models(self) -> list[str]:
13
- return list(
14
- map(lambda model: model['id'],
15
- filter(lambda model: model['id'].endswith(':free'), self.list_models())
16
- )
17
- )
18
-
19
- def __init__(self, api_key: str, models: list[str] = None, *,
20
- leaderboard: Leaderboard = None):
21
-
22
- super().__init__(api_key, 'https://openrouter.ai/api')
23
- self.leaderboard = leaderboard
24
- if models is None:
25
- models = [self.free_models[0]]
26
- self.models = models
27
- # TODO Hard to multi-model supports here
28
- def pre_request(self, headers: dict, data: dict):
29
- if self.leaderboard is not None:
30
- headers["HTTP-Referer"] = self.leaderboard['url'], # Optional. Site URL for rankings on openrouter.ai.
31
- headers["X-Title"] = self.leaderboard['name'], # Optional. Site title for rankings on openrouter.ai.
32
- if len(self.models) > 1:
33
- data["models"] = self.models
34
- else:
35
- data["model"] = self.models[0]
@@ -1,57 +0,0 @@
1
- import runpy
2
- from typing import Union, Literal, List
3
-
4
- from openai import OpenAI, AsyncOpenAI
5
-
6
- from davidkhala.ai.model import AbstractClient
7
-
8
-
9
- class Client(AbstractClient):
10
- client: OpenAI
11
- encoding_format: Literal["float", "base64"] = "float"
12
-
13
- def connect(self):
14
- self.client.models.list()
15
-
16
- def encode(self, *_input: str) -> List[List[float]]:
17
- response = self.client.embeddings.create(
18
- model=self.model,
19
- input=list(_input),
20
- encoding_format=self.encoding_format
21
- )
22
- return [item.embedding for item in response.data]
23
-
24
- def chat(self, user_prompt, image: str = None):
25
-
26
- message = {
27
- "role": "user"
28
- }
29
- if image is None:
30
- message['content'] = user_prompt
31
- else:
32
- message['content'] = [
33
- {"type": "text", "text": user_prompt},
34
- {
35
- "type": "image_url",
36
- "image_url": {
37
- "url": image,
38
- }
39
- },
40
- ]
41
- response = self.client.chat.completions.create(
42
- model=self.model,
43
- messages=[
44
- *self.messages,
45
- message
46
- ],
47
- )
48
- return response.choices[0].message.content
49
-
50
- def disconnect(self):
51
- self.client.close()
52
-
53
-
54
- def with_opik(instance: Union[OpenAI, AsyncOpenAI]):
55
- from opik.integrations.openai import track_openai
56
- runpy.run_path('../opik.py')
57
- return track_openai(instance)
File without changes
File without changes