davidkhala.ai 0.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3 @@
1
+ .venv
2
+ uv.lock
3
+ .idea
@@ -0,0 +1,17 @@
1
+ Metadata-Version: 2.4
2
+ Name: davidkhala.ai
3
+ Version: 0.0.0
4
+ Summary: misc AI modules
5
+ Requires-Python: >=3.13
6
+ Requires-Dist: opik
7
+ Provides-Extra: api
8
+ Requires-Dist: requests; extra == 'api'
9
+ Provides-Extra: google
10
+ Requires-Dist: google-adk; extra == 'google'
11
+ Requires-Dist: google-genai; extra == 'google'
12
+ Provides-Extra: langchain
13
+ Requires-Dist: langchain; extra == 'langchain'
14
+ Requires-Dist: langchain-openai; extra == 'langchain'
15
+ Requires-Dist: langgraph; extra == 'langchain'
16
+ Provides-Extra: openai
17
+ Requires-Dist: openai; extra == 'openai'
File without changes
File without changes
File without changes
@@ -0,0 +1,37 @@
1
+ from langchain_openai import ChatOpenAI
2
+ from langgraph.prebuilt import create_react_agent
3
+
4
+ from davidkhala.ai.api.open import Leaderboard
5
+
6
+
7
class Agent:
    """Thin wrapper around a LangGraph prebuilt ReAct agent."""

    def __init__(self, model, instruction, *tools):
        # Wire model, tool set and system instruction into a ReAct graph.
        self.agent = create_react_agent(model=model, tools=tools, prompt=instruction)

    def invoke(self, content):
        """Send one user message and return the agent's final message."""
        state = self.agent.invoke({"messages": [{"role": "user", "content": content}]})
        return state['messages'][-1]
19
+
20
class OpenRouterModel:
    """Factory for ChatOpenAI instances pointed at the OpenRouter endpoint."""

    def __init__(self, api_key, leaderboard: Leaderboard = None):
        self.api_key = api_key
        if leaderboard is not None:
            # Attribution headers for openrouter.ai leaderboard rankings.
            self.headers = {
                "HTTP-Referer": leaderboard['url'],
                "X-Title": leaderboard['name'],
            }

    def init_chat_model(self, model):
        """https://openrouter.ai/docs/community/lang-chain"""
        attribution = getattr(self, "headers", None)
        return ChatOpenAI(
            model=model,
            api_key=self.api_key,
            base_url='https://openrouter.ai/api/v1',
            default_headers=attribution,
        )
@@ -0,0 +1,66 @@
1
+ import datetime
2
+ from abc import abstractmethod, ABC
3
+
4
+ import requests
5
+
6
+
7
+ class API(ABC):
8
+ def __init__(self, api_key: str, base_url: str):
9
+ self.base_url = base_url+'/v1'
10
+ self.model = None
11
+ self.headers = {
12
+ "Authorization": f"Bearer {api_key}",
13
+ }
14
+ @property
15
+ @abstractmethod
16
+ def free_models(self)->list[str]:
17
+ ...
18
+
19
+ @abstractmethod
20
+ def pre_request(self, headers: dict, data: dict):
21
+ data["model"] = self.model
22
+ def chat(self, prompt, system_prompt: str = None):
23
+
24
+
25
+ messages = [
26
+ {
27
+ "role": "user",
28
+ "content": prompt
29
+ }
30
+ ]
31
+ if system_prompt is not None:
32
+ messages.append({
33
+ "role": "system",
34
+ "content": system_prompt
35
+ })
36
+ json = {
37
+ "messages": messages
38
+ }
39
+ self.pre_request(self.headers, json)
40
+ # timeout=50 to cater siliconflow
41
+ response = requests.post(f"{self.base_url}/chat/completions", headers=self.headers, json=json, timeout=50)
42
+ parsed_response = API.parse(response)
43
+
44
+
45
+ return {
46
+ "data": list(map(lambda x: x['message']['content'], parsed_response['choices'])),
47
+ "meta": {
48
+ "usage": parsed_response['usage'],
49
+ "created": datetime.datetime.fromtimestamp(parsed_response['created'])
50
+ }
51
+ }
52
+ @staticmethod
53
+ def parse(response):
54
+ parsed_response = response.json()
55
+
56
+ match parsed_response:
57
+ case dict():
58
+ err = parsed_response.get('error')
59
+ if err is not None:
60
+ raise Exception(err)
61
+ case str():
62
+ raise Exception(parsed_response)
63
+ return parsed_response
64
+ def list_models(self):
65
+ response = requests.get(f"{self.base_url}/models", headers=self.headers)
66
+ return API.parse(response)['data']
@@ -0,0 +1,35 @@
1
+ from typing import TypedDict, Optional
2
+
3
+ from davidkhala.ai.api import API
4
+
5
+
6
# Attribution metadata for openrouter.ai leaderboard rankings
# (functional TypedDict syntax). Values may be None.
Leaderboard = TypedDict('Leaderboard', {'url': Optional[str], 'name': Optional[str]})
10
class OpenRouter(API):
    """OpenRouter chat API client (https://openrouter.ai)."""

    @property
    def free_models(self) -> list[str]:
        # Free-tier models are flagged by a ':free' id suffix.
        return [model['id'] for model in self.list_models() if model['id'].endswith(':free')]

    def __init__(self, api_key: str, models: list[str] = None, *,
                 leaderboard: Leaderboard = None):
        """
        :param models: preferred models, in fallback order; defaults to the
            first currently-free model (requires a network call).
        :param leaderboard: optional site attribution for openrouter.ai rankings.
        """
        super().__init__(api_key, 'https://openrouter.ai/api')
        self.leaderboard = leaderboard
        if models is None:
            models = [self.free_models[0]]
        self.models = models

    def pre_request(self, headers: dict, data: dict):
        if self.leaderboard is not None:
            # Fix: the original lines ended with trailing commas, which made
            # these header values 1-tuples instead of strings (invalid headers).
            headers["HTTP-Referer"] = self.leaderboard['url']  # Optional. Site URL for rankings on openrouter.ai.
            headers["X-Title"] = self.leaderboard['name']  # Optional. Site title for rankings on openrouter.ai.
        if len(self.models) > 1:
            # OpenRouter routes across a `models` list as fallbacks.
            data["models"] = self.models
        else:
            data["model"] = self.models[0]
@@ -0,0 +1,39 @@
1
+ from davidkhala.ai.api import API
2
+
3
+
4
+ class SiliconFlow(API):
5
+ @property
6
+ def free_models(self) -> list[str]:
7
+ """
8
+ Cannot be lively fetched by list_models
9
+ """
10
+ return [
11
+ # chat section
12
+ 'THUDM/GLM-4.1V-9B-Thinking'
13
+ 'THUDM/GLM-Z1-9B-0414'
14
+ 'THUDM/GLM-4-9B-0414'
15
+ 'THUDM/glm-4-9b-chat'
16
+ 'Qwen/Qwen3-8B'
17
+ 'Qwen/Qwen2.5-7B-Instruct'
18
+ 'Qwen/Qwen2.5-Coder-7B-Instruct'
19
+ 'internlm/internlm2_5-7b-chat'
20
+ 'deepseek-ai/DeepSeek-R1-0528-Qwen3-8B',
21
+ 'deepseek-ai/DeepSeek-R1-Distill-Qwen-7B',
22
+ # embedding and reranker
23
+ 'BAAI/bge-m3'
24
+ 'BAAI/bge-reranker-v2-m3'
25
+ 'BAAI/bge-large-zh-v1.5'
26
+ 'BAAI/bge-large-en-v1.5'
27
+ 'netease-youdao/bce-reranker-base_v1'
28
+ 'netease-youdao/bce-embedding-base_v1'
29
+ # Audio
30
+ 'FunAudioLLM/SenseVoiceSmall'
31
+ # image
32
+ 'Kwai-Kolors/Kolors'
33
+ ]
34
+
35
+ def __init__(self, api_key: str, model: str):
36
+ super().__init__(api_key, 'https://api.siliconflow.cn')
37
+ self.model = model
38
+ def pre_request(self, headers: dict, data: dict):
39
+ super().pre_request(headers, data)
File without changes
@@ -0,0 +1,20 @@
1
+ from google.adk.agents import Agent
2
+
3
def with_opik(agent: Agent, **options):
    """Attach an OpikTracer's lifecycle callbacks onto a google-adk Agent in place."""
    from opik.integrations.adk import OpikTracer

    tracer = OpikTracer(
        name=agent.name,
        metadata={
            "model": agent.model,
            "framework": "google-adk",
        },
        **options,
    )
    # Route every agent/model/tool lifecycle hook through the tracer.
    agent.before_agent_callback = tracer.before_agent_callback
    agent.after_agent_callback = tracer.after_agent_callback
    agent.before_model_callback = tracer.before_model_callback
    agent.after_model_callback = tracer.after_model_callback
    agent.before_tool_callback = tracer.before_tool_callback
    agent.after_tool_callback = tracer.after_tool_callback
@@ -0,0 +1,9 @@
1
+ import runpy
2
+
3
+ from google.genai import Client
4
+
5
+
6
def with_opik(client: Client) -> Client:
    """Wrap a google-genai Client with Opik tracing, running opik configuration first."""
    from pathlib import Path

    from opik.integrations.genai import track_genai

    # Fix: '../opik.py' was resolved against the process CWD; resolve it against
    # this module's location instead.
    # NOTE(review): assumes this module sits one package level below opik.py — confirm layout.
    runpy.run_path(str(Path(__file__).resolve().parent.parent / 'opik.py'))
    return track_genai(client)
@@ -0,0 +1,67 @@
1
+ import runpy
2
+ from abc import ABC
3
+ from typing import Union, Optional, Literal, List
4
+
5
+ from openai import OpenAI, AsyncOpenAI
6
+
7
+
8
class Client(ABC):
    """Shared convenience wrapper around an OpenAI-compatible SDK client.

    Subclasses are expected to assign `self.client` in their own __init__.
    """
    api_key: str
    base_url: str
    model: Optional[str]
    # NOTE(review): class-level mutable default, shared across instances until
    # as_chat() rebinds it. Safe only while nothing mutates it in place — confirm
    # before appending to self.messages anywhere.
    messages = []
    client: OpenAI

    def as_chat(self, model, sys_prompt: str = None):
        """Select a chat model, optionally seeding a system prompt."""
        self.model = model
        if sys_prompt is not None:
            self.messages = [{"role": "system", "content": sys_prompt}]

    def as_embeddings(self, model, encoding_format: str = "float"):
        """Select an embeddings model.

        Fix: `encoding_format` was previously accepted and silently dropped;
        it is now recorded on the instance for reference. encode() still takes
        its own `_format` argument, unchanged.
        """
        self.model = model
        self.encoding_format = encoding_format

    def connect(self):
        """Probe connectivity and credentials by listing models."""
        self.client.models.list()

    def encode(self, _input: str, _format: Literal["float", "base64"] = "float") -> List[float]:
        """Embed a single input string and return its vector."""
        response = self.client.embeddings.create(
            model=self.model,
            input=_input,
            encoding_format=_format
        )
        return response.data[0].embedding

    def chat(self, user_prompt, image: str = None):
        """Send one user message (optionally with an image URL) and return the reply text."""
        message = {
            "role": "user"
        }
        if image is None:
            message['content'] = user_prompt
        else:
            # Vision-style payload: a text part plus an image_url part.
            message['content'] = [
                {"type": "text", "text": user_prompt},
                {
                    "type": "image_url",
                    "image_url": {
                        "url": image,
                    }
                },
            ]
        response = self.client.chat.completions.create(
            model=self.model,
            messages=[
                *self.messages,
                message
            ],
        )
        return response.choices[0].message.content

    def disconnect(self):
        """Close the underlying SDK client."""
        self.client.close()
63
+
64
def with_opik(instance: Union[OpenAI, AsyncOpenAI]):
    """Wrap an OpenAI SDK client with Opik tracing, running opik configuration first."""
    from pathlib import Path

    from opik.integrations.openai import track_openai

    # Fix: '../opik.py' was resolved against the process CWD; resolve it against
    # this module's location instead.
    # NOTE(review): assumes this module sits one package level below opik.py — confirm layout.
    runpy.run_path(str(Path(__file__).resolve().parent.parent / 'opik.py'))
    return track_openai(instance)
@@ -0,0 +1,27 @@
1
+ import warnings
2
+
3
+ from openai import AzureOpenAI, OpenAI
4
+
5
+ from davidkhala.ai.openai import Client
6
+
7
+
8
class ModelDeploymentClient(Client):
    """Client bound to an Azure Cognitive Services model deployment."""

    def __init__(self, key, deployment):
        endpoint = f"https://{deployment}.cognitiveservices.azure.com/"
        self.client = AzureOpenAI(
            azure_endpoint=endpoint,
            api_key=key,
            api_version="2024-12-01-preview",  # mandatory
        )
16
+
17
@warnings.deprecated("Azure Open AI is deprecated. Please migrate to Azure AI Foundry")
class OpenAIClient(Client):
    """Legacy Azure OpenAI resource client."""

    def __init__(self, api_key, project):
        self.client = OpenAI(
            api_key=api_key,
            base_url=f"https://{project}.openai.azure.com/openai/v1/",
        )

    def as_chat(self, model="gpt-oss-120b", sys_prompt: str = None):
        # Delegate to the base selector, defaulting to the gpt-oss-120b deployment.
        super().as_chat(model, sys_prompt)
@@ -0,0 +1,12 @@
1
+ from openai import OpenAI
2
+
3
+ from davidkhala.ai.openai import Client
4
+
5
+
6
class NativeClient(Client):
    """Client for the native OpenAI API, or any compatible base_url."""

    def __init__(self, api_key, base_url=None):
        # base_url=None lets the SDK fall back to its default endpoint.
        self.client = OpenAI(api_key=api_key, base_url=base_url)
@@ -0,0 +1,2 @@
1
# One-shot Opik setup script: executing this module triggers interactive/env-based
# configuration as a side effect (run via runpy from the integration helpers).
from opik import configure

configure()
@@ -0,0 +1,39 @@
1
+ [project]
2
+ name = "davidkhala.ai"
3
+ version = "0.0.0"
4
+ description = "misc AI modules"
5
+ readme = "README.md"
6
+ requires-python = ">=3.13"
7
+ dependencies = [
8
+ "opik",
9
+ ]
10
+
11
+ [project.optional-dependencies]
12
+ langchain = [
13
+ "langgraph", "langchain",
14
+     "langchain-openai" # required by OpenRouter
15
+ ]
16
+ google = [
17
+ "google-genai", "google-adk"
18
+ ]
19
+ api = [
20
+ "requests",
21
+ ]
22
+ openai = [
23
+ "openai"
24
+ ]
25
+ [tool.hatch.build.targets.sdist]
26
+ include = ["davidkhala"]
27
+
28
+ [tool.hatch.build.targets.wheel]
29
+ include = ["davidkhala"]
30
+
31
+ [build-system]
32
+ requires = ["hatchling"]
33
+ build-backend = "hatchling.build"
34
+
35
+ [dependency-groups]
36
+ dev = [
37
+ "pytest",
38
+
39
+ ]