davidkhala.ai 0.2.0__tar.gz → 0.2.1__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (55)
  1. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/PKG-INFO +5 -1
  2. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/console/plugin.py +1 -1
  3. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/plugins/popular.py +4 -1
  4. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/ali/dashscope.py +4 -8
  5. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/api/__init__.py +2 -1
  6. davidkhala_ai-0.2.1/davidkhala/ai/mistral/__init__.py +33 -0
  7. davidkhala_ai-0.2.1/davidkhala/ai/model/__init__.py +44 -0
  8. davidkhala_ai-0.2.1/davidkhala/ai/model/chat.py +19 -0
  9. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/openai/__init__.py +24 -16
  10. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/openai/azure.py +5 -4
  11. davidkhala_ai-0.2.1/davidkhala/ai/openai/databricks.py +23 -0
  12. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/openai/native.py +3 -2
  13. davidkhala_ai-0.2.1/davidkhala/ai/openai/opik.py +10 -0
  14. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/openrouter/__init__.py +1 -0
  15. davidkhala_ai-0.2.1/davidkhala/ai/you.py +55 -0
  16. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/pyproject.toml +3 -1
  17. davidkhala_ai-0.2.0/davidkhala/ai/model.py +0 -28
  18. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/.gitignore +0 -0
  19. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/README.md +0 -0
  20. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/__init__.py +0 -0
  21. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/README.md +0 -0
  22. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/__init__.py +0 -0
  23. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/__init__.py +0 -0
  24. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/api/__init__.py +0 -0
  25. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/api/app.py +0 -0
  26. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/api/knowledge.py +0 -0
  27. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/const.py +0 -0
  28. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/interface.py +0 -0
  29. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/model.py +0 -0
  30. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/__init__.py +0 -0
  31. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/console/__init__.py +0 -0
  32. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/console/knowledge.py +0 -0
  33. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/console/session.py +0 -0
  34. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/db/__init__.py +0 -0
  35. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/db/app.py +0 -0
  36. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/db/knowledge.py +0 -0
  37. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/db/orm.py +0 -0
  38. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/ops/db/sys.py +0 -0
  39. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/plugins/__init__.py +0 -0
  40. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/plugins/file.py +0 -0
  41. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/plugins/firecrawl.py +0 -0
  42. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/dify/plugins/jina.py +0 -0
  43. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/langgraph.py +0 -0
  44. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/agent/ragflow.py +0 -0
  45. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/ali/__init__.py +0 -0
  46. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/ali/agentbay.py +0 -0
  47. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/api/openrouter.py +0 -0
  48. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/api/siliconflow.py +0 -0
  49. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/google/__init__.py +0 -0
  50. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/google/adk.py +0 -0
  51. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/google/gemini.py +0 -0
  52. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/huggingface/BAAI.py +0 -0
  53. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/huggingface/__init__.py +0 -0
  54. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/huggingface/inference.py +0 -0
  55. {davidkhala_ai-0.2.0 → davidkhala_ai-0.2.1}/davidkhala/ai/opik.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: davidkhala.ai
-Version: 0.2.0
+Version: 0.2.1
 Summary: misc AI modules
 Requires-Python: >=3.12
 Provides-Extra: ali
@@ -28,12 +28,16 @@ Provides-Extra: langchain
 Requires-Dist: langchain; extra == 'langchain'
 Requires-Dist: langchain-openai; (python_version < '3.14') and extra == 'langchain'
 Requires-Dist: langgraph; extra == 'langchain'
+Provides-Extra: mistral
+Requires-Dist: mistralai; extra == 'mistral'
 Provides-Extra: openrouter
 Requires-Dist: openrouter; extra == 'openrouter'
 Provides-Extra: ragflow
 Requires-Dist: ragflow-sdk; extra == 'ragflow'
 Provides-Extra: telemetry
 Requires-Dist: opik; (python_version < '3.14') and extra == 'telemetry'
+Provides-Extra: you
+Requires-Dist: youdotcom; extra == 'you'
 Description-Content-Type: text/markdown
 
 # davidkhala.ai
davidkhala/ai/agent/dify/ops/console/plugin.py
@@ -40,7 +40,7 @@ class ConsolePlugin(API):
         return _
 
     def get(self, *plugin_names: str) -> list[dict]:
-        "inspect installed plugins"
+        """inspect installed plugins"""
        url = f"{self.base_url}/list/installations/ids"
        r = self.request(url, method="POST", json={
            'plugin_ids': plugin_names,
davidkhala/ai/agent/dify/plugins/popular.py
@@ -32,5 +32,8 @@ class Node:
         'junjiem/db_query',
         'junjiem/db_query_pre_auth',
     ]
-
+    web = [
+        'langgenius/searxng',
+        'langgenius/firecrawl'
+    ]
 
davidkhala/ai/ali/dashscope.py
@@ -4,7 +4,7 @@ from http import HTTPStatus
 from dashscope.api_entities.dashscope_response import DashScopeAPIResponse
 
 from dashscope import Generation, TextEmbedding
-from davidkhala.ai.model import AbstractClient
+from davidkhala.ai.model import AbstractClient, MessageDict
 
 
 class ModelEnum(str, Enum):
@@ -21,11 +21,10 @@ class API(AbstractClient):
     Unsupported to use international base_url "https://dashscope-intl.aliyuncs.com"
     """
 
-    model: ModelEnum
-
     def __init__(self, api_key):
+        super().__init__()
         self.api_key = api_key
-
+        self.model: ModelEnum|None = None
     def as_embeddings(self, model=ModelEnum.EMBED):
         super().as_embeddings(model)
 
@@ -44,10 +43,7 @@ class API(AbstractClient):
         else:
             kwargs['messages'] = [
                 *self.messages,
-                {
-                    "role": "user",
-                    'content': user_prompt
-                }
+                MessageDict(role='user',content=user_prompt),
             ]
         # prompt and messages are mutually exclusive parameters: if you use messages, do not also pass prompt
         r = Generation.call(
davidkhala/ai/api/__init__.py
@@ -8,7 +8,8 @@ from davidkhala.ai.model import AbstractClient
 
 class API(AbstractClient, Request):
     def __init__(self, api_key: str, base_url: str):
-        super().__init__({
+        AbstractClient.__init__(self)
+        Request.__init__(self,{
             "bearer": api_key
         })
         self.base_url = base_url + '/v1'
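The switch from a single super().__init__ call to explicit AbstractClient.__init__ and Request.__init__ calls is the usual workaround when base classes take different constructor arguments and do not chain cooperatively. A minimal, self-contained sketch of that pattern follows; the class names here are illustrative and not part of the package:

# Illustrative only: two bases whose __init__ signatures differ and do not call super(),
# so the subclass initialises each base explicitly instead of relying on the MRO.
class Storage:
    def __init__(self, credentials: dict):
        self.credentials = credentials

class Stateful:
    def __init__(self):
        self.state = {}

class Service(Stateful, Storage):
    def __init__(self, credentials: dict):
        Stateful.__init__(self)              # sets up self.state
        Storage.__init__(self, credentials)  # sets up self.credentials

svc = Service({"bearer": "token"})
assert svc.state == {} and svc.credentials["bearer"] == "token"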
davidkhala/ai/mistral/__init__.py (new)
@@ -0,0 +1,33 @@
+# https://github.com/mistralai/client-python
+
+from davidkhala.ai.model import AbstractClient
+from mistralai import Mistral, ChatCompletionResponse, ResponseFormat
+from davidkhala.ai.model.chat import on_response
+
+class Client(AbstractClient):
+    n = 1
+
+    def __init__(self, api_key: str):
+        self.api_key = api_key
+        self.client = Mistral(api_key=api_key)
+        self.model = "mistral-large-latest"
+        self.messages = []
+
+    def __enter__(self):
+        self.client.__enter__()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        return self.client.__exit__(exc_type, exc_val, exc_tb)
+
+    def chat(self, *user_prompt, **kwargs):
+        response: ChatCompletionResponse = self.client.chat.complete(
+            model=self.model,
+            messages=[
+                *self.messages,
+                *[{"content": m, "role": "user"} for m in user_prompt]
+            ], stream=False, response_format=ResponseFormat(type='text'),
+            n=self.n,
+        )
+
+        return on_response(response, self.n)
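A minimal usage sketch for the new Mistral wrapper (not part of the diff); it assumes a valid key in the MISTRAL_API_KEY environment variable and that the default mistral-large-latest model is available to the account:

import os
from davidkhala.ai.mistral import Client

# The wrapper defaults to model "mistral-large-latest" and n=1 completion choice,
# so chat() returns a single-element list of message contents.
with Client(os.environ["MISTRAL_API_KEY"]) as client:
    [answer] = client.chat("Give a one-line summary of the Mistral Python client.")
    print(answer)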
davidkhala/ai/model/__init__.py (new)
@@ -0,0 +1,44 @@
+from abc import ABC
+from typing import Protocol, TypedDict
+
+
+class MessageDict(TypedDict):
+    content: str | list
+    role: str
+
+
+class ClientProtocol(Protocol):
+    api_key: str
+    base_url: str
+    model: str | None
+    messages: list[MessageDict] | None
+
+
+class AbstractClient(ABC, ClientProtocol):
+
+    def __init__(self):
+        self.model = None
+        self.messages = []
+
+    def as_chat(self, model: str, sys_prompt: str = None):
+        self.model = model
+        if sys_prompt is not None:
+            self.messages = [MessageDict(role='system', content=sys_prompt)]
+
+    def as_embeddings(self, model: str):
+        self.model = model
+
+    def chat(self, *user_prompt, **kwargs):
+        ...
+
+    def encode(self, *_input: str) -> list[list[float]]:
+        ...
+
+    def connect(self):
+        ...
+
+    def close(self):
+        ...
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
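The new AbstractClient keeps per-instance model/messages state (replacing the class-level attributes of the deleted model.py) and routes __exit__ through close(). A hedged sketch of how a concrete client is expected to plug in; EchoClient is invented purely for illustration:

from davidkhala.ai.model import AbstractClient, MessageDict

class EchoClient(AbstractClient):
    """Toy subclass: echoes prompts back instead of calling a real model."""
    def __init__(self):
        super().__init__()        # initialises self.model = None, self.messages = []
        self.api_key = "unused"
        self.base_url = "local"

    def chat(self, *user_prompt, **kwargs):
        history = [*self.messages, *(MessageDict(role='user', content=p) for p in user_prompt)]
        return [m['content'] for m in history if m['role'] == 'user']

client = EchoClient()
client.as_chat(model="echo-1", sys_prompt="You repeat things.")
print(client.chat("hello"))  # ['hello']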
davidkhala/ai/model/chat.py (new)
@@ -0,0 +1,19 @@
+from typing import Protocol, Any
+
+
+class MessageProtocol(Protocol):
+    content: str | Any
+
+
+class ChoiceProtocol(Protocol):
+    message: MessageProtocol
+
+
+class ChoicesAware(Protocol):
+    choices: list[ChoiceProtocol]
+
+
+def on_response(response: ChoicesAware, n: int):
+    contents = [choice.message.content for choice in response.choices]
+    assert len(contents) == n
+    return contents
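on_response only relies on the choices[i].message.content shape, so any SDK response, or a stub, that duck-types it will do. A small self-contained check; the stub classes here are invented for the sketch:

from dataclasses import dataclass
from davidkhala.ai.model.chat import on_response

@dataclass
class _Message:
    content: str

@dataclass
class _Choice:
    message: _Message

@dataclass
class _Response:
    choices: list

# Two fake choices in, two contents out, length asserted against n.
resp = _Response(choices=[_Choice(_Message("first")), _Choice(_Message("second"))])
assert on_response(resp, 2) == ["first", "second"]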
davidkhala/ai/openai/__init__.py
@@ -1,18 +1,30 @@
-import runpy
-from typing import Union, Literal
+from typing import Literal
 
-from openai import OpenAI, AsyncOpenAI
+from httpx import URL
+from openai import OpenAI
 
 from davidkhala.ai.model import AbstractClient
+from davidkhala.ai.model.chat import on_response
 
 
 class Client(AbstractClient):
-    client: OpenAI
-    encoding_format: Literal["float", "base64"] = "float"
-    n = 1
-
+    def __init__(self, client: OpenAI):
+        super().__init__()
+        self.client:OpenAI = client
+        self.base_url:URL = client.base_url
+        self.api_key = client.api_key
+        self.encoding_format:Literal["float", "base64"] = "float"
+        self.n:int = 1
     def connect(self):
-        self.client.models.list()
+        try:
+            type(self).models.fget(self)
+            return True
+        except: # TODO make specific
+            return False
+
+    @property
+    def models(self):
+        return self.client.models.list()
 
     def encode(self, *_input: str) -> list[list[float]]:
         response = self.client.embeddings.create(
@@ -50,15 +62,11 @@ class Client(AbstractClient):
             n=self.n,
             **kwargs
         )
-        contents = [choice.message.content for choice in response.choices]
-        assert len(contents) == self.n
-        return contents
 
-    def disconnect(self):
+        return on_response(response, self.n)
+
+    def close(self):
         self.client.close()
 
 
-def with_opik(instance: Union[OpenAI, AsyncOpenAI]):
-    from opik.integrations.openai import track_openai
-    runpy.run_path('../opik.py')
-    return track_openai(instance)
+
10
10
  raise ValueError('Web search options not supported in any models of Azure AI Foundry')
11
11
  return super().chat(*user_prompt, **kwargs)
12
12
 
13
+
13
14
  class ModelDeploymentClient(AzureHosted):
14
15
  def __init__(self, key, deployment):
15
- self.client = AzureOpenAI(
16
+ super().__init__(AzureOpenAI(
16
17
  api_version="2024-12-01-preview", # mandatory
17
18
  azure_endpoint=f"https://{deployment}.cognitiveservices.azure.com/",
18
19
  api_key=key,
19
- )
20
+ ))
20
21
 
21
22
 
22
23
  @deprecated("Azure Open AI is deprecated. Please migrate to Microsoft Foundry")
23
24
  class OpenAIClient(AzureHosted):
24
25
 
25
26
  def __init__(self, api_key, project):
26
- self.client = OpenAI(
27
+ super().__init__(OpenAI(
27
28
  base_url=f"https://{project}.openai.azure.com/openai/v1/",
28
29
  api_key=api_key,
29
- )
30
+ ))
30
31
 
31
32
  def as_chat(self, model="gpt-oss-120b", sys_prompt: str = None):
32
33
  super().as_chat(model, sys_prompt)
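Both Azure subclasses now pass their configured SDK client up through the new base constructor instead of assigning self.client directly. A hedged sketch against the non-deprecated class; the deployment name and model name below are placeholders, not values from the package:

import os
from davidkhala.ai.openai.azure import ModelDeploymentClient

# "my-foundry-deployment" and "gpt-4o-mini" are hypothetical; substitute your own resources.
client = ModelDeploymentClient(os.environ["AZURE_AI_KEY"], "my-foundry-deployment")
client.as_chat("gpt-4o-mini")
print(client.chat("One sentence on Azure AI Foundry."))
client.close()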
davidkhala/ai/openai/databricks.py (new)
@@ -0,0 +1,23 @@
+from openai import OpenAI
+from davidkhala.ai.openai import Client as BaseClient
+
+
+class Client(BaseClient):
+    def __init__(self, host: str, token: str):
+        super().__init__(OpenAI(
+            base_url=f"https://{host}/serving-endpoints",
+            api_key=token
+        ))
+
+    def chat(self, *user_prompt, **kwargs):
+        """Databricks always reasoning"""
+        rs = super().chat(*user_prompt, **kwargs)
+        for r in rs:
+            assert len(r) == 2
+            assert r[0]['type'] == 'reasoning'
+            for s in r[0]['summary']:
+                assert s['type'] =='summary_text'
+                yield s['text']
+            assert r[1]['type'] == 'text'
+            yield r[1]['text']
+
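The Databricks subclass turns each base-class result into a generator that yields the reasoning summary text before the final answer text. A hedged usage sketch; the workspace host and serving endpoint name are placeholders and the endpoint is assumed to be reasoning-capable:

import os
from davidkhala.ai.openai.databricks import Client

# Host and endpoint name below are hypothetical examples.
client = Client("adb-1234567890123456.7.azuredatabricks.net", os.environ["DATABRICKS_TOKEN"])
client.as_chat("databricks-gpt-oss-120b")   # assumed serving endpoint name
for text in client.chat("Why do reasoning models emit summaries?"):
    print(text)   # reasoning summary chunks first, then the answer text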
davidkhala/ai/openai/native.py
@@ -7,10 +7,11 @@ from davidkhala.ai.openai import Client
 
 class NativeClient(Client):
     def __init__(self, api_key, base_url=None):
-        self.client = OpenAI(
+        super().__init__(OpenAI(
             api_key=api_key,
             base_url=base_url
-        )
+        ))
+
 
     def chat(self, *user_prompt, web_search:Optional[Literal["low", "medium", "high"]]=None, **kwargs):
         opts = {
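NativeClient keeps the optional web_search knob on top of the injected OpenAI client. A hedged sketch, with the model name as an assumption:

import os
from davidkhala.ai.openai.native import NativeClient

client = NativeClient(os.environ["OPENAI_API_KEY"])   # base_url left as None for the default endpoint
client.as_chat("gpt-4o-mini")                         # assumed model name
print(client.chat("What changed in PEP 701?", web_search="low"))
client.close()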
davidkhala/ai/openai/opik.py (new)
@@ -0,0 +1,10 @@
+import runpy
+
+from openai import OpenAI, AsyncOpenAI
+from typing import Union
+
+
+def bind(instance: Union[OpenAI, AsyncOpenAI]):
+    from opik.integrations.openai import track_openai
+    runpy.run_path('../opik.py')
+    return track_openai(instance)
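The Opik helper moved out of openai/__init__.py into its own module and was renamed from with_opik to bind; it wraps an existing client with Opik's track_openai. A hedged sketch assuming Opik is already configured:

from openai import OpenAI
from davidkhala.ai.openai.opik import bind

raw = OpenAI()         # reads OPENAI_API_KEY from the environment
client = bind(raw)     # same client, instrumented by Opik tracing
# Note: bind() also executes '../opik.py' via runpy, so the working directory matters.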
davidkhala/ai/openrouter/__init__.py
@@ -6,6 +6,7 @@ from openrouter import OpenRouter
 
 class Client(AbstractClient):
     def __init__(self, api_key: str):
+        super().__init__()
         self.api_key = api_key
         self.client = OpenRouter(api_key)
 
davidkhala/ai/you.py (new)
@@ -0,0 +1,55 @@
+from typing import AsyncIterator
+
+from youdotcom import You, models
+from youdotcom.models import Web
+from youdotcom.types.typesafe_models import AgentType, get_text_tokens, Format
+from youdotcom.utils import eventstreaming
+
+
+class Client:
+    def __init__(self, api_key: str):
+        self.client = You(api_key_auth=api_key)
+
+    def chat(self, user_prompt: str, *, tools: list[models.Tool] = None) -> str:
+        res = self.client.agents.runs.create(
+            agent=AgentType.ADVANCED if tools else AgentType.EXPRESS,
+            input=user_prompt,
+            stream=False,
+            tools=tools,
+        )
+
+        return "".join(get_text_tokens(res))
+
+    def scrape(self, *urls: str) -> list[str]:
+        """
+        :return the content of the web pages as Markdown format (incl. metadata)
+        """
+        res = self.client.contents.generate(
+            urls=list(urls),
+            format_=Format.MARKDOWN,
+        )
+        return [_.markdown for _ in res]
+
+    def search(self, query: str) -> list[Web]:
+        res = self.client.search.unified(
+            query=query
+        )
+        return res.results.web
+
+    async def async_chat(self, user_prompt: str) -> AsyncIterator[str]:
+        res: eventstreaming.EventStreamAsync[models.Data] = await self.client.agents.runs.create_async(
+            agent=AgentType.EXPRESS,
+            input=user_prompt,
+            stream=True,
+        )
+
+        async for event in res:
+            if event.type == 'response.output_text.delta':
+                yield event.response.delta
+
+    def __enter__(self):
+        self.client.__enter__()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.client.close()
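The new You.com wrapper exposes chat, scrape, search, and a streaming async variant. A hedged synchronous sketch, not part of the diff; the environment variable name and the URL are placeholders:

import os
from davidkhala.ai.you import Client

with Client(os.environ["YDC_API_KEY"]) as client:       # env var name is an assumption
    print(client.chat("What is you.com?"))
    for page in client.scrape("https://example.com"):   # placeholder URL
        print(page[:200])                               # Markdown content of each scraped page
    for hit in client.search("youdotcom python sdk"):
        print(hit)                                      # each hit is a youdotcom Web result model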
pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "davidkhala.ai"
-version = "0.2.0"
+version = "0.2.1"
 description = "misc AI modules"
 readme = "README.md"
 requires-python = ">=3.12"
@@ -17,6 +17,8 @@ hf = [
     "huggingface_hub", 'hf_xet',
     'onnxruntime', 'onnx' # for test only
 ]
+mistral = ["mistralai"]
+you = ['youdotcom']
 openrouter = ["openrouter"]
 ali = ["dashscope", "wuying-agentbay-sdk", "davidkhala.utils"]
 azure = ["openai", "davidkhala.utils"]
davidkhala/ai/model.py (removed)
@@ -1,28 +0,0 @@
-from abc import ABC
-from typing import Optional
-
-
-class AbstractClient(ABC):
-    api_key: str
-    base_url: str
-    model: Optional[str]
-    messages = []
-
-    def as_chat(self, model: str, sys_prompt: str = None):
-        self.model = model
-        if sys_prompt is not None:
-            self.messages = [{"role": "system", "content": sys_prompt}]
-
-    def as_embeddings(self, model: str):
-        self.model = model
-
-    def chat(self, *user_prompt, **kwargs):
-        ...
-
-    def encode(self, *_input: str) -> list[list[float]]:
-        ...
-    def connect(self):
-        ...
-
-    def disconnect(self):
-        ...