distributed-a2a 0.1.5rc2__tar.gz → 0.1.5rc4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {distributed_a2a-0.1.5rc2/distributed_a2a.egg-info → distributed_a2a-0.1.5rc4}/PKG-INFO +1 -1
  2. distributed_a2a-0.1.5rc4/distributed_a2a/model.py +50 -0
  3. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4/distributed_a2a.egg-info}/PKG-INFO +1 -1
  4. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/pyproject.toml +1 -1
  5. distributed_a2a-0.1.5rc2/distributed_a2a/model.py +0 -50
  6. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/LICENSE +0 -0
  7. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/MANIFEST.in +0 -0
  8. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/README.md +0 -0
  9. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a/__init__.py +0 -0
  10. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a/agent.py +0 -0
  11. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a/client.py +0 -0
  12. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a/executors.py +0 -0
  13. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a/registry.py +0 -0
  14. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a/server.py +0 -0
  15. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a.egg-info/SOURCES.txt +0 -0
  16. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a.egg-info/dependency_links.txt +0 -0
  17. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a.egg-info/requires.txt +0 -0
  18. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/distributed_a2a.egg-info/top_level.txt +0 -0
  19. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/requirements.txt +0 -0
  20. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/setup.cfg +0 -0
  21. {distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/setup.py +0 -0
{distributed_a2a-0.1.5rc2/distributed_a2a.egg-info → distributed_a2a-0.1.5rc4}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: distributed_a2a
-Version: 0.1.5rc2
+Version: 0.1.5rc4
 Summary: A library for building A2A agents with routing capabilities
 Home-page: https://github.com/Barra-Technologies/distributed-a2a
 Author: Fabian Bell
distributed_a2a-0.1.5rc4/distributed_a2a/model.py
@@ -0,0 +1,50 @@
+from typing import List
+
+from langchain_core.language_models import BaseChatModel
+from langchain_openai import ChatOpenAI
+from pydantic import BaseModel, Field
+
+
+class SkillConfig(BaseModel):
+    id: str = Field(description="The id of the skill e.g. weather")
+    name: str = Field(description="The name of the skill e.g. weather")
+    description: str = Field(description="A short description of the skill")
+    tags: List[str] = Field(description="The tags associated with the skill")
+
+
+class LLMConfig(BaseModel):
+    base_url: str = Field(description="The base url of the LLM provider")
+    model: str = Field(description="The model to use for the LLM e.g. gpt-3.5-turbo")
+    api_key_env: str = Field(description="The environment variable containing the api key for the LLM provider")
+    reasoning_effort: str = Field(description="The reasoning effort to use for the LLM e.g. high", default="high")
+    system_prompt: str = Field(description="The system prompt to use for the LLM")
+
+
+class CardConfig(BaseModel):
+    name: str = Field(description="The name of the agent")
+    description: str = Field(description="A short description of the agent")
+    version: str = Field(description="The version of the agent")
+    default_input_modes: List[str] = Field(description="The default input modes supported by the agent", default=["text","text/plaintext"])
+    default_output_modes: List[str] = Field(description="The default output modes supported by the agent", default=["text","text/plaintext"])
+    preferred_transport_protocol: str = Field(description="The preferred transport protocol for the agent", default="HTTP+JSON")
+    url: str = Field(description="The url of the agent")
+    skills: List[SkillConfig] = Field(description="The skills supported by the agent", default=[])
+
+
+class AgentItem(BaseModel):
+    card: CardConfig = Field(description="The agent card configuration node")
+    llm: LLMConfig = Field(description="The LLM configuration node")
+
+
+class AgentConfig(BaseModel):
+    agent: AgentItem = Field(description="The agent configuration node")
+
+
+
+def get_model(api_key: str, model: str, base_url: str, reasoning_effort: str) -> BaseChatModel:
+    return ChatOpenAI(
+        model=model,
+        base_url=base_url,
+        api_key=lambda: api_key,
+        reasoning_effort=reasoning_effort
+    )
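For context, the added model.py defines a Pydantic configuration schema (AgentConfig → AgentItem → CardConfig/LLMConfig/SkillConfig) plus a get_model helper that builds a ChatOpenAI instance. The sketch below shows one way these classes might be used; it is not part of the package. It assumes Pydantic and langchain-openai are installed, and the configuration dictionary, the "weather-agent" values, and the OPENAI_API_KEY variable name are illustrative assumptions only.

import os

from distributed_a2a.model import AgentConfig, get_model

# Hypothetical configuration data; keys follow the fields declared in model.py,
# values are made up for illustration.
config_data = {
    "agent": {
        "card": {
            "name": "weather-agent",
            "description": "Answers weather questions",
            "version": "0.1.0",
            "url": "http://localhost:8000",
            "skills": [
                {
                    "id": "weather",
                    "name": "weather",
                    "description": "Look up current weather",
                    "tags": ["weather", "forecast"],
                }
            ],
        },
        "llm": {
            "base_url": "https://api.openai.com/v1",
            "model": "gpt-4o-mini",
            "api_key_env": "OPENAI_API_KEY",
            "system_prompt": "You are a helpful weather agent.",
        },
    }
}

# Nested dicts are validated into the nested Pydantic models;
# defaults (reasoning_effort, input/output modes, transport) are filled in.
config = AgentConfig(**config_data)

# get_model wires the LLM settings into a ChatOpenAI instance; the API key is
# read here from the environment variable named in the config.
llm = get_model(
    api_key=os.environ[config.agent.llm.api_key_env],
    model=config.agent.llm.model,
    base_url=config.agent.llm.base_url,
    reasoning_effort=config.agent.llm.reasoning_effort,
)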
{distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4/distributed_a2a.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: distributed_a2a
-Version: 0.1.5rc2
+Version: 0.1.5rc4
 Summary: A library for building A2A agents with routing capabilities
 Home-page: https://github.com/Barra-Technologies/distributed-a2a
 Author: Fabian Bell
{distributed_a2a-0.1.5rc2 → distributed_a2a-0.1.5rc4}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "distributed_a2a"
-version = "0.1.5rc2"
+version = "0.1.5rc4"
 description = "A library for building A2A agents with routing capabilities"
 readme = "README.md"
 requires-python = ">=3.10"
distributed_a2a-0.1.5rc2/distributed_a2a/model.py
@@ -1,50 +0,0 @@
-from typing import List
-
-from langchain_core.language_models import BaseChatModel
-from langchain_openai import ChatOpenAI
-from pydantic import BaseModel
-
-
-class SkillConfig(BaseModel):
-    id: str
-    name: str
-    description: str
-    tags: List[str]
-
-
-class LLMConfig(BaseModel):
-    base_url: str
-    model: str
-    api_key_env: str
-    reasoning_effort: str
-    system_prompt: str
-
-
-class CardConfig(BaseModel):
-    name: str
-    version: str
-    default_input_modes: List[str]
-    default_output_modes: List[str]
-    preferred_transport_protocol: str
-    url: str
-    description: str
-    skills: List[SkillConfig]
-
-
-class AgentItem(BaseModel):
-    card: CardConfig
-    llm: LLMConfig
-
-
-class AgentConfig(BaseModel):
-    agent: AgentItem
-
-
-
-def get_model(api_key: str, model: str, base_url: str, reasoning_effort: str) -> BaseChatModel:
-    return ChatOpenAI(
-        model=model,
-        base_url=base_url,
-        api_key=lambda: api_key,
-        reasoning_effort=reasoning_effort
-    )