agentic-blocks 0.1.3__tar.gz → 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: agentic-blocks
- Version: 0.1.3
+ Version: 0.1.4
  Summary: Simple building blocks for agentic AI systems with MCP client and conversation management
  Author-email: Magnus Bjelkenhed <bjelkenhed@gmail.com>
  License: MIT
@@ -14,7 +14,7 @@ agentic_blocks = []
  
  [project]
  name = "agentic-blocks"
- version = "0.1.3"
+ version = "0.1.4"
  description = "Simple building blocks for agentic AI systems with MCP client and conversation management"
  readme = "README.md"
  requires-python = ">=3.11"
@@ -22,6 +22,7 @@ def call_llm(
      tools: Optional[List[Dict[str, Any]]] = None,
      api_key: Optional[str] = None,
      model: str = "gpt-4o-mini",
+     base_url: Optional[str] = None,
      **kwargs,
  ) -> str:
      """
@@ -32,6 +33,7 @@ def call_llm(
          tools: Optional list of tools in OpenAI function calling format
          api_key: OpenAI API key (if not provided, loads from .env OPENAI_API_KEY)
          model: Model name to use for completion
+         base_url: Base URL for the API (useful for VLLM or other OpenAI-compatible servers)
          **kwargs: Additional parameters to pass to OpenAI API
  
      Returns:
@@ -47,13 +49,18 @@ def call_llm(
      if not api_key:
          api_key = os.getenv("OPENAI_API_KEY")
  
-     if not api_key:
+     if not api_key and not base_url:
          raise LLMError(
              "OpenAI API key not found. Set OPENAI_API_KEY environment variable or pass api_key parameter."
          )
  
      # Initialize OpenAI client
-     client = OpenAI(api_key=api_key)
+     client_kwargs = {}
+     if api_key:
+         client_kwargs["api_key"] = api_key
+     if base_url:
+         client_kwargs["base_url"] = base_url
+     client = OpenAI(**client_kwargs)
  
      # Handle different message input types
      if isinstance(messages, Messages):
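
The three hunks above add an optional base_url parameter to call_llm and only raise the missing-key error when neither an API key nor a base_url is available. A minimal usage sketch follows, assuming call_llm is importable from the top-level agentic_blocks package and that plain OpenAI-style message dicts are accepted; the endpoint URL, model name and dummy token are placeholders, not taken from the diff.

# Sketch only: import path, endpoint URL, model name and dummy token are assumptions.
from agentic_blocks import call_llm

# Hosted OpenAI: unchanged, the key comes from api_key= or OPENAI_API_KEY in .env.
reply = call_llm(
    messages=[{"role": "user", "content": "Hello"}],
    model="gpt-4o-mini",
)

# Self-hosted OpenAI-compatible server (e.g. vLLM). With base_url set, 0.1.4 no
# longer raises LLMError when no OpenAI key is configured; most local servers
# accept an arbitrary token.
local_reply = call_llm(
    messages=[{"role": "user", "content": "Hello"}],
    model="my-served-model",              # placeholder model name
    base_url="http://localhost:8000/v1",  # placeholder vLLM endpoint
    api_key="not-needed",                 # dummy token for a local server
)
print(reply, local_reply)
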
@@ -16,6 +16,23 @@ from mcp.client.streamable_http import streamablehttp_client
  logger = logging.getLogger(__name__)
  
  
+ def handle_jupyter_env():
+     """Apply nest_asyncio if running in a Jupyter notebook environment."""
+     try:
+         # Check if we're in a running event loop (like Jupyter)
+         asyncio.get_running_loop()
+         try:
+             import nest_asyncio
+             nest_asyncio.apply()
+         except ImportError:
+             logger.warning(
+                 "nest_asyncio not available. Install with: pip install nest-asyncio"
+             )
+     except RuntimeError:
+         # No event loop running, no need for nest_asyncio
+         pass
+ 
+ 
  class MCPEndpointError(Exception):
      """Exception raised when there's an error connecting to or using an MCP endpoint."""
  
@@ -77,6 +94,7 @@ class MCPClient:
          Raises:
              MCPEndpointError: If connection or listing fails
          """
+         handle_jupyter_env()
          return asyncio.run(self.list_tools_async())
  
      async def list_tools_async(self) -> List[Dict[str, Any]]:
@@ -158,6 +176,7 @@ class MCPClient:
          Raises:
              MCPEndpointError: If connection or tool call fails
          """
+         handle_jupyter_env()
          return asyncio.run(self.call_tool_async(tool_name, arguments))
  
      async def call_tool_async(
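
These mcp_client.py hunks make the synchronous wrappers usable inside Jupyter: asyncio.run() raises RuntimeError when a loop is already running (as it is in a notebook cell), so list_tools() and call_tool() now call handle_jupyter_env() first, which applies nest_asyncio when needed. A usage sketch follows; the import path, constructor argument and tool name are assumptions, since the diff only shows the two sync wrappers and the new helper.

# Sketch only: import path, MCP endpoint URL and tool name are assumptions.
from agentic_blocks import MCPClient

client = MCPClient("http://localhost:8000/mcp")  # placeholder MCP endpoint

# In a plain script no loop is running and asyncio.run() behaves as before.
# In a Jupyter cell, handle_jupyter_env() applies nest_asyncio
# (pip install nest-asyncio) so asyncio.run() can nest inside the notebook's loop.
tools = client.list_tools()
result = client.call_tool("example_tool", {"query": "hello"})  # hypothetical tool
print(tools, result)
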
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: agentic-blocks
- Version: 0.1.3
+ Version: 0.1.4
  Summary: Simple building blocks for agentic AI systems with MCP client and conversation management
  Author-email: Magnus Bjelkenhed <bjelkenhed@gmail.com>
  License: MIT
3 files without changes