sparkContext 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,13 @@
1
+ Metadata-Version: 2.4
2
+ Name: sparkContext
3
+ Version: 0.1.0
4
+ Author: DeepModel
5
+ Classifier: Programming Language :: Python :: 3
6
+ Classifier: License :: OSI Approved :: MIT License
7
+ Classifier: Operating System :: OS Independent
8
+ Requires-Python: >=3.8
9
+ Requires-Dist: httpx>=0.24.0
10
+ Dynamic: author
11
+ Dynamic: classifier
12
+ Dynamic: requires-dist
13
+ Dynamic: requires-python
@@ -0,0 +1,251 @@
1
+ # Spark SDK
2
+
3
+ A Python SDK for interacting with the DeepModel Spark API.
4
+
5
+ ---
6
+
7
+ ## Installation
8
+
9
+ ### Install from PyPI
10
+ ```bash
11
+ pip install sparkContext
12
+ ```
13
+
14
+ ### Install from Source
15
+ ```bash
16
+ # 1. Clone the repository
17
+ git clone https://github.com/your-org/dm-spark-sdk.git
18
+ cd dm-spark-sdk
19
+
20
+ # 2. Install dependencies
21
+ pip install setuptools wheel twine httpx
22
+
23
+ # 3. Build the package
24
+ python setup.py sdist bdist_wheel
25
+
26
+ # 4. Install the built package
27
+ pip install dist/spark_sdk-0.1.0-py3-none-any.whl
28
+
29
+ # 5. (Optional) Upload to PyPI
30
+ twine upload dist/*
31
+ ```
32
+
33
+ ---
34
+
35
+ ## Quick Start
36
+
37
+ ```python
38
+ import asyncio
39
+ import spark_sdk
40
+
41
+ async def main():
42
+ # Initialize the client
43
+ client = spark_sdk.Client(
44
+ api_key="your_api_key",
45
+ api_url="https://your-api-url.com"
46
+ )
47
+
48
+ # Get all unified tools and agents
49
+ result = await client.get_unified_tools()
50
+ print(result)
51
+
52
+ # Invoke an MCP tool
53
+ invoke_result = await client.invoke_tool(
54
+ connection_id="uuid-here",
55
+ target_type=spark_sdk.MCP,
56
+ tool_or_agent_name="create_issue",
57
+ arguments={"title": "Bug report", "body": "Something went wrong"},
58
+ query="create a github issue"
59
+ )
60
+ print(invoke_result)
61
+
62
+ asyncio.run(main())
63
+ ```
64
+
65
+ ---
66
+
67
+ ## Usage
68
+
69
+ ### Initialize the Client
70
+
71
+ ```python
72
+ import spark_sdk
73
+
74
+ client = spark_sdk.Client(
75
+ api_key="your_api_key",
76
+ api_url="https://your-api-url.com"
77
+ )
78
+ ```
79
+
80
+ | Parameter | Type | Required | Description |
81
+ |-----------|--------|----------|------------------------------------|
82
+ | `api_key` | `str` | ✅ Yes | Your API key for authentication |
83
+ | `api_url` | `str` | ✅ Yes | The base URL of the Spark API |
84
+
85
+ ---
86
+
87
+ ### `get_unified_tools()`
88
+
89
+ Retrieves all MCP tools and worker agents accessible to the user, including their connection status and payload schemas.
90
+
91
+ ```python
92
+ result = await client.get_unified_tools()
93
+ ```
94
+
95
+ #### Response Structure
96
+
97
+ ```json
98
+ {
99
+ "success": true,
100
+ "tools": {
101
+ "mcp_tools": {
102
+ "github": [
103
+ {
104
+ "tool_name": "create_issue",
105
+ "is_configured": true,
106
+ "connection_id": "uuid-here",
107
+ "description": "Creates a GitHub issue",
108
+ "payload_schema": {},
109
+ "icon_url": "https://..."
110
+ }
111
+ ]
112
+ },
113
+ "worker_agents": [
114
+ {
115
+ "tool_name": "My Agent",
116
+ "is_configured": true,
117
+ "connection_id": "uuid-here",
118
+ "description": "A custom worker agent",
119
+ "payload_schema": {},
120
+ "agent_avatar_url": "https://..."
121
+ }
122
+ ]
123
+ },
124
+ "status_code": 200,
125
+ "detail": null
126
+ }
127
+ ```
128
+
129
+ | Field | Type | Description |
130
+ |---------------|----------------|----------------------------------------------|
131
+ | `success` | `bool` | Whether the request was successful |
132
+ | `tools` | `dict` | All MCP tools and worker agents |
133
+ | `status_code` | `int` | HTTP status code of the response |
134
+ | `detail` | `str` / `null` | Error detail message if request failed |
135
+
136
+ ---
137
+
138
+ ### `invoke_tool()`
139
+
140
+ Directly invoke an MCP tool or worker agent with provided arguments. No LLM inference is performed — arguments are used as-is.
141
+
142
+ ```python
143
+ result = await client.invoke_tool(
144
+ connection_id="uuid-here",
145
+ target_type=spark_sdk.MCP, # or spark_sdk.AGENT
146
+ tool_or_agent_name="create_issue",
147
+ arguments={"title": "Bug report", "body": "Something went wrong"},
148
+ query="create a github issue" # optional
149
+ )
150
+ ```
151
+
152
+ #### Parameters
153
+
154
+ | Parameter | Type | Required | Description |
155
+ |----------------------|-----------------|----------|------------------------------------------------------|
156
+ | `connection_id` | `str` / `UUID` | ✅ Yes | The connection ID of the tool or agent |
157
+ | `target_type` | `str` | ✅ Yes | `spark_sdk.MCP` (`"mcp"`) or `spark_sdk.AGENT` (`"agent"`) |
158
+ | `tool_or_agent_name` | `str` | ✅ Yes | The name of the tool or agent to invoke |
159
+ | `arguments` | `dict` | ✅ Yes | Arguments to pass to the tool or agent |
160
+ | `query` | `str` | ❌ No | Optional user query context for agent invocation |
161
+
162
+ #### Response Structure
163
+
164
+ ```json
165
+ {
166
+ "success": true,
167
+ "tool_or_agent_name": "create_issue",
168
+ "connection_id": "uuid-here",
169
+ "result": {},
170
+ "status_code": 200,
171
+ "detail": null
172
+ }
173
+ ```
174
+
175
+ | Field | Type | Description |
176
+ |----------------------|----------------|---------------------------------------------------|
177
+ | `success` | `bool` | Whether the invocation was successful |
178
+ | `tool_or_agent_name` | `str` | The name of the tool or agent invoked |
179
+ | `connection_id` | `str` | The connection ID used |
180
+ | `result` | `any` | The result returned by the tool or agent |
181
+ | `status_code` | `int` | HTTP status code of the response |
182
+ | `detail` | `str` / `null` | Error detail message if invocation failed |
183
+
184
+ #### Target Type Constants
185
+
186
+ ```python
187
+ spark_sdk.MCP # "mcp" — for MCP tools
188
+ spark_sdk.AGENT # "agent" — for worker agents
189
+ ```
190
+
191
+ ---
192
+
193
+ ### Error Handling
194
+
195
+ All errors are returned as part of the response dictionary rather than raising exceptions:
196
+
197
+ ```python
198
+ result = await client.get_unified_tools()
199
+
200
+ if not result.get("success"):
201
+ print(f"Error {result.get('status_code')}: {result.get('detail')}")
202
+ else:
203
+ print(result.get("tools"))
204
+ ```
205
+
206
+ | Error Type | `status_code` | `detail` Example |
207
+ |------------------|---------------|------------------------------------------------------|
208
+ | HTTP Error | e.g. `401` | `"No detail provided"` |
209
+ | Request Error | `400` | `"Request error: Connection refused"` |
210
+ | Unexpected Error | `500` | `"An unexpected error occurred: ..."` |
211
+
212
+ ---
213
+
214
+ ## Project Structure
215
+
216
+ ```
217
+ dm-spark-sdk/
218
+ ├── spark_sdk/
219
+ │ ├── __init__.py # Exposes Client
220
+ │ └── client.py # SparkClient class with all methods
221
+ ├── setup.py # Package configuration
222
+ └── README.md
223
+ ```
224
+
225
+ ---
226
+
227
+ ## Requirements
228
+
229
+ - Python `>= 3.8`
230
+ - `httpx >= 0.24.0`
231
+
232
+ ---
233
+
234
+ ## Publishing to PyPI
235
+
236
+ ```bash
237
+ # Install twine
238
+ pip install twine
239
+
240
+ # Build the package
241
+ python setup.py sdist bdist_wheel
242
+
243
+ # Upload to PyPI
244
+ twine upload dist/*
245
+ ```
246
+
247
+ ---
248
+
249
+ ## License
250
+
251
+ This project is licensed under the MIT License.
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,17 @@
1
+ from setuptools import setup, find_packages
2
+
3
# Package configuration for the sparkContext distribution.
# Added `description` and `license` so the published metadata matches the
# README (which documents the project as MIT-licensed) and the existing
# "License :: OSI Approved :: MIT License" classifier below.
setup(
    name='sparkContext',
    version='0.1.0',
    author='DeepModel',
    description='A Python SDK for interacting with the DeepModel Spark API.',
    license='MIT',
    packages=find_packages(),
    install_requires=[
        'httpx>=0.24.0',
    ],
    python_requires='>=3.8',
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
)
@@ -0,0 +1,9 @@
1
"""Public interface of the Spark SDK package."""
from .client import SparkClient

# Short, documented alias: users write `sparkContext.Client(...)`.
Client = SparkClient

# Accepted ``target_type`` values for ``Client.invoke_tool``.
MCP = "mcp"
AGENT = "agent"

__all__ = ["Client", "SparkClient", "MCP", "AGENT"]
@@ -0,0 +1,166 @@
1
+ import httpx
2
+ from typing import Any, Dict, Optional, Union
3
+ from uuid import UUID
4
+
5
+
6
class SparkClient:
    """
    Spark SDK client for the DeepModel Spark API.

    All request methods are coroutines and must be awaited. Errors are
    reported through the returned dictionary (``success`` / ``status_code`` /
    ``detail``) rather than raised; only constructor argument validation
    raises.

    Usage:
        import sparkContext
        client = sparkContext.Client(api_key="your_api_key", api_url="https://your-api-url.com")
        result = await client.get_unified_tools()
    """

    def __init__(self, api_key: str, api_url: str, timeout: Optional[float] = None):
        """
        Initialize the Spark SDK Client.

        Args:
            api_key (str): Your API key for authentication.
            api_url (str): The base URL of the Spark API.
            timeout (Optional[float]): Per-request timeout in seconds.
                Defaults to ``None`` (no timeout), which matches the
                previously hard-coded behavior.

        Raises:
            ValueError: If ``api_key`` or ``api_url`` is empty.
        """
        if not api_key:
            raise ValueError("API key is required.")
        if not api_url:
            raise ValueError("API URL is required.")

        self.api_key = api_key
        # Strip trailing slashes so endpoint paths can be appended uniformly.
        self.api_url = api_url.rstrip('/')
        self.timeout = timeout
        self.headers = {
            "x-api-key": self.api_key,
            "Content-Type": "application/json"
        }

    @staticmethod
    def _error_detail(response) -> str:
        """
        Best-effort extraction of the ``detail`` field from an error response.

        The previous code called ``response.json()`` unconditionally inside
        the ``HTTPStatusError`` handlers; a non-JSON error body (e.g. an HTML
        gateway error page) made that call raise and escape the handler,
        turning a reportable HTTP error into an unhandled exception.
        """
        try:
            return response.json().get("detail", "No detail provided")
        except Exception:
            return "No detail provided"

    async def get_unified_tools(self) -> dict:
        """
        Get all tools available in the Spark.
        Returns all available tools along with connected and not connected tools.

        Returns:
            dict: Keys ``success`` (bool), ``tools`` (dict or None),
            ``status_code`` (int) and ``detail`` (str or None). ``tools`` is
            the parsed JSON body on success and ``None`` on any failure.
        """
        endpoint = f"{self.api_url}/utils/unified-tools-and-agents"

        async with httpx.AsyncClient(timeout=self.timeout) as client:
            try:
                response = await client.get(endpoint, headers=self.headers)
                response.raise_for_status()
                return {
                    "success": True,
                    "tools": response.json(),
                    "status_code": response.status_code,
                    "detail": None
                }
            except httpx.HTTPStatusError as e:
                return {
                    "success": False,
                    "tools": None,
                    "status_code": e.response.status_code,
                    "detail": self._error_detail(e.response)
                }
            except httpx.RequestError as e:
                return {
                    "success": False,
                    "tools": None,
                    "status_code": 400,
                    "detail": f"An error occurred while making the request: {str(e)}"
                }
            except Exception as e:
                return {
                    "success": False,
                    "tools": None,
                    "status_code": 500,
                    "detail": f"An unexpected error occurred: {str(e)}"
                }

    async def invoke_tool(self, connection_id: Union[str, UUID], target_type: str, tool_or_agent_name: str, arguments: Dict[str, Any], query: Optional[str] = "") -> dict:
        """
        Directly invoke an MCP tool or worker agent with provided arguments.
        No LLM inference is performed — arguments are used as-is.
        Required parameters are validated before execution.

        Args:
            connection_id (Union[str, UUID]): The connection ID of the tool or agent.
            target_type (str): The type of target to invoke. Either "mcp" or
                "agent" (case-insensitive).
            tool_or_agent_name (str): The name of the tool or agent to invoke.
            arguments (Dict[str, Any]): The arguments to pass to the tool or agent.
            query (Optional[str]): Optional user query context for agent invocation.

        Returns:
            dict: A dictionary containing:
                - success (bool): Whether the invocation was successful
                - tool_or_agent_name (str): The name of the tool or agent invoked
                - connection_id (str): The connection ID of the tool or agent invoked
                - result (Any): The result of the invocation
                - status_code (int): The HTTP status code of the response
                - detail (Optional[str]): Additional error details if any

        Example:
            result = await client.invoke_tool(
                connection_id="uuid-here",
                target_type="mcp",
                tool_or_agent_name="create_issue",
                arguments={"title": "Bug report", "body": "Something went wrong"},
                query="create a github issue"
            )
        """
        # Normalize BEFORE validating; previously "MCP"/"Agent" were rejected
        # here even though the payload lower-cased the value anyway.
        normalized_type = target_type.lower() if isinstance(target_type, str) else ""
        if normalized_type not in ("mcp", "agent"):
            return {
                "success": False,
                "tool_or_agent_name": tool_or_agent_name,
                "connection_id": connection_id,
                "result": None,
                "status_code": 400,
                "detail": "Invalid target_type. Must be 'mcp' or 'agent'."
            }

        endpoint = f"{self.api_url}/utils/tools/invoke"
        params = {"query": query} if query else {}
        payload = {
            # str() so uuid.UUID values are JSON-serializable; previously a
            # UUID here made the JSON encoding of the request body raise,
            # despite the Union[str, UUID] annotation.
            "connection_id": str(connection_id),
            "target_type": normalized_type,
            "tool_or_agent_name": tool_or_agent_name,
            "arguments": arguments
        }

        async with httpx.AsyncClient(timeout=self.timeout) as client:
            try:
                response = await client.post(
                    endpoint,
                    headers=self.headers,
                    json=payload,
                    params=params
                )
                response.raise_for_status()
                # On success the server already returns the documented
                # envelope (success/result/status_code/...), so pass it through.
                return response.json()

            except httpx.HTTPStatusError as e:
                return {
                    "success": False,
                    "tool_or_agent_name": tool_or_agent_name,
                    "connection_id": connection_id,
                    "result": None,
                    "status_code": e.response.status_code,
                    "detail": self._error_detail(e.response)
                }
            except httpx.RequestError as e:
                return {
                    "success": False,
                    "tool_or_agent_name": tool_or_agent_name,
                    "connection_id": connection_id,
                    "result": None,
                    "status_code": 400,
                    "detail": f"Request error: {str(e)}"
                }
            except Exception as e:
                return {
                    "success": False,
                    "tool_or_agent_name": tool_or_agent_name,
                    "connection_id": connection_id,
                    "result": None,
                    "status_code": 500,
                    "detail": f"Unexpected error: {str(e)}"
                }
@@ -0,0 +1,13 @@
1
+ Metadata-Version: 2.4
2
+ Name: sparkContext
3
+ Version: 0.1.0
4
+ Author: DeepModel
5
+ Classifier: Programming Language :: Python :: 3
6
+ Classifier: License :: OSI Approved :: MIT License
7
+ Classifier: Operating System :: OS Independent
8
+ Requires-Python: >=3.8
9
+ Requires-Dist: httpx>=0.24.0
10
+ Dynamic: author
11
+ Dynamic: classifier
12
+ Dynamic: requires-dist
13
+ Dynamic: requires-python
@@ -0,0 +1,9 @@
1
+ README.md
2
+ setup.py
3
+ sparkContext/__init__.py
4
+ sparkContext/client.py
5
+ sparkContext.egg-info/PKG-INFO
6
+ sparkContext.egg-info/SOURCES.txt
7
+ sparkContext.egg-info/dependency_links.txt
8
+ sparkContext.egg-info/requires.txt
9
+ sparkContext.egg-info/top_level.txt
@@ -0,0 +1 @@
1
+ httpx>=0.24.0
@@ -0,0 +1 @@
1
+ sparkContext