distributex-cloud 2.0.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- distributex_cloud-2.0.7/MANIFEST.in +7 -0
- distributex_cloud-2.0.7/PKG-INFO +36 -0
- distributex_cloud-2.0.7/distributex/__init__.py +20 -0
- distributex_cloud-2.0.7/distributex/client.py +621 -0
- distributex_cloud-2.0.7/distributex_cloud.egg-info/PKG-INFO +36 -0
- distributex_cloud-2.0.7/distributex_cloud.egg-info/SOURCES.txt +10 -0
- distributex_cloud-2.0.7/distributex_cloud.egg-info/dependency_links.txt +1 -0
- distributex_cloud-2.0.7/distributex_cloud.egg-info/requires.txt +6 -0
- distributex_cloud-2.0.7/distributex_cloud.egg-info/top_level.txt +1 -0
- distributex_cloud-2.0.7/pyproject.toml +52 -0
- distributex_cloud-2.0.7/setup.cfg +4 -0
- distributex_cloud-2.0.7/setup.py +42 -0
+++ distributex_cloud-2.0.7/PKG-INFO
@@ -0,0 +1,36 @@
Metadata-Version: 2.4
Name: distributex-cloud
Version: 2.0.7
Summary: Distributed computing platform - run code on global resource pool
Home-page: https://github.com/DistributeX-Cloud/distributex-cli-public
Author: DistributeX Team
Author-email: DistributeX Team <dev@distributex.cloud>
License: MIT
Project-URL: Homepage, https://github.com/DistributeX-Cloud/distributex-cli-public
Project-URL: Documentation, https://distributex.cloud/docs
Project-URL: Dashboard, https://distributex.cloud
Project-URL: Repository, https://github.com/DistributeX-Cloud/distributex-cli-public
Project-URL: Issues, https://github.com/DistributeX-Cloud/distributex-cli-public/issues
Keywords: distributed,computing,cloud,parallel,gpu,cpu,ml,ai
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Distributed Computing
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Operating System :: OS Independent
Requires-Python: >=3.8
Description-Content-Type: text/markdown
Requires-Dist: requests>=2.28.0
Provides-Extra: dev
Requires-Dist: pytest>=7.0.0; extra == "dev"
Requires-Dist: black>=22.0.0; extra == "dev"
Requires-Dist: flake8>=4.0.0; extra == "dev"
Dynamic: author
Dynamic: home-page
Dynamic: requires-python
+++ distributex_cloud-2.0.7/distributex/__init__.py
@@ -0,0 +1,20 @@
"""
DistributeX Python SDK
======================
Distributed computing platform for running code on a global pool of resources.

Quick Start:
    from distributex import DistributeX

    dx = DistributeX(api_key="your_api_key")
    result = dx.run(my_function, args=(data,), workers=4, gpu=True)

Documentation:
    https://distributex.cloud/
"""

from .client import DistributeX, Task

__version__ = "2.0.7"
__author__ = "DistributeX Team"
__all__ = ["DistributeX", "Task"]
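Read alongside the Quick Start in that docstring, here is a minimal sketch of the exported API filled out into a runnable form; the `double` function and the environment-variable lookup are illustrative assumptions, not part of the package, and only `DistributeX` and `Task` are actually exported.

import os
from distributex import DistributeX

# Hypothetical workload; any importable, source-inspectable function is submitted the same way
def double(x):
    return x * 2

# The client also reads DISTRIBUTEX_API_KEY itself if api_key is omitted (see client.py below)
dx = DistributeX(api_key=os.getenv("DISTRIBUTEX_API_KEY"))
result = dx.run(double, args=(21,), workers=4, gpu=True)
print(result)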
+++ distributex_cloud-2.0.7/distributex/client.py
@@ -0,0 +1,621 @@
"""
DistributeX Python SDK v2.1 - WITH PACKAGE BUNDLING
====================================================
Workers use packages from developer's environment
No installation needed on worker side
"""

import os
import json
import time
import requests
import inspect
import hashlib
import base64
import ast
import re
import sys
import subprocess
import tempfile
import tarfile
import shutil
from typing import Any, Callable, Optional, Dict, List, Set
from dataclasses import dataclass
import textwrap

__version__ = "2.1.0"

@dataclass
class Task:
    id: str
    status: str
    progress: float = 0.0
    error: Optional[str] = None

# ============================================================================
# PACKAGE BUNDLER - Bundles developer's packages with the script
# ============================================================================
class PackageBundler:
    """Bundles installed packages from developer's environment"""

    @staticmethod
    def get_package_info(package_name: str) -> Optional[Dict]:
        """Get information about an installed package"""
        try:
            import importlib.metadata
            dist = importlib.metadata.distribution(package_name)
            return {
                'name': dist.metadata['Name'],
                'version': dist.version,
                'location': str(dist.locate_file('')),
            }
        except:
            return None

    @staticmethod
    def get_all_dependencies(packages: List[str]) -> Set[str]:
        """Get all packages and their dependencies"""
        all_packages = set(packages)

        try:
            import importlib.metadata

            def get_deps(pkg_name: str):
                try:
                    dist = importlib.metadata.distribution(pkg_name)
                    if dist.requires:
                        for req in dist.requires:
                            # Parse requirement string (e.g., "requests>=2.0.0")
                            dep_name = req.split()[0].split('>')[0].split('<')[0].split('=')[0].split('!')[0]
                            if dep_name not in all_packages:
                                all_packages.add(dep_name)
                                get_deps(dep_name)  # Recursive
                except:
                    pass

            for pkg in list(packages):
                get_deps(pkg)

        except:
            pass

        return all_packages

    @staticmethod
    def bundle_packages(packages: List[str], bundle_dir: str) -> str:
        """Bundle packages WITH all dependencies"""

        # Get all dependencies
        all_packages = PackageBundler.get_all_dependencies(packages)

        print(f"📦 Bundling {len(all_packages)} packages (including dependencies)...")

        packages_dir = os.path.join(bundle_dir, 'packages')
        os.makedirs(packages_dir, exist_ok=True)

        # Download all packages at once (more efficient)
        try:
            print(f"   Downloading packages...")
            subprocess.run([
                sys.executable, '-m', 'pip', 'download',
                '--dest', packages_dir,
                *all_packages  # Download all at once
            ], check=True, capture_output=True)
        except subprocess.CalledProcessError as e:
            print(f"   ⚠️ Some packages failed to download: {e}")

        # Create tarball
        tarball_path = os.path.join(bundle_dir, 'packages.tar.gz')
        with tarfile.open(tarball_path, 'w:gz') as tar:
            tar.add(packages_dir, arcname='packages')

        package_files = [f for f in os.listdir(packages_dir)
                         if f.endswith(('.whl', '.tar.gz'))]
        size_mb = os.path.getsize(tarball_path) / (1024 * 1024)
        print(f"✅ Bundled {len(package_files)} packages ({size_mb:.2f} MB)")

        return tarball_path

    @staticmethod
    def create_install_script(packages: List[str]) -> str:
        """Create script to install bundled packages on worker"""
        return f"""
# Install bundled packages (offline)
import subprocess
import sys
import os

packages_dir = os.path.join(os.getcwd(), 'packages')
if os.path.exists(packages_dir):
    print("Installing bundled packages...")

    # Find all .whl and .tar.gz files
    package_files = []
    for file in os.listdir(packages_dir):
        if file.endswith(('.whl', '.tar.gz')):
            package_files.append(os.path.join(packages_dir, file))

    if package_files:
        subprocess.run([
            sys.executable, '-m', 'pip', 'install',
            '--no-index',  # Don't use PyPI
            '--find-links', packages_dir,  # Use local files
            '--quiet',
            '--disable-pip-version-check',
            *package_files
        ], check=True)
        print(f"✅ Installed {{len(package_files)}} bundled packages")
    else:
        print("⚠️ No package files found in bundle")
else:
    print("⚠️ No packages directory found - packages may need to be installed")
"""

# ============================================================================
# IMPORT DETECTOR
# ============================================================================
class ImportDetector(ast.NodeVisitor):
    """AST visitor to detect all imports in a function"""

    def __init__(self):
        self.imports = set()
        self.from_imports = set()

    def visit_Import(self, node):
        for alias in node.names:
            self.imports.add(alias.name.split('.')[0])
        self.generic_visit(node)

    def visit_ImportFrom(self, node):
        if node.module:
            self.from_imports.add(node.module.split('.')[0])
        self.generic_visit(node)

    @classmethod
    def detect_imports(cls, code: str) -> Set[str]:
        """Extract all package names imported in code"""
        try:
            tree = ast.parse(code)
            detector = cls()
            detector.visit(tree)

            all_imports = detector.imports | detector.from_imports

            # Python standard library (don't bundle these)
            stdlib = {
                'os', 'sys', 're', 'json', 'time', 'datetime', 'math',
                'random', 'collections', 'itertools', 'functools', 'operator',
                'pathlib', 'io', 'typing', 'dataclasses', 'enum', 'abc',
                'contextlib', 'copy', 'pickle', 'hashlib', 'base64', 'struct',
                'array', 'queue', 'threading', 'multiprocessing', 'subprocess',
                'socket', 'ssl', 'http', 'urllib', 'email', 'html', 'xml',
                'csv', 'configparser', 'logging', 'unittest', 'doctest',
                'argparse', 'getpass', 'tempfile', 'shutil', 'glob', 'fnmatch'
            }

            return {pkg for pkg in all_imports if pkg not in stdlib}

        except SyntaxError:
            return set()

# ============================================================================
# FUNCTION SERIALIZER - WITH PACKAGE BUNDLING
# ============================================================================
class FunctionSerializer:
    """Serialize Python functions with bundled packages"""

    @staticmethod
    def extract_function_source(func: Callable) -> str:
        try:
            source = inspect.getsource(func)
            return textwrap.dedent(source)
        except (OSError, TypeError):
            func_name = getattr(func, '__name__', 'function')
            return f"def {func_name}(*args, **kwargs):\n    raise NotImplementedError('Cannot serialize this function type')\n"

    @staticmethod
    def extract_dependencies(func: Callable) -> Set[str]:
        source = FunctionSerializer.extract_function_source(func)
        return ImportDetector.detect_imports(source)

    @staticmethod
    def create_executable_script(
        func: Callable,
        args: tuple,
        kwargs: dict,
        bundle_packages: bool = True
    ) -> Dict[str, Any]:
        """Create executable script with bundled packages"""

        func_source = FunctionSerializer.extract_function_source(func)
        func_name = func.__name__

        # Detect required packages
        packages = FunctionSerializer.extract_dependencies(func)

        # Separate import lines from function body
        import_lines = []
        function_body = []
        in_imports = True

        for line in func_source.split('\n'):
            stripped = line.strip()
            if stripped.startswith('import ') or stripped.startswith('from '):
                if in_imports:
                    import_lines.append(line)
                else:
                    function_body.append(line)
            elif stripped.startswith('def '):
                in_imports = False
                function_body.append(line)
            else:
                in_imports = False
                function_body.append(line)

        # Build script
        script_parts = [
            '#!/usr/bin/env python3',
            '"""',
            f'DistributeX Task - Function: {func_name}',
            f'SDK Version: {__version__}',
            'Using bundled packages from developer environment',
            '"""',
            '',
        ]

        # Add package installer for bundled packages
        if bundle_packages and packages:
            script_parts.extend([
                '# Install bundled packages',
                PackageBundler.create_install_script(list(packages)),
                '',
            ])

        # Add imports
        if import_lines:
            script_parts.append('# Required imports')
            script_parts.extend(import_lines)
            script_parts.append('')

        script_parts.extend([
            'import json',
            'import traceback',
            '',
        ])

        # Add function
        script_parts.append('# User function')
        script_parts.extend(function_body)
        script_parts.append('')

        # Add execution wrapper
        script_parts.extend([
            '# Execution wrapper',
            'def main():',
            '    """Execute function and save results"""',
            f'    args = {repr(args)}',
            f'    kwargs = {repr(kwargs)}',
            '',
            f'    print(f"Executing {func_name} with bundled packages")',
            '',
            '    try:',
            f'        result = {func_name}(*args, **kwargs)',
            '',
            '        result_data = {',
            '            "success": True,',
            '            "result": result,',
            f'            "function": "{func_name}"',
            '        }',
            '',
            '        with open("result.json", "w") as f:',
            '            json.dump(result_data, f, indent=2, default=str)',
            '',
            '        print(f"✅ Execution complete!")',
            '        print(f"Result: {result}")',
            '        return 0',
            '',
            '    except Exception as e:',
            '        error_msg = str(e)',
            '        error_trace = traceback.format_exc()',
            '',
            '        result_data = {',
            '            "success": False,',
            '            "error": error_msg,',
            '            "traceback": error_trace',
            '        }',
            '',
            '        with open("result.json", "w") as f:',
            '            json.dump(result_data, f, indent=2)',
            '',
            '        print(f"❌ Error: {error_msg}")',
            '        print(error_trace)',
            '        return 1',
            '',
            'if __name__ == "__main__":',
            '    import sys',
            '    sys.exit(main())',
        ])

        script = '\n'.join(script_parts)

        return {
            'script': script,
            'packages': sorted(packages),
            'hash': hashlib.sha256(script.encode()).hexdigest(),
            'bundle_packages': bundle_packages
        }

# ============================================================================
# MAIN CLIENT - WITH PACKAGE BUNDLING
# ============================================================================
class DistributeX:
    """DistributeX Client with Package Bundling"""

    def __init__(self, api_key=None, base_url="https://distributex.cloud", debug=False):
        self.api_key = api_key or os.getenv("DISTRIBUTEX_API_KEY")

        if not self.api_key:
            raise ValueError(
                "API key required!\n\n"
                "Option 1 - Pass directly:\n"
                "  dx = DistributeX(api_key='dx_your_key')\n\n"
                "Option 2 - Set environment variable:\n"
                "  export DISTRIBUTEX_API_KEY='dx_your_key'\n\n"
                "Get your API key at: https://distributex.cloud/api-dashboard"
            )

        self.base_url = base_url.rstrip('/')
        self.debug = debug
        self.session = requests.Session()
        self.session.headers.update({
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "User-Agent": f"DistributeX-Python-SDK/{__version__}"
        })

        if self.debug:
            print(f"DistributeX SDK v{__version__} - Package Bundling Enabled")
            print(f"Connected to: {self.base_url}")

    def run(self,
            func: Callable,
            args: tuple = (),
            kwargs: Optional[dict] = None,
            workers: int = 1,
            cpu_per_worker: int = 2,
            ram_per_worker: int = 2048,
            gpu: bool = False,
            cuda: bool = False,
            storage: int = 10,
            timeout: int = 3600,
            priority: int = 5,
            wait: bool = True,
            bundle_packages: bool = True) -> Any:
        """
        Run a function on the distributed network

        Args:
            bundle_packages: If True, bundles packages from your environment.
                If False, workers will install packages from PyPI.
        """
        kwargs = kwargs or {}

        print(f"\n{'=' * 60}")
        print(f"SUBMITTING TASK: {func.__name__}")
        print(f"{'=' * 60}")

        # Create temporary working directory
        with tempfile.TemporaryDirectory() as temp_dir:
            # Serialize function
            serialized = FunctionSerializer.create_executable_script(
                func, args, kwargs, bundle_packages=bundle_packages
            )

            script_source = serialized['script']
            detected_packages = serialized['packages']
            script_hash = serialized['hash']

            if detected_packages:
                print(f"📦 Detected packages: {', '.join(detected_packages)}")

            # Bundle packages if requested
            packages_tarball = None
            if bundle_packages and detected_packages:
                packages_tarball = PackageBundler.bundle_packages(
                    list(detected_packages),
                    temp_dir
                )

            # Create combined archive (script + packages)
            archive_path = os.path.join(temp_dir, 'task.tar.gz')
            with tarfile.open(archive_path, 'w:gz') as tar:
                # Add script
                script_path = os.path.join(temp_dir, 'script.py')
                with open(script_path, 'w') as f:
                    f.write(script_source)
                tar.add(script_path, arcname='script.py')

                # Add bundled packages if available
                if packages_tarball and os.path.exists(packages_tarball):
                    with tarfile.open(packages_tarball, 'r:gz') as pkg_tar:
                        for member in pkg_tar.getmembers():
                            tar.addfile(member, pkg_tar.extractfile(member))

            # Read archive as base64
            with open(archive_path, 'rb') as f:
                archive_data = f.read()

            archive_b64 = base64.b64encode(archive_data).decode('ascii')
            archive_size = len(archive_data) / (1024 * 1024)

            print(f"📤 Uploading task bundle ({archive_size:.2f} MB)...")

            # Submit task
            task_data = {
                'name': f'Function: {func.__name__}',
                'taskType': 'script_execution',
                'runtime': 'python',
                'executionScript': archive_b64,
                'scriptHash': script_hash,
                'workers': workers,
                'cpuPerWorker': cpu_per_worker,
                'ramPerWorker': ram_per_worker,
                'gpuRequired': gpu,
                'requiresCuda': cuda,
                'storageRequired': storage,
                'timeout': timeout,
                'priority': priority,
                'bundledPackages': bundle_packages,
                'packageList': list(detected_packages) if detected_packages else []
            }

            try:
                resp = self.session.post(
                    f"{self.base_url}/api/tasks/execute",
                    json=task_data,
                    timeout=60  # Longer timeout for upload
                )
                resp.raise_for_status()
                result = resp.json()

            except requests.exceptions.RequestException as e:
                print(f"❌ API request failed: {e}")
                if hasattr(e, 'response') and e.response is not None:
                    print(f"Response: {e.response.text[:500]}")
                raise RuntimeError(f"Failed to submit task: {e}")

            if not result.get('success', True):
                error_msg = result.get('message', 'Unknown error')
                raise RuntimeError(f"Task submission failed: {error_msg}")

            task_id = result.get('id')
            if not task_id:
                raise RuntimeError("API did not return task ID")

            print(f"✅ Task submitted successfully!")
            print(f"   Task ID: {task_id}")
            print(f"   Status: {result.get('status', 'pending')}")

            if bundle_packages and detected_packages:
                print(f"   📦 Using bundled packages from your environment")
            else:
                print(f"   📥 Worker will install packages from PyPI")

            task = Task(id=task_id, status=result.get('status', 'pending'))

            if not wait:
                return task

            print(f"\n⏳ Waiting for execution...\n")
            return self._wait_for_result(task_id)

    def _wait_for_result(self, task_id: str) -> Any:
        """Wait for task completion and return result"""
        last_progress = -1

        while True:
            try:
                task = self.get_task(task_id)

                # Show progress
                if task.progress != last_progress and task.progress > 0:
                    print(f"Progress: {task.progress:.1f}%", end='\r', flush=True)
                    last_progress = task.progress

                # Check completion
                if task.status == 'completed':
                    print('\r' + ' ' * 40 + '\r', end='', flush=True)
                    print(f"✅ Execution completed!\n")
                    result = self.get_result(task_id)

                    # Extract clean result
                    if isinstance(result, dict):
                        if 'output' in result:
                            return result['output']
                        if 'result' in result:
                            return result['result']
                        return result
                    return result

                # Check failure
                if task.status == 'failed':
                    print(f"\n❌ Task failed: {task.error}")
                    raise RuntimeError(task.error or 'Task failed')

                time.sleep(5)

            except KeyboardInterrupt:
                print(f"\nInterrupted")
                raise

    def get_task(self, task_id: str) -> Task:
        """Get task status"""
        try:
            r = self.session.get(f"{self.base_url}/api/tasks/{task_id}", timeout=10)
            r.raise_for_status()
            data = r.json()

            return Task(
                id=data['id'],
                status=data['status'],
                progress=data.get('progressPercent', 0),
                error=data.get('errorMessage')
            )
        except requests.exceptions.RequestException as e:
            raise RuntimeError(f"Failed to get task status: {e}")

    def get_result(self, task_id: str) -> Any:
        """Get task result"""
        try:
            r = self.session.get(f"{self.base_url}/api/tasks/{task_id}/result", timeout=10)
            r.raise_for_status()

            content_type = r.headers.get('content-type', '')

            if 'json' in content_type:
                data = r.json()
                if isinstance(data, dict) and 'result' in data:
                    return data['result']
                return data

            return r.text

        except requests.exceptions.RequestException as e:
            raise RuntimeError(f"Failed to get result: {e}")

# ============================================================================
# EXAMPLE USAGE
# ============================================================================
if __name__ == '__main__':
    api_key = os.getenv('DISTRIBUTEX_API_KEY')
    if not api_key:
        print("❌ Set DISTRIBUTEX_API_KEY environment variable")
        sys.exit(1)

    dx = DistributeX(api_key=api_key, debug=True)

    print("\n" + "=" * 60)
    print("Example: Using bundled packages from your environment")
    print("=" * 60)

    def compute_with_numpy(n):
        import numpy as np
        print(f"Using NumPy version: {np.__version__}")

        # Heavy computation
        matrix = np.random.rand(n, n)
        result = np.linalg.eigvals(matrix)

        return f"Computed eigenvalues for {n}x{n} matrix using YOUR NumPy installation"

    # Run with bundled packages
    result = dx.run(
        compute_with_numpy,
        args=(1000,),
        cpu_per_worker=4,
        ram_per_worker=4096,
        bundle_packages=True,  # Use your installed packages
        wait=True
    )

    print(f"\n✅ Result: {result}")
    print("\n🎉 Worker used your bundled NumPy - no installation needed!")
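The client above also supports a non-blocking flow: run(..., wait=False) returns the Task dataclass immediately, and the caller can poll get_task() and get_result() itself. A minimal sketch of that pattern, assuming a hypothetical stdlib-only function add and an API key in DISTRIBUTEX_API_KEY:

import os
import time
from distributex import DistributeX

def add(a, b):
    # Illustrative workload; stdlib-only, so no third-party packages get bundled
    return a + b

dx = DistributeX(api_key=os.getenv("DISTRIBUTEX_API_KEY"))

# Submit without blocking; run() returns a Task(id, status, progress, error)
task = dx.run(add, args=(2, 3), workers=1, wait=False)

# Poll ourselves instead of using the built-in _wait_for_result() loop
while True:
    status = dx.get_task(task.id)
    if status.status == "completed":
        print(dx.get_result(task.id))
        break
    if status.status == "failed":
        raise RuntimeError(status.error or "Task failed")
    time.sleep(5)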
+++ distributex_cloud-2.0.7/distributex_cloud.egg-info/PKG-INFO
@@ -0,0 +1,36 @@
Metadata-Version: 2.4
Name: distributex-cloud
Version: 2.0.7
Summary: Distributed computing platform - run code on global resource pool
Home-page: https://github.com/DistributeX-Cloud/distributex-cli-public
Author: DistributeX Team
Author-email: DistributeX Team <dev@distributex.cloud>
License: MIT
Project-URL: Homepage, https://github.com/DistributeX-Cloud/distributex-cli-public
Project-URL: Documentation, https://distributex.cloud/docs
Project-URL: Dashboard, https://distributex.cloud
Project-URL: Repository, https://github.com/DistributeX-Cloud/distributex-cli-public
Project-URL: Issues, https://github.com/DistributeX-Cloud/distributex-cli-public/issues
Keywords: distributed,computing,cloud,parallel,gpu,cpu,ml,ai
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Distributed Computing
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Operating System :: OS Independent
Requires-Python: >=3.8
Description-Content-Type: text/markdown
Requires-Dist: requests>=2.28.0
Provides-Extra: dev
Requires-Dist: pytest>=7.0.0; extra == "dev"
Requires-Dist: black>=22.0.0; extra == "dev"
Requires-Dist: flake8>=4.0.0; extra == "dev"
Dynamic: author
Dynamic: home-page
Dynamic: requires-python
|
|
|
1
|
+
MANIFEST.in
|
|
2
|
+
pyproject.toml
|
|
3
|
+
setup.py
|
|
4
|
+
distributex/__init__.py
|
|
5
|
+
distributex/client.py
|
|
6
|
+
distributex_cloud.egg-info/PKG-INFO
|
|
7
|
+
distributex_cloud.egg-info/SOURCES.txt
|
|
8
|
+
distributex_cloud.egg-info/dependency_links.txt
|
|
9
|
+
distributex_cloud.egg-info/requires.txt
|
|
10
|
+
distributex_cloud.egg-info/top_level.txt
|
|
+++ distributex_cloud-2.0.7/distributex_cloud.egg-info/dependency_links.txt
@@ -0,0 +1 @@
(single blank line)
+++ distributex_cloud-2.0.7/distributex_cloud.egg-info/top_level.txt
@@ -0,0 +1 @@
distributex
+++ distributex_cloud-2.0.7/pyproject.toml
@@ -0,0 +1,52 @@
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "distributex-cloud"
version = "2.0.7"
description = "Distributed computing platform - run code on global resource pool"
readme = "README.md"
requires-python = ">=3.8"
license = {text = "MIT"}
keywords = ["distributed", "computing", "cloud", "parallel", "gpu", "cpu", "ml", "ai"]

authors = [
    { name = "DistributeX Team", email = "dev@distributex.cloud" }
]

classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: System :: Distributed Computing",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Operating System :: OS Independent",
]

dependencies = [
    "requests>=2.28.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "black>=22.0.0",
    "flake8>=4.0.0",
]

[project.urls]
Homepage = "https://github.com/DistributeX-Cloud/distributex-cli-public"
Documentation = "https://distributex.cloud/docs"
Dashboard = "https://distributex.cloud"
Repository = "https://github.com/DistributeX-Cloud/distributex-cli-public"
Issues = "https://github.com/DistributeX-Cloud/distributex-cli-public/issues"

[tool.setuptools]
packages = ["distributex"]
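Once a wheel built from this pyproject.toml is installed, the declared metadata can be checked with the same importlib.metadata API that PackageBundler.get_package_info uses in client.py above; a minimal sketch, with the expected values shown as comments rather than captured output:

import importlib.metadata

dist = importlib.metadata.distribution("distributex-cloud")
print(dist.metadata["Name"])  # expected: distributex-cloud
print(dist.version)           # expected: 2.0.7
print(dist.requires)          # expected to include 'requests>=2.28.0' plus the dev extras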
+++ distributex_cloud-2.0.7/setup.py
@@ -0,0 +1,42 @@
"""
DistributeX Cloud SDK Setup
"""

from setuptools import setup, find_packages
from pathlib import Path

# Read README
readme_file = Path(__file__).parent / "README.md"
if readme_file.exists():
    with open(readme_file, "r", encoding="utf-8") as f:
        long_description = f.read()
else:
    long_description = "DistributeX - Distributed Computing SDK"

setup(
    name="distributex-cloud",
    version="2.0.7",
    author="DistributeX Team",
    author_email="unavailable",
    description="Distributed computing platform - run code on global resource pool",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/DistributeX-Cloud/distributex-cli-public",
    packages=find_packages(),
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
    ],
    python_requires=">=3.8",
    install_requires=[
        "requests>=2.28.0",
    ],
    keywords=["distributed", "computing", "cloud", "parallel", "gpu"],
)