cycls 0.0.2.31__py3-none-any.whl → 0.0.2.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cycls/__init__.py +2 -1
- cycls/cycls.py +1 -0
- cycls/runtime.py +395 -0
- cycls/sdk.py +22 -0
- {cycls-0.0.2.31.dist-info → cycls-0.0.2.32.dist-info}/METADATA +3 -1
- cycls-0.0.2.32.dist-info/RECORD +9 -0
- cycls-0.0.2.31.dist-info/RECORD +0 -7
- {cycls-0.0.2.31.dist-info → cycls-0.0.2.32.dist-info}/WHEEL +0 -0
cycls/__init__.py
CHANGED
|
@@ -1 +1,2 @@
|
|
|
1
|
-
from .cycls import Agent
|
|
1
|
+
from .cycls import Agent
|
|
2
|
+
from .sdk import function
|
cycls/cycls.py
CHANGED
cycls/runtime.py
ADDED
|
@@ -0,0 +1,395 @@
|
|
|
1
|
+
import docker
|
|
2
|
+
import cloudpickle
|
|
3
|
+
import tempfile
|
|
4
|
+
import hashlib
|
|
5
|
+
import os
|
|
6
|
+
import sys
|
|
7
|
+
import shutil
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from contextlib import contextmanager
|
|
10
|
+
import tarfile
|
|
11
|
+
|
|
12
|
+
# --- Docker Client Initialization ---
# A single module-level client, created at import time and shared by every
# Runtime instance in this module.
# NOTE(review): an unreachable daemon terminates the *host* process at import
# time via sys.exit(1); consider raising instead so library users can handle
# the error themselves — confirm no caller relies on the hard exit.
try:
    docker_client = docker.from_env()
except docker.errors.DockerException:
    print("❌ Error: Docker is not running or not installed.")
    print("Please start the Docker daemon and try again.")
    sys.exit(1)
|
|
19
|
+
|
|
20
|
+
# --- Top-Level Helper Functions ---
|
|
21
|
+
|
|
22
|
+
def _bootstrap_script(payload_file: str, result_file: str) -> str:
|
|
23
|
+
"""Generates the Python script that runs inside the Docker container."""
|
|
24
|
+
return f"""
|
|
25
|
+
import cloudpickle
|
|
26
|
+
import sys
|
|
27
|
+
import os
|
|
28
|
+
import traceback
|
|
29
|
+
from pathlib import Path
|
|
30
|
+
|
|
31
|
+
if __name__ == "__main__":
|
|
32
|
+
io_dir = Path(sys.argv[1])
|
|
33
|
+
payload_path = io_dir / '{payload_file}'
|
|
34
|
+
result_path = io_dir / '{result_file}'
|
|
35
|
+
|
|
36
|
+
try:
|
|
37
|
+
with open(payload_path, 'rb') as f:
|
|
38
|
+
func, args, kwargs = cloudpickle.load(f)
|
|
39
|
+
|
|
40
|
+
result = func(*args, **kwargs)
|
|
41
|
+
|
|
42
|
+
with open(result_path, 'wb') as f:
|
|
43
|
+
cloudpickle.dump(result, f)
|
|
44
|
+
|
|
45
|
+
except Exception as e:
|
|
46
|
+
traceback.print_exc(file=sys.stderr)
|
|
47
|
+
sys.exit(1)
|
|
48
|
+
"""
|
|
49
|
+
|
|
50
|
+
def _hash_path(path_str: str) -> str:
|
|
51
|
+
"""Hashes a file or a directory's contents to create a deterministic signature."""
|
|
52
|
+
h = hashlib.sha256()
|
|
53
|
+
p = Path(path_str)
|
|
54
|
+
if p.is_file():
|
|
55
|
+
with p.open('rb') as f:
|
|
56
|
+
while chunk := f.read(65536):
|
|
57
|
+
h.update(chunk)
|
|
58
|
+
elif p.is_dir():
|
|
59
|
+
for root, dirs, files in os.walk(p, topdown=True):
|
|
60
|
+
dirs.sort()
|
|
61
|
+
files.sort()
|
|
62
|
+
for name in files:
|
|
63
|
+
filepath = Path(root) / name
|
|
64
|
+
relpath = filepath.relative_to(p)
|
|
65
|
+
h.update(str(relpath).encode())
|
|
66
|
+
with filepath.open('rb') as f:
|
|
67
|
+
while chunk := f.read(65536):
|
|
68
|
+
h.update(chunk)
|
|
69
|
+
return h.hexdigest()
|
|
70
|
+
|
|
71
|
+
def _copy_path(src_path: Path, dest_path: Path):
|
|
72
|
+
"""Recursively copies a file or directory to a destination path."""
|
|
73
|
+
if src_path.is_dir():
|
|
74
|
+
shutil.copytree(src_path, dest_path, dirs_exist_ok=True)
|
|
75
|
+
else:
|
|
76
|
+
dest_path.parent.mkdir(parents=True, exist_ok=True)
|
|
77
|
+
shutil.copy(src_path, dest_path)
|
|
78
|
+
|
|
79
|
+
# --- Main Runtime Class ---
|
|
80
|
+
|
|
81
|
+
class Runtime:
    """
    Handles building a Docker image and executing a function within a container.

    The wrapped function is serialized with cloudpickle and executed by a
    generated runner script inside the image. Images are tagged with a content
    hash of their inputs so rebuilds only happen when something changed.
    """
    def __init__(self, func, name, python_version=None, pip_packages=None, apt_packages=None, run_commands=None, copy=None, base_url=None, api_key=None):
        self.func = func
        self.python_version = python_version or "3.12"
        # pip/apt package order does not affect the installed result, so sort
        # them to keep the image signature stable regardless of caller order.
        self.pip_packages = sorted(pip_packages or [])
        self.apt_packages = sorted(apt_packages or [])
        # RUN commands are order-dependent (e.g. fetch before install), so the
        # caller's order must be preserved — do NOT sort these.
        self.run_commands = list(run_commands or [])
        self.copy = copy or {}
        self.name = name
        self.base_url = base_url or "https://service-core-280879789566.me-central1.run.app"
        self.image_prefix = f"cycls/{name}"

        # Standard paths and filenames used inside the container
        self.io_dir = "/app/io"
        self.runner_filename = "runner.py"
        self.runner_path = f"/app/{self.runner_filename}"
        self.payload_file = "payload.pkl"
        self.result_file = "result.pkl"

        self.runner_script = _bootstrap_script(self.payload_file, self.result_file)
        self.tag = self._generate_base_tag()

        self.api_key = api_key

    def _generate_base_tag(self) -> str:
        """Creates a unique tag for the base Docker image based on its dependencies.

        Raises:
            FileNotFoundError: if any source path in ``copy`` does not exist.
        """
        signature_parts = [
            self.python_version,
            "|".join(self.pip_packages),
            "|".join(self.apt_packages),
            "|".join(self.run_commands),
            self.runner_script,
        ]
        for src, dst in sorted(self.copy.items()):
            if not Path(src).exists():
                raise FileNotFoundError(f"Path in 'copy' not found: {src}")
            content_hash = _hash_path(src)
            signature_parts.append(f"copy:{src}>{dst}:{content_hash}")

        # Join with a separator so adjacent parts cannot collide
        # (e.g. ["ab", "c"] vs ["a", "bc"] previously hashed identically).
        signature = "|".join(signature_parts)
        image_hash = hashlib.sha256(signature.encode()).hexdigest()
        return f"{self.image_prefix}:{image_hash[:16]}"

    def _generate_dockerfile(self, port=None) -> str:
        """Generates a multi-stage Dockerfile string.

        Stage 'base' contains all dependencies plus the runner script; the
        final stage bakes the pickled payload in so the image is
        self-contained and deployable.
        """
        run_pip_install = f"RUN pip install --no-cache-dir cloudpickle {' '.join(self.pip_packages)}"
        run_apt_install = (
            f"RUN apt-get update && apt-get install -y --no-install-recommends {' '.join(self.apt_packages)}"
            if self.apt_packages else ""
        )
        run_shell_commands = "\n".join([f"RUN {cmd}" for cmd in self.run_commands]) if self.run_commands else ""
        copy_lines = "\n".join([f"COPY {src} {dst}" for src, dst in self.copy.items()])
        expose_line = f"EXPOSE {port}" if port else ""

        return f"""
# STAGE 1: Base image with all dependencies
FROM python:{self.python_version}-slim as base
ENV PIP_ROOT_USER_ACTION=ignore
ENV PYTHONUNBUFFERED=1
RUN mkdir -p {self.io_dir}
{run_apt_install}
{run_pip_install}
{run_shell_commands}
{copy_lines}
COPY {self.runner_filename} {self.runner_path}
ENTRYPOINT ["python", "{self.runner_path}", "{self.io_dir}"]

# STAGE 2: Final deployable image with the payload "baked in"
FROM base
{expose_line}
COPY {self.payload_file} {self.io_dir}/
"""

    def _prepare_build_context(self, workdir: Path, include_payload=False, args=None, kwargs=None):
        """Prepares a complete build context in the given directory.

        Writes the Dockerfile and runner script, optionally the pickled
        (func, args, kwargs) payload, and mirrors every ``copy`` source path
        into the context.
        """
        port = kwargs.get('port') if kwargs else None

        (workdir / "Dockerfile").write_text(self._generate_dockerfile(port=port))
        (workdir / self.runner_filename).write_text(self.runner_script)

        if include_payload:
            payload_bytes = cloudpickle.dumps((self.func, args or [], kwargs or {}))
            (workdir / self.payload_file).write_bytes(payload_bytes)

        if self.copy:
            # NOTE(review): 'copy' sources are assumed to be relative paths —
            # an absolute src would escape the context (workdir / src ignores
            # workdir when src is absolute). Confirm callers never pass one.
            for src in self.copy.keys():
                _copy_path(Path(src), workdir / src)

    def _build_image_if_needed(self):
        """Checks if the base Docker image exists locally and builds it if not.

        Raises:
            docker.errors.BuildError: when the daemon reports a build failure.
        """
        try:
            docker_client.images.get(self.tag)
            print(f"✅ Found cached base image: {self.tag}")
            return
        except docker.errors.ImageNotFound:
            print(f"🛠️ Building new base image: {self.tag}")

        with tempfile.TemporaryDirectory() as tmpdir_str:
            tmpdir = Path(tmpdir_str)
            # Prepare context without payload for the base image
            self._prepare_build_context(tmpdir)

            print("--- 🐳 Docker Build Logs (Base Image) ---")
            response_generator = docker_client.api.build(
                path=str(tmpdir),
                tag=self.tag,
                forcerm=True,
                decode=True,
                target='base'  # Only build the 'base' stage
            )
            try:
                for chunk in response_generator:
                    if 'stream' in chunk:
                        print(chunk['stream'].strip())
                    elif 'error' in chunk:
                        # The low-level build API reports failures as 'error'
                        # chunks instead of raising — surface them, otherwise
                        # a broken build would be silently tagged as success.
                        raise docker.errors.BuildError(chunk['error'], None)
                print("----------------------------------------")
                print(f"✅ Base image built successfully: {self.tag}")
            except docker.errors.BuildError as e:
                print(f"\n❌ Docker build failed. Reason: {e}")
                raise

    @contextmanager
    def runner(self, *args, **kwargs):
        """Context manager to set up, run, and tear down the container for local execution.

        Pickles the call into a shared temp directory, starts the container
        with that directory mounted at ``io_dir``, and always stops/removes
        the container on exit.

        Yields:
            (container, result_path): the running container object and the
            host path where the result pickle will appear.
        """
        port = kwargs.get('port', None)
        self._build_image_if_needed()
        container = None
        ports_mapping = {f'{port}/tcp': port} if port else None

        with tempfile.TemporaryDirectory() as tmpdir_str:
            tmpdir = Path(tmpdir_str)
            payload_path = tmpdir / self.payload_file
            result_path = tmpdir / self.result_file

            with payload_path.open('wb') as f:
                cloudpickle.dump((self.func, args, kwargs), f)

            try:
                container = docker_client.containers.create(
                    image=self.tag,
                    volumes={str(tmpdir): {'bind': self.io_dir, 'mode': 'rw'}},
                    ports=ports_mapping
                )
                container.start()
                yield container, result_path
            finally:
                if container:
                    print("\n🧹 Cleaning up container...")
                    try:
                        container.stop(timeout=5)
                        container.remove()
                        print("✅ Container stopped and removed.")
                    except docker.errors.APIError as e:
                        print(f"⚠️ Could not clean up container: {e}")

    def run(self, *args, **kwargs):
        """Executes the function in a new Docker container and waits for the result.

        Streams container logs to stdout, then unpickles and returns the
        result. Returns None on a non-zero exit, missing result file,
        interrupt, or Docker error.
        """
        print(f"🚀 Running function '{self.name}' in container...")
        try:
            with self.runner(*args, **kwargs) as (container, result_path):
                print("--- 🪵 Container Logs (streaming) ---")
                for chunk in container.logs(stream=True, follow=True):
                    print(chunk.decode('utf-8').strip())
                print("------------------------------------")

                result_status = container.wait()
                if result_status['StatusCode'] != 0:
                    print(f"\n❌ Error: Container exited with code: {result_status['StatusCode']}")
                    return None

                if result_path.exists():
                    with result_path.open('rb') as f:
                        result = cloudpickle.load(f)
                    print("✅ Function executed successfully.")
                    return result
                else:
                    print("\n❌ Error: Result file not found.")
                    return None
        except (KeyboardInterrupt, docker.errors.DockerException) as e:
            print(f"\n🛑 Operation stopped: {e}")
            return None

    def build(self, *args, **kwargs):
        """Builds a self-contained, deployable Docker image locally.

        Returns:
            The image tag on success, or None when the build fails.
        """
        print("📦 Building self-contained image for deployment...")
        payload_hash = hashlib.sha256(cloudpickle.dumps((self.func, args, kwargs))).hexdigest()[:16]
        final_tag = f"{self.image_prefix}:deploy-{payload_hash}"

        try:
            docker_client.images.get(final_tag)
            print(f"✅ Found cached deployable image: {final_tag}")
            return final_tag
        except docker.errors.ImageNotFound:
            print(f"🛠️ Building new deployable image: {final_tag}")

        with tempfile.TemporaryDirectory() as tmpdir_str:
            tmpdir = Path(tmpdir_str)
            self._prepare_build_context(tmpdir, include_payload=True, args=args, kwargs=kwargs)

            print("--- 🐳 Docker Build Logs (Final Image) ---")
            response_generator = docker_client.api.build(
                path=str(tmpdir), tag=final_tag, forcerm=True, decode=True
            )
            try:
                for chunk in response_generator:
                    if 'stream' in chunk:
                        print(chunk['stream'].strip())
                    elif 'error' in chunk:
                        # Surface daemon-reported failures (see
                        # _build_image_if_needed for rationale).
                        raise docker.errors.BuildError(chunk['error'], None)
                print("-----------------------------------------")
                print(f"✅ Image built successfully: {final_tag}")
                port = kwargs.get('port') if kwargs else None
                # Only suggest a port mapping when one was actually requested;
                # previously this printed "-p None:None".
                if port:
                    print(f"🤖 Run: docker run --rm -d -p {port}:{port} {final_tag}")
                else:
                    print(f"🤖 Run: docker run --rm -d {final_tag}")
                return final_tag
            except docker.errors.BuildError as e:
                print(f"\n❌ Docker build failed. Reason: {e}")
                return None

    def deploy(self, *args, **kwargs):
        """Deploys the function by sending it to a remote build server.

        Uploads a tarball of the build context to ``<base_url>/v1/deploy``
        and returns the resulting service URL, or None on any failure.
        """
        import requests

        print(f"🚀 Preparing to deploy function '{self.name}'")

        # 1. Prepare the build context and compress it into a tarball
        payload_hash = hashlib.sha256(cloudpickle.dumps((self.func, args, kwargs))).hexdigest()[:16]
        archive_name = f"source-{self.tag.split(':')[1]}-{payload_hash}.tar.gz"

        with tempfile.TemporaryDirectory() as tmpdir_str:
            tmpdir = Path(tmpdir_str)
            self._prepare_build_context(tmpdir, include_payload=True, args=args, kwargs=kwargs)

            archive_path = Path(tmpdir_str) / archive_name
            with tarfile.open(archive_path, "w:gz") as tar:
                # Add all files from the context to the tar archive; skip the
                # in-progress archive itself, which lives in the same
                # directory and would otherwise be (partially) included.
                for f in tmpdir.glob("**/*"):
                    if f.is_file() and f != archive_path:
                        tar.add(f, arcname=f.relative_to(tmpdir))

            # 2. Prepare the request payload
            port = kwargs.get('port', 8080)
            data_payload = {
                "function_name": self.name,
                "port": port,
                # "memory": "1Gi" # You could make this a parameter
            }
            headers = {
                "X-API-Key": self.api_key
            }

            # 3. Upload to the deploy server
            print("📦 Uploading build context to the deploy server...")
            try:
                with open(archive_path, 'rb') as f:
                    files = {'source_archive': (archive_name, f, 'application/gzip')}

                    response = requests.post(
                        f"{self.base_url}/v1/deploy",
                        data=data_payload,
                        files=files,
                        headers=headers,
                        timeout=1800  # Set a long timeout for the entire process
                    )

                # 4. Handle the server's response
                response.raise_for_status()  # Raise an exception for 4xx/5xx errors
                result = response.json()

                print(f"✅ Deployment successful!")
                print(f"🔗 Service is available at: {result['url']}")
                return result['url']

            except requests.exceptions.HTTPError as e:
                print(f"❌ Deployment failed. Server returned error: {e.response.status_code}")
                try:
                    # Try to print the detailed error message from the server
                    print(f" Reason: {e.response.json()['detail']}")
                except Exception:
                    # Body was not JSON (or had no 'detail') — show raw text.
                    # Narrowed from a bare except so ^C is not swallowed here.
                    print(f" Reason: {e.response.text}")
                return None
            except requests.exceptions.RequestException as e:
                print(f"❌ Could not connect to the deploy server: {e}")
                return None

    def Deploy(self, *args, **kwargs):
        """Internal deploy path using cycls' private cloud helpers.

        Returns the deployed service object on success, else None.
        """
        try:
            from .shared import upload_file_to_cloud, build_and_deploy_to_cloud
        except ImportError:
            print("❌ Shared not found. This is an internal method.")
            return None

        port = kwargs.get('port', 8080)

        with tempfile.TemporaryDirectory() as tmpdir_str:
            tmpdir = Path(tmpdir_str)
            self._prepare_build_context(tmpdir, include_payload=True, args=args, kwargs=kwargs)

            archive_path = Path(tmpdir_str) / "source.tar.gz"
            with tarfile.open(archive_path, "w:gz") as tar:
                # Skip the archive itself — it sits inside the globbed tree.
                for f in tmpdir.glob("**/*"):
                    if f.is_file() and f != archive_path:
                        tar.add(f, arcname=f.relative_to(tmpdir))

            archive_name = upload_file_to_cloud(self.name, archive_path)

            try:
                service = build_and_deploy_to_cloud(
                    function_name=self.name,
                    gcs_object_name=archive_name,
                    port=port,
                    memory="1Gi"
                )
                # Previously the service handle was computed and discarded;
                # return it so callers can inspect the deployment.
                return service
            except Exception as e:
                print(f"❌ Cloud Deployment Failed: {e}")
                return None
|
cycls/sdk.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from .runtime import Runtime
|
|
2
|
+
|
|
3
|
+
def function(python_version="3.12", pip_install=None, apt_install=None, run_commands=None, copy=None, name=None, base_url=None, api_key=None):
    """A decorator factory that transforms a Python function into a
    containerized, remotely executable object.

    Args:
        python_version (str): Python version for the container image.
        pip_install (list[str], optional): pip packages to install.
        apt_install (list[str], optional): apt packages to install.
        run_commands (list[str], optional): extra shell commands to RUN
            while building the image.
        copy (list[str], optional): local paths to copy to the same path
            inside the image, for static dependencies.
        name (str, optional): a name for this function; defaults to the
            decorated function's name (underscores become hyphens, since the
            name is used in image tags and service URLs).
        base_url (str, optional): base URL of the deploy server.
        api_key (str, optional): API key sent with deploy requests.

    Returns:
        A decorator that replaces the decorated function with a Runtime instance.
    """
    def decorator(func):
        # Default the service name to the function's own name.
        resolved_name = name or func.__name__
        # Mirror each listed path to the identical destination in the image.
        copy_dict = {path: path for path in copy or []}
        return Runtime(func, resolved_name.replace('_', '-'), python_version,
                       pip_install, apt_install, run_commands, copy_dict,
                       base_url, api_key)
    return decorator
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: cycls
|
|
3
|
-
Version: 0.0.2.
|
|
3
|
+
Version: 0.0.2.32
|
|
4
4
|
Summary: Cycls SDK
|
|
5
5
|
Author: Mohammed J. AlRujayi
|
|
6
6
|
Author-email: mj@cycls.com
|
|
@@ -12,6 +12,8 @@ Classifier: Programming Language :: Python :: 3.11
|
|
|
12
12
|
Classifier: Programming Language :: Python :: 3.12
|
|
13
13
|
Classifier: Programming Language :: Python :: 3.13
|
|
14
14
|
Classifier: Programming Language :: Python :: 3.14
|
|
15
|
+
Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
|
|
16
|
+
Requires-Dist: docker (>=7.1.0,<8.0.0)
|
|
15
17
|
Requires-Dist: fastapi (>=0.111.0,<0.112.0)
|
|
16
18
|
Requires-Dist: httpx (>=0.27.0,<0.28.0)
|
|
17
19
|
Requires-Dist: jwt (>=1.4.0,<2.0.0)
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
cycls/__init__.py,sha256=9oKTafASGMuJbq_Sk0sfCSQKKwUD8mcdYFovDDzVuW4,50
|
|
2
|
+
cycls/cycls.py,sha256=rLhssEPs4EwDbNsO5cGdKkP888dOaYToMPKVLZWan5U,7479
|
|
3
|
+
cycls/runtime.py,sha256=quBV8G8DkqsK_QzTraiYe7P9x9W5X1Zl8XDsvhUlPus,15939
|
|
4
|
+
cycls/sdk.py,sha256=EcUYi0pGKtCajKzwLg3uH8CRPfc78KBHXgB6ShABxdE,1110
|
|
5
|
+
cycls/theme/assets/index-D0-uI8sw.js,sha256=aUsqm9HZtEJz38o-0MW12ZVeOlSeKigwc_fYJBntiyI,1068551
|
|
6
|
+
cycls/theme/index.html,sha256=epB4cgSjC7xJOXpVuCwt9r7ivoGvLiXSrxsoOgINw58,895
|
|
7
|
+
cycls-0.0.2.32.dist-info/METADATA,sha256=si7x9VRhZUM9ZlrBN3Iusp6iemuHyWoOK-UT6WIZrmA,5666
|
|
8
|
+
cycls-0.0.2.32.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
|
|
9
|
+
cycls-0.0.2.32.dist-info/RECORD,,
|
cycls-0.0.2.31.dist-info/RECORD
DELETED
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
cycls/__init__.py,sha256=D808D5xNP8OdQStdABmWqjGfuDNpVgbJhRo0KRKzo7g,24
|
|
2
|
-
cycls/cycls.py,sha256=qW0KzZ7GLMIJ5tLPYDusvBv0HjKJ1NhfuYVu4BbF8Z4,7434
|
|
3
|
-
cycls/theme/assets/index-D0-uI8sw.js,sha256=aUsqm9HZtEJz38o-0MW12ZVeOlSeKigwc_fYJBntiyI,1068551
|
|
4
|
-
cycls/theme/index.html,sha256=epB4cgSjC7xJOXpVuCwt9r7ivoGvLiXSrxsoOgINw58,895
|
|
5
|
-
cycls-0.0.2.31.dist-info/METADATA,sha256=0ldNTvnYxncmbitmY5oRGIGB1f8abh4uefgp9uEFl3s,5583
|
|
6
|
-
cycls-0.0.2.31.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
|
|
7
|
-
cycls-0.0.2.31.dist-info/RECORD,,
|
|
File without changes
|