cycls 0.0.2.31.tar.gz → 0.0.2.33.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cycls
- Version: 0.0.2.31
+ Version: 0.0.2.33
  Summary: Cycls SDK
  Author: Mohammed J. AlRujayi
  Author-email: mj@cycls.com
@@ -12,6 +12,8 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: 3.14
+ Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
+ Requires-Dist: docker (>=7.1.0,<8.0.0)
  Requires-Dist: fastapi (>=0.111.0,<0.112.0)
  Requires-Dist: httpx (>=0.27.0,<0.28.0)
  Requires-Dist: jwt (>=1.4.0,<2.0.0)
@@ -0,0 +1,2 @@
+ from .cycls import Agent
+ from .sdk import function
@@ -164,5 +164,6 @@ class Agent:
          with modal.enable_output(), run_app(app=self.app, client=self.client):
              while True: time.sleep(10)

+ # poetry config pypi-token.pypi <your-token>
  # poetry run python agent.py
  # poetry publish --build
@@ -0,0 +1,400 @@
+ import docker
+ import cloudpickle
+ import tempfile
+ import hashlib
+ import os
+ import sys
+ import shutil
+ from pathlib import Path
+ from contextlib import contextmanager
+ import tarfile
+
+ # --- Docker Client Initialization ---
+ try:
+     docker_client = docker.from_env()
+ except docker.errors.DockerException:
+     print("❌ Error: Docker is not running or not installed.")
+     print("Please start the Docker daemon and try again.")
+     sys.exit(1)
+
+ # --- Top-Level Helper Functions ---
+
+ def _bootstrap_script(payload_file: str, result_file: str) -> str:
+     """Generates the Python script that runs inside the Docker container."""
+     return f"""
+ import cloudpickle
+ import sys
+ import os
+ import traceback
+ from pathlib import Path
+
+ if __name__ == "__main__":
+     io_dir = Path(sys.argv[1])
+     payload_path = io_dir / '{payload_file}'
+     result_path = io_dir / '{result_file}'
+
+     try:
+         with open(payload_path, 'rb') as f:
+             func, args, kwargs = cloudpickle.load(f)
+
+         result = func(*args, **kwargs)
+
+         with open(result_path, 'wb') as f:
+             cloudpickle.dump(result, f)
+
+     except Exception as e:
+         traceback.print_exc(file=sys.stderr)
+         sys.exit(1)
+ """
+
+ def _hash_path(path_str: str) -> str:
+     """Hashes a file or a directory's contents to create a deterministic signature."""
+     h = hashlib.sha256()
+     p = Path(path_str)
+     if p.is_file():
+         with p.open('rb') as f:
+             while chunk := f.read(65536):
+                 h.update(chunk)
+     elif p.is_dir():
+         for root, dirs, files in os.walk(p, topdown=True):
+             dirs.sort()
+             files.sort()
+             for name in files:
+                 filepath = Path(root) / name
+                 relpath = filepath.relative_to(p)
+                 h.update(str(relpath).encode())
+                 with filepath.open('rb') as f:
+                     while chunk := f.read(65536):
+                         h.update(chunk)
+     return h.hexdigest()
+
+ def _copy_path(src_path: Path, dest_path: Path):
+     """Recursively copies a file or directory to a destination path."""
+     if src_path.is_dir():
+         shutil.copytree(src_path, dest_path, dirs_exist_ok=True)
+     else:
+         dest_path.parent.mkdir(parents=True, exist_ok=True)
+         shutil.copy(src_path, dest_path)
+
+ # --- Main Runtime Class ---
+
+ class Runtime:
+     """
+     Handles building a Docker image and executing a function within a container.
+     """
+     def __init__(self, func, name, python_version=None, pip_packages=None, apt_packages=None, run_commands=None, copy=None, base_url=None, api_key=None):
+         self.func = func
+         self.python_version = python_version or "3.12"
+         self.pip_packages = sorted(pip_packages or [])
+         self.apt_packages = sorted(apt_packages or [])
+         self.run_commands = sorted(run_commands or [])
+         self.copy = copy or {}
+         self.name = name
+         self.base_url = base_url or "https://service-core-280879789566.me-central1.run.app"
+         self.image_prefix = f"cycls/{name}"
+
+         # Standard paths and filenames used inside the container
+         self.io_dir = "/app/io"
+         self.runner_filename = "runner.py"
+         self.runner_path = f"/app/{self.runner_filename}"
+         self.payload_file = "payload.pkl"
+         self.result_file = "result.pkl"
+
+         self.runner_script = _bootstrap_script(self.payload_file, self.result_file)
+         self.tag = self._generate_base_tag()
+
+         self.api_key = api_key
+
+     def _generate_base_tag(self) -> str:
+         """Creates a unique tag for the base Docker image based on its dependencies."""
+         signature_parts = [
+             "".join(self.python_version),
+             "".join(self.pip_packages),
+             "".join(self.apt_packages),
+             "".join(self.run_commands),
+             self.runner_script
+         ]
+         for src, dst in sorted(self.copy.items()):
+             if not Path(src).exists():
+                 raise FileNotFoundError(f"Path in 'copy' not found: {src}")
+             content_hash = _hash_path(src)
+             signature_parts.append(f"copy:{src}>{dst}:{content_hash}")
+
+         signature = "".join(signature_parts)
+         image_hash = hashlib.sha256(signature.encode()).hexdigest()
+         return f"{self.image_prefix}:{image_hash[:16]}"
+
+     def _generate_dockerfile(self, port=None) -> str:
+         """Generates a multi-stage Dockerfile string."""
+         run_pip_install = f"RUN pip install --no-cache-dir cloudpickle {' '.join(self.pip_packages)}"
+         run_apt_install = (
+             f"RUN apt-get update && apt-get install -y --no-install-recommends {' '.join(self.apt_packages)}"
+             if self.apt_packages else ""
+         )
+         run_shell_commands = "\n".join([f"RUN {cmd}" for cmd in self.run_commands]) if self.run_commands else ""
+         copy_lines = "\n".join([f"COPY {src} {dst}" for src, dst in self.copy.items()])
+         expose_line = f"EXPOSE {port}" if port else ""
+
+         return f"""
+ # STAGE 1: Base image with all dependencies
+ FROM python:{self.python_version}-slim as base
+ ENV PIP_ROOT_USER_ACTION=ignore
+ ENV PYTHONUNBUFFERED=1
+ RUN mkdir -p {self.io_dir}
+ {run_apt_install}
+ {run_pip_install}
+ {run_shell_commands}
+ {copy_lines}
+ COPY {self.runner_filename} {self.runner_path}
+ ENTRYPOINT ["python", "{self.runner_path}", "{self.io_dir}"]
+
+ # STAGE 2: Final deployable image with the payload "baked in"
+ FROM base
+ {expose_line}
+ COPY {self.payload_file} {self.io_dir}/
+ """
+
+     def _prepare_build_context(self, workdir: Path, include_payload=False, args=None, kwargs=None):
+         """Prepares a complete build context in the given directory."""
+         port = kwargs.get('port') if kwargs else None
+
+         (workdir / "Dockerfile").write_text(self._generate_dockerfile(port=port))
+         (workdir / self.runner_filename).write_text(self.runner_script)
+
+         if include_payload:
+             payload_bytes = cloudpickle.dumps((self.func, args or [], kwargs or {}))
+             (workdir / self.payload_file).write_bytes(payload_bytes)
+
+         if self.copy:
+             for src in self.copy.keys():
+                 _copy_path(Path(src), workdir / src)
+
+     def _build_image_if_needed(self):
+         """Checks if the base Docker image exists locally and builds it if not."""
+         try:
+             docker_client.images.get(self.tag)
+             print(f"✅ Found cached base image: {self.tag}")
+             return
+         except docker.errors.ImageNotFound:
+             print(f"🛠️ Building new base image: {self.tag}")
+
+         # with tempfile.TemporaryDirectory() as tmpdir_str:
+         with tempfile.TemporaryDirectory(dir="/tmp") as tmpdir_str:
+             tmpdir = Path(tmpdir_str)
+             # Prepare context without payload for the base image
+             self._prepare_build_context(tmpdir)
+
+             print("--- 🐳 Docker Build Logs (Base Image) ---")
+             response_generator = docker_client.api.build(
+                 path=str(tmpdir),
+                 tag=self.tag,
+                 forcerm=True,
+                 decode=True,
+                 target='base' # Only build the 'base' stage
+             )
+             try:
+                 for chunk in response_generator:
+                     if 'stream' in chunk:
+                         print(chunk['stream'].strip())
+                 print("----------------------------------------")
+                 print(f"✅ Base image built successfully: {self.tag}")
+             except docker.errors.BuildError as e:
+                 print(f"\n❌ Docker build failed. Reason: {e}")
+                 raise
+
+     @contextmanager
+     def runner(self, *args, **kwargs):
+         """Context manager to set up, run, and tear down the container for local execution."""
+         port = kwargs.get('port', None)
+         self._build_image_if_needed()
+         container = None
+         ports_mapping = {f'{port}/tcp': port} if port else None
+
+         # with tempfile.TemporaryDirectory() as tmpdir_str:
+         with tempfile.TemporaryDirectory(dir="/tmp") as tmpdir_str:
+             tmpdir = Path(tmpdir_str)
+             payload_path = tmpdir / self.payload_file
+             result_path = tmpdir / self.result_file
+
+             with payload_path.open('wb') as f:
+                 cloudpickle.dump((self.func, args, kwargs), f)
+
+             try:
+                 container = docker_client.containers.create(
+                     image=self.tag,
+                     volumes={str(tmpdir): {'bind': self.io_dir, 'mode': 'rw'}},
+                     ports=ports_mapping
+                 )
+                 container.start()
+                 yield container, result_path
+             finally:
+                 if container:
+                     print("\n🧹 Cleaning up container...")
+                     try:
+                         container.stop(timeout=5)
+                         container.remove()
+                         print("✅ Container stopped and removed.")
+                     except docker.errors.APIError as e:
+                         print(f"⚠️ Could not clean up container: {e}")
+
+     def run(self, *args, **kwargs):
+         """Executes the function in a new Docker container and waits for the result."""
+         print(f"🚀 Running function '{self.name}' in container...")
+         try:
+             with self.runner(*args, **kwargs) as (container, result_path):
+                 print("--- 🪵 Container Logs (streaming) ---")
+                 for chunk in container.logs(stream=True, follow=True):
+                     print(chunk.decode('utf-8').strip())
+                 print("------------------------------------")
+
+                 result_status = container.wait()
+                 if result_status['StatusCode'] != 0:
+                     print(f"\n❌ Error: Container exited with code: {result_status['StatusCode']}")
+                     return None
+
+                 if result_path.exists():
+                     with result_path.open('rb') as f:
+                         result = cloudpickle.load(f)
+                     print("✅ Function executed successfully.")
+                     return result
+                 else:
+                     print("\n❌ Error: Result file not found.")
+                     return None
+         except (KeyboardInterrupt, docker.errors.DockerException) as e:
+             print(f"\n🛑 Operation stopped: {e}")
+             return None
+
+     def build(self, *args, **kwargs):
+         """Builds a self-contained, deployable Docker image locally."""
+         print("📦 Building self-contained image for deployment...")
+         payload_hash = hashlib.sha256(cloudpickle.dumps((self.func, args, kwargs))).hexdigest()[:16]
+         final_tag = f"{self.image_prefix}:deploy-{payload_hash}"
+
+         try:
+             docker_client.images.get(final_tag)
+             print(f"✅ Found cached deployable image: {final_tag}")
+             return final_tag
+         except docker.errors.ImageNotFound:
+             print(f"🛠️ Building new deployable image: {final_tag}")
+
+         # with tempfile.TemporaryDirectory() as tmpdir_str:
+         with tempfile.TemporaryDirectory(dir="/tmp") as tmpdir_str:
+             tmpdir = Path(tmpdir_str)
+             self._prepare_build_context(tmpdir, include_payload=True, args=args, kwargs=kwargs)
+
+             print("--- 🐳 Docker Build Logs (Final Image) ---")
+             response_generator = docker_client.api.build(
+                 path=str(tmpdir), tag=final_tag, forcerm=True, decode=True
+             )
+             try:
+                 for chunk in response_generator:
+                     if 'stream' in chunk:
+                         print(chunk['stream'].strip())
+                 print("-----------------------------------------")
+                 print(f"✅ Image built successfully: {final_tag}")
+                 port = kwargs.get('port') if kwargs else None
+                 print(f"🤖 Run: docker run --rm -d -p {port}:{port} {final_tag}")
+                 return final_tag
+             except docker.errors.BuildError as e:
+                 print(f"\n❌ Docker build failed. Reason: {e}")
+                 return None
+
+     def deploy(self, *args, **kwargs):
+         """Deploys the function by sending it to a remote build server."""
+         import requests
+
+         print(f"🚀 Preparing to deploy function '{self.name}'")
+
+         # 1. Prepare the build context and compress it into a tarball
+         payload_hash = hashlib.sha256(cloudpickle.dumps((self.func, args, kwargs))).hexdigest()[:16]
+         archive_name = f"source-{self.tag.split(':')[1]}-{payload_hash}.tar.gz"
+
+         # with tempfile.TemporaryDirectory() as tmpdir_str:
+         with tempfile.TemporaryDirectory(dir="/tmp") as tmpdir_str:
+             tmpdir = Path(tmpdir_str)
+             self._prepare_build_context(tmpdir, include_payload=True, args=args, kwargs=kwargs)
+
+             archive_path = Path(tmpdir_str) / archive_name
+             with tarfile.open(archive_path, "w:gz") as tar:
+                 # Add all files from the context to the tar archive
+                 for f in tmpdir.glob("**/*"):
+                     if f.is_file():
+                         tar.add(f, arcname=f.relative_to(tmpdir))
+
+             # 2. Prepare the request payload
+             port = kwargs.get('port', 8080)
+             data_payload = {
+                 "function_name": self.name,
+                 "port": port,
+                 # "memory": "1Gi" # You could make this a parameter
+             }
+             headers = {
+                 "X-API-Key": self.api_key
+             }
+
+             # 3. Upload to the deploy server
+             print("📦 Uploading build context to the deploy server...")
+             try:
+                 with open(archive_path, 'rb') as f:
+                     files = {'source_archive': (archive_name, f, 'application/gzip')}
+
+                     response = requests.post(
+                         f"{self.base_url}/v1/deploy",
+                         data=data_payload,
+                         files=files,
+                         headers=headers,
+                         timeout=1800 # Set a long timeout for the entire process
+                     )
+
+                 # 4. Handle the server's response
+                 response.raise_for_status() # Raise an exception for 4xx/5xx errors
+                 result = response.json()
+
+                 print(f"✅ Deployment successful!")
+                 print(f"🔗 Service is available at: {result['url']}")
+                 return result['url']
+
+             except requests.exceptions.HTTPError as e:
+                 print(f"❌ Deployment failed. Server returned error: {e.response.status_code}")
+                 try:
+                     # Try to print the detailed error message from the server
+                     print(f" Reason: {e.response.json()['detail']}")
+                 except:
+                     print(f" Reason: {e.response.text}")
+                 return None
+             except requests.exceptions.RequestException as e:
+                 print(f"❌ Could not connect to the deploy server: {e}")
+                 return None
+
+     def Deploy(self, *args, **kwargs):
+         try:
+             from .shared import upload_file_to_cloud, build_and_deploy_to_cloud
+         except ImportError:
+             print("❌ Shared not found. This is an internal method.")
+             return None
+
+         port = kwargs.get('port', 8080)
+
+         # with tempfile.TemporaryDirectory() as tmpdir_str:
+         with tempfile.TemporaryDirectory(dir="/tmp") as tmpdir_str:
+             tmpdir = Path(tmpdir_str)
+             self._prepare_build_context(tmpdir, include_payload=True, args=args, kwargs=kwargs)
+
+             archive_path = Path(tmpdir_str) / "source.tar.gz"
+             with tarfile.open(archive_path, "w:gz") as tar:
+                 for f in tmpdir.glob("**/*"):
+                     if f.is_file():
+                         tar.add(f, arcname=f.relative_to(tmpdir))
+
+             archive_name = upload_file_to_cloud(self.name, archive_path)
+
+             try:
+                 service = build_and_deploy_to_cloud(
+                     function_name=self.name,
+                     gcs_object_name=archive_name,
+                     port=port,
+                     memory="1Gi"
+                 )
+             except Exception as e:
+                 print(f"❌ Cloud Deployment Failed: {e}")
+                 return None
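
As a rough orientation sketch (not part of the released files), the new runtime module above moves work in and out of the container through two cloudpickle files, payload.pkl and result.pkl. The round trip performed by Runtime.run on the host and by the generated runner.py inside the container amounts to:

    import cloudpickle

    def square(x):  # stand-in for the user's function
        return x * x

    # host side: what Runtime.run writes into payload.pkl
    payload = cloudpickle.dumps((square, (4,), {}))

    # container side: what the bootstrap script does with that file
    func, args, kwargs = cloudpickle.loads(payload)
    result = func(*args, **kwargs)  # 16, dumped to result.pkl and read back on the host
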
@@ -0,0 +1,22 @@
+ from .runtime import Runtime
+
+ def function(python_version="3.12", pip_install=None, apt_install=None, run_commands=None, copy=None, name=None, base_url=None, api_key=None):
+     # """
+     # A decorator factory that transforms a Python function into a containerized,
+     # remotely executable object.
+
+     # Args:
+     #     pip (list[str], optional): A list of pip packages to install.
+     #     apt (list[str], optional): A list of apt packages to install.
+     #     copy (list[str], optional): A list of local paths to copy to the
+     #         same path inside the image. For static dependencies.
+     #     name (str, optional): A name for this function. Defaults to the function's name.
+
+     # Returns:
+     #     A decorator that replaces the decorated function with a Runtime instance.
+     # """
+     def decorator(func):
+         Name = name or func.__name__ # should be moved to runtime... or default?
+         copy_dict = {i:i for i in copy or []}
+         return Runtime(func, Name.replace('_', '-'), python_version, pip_install, apt_install, run_commands, copy_dict, base_url, api_key)
+     return decorator
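
A minimal usage sketch of the new function decorator, assuming only the API shown in this diff (the function and package names below are illustrative, and Docker must be running locally):

    from cycls import function

    @function(pip_install=["numpy"], name="demo_fn")
    def add(a, b):
        return a + b

    # add is now a Runtime instance rather than a plain function
    result = add.run(2, 3)  # builds the base image if needed, executes in a container, returns 5
    # add.build()           # bakes the payload into a self-contained, deployable image
    # add.deploy()          # uploads the build context to base_url + "/v1/deploy" (uses the api_key passed to function())
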
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "cycls"
- version = "0.0.2.31"
+ version = "0.0.2.33"

  packages = [{ include = "cycls" }]
  include = ["cycls/theme/**/*"]
@@ -14,6 +14,8 @@ fastapi = "^0.111.0"
  httpx = "^0.27.0"
  modal = "^1.1.0"
  jwt = "^1.4.0"
+ docker = "^7.1.0"
+ cloudpickle = "^3.1.1"


  [build-system]
@@ -1 +0,0 @@
- from .cycls import Agent