cycls-0.0.2.82-py3-none-any.whl → cycls-0.0.2.84-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cycls/__init__.py CHANGED
@@ -1,20 +1,14 @@
- import sys
- from types import ModuleType
- from .sdk import function, agent
- from .runtime import Runtime
+ from . import function as _function_module
+ from .function import function, Function
+ from .app import app, App

- class _Module(ModuleType):
-     def __getattr__(self, name):
-         from . import sdk
-         if name in ("api_key", "base_url"):
-             return getattr(sdk, name)
-         raise AttributeError(f"module 'cycls' has no attribute '{name}'")
-
-     def __setattr__(self, name, value):
-         from . import sdk
-         if name in ("api_key", "base_url"):
-             setattr(sdk, name, value)
-             return
-         super().__setattr__(name, value)
+ def __getattr__(name):
+     if name in ("api_key", "base_url"):
+         return getattr(_function_module, name)
+     raise AttributeError(f"module 'cycls' has no attribute '{name}'")

- sys.modules[__name__].__class__ = _Module
+ def __setattr__(name, value):
+     if name in ("api_key", "base_url"):
+         setattr(_function_module, name, value)
+     else:
+         raise AttributeError(f"module 'cycls' has no attribute '{name}'")
cycls/app.py ADDED
@@ -0,0 +1,88 @@
+ import os
+ import uvicorn
+ import importlib.resources
+
+ from .function import Function, _get_api_key, _get_base_url
+ from .web import web, Config
+
+ CYCLS_PATH = importlib.resources.files('cycls')
+
+ THEMES = ["default", "dev"]
+
+
+ class App(Function):
+     """App extends Function with web UI serving capabilities."""
+
+     def __init__(self, func, name, theme="default", pip=None, apt=None, copy=None, copy_public=None,
+                  auth=False, org=None, header=None, intro=None, title=None, plan="free", analytics=False):
+         if theme not in THEMES:
+             raise ValueError(f"Unknown theme: {theme}. Available: {THEMES}")
+         self.user_func = func
+         self.theme = theme
+         self.copy_public = copy_public or []
+
+         self.config = Config(
+             header=header,
+             intro=intro,
+             title=title,
+             auth=auth,
+             plan=plan,
+             analytics=analytics,
+             org=org,
+         )
+
+         # Build files dict for Function (theme is inside cycls/)
+         files = {str(CYCLS_PATH): "cycls"}
+         files.update({f: f for f in copy or []})
+         files.update({f: f"public/{f}" for f in self.copy_public})
+
+         super().__init__(
+             func=func,
+             name=name,
+             pip=["fastapi[standard]", "pyjwt", "cryptography", "uvicorn", "docker", *(pip or [])],
+             apt=apt,
+             copy=files,
+             base_url=_get_base_url(),
+             api_key=_get_api_key()
+         )
+
+     def __call__(self, *args, **kwargs):
+         return self.user_func(*args, **kwargs)
+
+     def _prepare_func(self, prod):
+         self.config.set_prod(prod)
+         self.config.public_path = f"cycls/themes/{self.theme}"
+         user_func, config, name = self.user_func, self.config, self.name
+         self.func = lambda port: __import__("cycls").web.serve(user_func, config, name, port)
+
+     def _local(self, port=8080):
+         """Run directly with uvicorn (no Docker)."""
+         print(f"Starting local server at localhost:{port}")
+         self.config.public_path = str(CYCLS_PATH.joinpath(f"themes/{self.theme}"))
+         self.config.set_prod(False)
+         uvicorn.run(web(self.user_func, self.config), host="0.0.0.0", port=port)
+
+     def local(self, port=8080, watch=True):
+         """Run locally in Docker with file watching by default."""
+         if os.environ.get('_CYCLS_WATCH'):
+             watch = False
+         self._prepare_func(prod=False)
+         self.watch(port=port) if watch else self.run(port=port)
+
+     def deploy(self, port=8080):
+         """Deploy to production."""
+         if self.api_key is None:
+             raise RuntimeError("Missing API key. Set cycls.api_key or CYCLS_API_KEY environment variable.")
+         self._prepare_func(prod=True)
+         return super().deploy(port=port)
+
+
+ def app(name=None, **kwargs):
+     """Decorator that transforms a function into a deployable App."""
+     if kwargs.get("plan") == "cycls_pass":
+         kwargs["auth"] = True
+         kwargs["analytics"] = True
+
+     def decorator(func):
+         return App(func=func, name=name or func.__name__, **kwargs)
+     return decorator
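Editor's note: a minimal usage sketch of the new `app` decorator shown above, assuming Docker is available locally; the function body and name below are illustrative, not part of this release.

```python
import cycls

# plan="cycls_pass" flips auth and analytics on, per the decorator above (hypothetical app)
@cycls.app(name="demo", plan="cycls_pass")
async def demo(context):
    yield "hello"

demo.local()       # Docker dev server with file watching (localhost:8080)
# demo.deploy()    # production deploy; requires cycls.api_key or CYCLS_API_KEY
```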
@@ -10,23 +10,47 @@ import shutil
  from pathlib import Path
  import tarfile

- from .grpc import RuntimeClient
-
  os.environ["DOCKER_BUILDKIT"] = "1"

- GRPC_PORT = 50051
  BASE_IMAGE = "ghcr.io/cycls/base:python3.12"
  BASE_PACKAGES = {"cloudpickle", "cryptography", "fastapi", "fastapi[standard]",
                   "pydantic", "pyjwt", "uvicorn", "uvicorn[standard]", "httpx"}
- GRPC_PACKAGES = {"grpcio", "protobuf"}

- # Simple entrypoint for deployed services - loads pickled function+args and runs it
  ENTRYPOINT_PY = '''import cloudpickle
  with open("/app/function.pkl", "rb") as f:
      func, args, kwargs = cloudpickle.load(f)
  func(*args, **kwargs)
  '''

+ RUNNER_PY = '''import cloudpickle
+ import sys
+ import traceback
+ from pathlib import Path
+
+ io_dir = Path(sys.argv[1])
+ payload_path = io_dir / "payload.pkl"
+ result_path = io_dir / "result.pkl"
+
+ try:
+     with open(payload_path, "rb") as f:
+         func, args, kwargs = cloudpickle.load(f)
+     result = func(*args, **kwargs)
+     with open(result_path, "wb") as f:
+         cloudpickle.dump(result, f)
+ except Exception:
+     traceback.print_exc()
+     sys.exit(1)
+ '''
+
+ # Module-level configuration
+ api_key = None
+ base_url = None
+
+ def _get_api_key():
+     return api_key or os.getenv("CYCLS_API_KEY")
+
+ def _get_base_url():
+     return base_url or os.getenv("CYCLS_BASE_URL")

  def _hash_path(path_str: str) -> str:
      h = hashlib.sha256()
@@ -56,40 +80,34 @@ def _copy_path(src_path: Path, dest_path: Path):
      shutil.copy(src_path, dest_path)


- class Runtime:
-     """Executes functions in Docker containers. Uses gRPC for local dev, pickle for deploy."""
+ class Function:
+     """Executes functions in Docker containers."""

-     def __init__(self, func, name, python_version=None, pip_packages=None, apt_packages=None,
+     def __init__(self, func, name, python_version=None, pip=None, apt=None,
                   run_commands=None, copy=None, base_url=None, api_key=None, base_image=None):
          self.func = func
-         self.name = name
+         self.name = name.replace('_', '-')
          self.python_version = python_version or f"{sys.version_info.major}.{sys.version_info.minor}"
-         self.apt_packages = sorted(apt_packages or [])
+         self.apt = sorted(apt or [])
          self.run_commands = sorted(run_commands or [])
-         self.copy = copy or {}
+         self.copy = {f: f for f in copy} if isinstance(copy, list) else (copy or {})
          self.base_image = base_image or BASE_IMAGE
          self.base_url = base_url or "https://service-core-280879789566.me-central1.run.app"
          self.api_key = api_key

-         # Compute pip packages (gRPC only needed for local dev, added dynamically)
-         user_packages = set(pip_packages or [])
+         user_packages = set(pip or [])
          if self.base_image == BASE_IMAGE:
-             self.pip_packages = sorted(user_packages - BASE_PACKAGES)
+             self.pip = sorted(user_packages - BASE_PACKAGES)
          else:
-             self.pip_packages = sorted(user_packages | {"cloudpickle"})
+             self.pip = sorted(user_packages | {"cloudpickle"})

-         self.image_prefix = f"cycls/{name}"
-         self.managed_label = "cycls.runtime"
+         self.image_prefix = f"cycls/{self.name}"
+         self.managed_label = "cycls.function"
          self._docker_client = None
-
-         # Local dev state (gRPC container)
          self._container = None
-         self._client = None
-         self._host_port = None

      @property
      def docker_client(self):
-         """Lazily initializes and returns a Docker client."""
          if self._docker_client is None:
              try:
                  print("Initializing Docker client...")
@@ -103,15 +121,12 @@ class Runtime:
          return self._docker_client

      def _perform_auto_cleanup(self, keep_tag=None):
-         """Clean up old containers and dev images (preserve deploy-* images)."""
          try:
-             # Remove old containers
              current_id = self._container.id if self._container else None
              for container in self.docker_client.containers.list(all=True, filters={"label": self.managed_label}):
                  if container.id != current_id:
                      container.remove(force=True)

-             # Remove old dev images globally (keep deploy-* and current)
              cleaned = 0
              for image in self.docker_client.images.list(filters={"label": self.managed_label}):
                  is_deploy = any(":deploy-" in t for t in image.tags)
@@ -125,9 +140,8 @@ class Runtime:
              print(f"Warning: cleanup error: {e}")

      def _image_tag(self, extra_parts=None) -> str:
-         """Creates a unique tag based on image configuration."""
-         parts = [self.base_image, self.python_version, "".join(self.pip_packages),
-                  "".join(self.apt_packages), "".join(self.run_commands)]
+         parts = [self.base_image, self.python_version, "".join(self.pip),
+                  "".join(self.apt), "".join(self.run_commands)]
          for src, dst in sorted(self.copy.items()):
              if not Path(src).exists():
                  raise FileNotFoundError(f"Path in 'copy' not found: {src}")
@@ -136,20 +150,18 @@ class Runtime:
              parts.extend(extra_parts)
          return f"{self.image_prefix}:{hashlib.sha256(''.join(parts).encode()).hexdigest()[:16]}"

-     def _dockerfile_preamble(self, pip_extras=None) -> str:
-         """Common Dockerfile setup: base image, apt, pip, run commands, copy."""
+     def _dockerfile_preamble(self) -> str:
          lines = [f"FROM {self.base_image}"]

          if self.base_image != BASE_IMAGE:
              lines.append("ENV PIP_ROOT_USER_ACTION=ignore PYTHONUNBUFFERED=1")
              lines.append("WORKDIR /app")

-         if self.apt_packages:
-             lines.append(f"RUN apt-get update && apt-get install -y --no-install-recommends {' '.join(self.apt_packages)}")
+         if self.apt:
+             lines.append(f"RUN apt-get update && apt-get install -y --no-install-recommends {' '.join(self.apt)}")

-         all_pip = list(self.pip_packages) + list(pip_extras or [])
-         if all_pip:
-             lines.append(f"RUN uv pip install --system --no-cache {' '.join(all_pip)}")
+         if self.pip:
+             lines.append(f"RUN uv pip install --system --no-cache {' '.join(self.pip)}")

          for cmd in self.run_commands:
              lines.append(f"RUN {cmd}")
@@ -159,16 +171,13 @@ class Runtime:

          return "\n".join(lines)

-     def _dockerfile_grpc(self) -> str:
-         """Dockerfile for local dev: gRPC server."""
-         return f"""{self._dockerfile_preamble(pip_extras=GRPC_PACKAGES)}
- COPY grpc_runtime/ /app/grpc_runtime/
- EXPOSE {GRPC_PORT}
- CMD ["python", "-m", "grpc_runtime.server", "--port", "{GRPC_PORT}"]
+     def _dockerfile_local(self) -> str:
+         return f"""{self._dockerfile_preamble()}
+ COPY runner.py /app/runner.py
+ ENTRYPOINT ["python", "/app/runner.py", "/io"]
  """

      def _dockerfile_deploy(self, port: int) -> str:
-         """Dockerfile for deploy: baked-in function via pickle."""
          return f"""{self._dockerfile_preamble()}
  COPY function.pkl /app/function.pkl
  COPY entrypoint.py /app/entrypoint.py
@@ -177,14 +186,12 @@ CMD ["python", "entrypoint.py"]
  """

      def _copy_user_files(self, workdir: Path):
-         """Copy user-specified files to build context."""
          context_files_dir = workdir / "context_files"
          context_files_dir.mkdir()
          for src, dst in self.copy.items():
              _copy_path(Path(src).resolve(), context_files_dir / dst)

      def _build_image(self, tag: str, workdir: Path) -> str:
-         """Build a Docker image from a prepared context."""
          print("--- Docker Build Logs ---")
          try:
              for chunk in self.docker_client.api.build(
@@ -200,9 +207,8 @@ CMD ["python", "entrypoint.py"]
              print(f"\nDocker build failed: {e}")
              raise

-     def _ensure_grpc_image(self) -> str:
-         """Build local dev image with gRPC server if needed."""
-         tag = self._image_tag(extra_parts=["grpc-v2"])
+     def _ensure_local_image(self) -> str:
+         tag = self._image_tag(extra_parts=["local-v1"])
          try:
              self.docker_client.images.get(tag)
              print(f"Found cached image: {tag}")
@@ -213,176 +219,118 @@ CMD ["python", "entrypoint.py"]
          with tempfile.TemporaryDirectory() as tmpdir:
              workdir = Path(tmpdir)
              self._copy_user_files(workdir)
-             (workdir / "Dockerfile").write_text(self._dockerfile_grpc())
-
-             # Copy gRPC runtime
-             grpc_src = Path(__file__).parent / "grpc"
-             shutil.copytree(grpc_src, workdir / "grpc_runtime",
-                             ignore=shutil.ignore_patterns('*.proto', '__pycache__'))
-
+             (workdir / "Dockerfile").write_text(self._dockerfile_local())
+             (workdir / "runner.py").write_text(RUNNER_PY)
              return self._build_image(tag, workdir)

-     def _ensure_container(self, service_port=None):
-         """Start container if not running, return gRPC client."""
-         if self._client and self._container:
+     def _cleanup_container(self):
+         if getattr(self, '_container', None):
              try:
-                 self._container.reload()
-                 if self._container.status == 'running':
-                     return self._client
+                 self._container.stop(timeout=3)
+                 self._container.remove()
              except docker.errors.NotFound:
-                 pass # Container was removed externally
+                 pass
              except docker.errors.APIError:
-                 pass # Docker API issue, will recreate
-             self._cleanup_container()
+                 pass
+         self._container = None

-         tag = self._ensure_grpc_image()
+     @contextlib.contextmanager
+     def runner(self, *args, **kwargs):
+         service_port = kwargs.get('port')
+         tag = self._ensure_local_image()
          self._perform_auto_cleanup(keep_tag=tag)

-         # Port mappings (fixed ports avoid race conditions)
-         ports = {f'{GRPC_PORT}/tcp': GRPC_PORT}
-         if service_port:
-             ports[f'{service_port}/tcp'] = service_port
-
-         self._container = self.docker_client.containers.run(
-             tag, detach=True, ports=ports, labels={self.managed_label: "true"}
-         )
-         self._host_port = GRPC_PORT
-         self._client = RuntimeClient(port=self._host_port)
-         if not self._client.wait_ready(timeout=10):
-             raise RuntimeError("Container failed to start")
-         print(f"Container ready on port {self._host_port}")
-         return self._client
+         ports = {f'{service_port}/tcp': service_port} if service_port else None
+
+         with tempfile.TemporaryDirectory() as io_dir:
+             io_path = Path(io_dir)
+             payload_path = io_path / "payload.pkl"
+             result_path = io_path / "result.pkl"
+
+             with open(payload_path, 'wb') as f:
+                 cloudpickle.dump((self.func, args, kwargs), f)

-     def _cleanup_container(self):
-         """Stop and remove the warm container."""
-         if self._client:
-             self._client.close()
-             self._client = None
-         if self._container:
              try:
-                 self._container.stop(timeout=3)
-                 self._container.remove()
-             except docker.errors.NotFound:
-                 pass # Already removed
-             except docker.errors.APIError:
-                 pass # Best effort cleanup
-             self._container = None
-             self._host_port = None
+                 self._container = self.docker_client.containers.create(
+                     image=tag,
+                     volumes={str(io_path): {'bind': '/io', 'mode': 'rw'}},
+                     ports=ports,
+                     labels={self.managed_label: "true"}
+                 )
+                 self._container.start()
+                 yield self._container, result_path
+             finally:
+                 self._cleanup_container()

      def run(self, *args, **kwargs):
-         """Execute the function in a container and return the result."""
          service_port = kwargs.get('port')
          print(f"Running '{self.name}'...")
+
          try:
-             client = self._ensure_container(service_port=service_port)
-
-             # Blocking service: fire gRPC, stream Docker logs
-             if service_port:
-                 client.fire(self.func, *args, **kwargs)
-                 print(f"Service running on port {service_port}")
-                 print("--- 🪵 Container Logs ---")
-                 for chunk in self._container.logs(stream=True, follow=True):
+             with self.runner(*args, **kwargs) as (container, result_path):
+                 print("--- Container Logs ---")
+                 for chunk in container.logs(stream=True, follow=True):
                      print(chunk.decode(), end='')
-                 return None
+                 print("----------------------")
+
+                 status = container.wait()
+                 if status['StatusCode'] != 0:
+                     print(f"Error: Container exited with code {status['StatusCode']}")
+                     return None

-             # Regular function: execute, then print logs
-             result = client.call(self.func, *args, **kwargs)
-             logs = self._container.logs().decode()
-             if logs.strip():
-                 print("--- 🪵 Container Logs ---")
-                 print(logs, end='')
-                 print("-------------------------")
-             return result
+                 if service_port:
+                     return None
+
+                 if result_path.exists():
+                     with open(result_path, 'rb') as f:
+                         return cloudpickle.load(f)
+                 else:
+                     print("Error: Result file not found")
+                     return None

          except KeyboardInterrupt:
-             print("\n-------------------------")
+             print("\n----------------------")
              print("Stopping...")
-             self._cleanup_container()
              return None
          except Exception as e:
              print(f"Error: {e}")
              return None

-     def stream(self, *args, **kwargs):
-         """Execute the function and yield streamed results."""
-         service_port = kwargs.get('port')
-         client = self._ensure_container(service_port=service_port)
-         yield from client.execute(self.func, *args, **kwargs)
-
-     @contextlib.contextmanager
-     def runner(self, *args, **kwargs):
-         """Context manager for running a service. Yields (container, client)."""
-         service_port = kwargs.get('port')
-         try:
-             client = self._ensure_container(service_port=service_port)
-             client.fire(self.func, *args, **kwargs)
-             yield self._container, client
-         finally:
-             self._cleanup_container()
-
      def watch(self, *args, **kwargs):
-         """Run with file watching - restarts script on changes."""
+         if os.environ.get('_CYCLS_WATCH'):
+             return self.run(*args, **kwargs)
+
          try:
              from watchfiles import watch as watchfiles_watch
          except ImportError:
-             print("watchfiles not installed (enables auto-reload on file changes).")
-             print("Install with: pip install watchfiles")
-             print("Running without file watching...")
+             print("watchfiles not installed. pip install watchfiles")
              return self.run(*args, **kwargs)

-         import inspect
          import subprocess

-         # Find the user's script (outside cycls package)
-         cycls_pkg = Path(__file__).parent.resolve()
-         main_script = None
-         for frame_info in inspect.stack():
-             filepath = Path(frame_info.filename).resolve()
-             if filepath.suffix == '.py' and not str(filepath).startswith(str(cycls_pkg)):
-                 main_script = filepath
-                 break
-
-         if not main_script:
-             print("Could not find script to watch.")
-             return self.run(*args, **kwargs)
-
-         # Build watch paths
-         watch_paths = [main_script]
-         watch_paths.extend([Path(src).resolve() for src in self.copy.keys() if Path(src).exists()])
+         script = Path(sys.argv[0]).resolve()
+         watch_paths = [script] + [Path(p).resolve() for p in self.copy if Path(p).exists()]

-         print(f"👀 Watching:")
-         for p in watch_paths:
-             print(f" {p}")
-         print()
+         print(f"Watching: {[p.name for p in watch_paths]}\n")

          while True:
-             print(f"🚀 Starting {main_script.name}...")
-             proc = subprocess.Popen(
-                 [sys.executable, str(main_script)],
-                 env={**os.environ, '_CYCLS_WATCH': '1'}
-             )
-
+             proc = subprocess.Popen([sys.executable, str(script)], env={**os.environ, '_CYCLS_WATCH': '1'})
              try:
                  for changes in watchfiles_watch(*watch_paths):
-                     print(f"\n🔄 Changed: {[Path(c[1]).name for c in changes]}")
+                     print(f"\nChanged: {[Path(c[1]).name for c in changes]}")
                      break
-
                  proc.terminate()
                  proc.wait(timeout=3)
              except subprocess.TimeoutExpired:
                  proc.kill()
              except KeyboardInterrupt:
-                 print("\nStopping...")
                  proc.terminate()
                  proc.wait(timeout=3)
                  return

-             print()
-
      def _prepare_deploy_context(self, workdir: Path, port: int, args=(), kwargs=None):
-         """Prepare build context for deploy: pickle function+args + entrypoint."""
          kwargs = kwargs or {}
-         kwargs['port'] = port # Ensure port is in kwargs
+         kwargs['port'] = port
          self._copy_user_files(workdir)
          (workdir / "Dockerfile").write_text(self._dockerfile_deploy(port))
          (workdir / "entrypoint.py").write_text(ENTRYPOINT_PY)
@@ -390,7 +338,6 @@ CMD ["python", "entrypoint.py"]
              cloudpickle.dump((self.func, args, kwargs), f)

      def build(self, *args, **kwargs):
-         """Build a deployable Docker image locally."""
          port = kwargs.pop('port', 8080)
          payload = cloudpickle.dumps((self.func, args, {**kwargs, 'port': port}))
          tag = f"{self.image_prefix}:deploy-{hashlib.sha256(payload).hexdigest()[:16]}"
@@ -410,7 +357,6 @@ CMD ["python", "entrypoint.py"]
          return tag

      def deploy(self, *args, **kwargs):
-         """Deploy the function to a remote build server."""
          import requests

          port = kwargs.pop('port', 8080)
@@ -456,5 +402,11 @@ CMD ["python", "entrypoint.py"]
          return None

      def __del__(self):
-         """Cleanup on garbage collection."""
          self._cleanup_container()
+
+
+ def function(name=None, **kwargs):
+     """Decorator that transforms a Python function into a containerized Function."""
+     def decorator(func):
+         return Function(func, name or func.__name__, **kwargs, base_url=_get_base_url(), api_key=_get_api_key())
+     return decorator
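Editor's note: a hedged sketch of how the reworked pickle-based runner is meant to be driven through the `@cycls.function` decorator added above; the function name and arguments are illustrative only.

```python
import cycls

@cycls.function(name="add")
def add(a, b):
    return a + b

# run() pickles (func, args, kwargs) into payload.pkl, starts the container
# built from _dockerfile_local(), streams its logs, then loads result.pkl
# from the shared /io volume.
print(add.run(2, 3))
```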
cycls/web.py CHANGED
@@ -2,19 +2,25 @@ import json, inspect
  from pathlib import Path
  from pydantic import BaseModel
  from typing import Optional
+ from .auth import PK_LIVE, PK_TEST, JWKS_PROD, JWKS_TEST

  class Config(BaseModel):
      public_path: str = "theme"
-     header: str = ""
-     intro: str = ""
-     title: str = ""
+     header: Optional[str] = None
+     intro: Optional[str] = None
+     title: Optional[str] = None
      prod: bool = False
      auth: bool = False
      plan: str = "free"
      analytics: bool = False
      org: Optional[str] = None
-     pk: str = ""
-     jwks: str = ""
+     pk: Optional[str] = None
+     jwks: Optional[str] = None
+
+     def set_prod(self, prod: bool):
+         self.prod = prod
+         self.pk = PK_LIVE if prod else PK_TEST
+         self.jwks = JWKS_PROD if prod else JWKS_TEST

  async def openai_encoder(stream):
      if inspect.isasyncgen(stream):
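Editor's note: a small sketch of the new `Config.set_prod` helper, assuming it is called the way `App._prepare_func` does above; the field values are placeholders.

```python
from cycls.web import Config

cfg = Config(title="Demo", auth=True)
cfg.set_prod(True)   # selects PK_LIVE / JWKS_PROD; set_prod(False) selects the test pair
```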
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cycls
- Version: 0.0.2.82
+ Version: 0.0.2.84
  Summary: Distribute Intelligence
  Author: Mohammed J. AlRujayi
  Author-email: mj@cycls.com
@@ -12,14 +12,10 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: 3.14
- Provides-Extra: modal
  Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
  Requires-Dist: docker (>=7.1.0,<8.0.0)
  Requires-Dist: fastapi (>=0.111.0,<0.112.0)
- Requires-Dist: grpcio (>=1.76.0,<2.0.0)
  Requires-Dist: httpx (>=0.27.0,<0.28.0)
- Requires-Dist: modal (>=1.1.0,<2.0.0) ; extra == "modal"
- Requires-Dist: protobuf (>=6.0,<7.0)
  Requires-Dist: pyjwt (>=2.8.0,<3.0.0)
  Description-Content-Type: text/markdown

@@ -49,6 +45,8 @@ The open-source SDK for distributing AI agents.

  The function is the unit of abstraction in cycls. Your agent logic lives in a plain Python function — the decorator layers on everything else: containerization, authentication, deployment, analytics. You write the function, the `@` handles the infrastructure.

+ Cycls is beautifully lean (~862 lines!) and focused on deployment infrastructure for any Python function, not agent logic itself.
+
  ## Distribute Intelligence

  Write a function. Deploy it as an API, a web interface, or both. Add authentication, analytics, and monetization with flags.
@@ -58,8 +56,8 @@ import cycls

  cycls.api_key = "YOUR_CYCLS_API_KEY"

- @cycls.agent(pip=["openai"])
- async def agent(context):
+ @cycls.app(pip=["openai"])
+ async def app(context):
      from openai import AsyncOpenAI
      client = AsyncOpenAI()

@@ -76,7 +74,7 @@ async def agent(context):
          elif event.type == "response.output_text.delta":
              yield event.delta

- agent.deploy() # Live at https://agent.cycls.ai
+ app.deploy() # Live at https://agent.cycls.ai
  ```

  ## Installation
@@ -99,9 +97,9 @@ Requires Docker.
  ## Running

  ```python
- agent.local() # Development with hot-reload (localhost:8080)
- agent.local(watch=False) # Development without hot-reload
- agent.deploy() # Production: https://agent.cycls.ai
+ app.local() # Development with hot-reload (localhost:8080)
+ app.local(watch=False) # Development without hot-reload
+ app.deploy() # Production: https://agent.cycls.ai
  ```

  Get an API key at [cycls.com](https://cycls.com).
@@ -109,8 +107,8 @@ Get an API key at [cycls.com](https://cycls.com).
  ## Authentication & Analytics

  ```python
- @cycls.agent(pip=["openai"], auth=True, analytics=True)
- async def agent(context):
+ @cycls.app(pip=["openai"], auth=True, analytics=True)
+ async def app(context):
      # context.user available when auth=True
      user = context.user # User(id, email, name, plans)
      yield f"Hello {user.name}!"
@@ -127,7 +125,7 @@ async def agent(context):
  Yield structured objects for rich streaming responses:

  ```python
- @cycls.agent()
+ @cycls.app()
  async def demo(context):
      yield {"type": "thinking", "thinking": "Analyzing the request..."}
      yield "Here's what I found:\n\n"
@@ -169,7 +167,7 @@ This works seamlessly with OpenAI's reasoning models - just map reasoning summar
  ## Context Object

  ```python
- @cycls.agent()
+ @cycls.app()
  async def chat(context):
      context.messages # [{"role": "user", "content": "..."}]
      context.messages.raw # Full data including UI component parts
@@ -202,13 +200,13 @@ See [docs/streaming-protocol.md](docs/streaming-protocol.md) for frontend integr
  Define your entire runtime in the decorator:

  ```python
- @cycls.agent(
+ @cycls.app(
      pip=["openai", "pandas", "numpy"],
      apt=["ffmpeg", "libmagic1"],
      copy=["./utils.py", "./models/", "/absolute/path/to/config.json"],
      copy_public=["./assets/logo.png", "./static/"],
  )
- async def my_agent(context):
+ async def my_app(context):
      ...
  ```

@@ -243,7 +241,7 @@ copy=[
  ]
  Then import them in your function:

  ```python
- @cycls.agent(copy=["./utils.py"])
+ @cycls.app(copy=["./utils.py"])
  async def chat(context):
      from utils import helper_function # Your bundled module
      ...
@@ -257,7 +255,7 @@ Files and directories served at the `/public` endpoint. Perfect for images, down
  copy_public=["./assets/logo.png", "./downloads/"]
  ```

- Access them at `https://your-agent.cycls.ai/public/logo.png`.
+ Access them at `https://your-app.cycls.ai/public/logo.png`.

  ---

@@ -0,0 +1,14 @@
+ cycls/__init__.py,sha256=efbq0vRijGOByKtz9bRF8WQFYmnPSgZV1DH-54s6iwQ,493
+ cycls/app.py,sha256=xoPCEBJm8lqVwqa2qnvmhfFW1pWn9CbWIp--CBo2Tqw,3107
+ cycls/auth.py,sha256=xkndHZyCfnlertMMEKerCJjf23N3fVcTRVTTSXTTuzg,247
+ cycls/cli.py,sha256=cVbIkTDnVofohvByyYUrXF_RYDQZVQECJqo7cPBPJfs,4781
+ cycls/function.py,sha256=3N8PqpVSVaU6RSrIwCvTCUwTm0kOchQN6jSz89v_hXE,15120
+ cycls/themes/default/assets/index-C2r4Daz3.js,sha256=OGzjspxo0uUTdtQIzWZNgFMhGUtW4wSVuW33iA7oLFM,1351283
+ cycls/themes/default/assets/index-DWGS8zpa.css,sha256=SxylXQV1qgQ0sw9QlMxcu9jwWTu8XPOnWZju8upUpCM,6504
+ cycls/themes/default/index.html,sha256=WPUqVzJyh-aO2ulWTbXwOVAfWHaj0yWTGd9e8HOEro4,828
+ cycls/themes/dev/index.html,sha256=QJBHkdNuMMiwQU7o8dN8__8YQeQB45D37D-NCXIWB2Q,11585
+ cycls/web.py,sha256=KU7UsqIWFR32BOoKgDRPfcMooyrfytl3anp0XVSl5O4,5317
+ cycls-0.0.2.84.dist-info/METADATA,sha256=otcLx-nidkZMpdpeuHt9akSLyTKN31Aj9AJekCzTNEM,8706
+ cycls-0.0.2.84.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ cycls-0.0.2.84.dist-info/entry_points.txt,sha256=vEhqUxFhhuzCKWtq02LbMnT3wpUqdfgcM3Yh-jjXom8,40
+ cycls-0.0.2.84.dist-info/RECORD,,
@@ -0,0 +1,3 @@
+ [console_scripts]
+ cycls=cycls.cli:main
+
cycls/grpc/__init__.py DELETED
@@ -1,3 +0,0 @@
- from .client import RuntimeClient
-
- __all__ = ["RuntimeClient"]
cycls/grpc/client.py DELETED
@@ -1,71 +0,0 @@
- import cloudpickle
- import grpc
-
- try:
-     from . import runtime_pb2
-     from . import runtime_pb2_grpc
- except ImportError:
-     import runtime_pb2
-     import runtime_pb2_grpc
-
-
- class RuntimeClient:
-     def __init__(self, host='localhost', port=50051, timeout=None):
-         self.host = host
-         self.port = port
-         self.timeout = timeout
-         self._channel = None
-         self._stub = None
-
-     def _connect(self):
-         if self._channel is None:
-             self._channel = grpc.insecure_channel(f'{self.host}:{self.port}')
-             self._stub = runtime_pb2_grpc.RuntimeStub(self._channel)
-         return self._stub
-
-     def execute(self, func, *args, **kwargs):
-         """Execute function and yield streamed results."""
-         stub = self._connect()
-         payload = cloudpickle.dumps((func, args, kwargs))
-         request = runtime_pb2.Request(payload=payload)
-
-         for response in stub.Execute(request, timeout=self.timeout):
-             result = cloudpickle.loads(response.data)
-             if response.error:
-                 raise RuntimeError(f"Function execution failed: {result}")
-             yield result
-
-     def call(self, func, *args, **kwargs):
-         """Execute and return single result (or list if multiple)."""
-         results = list(self.execute(func, *args, **kwargs))
-         return results[0] if len(results) == 1 else results
-
-     def fire(self, func, *args, **kwargs):
-         """Fire off execution without waiting for response."""
-         stub = self._connect()
-         payload = cloudpickle.dumps((func, args, kwargs))
-         request = runtime_pb2.Request(payload=payload)
-         # Start the stream - gRPC sends request immediately
-         self._active_stream = stub.Execute(request)
-
-     def wait_ready(self, timeout=10):
-         """Wait for channel to be ready."""
-         if self._channel is None:
-             self._connect()
-         try:
-             grpc.channel_ready_future(self._channel).result(timeout=timeout)
-             return True
-         except grpc.FutureTimeoutError:
-             return False
-
-     def close(self):
-         if self._channel:
-             self._channel.close()
-             self._channel = None
-             self._stub = None
-
-     def __enter__(self):
-         return self
-
-     def __exit__(self, *args):
-         self.close()
cycls/grpc/runtime.proto DELETED
@@ -1,18 +0,0 @@
- syntax = "proto3";
-
- package runtime;
-
- service Runtime {
-   rpc Execute(Request) returns (stream Response);
- }
-
- message Request {
-   bytes payload = 1;
- }
-
- message Response {
-   bytes data = 1;
-   bool error = 2;
-   bytes log = 3;
-   bool is_log = 4;
- }
cycls/grpc/runtime_pb2.py DELETED
@@ -1,40 +0,0 @@
- # -*- coding: utf-8 -*-
- # Generated by the protocol buffer compiler. DO NOT EDIT!
- # NO CHECKED-IN PROTOBUF GENCODE
- # source: runtime.proto
- # Protobuf Python Version: 6.31.1
- """Generated protocol buffer code."""
- from google.protobuf import descriptor as _descriptor
- from google.protobuf import descriptor_pool as _descriptor_pool
- from google.protobuf import runtime_version as _runtime_version
- from google.protobuf import symbol_database as _symbol_database
- from google.protobuf.internal import builder as _builder
- _runtime_version.ValidateProtobufRuntimeVersion(
-     _runtime_version.Domain.PUBLIC,
-     6,
-     31,
-     1,
-     '',
-     'runtime.proto'
- )
- # @@protoc_insertion_point(imports)
-
- _sym_db = _symbol_database.Default()
-
-
-
-
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rruntime.proto\x12\x07runtime\"\x1a\n\x07Request\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\"D\n\x08Response\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\r\n\x05\x65rror\x18\x02 \x01(\x08\x12\x0b\n\x03log\x18\x03 \x01(\x0c\x12\x0e\n\x06is_log\x18\x04 \x01(\x08\x32;\n\x07Runtime\x12\x30\n\x07\x45xecute\x12\x10.runtime.Request\x1a\x11.runtime.Response0\x01\x62\x06proto3')
-
- _globals = globals()
- _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
- _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'runtime_pb2', _globals)
- if not _descriptor._USE_C_DESCRIPTORS:
-     DESCRIPTOR._loaded_options = None
-     _globals['_REQUEST']._serialized_start=26
-     _globals['_REQUEST']._serialized_end=52
-     _globals['_RESPONSE']._serialized_start=54
-     _globals['_RESPONSE']._serialized_end=122
-     _globals['_RUNTIME']._serialized_start=124
-     _globals['_RUNTIME']._serialized_end=183
- # @@protoc_insertion_point(module_scope)
@@ -1,100 +0,0 @@
- # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
- """Client and server classes corresponding to protobuf-defined services."""
- import grpc
- import warnings
-
- try:
-     from . import runtime_pb2 as runtime__pb2
- except ImportError:
-     import runtime_pb2 as runtime__pb2
-
- GRPC_GENERATED_VERSION = '1.76.0'
- GRPC_VERSION = grpc.__version__
- _version_not_supported = False
-
- try:
-     from grpc._utilities import first_version_is_lower
-     _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
- except ImportError:
-     _version_not_supported = True
-
- if _version_not_supported:
-     raise RuntimeError(
-         f'The grpc package installed is at version {GRPC_VERSION},'
-         + ' but the generated code in runtime_pb2_grpc.py depends on'
-         + f' grpcio>={GRPC_GENERATED_VERSION}.'
-         + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
-         + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
-     )
-
-
- class RuntimeStub(object):
-     """Missing associated documentation comment in .proto file."""
-
-     def __init__(self, channel):
-         """Constructor.
-
-         Args:
-             channel: A grpc.Channel.
-         """
-         self.Execute = channel.unary_stream(
-                 '/runtime.Runtime/Execute',
-                 request_serializer=runtime__pb2.Request.SerializeToString,
-                 response_deserializer=runtime__pb2.Response.FromString,
-                 _registered_method=True)
-
-
- class RuntimeServicer(object):
-     """Missing associated documentation comment in .proto file."""
-
-     def Execute(self, request, context):
-         """Missing associated documentation comment in .proto file."""
-         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-         context.set_details('Method not implemented!')
-         raise NotImplementedError('Method not implemented!')
-
-
- def add_RuntimeServicer_to_server(servicer, server):
-     rpc_method_handlers = {
-             'Execute': grpc.unary_stream_rpc_method_handler(
-                     servicer.Execute,
-                     request_deserializer=runtime__pb2.Request.FromString,
-                     response_serializer=runtime__pb2.Response.SerializeToString,
-             ),
-     }
-     generic_handler = grpc.method_handlers_generic_handler(
-             'runtime.Runtime', rpc_method_handlers)
-     server.add_generic_rpc_handlers((generic_handler,))
-     server.add_registered_method_handlers('runtime.Runtime', rpc_method_handlers)
-
-
- # This class is part of an EXPERIMENTAL API.
- class Runtime(object):
-     """Missing associated documentation comment in .proto file."""
-
-     @staticmethod
-     def Execute(request,
-             target,
-             options=(),
-             channel_credentials=None,
-             call_credentials=None,
-             insecure=False,
-             compression=None,
-             wait_for_ready=None,
-             timeout=None,
-             metadata=None):
-         return grpc.experimental.unary_stream(
-             request,
-             target,
-             '/runtime.Runtime/Execute',
-             runtime__pb2.Request.SerializeToString,
-             runtime__pb2.Response.FromString,
-             options,
-             channel_credentials,
-             insecure,
-             call_credentials,
-             compression,
-             wait_for_ready,
-             timeout,
-             metadata,
-             _registered_method=True)
cycls/grpc/server.py DELETED
@@ -1,60 +0,0 @@
- import asyncio
- import inspect
- import traceback
- import cloudpickle
- import grpc
- from concurrent import futures
-
- try:
-     from . import runtime_pb2
-     from . import runtime_pb2_grpc
- except ImportError:
-     import runtime_pb2
-     import runtime_pb2_grpc
-
-
- class RuntimeServicer(runtime_pb2_grpc.RuntimeServicer):
-     def Execute(self, request, context):
-         try:
-             func, args, kwargs = cloudpickle.loads(request.payload)
-             result = func(*args, **kwargs)
-
-             # Handle coroutines
-             if inspect.iscoroutine(result):
-                 result = asyncio.run(result)
-
-             # Handle async generators
-             if inspect.isasyncgen(result):
-                 async def collect():
-                     items = []
-                     async for item in result:
-                         items.append(item)
-                     return items
-                 result = iter(asyncio.run(collect()))
-
-             # Stream results for generators, single response otherwise
-             if inspect.isgenerator(result):
-                 for chunk in result:
-                     yield runtime_pb2.Response(data=cloudpickle.dumps(chunk))
-             else:
-                 yield runtime_pb2.Response(data=cloudpickle.dumps(result))
-
-         except Exception as e:
-             error_msg = f"{type(e).__name__}: {e}\n{traceback.format_exc()}"
-             yield runtime_pb2.Response(data=cloudpickle.dumps(error_msg), error=True)
-
-
- def serve(port=50051):
-     server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
-     runtime_pb2_grpc.add_RuntimeServicer_to_server(RuntimeServicer(), server)
-     server.add_insecure_port(f'[::]:{port}')
-     server.start()
-     server.wait_for_termination()
-
-
- if __name__ == '__main__':
-     import argparse
-     parser = argparse.ArgumentParser()
-     parser.add_argument('--port', type=int, default=50051)
-     args = parser.parse_args()
-     serve(args.port)
cycls/sdk.py DELETED
@@ -1,186 +0,0 @@
- import os, time, uvicorn
- from .runtime import Runtime
- from .web import web, Config
- from .auth import PK_LIVE, PK_TEST, JWKS_PROD, JWKS_TEST
- import importlib.resources
-
- CYCLS_PATH = importlib.resources.files('cycls')
-
- # Module-level configuration
- api_key = None
- base_url = None
-
- themes = {
-     "default": CYCLS_PATH.joinpath('default-theme'),
-     "dev": CYCLS_PATH.joinpath('dev-theme'),
- }
-
- def _resolve_theme(theme):
-     """Resolve theme - accepts string name or path"""
-     if isinstance(theme, str):
-         if theme in themes:
-             return themes[theme]
-         raise ValueError(f"Unknown theme: {theme}. Available: {list(themes.keys())}")
-     return theme
-
- def _set_prod(config: Config, prod: bool):
-     config.prod = prod
-     config.pk = PK_LIVE if prod else PK_TEST
-     config.jwks = JWKS_PROD if prod else JWKS_TEST
-
- class AgentRuntime:
-     """Wraps an agent function with local/deploy/modal capabilities."""
-
-     def __init__(self, func, name, theme, pip, apt, copy, copy_public, modal_keys, auth, org, domain, header, intro, title, plan, analytics):
-         self.func = func
-         self.name = name
-         self.theme = _resolve_theme(theme)
-         self.pip = pip
-         self.apt = apt
-         self.copy = copy
-         self.copy_public = copy_public
-         self.modal_keys = modal_keys
-         self.domain = domain or f"{name}.cycls.ai"
-
-         self.config = Config(
-             header=header,
-             intro=intro,
-             title=title,
-             auth=auth,
-             plan=plan,
-             analytics=analytics,
-             org=org,
-         )
-
-     def __call__(self, *args, **kwargs):
-         """Make the runtime callable - delegates to the wrapped function."""
-         return self.func(*args, **kwargs)
-
-     def _local(self, port=8080):
-         """Run directly with uvicorn (no Docker)."""
-         print(f"Starting local server at localhost:{port}")
-         self.config.public_path = self.theme
-         _set_prod(self.config, False)
-         uvicorn.run(web(self.func, self.config), host="0.0.0.0", port=port)
-
-     def _runtime(self, prod=False):
-         """Create a Runtime instance for deployment."""
-         _set_prod(self.config, prod)
-         config_dict = self.config.model_dump()
-
-         # Extract to local variables to avoid capturing self in lambda (cloudpickle issue)
-         func = self.func
-         name = self.name
-
-         files = {str(self.theme): "theme", str(CYCLS_PATH)+"/web.py": "web.py"}
-         files.update({f: f for f in self.copy})
-         files.update({f: f"public/{f}" for f in self.copy_public})
-
-         return Runtime(
-             func=lambda port: __import__("web").serve(func, config_dict, name, port),
-             name=name,
-             apt_packages=self.apt,
-             pip_packages=["fastapi[standard]", "pyjwt", "cryptography", "uvicorn", *self.pip],
-             copy=files,
-             base_url=base_url,
-             api_key=api_key
-         )
-
-     def local(self, port=8080, watch=True):
-         """Run locally in Docker with file watching by default."""
-         if os.environ.get('_CYCLS_WATCH'):
-             watch = False
-         runtime = self._runtime(prod=False)
-         runtime.watch(port=port) if watch else runtime.run(port=port)
-
-     def deploy(self, port=8080):
-         """Deploy to production."""
-         if api_key is None:
-             raise RuntimeError("Missing API key. Set cycls.api_key before calling deploy().")
-         runtime = self._runtime(prod=True)
-         return runtime.deploy(port=port)
-
-     def modal(self, prod=False):
-         import modal
-         from modal.runner import run_app
-
-         # Extract to local variables to avoid capturing self in lambda
-         func = self.func
-         name = self.name
-         domain = self.domain
-
-         client = modal.Client.from_credentials(*self.modal_keys)
-         image = (modal.Image.debian_slim()
-                  .pip_install("fastapi[standard]", "pyjwt", "cryptography", *self.pip)
-                  .apt_install(*self.apt)
-                  .add_local_dir(self.theme, "/root/theme")
-                  .add_local_file(str(CYCLS_PATH)+"/web.py", "/root/web.py"))
-
-         for item in self.copy:
-             image = image.add_local_file(item, f"/root/{item}") if "." in item else image.add_local_dir(item, f'/root/{item}')
-
-         for item in self.copy_public:
-             image = image.add_local_file(item, f"/root/public/{item}") if "." in item else image.add_local_dir(item, f'/root/public/{item}')
-
-         app = modal.App("development", image=image)
-
-         _set_prod(self.config, prod)
-         config_dict = self.config.model_dump()
-
-         app.function(serialized=True, name=name)(
-             modal.asgi_app(label=name, custom_domains=[domain])
-             (lambda: __import__("web").web(func, config_dict))
-         )
-
-         if prod:
-             print(f"Deployed to => https://{domain}")
-             app.deploy(client=client, name=name)
-         else:
-             with modal.enable_output():
-                 run_app(app=app, client=client)
-             print("Modal development server is running. Press Ctrl+C to stop.")
-             with modal.enable_output(), run_app(app=app, client=client):
-                 while True: time.sleep(10)
-
-
- def agent(name=None, pip=None, apt=None, copy=None, copy_public=None, theme="default", modal_keys=None, auth=False, org=None, domain=None, header="", intro="", title="", plan="free", analytics=False):
-     """Decorator that transforms a function into a deployable agent."""
-     pip = pip or []
-     apt = apt or []
-     copy = copy or []
-     copy_public = copy_public or []
-     modal_keys = modal_keys or ["", ""]
-
-     if plan == "cycls_pass":
-         auth = True
-         analytics = True
-
-     def decorator(func):
-         agent_name = name or func.__name__.replace('_', '-')
-         return AgentRuntime(
-             func=func,
-             name=agent_name,
-             theme=theme,
-             pip=pip,
-             apt=apt,
-             copy=copy,
-             copy_public=copy_public,
-             modal_keys=modal_keys,
-             auth=auth,
-             org=org,
-             domain=domain,
-             header=header,
-             intro=intro,
-             title=title,
-             plan=plan,
-             analytics=analytics,
-         )
-     return decorator
-
- def function(python_version=None, pip=None, apt=None, run_commands=None, copy=None, name=None):
-     """Decorator that transforms a Python function into a containerized, remotely executable object."""
-     def decorator(func):
-         func_name = name or func.__name__
-         copy_dict = {i: i for i in copy or []}
-         return Runtime(func, func_name.replace('_', '-'), python_version, pip, apt, run_commands, copy_dict, base_url, api_key)
-     return decorator
@@ -1,20 +0,0 @@
- cycls/__init__.py,sha256=vyI1d_8VP4XW7MliFuUs_P3O9KQxyCwQu-JkxrCyhPQ,597
- cycls/auth.py,sha256=xkndHZyCfnlertMMEKerCJjf23N3fVcTRVTTSXTTuzg,247
- cycls/chat.py,sha256=cVbIkTDnVofohvByyYUrXF_RYDQZVQECJqo7cPBPJfs,4781
- cycls/default-theme/assets/index-C2r4Daz3.js,sha256=OGzjspxo0uUTdtQIzWZNgFMhGUtW4wSVuW33iA7oLFM,1351283
- cycls/default-theme/assets/index-DWGS8zpa.css,sha256=SxylXQV1qgQ0sw9QlMxcu9jwWTu8XPOnWZju8upUpCM,6504
- cycls/default-theme/index.html,sha256=WPUqVzJyh-aO2ulWTbXwOVAfWHaj0yWTGd9e8HOEro4,828
- cycls/dev-theme/index.html,sha256=QJBHkdNuMMiwQU7o8dN8__8YQeQB45D37D-NCXIWB2Q,11585
- cycls/grpc/__init__.py,sha256=sr8UQMgJEHyBreBKV8xz8UCd0zDP5lhjXTnfkOB_yOY,63
- cycls/grpc/client.py,sha256=GJoWlh2gP3YWEQ75-dX-IInyOdr2YSM8Cuw0lI-vD1I,2302
- cycls/grpc/runtime.proto,sha256=B1AqrNIXOtr3Xsyzfc2Z1OCBepa6hsi4DJ4a3Pf33IQ,244
- cycls/grpc/runtime_pb2.py,sha256=vEJo8FGP5aWPSDqzjZldfctduA2ojiyvoody7vpf-1w,1703
- cycls/grpc/runtime_pb2_grpc.py,sha256=KFd8KqGbiNsKm8X39Q9_BPwXjeZUiDl8O_4aTlEys3k,3394
- cycls/grpc/server.py,sha256=pfb4bo06NKDv0OpknqMSMjB9f8HUR41EZau1c6_XU5A,1911
- cycls/runtime.py,sha256=CORCjI-AW-cE6zg_blQsbLHchcM6coJo6b3N4PriuWo,18072
- cycls/sdk.py,sha256=yOLAOx26qaAgddLu7QcAroeaQ2eWqIjCvM7QmCe-OJs,6760
- cycls/web.py,sha256=mRzuOVZPEMW6bLkzEDHI25AWYBREqbtoHsBzgGbUH4g,5038
- cycls-0.0.2.82.dist-info/METADATA,sha256=NUxfEBA8eEphR60zB5MfkvBWUrNk0OlYo1afVBgMlLU,8759
- cycls-0.0.2.82.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- cycls-0.0.2.82.dist-info/entry_points.txt,sha256=0NBXjzFFxdtO57z3vdaQEy68KmYTEgwK9Gvo34AOTb4,41
- cycls-0.0.2.82.dist-info/RECORD,,
@@ -1,3 +0,0 @@
- [console_scripts]
- cycls=cycls.chat:main
-