cycls 0.0.2.82.tar.gz → 0.0.2.83.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cycls
- Version: 0.0.2.82
+ Version: 0.0.2.83
  Summary: Distribute Intelligence
  Author: Mohammed J. AlRujayi
  Author-email: mj@cycls.com
@@ -16,10 +16,8 @@ Provides-Extra: modal
  Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
  Requires-Dist: docker (>=7.1.0,<8.0.0)
  Requires-Dist: fastapi (>=0.111.0,<0.112.0)
- Requires-Dist: grpcio (>=1.76.0,<2.0.0)
  Requires-Dist: httpx (>=0.27.0,<0.28.0)
  Requires-Dist: modal (>=1.1.0,<2.0.0) ; extra == "modal"
- Requires-Dist: protobuf (>=6.0,<7.0)
  Requires-Dist: pyjwt (>=2.8.0,<3.0.0)
  Description-Content-Type: text/markdown

@@ -10,23 +10,40 @@ import shutil
  from pathlib import Path
  import tarfile

- from .grpc import RuntimeClient
-
  os.environ["DOCKER_BUILDKIT"] = "1"

- GRPC_PORT = 50051
  BASE_IMAGE = "ghcr.io/cycls/base:python3.12"
  BASE_PACKAGES = {"cloudpickle", "cryptography", "fastapi", "fastapi[standard]",
                   "pydantic", "pyjwt", "uvicorn", "uvicorn[standard]", "httpx"}
- GRPC_PACKAGES = {"grpcio", "protobuf"}

- # Simple entrypoint for deployed services - loads pickled function+args and runs it
+ # Entrypoint for deployed services - loads pickled function+args and runs it
  ENTRYPOINT_PY = '''import cloudpickle
  with open("/app/function.pkl", "rb") as f:
      func, args, kwargs = cloudpickle.load(f)
  func(*args, **kwargs)
  '''

+ # Runner script for local dev - reads pickle from volume, writes result back
+ RUNNER_PY = '''import cloudpickle
+ import sys
+ import traceback
+ from pathlib import Path
+
+ io_dir = Path(sys.argv[1])
+ payload_path = io_dir / "payload.pkl"
+ result_path = io_dir / "result.pkl"
+
+ try:
+     with open(payload_path, "rb") as f:
+         func, args, kwargs = cloudpickle.load(f)
+     result = func(*args, **kwargs)
+     with open(result_path, "wb") as f:
+         cloudpickle.dump(result, f)
+ except Exception:
+     traceback.print_exc()
+     sys.exit(1)
+ '''
+

  def _hash_path(path_str: str) -> str:
      h = hashlib.sha256()
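The new RUNNER_PY replaces the gRPC channel with a plain file handshake: the host pickles (func, args, kwargs) to payload.pkl in a shared directory, and the runner executes the call and pickles the return value to result.pkl. Below is a minimal host-side sketch of that handshake, with a plain subprocess standing in for the Docker container; the inline runner source is condensed from RUNNER_PY above, and everything else (the add function, paths) is illustrative.

import subprocess
import sys
import tempfile
from pathlib import Path

import cloudpickle

# Condensed from RUNNER_PY above: read payload.pkl, run it, write result.pkl.
RUNNER = '''import cloudpickle
import sys
import traceback
from pathlib import Path

io_dir = Path(sys.argv[1])
try:
    with open(io_dir / "payload.pkl", "rb") as f:
        func, args, kwargs = cloudpickle.load(f)
    result = func(*args, **kwargs)
    with open(io_dir / "result.pkl", "wb") as f:
        cloudpickle.dump(result, f)
except Exception:
    traceback.print_exc()
    sys.exit(1)
'''

def add(a, b):
    return a + b

with tempfile.TemporaryDirectory() as io_dir:
    io_path = Path(io_dir)
    # Host side: serialize the call into the shared directory.
    with open(io_path / "payload.pkl", "wb") as f:
        cloudpickle.dump((add, (2, 3), {}), f)
    # Runner side: in cycls this runs inside the container with io_path
    # volume-mounted at /io; here a subprocess stands in for it.
    (io_path / "runner.py").write_text(RUNNER)
    subprocess.run([sys.executable, str(io_path / "runner.py"), str(io_path)], check=True)
    with open(io_path / "result.pkl", "rb") as f:
        print(cloudpickle.load(f))  # -> 5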
@@ -57,7 +74,7 @@ def _copy_path(src_path: Path, dest_path: Path):


  class Runtime:
-     """Executes functions in Docker containers. Uses gRPC for local dev, pickle for deploy."""
+     """Executes functions in Docker containers. Uses file-based pickle for communication."""

      def __init__(self, func, name, python_version=None, pip_packages=None, apt_packages=None,
                   run_commands=None, copy=None, base_url=None, api_key=None, base_image=None):
@@ -71,7 +88,6 @@ class Runtime:
          self.base_url = base_url or "https://service-core-280879789566.me-central1.run.app"
          self.api_key = api_key

-         # Compute pip packages (gRPC only needed for local dev, added dynamically)
          user_packages = set(pip_packages or [])
          if self.base_image == BASE_IMAGE:
              self.pip_packages = sorted(user_packages - BASE_PACKAGES)
@@ -81,11 +97,7 @@ class Runtime:
          self.image_prefix = f"cycls/{name}"
          self.managed_label = "cycls.runtime"
          self._docker_client = None
-
-         # Local dev state (gRPC container)
          self._container = None
-         self._client = None
-         self._host_port = None

      @property
      def docker_client(self):
@@ -105,13 +117,11 @@ class Runtime:
      def _perform_auto_cleanup(self, keep_tag=None):
          """Clean up old containers and dev images (preserve deploy-* images)."""
          try:
-             # Remove old containers
              current_id = self._container.id if self._container else None
              for container in self.docker_client.containers.list(all=True, filters={"label": self.managed_label}):
                  if container.id != current_id:
                      container.remove(force=True)

-             # Remove old dev images globally (keep deploy-* and current)
              cleaned = 0
              for image in self.docker_client.images.list(filters={"label": self.managed_label}):
                  is_deploy = any(":deploy-" in t for t in image.tags)
@@ -136,7 +146,7 @@ class Runtime:
          parts.extend(extra_parts)
          return f"{self.image_prefix}:{hashlib.sha256(''.join(parts).encode()).hexdigest()[:16]}"

-     def _dockerfile_preamble(self, pip_extras=None) -> str:
+     def _dockerfile_preamble(self) -> str:
          """Common Dockerfile setup: base image, apt, pip, run commands, copy."""
          lines = [f"FROM {self.base_image}"]

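The content-addressed tag visible in the context above means a rebuild is skipped whenever the hashed build inputs are unchanged. A toy illustration of the scheme (the input list and image name are hypothetical stand-ins for the Runtime fields that feed the hash):

import hashlib

# Hypothetical stand-ins for the parts _image_tag joins and hashes.
parts = ["ghcr.io/cycls/base:python3.12", "numpy", "local-v1"]
digest = hashlib.sha256("".join(parts).encode()).hexdigest()[:16]
print(f"cycls/my-app:{digest}")  # same inputs -> same tag -> Docker cache hit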
@@ -147,9 +157,8 @@ class Runtime:
          if self.apt_packages:
              lines.append(f"RUN apt-get update && apt-get install -y --no-install-recommends {' '.join(self.apt_packages)}")

-         all_pip = list(self.pip_packages) + list(pip_extras or [])
-         if all_pip:
-             lines.append(f"RUN uv pip install --system --no-cache {' '.join(all_pip)}")
+         if self.pip_packages:
+             lines.append(f"RUN uv pip install --system --no-cache {' '.join(self.pip_packages)}")

          for cmd in self.run_commands:
              lines.append(f"RUN {cmd}")
@@ -159,12 +168,11 @@ class Runtime:

          return "\n".join(lines)

-     def _dockerfile_grpc(self) -> str:
-         """Dockerfile for local dev: gRPC server."""
-         return f"""{self._dockerfile_preamble(pip_extras=GRPC_PACKAGES)}
- COPY grpc_runtime/ /app/grpc_runtime/
- EXPOSE {GRPC_PORT}
- CMD ["python", "-m", "grpc_runtime.server", "--port", "{GRPC_PORT}"]
+     def _dockerfile_local(self) -> str:
+         """Dockerfile for local dev: runner script with volume mount."""
+         return f"""{self._dockerfile_preamble()}
+ COPY runner.py /app/runner.py
+ ENTRYPOINT ["python", "/app/runner.py", "/io"]
  """

      def _dockerfile_deploy(self, port: int) -> str:
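For a Runtime with no extra apt/pip packages, run commands, or copies, and the default base image, _dockerfile_local() should render to roughly the following (preamble lines outside the hunks shown here may add more, so this is an approximation):

FROM ghcr.io/cycls/base:python3.12
COPY runner.py /app/runner.py
ENTRYPOINT ["python", "/app/runner.py", "/io"]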
@@ -200,9 +208,9 @@ CMD ["python", "entrypoint.py"]
              print(f"\nDocker build failed: {e}")
              raise

-     def _ensure_grpc_image(self) -> str:
-         """Build local dev image with gRPC server if needed."""
-         tag = self._image_tag(extra_parts=["grpc-v2"])
+     def _ensure_local_image(self) -> str:
+         """Build local dev image if needed."""
+         tag = self._image_tag(extra_parts=["local-v1"])
          try:
              self.docker_client.images.get(tag)
              print(f"Found cached image: {tag}")
@@ -213,176 +221,123 @@ CMD ["python", "entrypoint.py"]
          with tempfile.TemporaryDirectory() as tmpdir:
              workdir = Path(tmpdir)
              self._copy_user_files(workdir)
-             (workdir / "Dockerfile").write_text(self._dockerfile_grpc())
-
-             # Copy gRPC runtime
-             grpc_src = Path(__file__).parent / "grpc"
-             shutil.copytree(grpc_src, workdir / "grpc_runtime",
-                             ignore=shutil.ignore_patterns('*.proto', '__pycache__'))
-
+             (workdir / "Dockerfile").write_text(self._dockerfile_local())
+             (workdir / "runner.py").write_text(RUNNER_PY)
              return self._build_image(tag, workdir)

-     def _ensure_container(self, service_port=None):
-         """Start container if not running, return gRPC client."""
-         if self._client and self._container:
-             try:
-                 self._container.reload()
-                 if self._container.status == 'running':
-                     return self._client
-             except docker.errors.NotFound:
-                 pass  # Container was removed externally
-             except docker.errors.APIError:
-                 pass  # Docker API issue, will recreate
-             self._cleanup_container()
-
-         tag = self._ensure_grpc_image()
-         self._perform_auto_cleanup(keep_tag=tag)
-
-         # Port mappings (fixed ports avoid race conditions)
-         ports = {f'{GRPC_PORT}/tcp': GRPC_PORT}
-         if service_port:
-             ports[f'{service_port}/tcp'] = service_port
-
-         self._container = self.docker_client.containers.run(
-             tag, detach=True, ports=ports, labels={self.managed_label: "true"}
-         )
-         self._host_port = GRPC_PORT
-         self._client = RuntimeClient(port=self._host_port)
-         if not self._client.wait_ready(timeout=10):
-             raise RuntimeError("Container failed to start")
-         print(f"Container ready on port {self._host_port}")
-         return self._client
-
      def _cleanup_container(self):
-         """Stop and remove the warm container."""
-         if self._client:
-             self._client.close()
-             self._client = None
+         """Stop and remove the container."""
          if self._container:
              try:
                  self._container.stop(timeout=3)
                  self._container.remove()
              except docker.errors.NotFound:
-                 pass  # Already removed
+                 pass
              except docker.errors.APIError:
-                 pass  # Best effort cleanup
+                 pass
              self._container = None
-             self._host_port = None
+
+     @contextlib.contextmanager
+     def runner(self, *args, **kwargs):
+         """Context manager for running a function in a container."""
+         service_port = kwargs.get('port')
+         tag = self._ensure_local_image()
+         self._perform_auto_cleanup(keep_tag=tag)
+
+         ports = {f'{service_port}/tcp': service_port} if service_port else None
+
+         with tempfile.TemporaryDirectory() as io_dir:
+             io_path = Path(io_dir)
+             payload_path = io_path / "payload.pkl"
+             result_path = io_path / "result.pkl"
+
+             with open(payload_path, 'wb') as f:
+                 cloudpickle.dump((self.func, args, kwargs), f)
+
+             try:
+                 self._container = self.docker_client.containers.create(
+                     image=tag,
+                     volumes={str(io_path): {'bind': '/io', 'mode': 'rw'}},
+                     ports=ports,
+                     labels={self.managed_label: "true"}
+                 )
+                 self._container.start()
+                 yield self._container, result_path
+             finally:
+                 self._cleanup_container()

      def run(self, *args, **kwargs):
          """Execute the function in a container and return the result."""
          service_port = kwargs.get('port')
          print(f"Running '{self.name}'...")
+
          try:
-             client = self._ensure_container(service_port=service_port)
-
-             # Blocking service: fire gRPC, stream Docker logs
-             if service_port:
-                 client.fire(self.func, *args, **kwargs)
-                 print(f"Service running on port {service_port}")
-                 print("--- 🪵 Container Logs ---")
-                 for chunk in self._container.logs(stream=True, follow=True):
+             with self.runner(*args, **kwargs) as (container, result_path):
+                 print("--- Container Logs ---")
+                 for chunk in container.logs(stream=True, follow=True):
                      print(chunk.decode(), end='')
-                 return None
+                 print("----------------------")

-             # Regular function: execute, then print logs
-             result = client.call(self.func, *args, **kwargs)
-             logs = self._container.logs().decode()
-             if logs.strip():
-                 print("--- 🪵 Container Logs ---")
-                 print(logs, end='')
-                 print("-------------------------")
-             return result
+                 status = container.wait()
+                 if status['StatusCode'] != 0:
+                     print(f"Error: Container exited with code {status['StatusCode']}")
+                     return None
+
+                 if service_port:
+                     return None  # Service mode, no result
+
+                 if result_path.exists():
+                     with open(result_path, 'rb') as f:
+                         return cloudpickle.load(f)
+                 else:
+                     print("Error: Result file not found")
+                     return None

          except KeyboardInterrupt:
-             print("\n-------------------------")
+             print("\n----------------------")
              print("Stopping...")
-             self._cleanup_container()
              return None
          except Exception as e:
              print(f"Error: {e}")
              return None

-     def stream(self, *args, **kwargs):
-         """Execute the function and yield streamed results."""
-         service_port = kwargs.get('port')
-         client = self._ensure_container(service_port=service_port)
-         yield from client.execute(self.func, *args, **kwargs)
-
-     @contextlib.contextmanager
-     def runner(self, *args, **kwargs):
-         """Context manager for running a service. Yields (container, client)."""
-         service_port = kwargs.get('port')
-         try:
-             client = self._ensure_container(service_port=service_port)
-             client.fire(self.func, *args, **kwargs)
-             yield self._container, client
-         finally:
-             self._cleanup_container()
-
      def watch(self, *args, **kwargs):
-         """Run with file watching - restarts script on changes."""
+         """Run with file watching - restarts on changes."""
+         if os.environ.get('_CYCLS_WATCH'):
+             return self.run(*args, **kwargs)
+
          try:
              from watchfiles import watch as watchfiles_watch
          except ImportError:
-             print("watchfiles not installed (enables auto-reload on file changes).")
-             print("Install with: pip install watchfiles")
-             print("Running without file watching...")
+             print("watchfiles not installed. pip install watchfiles")
              return self.run(*args, **kwargs)

-         import inspect
          import subprocess

-         # Find the user's script (outside cycls package)
-         cycls_pkg = Path(__file__).parent.resolve()
-         main_script = None
-         for frame_info in inspect.stack():
-             filepath = Path(frame_info.filename).resolve()
-             if filepath.suffix == '.py' and not str(filepath).startswith(str(cycls_pkg)):
-                 main_script = filepath
-                 break
-
-         if not main_script:
-             print("Could not find script to watch.")
-             return self.run(*args, **kwargs)
-
-         # Build watch paths
-         watch_paths = [main_script]
-         watch_paths.extend([Path(src).resolve() for src in self.copy.keys() if Path(src).exists()])
+         script = Path(sys.argv[0]).resolve()
+         watch_paths = [script] + [Path(p).resolve() for p in self.copy if Path(p).exists()]

-         print(f"👀 Watching:")
-         for p in watch_paths:
-             print(f"   {p}")
-         print()
+         print(f"Watching: {[p.name for p in watch_paths]}\n")

          while True:
-             print(f"🚀 Starting {main_script.name}...")
-             proc = subprocess.Popen(
-                 [sys.executable, str(main_script)],
-                 env={**os.environ, '_CYCLS_WATCH': '1'}
-             )
-
+             proc = subprocess.Popen([sys.executable, str(script)], env={**os.environ, '_CYCLS_WATCH': '1'})
              try:
                  for changes in watchfiles_watch(*watch_paths):
-                     print(f"\n🔄 Changed: {[Path(c[1]).name for c in changes]}")
+                     print(f"\nChanged: {[Path(c[1]).name for c in changes]}")
                      break
-
                  proc.terminate()
                  proc.wait(timeout=3)
              except subprocess.TimeoutExpired:
                  proc.kill()
              except KeyboardInterrupt:
-                 print("\nStopping...")
                  proc.terminate()
                  proc.wait(timeout=3)
                  return

-             print()
-
      def _prepare_deploy_context(self, workdir: Path, port: int, args=(), kwargs=None):
          """Prepare build context for deploy: pickle function+args + entrypoint."""
          kwargs = kwargs or {}
-         kwargs['port'] = port  # Ensure port is in kwargs
+         kwargs['port'] = port
          self._copy_user_files(workdir)
          (workdir / "Dockerfile").write_text(self._dockerfile_deploy(port))
          (workdir / "entrypoint.py").write_text(ENTRYPOINT_PY)
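An end-to-end sketch of the reworked local flow, constructing Runtime directly; the import path and constructor usage here are assumptions inferred from this diff, not a documented API.

from cycls.runtime import Runtime  # module path assumed from context

def greet(name):
    return f"hello {name}"

rt = Runtime(greet, name="greet-demo")

# run(): ensures the local image, pickles (func, args, kwargs) into a temp
# dir mounted at /io, waits for the container, then unpickles result.pkl.
print(rt.run("world"))  # expected: hello world

# watch(): re-runs the calling script on file changes; the child process
# sees _CYCLS_WATCH=1, so its own watch() call falls through to run().
# rt.watch("world")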
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "cycls"
- version = "0.0.2.82"
+ version = "0.0.2.83"

  packages = [{ include = "cycls" }]
  include = ["cycls/theme/**/*"]
@@ -15,8 +15,6 @@ httpx = "^0.27.0"
  pyjwt = "^2.8.0"
  docker = "^7.1.0"
  cloudpickle = "^3.1.1"
- grpcio = "^1.76.0"
- protobuf = "^6.0"

  [tool.poetry.scripts]
  cycls = "cycls.chat:main"
@@ -1,3 +0,0 @@
- from .client import RuntimeClient
-
- __all__ = ["RuntimeClient"]
@@ -1,71 +0,0 @@
- import cloudpickle
- import grpc
-
- try:
-     from . import runtime_pb2
-     from . import runtime_pb2_grpc
- except ImportError:
-     import runtime_pb2
-     import runtime_pb2_grpc
-
-
- class RuntimeClient:
-     def __init__(self, host='localhost', port=50051, timeout=None):
-         self.host = host
-         self.port = port
-         self.timeout = timeout
-         self._channel = None
-         self._stub = None
-
-     def _connect(self):
-         if self._channel is None:
-             self._channel = grpc.insecure_channel(f'{self.host}:{self.port}')
-             self._stub = runtime_pb2_grpc.RuntimeStub(self._channel)
-         return self._stub
-
-     def execute(self, func, *args, **kwargs):
-         """Execute function and yield streamed results."""
-         stub = self._connect()
-         payload = cloudpickle.dumps((func, args, kwargs))
-         request = runtime_pb2.Request(payload=payload)
-
-         for response in stub.Execute(request, timeout=self.timeout):
-             result = cloudpickle.loads(response.data)
-             if response.error:
-                 raise RuntimeError(f"Function execution failed: {result}")
-             yield result
-
-     def call(self, func, *args, **kwargs):
-         """Execute and return single result (or list if multiple)."""
-         results = list(self.execute(func, *args, **kwargs))
-         return results[0] if len(results) == 1 else results
-
-     def fire(self, func, *args, **kwargs):
-         """Fire off execution without waiting for response."""
-         stub = self._connect()
-         payload = cloudpickle.dumps((func, args, kwargs))
-         request = runtime_pb2.Request(payload=payload)
-         # Start the stream - gRPC sends request immediately
-         self._active_stream = stub.Execute(request)
-
-     def wait_ready(self, timeout=10):
-         """Wait for channel to be ready."""
-         if self._channel is None:
-             self._connect()
-         try:
-             grpc.channel_ready_future(self._channel).result(timeout=timeout)
-             return True
-         except grpc.FutureTimeoutError:
-             return False
-
-     def close(self):
-         if self._channel:
-             self._channel.close()
-             self._channel = None
-             self._stub = None
-
-     def __enter__(self):
-         return self
-
-     def __exit__(self, *args):
-         self.close()
@@ -1,18 +0,0 @@
- syntax = "proto3";
-
- package runtime;
-
- service Runtime {
-   rpc Execute(Request) returns (stream Response);
- }
-
- message Request {
-   bytes payload = 1;
- }
-
- message Response {
-   bytes data = 1;
-   bool error = 2;
-   bytes log = 3;
-   bool is_log = 4;
- }
@@ -1,40 +0,0 @@
- # -*- coding: utf-8 -*-
- # Generated by the protocol buffer compiler. DO NOT EDIT!
- # NO CHECKED-IN PROTOBUF GENCODE
- # source: runtime.proto
- # Protobuf Python Version: 6.31.1
- """Generated protocol buffer code."""
- from google.protobuf import descriptor as _descriptor
- from google.protobuf import descriptor_pool as _descriptor_pool
- from google.protobuf import runtime_version as _runtime_version
- from google.protobuf import symbol_database as _symbol_database
- from google.protobuf.internal import builder as _builder
- _runtime_version.ValidateProtobufRuntimeVersion(
-     _runtime_version.Domain.PUBLIC,
-     6,
-     31,
-     1,
-     '',
-     'runtime.proto'
- )
- # @@protoc_insertion_point(imports)
-
- _sym_db = _symbol_database.Default()
-
-
-
-
- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rruntime.proto\x12\x07runtime\"\x1a\n\x07Request\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\"D\n\x08Response\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\r\n\x05\x65rror\x18\x02 \x01(\x08\x12\x0b\n\x03log\x18\x03 \x01(\x0c\x12\x0e\n\x06is_log\x18\x04 \x01(\x08\x32;\n\x07Runtime\x12\x30\n\x07\x45xecute\x12\x10.runtime.Request\x1a\x11.runtime.Response0\x01\x62\x06proto3')
-
- _globals = globals()
- _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
- _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'runtime_pb2', _globals)
- if not _descriptor._USE_C_DESCRIPTORS:
-     DESCRIPTOR._loaded_options = None
-     _globals['_REQUEST']._serialized_start=26
-     _globals['_REQUEST']._serialized_end=52
-     _globals['_RESPONSE']._serialized_start=54
-     _globals['_RESPONSE']._serialized_end=122
-     _globals['_RUNTIME']._serialized_start=124
-     _globals['_RUNTIME']._serialized_end=183
- # @@protoc_insertion_point(module_scope)
@@ -1,100 +0,0 @@
- # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
- """Client and server classes corresponding to protobuf-defined services."""
- import grpc
- import warnings
-
- try:
-     from . import runtime_pb2 as runtime__pb2
- except ImportError:
-     import runtime_pb2 as runtime__pb2
-
- GRPC_GENERATED_VERSION = '1.76.0'
- GRPC_VERSION = grpc.__version__
- _version_not_supported = False
-
- try:
-     from grpc._utilities import first_version_is_lower
-     _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
- except ImportError:
-     _version_not_supported = True
-
- if _version_not_supported:
-     raise RuntimeError(
-         f'The grpc package installed is at version {GRPC_VERSION},'
-         + ' but the generated code in runtime_pb2_grpc.py depends on'
-         + f' grpcio>={GRPC_GENERATED_VERSION}.'
-         + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
-         + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
-     )
-
-
- class RuntimeStub(object):
-     """Missing associated documentation comment in .proto file."""
-
-     def __init__(self, channel):
-         """Constructor.
-
-         Args:
-             channel: A grpc.Channel.
-         """
-         self.Execute = channel.unary_stream(
-                 '/runtime.Runtime/Execute',
-                 request_serializer=runtime__pb2.Request.SerializeToString,
-                 response_deserializer=runtime__pb2.Response.FromString,
-                 _registered_method=True)
-
-
- class RuntimeServicer(object):
-     """Missing associated documentation comment in .proto file."""
-
-     def Execute(self, request, context):
-         """Missing associated documentation comment in .proto file."""
-         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-         context.set_details('Method not implemented!')
-         raise NotImplementedError('Method not implemented!')
-
-
- def add_RuntimeServicer_to_server(servicer, server):
-     rpc_method_handlers = {
-             'Execute': grpc.unary_stream_rpc_method_handler(
-                     servicer.Execute,
-                     request_deserializer=runtime__pb2.Request.FromString,
-                     response_serializer=runtime__pb2.Response.SerializeToString,
-             ),
-     }
-     generic_handler = grpc.method_handlers_generic_handler(
-             'runtime.Runtime', rpc_method_handlers)
-     server.add_generic_rpc_handlers((generic_handler,))
-     server.add_registered_method_handlers('runtime.Runtime', rpc_method_handlers)
-
-
- # This class is part of an EXPERIMENTAL API.
- class Runtime(object):
-     """Missing associated documentation comment in .proto file."""
-
-     @staticmethod
-     def Execute(request,
-             target,
-             options=(),
-             channel_credentials=None,
-             call_credentials=None,
-             insecure=False,
-             compression=None,
-             wait_for_ready=None,
-             timeout=None,
-             metadata=None):
-         return grpc.experimental.unary_stream(
-             request,
-             target,
-             '/runtime.Runtime/Execute',
-             runtime__pb2.Request.SerializeToString,
-             runtime__pb2.Response.FromString,
-             options,
-             channel_credentials,
-             insecure,
-             call_credentials,
-             compression,
-             wait_for_ready,
-             timeout,
-             metadata,
-             _registered_method=True)
@@ -1,60 +0,0 @@
- import asyncio
- import inspect
- import traceback
- import cloudpickle
- import grpc
- from concurrent import futures
-
- try:
-     from . import runtime_pb2
-     from . import runtime_pb2_grpc
- except ImportError:
-     import runtime_pb2
-     import runtime_pb2_grpc
-
-
- class RuntimeServicer(runtime_pb2_grpc.RuntimeServicer):
-     def Execute(self, request, context):
-         try:
-             func, args, kwargs = cloudpickle.loads(request.payload)
-             result = func(*args, **kwargs)
-
-             # Handle coroutines
-             if inspect.iscoroutine(result):
-                 result = asyncio.run(result)
-
-             # Handle async generators
-             if inspect.isasyncgen(result):
-                 async def collect():
-                     items = []
-                     async for item in result:
-                         items.append(item)
-                     return items
-                 result = iter(asyncio.run(collect()))
-
-             # Stream results for generators, single response otherwise
-             if inspect.isgenerator(result):
-                 for chunk in result:
-                     yield runtime_pb2.Response(data=cloudpickle.dumps(chunk))
-             else:
-                 yield runtime_pb2.Response(data=cloudpickle.dumps(result))
-
-         except Exception as e:
-             error_msg = f"{type(e).__name__}: {e}\n{traceback.format_exc()}"
-             yield runtime_pb2.Response(data=cloudpickle.dumps(error_msg), error=True)
-
-
- def serve(port=50051):
-     server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
-     runtime_pb2_grpc.add_RuntimeServicer_to_server(RuntimeServicer(), server)
-     server.add_insecure_port(f'[::]:{port}')
-     server.start()
-     server.wait_for_termination()
-
-
- if __name__ == '__main__':
-     import argparse
-     parser = argparse.ArgumentParser()
-     parser.add_argument('--port', type=int, default=50051)
-     args = parser.parse_args()
-     serve(args.port)
6 files without changes