cycls 0.0.2.73__tar.gz → 0.0.2.75__tar.gz

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cycls
- Version: 0.0.2.73
+ Version: 0.0.2.75
  Summary: Distribute Intelligence
  Author: Mohammed J. AlRujayi
  Author-email: mj@cycls.com
@@ -16,6 +16,7 @@ Provides-Extra: modal
  Requires-Dist: cloudpickle (>=3.1.1,<4.0.0)
  Requires-Dist: docker (>=7.1.0,<8.0.0)
  Requires-Dist: fastapi (>=0.111.0,<0.112.0)
+ Requires-Dist: grpcio (>=1.76.0,<2.0.0)
  Requires-Dist: httpx (>=0.27.0,<0.28.0)
  Requires-Dist: modal (>=1.1.0,<2.0.0) ; extra == "modal"
  Requires-Dist: pyjwt (>=2.8.0,<3.0.0)
@@ -47,8 +48,6 @@ The open-source SDK for distributing AI agents.

  ## Distribute Intelligence

- AI capabilities shouldn't be locked in notebooks or trapped behind months of infrastructure work. Cycls turns your Python functions into production services - complete with APIs, interfaces, auth, and analytics. You focus on the intelligence. Cycls handles the distribution.
-
  Write a function. Deploy it as an API, a web interface, or both. Add authentication, analytics, and monetization with flags.

  ```python
@@ -24,8 +24,6 @@ The open-source SDK for distributing AI agents.

  ## Distribute Intelligence

- AI capabilities shouldn't be locked in notebooks or trapped behind months of infrastructure work. Cycls turns your Python functions into production services - complete with APIs, interfaces, auth, and analytics. You focus on the intelligence. Cycls handles the distribution.
-
  Write a function. Deploy it as an API, a web interface, or both. Add authentication, analytics, and monetization with flags.

  ```python
@@ -0,0 +1,20 @@
+ import sys
+ from types import ModuleType
+ from .sdk import function, agent
+ from .runtime import Runtime
+
+ class _Module(ModuleType):
+     def __getattr__(self, name):
+         from . import sdk
+         if name in ("api_key", "base_url"):
+             return getattr(sdk, name)
+         raise AttributeError(f"module 'cycls' has no attribute '{name}'")
+
+     def __setattr__(self, name, value):
+         from . import sdk
+         if name in ("api_key", "base_url"):
+             setattr(sdk, name, value)
+             return
+         super().__setattr__(name, value)
+
+ sys.modules[__name__].__class__ = _Module
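The class swap on the last line replaces the `cycls` module's type so that `api_key` and `base_url` behave like module-level settings forwarded to the `sdk` submodule. A hedged usage sketch follows; the values are placeholders, not real configuration.

```python
# Hypothetical usage of the module-level attribute proxy; values are placeholders.
import cycls

cycls.api_key = "test-key"                 # routed to cycls.sdk.api_key via _Module.__setattr__
cycls.base_url = "http://localhost:8000"   # routed to cycls.sdk.base_url
print(cycls.api_key)                       # read back through _Module.__getattr__
```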
@@ -0,0 +1,3 @@
+ from .client import RuntimeClient
+
+ __all__ = ["RuntimeClient"]
@@ -0,0 +1,71 @@
+ import cloudpickle
+ import grpc
+
+ try:
+     from . import runtime_pb2
+     from . import runtime_pb2_grpc
+ except ImportError:
+     import runtime_pb2
+     import runtime_pb2_grpc
+
+
+ class RuntimeClient:
+     def __init__(self, host='localhost', port=50051, timeout=None):
+         self.host = host
+         self.port = port
+         self.timeout = timeout
+         self._channel = None
+         self._stub = None
+
+     def _connect(self):
+         if self._channel is None:
+             self._channel = grpc.insecure_channel(f'{self.host}:{self.port}')
+             self._stub = runtime_pb2_grpc.RuntimeStub(self._channel)
+         return self._stub
+
+     def execute(self, func, *args, **kwargs):
+         """Execute function and yield streamed results."""
+         stub = self._connect()
+         payload = cloudpickle.dumps((func, args, kwargs))
+         request = runtime_pb2.Request(payload=payload)
+
+         for response in stub.Execute(request, timeout=self.timeout):
+             result = cloudpickle.loads(response.data)
+             if response.error:
+                 raise RuntimeError(result)
+             yield result
+
+     def call(self, func, *args, **kwargs):
+         """Execute and return single result (or list if multiple)."""
+         results = list(self.execute(func, *args, **kwargs))
+         return results[0] if len(results) == 1 else results
+
+     def fire(self, func, *args, **kwargs):
+         """Fire off execution without waiting for response."""
+         stub = self._connect()
+         payload = cloudpickle.dumps((func, args, kwargs))
+         request = runtime_pb2.Request(payload=payload)
+         # Start the stream - gRPC sends request immediately
+         self._active_stream = stub.Execute(request)
+
+     def wait_ready(self, timeout=10):
+         """Wait for channel to be ready."""
+         if self._channel is None:
+             self._connect()
+         try:
+             grpc.channel_ready_future(self._channel).result(timeout=timeout)
+             return True
+         except grpc.FutureTimeoutError:
+             return False
+
+     def close(self):
+         if self._channel:
+             self._channel.close()
+             self._channel = None
+             self._stub = None
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, *args):
+         self.close()
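Taken together, `RuntimeClient` pickles a function plus its arguments with cloudpickle, sends it over the unary-stream `Execute` RPC, and unpickles each streamed chunk, raising `RuntimeError` when the server flags an error. A hedged usage sketch follows; the import path `cycls.runtime` is an assumption (file paths are not shown in this diff), and it requires a Runtime gRPC server already listening on localhost:50051 (see the server module later in the diff).

```python
# Hedged sketch: the module path `cycls.runtime` is assumed (not shown in the diff),
# and a Runtime gRPC server must already be listening on localhost:50051.
from cycls.runtime import RuntimeClient

def add(a, b):
    return a + b

def count(n):
    for i in range(n):
        yield i

with RuntimeClient(host="localhost", port=50051) as client:
    if client.wait_ready(timeout=5):            # block until the channel is usable
        print(client.call(add, 2, 3))           # single streamed result -> 5
        for value in client.execute(count, 3):  # generator results arrive as a stream
            print(value)                        # -> 0, 1, 2
```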
@@ -0,0 +1,18 @@
+ syntax = "proto3";
+
+ package runtime;
+
+ service Runtime {
+   rpc Execute(Request) returns (stream Response);
+ }
+
+ message Request {
+   bytes payload = 1;
+ }
+
+ message Response {
+   bytes data = 1;
+   bool error = 2;
+   bytes log = 3;
+   bool is_log = 4;
+ }
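The `Response` message carries either pickled result data or a pickled error string, plus log fields; the checked-in `runtime_pb2*.py` modules below were generated from this schema. If the stubs ever need to be regenerated, a typical invocation with `grpcio-tools` looks like the sketch below; the working-directory paths are assumptions, and this step is not part of the package itself.

```python
# Hedged sketch: regenerate the gRPC stubs from runtime.proto.
# Assumes grpcio-tools is installed and runtime.proto sits in the current directory.
from grpc_tools import protoc

exit_code = protoc.main([
    "protoc",              # first element is the conventional program name, ignored
    "-I.",                 # proto include path
    "--python_out=.",      # emit runtime_pb2.py
    "--grpc_python_out=.", # emit runtime_pb2_grpc.py
    "runtime.proto",
])
print("protoc exit code:", exit_code)  # 0 on success
```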
@@ -0,0 +1,40 @@
+ # -*- coding: utf-8 -*-
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # NO CHECKED-IN PROTOBUF GENCODE
+ # source: runtime.proto
+ # Protobuf Python Version: 6.31.1
+ """Generated protocol buffer code."""
+ from google.protobuf import descriptor as _descriptor
+ from google.protobuf import descriptor_pool as _descriptor_pool
+ from google.protobuf import runtime_version as _runtime_version
+ from google.protobuf import symbol_database as _symbol_database
+ from google.protobuf.internal import builder as _builder
+ _runtime_version.ValidateProtobufRuntimeVersion(
+     _runtime_version.Domain.PUBLIC,
+     6,
+     31,
+     1,
+     '',
+     'runtime.proto'
+ )
+ # @@protoc_insertion_point(imports)
+
+ _sym_db = _symbol_database.Default()
+
+
+
+
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rruntime.proto\x12\x07runtime\"\x1a\n\x07Request\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\"D\n\x08Response\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\r\n\x05\x65rror\x18\x02 \x01(\x08\x12\x0b\n\x03log\x18\x03 \x01(\x0c\x12\x0e\n\x06is_log\x18\x04 \x01(\x08\x32;\n\x07Runtime\x12\x30\n\x07\x45xecute\x12\x10.runtime.Request\x1a\x11.runtime.Response0\x01\x62\x06proto3')
+
+ _globals = globals()
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'runtime_pb2', _globals)
+ if not _descriptor._USE_C_DESCRIPTORS:
+     DESCRIPTOR._loaded_options = None
+     _globals['_REQUEST']._serialized_start=26
+     _globals['_REQUEST']._serialized_end=52
+     _globals['_RESPONSE']._serialized_start=54
+     _globals['_RESPONSE']._serialized_end=122
+     _globals['_RUNTIME']._serialized_start=124
+     _globals['_RUNTIME']._serialized_end=183
+ # @@protoc_insertion_point(module_scope)
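Since `Request.payload` and `Response.data` are plain `bytes`, the wire format says nothing about the serializer; the client and server in this diff use cloudpickle. A hedged sketch of the generated message classes, assuming `runtime_pb2` is importable (for example, generated into the current directory):

```python
# Hedged sketch: exercises the generated message classes directly.
# Assumes runtime_pb2 is importable from the current directory.
import cloudpickle
import runtime_pb2

req = runtime_pb2.Request(payload=cloudpickle.dumps((len, (["a", "b"],), {})))
wire = req.SerializeToString()                   # protobuf-encoded bytes
same = runtime_pb2.Request.FromString(wire)      # decode back into a Request
func, args, kwargs = cloudpickle.loads(same.payload)
print(func(*args, **kwargs))                     # -> 2

resp = runtime_pb2.Response(data=cloudpickle.dumps("ok"), error=False)
print(resp.error, cloudpickle.loads(resp.data))  # -> False ok
```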
@@ -0,0 +1,100 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+ import warnings
+
+ try:
+     from . import runtime_pb2 as runtime__pb2
+ except ImportError:
+     import runtime_pb2 as runtime__pb2
+
+ GRPC_GENERATED_VERSION = '1.76.0'
+ GRPC_VERSION = grpc.__version__
+ _version_not_supported = False
+
+ try:
+     from grpc._utilities import first_version_is_lower
+     _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+ except ImportError:
+     _version_not_supported = True
+
+ if _version_not_supported:
+     raise RuntimeError(
+         f'The grpc package installed is at version {GRPC_VERSION},'
+         + ' but the generated code in runtime_pb2_grpc.py depends on'
+         + f' grpcio>={GRPC_GENERATED_VERSION}.'
+         + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+         + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+     )
+
+
+ class RuntimeStub(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def __init__(self, channel):
+         """Constructor.
+
+         Args:
+             channel: A grpc.Channel.
+         """
+         self.Execute = channel.unary_stream(
+                 '/runtime.Runtime/Execute',
+                 request_serializer=runtime__pb2.Request.SerializeToString,
+                 response_deserializer=runtime__pb2.Response.FromString,
+                 _registered_method=True)
+
+
+ class RuntimeServicer(object):
+     """Missing associated documentation comment in .proto file."""
+
+     def Execute(self, request, context):
+         """Missing associated documentation comment in .proto file."""
+         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+         context.set_details('Method not implemented!')
+         raise NotImplementedError('Method not implemented!')
+
+
+ def add_RuntimeServicer_to_server(servicer, server):
+     rpc_method_handlers = {
+             'Execute': grpc.unary_stream_rpc_method_handler(
+                     servicer.Execute,
+                     request_deserializer=runtime__pb2.Request.FromString,
+                     response_serializer=runtime__pb2.Response.SerializeToString,
+             ),
+     }
+     generic_handler = grpc.method_handlers_generic_handler(
+             'runtime.Runtime', rpc_method_handlers)
+     server.add_generic_rpc_handlers((generic_handler,))
+     server.add_registered_method_handlers('runtime.Runtime', rpc_method_handlers)
+
+
+ # This class is part of an EXPERIMENTAL API.
+ class Runtime(object):
+     """Missing associated documentation comment in .proto file."""
+
+     @staticmethod
+     def Execute(request,
+             target,
+             options=(),
+             channel_credentials=None,
+             call_credentials=None,
+             insecure=False,
+             compression=None,
+             wait_for_ready=None,
+             timeout=None,
+             metadata=None):
+         return grpc.experimental.unary_stream(
+             request,
+             target,
+             '/runtime.Runtime/Execute',
+             runtime__pb2.Request.SerializeToString,
+             runtime__pb2.Response.FromString,
+             options,
+             channel_credentials,
+             insecure,
+             call_credentials,
+             compression,
+             wait_for_ready,
+             timeout,
+             metadata,
+             _registered_method=True)
@@ -0,0 +1,60 @@
+ import asyncio
+ import inspect
+ import traceback
+ import cloudpickle
+ import grpc
+ from concurrent import futures
+
+ try:
+     from . import runtime_pb2
+     from . import runtime_pb2_grpc
+ except ImportError:
+     import runtime_pb2
+     import runtime_pb2_grpc
+
+
+ class RuntimeServicer(runtime_pb2_grpc.RuntimeServicer):
+     def Execute(self, request, context):
+         try:
+             func, args, kwargs = cloudpickle.loads(request.payload)
+             result = func(*args, **kwargs)
+
+             # Handle coroutines
+             if inspect.iscoroutine(result):
+                 result = asyncio.run(result)
+
+             # Handle async generators
+             if inspect.isasyncgen(result):
+                 async def collect():
+                     items = []
+                     async for item in result:
+                         items.append(item)
+                     return items
+                 result = iter(asyncio.run(collect()))
+
+             # Stream results for generators, single response otherwise
+             if inspect.isgenerator(result):
+                 for chunk in result:
+                     yield runtime_pb2.Response(data=cloudpickle.dumps(chunk))
+             else:
+                 yield runtime_pb2.Response(data=cloudpickle.dumps(result))
+
+         except Exception as e:
+             error_msg = f"{type(e).__name__}: {e}\n{traceback.format_exc()}"
+             yield runtime_pb2.Response(data=cloudpickle.dumps(error_msg), error=True)
+
+
+ def serve(port=50051):
+     server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
+     runtime_pb2_grpc.add_RuntimeServicer_to_server(RuntimeServicer(), server)
+     server.add_insecure_port(f'[::]:{port}')
+     server.start()
+     server.wait_for_termination()
+
+
+ if __name__ == '__main__':
+     import argparse
+     parser = argparse.ArgumentParser()
+     parser.add_argument('--port', type=int, default=50051)
+     args = parser.parse_args()
+     serve(args.port)
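The servicer unpickles the `(func, args, kwargs)` tuple, runs the function (awaiting coroutines and draining async generators), and streams each result back; `serve()` wires it into a threaded gRPC server on the given port. A hedged end-to-end sketch follows, assuming the server and client modules above are importable as `server` and `client` (their actual module paths are not shown in this diff).

```python
# Hedged end-to-end sketch; module names `server` and `client` are assumptions.
import threading
import server                     # the serve()/RuntimeServicer module from this diff
from client import RuntimeClient  # the RuntimeClient module from this diff

# serve() blocks on wait_for_termination(), so run it in a daemon thread here.
threading.Thread(target=server.serve, kwargs={"port": 50051}, daemon=True).start()

def greet(name):
    return f"hello {name}"

with RuntimeClient(port=50051) as rc:
    if rc.wait_ready(timeout=5):
        print(rc.call(greet, "cycls"))  # -> "hello cycls"
```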