tetra-rp 0.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
1
class SingletonMixin:
    """Mixin that turns each subclass into a singleton.

    The first instantiation of a given class creates and caches the
    instance; every later call returns that same cached object. Each
    subclass gets its own entry (the cache is keyed by the class itself).

    NOTE(review): ``__init__`` still runs on every call — callers that pass
    different constructor arguments will re-initialize the shared instance.
    """

    # Class-level cache shared by all subclasses, keyed by class.
    _instances = {}

    def __new__(cls, *args, **kwargs):
        instance = cls._instances.get(cls)
        if instance is None:
            instance = super().__new__(cls)
            cls._instances[cls] = instance
        return instance
tetra_rp/logger.py ADDED
@@ -0,0 +1,34 @@
1
+ import logging
2
+ import os
3
+ import sys
4
+ from typing import Union, Optional
5
+
6
+
7
def setup_logging(
    level: Union[int, str] = logging.INFO, stream=sys.stdout, fmt: Optional[str] = None
):
    """
    Sets up the root logger with a stream handler and basic formatting.
    Does nothing if handlers are already configured.

    Args:
        level: Log level as an int (e.g. ``logging.DEBUG``) or a
            case-insensitive name (e.g. ``"debug"``). Unknown names fall
            back to ``logging.INFO``.
        stream: Stream the handler writes to (default: ``sys.stdout``).
        fmt: Optional explicit log format. When omitted, a verbose format
            (with logger name and file:line) is used at DEBUG level and a
            terse one otherwise.

    The ``LOG_LEVEL`` environment variable, when set to a valid level name,
    overrides the configured level even if handlers already exist.
    """
    if isinstance(level, str):
        # Unknown names resolve to INFO rather than raising.
        level = getattr(logging, level.upper(), logging.INFO)

    if fmt is None:
        if level == logging.DEBUG:
            fmt = "%(asctime)s | %(levelname)-5s | %(name)s | %(filename)s:%(lineno)d | %(message)s"
        else:
            # Default format for INFO level and above
            fmt = "%(asctime)s | %(levelname)-5s | %(message)s"

    root_logger = logging.getLogger()
    if not root_logger.hasHandlers():
        handler = logging.StreamHandler(stream)
        handler.setFormatter(logging.Formatter(fmt))
        root_logger.setLevel(level)
        root_logger.addHandler(handler)

    # Optionally allow log level override via env var. Resolve the name
    # ourselves: Logger.setLevel() raises ValueError on unknown level
    # names, so an invalid LOG_LEVEL would otherwise crash the caller.
    env_level = os.environ.get("LOG_LEVEL")
    if env_level:
        resolved = getattr(logging, env_level.upper(), None)
        if isinstance(resolved, int):
            root_logger.setLevel(resolved)
File without changes
@@ -0,0 +1,57 @@
1
+ # TODO: generate using betterproto
2
+
3
+ from abc import ABC, abstractmethod
4
+ from typing import List, Dict, Optional
5
+ from pydantic import BaseModel, Field
6
+
7
+
8
class FunctionRequest(BaseModel):
    """Wire-format request for executing a function on a remote resource.

    Arguments travel as base64-encoded, cloudpickle-serialized blobs; the
    remote side is expected to decode them before invoking the function.
    """

    # Name the remote runtime binds the transmitted code to.
    function_name: str = Field(
        description="Name of the function to execute",
    )
    # Full (decorator-stripped) source of the function to run remotely.
    function_code: str = Field(
        description="Source code of the function to execute",
    )
    # Positional arguments, each base64(cloudpickle(value)).
    args: List = Field(
        default_factory=list,
        description="List of base64-encoded cloudpickle-serialized arguments",
    )
    # Keyword arguments: plain-string keys, base64(cloudpickle(value)) values.
    kwargs: Dict = Field(
        default_factory=dict,
        description="Dictionary of base64-encoded cloudpickle-serialized keyword arguments",
    )
    # Optional pip package specs installed remotely before execution.
    dependencies: Optional[List] = Field(
        default=None,
        description="Optional list of pip packages to install before executing the function",
    )
    # Optional OS-level packages installed remotely before execution.
    system_dependencies: Optional[List] = Field(
        default=None,
        description="Optional list of system dependencies to install before executing the function",
    )
31
+
32
+
33
class FunctionResponse(BaseModel):
    """Wire-format result of a remote function execution.

    Exactly one of ``result`` or ``error`` is expected to be meaningful,
    depending on ``success``; ``stdout`` may accompany either outcome.
    """

    # True when the remote call completed without raising.
    success: bool = Field(
        description="Indicates if the function execution was successful",
    )
    # base64(cloudpickle(return_value)) on success; None otherwise.
    result: Optional[str] = Field(
        default=None,
        description="Base64-encoded cloudpickle-serialized result of the function",
    )
    # Human-readable failure description (may include a remote traceback).
    error: Optional[str] = Field(
        default=None,
        description="Error message if the function execution failed",
    )
    # Text captured from the remote process's stdout, if any.
    stdout: Optional[str] = Field(
        default=None,
        description="Captured standard output from the function execution",
    )
49
+
50
+
51
class RemoteExecutorStub(ABC):
    """Abstract base class for remote execution."""

    @abstractmethod
    async def ExecuteFunction(self, request: FunctionRequest) -> FunctionResponse:
        """Execute a function on the remote resource."""
        # Defensive raise in case a subclass calls super() without overriding;
        # @abstractmethod alone only blocks instantiation, not this call path.
        raise NotImplementedError("Subclasses should implement this method.")
@@ -0,0 +1,5 @@
1
+ from .registry import stub_resource
2
+
3
+ __all__ = [
4
+ "stub_resource",
5
+ ]
@@ -0,0 +1,133 @@
1
+ import ast
2
+ import base64
3
+ import inspect
4
+ import textwrap
5
+ import hashlib
6
+ import traceback
7
+ import cloudpickle
8
+ import logging
9
+ from ..core.resources import LiveServerless
10
+ from ..protos.remote_execution import (
11
+ FunctionRequest,
12
+ FunctionResponse,
13
+ RemoteExecutorStub,
14
+ )
15
+
16
+ log = logging.getLogger(__name__)
17
+
18
+
19
+ # global in memory cache, TODO: use a more robust cache in future
20
+ _SERIALIZED_FUNCTION_CACHE = {}
21
+
22
+
23
def get_function_source(func):
    """Extract the function source code without the decorator.

    Returns:
        tuple[str, str]: ``(function_source, source_hash)`` — the dedented
        source starting at the ``def`` (or ``async def``) line, and its
        SHA-256 hex digest (used as a cache key).

    Raises:
        ValueError: If the function definition cannot be located in the
            retrieved source.
        OSError, TypeError: Propagated from ``inspect.getsource`` when the
            source is unavailable (e.g. functions defined in a REPL).
    """
    # Dedent before parsing: inspect.getsource() preserves the original
    # indentation, and ast.parse() rejects indented top-level code for
    # functions defined inside classes or other functions.
    source = textwrap.dedent(inspect.getsource(func))

    # Parse the source code
    module = ast.parse(source)

    # Find the function definition node. Include AsyncFunctionDef so that
    # async functions are supported (ast.FunctionDef alone skips them).
    function_def = None
    for node in ast.walk(module):
        if (
            isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef))
            and node.name == func.__name__
        ):
            function_def = node
            break

    if not function_def:
        raise ValueError(f"Could not find function definition for {func.__name__}")

    # The def node's lineno points at the def line itself, so slicing from
    # there drops any decorator lines above it.
    lineno = function_def.lineno - 1  # Line numbers are 1-based

    # Split into lines and extract just the function part
    lines = source.split("\n")
    function_lines = lines[lineno:]

    # Dedent to remove any extra indentation
    function_source = textwrap.dedent("\n".join(function_lines))

    # Return the function hash for cache key
    source_hash = hashlib.sha256(function_source.encode("utf-8")).hexdigest()

    return function_source, source_hash
55
+
56
+
57
class LiveServerlessStub(RemoteExecutorStub):
    """Adapter class to make Runpod endpoints look like gRPC stubs."""

    def __init__(self, server: LiveServerless):
        self.server = server

    def prepare_request(self, func, dependencies, system_dependencies, *args, **kwargs):
        """Build a FunctionRequest for ``func``, cloudpickling its arguments."""
        source, src_hash = get_function_source(func)

        payload = {
            "function_name": func.__name__,
            "dependencies": dependencies,
            "system_dependencies": system_dependencies,
        }

        # Reuse the cached source when this exact function body was seen
        # before; otherwise cache it now (single lookup via setdefault).
        payload["function_code"] = _SERIALIZED_FUNCTION_CACHE.setdefault(
            src_hash, source
        )

        # Positional and keyword arguments travel as base64-encoded
        # cloudpickle blobs; empty collections are simply omitted.
        if args:
            payload["args"] = [
                base64.b64encode(cloudpickle.dumps(value)).decode("utf-8")
                for value in args
            ]
        if kwargs:
            payload["kwargs"] = {
                name: base64.b64encode(cloudpickle.dumps(value)).decode("utf-8")
                for name, value in kwargs.items()
            }

        return FunctionRequest(**payload)

    def handle_response(self, response: FunctionResponse):
        """Decode a FunctionResponse, relaying any remote stdout to the log."""
        # A response claiming neither success nor an error is malformed.
        if not response.success and not response.error:
            raise ValueError("Invalid response from server")

        for line in (response.stdout or "").splitlines():
            log.info(f"Remote | {line}")

        if not response.success:
            raise Exception(f"Remote execution failed: {response.error}")
        if response.result is None:
            raise ValueError("Response result is None")
        return cloudpickle.loads(base64.b64decode(response.result))

    async def ExecuteFunction(
        self, request: FunctionRequest, sync: bool = False
    ) -> FunctionResponse:
        """Run the request on the endpoint and normalize the reply."""
        try:
            # Convert the gRPC request to Runpod format
            payload = request.model_dump(exclude_none=True)

            runner = self.server.run_sync if sync else self.server.run
            job = await runner(payload)

            if job.error:
                return FunctionResponse(
                    success=False,
                    error=job.error,
                    stdout=job.output.get("stdout", ""),
                )

            return FunctionResponse(**job.output)

        except Exception as e:
            # Surface local failures in the same envelope as remote ones.
            error_traceback = traceback.format_exc()
            return FunctionResponse(
                success=False,
                error=f"{str(e)}\n{error_traceback}",
            )
@@ -0,0 +1,85 @@
1
+ import logging
2
+ from functools import singledispatch
3
+ from .live_serverless import LiveServerlessStub
4
+ from .serverless import ServerlessEndpointStub
5
+ from ..core.resources import (
6
+ CpuServerlessEndpoint,
7
+ LiveServerless,
8
+ ServerlessEndpoint,
9
+ )
10
+
11
+
12
+ log = logging.getLogger(__name__)
13
+
14
+
15
@singledispatch
def stub_resource(resource, **extra):
    """Return an async executor stub for ``resource``.

    This generic implementation handles unregistered resource types: the
    returned coroutine function reports an error dict instead of executing.
    """

    async def _unsupported(*args, **kwargs):
        return {"error": f"Cannot stub {resource.__class__.__name__}."}

    return _unsupported
21
+
22
+
23
@stub_resource.register(LiveServerless)
def _(resource, **extra):
    """LiveServerless handler: build an executor that ships code + args remotely."""

    async def stubbed_resource(
        func, dependencies, system_dependencies, *args, **kwargs
    ) -> dict:
        # cleanup: when the function is called with no args
        if args == (None,):
            args = []

        stub = LiveServerlessStub(resource)
        request = stub.prepare_request(
            func, dependencies, system_dependencies, *args, **kwargs
        )
        return stub.handle_response(await stub.ExecuteFunction(request))

    return stubbed_resource
40
+
41
+
42
def _make_endpoint_stub(resource, type_name, **extra):
    """Build the shared async executor used by both serverless endpoint types.

    The two registrations below were byte-for-byte duplicates apart from the
    type name in the warning; this factory removes the duplication.

    Args:
        resource: The endpoint resource to execute against.
        type_name: Resource type name used in the dependencies warning.
        **extra: ``sync=True`` selects blocking execution on the endpoint.
    """

    async def stubbed_resource(
        func, dependencies, system_dependencies, *args, **kwargs
    ) -> dict:
        # cleanup: when the function is called with no args
        if args == (None,):
            args = []

        # Plain serverless endpoints cannot install dependencies remotely.
        if dependencies or system_dependencies:
            log.warning(
                f"Dependencies are not supported for {type_name}. "
                "They will be ignored."
            )

        stub = ServerlessEndpointStub(resource)
        payload = stub.prepare_payload(func, *args, **kwargs)
        response = await stub.execute(payload, sync=extra.get("sync", False))
        return stub.handle_response(response)

    return stubbed_resource


@stub_resource.register(ServerlessEndpoint)
def _(resource, **extra):
    """ServerlessEndpoint handler: delegate to the shared endpoint stub."""
    return _make_endpoint_stub(resource, "ServerlessEndpoint", **extra)


@stub_resource.register(CpuServerlessEndpoint)
def _(resource, **extra):
    """CpuServerlessEndpoint handler: delegate to the shared endpoint stub."""
    return _make_endpoint_stub(resource, "CpuServerlessEndpoint", **extra)
@@ -0,0 +1,30 @@
1
+ from ..core.resources import ServerlessEndpoint, JobOutput
2
+
3
+
4
class ServerlessEndpointStub:
    """Adapter class to make Runpod endpoints requests."""

    def __init__(self, server: ServerlessEndpoint):
        self.server = server

    def prepare_payload(self, func, *args, **kwargs) -> dict:
        # The wrapped function runs locally and its return value becomes
        # the payload sent to the endpoint.
        return func(*args, **kwargs)

    async def execute(self, payload: dict, sync: bool = False) -> JobOutput:
        """
        Executes a serverless endpoint request with the payload.
        Returns a JobOutput object; ``sync`` selects blocking execution.
        """
        runner = self.server.run_sync if sync else self.server.run
        return await runner(payload)

    def handle_response(self, response: JobOutput):
        """Unwrap a JobOutput: return its output, or raise on error/empty."""
        if response.output:
            return response.output

        if response.error:
            raise Exception(f"Remote execution failed: {response.error}")

        # Neither output nor error — the server reply is malformed.
        raise ValueError("Invalid response from server")