tetra-rp 0.6.0__py3-none-any.whl → 0.24.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97) hide show
  1. tetra_rp/__init__.py +109 -19
  2. tetra_rp/cli/commands/__init__.py +1 -0
  3. tetra_rp/cli/commands/apps.py +143 -0
  4. tetra_rp/cli/commands/build.py +1082 -0
  5. tetra_rp/cli/commands/build_utils/__init__.py +1 -0
  6. tetra_rp/cli/commands/build_utils/handler_generator.py +176 -0
  7. tetra_rp/cli/commands/build_utils/lb_handler_generator.py +309 -0
  8. tetra_rp/cli/commands/build_utils/manifest.py +430 -0
  9. tetra_rp/cli/commands/build_utils/mothership_handler_generator.py +75 -0
  10. tetra_rp/cli/commands/build_utils/scanner.py +596 -0
  11. tetra_rp/cli/commands/deploy.py +580 -0
  12. tetra_rp/cli/commands/init.py +123 -0
  13. tetra_rp/cli/commands/resource.py +108 -0
  14. tetra_rp/cli/commands/run.py +296 -0
  15. tetra_rp/cli/commands/test_mothership.py +458 -0
  16. tetra_rp/cli/commands/undeploy.py +533 -0
  17. tetra_rp/cli/main.py +97 -0
  18. tetra_rp/cli/utils/__init__.py +1 -0
  19. tetra_rp/cli/utils/app.py +15 -0
  20. tetra_rp/cli/utils/conda.py +127 -0
  21. tetra_rp/cli/utils/deployment.py +530 -0
  22. tetra_rp/cli/utils/ignore.py +143 -0
  23. tetra_rp/cli/utils/skeleton.py +184 -0
  24. tetra_rp/cli/utils/skeleton_template/.env.example +4 -0
  25. tetra_rp/cli/utils/skeleton_template/.flashignore +40 -0
  26. tetra_rp/cli/utils/skeleton_template/.gitignore +44 -0
  27. tetra_rp/cli/utils/skeleton_template/README.md +263 -0
  28. tetra_rp/cli/utils/skeleton_template/main.py +44 -0
  29. tetra_rp/cli/utils/skeleton_template/mothership.py +55 -0
  30. tetra_rp/cli/utils/skeleton_template/pyproject.toml +58 -0
  31. tetra_rp/cli/utils/skeleton_template/requirements.txt +1 -0
  32. tetra_rp/cli/utils/skeleton_template/workers/__init__.py +0 -0
  33. tetra_rp/cli/utils/skeleton_template/workers/cpu/__init__.py +19 -0
  34. tetra_rp/cli/utils/skeleton_template/workers/cpu/endpoint.py +36 -0
  35. tetra_rp/cli/utils/skeleton_template/workers/gpu/__init__.py +19 -0
  36. tetra_rp/cli/utils/skeleton_template/workers/gpu/endpoint.py +61 -0
  37. tetra_rp/client.py +136 -33
  38. tetra_rp/config.py +29 -0
  39. tetra_rp/core/api/runpod.py +591 -39
  40. tetra_rp/core/deployment.py +232 -0
  41. tetra_rp/core/discovery.py +425 -0
  42. tetra_rp/core/exceptions.py +50 -0
  43. tetra_rp/core/resources/__init__.py +27 -9
  44. tetra_rp/core/resources/app.py +738 -0
  45. tetra_rp/core/resources/base.py +139 -4
  46. tetra_rp/core/resources/constants.py +21 -0
  47. tetra_rp/core/resources/cpu.py +115 -13
  48. tetra_rp/core/resources/gpu.py +182 -16
  49. tetra_rp/core/resources/live_serverless.py +153 -16
  50. tetra_rp/core/resources/load_balancer_sls_resource.py +440 -0
  51. tetra_rp/core/resources/network_volume.py +126 -31
  52. tetra_rp/core/resources/resource_manager.py +436 -35
  53. tetra_rp/core/resources/serverless.py +537 -120
  54. tetra_rp/core/resources/serverless_cpu.py +201 -0
  55. tetra_rp/core/resources/template.py +1 -59
  56. tetra_rp/core/utils/constants.py +10 -0
  57. tetra_rp/core/utils/file_lock.py +260 -0
  58. tetra_rp/core/utils/http.py +67 -0
  59. tetra_rp/core/utils/lru_cache.py +75 -0
  60. tetra_rp/core/utils/singleton.py +36 -1
  61. tetra_rp/core/validation.py +44 -0
  62. tetra_rp/execute_class.py +301 -0
  63. tetra_rp/protos/remote_execution.py +98 -9
  64. tetra_rp/runtime/__init__.py +1 -0
  65. tetra_rp/runtime/circuit_breaker.py +274 -0
  66. tetra_rp/runtime/config.py +12 -0
  67. tetra_rp/runtime/exceptions.py +49 -0
  68. tetra_rp/runtime/generic_handler.py +206 -0
  69. tetra_rp/runtime/lb_handler.py +189 -0
  70. tetra_rp/runtime/load_balancer.py +160 -0
  71. tetra_rp/runtime/manifest_fetcher.py +192 -0
  72. tetra_rp/runtime/metrics.py +325 -0
  73. tetra_rp/runtime/models.py +73 -0
  74. tetra_rp/runtime/mothership_provisioner.py +512 -0
  75. tetra_rp/runtime/production_wrapper.py +266 -0
  76. tetra_rp/runtime/reliability_config.py +149 -0
  77. tetra_rp/runtime/retry_manager.py +118 -0
  78. tetra_rp/runtime/serialization.py +124 -0
  79. tetra_rp/runtime/service_registry.py +346 -0
  80. tetra_rp/runtime/state_manager_client.py +248 -0
  81. tetra_rp/stubs/live_serverless.py +35 -17
  82. tetra_rp/stubs/load_balancer_sls.py +357 -0
  83. tetra_rp/stubs/registry.py +145 -19
  84. {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/METADATA +398 -60
  85. tetra_rp-0.24.0.dist-info/RECORD +99 -0
  86. {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/WHEEL +1 -1
  87. tetra_rp-0.24.0.dist-info/entry_points.txt +2 -0
  88. tetra_rp/core/pool/cluster_manager.py +0 -177
  89. tetra_rp/core/pool/dataclass.py +0 -18
  90. tetra_rp/core/pool/ex.py +0 -38
  91. tetra_rp/core/pool/job.py +0 -22
  92. tetra_rp/core/pool/worker.py +0 -19
  93. tetra_rp/core/resources/utils.py +0 -50
  94. tetra_rp/core/utils/json.py +0 -33
  95. tetra_rp-0.6.0.dist-info/RECORD +0 -39
  96. /tetra_rp/{core/pool → cli}/__init__.py +0 -0
  97. {tetra_rp-0.6.0.dist-info → tetra_rp-0.24.0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,42 @@
1
+ import threading
2
+
3
+
1
4
class SingletonMixin:
    """Thread-safe singleton mixin class.

    Uses threading.Lock with a double-checked locking pattern so that only
    one instance is ever created per class, even under concurrent access.
    Instances are cached per concrete class in ``_instances``.
    """

    # Maps each concrete class to its single cached instance.
    _instances = {}
    # Guards instance creation; shared by all classes using the mixin.
    _lock = threading.Lock()

    def __new__(cls, *args, **kwargs):
        # Use double-checked locking pattern for performance: the first,
        # unlocked check keeps the hot path free of lock contention.
        if cls not in cls._instances:
            with cls._lock:
                # Check again inside the lock (double-checked locking)
                if cls not in cls._instances:
                    cls._instances[cls] = super().__new__(cls)
        return cls._instances[cls]

    def __reduce__(self):
        """Custom pickle support to handle the singleton pattern.

        Returns the class and arguments needed to reconstruct the instance,
        skipping the threading.Lock which can't be pickled in all contexts.

        NOTE(review): unpickling invokes ``cls()`` with no arguments, which
        re-runs ``__init__()`` on the cached instance — confirm subclass
        ``__init__`` methods tolerate argument-less re-initialization.
        """
        # For subclasses of SingletonMixin, return enough to reconstruct via
        # __new__, which will hand back the cached singleton instance.
        return (
            self.__class__,
            (),  # No args - __new__ will use the cached instance
            self.__getstate__() if hasattr(self, "__getstate__") else self.__dict__,
        )

    def __setstate__(self, state):
        """Restore object state from pickle.

        Bug fix: the previous implementation checked
        ``hasattr(self, "__setstate__")`` — always true, since this very
        method exists — and then called ``super().__setstate__(state)``,
        which raises AttributeError on Python < 3.11 where ``object`` has
        no ``__setstate__``. Updating ``__dict__`` directly matches the
        default pickle behavior on every supported version.
        """
        self.__dict__.update(state)
@@ -0,0 +1,44 @@
1
+ """Validation utilities for tetra_rp configuration.
2
+
3
+ Provides validation functions for required environment variables and configuration.
4
+ """
5
+
6
+ import os
7
+
8
+ from tetra_rp.core.exceptions import RunpodAPIKeyError
9
+
10
+
11
def validate_api_key() -> str:
    """Return the ``RUNPOD_API_KEY`` environment variable, validating presence.

    Returns:
        The API key string exactly as stored in the environment
        (no stripping is applied to the returned value).

    Raises:
        RunpodAPIKeyError: If the variable is unset, empty, or whitespace-only.
    """
    key = os.environ.get("RUNPOD_API_KEY")

    # Reject missing, empty, and whitespace-only values alike.
    if key is None or not key.strip():
        raise RunpodAPIKeyError()

    return key
26
+
27
+
28
def validate_api_key_with_context(operation: str) -> str:
    """Validate the API key, re-raising failures with operation context.

    Args:
        operation: Short description of the operation that needs the key.

    Returns:
        The API key value if present.

    Raises:
        RunpodAPIKeyError: If ``RUNPOD_API_KEY`` is not set; the message
            is prefixed with the operation description.
    """
    try:
        api_key = validate_api_key()
    except RunpodAPIKeyError as err:
        raise RunpodAPIKeyError(f"Cannot {operation}: {err}") from err
    return api_key
@@ -0,0 +1,301 @@
1
+ """
2
+ Class execution module for remote class instantiation and method calls.
3
+
4
+ This module provides functionality to create and execute remote class instances,
5
+ with automatic caching of class serialization data to improve performance and
6
+ prevent memory leaks through LRU eviction.
7
+ """
8
+
9
+ import hashlib
10
+ import inspect
11
+ import logging
12
+ import textwrap
13
+ import uuid
14
+ from typing import List, Optional, Type
15
+
16
+ import cloudpickle
17
+
18
+ from .core.resources import ResourceManager, ServerlessResource
19
+ from .core.utils.constants import HASH_TRUNCATE_LENGTH, UUID_FALLBACK_LENGTH
20
+ from .core.utils.lru_cache import LRUCache
21
+ from .protos.remote_execution import FunctionRequest
22
+ from .runtime.exceptions import SerializationError
23
+ from .runtime.serialization import serialize_args, serialize_kwargs
24
+ from .stubs import stub_resource
25
+
26
+ log = logging.getLogger(__name__)
27
+
28
+ # Global in-memory cache for serialized class data with LRU eviction
29
+ _SERIALIZED_CLASS_CACHE = LRUCache(max_size=1000)
30
+
31
+
32
def serialize_constructor_args(args, kwargs):
    """Serialize positional and keyword constructor arguments for caching.

    Returns a ``(serialized_args, serialized_kwargs)`` tuple produced by
    the runtime serialization helpers.
    """
    serialized_positional = serialize_args(args)
    serialized_keyword = serialize_kwargs(kwargs)
    return serialized_positional, serialized_keyword
35
+
36
+
37
def get_or_cache_class_data(
    cls: Type, args: tuple, kwargs: dict, cache_key: str
) -> str:
    """Return the extracted source of *cls*, caching it under *cache_key*.

    On a cache hit the stored source is returned directly. On a miss the
    source is extracted and cached together with the serialized constructor
    arguments; when those arguments cannot be serialized, a minimal entry
    with ``None`` placeholders is stored so later calls do not retry.
    """
    if cache_key in _SERIALIZED_CLASS_CACHE:
        # Cache hit - reuse previously extracted and serialized data.
        entry = _SERIALIZED_CLASS_CACHE.get(cache_key)
        log.debug(
            f"Retrieved cached class data for {cls.__name__} with key: {cache_key}"
        )
        return entry["class_code"]

    # Cache miss - extract the class source, then attempt to cache it
    # together with the serialized constructor arguments.
    clean_class_code = extract_class_code_simple(cls)

    try:
        serialized_args, serialized_kwargs = serialize_constructor_args(
            args, kwargs
        )
        _SERIALIZED_CLASS_CACHE.set(
            cache_key,
            {
                "class_code": clean_class_code,
                "constructor_args": serialized_args,
                "constructor_kwargs": serialized_kwargs,
            },
        )
        log.debug(f"Cached class data for {cls.__name__} with key: {cache_key}")
    except (TypeError, AttributeError, OSError, SerializationError) as e:
        log.warning(
            f"Could not serialize constructor arguments for {cls.__name__}: {e}"
        )
        log.warning(
            f"Skipping constructor argument caching for {cls.__name__} due to unserializable arguments"
        )
        # Store minimal cache entry to avoid repeated attempts
        _SERIALIZED_CLASS_CACHE.set(
            cache_key,
            {
                "class_code": clean_class_code,
                "constructor_args": None,  # Signal that args couldn't be cached
                "constructor_kwargs": None,
            },
        )

    return clean_class_code
88
+
89
+
90
def extract_class_code_simple(cls: Type) -> str:
    """Extract clean class source without decorators or leading indentation.

    Returns the class body starting at the ``class`` statement, dedented and
    validated via ``compile``. If source extraction fails (e.g. the class was
    defined interactively), falls back to a synthetic skeleton that preserves
    method names and, where possible, their signatures.
    """
    try:
        # Get source code
        source = inspect.getsource(cls)

        # Split into lines
        lines = source.split("\n")

        # Find the class definition line (starts with 'class' and contains ':')
        class_start_idx = -1
        for i, line in enumerate(lines):
            stripped = line.strip()
            if stripped.startswith("class ") and ":" in stripped:
                class_start_idx = i
                break

        if class_start_idx == -1:
            raise ValueError("Could not find class definition")

        # Take lines from class definition onwards; this drops anything
        # above the class statement, including decorators.
        class_lines = lines[class_start_idx:]

        # Remove empty lines at the end
        while class_lines and not class_lines[-1].strip():
            class_lines.pop()

        # Join back and dedent to remove any leading indentation
        class_code = "\n".join(class_lines)
        class_code = textwrap.dedent(class_code)

        # Validate the code by trying to compile it
        compile(class_code, "<string>", "exec")

        log.debug(f"Successfully extracted class code for {cls.__name__}")
        return class_code

    except Exception as e:
        log.warning(f"Could not extract class code for {cls.__name__}: {e}")
        log.warning("Falling back to basic class structure")

        # Enhanced fallback: try to preserve method signatures
        fallback_methods = []
        for name, method in inspect.getmembers(cls, predicate=inspect.isfunction):
            try:
                sig = inspect.signature(method)
                fallback_methods.append(f"    def {name}{sig}:")
                fallback_methods.append("        pass")
                fallback_methods.append("")
            except (TypeError, ValueError, OSError) as e:
                log.warning(f"Could not extract method signature for {name}: {e}")
                fallback_methods.append(f"    def {name}(self, *args, **kwargs):")
                fallback_methods.append("        pass")
                fallback_methods.append("")

        # Bug fix: the previous f-string template prefixed the first joined
        # method line with four extra spaces, mis-indenting the first `def`
        # relative to its `pass` body and producing invalid Python whenever
        # the class had any methods. Assemble the skeleton line-by-line so
        # every method line keeps its own 4-space indent.
        skeleton = [
            f"class {cls.__name__}:",
            "    def __init__(self, *args, **kwargs):",
            "        pass",
            "",
        ]
        fallback_code = "\n".join(skeleton + fallback_methods)

        return fallback_code
152
+
153
+
154
def get_class_cache_key(
    cls: Type, constructor_args: tuple, constructor_kwargs: dict
) -> str:
    """Generate a cache key for class serialization.

    The key combines the class name with truncated SHA-256 digests of the
    class source and of the cloudpickled constructor arguments, so the same
    class plus the same arguments always map to the same entry.

    Args:
        cls: The class type to generate a key for.
        constructor_args: Positional arguments passed to the constructor.
        constructor_kwargs: Keyword arguments passed to the constructor.

    Returns:
        A deterministic cache key string, or a UUID-based fallback if the
        constructor arguments cannot be serialized — the fallback disables
        caching benefits but keeps execution working.
    """
    try:
        # Digest of the class source identifies the code version.
        source_digest = hashlib.sha256(
            extract_class_code_simple(cls).encode()
        ).hexdigest()

        # Digest of the pickled constructor arguments identifies the config.
        ctor_blob = cloudpickle.dumps((constructor_args, constructor_kwargs))
        ctor_digest = hashlib.sha256(ctor_blob).hexdigest()

        cache_key = f"{cls.__name__}_{source_digest[:HASH_TRUNCATE_LENGTH]}_{ctor_digest[:HASH_TRUNCATE_LENGTH]}"

        log.debug(f"Generated cache key for {cls.__name__}: {cache_key}")
        return cache_key

    except (TypeError, AttributeError, OSError) as e:
        log.warning(f"Could not generate cache key for {cls.__name__}: {e}")
        # Fallback to basic key without caching benefits
        return f"{cls.__name__}_{uuid.uuid4().hex[:UUID_FALLBACK_LENGTH]}"
192
+
193
+
194
def create_remote_class(
    cls: Type,
    resource_config: ServerlessResource,
    dependencies: Optional[List[str]],
    system_dependencies: Optional[List[str]],
    accelerate_downloads: bool,
    extra: dict,
):
    """
    Create a remote class wrapper.

    Returns a ``RemoteClassWrapper`` class whose instances proxy method
    calls to a remote instance of ``cls`` running on the configured
    serverless resource. Attribute access on a wrapper instance yields an
    async proxy that serializes the call and executes it via the stub.

    Args:
        cls: The user class to execute remotely.
        resource_config: Serverless resource the class should run on.
        dependencies: Optional pip packages to install remotely.
        system_dependencies: Optional system packages to install remotely.
        accelerate_downloads: Enable download acceleration on the worker.
        extra: Extra keyword arguments forwarded to the stub factory.

    Raises:
        TypeError: If ``cls`` is not a class.
        ValueError: If ``cls`` has no ``__name__`` attribute.
    """
    # Validate inputs
    if not inspect.isclass(cls):
        raise TypeError(f"Expected a class, got {type(cls).__name__}")
    if not hasattr(cls, "__name__"):
        raise ValueError("Class must have a __name__ attribute")

    class RemoteClassWrapper:
        def __init__(self, *args, **kwargs):
            self._class_type = cls
            self._resource_config = resource_config
            self._dependencies = dependencies or []
            self._system_dependencies = system_dependencies or []
            self._accelerate_downloads = accelerate_downloads
            self._extra = extra
            self._constructor_args = args
            self._constructor_kwargs = kwargs
            self._instance_id = (
                f"{cls.__name__}_{uuid.uuid4().hex[:UUID_FALLBACK_LENGTH]}"
            )
            self._initialized = False

            # Generate cache key and get class code
            self._cache_key = get_class_cache_key(cls, args, kwargs)
            self._clean_class_code = get_or_cache_class_data(
                cls, args, kwargs, self._cache_key
            )

            log.debug(f"Created remote class wrapper for {cls.__name__}")

        async def _ensure_initialized(self):
            """Ensure the resource is deployed and the stub is created."""
            if self._initialized:
                return

            # Get remote resource
            resource_manager = ResourceManager()
            remote_resource = await resource_manager.get_or_deploy_resource(
                self._resource_config
            )
            self._stub = stub_resource(remote_resource, **self._extra)

            # The remote class instance itself is created lazily by the
            # first method call (see create_new_instance below).
            self._initialized = True

        def __getattr__(self, name):
            """Dynamically create method proxies for all class methods."""
            if name.startswith("_"):
                raise AttributeError(
                    f"'{self.__class__.__name__}' object has no attribute '{name}'"
                )

            async def method_proxy(*args, **kwargs):
                # Bug fix: decide whether this is the first remote call
                # BEFORE initialization. _ensure_initialized() sets
                # self._stub, so the previous check
                # `not hasattr(self, "_stub")` performed afterwards always
                # reported an existing instance and never asked the worker
                # to create one.
                is_first_call = not self._initialized

                await self._ensure_initialized()

                # Get cached data
                # NOTE(review): if the LRU cache evicted this key, get()
                # may return None here — confirm eviction cannot race a
                # live wrapper.
                cached_data = _SERIALIZED_CLASS_CACHE.get(self._cache_key)

                # Serialize method arguments (these change per call, so no caching)
                method_args = serialize_args(args)
                method_kwargs = serialize_kwargs(kwargs)

                # Handle constructor args - use cached if available, else serialize fresh
                if cached_data["constructor_args"] is not None:
                    # Use cached constructor args
                    constructor_args = cached_data["constructor_args"]
                    constructor_kwargs = cached_data["constructor_kwargs"]
                else:
                    # Constructor args couldn't be cached due to serialization issues
                    # Serialize them fresh for each method call (fallback behavior)
                    constructor_args = serialize_args(self._constructor_args)
                    constructor_kwargs = serialize_kwargs(self._constructor_kwargs)

                request = FunctionRequest(
                    execution_type="class",
                    class_name=self._class_type.__name__,
                    class_code=cached_data["class_code"],
                    method_name=name,
                    args=method_args,
                    kwargs=method_kwargs,
                    constructor_args=constructor_args,
                    constructor_kwargs=constructor_kwargs,
                    dependencies=self._dependencies,
                    system_dependencies=self._system_dependencies,
                    accelerate_downloads=self._accelerate_downloads,
                    instance_id=self._instance_id,
                    create_new_instance=is_first_call,  # Create new only on first call
                )

                # Execute via stub
                return await self._stub.execute_class_method(request)  # type: ignore

            return method_proxy

    return RemoteClassWrapper
@@ -1,36 +1,118 @@
1
- # TODO: generate using betterproto
1
+ """Remote execution protocol definitions using Pydantic models.
2
+
3
+ This module defines the request/response protocol for remote function and class execution.
4
+ The models align with the protobuf schema for communication with remote workers.
5
+ """
2
6
 
3
7
  from abc import ABC, abstractmethod
4
- from typing import List, Dict, Optional
5
- from pydantic import BaseModel, Field
8
+ from typing import Any, Dict, List, Optional
9
+
10
+ from pydantic import BaseModel, Field, model_validator
6
11
 
7
12
 
8
13
class FunctionRequest(BaseModel):
    """Request model for remote function or class execution.

    Supports both function-based execution and class instantiation with method calls.
    All serialized data (args, kwargs, etc.) are base64-encoded cloudpickle strings.
    The field layout mirrors the protobuf schema used by remote workers.
    """

    # MADE OPTIONAL - can be None for class-only execution
    function_name: Optional[str] = Field(
        default=None,
        description="Name of the function to execute",
    )
    function_code: Optional[str] = Field(
        default=None,
        description="Source code of the function to execute",
    )
    args: List[str] = Field(
        default_factory=list,
        description="List of base64-encoded cloudpickle-serialized arguments",
    )
    kwargs: Dict[str, str] = Field(
        default_factory=dict,
        description="Dictionary of base64-encoded cloudpickle-serialized keyword arguments",
    )
    dependencies: Optional[List[str]] = Field(
        default=None,
        description="Optional list of pip packages to install before executing the function",
    )
    system_dependencies: Optional[List[str]] = Field(
        default=None,
        description="Optional list of system dependencies to install before executing the function",
    )

    # NEW FIELDS FOR CLASS SUPPORT
    execution_type: str = Field(
        default="function", description="Type of execution: 'function' or 'class'"
    )
    class_name: Optional[str] = Field(
        default=None,
        description="Name of the class to instantiate (for class execution)",
    )
    class_code: Optional[str] = Field(
        default=None,
        description="Source code of the class to instantiate (for class execution)",
    )
    constructor_args: List[str] = Field(
        default_factory=list,
        description="List of base64-encoded cloudpickle-serialized constructor arguments",
    )
    constructor_kwargs: Dict[str, str] = Field(
        default_factory=dict,
        description="Dictionary of base64-encoded cloudpickle-serialized constructor keyword arguments",
    )
    method_name: str = Field(
        default="__call__",
        description="Name of the method to call on the class instance",
    )
    instance_id: Optional[str] = Field(
        default=None,
        description="Unique identifier for the class instance (for persistence)",
    )
    create_new_instance: bool = Field(
        default=True,
        description="Whether to create a new instance or reuse existing one",
    )

    # Download acceleration fields
    accelerate_downloads: bool = Field(
        default=True,
        description="Enable download acceleration for dependencies and models",
    )

    @model_validator(mode="after")
    def validate_execution_requirements(self) -> "FunctionRequest":
        """Validate that required fields are provided based on execution_type.

        Note: function_code and class_code are optional to support Flash deployments
        where code is pre-deployed and not sent with the request.

        NOTE(review): execution_type values other than "function" or "class"
        fall through both branches and pass validation unchecked — confirm
        whether unknown types should be rejected here.
        """
        if self.execution_type == "function":
            if self.function_name is None:
                raise ValueError(
                    'function_name is required when execution_type is "function"'
                )
            # function_code is optional - absent for Flash deployments

        elif self.execution_type == "class":
            if self.class_name is None:
                raise ValueError(
                    'class_name is required when execution_type is "class"'
                )
            # class_code is optional - absent for Flash deployments

        return self
32
108
 
33
109
  class FunctionResponse(BaseModel):
110
+ """Response model for remote function or class execution results.
111
+
112
+ Contains execution results, error information, and metadata about class instances
113
+ when applicable. The result field contains base64-encoded cloudpickle data.
114
+ """
115
+
34
116
  success: bool = Field(
35
117
  description="Indicates if the function execution was successful",
36
118
  )
@@ -46,6 +128,13 @@ class FunctionResponse(BaseModel):
46
128
  default=None,
47
129
  description="Captured standard output from the function execution",
48
130
  )
131
+ instance_id: Optional[str] = Field(
132
+ default=None, description="ID of the class instance that was used/created"
133
+ )
134
+ instance_info: Optional[Dict[str, Any]] = Field(
135
+ default=None,
136
+ description="Metadata about the class instance (creation time, call count, etc.)",
137
+ )
49
138
 
50
139
 
51
140
  class RemoteExecutorStub(ABC):
@@ -0,0 +1 @@
1
+ """Flash runtime utilities for production execution."""