indexify 0.2.44__py3-none-any.whl → 0.2.45__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- indexify/__init__.py +2 -0
- indexify/cli.py +41 -80
- indexify/executor/api_objects.py +2 -0
- indexify/executor/downloader.py +23 -25
- indexify/executor/executor.py +29 -35
- indexify/executor/function_executor/function_executor.py +120 -19
- indexify/executor/function_executor/function_executor_state.py +75 -0
- indexify/executor/function_executor/invocation_state_client.py +232 -0
- indexify/executor/function_executor/server/function_executor_server.py +24 -0
- indexify/executor/function_executor/server/function_executor_server_factory.py +43 -0
- indexify/executor/function_executor/server/subprocess_function_executor_server.py +25 -0
- indexify/executor/function_executor/{process_function_executor_factory.py → server/subprocess_function_executor_server_factory.py} +21 -21
- indexify/executor/function_executor/single_task_runner.py +160 -0
- indexify/executor/function_executor/task_input.py +23 -0
- indexify/executor/function_executor/task_output.py +36 -0
- indexify/executor/task_reporter.py +10 -17
- indexify/executor/task_runner.py +104 -0
- indexify/function_executor/function_executor_service.py +22 -7
- indexify/function_executor/handlers/run_function/handler.py +13 -12
- indexify/function_executor/invocation_state/invocation_state_proxy_server.py +170 -0
- indexify/function_executor/invocation_state/proxied_invocation_state.py +24 -0
- indexify/function_executor/invocation_state/response_validator.py +29 -0
- indexify/function_executor/proto/function_executor.proto +47 -0
- indexify/function_executor/proto/function_executor_pb2.py +23 -11
- indexify/function_executor/proto/function_executor_pb2.pyi +70 -0
- indexify/function_executor/proto/function_executor_pb2_grpc.py +50 -0
- indexify/functions_sdk/graph.py +3 -3
- indexify/functions_sdk/image.py +142 -9
- indexify/functions_sdk/indexify_functions.py +45 -79
- indexify/functions_sdk/invocation_state/invocation_state.py +22 -0
- indexify/functions_sdk/invocation_state/local_invocation_state.py +30 -0
- indexify/http_client.py +0 -17
- {indexify-0.2.44.dist-info → indexify-0.2.45.dist-info}/METADATA +1 -1
- indexify-0.2.45.dist-info/RECORD +60 -0
- indexify/executor/function_executor/function_executor_factory.py +0 -26
- indexify/executor/function_executor/function_executor_map.py +0 -91
- indexify/executor/function_executor/process_function_executor.py +0 -64
- indexify/executor/function_worker.py +0 -253
- indexify-0.2.44.dist-info/RECORD +0 -50
- {indexify-0.2.44.dist-info → indexify-0.2.45.dist-info}/LICENSE.txt +0 -0
- {indexify-0.2.44.dist-info → indexify-0.2.45.dist-info}/WHEEL +0 -0
- {indexify-0.2.44.dist-info → indexify-0.2.45.dist-info}/entry_points.txt +0 -0
indexify/__init__.py
CHANGED
@@ -3,6 +3,7 @@ from .functions_sdk.graph import Graph
 from .functions_sdk.image import Image
 from .functions_sdk.indexify_functions import (
     IndexifyFunction,
+    IndexifyRouter,
     get_ctx,
     indexify_function,
     indexify_router,
@@ -23,6 +24,7 @@ __all__ = [
     "indexify_function",
     "get_ctx",
     "IndexifyFunction",
+    "IndexifyRouter",
     "indexify_router",
     "DEFAULT_SERVICE_URL",
     "IndexifyClient",
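With this change IndexifyRouter joins the existing top-level exports. A minimal sanity check (only the imported names are taken from the diff; the print is illustrative):

    # Both classes are now importable straight from the package root.
    from indexify import IndexifyFunction, IndexifyRouter

    print(IndexifyFunction, IndexifyRouter)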
indexify/cli.py
CHANGED
@@ -2,7 +2,6 @@ from .logging import configure_logging_early, configure_production_logging
 
 configure_logging_early()
 
-
 import asyncio
 import os
 import shutil
@@ -23,16 +22,14 @@ from rich.text import Text
 from rich.theme import Theme
 
 from indexify.executor.executor import Executor
+from indexify.executor.function_executor.server.subprocess_function_executor_server_factory import (
+    SubprocessFunctionExecutorServerFactory,
+)
 from indexify.function_executor.function_executor_service import (
     FunctionExecutorService,
 )
 from indexify.function_executor.server import Server as FunctionExecutorServer
-from indexify.functions_sdk.image import (
-    LOCAL_PYTHON_VERSION,
-    GetDefaultPythonImage,
-    Image,
-)
-from indexify.http_client import IndexifyClient
+from indexify.functions_sdk.image import Build, GetDefaultPythonImage, Image
 
 logger = structlog.get_logger(module=__name__)
 
@@ -48,9 +45,6 @@ custom_theme = Theme(
 console = Console(theme=custom_theme)
 
 app = typer.Typer(pretty_exceptions_enable=False, no_args_is_help=True)
-config_path_option: Optional[str] = typer.Option(
-    None, help="Path to the TLS configuration file"
-)
 
 
 @app.command(
@@ -161,6 +155,32 @@ def build_image(
             _create_image(obj, python_sdk_path)
 
 
+@app.command(help="Build platform images for function names")
+def build_platform_image(
+    workflow_file_path: Annotated[str, typer.Argument()],
+    image_names: Optional[List[str]] = None,
+    build_service="https://api.tensorlake.ai/images/v1",
+):
+
+    globals_dict = {}
+
+    # Add the folder in the workflow file path to the current Python path
+    folder_path = os.path.dirname(workflow_file_path)
+    if folder_path not in sys.path:
+        sys.path.append(folder_path)
+
+    try:
+        exec(open(workflow_file_path).read(), globals_dict)
+    except FileNotFoundError as e:
+        raise Exception(
+            f"Could not find workflow file to execute at: " f"`{workflow_file_path}`"
+        )
+    for _, obj in globals_dict.items():
+        if type(obj) and isinstance(obj, Image):
+            if image_names is None or obj._image_name in image_names:
+                _create_platform_image(obj, build_service)
+
+
 @app.command(help="Build default image for indexify")
 def build_default_image(
     python_version: Optional[str] = typer.Option(
@@ -186,7 +206,9 @@ def executor(
     dev: Annotated[
         bool, typer.Option("--dev", "-d", help="Run the executor in development mode")
     ] = False,
-    config_path: Optional[str] = config_path_option,
+    config_path: Optional[str] = typer.Option(
+        None, help="Path to the TLS configuration file"
+    ),
     executor_cache: Optional[str] = typer.Option(
         "~/.indexify/executor_cache", help="Path to the executor cache directory"
     ),
@@ -228,9 +250,10 @@ def executor(
         code_path=executor_cache,
         name_alias=name_alias,
         image_hash=image_hash,
-        development_mode=dev,
+        function_executor_server_factory=SubprocessFunctionExecutorServerFactory(
+            development_mode=dev
+        ),
     )
-
    try:
        asyncio.get_event_loop().run_until_complete(executor.run())
    except asyncio.CancelledError:
@@ -242,11 +265,9 @@ def function_executor(
     function_executor_server_address: str = typer.Option(
         help="Function Executor server address"
     ),
-    indexify_server_address: str = typer.Option(help="Indexify server address"),
     dev: Annotated[
         bool, typer.Option("--dev", "-d", help="Run the executor in development mode")
     ] = False,
-    config_path: Optional[str] = config_path_option,
 ):
     if not dev:
         configure_production_logging()
@@ -254,15 +275,11 @@ def function_executor(
     logger.info(
         "starting function executor server",
         function_executor_server_address=function_executor_server_address,
-        indexify_server_address=indexify_server_address,
-        config_path=config_path,
     )
 
     FunctionExecutorServer(
         server_address=function_executor_server_address,
-        service=FunctionExecutorService(
-            indexify_server_address=indexify_server_address, config_path=config_path
-        ),
+        service=FunctionExecutorService(),
     ).run()
 
 
@@ -275,63 +292,7 @@ def _create_image(image: Image, python_sdk_path):
 
 
 def _build_image(image: Image, python_sdk_path: Optional[str] = None):
-
-
-
-    try:
-        client = docker.from_env()
-        client.ping()
-    except Exception as e:
-        console.print(
-            Text("Unable to connect with docker: ", style="red bold"),
-            Text(f"{e}", style="red"),
-        )
-        exit(-1)
-
-    docker_contents = [
-        f"FROM {image._base_image}",
-        "RUN mkdir -p ~/.indexify",
-        "RUN touch ~/.indexify/image_name",
-        f"RUN echo {image._image_name} > ~/.indexify/image_name",
-        f"RUN echo {image.hash()} > ~/.indexify/image_hash",
-        "WORKDIR /app",
-    ]
-
-    docker_contents.extend(["RUN " + i for i in image._run_strs])
-
-    if python_sdk_path is not None:
-        logging.info(
-            f"Building image {image._image_name} with local version of the SDK"
-        )
-        if not os.path.exists(python_sdk_path):
-            print(f"error: {python_sdk_path} does not exist")
-            os.exit(1)
-        docker_contents.append(f"COPY {python_sdk_path} /app/python-sdk")
-        docker_contents.append("RUN (cd /app/python-sdk && pip install .)")
-    else:
-        docker_contents.append(f"RUN pip install indexify=={image._sdk_version}")
-
-    docker_file = "\n".join(docker_contents)
-
-    import docker.api.build
-
-    docker.api.build.process_dockerfile = lambda dockerfile, path: (
-        "Dockerfile",
-        dockerfile,
-    )
-
-    console.print("Creating image using Dockerfile contents:", style="cyan bold")
-    print(f"{docker_file}")
-
-    client = docker.from_env()
-    image_name = f"{image._image_name}:{image._tag}"
-    (_image, generator) = client.images.build(
-        path=".",
-        dockerfile=docker_file,
-        tag=image_name,
-        rm=True,
-    )
-    for result in generator:
-        print(result)
-
-    print(f"built image: {image_name}")
+    built_image, output = image.build(python_sdk_path=python_sdk_path)
+    for line in output:
+        print(line)
+    print(f"built image: {built_image.tags[0]}")
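The Docker-specific logic removed from _build_image now lives in Image.build (see the indexify/functions_sdk/image.py entry in the file list). A hedged sketch of calling it; the builder method names below are assumed from the private attributes (_image_name, _base_image, _run_strs) visible in the removed code, not confirmed by this diff:

    # Sketch only: build a workflow image through the new Image.build() API.
    from indexify import Image

    image = (
        Image()
        .name("my-workflow-image")          # assumed builder methods
        .base_image("python:3.11-slim-bookworm")
        .run("pip install httpx")
    )

    # Per the diff above, build() returns a docker-py image object plus the build log stream.
    built_image, output = image.build()     # or image.build(python_sdk_path="./python-sdk")
    for line in output:
        print(line)
    print(f"built image: {built_image.tags[0]}")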
indexify/executor/api_objects.py
CHANGED
@@ -12,6 +12,8 @@ class Task(BaseModel):
     input_key: str
     reducer_output_id: Optional[str] = None
     graph_version: int
+    image_uri: Optional[str] = None
+    "image_uri defines the URI of the image of this task. Optional since some executors do not require it."
 
 
 class ExecutorMetadata(BaseModel):
indexify/executor/downloader.py
CHANGED
@@ -13,12 +13,6 @@ from ..common_util import get_httpx_client
 from .api_objects import Task
 
 
-class DownloadedInputs:
-    def __init__(self, input: SerializedObject, init_value: Optional[SerializedObject]):
-        self.input = input
-        self.init_value = init_value
-
-
 class Downloader:
     def __init__(
         self, code_path: str, base_url: str, config_path: Optional[str] = None
@@ -78,22 +72,22 @@ class Downloader:
         # are atomic operations at filesystem level.
         os.replace(tmp_path, path)
 
-    async def download_inputs(self, task: Task) -> DownloadedInputs:
+    async def download_input(self, task: Task) -> SerializedObject:
         logger = self._task_logger(task)
 
-        input: SerializedObject
         first_function_in_graph = task.invocation_id == task.input_key.split("|")[-1]
         if first_function_in_graph:
             # The first function in Graph gets its input from graph invocation payload.
-            input = await self._fetch_graph_invocation_payload(task, logger)
+            return await self._fetch_graph_invocation_payload(task, logger)
         else:
-            input = await self._fetch_function_input(task, logger)
+            return await self._fetch_function_input(task, logger)
 
-        init_value: Optional[SerializedObject] = None
-        if task.reducer_output_id is not None:
-            init_value = await self._fetch_function_init_value(task, logger)
+    async def download_init_value(self, task: Task) -> Optional[SerializedObject]:
+        if task.reducer_output_id is None:
+            return None
 
-        return DownloadedInputs(input=input, init_value=init_value)
+        logger = self._task_logger(task)
+        return await self._fetch_function_init_value(task, logger)
 
     def _task_logger(self, task: Task) -> Any:
         return structlog.get_logger(
@@ -142,7 +136,7 @@ class Downloader:
         self, url: str, resource_description: str, logger: Any
     ) -> SerializedObject:
         logger.info(f"fetching {resource_description}", url=url)
-        response = await self._client.get(url)
+        response: httpx.Response = await self._client.get(url)
         try:
             response.raise_for_status()
         except httpx.HTTPStatusError as e:
@@ -153,13 +147,17 @@ class Downloader:
             )
             raise
 
-        # We're hardcoding the content type currently used by Python SDK. It might change in the future.
-        # There's no other way for now to determine if the response is a bytes or string.
-        if response.headers["content-type"] == "application/octet-stream":
-            return SerializedObject(
-                bytes=response.content, content_type=response.headers["content-type"]
-            )
-        else:
-            return SerializedObject(
-                string=response.text, content_type=response.headers["content-type"]
-            )
+        return serialized_object_from_http_response(response)
+
+
+def serialized_object_from_http_response(response: httpx.Response) -> SerializedObject:
+    # We're hardcoding the content type currently used by Python SDK. It might change in the future.
+    # There's no other way for now to determine if the response is a bytes or string.
+    if response.headers["content-type"] == "application/octet-stream":
+        return SerializedObject(
+            bytes=response.content, content_type=response.headers["content-type"]
+        )
+    else:
+        return SerializedObject(
+            string=response.text, content_type=response.headers["content-type"]
+        )
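The DownloadedInputs wrapper is gone: callers now fetch the function input and the optional reducer init value through two separate awaits. A sketch of the consuming pattern (mirroring the executor.py change below; downloader and task are assumed to be an initialized Downloader and a Task):

    # Sketch only: how the split Downloader API is consumed after this change.
    async def fetch_task_inputs(downloader, task):
        input = await downloader.download_input(task)
        # None unless the task is a reducer (task.reducer_output_id is set).
        init_value = await downloader.download_init_value(task)
        return input, init_value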
indexify/executor/executor.py
CHANGED
@@ -10,17 +10,13 @@ from indexify.function_executor.proto.function_executor_pb2 import (
 )
 
 from .api_objects import Task
-from .downloader import DownloadedInputs, Downloader
-from .function_executor.process_function_executor_factory import (
-    ProcessFunctionExecutorFactory,
-)
-from .function_worker import (
-    FunctionWorker,
-    FunctionWorkerInput,
-    FunctionWorkerOutput,
+from .downloader import Downloader
+from .function_executor.server.function_executor_server_factory import (
+    FunctionExecutorServerFactory,
 )
 from .task_fetcher import TaskFetcher
 from .task_reporter import TaskReporter
+from .task_runner import TaskInput, TaskOutput, TaskRunner
 
 
 class Executor:
@@ -28,8 +24,8 @@ class Executor:
         self,
         executor_id: str,
         code_path: Path,
+        function_executor_server_factory: FunctionExecutorServerFactory,
         server_addr: str = "localhost:8900",
-        development_mode: bool = False,
         config_path: Optional[str] = None,
         name_alias: Optional[str] = None,
         image_hash: Optional[str] = None,
@@ -42,16 +38,14 @@ class Executor:
             self._logger.info("running the extractor with TLS enabled")
             protocol = "https"
 
-        self._function_worker = FunctionWorker(
-            function_executor_factory=ProcessFunctionExecutorFactory(
-                indexify_server_address=server_addr,
-                development_mode=development_mode,
-                config_path=config_path,
-            )
-        )
         self._server_addr = server_addr
         self._base_url = f"{protocol}://{self._server_addr}"
         self._code_path = code_path
+        self._task_runnner = TaskRunner(
+            function_executor_server_factory=function_executor_server_factory,
+            base_url=self._base_url,
+            config_path=config_path,
+        )
         self._downloader = Downloader(
             code_path=code_path, base_url=self._base_url, config_path=config_path
         )
@@ -92,39 +86,39 @@ class Executor:
 
         Doesn't raise any Exceptions. All errors are reported to the server."""
         logger = self._task_logger(task)
-        output: Optional[FunctionWorkerOutput] = None
+        output: Optional[TaskOutput] = None
 
         try:
             graph: SerializedObject = await self._downloader.download_graph(task)
-            input: DownloadedInputs = await self._downloader.download_inputs(task)
-
-
+            input: SerializedObject = await self._downloader.download_input(task)
+            init_value: Optional[SerializedObject] = (
+                await self._downloader.download_init_value(task)
+            )
+            logger.info("task_execution_started")
+            output: TaskOutput = await self._task_runnner.run(
+                TaskInput(
                    task=task,
                    graph=graph,
-
-
+                    input=input,
+                    init_value=init_value,
+                ),
+                logger=logger,
            )
            logger.info("task_execution_finished", success=output.success)
        except Exception as e:
-
+            output = TaskOutput.internal_error(task)
+            logger.error("task_execution_failed", exc_info=e)
 
-        await self._report_task_outcome(task=task, output=output, logger=logger)
+        await self._report_task_outcome(output=output, logger=logger)
 
-    async def _report_task_outcome(
-        self, task: Task, output: Optional[FunctionWorkerOutput], logger: Any
-    ) -> None:
-        """Reports the task with the given output to the server.
-
-        None output means that the task execution didn't finish due to an internal error.
-        Doesn't raise any exceptions."""
+    async def _report_task_outcome(self, output: TaskOutput, logger: Any) -> None:
+        """Reports the task with the given output to the server."""
         reporting_retries: int = 0
 
         while True:
             logger = logger.bind(retries=reporting_retries)
             try:
-                await self._task_reporter.report(
-                    task=task, output=output, logger=logger
-                )
+                await self._task_reporter.report(output=output, logger=logger)
                 break
             except Exception as e:
                 logger.error(
@@ -137,7 +131,7 @@ class Executor:
     async def _shutdown(self, loop):
         self._logger.info("shutting_down")
         self._should_run = False
-        await self._function_worker.shutdown()
+        await self._task_runnner.shutdown()
        for task in asyncio.all_tasks(loop):
            task.cancel()
 
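Putting the cli.py and executor.py changes together, wiring up an Executor now looks roughly like this (a sketch; every argument value below is a placeholder, only the parameter names come from the diff):

    import asyncio
    from pathlib import Path

    from indexify.executor.executor import Executor
    from indexify.executor.function_executor.server.subprocess_function_executor_server_factory import (
        SubprocessFunctionExecutorServerFactory,
    )

    executor = Executor(
        executor_id="executor-1",  # placeholder id
        code_path=Path("~/.indexify/executor_cache").expanduser(),
        function_executor_server_factory=SubprocessFunctionExecutorServerFactory(
            development_mode=True
        ),
        server_addr="localhost:8900",
    )
    # asyncio.get_event_loop().run_until_complete(executor.run())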
indexify/executor/function_executor/function_executor.py
CHANGED
@@ -1,32 +1,133 @@
+import asyncio
 from typing import Any, Optional
 
 import grpc
 
-
-
-
-
+from indexify.common_util import get_httpx_client
+from indexify.function_executor.proto.function_executor_pb2 import (
+    InitializeRequest,
+    InitializeResponse,
+)
+from indexify.function_executor.proto.function_executor_pb2_grpc import (
+    FunctionExecutorStub,
+)
+
+from .invocation_state_client import InvocationStateClient
+from .server.function_executor_server import (
+    FUNCTION_EXECUTOR_SERVER_READY_TIMEOUT_SEC,
+    FunctionExecutorServer,
+)
+from .server.function_executor_server_factory import (
+    FunctionExecutorServerConfiguration,
+    FunctionExecutorServerFactory,
+)
 
 
 class FunctionExecutor:
-    """
+    """Executor side class supporting a running FunctionExecutorServer.
+
+    FunctionExecutor primary responsibility is creation and initialization
+    of all resources associated with a particular Function Executor Server
+    including the Server itself. FunctionExecutor owns all these resources
+    and provides other Executor components with access to them.
 
-
-
+    Addition of any business logic besides resource management is discouraged.
+    Please add such logic to other classes managed by this class.
     """
 
-
-
+    def __init__(self, server_factory: FunctionExecutorServerFactory, logger: Any):
+        self._server_factory: FunctionExecutorServerFactory = server_factory
+        self._logger = logger.bind(module=__name__)
+        self._server: Optional[FunctionExecutorServer] = None
+        self._channel: Optional[grpc.aio.Channel] = None
+        self._invocation_state_client: Optional[InvocationStateClient] = None
+        self._initialized = False
+
+    async def initialize(
+        self,
+        config: FunctionExecutorServerConfiguration,
+        initialize_request: InitializeRequest,
+        base_url: str,
+        config_path: Optional[str],
+    ):
+        """Creates and initializes a FunctionExecutorServer and all resources associated with it."""
+        try:
+            self._server = await self._server_factory.create(
+                config=config, logger=self._logger
+            )
+            self._channel = await self._server.create_channel(self._logger)
+            await _channel_ready(self._channel)
+
+            stub: FunctionExecutorStub = FunctionExecutorStub(self._channel)
+            await _initialize_server(stub, initialize_request)
+
+            self._invocation_state_client = InvocationStateClient(
+                stub=stub,
+                base_url=base_url,
+                http_client=get_httpx_client(config_path=config_path, make_async=True),
+                graph=initialize_request.graph_name,
+                namespace=initialize_request.namespace,
+                logger=self._logger,
+            )
+            await self._invocation_state_client.start()
+
+            self._initialized = True
+        except Exception:
+            await self.destroy()
+            raise
+
+    def channel(self) -> grpc.aio.Channel:
+        self._check_initialized()
+        return self._channel
+
+    def invocation_state_client(self) -> InvocationStateClient:
+        self._check_initialized()
+        return self._invocation_state_client
+
+    async def destroy(self):
+        """Destroys all resources owned by this FunctionExecutor.
+
+        Never raises any exceptions but logs them."""
+        try:
+            if self._invocation_state_client is not None:
+                await self._invocation_state_client.destroy()
+                self._invocation_state_client = None
+        except Exception as e:
+            self._logger.error(
+                "failed to destroy FunctionExecutor invocation state client", exc_info=e
+            )
+
+        try:
+            if self._channel is not None:
+                await self._channel.close()
+                self._channel = None
+        except Exception as e:
+            self._logger.error(
+                "failed to close FunctionExecutorServer channel", exc_info=e
+            )
+
+        try:
+            if self._server is not None:
+                await self._server_factory.destroy(self._server, self._logger)
+                self._server = None
+        except Exception as e:
+            self._logger.error("failed to destroy FunctionExecutorServer", exc_info=e)
+
+    def _check_initialized(self):
+        if not self._initialized:
+            raise RuntimeError("FunctionExecutor is not initialized")
+
 
-
-
-
-
-
+async def _channel_ready(channel: grpc.aio.Channel):
+    await asyncio.wait_for(
+        channel.channel_ready(),
+        timeout=FUNCTION_EXECUTOR_SERVER_READY_TIMEOUT_SEC,
+    )
 
-    def state(self) -> Optional[Any]:
-        """Returns optional state object.
 
-
-
-
+async def _initialize_server(
+    stub: FunctionExecutorStub, initialize_request: InitializeRequest
+):
+    initialize_response: InitializeResponse = await stub.initialize(initialize_request)
+    if not initialize_response.success:
+        raise Exception("initialize RPC failed at function executor server")
indexify/executor/function_executor/function_executor_state.py
ADDED
@@ -0,0 +1,75 @@
+import asyncio
+from typing import Optional
+
+from .function_executor import FunctionExecutor
+
+
+class FunctionExecutorState:
+    """State of a Function Executor with a particular ID.
+
+    The Function Executor might not exist, i.e. not yet created or destroyed.
+    This object represents all such states. Any state modification must be done
+    under the lock.
+    """
+
+    def __init__(self, function_id_with_version: str, function_id_without_version: str):
+        self.function_id_with_version: str = function_id_with_version
+        self.function_id_without_version: str = function_id_without_version
+        self.function_executor: Optional[FunctionExecutor] = None
+        self.running_tasks: int = 0
+        self.lock: asyncio.Lock = asyncio.Lock()
+        self.running_tasks_change_notifier: asyncio.Condition = asyncio.Condition(
+            lock=self.lock
+        )
+
+    def increment_running_tasks(self) -> None:
+        """Increments the number of running tasks.
+
+        The caller must hold the lock.
+        """
+        self.check_locked()
+        self.running_tasks += 1
+        self.running_tasks_change_notifier.notify_all()
+
+    def decrement_running_tasks(self) -> None:
+        """Decrements the number of running tasks.
+
+        The caller must hold the lock.
+        """
+        self.check_locked()
+        self.running_tasks -= 1
+        self.running_tasks_change_notifier.notify_all()
+
+    async def wait_running_tasks_less(self, value: int) -> None:
+        """Waits until the number of running tasks is less than the supplied value.
+
+        The caller must hold the lock.
+        """
+        self.check_locked()
+        while self.running_tasks >= value:
+            await self.running_tasks_change_notifier.wait()
+
+    async def destroy_function_executor(self) -> None:
+        """Destroys the Function Executor if it exists.
+
+        The caller must hold the lock."""
+        self.check_locked()
+        if self.function_executor is not None:
+            await self.function_executor.destroy()
+            self.function_executor = None
+
+    async def destroy_function_executor_not_locked(self) -> None:
+        """Destroys the Function Executor if it exists.
+
+        The caller doesn't need to hold the lock but this call
+        might make the state inconsistent."""
+        if self.function_executor is not None:
+            # Atomically hide the destroyed Function Executor from other asyncio tasks.
+            ref = self.function_executor
+            self.function_executor = None
+            await ref.destroy()
+
+    def check_locked(self) -> None:
+        """Raises an exception if the lock is not held."""
+        if not self.lock.locked():
+            raise RuntimeError("The FunctionExecutorState lock must be held.")