indexify 0.3.5__tar.gz → 0.3.6__tar.gz
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- {indexify-0.3.5 → indexify-0.3.6}/PKG-INFO +3 -4
- {indexify-0.3.5 → indexify-0.3.6}/pyproject.toml +4 -7
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/cli/cli.py +2 -2
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/downloader.py +2 -3
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/executor.py +2 -3
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/function_executor.py +3 -4
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/health_checker.py +2 -3
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/invocation_state_client.py +3 -4
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/single_task_runner.py +2 -4
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/task_input.py +1 -1
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/task_output.py +1 -1
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/task_fetcher.py +1 -2
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/task_reporter.py +3 -4
- indexify-0.3.5/src/indexify/function_executor/README.md +0 -18
- indexify-0.3.5/src/indexify/function_executor/handlers/run_function/function_inputs_loader.py +0 -53
- indexify-0.3.5/src/indexify/function_executor/handlers/run_function/handler.py +0 -126
- indexify-0.3.5/src/indexify/function_executor/handlers/run_function/request_validator.py +0 -26
- indexify-0.3.5/src/indexify/function_executor/handlers/run_function/response_helper.py +0 -96
- indexify-0.3.5/src/indexify/function_executor/info.py +0 -16
- indexify-0.3.5/src/indexify/function_executor/initialize_request_validator.py +0 -21
- indexify-0.3.5/src/indexify/function_executor/invocation_state/invocation_state_proxy_server.py +0 -170
- indexify-0.3.5/src/indexify/function_executor/invocation_state/proxied_invocation_state.py +0 -22
- indexify-0.3.5/src/indexify/function_executor/invocation_state/response_validator.py +0 -29
- indexify-0.3.5/src/indexify/function_executor/main.py +0 -51
- indexify-0.3.5/src/indexify/function_executor/proto/function_executor.proto +0 -148
- indexify-0.3.5/src/indexify/function_executor/proto/function_executor_pb2.py +0 -73
- indexify-0.3.5/src/indexify/function_executor/proto/function_executor_pb2.pyi +0 -247
- indexify-0.3.5/src/indexify/function_executor/proto/function_executor_pb2_grpc.py +0 -307
- indexify-0.3.5/src/indexify/function_executor/proto/message_validator.py +0 -38
- indexify-0.3.5/src/indexify/function_executor/proto/server_configuration.py +0 -19
- indexify-0.3.5/src/indexify/function_executor/server.py +0 -29
- indexify-0.3.5/src/indexify/function_executor/service.py +0 -145
- indexify-0.3.5/src/indexify/utils/README.md +0 -3
- indexify-0.3.5/src/indexify/utils/http_client.py +0 -88
- indexify-0.3.5/src/indexify/utils/logging.py +0 -66
- {indexify-0.3.5 → indexify-0.3.6}/README.md +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/README.md +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/api_objects.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/function_executor_state.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/server/client_configuration.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/server/function_executor_server.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/server/function_executor_server_factory.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/server/subprocess_function_executor_server.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/server/subprocess_function_executor_server_factory.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/runtime_probes.py +0 -0
- {indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/task_runner.py +0 -0
{indexify-0.3.5 → indexify-0.3.6}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: indexify
-Version: 0.3.5
+Version: 0.3.6
 Summary: Open Source Indexify components and helper tools
 Home-page: https://github.com/tensorlakeai/indexify
 License: Apache 2.0
@@ -14,8 +14,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Requires-Dist: grpcio (==1.
-Requires-Dist: grpcio-tools (==1.68.1)
+Requires-Dist: grpcio (==1.70.0)
 Requires-Dist: httpx-sse (>=0.4.0,<0.5.0)
 Requires-Dist: httpx[http2] (>=0.27,<0.28)
 Requires-Dist: nanoid (>=2.0.0,<3.0.0)
@@ -23,7 +22,7 @@ Requires-Dist: pydantic (==2.10.4)
 Requires-Dist: pyyaml (>=6,<7)
 Requires-Dist: rich (>=13.9.2,<14.0.0)
 Requires-Dist: structlog (>=24.4.0,<25.0.0)
-Requires-Dist: tensorlake (>=0.1.11)
+Requires-Dist: tensorlake (>=0.1.13)
 Requires-Dist: typer (>=0.12,<0.13)
 Project-URL: Repository, https://github.com/tensorlakeai/indexify
 Description-Content-Type: text/markdown
{indexify-0.3.5 → indexify-0.3.6}/pyproject.toml
RENAMED
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "indexify"
 # Incremented if any of the components provided in this packages are updated.
-version = "0.3.5"
+version = "0.3.6"
 description = "Open Source Indexify components and helper tools"
 authors = ["Tensorlake Inc. <support@tensorlake.ai>"]
 license = "Apache 2.0"
@@ -11,7 +11,6 @@ repository = "https://github.com/tensorlakeai/indexify"

 [tool.poetry.scripts]
 indexify-cli = "indexify.cli.cli:app"
-function-executor = "indexify.function_executor.main:main"

 [tool.poetry.dependencies]
 # Common dependencies
@@ -19,15 +18,13 @@ python = "^3.9"
 structlog = "^24.4.0"
 pyyaml = "^6"
 httpx = { version = "^0.27", extras = ["http2"] }
-grpcio = "1.
-
-# Function Executor only
-grpcio-tools = "1.68.1"
-tensorlake = ">=0.1.11"
+grpcio = "1.70.0"

 # Executor only
 pydantic = "2.10.4"
 httpx-sse = "^0.4.0"
+# Adds function-executor binary and utils lib.
+tensorlake = ">=0.1.13"

 # CLI only
 rich = "^13.9.2"
{indexify-0.3.5 → indexify-0.3.6}/src/indexify/cli/cli.py
RENAMED
@@ -1,4 +1,4 @@
-from indexify.utils.logging import (
+from tensorlake.utils.logging import (
     configure_development_mode_logging,
     configure_logging_early,
     configure_production_mode_logging,
@@ -292,7 +292,7 @@ def _parse_function_uris(uri_strs: Optional[List[str]]) -> Optional[List[Functio

 def _create_image(image: Image, python_sdk_path):
     console.print(
-        Text("Creating 
+        Text("Creating image for ", style="cyan"),
         Text(f"`{image._image_name}`", style="cyan bold"),
     )
     _build_image(image=image, python_sdk_path=python_sdk_path)
{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/downloader.py
RENAMED
@@ -4,9 +4,8 @@ from typing import Any, Optional

 import httpx
 import structlog
-
-from indexify.function_executor.proto.function_executor_pb2 import SerializedObject
-from indexify.utils.http_client import get_httpx_client
+from tensorlake.function_executor.proto.function_executor_pb2 import SerializedObject
+from tensorlake.utils.http_client import get_httpx_client

 from .api_objects import Task

{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/executor.py
RENAMED
@@ -4,9 +4,8 @@ from pathlib import Path
 from typing import Any, List, Optional

 import structlog
-
-from indexify.function_executor.proto.function_executor_pb2 import SerializedObject
-from indexify.utils.logging import suppress as suppress_logging
+from tensorlake.function_executor.proto.function_executor_pb2 import SerializedObject
+from tensorlake.utils.logging import suppress as suppress_logging

 from .api_objects import FunctionURI, Task
 from .downloader import Downloader
{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/function_executor.py
RENAMED
@@ -2,15 +2,14 @@ import asyncio
 from typing import Any, Optional

 import grpc
-
-from indexify.function_executor.proto.function_executor_pb2 import (
+from tensorlake.function_executor.proto.function_executor_pb2 import (
     InitializeRequest,
     InitializeResponse,
 )
-from indexify.function_executor.proto.function_executor_pb2_grpc import (
+from tensorlake.function_executor.proto.function_executor_pb2_grpc import (
     FunctionExecutorStub,
 )
-from indexify.utils.http_client import get_httpx_client
+from tensorlake.utils.http_client import get_httpx_client

 from .health_checker import HealthChecker
 from .invocation_state_client import InvocationStateClient
{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/health_checker.py
RENAMED
@@ -3,12 +3,11 @@ from collections.abc import Awaitable, Callable
 from typing import Any, Optional

 from grpc.aio import AioRpcError
-
-from indexify.function_executor.proto.function_executor_pb2 import (
+from tensorlake.function_executor.proto.function_executor_pb2 import (
     HealthCheckRequest,
     HealthCheckResponse,
 )
-from indexify.function_executor.proto.function_executor_pb2_grpc import (
+from tensorlake.function_executor.proto.function_executor_pb2_grpc import (
     FunctionExecutorStub,
 )

{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/invocation_state_client.py
RENAMED
@@ -3,18 +3,17 @@ from typing import Any, AsyncGenerator, Optional, Union

 import grpc
 import httpx
-
-from indexify.function_executor.proto.function_executor_pb2 import (
+from tensorlake.function_executor.proto.function_executor_pb2 import (
     GetInvocationStateResponse,
     InvocationStateRequest,
     InvocationStateResponse,
     SerializedObject,
     SetInvocationStateResponse,
 )
-from indexify.function_executor.proto.function_executor_pb2_grpc import (
+from tensorlake.function_executor.proto.function_executor_pb2_grpc import (
     FunctionExecutorStub,
 )
-from indexify.function_executor.proto.message_validator import MessageValidator
+from tensorlake.function_executor.proto.message_validator import MessageValidator

 from ..downloader import serialized_object_from_http_response

{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/function_executor/single_task_runner.py
RENAMED
@@ -2,14 +2,12 @@ from collections.abc import Awaitable, Callable
 from typing import Any, Optional

 import grpc
-from 
-
-from indexify.function_executor.proto.function_executor_pb2 import (
+from tensorlake.function_executor.proto.function_executor_pb2 import (
     InitializeRequest,
     RunTaskRequest,
     RunTaskResponse,
 )
-from indexify.function_executor.proto.function_executor_pb2_grpc import (
+from tensorlake.function_executor.proto.function_executor_pb2_grpc import (
     FunctionExecutorStub,
 )

{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/task_fetcher.py
RENAMED
@@ -4,8 +4,7 @@ from typing import AsyncGenerator, List, Optional

 import structlog
 from httpx_sse import aconnect_sse
-
-from indexify.utils.http_client import get_httpx_client
+from tensorlake.utils.http_client import get_httpx_client

 from .api_objects import ExecutorMetadata, FunctionURI, Task
 from .runtime_probes import ProbeInfo, RuntimeProbes
{indexify-0.3.5 → indexify-0.3.6}/src/indexify/executor/task_reporter.py
RENAMED
@@ -3,11 +3,10 @@ from typing import Any, List, Optional, Tuple

 import nanoid
 from httpx import Timeout
+from tensorlake.function_executor.proto.function_executor_pb2 import FunctionOutput
+from tensorlake.utils.http_client import get_httpx_client

-from indexify.function_executor.proto.function_executor_pb2 import FunctionOutput
-from indexify.utils.http_client import get_httpx_client
-
-from .api_objects import RouterOutput, Task, TaskResult
+from .api_objects import RouterOutput, TaskResult
 from .task_runner import TaskOutput


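Taken together, the renamed executor modules above make the same mechanical change: imports that previously resolved inside this package (indexify.function_executor.*, indexify.utils.*) now resolve inside the tensorlake dependency, which the updated pyproject.toml notes adds the function-executor binary and utils lib. A minimal before/after sketch of that import migration, using only module paths that appear in the hunks above (illustrative, not a complete file from either version):

    # After 0.3.6: protobuf stubs and the HTTP client helper come from tensorlake.
    from tensorlake.function_executor.proto.function_executor_pb2 import (
        InitializeRequest,
        RunTaskRequest,
        RunTaskResponse,
    )
    from tensorlake.function_executor.proto.function_executor_pb2_grpc import (
        FunctionExecutorStub,
    )
    from tensorlake.utils.http_client import get_httpx_client

    # Before 0.3.6 the same names were imported from modules bundled in indexify itself:
    # from indexify.function_executor.proto.function_executor_pb2 import RunTaskRequest
    # from indexify.utils.http_client import get_httpx_client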
indexify-0.3.5/src/indexify/function_executor/README.md
DELETED
@@ -1,18 +0,0 @@
-## Overview
-
-Function Executor is a process with an API that allows to load and run a customer Function in Indexify.
-Each function run is a task. The tasks can be executed concurrently. The API client controls
-the desired concurrency. Killing the process allows to free all the resources that a loaded customer
-functon is using. This is helpful because the SDK doesn't provide any callbacks to customer code to free
-resources it's using. Even if there was such callback customer code still might misbehave.
-
-## Deployment
-
-A Function Executor is created and destroyed by another component called Executor. It also calls the
-Function Executor APIs. The server is not expected to be deployed or managed manually by Indexify users
-as it's a low level component.
-
-## Threat model
-
-Customer code is assumed to be not trusted. Function Executor must not obtain any credentials that grant
-access to resources not owned by the customer who owns the function.
indexify-0.3.5/src/indexify/function_executor/handlers/run_function/function_inputs_loader.py
DELETED
@@ -1,53 +0,0 @@
-from typing import Optional
-
-from tensorlake.functions_sdk.data_objects import TensorlakeData
-from tensorlake.functions_sdk.object_serializer import get_serializer
-
-from ...proto.function_executor_pb2 import RunTaskRequest, SerializedObject
-
-
-class FunctionInputs:
-    def __init__(
-        self, input: TensorlakeData, init_value: Optional[TensorlakeData] = None
-    ):
-        self.input = input
-        self.init_value = init_value
-
-
-class FunctionInputsLoader:
-    def __init__(self, request: RunTaskRequest):
-        self._request = request
-
-    def load(self) -> FunctionInputs:
-        return FunctionInputs(
-            input=self._function_input(),
-            init_value=self._accumulator_input(),
-        )
-
-    def _function_input(self) -> TensorlakeData:
-        return _to_indexify_data(
-            self._request.graph_invocation_id, self._request.function_input
-        )
-
-    def _accumulator_input(self) -> Optional[TensorlakeData]:
-        return (
-            _to_indexify_data(
-                self._request.graph_invocation_id, self._request.function_init_value
-            )
-            if self._request.HasField("function_init_value")
-            else None
-        )
-
-
-def _to_indexify_data(
-    input_id: str, serialized_object: SerializedObject
-) -> TensorlakeData:
-    return TensorlakeData(
-        input_id=input_id,
-        payload=(
-            serialized_object.bytes
-            if serialized_object.HasField("bytes")
-            else serialized_object.string
-        ),
-        encoder=get_serializer(serialized_object.content_type).encoding_type,
-    )
indexify-0.3.5/src/indexify/function_executor/handlers/run_function/handler.py
DELETED
@@ -1,126 +0,0 @@
-import io
-import sys
-import traceback
-from contextlib import redirect_stderr, redirect_stdout
-from typing import Any
-
-from tensorlake.functions_sdk.functions import (
-    FunctionCallResult,
-    GraphInvocationContext,
-    RouterCallResult,
-    TensorlakeFunctionWrapper,
-    TensorlakeRouter,
-)
-from tensorlake.functions_sdk.invocation_state.invocation_state import InvocationState
-
-from ...proto.function_executor_pb2 import RunTaskRequest, RunTaskResponse
-from .function_inputs_loader import FunctionInputs, FunctionInputsLoader
-from .response_helper import ResponseHelper
-
-
-class Handler:
-    def __init__(
-        self,
-        request: RunTaskRequest,
-        graph_name: str,
-        graph_version: str,
-        function_name: str,
-        invocation_state: InvocationState,
-        function_wrapper: TensorlakeFunctionWrapper,
-        logger: Any,
-    ):
-        self._invocation_id: str = request.graph_invocation_id
-        self._graph_name: str = graph_name
-        self._graph_version: str = graph_version
-        self._function_name: str = function_name
-        self._invocation_state: InvocationState = invocation_state
-        self._logger = logger.bind(
-            graph_invocation_id=request.graph_invocation_id,
-            task_id=request.task_id,
-        )
-        self._function_wrapper = function_wrapper
-        self._input_loader = FunctionInputsLoader(request)
-        self._response_helper = ResponseHelper(task_id=request.task_id)
-        # TODO: use files for stdout, stderr capturing. This puts a natural and thus reasonable
-        # rate limit on the rate of writes and allows to not consume expensive memory for function logs.
-        self._func_stdout: io.StringIO = io.StringIO()
-        self._func_stderr: io.StringIO = io.StringIO()
-
-    def run(self) -> RunTaskResponse:
-        """Runs the task.
-
-        Raises an exception if our own code failed, customer function failure doesn't result in any exception.
-        Details of customer function failure are returned in the response.
-        """
-        self._logger.info("running function")
-        inputs: FunctionInputs = self._input_loader.load()
-        self._flush_logs()
-        return self._run_func_safe_and_captured(inputs)
-
-    def _run_func_safe_and_captured(self, inputs: FunctionInputs) -> RunTaskResponse:
-        """Runs the customer function while capturing what happened in it.
-
-        Function stdout and stderr are captured so they don't get into Function Executor process stdout
-        and stderr. Never throws an Exception. Caller can determine if the function succeeded
-        using the response.
-        """
-        try:
-            with redirect_stdout(self._func_stdout), redirect_stderr(self._func_stderr):
-                return self._run_func(inputs)
-        except Exception:
-            return self._response_helper.failure_response(
-                message=traceback.format_exc(),
-                stdout=self._func_stdout.getvalue(),
-                stderr=self._func_stderr.getvalue(),
-            )
-
-    def _run_func(self, inputs: FunctionInputs) -> RunTaskResponse:
-        ctx: GraphInvocationContext = GraphInvocationContext(
-            invocation_id=self._invocation_id,
-            graph_name=self._graph_name,
-            graph_version=self._graph_version,
-            invocation_state=self._invocation_state,
-        )
-        if _is_router(self._function_wrapper):
-            result: RouterCallResult = self._function_wrapper.invoke_router(
-                ctx, self._function_name, inputs.input
-            )
-            return self._response_helper.router_response(
-                result=result,
-                stdout=self._func_stdout.getvalue(),
-                stderr=self._func_stderr.getvalue(),
-            )
-        else:
-            result: FunctionCallResult = self._function_wrapper.invoke_fn_ser(
-                ctx, self._function_name, inputs.input, inputs.init_value
-            )
-            return self._response_helper.function_response(
-                result=result,
-                is_reducer=_function_is_reducer(self._function_wrapper),
-                stdout=self._func_stdout.getvalue(),
-                stderr=self._func_stderr.getvalue(),
-            )
-
-    def _flush_logs(self) -> None:
-        # Flush any logs buffered in memory before running the function with stdout, stderr capture.
-        # Otherwise our logs logged before this point will end up in the function's stdout.
-        # structlog.PrintLogger uses print function. This is why flushing with print works.
-        print("", flush=True)
-        sys.stdout.flush()
-        sys.stderr.flush()
-
-
-def _is_router(func_wrapper: TensorlakeFunctionWrapper) -> bool:
-    """Determines if the function is a router.
-
-    A function is a router if it is an instance of TensorlakeRouter or if it is an TensorlakeRouter class.
-    """
-    return str(
-        type(func_wrapper.indexify_function)
-    ) == "<class 'tensorlake.functions_sdk.functions.TensorlakeRouter'>" or isinstance(
-        func_wrapper.indexify_function, TensorlakeRouter
-    )
-
-
-def _function_is_reducer(func_wrapper: TensorlakeFunctionWrapper) -> bool:
-    return func_wrapper.indexify_function.accumulate is not None
indexify-0.3.5/src/indexify/function_executor/handlers/run_function/request_validator.py
DELETED
@@ -1,26 +0,0 @@
-from typing import Any
-
-from ...proto.function_executor_pb2 import RunTaskRequest
-from ...proto.message_validator import MessageValidator
-
-
-class RequestValidator:
-    def __init__(self, request: RunTaskRequest):
-        self._request = request
-        self._message_validator = MessageValidator(request)
-
-    def check(self):
-        """Validates the request.
-
-        Raises: ValueError: If the request is invalid.
-        """
-        (
-            self._message_validator.required_field("namespace")
-            .required_field("graph_name")
-            .required_field("graph_version")
-            .required_field("function_name")
-            .required_field("graph_invocation_id")
-            .required_field("task_id")
-            .required_serialized_object("function_input")
-            .optional_serialized_object("function_init_value")
-        )
indexify-0.3.5/src/indexify/function_executor/handlers/run_function/response_helper.py
DELETED
@@ -1,96 +0,0 @@
-from typing import List
-
-from tensorlake.functions_sdk.data_objects import TensorlakeData
-from tensorlake.functions_sdk.functions import FunctionCallResult, RouterCallResult
-from tensorlake.functions_sdk.object_serializer import get_serializer
-
-from ...proto.function_executor_pb2 import (
-    FunctionOutput,
-    RouterOutput,
-    RunTaskResponse,
-    SerializedObject,
-)
-
-
-class ResponseHelper:
-    """Helper class for generating RunFunctionResponse."""
-
-    def __init__(self, task_id: str):
-        self._task_id = task_id
-
-    def function_response(
-        self,
-        result: FunctionCallResult,
-        is_reducer: bool,
-        stdout: str = "",
-        stderr: str = "",
-    ) -> RunTaskResponse:
-        if result.traceback_msg is None:
-            return RunTaskResponse(
-                task_id=self._task_id,
-                function_output=self._to_function_output(result.ser_outputs),
-                router_output=None,
-                stdout=stdout,
-                stderr=stderr,
-                is_reducer=is_reducer,
-                success=True,
-            )
-        else:
-            return self.failure_response(
-                message=result.traceback_msg,
-                stdout=stdout,
-                stderr=stderr,
-            )
-
-    def router_response(
-        self,
-        result: RouterCallResult,
-        stdout: str = "",
-        stderr: str = "",
-    ) -> RunTaskResponse:
-        if result.traceback_msg is None:
-            return RunTaskResponse(
-                task_id=self._task_id,
-                function_output=None,
-                router_output=RouterOutput(edges=result.edges),
-                stdout=stdout,
-                stderr=stderr,
-                is_reducer=False,
-                success=True,
-            )
-        else:
-            return self.failure_response(
-                message=result.traceback_msg,
-                stdout=stdout,
-                stderr=stderr,
-            )
-
-    def failure_response(
-        self, message: str, stdout: str, stderr: str
-    ) -> RunTaskResponse:
-        stderr = "\n".join([stderr, message])
-        return RunTaskResponse(
-            task_id=self._task_id,
-            function_output=None,
-            router_output=None,
-            stdout=stdout,
-            stderr=stderr,
-            is_reducer=False,
-            success=False,
-        )
-
-    def _to_function_output(self, outputs: List[TensorlakeData]) -> FunctionOutput:
-        output = FunctionOutput(outputs=[])
-        for ix_data in outputs:
-            serialized_object: SerializedObject = SerializedObject(
-                content_type=get_serializer(ix_data.encoder).content_type,
-            )
-            if isinstance(ix_data.payload, bytes):
-                serialized_object.bytes = ix_data.payload
-            elif isinstance(ix_data.payload, str):
-                serialized_object.string = ix_data.payload
-            else:
-                raise ValueError(f"Unsupported payload type: {type(ix_data.payload)}")
-
-            output.outputs.append(serialized_object)
-        return output
indexify-0.3.5/src/indexify/function_executor/info.py
DELETED
@@ -1,16 +0,0 @@
-import importlib.metadata
-import sys
-from typing import Any, Dict
-
-
-def info_response_kv_args() -> Dict[str, Any]:
-    sdk_version = importlib.metadata.version("tensorlake")
-    python_version = (
-        f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
-    )
-    return {
-        "version": "0.1.0",
-        "sdk_version": sdk_version,
-        "sdk_language": "python",
-        "sdk_language_version": python_version,
-    }
indexify-0.3.5/src/indexify/function_executor/initialize_request_validator.py
DELETED
@@ -1,21 +0,0 @@
-from .proto.function_executor_pb2 import InitializeRequest
-from .proto.message_validator import MessageValidator
-
-
-class InitializeRequestValidator:
-    def __init__(self, request: InitializeRequest):
-        self._request = request
-        self._message_validator = MessageValidator(request)
-
-    def check(self):
-        """Validates the request.
-
-        Raises: ValueError: If the request is invalid.
-        """
-        (
-            self._message_validator.required_field("namespace")
-            .required_field("graph_name")
-            .required_field("graph_version")
-            .required_field("function_name")
-            .required_serialized_object("graph")
-        )