indexify 0.2.41__tar.gz → 0.2.43__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. {indexify-0.2.41 → indexify-0.2.43}/PKG-INFO +2 -2
  2. {indexify-0.2.41 → indexify-0.2.43}/indexify/cli.py +3 -3
  3. indexify-0.2.43/indexify/executor/executor.py +155 -0
  4. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/function_worker.py +2 -4
  5. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/task_fetcher.py +1 -2
  6. indexify-0.2.43/indexify/executor/task_reporter.py +215 -0
  7. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/proto/message_validator.py +6 -6
  8. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/graph.py +0 -2
  9. {indexify-0.2.41 → indexify-0.2.43}/pyproject.toml +2 -2
  10. indexify-0.2.41/indexify/executor/agent.py +0 -262
  11. indexify-0.2.41/indexify/executor/executor_tasks.py +0 -58
  12. indexify-0.2.41/indexify/executor/task_reporter.py +0 -155
  13. indexify-0.2.41/indexify/executor/task_store.py +0 -132
  14. indexify-0.2.41/indexify/functions_sdk/local_cache.py +0 -46
  15. {indexify-0.2.41 → indexify-0.2.43}/LICENSE.txt +0 -0
  16. {indexify-0.2.41 → indexify-0.2.43}/README.md +0 -0
  17. {indexify-0.2.41 → indexify-0.2.43}/indexify/__init__.py +0 -0
  18. {indexify-0.2.41 → indexify-0.2.43}/indexify/common_util.py +0 -0
  19. {indexify-0.2.41 → indexify-0.2.43}/indexify/data_loaders/__init__.py +0 -0
  20. {indexify-0.2.41 → indexify-0.2.43}/indexify/data_loaders/local_directory_loader.py +0 -0
  21. {indexify-0.2.41 → indexify-0.2.43}/indexify/data_loaders/url_loader.py +0 -0
  22. {indexify-0.2.41 → indexify-0.2.43}/indexify/error.py +0 -0
  23. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/api_objects.py +0 -0
  24. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/downloader.py +0 -0
  25. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/function_executor/function_executor.py +0 -0
  26. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/function_executor/function_executor_factory.py +0 -0
  27. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/function_executor/function_executor_map.py +0 -0
  28. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/function_executor/process_function_executor.py +0 -0
  29. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/function_executor/process_function_executor_factory.py +0 -0
  30. {indexify-0.2.41 → indexify-0.2.43}/indexify/executor/runtime_probes.py +0 -0
  31. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/function_executor_service.py +0 -0
  32. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/handlers/run_function/function_inputs_loader.py +0 -0
  33. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/handlers/run_function/handler.py +0 -0
  34. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/handlers/run_function/request_validator.py +0 -0
  35. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/handlers/run_function/response_helper.py +0 -0
  36. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/initialize_request_validator.py +0 -0
  37. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/proto/configuration.py +0 -0
  38. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/proto/function_executor.proto +0 -0
  39. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/proto/function_executor_pb2.py +0 -0
  40. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/proto/function_executor_pb2.pyi +0 -0
  41. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/proto/function_executor_pb2_grpc.py +0 -0
  42. {indexify-0.2.41 → indexify-0.2.43}/indexify/function_executor/server.py +0 -0
  43. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/data_objects.py +0 -0
  44. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/graph_definition.py +0 -0
  45. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/graph_validation.py +0 -0
  46. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/image.py +0 -0
  47. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/indexify_functions.py +0 -0
  48. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/object_serializer.py +0 -0
  49. {indexify-0.2.41 → indexify-0.2.43}/indexify/functions_sdk/pipeline.py +0 -0
  50. {indexify-0.2.41 → indexify-0.2.43}/indexify/http_client.py +0 -0
  51. {indexify-0.2.41 → indexify-0.2.43}/indexify/logging.py +0 -0
  52. {indexify-0.2.41 → indexify-0.2.43}/indexify/remote_graph.py +0 -0
  53. {indexify-0.2.41 → indexify-0.2.43}/indexify/remote_pipeline.py +0 -0
  54. {indexify-0.2.41 → indexify-0.2.43}/indexify/settings.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: indexify
3
- Version: 0.2.41
3
+ Version: 0.2.43
4
4
  Summary: Python Client for Indexify
5
5
  Home-page: https://github.com/tensorlakeai/indexify
6
6
  License: Apache 2.0
@@ -25,7 +25,7 @@ Requires-Dist: pydantic (==2.10.2)
25
25
  Requires-Dist: pyyaml (>=6,<7)
26
26
  Requires-Dist: rich (>=13.9.2,<14.0.0)
27
27
  Requires-Dist: structlog (>=24.4.0,<25.0.0)
28
- Requires-Dist: typer (>=0.13.0,<0.14.0)
28
+ Requires-Dist: typer (>=0.12,<0.13)
29
29
  Project-URL: Repository, https://github.com/tensorlakeai/indexify
30
30
  Description-Content-Type: text/markdown
31
31
 
@@ -22,7 +22,7 @@ from rich.panel import Panel
22
22
  from rich.text import Text
23
23
  from rich.theme import Theme
24
24
 
25
- from indexify.executor.agent import ExtractorAgent
25
+ from indexify.executor.executor import Executor
26
26
  from indexify.function_executor.function_executor_service import (
27
27
  FunctionExecutorService,
28
28
  )
@@ -221,7 +221,7 @@ def executor(
221
221
  shutil.rmtree(executor_cache)
222
222
  Path(executor_cache).mkdir(parents=True, exist_ok=True)
223
223
 
224
- agent = ExtractorAgent(
224
+ executor = Executor(
225
225
  id,
226
226
  server_addr=server_addr,
227
227
  config_path=config_path,
@@ -232,7 +232,7 @@ def executor(
232
232
  )
233
233
 
234
234
  try:
235
- asyncio.get_event_loop().run_until_complete(agent.run())
235
+ asyncio.get_event_loop().run_until_complete(executor.run())
236
236
  except asyncio.CancelledError:
237
237
  logger.info("graceful shutdown")
238
238
 
@@ -0,0 +1,155 @@
1
+ import asyncio
2
+ import signal
3
+ from pathlib import Path
4
+ from typing import Any, Optional
5
+
6
+ import structlog
7
+
8
+ from indexify.function_executor.proto.function_executor_pb2 import (
9
+ SerializedObject,
10
+ )
11
+
12
+ from .api_objects import Task
13
+ from .downloader import DownloadedInputs, Downloader
14
+ from .function_executor.process_function_executor_factory import (
15
+ ProcessFunctionExecutorFactory,
16
+ )
17
+ from .function_worker import (
18
+ FunctionWorker,
19
+ FunctionWorkerInput,
20
+ FunctionWorkerOutput,
21
+ )
22
+ from .task_fetcher import TaskFetcher
23
+ from .task_reporter import TaskReporter
24
+
25
+
26
class Executor:
    """Fetches tasks from the Indexify server, executes them and reports outcomes.

    Wires together the task fetcher (server-sent-events stream), the downloader
    (graph code + function inputs), the function worker (actual execution) and
    the task reporter (result upload). Construct it, then `await run()`;
    `run()` installs SIGINT/SIGTERM handlers that trigger graceful shutdown.
    """

    def __init__(
        self,
        executor_id: str,
        code_path: Path,
        server_addr: str = "localhost:8900",
        development_mode: bool = False,
        config_path: Optional[str] = None,
        name_alias: Optional[str] = None,
        image_hash: Optional[str] = None,
    ):
        self._logger = structlog.get_logger(module=__name__)
        self._should_run = True
        self._config_path = config_path
        # A config file implies TLS credentials are configured, so use https.
        protocol: str = "http"
        if config_path:
            self._logger.info("running the extractor with TLS enabled")
            protocol = "https"

        self._function_worker = FunctionWorker(
            function_executor_factory=ProcessFunctionExecutorFactory(
                indexify_server_address=server_addr,
                development_mode=development_mode,
                config_path=config_path,
            )
        )
        self._server_addr = server_addr
        self._base_url = f"{protocol}://{self._server_addr}"
        self._code_path = code_path
        self._downloader = Downloader(
            code_path=code_path, base_url=self._base_url, config_path=config_path
        )
        self._task_fetcher = TaskFetcher(
            protocol=protocol,
            indexify_server_addr=self._server_addr,
            executor_id=executor_id,
            name_alias=name_alias,
            image_hash=image_hash,
            config_path=config_path,
        )
        self._task_reporter = TaskReporter(
            base_url=self._base_url,
            executor_id=executor_id,
            config_path=self._config_path,
        )
        # Strong references to in-flight _run_task() tasks. The event loop
        # keeps only weak references to tasks, so without this set a still
        # running task could be garbage collected before it finishes.
        self._running_tasks: set = set()

    async def run(self):
        """Runs the executor until shutdown() is called.

        Streams tasks from the server and executes each one concurrently.
        Any fetch failure is logged and retried after a 5 second backoff."""
        loop = asyncio.get_event_loop()
        for sig in (signal.SIGINT, signal.SIGTERM):
            loop.add_signal_handler(sig, self.shutdown, loop)

        while self._should_run:
            try:
                async for task in self._task_fetcher.run():
                    run_task_future = asyncio.create_task(self._run_task(task))
                    # Keep a strong reference until the task completes.
                    self._running_tasks.add(run_task_future)
                    run_task_future.add_done_callback(self._running_tasks.discard)
            except Exception as e:
                self._logger.error(
                    "failed fetching tasks, retrying in 5 seconds", exc_info=e
                )
                await asyncio.sleep(5)

    async def _run_task(self, task: Task) -> None:
        """Runs the supplied task.

        Doesn't raise any Exceptions. All errors are reported to the server."""
        logger = self._task_logger(task)
        output: Optional[FunctionWorkerOutput] = None

        try:
            graph: SerializedObject = await self._downloader.download_graph(task)
            # Named downloaded_input to avoid shadowing the input() builtin.
            downloaded_input: DownloadedInputs = await self._downloader.download_inputs(
                task
            )
            output = await self._function_worker.run(
                input=FunctionWorkerInput(
                    task=task,
                    graph=graph,
                    function_input=downloaded_input,
                )
            )
            logger.info("task_execution_finished", success=output.success)
        except Exception as e:
            logger.error("failed running the task", exc_info=e)

        await self._report_task_outcome(task=task, output=output, logger=logger)

    async def _report_task_outcome(
        self, task: Task, output: Optional[FunctionWorkerOutput], logger: Any
    ) -> None:
        """Reports the task with the given output to the server.

        None output means that the task execution didn't finish due to an internal error.
        Doesn't raise any exceptions; retries forever with a 5 second backoff."""
        reporting_retries: int = 0

        while True:
            logger = logger.bind(retries=reporting_retries)
            try:
                await self._task_reporter.report(
                    task=task, output=output, logger=logger
                )
                break
            except Exception as e:
                logger.error(
                    "failed_to_report_task",
                    exc_info=e,
                )
                reporting_retries += 1
                await asyncio.sleep(5)

    async def _shutdown(self, loop):
        """Stops the fetch loop, shuts down the worker and cancels loop tasks."""
        self._logger.info("shutting_down")
        self._should_run = False
        await self._function_worker.shutdown()
        # Cancel everything still running on the loop, including run() itself.
        for task in asyncio.all_tasks(loop):
            task.cancel()

    def shutdown(self, loop):
        """Signal-handler entry point: schedules the async shutdown on `loop`."""
        loop.create_task(self._shutdown(loop))

    def _task_logger(self, task: Task) -> Any:
        """Returns a logger bound with the task's identifying fields."""
        return self._logger.bind(
            namespace=task.namespace,
            graph=task.compute_graph,
            graph_version=task.graph_version,
            invocation_id=task.invocation_id,
            function_name=task.compute_fn,
            task_id=task.id,
        )
@@ -31,13 +31,11 @@ class FunctionWorkerInput:
31
31
  def __init__(
32
32
  self,
33
33
  task: Task,
34
- graph: Optional[SerializedObject] = None,
35
- function_input: Optional[DownloadedInputs] = None,
34
+ graph: SerializedObject,
35
+ function_input: DownloadedInputs,
36
36
  ):
37
37
  self.task = task
38
- # Must not be None when running the task.
39
38
  self.graph = graph
40
- # Must not be None when running the task.
41
39
  self.function_input = function_input
42
40
 
43
41
 
@@ -2,7 +2,6 @@ import json
2
2
  from importlib.metadata import version
3
3
  from typing import AsyncGenerator, Optional
4
4
 
5
- import httpx
6
5
  import structlog
7
6
  from httpx_sse import aconnect_sse
8
7
 
@@ -66,7 +65,7 @@ class TaskFetcher:
66
65
  except Exception as e:
67
66
  await event_source.response.aread()
68
67
  raise Exception(
69
- "Failed to register at server. "
68
+ "failed to register at server. "
70
69
  f"Response code: {event_source.response.status_code}. "
71
70
  f"Response text: '{event_source.response.text}'."
72
71
  ) from e
@@ -0,0 +1,215 @@
1
+ import asyncio
2
+ from typing import Any, List, Optional, Tuple
3
+
4
+ import nanoid
5
+ from httpx import Timeout
6
+
7
+ from indexify.common_util import get_httpx_client
8
+ from indexify.executor.api_objects import RouterOutput, Task, TaskResult
9
+ from indexify.function_executor.proto.function_executor_pb2 import (
10
+ FunctionOutput,
11
+ )
12
+
13
+ from .function_worker import FunctionWorkerOutput
14
+
15
+
16
# https://github.com/psf/requests/issues/1081#issuecomment-428504128
class ForceMultipartDict(dict):
    """A dict that is always truthy, even when empty.

    Passed as `files=` to force the HTTP client to build a multipart request
    even when there are no output files to attach."""

    def __bool__(self) -> bool:
        # Plain empty dicts are falsy; always report truthy instead.
        return True
20
+
21
+
22
# Sentinel passed as `files=` to force a multipart request with no files.
FORCE_MULTIPART = ForceMultipartDict()
# NOTE(review): despite the name, the value is "application/octet-stream",
# not a UTF-8 text content type — confirm whether e.g. "text/plain;
# charset=UTF-8" was intended before changing it; stdout/stderr uploads
# below tag their payloads with this type.
UTF_8_CONTENT_TYPE = "application/octet-stream"
24
+
25
+
26
class TaskOutputSummary:
    """Running totals describing a task's reported output (all start at zero).

    Filled in by the `_process_*` helpers; `total_bytes` is the grand total
    computed by the caller from the individual byte counters."""

    def __init__(self):
        # Function output files and their cumulative size.
        self.output_count = self.output_total_bytes = 0
        # Router edge decisions produced by the task.
        self.router_output_count = 0
        # Captured process streams and their cumulative sizes.
        self.stdout_count = self.stdout_total_bytes = 0
        self.stderr_count = self.stderr_total_bytes = 0
        # Sum of all byte counters above.
        self.total_bytes = 0
36
+
37
+
38
class TaskReporter:
    """Uploads task results and their output files to the Indexify server."""

    def __init__(
        self, base_url: str, executor_id: str, config_path: Optional[str] = None
    ):
        self._base_url = base_url
        self._executor_id = executor_id
        # Use thread-safe sync client due to issues with async client.
        # Async client attempts to use connections it already closed.
        # See e.g. https://github.com/encode/httpx/issues/2337.
        # Creating a new async client for each request fixes this but it
        # results in not reusing established TCP connections to server.
        self._client = get_httpx_client(config_path, make_async=False)

    async def report(
        self, task: Task, output: Optional[FunctionWorkerOutput], logger: Any
    ):
        """Reports result of the supplied task.

        If FunctionWorkerOutput is None this means that the task didn't finish and failed with internal error.
        """
        logger = logger.bind(module=__name__)
        task_result, output_files, output_summary = self._process_task_output(
            task, output
        )
        serialized_result = task_result.model_dump_json(exclude_none=True)

        logger.info(
            "reporting task outcome",
            total_bytes=output_summary.total_bytes,
            total_files=(
                output_summary.output_count
                + output_summary.stdout_count
                + output_summary.stderr_count
            ),
            output_files=output_summary.output_count,
            output_bytes=output_summary.total_bytes,
            router_output_count=output_summary.router_output_count,
            stdout_bytes=output_summary.stdout_total_bytes,
            stderr_bytes=output_summary.stderr_total_bytes,
        )

        # Run in a separate thread to not block the main event loop.
        response = await asyncio.to_thread(
            self._client.post,
            url=f"{self._base_url}/internal/ingest_files",
            data={"task_result": serialized_result},
            # Use httpx default timeout of 5s for all timeout types.
            # For read timeouts, use 5 minutes to allow for large file uploads.
            timeout=Timeout(5.0, read=5.0 * 60),
            # An empty file list would drop the multipart encoding, so fall
            # back to the always-truthy sentinel dict.
            files=output_files if output_files else FORCE_MULTIPART,
        )

        try:
            response.raise_for_status()
        except Exception as e:
            # Caller catches and logs the exception.
            raise Exception(
                "failed to report task outcome. "
                f"Response code: {response.status_code}. "
                f"Response text: '{response.text}'."
            ) from e

    def _process_task_output(
        self, task: Task, output: Optional[FunctionWorkerOutput]
    ) -> Tuple[TaskResult, List[Any], TaskOutputSummary]:
        """Builds the TaskResult, the multipart file list and a size summary."""
        task_result = TaskResult(
            outcome="failure",
            namespace=task.namespace,
            compute_graph=task.compute_graph,
            compute_fn=task.compute_fn,
            invocation_id=task.invocation_id,
            executor_id=self._executor_id,
            task_id=task.id,
        )
        output_files: List[Any] = []
        summary = TaskOutputSummary()
        # No worker output at all: report the default "failure" outcome.
        if output is None:
            return task_result, output_files, summary

        task_result.outcome = "success" if output.success else "failure"
        task_result.reducer = output.reducer

        _process_function_output(
            function_output=output.function_output,
            output_files=output_files,
            summary=summary,
        )
        _process_router_output(
            router_output=output.router_output, task_result=task_result, summary=summary
        )
        _process_stdout(
            stdout=output.stdout, output_files=output_files, summary=summary
        )
        _process_stderr(
            stderr=output.stderr, output_files=output_files, summary=summary
        )

        summary.total_bytes = (
            summary.output_total_bytes
            + summary.stdout_total_bytes
            + summary.stderr_total_bytes
        )

        return task_result, output_files, summary
144
+
145
+
146
+ def _process_function_output(
147
+ function_output: Optional[FunctionOutput],
148
+ output_files: List[Any],
149
+ summary: TaskOutputSummary,
150
+ ) -> None:
151
+ if function_output is None:
152
+ return
153
+
154
+ for output in function_output.outputs or []:
155
+ payload = output.bytes if output.HasField("bytes") else output.string
156
+ output_files.append(
157
+ (
158
+ "node_outputs",
159
+ (nanoid.generate(), payload, output.content_type),
160
+ )
161
+ )
162
+ summary.output_count += 1
163
+ summary.output_total_bytes += len(payload)
164
+
165
+
166
+ def _process_router_output(
167
+ router_output: Optional[RouterOutput],
168
+ task_result: TaskResult,
169
+ summary: TaskOutputSummary,
170
+ ) -> None:
171
+ if router_output is None:
172
+ return
173
+
174
+ task_result.router_output = RouterOutput(edges=router_output.edges)
175
+ summary.router_output_count += 1
176
+
177
+
178
+ def _process_stdout(
179
+ stdout: Optional[str], output_files: List[Any], summary: TaskOutputSummary
180
+ ) -> None:
181
+ if stdout is None:
182
+ return
183
+
184
+ output_files.append(
185
+ (
186
+ "stdout",
187
+ (
188
+ nanoid.generate(),
189
+ stdout.encode(),
190
+ UTF_8_CONTENT_TYPE,
191
+ ),
192
+ )
193
+ )
194
+ summary.stdout_count += 1
195
+ summary.stdout_total_bytes += len(stdout)
196
+
197
+
198
+ def _process_stderr(
199
+ stderr: Optional[str], output_files: List[Any], summary: TaskOutputSummary
200
+ ) -> None:
201
+ if stderr is None:
202
+ return
203
+
204
+ output_files.append(
205
+ (
206
+ "stderr",
207
+ (
208
+ nanoid.generate(),
209
+ stderr.encode(),
210
+ UTF_8_CONTENT_TYPE,
211
+ ),
212
+ )
213
+ )
214
+ summary.stderr_count += 1
215
+ summary.stderr_total_bytes += len(stderr)
@@ -1,4 +1,4 @@
1
- from typing import Any, Self
1
+ from typing import Any
2
2
 
3
3
  from .function_executor_pb2 import SerializedObject
4
4
 
@@ -7,21 +7,21 @@ class MessageValidator:
7
7
  def __init__(self, message: Any):
8
8
  self._message = message
9
9
 
10
- def required_field(self, field_name: str) -> Self:
10
+ def required_field(self, field_name: str) -> "MessageValidator":
11
11
  if not self._message.HasField(field_name):
12
12
  raise ValueError(
13
13
  f"Field '{field_name}' is required in {type(self._message).__name__}"
14
14
  )
15
15
  return self
16
16
 
17
- def required_serialized_object(self, field_name: str) -> Self:
17
+ def required_serialized_object(self, field_name: str) -> "MessageValidator":
18
18
  """Validates the SerializedObject.
19
19
 
20
20
  Raises: ValueError: If the SerializedObject is invalid or not present."""
21
21
  self.required_field(field_name)
22
22
  return self.optional_serialized_object(field_name)
23
23
 
24
- def optional_serialized_object(self, field_name: str) -> Self:
24
+ def optional_serialized_object(self, field_name: str) -> "MessageValidator":
25
25
  """Validates the SerializedObject.
26
26
 
27
27
  Raises: ValueError: If the SerializedObject is invalid."""
@@ -32,7 +32,7 @@ class MessageValidator:
32
32
  if not serializedObject.HasField("string") and not serializedObject.HasField(
33
33
  "bytes"
34
34
  ):
35
- raise ValueError("oneof 'data' is requred in SerializedObject")
35
+ raise ValueError("oneof 'data' is required in SerializedObject")
36
36
  if not serializedObject.HasField("content_type"):
37
- raise ValueError("Field 'content_type' is requred in SerializedObject")
37
+ raise ValueError("Field 'content_type' is required in SerializedObject")
38
38
  return self
@@ -37,7 +37,6 @@ from .indexify_functions import (
37
37
  IndexifyRouter,
38
38
  RouterCallResult,
39
39
  )
40
- from .local_cache import CacheAwareFunctionWrapper
41
40
  from .object_serializer import get_serializer
42
41
 
43
42
  RouterFn = Annotated[
@@ -86,7 +85,6 @@ class Graph:
86
85
 
87
86
  # Storage for local execution
88
87
  self._results: Dict[str, Dict[str, List[IndexifyData]]] = {}
89
- self._cache = CacheAwareFunctionWrapper("./indexify_local_runner_cache")
90
88
  self._accumulator_values: Dict[str, IndexifyData] = {}
91
89
  self._local_graph_ctx: Optional[GraphInvocationContext] = None
92
90
 
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "indexify"
3
- version = "0.2.41"
3
+ version = "0.2.43"
4
4
  description = "Python Client for Indexify"
5
5
  authors = ["Tensorlake Inc. <support@tensorlake.ai>"]
6
6
  license = "Apache 2.0"
@@ -20,7 +20,7 @@ cloudpickle = "^3.1.0"
20
20
  rich = "^13.9.2"
21
21
  nanoid = "^2.0.0"
22
22
  docker = "^7.1.0"
23
- typer = "^0.13.0"
23
+ typer = "^0.12"
24
24
  httpx-sse = "^0.4.0"
25
25
  structlog = "^24.4.0"
26
26
  grpcio = "1.68.1"