indexify 0.2.23__tar.gz → 0.2.24__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {indexify-0.2.23 → indexify-0.2.24}/PKG-INFO +2 -1
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/agent.py +1 -3
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/function_worker.py +1 -9
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/task_reporter.py +0 -11
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/task_store.py +0 -1
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/data_objects.py +2 -3
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/graph.py +8 -6
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/graph_definition.py +1 -1
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/indexify_functions.py +14 -37
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/object_serializer.py +21 -2
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/pipeline.py +3 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/http_client.py +19 -8
- indexify-0.2.24/indexify/settings.py +1 -0
- {indexify-0.2.23 → indexify-0.2.24}/pyproject.toml +2 -1
- indexify-0.2.23/indexify/settings.py +0 -2
- {indexify-0.2.23 → indexify-0.2.24}/LICENSE.txt +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/README.md +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/__init__.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/cli.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/data_loaders/__init__.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/data_loaders/local_directory_loader.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/data_loaders/url_loader.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/error.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/api_objects.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/downloader.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/executor_tasks.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/image_dependency_installer.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/indexify_executor.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/executor/runtime_probes.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/graph_validation.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/image.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/local_cache.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/remote_graph.py +0 -0
- {indexify-0.2.23 → indexify-0.2.24}/indexify/remote_pipeline.py +0 -0

{indexify-0.2.23 → indexify-0.2.24}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: indexify
-Version: 0.2.23
+Version: 0.2.24
 Summary: Python Client for Indexify
 Home-page: https://github.com/tensorlakeai/indexify
 License: Apache 2.0
@@ -18,6 +18,7 @@ Requires-Dist: cloudpickle (>=3.1.0,<4.0.0)
 Requires-Dist: docker (>=7.1.0,<8.0.0)
 Requires-Dist: httpx-sse (>=0.4.0,<0.5.0)
 Requires-Dist: httpx[http2] (>=0,<1)
+Requires-Dist: jsonpickle (>=3.3.0,<4.0.0)
 Requires-Dist: msgpack (>=1.1.0,<2.0.0)
 Requires-Dist: nanoid (>=2.0.0,<3.0.0)
 Requires-Dist: pydantic (>=2.9.2,<3.0.0)

{indexify-0.2.23 → indexify-0.2.24}/indexify/executor/agent.py

@@ -18,7 +18,6 @@ from rich.theme import Theme
 from indexify.functions_sdk.data_objects import (
     FunctionWorkerOutput,
     IndexifyData,
-    RouterOutput,
 )
 from indexify.functions_sdk.graph_definition import ComputeGraphMetadata
 from indexify.http_client import IndexifyClient
@@ -315,7 +314,7 @@ class ExtractorAgent:
                     task=async_task.task,
                     task_outcome="failure",
                     outputs=[],
-                    errors=str(async_task.exception()),
+                    stderr=str(async_task.exception()),
                 )
                 self._task_store.complete(outcome=completed_task)
                 continue
@@ -332,7 +331,6 @@ class ExtractorAgent:
                     task_outcome=task_outcome,
                     outputs=outputs.fn_outputs,
                     router_output=outputs.router_output,
-                    errors=outputs.exception,
                     stdout=outputs.stdout,
                     stderr=outputs.stderr,
                     reducer=outputs.reducer,

{indexify-0.2.23 → indexify-0.2.24}/indexify/executor/function_worker.py

@@ -40,7 +40,6 @@ class FunctionOutput(BaseModel):
     router_output: Optional[RouterOutput]
     reducer: bool = False
     success: bool = True
-    exception: Optional[str] = None
     stdout: str = ""
     stderr: str = ""
 
@@ -111,7 +110,6 @@ class FunctionWorker:
             # TODO - bring back running in a separate process
         except Exception as e:
             return FunctionWorkerOutput(
-                exception=str(e),
                 stdout=e.stdout,
                 stderr=e.stderr,
                 reducer=e.is_reducer,
@@ -121,7 +119,6 @@ class FunctionWorker:
         return FunctionWorkerOutput(
             fn_outputs=result.fn_outputs,
             router_output=result.router_output,
-            exception=result.exception,
             stdout=result.stdout,
             stderr=result.stderr,
             reducer=result.reducer,
@@ -152,7 +149,6 @@ def _run_function(
     router_output = None
     fn_output = None
     has_failed = False
-    exception_msg = None
     print(
         f"[bold] function_worker: [/bold] invoking function {fn_name} in graph {graph_name}"
     )
@@ -180,7 +176,6 @@ def _run_function(
             if router_call_result.traceback_msg is not None:
                 print(router_call_result.traceback_msg, file=sys.stderr)
                 has_failed = True
-                exception_msg = router_call_result.traceback_msg
         else:
             fn_call_result: FunctionCallResult = fn.invoke_fn_ser(
                 fn_name, input, init_value
@@ -190,11 +185,9 @@ def _run_function(
             if fn_call_result.traceback_msg is not None:
                 print(fn_call_result.traceback_msg, file=sys.stderr)
                 has_failed = True
-
-    except Exception as e:
+    except Exception:
         print(traceback.format_exc(), file=sys.stderr)
         has_failed = True
-        exception_msg = str(e)
 
     # WARNING - IF THIS FAILS, WE WILL NOT BE ABLE TO RECOVER
     # ANY LOGS
@@ -202,7 +195,6 @@ def _run_function(
     return FunctionOutput(
         fn_outputs=None,
         router_output=None,
-        exception=exception_msg,
         stdout=stdout_capture.getvalue(),
         stderr=stderr_capture.getvalue(),
         reducer=is_reducer,
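
Note: `_run_function` no longer threads a separate `exception_msg` value out of the worker. A failure is recorded by printing the formatted traceback into the captured stderr stream, which already travels with the task. A minimal sketch of that capture pattern (the helper name `run_captured` and the lambda are illustrative, not part of the package):

```python
import io
import sys
import traceback
from contextlib import redirect_stderr, redirect_stdout

def run_captured(fn):
    """Sketch of the 0.2.24 failure path: no separate exception field;
    the formatted traceback lands in the captured stderr."""
    stdout_capture, stderr_capture = io.StringIO(), io.StringIO()
    has_failed = False
    with redirect_stdout(stdout_capture), redirect_stderr(stderr_capture):
        try:
            fn()
        except Exception:
            # Mirrors the diff: traceback.format_exc() goes to stderr,
            # and no str(e) is kept separately.
            print(traceback.format_exc(), file=sys.stderr)
            has_failed = True
    return has_failed, stdout_capture.getvalue(), stderr_capture.getvalue()

failed, out, err = run_captured(lambda: 1 / 0)
assert failed and "ZeroDivisionError" in err
```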

{indexify-0.2.23 → indexify-0.2.24}/indexify/executor/task_reporter.py

@@ -37,17 +37,6 @@ class TaskReporter:
             ("node_outputs", (nanoid.generate(), io.BytesIO(output_bytes)))
         )
 
-        if completed_task.errors:
-            print(
-                f"[bold]task-reporter[/bold] uploading error of size: {len(completed_task.errors)}"
-            )
-            fn_outputs.append(
-                (
-                    "exception_msg",
-                    (nanoid.generate(), io.BytesIO(completed_task.errors.encode())),
-                )
-            )
-
         if completed_task.stdout:
             print(
                 f"[bold]task-reporter[/bold] uploading stdout of size: {len(completed_task.stdout)}"

{indexify-0.2.23 → indexify-0.2.24}/indexify/executor/task_store.py

@@ -14,7 +14,6 @@ class CompletedTask(BaseModel):
     task_outcome: Literal["success", "failure"]
     outputs: Optional[List[IndexifyData]] = None
     router_output: Optional[RouterOutput] = None
-    errors: Optional[str] = None
     stdout: Optional[str] = None
     stderr: Optional[str] = None
     reducer: bool = False
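
Taken together with the agent.py, function_worker.py, and task_reporter.py changes above, the per-task `errors` channel is gone: a failed task now reports its exception text through `stderr`. A sketch of the reporting model after this change (abbreviated copy of `CompletedTask` with `outputs` and `router_output` omitted; field values are illustrative):

```python
from typing import Literal, Optional
from pydantic import BaseModel

class CompletedTask(BaseModel):
    # Abbreviated from task_store.py in 0.2.24: the `errors` field is gone.
    task_outcome: Literal["success", "failure"]
    stdout: Optional[str] = None
    stderr: Optional[str] = None
    reducer: bool = False

failed = CompletedTask(
    task_outcome="failure",
    stderr="Traceback (most recent call last): ...",  # illustrative value
)
```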

{indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/data_objects.py

@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union, Literal
 
 from pydantic import BaseModel, Json
 
@@ -17,13 +17,12 @@ class RouterOutput(BaseModel):
 class IndexifyData(BaseModel):
     id: Optional[str] = None
     payload: bytes
-    payload_encoder: str = "cloudpickle"
+    encoder: Literal["cloudpickle", "json"] = "cloudpickle"
 
 
 class FunctionWorkerOutput(BaseModel):
     fn_outputs: Optional[List[IndexifyData]]
     router_output: Optional[RouterOutput]
-    exception: Optional[str]
     stdout: Optional[str]
     stderr: Optional[str]
     reducer: bool = False
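
`IndexifyData` now records which encoder produced its payload, so consumers no longer need out-of-band knowledge to decode it. A minimal round trip, assuming indexify 0.2.24 is installed (pydantic coerces the JSON text that jsonpickle returns into the `bytes` payload field):

```python
from indexify.functions_sdk.data_objects import IndexifyData
from indexify.functions_sdk.object_serializer import get_serializer

serializer = get_serializer("json")  # the new jsonpickle-backed JsonSerializer
data = IndexifyData(
    id="inv-1",  # illustrative id
    payload=serializer.serialize({"question": "what changed?"}),
    encoder="json",  # stamped on the payload, new in 0.2.24
)

# Any consumer can pick the right serializer from the payload itself.
assert get_serializer(data.encoder).deserialize(data.payload) == {"question": "what changed?"}
```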

{indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/graph.py

@@ -160,6 +160,7 @@ class Graph:
             reducer=start_node.accumulate is not None,
             image_name=start_node.image._image_name,
             image_information=start_node.image.to_image_information(),
+            payload_encoder=start_node.encoder
         )
         metadata_edges = self.edges.copy()
         metadata_nodes = {}
@@ -171,7 +172,7 @@ class Graph:
                 description=node.description or "",
                 source_fn=node_name,
                 target_fns=self.routers[node_name],
-                payload_encoder=node.payload_encoder,
+                payload_encoder=node.encoder,
                 image_name=node.image._image_name,
                 image_information=node.image.to_image_information(),
             )
@@ -185,6 +186,7 @@ class Graph:
                 reducer=node.accumulate is not None,
                 image_name=node.image._image_name,
                 image_information=node.image.to_image_information(),
+                encoder=node.encoder,
             )
         )
 
@@ -202,16 +204,16 @@ class Graph:
 
     def run(self, block_until_done: bool = False, **kwargs) -> str:
         start_node = self.nodes[self._start_node]
-        serializer = get_serializer(start_node.payload_encoder)
-        input = IndexifyData(id=generate(), payload=serializer.serialize(kwargs))
+        serializer = get_serializer(start_node.encoder)
+        input = IndexifyData(id=generate(), payload=serializer.serialize(kwargs), encoder=start_node.encoder)
         print(f"[bold] Invoking {self._start_node}[/bold]")
         outputs = defaultdict(list)
         self._accumulator_values[input.id] = {}
         for k, v in self.accumulator_zero_values.items():
             node = self.nodes[k]
-            serializer = get_serializer(node.payload_encoder)
+            serializer = get_serializer(node.encoder)
             self._accumulator_values[input.id] = {
-                k: IndexifyData(payload=serializer.serialize(v))
+                k: IndexifyData(payload=serializer.serialize(v), encoder=node.encoder)
             }
         self._results[input.id] = outputs
         enable_cache = kwargs.get("enable_cache", True)
@@ -289,7 +291,7 @@ class Graph:
             raise ValueError(f"no results found for fn {fn_name} on graph {self.name}")
         fn = self.nodes[fn_name]
         fn_model = self.get_function(fn_name).get_output_model()
-        serializer = get_serializer(fn.payload_encoder)
+        serializer = get_serializer(fn.encoder)
         outputs = []
         for result in results[fn_name]:
             payload_dict = serializer.deserialize(result.payload)
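
Because `run()` now stamps each payload with its node's encoder (the invocation input, accumulator zero values, and function outputs alike), nodes in one graph can use different encoders and downstream code can decode generically. A sketch, under the assumption that `CloudPickleSerializer` exposes the same static `serialize`/`deserialize` shape as the `JsonSerializer` added in object_serializer.py below:

```python
from indexify.functions_sdk.data_objects import IndexifyData
from indexify.functions_sdk.object_serializer import get_serializer

payloads = [
    IndexifyData(payload=get_serializer("cloudpickle").serialize([1, 2, 3]),
                 encoder="cloudpickle"),
    IndexifyData(payload=get_serializer("json").serialize({"k": "v"}),
                 encoder="json"),
]

# Decode without knowing which node produced which payload.
decoded = [get_serializer(p.encoder).deserialize(p.payload) for p in payloads]
assert decoded == [[1, 2, 3], {"k": "v"}]
```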

{indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/indexify_functions.py

@@ -22,7 +22,7 @@ from typing_extensions import get_type_hints
 
 from .data_objects import IndexifyData
 from .image import DEFAULT_IMAGE_3_10, Image
-from .object_serializer import …
+from .object_serializer import get_serializer
 
 
 class GraphInvocationContext(BaseModel):
@@ -119,7 +119,7 @@ class IndexifyFunction:
     image: Optional[Image] = DEFAULT_IMAGE_3_10
     placement_constraints: List[PlacementConstraints] = []
     accumulate: Optional[Type[Any]] = None
-    payload_encoder: Optional[str] = "cloudpickle"
+    encoder: Optional[str] = "cloudpickle"
 
     def run(self, *args, **kwargs) -> Union[List[Any], Any]:
         pass
@@ -131,7 +131,7 @@ class IndexifyFunction:
 
     @classmethod
     def deserialize_output(cls, output: IndexifyData) -> Any:
-        serializer = get_serializer(cls.payload_encoder)
+        serializer = get_serializer(cls.encoder)
         return serializer.deserialize(output.payload)
 
 
@@ -140,7 +140,7 @@ class IndexifyRouter:
     description: str = ""
     image: Optional[Image] = DEFAULT_IMAGE_3_10
     placement_constraints: List[PlacementConstraints] = []
-    payload_encoder: Optional[str] = "cloudpickle"
+    encoder: Optional[str] = "cloudpickle"
 
     def run(self, *args, **kwargs) -> Optional[List[IndexifyFunction]]:
         pass
@@ -151,7 +151,7 @@ def indexify_router(
     description: Optional[str] = "",
     image: Optional[Image] = DEFAULT_IMAGE_3_10,
     placement_constraints: List[PlacementConstraints] = [],
-    payload_encoder: Optional[str] = "cloudpickle",
+    encoder: Optional[str] = "cloudpickle",
 ):
     def construct(fn):
         args = locals().copy()
@@ -174,7 +174,7 @@ def indexify_router(
             setattr(IndexifyRo, key, value)
 
         IndexifyRo.image = image
-        IndexifyRo.payload_encoder = payload_encoder
+        IndexifyRo.encoder = encoder
         return IndexifyRo
 
     return construct
@@ -185,7 +185,7 @@ def indexify_function(
     description: Optional[str] = "",
     image: Optional[Image] = DEFAULT_IMAGE_3_10,
     accumulate: Optional[Type[BaseModel]] = None,
-    payload_encoder: Optional[str] = "cloudpickle",
+    encoder: Optional[str] = "cloudpickle",
     placement_constraints: List[PlacementConstraints] = [],
 ):
     def construct(fn):
@@ -207,10 +207,9 @@ def indexify_function(
         for key, value in args.items():
             if key != "fn" and key != "self":
                 setattr(IndexifyFn, key, value)
-
         IndexifyFn.image = image
         IndexifyFn.accumulate = accumulate
-        IndexifyFn.payload_encoder = payload_encoder
+        IndexifyFn.encoder = encoder
         return IndexifyFn
 
     return construct
@@ -303,7 +302,7 @@ class IndexifyFunctionWrapper:
         self, name: str, input: IndexifyData, acc: Optional[Any] = None
     ) -> FunctionCallResult:
         input = self.deserialize_input(name, input)
-        serializer = get_serializer(self.indexify_function.payload_encoder)
+        serializer = get_serializer(self.indexify_function.encoder)
         if acc is not None:
             acc = self.indexify_function.accumulate.model_validate(
                 serializer.deserialize(acc.payload)
@@ -314,7 +313,7 @@ class IndexifyFunctionWrapper:
         )
         outputs, err = self.run_fn(input, acc=acc)
         ser_outputs = [
-            IndexifyData(payload=serializer.serialize(output)) for output in outputs
+            IndexifyData(payload=serializer.serialize(output), encoder=self.indexify_function.encoder) for output in outputs
         ]
         return FunctionCallResult(ser_outputs=ser_outputs, traceback_msg=err)
 
@@ -324,32 +323,10 @@ class IndexifyFunctionWrapper:
         return RouterCallResult(edges=edges, traceback_msg=err)
 
     def deserialize_input(self, compute_fn: str, indexify_data: IndexifyData) -> Any:
-        …
-        …
-        …
-        …
-        arg_types = {}
-        for name, param in signature.parameters.items():
-            if (
-                param.annotation != inspect.Parameter.empty
-                and param.annotation != getattr(compute_fn, "accumulate", None)
-            ):
-                arg_types[name] = param.annotation
-        if len(arg_types) > 1:
-            raise ValueError(
-                f"Compute function {compute_fn} has multiple arguments, but only one is supported"
-            )
-        elif len(arg_types) == 0:
-            raise ValueError(f"Compute function {compute_fn} has no arguments")
-        arg_name, arg_type = next(iter(arg_types.items()))
-        if arg_type is None:
-            raise ValueError(f"Argument {arg_name} has no type annotation")
-        if is_pydantic_model_from_annotation(arg_type):
-            if len(payload.keys()) == 1 and isinstance(list(payload.values())[0], dict):
-                payload = list(payload.values())[0]
-            return arg_type.model_validate(payload)
-        return payload
-
+        encoder = indexify_data.encoder
+        payload = indexify_data.payload
+        serializer = get_serializer(encoder)
+        return serializer.deserialize(payload)
 
 def get_ctx() -> GraphInvocationContext:
     frame = inspect.currentframe()
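
The decorators now take `encoder=` (replacing the old `payload_encoder` attribute), and `deserialize_input` collapses to a serializer lookup driven by the payload's own `encoder` stamp, dropping the old signature-inspection path. A hedged usage sketch (`fetch_summary` and its body are made up; graph wiring is omitted):

```python
from pydantic import BaseModel
from indexify.functions_sdk.indexify_functions import indexify_function

class Summary(BaseModel):
    text: str

# New in 0.2.24: choose the payload encoder per function. "json" routes
# through the jsonpickle-backed JsonSerializer; the default is "cloudpickle".
@indexify_function(encoder="json")
def fetch_summary(url: str) -> Summary:
    return Summary(text=f"fetched {url}")  # illustrative body
```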

{indexify-0.2.23 → indexify-0.2.24}/indexify/functions_sdk/object_serializer.py

@@ -1,5 +1,6 @@
 from typing import Any, List
 
+import jsonpickle
 import cloudpickle
 import msgpack
 from pydantic import BaseModel
@@ -12,8 +13,26 @@ def get_serializer(serializer_type: str) -> Any:
         return CloudPickleSerializer()
     elif serializer_type == "msgpack":
         return MsgPackSerializer()
-    …
-    …
+    elif serializer_type == "json":
+        return JsonSerializer()
+    raise ValueError(f"Unknown serializer type: {serializer_type}")
+
+
+class JsonSerializer:
+    @staticmethod
+    def serialize(data: Any) -> str:
+        return jsonpickle.encode(data)
+
+    @staticmethod
+    def deserialize(data: str) -> Any:
+        return jsonpickle.decode(data)
+
+    @staticmethod
+    def serialize_list(data: List[Any]) -> str:
+        return jsonpickle.encode(data)
+
+    @staticmethod
+    def deserialize_list(data: str) -> List[Any]:
+        return jsonpickle.decode(data)
 
 
 class CloudPickleSerializer:
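
The new `JsonSerializer` simply delegates to jsonpickle, which round-trips plain Python structures (and many custom objects) through JSON text, and `get_serializer` now raises `ValueError` for unknown serializer types. A standalone check of the two calls the class wraps:

```python
import jsonpickle

encoded = jsonpickle.encode({"graph": "summarize", "attempts": [1, 2]})
assert isinstance(encoded, str)  # JSON text, unlike cloudpickle's bytes
assert jsonpickle.decode(encoded) == {"graph": "summarize", "attempts": [1, 2]}
```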

{indexify-0.2.23 → indexify-0.2.24}/indexify/http_client.py

@@ -14,8 +14,7 @@ from indexify.error import ApiException, GraphStillProcessing
 from indexify.functions_sdk.data_objects import IndexifyData
 from indexify.functions_sdk.graph import ComputeGraphMetadata, Graph
 from indexify.functions_sdk.indexify_functions import IndexifyFunction
-from indexify.settings import DEFAULT_SERVICE_URL, …
-
+from indexify.settings import DEFAULT_SERVICE_URL
 
 class InvocationEventPayload(BaseModel):
     invocation_id: str
@@ -47,6 +46,7 @@ class IndexifyClient:
         service_url: str = DEFAULT_SERVICE_URL,
         config_path: Optional[str] = None,
         namespace: str = "default",
+        api_key: Optional[str] = None,
         **kwargs,
     ):
         if os.environ.get("INDEXIFY_URL"):
@@ -74,6 +74,10 @@ class IndexifyClient:
         self._timeout = kwargs.get("timeout")
         self._graphs: Dict[str, Graph] = {}
         self._fns: Dict[str, IndexifyFunction] = {}
+        self._api_key = api_key
+        if not self._api_key:
+            print("API key not provided. Trying to fetch from environment TENSORLAKE_API_KEY variable")
+            self._api_key = os.getenv("TENSORLAKE_API_KEY")
 
     def _request(self, method: str, **kwargs) -> httpx.Response:
         try:
@@ -87,10 +91,9 @@ class IndexifyClient:
                 raise ApiException(response.text)
         except httpx.ConnectError:
             message = (
-                f"Make sure the server is running and …"
+                f"Make sure the server is running and accessible at {self._service_url}"
             )
             ex = ApiException(message=message)
-            print(ex)
             raise ex
         return response
 
@@ -100,7 +103,7 @@ class IndexifyClient:
         cert_path: str,
         key_path: str,
         ca_bundle_path: Optional[str] = None,
-        service_url: str = …,
+        service_url: str = DEFAULT_SERVICE_URL,
         *args,
         **kwargs,
     ) -> "IndexifyClient":
@@ -140,17 +143,25 @@ class IndexifyClient:
             verify=verify_option,
         )
         return client
+
+    def _add_api_key(self, kwargs):
+        if self._api_key:
+            kwargs["headers"] = {"Authorization": f"Bearer {self._api_key}"}
 
     def _get(self, endpoint: str, **kwargs) -> httpx.Response:
+        self._add_api_key(kwargs)
         return self._request("GET", url=f"{self._service_url}/{endpoint}", **kwargs)
 
     def _post(self, endpoint: str, **kwargs) -> httpx.Response:
+        self._add_api_key(kwargs)
         return self._request("POST", url=f"{self._service_url}/{endpoint}", **kwargs)
 
     def _put(self, endpoint: str, **kwargs) -> httpx.Response:
+        self._add_api_key(kwargs)
         return self._request("PUT", url=f"{self._service_url}/{endpoint}", **kwargs)
 
     def _delete(self, endpoint: str, **kwargs) -> httpx.Response:
+        self._add_api_key(kwargs)
         return self._request("DELETE", url=f"{self._service_url}/{endpoint}", **kwargs)
 
     def _close(self):
@@ -259,14 +270,14 @@ class IndexifyClient:
     ) -> str:
         ser_input = cloudpickle.dumps(kwargs)
         params = {"block_until_finish": block_until_done}
+        kwargs = {"headers": {"Content-Type": "application/cbor"}, "data": ser_input, "params": params}
+        self._add_api_key(kwargs)
         with httpx.Client() as client:
             with connect_sse(
                 client,
                 "POST",
                 f"{self.service_url}/namespaces/{self.namespace}/compute_graphs/{graph}/invoke_object",
-                headers={"Content-Type": "application/cbor"},
-                data=ser_input,
-                params=params,
+                **kwargs,
             ) as event_source:
                 if not event_source.response.is_success:
                     resp = event_source.response.read().decode("utf-8")
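
`IndexifyClient` now accepts an `api_key`, falls back to the `TENSORLAKE_API_KEY` environment variable, and `_add_api_key` attaches it as a Bearer token on every GET/POST/PUT/DELETE as well as the SSE invoke call. A sketch (both keys are placeholders):

```python
import os
from indexify.http_client import IndexifyClient

# Either pass the key explicitly...
client = IndexifyClient(api_key="tl-...")  # placeholder key

# ...or export TENSORLAKE_API_KEY and let the constructor pick it up.
os.environ["TENSORLAKE_API_KEY"] = "tl-..."  # placeholder key
client = IndexifyClient()
# Requests then carry: Authorization: Bearer <api_key>
```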

indexify-0.2.24/indexify/settings.py

@@ -0,0 +1 @@
+DEFAULT_SERVICE_URL = "https://api.tensorlake.ai"
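
With the packaged default now pointing at the hosted Tensorlake endpoint, a self-hosted server has to be targeted explicitly (the localhost URL below is an assumption, not a packaged default):

```python
from indexify.http_client import IndexifyClient

client = IndexifyClient(service_url="http://localhost:8900")
```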

{indexify-0.2.23 → indexify-0.2.24}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "indexify"
-version = "0.2.23"
+version = "0.2.24"
 description = "Python Client for Indexify"
 authors = ["Tensorlake Inc. <support@tensorlake.ai>"]
 license = "Apache 2.0"
@@ -23,6 +23,7 @@ docker = "^7.1.0"
 msgpack = "^1.1.0"
 typer = "^0.12.5"
 httpx-sse = "^0.4.0"
+jsonpickle = "^3.3.0"
 
 [tool.poetry.dev-dependencies]
 black = "^22.3.0"