datacrunch 1.9.0__tar.gz → 1.10.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. {datacrunch-1.9.0 → datacrunch-1.10.0}/PKG-INFO +21 -6
  2. {datacrunch-1.9.0 → datacrunch-1.10.0}/README.md +20 -5
  3. datacrunch-1.10.0/datacrunch/InferenceClient/__init__.py +3 -0
  4. datacrunch-1.10.0/datacrunch/InferenceClient/inference_client.py +343 -0
  5. datacrunch-1.10.0/datacrunch/__version__.py +1 -0
  6. datacrunch-1.10.0/datacrunch/containers/containers.py +958 -0
  7. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/datacrunch.py +4 -2
  8. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch.egg-info/PKG-INFO +21 -6
  9. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch.egg-info/SOURCES.txt +2 -0
  10. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/conftest.py +2 -0
  11. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/containers/test_containers.py +87 -20
  12. datacrunch-1.9.0/datacrunch/__version__.py +0 -1
  13. datacrunch-1.9.0/datacrunch/containers/containers.py +0 -722
  14. {datacrunch-1.9.0 → datacrunch-1.10.0}/LICENSE +0 -0
  15. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/__init__.py +0 -0
  16. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/authentication/__init__.py +0 -0
  17. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/authentication/authentication.py +0 -0
  18. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/balance/__init__.py +0 -0
  19. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/balance/balance.py +0 -0
  20. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/constants.py +0 -0
  21. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/containers/__init__.py +0 -0
  22. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/exceptions.py +0 -0
  23. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/helpers.py +0 -0
  24. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/http_client/__init__.py +0 -0
  25. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/http_client/http_client.py +0 -0
  26. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/images/__init__.py +0 -0
  27. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/images/images.py +0 -0
  28. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/instance_types/__init__.py +0 -0
  29. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/instance_types/instance_types.py +0 -0
  30. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/instances/__init__.py +0 -0
  31. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/instances/instances.py +0 -0
  32. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/locations/__init__.py +0 -0
  33. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/locations/locations.py +0 -0
  34. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/ssh_keys/__init__.py +0 -0
  35. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/ssh_keys/ssh_keys.py +0 -0
  36. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/startup_scripts/__init__.py +0 -0
  37. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/startup_scripts/startup_scripts.py +0 -0
  38. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/volume_types/__init__.py +0 -0
  39. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/volume_types/volume_types.py +0 -0
  40. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/volumes/__init__.py +0 -0
  41. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch/volumes/volumes.py +0 -0
  42. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch.egg-info/dependency_links.txt +0 -0
  43. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch.egg-info/requires.txt +0 -0
  44. {datacrunch-1.9.0 → datacrunch-1.10.0}/datacrunch.egg-info/top_level.txt +0 -0
  45. {datacrunch-1.9.0 → datacrunch-1.10.0}/setup.cfg +0 -0
  46. {datacrunch-1.9.0 → datacrunch-1.10.0}/setup.py +0 -0
  47. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/__init__.py +0 -0
  48. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/integration_tests/__init__.py +0 -0
  49. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/integration_tests/conftest.py +0 -0
  50. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/integration_tests/test_instances.py +0 -0
  51. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/integration_tests/test_locations.py +0 -0
  52. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/integration_tests/test_volumes.py +0 -0
  53. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/__init__.py +0 -0
  54. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/authentication/__init__.py +0 -0
  55. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/authentication/test_authentication.py +0 -0
  56. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/balance/__init__.py +0 -0
  57. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/balance/test_balance.py +0 -0
  58. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/containers/__init__.py +0 -0
  59. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/http_client/__init__.py +0 -0
  60. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/http_client/test_http_client.py +0 -0
  61. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/images/__init__.py +0 -0
  62. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/images/test_images.py +0 -0
  63. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/instance_types/__init__.py +0 -0
  64. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/instance_types/test_instance_types.py +0 -0
  65. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/instances/__init__.py +0 -0
  66. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/instances/test_instances.py +0 -0
  67. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/ssh_keys/__init__.py +0 -0
  68. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/ssh_keys/test_ssh_keys.py +0 -0
  69. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/startup_scripts/__init__.py +0 -0
  70. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/startup_scripts/test_startup_scripts.py +0 -0
  71. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/test_datacrunch.py +0 -0
  72. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/test_exceptions.py +0 -0
  73. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/volume_types/__init__.py +0 -0
  74. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/volume_types/test_volume_types.py +0 -0
  75. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/volumes/__init__.py +0 -0
  76. {datacrunch-1.9.0 → datacrunch-1.10.0}/tests/unit_tests/volumes/test_volumes.py +0 -0
{datacrunch-1.9.0 → datacrunch-1.10.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datacrunch
- Version: 1.9.0
+ Version: 1.10.0
  Summary: Official Python SDK for DataCrunch Public API
  Home-page: https://github.com/DataCrunch-io
  Author: DataCrunch Oy
@@ -65,26 +65,41 @@ DataCrunch's Public API documentation [is available here](https://api.datacrunch

  - Generate your client credentials - [instructions in the public API docs](https://api.datacrunch.io/v1/docs#description/quick-start-guide).

- - Add the client secret to an environment variable (don't want it to be hardcoded):
+
+ - Add your client id and client secret to an environment variable (don't want it to be hardcoded):

  Linux (bash):

  ```bash
- export DATACRUNCH_CLIENT_SECRET=Z4CZq02rdwdB7ISV0k4Z2gtwAFKiyvr2U1l0KDIeYi
+ export DATACRUNCH_CLIENT_ID=YOUR_ID_HERE
+ export DATACRUNCH_CLIENT_SECRET=YOUR_SECRET_HERE
  ```

+ - To enable sending inference requests from SDK you must generate an inference key - [Instructions on inference authorization](https://docs.datacrunch.io/inference/authorization)
+
+
+ - Add your inference key to an environment variable
+
+ Linux (bash):
+
+ ```bash
+ export DATACRUNCH_INFERENCE_KEY=YOUR_API_KEY_HERE
+ ```
+
  Other platforms:
  https://en.wikipedia.org/wiki/Environment_variable

+
+
  - Example for creating a new instance:

  ```python
  import os
  from datacrunch import DataCrunchClient

- # Get client secret from environment variable
+ # Get credentials from environment variables
+ CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
  CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
- CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi'

  # Create datcrunch client
  datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
@@ -159,7 +174,7 @@ Create this file in the root directory of the project:
  from datacrunch.datacrunch import DataCrunchClient

  CLIENT_SECRET = 'secret'
- CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi'
+ CLIENT_ID = 'your-id'

  # Create datcrunch client
  datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET, base_url='http://localhost:3001/v1')
@@ -24,26 +24,41 @@ DataCrunch's Public API documentation [is available here](https://api.datacrunch
24
24
 
25
25
  - Generate your client credentials - [instructions in the public API docs](https://api.datacrunch.io/v1/docs#description/quick-start-guide).
26
26
 
27
- - Add the client secret to an environment variable (don't want it to be hardcoded):
27
+
28
+ - Add your client id and client secret to an environment variable (don't want it to be hardcoded):
28
29
 
29
30
  Linux (bash):
30
31
 
31
32
  ```bash
32
- export DATACRUNCH_CLIENT_SECRET=Z4CZq02rdwdB7ISV0k4Z2gtwAFKiyvr2U1l0KDIeYi
33
+ export DATACRUNCH_CLIENT_ID=YOUR_ID_HERE
34
+ export DATACRUNCH_CLIENT_SECRET=YOUR_SECRET_HERE
33
35
  ```
34
36
 
37
+ - To enable sending inference requests from SDK you must generate an inference key - [Instructions on inference authorization](https://docs.datacrunch.io/inference/authorization)
38
+
39
+
40
+ - Add your inference key to an environment variable
41
+
42
+ Linux (bash):
43
+
44
+ ```bash
45
+ export DATACRUNCH_INFERENCE_KEY=YOUR_API_KEY_HERE
46
+ ```
47
+
35
48
  Other platforms:
36
49
  https://en.wikipedia.org/wiki/Environment_variable
37
50
 
51
+
52
+
38
53
  - Example for creating a new instance:
39
54
 
40
55
  ```python
41
56
  import os
42
57
  from datacrunch import DataCrunchClient
43
58
 
44
- # Get client secret from environment variable
59
+ # Get credentials from environment variables
60
+ CLIENT_ID = os.environ.get('DATACRUNCH_CLIENT_ID')
45
61
  CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
46
- CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi'
47
62
 
48
63
  # Create datcrunch client
49
64
  datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)
@@ -118,7 +133,7 @@ Create this file in the root directory of the project:
118
133
  from datacrunch.datacrunch import DataCrunchClient
119
134
 
120
135
  CLIENT_SECRET = 'secret'
121
- CLIENT_ID = 'Ibk5bdxV64lKAWOqYnvSi'
136
+ CLIENT_ID = 'your-id'
122
137
 
123
138
  # Create datcrunch client
124
139
  datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET, base_url='http://localhost:3001/v1')
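For orientation, here is a minimal sketch of how the environment variables introduced in the README changes above might be wired to both clients in 1.10.0. It is not taken from the package docs: the deployment URL is a hypothetical placeholder, and the `InferenceClient` import it assumes is the one exported by the new `datacrunch/InferenceClient/__init__.py` shown in the next diff.

```python
import os

from datacrunch import DataCrunchClient
from datacrunch.InferenceClient import InferenceClient

# Credentials from the environment variables described in the README above
CLIENT_ID = os.environ['DATACRUNCH_CLIENT_ID']
CLIENT_SECRET = os.environ['DATACRUNCH_CLIENT_SECRET']
INFERENCE_KEY = os.environ['DATACRUNCH_INFERENCE_KEY']

# Public API client, unchanged from 1.9.0
datacrunch = DataCrunchClient(CLIENT_ID, CLIENT_SECRET)

# Inference client introduced in 1.10.0; the URL below is a hypothetical
# placeholder for a container deployment endpoint, not a real address
inference = InferenceClient(
    inference_key=INFERENCE_KEY,
    endpoint_base_url='https://example.invalid/my-deployment',
)
```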
datacrunch-1.10.0/datacrunch/InferenceClient/__init__.py
@@ -0,0 +1,3 @@
+ from .inference_client import InferenceClient, InferenceResponse
+
+ __all__ = ['InferenceClient', 'InferenceResponse']
datacrunch-1.10.0/datacrunch/InferenceClient/inference_client.py
@@ -0,0 +1,343 @@
+ from dataclasses import dataclass
+ from dataclasses_json import dataclass_json, Undefined # type: ignore
+ import requests
+ from requests.structures import CaseInsensitiveDict
+ from typing import Optional, Dict, Any, Union, Generator
+ from urllib.parse import urlparse
+ from enum import Enum
+
+ class InferenceClientError(Exception):
+     """Base exception for InferenceClient errors."""
+     pass
+
+ class AsyncStatus(int, Enum):
+     Initialized = 0
+     Queue = 1
+     Inference = 2
+     Completed = 3
+
+ @dataclass_json(undefined=Undefined.EXCLUDE)
+ @dataclass
+ class InferenceResponse:
+     headers: CaseInsensitiveDict[str]
+     status_code: int
+     status_text: str
+     _original_response: requests.Response
+     _stream: bool = False
+
+     def _is_stream_response(self, headers: CaseInsensitiveDict[str]) -> bool:
+         """Check if the response headers indicate a streaming response.
+
+         Args:
+             headers: The response headers to check
+
+         Returns:
+             bool: True if the response is likely a stream, False otherwise
+         """
+         # Standard chunked transfer encoding
+         is_chunked_transfer = headers.get(
+             'Transfer-Encoding', '').lower() == 'chunked'
+         # Server-Sent Events content type
+         is_event_stream = headers.get(
+             'Content-Type', '').lower() == 'text/event-stream'
+         # NDJSON
+         is_ndjson = headers.get(
+             'Content-Type', '').lower() == 'application/x-ndjson'
+         # Stream JSON
+         is_stream_json = headers.get(
+             'Content-Type', '').lower() == 'application/stream+json'
+         # Keep-alive
+         is_keep_alive = headers.get(
+             'Connection', '').lower() == 'keep-alive'
+         # No content length
+         has_no_content_length = 'Content-Length' not in headers
+
+         # No Content-Length with keep-alive often suggests streaming (though not definitive)
+         is_keep_alive_and_no_content_length = is_keep_alive and has_no_content_length
+
+         return (self._stream or is_chunked_transfer or is_event_stream or is_ndjson or
+                 is_stream_json or is_keep_alive_and_no_content_length)
+
+     def output(self, is_text: bool = False) -> Any:
+         try:
+             if is_text:
+                 return self._original_response.text
+             return self._original_response.json()
+         except Exception as e:
+             # if the response is a stream (check headers), raise relevant error
+             if self._is_stream_response(self._original_response.headers):
+                 raise InferenceClientError(
+                     f"Response might be a stream, use the stream method instead")
+             raise InferenceClientError(
+                 f"Failed to parse response as JSON: {str(e)}")
+
+     def stream(self, chunk_size: int = 512, as_text: bool = True) -> Generator[Any, None, None]:
+         """Stream the response content.
+
+         Args:
+             chunk_size: Size of chunks to stream, in bytes
+             as_text: If True, stream as text using iter_lines. If False, stream as binary using iter_content.
+
+         Returns:
+             Generator yielding chunks of the response
+         """
+         if as_text:
+             for chunk in self._original_response.iter_lines(chunk_size=chunk_size):
+                 if chunk:
+                     yield chunk
+         else:
+             for chunk in self._original_response.iter_content(chunk_size=chunk_size):
+                 if chunk:
+                     yield chunk
+
+
+ class InferenceClient:
+     def __init__(self, inference_key: str, endpoint_base_url: str, timeout_seconds: int = 60 * 5) -> None:
+         """
+         Initialize the InferenceClient.
+
+         Args:
+             inference_key: The authentication key for the API
+             endpoint_base_url: The base URL for the API
+             timeout_seconds: Request timeout in seconds
+
+         Raises:
+             InferenceClientError: If the parameters are invalid
+         """
+         if not inference_key:
+             raise InferenceClientError("inference_key cannot be empty")
+
+         parsed_url = urlparse(endpoint_base_url)
+         if not parsed_url.scheme or not parsed_url.netloc:
+             raise InferenceClientError("endpoint_base_url must be a valid URL")
+
+         self.inference_key = inference_key
+         self.endpoint_base_url = endpoint_base_url.rstrip('/')
+         self.base_domain = self.endpoint_base_url[:self.endpoint_base_url.rindex(
+             '/')]
+         self.deployment_name = self.endpoint_base_url[self.endpoint_base_url.rindex(
+             '/')+1:]
+         self.timeout_seconds = timeout_seconds
+         self._session = requests.Session()
+         self._global_headers = {
+             'Authorization': f'Bearer {inference_key}',
+             'Content-Type': 'application/json'
+         }
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         self._session.close()
+
+     @property
+     def global_headers(self) -> Dict[str, str]:
+         """
+         Get the current global headers that will be used for all requests.
+
+         Returns:
+             Dictionary of current global headers
+         """
+         return self._global_headers.copy()
+
+     def set_global_header(self, key: str, value: str) -> None:
+         """
+         Set or update a global header that will be used for all requests.
+
+         Args:
+             key: Header name
+             value: Header value
+         """
+         self._global_headers[key] = value
+
+     def set_global_headers(self, headers: Dict[str, str]) -> None:
+         """
+         Set multiple global headers at once that will be used for all requests.
+
+         Args:
+             headers: Dictionary of headers to set globally
+         """
+         self._global_headers.update(headers)
+
+     def remove_global_header(self, key: str) -> None:
+         """
+         Remove a global header.
+
+         Args:
+             key: Header name to remove from global headers
+         """
+         if key in self._global_headers:
+             del self._global_headers[key]
+
+     def _build_url(self, path: str) -> str:
+         """Construct the full URL by joining the base URL with the path."""
+         return f"{self.endpoint_base_url}/{path.lstrip('/')}"
+
+     def _build_request_headers(self, request_headers: Optional[Dict[str, str]] = None) -> Dict[str, str]:
+         """
+         Build the final headers by merging global headers with request-specific headers.
+
+         Args:
+             request_headers: Optional headers specific to this request
+
+         Returns:
+             Merged headers dictionary
+         """
+         headers = self._global_headers.copy()
+         if request_headers:
+             headers.update(request_headers)
+         return headers
+
+     def _make_request(self, method: str, path: str, **kwargs) -> requests.Response:
+         """
+         Make an HTTP request with error handling.
+
+         Args:
+             method: HTTP method to use
+             path: API endpoint path
+             **kwargs: Additional arguments to pass to the request
+
+         Returns:
+             Response object from the request
+
+         Raises:
+             InferenceClientError: If the request fails
+         """
+         timeout = kwargs.pop('timeout_seconds', self.timeout_seconds)
+         try:
+             response = self._session.request(
+                 method=method,
+                 url=self._build_url(path),
+                 headers=self._build_request_headers(
+                     kwargs.pop('headers', None)),
+                 timeout=timeout,
+                 **kwargs
+             )
+             response.raise_for_status()
+             return response
+         except requests.exceptions.Timeout:
+             raise InferenceClientError(
+                 f"Request to {path} timed out after {timeout} seconds")
+         except requests.exceptions.RequestException as e:
+             raise InferenceClientError(f"Request to {path} failed: {str(e)}")
+
+     def run_sync(self, data: Dict[str, Any], path: str = "", timeout_seconds: int = 60 * 5, headers: Optional[Dict[str, str]] = None, http_method: str = "POST", stream: bool = False):
+         response = self._make_request(
+             http_method, path, json=data, timeout_seconds=timeout_seconds, headers=headers, stream=stream)
+
+         return InferenceResponse(
+             headers=response.headers,
+             status_code=response.status_code,
+             status_text=response.reason,
+             _original_response=response
+         )
+
+     def run(self, data: Dict[str, Any], path: str = "", timeout_seconds: int = 60 * 5, headers: Optional[Dict[str, str]] = None, http_method: str = "POST", no_response: bool = False):
+         # Add relevant headers to the request, to indicate that the request is async
+         headers = headers or {}
+         if no_response:
+             # If no_response is True, use the "Prefer: respond-async-proxy" header to run async and don't wait for the response
+             headers['Prefer'] = 'respond-async-proxy'
+             self._make_request(
+                 http_method, path, json=data, timeout_seconds=timeout_seconds, headers=headers)
+             return
+         # Add the "Prefer: respond-async" header to the request, to run async and wait for the response
+         headers['Prefer'] = 'respond-async'
+
+         response = self._make_request(
+             http_method, path, json=data, timeout_seconds=timeout_seconds, headers=headers)
+
+         result = response.json()
+         execution_id = result['Id']
+
+         return AsyncInferenceExecution(self, execution_id, AsyncStatus.Initialized)
+
+     def get(self, path: str, params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('GET', path, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def post(self, path: str, json: Optional[Dict[str, Any]] = None, data: Optional[Union[str, Dict[str, Any]]] = None,
+              params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('POST', path, json=json, data=data, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def put(self, path: str, json: Optional[Dict[str, Any]] = None, data: Optional[Union[str, Dict[str, Any]]] = None,
+             params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('PUT', path, json=json, data=data, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def delete(self, path: str, params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('DELETE', path, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def patch(self, path: str, json: Optional[Dict[str, Any]] = None, data: Optional[Union[str, Dict[str, Any]]] = None,
+               params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('PATCH', path, json=json, data=data, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def head(self, path: str, params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('HEAD', path, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def options(self, path: str, params: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, str]] = None, timeout_seconds: Optional[int] = None) -> requests.Response:
+         return self._make_request('OPTIONS', path, params=params, headers=headers, timeout_seconds=timeout_seconds)
+
+     def health(self, healthcheck_path: str = "/health") -> requests.Response:
+         """
+         Check the health status of the API.
+
+         Returns:
+             requests.Response: The response from the health check
+
+         Raises:
+             InferenceClientError: If the health check fails
+         """
+         try:
+             return self.get(healthcheck_path)
+         except InferenceClientError as e:
+             raise InferenceClientError(f"Health check failed: {str(e)}")
+
+
+ @dataclass_json(undefined=Undefined.EXCLUDE)
+ @dataclass
+ class AsyncInferenceExecution:
+     _inference_client: 'InferenceClient'
+     id: str
+     _status: AsyncStatus
+     INFERENCE_ID_HEADER = 'X-Inference-Id'
+
+     def status(self) -> AsyncStatus:
+         """Get the current stored status of the async inference execution. Only the status value type
+
+         Returns:
+             AsyncStatus: The status object
+         """
+
+         return self._status
+
+     def status_json(self) -> Dict[str, Any]:
+         """Get the current status of the async inference execution. Return the status json
+
+         Returns:
+             Dict[str, Any]: The status response containing the execution status and other metadata
+         """
+         url = f'{self._inference_client.base_domain}/status/{self._inference_client.deployment_name}'
+         response = self._inference_client._session.get(
+             url, headers=self._inference_client._build_request_headers({self.INFERENCE_ID_HEADER: self.id}))
+
+         response_json = response.json()
+         self._status = AsyncStatus(response_json['Status'])
+
+         return response_json
+
+     def result(self) -> Dict[str, Any]:
+         """Get the results of the async inference execution.
+
+         Returns:
+             Dict[str, Any]: The results of the inference execution
+         """
+         url = f'{self._inference_client.base_domain}/result/{self._inference_client.deployment_name}'
+         response = self._inference_client._session.get(
+             url, headers=self._inference_client._build_request_headers({self.INFERENCE_ID_HEADER: self.id}))
+
+         if response.headers['Content-Type'] == 'application/json':
+             return response.json()
+         else:
+             return {'result': response.text}
+
+     # alias for get_results
+     output = result
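To illustrate how the pieces of the new module fit together, here is a hedged usage sketch, not an official example: the deployment URL and request payload are hypothetical, and since only `InferenceClient` and `InferenceResponse` are re-exported by the package `__init__`, `AsyncStatus` is imported from the module path directly.

```python
import os
import time

from datacrunch.InferenceClient import InferenceClient
from datacrunch.InferenceClient.inference_client import AsyncStatus

# Placeholder endpoint; substitute a real deployment URL
client = InferenceClient(
    inference_key=os.environ['DATACRUNCH_INFERENCE_KEY'],
    endpoint_base_url='https://example.invalid/my-deployment',
)

payload = {'prompt': 'Hello'}  # hypothetical request body

# Synchronous call: returns an InferenceResponse wrapper
response = client.run_sync(payload)
print(response.status_code, response.output())

# Streaming call: pass stream=True, then iterate the chunks
streamed = client.run_sync(payload, stream=True)
for chunk in streamed.stream():
    print(chunk)

# Asynchronous call: returns an AsyncInferenceExecution handle
execution = client.run(payload)
while execution.status() != AsyncStatus.Completed:
    execution.status_json()  # refreshes the stored status from the /status endpoint
    time.sleep(1)
print(execution.result())
```

The client also defines `__enter__`/`__exit__`, so it can be used as a context manager (`with InferenceClient(...) as client:`) to close the underlying `requests.Session` on exit.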
datacrunch-1.10.0/datacrunch/__version__.py
@@ -0,0 +1 @@
+ VERSION = '1.10.0'
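And a one-line check of the installed release, assuming `VERSION` remains importable from the `datacrunch/__version__.py` module shown above:

```python
from datacrunch.__version__ import VERSION

print(VERSION)  # '1.10.0' for this release
```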