fxn 0.0.40-py3-none-any.whl → 0.0.42-py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (54)
  1. fxn/__init__.py +3 -1
  2. fxn/beta/__init__.py +6 -0
  3. fxn/beta/client.py +16 -0
  4. fxn/beta/prediction.py +16 -0
  5. fxn/beta/remote.py +207 -0
  6. fxn/c/__init__.py +7 -10
  7. fxn/c/configuration.py +114 -56
  8. fxn/c/fxnc.py +42 -22
  9. fxn/c/map.py +60 -30
  10. fxn/c/prediction.py +72 -33
  11. fxn/c/predictor.py +55 -27
  12. fxn/c/stream.py +33 -15
  13. fxn/c/value.py +215 -42
  14. fxn/cli/__init__.py +14 -12
  15. fxn/cli/auth.py +1 -1
  16. fxn/cli/misc.py +1 -1
  17. fxn/cli/{predict.py → predictions.py} +33 -36
  18. fxn/cli/predictors.py +3 -51
  19. fxn/client.py +58 -0
  20. fxn/compile/__init__.py +7 -0
  21. fxn/compile/compile.py +80 -0
  22. fxn/compile/sandbox.py +177 -0
  23. fxn/compile/signature.py +183 -0
  24. fxn/function.py +10 -6
  25. fxn/lib/__init__.py +1 -1
  26. fxn/lib/linux/arm64/libFunction.so +0 -0
  27. fxn/lib/linux/x86_64/libFunction.so +0 -0
  28. fxn/lib/macos/arm64/Function.dylib +0 -0
  29. fxn/lib/macos/x86_64/Function.dylib +0 -0
  30. fxn/lib/windows/arm64/Function.dll +0 -0
  31. fxn/lib/windows/x86_64/Function.dll +0 -0
  32. fxn/services/__init__.py +4 -4
  33. fxn/services/prediction.py +180 -351
  34. fxn/services/predictor.py +14 -187
  35. fxn/services/user.py +16 -42
  36. fxn/types/__init__.py +4 -4
  37. fxn/types/dtype.py +1 -1
  38. fxn/types/prediction.py +20 -10
  39. fxn/types/predictor.py +18 -32
  40. fxn/types/user.py +9 -15
  41. fxn/version.py +2 -2
  42. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/METADATA +5 -5
  43. fxn-0.0.42.dist-info/RECORD +47 -0
  44. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/WHEEL +1 -1
  45. fxn/api/__init__.py +0 -6
  46. fxn/api/client.py +0 -43
  47. fxn/c/dtype.py +0 -26
  48. fxn/c/status.py +0 -12
  49. fxn/c/version.py +0 -13
  50. fxn/cli/env.py +0 -40
  51. fxn-0.0.40.dist-info/RECORD +0 -44
  52. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/LICENSE +0 -0
  53. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/entry_points.txt +0 -0
  54. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/top_level.txt +0 -0
fxn/services/prediction.py
@@ -1,47 +1,57 @@
  #
  # Function
- # Copyright © 2024 NatML Inc. All Rights Reserved.
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
  #

- from ctypes import byref, cast, c_char_p, c_double, c_int32, c_uint8, c_void_p, create_string_buffer, string_at, CDLL, POINTER
  from dataclasses import asdict, is_dataclass
  from datetime import datetime, timezone
- from importlib import resources
  from io import BytesIO
- from json import dumps, loads
- from numpy import array, dtype, int32, ndarray, zeros
- from numpy.ctypeslib import as_array, as_ctypes_type
- from numpy.typing import NDArray
+ from numpy import array, ndarray
  from pathlib import Path
  from PIL import Image
- from platform import machine, system
  from pydantic import BaseModel
- from requests import get, post
+ from requests import get
+ from rich.progress import Progress, TextColumn, BarColumn, DownloadColumn, TransferSpeedColumn, TimeRemainingColumn
  from tempfile import gettempdir
- from typing import Any, AsyncIterator, Dict, List, Optional, Union
+ from typing import Any, AsyncIterator
  from urllib.parse import urlparse

- from ..api import GraphClient
- from ..c import load_fxnc, FXNConfigurationRef, FXNDtype, FXNPredictionRef, FXNPredictorRef, FXNStatus, FXNValueRef, FXNValueFlags, FXNValueMapRef
+ from ..c import Configuration, Predictor, Prediction as CPrediction, Value as CValue, ValueFlags, ValueMap
+ from ..client import FunctionClient
  from ..types import Acceleration, Prediction, PredictionResource

+ Value = ndarray | str | float | int | bool | list[Any] | dict[str, Any] | Image.Image | BytesIO | memoryview
+
  class PredictionService:

-     def __init__ (self, client: GraphClient):
+     def __init__ (self, client: FunctionClient):
          self.client = client
-         self.__fxnc = PredictionService.__load_fxnc()
          self.__cache = { }
-         self.__cache_dir = self.__class__.__get_resource_dir() / ".fxn" / "cache"
+         self.__cache_dir = self.__class__.__get_home_dir() / ".fxn" / "cache"
          self.__cache_dir.mkdir(parents=True, exist_ok=True)

+     def ready (self, tag: str, **kwargs) -> bool:
+         """
+         Check whether a predictor has been preloaded and is ready to make predictions.
+
+         Parameters:
+             tag (str): Predictor tag.
+
+         Returns:
+             bool: Whether the predictor is ready to make predictions.
+         """
+         return tag in self.__cache
+
      def create (
          self,
          tag: str,
          *,
-         inputs: Optional[Dict[str, ndarray | str | float | int | bool | List[Any] | Dict[str, Any] | Path | Image.Image]] = None,
-         acceleration: Acceleration=Acceleration.Default,
+         inputs: dict[str, Value] | None=None,
+         acceleration: Acceleration=Acceleration.Auto,
+         device=None,
          client_id: str=None,
-         configuration_id: str=None
+         configuration_id: str=None,
+         verbose: bool=False
      ) -> Prediction:
          """
          Create a prediction.
@@ -52,339 +62,201 @@ class PredictionService:
              acceleration (Acceleration): Prediction acceleration.
              client_id (str): Function client identifier. Specify this to override the current client identifier.
              configuration_id (str): Configuration identifier. Specify this to override the current client configuration identifier.
+             verbose (bool): Enable verbose logging.

          Returns:
              Prediction: Created prediction.
          """
-         # Check if cached
-         if tag in self.__cache:
-             return self.__predict(tag=tag, predictor=self.__cache[tag], inputs=inputs)
-         # Query
-         response = post(
-             f"{self.client.api_url}/predict/{tag}",
-             json={ },
-             headers={
-                 "Authorization": f"Bearer {self.client.access_key}",
-                 "fxn-client": client_id if client_id is not None else self.__get_client_id(),
-                 "fxn-configuration-token": configuration_id if configuration_id is not None else self.__get_configuration_id()
-             }
-         )
-         # Check
-         prediction = response.json()
-         try:
-             response.raise_for_status()
-         except Exception as ex:
-             error = prediction["errors"][0]["message"] if "errors" in prediction else str(ex)
-             raise RuntimeError(error)
-         # Check raw prediction
-         prediction = Prediction(**prediction)
          if inputs is None:
-             return prediction
-         # Create edge prediction
-         predictor = self.__load(prediction, acceleration=acceleration)
-         self.__cache[tag] = predictor
-         prediction = self.__predict(tag=tag, predictor=predictor, inputs=inputs)
-         # Return
-         return prediction
-
-     async def stream ( # INCOMPLETE # Streaming support
+             return self.__create_raw_prediction(
+                 tag=tag,
+                 client_id=client_id,
+                 configuration_id=configuration_id
+             )
+         predictor = self.__get_predictor(
+             tag=tag,
+             acceleration=acceleration,
+             device=device,
+             client_id=client_id,
+             configuration_id=configuration_id,
+             verbose=verbose
+         )
+         with (
+             self.__to_value_map(inputs) as input_map,
+             predictor.create_prediction(input_map) as prediction
+         ):
+             return self.__to_prediction(tag, prediction)
+
+     async def stream (
          self,
          tag: str,
          *,
-         inputs: Dict[str, float | int | str | bool | NDArray | List[Any] | Dict[str, Any] | Path | Image.Image] = {},
-         acceleration: Acceleration=Acceleration.Default,
-         client_id: str=None,
-         configuration_id: str=None
+         inputs: dict[str, Value],
+         acceleration: Acceleration=Acceleration.Auto,
+         device=None
      ) -> AsyncIterator[Prediction]:
          """
-         Create a streaming prediction.
-
-         NOTE: This feature is currently experimental.
+         Stream a prediction.

          Parameters:
              tag (str): Predictor tag.
              inputs (dict): Input values.
              acceleration (Acceleration): Prediction acceleration.
-             client_id (str): Function client identifier. Specify this to override the current client identifier.
-             configuration_id (str): Configuration identifier. Specify this to override the current client configuration identifier.

          Returns:
              Prediction: Created prediction.
          """
-         # Check if cached
-         if tag in self.__cache:
-             yield self.__predict(tag=tag, predictor=self.__cache[tag], inputs=inputs)
-             return
-         # Create prediction
-         prediction = self.create(
+         predictor = self.__get_predictor(
              tag=tag,
-             client_id=client_id,
-             configuration_id=configuration_id
+             acceleration=acceleration,
+             device=device,
          )
-         # Make single prediction
-         predictor = self.__load(prediction, acceleration=acceleration)
-         self.__cache[tag] = predictor
-         prediction = self.__predict(tag=tag, predictor=predictor, inputs=inputs)
-         # Yield
-         yield prediction
+         with (
+             self.__to_value_map(inputs) as input_map,
+             predictor.stream_prediction(input_map) as stream
+         ):
+             for prediction in stream:
+                 with prediction:
+                     yield self.__to_prediction(tag, prediction)

-     @classmethod
-     def __load_fxnc (cls) -> Optional[CDLL]:
-         os = system().lower()
-         os = "macos" if os == "darwin" else os
-         arch = machine().lower()
-         arch = "arm64" if arch == "aarch64" else arch
-         arch = "x86_64" if arch in ["x64", "amd64"] else arch
-         package = f"fxn.lib.{os}.{arch}"
-         resource = "libFunction.so"
-         resource = "Function.dylib" if os == "macos" else resource
-         resource = "Function.dll" if os == "windows" else resource
-         with resources.path(package, resource) as fxnc_path:
-             return load_fxnc(fxnc_path)
-
-     def __get_client_id (self) -> str:
-         # Get
-         buffer = create_string_buffer(64)
-         status = self.__fxnc.FXNConfigurationGetClientID(buffer, len(buffer))
-         assert status.value == FXNStatus.OK, \
-             f"Failed to retrieve prediction client identifier with status: {status.value}"
-         client_id = buffer.value.decode("utf-8")
-         # Return
-         return client_id
-
-     def __get_configuration_id (self) -> Optional[str]:
-         # Check
-         if not self.__fxnc:
-             return None
-         # Get
-         buffer = create_string_buffer(2048)
-         status = self.__fxnc.FXNConfigurationGetUniqueID(buffer, len(buffer))
-         assert status.value == FXNStatus.OK, \
-             f"Failed to retrieve prediction configuration identifier with error: {self.__class__.__status_to_error(status.value)}"
-         uid = buffer.value.decode("utf-8")
-         # Return
-         return uid
+     def __create_raw_prediction (
+         self,
+         tag: str,
+         client_id: str=None,
+         configuration_id: str=None
+     ) -> Prediction:
+         client_id = client_id if client_id is not None else Configuration.get_client_id()
+         configuration_id = configuration_id if configuration_id is not None else Configuration.get_unique_id()
+         prediction = self.client.request(
+             method="POST",
+             path="/predictions",
+             body={
+                 "tag": tag,
+                 "clientId": client_id,
+                 "configurationId": configuration_id,
+             },
+             response_type=Prediction
+         )
+         return prediction

-     def __load (
+     def __get_predictor (
          self,
-         prediction: Prediction,
-         *,
-         acceleration: Acceleration=Acceleration.Default
-     ) -> type[FXNPredictorRef]:
-         fxnc = self.__fxnc
-         configuration = FXNConfigurationRef()
-         try:
-             # Create configuration
-             status = fxnc.FXNConfigurationCreate(byref(configuration))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to create {prediction.tag} configuration with error: {self.__class__.__status_to_error(status.value)}"
-             status = fxnc.FXNConfigurationSetTag(configuration, prediction.tag.encode())
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to set configuration tag with error: {self.__class__.__status_to_error(status.value)}"
-             status = fxnc.FXNConfigurationSetToken(configuration, prediction.configuration.encode())
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to set configuration token with error: {self.__class__.__status_to_error(status.value)}"
-             status = fxnc.FXNConfigurationSetAcceleration(configuration, int(acceleration))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to set configuration acceleration with error: {self.__class__.__status_to_error(status.value)}"
+         tag: str,
+         acceleration: Acceleration=Acceleration.Auto,
+         device=None,
+         client_id: str=None,
+         configuration_id: str=None,
+         verbose: bool=False
+     ) -> Predictor:
+         if tag in self.__cache:
+             return self.__cache[tag]
+         prediction = self.__create_raw_prediction(
+             tag=tag,
+             client_id=client_id,
+             configuration_id=configuration_id
+         )
+         with Configuration() as configuration, Progress(
+             TextColumn("[bold blue]{task.fields[filename]}"),
+             BarColumn(),
+             DownloadColumn(),
+             TransferSpeedColumn(),
+             TimeRemainingColumn(),
+             disable=not verbose
+         ) as progress:
+             configuration.tag = prediction.tag
+             configuration.token = prediction.configuration
+             configuration.acceleration = acceleration
+             configuration.device = device
              for resource in prediction.resources:
-                 if resource.type == "fxn": # CHECK # Remove in fxnc 0.0.27
-                     continue
-                 path = self.__get_resource_path(resource)
-                 status = fxnc.FXNConfigurationAddResource(configuration, resource.type.encode(), str(path).encode())
-                 assert status.value == FXNStatus.OK, \
-                     f"Failed to set prediction configuration resource with type {resource.type} for tag {prediction.tag} with error: {self.__class__.__status_to_error(status.value)}"
-             # Create predictor
-             predictor = FXNPredictorRef()
-             status = fxnc.FXNPredictorCreate(configuration, byref(predictor))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to create prediction for tag {prediction.tag} with error: {self.__class__.__status_to_error(status.value)}"
-             # Return
-             return predictor
-         finally:
-             fxnc.FXNConfigurationRelease(configuration)
-
-     def __predict (self, *, tag: str, predictor, inputs: Dict[str, Any]) -> Prediction:
-         fxnc = self.__fxnc
-         input_map = FXNValueMapRef()
-         prediction = FXNPredictionRef()
-         try:
-             # Marshal inputs
-             status = fxnc.FXNValueMapCreate(byref(input_map))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to create {tag} prediction because input values could not be provided to the predictor with error: {self.__class__.__status_to_error(status.value)}"
-             for name, value in inputs.items():
-                 value = self.__to_value(value)
-                 fxnc.FXNValueMapSetValue(input_map, name.encode(), value)
-             # Predict
-             status = fxnc.FXNPredictorCreatePrediction(predictor, input_map, byref(prediction))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to create {tag} prediction with error: {self.__class__.__status_to_error(status.value)}"
-             # Marshal prediction
-             id = create_string_buffer(256)
-             error = create_string_buffer(2048)
-             latency = c_double()
-             status = fxnc.FXNPredictionGetID(prediction, id, len(id))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to get {tag} prediction identifier with error: {self.__class__.__status_to_error(status.value)}"
-             status = fxnc.FXNPredictionGetLatency(prediction, byref(latency))
-             assert status.value == FXNStatus.OK, \
-                 f"Failed to get {tag} prediction latency with error: {self.__class__.__status_to_error(status.value)}"
-             fxnc.FXNPredictionGetError(prediction, error, len(error))
-             id = id.value.decode("utf-8")
-             latency = latency.value
-             error = error.value.decode("utf-8")
-             log_length = c_int32()
-             fxnc.FXNPredictionGetLogLength(prediction, byref(log_length))
-             logs = create_string_buffer(log_length.value + 1)
-             fxnc.FXNPredictionGetLogs(prediction, logs, len(logs))
-             logs = logs.value.decode("utf-8")
-             # Marshal outputs
-             results = []
-             output_count = c_int32()
-             output_map = FXNValueMapRef()
-             status = fxnc.FXNPredictionGetResults(prediction, byref(output_map))
-             assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction results with error: {self.__class__.__status_to_error(status.value)}"
-             status = fxnc.FXNValueMapGetSize(output_map, byref(output_count))
-             assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction result count with error: {self.__class__.__status_to_error(status.value)}"
-             for idx in range(output_count.value):
-                 name = create_string_buffer(256)
-                 status = fxnc.FXNValueMapGetKey(output_map, idx, name, len(name))
-                 assert status.value == FXNStatus.OK, \
-                     f"Failed to get {tag} prediction output name at index {idx} with error: {self.__class__.__status_to_error(status.value)}"
-                 value = FXNValueRef()
-                 status = fxnc.FXNValueMapGetValue(output_map, name, byref(value))
-                 assert status.value == FXNStatus.OK, \
-                     f"Failed to get {tag} prediction output value at index {idx} with error: {self.__class__.__status_to_error(status.value)}"
-                 name = name.value.decode("utf-8")
-                 value = self.__to_object(value)
-                 results.append(value)
-             # Return
-             return Prediction(
-                 id=id,
-                 tag=tag,
-                 results=results if not error else None,
-                 latency=latency,
-                 error=error if error else None,
-                 logs=logs,
-                 created=datetime.now(timezone.utc).isoformat()
-             )
-         finally:
-             fxnc.FXNPredictionRelease(prediction)
-             fxnc.FXNValueMapRelease(input_map)
+                 path = self.__download_resource(resource, progress=progress)
+                 configuration.add_resource(resource.type, path)
+             predictor = Predictor(configuration)
+             self.__cache[tag] = predictor
+             return predictor

+     def __to_value_map (self, inputs: dict[str, Value]) -> ValueMap:
+         map = ValueMap()
+         for name, value in inputs.items():
+             map[name] = self.__to_value(value)
+         return map
+
      def __to_value (
          self,
-         value: float | int | bool | str | NDArray | List[Any] | Dict[str, Any] | Image.Image | bytes | bytearray | memoryview | BytesIO | None
-     ) -> type[FXNValueRef]:
-         value = PredictionService.__try_ensure_serializable(value)
-         fxnc = self.__fxnc
-         result = FXNValueRef()
-         if result is None:
-             fxnc.FXNValueCreateNull(byref(result))
+         value: Value,
+         *,
+         flags: ValueFlags=ValueFlags.NONE
+     ) -> CValue:
+         value = self.__class__.__try_ensure_serializable(value)
+         if value is None:
+             return CValue.create_null()
          elif isinstance(value, bool):
-             return self.__to_value(array(value, dtype="bool"))
+             return self.__to_value(array(value, dtype="bool"), flags=flags | ValueFlags.COPY_DATA)
          elif isinstance(value, int):
-             return self.__to_value(array(value, dtype="int32"))
+             return self.__to_value(array(value, dtype="int32"), flags=flags | ValueFlags.COPY_DATA)
          elif isinstance(value, float):
-             return self.__to_value(array(value, dtype="float32"))
+             return self.__to_value(array(value, dtype="float32"), flags=flags | ValueFlags.COPY_DATA)
          elif isinstance(value, ndarray):
-             dtype = _NP_TO_FXN_DTYPE.get(value.dtype)
-             assert dtype is not None, f"Failed to convert numpy array to Function value because array data type is not supported: {value.dtype}"
-             fxnc.FXNValueCreateArray(
-                 value.ctypes.data_as(c_void_p),
-                 value.ctypes.shape_as(c_int32),
-                 len(value.shape),
-                 dtype,
-                 FXNValueFlags.NONE,
-                 byref(result)
-             )
+             return CValue.create_array(value, flags=flags)
          elif isinstance(value, str):
-             fxnc.FXNValueCreateString(value.encode(), byref(result))
+             return CValue.create_string(value)
          elif isinstance(value, list):
-             fxnc.FXNValueCreateList(dumps(value).encode(), byref(result))
+             return CValue.create_list(value)
          elif isinstance(value, dict):
-             fxnc.FXNValueCreateDict(dumps(value).encode(), byref(result))
+             return CValue.create_dict(value)
          elif isinstance(value, Image.Image):
-             value = array(value)
-             status = fxnc.FXNValueCreateImage(
-                 value.ctypes.data_as(c_void_p),
-                 value.shape[1],
-                 value.shape[0],
-                 value.shape[2],
-                 FXNValueFlags.COPY_DATA,
-                 byref(result)
-             )
-             assert status.value == FXNStatus.OK, f"Failed to create image value with error: {self.__class__.__status_to_error(status.value)}"
+             return CValue.create_image(value)
          elif isinstance(value, (bytes, bytearray, memoryview, BytesIO)):
-             copy = isinstance(value, memoryview)
-             view = memoryview(value.getvalue() if isinstance(value, BytesIO) else value) if not isinstance(value, memoryview) else value
-             buffer = (c_uint8 * len(view)).from_buffer(view)
-             fxnc.FXNValueCreateBinary(
-                 buffer,
-                 len(view),
-                 FXNValueFlags.COPY_DATA if copy else FXNValueFlags.NONE,
-                 byref(result)
-             )
+             flags |= ValueFlags.COPY_DATA if not isinstance(value, memoryview) else 0
+             view_or_bytes = value.getvalue() if isinstance(value, BytesIO) else value
+             view = memoryview(view_or_bytes) if not isinstance(view_or_bytes, memoryview) else view_or_bytes
+             return CValue.create_binary(view, flags=flags)
          else:
-             raise RuntimeError(f"Failed to convert Python value to Function value because Python value has an unsupported type: {type(value)}")
-         return result
+             raise RuntimeError(f"Failed to convert object to Function value because object has an unsupported type: {type(value)}")
+
+     def __to_prediction (self, tag: str, raw_prediction: CPrediction) -> Prediction:
+         output_map = raw_prediction.results
+         results = [output_map[output_map.key(idx)].to_object() for idx in range(len(output_map))] if output_map else None
+         prediction = Prediction(
+             id=raw_prediction.id,
+             tag=tag,
+             results=results,
+             latency=raw_prediction.latency,
+             error=raw_prediction.error,
+             logs=raw_prediction.logs,
+             created=datetime.now(timezone.utc).isoformat()
+         )
+         return prediction

-     def __to_object (
+     def __download_resource (
          self,
-         value: type[FXNValueRef]
-     ) -> float | int | bool | str | NDArray | List[Any] | Dict[str, Any] | Image.Image | BytesIO | None:
-         # Type
-         fxnc = self.__fxnc
-         dtype = FXNDtype()
-         status = fxnc.FXNValueGetType(value, byref(dtype))
-         assert status.value == FXNStatus.OK, f"Failed to get value data type with error: {self.__class__.__status_to_error(status.value)}"
-         dtype = dtype.value
-         # Get data
-         data = c_void_p()
-         status = fxnc.FXNValueGetData(value, byref(data))
-         assert status.value == FXNStatus.OK, f"Failed to get value data with error: {self.__class__.__status_to_error(status.value)}"
-         # Get shape
-         dims = c_int32()
-         status = fxnc.FXNValueGetDimensions(value, byref(dims))
-         assert status.value == FXNStatus.OK, f"Failed to get value dimensions with error: {self.__class__.__status_to_error(status.value)}"
-         shape = zeros(dims.value, dtype=int32)
-         status = fxnc.FXNValueGetShape(value, shape.ctypes.data_as(POINTER(c_int32)), dims)
-         assert status.value == FXNStatus.OK, f"Failed to get value shape with error: {self.__class__.__status_to_error(status.value)}"
-         # Switch
-         if dtype == FXNDtype.NULL:
-             return None
-         elif dtype in _FXN_TO_NP_DTYPE:
-             dtype_c = as_ctypes_type(_FXN_TO_NP_DTYPE[dtype])
-             tensor = as_array(cast(data, POINTER(dtype_c)), shape)
-             return tensor.item() if len(tensor.shape) == 0 else tensor.copy()
-         elif dtype == FXNDtype.STRING:
-             return cast(data, c_char_p).value.decode()
-         elif dtype == FXNDtype.LIST:
-             return loads(cast(data, c_char_p).value.decode())
-         elif dtype == FXNDtype.DICT:
-             return loads(cast(data, c_char_p).value.decode())
-         elif dtype == FXNDtype.IMAGE:
-             pixel_buffer = as_array(cast(data, POINTER(c_uint8)), shape)
-             return Image.fromarray(pixel_buffer.copy())
-         elif dtype == FXNDtype.BINARY:
-             return BytesIO(string_at(data, shape[0]))
-         else:
-             raise RuntimeError(f"Failed to convert Function value to Python value because Function value has unsupported type: {dtype}")
+         resource: PredictionResource,
+         *,
+         progress: Progress
+     ) -> Path:
+         path = self.__get_resource_path(resource)
+         if path.exists():
+             return path
+         path.parent.mkdir(parents=True, exist_ok=True)
+         response = get(resource.url, stream=True)
+         response.raise_for_status()
+         size = int(response.headers.get("content-length", 0))
+         stem = Path(urlparse(resource.url).path).name
+         task = progress.add_task(f"Downloading", filename=stem, total=size)
+         with open(path, "wb") as fp:
+             for chunk in response.iter_content(chunk_size=8192):
+                 if chunk:
+                     fp.write(chunk)
+                     progress.update(task, advance=len(chunk))
+         return path

      def __get_resource_path (self, resource: PredictionResource) -> Path:
-         res_name = Path(urlparse(resource.url).path).name
-         res_path = self.__cache_dir / res_name
-         if res_path.exists():
-             return res_path
-         req = get(resource.url)
-         req.raise_for_status()
-         with open(res_path, "wb") as f:
-             f.write(req.content)
-         return res_path
+         stem = Path(urlparse(resource.url).path).name
+         path = self.__cache_dir / stem
+         path = path / resource.name if resource.name else path
+         return path

      @classmethod
-     def __get_resource_dir (cls) -> Path:
+     def __get_home_dir (cls) -> Path:
          try:
              check = Path.home() / ".fxntest"
              with open(check, "w") as f:
@@ -404,47 +276,4 @@ class PredictionService:
              return asdict(object)
          if isinstance(object, BaseModel):
              return object.model_dump(mode="json", by_alias=True)
-         return object
-
-     @classmethod
-     def __status_to_error (cls, status: int) -> str:
-         if status == FXNStatus.ERROR_INVALID_ARGUMENT:
-             return "FXN_ERROR_INVALID_ARGUMENT"
-         elif status == FXNStatus.ERROR_INVALID_OPERATION:
-             return "FXN_ERROR_INVALID_OPERATION"
-         elif status == FXNStatus.ERROR_NOT_IMPLEMENTED:
-             return "FXN_ERROR_NOT_IMPLEMENTED"
-         return ""
-
-     PREDICTION_FIELDS = f"""
-     id
-     tag
-     type
-     configuration
-     resources {{
-         type
-         url
-         name
-     }}
-     latency
-     error
-     logs
-     created
-     """
-
-     _FXN_TO_NP_DTYPE = {
-         FXNDtype.FLOAT16: dtype("float16"),
-         FXNDtype.FLOAT32: dtype("float32"),
-         FXNDtype.FLOAT64: dtype("float64"),
-         FXNDtype.INT8: dtype("int8"),
-         FXNDtype.INT16: dtype("int16"),
-         FXNDtype.INT32: dtype("int32"),
-         FXNDtype.INT64: dtype("int64"),
-         FXNDtype.UINT8: dtype("uint8"),
-         FXNDtype.UINT16: dtype("uint16"),
-         FXNDtype.UINT32: dtype("uint32"),
-         FXNDtype.UINT64: dtype("uint64"),
-         FXNDtype.BOOL: dtype("bool"),
-     }
-
-     _NP_TO_FXN_DTYPE = { value: key for key, value in _FXN_TO_NP_DTYPE.items() }
+         return object
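
For orientation, a minimal usage sketch of the reworked prediction API shown in the diff above. The Function() entry point, its predictions property, and the example predictor tag and inputs are illustrative assumptions not taken from this diff; the Acceleration.Auto default, the verbose flag, and the async streaming interface come from the new create() and stream() signatures.

    # Hypothetical usage sketch; tag, inputs, and client construction are assumptions.
    import asyncio
    from fxn import Function
    from fxn.types import Acceleration

    fxn = Function()  # assumes an access key is already configured

    # One-shot prediction: downloads predictor resources (progress bar when verbose=True),
    # caches the loaded predictor, then runs it locally.
    prediction = fxn.predictions.create(
        tag="@example/predictor",        # hypothetical predictor tag
        inputs={ "x": 3.0 },             # hypothetical input
        acceleration=Acceleration.Auto,  # new default (previously Acceleration.Default)
        verbose=True                     # new in 0.0.42: show download progress
    )
    print(prediction.results, prediction.latency)

    # Streaming: stream() now takes only tag, inputs, acceleration, and device.
    async def main ():
        async for partial in fxn.predictions.stream(
            tag="@example/predictor",
            inputs={ "x": 3.0 }
        ):
            print(partial.results)

    asyncio.run(main())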