fxn 0.0.34-py3-none-any.whl → 0.0.36-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. fxn/c/__init__.py +16 -0
  2. fxn/c/configuration.py +60 -0
  3. fxn/c/dtype.py +26 -0
  4. fxn/c/fxnc.py +28 -0
  5. fxn/c/map.py +34 -0
  6. fxn/c/prediction.py +37 -0
  7. fxn/c/predictor.py +31 -0
  8. fxn/c/status.py +12 -0
  9. fxn/c/stream.py +22 -0
  10. fxn/c/value.py +50 -0
  11. fxn/c/version.py +13 -0
  12. fxn/cli/__init__.py +8 -8
  13. fxn/cli/auth.py +1 -1
  14. fxn/cli/predict.py +3 -4
  15. fxn/cli/predictors.py +1 -40
  16. fxn/function.py +10 -11
  17. fxn/lib/macos/arm64/Function.dylib +0 -0
  18. fxn/lib/macos/x86_64/Function.dylib +0 -0
  19. fxn/lib/windows/arm64/Function.dll +0 -0
  20. fxn/lib/windows/x86_64/Function.dll +0 -0
  21. fxn/services/environment.py +1 -1
  22. fxn/services/prediction.py +456 -0
  23. fxn/services/predictor.py +3 -70
  24. fxn/services/storage.py +3 -4
  25. fxn/services/user.py +1 -1
  26. fxn/types/__init__.py +2 -3
  27. fxn/types/prediction.py +0 -4
  28. fxn/types/predictor.py +15 -22
  29. fxn/version.py +1 -1
  30. {fxn-0.0.34.dist-info → fxn-0.0.36.dist-info}/METADATA +27 -29
  31. fxn-0.0.36.dist-info/RECORD +49 -0
  32. {fxn-0.0.34.dist-info → fxn-0.0.36.dist-info}/WHEEL +1 -1
  33. fxn/libs/linux/__init__.py +0 -4
  34. fxn/libs/macos/Function.dylib +0 -0
  35. fxn/libs/macos/__init__.py +0 -4
  36. fxn/libs/windows/Function.dll +0 -0
  37. fxn/libs/windows/__init__.py +0 -4
  38. fxn/services/prediction/__init__.py +0 -6
  39. fxn/services/prediction/fxnc.py +0 -301
  40. fxn/services/prediction/service.py +0 -512
  41. fxn/types/value.py +0 -22
  42. fxn-0.0.34.dist-info/RECORD +0 -42
  43. /fxn/{graph → api}/__init__.py +0 -0
  44. /fxn/{graph → api}/client.py +0 -0
  45. /fxn/{libs → lib}/__init__.py +0 -0
  46. {fxn-0.0.34.dist-info → fxn-0.0.36.dist-info}/LICENSE +0 -0
  47. {fxn-0.0.34.dist-info → fxn-0.0.36.dist-info}/entry_points.txt +0 -0
  48. {fxn-0.0.34.dist-info → fxn-0.0.36.dist-info}/top_level.txt +0 -0
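The headline change in this release is that edge predictions are now served by a single in-process PredictionService (fxn/services/prediction.py) built on the new ctypes bindings under fxn/c/ and the per-platform libraries under fxn/lib/, replacing the old fxn/services/prediction/ package, fxn/libs/, and the fxn/graph module (now fxn/api). The sketch below shows how a caller would exercise the new code path, assuming the public Function client still exposes the service as `predictions`; the access key, predictor tag, and input name are placeholders.

# Minimal usage sketch (assumptions noted inline); not taken from the package itself.
from pathlib import Path
from fxn import Function

fxn = Function(access_key="fxn_...")                # assumed constructor argument
prediction = fxn.predictions.create(
    tag="@username/some-predictor",                 # hypothetical predictor tag
    inputs={ "image": Path("input.jpg") }           # plain Python values are marshaled to Function values
)
print(prediction.results, prediction.latency, prediction.logs)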
fxn/services/prediction.py ADDED
@@ -0,0 +1,456 @@
+ #
+ # Function
+ # Copyright © 2024 NatML Inc. All Rights Reserved.
+ #
+
+ from ctypes import byref, cast, c_char_p, c_double, c_int32, c_uint8, c_void_p, create_string_buffer, string_at, CDLL, POINTER
+ from dataclasses import asdict, is_dataclass
+ from datetime import datetime, timezone
+ from importlib import resources
+ from io import BytesIO
+ from json import dumps, loads
+ from numpy import array, dtype, int32, ndarray, zeros
+ from numpy.ctypeslib import as_array, as_ctypes_type
+ from numpy.typing import NDArray
+ from pathlib import Path
+ from PIL import Image
+ from platform import machine, system
+ from pydantic import BaseModel
+ from requests import get, post
+ from typing import Any, AsyncIterator, Dict, List, Optional, Union
+ from urllib.parse import urlparse
+
+ from ..api import GraphClient
+ from ..c import load_fxnc, FXNConfigurationRef, FXNDtype, FXNPredictionRef, FXNPredictorRef, FXNStatus, FXNValueRef, FXNValueFlags, FXNValueMapRef
+ from ..types import Acceleration, Prediction, PredictionResource
+
+ class PredictionService:
+
+     def __init__ (self, client: GraphClient):
+         self.client = client
+         self.__fxnc = PredictionService.__load_fxnc()
+         self.__cache = { }
+
+     def create (
+         self,
+         tag: str,
+         *,
+         inputs: Optional[Dict[str, Union[ndarray, str, float, int, bool, List, Dict[str, Any], Path, Image.Image]]] = None,
+         acceleration: Acceleration=Acceleration.Default,
+         client_id: str=None,
+         configuration_id: str=None
+     ) -> Prediction:
+         """
+         Create a prediction.
+
+         Parameters:
+             tag (str): Predictor tag.
+             inputs (dict): Input values.
+             acceleration (Acceleration): Prediction acceleration.
+             client_id (str): Function client identifier. Specify this to override the current client identifier.
+             configuration_id (str): Configuration identifier. Specify this to override the current client configuration identifier.
+
+         Returns:
+             Prediction: Created prediction.
+         """
+         # Check if cached
+         if tag in self.__cache:
+             return self.__predict(tag=tag, predictor=self.__cache[tag], inputs=inputs)
+         # Query
+         response = post(
+             f"{self.client.api_url}/predict/{tag}?rawOutputs=true",
+             json={ },
+             headers={
+                 "Authorization": f"Bearer {self.client.access_key}",
+                 "fxn-client": client_id if client_id is not None else self.__get_client_id(),
+                 "fxn-configuration-token": configuration_id if configuration_id is not None else self.__get_configuration_id()
+             }
+         )
+         # Check
+         prediction = response.json()
+         try:
+             response.raise_for_status()
+         except Exception as ex:
+             error = prediction["errors"][0]["message"] if "errors" in prediction else str(ex)
+             raise RuntimeError(error)
+         # Check raw prediction
+         prediction = Prediction(**prediction)
+         if inputs is None:
+             return prediction
+         # Create edge prediction
+         predictor = self.__load(prediction, acceleration=acceleration)
+         self.__cache[tag] = predictor
+         prediction = self.__predict(tag=tag, predictor=predictor, inputs=inputs)
+         # Return
+         return prediction
+
+     async def stream ( # INCOMPLETE # Streaming support
+         self,
+         tag: str,
+         *,
+         inputs: Dict[str, Union[float, int, str, bool, NDArray, List[Any], Dict[str, Any], Path, Image.Image]] = {},
+         acceleration: Acceleration=Acceleration.Default,
+         client_id: str=None,
+         configuration_id: str=None
+     ) -> AsyncIterator[Prediction]:
+         """
+         Create a streaming prediction.
+
+         NOTE: This feature is currently experimental.
+
+         Parameters:
+             tag (str): Predictor tag.
+             inputs (dict): Input values.
+             acceleration (Acceleration): Prediction acceleration.
+             client_id (str): Function client identifier. Specify this to override the current client identifier.
+             configuration_id (str): Configuration identifier. Specify this to override the current client configuration identifier.
+
+         Returns:
+             Prediction: Created prediction.
+         """
+         # Check if cached
+         if tag in self.__cache:
+             yield self.__predict(tag=tag, predictor=self.__cache[tag], inputs=inputs)
+             return
+         # Create prediction
+         prediction = self.create(
+             tag=tag,
+             client_id=client_id,
+             configuration_id=configuration_id
+         )
+         # Make single prediction
+         predictor = self.__load(prediction, acceleration=acceleration)
+         self.__cache[tag] = predictor
+         prediction = self.__predict(tag=tag, predictor=predictor, inputs=inputs)
+         # Yield
+         yield prediction
+
+     @classmethod
+     def __load_fxnc (self) -> Optional[CDLL]:
+         # Get resource
+         package, resource = None, None
+         os = system()
+         if os == "Darwin":
+             package = f"fxn.lib.macos.{machine()}"
+             resource = f"Function.dylib"
+         elif os == "Linux" and False: # INCOMPLETE # Linux
+             package = f"fxn.lib.linux.{machine()}"
+             resource = f"libFunction.so"
+         elif os == "Windows":
+             package = f"fxn.lib.windows.{machine()}"
+             resource = f"Function.dll"
+         else:
+             return None
+         # Load
+         with resources.path(package, resource) as fxnc_path:
+             return load_fxnc(fxnc_path)
+
+     def __get_client_id (self) -> str:
+         # Fallback if fxnc failed to load
+         if not self.__fxnc:
+             return {
+                 "Darwin": f"macos-{machine()}",
+                 "Linux": f"linux-{machine()}",
+                 "Windows": f"windows-{machine()}"
+             }[system()]
+         # Get
+         buffer = create_string_buffer(64)
+         status = self.__fxnc.FXNConfigurationGetClientID(buffer, len(buffer))
+         assert status.value == FXNStatus.OK, \
+             f"Failed to retrieve prediction client identifier with status: {status.value}"
+         client_id = buffer.value.decode("utf-8")
+         # Return
+         return client_id
+
+     def __get_configuration_id (self) -> Optional[str]:
+         # Check
+         if not self.__fxnc:
+             return None
+         # Get
+         buffer = create_string_buffer(2048)
+         status = self.__fxnc.FXNConfigurationGetUniqueID(buffer, len(buffer))
+         assert status.value == FXNStatus.OK, \
+             f"Failed to retrieve prediction configuration identifier with error: {self.__class__.__status_to_error(status.value)}"
+         uid = buffer.value.decode("utf-8")
+         # Return
+         return uid
+
+     def __load (
+         self,
+         prediction: Prediction,
+         *,
+         acceleration: Acceleration=Acceleration.Default
+     ) -> type[FXNPredictorRef]:
+         fxnc = self.__fxnc
+         configuration = FXNConfigurationRef()
+         try:
+             # Create configuration
+             status = fxnc.FXNConfigurationCreate(byref(configuration))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to create {prediction.tag} configuration with error: {self.__class__.__status_to_error(status.value)}"
+             status = fxnc.FXNConfigurationSetTag(configuration, prediction.tag.encode())
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to set configuration tag with error: {self.__class__.__status_to_error(status.value)}"
+             status = fxnc.FXNConfigurationSetToken(configuration, prediction.configuration.encode())
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to set configuration token with error: {self.__class__.__status_to_error(status.value)}"
+             status = fxnc.FXNConfigurationSetAcceleration(configuration, int(acceleration))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to set configuration acceleration with error: {self.__class__.__status_to_error(status.value)}"
+             for resource in prediction.resources:
+                 if resource.type == "fxn": # CHECK # Remove in fxnc 0.0.27
+                     continue
+                 path = self.__get_resource_path(resource)
+                 status = fxnc.FXNConfigurationAddResource(configuration, resource.type.encode(), str(path).encode())
+                 assert status.value == FXNStatus.OK, \
+                     f"Failed to set prediction configuration resource with type {resource.type} for tag {prediction.tag} with error: {self.__class__.__status_to_error(status.value)}"
+             # Create predictor
+             predictor = FXNPredictorRef()
+             status = fxnc.FXNPredictorCreate(configuration, byref(predictor))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to create prediction for tag {prediction.tag} with error: {self.__class__.__status_to_error(status.value)}"
+             # Return
+             return predictor
+         finally:
+             fxnc.FXNConfigurationRelease(configuration)
+
+     def __predict (self, *, tag: str, predictor, inputs: Dict[str, Any]) -> Prediction:
+         fxnc = self.__fxnc
+         input_map = FXNValueMapRef()
+         prediction = FXNPredictionRef()
+         try:
+             # Marshal inputs
+             status = fxnc.FXNValueMapCreate(byref(input_map))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to create {tag} prediction because input values could not be provided to the predictor with error: {self.__class__.__status_to_error(status.value)}"
+             for name, value in inputs.items():
+                 value = self.__to_value(value)
+                 fxnc.FXNValueMapSetValue(input_map, name.encode(), value)
+             # Predict
+             status = fxnc.FXNPredictorCreatePrediction(predictor, input_map, byref(prediction))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to create {tag} prediction with error: {self.__class__.__status_to_error(status.value)}"
+             # Marshal prediction
+             id = create_string_buffer(256)
+             error = create_string_buffer(2048)
+             latency = c_double()
+             status = fxnc.FXNPredictionGetID(prediction, id, len(id))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to get {tag} prediction identifier with error: {self.__class__.__status_to_error(status.value)}"
+             status = fxnc.FXNPredictionGetLatency(prediction, byref(latency))
+             assert status.value == FXNStatus.OK, \
+                 f"Failed to get {tag} prediction latency with error: {self.__class__.__status_to_error(status.value)}"
+             fxnc.FXNPredictionGetError(prediction, error, len(error))
+             id = id.value.decode("utf-8")
+             latency = latency.value
+             error = error.value.decode("utf-8")
+             log_length = c_int32()
+             fxnc.FXNPredictionGetLogLength(prediction, byref(log_length))
+             logs = create_string_buffer(log_length.value + 1)
+             fxnc.FXNPredictionGetLogs(prediction, logs, len(logs))
+             logs = logs.value.decode("utf-8")
+             # Marshal outputs
+             results = []
+             output_count = c_int32()
+             output_map = FXNValueMapRef()
+             status = fxnc.FXNPredictionGetResults(prediction, byref(output_map))
+             assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction results with error: {self.__class__.__status_to_error(status.value)}"
+             status = fxnc.FXNValueMapGetSize(output_map, byref(output_count))
+             assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction result count with error: {self.__class__.__status_to_error(status.value)}"
+             for idx in range(output_count.value):
+                 name = create_string_buffer(256)
+                 status = fxnc.FXNValueMapGetKey(output_map, idx, name, len(name))
+                 assert status.value == FXNStatus.OK, \
+                     f"Failed to get {tag} prediction output name at index {idx} with error: {self.__class__.__status_to_error(status.value)}"
+                 value = FXNValueRef()
+                 status = fxnc.FXNValueMapGetValue(output_map, name, byref(value))
+                 assert status.value == FXNStatus.OK, \
+                     f"Failed to get {tag} prediction output value at index {idx} with error: {self.__class__.__status_to_error(status.value)}"
+                 name = name.value.decode("utf-8")
+                 value = self.__to_object(value)
+                 results.append(value)
+             # Return
+             return Prediction(
+                 id=id,
+                 tag=tag,
+                 results=results if not error else None,
+                 latency=latency,
+                 error=error if error else None,
+                 logs=logs,
+                 created=datetime.now(timezone.utc).isoformat()
+             )
+         finally:
+             fxnc.FXNPredictionRelease(prediction)
+             fxnc.FXNValueMapRelease(input_map)
+
+     def __to_value (
+         self,
+         value: Union[float, int, bool, str, NDArray, List[Any], Dict[str, Any], Image.Image, bytes, bytearray, memoryview, BytesIO, None]
+     ) -> type[FXNValueRef]:
+         value = PredictionService.__try_ensure_serializable(value)
+         fxnc = self.__fxnc
+         result = FXNValueRef()
+         if result is None:
+             fxnc.FXNValueCreateNull(byref(result))
+         elif isinstance(value, bool):
+             return self.__to_value(array(value, dtype="bool"))
+         elif isinstance(value, int):
+             return self.__to_value(array(value, dtype="int32"))
+         elif isinstance(value, float):
+             return self.__to_value(array(value, dtype="float32"))
+         elif isinstance(value, ndarray):
+             dtype = _NP_TO_FXN_DTYPE.get(value.dtype)
+             assert dtype is not None, f"Failed to convert numpy array to Function value because array data type is not supported: {value.dtype}"
+             fxnc.FXNValueCreateArray(
+                 value.ctypes.data_as(c_void_p),
+                 value.ctypes.shape_as(c_int32),
+                 len(value.shape),
+                 dtype,
+                 FXNValueFlags.NONE,
+                 byref(result)
+             )
+         elif isinstance(value, str):
+             fxnc.FXNValueCreateString(value.encode(), byref(result))
+         elif isinstance(value, list):
+             fxnc.FXNValueCreateList(dumps(value).encode(), byref(result))
+         elif isinstance(value, dict):
+             fxnc.FXNValueCreateDict(dumps(value).encode(), byref(result))
+         elif isinstance(value, Image.Image):
+             value = array(value)
+             status = fxnc.FXNValueCreateImage(
+                 value.ctypes.data_as(c_void_p),
+                 value.shape[1],
+                 value.shape[0],
+                 value.shape[2],
+                 FXNValueFlags.COPY_DATA,
+                 byref(result)
+             )
+             assert status.value == FXNStatus.OK, f"Failed to create image value with error: {self.__class__.__status_to_error(status.value)}"
+         elif isinstance(value, (bytes, bytearray, memoryview, BytesIO)):
+             copy = isinstance(value, memoryview)
+             view = memoryview(value.getvalue() if isinstance(value, BytesIO) else value) if not isinstance(value, memoryview) else value
+             buffer = (c_uint8 * len(view)).from_buffer(view)
+             fxnc.FXNValueCreateBinary(
+                 buffer,
+                 len(view),
+                 FXNValueFlags.COPY_DATA if copy else FXNValueFlags.NONE,
+                 byref(result)
+             )
+         else:
+             raise RuntimeError(f"Failed to convert Python value to Function value because Python value has an unsupported type: {type(value)}")
+         return result
+
+     def __to_object (
+         self,
+         value: type[FXNValueRef]
+     ) -> Union[float, int, bool, str, NDArray, List[Any], Dict[str, Any], Image.Image, BytesIO, None]:
+         # Type
+         fxnc = self.__fxnc
+         dtype = FXNDtype()
+         status = fxnc.FXNValueGetType(value, byref(dtype))
+         assert status.value == FXNStatus.OK, f"Failed to get value data type with error: {self.__class__.__status_to_error(status.value)}"
+         dtype = dtype.value
+         # Get data
+         data = c_void_p()
+         status = fxnc.FXNValueGetData(value, byref(data))
+         assert status.value == FXNStatus.OK, f"Failed to get value data with error: {self.__class__.__status_to_error(status.value)}"
+         # Get shape
+         dims = c_int32()
+         status = fxnc.FXNValueGetDimensions(value, byref(dims))
+         assert status.value == FXNStatus.OK, f"Failed to get value dimensions with error: {self.__class__.__status_to_error(status.value)}"
+         shape = zeros(dims.value, dtype=int32)
+         status = fxnc.FXNValueGetShape(value, shape.ctypes.data_as(POINTER(c_int32)), dims)
+         assert status.value == FXNStatus.OK, f"Failed to get value shape with error: {self.__class__.__status_to_error(status.value)}"
+         # Switch
+         if dtype == FXNDtype.NULL:
+             return None
+         elif dtype in _FXN_TO_NP_DTYPE:
+             dtype_c = as_ctypes_type(_FXN_TO_NP_DTYPE[dtype])
+             tensor = as_array(cast(data, POINTER(dtype_c)), shape)
+             return tensor.item() if len(tensor.shape) == 0 else tensor.copy()
+         elif dtype == FXNDtype.STRING:
+             return cast(data, c_char_p).value.decode()
+         elif dtype == FXNDtype.LIST:
+             return loads(cast(data, c_char_p).value.decode())
+         elif dtype == FXNDtype.DICT:
+             return loads(cast(data, c_char_p).value.decode())
+         elif dtype == FXNDtype.IMAGE:
+             pixel_buffer = as_array(cast(data, POINTER(c_uint8)), shape)
+             return Image.fromarray(pixel_buffer.copy())
+         elif dtype == FXNDtype.BINARY:
+             return BytesIO(string_at(data, shape[0]))
+         else:
+             raise RuntimeError(f"Failed to convert Function value to Python value because Function value has unsupported type: {dtype}")
+
+     def __get_resource_path (self, resource: PredictionResource) -> Path:
+         cache_dir = Path.home() / ".fxn" / "cache"
+         cache_dir.mkdir(exist_ok=True)
+         res_name = Path(urlparse(resource.url).path).name
+         res_path = cache_dir / res_name
+         if res_path.exists():
+             return res_path
+         req = get(resource.url)
+         req.raise_for_status()
+         with open(res_path, "wb") as f:
+             f.write(req.content)
+         return res_path
+
+     @classmethod
+     def __try_ensure_serializable (cls, object: Any) -> Any:
+         if object is None:
+             return object
+         if isinstance(object, list):
+             return [cls.__try_ensure_serializable(x) for x in object]
+         if is_dataclass(object) and not isinstance(object, type):
+             return asdict(object)
+         if isinstance(object, BaseModel):
+             return object.model_dump(mode="json", by_alias=True)
+         return object
+
+     @classmethod
+     def __status_to_error (cls, status: int) -> str:
+         if status == FXNStatus.ERROR_INVALID_ARGUMENT:
+             return "FXN_ERROR_INVALID_ARGUMENT"
+         elif status == FXNStatus.ERROR_INVALID_OPERATION:
+             return "FXN_ERROR_INVALID_OPERATION"
+         elif status == FXNStatus.ERROR_NOT_IMPLEMENTED:
+             return "FXN_ERROR_NOT_IMPLEMENTED"
+         return ""
+
+ PREDICTION_FIELDS = f"""
+ id
+ tag
+ type
+ configuration
+ resources {{
+     type
+     url
+     name
+ }}
+ results {{
+     data
+     type
+     shape
+ }}
+ latency
+ error
+ logs
+ created
+ """
+
+ _FXN_TO_NP_DTYPE = {
+     FXNDtype.FLOAT16: dtype("float16"),
+     FXNDtype.FLOAT32: dtype("float32"),
+     FXNDtype.FLOAT64: dtype("float64"),
+     FXNDtype.INT8: dtype("int8"),
+     FXNDtype.INT16: dtype("int16"),
+     FXNDtype.INT32: dtype("int32"),
+     FXNDtype.INT64: dtype("int64"),
+     FXNDtype.UINT8: dtype("uint8"),
+     FXNDtype.UINT16: dtype("uint16"),
+     FXNDtype.UINT32: dtype("uint32"),
+     FXNDtype.UINT64: dtype("uint64"),
+     FXNDtype.BOOL: dtype("bool"),
+ }
+
+ _NP_TO_FXN_DTYPE = { value: key for key, value in _FXN_TO_NP_DTYPE.items() }
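A note on input marshaling in __to_value above: Python bool, int, and float are promoted to zero-dimensional NumPy arrays of dtype bool, int32, and float32 before being mapped to Function dtypes via _NP_TO_FXN_DTYPE, so callers who need int64 or float64 inputs must pass explicit NumPy arrays. A minimal sketch of that promotion, runnable with NumPy alone:

# Promotion rules applied by __to_value (sketch; NumPy only).
from numpy import array

assert array(True, dtype="bool").dtype.name == "bool"
assert array(3, dtype="int32").dtype.name == "int32"        # Python int -> int32, not int64
assert array(0.5, dtype="float32").dtype.name == "float32"  # Python float -> float32, not float64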
fxn/services/predictor.py CHANGED
@@ -3,11 +3,10 @@
  # Copyright © 2024 NatML Inc. All Rights Reserved.
  #
 
- from pathlib import Path
- from typing import Dict, List, Union
+ from typing import List
 
- from ..graph import GraphClient
- from ..types import Acceleration, AccessMode, Predictor, PredictorStatus, PredictorType, UploadType
+ from ..api import GraphClient
+ from ..types import Predictor, PredictorStatus
  from .storage import StorageService
  from .user import PROFILE_FIELDS
 
@@ -120,70 +119,6 @@ class PredictorService:
          # Return
          return predictors
 
-     def create (
-         self,
-         tag: str,
-         notebook: Union[str, Path],
-         type: PredictorType=None,
-         access: AccessMode=None,
-         description: str=None,
-         media: Union[str, Path]=None,
-         acceleration: Acceleration=None,
-         environment: Dict[str, str]=None,
-         license: str=None,
-         overwrite: bool=None
-     ) -> Predictor:
-         """
-         Create a predictor.
-
-         Parameters:
-             tag (str): Predictor tag.
-             notebook (str | Path): Predictor notebook path or URL.
-             type (PredictorType): Predictor type. This defaults to `CLOUD`.
-             access (AccessMode): Predictor access mode. This defaults to `PRIVATE`.
-             description (str): Predictor description. This must be under 200 characters long.
-             media (str | Path): Predictor media path or URL.
-             acceleration (Acceleration): Predictor acceleration. This only applies for cloud predictors and defaults to `CPU`.
-             environment (dict): Predictor environment variables.
-             license (str): Predictor license URL.
-             overwrite (bool): Overwrite any existing predictor with the same tag. Existing predictor will be deleted.
-
-         Returns:
-             Predictor: Created predictor.
-         """
-         # Prepare
-         environment = [{ "name": name, "value": value } for name, value in environment.items()] if environment is not None else []
-         notebook = self.storage.upload(notebook, type=UploadType.Notebook) if isinstance(notebook, Path) else notebook
-         media = self.storage.upload(media, type=UploadType.Media) if isinstance(media, Path) else media
-         # Query
-         response = self.client.query(f"""
-             mutation ($input: CreatePredictorInput!) {{
-                 createPredictor (input: $input) {{
-                     {PREDICTOR_FIELDS}
-                 }}
-             }}
-             """,
-             {
-                 "input": {
-                     "tag": tag,
-                     "type": type,
-                     "notebook": notebook,
-                     "access": access,
-                     "description": description,
-                     "media": media,
-                     "acceleration": acceleration,
-                     "environment": environment,
-                     "overwrite": overwrite,
-                     "license": license
-                 }
-             }
-         )
-         # Create predictor
-         predictor = response["createPredictor"]
-         predictor = Predictor(**predictor) if predictor else None
-         # Return
-         return predictor
-
      def delete (self, tag: str) -> bool:
          """
          Delete a predictor.
@@ -239,14 +174,12 @@ owner {{
  {PROFILE_FIELDS}
  }}
  name
- type
  status
  access
  created
  description
  card
  media
- acceleration
  signature {{
  inputs {{
  name
fxn/services/storage.py CHANGED
@@ -9,10 +9,9 @@ from magika import Magika
  from pathlib import Path
  from requests import put
  from rich.progress import open as open_progress, wrap_file
- from typing import Union
  from urllib.parse import urlparse, urlunparse
 
- from ..graph import GraphClient
+ from ..api import GraphClient
  from ..types import UploadType
 
  class StorageService:
@@ -45,7 +44,7 @@ class StorageService:
 
      def upload (
          self,
-         file: Union[str, Path, BytesIO],
+         file: str | Path | BytesIO,
          *,
          type: UploadType,
          name: str=None,
@@ -138,7 +137,7 @@ class StorageService:
          url = urlunparse(parsed_url)
          return url
 
-     def __infer_mime (self, file: Union[str, Path, BytesIO]) -> str:
+     def __infer_mime (self, file: str | Path | BytesIO) -> str:
          MAGIC_TO_MIME = {
              b"\x00\x61\x73\x6d": "application/wasm"
          }
fxn/services/user.py CHANGED
@@ -3,7 +3,7 @@
  # Copyright © 2024 NatML Inc. All Rights Reserved.
  #
 
- from ..graph import GraphClient
+ from ..api import GraphClient
  from ..types import Profile
 
  class UserService:
fxn/types/__init__.py CHANGED
@@ -6,8 +6,7 @@
  from .dtype import Dtype
  from .environment import EnvironmentVariable
  from .prediction import Prediction, PredictionResource
- from .predictor import Acceleration, AccessMode, EnumerationMember, Parameter, Predictor, PredictorStatus, PredictorType, Signature
+ from .predictor import Acceleration, AccessMode, EnumerationMember, Parameter, Predictor, PredictorStatus, Signature
  from .profile import Profile
  from .storage import UploadType
- from .user import User
- from .value import Value
+ from .user import User
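For downstream code, the import-level impact of this file is that PredictorType and Value are gone from fxn.types while the remaining symbols keep their names. A small compatibility sketch based only on the exports shown above:

# Exports still available from fxn.types in 0.0.36 (per the diff above).
from fxn.types import Acceleration, AccessMode, Prediction, Predictor, PredictorStatus
# Removed in 0.0.36; these imports would now fail:
# from fxn.types import PredictorType, Value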
fxn/types/prediction.py CHANGED
@@ -6,8 +6,6 @@
  from pydantic import BaseModel, Field
  from typing import Any, List, Optional
 
- from .predictor import PredictorType
-
  class PredictionResource (BaseModel):
      """
      Prediction resource.
@@ -28,7 +26,6 @@ class Prediction (BaseModel):
      Members:
          id (str): Prediction identifier.
          tag (str): Predictor tag.
-         type (PredictorType): Prediction type.
          configuration (str): Prediction configuration token. This is only populated for `EDGE` predictions.
          resources (list): Prediction resources. This is only populated for `EDGE` predictions.
          results (list): Prediction results.
@@ -39,7 +36,6 @@
      """
      id: str = Field(description="Prediction identifier.")
      tag: str = Field(description="Predictor tag.")
-     type: PredictorType = Field(description="Prediction type.")
      configuration: Optional[str] = Field(default=None, description="Prediction configuration token. This is only populated for `EDGE` predictions.")
      resources: Optional[List[PredictionResource]] = Field(default=None, description="Prediction resources. This is only populated for `EDGE` predictions.")
      results: Optional[List[Any]] = Field(default=None, description="Prediction results.")
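Because PredictionService.__predict above sets results to None whenever error is populated, callers should check error before indexing results. A short consumption sketch, with the same hypothetical client, access key, and tag used in the example near the top of this diff:

# Sketch only; the access key and predictor tag are placeholders.
from fxn import Function

fxn = Function(access_key="fxn_...")
prediction = fxn.predictions.create(tag="@username/some-predictor", inputs={ "x": 3 })
if prediction.error:
    raise RuntimeError(prediction.error)
print(prediction.results[0], prediction.latency)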