fxn-0.0.35-py3-none-any.whl → fxn-0.0.37-py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- fxn/c/__init__.py +16 -0
- fxn/c/configuration.py +60 -0
- fxn/c/dtype.py +26 -0
- fxn/c/fxnc.py +28 -0
- fxn/c/map.py +34 -0
- fxn/c/prediction.py +37 -0
- fxn/c/predictor.py +31 -0
- fxn/c/status.py +12 -0
- fxn/c/stream.py +22 -0
- fxn/c/value.py +50 -0
- fxn/c/version.py +13 -0
- fxn/cli/__init__.py +8 -8
- fxn/cli/auth.py +1 -1
- fxn/cli/misc.py +10 -4
- fxn/cli/predict.py +3 -4
- fxn/cli/predictors.py +1 -40
- fxn/function.py +4 -10
- fxn/lib/macos/arm64/Function.dylib +0 -0
- fxn/lib/macos/x86_64/Function.dylib +0 -0
- fxn/lib/windows/arm64/Function.dll +0 -0
- fxn/lib/windows/x86_64/Function.dll +0 -0
- fxn/services/__init__.py +1 -3
- fxn/services/prediction.py +456 -0
- fxn/services/predictor.py +4 -73
- fxn/services/user.py +1 -1
- fxn/types/__init__.py +2 -3
- fxn/types/prediction.py +0 -4
- fxn/types/predictor.py +15 -22
- fxn/version.py +1 -1
- {fxn-0.0.35.dist-info → fxn-0.0.37.dist-info}/METADATA +27 -29
- fxn-0.0.37.dist-info/RECORD +46 -0
- {fxn-0.0.35.dist-info → fxn-0.0.37.dist-info}/WHEEL +1 -1
- fxn/libs/linux/__init__.py +0 -4
- fxn/libs/macos/Function.dylib +0 -0
- fxn/libs/macos/__init__.py +0 -4
- fxn/libs/windows/Function.dll +0 -0
- fxn/libs/windows/__init__.py +0 -4
- fxn/magic.py +0 -35
- fxn/services/environment.py +0 -111
- fxn/services/prediction/__init__.py +0 -6
- fxn/services/prediction/fxnc.py +0 -312
- fxn/services/prediction/service.py +0 -512
- fxn/services/storage.py +0 -160
- fxn/types/value.py +0 -22
- fxn-0.0.35.dist-info/RECORD +0 -42
- /fxn/{graph → api}/__init__.py +0 -0
- /fxn/{graph → api}/client.py +0 -0
- /fxn/{libs → lib}/__init__.py +0 -0
- {fxn-0.0.35.dist-info → fxn-0.0.37.dist-info}/LICENSE +0 -0
- {fxn-0.0.35.dist-info → fxn-0.0.37.dist-info}/entry_points.txt +0 -0
- {fxn-0.0.35.dist-info → fxn-0.0.37.dist-info}/top_level.txt +0 -0
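For orientation, the renames in the list above imply a small set of module-path changes between the two wheels. The mapping below is a sketch inferred only from the renamed, added, and deleted files; the symbol names actually exported by 0.0.37 are assumptions, not confirmed by this diff.

```python
# Hypothetical mapping of module paths between the two wheels, inferred only from
# the renamed/added/deleted files listed above; 0.0.37 symbol names are assumptions.
MODULE_MOVES_0_0_35_TO_0_0_37 = {
    "fxn.graph.client": "fxn.api.client",               # fxn/{graph -> api}/client.py
    "fxn.libs": "fxn.lib",                               # fxn/{libs -> lib}/, now with per-arch subfolders
    "fxn.services.prediction.service": "fxn.services.prediction",  # package collapsed into one module
    "fxn.services.storage": None,                        # deleted (no replacement visible in this diff)
    "fxn.services.environment": None,                    # deleted
    "fxn.magic": None,                                   # deleted
    "fxn.types.value": None,                             # Value model removed; fxn/c/value.py holds the new C value bindings
}
```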
fxn/services/prediction/service.py
DELETED
@@ -1,512 +0,0 @@
-#
-# Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
-#
-
-from aiohttp import ClientSession
-from ctypes import byref, c_double, c_int32, create_string_buffer, CDLL
-from dataclasses import asdict, is_dataclass
-from datetime import datetime, timezone
-from importlib import resources
-from io import BytesIO
-from json import dumps, loads
-from magika import Magika
-from numpy import array, float32, frombuffer, int32, ndarray
-from numpy.typing import NDArray
-from pathlib import Path
-from PIL import Image
-from platform import machine, system
-from pydantic import BaseModel
-from requests import get, post
-from tempfile import NamedTemporaryFile
-from typing import Any, AsyncIterator, Dict, List, Optional, Union
-from uuid import uuid4
-from urllib.parse import urlparse
-from urllib.request import urlopen
-
-from ...graph import GraphClient
-from ...types import Dtype, PredictorType, Prediction, PredictionResource, Value, UploadType
-from ..storage import StorageService
-from .fxnc import load_fxnc, to_fxn_value, to_py_value, FXNConfigurationRef, FXNPredictorRef, FXNPredictionRef, FXNStatus, FXNValueRef, FXNValueMapRef
-
-class PredictionService:
-
-    def __init__ (self, client: GraphClient, storage: StorageService):
-        self.client = client
-        self.storage = storage
-        self.__fxnc = PredictionService.__load_fxnc()
-        self.__cache = { }
-
-    def create (
-        self,
-        tag: str,
-        *,
-        inputs: Dict[str, Union[ndarray, str, float, int, bool, List, Dict[str, Any], Path, Image.Image, Value]] = None,
-        raw_outputs: bool=False,
-        return_binary_path: bool=True,
-        data_url_limit: int=None,
-        client_id: str=None,
-        configuration_id: str=None
-    ) -> Prediction:
-        """
-        Create a prediction.
-
-        Parameters:
-            tag (str): Predictor tag.
-            inputs (dict): Input values. This only applies to `CLOUD` predictions.
-            raw_outputs (bool): Skip converting output values into Pythonic types. This only applies to `CLOUD` predictions.
-            return_binary_path (bool): Write binary values to file and return a `Path` instead of returning `BytesIO` instance.
-            data_url_limit (int): Return a data URL if a given output value is smaller than this size in bytes. This only applies to `CLOUD` predictions.
-            client_id (str): Function client identifier. Specify this to override the current client identifier.
-            configuration_id (str): Configuration identifier. Specify this to override the current client configuration identifier.
-
-        Returns:
-            Prediction: Created prediction.
-        """
-        # Check if cached
-        if tag in self.__cache:
-            return self.__predict(tag=tag, predictor=self.__cache[tag], inputs=inputs)
-        # Serialize inputs
-        key = uuid4().hex
-        values = { name: self.to_value(value, name, key=key).model_dump(mode="json") for name, value in inputs.items() } if inputs is not None else { }
-        # Query
-        response = post(
-            f"{self.client.api_url}/predict/{tag}?rawOutputs=true&dataUrlLimit={data_url_limit}",
-            json=values,
-            headers={
-                "Authorization": f"Bearer {self.client.access_key}",
-                "fxn-client": client_id if client_id is not None else self.__get_client_id(),
-                "fxn-configuration-token": configuration_id if configuration_id is not None else self.__get_configuration_id()
-            }
-        )
-        # Check
-        prediction = response.json()
-        try:
-            response.raise_for_status()
-        except Exception as ex:
-            error = prediction["errors"][0]["message"] if "errors" in prediction else str(ex)
-            raise RuntimeError(error)
-        # Parse prediction
-        prediction = self.__parse_prediction(prediction, raw_outputs=raw_outputs, return_binary_path=return_binary_path)
-        # Check edge prediction
-        if prediction.type != PredictorType.Edge or raw_outputs:
-            return prediction
-        # Load edge predictor
-        predictor = self.__load(prediction)
-        self.__cache[tag] = predictor
-        # Create edge prediction
-        prediction = self.__predict(tag=tag, predictor=predictor, inputs=inputs) if inputs is not None else prediction
-        return prediction
-
-    async def stream (
-        self,
-        tag: str,
-        *,
-        inputs: Dict[str, Union[float, int, str, bool, NDArray, List[Any], Dict[str, Any], Path, Image.Image, Value]] = {},
-        raw_outputs: bool=False,
-        return_binary_path: bool=True,
-        data_url_limit: int=None,
-        client_id: str=None,
-        configuration_id: str=None
-    ) -> AsyncIterator[Prediction]:
-        """
-        Create a streaming prediction.
-
-        NOTE: This feature is currently experimental.
-
-        Parameters:
-            tag (str): Predictor tag.
-            inputs (dict): Input values. This only applies to `CLOUD` predictions.
-            raw_outputs (bool): Skip converting output values into Pythonic types. This only applies to `CLOUD` predictions.
-            return_binary_path (bool): Write binary values to file and return a `Path` instead of returning `BytesIO` instance.
-            data_url_limit (int): Return a data URL if a given output value is smaller than this size in bytes. This only applies to `CLOUD` predictions.
-            client_id (str): Function client identifier. Specify this to override the current client identifier.
-            configuration_id (str): Configuration identifier. Specify this to override the current client configuration identifier.
-
-        Returns:
-            Prediction: Created prediction.
-        """
-        # Check if cached
-        if tag in self.__cache:
-            yield self.__predict(tag=tag, predictor=self.__cache[tag], inputs=inputs)
-            return
-        # Serialize inputs
-        key = uuid4().hex
-        values = { name: self.to_value(value, name, key=key).model_dump(mode="json") for name, value in inputs.items() }
-        # Request
-        url = f"{self.client.api_url}/predict/{tag}?stream=true&rawOutputs=true&dataUrlLimit={data_url_limit}"
-        headers = {
-            "Content-Type": "application/json",
-            "Authorization": f"Bearer {self.client.access_key}",
-            "fxn-client": client_id if client_id is not None else self.__get_client_id(),
-            "fxn-configuration-token": configuration_id if configuration_id is not None else self.__get_configuration_id()
-        }
-        async with ClientSession(headers=headers) as session:
-            async with session.post(url, data=dumps(values)) as response:
-                async for chunk in response.content.iter_any():
-                    prediction = loads(chunk)
-                    # Check status
-                    try:
-                        response.raise_for_status()
-                    except Exception as ex:
-                        error = prediction["errors"][0]["message"] if "errors" in prediction else str(ex)
-                        raise RuntimeError(error)
-                    # Parse prediction
-                    prediction = self.__parse_prediction(prediction, raw_outputs=raw_outputs, return_binary_path=return_binary_path)
-                    # Check edge prediction
-                    if prediction.type != PredictorType.Edge or raw_outputs:
-                        yield prediction
-                        continue
-                    # Load edge predictor
-                    predictor = self.__load(prediction)
-                    self.__cache[tag] = predictor
-                    # Create prediction
-                    prediction = self.__predict(tag=tag, predictor=predictor, inputs=inputs) if inputs is not None else prediction
-                    yield prediction
-
-    def to_object (
-        self,
-        value: Value,
-        return_binary_path: bool=True
-    ) -> Union[str, float, int, bool, NDArray, list, dict, Image.Image, BytesIO, Path]:
-        """
-        Convert a Function value to a plain object.
-
-        Parameters:
-            return_binary_path (str): Write binary values to file and return a `Path` instead of returning `BytesIO` instance.
-
-        Returns:
-            str | float | int | bool | list | dict | ndarray | Image.Image | BytesIO | Path: Plain objectt.
-        """
-        # Null
-        if value.type == Dtype.null:
-            return None
-        # Download
-        buffer = self.__download_value_data(value.data)
-        # Array
-        if value.type in [
-            Dtype.int8, Dtype.int16, Dtype.int32, Dtype.int64,
-            Dtype.uint8, Dtype.uint16, Dtype.uint32, Dtype.uint64,
-            Dtype.float16, Dtype.float32, Dtype.float64, Dtype.bool
-        ]:
-            assert value.shape is not None, "Array value must have a shape specified"
-            array = frombuffer(buffer.getbuffer(), dtype=value.type).reshape(value.shape)
-            return array if len(value.shape) > 0 else array.item()
-        # String
-        if value.type == Dtype.string:
-            return buffer.getvalue().decode("utf-8")
-        # List
-        if value.type == Dtype.list:
-            return loads(buffer.getvalue().decode("utf-8"))
-        # Dict
-        if value.type == Dtype.dict:
-            return loads(buffer.getvalue().decode("utf-8"))
-        # Image
-        if value.type == Dtype.image:
-            return Image.open(buffer)
-        # Binary
-        if return_binary_path:
-            with NamedTemporaryFile(mode="wb", delete=False) as f:
-                f.write(buffer.getbuffer())
-            return Path(f.name)
-        # Return
-        return buffer
-
-    def to_value (
-        self,
-        object: Union[str, float, int, bool, ndarray, List[Any], Dict[str, any], Path, Image.Image],
-        name: str,
-        min_upload_size: int=4096,
-        key: str=None
-    ) -> Value:
-        """
-        Convert a plain object to a Function value.
-
-        Parameters:
-            object (str | float | int | bool | ndarray | list | dict | dataclass | Path | PIL.Image): Input object.
-            name (str): Value name.
-            min_upload_size (int): Values larger than this size in bytes will be uploaded.
-
-        Returns:
-            Value: Function value.
-        """
-        object = self.__try_ensure_serializable(object)
-        # None
-        if object is None:
-            return Value(data=None, type=Dtype.null)
-        # Value
-        if isinstance(object, Value):
-            return object
-        # Array
-        if isinstance(object, ndarray):
-            buffer = BytesIO(object.tobytes())
-            data = self.storage.upload(buffer, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            return Value(data=data, type=object.dtype.name, shape=list(object.shape))
-        # String
-        if isinstance(object, str):
-            buffer = BytesIO(object.encode())
-            data = self.storage.upload(buffer, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            return Value(data=data, type=Dtype.string)
-        # Float
-        if isinstance(object, float):
-            object = array(object, dtype=float32)
-            return self.to_value(object, name, min_upload_size=min_upload_size, key=key)
-        # Boolean
-        if isinstance(object, bool):
-            object = array(object, dtype=bool)
-            return self.to_value(object, name, min_upload_size=min_upload_size, key=key)
-        # Integer
-        if isinstance(object, int):
-            object = array(object, dtype=int32)
-            return self.to_value(object, name, min_upload_size=min_upload_size, key=key)
-        # List
-        if isinstance(object, list):
-            buffer = BytesIO(dumps(object).encode())
-            data = self.storage.upload(buffer, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            return Value(data=data, type=Dtype.list)
-        # Dict
-        if isinstance(object, dict):
-            buffer = BytesIO(dumps(object).encode())
-            data = self.storage.upload(buffer, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            return Value(data=data, type=Dtype.dict)
-        # Image
-        if isinstance(object, Image.Image):
-            buffer = BytesIO()
-            format = "PNG" if object.mode == "RGBA" else "JPEG"
-            object.save(buffer, format=format)
-            data = self.storage.upload(buffer, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            return Value(data=data, type=Dtype.image)
-        # Binary
-        if isinstance(object, BytesIO):
-            data = self.storage.upload(object, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            dtype = self.__get_data_dtype(object)
-            return Value(data=data, type=dtype)
-        # Path
-        if isinstance(object, Path):
-            assert object.exists(), "Value does not exist at the given path"
-            assert object.is_file(), "Value path must point to a file, not a directory"
-            object = object.expanduser().resolve()
-            data = self.storage.upload(object, type=UploadType.Value, name=name, data_url_limit=min_upload_size, key=key)
-            dtype = self.__get_data_dtype(object)
-            return Value(data=data, type=dtype)
-        # Unsupported
-        raise RuntimeError(f"Cannot create Function value '{name}' for object {object} of type {type(object)}")
-
-    @classmethod
-    def __load_fxnc (self) -> Optional[CDLL]:
-        RESOURCE_MAP = {
-            "Darwin": ("fxn.libs.macos", "Function.dylib"),
-            "Windows": ("fxn.libs.windows", "Function.dll"),
-        }
-        # Get resource
-        package, resource = RESOURCE_MAP.get(system(), (None, None))
-        if package is None or resource is None:
-            return None
-        # Load
-        with resources.path(package, resource) as fxnc_path:
-            return load_fxnc(fxnc_path)
-
-    def __get_client_id (self) -> str:
-        id = system()
-        if id == "Darwin":
-            return f"macos:{machine()}"
-        if id == "Linux":
-            return f"linux:{machine()}"
-        if id == "Windows":
-            return f"windows:{machine()}"
-        raise RuntimeError(f"Function cannot make predictions on the {id} platform")
-
-    def __get_configuration_id (self) -> Optional[str]:
-        # Check
-        if not self.__fxnc:
-            return None
-        # Get
-        buffer = create_string_buffer(2048)
-        status = self.__fxnc.FXNConfigurationGetUniqueID(buffer, len(buffer))
-        assert status.value == FXNStatus.OK, f"Failed to create prediction configuration identifier with status: {status.value}"
-        uid = buffer.value.decode("utf-8")
-        # Return
-        return uid
-
-    def __load (self, prediction: Prediction):
-        # Load predictor
-        fxnc = self.__fxnc
-        configuration = FXNConfigurationRef()
-        try:
-            # Create configuration
-            status = fxnc.FXNConfigurationCreate(byref(configuration))
-            assert status.value == FXNStatus.OK, f"Failed to create {prediction.tag} prediction configuration with status: {status.value}"
-            # Set tag
-            status = fxnc.FXNConfigurationSetTag(configuration, prediction.tag.encode())
-            assert status.value == FXNStatus.OK, f"Failed to set {prediction.tag} prediction configuration tag with status: {status.value}"
-            # Set token
-            status = fxnc.FXNConfigurationSetToken(configuration, prediction.configuration.encode())
-            assert status.value == FXNStatus.OK, f"Failed to set {prediction.tag} prediction configuration token with status: {status.value}"
-            # Add resources
-            for resource in prediction.resources:
-                if resource.type == "fxn":
-                    continue
-                path = self.__get_resource_path(resource)
-                status = fxnc.FXNConfigurationAddResource(configuration, resource.type.encode(), str(path).encode())
-                assert status.value == FXNStatus.OK, f"Failed to set prediction configuration resource with type {resource.type} for tag {prediction.tag} with status: {status.value}"
-            # Create predictor
-            predictor = FXNPredictorRef()
-            status = fxnc.FXNPredictorCreate(configuration, byref(predictor))
-            assert status.value == FXNStatus.OK, f"Failed to create prediction for tag {prediction.tag} with status: {status.value}"
-            # Return
-            return predictor
-        finally:
-            fxnc.FXNConfigurationRelease(configuration)
-
-    def __predict (self, *, tag: str, predictor, inputs: Dict[str, Any]) -> Prediction:
-        fxnc = self.__fxnc
-        input_map = FXNValueMapRef()
-        prediction = FXNPredictionRef()
-        try:
-            # Create input map
-            status = fxnc.FXNValueMapCreate(byref(input_map))
-            assert status.value == FXNStatus.OK, f"Failed to create {tag} prediction because input values could not be provided to the predictor with status: {status.value}"
-            # Marshal inputs
-            for name, value in inputs.items():
-                value = to_fxn_value(fxnc, value, copy=False)
-                fxnc.FXNValueMapSetValue(input_map, name.encode(), value)
-            # Predict
-            status = fxnc.FXNPredictorCreatePrediction(predictor, input_map, byref(prediction))
-            assert status.value == FXNStatus.OK, f"Failed to create {tag} prediction with status: {status.value}"
-            # Marshal prediction
-            id = create_string_buffer(256)
-            error = create_string_buffer(2048)
-            latency = c_double()
-            status = fxnc.FXNPredictionGetID(prediction, id, len(id))
-            assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction identifier with status: {status.value}"
-            status = fxnc.FXNPredictionGetLatency(prediction, byref(latency))
-            assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction latency with status: {status.value}"
-            fxnc.FXNPredictionGetError(prediction, error, len(error))
-            id = id.value.decode("utf-8")
-            latency = latency.value
-            error = error.value.decode("utf-8")
-            # Marshal logs
-            log_length = c_int32()
-            fxnc.FXNPredictionGetLogLength(prediction, byref(log_length))
-            logs = create_string_buffer(log_length.value + 1)
-            fxnc.FXNPredictionGetLogs(prediction, logs, len(logs))
-            logs = logs.value.decode("utf-8")
-            # Marshal outputs
-            results = []
-            output_count = c_int32()
-            output_map = FXNValueMapRef()
-            status = fxnc.FXNPredictionGetResults(prediction, byref(output_map))
-            assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction results with status: {status.value}"
-            status = fxnc.FXNValueMapGetSize(output_map, byref(output_count))
-            assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction result count with status: {status.value}"
-            for idx in range(output_count.value):
-                # Get name
-                name = create_string_buffer(256)
-                status = fxnc.FXNValueMapGetKey(output_map, idx, name, len(name))
-                assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction output name at index {idx} with status: {status.value}"
-                # Get value
-                value = FXNValueRef()
-                status = fxnc.FXNValueMapGetValue(output_map, name, byref(value))
-                assert status.value == FXNStatus.OK, f"Failed to get {tag} prediction output value at index {idx} with status: {status.value}"
-                # Parse
-                name = name.value.decode("utf-8")
-                value = to_py_value(fxnc, value)
-                results.append(value)
-            # Return
-            return Prediction(
-                id=id,
-                tag=tag,
-                type=PredictorType.Edge,
-                results=results if not error else None,
-                latency=latency,
-                error=error if error else None,
-                logs=logs,
-                created=datetime.now(timezone.utc).isoformat()
-            )
-        finally:
-            fxnc.FXNPredictionRelease(prediction)
-            fxnc.FXNValueMapRelease(input_map)
-
-    def __parse_prediction (
-        self,
-        data: Dict[str, Any],
-        *,
-        raw_outputs: bool,
-        return_binary_path: bool
-    ) -> Prediction:
-        prediction = Prediction(**data)
-        prediction.results = [Value(**value) for value in prediction.results] if prediction.results is not None else None
-        prediction.results = [self.to_object(value, return_binary_path=return_binary_path) for value in prediction.results] if prediction.results is not None and not raw_outputs else prediction.results
-        return prediction
-
-    def __get_data_dtype (self, data: Union[Path, BytesIO]) -> Dtype:
-        magika = Magika()
-        result = magika.identify_bytes(data.getvalue()) if isinstance(data, BytesIO) else magika.identify_path(data)
-        group = result.output.group
-        if group == "image":
-            return Dtype.image
-        elif group == "audio":
-            return Dtype.audio
-        elif group == "video":
-            return Dtype.video
-        elif isinstance(data, Path) and data.suffix in [".obj", ".gltf", ".glb", ".fbx", ".usd", ".usdz", ".blend"]:
-            return Dtype._3d
-        else:
-            return Dtype.binary
-
-    def __download_value_data (self, url: str) -> BytesIO:
-        if url.startswith("data:"):
-            with urlopen(url) as response:
-                return BytesIO(response.read())
-        response = get(url)
-        result = BytesIO(response.content)
-        return result
-
-    def __get_resource_path (self, resource: PredictionResource) -> Path:
-        cache_dir = Path.home() / ".fxn" / "cache"
-        cache_dir.mkdir(exist_ok=True)
-        res_name = Path(urlparse(resource.url).path).name
-        res_path = cache_dir / res_name
-        if res_path.exists():
-            return res_path
-        req = get(resource.url)
-        req.raise_for_status()
-        with open(res_path, "wb") as f:
-            f.write(req.content)
-        return res_path
-
-    @classmethod
-    def __try_ensure_serializable (cls, object: Any) -> Any:
-        if object is None:
-            return object
-        if isinstance(object, Value): # passthrough
-            return object
-        if isinstance(object, list):
-            return [cls.__try_ensure_serializable(x) for x in object]
-        if is_dataclass(object) and not isinstance(object, type):
-            return asdict(object)
-        if isinstance(object, BaseModel):
-            return object.model_dump(mode="json", by_alias=True)
-        return object
-
-
-PREDICTION_FIELDS = f"""
-id
-tag
-type
-configuration
-resources {{
-    type
-    url
-    name
-}}
-results {{
-    data
-    type
-    shape
-}}
-latency
-error
-logs
-created
-"""
fxn/services/storage.py
DELETED
@@ -1,160 +0,0 @@
-#
-# Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
-#
-
-from base64 import b64encode
-from io import BytesIO
-from magika import Magika
-from pathlib import Path
-from requests import put
-from rich.progress import open as open_progress, wrap_file
-from typing import Union
-from urllib.parse import urlparse, urlunparse
-
-from ..graph import GraphClient
-from ..types import UploadType
-
-class StorageService:
-
-    def __init__ (self, client: GraphClient) -> None:
-        self.client = client
-
-    def create_upload_url (self, name: str, type: UploadType, key: str=None) -> str:
-        """
-        Create an upload URL.
-
-        Parameters:
-            name (str): File name.
-            type (UploadType): Upload type.
-            key (str): File key. This is useful for grouping related files.
-
-        Returns:
-            str: File upload URL.
-        """
-        # Query
-        response = self.client.query(f"""
-            mutation ($input: CreateUploadUrlInput!) {{
-                createUploadUrl (input: $input)
-            }}
-            """,
-            { "input": { "type": type, "name": name, "key": key } }
-        )
-        # Return
-        return response["createUploadUrl"]
-
-    def upload (
-        self,
-        file: Union[str, Path, BytesIO],
-        *,
-        type: UploadType,
-        name: str=None,
-        data_url_limit: int=None,
-        key: str=None,
-        verbose: bool=False
-    ) -> str:
-        """
-        Upload a file and return the URL.
-
-        Parameters:
-            file (str | Path | BytesIO): Input file.
-            type (UploadType): File type.
-            name (str): File name. This MUST be provided if `file` is not a file path.
-            data_url_limit (int): Return a data URL if the file is smaller than this limit (in bytes).
-            key (str): File key. This is useful for grouping related files.
-            verbose (bool): Print a progress bar for the upload.
-
-        Returns:
-            str: Upload URL.
-        """
-        file = Path(file) if isinstance(file, str) else file
-        if isinstance(file, Path):
-            return self.__upload_file(file, type=type, name=name, key=key, data_url_limit=data_url_limit, verbose=verbose)
-        else:
-            return self.__upload_buffer(file, type=type, name=name, key=key, data_url_limit=data_url_limit, verbose=verbose)
-
-    def __upload_file (
-        self,
-        file: Path,
-        *,
-        type: UploadType,
-        name: str=None,
-        key: str=None,
-        data_url_limit: int=None,
-        verbose: bool=False
-    ) -> str:
-        # Check file
-        assert file.exists(), f"Cannot upload {file.name} because the file does not exist"
-        assert file.is_file(), f"Cannot upload {file.name} becaause it does not point to a file"
-        # Create data URL
-        mime = self.__infer_mime(file)
-        if file.stat().st_size < (data_url_limit or 0):
-            with open(file, mode="rb") as f:
-                buffer = BytesIO(f.read())
-            return self.__create_data_url(buffer, mime=mime)
-        # Upload
-        name = name or file.name
-        url = self.create_upload_url(name, type, key=key)
-        with open_progress(file, mode="rb", description=name, disable=not verbose) as f:
-            put(url, data=f, headers={ "Content-Type": mime }).raise_for_status()
-        # Return
-        return self.__simplify_url(url)
-
-    def __upload_buffer (
-        self,
-        file: BytesIO,
-        *,
-        type: UploadType,
-        name: str=None,
-        key: str=None,
-        data_url_limit: int=None,
-        verbose: bool=False
-    ) -> str:
-        # Check name
-        assert name, "You must specify the file `name` if the `file` is not a path"
-        # Create data URL
-        file.seek(0)
-        mime = self.__infer_mime(file)
-        size = file.getbuffer().nbytes
-        if size < (data_url_limit or 0):
-            return self.__create_data_url(file, mime=mime)
-        # Upload
-        url = self.create_upload_url(name, type, key=key)
-        with wrap_file(file, total=size, description=name, disable=not verbose) as f:
-            put(url, data=f, headers={ "Content-Type": mime }).raise_for_status()
-        # Return
-        return self.__simplify_url(url)
-
-    def __create_data_url (self, file: BytesIO, *, mime: str) -> str:
-        encoded_data = b64encode(file.getvalue()).decode("ascii")
-        url = f"data:{mime};base64,{encoded_data}"
-        return url
-
-    def __simplify_url (self, url: str) -> str:
-        if url.startswith("data:"):
-            return url
-        parsed_url = urlparse(url)
-        parsed_url = parsed_url._replace(netloc="cdn.fxn.ai", query="")
-        url = urlunparse(parsed_url)
-        return url
-
-    def __infer_mime (self, file: Union[str, Path, BytesIO]) -> str:
-        MAGIC_TO_MIME = {
-            b"\x00\x61\x73\x6d": "application/wasm"
-        }
-        # Read magic
-        file = Path(file) if isinstance(file, str) else file
-        if isinstance(file, Path):
-            with open(file, "rb") as f:
-                magic = f.read(4)
-        elif isinstance(file, BytesIO):
-            magic = file.getvalue()[:4]
-        # Check known mime
-        mime = MAGIC_TO_MIME.get(magic)
-        # Infer
-        if mime is None:
-            magika = Magika()
-            result = magika.identify_bytes(file.getvalue()) if isinstance(file, BytesIO) else magika.identify_path(file)
-            mime = result.output.mime_type
-        # Return
-        return mime
fxn/types/value.py
DELETED
@@ -1,22 +0,0 @@
-#
-# Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
-#
-
-from pydantic import BaseModel, Field
-from typing import List, Optional, Union
-
-from .dtype import Dtype
-
-class Value (BaseModel):
-    """
-    Prediction value.
-
-    Members:
-        data (str): Value URL. This can be a web URL or a data URL.
-        type (Dtype): Value data type.
-        shape (list): Value shape. This is `None` if shape information is not available or applicable.
-    """
-    data: Union[str, None] = Field(description="Value URL. This can be a web URL or a data URL.")
-    type: Dtype = Field(description="Value data type.")
-    shape: Optional[List[int]] = Field(default=None, description="Value shape. This is `None` if shape information is not available or applicable.")