fxn 0.0.9.tar.gz → 0.0.11.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {fxn-0.0.9 → fxn-0.0.11}/PKG-INFO +1 -1
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/__init__.py +1 -2
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/dtype.py +1 -1
- fxn-0.0.11/fxn/api/feature.py +184 -0
- fxn-0.0.11/fxn/api/prediction.py +132 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/predictor.py +75 -12
- fxn-0.0.11/fxn/api/storage.py +146 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/cli/__init__.py +2 -1
- {fxn-0.0.9 → fxn-0.0.11}/fxn/cli/predict.py +21 -4
- {fxn-0.0.9 → fxn-0.0.11}/fxn/cli/predictors.py +19 -3
- {fxn-0.0.9 → fxn-0.0.11}/fxn/version.py +1 -1
- {fxn-0.0.9 → fxn-0.0.11}/fxn.egg-info/PKG-INFO +1 -1
- {fxn-0.0.9 → fxn-0.0.11}/fxn.egg-info/SOURCES.txt +0 -1
- fxn-0.0.9/fxn/api/feature.py +0 -23
- fxn-0.0.9/fxn/api/featureinput.py +0 -117
- fxn-0.0.9/fxn/api/prediction.py +0 -186
- fxn-0.0.9/fxn/api/storage.py +0 -99
- {fxn-0.0.9 → fxn-0.0.11}/LICENSE +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/README.md +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/__init__.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/api.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/environment.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/profile.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/tag.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/api/user.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/cli/auth.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/cli/env.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/cli/misc.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn/magic.py +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn.egg-info/dependency_links.txt +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn.egg-info/entry_points.txt +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn.egg-info/requires.txt +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/fxn.egg-info/top_level.txt +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/setup.cfg +0 -0
- {fxn-0.0.9 → fxn-0.0.11}/setup.py +0 -0
{fxn-0.0.9 → fxn-0.0.11}/fxn/api/__init__.py

```diff
@@ -6,9 +6,8 @@
 from .dtype import Dtype
 from .environment import EnvironmentVariable
 from .feature import Feature
-from .featureinput import FeatureInput
 from .prediction import CloudPrediction, EdgePrediction, Prediction
-from .predictor import Acceleration, AccessMode, Parameter, Predictor, PredictorStatus, PredictorType, Signature
+from .predictor import Acceleration, AccessMode, EnumerationMember, Parameter, Predictor, PredictorStatus, PredictorType, Signature
 from .profile import Profile
 from .storage import Storage, UploadType
 from .user import User
```
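The net effect on the public API surface: `FeatureInput` is gone and `EnumerationMember` is new, while the remaining exports are unchanged. A quick, illustrative sanity check of the 0.0.11 import surface:

```python
# These imports should resolve against fxn 0.0.11; FeatureInput no longer exists.
from fxn.api import EnumerationMember, Feature, Prediction, Predictor, Storage
```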
fxn-0.0.11/fxn/api/feature.py
ADDED

```diff
@@ -0,0 +1,184 @@
+#
+# Function
+# Copyright © 2023 NatML Inc. All Rights Reserved.
+#
+
+from __future__ import annotations
+from dataclasses import asdict, dataclass, is_dataclass
+from filetype import guess_mime
+from io import BytesIO
+from json import loads, dumps
+from numpy import array, float32, frombuffer, int32, ndarray
+from pathlib import Path
+from PIL import Image
+from requests import get
+from tempfile import NamedTemporaryFile
+from typing import Dict, List, Optional, Union
+from urllib.request import urlopen
+
+from .dtype import Dtype
+from .storage import Storage, UploadType
+
+@dataclass(frozen=True)
+class Feature:
+    """
+    Prediction feature.
+
+    Members:
+        data (str): Feature data URL. This can be a web URL or a data URL.
+        type (Dtype): Feature data type.
+        shape (list): Feature shape. This is `None` if shape information is not available or applicable.
+    """
+    data: str
+    type: Dtype
+    shape: Optional[List[int]] = None
+
+    def to_value (
+        self,
+        return_binary_path: bool=True
+    ) -> Union[str, float, int, bool, ndarray, list, dict, Image.Image, Path]:
+        """
+        Convert a feature to a plain Python value.
+
+        Parameters:
+            return_binary_path (str): Write binary features to file and return a `Path` instead of returning `BytesIO` instance.
+
+        Returns:
+            str | float | int | bool | ndarray | list | dict | Image.Image | Path: Feature value.
+        """
+        buffer = Feature.__download_feature_data(self.data)
+        # Array
+        if self.type in [
+            Dtype.int8, Dtype.int16, Dtype.int32, Dtype.int64,
+            Dtype.uint8, Dtype.uint16, Dtype.uint32, Dtype.uint64,
+            Dtype.float16, Dtype.float32, Dtype.float64, Dtype.bool
+        ]:
+            assert self.shape is not None, "Array feature must have a shape specified"
+            array = frombuffer(buffer.getbuffer(), dtype=self.type).reshape(self.shape)
+            return array if len(self.shape) > 0 else array.item()
+        # String
+        if self.type == Dtype.string:
+            return buffer.getvalue().decode("utf-8")
+        # List
+        if self.type == Dtype.list:
+            return loads(buffer.getvalue().decode("utf-8"))
+        # Dict
+        if self.type == Dtype.dict:
+            return loads(buffer.getvalue().decode("utf-8"))
+        # Image
+        if self.type == Dtype.image:
+            return Image.open(buffer)
+        # Binary
+        if return_binary_path:
+            with NamedTemporaryFile(mode="wb", delete=False) as f:
+                f.write(buffer.getbuffer())
+            return Path(f.name)
+        # Return
+        return buffer
+
+    @classmethod
+    def from_value (
+        cls,
+        value: Union[str, float, int, bool, ndarray, List, Dict[str, any], Path, Image.Image],
+        name: str,
+        type: Dtype=None,
+        min_upload_size: int=4096,
+        key: str=None
+    ) -> Feature:
+        """
+        Create a feature input from a given value.
+
+        Parameters:
+            value (str | float | int | bool | ndarray | list | dict | dataclass | Path | PIL.Image): Value.
+            name (str): Feature name.
+            type (Dtype): Feature data type override.
+            min_upload_size (int): Features larger than this size in bytes will be uploaded.
+
+        Returns:
+            Feature: Feature.
+        """
+        # Feature
+        if isinstance(value, Feature):
+            return value
+        # Array
+        if isinstance(value, ndarray):
+            buffer = BytesIO(value.tobytes())
+            data = Storage.upload(buffer, UploadType.Feature, name=name, data_url_limit=min_upload_size, key=key)
+            type = type or value.dtype.name
+            return Feature(data, type, shape=list(value.shape))
+        # String
+        if isinstance(value, str):
+            buffer = BytesIO(value.encode("utf-8"))
+            data = Storage.upload(buffer, UploadType.Feature, name=name, data_url_limit=min_upload_size, key=key)
+            type = type or Dtype.string
+            return Feature(data, type)
+        # Float
+        if isinstance(value, float):
+            value = array(value, dtype=float32)
+            return cls.from_value(value, name, type=type, min_upload_size=min_upload_size, key=key)
+        # Boolean
+        if isinstance(value, bool):
+            value = array(value, dtype=bool)
+            return cls.from_value(value, name, type=type, min_upload_size=min_upload_size, key=key)
+        # Integer
+        if isinstance(value, int):
+            value = array(value, dtype=int32)
+            return cls.from_value(value, name, type=type, min_upload_size=min_upload_size, key=key)
+        # List
+        if isinstance(value, list):
+            value = dumps(value)
+            type = type or Dtype.list
+            return cls.from_value(value, name, type=type, min_upload_size=min_upload_size, key=key)
+        # Dict
+        if isinstance(value, dict):
+            value = dumps(value)
+            type = type or Dtype.dict
+            return cls.from_value(value, name, type=type, min_upload_size=min_upload_size, key=key)
+        # Dataclass # https://docs.python.org/3/library/dataclasses.html#dataclasses.is_dataclass
+        if is_dataclass(value) and not isinstance(value, type):
+            value = asdict(value)
+            type = type or Dtype.dict
+            return cls.from_value(value, name=name, type=type, min_upload_size=min_upload_size, key=key)
+        # Image
+        if isinstance(value, Image.Image):
+            buffer = BytesIO()
+            format = "PNG" if value.mode == "RGBA" else "JPEG"
+            value.save(buffer, format=format)
+            data = Storage.upload(buffer, UploadType.Feature, name=name, data_url_limit=min_upload_size, key=key)
+            type = type or Dtype.image
+            return Feature(data, type)
+        # Path
+        if isinstance(value, Path):
+            assert value.is_file(), "Feature path must point to a file, not a directory"
+            value = value.expanduser().resolve()
+            data = Storage.upload(value, UploadType.Feature, name=name, data_url_limit=min_upload_size, key=key)
+            type = type or cls.__get_file_dtype(value)
+            return Feature(data, type)
+        # Unsupported
+        raise RuntimeError(f"Cannot create feature '{name}' for value {value} of type {type(value)}")
+
+    @classmethod
+    def __get_file_dtype (cls, path: Path) -> Dtype:
+        mime = guess_mime(str(path))
+        if not mime:
+            return Dtype.binary
+        if mime.startswith("image"):
+            return Dtype.image
+        if mime.startswith("video"):
+            return Dtype.video
+        if mime.startswith("audio"):
+            return Dtype.audio
+        if path.suffix in [".obj", ".gltf", ".glb", ".fbx", ".usd", ".usdz", ".blend"]:
+            return Dtype._3d
+        return Dtype.binary
+
+    @classmethod
+    def __download_feature_data (cls, url: str) -> BytesIO:
+        # Check if data URL
+        if url.startswith("data:"):
+            with urlopen(url) as response:
+                return BytesIO(response.read())
+        # Download
+        response = get(url)
+        result = BytesIO(response.content)
+        return result
```
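The `Feature` added above replaces both the old read-only `Feature` and `FeatureInput`: `from_value` serializes a Python value (uploading anything above `min_upload_size` through `Storage`, inlining smaller payloads as data URLs), and `to_value` reverses it. A minimal round-trip sketch, assuming fxn 0.0.11 and its dependencies are installed; the small array stays below the default 4096-byte threshold, so no upload (and no access key) is needed:

```python
from numpy import array
from fxn.api import Feature

# Serialize a small float array; it is inlined as a base64 data URL.
feature = Feature.from_value(array([0.25, 0.5, 0.75], dtype="float32"), "scores")
print(feature.type, feature.shape)   # float32 [3]

# Deserialize it back into a NumPy array from the data URL.
print(feature.to_value())            # [0.25 0.5  0.75]
```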
fxn-0.0.11/fxn/api/prediction.py
ADDED

```diff
@@ -0,0 +1,132 @@
+#
+# Function
+# Copyright © 2023 NatML Inc. All Rights Reserved.
+#
+
+from __future__ import annotations
+from dataclasses import asdict, dataclass
+from numpy import ndarray
+from pathlib import Path
+from PIL import Image
+from platform import system
+from typing import Any, Dict, List, Union
+from uuid import uuid4
+
+from .api import query
+from .feature import Feature
+from .predictor import PredictorType
+
+@dataclass(frozen=True)
+class Prediction:
+    """
+    Prediction.
+
+    Members:
+        id (str): Prediction ID.
+        tag (str): Predictor tag.
+        type (PredictorType): Prediction type.
+        created (str): Date created.
+    """
+    id: str
+    tag: str
+    type: PredictorType
+    created: str
+    FIELDS = f"""
+    id
+    tag
+    type
+    created
+    ... on CloudPrediction {{
+        results {{
+            data
+            type
+            shape
+        }}
+        latency
+        error
+        logs
+    }}
+    """
+
+    @classmethod
+    def create (
+        cls,
+        tag: str,
+        raw_outputs: bool=False,
+        return_binary_path: bool=True,
+        data_url_limit: int=None,
+        access_key: str=None,
+        **inputs: Dict[str, Union[ndarray, str, float, int, bool, List, Dict[str, Any], Path, Image.Image, Feature]],
+    ) -> Union[CloudPrediction, EdgePrediction]:
+        """
+        Create a prediction.
+
+        Parameters:
+            tag (str): Predictor tag.
+            raw_outputs (bool): Skip parsing output features into Pythonic data types.
+            return_binary_path (bool): Write binary features to file and return a `Path` instead of returning `BytesIO` instance.
+            data_url_limit (int): Return a data URL if a given output feature is smaller than this size in bytes. Only applies to `CLOUD` predictions.
+            access_key (str): Function access key.
+            inputs (dict): Input features. Only applies to `CLOUD` predictions.
+
+        Returns:
+            CloudPrediction | EdgePrediction: Created prediction.
+        """
+        # Collect inputs
+        key = uuid4().hex
+        inputs = { name: Feature.from_value(value, name, key=key) for name, value in inputs.items() }
+        inputs = [{ "name": name, **asdict(feature) } for name, feature in inputs.items()]
+        # Query
+        response = query(f"""
+            mutation ($input: CreatePredictionInput!) {{
+                createPrediction (input: $input) {{
+                    {cls.FIELDS}
+                }}
+            }}""",
+            { "input": { "tag": tag, "client": cls.__get_client(), "inputs": inputs, "dataUrlLimit": data_url_limit } },
+            access_key=access_key
+        )
+        # Check
+        prediction = response["createPrediction"]
+        if not prediction:
+            return None
+        # Parse results
+        if "results" in prediction and prediction["results"] is not None:
+            prediction["results"] = [Feature(**feature).to_value(return_binary_path=return_binary_path) if not raw_outputs else Feature(**feature) for feature in prediction["results"]]
+        # Create
+        prediction = CloudPrediction(**prediction) if prediction["type"] == PredictorType.Cloud else EdgePrediction(**prediction)
+        # Return
+        return prediction
+
+    @classmethod
+    def __get_client (cls) -> str:
+        id = system()
+        if id == "Darwin":
+            return "macos"
+        if id == "Linux":
+            return "linux"
+        if id == "Windows":
+            return "windows"
+        raise RuntimeError(f"Function cannot make predictions on the {id} platform")
+
+@dataclass(frozen=True)
+class CloudPrediction (Prediction):
+    """
+    Cloud prediction.
+
+    Members:
+        results (list): Prediction results.
+        latency (float): Prediction latency in milliseconds.
+        error (str): Prediction error. This is `null` if the prediction completed successfully.
+        logs (str): Prediction logs.
+    """
+    results: List[Feature] = None
+    latency: float = None
+    error: str = None
+    logs: str = None
+
+@dataclass(frozen=True)
+class EdgePrediction (Prediction):
+    """
+    Edge prediction.
+    """
```
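Compared with the deleted 0.0.9 implementation further down, `Prediction.create` no longer takes positional `FeatureInput` arguments: keyword inputs are converted with `Feature.from_value` under a shared upload `key`, and results come back as parsed Python values unless `raw_outputs=True`. A hedged sketch of how it might be called; the tag, input name, and access key are placeholders, not taken from this diff:

```python
from fxn.api import Prediction

# Placeholders: substitute a real predictor tag and Function access key.
prediction = Prediction.create(
    tag="@owner/some-predictor",
    access_key="fxn_...",
    prompt="a watercolor painting of a lighthouse",
)
# CLOUD predictions expose results / error / logs; binary outputs arrive as Path by default.
if prediction:
    print(prediction.results if not prediction.error else prediction.logs)
```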
{fxn-0.0.9 → fxn-0.0.11}/fxn/api/predictor.py

```diff
@@ -65,10 +65,11 @@ class Predictor:
                 description
                 range
                 optional
-
-
-
-
+                enumeration {{
+                    name
+                    value
+                }}
+                default_value: defaultValue
             }}
             outputs {{
                 name
@@ -118,6 +119,54 @@ class Predictor:
         # Return
         return predictor
 
+    @classmethod
+    def list (
+        cls,
+        owner: str=None,
+        status: PredictorStatus=None,
+        offset: int=None,
+        count: int=None,
+        access_key: str=None
+    ) -> List[Predictor]:
+        """
+        List the current user's predictors.
+
+        Parameters:
+            owner (str): Predictor owner. This defaults to the current user.
+            status (PredictorStatus): Predictor status. This defaults to `ACTIVE`.
+            offset (int): Pagination offset.
+            count (int): Pagination count.
+            access_key (str): Function access key.
+
+        Returns:
+            list: User predictors.
+        """
+        # Query
+        response = query(f"""
+            query ($user: UserInput, $predictors: UserPredictorsInput) {{
+                user (input: $user) {{
+                    predictors (input: $predictors) {{
+                        {cls.FIELDS}
+                    }}
+                }}
+            }}
+            """,
+            {
+                "user": { "username": owner } if owner else None,
+                "predictors": { "status": status, "offset": offset, "count": count }
+            },
+            access_key=access_key
+        )
+        # Check
+        user = response["user"]
+        if not user:
+            return None
+        # Create predictors
+        predictors = response["user"]["predictors"]
+        predictors = [Predictor(**predictor) for predictor in predictors]
+        # Return
+        return predictors
+
     @classmethod
     def search (
         cls,
@@ -131,6 +180,8 @@ class Predictor:
 
         Parameters:
             query (str): Search query.
+            offset (int): Pagination offset.
+            count (int): Pagination count.
             access_key (str): Function access key.
 
         Returns:
@@ -312,20 +363,32 @@ class Parameter:
         description (str): Parameter description.
         optional (bool): Parameter is optional.
         range (tuple): Parameter value range for numeric parameters.
-
-
-        intDefault (int): Parameter default integer value.
-        boolDefault (bool): Parameter default boolean value.
+        enumeration (list): Parameter value choices for enumeration parameters.
+        default_value (str | float | int | bool): Parameter default value.
     """
     name: Optional[str] = None
     type: Optional[Dtype] = None
     description: Optional[str] = None
     optional: Optional[bool] = None
     range: Optional[Tuple[float, float]] = None
-
-
-
-
+    enumeration: Optional[List[EnumerationMember]] = None
+    default_value: Optional[Union[str, float, int, bool]] = None
+
+    def __post_init__ (self):
+        enumeration = [EnumerationMember(**member) if isinstance(member, dict) else member for member in self.enumeration] if self.enumeration else self.enumeration
+        object.__setattr__(self, "enumeration", enumeration)
+
+@dataclass(frozen=True)
+class EnumerationMember:
+    """
+    Prediction parameter enumeration member.
+
+    Members:
+        name (str): Enumeration member name.
+        value (str | float | int): Enumeration member value.
+    """
+    name: str
+    value: Union[str, float, int]
 
 class Acceleration (str, Enum):
     """
```
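Two things land in `predictor.py`: a `Predictor.list` classmethod backed by the `user.predictors` GraphQL query, and richer `Parameter` metadata (`enumeration` plus a single `default_value` in place of the per-type defaults). A hedged sketch of walking the new fields, assuming a valid access key and that a predictor's signature exposes its input parameters as in prior releases:

```python
from fxn.api import Predictor, PredictorStatus

# Placeholder access key; owner defaults to the calling user.
predictors = Predictor.list(status=PredictorStatus.Active, count=10, access_key="fxn_...")
for predictor in predictors or []:
    for parameter in predictor.signature.inputs:        # assumed Signature layout
        if parameter.enumeration:
            choices = [member.value for member in parameter.enumeration]
            print(predictor.tag, parameter.name, choices, "default:", parameter.default_value)
```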
fxn-0.0.11/fxn/api/storage.py
ADDED

```diff
@@ -0,0 +1,146 @@
+#
+# Function
+# Copyright © 2023 NatML Inc. All Rights Reserved.
+#
+
+from base64 import b64encode
+from enum import Enum
+from io import BytesIO
+from filetype import guess_mime
+from pathlib import Path
+from requests import put
+from rich.progress import open as open_progress, wrap_file
+from typing import Union
+
+from .api import query
+
+class UploadType (str, Enum):
+    """
+    Upload URL type.
+    """
+    Feature = "FEATURE"
+    Media = "MEDIA"
+    Notebook = "NOTEBOOK"
+
+class Storage:
+    """
+    Upload and download files.
+    """
+
+    @classmethod
+    def create_upload_url (
+        cls,
+        name: str,
+        type: UploadType,
+        key: str=None
+    ) -> str:
+        """
+        Create an upload URL.
+
+        Parameters:
+            name (str): File name.
+            type (UploadType): Upload type.
+            key (str): File key. This is useful for grouping related files.
+
+        Returns:
+            str: File upload URL.
+        """
+        # Query
+        response = query(f"""
+            mutation ($input: CreateUploadUrlInput!) {{
+                createUploadUrl (input: $input)
+            }}
+            """,
+            { "input": { "type": type, "name": name, "key": key } }
+        )
+        url = response["createUploadUrl"]
+        # Return
+        return url
+
+    @classmethod
+    def upload (
+        cls,
+        file: Union[str, Path, BytesIO],
+        type: UploadType,
+        name: str=None,
+        data_url_limit: int=None,
+        key: str=None,
+        verbose: bool=False
+    ) -> str:
+        """
+        Upload a file and return the URL.
+
+        Parameters:
+            file (str | Path | BytesIO): File path.
+            type (UploadType): File type.
+            name (str): File name. This MUST be provided if `file` is not a file path.
+            data_url_limit (int): Return a data URL if the output feature is smaller than this limit (in bytes).
+            key (str): File key. This is useful for grouping related files.
+            verbose (bool): Print a progress bar for the upload.
+
+        Returns:
+            str: Upload URL.
+        """
+        file = Path(file) if isinstance(file, str) else file
+        if isinstance(file, Path):
+            return cls.__upload_file(file, type, name=name, key=key, data_url_limit=data_url_limit, verbose=verbose)
+        else:
+            return cls.__upload_buffer(file, type, name=name, key=key, data_url_limit=data_url_limit, verbose=verbose)
+
+    @classmethod
+    def __upload_file (
+        cls,
+        file: Path,
+        type: UploadType,
+        name: str=None,
+        key: str=None,
+        data_url_limit: int=None,
+        verbose: bool=False
+    ) -> str:
+        # Check file
+        assert file.exists(), f"Cannot upload {file.name} because the file does not exist"
+        assert file.is_file(), f"Cannot upload {file.name} becaause it does not point to a file"
+        # Create data URL
+        mime = guess_mime(file) or "application/octet-stream"
+        if file.stat().st_size <= (data_url_limit or 0):
+            with open(file, mode="rb") as f:
+                buffer = BytesIO(f.read())
+            return cls.__create_data_url(buffer, mime)
+        # Upload
+        name = name or file.name
+        url = cls.create_upload_url(name, type, key=key)
+        with open_progress(file, mode="rb", description=name, disable=not verbose) as f:
+            put(url, data=f, headers={ "Content-Type": mime }).raise_for_status()
+        # Return
+        return url
+
+    @classmethod
+    def __upload_buffer (
+        cls,
+        file: BytesIO,
+        type: UploadType,
+        name: str=None,
+        key: str=None,
+        data_url_limit: int=None,
+        verbose: bool=False
+    ) -> str:
+        # Check name
+        assert name, "You must specify the file `name` if the `file` is not a path"
+        # Create data URL
+        file.seek(0)
+        mime = guess_mime(file) or "application/octet-stream"
+        size = file.getbuffer().nbytes
+        if size <= (data_url_limit or 0):
+            return cls.__create_data_url(file, mime)
+        # Upload
+        url = cls.create_upload_url(name, type, key=key)
+        with wrap_file(file, total=size, description=name, disable=not verbose) as f:
+            put(url, data=f, headers={ "Content-Type": mime }).raise_for_status()
+        # Return
+        return url
+
+    @classmethod
+    def __create_data_url (cls, file: BytesIO, mime: str) -> str:
+        encoded_data = b64encode(file.getvalue()).decode("ascii")
+        url = f"data:{mime};base64,{encoded_data}"
+        return url
```
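Relative to the deleted 0.0.9 version near the end of this diff, `Storage.upload` now accepts in-memory `BytesIO` buffers and honors `data_url_limit`, returning a base64 data URL for small payloads instead of calling `createUploadUrl`. A small sketch; because the buffer is under the limit, this runs without any network access or access key:

```python
from io import BytesIO
from fxn.api import Storage, UploadType

# Payloads at or below `data_url_limit` are inlined rather than uploaded.
url = Storage.upload(BytesIO(b"hello"), UploadType.Feature, name="greeting.txt", data_url_limit=4096)
print(url)   # data:application/octet-stream;base64,aGVsbG8=
```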
{fxn-0.0.9 → fxn-0.0.11}/fxn/cli/__init__.py

```diff
@@ -9,7 +9,7 @@ from .auth import app as auth_app
 from .env import app as env_app
 from .misc import cli_options
 from .predict import predict
-from .predictors import archive_predictor, create_predictor, delete_predictor, retrieve_predictor, search_predictors
+from .predictors import archive_predictor, create_predictor, delete_predictor, list_predictors, retrieve_predictor, search_predictors
 from ..version import __version__
 
 # Define CLI
@@ -32,6 +32,7 @@ app.add_typer(env_app, name="env", help="Manage predictor environment variables.")
 app.command(name="create", help="Create a predictor.")(create_predictor)
 app.command(name="delete", help="Delete a predictor.")(delete_predictor)
 app.command(name="predict", help="Make a prediction.", context_settings={ "allow_extra_args": True, "ignore_unknown_options": True })(predict)
+app.command(name="list", help="List predictors.")(list_predictors)
 app.command(name="search", help="Search predictors.")(search_predictors)
 app.command(name="retrieve", help="Retrieve a predictor.")(retrieve_predictor)
 app.command(name="archive", help="Archive a predictor.")(archive_predictor)
```
{fxn-0.0.9 → fxn-0.0.11}/fxn/cli/predict.py

```diff
@@ -4,8 +4,9 @@
 #
 
 from dataclasses import asdict
+from io import BytesIO
 from numpy import ndarray
-from pathlib import Path
+from pathlib import Path, PurePath
 from PIL import Image
 from rich import print_json
 from tempfile import mkstemp
@@ -16,7 +17,7 @@ from .auth import get_access_key
 
 def predict (
     tag: str = Argument(..., help="Predictor tag."),
-    raw_outputs: bool = Option(False, "--raw-outputs", help="
+    raw_outputs: bool = Option(False, "--raw-outputs", help="Output raw features instead of parsing into Pythonic data."),
     context: Context = 0
 ):
     # Predict
@@ -25,6 +26,7 @@ def predict (
         tag=tag,
         **inputs,
         raw_outputs=raw_outputs,
+        return_binary_path=True,
         access_key=get_access_key()
     )
     # Parse results
@@ -34,7 +36,7 @@ def predict (
         results = [_serialize_feature(feature) for feature in prediction.results]
         object.__setattr__(prediction, "results", results)
     # Print
-    print_json(data=asdict(prediction))
+    print_json(data=asdict(prediction, dict_factory=_prediction_dict_factory))
     # Show images
     for image in images:
         image.show()
@@ -79,5 +81,20 @@ def _serialize_feature (feature):
         _, path = mkstemp(suffix=".png" if feature.mode == "RGBA" else ".jpg")
         feature.save(path)
         return path
+    # Serialize `BytesIO`
+    if isinstance(feature, BytesIO):
+        return str(feature)
+    # Serialize `Path`
+    if isinstance(feature, PurePath):
+        return str(feature)
     # Return
-    return feature
+    return feature
+
+def _prediction_dict_factory (kv_pairs):
+    # Check if feature
+    FEATURE_KEYS = ["data", "type", "shape"]
+    keys = [k for k, _ in kv_pairs]
+    is_feature = all(k in keys for k in FEATURE_KEYS)
+    kv_pairs = [(k, v) for k, v in kv_pairs if v is not None] if is_feature else kv_pairs
+    # Construct
+    return dict(kv_pairs)
```
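`_prediction_dict_factory` exists so that `fxn predict --raw-outputs` prints raw features without their `None` fields (for example a missing `shape`). A quick illustration of the helper's behavior; it is a private function of the CLI module, imported here purely for demonstration:

```python
from dataclasses import asdict
from fxn.api import Feature
from fxn.cli.predict import _prediction_dict_factory   # private helper, shown in the hunk above

feature = Feature(data="data:text/plain;base64,aGk=", type="string")   # shape is left as None
print(asdict(feature, dict_factory=_prediction_dict_factory))
# {'data': 'data:text/plain;base64,aGk=', 'type': 'string'}  <- the None shape is dropped
```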
{fxn-0.0.9 → fxn-0.0.11}/fxn/cli/predictors.py

```diff
@@ -9,7 +9,7 @@ from pathlib import Path
 from typer import Argument, Option
 from typing import List
 
-from ..api import Acceleration, AccessMode, Predictor, PredictorType
+from ..api import Acceleration, AccessMode, Predictor, PredictorStatus, PredictorType
 from .auth import get_access_key
 
 def retrieve_predictor (
@@ -22,6 +22,22 @@ def retrieve_predictor (
     predictor = asdict(predictor) if predictor else None
     print_json(data=predictor)
 
+def list_predictors (
+    owner: str=Option(None, help="Predictor owner. This defaults to the current user."),
+    status: PredictorStatus=Option(PredictorStatus.Active, help="Predictor status. This defaults to `ACTIVE`."),
+    offset: int=Option(None, help="Pagination offset."),
+    count: int=Option(None, help="Pagination count.")
+):
+    predictors = Predictor.list(
+        owner=owner,
+        status=status,
+        offset=offset,
+        count=count,
+        access_key=get_access_key()
+    )
+    predictors = [asdict(predictor) for predictor in predictors] if predictors is not None else None
+    print_json(data=predictors)
+
 def search_predictors (
     query: str=Argument(..., help="Search query."),
     offset: int=Option(None, help="Pagination offset."),
@@ -44,11 +60,11 @@ def create_predictor (
     description: str=Option(None, help="Predictor description. This supports Markdown."),
     media: Path=Option(None, help="Predictor image path."),
     acceleration: Acceleration=Option(None, case_sensitive=False, help="Cloud predictor acceleration. This defaults to `CPU`."),
-    environment: List[str]=Option([], help="Predictor environment variables."),
     license: str=Option(None, help="Predictor license URL."),
+    env: List[str]=Option([], help="Specify a predictor environment variable."),
     overwrite: bool=Option(None, "--overwrite", help="Overwrite any existing predictor with the same tag.")
 ):
-    environment = { e.split("=")[0].strip(): e.split("=")[1].strip() for e in
+    environment = { e.split("=")[0].strip(): e.split("=")[1].strip() for e in env }
     predictor = Predictor.create(
         tag=tag,
         notebook=notebook,
```
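The `create` command's environment option becomes a repeatable `--env KEY=VALUE` flag that is folded into a dict exactly as the replacement line above shows. A tiny sketch of that parsing; the flag values and invocation are illustrative:

```python
# e.g. repeated flags on the CLI: --env FOO=bar --env TOKEN=abc123
env = ["FOO=bar", "TOKEN=abc123"]
environment = { e.split("=")[0].strip(): e.split("=")[1].strip() for e in env }
print(environment)   # {'FOO': 'bar', 'TOKEN': 'abc123'}
```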
fxn-0.0.9/fxn/api/feature.py
DELETED

```diff
@@ -1,23 +0,0 @@
-#
-# Function
-# Copyright © 2023 NatML Inc. All Rights Reserved.
-#
-
-from dataclasses import dataclass
-from typing import List, Optional
-
-from .dtype import Dtype
-
-@dataclass(frozen=True)
-class Feature:
-    """
-    Prediction feature.
-
-    Members:
-        data (str): Feature data URL.
-        type (Dtype): Feature data type.
-        shape (list): Feature shape. This is `None` if shape information is not available or applicable.
-    """
-    data: str
-    type: Dtype
-    shape: Optional[List[int]] = None
```
fxn-0.0.9/fxn/api/featureinput.py
DELETED

```diff
@@ -1,117 +0,0 @@
-#
-# Function
-# Copyright © 2023 NatML Inc. All Rights Reserved.
-#
-
-from __future__ import annotations
-from base64 import b64encode
-from dataclasses import dataclass
-from filetype import guess_mime
-from io import BytesIO
-from numpy import ndarray
-from pathlib import Path
-from PIL import Image
-from typing import Any, Dict, List, Optional, Union
-
-from .dtype import Dtype
-
-@dataclass(frozen=True)
-class FeatureInput:
-    """
-    Prediction input feature.
-
-    Members:
-        name (str): Feature name. This MUST match the input parameter name defined by the predictor.
-        data (str): Feature data URL. This can be a web URL or a data URL.
-        type (Dtype): Feature data type.
-        shape (list): Feature shape. This MUST be provided for array features.
-    """
-    name: str
-    data: str = None
-    type: Dtype = None
-    shape: Optional[List[int]] = None
-    stringValue: str = None
-    floatValue: float = None
-    floatArray: List[float] = None
-    intValue: int = None
-    intArray: List[int] = None
-    boolValue: bool = None
-    listValue: list = None
-    dictValue: Dict[str, Any] = None
-
-    @classmethod
-    def from_value (
-        cls,
-        value: Union[ndarray, str, float, int, bool, List, Dict[str, any], Path, Image.Image],
-        name: str
-    ) -> FeatureInput:
-        """
-        Create a feature input from a given value.
-
-        Parameters:
-            value (any): Value.
-            name (str): Feature name.
-
-        Returns:
-            FeatureInput: Feature input.
-        """
-        # Array
-        if isinstance(value, ndarray):
-            encoded_data = b64encode(value).decode("ascii")
-            data = f"data:application/octet-stream;base64,{encoded_data}"
-            return FeatureInput(name, data, value.dtype.name, list(value.shape))
-        # String
-        if isinstance(value, str):
-            return FeatureInput(name, stringValue=value)
-        # Float
-        if isinstance(value, float):
-            return FeatureInput(name, floatValue=value)
-        # Boolean
-        if isinstance(value, bool):
-            return FeatureInput(name, boolValue=value)
-        # Integer
-        if isinstance(value, int):
-            return FeatureInput(name, intValue=value)
-        # List
-        if isinstance(value, list):
-            return FeatureInput(name, listValue=value)
-        # Dict
-        if isinstance(value, dict):
-            return FeatureInput(name, dictValue=value)
-        # Image
-        if isinstance(value, Image.Image):
-            image_buffer = BytesIO()
-            channels = { "L": 1, "RGB": 3, "RGBA": 4 }[value.mode]
-            format = "PNG" if value.mode == "RGBA" else "JPEG"
-            value.save(image_buffer, format=format)
-            encoded_data = b64encode(image_buffer.getvalue()).decode("ascii")
-            data = f"data:{value.get_format_mimetype()};base64,{encoded_data}"
-            shape = [1, value.height, value.width, channels]
-            return FeatureInput(name, data, Dtype.image, shape)
-        # Path
-        if isinstance(value, Path):
-            assert value.is_file(), "Input path must be a file, not a directory"
-            value = value.expanduser().resolve()
-            type = _file_to_dtype(value)
-            mime = guess_mime(str(value)) or "application/octet-stream"
-            with open(value, "rb") as f:
-                buffer = BytesIO(f.read())
-            encoded_data = b64encode(buffer.getvalue()).decode("ascii")
-            data = f"data:{mime};base64,{encoded_data}"
-            return FeatureInput(name, data, type)
-        # Unsupported
-        raise RuntimeError(f"Cannot create input feature for value {value} of type {type(value)}")
-
-def _file_to_dtype (path: Path) -> str:
-    mime = guess_mime(str(path))
-    if not mime:
-        return Dtype.binary
-    if mime.startswith("image"):
-        return Dtype.image
-    if mime.startswith("video"):
-        return Dtype.video
-    if mime.startswith("audio"):
-        return Dtype.audio
-    if path.suffix in [".obj", ".gltf", ".glb", ".fbx", ".usd", ".usdz", ".blend"]:
-        return Dtype._3d
-    return Dtype.binary
```
fxn-0.0.9/fxn/api/prediction.py
DELETED

```diff
@@ -1,186 +0,0 @@
-#
-# Function
-# Copyright © 2023 NatML Inc. All Rights Reserved.
-#
-
-from __future__ import annotations
-from dataclasses import asdict, dataclass
-from io import BytesIO
-from numpy import frombuffer
-from PIL import Image
-from platform import system
-from requests import get
-from typing import Any, Dict, List, Union
-from urllib.request import urlopen
-
-from .api import query
-from .dtype import Dtype
-from .feature import Feature
-from .featureinput import FeatureInput
-from .predictor import PredictorType
-
-@dataclass(frozen=True)
-class Prediction:
-    """
-    Prediction.
-
-    Members:
-        id (str): Prediction ID.
-        tag (str): Predictor tag.
-        type (PredictorType): Prediction type.
-        created (str): Date created.
-    """
-    id: str
-    tag: str
-    type: PredictorType
-    created: str
-    FIELDS = f"""
-    id
-    tag
-    type
-    created
-    ... on CloudPrediction {{
-        results {{
-            data
-            type
-            shape
-            stringValue
-            listValue
-            dictValue
-        }}
-        latency
-        error
-        logs
-    }}
-    """
-    RAW_FIELDS = f"""
-    id
-    tag
-    type
-    created
-    ... on CloudPrediction {{
-        results {{
-            data
-            type
-            shape
-        }}
-        latency
-        error
-        logs
-    }}
-    """
-
-    @classmethod
-    def create (
-        cls,
-        tag: str,
-        *features: List[FeatureInput],
-        data_url_limit: int=None,
-        raw_outputs: bool=False,
-        access_key: str=None,
-        **inputs: Dict[str, Any],
-    ) -> Union[CloudPrediction, EdgePrediction]:
-        """
-        Create a prediction.
-
-        Parameters:
-            tag (str): Predictor tag.
-            features (list): Input features. Only applies to `CLOUD` predictions.
-            data_url_limit (int): Return a data URL if a given output feature is smaller than this limit in bytes. Only applies to `CLOUD` predictions.
-            raw_outputs (bool): Skip parsing output features into Pythonic data types.
-            access_key (str): Function access key.
-            inputs (dict): Input features. Only applies to `CLOUD` predictions.
-
-        Returns:
-            CloudPrediction | EdgePrediction: Created prediction.
-        """
-        # Collect input features
-        input_features = list(features) + [FeatureInput.from_value(value, name) for name, value in inputs.items()]
-        input_features = [asdict(feature) for feature in input_features]
-        # Query
-        response = query(f"""
-            mutation ($input: CreatePredictionInput!) {{
-                createPrediction (input: $input) {{
-                    {cls.RAW_FIELDS if raw_outputs else cls.FIELDS}
-                }}
-            }}""",
-            { "input": { "tag": tag, "client": _get_client(), "inputs": input_features, "dataUrlLimit": data_url_limit } },
-            access_key=access_key
-        )
-        # Check
-        prediction = response["createPrediction"]
-        if not prediction:
-            return None
-        # Parse results
-        if "results" in prediction and not raw_outputs:
-            prediction["results"] = [_parse_output_feature(feature) for feature in prediction["results"]] if prediction["results"] is not None else None
-        # Create
-        prediction = CloudPrediction(**prediction) if prediction["type"] == PredictorType.Cloud else EdgePrediction(**prediction)
-        # Return
-        return prediction
-
-@dataclass(frozen=True)
-class CloudPrediction (Prediction):
-    """
-    Cloud prediction.
-
-    Members:
-        results (list): Prediction results.
-        latency (float): Prediction latency in milliseconds.
-        error (str): Prediction error. This is `null` if the prediction completed successfully.
-        logs (str): Prediction logs.
-    """
-    results: List[Feature] = None
-    latency: float = None
-    error: str = None
-    logs: str = None
-
-@dataclass(frozen=True)
-class EdgePrediction (Prediction):
-    """
-    Edge prediction
-    """
-
-def _parse_output_feature (feature: dict) -> Union[Feature, str, float, int, bool, Image.Image, list, dict]:
-    data, type, shape = feature["data"], feature["type"], feature["shape"]
-    # Handle image
-    if type == Dtype.image:
-        return Image.open(_download_feature_data(data))
-    # Handle non-numeric scalars
-    values = [feature.get(key, None) for key in ["stringValue", "listValue", "dictValue"]]
-    scalar = next((value for value in values if value is not None), None)
-    if scalar is not None:
-        return scalar
-    # Handle ndarray
-    ARRAY_TYPES = [
-        Dtype.int8, Dtype.int16, Dtype.int32, Dtype.int64,
-        Dtype.uint8, Dtype.uint16, Dtype.uint32, Dtype.uint64,
-        Dtype.float16, Dtype.float32, Dtype.float64, Dtype.bool
-    ]
-    if type in ARRAY_TYPES:
-        # Create array
-        array = frombuffer(_download_feature_data(data).getbuffer(), dtype=type).reshape(shape)
-        return array if len(shape) > 0 else array.item()
-    # Handle generic feature
-    feature = Feature(**feature)
-    return feature
-
-def _download_feature_data (url: str) -> BytesIO:
-    # Check if data URL
-    if url.startswith("data:"):
-        with urlopen(url) as response:
-            return BytesIO(response.read())
-    # Download
-    response = get(url)
-    result = BytesIO(response.content)
-    return result
-
-def _get_client () -> str:
-    id = system()
-    if id == "Darwin":
-        return "macos"
-    if id == "Linux":
-        return "linux"
-    if id == "Windows":
-        return "windows"
-    raise RuntimeError(f"Function cannot make predictions on the {id} platform")
```
fxn-0.0.9/fxn/api/storage.py
DELETED

```diff
@@ -1,99 +0,0 @@
-#
-# Function
-# Copyright © 2023 NatML Inc. All Rights Reserved.
-#
-
-from enum import Enum
-from io import BytesIO
-from mimetypes import guess_type
-from pathlib import Path
-from requests import put
-from rich.progress import open as open_progress
-from typing import Union
-
-from .api import query
-
-class UploadType (str, Enum):
-    """
-    Upload URL type.
-    """
-    Feature = "FEATURE"
-    Media = "MEDIA"
-    Notebook = "NOTEBOOK"
-
-class Storage:
-    """
-    Upload and download files.
-    """
-
-    @classmethod
-    def create_upload_url (
-        cls,
-        name: str,
-        type: UploadType,
-        key: str=None
-    ) -> str:
-        """
-        Create an upload URL.
-
-        Parameters:
-            name (str): File name.
-            type (UploadType): Upload type.
-            key (str): File key. This is useful for grouping related files.
-
-        Returns:
-            str: File upload URL.
-        """
-        # Query
-        response = query(f"""
-            mutation ($input: CreateUploadUrlInput!) {{
-                createUploadUrl (input: $input)
-            }}
-            """,
-            { "input": { "type": type, "name": name, "key": key } }
-        )
-        url = response["createUploadUrl"]
-        # Return
-        return url
-
-    @classmethod
-    def upload ( # INCOMPLETE # `bytes` and `BytesIO` support` # Data URL limit
-        cls,
-        file: Union[str, Path, BytesIO, bytes],
-        type: UploadType,
-        name: str=None,
-        key: str=None,
-        data_url_limit: int=0,
-        verbose: bool=False
-    ) -> str:
-        """
-        Upload a file and return the URL.
-
-        Parameters:
-            file (str | Path | BytesIO | bytes): File path.
-            type (UploadType): File type.
-            name (str): File name. This MUST be provided if `file` is not a file path.
-            key (str): File key. This is useful for grouping related files.
-            data_url_limit (int): Return a data URL if the output feature is smaller than this limit (in bytes).
-            check_extension (bool): Validate file extensions before uploading.
-            verbose (bool): Print a progress bar for the upload.
-
-        Returns:
-            str: Upload URL.
-        """
-        # Create path
-        file = Path(file) if isinstance(file, str) else file
-        # Check path
-        if not file.exists():
-            raise RuntimeError(f"Cannot upload {file.name} because the file does not exist")
-        # Check file
-        if not file.is_file():
-            raise RuntimeError(f"Cannot upload {file.name} becaause it does not point to a file")
-        # Get upload URL
-        name = name or file.name
-        mime = guess_type(file, strict=False)[0] or "application/octet-stream"
-        url = cls.create_upload_url(name, type, key=key)
-        with open_progress(file, "rb", description=name, disable=not verbose) as f:
-            put(url, data=f, headers={ "Content-Type": mime }).raise_for_status()
-        # Return
-        return url
```
{fxn-0.0.9 → fxn-0.0.11}/LICENSE
RENAMED
File without changes

The remaining 17 files listed above with +0 -0 (README.md, fxn/__init__.py, fxn/api/api.py, fxn/api/environment.py, fxn/api/profile.py, fxn/api/tag.py, fxn/api/user.py, fxn/cli/auth.py, fxn/cli/env.py, fxn/cli/misc.py, fxn/magic.py, fxn.egg-info/dependency_links.txt, fxn.egg-info/entry_points.txt, fxn.egg-info/requires.txt, fxn.egg-info/top_level.txt, setup.cfg, setup.py) were likewise renamed without changes.