fxn 0.0.41-py3-none-any.whl → 0.0.43-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fxn/__init__.py +4 -1
- fxn/beta/__init__.py +6 -0
- fxn/beta/client.py +16 -0
- fxn/beta/prediction.py +16 -0
- fxn/beta/remote.py +207 -0
- fxn/c/__init__.py +1 -1
- fxn/c/configuration.py +1 -1
- fxn/c/fxnc.py +1 -1
- fxn/c/map.py +1 -1
- fxn/c/prediction.py +2 -2
- fxn/c/predictor.py +2 -3
- fxn/c/stream.py +2 -3
- fxn/c/value.py +1 -1
- fxn/cli/__init__.py +10 -10
- fxn/cli/auth.py +1 -1
- fxn/cli/compile.py +141 -0
- fxn/cli/misc.py +1 -1
- fxn/cli/predictions.py +17 -14
- fxn/cli/predictors.py +31 -48
- fxn/client.py +85 -12
- fxn/compile.py +76 -0
- fxn/function.py +6 -2
- fxn/lib/__init__.py +1 -1
- fxn/lib/linux/arm64/libFunction.so +0 -0
- fxn/lib/linux/x86_64/libFunction.so +0 -0
- fxn/lib/macos/arm64/Function.dylib +0 -0
- fxn/lib/macos/x86_64/Function.dylib +0 -0
- fxn/lib/windows/arm64/Function.dll +0 -0
- fxn/lib/windows/x86_64/Function.dll +0 -0
- fxn/logging.py +137 -0
- fxn/sandbox.py +206 -0
- fxn/services/__init__.py +1 -1
- fxn/services/prediction.py +32 -32
- fxn/services/predictor.py +6 -3
- fxn/services/user.py +6 -3
- fxn/types/__init__.py +3 -3
- fxn/types/dtype.py +1 -1
- fxn/types/prediction.py +12 -2
- fxn/types/predictor.py +3 -14
- fxn/types/user.py +1 -1
- fxn/version.py +2 -2
- {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/METADATA +3 -3
- fxn-0.0.43.dist-info/RECORD +47 -0
- {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/WHEEL +1 -1
- fxn/cli/env.py +0 -40
- fxn-0.0.41.dist-info/RECORD +0 -40
- {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/LICENSE +0 -0
- {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/entry_points.txt +0 -0
- {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/top_level.txt +0 -0
fxn/__init__.py
CHANGED
@@ -1,8 +1,11 @@
 #
 # Function
-# Copyright ©
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
+from .client import FunctionAPIError
+from .compile import compile
 from .function import Function
+from .sandbox import Sandbox
 from .types import *
 from .version import *
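The three new top-level exports above (FunctionAPIError, compile, Sandbox) make the compiler workflow available straight from the package root. A minimal sketch of the error type in use, assuming Function still picks up the access key from the environment as in prior releases; the predictor tag and input are illustrative placeholders, not taken from this diff:

from fxn import Function, FunctionAPIError

fxn = Function()
try:
    # "@samples/greeting" and its "name" input are hypothetical placeholders.
    prediction = fxn.predictions.create("@samples/greeting", inputs={ "name": "Yusuf" })
    print(prediction.results)
except FunctionAPIError as error:
    print(f"Prediction failed: {error}")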
fxn/beta/__init__.py
ADDED
fxn/beta/client.py
ADDED
@@ -0,0 +1,16 @@
+#
+# Function
+# Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from ..client import FunctionClient
+from .prediction import PredictionService
+
+class BetaClient:
+    """
+    Client for incubating features.
+    """
+    predictions: PredictionService
+
+    def __init__ (self, client: FunctionClient):
+        self.predictions = PredictionService(client)
fxn/beta/prediction.py
ADDED
@@ -0,0 +1,16 @@
+#
+# Function
+# Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from ..client import FunctionClient
+from .remote import RemotePredictionService
+
+class PredictionService:
+    """
+    Make predictions.
+    """
+    remote: RemotePredictionService
+
+    def __init__ (self, client: FunctionClient):
+        self.remote = RemotePredictionService(client)
fxn/beta/remote.py
ADDED
@@ -0,0 +1,207 @@
+#
+# Function
+# Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from __future__ import annotations
+from base64 import b64encode
+from dataclasses import asdict, is_dataclass
+from enum import Enum
+from io import BytesIO
+from json import dumps, loads
+from numpy import array, frombuffer, ndarray
+from PIL import Image
+from pydantic import BaseModel, Field
+from requests import get, put
+from typing import Any
+from urllib.request import urlopen
+
+from ..c import Configuration
+from ..client import FunctionClient
+from ..services import Value
+from ..types import Dtype, Prediction
+
+class RemoteAcceleration (str, Enum):
+    """
+    Remote acceleration.
+    """
+    Auto = "auto"
+    CPU = "cpu"
+    A40 = "a40"
+    A100 = "a100"
+
+class RemotePredictionService:
+    """
+    Make remote predictions.
+    """
+
+    def __init__ (self, client: FunctionClient):
+        self.client = client
+
+    def create (
+        self,
+        tag: str,
+        *,
+        inputs: dict[str, Value],
+        acceleration: RemoteAcceleration=RemoteAcceleration.Auto
+    ) -> Prediction:
+        """
+        Create a remote prediction.
+
+        Parameters:
+            tag (str): Predictor tag.
+            inputs (dict): Input values.
+            acceleration (RemoteAcceleration): Prediction acceleration.
+
+        Returns:
+            Prediction: Created prediction.
+        """
+        input_map = { name: self.__to_value(value, name=name).model_dump(mode="json") for name, value in inputs.items() }
+        prediction = self.client.request(
+            method="POST",
+            path="/predictions/remote",
+            body={
+                "tag": tag,
+                "inputs": input_map,
+                "acceleration": acceleration,
+                "clientId": Configuration.get_client_id()
+            },
+            response_type=RemotePrediction
+        )
+        results = list(map(self.__to_object, prediction.results)) if prediction.results is not None else None
+        prediction = Prediction(**{ **prediction.model_dump(), "results": results })
+        return prediction
+
+    def __to_value (
+        self,
+        object: Value,
+        *,
+        name: str,
+        max_data_url_size: int=4 * 1024 * 1024
+    ) -> RemoteValue:
+        object = self.__try_ensure_serializable(object)
+        if object is None:
+            return RemoteValue(data=None, type=Dtype.null)
+        elif isinstance(object, float):
+            object = array(object, dtype=Dtype.float32)
+            return self.__to_value(object, name=name, max_data_url_size=max_data_url_size)
+        elif isinstance(object, bool):
+            object = array(object, dtype=Dtype.bool)
+            return self.__to_value(object, name=name, max_data_url_size=max_data_url_size)
+        elif isinstance(object, int):
+            object = array(object, dtype=Dtype.int32)
+            return self.__to_value(object, name=name, max_data_url_size=max_data_url_size)
+        elif isinstance(object, ndarray):
+            buffer = BytesIO(object.tobytes())
+            data = self.__upload(buffer, name=name, max_data_url_size=max_data_url_size)
+            return RemoteValue(data=data, type=object.dtype.name, shape=list(object.shape))
+        elif isinstance(object, str):
+            buffer = BytesIO(object.encode())
+            data = self.__upload(buffer, name=name, mime="text/plain", max_data_url_size=max_data_url_size)
+            return RemoteValue(data=data, type=Dtype.string)
+        elif isinstance(object, list):
+            buffer = BytesIO(dumps(object).encode())
+            data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size)
+            return RemoteValue(data=data, type=Dtype.list)
+        elif isinstance(object, dict):
+            buffer = BytesIO(dumps(object).encode())
+            data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size)
+            return RemoteValue(data=data, type=Dtype.dict)
+        elif isinstance(object, Image.Image):
+            buffer = BytesIO()
+            format = "PNG" if object.mode == "RGBA" else "JPEG"
+            mime = f"image/{format.lower()}"
+            object.save(buffer, format=format)
+            data = self.__upload(buffer, name=name, mime=mime, max_data_url_size=max_data_url_size)
+            return RemoteValue(data=data, type=Dtype.image)
+        elif isinstance(object, BytesIO):
+            data = self.__upload(object, name=name, max_data_url_size=max_data_url_size)
+            return RemoteValue(data=data, type=Dtype.binary)
+        else:
+            raise ValueError(f"Failed to serialize value '{object}' of type `{type(object)}` because it is not supported")
+
+    def __to_object (self, value: RemoteValue) -> Value:
+        if value.type == Dtype.null:
+            return None
+        buffer = self.__download(value.data)
+        if value.type in [
+            Dtype.int8, Dtype.int16, Dtype.int32, Dtype.int64,
+            Dtype.uint8, Dtype.uint16, Dtype.uint32, Dtype.uint64,
+            Dtype.float16, Dtype.float32, Dtype.float64, Dtype.bool
+        ]:
+            assert value.shape is not None, "Array value must have a shape specified"
+            array = frombuffer(buffer.getbuffer(), dtype=value.type).reshape(value.shape)
+            return array if len(value.shape) > 0 else array.item()
+        elif value.type == Dtype.string:
+            return buffer.getvalue().decode("utf-8")
+        elif value.type in [Dtype.list, Dtype.dict]:
+            return loads(buffer.getvalue().decode("utf-8"))
+        elif value.type == Dtype.image:
+            return Image.open(buffer)
+        elif value.type == Dtype.binary:
+            return buffer
+        else:
+            raise ValueError(f"Failed to deserialize value with type `{value.type}` because it is not supported")
+
+    def __upload (
+        self,
+        data: BytesIO,
+        *,
+        name: str,
+        mime: str="application/octet-stream",
+        max_data_url_size: int=4 * 1024 * 1024
+    ) -> str:
+        if data.getbuffer().nbytes <= max_data_url_size:
+            encoded_data = b64encode(data.getvalue()).decode("ascii")
+            return f"data:{mime};base64,{encoded_data}"
+        value = self.client.request(
+            method="POST",
+            path="/values",
+            body={ "name": name },
+            response_type=CreateValueResponse
+        )
+        put(
+            value.upload_url,
+            data=data,
+            headers={ "Content-Type": mime }
+        ).raise_for_status()
+        return value.download_url
+
+    def __download (self, url: str) -> BytesIO:
+        if url.startswith("data:"):
+            with urlopen(url) as response:
+                return BytesIO(response.read())
+        response = get(url)
+        response.raise_for_status()
+        result = BytesIO(response.content)
+        return result
+
+    @classmethod
+    def __try_ensure_serializable (cls, object: Any) -> Any:
+        if object is None:
+            return object
+        if isinstance(object, list):
+            return [cls.__try_ensure_serializable(x) for x in object]
+        if is_dataclass(object) and not isinstance(object, type):
+            return asdict(object)
+        if isinstance(object, BaseModel):
+            return object.model_dump(mode="json", by_alias=True)
+        return object
+
+class RemoteValue (BaseModel):
+    data: str | None
+    type: Dtype
+    shape: list[int] | None = None
+
+class RemotePrediction (BaseModel):
+    id: str
+    tag: str
+    created: str
+    results: list[RemoteValue] | None
+    latency: float | None
+    error: str | None
+    logs: str | None
+
+class CreateValueResponse (BaseModel):
+    upload_url: str = Field(validation_alias="uploadUrl")
+    download_url: str = Field(validation_alias="downloadUrl")
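RemotePredictionService serializes each input to a RemoteValue (inlining small payloads as data URLs and uploading larger ones), posts to /predictions/remote, then deserializes the results. A hedged usage sketch follows; the fxn.beta wiring is inferred from the new BetaClient (fxn/function.py also changes in this release and presumably exposes it), and the predictor tag and prompt are placeholders:

from fxn import Function
from fxn.beta.remote import RemoteAcceleration

fxn = Function()
# Assumed wiring: Function exposes the new BetaClient as `fxn.beta`.
prediction = fxn.beta.predictions.remote.create(
    "@samples/stable-diffusion",                        # hypothetical predictor tag
    inputs={ "prompt": "An astronaut riding a horse" }, # placeholder input
    acceleration=RemoteAcceleration.A100                # request an A100 GPU
)
print(prediction.results)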
fxn/c/__init__.py
CHANGED
fxn/c/configuration.py
CHANGED
fxn/c/fxnc.py
CHANGED
fxn/c/map.py
CHANGED
fxn/c/prediction.py
CHANGED
@@ -1,9 +1,9 @@
 #
 # Function
-# Copyright ©
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
-from ctypes import byref, c_double,
+from ctypes import byref, c_double, c_int32, c_void_p, create_string_buffer
 from pathlib import Path
 from typing import final
 
fxn/c/predictor.py
CHANGED
@@ -1,10 +1,9 @@
 #
 # Function
-# Copyright ©
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
-from ctypes import byref,
-from pathlib import Path
+from ctypes import byref, c_void_p
 from typing import final
 
 from .configuration import Configuration
fxn/c/stream.py
CHANGED
@@ -1,10 +1,9 @@
 #
 # Function
-# Copyright ©
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
-from ctypes import byref,
-from pathlib import Path
+from ctypes import byref, c_void_p
 from typing import final
 
 from .fxnc import get_fxnc, status_to_error, FXNStatus
fxn/c/value.py
CHANGED
fxn/cli/__init__.py
CHANGED
@@ -1,15 +1,15 @@
 #
 # Function
-# Copyright ©
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
 from typer import Typer
 
 from .auth import app as auth_app
-from .
+from .compile import compile_predictor
 from .misc import cli_options
 from .predictions import create_prediction
-from .predictors import archive_predictor, delete_predictor,
+from .predictors import archive_predictor, delete_predictor, retrieve_predictor
 from ..version import __version__
 
 # Define CLI
@@ -26,20 +26,20 @@ app.callback()(cli_options)
 
 # Add subcommands
 app.add_typer(auth_app, name="auth", help="Login, logout, and check your authentication status.")
-#app.add_typer(env_app, name="env", help="Manage predictor environment variables.")
 
 # Add top-level commands
-#app.command(name="create", help="Create a predictor.")(create_predictor)
-#app.command(name="delete", help="Delete a predictor.")(delete_predictor)
 app.command(
     name="predict",
     help="Make a prediction.",
     context_settings={ "allow_extra_args": True, "ignore_unknown_options": True }
 )(create_prediction)
-
-
-
-
+app.command(
+    name="compile",
+    help="Create a predictor by compiling a Python function."
+)(compile_predictor)
+app.command(name="retrieve", help="Retrieve a predictor.")(retrieve_predictor)
+app.command(name="archive", help="Archive a predictor.")(archive_predictor)
+app.command(name="delete", help="Delete a predictor.")(delete_predictor)
 
 # Run
 if __name__ == "__main__":
fxn/cli/auth.py
CHANGED
fxn/cli/compile.py
ADDED
@@ -0,0 +1,141 @@
+#
+# Function
+# Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from asyncio import run as run_async
+from importlib.util import module_from_spec, spec_from_file_location
+from inspect import getmembers, getmodulename, isfunction
+from pathlib import Path
+from pydantic import BaseModel
+from re import sub
+from rich import print as print_rich
+from rich.progress import SpinnerColumn, TextColumn
+import sys
+from typer import Argument, Option
+from typing import Callable, Literal
+from urllib.parse import urlparse, urlunparse
+
+from ..compile import PredictorSpec
+from ..function import Function
+from ..sandbox import EntrypointCommand
+from ..logging import CustomProgress, CustomProgressTask
+from .auth import get_access_key
+
+def compile_predictor (
+    path: str=Argument(..., help="Predictor path.")
+):
+    run_async(_compile_predictor_async(path))
+
+async def _compile_predictor_async (path: str):
+    fxn = Function(get_access_key())
+    path: Path = Path(path).resolve()
+    with CustomProgress(
+        SpinnerColumn(spinner_name="dots", finished_text="[bold green]✔[/bold green]"),
+        TextColumn("[progress.description]{task.description}"),
+    ):
+        # Load
+        with CustomProgressTask(loading_text="Loading predictor...") as task:
+            func = _load_predictor_func(path)
+            entrypoint = EntrypointCommand(from_path=str(path), to_path="./", name=func.__name__)
+            spec: PredictorSpec = func.__predictor_spec
+            task.finish(f"Loaded prediction function: [bold cyan]{spec.tag}[/bold cyan]")
+        # Populate
+        sandbox = spec.sandbox
+        sandbox.commands.append(entrypoint)
+        with CustomProgressTask(loading_text="Uploading sandbox...", done_text="Uploaded sandbox"):
+            sandbox.populate(fxn=fxn)
+        # Compile
+        with CustomProgressTask(loading_text="Running codegen...", done_text="Completed codegen"):
+            with CustomProgressTask(loading_text="Creating predictor..."):
+                predictor = fxn.client.request(
+                    method="POST",
+                    path="/predictors",
+                    body=spec.model_dump(mode="json"),
+                    response_type=_Predictor
+                )
+            with ProgressLogQueue() as task_queue:
+                async for event in fxn.client.stream(
+                    method="POST",
+                    path=f"/predictors/{predictor.tag}/compile",
+                    body={ },
+                    response_type=_LogEvent | _ErrorEvent
+                ):
+                    if isinstance(event, _LogEvent):
+                        task_queue.push_log(event)
+                    elif isinstance(event, _ErrorEvent):
+                        task_queue.push_error(event)
+                        raise RuntimeError(event.data.error)
+    predictor_url = _compute_predictor_url(fxn.client.api_url, spec.tag)
+    print_rich(f"\n[bold spring_green3]🎉 Predictor is now being compiled.[/bold spring_green3] Check it out at {predictor_url}")
+
+def _load_predictor_func (path: str) -> Callable[...,object]:
+    if "" not in sys.path:
+        sys.path.insert(0, "")
+    path: Path = Path(path).resolve()
+    sys.path.insert(0, str(path.parent))
+    name = getmodulename(path)
+    spec = spec_from_file_location(name, path)
+    module = module_from_spec(spec)
+    sys.modules[name] = module
+    spec.loader.exec_module(module)
+    main_func = next(func for _, func in getmembers(module, isfunction) if hasattr(func, "__predictor_spec"))
+    return main_func
+
+def _compute_predictor_url (api_url: str, tag: str) -> str:
+    parsed_url = urlparse(api_url)
+    hostname_parts = parsed_url.hostname.split(".")
+    if hostname_parts[0] == "api":
+        hostname_parts.pop(0)
+    hostname = ".".join(hostname_parts)
+    netloc = hostname if not parsed_url.port else f"{hostname}:{parsed_url.port}"
+    predictor_url = urlunparse(parsed_url._replace(netloc=netloc, path=f"{tag}"))
+    return predictor_url
+
+class _Predictor (BaseModel):
+    tag: str
+
+class _LogData (BaseModel):
+    message: str
+    level: int = 0
+
+class _LogEvent (BaseModel):
+    event: Literal["log"]
+    data: _LogData
+
+class _ErrorData (BaseModel):
+    error: str
+
+class _ErrorEvent (BaseModel):
+    event: Literal["error"]
+    data: _ErrorData
+
+class ProgressLogQueue:
+
+    def __init__ (self):
+        self.queue: list[tuple[int, CustomProgressTask]] = []
+
+    def push_log (self, event: _LogEvent):
+        while self.queue:
+            current_level, current_task = self.queue[-1]
+            if event.data.level > current_level:
+                break
+            current_task.__exit__(None, None, None)
+            self.queue.pop()
+        message = sub(r"`([^`]+)`", r"[hot_pink italic]\1[/hot_pink italic]", event.data.message)
+        task = CustomProgressTask(loading_text=message)
+        task.__enter__()
+        self.queue.append((event.data.level, task))
+
+    def push_error (self, error: _ErrorEvent):
+        while self.queue:
+            _, current_task = self.queue.pop()
+            current_task.__exit__(RuntimeError, None, None)
+
+    def __enter__ (self):
+        return self
+
+    def __exit__ (self, exc_type, exc_value, traceback):
+        while self.queue:
+            _, current_task = self.queue.pop()
+            current_task.__exit__(None, None, None)
fxn/cli/misc.py
CHANGED
fxn/cli/predictions.py
CHANGED
@@ -1,6 +1,6 @@
 #
 # Function
-# Copyright ©
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
 from asyncio import run as run_async
@@ -9,11 +9,11 @@ from numpy import array_repr, ndarray
 from pathlib import Path, PurePath
 from PIL import Image
 from rich import print_json
-from rich.progress import Progress, SpinnerColumn, TextColumn
 from tempfile import mkstemp
 from typer import Argument, Context, Option
 
 from ..function import Function
+from ..logging import CustomProgress, CustomProgressTask
 from ..types import Prediction
 from .auth import get_access_key
 
@@ -26,18 +26,21 @@ def create_prediction (
 
 async def _predict_async (tag: str, quiet: bool, context: Context):
     # Preload
-
-
-
-
-
-
-
-
-
-
-
-
+    with CustomProgress(transient=True, disable=quiet):
+        fxn = Function(get_access_key())
+        with CustomProgressTask(
+            loading_text="Preloading predictor...",
+            done_text="Preloaded predictor"
+        ):
+            fxn.predictions.create(tag, inputs={ })
+        with CustomProgressTask(loading_text="Making prediction..."):
+            inputs = { }
+            for i in range(0, len(context.args), 2):
+                name = context.args[i].replace("-", "")
+                value = _parse_value(context.args[i+1])
+                inputs[name] = value
+            prediction = fxn.predictions.create(tag, inputs=inputs)
+            _log_prediction(prediction)
 
 def _parse_value (value: str):
     """