fxn 0.0.43-py3-none-any.whl → 0.0.44-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fxn/__init__.py +1 -1
- fxn/beta/client.py +59 -2
- fxn/beta/remote.py +40 -47
- fxn/c/configuration.py +62 -30
- fxn/c/value.py +3 -2
- fxn/cli/__init__.py +4 -2
- fxn/cli/compile.py +16 -10
- fxn/compile.py +19 -9
- fxn/function.py +8 -3
- fxn/logging.py +119 -37
- fxn/services/prediction.py +20 -8
- fxn/types/dtype.py +3 -3
- fxn/types/prediction.py +5 -13
- fxn/version.py +1 -1
- {fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/METADATA +3 -2
- {fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/RECORD +20 -20
- {fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/WHEEL +1 -1
- {fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/entry_points.txt +0 -0
- {fxn-0.0.43.dist-info → fxn-0.0.44.dist-info/licenses}/LICENSE +0 -0
- {fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/top_level.txt +0 -0
fxn/__init__.py
CHANGED
fxn/beta/client.py
CHANGED
@@ -3,8 +3,17 @@
 # Copyright © 2025 NatML Inc. All Rights Reserved.
 #

+from functools import wraps
+from inspect import signature as get_signature, Signature
+from typing import get_origin, Callable, Generator, Iterator, TypeVar
+
 from ..client import FunctionClient
+from ..services import PredictionService as EdgePredictionService
+from ..types import Acceleration
 from .prediction import PredictionService
+from .remote import RemoteAcceleration
+
+F = TypeVar("F", bound=Callable[..., object])

 class BetaClient:
     """
@@ -12,5 +21,53 @@ class BetaClient:
     """
     predictions: PredictionService

-    def __init__ (
-        self
+    def __init__ (
+        self,
+        client: FunctionClient,
+        *,
+        predictions: EdgePredictionService
+    ):
+        self.predictions = PredictionService(client)
+        self.__edge_predictions = predictions
+
+    def predict ( # INCOMPLETE # Preload
+        self,
+        tag: str,
+        *,
+        remote: bool=False,
+        acceleration: Acceleration | RemoteAcceleration="auto",
+        preload: bool=True
+    ) -> Callable[[F], F]:
+        """
+        Create a prediction and return results when the decorated function is invoked.
+
+        Parameters:
+            tag (str): Predictor tag.
+            remote (bool): Whether to create the prediction remotely.
+            acceleration (Acceleration | RemoteAcceleration): Prediction acceleration.
+            preload (bool): Whether to preload the predictor on the first run.
+        """
+        def decorator(func: F) -> F:
+            signature = get_signature(func)
+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                bound_args = signature.bind(*args, **kwargs)
+                bound_args.apply_defaults()
+                stream = (
+                    signature.return_annotation is not Signature.empty and
+                    get_origin(signature.return_annotation) in [Iterator, Generator]
+                )
+                create_func = self.predictions.remote.create if remote else self.__edge_predictions.create
+                def _predict (): # INCOMPLETE
+                    prediction = create_func(
+                        tag=tag,
+                        inputs=bound_args.arguments,
+                        acceleration=acceleration
+                    )
+                    if prediction.error:
+                        raise RuntimeError(prediction.error)
+                    return tuple(prediction.results) if len(prediction.results) > 1 else prediction.results[0]
+                result = _predict()
+                return result
+            return wrapper
+        return decorator
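For context, the new `BetaClient.predict` decorator added above is applied to a plain Python function; when the decorated function is called, a prediction is created from the bound arguments and its results are returned (or a RuntimeError is raised if the prediction reports an error). A minimal sketch based only on the signature shown in this diff; the predictor tag and function are hypothetical:

from fxn import Function

fxn = Function()  # access key read from FXN_ACCESS_KEY if not passed explicitly

# Hypothetical predictor tag; the body of the decorated function is never executed.
@fxn.beta.predict("@example/identity", remote=False, acceleration="auto")
def identity(x: float) -> float:
    ...

result = identity(3.14)  # runs the predictor and returns its (single) result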
fxn/beta/remote.py
CHANGED
@@ -13,7 +13,7 @@ from numpy import array, frombuffer, ndarray
 from PIL import Image
 from pydantic import BaseModel, Field
 from requests import get, put
-from typing import
+from typing import Literal
 from urllib.request import urlopen

 from ..c import Configuration
@@ -21,14 +21,7 @@ from ..client import FunctionClient
 from ..services import Value
 from ..types import Dtype, Prediction

-
-    """
-    Remote acceleration.
-    """
-    Auto = "auto"
-    CPU = "cpu"
-    A40 = "a40"
-    A100 = "a100"
+RemoteAcceleration = Literal["auto", "cpu", "a40", "a100"]

 class RemotePredictionService:
     """
@@ -43,7 +36,7 @@ class RemotePredictionService:
         tag: str,
         *,
         inputs: dict[str, Value],
-        acceleration: RemoteAcceleration=
+        acceleration: RemoteAcceleration="auto"
     ) -> Prediction:
         """
         Create a remote prediction.
@@ -74,51 +67,51 @@ class RemotePredictionService:

     def __to_value (
         self,
-
+        obj: Value,
         *,
         name: str,
         max_data_url_size: int=4 * 1024 * 1024
     ) -> RemoteValue:
-
-        if
+        obj = self.__try_ensure_serializable(obj)
+        if obj is None:
             return RemoteValue(data=None, type=Dtype.null)
-        elif isinstance(
-
-            return self.__to_value(
-        elif isinstance(
-
-            return self.__to_value(
-        elif isinstance(
-
-            return self.__to_value(
-        elif isinstance(
-            buffer = BytesIO(
+        elif isinstance(obj, float):
+            obj = array(obj, dtype=Dtype.float32)
+            return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size)
+        elif isinstance(obj, bool):
+            obj = array(obj, dtype=Dtype.bool)
+            return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size)
+        elif isinstance(obj, int):
+            obj = array(obj, dtype=Dtype.int32)
+            return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size)
+        elif isinstance(obj, ndarray):
+            buffer = BytesIO(obj.tobytes())
             data = self.__upload(buffer, name=name, max_data_url_size=max_data_url_size)
-            return RemoteValue(data=data, type=
-        elif isinstance(
-            buffer = BytesIO(
+            return RemoteValue(data=data, type=obj.dtype.name, shape=list(obj.shape))
+        elif isinstance(obj, str):
+            buffer = BytesIO(obj.encode())
             data = self.__upload(buffer, name=name, mime="text/plain", max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.string)
-        elif isinstance(
-            buffer = BytesIO(dumps(
+        elif isinstance(obj, list):
+            buffer = BytesIO(dumps(obj).encode())
             data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.list)
-        elif isinstance(
-            buffer = BytesIO(dumps(
+        elif isinstance(obj, dict):
+            buffer = BytesIO(dumps(obj).encode())
             data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.dict)
-        elif isinstance(
+        elif isinstance(obj, Image.Image):
             buffer = BytesIO()
-            format = "PNG" if
+            format = "PNG" if obj.mode == "RGBA" else "JPEG"
             mime = f"image/{format.lower()}"
-
+            obj.save(buffer, format=format)
             data = self.__upload(buffer, name=name, mime=mime, max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.image)
-        elif isinstance(
-            data = self.__upload(
+        elif isinstance(obj, BytesIO):
+            data = self.__upload(obj, name=name, max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.binary)
         else:
-            raise ValueError(f"Failed to serialize value '{
+            raise ValueError(f"Failed to serialize value '{obj}' of type `{type(obj)}` because it is not supported")

     def __to_object (self, value: RemoteValue) -> Value:
         if value.type == Dtype.null:
@@ -177,16 +170,16 @@ class RemotePredictionService:
         return result

     @classmethod
-    def __try_ensure_serializable (cls,
-        if
-            return
-        if isinstance(
-            return [cls.__try_ensure_serializable(x) for x in
-        if is_dataclass(
-            return asdict(
-        if isinstance(
-            return
-        return
+    def __try_ensure_serializable (cls, obj: object) -> object:
+        if obj is None:
+            return obj
+        if isinstance(obj, list):
+            return [cls.__try_ensure_serializable(x) for x in obj]
+        if is_dataclass(obj) and not isinstance(obj, type):
+            return asdict(obj)
+        if isinstance(obj, BaseModel):
+            return obj.model_dump(mode="json", by_alias=True)
+        return obj

 class RemoteValue (BaseModel):
     data: str | None
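As a point of reference, the `RemoteAcceleration` literal and the `RemotePredictionService.create` signature above imply usage along these lines; a hedged sketch in which the predictor tag and inputs are placeholders:

from fxn import Function

fxn = Function()
# Create the prediction on a remote accelerator instead of the local device.
prediction = fxn.beta.predictions.remote.create(
    tag="@example/identity",   # hypothetical predictor tag
    inputs={ "x": 3.14 },
    acceleration="a100"        # one of "auto", "cpu", "a40", "a100"
)
print(prediction.results)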
fxn/c/configuration.py
CHANGED
@@ -16,15 +16,18 @@ class Configuration:
     def __init__ (self):
         configuration = c_void_p()
         status = get_fxnc().FXNConfigurationCreate(byref(configuration))
-        if status
-            self.__configuration = configuration
-        else:
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to create configuration with error: {status_to_error(status)}")
+        self.__configuration = configuration

     @property
     def tag (self) -> str:
         buffer = create_string_buffer(2048)
-        status = get_fxnc().FXNConfigurationGetTag(
+        status = get_fxnc().FXNConfigurationGetTag(
+            self.__configuration,
+            buffer,
+            len(buffer)
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration tag with error: {status_to_error(status)}")
         tag = buffer.value.decode("utf-8")
@@ -40,7 +43,11 @@ class Configuration:
     @property
     def token (self) -> str:
         buffer = create_string_buffer(2048)
-        status = get_fxnc().FXNConfigurationGetToken(
+        status = get_fxnc().FXNConfigurationGetToken(
+            self.__configuration,
+            buffer,
+            len(buffer)
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration token with error: {status_to_error(status)}")
         token = buffer.value.decode("utf-8")
@@ -56,26 +63,33 @@ class Configuration:
     @property
     def acceleration (self) -> Acceleration:
         acceleration = c_int()
-        status = get_fxnc().FXNConfigurationGetAcceleration(
-
-
-
+        status = get_fxnc().FXNConfigurationGetAcceleration(
+            self.__configuration,
+            byref(acceleration)
+        )
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration acceleration with error: {status_to_error(status)}")
+        return self.__to_acceleration_str(acceleration.value)

     @acceleration.setter
     def acceleration (self, acceleration: Acceleration):
-        status = get_fxnc().FXNConfigurationSetAcceleration(
+        status = get_fxnc().FXNConfigurationSetAcceleration(
+            self.__configuration,
+            self.__to_acceleration_int(acceleration)
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to set configuration acceleration with error: {status_to_error(status)}")

     @property
     def device (self):
         device = c_void_p()
-        status = get_fxnc().FXNConfigurationGetDevice(
-
-
-
+        status = get_fxnc().FXNConfigurationGetDevice(
+            self.__configuration,
+            byref(device)
+        )
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration device with error: {status_to_error(status)}")
+        return device if device.value else None

     @device.setter
     def device (self, device):
@@ -84,29 +98,21 @@ class Configuration:
         raise RuntimeError(f"Failed to set configuration device with error: {status_to_error(status)}")

     def add_resource (self, type: str, path: Path):
-        status = get_fxnc().FXNConfigurationAddResource(
+        status = get_fxnc().FXNConfigurationAddResource(
+            self.__configuration,
+            type.encode(),
+            str(path).encode()
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to add configuration resource with error: {status_to_error(status)}")

-    def __enter__ (self):
-        return self
-
-    def __exit__ (self, exc_type, exc_value, traceback):
-        self.__release()
-
-    def __release (self):
-        if self.__configuration:
-            get_fxnc().FXNConfigurationRelease(self.__configuration)
-        self.__configuration = None
-
     @classmethod
     def get_unique_id (cls) -> str:
         buffer = create_string_buffer(2048)
         status = get_fxnc().FXNConfigurationGetUniqueID(buffer, len(buffer))
-        if status
-            return buffer.value.decode("utf-8")
-        else:
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to retrieve configuration identifier with error: {status_to_error(status)}")
+        return buffer.value.decode("utf-8")

     @classmethod
     def get_client_id (cls) -> str:
@@ -115,4 +121,30 @@ class Configuration:
         if status == FXNStatus.OK:
             return buffer.value.decode("utf-8")
         else:
-            raise RuntimeError(f"Failed to retrieve client identifier with error: {status_to_error(status)}")
+            raise RuntimeError(f"Failed to retrieve client identifier with error: {status_to_error(status)}")
+
+    def __enter__ (self):
+        return self
+
+    def __exit__ (self, exc_type, exc_value, traceback):
+        self.__release()
+
+    def __release (self):
+        if self.__configuration:
+            get_fxnc().FXNConfigurationRelease(self.__configuration)
+        self.__configuration = None
+
+    def __to_acceleration_int (self, value: Acceleration) -> int:
+        match value:
+            case "auto": return 0
+            case "cpu": return 1
+            case "gpu": return 2
+            case "npu": return 4
+
+    def __to_acceleration_str (self, value: int) -> Acceleration:
+        match value:
+            case 0: return "auto"
+            case 1: return "cpu"
+            case 2: return "gpu"
+            case 4: return "npu"
+            case _: return None
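Note that `__enter__`, `__exit__`, and `__release` were moved below the public API rather than removed, so `Configuration` still works as a context manager, and the new private helpers translate between the `Acceleration` string literal and the native enum values ("auto" → 0, "cpu" → 1, "gpu" → 2, "npu" → 4). A minimal sketch of this internal API, for illustration only:

from fxn.c import Configuration

with Configuration() as configuration:
    configuration.acceleration = "gpu"  # stored as the native enum value 2
# FXNConfigurationRelease is called for the underlying handle when the block exits.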
fxn/c/value.py
CHANGED
@@ -48,7 +48,7 @@ class Value:

     @property
     def shape (self) -> list[int] | None:
-        if self.type not in
+        if self.type not in _TENSOR_ISH_DTYPES:
             return None
         fxnc = get_fxnc()
         dims = c_int32()
@@ -220,4 +220,5 @@ _TENSOR_DTYPES = {
     Dtype.uint32,
     Dtype.uint64,
     Dtype.bool,
-}
+}
+_TENSOR_ISH_DTYPES = _TENSOR_DTYPES | { Dtype.image }
fxn/cli/__init__.py
CHANGED
@@ -3,17 +3,19 @@
 # Copyright © 2025 NatML Inc. All Rights Reserved.
 #

-
+import typer

 from .auth import app as auth_app
 from .compile import compile_predictor
 from .misc import cli_options
 from .predictions import create_prediction
 from .predictors import archive_predictor, delete_predictor, retrieve_predictor
+from ..logging import TracebackMarkupConsole
 from ..version import __version__

 # Define CLI
-
+typer.main.console_stderr = TracebackMarkupConsole()
+app = typer.Typer(
     name=f"Function CLI {__version__}",
     no_args_is_help=True,
     pretty_exceptions_show_locals=False,
fxn/cli/compile.py
CHANGED
@@ -8,7 +8,6 @@ from importlib.util import module_from_spec, spec_from_file_location
 from inspect import getmembers, getmodulename, isfunction
 from pathlib import Path
 from pydantic import BaseModel
-from re import sub
 from rich import print as print_rich
 from rich.progress import SpinnerColumn, TextColumn
 import sys
@@ -22,6 +21,9 @@ from ..sandbox import EntrypointCommand
 from ..logging import CustomProgress, CustomProgressTask
 from .auth import get_access_key

+class CompileError (Exception):
+    pass
+
 def compile_predictor (
     path: str=Argument(..., help="Predictor path.")
 ):
@@ -30,10 +32,7 @@ def compile_predictor (
 async def _compile_predictor_async (path: str):
     fxn = Function(get_access_key())
     path: Path = Path(path).resolve()
-    with CustomProgress(
-        SpinnerColumn(spinner_name="dots", finished_text="[bold green]✔[/bold green]"),
-        TextColumn("[progress.description]{task.description}"),
-    ):
+    with CustomProgress():
         # Load
         with CustomProgressTask(loading_text="Loading predictor...") as task:
             func = _load_predictor_func(path)
@@ -51,7 +50,7 @@ async def _compile_predictor_async (path: str):
         predictor = fxn.client.request(
             method="POST",
             path="/predictors",
-            body=spec.model_dump(mode="json"),
+            body=spec.model_dump(mode="json", exclude=spec.model_extra.keys(), by_alias=True),
             response_type=_Predictor
         )
         with ProgressLogQueue() as task_queue:
@@ -65,9 +64,9 @@ async def _compile_predictor_async (path: str):
                     task_queue.push_log(event)
                 elif isinstance(event, _ErrorEvent):
                     task_queue.push_error(event)
-                    raise
+                    raise CompileError(event.data.error)
     predictor_url = _compute_predictor_url(fxn.client.api_url, spec.tag)
-    print_rich(f"\n[bold spring_green3]🎉 Predictor is now being compiled.[/bold spring_green3] Check it out at {predictor_url}")
+    print_rich(f"\n[bold spring_green3]🎉 Predictor is now being compiled.[/bold spring_green3] Check it out at [link={predictor_url}]{predictor_url}[/link]")

 def _load_predictor_func (path: str) -> Callable[...,object]:
     if "" not in sys.path:
@@ -98,6 +97,8 @@ class _Predictor (BaseModel):
 class _LogData (BaseModel):
     message: str
     level: int = 0
+    status: Literal["success", "error"] = "success"
+    update: bool = False

 class _LogEvent (BaseModel):
     event: Literal["log"]
@@ -116,14 +117,19 @@ class ProgressLogQueue:
         self.queue: list[tuple[int, CustomProgressTask]] = []

     def push_log (self, event: _LogEvent):
+        # Check for update
+        if event.data.update and self.queue:
+            current_level, current_task = self.queue[-1]
+            current_task.update(description=event.data.message, status=event.data.status)
+            return
+        # Pop
         while self.queue:
             current_level, current_task = self.queue[-1]
             if event.data.level > current_level:
                 break
             current_task.__exit__(None, None, None)
             self.queue.pop()
-
-        task = CustomProgressTask(loading_text=message)
+        task = CustomProgressTask(loading_text=event.data.message)
         task.__enter__()
         self.queue.append((event.data.level, task))
fxn/compile.py
CHANGED
@@ -7,11 +7,15 @@ from collections.abc import Callable
 from functools import wraps
 from inspect import isasyncgenfunction, iscoroutinefunction
 from pathlib import Path
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
+from types import ModuleType
+from typing import Literal

 from .sandbox import Sandbox
 from .types import AccessMode

+CompileTarget = Literal["android", "ios", "linux", "macos", "visionos", "wasm", "windows"]
+
 class PredictorSpec (BaseModel):
     """
     Descriptor of a predictor to be compiled.
@@ -19,20 +23,26 @@ class PredictorSpec (BaseModel):
     tag: str = Field(description="Predictor tag.")
     description: str = Field(description="Predictor description. MUST be less than 100 characters long.", min_length=4, max_length=100)
     sandbox: Sandbox = Field(description="Sandbox to compile the function.")
+    trace_modules: list[ModuleType] = Field(description="Modules to trace and compile.", exclude=True)
+    targets: list[str] | None = Field(description="Targets to compile this predictor for. Pass `None` to compile for our default targets.")
     access: AccessMode = Field(description="Predictor access.")
     card: str | None = Field(default=None, description="Predictor card (markdown).")
     media: str | None = Field(default=None, description="Predictor media URL.")
     license: str | None = Field(default=None, description="Predictor license URL. This is required for public predictors.")
+    model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow", frozen=True)

 def compile (
     tag: str,
     *,
     description: str,
     sandbox: Sandbox=None,
+    trace_modules: list[ModuleType]=[],
+    targets: list[CompileTarget]=None,
     access: AccessMode=AccessMode.Private,
     card: str | Path=None,
     media: Path=None,
     license: str=None,
+    **kwargs
 ):
     """
     Create a predictor by compiling a stateless function.
@@ -41,6 +51,8 @@ def compile (
         tag (str): Predictor tag.
         description (str): Predictor description. MUST be less than 100 characters long.
         sandbox (Sandbox): Sandbox to compile the function.
+        trace_modules (list): Modules to trace and compile.
+        targets (list): Targets to compile this predictor for. Pass `None` to compile for our default targets.
         access (AccessMode): Predictor access.
         card (str | Path): Predictor card markdown string or path to card.
         media (Path): Predictor thumbnail image (jpeg or png) path.
@@ -51,21 +63,19 @@ def compile (
         if not callable(func):
             raise TypeError("Cannot compile non-function objects")
         if isasyncgenfunction(func) or iscoroutinefunction(func):
-            raise TypeError(f"
+            raise TypeError(f"Entrypoint function '{func.__name__}' must be a regular function or generator")
         # Gather metadata
-        if isinstance(card, Path):
-            with open(card_content, "r") as f:
-                card_content = f.read()
-        else:
-            card_content = card
         spec = PredictorSpec(
             tag=tag,
             description=description,
             sandbox=sandbox if sandbox is not None else Sandbox(),
+            trace_modules=trace_modules,
+            targets=targets,
             access=access,
-            card=
+            card=card.read_text() if isinstance(card, Path) else card,
             media=None, # INCOMPLETE
-            license=license
+            license=license,
+            **kwargs
         )
         # Wrap
         @wraps(func)
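For reference, the extended `compile` decorator above can be invoked roughly as follows; a minimal sketch in which the tag, description, and traced module are placeholders, importing the decorator from the `fxn.compile` module where it is defined:

from fxn.compile import compile
import numpy  # stands in for any module you want traced and compiled with the function

@compile(
    "@example/add-one",                    # hypothetical predictor tag
    description="Add one to a number.",
    trace_modules=[numpy],
    targets=["linux", "macos", "windows"]  # or None to compile for the default targets
)
def add_one(x: float) -> float:
    return x + 1.0

Extra keyword arguments are accepted via `**kwargs` and stored on the spec because `PredictorSpec` now sets `extra="allow"`; the `exclude=spec.model_extra.keys()` argument in `fxn/cli/compile.py` above suggests they are stripped again when the spec is posted.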
fxn/function.py
CHANGED
@@ -30,11 +30,16 @@ class Function:
     predictions: PredictionService
     beta: BetaClient

-    def __init__ (
-
+    def __init__ (
+        self,
+        access_key: str=None,
+        *,
+        api_url: str=None
+    ):
+        access_key = access_key or environ.get("FXN_ACCESS_KEY")
         api_url = api_url or environ.get("FXN_API_URL")
         self.client = FunctionClient(access_key, api_url)
         self.users = UserService(self.client)
         self.predictors = PredictorService(self.client)
         self.predictions = PredictionService(self.client)
-        self.beta = BetaClient(self.client)
+        self.beta = BetaClient(self.client, predictions=self.predictions)
fxn/logging.py
CHANGED
@@ -4,12 +4,62 @@
 #

 from contextvars import ContextVar
-from rich.
-from
+from rich.console import Console, ConsoleOptions, RenderResult
+from rich.progress import Progress, ProgressColumn, SpinnerColumn, TextColumn
+from rich.text import Text
+from rich.traceback import Traceback
+from types import MethodType

 current_progress = ContextVar("current_progress", default=None)
 progress_task_stack = ContextVar("progress_task_stack", default=[])

+class CustomSpinnerColumn (SpinnerColumn):
+
+    def __init__ (
+        self,
+        spinner_name="dots",
+        success_text="[bold green]✔[/bold green]",
+        failure_text="[bright_red]✘[/bright_red]",
+        style="",
+    ):
+        super().__init__(spinner_name=spinner_name, style=style)
+        self.success_text = success_text
+        self.failure_text = failure_text
+
+    def render (self, task):
+        done_text = (
+            self.failure_text
+            if task.fields.get("status") == "error"
+            else self.success_text
+        )
+        return done_text if task.finished else self.spinner
+
+class CustomTextColumn (TextColumn):
+    """Custom text column that changes color based on task status"""
+
+    def __init__ (self, text_format="{task.description}"):
+        super().__init__(text_format)
+
+    def render (self, task):
+        # Indent and color
+        description = task.description
+        indent_level = task.fields.get("indent_level", 0)
+        indent = self.__get_indent(indent_level)
+        task.description = f"{indent}{description}"
+        if task.fields.get("status") == "error":
+            task.description = f"[bright_red]{task.description}[/bright_red]"
+        # Render
+        text = super().render(task)
+        task.description = description
+        # Return
+        return text
+
+    def __get_indent (self, level: int) -> str:
+        if level == 0:
+            return ""
+        indicator = "└── "
+        return " " * len(indicator) * (level - 1) + indicator
+
 class CustomProgress(Progress):

     def __init__ (
@@ -27,8 +77,8 @@ class CustomProgress(Progress):
         expand=False
     ):
         default_columns = list(columns) if len(columns) > 0 else [
-
-
+            CustomSpinnerColumn(),
+            CustomTextColumn("[progress.description]{task.description}"),
         ]
         super().__init__(
             *default_columns,
@@ -71,19 +121,18 @@ class CustomProgressTask:
         columns: list[ProgressColumn]=None
     ):
         self.loading_text = loading_text
-        self.done_text = done_text
+        self.done_text = done_text
         self.task_id = None
         self.columns = columns

     def __enter__ (self):
         progress = current_progress.get()
-        indent_level = len(progress_task_stack.get())
-        indent = self.__get_indent(indent_level)
         if progress is not None:
             self.task_id = progress.add_task(
-
+                self.loading_text,
                 total=1,
-                columns=self.columns
+                columns=self.columns,
+                indent_level=len(progress_task_stack.get())
             )
             current_stack = progress_task_stack.get()
             progress_task_stack.set(current_stack + [self.task_id])
@@ -92,20 +141,13 @@ class CustomProgressTask:
     def __exit__ (self, exc_type, exc_val, exc_tb):
         progress = current_progress.get()
         if progress is not None and self.task_id is not None:
-
-
-
-
-
-
-
-                completed=total
-            )
-            else:
-                progress.update(
-                    self.task_id,
-                    description=f"{indent}[bright_red]✘ {self.loading_text}[/bright_red]",
-                )
+            current_task = progress._tasks[self.task_id]
+            progress.update(
+                self.task_id,
+                description=self.done_text or current_task.description,
+                completed=current_task.total,
+                status="error" if exc_type is not None else current_task.fields.get("status")
+            )
             current_stack = progress_task_stack.get()
             if current_stack:
                 progress_task_stack.set(current_stack[:-1])
@@ -116,22 +158,62 @@ class CustomProgressTask:
         progress = current_progress.get()
         if progress is None or self.task_id is None:
             return
-        if "description" in kwargs:
-            stack = progress_task_stack.get()
-            try:
-                index = stack.index(self.task_id)
-            except ValueError:
-                index = len(stack) - 1
-            indent = self.__get_indent(index)
-            description = kwargs["description"]
-            kwargs["description"] = f"{indent}{description}"
         progress.update(self.task_id, **kwargs)

     def finish (self, message: str):
         self.done_text = message
+
+class TracebackMarkupConsole (Console):

-    def
-
-
-
-
+    def print(
+        self,
+        *objects,
+        sep = " ",
+        end = "\n",
+        style = None,
+        justify = None,
+        overflow = None,
+        no_wrap = None,
+        emoji = None,
+        markup = None,
+        highlight = None,
+        width = None,
+        height = None,
+        crop = True,
+        soft_wrap = None,
+        new_line_start = False
+    ):
+        traceback = objects[0]
+        if isinstance(traceback, Traceback):
+            stack = traceback.trace.stacks[0]
+            original_rich_console = traceback.__rich_console__
+            def __rich_console__ (self: Traceback, console: Console, options: ConsoleOptions) -> RenderResult:
+                for renderable in original_rich_console(console, options):
+                    if (
+                        isinstance(renderable, Text) and
+                        any(part.startswith(f"{stack.exc_type}:") for part in renderable._text)
+                    ):
+                        yield Text.assemble(
+                            (f"{stack.exc_type}: ", "traceback.exc_type"),
+                            Text.from_markup(stack.exc_value)
+                        )
+                    else:
+                        yield renderable
+            traceback.__rich_console__ = MethodType(__rich_console__, traceback)
+        return super().print(
+            *objects,
+            sep=sep,
+            end=end,
+            style=style,
+            justify=justify,
+            overflow=overflow,
+            no_wrap=no_wrap,
+            emoji=emoji,
+            markup=markup,
+            highlight=highlight,
+            width=width,
+            height=height,
+            crop=crop,
+            soft_wrap=soft_wrap,
+            new_line_start=new_line_start
+        )
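For orientation, the progress machinery above coordinates through the `current_progress` and `progress_task_stack` context variables: nested `CustomProgressTask` blocks are drawn with a `└── ` indent by `CustomTextColumn`, and `CustomSpinnerColumn` switches to the ✘ marker when a task exits with an exception or an "error" status. A rough sketch of the nesting pattern the CLI relies on (the loading texts are placeholders):

from fxn.logging import CustomProgress, CustomProgressTask

with CustomProgress():
    with CustomProgressTask(loading_text="Loading predictor..."):
        with CustomProgressTask(loading_text="Parsing function..."):
            pass  # rendered one level deeper with the └── indicator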
fxn/services/prediction.py
CHANGED
@@ -13,7 +13,7 @@ from pydantic import BaseModel
 from requests import get
 from rich.progress import BarColumn, DownloadColumn, TransferSpeedColumn, TimeRemainingColumn
 from tempfile import gettempdir, NamedTemporaryFile
-from typing import
+from typing import Iterator
 from urllib.parse import urlparse

 from ..c import Configuration, Predictor, Prediction as CPrediction, Value as CValue, ValueFlags, ValueMap
@@ -21,7 +21,19 @@ from ..client import FunctionClient
 from ..logging import CustomProgressTask
 from ..types import Acceleration, Prediction, PredictionResource

-Value =
+Value = (
+    None |
+    float |
+    int |
+    bool |
+    ndarray |
+    str |
+    list[object] |
+    dict[str, object] |
+    Image.Image |
+    BytesIO |
+    memoryview
+)

 class PredictionService:

@@ -48,7 +60,7 @@ class PredictionService:
         tag: str,
         *,
         inputs: dict[str, Value] | None=None,
-        acceleration: Acceleration=
+        acceleration: Acceleration="auto",
         device=None,
         client_id: str=None,
         configuration_id: str=None
@@ -85,14 +97,14 @@ class PredictionService:
     ):
         return self.__to_prediction(tag, prediction)

-
+    def stream (
         self,
         tag: str,
         *,
         inputs: dict[str, Value],
-        acceleration: Acceleration=
+        acceleration: Acceleration="auto",
         device=None
-    ) ->
+    ) -> Iterator[Prediction]:
         """
         Stream a prediction.

@@ -140,7 +152,7 @@ class PredictionService:
     def __get_predictor (
         self,
         tag: str,
-        acceleration: Acceleration=
+        acceleration: Acceleration="auto",
         device=None,
         client_id: str=None,
         configuration_id: str=None
@@ -266,7 +278,7 @@ class PredictionService:
         return Path(gettempdir())

     @classmethod
-    def __try_ensure_serializable (cls, object:
+    def __try_ensure_serializable (cls, object: object) -> object:
         if object is None:
             return object
         if isinstance(object, list):
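With the changes above, `PredictionService.create` and the `stream` method both default `acceleration` to the "auto" literal, and `stream` is annotated to return `Iterator[Prediction]`. A hedged usage sketch; the predictor tag and inputs are placeholders:

from fxn import Function

fxn = Function()
# One-shot prediction on the local device.
prediction = fxn.predictions.create(tag="@example/identity", inputs={ "x": 3.14 })
print(prediction.results)

# Streaming prediction; each yielded item is a Prediction.
for partial in fxn.predictions.stream(tag="@example/identity", inputs={ "x": 3.14 }):
    print(partial.results)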
fxn/types/dtype.py
CHANGED
@@ -11,6 +11,9 @@ class Dtype (str, Enum):
     This follows `numpy` dtypes.
     """
     null = "null"
+    float16 = "float16"
+    float32 = "float32"
+    float64 = "float64"
     int8 = "int8"
     int16 = "int16"
     int32 = "int32"
@@ -19,9 +22,6 @@ class Dtype (str, Enum):
     uint16 = "uint16"
     uint32 = "uint32"
     uint64 = "uint64"
-    float16 = "float16"
-    float32 = "float32"
-    float64 = "float64"
     bool = "bool"
     string = "string"
     list = "list"
fxn/types/prediction.py
CHANGED
@@ -3,9 +3,10 @@
 # Copyright © 2025 NatML Inc. All Rights Reserved.
 #

-from enum import IntFlag
 from pydantic import BaseModel, Field
-from typing import
+from typing import Literal
+
+Acceleration = Literal["auto", "cpu", "gpu", "npu"]

 class PredictionResource (BaseModel):
     """
@@ -39,17 +40,8 @@ class Prediction (BaseModel):
     tag: str = Field(description="Predictor tag.")
     configuration: str | None = Field(default=None, description="Prediction configuration token. This is only populated for `EDGE` predictions.")
     resources: list[PredictionResource] | None = Field(default=None, description="Prediction resources. This is only populated for `EDGE` predictions.")
-    results: list[
+    results: list[object] | None = Field(default=None, description="Prediction results.")
     latency: float | None = Field(default=None, description="Prediction latency in milliseconds.")
    error: str | None = Field(default=None, description="Prediction error. This is `None` if the prediction completed successfully.")
     logs: str | None = Field(default=None, description="Prediction logs.")
-    created: str = Field(description="Date created.")
-
-class Acceleration (IntFlag):
-    """
-    Predictor acceleration.
-    """
-    Auto = 0,
-    CPU = 1 << 0,
-    GPU = 1 << 1,
-    NPU = 1 << 2
+    created: str = Field(description="Date created.")
fxn/version.py
CHANGED
{fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: fxn
-Version: 0.0.
+Version: 0.0.44
 Summary: Run prediction functions locally in Python. Register at https://fxn.ai.
 Author-email: "NatML Inc." <hi@fxn.ai>
 License: Apache License
@@ -222,6 +222,7 @@ Requires-Dist: pydantic>=2.0
 Requires-Dist: requests
 Requires-Dist: rich
 Requires-Dist: typer
+Dynamic: license-file

 # Function for Python and CLI

{fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/RECORD
CHANGED
@@ -1,25 +1,25 @@
-fxn/__init__.py,sha256=
+fxn/__init__.py,sha256=eOYYoHwwQWzYVnyBO1VtcWR0JnUm1GPl4kr8IzhnCqg,257
 fxn/client.py,sha256=Deje8eiS1VOHX85tQnV34viv2CPVx2ljwHSbyVB5Z1o,3790
-fxn/compile.py,sha256=
-fxn/function.py,sha256=
-fxn/logging.py,sha256=
+fxn/compile.py,sha256=XO_0a0hEfM3SI03cb8EFs2xL6E6XpmrVtRPo2icR6J0,3529
+fxn/function.py,sha256=XeEuALkbVhkvwEBUfP0A2fu3tdimwHemoR17oomhzc8,1407
+fxn/logging.py,sha256=MsTSf0GZxrHNDwVAXDOh8_zRUg9hkeZ8DfhFUJs7D8A,7250
 fxn/sandbox.py,sha256=w2dnHMBaKOERxFMpeAP11X6_SPqcvnpd6SmX6b_FOYQ,7000
-fxn/version.py,sha256=
+fxn/version.py,sha256=zQOEiQse1Ff3n-mSuBrT9-JLRGel6n8RluZbXIshvHI,95
 fxn/beta/__init__.py,sha256=gKoDhuXtXCjdhUYUqmF0gDPMhJfg3UwFgbvMtRB5ipo,111
-fxn/beta/client.py,sha256=
+fxn/beta/client.py,sha256=0lfwQPcB9ToIJC7AcCXO6DlJKkmId8EChhd9bk29GGE,2611
 fxn/beta/prediction.py,sha256=9DTBahNF6m0TicLab2o9e8IKpiSV6K7cUSTYaFju0ZU,356
-fxn/beta/remote.py,sha256=
+fxn/beta/remote.py,sha256=HC8OIslZYyxw3XafVCCrP_wrPa00y5uekkKd_tkzyV0,7551
 fxn/c/__init__.py,sha256=NMIduqO_MYtI9jVCu6ZxvbBtYQXoQyNEWblNy3m2UPY,313
-fxn/c/configuration.py,sha256=
+fxn/c/configuration.py,sha256=56_-NNT4yoHDNfvB6jJNYF2eKJYMRLVrv3mIg7g6qaE,5597
 fxn/c/fxnc.py,sha256=YrvwOlzPmTlSDuz2zmKZfws2WK5BY4YZ62edoplcMJU,1381
 fxn/c/map.py,sha256=47fBJ0Q6uB_xeW3sn9aCLYJ539edg8ff9DU-EIfWRGA,2352
 fxn/c/prediction.py,sha256=-d-5yreFAaRS-nDHzhfabRNtgYcmJGiY_N2dt09gk84,2689
 fxn/c/predictor.py,sha256=48poLj1AthzCgU9n6Wv9gL8o4gFucIlOnBO2wdor6r0,1925
 fxn/c/stream.py,sha256=Y1Xv1Bt3_qlnWg9rCn7NWESpouF1eKMzDiQjhZWbXTg,1105
-fxn/c/value.py,sha256=
-fxn/cli/__init__.py,sha256=
+fxn/c/value.py,sha256=h5n91nm8C3YvEEFORfJBUdncZ29DFIdUKGWQ_KpLsWc,7420
+fxn/cli/__init__.py,sha256=OcdxY751a5avAWNYUpBG92kjqpKmYXwhZxgB1mGwUpA,1396
 fxn/cli/auth.py,sha256=6iGbNbjxfCr8OZT3_neLThXdWeKRBZATwru8vU0XmRw,1688
-fxn/cli/compile.py,sha256=
+fxn/cli/compile.py,sha256=y5SOGSr5_B_WY-TYbg3Q9kCCaYGlbyQcDOYSPeg2lDc,5490
 fxn/cli/misc.py,sha256=LcJbCj_GAgtGraTRva2zHHOPpNwI6SOFntRksxwlqvM,843
 fxn/cli/predictions.py,sha256=ma7wbsKD5CFCRTU_TtJ8N0nN1fgFX2BZPGG8qm8HlNI,3182
 fxn/cli/predictors.py,sha256=bVQAuBue_Jxb79X85RTCzOerWRRT2Ny1oF5DNYAsx4M,1545
@@ -31,17 +31,17 @@ fxn/lib/macos/x86_64/Function.dylib,sha256=qIu4dhx0Xk5dQHgTnZTcm2IpoMYJwRPmKRi9J
 fxn/lib/windows/arm64/Function.dll,sha256=FyL-oipK9wSxXdbD9frc8QFbUKTPMCdtmCkCT8ooIIM,419328
 fxn/lib/windows/x86_64/Function.dll,sha256=iL6w1FwDgBkHlNhQmhE7XgfoeHsiYQgpVGzeGDdHGUw,454656
 fxn/services/__init__.py,sha256=Bif8IttwJ089mSRsd3MFdob7z2eF-MKigKu4ZQFZBCQ,190
-fxn/services/prediction.py,sha256=
+fxn/services/prediction.py,sha256=QCop-f7ojkGR7DI5tLJe3FPnr0BvPJ_vWhCk4kg8Fqg,10373
 fxn/services/predictor.py,sha256=Wl_7YKiD5mTpC5x2Zaq4BpatRjwRUX8Th9GIrwd38MA,791
 fxn/services/user.py,sha256=ADl5MFLsk4K0altgKHnI-i64E3g1wU3e56Noq_ciRuk,685
 fxn/types/__init__.py,sha256=MEg71rzbGgoWfgB4Yi5QvxbnovHTZRIzCUZLtWtWP1E,292
-fxn/types/dtype.py,sha256=
-fxn/types/prediction.py,sha256=
+fxn/types/dtype.py,sha256=71Tuu4IydmELcBcSBbmWswhCE-7WqBSQ4VkETsFRzjA,617
+fxn/types/prediction.py,sha256=BdLTxnKiSFbz5warX8g_Z4DedNxXK3gaNjSKR2FP8tA,2051
 fxn/types/predictor.py,sha256=KRGZEuDt7WPMCyRcZvQq4y2FMocfVrLEUNJCJgfDY9Y,4000
 fxn/types/user.py,sha256=Z44TwEocyxSrfKyzcNfmAXUrpX_Ry8fJ7MffSxRn4oU,1071
-fxn-0.0.
-fxn-0.0.
-fxn-0.0.
-fxn-0.0.
-fxn-0.0.
-fxn-0.0.
+fxn-0.0.44.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+fxn-0.0.44.dist-info/METADATA,sha256=5o1xzi17jmhMTf14XKQHixi_msQz57j7oTx3DtjCQOg,16144
+fxn-0.0.44.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+fxn-0.0.44.dist-info/entry_points.txt,sha256=O_AwD5dYaeB-YT1F9hPAPuDYCkw_W0tdNGYbc5RVR2k,45
+fxn-0.0.44.dist-info/top_level.txt,sha256=1ULIEGrnMlhId8nYAkjmRn9g3KEFuHKboq193SEKQkA,4
+fxn-0.0.44.dist-info/RECORD,,
{fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/entry_points.txt
File without changes
{fxn-0.0.43.dist-info → fxn-0.0.44.dist-info/licenses}/LICENSE
File without changes
{fxn-0.0.43.dist-info → fxn-0.0.44.dist-info}/top_level.txt
File without changes