fxn 0.0.42-py3-none-any.whl → 0.0.44-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fxn/__init__.py +2 -1
- fxn/beta/client.py +59 -2
- fxn/beta/remote.py +40 -47
- fxn/c/configuration.py +62 -30
- fxn/c/value.py +3 -2
- fxn/cli/__init__.py +12 -8
- fxn/cli/compile.py +147 -0
- fxn/cli/predictions.py +16 -13
- fxn/cli/predictors.py +35 -4
- fxn/client.py +66 -5
- fxn/{compile/compile.py → compile.py} +22 -16
- fxn/function.py +8 -3
- fxn/logging.py +219 -0
- fxn/{compile/sandbox.py → sandbox.py} +62 -33
- fxn/services/prediction.py +47 -36
- fxn/types/dtype.py +3 -3
- fxn/types/prediction.py +5 -13
- fxn/types/predictor.py +1 -1
- fxn/version.py +1 -1
- {fxn-0.0.42.dist-info → fxn-0.0.44.dist-info}/METADATA +3 -2
- fxn-0.0.44.dist-info/RECORD +47 -0
- {fxn-0.0.42.dist-info → fxn-0.0.44.dist-info}/WHEEL +1 -1
- fxn/compile/__init__.py +0 -7
- fxn/compile/signature.py +0 -183
- fxn-0.0.42.dist-info/RECORD +0 -47
- {fxn-0.0.42.dist-info → fxn-0.0.44.dist-info}/entry_points.txt +0 -0
- {fxn-0.0.42.dist-info → fxn-0.0.44.dist-info/licenses}/LICENSE +0 -0
- {fxn-0.0.42.dist-info → fxn-0.0.44.dist-info}/top_level.txt +0 -0
    
        fxn/__init__.py
    CHANGED
    
    
    
        fxn/beta/client.py
    CHANGED
    
@@ -3,8 +3,17 @@
 #   Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
+from functools import wraps
+from inspect import signature as get_signature, Signature
+from typing import get_origin, Callable, Generator, Iterator, TypeVar
+
 from ..client import FunctionClient
+from ..services import PredictionService as EdgePredictionService
+from ..types import Acceleration
 from .prediction import PredictionService
+from .remote import RemoteAcceleration
+
+F = TypeVar("F", bound=Callable[..., object])
 
 class BetaClient:
     """
@@ -12,5 +21,53 @@ class BetaClient:
     """
     predictions: PredictionService
 
-    def __init__ (
-        self
+    def __init__ (
+        self,
+        client: FunctionClient,
+        *,
+        predictions: EdgePredictionService
+    ):
+        self.predictions = PredictionService(client)
+        self.__edge_predictions = predictions
+
+    def predict ( # INCOMPLETE # Preload
+        self,
+        tag: str,
+        *,
+        remote: bool=False,
+        acceleration: Acceleration | RemoteAcceleration="auto",
+        preload: bool=True
+    ) -> Callable[[F], F]:
+        """
+        Create a prediction and return results when the decorated function is invoked.
+
+        Parameters:
+            tag (str): Predictor tag.
+            remote (bool): Whether to create the prediction remotely.
+            acceleration (Acceleration | RemoteAcceleration): Prediction acceleration.
+            preload (bool): Whether to preload the predictor on the first run.
+        """
+        def decorator(func: F) -> F:
+            signature = get_signature(func)
+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                bound_args = signature.bind(*args, **kwargs)
+                bound_args.apply_defaults()
+                stream = (
+                    signature.return_annotation is not Signature.empty and
+                    get_origin(signature.return_annotation) in [Iterator, Generator]
+                )
+                create_func = self.predictions.remote.create if remote else self.__edge_predictions.create
+                def _predict (): # INCOMPLETE
+                    prediction = create_func(
+                        tag=tag,
+                        inputs=bound_args.arguments,
+                        acceleration=acceleration
+                    )
+                    if prediction.error:
+                        raise RuntimeError(prediction.error)
+                    return tuple(prediction.results) if len(prediction.results) > 1 else prediction.results[0]
+                result = _predict()
+                return result
+            return wrapper
+        return decorator
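For reference, the new BetaClient.predict decorator can be used roughly as follows. This is a minimal sketch rather than documented usage: the @samples/greeting tag and the greet signature are hypothetical, and it assumes the beta client is exposed as Function().beta with an access key already configured.

from fxn import Function

fxn = Function()  # assumes an access key is configured in the environment

@fxn.beta.predict(tag="@samples/greeting", remote=True, acceleration="a40")  # hypothetical predictor tag
def greet (name: str) -> str:
    ...

result = greet("Function")  # binds the arguments, creates the prediction, and returns its result(s)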
    
        fxn/beta/remote.py
    CHANGED
    
@@ -13,7 +13,7 @@ from numpy import array, frombuffer, ndarray
 from PIL import Image
 from pydantic import BaseModel, Field
 from requests import get, put
-from typing import …
+from typing import Literal
 from urllib.request import urlopen
 
 from ..c import Configuration
@@ -21,14 +21,7 @@ from ..client import FunctionClient
 from ..services import Value
 from ..types import Dtype, Prediction
 
-…
-    """
-    Remote acceleration.
-    """
-    Auto = "auto"
-    CPU = "cpu"
-    A40 = "a40"
-    A100 = "a100"
+RemoteAcceleration = Literal["auto", "cpu", "a40", "a100"]
 
 class RemotePredictionService:
     """
@@ -43,7 +36,7 @@ class RemotePredictionService:
         tag: str,
         *,
         inputs: dict[str, Value],
-        acceleration: RemoteAcceleration=…
+        acceleration: RemoteAcceleration="auto"
     ) -> Prediction:
         """
         Create a remote prediction.
@@ -74,51 +67,51 @@ class RemotePredictionService:
 
     def __to_value (
         self,
-        …
+        obj: Value,
         *,
         name: str,
         max_data_url_size: int=4 * 1024 * 1024
     ) -> RemoteValue:
-        …
-        if …
+        obj = self.__try_ensure_serializable(obj)
+        if obj is None:
             return RemoteValue(data=None, type=Dtype.null)
-        elif isinstance(…
-            …
-            return self.__to_value(…
+        elif isinstance(obj, float):
+            obj = array(obj, dtype=Dtype.float32)
+            return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size)
-        elif isinstance(…
-            …
-            return self.__to_value(…
+        elif isinstance(obj, bool):
+            obj = array(obj, dtype=Dtype.bool)
+            return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size)
-        elif isinstance(…
-            …
-            return self.__to_value(…
+        elif isinstance(obj, int):
+            obj = array(obj, dtype=Dtype.int32)
+            return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size)
-        elif isinstance(…
-            buffer = BytesIO(…
+        elif isinstance(obj, ndarray):
+            buffer = BytesIO(obj.tobytes())
             data = self.__upload(buffer, name=name, max_data_url_size=max_data_url_size)
-            return RemoteValue(data=data, type=…
+            return RemoteValue(data=data, type=obj.dtype.name, shape=list(obj.shape))
-        elif isinstance(…
-            buffer = BytesIO(…
+        elif isinstance(obj, str):
+            buffer = BytesIO(obj.encode())
             data = self.__upload(buffer, name=name, mime="text/plain", max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.string)
-        elif isinstance(…
-            buffer = BytesIO(dumps(…
+        elif isinstance(obj, list):
+            buffer = BytesIO(dumps(obj).encode())
             data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.list)
-        elif isinstance(…
-            buffer = BytesIO(dumps(…
+        elif isinstance(obj, dict):
+            buffer = BytesIO(dumps(obj).encode())
             data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.dict)
-        elif isinstance(…
+        elif isinstance(obj, Image.Image):
             buffer = BytesIO()
-            format = "PNG" if …
+            format = "PNG" if obj.mode == "RGBA" else "JPEG"
             mime = f"image/{format.lower()}"
-            …
+            obj.save(buffer, format=format)
             data = self.__upload(buffer, name=name, mime=mime, max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.image)
-        elif isinstance(…
-            data = self.__upload(…
+        elif isinstance(obj, BytesIO):
+            data = self.__upload(obj, name=name, max_data_url_size=max_data_url_size)
             return RemoteValue(data=data, type=Dtype.binary)
         else:
-            raise ValueError(f"Failed to serialize value '{…
+            raise ValueError(f"Failed to serialize value '{obj}' of type `{type(obj)}` because it is not supported")
 
     def __to_object (self, value: RemoteValue) -> Value:
         if value.type == Dtype.null:
@@ -177,16 +170,16 @@ class RemotePredictionService:
         return result
 
     @classmethod
-    def __try_ensure_serializable (cls, …
-        if …
-            return …
-        if isinstance(…
-            return [cls.__try_ensure_serializable(x) for x in …
-        if is_dataclass(…
-            return asdict(…
-        if isinstance(…
-            return …
-        return …
+    def __try_ensure_serializable (cls, obj: object) -> object:
+        if obj is None:
+            return obj
+        if isinstance(obj, list):
+            return [cls.__try_ensure_serializable(x) for x in obj]
+        if is_dataclass(obj) and not isinstance(obj, type):
+            return asdict(obj)
+        if isinstance(obj, BaseModel):
+            return obj.model_dump(mode="json", by_alias=True)
+        return obj
 
 class RemoteValue (BaseModel):
     data: str | None
        fxn/c/configuration.py
    CHANGED
    
@@ -16,15 +16,18 @@ class Configuration:
     def __init__ (self):
         configuration = c_void_p()
         status = get_fxnc().FXNConfigurationCreate(byref(configuration))
-        if status …
-            self.__configuration = configuration
-        else:
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to create configuration with error: {status_to_error(status)}")
+        self.__configuration = configuration
 
     @property
     def tag (self) -> str:
         buffer = create_string_buffer(2048)
-        status = get_fxnc().FXNConfigurationGetTag(…
+        status = get_fxnc().FXNConfigurationGetTag(
+            self.__configuration,
+            buffer,
+            len(buffer)
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration tag with error: {status_to_error(status)}")
         tag = buffer.value.decode("utf-8")
@@ -40,7 +43,11 @@ class Configuration:
     @property
     def token (self) -> str:
         buffer = create_string_buffer(2048)
-        status = get_fxnc().FXNConfigurationGetToken(…
+        status = get_fxnc().FXNConfigurationGetToken(
+            self.__configuration,
+            buffer,
+            len(buffer)
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration token with error: {status_to_error(status)}")
         token = buffer.value.decode("utf-8")
@@ -56,26 +63,33 @@ class Configuration:
     @property
     def acceleration (self) -> Acceleration:
         acceleration = c_int()
-        status = get_fxnc().FXNConfigurationGetAcceleration(…
-        …
-        …
-        …
+        status = get_fxnc().FXNConfigurationGetAcceleration(
+            self.__configuration,
+            byref(acceleration)
+        )
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration acceleration with error: {status_to_error(status)}")
+        return self.__to_acceleration_str(acceleration.value)
 
     @acceleration.setter
     def acceleration (self, acceleration: Acceleration):
-        status = get_fxnc().FXNConfigurationSetAcceleration(…
+        status = get_fxnc().FXNConfigurationSetAcceleration(
+            self.__configuration,
+            self.__to_acceleration_int(acceleration)
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to set configuration acceleration with error: {status_to_error(status)}")
 
     @property
     def device (self):
         device = c_void_p()
-        status = get_fxnc().FXNConfigurationGetDevice(…
-        …
-        …
-        …
+        status = get_fxnc().FXNConfigurationGetDevice(
+            self.__configuration,
+            byref(device)
+        )
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to get configuration device with error: {status_to_error(status)}")
+        return device if device.value else None
 
     @device.setter
     def device (self, device):
@@ -84,29 +98,21 @@ class Configuration:
             raise RuntimeError(f"Failed to set configuration device with error: {status_to_error(status)}")
 
     def add_resource (self, type: str, path: Path):
-        status = get_fxnc().FXNConfigurationAddResource(…
+        status = get_fxnc().FXNConfigurationAddResource(
+            self.__configuration,
+            type.encode(),
+            str(path).encode()
+        )
         if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to add configuration resource with error: {status_to_error(status)}")
 
-    def __enter__ (self):
-        return self
-
-    def __exit__ (self, exc_type, exc_value, traceback):
-        self.__release()
-
-    def __release (self):
-        if self.__configuration:
-            get_fxnc().FXNConfigurationRelease(self.__configuration)
-        self.__configuration = None
-
     @classmethod
     def get_unique_id (cls) -> str:
         buffer = create_string_buffer(2048)
         status = get_fxnc().FXNConfigurationGetUniqueID(buffer, len(buffer))
-        if status …
-            return buffer.value.decode("utf-8")
-        else:
+        if status != FXNStatus.OK:
             raise RuntimeError(f"Failed to retrieve configuration identifier with error: {status_to_error(status)}")
+        return buffer.value.decode("utf-8")
 
     @classmethod
     def get_client_id (cls) -> str:
@@ -115,4 +121,30 @@ class Configuration:
         if status == FXNStatus.OK:
             return buffer.value.decode("utf-8")
         else:
-            raise RuntimeError(f"Failed to retrieve client identifier with error: {status_to_error(status)}")
+            raise RuntimeError(f"Failed to retrieve client identifier with error: {status_to_error(status)}")
+
+    def __enter__ (self):
+        return self
+
+    def __exit__ (self, exc_type, exc_value, traceback):
+        self.__release()
+
+    def __release (self):
+        if self.__configuration:
+            get_fxnc().FXNConfigurationRelease(self.__configuration)
+        self.__configuration = None
+
+    def __to_acceleration_int (self, value: Acceleration) -> int:
+        match value:
+            case "auto": return 0
+            case "cpu": return 1
+            case "gpu": return 2
+            case "npu": return 4
+
+    def __to_acceleration_str (self, value: int) -> Acceleration:
+        match value:
+            case 0: return "auto"
+            case 1: return "cpu"
+            case 2: return "gpu"
+            case 4: return "npu"
+            case _: return None
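The configuration object keeps its context-manager lifetime but now maps acceleration strings to the native enum internally (auto=0, cpu=1, gpu=2, npu=4). A minimal sketch, assuming the native fxnc library is available on the machine:

from fxn.c import Configuration

with Configuration() as config:            # released automatically on exit
    config.acceleration = "gpu"            # stored as native enum value 2
    print(config.acceleration)             # read back as "gpu"
    print(Configuration.get_unique_id())   # configuration identifier for this device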
    
        fxn/c/value.py
    CHANGED
    
@@ -48,7 +48,7 @@ class Value:
 
     @property
     def shape (self) -> list[int] | None:
-        if self.type not in …
+        if self.type not in _TENSOR_ISH_DTYPES:
             return None
         fxnc = get_fxnc()
         dims = c_int32()
@@ -220,4 +220,5 @@ _TENSOR_DTYPES = {
     Dtype.uint32,
     Dtype.uint64,
     Dtype.bool,
-}
+}
+_TENSOR_ISH_DTYPES = _TENSOR_DTYPES | { Dtype.image }
    
        fxn/cli/__init__.py
    CHANGED
    
@@ -3,17 +3,19 @@
 #   Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
-…
+import typer
 
 from .auth import app as auth_app
-…
+from .compile import compile_predictor
 from .misc import cli_options
 from .predictions import create_prediction
-from .predictors import retrieve_predictor
+from .predictors import archive_predictor, delete_predictor, retrieve_predictor
+from ..logging import TracebackMarkupConsole
 from ..version import __version__
 
 # Define CLI
-…
+typer.main.console_stderr = TracebackMarkupConsole()
+app = typer.Typer(
     name=f"Function CLI {__version__}",
     no_args_is_help=True,
     pretty_exceptions_show_locals=False,
@@ -33,11 +35,13 @@ app.command(
     help="Make a prediction.",
     context_settings={ "allow_extra_args": True, "ignore_unknown_options": True }
 )(create_prediction)
-…
-…
-…
-…
+app.command(
+    name="compile",
+    help="Create a predictor by compiling a Python function."
+)(compile_predictor)
 app.command(name="retrieve", help="Retrieve a predictor.")(retrieve_predictor)
+app.command(name="archive", help="Archive a predictor.")(archive_predictor)
+app.command(name="delete", help="Delete a predictor.")(delete_predictor)
 
 # Run
 if __name__ == "__main__":
    
        fxn/cli/compile.py
    ADDED
    
@@ -0,0 +1,147 @@
+# 
+#   Function
+#   Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from asyncio import run as run_async
+from importlib.util import module_from_spec, spec_from_file_location
+from inspect import getmembers, getmodulename, isfunction
+from pathlib import Path
+from pydantic import BaseModel
+from rich import print as print_rich
+from rich.progress import SpinnerColumn, TextColumn
+import sys
+from typer import Argument, Option
+from typing import Callable, Literal
+from urllib.parse import urlparse, urlunparse
+
+from ..compile import PredictorSpec
+from ..function import Function
+from ..sandbox import EntrypointCommand
+from ..logging import CustomProgress, CustomProgressTask
+from .auth import get_access_key
+
+class CompileError (Exception):
+    pass
+
+def compile_predictor (
+    path: str=Argument(..., help="Predictor path.")
+):
+    run_async(_compile_predictor_async(path))
+
+async def _compile_predictor_async (path: str):
+    fxn = Function(get_access_key())
+    path: Path = Path(path).resolve()
+    with CustomProgress():
+        # Load
+        with CustomProgressTask(loading_text="Loading predictor...") as task:
+            func = _load_predictor_func(path)
+            entrypoint = EntrypointCommand(from_path=str(path), to_path="./", name=func.__name__)
+            spec: PredictorSpec = func.__predictor_spec
+            task.finish(f"Loaded prediction function: [bold cyan]{spec.tag}[/bold cyan]")
+        # Populate
+        sandbox = spec.sandbox
+        sandbox.commands.append(entrypoint)
+        with CustomProgressTask(loading_text="Uploading sandbox...", done_text="Uploaded sandbox"):
+            sandbox.populate(fxn=fxn)
+        # Compile
+        with CustomProgressTask(loading_text="Running codegen...", done_text="Completed codegen"):
+            with CustomProgressTask(loading_text="Creating predictor..."):
+                predictor = fxn.client.request(
+                    method="POST",
+                    path="/predictors",
+                    body=spec.model_dump(mode="json", exclude=spec.model_extra.keys(), by_alias=True),
+                    response_type=_Predictor
+                )
+            with ProgressLogQueue() as task_queue:
+                async for event in fxn.client.stream(
+                    method="POST",
+                    path=f"/predictors/{predictor.tag}/compile",
+                    body={ },
+                    response_type=_LogEvent | _ErrorEvent
+                ):
+                    if isinstance(event, _LogEvent):
+                        task_queue.push_log(event)
+                    elif isinstance(event, _ErrorEvent):
+                        task_queue.push_error(event)
+                        raise CompileError(event.data.error)
+    predictor_url = _compute_predictor_url(fxn.client.api_url, spec.tag)
+    print_rich(f"\n[bold spring_green3]🎉 Predictor is now being compiled.[/bold spring_green3] Check it out at [link={predictor_url}]{predictor_url}[/link]")
+
+def _load_predictor_func (path: str) -> Callable[...,object]:
+    if "" not in sys.path:
+        sys.path.insert(0, "")
+    path: Path = Path(path).resolve()
+    sys.path.insert(0, str(path.parent))
+    name = getmodulename(path)
+    spec = spec_from_file_location(name, path)
+    module = module_from_spec(spec)
+    sys.modules[name] = module
+    spec.loader.exec_module(module)
+    main_func = next(func for _, func in getmembers(module, isfunction) if hasattr(func, "__predictor_spec"))
+    return main_func
+
+def _compute_predictor_url (api_url: str, tag: str) -> str:
+    parsed_url = urlparse(api_url)
+    hostname_parts = parsed_url.hostname.split(".")
+    if hostname_parts[0] == "api":
+        hostname_parts.pop(0)
+    hostname = ".".join(hostname_parts)
+    netloc = hostname if not parsed_url.port else f"{hostname}:{parsed_url.port}"
+    predictor_url = urlunparse(parsed_url._replace(netloc=netloc, path=f"{tag}"))
+    return predictor_url
+
+class _Predictor (BaseModel):
+    tag: str
+
+class _LogData (BaseModel):
+    message: str
+    level: int = 0
+    status: Literal["success", "error"] = "success"
+    update: bool = False
+
+class _LogEvent (BaseModel):
+    event: Literal["log"]
+    data: _LogData
+
+class _ErrorData (BaseModel):
+    error: str
+
+class _ErrorEvent (BaseModel):
+    event: Literal["error"]
+    data: _ErrorData
+
+class ProgressLogQueue:
+
+    def __init__ (self):
+        self.queue: list[tuple[int, CustomProgressTask]] = []
+
+    def push_log (self, event: _LogEvent):
+        # Check for update
+        if event.data.update and self.queue:
+            current_level, current_task = self.queue[-1]
+            current_task.update(description=event.data.message, status=event.data.status)
+            return
+        # Pop
+        while self.queue:
+            current_level, current_task = self.queue[-1]
+            if event.data.level > current_level:
+                break
+            current_task.__exit__(None, None, None)
+            self.queue.pop()
+        task = CustomProgressTask(loading_text=event.data.message)
+        task.__enter__()
+        self.queue.append((event.data.level, task))
+
+    def push_error (self, error: _ErrorEvent):
+        while self.queue:
+            _, current_task = self.queue.pop()
+            current_task.__exit__(RuntimeError, None, None)
+
+    def __enter__ (self):
+        return self
+
+    def __exit__ (self, exc_type, exc_value, traceback):
+        while self.queue:
+            _, current_task = self.queue.pop()
+            current_task.__exit__(None, None, None)
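The predictor URL printed at the end is derived from the API URL by dropping a leading "api." host label and using the predictor tag as the path. A standalone sketch of that rewrite, with an illustrative API URL and tag:

from urllib.parse import urlparse, urlunparse

def predictor_url (api_url: str, tag: str) -> str:
    parsed = urlparse(api_url)
    parts = parsed.hostname.split(".")
    if parts[0] == "api":
        parts.pop(0)                                  # "api.example.com" -> "example.com"
    hostname = ".".join(parts)
    netloc = hostname if not parsed.port else f"{hostname}:{parsed.port}"
    return urlunparse(parsed._replace(netloc=netloc, path=f"{tag}"))

print(predictor_url("https://api.example.com", "@user/predictor"))
# https://example.com/@user/predictor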
    
        fxn/cli/predictions.py
    CHANGED
    
@@ -9,11 +9,11 @@ from numpy import array_repr, ndarray
 from pathlib import Path, PurePath
 from PIL import Image
 from rich import print_json
-from rich.progress import Progress, SpinnerColumn, TextColumn
 from tempfile import mkstemp
 from typer import Argument, Context, Option
 
 from ..function import Function
+from ..logging import CustomProgress, CustomProgressTask
 from ..types import Prediction
 from .auth import get_access_key
 
@@ -26,18 +26,21 @@ def create_prediction (
 
 async def _predict_async (tag: str, quiet: bool, context: Context):
     # Preload
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-    …
+    with CustomProgress(transient=True, disable=quiet):
+        fxn = Function(get_access_key())
+        with CustomProgressTask(
+            loading_text="Preloading predictor...",
+            done_text="Preloaded predictor"
+        ):
+            fxn.predictions.create(tag, inputs={ })
+        with CustomProgressTask(loading_text="Making prediction..."):
+            inputs = { }
+            for i in range(0, len(context.args), 2):
+                name = context.args[i].replace("-", "")
+                value = _parse_value(context.args[i+1])
+                inputs[name] = value
+            prediction = fxn.predictions.create(tag, inputs=inputs)
+    _log_prediction(prediction)
 
 def _parse_value (value: str):
     """
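The prediction command now pairs its extra CLI arguments into prediction inputs inside a progress task. A simplified sketch of that pairing, with illustrative flags and without the _parse_value coercion applied by the real command:

def parse_inputs (args: list[str]) -> dict[str, str]:
    inputs = { }
    for i in range(0, len(args), 2):
        name = args[i].replace("-", "")   # "--radius" -> "radius"
        inputs[name] = args[i + 1]        # the real command coerces this via _parse_value
    return inputs

print(parse_inputs(["--radius", "4", "--units", "cm"]))
# {'radius': '4', 'units': 'cm'}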
    
        fxn/cli/predictors.py
    CHANGED
    
@@ -7,12 +7,43 @@ from rich import print_json
 from typer import Argument
 
 from ..function import Function
+from ..logging import CustomProgress, CustomProgressTask
 from .auth import get_access_key
 
 def retrieve_predictor (
     tag: str=Argument(..., help="Predictor tag.")
 ):
-    …
-    …
-    …
-    …
+    with CustomProgress(transient=True):
+        with CustomProgressTask(loading_text="Retrieving predictor..."):
+            fxn = Function(get_access_key())
+            predictor = fxn.predictors.retrieve(tag)
+            predictor = predictor.model_dump() if predictor else None
+            print_json(data=predictor)
+
+def archive_predictor (
+    tag: str=Argument(..., help="Predictor tag.")
+):
+    with CustomProgress():
+        with CustomProgressTask(
+            loading_text="Archiving predictor...",
+            done_text=f"Archived predictor: [bold dark_orange]{tag}[/bold dark_orange]"
+        ):
+            fxn = Function(get_access_key())
+            fxn.client.request(
+                method="POST",
+                path=f"/predictors/{tag}/archive"
+            )
+
+def delete_predictor (
+    tag: str=Argument(..., help="Predictor tag.")
+):
+    with CustomProgress():
+        with CustomProgressTask(
+            loading_text="Deleting predictor...",
+            done_text=f"Deleted predictor: [bold red]{tag}[/bold red]"
+        ):
+            fxn = Function(get_access_key())
+            fxn.client.request(
+                method="DELETE",
+                path=f"/predictors/{tag}"
+            )
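The new archive and delete commands are thin wrappers over the REST client. Outside the CLI, the equivalent calls would look roughly like this, with a hypothetical predictor tag:

from fxn import Function

fxn = Function()  # assumes an access key is configured

fxn.client.request(method="POST", path="/predictors/@user/predictor/archive")   # what the archive command performs
fxn.client.request(method="DELETE", path="/predictors/@user/predictor")         # what the delete command performs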