fxn 0.0.40-py3-none-any.whl → 0.0.42-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. fxn/__init__.py +3 -1
  2. fxn/beta/__init__.py +6 -0
  3. fxn/beta/client.py +16 -0
  4. fxn/beta/prediction.py +16 -0
  5. fxn/beta/remote.py +207 -0
  6. fxn/c/__init__.py +7 -10
  7. fxn/c/configuration.py +114 -56
  8. fxn/c/fxnc.py +42 -22
  9. fxn/c/map.py +60 -30
  10. fxn/c/prediction.py +72 -33
  11. fxn/c/predictor.py +55 -27
  12. fxn/c/stream.py +33 -15
  13. fxn/c/value.py +215 -42
  14. fxn/cli/__init__.py +14 -12
  15. fxn/cli/auth.py +1 -1
  16. fxn/cli/misc.py +1 -1
  17. fxn/cli/{predict.py → predictions.py} +33 -36
  18. fxn/cli/predictors.py +3 -51
  19. fxn/client.py +58 -0
  20. fxn/compile/__init__.py +7 -0
  21. fxn/compile/compile.py +80 -0
  22. fxn/compile/sandbox.py +177 -0
  23. fxn/compile/signature.py +183 -0
  24. fxn/function.py +10 -6
  25. fxn/lib/__init__.py +1 -1
  26. fxn/lib/linux/arm64/libFunction.so +0 -0
  27. fxn/lib/linux/x86_64/libFunction.so +0 -0
  28. fxn/lib/macos/arm64/Function.dylib +0 -0
  29. fxn/lib/macos/x86_64/Function.dylib +0 -0
  30. fxn/lib/windows/arm64/Function.dll +0 -0
  31. fxn/lib/windows/x86_64/Function.dll +0 -0
  32. fxn/services/__init__.py +4 -4
  33. fxn/services/prediction.py +180 -351
  34. fxn/services/predictor.py +14 -187
  35. fxn/services/user.py +16 -42
  36. fxn/types/__init__.py +4 -4
  37. fxn/types/dtype.py +1 -1
  38. fxn/types/prediction.py +20 -10
  39. fxn/types/predictor.py +18 -32
  40. fxn/types/user.py +9 -15
  41. fxn/version.py +2 -2
  42. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/METADATA +5 -5
  43. fxn-0.0.42.dist-info/RECORD +47 -0
  44. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/WHEEL +1 -1
  45. fxn/api/__init__.py +0 -6
  46. fxn/api/client.py +0 -43
  47. fxn/c/dtype.py +0 -26
  48. fxn/c/status.py +0 -12
  49. fxn/c/version.py +0 -13
  50. fxn/cli/env.py +0 -40
  51. fxn-0.0.40.dist-info/RECORD +0 -44
  52. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/LICENSE +0 -0
  53. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/entry_points.txt +0 -0
  54. {fxn-0.0.40.dist-info → fxn-0.0.42.dist-info}/top_level.txt +0 -0
fxn/cli/{predict.py → predictions.py} CHANGED
@@ -1,11 +1,11 @@
  #
  # Function
- # Copyright © 2024 NatML Inc. All Rights Reserved.
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
  #

  from asyncio import run as run_async
  from io import BytesIO
- from numpy import ndarray
+ from numpy import array_repr, ndarray
  from pathlib import Path, PurePath
  from PIL import Image
  from rich import print_json
@@ -14,34 +14,30 @@ from tempfile import mkstemp
  from typer import Argument, Context, Option

  from ..function import Function
+ from ..types import Prediction
  from .auth import get_access_key

- def predict (
-     tag: str = Argument(..., help="Predictor tag."),
+ def create_prediction (
+     tag: str=Argument(..., help="Predictor tag."),
+     quiet: bool=Option(False, "--quiet", help="Suppress verbose logging when creating the prediction."),
      context: Context = 0
  ):
-     run_async(_predict_async(tag, context=context))
+     run_async(_predict_async(tag, quiet=quiet, context=context))

- async def _predict_async (tag: str, context: Context):
+ async def _predict_async (tag: str, quiet: bool, context: Context):
+     # Preload
+     fxn = Function(get_access_key())
+     fxn.predictions.create(tag, inputs={ }, verbose=not quiet)
+     # Predict
      with Progress(
          SpinnerColumn(spinner_name="dots"),
          TextColumn("[progress.description]{task.description}"),
          transient=True
      ) as progress:
          progress.add_task(description="Running Function...", total=None)
-         # Parse inputs
          inputs = { context.args[i].replace("-", ""): _parse_value(context.args[i+1]) for i in range(0, len(context.args), 2) }
-         # Stream
-         fxn = Function(get_access_key())
-         async for prediction in fxn.predictions.stream(tag, inputs=inputs):
-             # Parse results
-             images = [value for value in prediction.results or [] if isinstance(value, Image.Image)]
-             prediction.results = [_serialize_value(value) for value in prediction.results] if prediction.results is not None else None
-             # Print
-             print_json(data=prediction.model_dump())
-             # Show images
-             for image in images:
-                 image.show()
+         prediction = fxn.predictions.create(tag, inputs=inputs)
+         _log_prediction(prediction)

  def _parse_value (value: str):
      """
@@ -70,33 +66,34 @@ def _parse_value (value: str):
          pass
      # File
      if value.startswith("@"):
-         return Path(value[1:])
+         path = Path(value[1:]).expanduser().resolve()
+         if path.suffix in [".txt", ".md"]:
+             with open(path) as f:
+                 return f.read()
+         elif path.suffix in [".jpg", ".png"]:
+             return Image.open(path)
+         else:
+             with open(path, "rb") as f:
+                 return BytesIO(f.read())
      # String
      return value
-
+
+ def _log_prediction (prediction: Prediction):
+     images = [value for value in prediction.results or [] if isinstance(value, Image.Image)]
+     prediction.results = [_serialize_value(value) for value in prediction.results] if prediction.results is not None else None
+     print_json(data=prediction.model_dump())
+     for image in images:
+         image.show()
+
  def _serialize_value (value):
-     # Convert ndarray to list
      if isinstance(value, ndarray):
-         return value.tolist()
-     # Write image
+         return array_repr(value)
      if isinstance(value, Image.Image):
          _, path = mkstemp(suffix=".png" if value.mode == "RGBA" else ".jpg")
          value.save(path)
          return path
-     # Serialize `BytesIO`
      if isinstance(value, BytesIO):
          return str(value)
-     # Serialize `Path`
      if isinstance(value, PurePath):
          return str(value)
-     # Return
-     return value
-
- def _prediction_dict_factory (kv_pairs):
-     # Check if value
-     VALUE_KEYS = ["data", "type", "shape"]
-     keys = [k for k, _ in kv_pairs]
-     is_value = all(k in keys for k in VALUE_KEYS)
-     kv_pairs = [(k, v) for k, v in kv_pairs if v is not None] if is_value else kv_pairs
-     # Construct
-     return dict(kv_pairs)
+     return value
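For reference, the rewritten command maps directly onto the SDK's prediction API. A minimal sketch, assuming a valid access key and using a hypothetical predictor tag and input:

    from fxn import Function

    fxn = Function()  # assumed to fall back to the FXN_ACCESS_KEY environment variable
    prediction = fxn.predictions.create(
        tag="@samplefxn/greeting",      # hypothetical predictor tag
        inputs={ "name": "Yusuf" }      # hypothetical input
    )
    print(prediction.results)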
fxn/cli/predictors.py CHANGED
@@ -1,14 +1,12 @@
  #
  # Function
- # Copyright © 2024 NatML Inc. All Rights Reserved.
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
  #

  from rich import print_json
- from rich.progress import Progress, SpinnerColumn, TextColumn
- from typer import Argument, Option
+ from typer import Argument

  from ..function import Function
- from ..types import PredictorStatus
  from .auth import get_access_key

  def retrieve_predictor (
@@ -17,50 +15,4 @@ def retrieve_predictor (
      fxn = Function(get_access_key())
      predictor = fxn.predictors.retrieve(tag)
      predictor = predictor.model_dump() if predictor else None
-     print_json(data=predictor)
-
- def list_predictors (
-     owner: str=Option(None, help="Predictor owner. This defaults to the current user."),
-     status: PredictorStatus=Option(PredictorStatus.Active, help="Predictor status. This defaults to `ACTIVE`."),
-     offset: int=Option(None, help="Pagination offset."),
-     count: int=Option(None, help="Pagination count.")
- ):
-     fxn = Function(get_access_key())
-     predictors = fxn.predictors.list(
-         owner=owner,
-         status=status,
-         offset=offset,
-         count=count
-     )
-     predictors = [predictor.model_dump() for predictor in predictors] if predictors is not None else None
-     print_json(data=predictors)
-
- def search_predictors (
-     query: str=Argument(..., help="Search query."),
-     offset: int=Option(None, help="Pagination offset."),
-     count: int=Option(None, help="Pagination count.")
- ):
-     fxn = Function(get_access_key())
-     predictors = fxn.predictors.search(query=query, offset=offset, count=count)
-     predictors = [predictor.model_dump() for predictor in predictors]
-     print_json(data=predictors)
-
- def delete_predictor (
-     tag: str=Argument(..., help="Predictor tag.")
- ):
-     with Progress(
-         SpinnerColumn(spinner_name="dots"),
-         TextColumn("[progress.description]{task.description}"),
-         transient=True
-     ) as progress:
-         progress.add_task(description="Deleting Function...", total=None)
-         fxn = Function(get_access_key())
-         result = fxn.predictors.delete(tag)
-         print_json(data=result)
-
- def archive_predictor (
-     tag: str=Argument(..., help="Predictor tag.")
- ):
-     fxn = Function(get_access_key())
-     predictor = fxn.predictors.archive(tag)
-     print_json(data=predictor.model_dump())
+     print_json(data=predictor)
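Predictor management in the CLI is now limited to retrieval. The equivalent SDK call, as a minimal sketch with a hypothetical tag:

    from fxn import Function

    fxn = Function()  # assumed to read FXN_ACCESS_KEY from the environment
    predictor = fxn.predictors.retrieve("@samplefxn/greeting")   # hypothetical tag
    print(predictor.model_dump() if predictor else None)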
fxn/client.py ADDED
@@ -0,0 +1,58 @@
+ #
+ # Function
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
+ #
+
+ from json import loads, JSONDecodeError
+ from pydantic import BaseModel
+ from requests import request
+ from typing import Any, Literal, Type, TypeVar
+
+ T = TypeVar("T", bound=BaseModel)
+
+ class FunctionClient:
+
+     def __init__(self, access_key: str, api_url: str | None) -> None:
+         self.access_key = access_key
+         self.api_url = api_url or "https://api.fxn.ai/v1"
+
+     def request (
+         self,
+         *,
+         method: Literal["GET", "POST", "DELETE"],
+         path: str,
+         body: dict[str, Any]=None,
+         response_type: Type[T]=None
+     ) -> T:
+         response = request(
+             method=method,
+             url=f"{self.api_url}{path}",
+             json=body,
+             headers={ "Authorization": f"Bearer {self.access_key}" }
+         )
+         data = response.text
+         try:
+             data = response.json()
+         except JSONDecodeError:
+             pass
+         if response.ok:
+             return response_type(**data) if response_type is not None else None
+         else:
+             error = _ErrorResponse(**data).errors[0].message if isinstance(data, dict) else data
+             raise FunctionAPIError(error, response.status_code)
+
+ class FunctionAPIError (Exception):
+
+     def __init__(self, message: str, status_code: int):
+         super().__init__(message)
+         self.message = message
+         self.status_code = status_code
+
+     def __str__(self):
+         return f"FunctionAPIError: {self.message} (Status Code: {self.status_code})"
+
+ class _APIError (BaseModel):
+     message: str
+
+ class _ErrorResponse (BaseModel):
+     errors: list[_APIError]
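A minimal sketch of how FunctionClient is intended to be used; the `/users` path and the `_Profile` response model below are illustrative assumptions rather than part of the diff:

    from pydantic import BaseModel
    from fxn.client import FunctionClient, FunctionAPIError

    class _Profile (BaseModel):          # hypothetical response model
        username: str

    client = FunctionClient(access_key="fxn_...", api_url=None)  # placeholder access key
    try:
        profile = client.request(method="GET", path="/users", response_type=_Profile)
        print(profile.username)
    except FunctionAPIError as error:
        print(error.status_code, error.message)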
fxn/compile/__init__.py ADDED
@@ -0,0 +1,7 @@
+ #
+ # Function
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
+ #
+
+ from .compile import compile
+ from .sandbox import Sandbox
fxn/compile/compile.py ADDED
@@ -0,0 +1,80 @@
+ #
+ # Function
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
+ #
+
+ from collections.abc import Callable
+ from functools import wraps
+ from pathlib import Path
+ from pydantic import BaseModel, Field
+
+ from ..types import AccessMode, Signature
+ from .sandbox import Sandbox
+ from .signature import get_function_type, infer_function_signature, FunctionType
+
+ class PredictorSpec (BaseModel):
+     """
+     Descriptor of a predictor to be compiled.
+     """
+     tag: str = Field(description="Predictor tag.")
+     description: str = Field(description="Predictor description. MUST be less than 100 characters long.", min_length=4, max_length=100)
+     sandbox: Sandbox = Field(description="Sandbox to compile the function.")
+     access: AccessMode = Field(description="Predictor access.")
+     signature: Signature = Field(description="Predictor signature.")
+     card: str | None = Field(default=None, description="Predictor card (markdown).")
+     media: str | None = Field(default=None, description="Predictor media URL.")
+     license: str | None = Field(default=None, description="Predictor license URL. This is required for public predictors.")
+
+ def compile (
+     tag: str,
+     *,
+     description: str,
+     sandbox: Sandbox=None,
+     access: AccessMode=AccessMode.Private,
+     card: str | Path=None,
+     media: Path=None,
+     license: str=None,
+ ):
+     """
+     Create a predictor by compiling a stateless function.
+
+     Parameters:
+         tag (str): Predictor tag.
+         description (str): Predictor description. MUST be less than 100 characters long.
+         sandbox (Sandbox): Sandbox to compile the function.
+         access (AccessMode): Predictor access.
+         card (str | Path): Predictor card markdown string or path to card.
+         media (Path): Predictor thumbnail image (jpeg or png) path.
+         license (str): Predictor license URL. This is required for public predictors.
+     """
+     def decorator (func: Callable):
+         # Check type
+         if not callable(func):
+             raise TypeError("Cannot compile non-function objects")
+         func_type = get_function_type(func)
+         if func_type not in { FunctionType.Function, FunctionType.Generator }:
+             raise TypeError(f"Function '{func.__name__}' must be a regular function or generator")
+         # Gather metadata
+         signature = infer_function_signature(func) # throws
+         if isinstance(card, Path):
+             with open(card, "r") as f:
+                 card_content = f.read()
+         else:
+             card_content = card
+         spec = PredictorSpec(
+             tag=tag,
+             description=description,
+             sandbox=sandbox if sandbox is not None else Sandbox(),
+             access=access,
+             signature=signature,
+             card=card_content,
+             media=None, # INCOMPLETE
+             license=license
+         )
+         # Wrap
+         @wraps(func)
+         def wrapper (*args, **kwargs):
+             return func(*args, **kwargs)
+         wrapper.__predictor_spec = spec
+         return wrapper
+     return decorator
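A hedged sketch of the decorator in use: the function must be fully annotated so its signature can be inferred, and the returned wrapper carries a PredictorSpec for the compiler to pick up. The tag and function below are hypothetical:

    from fxn.compile import compile

    @compile(
        tag="@samplefxn/greeting",                         # hypothetical tag
        description="Return a greeting for a given name."  # 4-100 characters, per PredictorSpec
    )
    def greet (name: str) -> str:
        return f"Hello, {name}!"

    # The wrapper still behaves like the original function.
    assert greet("Function") == "Hello, Function!"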
fxn/compile/sandbox.py ADDED
@@ -0,0 +1,177 @@
+ #
+ # Function
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
+ #
+
+ from __future__ import annotations
+ from hashlib import sha256
+ from pathlib import Path
+ from pydantic import BaseModel
+ from requests import put
+ from typing import Literal
+
+ from ..function import Function
+
+ class WorkdirCommand (BaseModel):
+     kind: Literal["workdir"] = "workdir"
+     path: str
+
+ class EnvCommand (BaseModel):
+     kind: Literal["env"] = "env"
+     env: dict[str, str]
+
+ class UploadFileCommand (BaseModel):
+     kind: Literal["upload_file"] = "upload_file"
+     from_path: str
+     to_path: str
+     manifest: dict[str, str] | None = None
+
+ class UploadDirectoryCommand (BaseModel):
+     kind: Literal["upload_dir"] = "upload_dir"
+     from_path: str
+     to_path: str
+     manifest: dict[str, str] | None = None
+
+ class PipInstallCommand (BaseModel):
+     kind: Literal["pip_install"] = "pip_install"
+     packages: list[str]
+
+ class AptInstallCommand (BaseModel):
+     kind: Literal["apt_install"] = "apt_install"
+     packages: list[str]
+
+ class EntrypointCommand (BaseModel):
+     kind: Literal["entrypoint"] = "entrypoint"
+     path: str
+
+ Command = (
+     WorkdirCommand |
+     EnvCommand |
+     UploadFileCommand |
+     UploadDirectoryCommand |
+     PipInstallCommand |
+     AptInstallCommand |
+     EntrypointCommand
+ )
+
+ class Sandbox (BaseModel):
+     """
+     Sandbox which defines a containerized environment for compiling your Python function.
+     """
+     commands: list[Command] = []
+
+     def workdir (self, path: str | Path) -> Sandbox:
+         """
+         Change the current working directory for subsequent commands.
+
+         Parameters:
+             path (str | Path): Path to change to.
+         """
+         command = WorkdirCommand(path=str(path))
+         self.commands.append(command)
+         return self
+
+     def env (self, **env: str) -> Sandbox:
+         """
+         Set environment variables in the sandbox.
+         """
+         command = EnvCommand(env=env)
+         self.commands.append(command)
+         return self
+
+     def upload_file (
+         self,
+         from_path: str | Path,
+         to_path: str | Path = "./"
+     ) -> Sandbox:
+         """
+         Upload a file to the sandbox.
+
+         Parameters:
+             from_path (str | Path): File path on the local file system.
+             to_path (str | Path): Remote path to upload file to.
+         """
+         command = UploadFileCommand(from_path=str(from_path), to_path=str(to_path))
+         self.commands.append(command)
+         return self
+
+     def upload_directory (
+         self,
+         from_path: str | Path,
+         to_path: str | Path = "."
+     ) -> Sandbox:
+         """
+         Upload a directory to the sandbox.
+
+         Parameters:
+             from_path (str | Path): Directory path on the local file system.
+             to_path (str | Path): Remote path to upload directory to.
+         """
+         command = UploadDirectoryCommand(from_path=str(from_path), to_path=str(to_path))
+         self.commands.append(command)
+         return self
+
+     def pip_install (self, *packages: str) -> Sandbox:
+         """
+         Install Python packages in the sandbox.
+
+         Parameters:
+             packages (list): Packages to install.
+         """
+         command = PipInstallCommand(packages=packages)
+         self.commands.append(command)
+         return self
+
+     def apt_install (self, *packages: str) -> Sandbox:
+         """
+         Install Debian packages in the sandbox.
+
+         Parameters:
+             packages (list): Packages to install.
+         """
+         command = AptInstallCommand(packages=packages)
+         self.commands.append(command)
+         return self
+
+     def populate (self, fxn: Function=None) -> Sandbox:
+         """
+         Populate all metadata.
+         """
+         fxn = fxn if fxn is not None else Function()
+         for command in self.commands:
+             if isinstance(command, UploadFileCommand):
+                 from_path = Path(command.from_path)
+                 to_path = Path(command.to_path)
+                 command.manifest = { str(to_path / from_path.name): self.__upload_file(from_path, fxn=fxn) }
+             elif isinstance(command, UploadDirectoryCommand):
+                 from_path = Path(command.from_path)
+                 to_path = Path(command.to_path)
+                 files = [file for file in from_path.rglob("*") if file.is_file()]
+                 command.manifest = { str(to_path / file.relative_to(from_path)): self.__upload_file(file, fxn=fxn) for file in files }
+         return self
+
+     def __upload_file (self, path: Path, fxn: Function) -> str:
+         assert path.is_file(), f"Cannot upload file at path {path} because it is not a file"
+         hash = self.__compute_hash(path)
+         try:
+             fxn.client.request(method="HEAD", path=f"/resources/{hash}")
+         except:
+             resource = fxn.client.request(
+                 method="POST",
+                 path="/resources",
+                 body={ "name": hash },
+                 response_type=_Resource
+             )
+             with path.open("rb") as f:
+                 put(resource.url, data=f).raise_for_status()
+         return hash
+
+     def __compute_hash (self, path: Path) -> str:
+         hash = sha256()
+         with path.open("rb") as f:
+             for chunk in iter(lambda: f.read(4096), b""):
+                 hash.update(chunk)
+         return hash.hexdigest()
+
+ class _Resource (BaseModel):
+     url: str
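The Sandbox is a chainable builder; a minimal sketch with illustrative package names, paths, and environment variables:

    from fxn.compile import Sandbox

    sandbox = (
        Sandbox()
        .pip_install("numpy", "pillow")      # illustrative Python packages
        .apt_install("libgomp1")             # illustrative Debian package
        .env(MODEL_PRECISION="fp16")         # illustrative environment variable
        .upload_file("weights.bin", "./")    # illustrative local file
    )
    # populate() hashes each referenced file and uploads it through the Function API
    # before compilation, e.g. sandbox.populate(fxn=Function(access_key="fxn_...")).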
fxn/compile/signature.py ADDED
@@ -0,0 +1,183 @@
+ #
+ # Function
+ # Copyright © 2025 NatML Inc. All Rights Reserved.
+ #
+
+ from collections.abc import Mapping, Sequence
+ from enum import Enum
+ from inspect import isasyncgenfunction, iscoroutinefunction, isgeneratorfunction, signature
+ from io import BytesIO
+ import numpy as np
+ from PIL import Image
+ from pydantic import BaseModel, TypeAdapter
+ from typing import get_type_hints, get_origin, get_args, Any, Dict, List, Union
+
+ from ..types import Dtype, EnumerationMember, Parameter, Signature
+
+ class FunctionType (str, Enum):
+     Coroutine = "ASYNC_FUNCTION"
+     Function = "FUNCTION"
+     Generator = "GENERATOR"
+     AsyncGenerator = "ASYNC_GENERATOR"
+
+ def get_function_type (func) -> FunctionType:
+     if isasyncgenfunction(func):
+         return FunctionType.AsyncGenerator
+     elif iscoroutinefunction(func):
+         return FunctionType.Coroutine
+     elif isgeneratorfunction(func):
+         return FunctionType.Generator
+     else:
+         return FunctionType.Function
+
+ def infer_function_signature (func) -> Signature:
+     inputs = _get_input_parameters(func)
+     outputs = _get_output_parameters(func)
+     signature = Signature(inputs=inputs, outputs=outputs)
+     return signature
+
+ def _get_input_parameters (func) -> list[Parameter]:
+     sig = signature(func)
+     type_hints = get_type_hints(func)
+     parameters = []
+     for name, param in sig.parameters.items():
+         param_type = type_hints.get(name)
+         if param_type is None:
+             raise TypeError(f"Missing type annotation for parameter '{name}' in function '{func.__name__}'")
+         dtype = _infer_dtype(param_type)
+         enumeration = [EnumerationMember(
+             name=member.name,
+             value=member.value
+         ) for member in param_type] if _is_enum_subclass(param_type) else None
+         value_schema = _get_type_schema(param_type) if dtype in { Dtype.list, Dtype.dict } else None
+         input_param = Parameter(
+             name=name,
+             type=dtype,
+             description=None,
+             optional=param.default != param.empty,
+             range=None,
+             enumeration=enumeration,
+             value_schema=value_schema
+         )
+         parameters.append(input_param)
+     return parameters
+
+ def _get_output_parameters (func) -> list[Parameter]:
+     # Check for return annotation
+     sig = signature(func)
+     if sig.return_annotation is sig.empty:
+         raise TypeError(f"Missing return type annotation for function '{func.__name__}'")
+     # Gather return types
+     return_types = []
+     if _is_tuple_type(sig.return_annotation):
+         return_types = get_args(sig.return_annotation)
+         if not return_types or Ellipsis in return_types:
+             raise TypeError(f"Return type of function '{func.__name__}' must be fully typed with generic type arguments.")
+     else:
+         return_types = [sig.return_annotation]
+     # Create parameters
+     parameters = [_get_output_parameter(f"output{idx}", output_type) for idx, output_type in enumerate(return_types)]
+     return parameters
+
+ def _get_output_parameter (name: str, return_type) -> Parameter:
+     dtype = _infer_dtype(return_type)
+     enumeration = [EnumerationMember(
+         name=member.name,
+         value=member.value
+     ) for member in return_type] if _is_enum_subclass(return_type) else None
+     value_schema = _get_type_schema(return_type) if dtype in { Dtype.list, Dtype.dict } else None
+     parameter = Parameter(
+         name=name,
+         type=dtype,
+         description=None,
+         optional=False,
+         range=None,
+         enumeration=enumeration,
+         value_schema=value_schema
+     )
+     return parameter
+
+ def _infer_dtype (param_type) -> Dtype:
+     param_type = _strip_optional(param_type)
+     origin = get_origin(param_type)
+     args = get_args(param_type)
+     if origin is None:
+         if param_type is np.ndarray:
+             return Dtype.float32
+         elif param_type is Image.Image:
+             return Dtype.image
+         elif param_type in { bytes, bytearray, memoryview, BytesIO }:
+             return Dtype.binary
+         elif param_type is int:
+             return Dtype.int32
+         elif param_type is float:
+             return Dtype.float32
+         elif param_type is bool:
+             return Dtype.bool
+         elif param_type is str:
+             return Dtype.string
+         elif _is_enum_subclass(param_type):
+             return Dtype.string
+         elif param_type is list:
+             return Dtype.list
+         elif param_type is dict:
+             return Dtype.dict
+         elif _is_pydantic_model(param_type):
+             return Dtype.dict
+         else:
+             raise TypeError(f"Unsupported parameter type: {param_type}")
+     else:
+         if origin in { list, List, Sequence }:
+             return Dtype.list
+         elif origin in { dict, Dict, Mapping }:
+             return Dtype.dict
+         elif origin is np.ndarray:
+             if args:
+                 dtype_arg = args[0]
+                 dtype = _numpy_to_fxn_dtype(dtype_arg)
+                 if dtype is not None:
+                     return dtype
+             return Dtype.float32
+         else:
+             raise TypeError(f"Unsupported parameter type: {param_type}")
+
+ def _is_enum_subclass (cls) -> bool:
+     return isinstance(cls, type) and issubclass(cls, Enum)
+
+ def _is_pydantic_model (cls) -> bool:
+     return isinstance(cls, type) and issubclass(cls, BaseModel)
+
+ def _is_tuple_type (param_type) -> bool:
+     origin = get_origin(param_type)
+     return origin is tuple
+
+ def _strip_optional (param_type):
+     if get_origin(param_type) is Union:
+         args = get_args(param_type)
+         non_none_args = [arg for arg in args if arg is not type(None)]
+         if len(non_none_args) == 1:
+             return non_none_args[0]
+     return param_type
+
+ def _numpy_to_fxn_dtype (dtype) -> Dtype | None:
+     dtype_mapping = {
+         np.int8: Dtype.int8,
+         np.int16: Dtype.int16,
+         np.int32: Dtype.int32,
+         np.int64: Dtype.int64,
+         np.uint8: Dtype.uint8,
+         np.uint16: Dtype.uint16,
+         np.uint32: Dtype.uint32,
+         np.uint64: Dtype.uint64,
+         np.float16: Dtype.float16,
+         np.float32: Dtype.float32,
+         np.float64: Dtype.float64,
+         np.bool_: Dtype.bool,
+     }
+     return dtype_mapping.get(dtype, None)
+
+ def _get_type_schema (param_type) -> dict[str, Any] | None:
+     try:
+         return TypeAdapter(param_type).json_schema(mode="serialization")
+     except Exception:
+         return None
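The inference helpers can also be exercised directly; a minimal sketch with a hypothetical annotated function:

    from fxn.compile.signature import get_function_type, infer_function_signature, FunctionType

    def scale (values: list, factor: float = 1.0) -> list:
        return [value * factor for value in values]

    assert get_function_type(scale) == FunctionType.Function
    signature = infer_function_signature(scale)
    for param in signature.inputs:
        print(param.name, param.type, param.optional)   # dtypes come from fxn.types.Dtype; `factor` is optional
    for param in signature.outputs:
        print(param.name, param.type)                   # a single `output0` parameter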