fxn-0.0.41-py3-none-any.whl → fxn-0.0.43-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. fxn/__init__.py +4 -1
  2. fxn/beta/__init__.py +6 -0
  3. fxn/beta/client.py +16 -0
  4. fxn/beta/prediction.py +16 -0
  5. fxn/beta/remote.py +207 -0
  6. fxn/c/__init__.py +1 -1
  7. fxn/c/configuration.py +1 -1
  8. fxn/c/fxnc.py +1 -1
  9. fxn/c/map.py +1 -1
  10. fxn/c/prediction.py +2 -2
  11. fxn/c/predictor.py +2 -3
  12. fxn/c/stream.py +2 -3
  13. fxn/c/value.py +1 -1
  14. fxn/cli/__init__.py +10 -10
  15. fxn/cli/auth.py +1 -1
  16. fxn/cli/compile.py +141 -0
  17. fxn/cli/misc.py +1 -1
  18. fxn/cli/predictions.py +17 -14
  19. fxn/cli/predictors.py +31 -48
  20. fxn/client.py +85 -12
  21. fxn/compile.py +76 -0
  22. fxn/function.py +6 -2
  23. fxn/lib/__init__.py +1 -1
  24. fxn/lib/linux/arm64/libFunction.so +0 -0
  25. fxn/lib/linux/x86_64/libFunction.so +0 -0
  26. fxn/lib/macos/arm64/Function.dylib +0 -0
  27. fxn/lib/macos/x86_64/Function.dylib +0 -0
  28. fxn/lib/windows/arm64/Function.dll +0 -0
  29. fxn/lib/windows/x86_64/Function.dll +0 -0
  30. fxn/logging.py +137 -0
  31. fxn/sandbox.py +206 -0
  32. fxn/services/__init__.py +1 -1
  33. fxn/services/prediction.py +32 -32
  34. fxn/services/predictor.py +6 -3
  35. fxn/services/user.py +6 -3
  36. fxn/types/__init__.py +3 -3
  37. fxn/types/dtype.py +1 -1
  38. fxn/types/prediction.py +12 -2
  39. fxn/types/predictor.py +3 -14
  40. fxn/types/user.py +1 -1
  41. fxn/version.py +2 -2
  42. {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/METADATA +3 -3
  43. fxn-0.0.43.dist-info/RECORD +47 -0
  44. {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/WHEEL +1 -1
  45. fxn/cli/env.py +0 -40
  46. fxn-0.0.41.dist-info/RECORD +0 -40
  47. {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/LICENSE +0 -0
  48. {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/entry_points.txt +0 -0
  49. {fxn-0.0.41.dist-info → fxn-0.0.43.dist-info}/top_level.txt +0 -0
fxn/cli/predictors.py CHANGED
@@ -1,66 +1,49 @@
 #
 # Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
 from rich import print_json
-from rich.progress import Progress, SpinnerColumn, TextColumn
-from typer import Argument, Option
+from typer import Argument
 
 from ..function import Function
-from ..types import PredictorStatus
+from ..logging import CustomProgress, CustomProgressTask
 from .auth import get_access_key
 
 def retrieve_predictor (
     tag: str=Argument(..., help="Predictor tag.")
 ):
-    fxn = Function(get_access_key())
-    predictor = fxn.predictors.retrieve(tag)
-    predictor = predictor.model_dump() if predictor else None
-    print_json(data=predictor)
+    with CustomProgress(transient=True):
+        with CustomProgressTask(loading_text="Retrieving predictor..."):
+            fxn = Function(get_access_key())
+            predictor = fxn.predictors.retrieve(tag)
+            predictor = predictor.model_dump() if predictor else None
+            print_json(data=predictor)
 
-def list_predictors (
-    owner: str=Option(None, help="Predictor owner. This defaults to the current user."),
-    status: PredictorStatus=Option(PredictorStatus.Active, help="Predictor status. This defaults to `ACTIVE`."),
-    offset: int=Option(None, help="Pagination offset."),
-    count: int=Option(None, help="Pagination count.")
-):
-    fxn = Function(get_access_key())
-    predictors = fxn.predictors.list(
-        owner=owner,
-        status=status,
-        offset=offset,
-        count=count
-    )
-    predictors = [predictor.model_dump() for predictor in predictors] if predictors is not None else None
-    print_json(data=predictors)
-
-def search_predictors (
-    query: str=Argument(..., help="Search query."),
-    offset: int=Option(None, help="Pagination offset."),
-    count: int=Option(None, help="Pagination count.")
-):
-    fxn = Function(get_access_key())
-    predictors = fxn.predictors.search(query=query, offset=offset, count=count)
-    predictors = [predictor.model_dump() for predictor in predictors]
-    print_json(data=predictors)
-
-def delete_predictor (
+def archive_predictor (
     tag: str=Argument(..., help="Predictor tag.")
 ):
-    with Progress(
-        SpinnerColumn(spinner_name="dots"),
-        TextColumn("[progress.description]{task.description}"),
-        transient=True
-    ) as progress:
-        progress.add_task(description="Deleting Function...", total=None)
-        fxn = Function(get_access_key())
-        result = fxn.predictors.delete(tag)
-        print_json(data=result)
+    with CustomProgress():
+        with CustomProgressTask(
+            loading_text="Archiving predictor...",
+            done_text=f"Archived predictor: [bold dark_orange]{tag}[/bold dark_orange]"
+        ):
+            fxn = Function(get_access_key())
+            fxn.client.request(
+                method="POST",
+                path=f"/predictors/{tag}/archive"
+            )
 
-def archive_predictor (
+def delete_predictor (
     tag: str=Argument(..., help="Predictor tag.")
 ):
-    fxn = Function(get_access_key())
-    predictor = fxn.predictors.archive(tag)
-    print_json(data=predictor.model_dump())
+    with CustomProgress():
+        with CustomProgressTask(
+            loading_text="Deleting predictor...",
+            done_text=f"Deleted predictor: [bold red]{tag}[/bold red]"
+        ):
+            fxn = Function(get_access_key())
+            fxn.client.request(
+                method="DELETE",
+                path=f"/predictors/{tag}"
+            )
fxn/client.py CHANGED
@@ -1,10 +1,14 @@
 #
 # Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
+from json import loads, JSONDecodeError
+from pydantic import BaseModel, TypeAdapter
 from requests import request
-from typing import Any, Literal
+from typing import AsyncGenerator, Literal, Type, TypeVar
+
+T = TypeVar("T", bound=BaseModel)
 
 class FunctionClient:
 
@@ -15,25 +19,83 @@ class FunctionClient:
     def request (
         self,
         *,
-        method: Literal["GET", "POST", "DELETE"],
+        method: Literal["GET", "POST", "PATCH", "DELETE"],
         path: str,
-        body: dict[str, Any]=None
-    ) -> dict[str, Any] | list[Any]:
+        body: dict[str, object]=None,
+        response_type: Type[T]=None
+    ) -> T:
+        """
+        Make a request to a REST endpoint.
+
+        Parameters:
+            method (str): Request method.
+            path (str): Endpoint path.
+            body (dict): Request JSON body.
+            response_type (Type): Response type.
+        """
         response = request(
             method=method,
             url=f"{self.api_url}{path}",
             json=body,
             headers={ "Authorization": f"Bearer {self.access_key}" }
         )
-        data = None
+        data = response.text
         try:
             data = response.json()
-        except Exception as ex:
-            raise FunctionAPIError(str(ex), response.status_code)
-        if not response.ok:
-            error = data["errors"][0]["message"] if "errors" in data else str(ex)
+        except JSONDecodeError:
+            pass
+        if response.ok:
+            return response_type(**data) if response_type is not None else None
+        else:
+            error = _ErrorResponse(**data).errors[0].message if isinstance(data, dict) else data
             raise FunctionAPIError(error, response.status_code)
-        return data
+
+    async def stream (
+        self,
+        *,
+        method: Literal["GET", "POST", "PATCH", "DELETE"],
+        path: str,
+        body: dict[str, object]=None,
+        response_type: Type[T]=None
+    ) -> AsyncGenerator[T, None]:
+        """
+        Make a request to a REST endpoint and consume the response as a server-sent events stream.
+
+        Parameters:
+            method (str): Request method.
+            path (str): Endpoint path.
+            body (dict): Request JSON body.
+            response_type (Type): Response type.
+        """
+        response = request(
+            method=method,
+            url=f"{self.api_url}{path}",
+            json=body,
+            headers={
+                "Accept": "text/event-stream",
+                "Authorization": f"Bearer {self.access_key}"
+            },
+            stream=True
+        )
+        event = None
+        data: str = ""
+        for line in response.iter_lines(decode_unicode=True):
+            if line is None:
+                break
+            line: str = line.strip()
+            if line:
+                if line.startswith("event:"):
+                    event = line[len("event:"):].strip()
+                elif line.startswith("data:"):
+                    line_data = line[len("data:"):].strip()
+                    data = f"{data}\n{line_data}"
+                continue
+            if event is not None:
+                yield _parse_sse_event(event, data, response_type)
+                event = None
+                data = ""
+        if event or data:
+            yield _parse_sse_event(event, data, response_type)
 
 class FunctionAPIError (Exception):
 
@@ -43,4 +105,15 @@ class FunctionAPIError (Exception):
         self.status_code = status_code
 
     def __str__(self):
-        return f"FunctionAPIError: {self.message} (Status Code: {self.status_code})"
+        return f"FunctionAPIError: {self.message} (Status Code: {self.status_code})"
+
+class _APIError (BaseModel):
+    message: str
+
+class _ErrorResponse (BaseModel):
+    errors: list[_APIError]
+
+def _parse_sse_event (event: str, data: str, type: Type[T]=None) -> T:
+    result = { "event": event, "data": loads(data) }
+    result = TypeAdapter(type).validate_python(result) if type is not None else result
+    return result
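
For illustration only, a minimal sketch of how the reworked client might be called; the response model, endpoint paths, access key, and API URL below are placeholders rather than part of this release:

from pydantic import BaseModel
from fxn.client import FunctionClient, FunctionAPIError

class _Profile (BaseModel):   # hypothetical response model, for illustration only
    username: str

# Constructor arguments as passed in fxn/function.py; values are placeholders.
client = FunctionClient("fxn_access_key", "https://api.example.com/v1")

try:
    # `request` now validates the JSON body into the supplied Pydantic model.
    profile = client.request(method="GET", path="/users", response_type=_Profile)
except FunctionAPIError as error:
    print(error.message, error.status_code)

# `stream` is an async generator over server-sent events.
async def consume ():
    async for event in client.stream(method="POST", path="/predictions", body={ "tag": "@example/predictor" }):
        print(event)   # a { "event": ..., "data": ... } dict when no `response_type` is given
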
fxn/compile.py ADDED
@@ -0,0 +1,76 @@
+#
+# Function
+# Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from collections.abc import Callable
+from functools import wraps
+from inspect import isasyncgenfunction, iscoroutinefunction
+from pathlib import Path
+from pydantic import BaseModel, Field
+
+from .sandbox import Sandbox
+from .types import AccessMode
+
+class PredictorSpec (BaseModel):
+    """
+    Descriptor of a predictor to be compiled.
+    """
+    tag: str = Field(description="Predictor tag.")
+    description: str = Field(description="Predictor description. MUST be less than 100 characters long.", min_length=4, max_length=100)
+    sandbox: Sandbox = Field(description="Sandbox to compile the function.")
+    access: AccessMode = Field(description="Predictor access.")
+    card: str | None = Field(default=None, description="Predictor card (markdown).")
+    media: str | None = Field(default=None, description="Predictor media URL.")
+    license: str | None = Field(default=None, description="Predictor license URL. This is required for public predictors.")
+
+def compile (
+    tag: str,
+    *,
+    description: str,
+    sandbox: Sandbox=None,
+    access: AccessMode=AccessMode.Private,
+    card: str | Path=None,
+    media: Path=None,
+    license: str=None,
+):
+    """
+    Create a predictor by compiling a stateless function.
+
+    Parameters:
+        tag (str): Predictor tag.
+        description (str): Predictor description. MUST be less than 100 characters long.
+        sandbox (Sandbox): Sandbox to compile the function.
+        access (AccessMode): Predictor access.
+        card (str | Path): Predictor card markdown string or path to card.
+        media (Path): Predictor thumbnail image (jpeg or png) path.
+        license (str): Predictor license URL. This is required for public predictors.
+    """
+    def decorator (func: Callable):
+        # Check type
+        if not callable(func):
+            raise TypeError("Cannot compile non-function objects")
+        if isasyncgenfunction(func) or iscoroutinefunction(func):
+            raise TypeError(f"Function '{func.__name__}' must be a regular function or generator")
+        # Gather metadata
+        if isinstance(card, Path):
+            with open(card_content, "r") as f:
+                card_content = f.read()
+        else:
+            card_content = card
+        spec = PredictorSpec(
+            tag=tag,
+            description=description,
+            sandbox=sandbox if sandbox is not None else Sandbox(),
+            access=access,
+            card=card_content,
+            media=None, # INCOMPLETE
+            license=license
+        )
+        # Wrap
+        @wraps(func)
+        def wrapper (*args, **kwargs):
+            return func(*args, **kwargs)
+        wrapper.__predictor_spec = spec
+        return wrapper
+    return decorator
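
As a rough usage sketch of the new decorator (the tag and function below are invented; the imports follow the module paths shown in this diff):

from fxn.compile import compile
from fxn.sandbox import Sandbox
from fxn.types import AccessMode

@compile(
    tag="@example/greet",                      # illustrative tag
    description="Return a friendly greeting.",
    sandbox=Sandbox(),
    access=AccessMode.Private
)
def greet (name: str) -> str:
    return f"Hello, {name}!"

print(greet("Function"))                       # the wrapper still invokes the original function
spec = getattr(greet, "__predictor_spec")      # PredictorSpec attached by the decorator
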
fxn/function.py CHANGED
@@ -1,10 +1,11 @@
 #
 # Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
 from os import environ
 
+from .beta.client import BetaClient
 from .client import FunctionClient
 from .services import PredictionService, PredictorService, UserService
 
@@ -17,6 +18,7 @@ class Function:
         users (UserService): Manage users.
         predictors (PredictorService): Manage predictors.
         predictions (PredictionService): Manage predictions.
+        beta (BetaClient): Beta client for incubating features.
 
     Constructor:
         access_key (str): Function access key.
@@ -26,6 +28,7 @@ class Function:
     users: UserService
    predictors: PredictorService
     predictions: PredictionService
+    beta: BetaClient
 
     def __init__ (self, access_key: str=None, api_url: str=None):
         access_key = access_key or environ.get("FXN_ACCESS_KEY", None)
@@ -33,4 +36,5 @@ class Function:
         self.client = FunctionClient(access_key, api_url)
         self.users = UserService(self.client)
         self.predictors = PredictorService(self.client)
-        self.predictions = PredictionService(self.client)
+        self.predictions = PredictionService(self.client)
+        self.beta = BetaClient(self.client)
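
A small sketch of the new surface, assuming `Function` remains exported from the package root as in earlier releases:

from fxn import Function

fxn = Function()                  # access key falls back to the FXN_ACCESS_KEY environment variable
print(type(fxn.predictions))      # existing PredictionService, unchanged
print(type(fxn.beta))             # new BetaClient exposing incubating features
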
fxn/lib/__init__.py CHANGED
@@ -1,4 +1,4 @@
 #
 # Function
-# Copyright © 2024 NatML Inc. All Rights Reserved.
+# Copyright © 2025 NatML Inc. All Rights Reserved.
 #
fxn/lib/linux/arm64/libFunction.so CHANGED (binary file)
fxn/lib/linux/x86_64/libFunction.so CHANGED (binary file)
fxn/lib/macos/arm64/Function.dylib CHANGED (binary file)
fxn/lib/macos/x86_64/Function.dylib CHANGED (binary file)
fxn/lib/windows/arm64/Function.dll CHANGED (binary file)
fxn/lib/windows/x86_64/Function.dll CHANGED (binary file)
fxn/logging.py ADDED
@@ -0,0 +1,137 @@
+#
+# Function
+# Copyright © 2025 NatML Inc. All Rights Reserved.
+#
+
+from contextvars import ContextVar
+from rich.progress import BarColumn, Progress, ProgressColumn, SpinnerColumn, TextColumn
+from typing import Literal
+
+current_progress = ContextVar("current_progress", default=None)
+progress_task_stack = ContextVar("progress_task_stack", default=[])
+
+class CustomProgress(Progress):
+
+    def __init__ (
+        self,
+        *columns: ProgressColumn,
+        console=None,
+        auto_refresh=True,
+        refresh_per_second = 10,
+        speed_estimate_period=30,
+        transient=False,
+        redirect_stdout=True,
+        redirect_stderr=True,
+        get_time=None,
+        disable=False,
+        expand=False
+    ):
+        default_columns = list(columns) if len(columns) > 0 else [
+            SpinnerColumn(spinner_name="dots", finished_text="[bold green]✔[/bold green]"),
+            TextColumn("[progress.description]{task.description}"),
+        ]
+        super().__init__(
+            *default_columns,
+            console=console,
+            auto_refresh=auto_refresh,
+            refresh_per_second=refresh_per_second,
+            speed_estimate_period=speed_estimate_period,
+            transient=transient,
+            redirect_stdout=redirect_stdout,
+            redirect_stderr=redirect_stderr,
+            get_time=get_time,
+            disable=disable,
+            expand=expand
+        )
+        self.default_columns = default_columns
+
+    def __enter__ (self):
+        self._token = current_progress.set(self)
+        self._stack_token = progress_task_stack.set([])
+        return super().__enter__()
+
+    def __exit__ (self, exc_type, exc_val, exc_tb):
+        current_progress.reset(self._token)
+        progress_task_stack.reset(self._stack_token)
+        return super().__exit__(exc_type, exc_val, exc_tb)
+
+    def get_renderables (self):
+        for task in self.tasks:
+            task_columns = task.fields.get("columns") or list()
+            self.columns = self.default_columns + task_columns
+            yield self.make_tasks_table([task])
+
+class CustomProgressTask:
+
+    def __init__ (
+        self,
+        *,
+        loading_text: str,
+        done_text: str=None,
+        columns: list[ProgressColumn]=None
+    ):
+        self.loading_text = loading_text
+        self.done_text = done_text if done_text is not None else loading_text
+        self.task_id = None
+        self.columns = columns
+
+    def __enter__ (self):
+        progress = current_progress.get()
+        indent_level = len(progress_task_stack.get())
+        indent = self.__get_indent(indent_level)
+        if progress is not None:
+            self.task_id = progress.add_task(
+                f"{indent}{self.loading_text}",
+                total=1,
+                columns=self.columns
+            )
+            current_stack = progress_task_stack.get()
+            progress_task_stack.set(current_stack + [self.task_id])
+        return self
+
+    def __exit__ (self, exc_type, exc_val, exc_tb):
+        progress = current_progress.get()
+        if progress is not None and self.task_id is not None:
+            indent_level = len(progress_task_stack.get()) - 1
+            indent = self.__get_indent(indent_level)
+            if exc_type is None:
+                total = progress._tasks[self.task_id].total
+                progress.update(
+                    self.task_id,
+                    description=f"{indent}{self.done_text}",
+                    completed=total
+                )
+            else:
+                progress.update(
+                    self.task_id,
+                    description=f"{indent}[bright_red]✘ {self.loading_text}[/bright_red]",
+                )
+            current_stack = progress_task_stack.get()
+            if current_stack:
+                progress_task_stack.set(current_stack[:-1])
+            self.task_id = None
+        return False
+
+    def update (self, **kwargs):
+        progress = current_progress.get()
+        if progress is None or self.task_id is None:
+            return
+        if "description" in kwargs:
+            stack = progress_task_stack.get()
+            try:
+                index = stack.index(self.task_id)
+            except ValueError:
+                index = len(stack) - 1
+            indent = self.__get_indent(index)
+            description = kwargs["description"]
+            kwargs["description"] = f"{indent}{description}"
+        progress.update(self.task_id, **kwargs)
+
+    def finish (self, message: str):
+        self.done_text = message
+
+    def __get_indent (self, level: int) -> str:
+        if level == 0:
+            return ""
+        indicator = "└── "
+        return " " * len(indicator) * (level - 1) + indicator
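
The CLI changes above lean on these helpers; a minimal sketch of how they nest (the task text is invented):

from fxn.logging import CustomProgress, CustomProgressTask

with CustomProgress():
    with CustomProgressTask(
        loading_text="Compiling predictor...",
        done_text="Compiled predictor"
    ):
        with CustomProgressTask(loading_text="Resolving sandbox..."):
            pass   # nested tasks are indented with "└── "; an exception marks the task with a red ✘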