fxn 0.0.51__tar.gz → 0.0.52__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. {fxn-0.0.51 → fxn-0.0.52}/PKG-INFO +1 -1
  2. {fxn-0.0.51 → fxn-0.0.52}/fxn/beta/__init__.py +2 -1
  3. {fxn-0.0.51 → fxn-0.0.52}/fxn/beta/metadata.py +19 -0
  4. {fxn-0.0.51 → fxn-0.0.52}/fxn/compile.py +20 -6
  5. {fxn-0.0.51 → fxn-0.0.52}/fxn/version.py +1 -1
  6. {fxn-0.0.51 → fxn-0.0.52}/fxn.egg-info/PKG-INFO +1 -1
  7. {fxn-0.0.51 → fxn-0.0.52}/LICENSE +0 -0
  8. {fxn-0.0.51 → fxn-0.0.52}/README.md +0 -0
  9. {fxn-0.0.51 → fxn-0.0.52}/fxn/__init__.py +0 -0
  10. {fxn-0.0.51 → fxn-0.0.52}/fxn/beta/client.py +0 -0
  11. {fxn-0.0.51 → fxn-0.0.52}/fxn/beta/prediction.py +0 -0
  12. {fxn-0.0.51 → fxn-0.0.52}/fxn/beta/remote.py +0 -0
  13. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/__init__.py +0 -0
  14. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/configuration.py +0 -0
  15. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/fxnc.py +0 -0
  16. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/map.py +0 -0
  17. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/prediction.py +0 -0
  18. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/predictor.py +0 -0
  19. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/stream.py +0 -0
  20. {fxn-0.0.51 → fxn-0.0.52}/fxn/c/value.py +0 -0
  21. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/__init__.py +0 -0
  22. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/auth.py +0 -0
  23. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/compile.py +0 -0
  24. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/misc.py +0 -0
  25. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/predictions.py +0 -0
  26. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/predictors.py +0 -0
  27. {fxn-0.0.51 → fxn-0.0.52}/fxn/cli/sources.py +0 -0
  28. {fxn-0.0.51 → fxn-0.0.52}/fxn/client.py +0 -0
  29. {fxn-0.0.51 → fxn-0.0.52}/fxn/function.py +0 -0
  30. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/__init__.py +0 -0
  31. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/linux/arm64/libFunction.so +0 -0
  32. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/linux/x86_64/libFunction.so +0 -0
  33. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/macos/arm64/Function.dylib +0 -0
  34. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/macos/x86_64/Function.dylib +0 -0
  35. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/windows/arm64/Function.dll +0 -0
  36. {fxn-0.0.51 → fxn-0.0.52}/fxn/lib/windows/x86_64/Function.dll +0 -0
  37. {fxn-0.0.51 → fxn-0.0.52}/fxn/logging.py +0 -0
  38. {fxn-0.0.51 → fxn-0.0.52}/fxn/sandbox.py +0 -0
  39. {fxn-0.0.51 → fxn-0.0.52}/fxn/services/__init__.py +0 -0
  40. {fxn-0.0.51 → fxn-0.0.52}/fxn/services/prediction.py +0 -0
  41. {fxn-0.0.51 → fxn-0.0.52}/fxn/services/predictor.py +0 -0
  42. {fxn-0.0.51 → fxn-0.0.52}/fxn/services/user.py +0 -0
  43. {fxn-0.0.51 → fxn-0.0.52}/fxn/types/__init__.py +0 -0
  44. {fxn-0.0.51 → fxn-0.0.52}/fxn/types/dtype.py +0 -0
  45. {fxn-0.0.51 → fxn-0.0.52}/fxn/types/prediction.py +0 -0
  46. {fxn-0.0.51 → fxn-0.0.52}/fxn/types/predictor.py +0 -0
  47. {fxn-0.0.51 → fxn-0.0.52}/fxn/types/user.py +0 -0
  48. {fxn-0.0.51 → fxn-0.0.52}/fxn.egg-info/SOURCES.txt +0 -0
  49. {fxn-0.0.51 → fxn-0.0.52}/fxn.egg-info/dependency_links.txt +0 -0
  50. {fxn-0.0.51 → fxn-0.0.52}/fxn.egg-info/entry_points.txt +0 -0
  51. {fxn-0.0.51 → fxn-0.0.52}/fxn.egg-info/requires.txt +0 -0
  52. {fxn-0.0.51 → fxn-0.0.52}/fxn.egg-info/top_level.txt +0 -0
  53. {fxn-0.0.51 → fxn-0.0.52}/pyproject.toml +0 -0
  54. {fxn-0.0.51 → fxn-0.0.52}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: fxn
3
- Version: 0.0.51
3
+ Version: 0.0.52
4
4
  Summary: Run prediction functions locally in Python. Register at https://fxn.ai.
5
5
  Author-email: "NatML Inc." <hi@fxn.ai>
6
6
  License: Apache License
@@ -5,6 +5,7 @@
5
5
 
6
6
  from .metadata import (
7
7
  CoreMLInferenceMetadata, LiteRTInferenceMetadata, LlamaCppInferenceMetadata,
8
- ONNXInferenceMetadata, ONNXRuntimeInferenceSessionMetadata
8
+ ONNXInferenceMetadata, ONNXRuntimeInferenceSessionMetadata, OpenVINOInferenceMetadata,
9
+ QnnInferenceMetadata
9
10
  )
10
11
  from .remote import RemoteAcceleration
@@ -61,6 +61,25 @@ class LiteRTInferenceMetadata (BaseModel):
61
61
  model_args: list[object] = Field(description="Positional inputs to the model.", exclude=True)
62
62
  model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
63
63
 
64
+ class OpenVINOInferenceMetadata (BaseModel):
65
+ """
66
+ Metadata required to lower a PyTorch model for inference with Intel OpenVINO.
67
+ """
68
+ kind: Literal["meta.inference.openvino"] = "meta.inference.openvino"
69
+ model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field(description="PyTorch module to apply metadata to.", exclude=True)
70
+ model_args: list[object] = Field(description="Positional inputs to the model.", exclude=True)
71
+ model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
72
+
73
+ class QnnInferenceMetadata (BaseModel):
74
+ """
75
+ Metadata required to lower a PyTorch model for inference on Qualcomm accelerators with QNN SDK.
76
+ """
77
+ kind: Literal["meta.inference.qnn"] = "meta.inference.qnn"
78
+ model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field(description="PyTorch module to apply metadata to.", exclude=True)
79
+ model_args: list[object] = Field(description="Positional inputs to the model.", exclude=True)
80
+ backend: Literal["cpu", "gpu"] = Field(default="cpu", description="QNN backend to execute the model.", exclude=True) # CHECK # Add `htp`
81
+ model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
82
+
64
83
  class LlamaCppInferenceMetadata (BaseModel): # INCOMPLETE
65
84
  """
66
85
  Metadata required to lower a GGUF model for LLM inference.
@@ -9,25 +9,39 @@ from inspect import isasyncgenfunction, iscoroutinefunction
9
9
  from pathlib import Path
10
10
  from pydantic import BaseModel, ConfigDict, Field
11
11
  from types import ModuleType
12
- from typing import Literal
12
+ from typing import Any, Callable, Literal, ParamSpec, TypeVar, cast
13
13
 
14
14
  from .beta import (
15
15
  CoreMLInferenceMetadata, LiteRTInferenceMetadata, LlamaCppInferenceMetadata,
16
- ONNXInferenceMetadata, ONNXRuntimeInferenceSessionMetadata
16
+ ONNXInferenceMetadata, ONNXRuntimeInferenceSessionMetadata, OpenVINOInferenceMetadata,
17
+ QnnInferenceMetadata
17
18
  )
18
19
  from .sandbox import Sandbox
19
20
  from .types import AccessMode
20
21
 
21
- CompileTarget = Literal["android", "ios", "linux", "macos", "visionos", "wasm", "windows"]
22
+ CompileTarget = Literal[
23
+ "android",
24
+ "ios",
25
+ "linux",
26
+ "macos",
27
+ "visionos",
28
+ "wasm",
29
+ "windows"
30
+ ]
22
31
 
23
32
  CompileMetadata = (
24
33
  CoreMLInferenceMetadata |
25
34
  LiteRTInferenceMetadata |
26
35
  LlamaCppInferenceMetadata |
27
36
  ONNXInferenceMetadata |
28
- ONNXRuntimeInferenceSessionMetadata
37
+ ONNXRuntimeInferenceSessionMetadata |
38
+ OpenVINOInferenceMetadata |
39
+ QnnInferenceMetadata
29
40
  )
30
41
 
42
+ P = ParamSpec("P")
43
+ R = TypeVar("R")
44
+
31
45
  class PredictorSpec (BaseModel):
32
46
  """
33
47
  Descriptor of a predictor to be compiled.
@@ -56,7 +70,7 @@ def compile (
56
70
  media: Path=None,
57
71
  license: str=None,
58
72
  **kwargs
59
- ):
73
+ ) -> Callable[[Callable[P, R]], Callable[P, R]]:
60
74
  """
61
75
  Create a predictor by compiling a stateless function.
62
76
 
@@ -97,5 +111,5 @@ def compile (
97
111
  def wrapper (*args, **kwargs):
98
112
  return func(*args, **kwargs)
99
113
  wrapper.__predictor_spec = spec
100
- return wrapper
114
+ return cast(Callable[P, R], wrapper)
101
115
  return decorator
@@ -3,4 +3,4 @@
3
3
  # Copyright © 2025 NatML Inc. All Rights Reserved.
4
4
  #
5
5
 
6
- __version__ = "0.0.51"
6
+ __version__ = "0.0.52"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: fxn
3
- Version: 0.0.51
3
+ Version: 0.0.52
4
4
  Summary: Run prediction functions locally in Python. Register at https://fxn.ai.
5
5
  Author-email: "NatML Inc." <hi@fxn.ai>
6
6
  License: Apache License
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes