fxn 0.0.46.tar.gz → 0.0.48.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. {fxn-0.0.46 → fxn-0.0.48}/PKG-INFO +1 -1
  2. {fxn-0.0.46 → fxn-0.0.48}/fxn/beta/metadata.py +14 -5
  3. fxn-0.0.48/fxn/lib/linux/arm64/libFunction.so +0 -0
  4. fxn-0.0.48/fxn/lib/linux/x86_64/libFunction.so +0 -0
  5. fxn-0.0.48/fxn/lib/macos/arm64/Function.dylib +0 -0
  6. fxn-0.0.48/fxn/lib/macos/x86_64/Function.dylib +0 -0
  7. fxn-0.0.48/fxn/lib/windows/arm64/Function.dll +0 -0
  8. fxn-0.0.48/fxn/lib/windows/x86_64/Function.dll +0 -0
  9. {fxn-0.0.46 → fxn-0.0.48}/fxn/sandbox.py +14 -4
  10. {fxn-0.0.46 → fxn-0.0.48}/fxn/version.py +1 -1
  11. {fxn-0.0.46 → fxn-0.0.48}/fxn.egg-info/PKG-INFO +1 -1
  12. fxn-0.0.46/fxn/lib/linux/arm64/libFunction.so +0 -0
  13. fxn-0.0.46/fxn/lib/linux/x86_64/libFunction.so +0 -0
  14. fxn-0.0.46/fxn/lib/macos/arm64/Function.dylib +0 -0
  15. fxn-0.0.46/fxn/lib/macos/x86_64/Function.dylib +0 -0
  16. fxn-0.0.46/fxn/lib/windows/arm64/Function.dll +0 -0
  17. fxn-0.0.46/fxn/lib/windows/x86_64/Function.dll +0 -0
  18. {fxn-0.0.46 → fxn-0.0.48}/LICENSE +0 -0
  19. {fxn-0.0.46 → fxn-0.0.48}/README.md +0 -0
  20. {fxn-0.0.46 → fxn-0.0.48}/fxn/__init__.py +0 -0
  21. {fxn-0.0.46 → fxn-0.0.48}/fxn/beta/__init__.py +0 -0
  22. {fxn-0.0.46 → fxn-0.0.48}/fxn/beta/client.py +0 -0
  23. {fxn-0.0.46 → fxn-0.0.48}/fxn/beta/prediction.py +0 -0
  24. {fxn-0.0.46 → fxn-0.0.48}/fxn/beta/remote.py +0 -0
  25. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/__init__.py +0 -0
  26. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/configuration.py +0 -0
  27. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/fxnc.py +0 -0
  28. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/map.py +0 -0
  29. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/prediction.py +0 -0
  30. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/predictor.py +0 -0
  31. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/stream.py +0 -0
  32. {fxn-0.0.46 → fxn-0.0.48}/fxn/c/value.py +0 -0
  33. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/__init__.py +0 -0
  34. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/auth.py +0 -0
  35. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/compile.py +0 -0
  36. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/misc.py +0 -0
  37. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/predictions.py +0 -0
  38. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/predictors.py +0 -0
  39. {fxn-0.0.46 → fxn-0.0.48}/fxn/cli/sources.py +0 -0
  40. {fxn-0.0.46 → fxn-0.0.48}/fxn/client.py +0 -0
  41. {fxn-0.0.46 → fxn-0.0.48}/fxn/compile.py +0 -0
  42. {fxn-0.0.46 → fxn-0.0.48}/fxn/function.py +0 -0
  43. {fxn-0.0.46 → fxn-0.0.48}/fxn/lib/__init__.py +0 -0
  44. {fxn-0.0.46 → fxn-0.0.48}/fxn/logging.py +0 -0
  45. {fxn-0.0.46 → fxn-0.0.48}/fxn/services/__init__.py +0 -0
  46. {fxn-0.0.46 → fxn-0.0.48}/fxn/services/prediction.py +0 -0
  47. {fxn-0.0.46 → fxn-0.0.48}/fxn/services/predictor.py +0 -0
  48. {fxn-0.0.46 → fxn-0.0.48}/fxn/services/user.py +0 -0
  49. {fxn-0.0.46 → fxn-0.0.48}/fxn/types/__init__.py +0 -0
  50. {fxn-0.0.46 → fxn-0.0.48}/fxn/types/dtype.py +0 -0
  51. {fxn-0.0.46 → fxn-0.0.48}/fxn/types/prediction.py +0 -0
  52. {fxn-0.0.46 → fxn-0.0.48}/fxn/types/predictor.py +0 -0
  53. {fxn-0.0.46 → fxn-0.0.48}/fxn/types/user.py +0 -0
  54. {fxn-0.0.46 → fxn-0.0.48}/fxn.egg-info/SOURCES.txt +0 -0
  55. {fxn-0.0.46 → fxn-0.0.48}/fxn.egg-info/dependency_links.txt +0 -0
  56. {fxn-0.0.46 → fxn-0.0.48}/fxn.egg-info/entry_points.txt +0 -0
  57. {fxn-0.0.46 → fxn-0.0.48}/fxn.egg-info/requires.txt +0 -0
  58. {fxn-0.0.46 → fxn-0.0.48}/fxn.egg-info/top_level.txt +0 -0
  59. {fxn-0.0.46 → fxn-0.0.48}/pyproject.toml +0 -0
  60. {fxn-0.0.46 → fxn-0.0.48}/setup.cfg +0 -0
--- fxn-0.0.46/PKG-INFO
+++ fxn-0.0.48/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fxn
-Version: 0.0.46
+Version: 0.0.48
 Summary: Run prediction functions locally in Python. Register at https://fxn.ai.
 Author-email: "NatML Inc." <hi@fxn.ai>
 License: Apache License
--- fxn-0.0.46/fxn/beta/metadata.py
+++ fxn-0.0.48/fxn/beta/metadata.py
@@ -27,7 +27,7 @@ def _validate_ort_inference_session (session: "onnxruntime.InferenceSession") ->
 
 class CoreMLInferenceMetadata (BaseModel):
     """
-    Metadata required to lower PyTorch models for inference on iOS, macOS, and visionOS with CoreML.
+    Metadata required to lower a PyTorch model for inference on iOS, macOS, and visionOS with CoreML.
     """
     kind: Literal["meta.inference.coreml"] = "meta.inference.coreml"
     model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field(description="PyTorch module to apply metadata to.")
@@ -36,7 +36,7 @@ class CoreMLInferenceMetadata (BaseModel):
 
 class ONNXInferenceMetadata (BaseModel):
     """
-    Metadata required to lower PyTorch models for inference.
+    Metadata required to lower a PyTorch model for inference.
     """
     kind: Literal["meta.inference.onnx"] = "meta.inference.onnx"
     model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field(description="PyTorch module to apply metadata to.")
@@ -45,16 +45,25 @@ class ONNXInferenceMetadata (BaseModel):
 
 class ONNXRuntimeInferenceSessionMetadata (BaseModel):
     """
-    Metadata required to lower ONNXRuntime inference sessions for inference.
+    Metadata required to lower an ONNXRuntime `InferenceSession` for inference.
     """
     kind: Literal["meta.inference.onnxruntime"] = "meta.inference.onnxruntime"
     session: Annotated[object, BeforeValidator(_validate_ort_inference_session)] = Field(description="ONNXRuntime inference session to apply metadata to.")
     model_path: Path = Field(description="ONNX model path. The model must exist at this path in the compiler sandbox.")
     model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
 
-class GGUFInferenceMetadata (BaseModel): # INCOMPLETE
+class LiteRTInferenceMetadata (BaseModel):
     """
-    Metadata required to lower GGUF models for LLM inference.
+    Metadata required to lower PyTorch model for inference with LiteRT (fka TensorFlow Lite).
+    """
+    kind: Literal["meta.inference.litert"] = "meta.inference.litert"
+    model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field(description="PyTorch module to apply metadata to.")
+    model_args: list[object] = Field(description="Positional inputs to the model.")
+    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
+
+class LlamaCppInferenceMetadata (BaseModel): # INCOMPLETE
+    """
+    Metadata required to lower a GGUF model for LLM inference.
     """
     kind: Literal["meta.inference.gguf"] = "meta.inference.gguf"
     model_path: Path = Field(description="GGUF model path. The model must exist at this path in the compiler sandbox.")
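
This hunk adds LiteRTInferenceMetadata and renames the incomplete GGUFInferenceMetadata to LlamaCppInferenceMetadata. Below is a minimal usage sketch of the new class, based only on the fields visible above; the toy module, inputs, and import path are assumptions, and constructing the metadata does not by itself lower the model.

# Example (not part of the diff): hypothetical use of LiteRTInferenceMetadata.
import torch
from fxn.beta.metadata import LiteRTInferenceMetadata  # module path taken from this diff

model = torch.nn.Linear(4, 2)        # toy module standing in for a real model
example_input = torch.randn(1, 4)    # sample positional input

metadata = LiteRTInferenceMetadata(
    model=model,                     # PyTorch module to apply metadata to
    model_args=[example_input]       # positional inputs to the model
)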
--- fxn-0.0.46/fxn/sandbox.py
+++ fxn-0.0.48/fxn/sandbox.py
@@ -43,7 +43,8 @@ class UploadDirectoryCommand (UploadableCommand):
 
     def get_files (self) -> list[Path]:
         from_path = Path(self.from_path)
-        assert from_path.is_absolute(), "Cannot upload directory because directory path must be absolute"
+        if not from_path.is_absolute():
+            raise ValueError("Cannot upload directory because directory path must be absolute")
         return [file for file in from_path.rglob("*") if file.is_file()]
 
 class EntrypointCommand (UploadableCommand):
@@ -106,7 +107,11 @@ class Sandbox (BaseModel):
             from_path (str | Path): File path on the local file system.
             to_path (str | Path): Remote path to upload file to.
         """
-        command = UploadFileCommand(from_path=str(from_path), to_path=str(to_path))
+        from_path = from_path if isinstance(from_path, Path) else Path(from_path)
+        command = UploadFileCommand(
+            from_path=str(from_path.resolve()),
+            to_path=str(to_path)
+        )
         return Sandbox(commands=self.commands + [command])
 
     def upload_directory (
@@ -121,7 +126,11 @@ class Sandbox (BaseModel):
             from_path (str | Path): Directory path on the local file system.
             to_path (str | Path): Remote path to upload directory to.
         """
-        command = UploadDirectoryCommand(from_path=str(from_path), to_path=str(to_path))
+        from_path = from_path if isinstance(from_path, Path) else Path(from_path)
+        command = UploadDirectoryCommand(
+            from_path=str(from_path.resolve()),
+            to_path=str(to_path)
+        )
         return Sandbox(commands=self.commands + [command])
 
     def pip_install (self, *packages: str) -> Sandbox:
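
Both upload_file (previous hunk) and upload_directory now coerce from_path to a Path and resolve it before recording the command, so relative local paths are stored as absolute paths. A minimal sketch of the observable effect, relying only on the fields shown in this diff; the paths are hypothetical.

# Example (not part of the diff): relative local paths are now resolved
# before the upload command is recorded.
from fxn.sandbox import Sandbox  # module path taken from this diff

sandbox = Sandbox(commands=[]).upload_file(
    from_path="weights/model.onnx",     # hypothetical relative path
    to_path="/sandbox/model.onnx"
)

# In 0.0.48 the recorded from_path is the resolved absolute path;
# in 0.0.46 it was stored exactly as passed ("weights/model.onnx").
print(sandbox.commands[-1].from_path)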
@@ -180,7 +189,8 @@ class Sandbox (BaseModel):
         return self
 
     def __upload_file (self, path: Path, fxn: Function) -> str:
-        assert path.is_file(), "Cannot upload file at path {path} because it is not a file"
+        if not path.is_file():
+            raise ValueError(f"Cannot upload file at path {path} because it is not a file")
         hash = self.__compute_hash(path)
         try:
             fxn.client.request(method="HEAD", path=f"/resources/{hash}")
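
The assert-to-ValueError changes in this file are more than cosmetic: assert statements are stripped when Python runs with -O, and the old message in __upload_file lacked an f-prefix, so {path} was never interpolated. A standalone sketch of the difference; check_old and check_new are illustrative helpers, not part of the package.

# Example (not part of the diff): why raising ValueError is preferable here.
from pathlib import Path

def check_old (path: Path) -> None:
    # 0.0.46 behaviour: removed entirely under `python -O`; note the missing
    # f-prefix, so "{path}" appeared literally in the message.
    assert path.is_file(), "Cannot upload file at path {path} because it is not a file"

def check_new (path: Path) -> None:
    # 0.0.48 behaviour: always enforced and catchable as ValueError.
    if not path.is_file():
        raise ValueError(f"Cannot upload file at path {path} because it is not a file")

try:
    check_new(Path("does-not-exist.bin"))  # hypothetical path
except ValueError as error:
    print(error)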
--- fxn-0.0.46/fxn/version.py
+++ fxn-0.0.48/fxn/version.py
@@ -3,4 +3,4 @@
 # Copyright © 2025 NatML Inc. All Rights Reserved.
 #
 
-__version__ = "0.0.46"
+__version__ = "0.0.48"
--- fxn-0.0.46/fxn.egg-info/PKG-INFO
+++ fxn-0.0.48/fxn.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fxn
-Version: 0.0.46
+Version: 0.0.48
 Summary: Run prediction functions locally in Python. Register at https://fxn.ai.
 Author-email: "NatML Inc." <hi@fxn.ai>
 License: Apache License