opengradient 0.4.7__tar.gz → 0.4.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {opengradient-0.4.7/src/opengradient.egg-info → opengradient-0.4.8}/PKG-INFO +1 -1
- {opengradient-0.4.7 → opengradient-0.4.8}/pyproject.toml +1 -1
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/__init__.py +7 -5
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/client.py +36 -57
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/types.py +12 -0
- {opengradient-0.4.7 → opengradient-0.4.8/src/opengradient.egg-info}/PKG-INFO +1 -1
- {opengradient-0.4.7 → opengradient-0.4.8}/LICENSE +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/README.md +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/setup.cfg +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/abi/PriceHistoryInference.abi +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/abi/WorkflowScheduler.abi +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/abi/inference.abi +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/account.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/alphasense/__init__.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/alphasense/read_workflow_tool.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/alphasense/run_model_tool.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/alphasense/types.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/bin/PriceHistoryInference.bin +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/cli.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/defaults.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/exceptions.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/llm/__init__.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/llm/og_langchain.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/llm/og_openai.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/proto/__init__.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/proto/infer.proto +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/proto/infer_pb2.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/proto/infer_pb2_grpc.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient/utils.py +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient.egg-info/SOURCES.txt +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient.egg-info/dependency_links.txt +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient.egg-info/entry_points.txt +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient.egg-info/requires.txt +0 -0
- {opengradient-0.4.7 → opengradient-0.4.8}/src/opengradient.egg-info/top_level.txt +0 -0
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "opengradient"
|
|
7
|
-
version = "0.4.7"
|
|
7
|
+
version = "0.4.8"
|
|
8
8
|
description = "Python SDK for OpenGradient decentralized model management & inference services"
|
|
9
9
|
authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
|
|
10
10
|
license = {file = "LICENSE"}
|
|
@@ -17,6 +17,8 @@ from .types import (
|
|
|
17
17
|
LlmInferenceMode,
|
|
18
18
|
TextGenerationOutput,
|
|
19
19
|
ModelOutput,
|
|
20
|
+
ModelRepository,
|
|
21
|
+
FileUploadResult,
|
|
20
22
|
)
|
|
21
23
|
|
|
22
24
|
from . import llm, alphasense
|
|
@@ -61,7 +63,7 @@ def init(email: str, password: str, private_key: str, rpc_url=DEFAULT_RPC_URL, c
|
|
|
61
63
|
return _client
|
|
62
64
|
|
|
63
65
|
|
|
64
|
-
def upload(model_path, model_name, version):
|
|
66
|
+
def upload(model_path, model_name, version) -> FileUploadResult:
|
|
65
67
|
"""Upload a model file to OpenGradient.
|
|
66
68
|
|
|
67
69
|
Args:
|
|
@@ -70,7 +72,7 @@ def upload(model_path, model_name, version):
|
|
|
70
72
|
version: Version string for this model upload
|
|
71
73
|
|
|
72
74
|
Returns:
|
|
73
|
-
|
|
75
|
+
FileUploadResult: Upload response containing file metadata
|
|
74
76
|
|
|
75
77
|
Raises:
|
|
76
78
|
RuntimeError: If SDK is not initialized
|
|
@@ -80,7 +82,7 @@ def upload(model_path, model_name, version):
|
|
|
80
82
|
return _client.upload(model_path, model_name, version)
|
|
81
83
|
|
|
82
84
|
|
|
83
|
-
def create_model(model_name: str, model_desc: str, model_path: Optional[str] = None):
|
|
85
|
+
def create_model(model_name: str, model_desc: str, model_path: Optional[str] = None) -> ModelRepository:
|
|
84
86
|
"""Create a new model repository.
|
|
85
87
|
|
|
86
88
|
Args:
|
|
@@ -89,7 +91,7 @@ def create_model(model_name: str, model_desc: str, model_path: Optional[str] = N
|
|
|
89
91
|
model_path: Optional path to model file to upload immediately
|
|
90
92
|
|
|
91
93
|
Returns:
|
|
92
|
-
|
|
94
|
+
ModelRepository: Creation response with model metadata and optional upload results
|
|
93
95
|
|
|
94
96
|
Raises:
|
|
95
97
|
RuntimeError: If SDK is not initialized
|
|
@@ -319,7 +321,7 @@ def run_workflow(contract_address: str) -> ModelOutput:
|
|
|
319
321
|
return _client.run_workflow(contract_address)
|
|
320
322
|
|
|
321
323
|
|
|
322
|
-
def read_workflow_history(contract_address: str, num_results: int) -> List[
|
|
324
|
+
def read_workflow_history(contract_address: str, num_results: int) -> List[ModelOutput]:
|
|
323
325
|
"""
|
|
324
326
|
Gets historical inference results from a workflow contract.
|
|
325
327
|
|
|
@@ -13,7 +13,6 @@ from web3 import Web3
|
|
|
13
13
|
from web3.exceptions import ContractLogicError
|
|
14
14
|
from web3.logs import DISCARD
|
|
15
15
|
|
|
16
|
-
from . import utils
|
|
17
16
|
from .exceptions import OpenGradientError
|
|
18
17
|
from .proto import infer_pb2, infer_pb2_grpc
|
|
19
18
|
from .types import (
|
|
@@ -26,8 +25,11 @@ from .types import (
|
|
|
26
25
|
TextGenerationOutput,
|
|
27
26
|
SchedulerParams,
|
|
28
27
|
InferenceResult,
|
|
28
|
+
ModelRepository,
|
|
29
|
+
FileUploadResult,
|
|
29
30
|
)
|
|
30
31
|
from .defaults import DEFAULT_IMAGE_GEN_HOST, DEFAULT_IMAGE_GEN_PORT, DEFAULT_SCHEDULER_ADDRESS
|
|
32
|
+
from .utils import convert_array_to_model_output, convert_to_model_input, convert_to_model_output
|
|
31
33
|
|
|
32
34
|
_FIREBASE_CONFIG = {
|
|
33
35
|
"apiKey": "AIzaSyDUVckVtfl-hiteBzPopy1pDD8Uvfncs7w",
|
|
@@ -53,7 +55,7 @@ class Client:
|
|
|
53
55
|
_blockchain: Web3
|
|
54
56
|
_wallet_account: LocalAccount
|
|
55
57
|
|
|
56
|
-
_hub_user: Dict
|
|
58
|
+
_hub_user: Optional[Dict]
|
|
57
59
|
_inference_abi: Dict
|
|
58
60
|
|
|
59
61
|
def __init__(self, private_key: str, rpc_url: str, contract_address: str, email: Optional[str], password: Optional[str]):
|
|
@@ -88,7 +90,7 @@ class Client:
|
|
|
88
90
|
logging.error(f"Authentication failed: {str(e)}")
|
|
89
91
|
raise
|
|
90
92
|
|
|
91
|
-
def create_model(self, model_name: str, model_desc: str, version: str = "1.00") ->
|
|
93
|
+
def create_model(self, model_name: str, model_desc: str, version: str = "1.00") -> ModelRepository:
|
|
92
94
|
"""
|
|
93
95
|
Create a new model with the given model_name and model_desc, and a specified version.
|
|
94
96
|
|
|
@@ -111,39 +113,22 @@ class Client:
|
|
|
111
113
|
payload = {"name": model_name, "description": model_desc}
|
|
112
114
|
|
|
113
115
|
try:
|
|
114
|
-
logging.debug(f"Create Model URL: {url}")
|
|
115
|
-
logging.debug(f"Headers: {headers}")
|
|
116
|
-
logging.debug(f"Payload: {payload}")
|
|
117
|
-
|
|
118
116
|
response = requests.post(url, json=payload, headers=headers)
|
|
119
117
|
response.raise_for_status()
|
|
118
|
+
except requests.HTTPError as e:
|
|
119
|
+
error_details = f"HTTP {e.response.status_code}: {e.response.text}"
|
|
120
|
+
raise OpenGradientError(f"Model creation failed: {error_details}") from e
|
|
120
121
|
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
logging.info(f"Model creation successful. Model name: {model_name}")
|
|
122
|
+
json_response = response.json()
|
|
123
|
+
model_name = json_response.get("name")
|
|
124
|
+
if not model_name:
|
|
125
|
+
raise Exception(f"Model creation response missing 'name'. Full response: {json_response}")
|
|
126
126
|
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
version_response = self.create_version(model_name, version)
|
|
130
|
-
logging.info(f"Version creation successful. Version string: {version_response['versionString']}")
|
|
131
|
-
except Exception as ve:
|
|
132
|
-
logging.error(f"Version creation failed, but model was created. Error: {str(ve)}")
|
|
133
|
-
return {"name": model_name, "versionString": None, "version_error": str(ve)}
|
|
127
|
+
# Create the specified version for the newly created model
|
|
128
|
+
version_response = self.create_version(model_name, version)
|
|
134
129
|
|
|
135
|
-
|
|
130
|
+
return ModelRepository(model_name, version_response["versionString"])
|
|
136
131
|
|
|
137
|
-
except requests.RequestException as e:
|
|
138
|
-
logging.error(f"Model creation failed: {str(e)}")
|
|
139
|
-
if hasattr(e, "response") and e.response is not None:
|
|
140
|
-
logging.error(f"Response status code: {e.response.status_code}")
|
|
141
|
-
logging.error(f"Response headers: {e.response.headers}")
|
|
142
|
-
logging.error(f"Response content: {e.response.text}")
|
|
143
|
-
raise Exception(f"Model creation failed: {str(e)}")
|
|
144
|
-
except Exception as e:
|
|
145
|
-
logging.error(f"Unexpected error during model creation: {str(e)}")
|
|
146
|
-
raise
|
|
147
132
|
|
|
148
133
|
def create_version(self, model_name: str, notes: str = "", is_major: bool = False) -> dict:
|
|
149
134
|
"""
|
|
@@ -204,7 +189,7 @@ class Client:
|
|
|
204
189
|
logging.error(f"Unexpected error during version creation: {str(e)}")
|
|
205
190
|
raise
|
|
206
191
|
|
|
207
|
-
def upload(self, model_path: str, model_name: str, version: str) ->
|
|
192
|
+
def upload(self, model_path: str, model_name: str, version: str) -> FileUploadResult:
|
|
208
193
|
"""
|
|
209
194
|
Upload a model file to the server.
|
|
210
195
|
|
|
@@ -259,12 +244,9 @@ class Client:
|
|
|
259
244
|
if response.status_code == 201:
|
|
260
245
|
if response.content and response.content != b"null":
|
|
261
246
|
json_response = response.json()
|
|
262
|
-
|
|
263
|
-
logging.info(f"Upload successful. CID: {json_response.get('ipfsCid', 'N/A')}")
|
|
264
|
-
result = {"model_cid": json_response.get("ipfsCid"), "size": json_response.get("size")}
|
|
247
|
+
return FileUploadResult(json_response.get("ipfsCid"), json_response.get("size"))
|
|
265
248
|
else:
|
|
266
|
-
|
|
267
|
-
result = {"model_cid": None, "size": None}
|
|
249
|
+
raise RuntimeError("Empty or null response content received. Assuming upload was successful.")
|
|
268
250
|
elif response.status_code == 500:
|
|
269
251
|
error_message = "Internal server error occurred. Please try again later or contact support."
|
|
270
252
|
logging.error(error_message)
|
|
@@ -274,8 +256,6 @@ class Client:
|
|
|
274
256
|
logging.error(f"Upload failed with status code {response.status_code}: {error_message}")
|
|
275
257
|
raise OpenGradientError(f"Upload failed: {error_message}", status_code=response.status_code)
|
|
276
258
|
|
|
277
|
-
return result
|
|
278
|
-
|
|
279
259
|
except requests.RequestException as e:
|
|
280
260
|
logging.error(f"Request exception during upload: {str(e)}")
|
|
281
261
|
if hasattr(e, "response") and e.response is not None:
|
|
@@ -313,7 +293,7 @@ class Client:
|
|
|
313
293
|
contract = self._blockchain.eth.contract(address=self._inference_hub_contract_address, abi=self._inference_abi)
|
|
314
294
|
|
|
315
295
|
inference_mode_uint8 = inference_mode.value
|
|
316
|
-
converted_model_input =
|
|
296
|
+
converted_model_input = convert_to_model_input(model_input)
|
|
317
297
|
|
|
318
298
|
run_function = contract.functions.run(model_cid, inference_mode_uint8, converted_model_input)
|
|
319
299
|
|
|
@@ -342,7 +322,7 @@ class Client:
|
|
|
342
322
|
raise OpenGradientError("InferenceResult event not found in transaction logs")
|
|
343
323
|
|
|
344
324
|
# TODO: This should return a ModelOutput class object
|
|
345
|
-
model_output =
|
|
325
|
+
model_output = convert_to_model_output(parsed_logs[0]["args"])
|
|
346
326
|
|
|
347
327
|
return InferenceResult(tx_hash.hex(), model_output)
|
|
348
328
|
|
|
@@ -751,7 +731,7 @@ class Client:
|
|
|
751
731
|
# if channel:
|
|
752
732
|
# channel.close()
|
|
753
733
|
|
|
754
|
-
def _get_abi(self, abi_name) ->
|
|
734
|
+
def _get_abi(self, abi_name) -> str:
|
|
755
735
|
"""
|
|
756
736
|
Returns the ABI for the requested contract.
|
|
757
737
|
"""
|
|
@@ -759,7 +739,7 @@ class Client:
|
|
|
759
739
|
with open(abi_path, "r") as f:
|
|
760
740
|
return json.load(f)
|
|
761
741
|
|
|
762
|
-
def _get_bin(self, bin_name) ->
|
|
742
|
+
def _get_bin(self, bin_name) -> str:
|
|
763
743
|
"""
|
|
764
744
|
Returns the bin for the requested contract.
|
|
765
745
|
"""
|
|
@@ -781,17 +761,20 @@ class Client:
|
|
|
781
761
|
"""
|
|
782
762
|
Deploy a new workflow contract with the specified parameters.
|
|
783
763
|
|
|
784
|
-
This function deploys a new workflow contract
|
|
785
|
-
|
|
786
|
-
the
|
|
764
|
+
This function deploys a new workflow contract on OpenGradient that connects
|
|
765
|
+
an AI model with its required input data. When executed, the workflow will fetch
|
|
766
|
+
the specified model, evaluate the input query to get data, and perform inference.
|
|
767
|
+
|
|
768
|
+
The workflow can be set to execute manually or automatically via a scheduler.
|
|
787
769
|
|
|
788
770
|
Args:
|
|
789
|
-
model_cid (str):
|
|
790
|
-
input_query (HistoricalInputQuery):
|
|
771
|
+
model_cid (str): CID of the model to be executed from the Model Hub
|
|
772
|
+
input_query (HistoricalInputQuery): Input definition for the model inference,
|
|
773
|
+
will be evaluated at runtime for each inference
|
|
791
774
|
input_tensor_name (str): Name of the input tensor expected by the model
|
|
792
775
|
scheduler_params (Optional[SchedulerParams]): Scheduler configuration for automated execution:
|
|
793
776
|
- frequency: Execution frequency in seconds
|
|
794
|
-
- duration_hours: How long
|
|
777
|
+
- duration_hours: How long the schedule should live for
|
|
795
778
|
|
|
796
779
|
Returns:
|
|
797
780
|
str: Deployed contract address. If scheduler_params was provided, the workflow
|
|
@@ -910,7 +893,7 @@ class Client:
|
|
|
910
893
|
# Get the result
|
|
911
894
|
result = contract.functions.getInferenceResult().call()
|
|
912
895
|
|
|
913
|
-
return
|
|
896
|
+
return convert_array_to_model_output(result)
|
|
914
897
|
|
|
915
898
|
def run_workflow(self, contract_address: str) -> ModelOutput:
|
|
916
899
|
"""
|
|
@@ -955,9 +938,9 @@ class Client:
|
|
|
955
938
|
# Get the inference result from the contract
|
|
956
939
|
result = contract.functions.getInferenceResult().call()
|
|
957
940
|
|
|
958
|
-
return
|
|
941
|
+
return convert_array_to_model_output(result)
|
|
959
942
|
|
|
960
|
-
def read_workflow_history(self, contract_address: str, num_results: int) -> List[
|
|
943
|
+
def read_workflow_history(self, contract_address: str, num_results: int) -> List[ModelOutput]:
|
|
961
944
|
"""
|
|
962
945
|
Gets historical inference results from a workflow contract.
|
|
963
946
|
|
|
@@ -969,18 +952,14 @@ class Client:
|
|
|
969
952
|
num_results (int): Number of historical results to retrieve
|
|
970
953
|
|
|
971
954
|
Returns:
|
|
972
|
-
List[
|
|
973
|
-
- prediction values
|
|
974
|
-
- timestamps
|
|
975
|
-
- any additional metadata stored with the result
|
|
976
|
-
|
|
955
|
+
List[ModelOutput]: List of historical inference results
|
|
977
956
|
"""
|
|
978
957
|
contract = self._blockchain.eth.contract(
|
|
979
958
|
address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
|
|
980
959
|
)
|
|
981
960
|
|
|
982
961
|
results = contract.functions.getLastInferenceResults(num_results).call()
|
|
983
|
-
return [
|
|
962
|
+
return [convert_array_to_model_output(result) for result in results]
|
|
984
963
|
|
|
985
964
|
|
|
986
965
|
def run_with_retry(txn_function, max_retries=DEFAULT_MAX_RETRY, retry_delay=DEFAULT_RETRY_DELAY_SEC):
|
|
@@ -184,3 +184,15 @@ class SchedulerParams:
|
|
|
184
184
|
if data is None:
|
|
185
185
|
return None
|
|
186
186
|
return SchedulerParams(frequency=data.get("frequency", 600), duration_hours=data.get("duration_hours", 2))
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
@dataclass
|
|
190
|
+
class ModelRepository:
|
|
191
|
+
name: str
|
|
192
|
+
initialVersion: str
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
@dataclass
|
|
196
|
+
class FileUploadResult:
|
|
197
|
+
modelCid: str
|
|
198
|
+
size: int
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|