opengradient 0.4.7__py3-none-any.whl → 0.4.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
opengradient/__init__.py CHANGED
@@ -17,6 +17,8 @@ from .types import (
17
17
  LlmInferenceMode,
18
18
  TextGenerationOutput,
19
19
  ModelOutput,
20
+ ModelRepository,
21
+ FileUploadResult,
20
22
  )
21
23
 
22
24
  from . import llm, alphasense
@@ -61,7 +63,7 @@ def init(email: str, password: str, private_key: str, rpc_url=DEFAULT_RPC_URL, c
61
63
  return _client
62
64
 
63
65
 
64
- def upload(model_path, model_name, version):
66
+ def upload(model_path, model_name, version) -> FileUploadResult:
65
67
  """Upload a model file to OpenGradient.
66
68
 
67
69
  Args:
@@ -70,7 +72,7 @@ def upload(model_path, model_name, version):
70
72
  version: Version string for this model upload
71
73
 
72
74
  Returns:
73
- dict: Upload response containing file metadata
75
+ FileUploadResult: Upload response containing file metadata
74
76
 
75
77
  Raises:
76
78
  RuntimeError: If SDK is not initialized
@@ -80,7 +82,7 @@ def upload(model_path, model_name, version):
80
82
  return _client.upload(model_path, model_name, version)
81
83
 
82
84
 
83
- def create_model(model_name: str, model_desc: str, model_path: Optional[str] = None):
85
+ def create_model(model_name: str, model_desc: str, model_path: Optional[str] = None) -> ModelRepository:
84
86
  """Create a new model repository.
85
87
 
86
88
  Args:
@@ -89,7 +91,7 @@ def create_model(model_name: str, model_desc: str, model_path: Optional[str] = N
89
91
  model_path: Optional path to model file to upload immediately
90
92
 
91
93
  Returns:
92
- dict: Creation response with model metadata and optional upload results
94
+ ModelRepository: Creation response with model metadata and optional upload results
93
95
 
94
96
  Raises:
95
97
  RuntimeError: If SDK is not initialized
@@ -319,7 +321,7 @@ def run_workflow(contract_address: str) -> ModelOutput:
319
321
  return _client.run_workflow(contract_address)
320
322
 
321
323
 
322
- def read_workflow_history(contract_address: str, num_results: int) -> List[Dict]:
324
+ def read_workflow_history(contract_address: str, num_results: int) -> List[ModelOutput]:
323
325
  """
324
326
  Gets historical inference results from a workflow contract.
325
327
 
opengradient/client.py CHANGED
@@ -13,7 +13,6 @@ from web3 import Web3
13
13
  from web3.exceptions import ContractLogicError
14
14
  from web3.logs import DISCARD
15
15
 
16
- from . import utils
17
16
  from .exceptions import OpenGradientError
18
17
  from .proto import infer_pb2, infer_pb2_grpc
19
18
  from .types import (
@@ -26,8 +25,11 @@ from .types import (
26
25
  TextGenerationOutput,
27
26
  SchedulerParams,
28
27
  InferenceResult,
28
+ ModelRepository,
29
+ FileUploadResult,
29
30
  )
30
31
  from .defaults import DEFAULT_IMAGE_GEN_HOST, DEFAULT_IMAGE_GEN_PORT, DEFAULT_SCHEDULER_ADDRESS
32
+ from .utils import convert_array_to_model_output, convert_to_model_input, convert_to_model_output
31
33
 
32
34
  _FIREBASE_CONFIG = {
33
35
  "apiKey": "AIzaSyDUVckVtfl-hiteBzPopy1pDD8Uvfncs7w",
@@ -53,7 +55,7 @@ class Client:
53
55
  _blockchain: Web3
54
56
  _wallet_account: LocalAccount
55
57
 
56
- _hub_user: Dict
58
+ _hub_user: Optional[Dict]
57
59
  _inference_abi: Dict
58
60
 
59
61
  def __init__(self, private_key: str, rpc_url: str, contract_address: str, email: Optional[str], password: Optional[str]):
@@ -88,7 +90,7 @@ class Client:
88
90
  logging.error(f"Authentication failed: {str(e)}")
89
91
  raise
90
92
 
91
- def create_model(self, model_name: str, model_desc: str, version: str = "1.00") -> dict:
93
+ def create_model(self, model_name: str, model_desc: str, version: str = "1.00") -> ModelRepository:
92
94
  """
93
95
  Create a new model with the given model_name and model_desc, and a specified version.
94
96
 
@@ -111,39 +113,22 @@ class Client:
111
113
  payload = {"name": model_name, "description": model_desc}
112
114
 
113
115
  try:
114
- logging.debug(f"Create Model URL: {url}")
115
- logging.debug(f"Headers: {headers}")
116
- logging.debug(f"Payload: {payload}")
117
-
118
116
  response = requests.post(url, json=payload, headers=headers)
119
117
  response.raise_for_status()
118
+ except requests.HTTPError as e:
119
+ error_details = f"HTTP {e.response.status_code}: {e.response.text}"
120
+ raise OpenGradientError(f"Model creation failed: {error_details}") from e
120
121
 
121
- json_response = response.json()
122
- model_name = json_response.get("name")
123
- if not model_name:
124
- raise Exception(f"Model creation response missing 'name'. Full response: {json_response}")
125
- logging.info(f"Model creation successful. Model name: {model_name}")
122
+ json_response = response.json()
123
+ model_name = json_response.get("name")
124
+ if not model_name:
125
+ raise Exception(f"Model creation response missing 'name'. Full response: {json_response}")
126
126
 
127
- # Create the specified version for the newly created model
128
- try:
129
- version_response = self.create_version(model_name, version)
130
- logging.info(f"Version creation successful. Version string: {version_response['versionString']}")
131
- except Exception as ve:
132
- logging.error(f"Version creation failed, but model was created. Error: {str(ve)}")
133
- return {"name": model_name, "versionString": None, "version_error": str(ve)}
127
+ # Create the specified version for the newly created model
128
+ version_response = self.create_version(model_name, version)
134
129
 
135
- return {"name": model_name, "versionString": version_response["versionString"]}
130
+ return ModelRepository(model_name, version_response["versionString"])
136
131
 
137
- except requests.RequestException as e:
138
- logging.error(f"Model creation failed: {str(e)}")
139
- if hasattr(e, "response") and e.response is not None:
140
- logging.error(f"Response status code: {e.response.status_code}")
141
- logging.error(f"Response headers: {e.response.headers}")
142
- logging.error(f"Response content: {e.response.text}")
143
- raise Exception(f"Model creation failed: {str(e)}")
144
- except Exception as e:
145
- logging.error(f"Unexpected error during model creation: {str(e)}")
146
- raise
147
132
 
148
133
  def create_version(self, model_name: str, notes: str = "", is_major: bool = False) -> dict:
149
134
  """
@@ -204,7 +189,7 @@ class Client:
204
189
  logging.error(f"Unexpected error during version creation: {str(e)}")
205
190
  raise
206
191
 
207
- def upload(self, model_path: str, model_name: str, version: str) -> dict:
192
+ def upload(self, model_path: str, model_name: str, version: str) -> FileUploadResult:
208
193
  """
209
194
  Upload a model file to the server.
210
195
 
@@ -259,12 +244,9 @@ class Client:
259
244
  if response.status_code == 201:
260
245
  if response.content and response.content != b"null":
261
246
  json_response = response.json()
262
- logging.info(f"JSON response: {json_response}") # Log the parsed JSON response
263
- logging.info(f"Upload successful. CID: {json_response.get('ipfsCid', 'N/A')}")
264
- result = {"model_cid": json_response.get("ipfsCid"), "size": json_response.get("size")}
247
+ return FileUploadResult(json_response.get("ipfsCid"), json_response.get("size"))
265
248
  else:
266
- logging.warning("Empty or null response content received. Assuming upload was successful.")
267
- result = {"model_cid": None, "size": None}
249
+ raise RuntimeError("Empty or null response content received; treating upload as failed.")
268
250
  elif response.status_code == 500:
269
251
  error_message = "Internal server error occurred. Please try again later or contact support."
270
252
  logging.error(error_message)
@@ -274,8 +256,6 @@ class Client:
274
256
  logging.error(f"Upload failed with status code {response.status_code}: {error_message}")
275
257
  raise OpenGradientError(f"Upload failed: {error_message}", status_code=response.status_code)
276
258
 
277
- return result
278
-
279
259
  except requests.RequestException as e:
280
260
  logging.error(f"Request exception during upload: {str(e)}")
281
261
  if hasattr(e, "response") and e.response is not None:
@@ -313,7 +293,7 @@ class Client:
313
293
  contract = self._blockchain.eth.contract(address=self._inference_hub_contract_address, abi=self._inference_abi)
314
294
 
315
295
  inference_mode_uint8 = inference_mode.value
316
- converted_model_input = utils.convert_to_model_input(model_input)
296
+ converted_model_input = convert_to_model_input(model_input)
317
297
 
318
298
  run_function = contract.functions.run(model_cid, inference_mode_uint8, converted_model_input)
319
299
 
@@ -342,7 +322,7 @@ class Client:
342
322
  raise OpenGradientError("InferenceResult event not found in transaction logs")
343
323
 
344
324
  # TODO: This should return a ModelOutput class object
345
- model_output = utils.convert_to_model_output(parsed_logs[0]["args"])
325
+ model_output = convert_to_model_output(parsed_logs[0]["args"])
346
326
 
347
327
  return InferenceResult(tx_hash.hex(), model_output)
348
328
 
@@ -751,7 +731,7 @@ class Client:
751
731
  # if channel:
752
732
  # channel.close()
753
733
 
754
- def _get_abi(self, abi_name) -> List[Dict]:
734
+ def _get_abi(self, abi_name) -> str:
755
735
  """
756
736
  Returns the ABI for the requested contract.
757
737
  """
@@ -759,7 +739,7 @@ class Client:
759
739
  with open(abi_path, "r") as f:
760
740
  return json.load(f)
761
741
 
762
- def _get_bin(self, bin_name) -> List[Dict]:
742
+ def _get_bin(self, bin_name) -> str:
763
743
  """
764
744
  Returns the bin for the requested contract.
765
745
  """
@@ -781,17 +761,20 @@ class Client:
781
761
  """
782
762
  Deploy a new workflow contract with the specified parameters.
783
763
 
784
- This function deploys a new workflow contract and optionally registers it with
785
- the scheduler for automated execution. If scheduler_params is not provided,
786
- the workflow will be deployed without automated execution scheduling.
764
+ This function deploys a new workflow contract on OpenGradient that connects
765
+ an AI model with its required input data. When executed, the workflow will fetch
766
+ the specified model, evaluate the input query to get data, and perform inference.
767
+
768
+ The workflow can be set to execute manually or automatically via a scheduler.
787
769
 
788
770
  Args:
789
- model_cid (str): IPFS CID of the model to be executed
790
- input_query (HistoricalInputQuery): Query parameters for data input
771
+ model_cid (str): CID of the model to be executed from the Model Hub
772
+ input_query (HistoricalInputQuery): Input definition for the model inference,
773
+ will be evaluated at runtime for each inference
791
774
  input_tensor_name (str): Name of the input tensor expected by the model
792
775
  scheduler_params (Optional[SchedulerParams]): Scheduler configuration for automated execution:
793
776
  - frequency: Execution frequency in seconds
794
- - duration_hours: How long to run in hours
777
+ - duration_hours: How long the schedule should live for
795
778
 
796
779
  Returns:
797
780
  str: Deployed contract address. If scheduler_params was provided, the workflow
@@ -910,7 +893,7 @@ class Client:
910
893
  # Get the result
911
894
  result = contract.functions.getInferenceResult().call()
912
895
 
913
- return utils.convert_array_to_model_output(result)
896
+ return convert_array_to_model_output(result)
914
897
 
915
898
  def run_workflow(self, contract_address: str) -> ModelOutput:
916
899
  """
@@ -955,9 +938,9 @@ class Client:
955
938
  # Get the inference result from the contract
956
939
  result = contract.functions.getInferenceResult().call()
957
940
 
958
- return utils.convert_array_to_model_output(result)
941
+ return convert_array_to_model_output(result)
959
942
 
960
- def read_workflow_history(self, contract_address: str, num_results: int) -> List[Dict]:
943
+ def read_workflow_history(self, contract_address: str, num_results: int) -> List[ModelOutput]:
961
944
  """
962
945
  Gets historical inference results from a workflow contract.
963
946
 
@@ -969,18 +952,14 @@ class Client:
969
952
  num_results (int): Number of historical results to retrieve
970
953
 
971
954
  Returns:
972
- List[Dict]: List of historical inference results, each containing:
973
- - prediction values
974
- - timestamps
975
- - any additional metadata stored with the result
976
-
955
+ List[ModelOutput]: List of historical inference results
977
956
  """
978
957
  contract = self._blockchain.eth.contract(
979
958
  address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
980
959
  )
981
960
 
982
961
  results = contract.functions.getLastInferenceResults(num_results).call()
983
- return [utils.convert_array_to_model_output(result) for result in results]
962
+ return [convert_array_to_model_output(result) for result in results]
984
963
 
985
964
 
986
965
  def run_with_retry(txn_function, max_retries=DEFAULT_MAX_RETRY, retry_delay=DEFAULT_RETRY_DELAY_SEC):
opengradient/types.py CHANGED
@@ -184,3 +184,15 @@ class SchedulerParams:
184
184
  if data is None:
185
185
  return None
186
186
  return SchedulerParams(frequency=data.get("frequency", 600), duration_hours=data.get("duration_hours", 2))
187
+
188
+
189
+ @dataclass
190
+ class ModelRepository:
191
+ name: str
192
+ initialVersion: str
193
+
194
+
195
+ @dataclass
196
+ class FileUploadResult:
197
+ modelCid: str
198
+ size: int
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: opengradient
3
- Version: 0.4.7
3
+ Version: 0.4.8
4
4
  Summary: Python SDK for OpenGradient decentralized model management & inference services
5
5
  Author-email: OpenGradient <oliver@opengradient.ai>
6
6
  License: MIT License
@@ -1,10 +1,10 @@
1
- opengradient/__init__.py,sha256=CBZiyZhnuqbhWylFTi6KVAADBTt2X09aUH00vxpw7kk,12066
1
+ opengradient/__init__.py,sha256=l-bAgytvG4EnSE6q5OUIkyiaE4ROes9vy3xDN1PgWJg,12178
2
2
  opengradient/account.py,sha256=5wrYpws_1lozjOFjLCTHtxgoxK-LmObDAaVy9eDcJY4,1145
3
3
  opengradient/cli.py,sha256=12fezJJvuceUaPSllsMqrBFoSpd2sTJjMpZ1_Dhzskg,25426
4
- opengradient/client.py,sha256=IGykQOcn2kQBhtcATqa8ka19K5ZK8dYxzdkRqYIMx10,44925
4
+ opengradient/client.py,sha256=q6Md6uleBl8h2Vkz1tjYrovFrg7_Lm0CWDMhymZmPzE,43773
5
5
  opengradient/defaults.py,sha256=Dqc64Qv7RdLv7ZBjXzvxRRqBDc1HhyVbbdImpmzyrzU,490
6
6
  opengradient/exceptions.py,sha256=88tfegboGtlehQcwhxsl6ZzhLJWZWlkf_bkHTiCtXpo,3391
7
- opengradient/types.py,sha256=wbLjrwurGG_74Dn9OZwkR-Kv985X5psSD8kgpiBwf3Q,4614
7
+ opengradient/types.py,sha256=hAOsNJPoeZQtXM_qPnOTyk0ukTRKVAIMuJDr0Z8Z9t0,4757
8
8
  opengradient/utils.py,sha256=NMXg_mi5cHVeV01O4fFQJCcbwgGuGFbdYNrAG9K2Um0,8337
9
9
  opengradient/abi/PriceHistoryInference.abi,sha256=ZB3fZdx1kaFlp2wt1vTbTZZG1k8HPvmNtkG5Q8Bnajw,5098
10
10
  opengradient/abi/WorkflowScheduler.abi,sha256=yEGs76qO4S1z980KL5hBdfyXiJ6k-kERcB1O_o73AEU,416
@@ -21,9 +21,9 @@ opengradient/proto/__init__.py,sha256=AhaSmrqV0TXGzCKaoPV8-XUvqs2fGAJBM2aOmDpkNb
21
21
  opengradient/proto/infer.proto,sha256=13eaEMcppxkBF8yChptsX9HooWFwJKze7oLZNl-LEb8,1217
22
22
  opengradient/proto/infer_pb2.py,sha256=sGWDDVumYhXoCJTG9rLyvKu4XyaEjPE_b038kbNlj7w,3484
23
23
  opengradient/proto/infer_pb2_grpc.py,sha256=q42_eZ7OZCMTXdWocYA4Ka3B0c3B74dOhfqdaIOO5AU,6700
24
- opengradient-0.4.7.dist-info/LICENSE,sha256=xEcvQ3AxZOtDkrqkys2Mm6Y9diEnaSeQRKvxi-JGnNA,1069
25
- opengradient-0.4.7.dist-info/METADATA,sha256=GC5oaUwycu1UodnPUEvBVvCI9cqBWJdLFo4W0oY83jo,5214
26
- opengradient-0.4.7.dist-info/WHEEL,sha256=nn6H5-ilmfVryoAQl3ZQ2l8SH5imPWFpm1A5FgEuFV4,91
27
- opengradient-0.4.7.dist-info/entry_points.txt,sha256=yUKTaJx8RXnybkob0J62wVBiCp_1agVbgw9uzsmaeJc,54
28
- opengradient-0.4.7.dist-info/top_level.txt,sha256=oC1zimVLa2Yi1LQz8c7x-0IQm92milb5ax8gHBHwDqU,13
29
- opengradient-0.4.7.dist-info/RECORD,,
24
+ opengradient-0.4.8.dist-info/LICENSE,sha256=xEcvQ3AxZOtDkrqkys2Mm6Y9diEnaSeQRKvxi-JGnNA,1069
25
+ opengradient-0.4.8.dist-info/METADATA,sha256=teo3WbkHF5H2u0sy2aK2VAh1_HV0OLjwLmhNvf1DWFM,5214
26
+ opengradient-0.4.8.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
27
+ opengradient-0.4.8.dist-info/entry_points.txt,sha256=yUKTaJx8RXnybkob0J62wVBiCp_1agVbgw9uzsmaeJc,54
28
+ opengradient-0.4.8.dist-info/top_level.txt,sha256=oC1zimVLa2Yi1LQz8c7x-0IQm92milb5ax8gHBHwDqU,13
29
+ opengradient-0.4.8.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.1)
2
+ Generator: setuptools (75.8.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5