opengradient 0.3.22__tar.gz → 0.3.24__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {opengradient-0.3.22/src/opengradient.egg-info → opengradient-0.3.24}/PKG-INFO +1 -1
  2. {opengradient-0.3.22 → opengradient-0.3.24}/pyproject.toml +1 -1
  3. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/__init__.py +29 -8
  4. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/client.py +62 -12
  5. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/types.py +21 -2
  6. {opengradient-0.3.22 → opengradient-0.3.24/src/opengradient.egg-info}/PKG-INFO +1 -1
  7. {opengradient-0.3.22 → opengradient-0.3.24}/LICENSE +0 -0
  8. {opengradient-0.3.22 → opengradient-0.3.24}/MANIFEST.in +0 -0
  9. {opengradient-0.3.22 → opengradient-0.3.24}/README.md +0 -0
  10. {opengradient-0.3.22 → opengradient-0.3.24}/setup.cfg +0 -0
  11. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/abi/ModelExecutorHistorical.abi +0 -0
  12. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/abi/inference.abi +0 -0
  13. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/account.py +0 -0
  14. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/cli.py +0 -0
  15. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/contracts/templates/ModelExecutorHistorical.bin +0 -0
  16. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/defaults.py +0 -0
  17. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/exceptions.py +0 -0
  18. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/llm/__init__.py +0 -0
  19. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/llm/og_langchain.py +0 -0
  20. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/llm/og_openai.py +0 -0
  21. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/mltools/__init__.py +0 -0
  22. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/mltools/model_tool.py +0 -0
  23. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/proto/__init__.py +0 -0
  24. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/proto/infer.proto +0 -0
  25. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/proto/infer_pb2.py +0 -0
  26. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/proto/infer_pb2_grpc.py +0 -0
  27. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient/utils.py +0 -0
  28. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient.egg-info/SOURCES.txt +0 -0
  29. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient.egg-info/dependency_links.txt +0 -0
  30. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient.egg-info/entry_points.txt +0 -0
  31. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient.egg-info/requires.txt +0 -0
  32. {opengradient-0.3.22 → opengradient-0.3.24}/src/opengradient.egg-info/top_level.txt +0 -0
--- opengradient-0.3.22/src/opengradient.egg-info/PKG-INFO
+++ opengradient-0.3.24/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: opengradient
-Version: 0.3.22
+Version: 0.3.24
 Summary: Python SDK for OpenGradient decentralized model management & inference services
 Author-email: OpenGradient <oliver@opengradient.ai>
 License: MIT License
--- opengradient-0.3.22/pyproject.toml
+++ opengradient-0.3.24/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "opengradient"
-version = "0.3.22"
+version = "0.3.24"
 description = "Python SDK for OpenGradient decentralized model management & inference services"
 authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
 license = {file = "LICENSE"}
--- opengradient-0.3.22/src/opengradient/__init__.py
+++ opengradient-0.3.24/src/opengradient/__init__.py
@@ -5,7 +5,7 @@ from typing import Dict, List, Optional, Tuple, Any, Union
 from pathlib import Path
 from .client import Client
 from .defaults import DEFAULT_INFERENCE_CONTRACT_ADDRESS, DEFAULT_RPC_URL
-from .types import InferenceMode, LlmInferenceMode, LLM, TEE_LLM
+from .types import HistoricalInputQuery, InferenceMode, LlmInferenceMode, LLM, TEE_LLM, SchedulerParams
 from . import llm
 from . import mltools
 
@@ -247,23 +247,44 @@ def generate_image(model: str, prompt: str, height: Optional[int] = None, width:
 
 def new_workflow(
     model_cid: str,
-    input_query: Dict[str, Any],
-    input_tensor_name: str
+    input_query: Union[Dict[str, Any], HistoricalInputQuery],
+    input_tensor_name: str,
+    scheduler_params: Optional[Union[Dict[str, int], SchedulerParams]] = None
 ) -> str:
     """
     Deploy a new workflow contract with the specified parameters.
 
+    This function deploys a new workflow contract and optionally registers it with
+    the scheduler for automated execution. If scheduler_params is not provided,
+    the workflow will be deployed without automated execution scheduling.
+
     Args:
         model_cid: IPFS CID of the model
-        input_query: Dictionary containing query parameters
+        input_query: Dictionary or HistoricalInputQuery containing query parameters
         input_tensor_name: Name of the input tensor
+        scheduler_params: Optional scheduler configuration:
+            - Can be a dictionary with:
+                - frequency: Execution frequency in seconds (default: 600)
+                - duration_hours: How long to run in hours (default: 2)
+            - Or a SchedulerParams instance
+            If not provided, the workflow will be deployed without scheduling.
 
     Returns:
-        str: Deployed contract address
+        str: Deployed contract address. If scheduler_params was provided, the workflow
+        will be automatically executed according to the specified schedule.
     """
     if _client is None:
         raise RuntimeError("OpenGradient client not initialized. Call og.init(...) first.")
-    return _client.new_workflow(model_cid, input_query, input_tensor_name)
+
+    # Convert scheduler_params if it's a dict, otherwise use as is
+    scheduler = SchedulerParams.from_dict(scheduler_params) if isinstance(scheduler_params, dict) else scheduler_params
+
+    return _client.new_workflow(
+        model_cid=model_cid,
+        input_query=input_query,
+        input_tensor_name=input_tensor_name,
+        scheduler_params=scheduler
+    )
 
 def read_workflow_result(contract_address: str) -> Dict[str, Union[str, Dict]]:
     """
@@ -314,7 +335,7 @@ __all__ = [
     'upload',
     'init',
     'LLM',
-    'TEE_LLM'
+    'TEE_LLM',
     'new_workflow',
     'read_workflow_result',
     'run_workflow'
@@ -329,6 +350,6 @@ __pdoc__ = {
     'llm': True,
     'mltools': True,
     'proto': False,
-    'types': False,
+    'types': True,
     'utils': False
 }
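
The diff above changes the top-level new_workflow wrapper so it accepts either a plain dict or a SchedulerParams instance for scheduling. A minimal usage sketch of the new signature follows; the model CID, query fields, and tensor name are hypothetical placeholders, and the og.init(...) arguments are omitted:

    import opengradient as og
    from opengradient import SchedulerParams

    # og.init(...) must be called first with your account details (arguments omitted here).

    # Dict form: converted internally via SchedulerParams.from_dict
    address = og.new_workflow(
        model_cid="QmExampleModelCid",              # placeholder CID
        input_query={"currency_pair": "ETH/USD"},   # placeholder; fields must match HistoricalInputQuery
        input_tensor_name="price_input",            # placeholder tensor name
        scheduler_params={"frequency": 300, "duration_hours": 6},
    )

    # Equivalent call passing the dataclass directly
    address = og.new_workflow(
        model_cid="QmExampleModelCid",
        input_query={"currency_pair": "ETH/USD"},
        input_tensor_name="price_input",
        scheduler_params=SchedulerParams(frequency=300, duration_hours=6),
    )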
--- opengradient-0.3.22/src/opengradient/client.py
+++ opengradient-0.3.24/src/opengradient/client.py
@@ -20,7 +20,8 @@ from opengradient.types import (
     LlmInferenceMode,
     LLM,
     TEE_LLM,
-    ModelOutput
+    ModelOutput,
+    SchedulerParams
 )
 
 import grpc
@@ -812,18 +813,11 @@ class Client:
         self,
         model_cid: str,
         input_query: Union[Dict[str, Any], HistoricalInputQuery],
-        input_tensor_name: str
+        input_tensor_name: str,
+        scheduler_params: Optional[SchedulerParams] = None
     ) -> str:
         """
         Deploy a new workflow contract with the specified parameters.
-
-        Args:
-            model_cid: IPFS CID of the model
-            input_query: Either a HistoricalInputQuery object or dictionary containing query parameters
-            input_tensor_name: Name of the input tensor
-
-        Returns:
-            str: Deployed contract address
         """
         if isinstance(input_query, dict):
             input_query = HistoricalInputQuery.from_dict(input_query)
@@ -835,6 +829,8 @@ class Client:
         with open(bin_path, 'r') as f:
             bytecode = f.read().strip()
 
+        print("📦 Deploying workflow contract...")
+
         # Create contract instance
         contract = self._w3.eth.contract(abi=abi, bytecode=bytecode)
 
@@ -851,12 +847,66 @@ class Client:
             'gasPrice': self._w3.eth.gas_price,
             'chainId': self._w3.eth.chain_id
         })
-
+
         signed_txn = self._w3.eth.account.sign_transaction(transaction, self.private_key)
         tx_hash = self._w3.eth.send_raw_transaction(signed_txn.raw_transaction)
         tx_receipt = self._w3.eth.wait_for_transaction_receipt(tx_hash)
+        contract_address = tx_receipt.contractAddress
 
-        return tx_receipt.contractAddress
+        print(f"✅ Workflow contract deployed at: {contract_address}")
+
+        # Register with scheduler if params provided
+        if scheduler_params:
+            print("\n⏰ Setting up automated execution schedule...")
+            print(f" • Frequency: Every {scheduler_params.frequency} seconds")
+            print(f" • Duration: {scheduler_params.duration_hours} hours")
+            print(f" • End Time: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(scheduler_params.end_time))}")
+
+            scheduler_abi = [{
+                "inputs": [
+                    {"internalType": "address", "name": "contractAddress", "type": "address"},
+                    {"internalType": "uint256", "name": "endTime", "type": "uint256"},
+                    {"internalType": "uint256", "name": "frequency", "type": "uint256"}
+                ],
+                "name": "registerTask",
+                "outputs": [],
+                "stateMutability": "nonpayable",
+                "type": "function"
+            }]
+
+            scheduler_address = "0xE81a54399CFDf551bB917d0427464fE54537D245"
+            scheduler_contract = self._w3.eth.contract(
+                address=scheduler_address,
+                abi=scheduler_abi
+            )
+
+            try:
+                # Register the workflow with the scheduler
+                scheduler_tx = scheduler_contract.functions.registerTask(
+                    contract_address,
+                    scheduler_params.end_time,
+                    scheduler_params.frequency
+                ).build_transaction({
+                    'from': self.wallet_address,
+                    'gas': 300000,
+                    'gasPrice': self._w3.eth.gas_price,
+                    'nonce': self._w3.eth.get_transaction_count(self.wallet_address, 'pending'),
+                    'chainId': self._w3.eth.chain_id
+                })
+
+                signed_scheduler_tx = self._w3.eth.account.sign_transaction(scheduler_tx, self.private_key)
+                scheduler_tx_hash = self._w3.eth.send_raw_transaction(signed_scheduler_tx.raw_transaction)
+                self._w3.eth.wait_for_transaction_receipt(scheduler_tx_hash)
+
+                print("✅ Automated execution schedule set successfully!")
+                print(f" Transaction hash: {scheduler_tx_hash.hex()}")
+
+            except Exception as e:
+                print("❌ Failed to set up automated execution schedule")
+                print(f" Error: {str(e)}")
+                print(" The workflow contract is still deployed and can be executed manually.")
+
+        return contract_address
 
     def read_workflow_result(self, contract_address: str) -> Any:
         """
--- opengradient-0.3.22/src/opengradient/types.py
+++ opengradient-0.3.24/src/opengradient/types.py
@@ -1,6 +1,7 @@
 from dataclasses import dataclass
-from typing import List, Tuple, Union
+from typing import List, Tuple, Union, Dict, Optional
 from enum import Enum, IntEnum
+import time
 
 class CandleOrder(IntEnum):
     ASCENDING = 0
@@ -132,4 +133,22 @@ class LLM(str, Enum):
 class TEE_LLM(str, Enum):
     """Enum for LLM models available for TEE execution"""
 
-    META_LLAMA_3_1_70B_INSTRUCT = "meta-llama/Llama-3.1-70B-Instruct"
+    META_LLAMA_3_1_70B_INSTRUCT = "meta-llama/Llama-3.1-70B-Instruct"
+
+@dataclass
+class SchedulerParams:
+    frequency: int
+    duration_hours: int
+
+    @property
+    def end_time(self) -> int:
+        return int(time.time()) + (self.duration_hours * 60 * 60)
+
+    @staticmethod
+    def from_dict(data: Optional[Dict[str, int]]) -> Optional['SchedulerParams']:
+        if data is None:
+            return None
+        return SchedulerParams(
+            frequency=data.get('frequency', 600),
+            duration_hours=data.get('duration_hours', 2)
+        )
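
SchedulerParams computes end_time relative to the current clock, and from_dict falls back to a 600-second frequency over 2 hours. A quick sketch of that behaviour, assuming SchedulerParams is exposed at module level in opengradient.types (as the client.py import suggests):

    import time
    from opengradient.types import SchedulerParams

    params = SchedulerParams.from_dict({})   # empty dict -> defaults: frequency=600, duration_hours=2
    assert params.frequency == 600
    assert params.duration_hours == 2

    # end_time is "now" plus duration_hours * 3600 seconds, so roughly 7200 seconds ahead
    print(params.end_time - int(time.time()))   # ≈ 7200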
--- opengradient-0.3.22/PKG-INFO
+++ opengradient-0.3.24/src/opengradient.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: opengradient
-Version: 0.3.22
+Version: 0.3.24
 Summary: Python SDK for OpenGradient decentralized model management & inference services
 Author-email: OpenGradient <oliver@opengradient.ai>
 License: MIT License