opengradient 0.5.9__py3-none-any.whl → 0.5.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
opengradient/__init__.py CHANGED
@@ -23,9 +23,13 @@ from .types import (
  FileUploadResult,
  x402SettlementMode,
  )
+ from .alpha import _AlphaNamespace
 
  from . import llm, alphasense
 
+ # Module-level alpha namespace for workflow/ML execution features (Alpha Testnet only)
+ alpha = _AlphaNamespace()
+
  _client = None
 
 
@@ -284,93 +288,6 @@ def list_files(model_name: str, version: str) -> List[Dict]:
  return _client.list_files(model_name, version)
 
 
- def new_workflow(
- model_cid: str,
- input_query: HistoricalInputQuery,
- input_tensor_name: str,
- scheduler_params: Optional[SchedulerParams] = None,
- ) -> str:
- """
- Deploy a new workflow contract with the specified parameters.
-
- This function deploys a new workflow contract and optionally registers it with
- the scheduler for automated execution. If scheduler_params is not provided,
- the workflow will be deployed without automated execution scheduling.
-
- Args:
- model_cid: IPFS CID of the model
- input_query: HistoricalInputQuery containing query parameters
- input_tensor_name: Name of the input tensor
- scheduler_params: Optional scheduler configuration as SchedulerParams instance
- If not provided, the workflow will be deployed without scheduling.
-
- Returns:
- str: Deployed contract address. If scheduler_params was provided, the workflow
- will be automatically executed according to the specified schedule.
- """
- if _client is None:
- raise RuntimeError("OpenGradient client not initialized. Call og.init(...) first.")
-
- return _client.new_workflow(
- model_cid=model_cid, input_query=input_query, input_tensor_name=input_tensor_name, scheduler_params=scheduler_params
- )
-
-
- def read_workflow_result(contract_address: str) -> ModelOutput:
- """
- Reads the latest inference result from a deployed workflow contract.
-
- This function retrieves the most recent output from a deployed model executor contract.
- It includes built-in retry logic to handle blockchain state delays.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
-
- Returns:
- Dict[str, Union[str, Dict]]: A dictionary containing:
- - status: "success" or "error"
- - result: The model output data if successful
- - error: Error message if status is "error"
-
- Raises:
- RuntimeError: If OpenGradient client is not initialized
- """
- if _client is None:
- raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
- return _client.read_workflow_result(contract_address)
-
-
- def run_workflow(contract_address: str) -> ModelOutput:
- """
- Executes the workflow by calling run() on the contract to pull latest data and perform inference.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
-
- Returns:
- Dict[str, Union[str, Dict]]: Status of the run operation
- """
- if _client is None:
- raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
- return _client.run_workflow(contract_address)
-
-
- def read_workflow_history(contract_address: str, num_results: int) -> List[ModelOutput]:
- """
- Gets historical inference results from a workflow contract.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
- num_results (int): Number of historical results to retrieve
-
- Returns:
- List[Dict]: List of historical inference results
- """
- if _client is None:
- raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
- return _client.read_workflow_history(contract_address, num_results)
-
-
  __all__ = [
  "list_files",
  "login",
@@ -383,10 +300,7 @@ __all__ = [
  "init",
  "LLM",
  "TEE_LLM",
- "new_workflow",
- "read_workflow_result",
- "run_workflow",
- "read_workflow_history",
+ "alpha",
  "InferenceMode",
  "LlmInferenceMode",
  "HistoricalInputQuery",
opengradient/alpha.py ADDED
@@ -0,0 +1,375 @@
+ """
+ Alpha Testnet features for OpenGradient SDK.
+
+ This module contains features that are only available on the Alpha Testnet,
+ including workflow management and ML model execution.
+ """
+
+ import json
+ import logging
+ from pathlib import Path
+ from typing import TYPE_CHECKING, List, Optional
+
+ from web3 import Web3
+ from web3.exceptions import ContractLogicError
+
+ from .defaults import DEFAULT_SCHEDULER_ADDRESS
+ from .types import HistoricalInputQuery, ModelOutput, SchedulerParams
+ from .utils import convert_array_to_model_output
+
+ if TYPE_CHECKING:
+ from .client import Client
+
+ # How much time we wait for txn to be included in chain
+ INFERENCE_TX_TIMEOUT = 120
+ REGULAR_TX_TIMEOUT = 30
+
+
+ class Alpha:
+ """
+ Alpha Testnet features namespace.
+
+ This class provides access to features that are only available on the Alpha Testnet,
+ including workflow deployment and execution.
+
+ Usage:
+ client = og.new_client(...)
+ result = client.alpha.new_workflow(model_cid, input_query, input_tensor_name)
+ """
+
+ def __init__(self, client: "Client"):
+ self._client = client
+
+ def _get_abi(self, abi_name: str) -> dict:
+ """Returns the ABI for the requested contract."""
+ abi_path = Path(__file__).parent / "abi" / abi_name
+ with open(abi_path, "r") as f:
+ return json.load(f)
+
+ def _get_bin(self, bin_name: str) -> str:
+ """Returns the bin for the requested contract."""
+ bin_path = Path(__file__).parent / "bin" / bin_name
+ with open(bin_path, "r", encoding="utf-8") as f:
+ bytecode = f.read().strip()
+ if not bytecode.startswith("0x"):
+ bytecode = "0x" + bytecode
+ return bytecode
+
+ def new_workflow(
+ self,
+ model_cid: str,
+ input_query: HistoricalInputQuery,
+ input_tensor_name: str,
+ scheduler_params: Optional[SchedulerParams] = None,
+ ) -> str:
+ """
+ Deploy a new workflow contract with the specified parameters.
+
+ This function deploys a new workflow contract on OpenGradient that connects
+ an AI model with its required input data. When executed, the workflow will fetch
+ the specified model, evaluate the input query to get data, and perform inference.
+
+ The workflow can be set to execute manually or automatically via a scheduler.
+
+ Args:
+ model_cid (str): CID of the model to be executed from the Model Hub
+ input_query (HistoricalInputQuery): Input definition for the model inference,
+ will be evaluated at runtime for each inference
+ input_tensor_name (str): Name of the input tensor expected by the model
+ scheduler_params (Optional[SchedulerParams]): Scheduler configuration for automated execution:
+ - frequency: Execution frequency in seconds
+ - duration_hours: How long the schedule should live for
+
+ Returns:
+ str: Deployed contract address. If scheduler_params was provided, the workflow
+ will be automatically executed according to the specified schedule.
+
+ Raises:
+ Exception: If transaction fails or gas estimation fails
+ """
+ from .client import run_with_retry
+
+ # Get contract ABI and bytecode
+ abi = self._get_abi("PriceHistoryInference.abi")
+ bytecode = self._get_bin("PriceHistoryInference.bin")
+
+ def deploy_transaction():
+ contract = self._client._blockchain.eth.contract(abi=abi, bytecode=bytecode)
+ query_tuple = input_query.to_abi_format()
+ constructor_args = [model_cid, input_tensor_name, query_tuple]
+
+ try:
+ # Estimate gas needed
+ estimated_gas = contract.constructor(*constructor_args).estimate_gas(
+ {"from": self._client._wallet_account.address}
+ )
+ gas_limit = int(estimated_gas * 1.2)
+ except Exception as e:
+ print(f"⚠️ Gas estimation failed: {str(e)}")
+ gas_limit = 5000000 # Conservative fallback
+ print(f"📊 Using fallback gas limit: {gas_limit}")
+
+ transaction = contract.constructor(*constructor_args).build_transaction(
+ {
+ "from": self._client._wallet_account.address,
+ "nonce": self._client._blockchain.eth.get_transaction_count(
+ self._client._wallet_account.address, "pending"
+ ),
+ "gas": gas_limit,
+ "gasPrice": self._client._blockchain.eth.gas_price,
+ "chainId": self._client._blockchain.eth.chain_id,
+ }
+ )
+
+ signed_txn = self._client._wallet_account.sign_transaction(transaction)
+ tx_hash = self._client._blockchain.eth.send_raw_transaction(signed_txn.raw_transaction)
+
+ tx_receipt = self._client._blockchain.eth.wait_for_transaction_receipt(tx_hash, timeout=60)
+
+ if tx_receipt["status"] == 0:
+ raise Exception(f"❌ Contract deployment failed, transaction hash: {tx_hash.hex()}")
+
+ return tx_receipt.contractAddress
+
+ contract_address = run_with_retry(deploy_transaction)
+
+ if scheduler_params:
+ self._register_with_scheduler(contract_address, scheduler_params)
+
+ return contract_address
+
+ def _register_with_scheduler(self, contract_address: str, scheduler_params: SchedulerParams) -> None:
+ """
+ Register the deployed workflow contract with the scheduler for automated execution.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+ scheduler_params (SchedulerParams): Scheduler configuration containing:
+ - frequency: Execution frequency in seconds
+ - duration_hours: How long to run in hours
+ - end_time: Unix timestamp when scheduling should end
+
+ Raises:
+ Exception: If registration with scheduler fails. The workflow contract will
+ still be deployed and can be executed manually.
+ """
+ scheduler_abi = self._get_abi("WorkflowScheduler.abi")
+
+ # Scheduler contract address
+ scheduler_address = DEFAULT_SCHEDULER_ADDRESS
+ scheduler_contract = self._client._blockchain.eth.contract(address=scheduler_address, abi=scheduler_abi)
+
+ try:
+ # Register the workflow with the scheduler
+ scheduler_tx = scheduler_contract.functions.registerTask(
+ contract_address, scheduler_params.end_time, scheduler_params.frequency
+ ).build_transaction(
+ {
+ "from": self._client._wallet_account.address,
+ "gas": 300000,
+ "gasPrice": self._client._blockchain.eth.gas_price,
+ "nonce": self._client._blockchain.eth.get_transaction_count(
+ self._client._wallet_account.address, "pending"
+ ),
+ "chainId": self._client._blockchain.eth.chain_id,
+ }
+ )
+
+ signed_scheduler_tx = self._client._wallet_account.sign_transaction(scheduler_tx)
+ scheduler_tx_hash = self._client._blockchain.eth.send_raw_transaction(signed_scheduler_tx.raw_transaction)
+ self._client._blockchain.eth.wait_for_transaction_receipt(scheduler_tx_hash, timeout=REGULAR_TX_TIMEOUT)
+ except Exception as e:
+ print(f"❌ Error registering contract with scheduler: {str(e)}")
+ print(" The workflow contract is still deployed and can be executed manually.")
+
+ def read_workflow_result(self, contract_address: str) -> ModelOutput:
+ """
+ Reads the latest inference result from a deployed workflow contract.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+
+ Returns:
+ ModelOutput: The inference result from the contract
+
+ Raises:
+ ContractLogicError: If the transaction fails
+ Web3Error: If there are issues with the web3 connection or contract interaction
+ """
+ # Get the contract interface
+ contract = self._client._blockchain.eth.contract(
+ address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
+ )
+
+ # Get the result
+ result = contract.functions.getInferenceResult().call()
+
+ return convert_array_to_model_output(result)
+
+ def run_workflow(self, contract_address: str) -> ModelOutput:
+ """
+ Triggers the run() function on a deployed workflow contract and returns the result.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+
+ Returns:
+ ModelOutput: The inference result from the contract
+
+ Raises:
+ ContractLogicError: If the transaction fails
+ Web3Error: If there are issues with the web3 connection or contract interaction
+ """
+ # Get the contract interface
+ contract = self._client._blockchain.eth.contract(
+ address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
+ )
+
+ # Call run() function
+ nonce = self._client._blockchain.eth.get_transaction_count(self._client._wallet_account.address, "pending")
+
+ run_function = contract.functions.run()
+ transaction = run_function.build_transaction(
+ {
+ "from": self._client._wallet_account.address,
+ "nonce": nonce,
+ "gas": 30000000,
+ "gasPrice": self._client._blockchain.eth.gas_price,
+ "chainId": self._client._blockchain.eth.chain_id,
+ }
+ )
+
+ signed_txn = self._client._wallet_account.sign_transaction(transaction)
+ tx_hash = self._client._blockchain.eth.send_raw_transaction(signed_txn.raw_transaction)
+ tx_receipt = self._client._blockchain.eth.wait_for_transaction_receipt(tx_hash, timeout=INFERENCE_TX_TIMEOUT)
+
+ if tx_receipt.status == 0:
+ raise ContractLogicError(f"Run transaction failed. Receipt: {tx_receipt}")
+
+ # Get the inference result from the contract
+ result = contract.functions.getInferenceResult().call()
+
+ return convert_array_to_model_output(result)
+
+ def read_workflow_history(self, contract_address: str, num_results: int) -> List[ModelOutput]:
+ """
+ Gets historical inference results from a workflow contract.
+
+ Retrieves the specified number of most recent inference results from the contract's
+ storage, with the most recent result first.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+ num_results (int): Number of historical results to retrieve
+
+ Returns:
+ List[ModelOutput]: List of historical inference results
+ """
+ contract = self._client._blockchain.eth.contract(
+ address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
+ )
+
+ results = contract.functions.getLastInferenceResults(num_results).call()
+ return [convert_array_to_model_output(result) for result in results]
+
+
+ class _AlphaNamespace:
+ """
+ Module-level alpha namespace for use with og.init().
+
+ Usage:
+ og.init(...)
+ result = og.alpha.new_workflow(model_cid, input_query, input_tensor_name)
+ """
+
+ def new_workflow(
+ self,
+ model_cid: str,
+ input_query: HistoricalInputQuery,
+ input_tensor_name: str,
+ scheduler_params: Optional[SchedulerParams] = None,
+ ) -> str:
+ """
+ Deploy a new workflow contract with the specified parameters.
+
+ This function deploys a new workflow contract and optionally registers it with
+ the scheduler for automated execution. If scheduler_params is not provided,
+ the workflow will be deployed without automated execution scheduling.
+
+ Args:
+ model_cid: IPFS CID of the model
+ input_query: HistoricalInputQuery containing query parameters
+ input_tensor_name: Name of the input tensor
+ scheduler_params: Optional scheduler configuration as SchedulerParams instance
+ If not provided, the workflow will be deployed without scheduling.
+
+ Returns:
+ str: Deployed contract address. If scheduler_params was provided, the workflow
+ will be automatically executed according to the specified schedule.
+ """
+ from . import _client
+
+ if _client is None:
+ raise RuntimeError("OpenGradient client not initialized. Call og.init(...) first.")
+
+ return _client.alpha.new_workflow(
+ model_cid=model_cid,
+ input_query=input_query,
+ input_tensor_name=input_tensor_name,
+ scheduler_params=scheduler_params,
+ )
+
+ def read_workflow_result(self, contract_address: str) -> ModelOutput:
+ """
+ Reads the latest inference result from a deployed workflow contract.
+
+ This function retrieves the most recent output from a deployed model executor contract.
+ It includes built-in retry logic to handle blockchain state delays.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+
+ Returns:
+ ModelOutput: The inference result from the contract
+
+ Raises:
+ RuntimeError: If OpenGradient client is not initialized
+ """
+ from . import _client
+
+ if _client is None:
+ raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+ return _client.alpha.read_workflow_result(contract_address)
+
+ def run_workflow(self, contract_address: str) -> ModelOutput:
+ """
+ Executes the workflow by calling run() on the contract to pull latest data and perform inference.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+
+ Returns:
+ ModelOutput: The inference result from the contract
+ """
+ from . import _client
+
+ if _client is None:
+ raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+ return _client.alpha.run_workflow(contract_address)
+
+ def read_workflow_history(self, contract_address: str, num_results: int) -> List[ModelOutput]:
+ """
+ Gets historical inference results from a workflow contract.
+
+ Args:
+ contract_address (str): Address of the deployed workflow contract
+ num_results (int): Number of historical results to retrieve
+
+ Returns:
+ List[ModelOutput]: List of historical inference results
+ """
+ from . import _client
+
+ if _client is None:
+ raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+ return _client.alpha.read_workflow_history(contract_address, num_results)
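For client-based usage, the same workflow methods hang off `Client.alpha` (see the `alpha` property added to `opengradient/client.py` below). A short sketch, assuming an initialized client and a placeholder contract address; the `new_client` arguments are elided exactly as in the Alpha docstring above:

```python
import opengradient as og

client = og.new_client(...)  # arguments elided, as in the Alpha usage docstring

workflow = "0x0000000000000000000000000000000000000000"  # placeholder address

# Trigger run() on the workflow contract and read the fresh inference result
latest = client.alpha.run_workflow(workflow)

# Fetch the five most recent results, newest first
history = client.alpha.read_workflow_history(workflow, num_results=5)
```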
opengradient/alphasense/read_workflow_tool.py CHANGED
@@ -64,7 +64,7 @@ def create_read_workflow_tool(
 
  # define runnable
  def read_workflow():
- output = og.read_workflow_result(contract_address=workflow_contract_address)
+ output = og.alpha.read_workflow_result(contract_address=workflow_contract_address)
  return output_formatter(output)
 
  if tool_type == ToolType.LANGCHAIN:
opengradient/client.py CHANGED
@@ -48,7 +48,7 @@ from .defaults import (
  DEFAULT_OPENGRADIENT_LLM_STREAMING_SERVER_URL,
  DEFAULT_NETWORK_FILTER,
  )
- from .utils import convert_array_to_model_output, convert_to_model_input, convert_to_model_output
+ from .utils import convert_to_model_input, convert_to_model_output
 
  _FIREBASE_CONFIG = {
  "apiKey": "AIzaSyDUVckVtfl-hiteBzPopy1pDD8Uvfncs7w",
@@ -153,6 +153,25 @@ class Client:
  if google_api_key or os.getenv("GOOGLE_API_KEY"):
  self._external_api_keys["google"] = google_api_key or os.getenv("GOOGLE_API_KEY")
 
+ self._alpha = None # Lazy initialization for alpha namespace
+
+ @property
+ def alpha(self):
+ """
+ Access Alpha Testnet features.
+
+ Returns:
+ Alpha: Alpha namespace with workflow and ML model execution methods.
+
+ Example:
+ client = og.new_client(...)
+ result = client.alpha.new_workflow(model_cid, input_query, input_tensor_name)
+ """
+ if self._alpha is None:
+ from .alpha import Alpha
+ self._alpha = Alpha(self)
+ return self._alpha
+
  def set_api_key(self, provider: str, api_key: str):
  """
  Set or update API key for an external provider.
@@ -1133,7 +1152,7 @@ class Client:
  limits=LIMITS,
  http2=False,
  follow_redirects=False,
- auth=X402Auth(account=self._wallet_account), # type: ignore
+ auth=X402Auth(account=self._wallet_account, network_filter=DEFAULT_NETWORK_FILTER), # type: ignore
  ) as client:
  headers = {
  "Content-Type": "application/json",
@@ -1425,216 +1444,6 @@ class Client:
 
  return tx_hash, tx_receipt
 
- def new_workflow(
- self,
- model_cid: str,
- input_query: HistoricalInputQuery,
- input_tensor_name: str,
- scheduler_params: Optional[SchedulerParams] = None,
- ) -> str:
- """
- Deploy a new workflow contract with the specified parameters.
-
- This function deploys a new workflow contract on OpenGradient that connects
- an AI model with its required input data. When executed, the workflow will fetch
- the specified model, evaluate the input query to get data, and perform inference.
-
- The workflow can be set to execute manually or automatically via a scheduler.
-
- Args:
- model_cid (str): CID of the model to be executed from the Model Hub
- input_query (HistoricalInputQuery): Input definition for the model inference,
- will be evaluated at runtime for each inference
- input_tensor_name (str): Name of the input tensor expected by the model
- scheduler_params (Optional[SchedulerParams]): Scheduler configuration for automated execution:
- - frequency: Execution frequency in seconds
- - duration_hours: How long the schedule should live for
-
- Returns:
- str: Deployed contract address. If scheduler_params was provided, the workflow
- will be automatically executed according to the specified schedule.
-
- Raises:
- Exception: If transaction fails or gas estimation fails
- """
- # Get contract ABI and bytecode
- abi = self._get_abi("PriceHistoryInference.abi")
- bytecode = self._get_bin("PriceHistoryInference.bin")
-
- def deploy_transaction():
- contract = self._blockchain.eth.contract(abi=abi, bytecode=bytecode)
- query_tuple = input_query.to_abi_format()
- constructor_args = [model_cid, input_tensor_name, query_tuple]
-
- try:
- # Estimate gas needed
- estimated_gas = contract.constructor(*constructor_args).estimate_gas({"from": self._wallet_account.address})
- gas_limit = int(estimated_gas * 1.2)
- except Exception as e:
- print(f"⚠️ Gas estimation failed: {str(e)}")
- gas_limit = 5000000 # Conservative fallback
- print(f"📊 Using fallback gas limit: {gas_limit}")
-
- transaction = contract.constructor(*constructor_args).build_transaction(
- {
- "from": self._wallet_account.address,
- "nonce": self._blockchain.eth.get_transaction_count(self._wallet_account.address, "pending"),
- "gas": gas_limit,
- "gasPrice": self._blockchain.eth.gas_price,
- "chainId": self._blockchain.eth.chain_id,
- }
- )
-
- signed_txn = self._wallet_account.sign_transaction(transaction)
- tx_hash = self._blockchain.eth.send_raw_transaction(signed_txn.raw_transaction)
-
- tx_receipt = self._blockchain.eth.wait_for_transaction_receipt(tx_hash, timeout=60)
-
- if tx_receipt["status"] == 0:
- raise Exception(f"❌ Contract deployment failed, transaction hash: {tx_hash.hex()}")
-
- return tx_receipt.contractAddress
-
- contract_address = run_with_retry(deploy_transaction)
-
- if scheduler_params:
- self._register_with_scheduler(contract_address, scheduler_params)
-
- return contract_address
-
- def _register_with_scheduler(self, contract_address: str, scheduler_params: SchedulerParams) -> None:
- """
- Register the deployed workflow contract with the scheduler for automated execution.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
- scheduler_params (SchedulerParams): Scheduler configuration containing:
- - frequency: Execution frequency in seconds
- - duration_hours: How long to run in hours
- - end_time: Unix timestamp when scheduling should end
-
- Raises:
- Exception: If registration with scheduler fails. The workflow contract will
- still be deployed and can be executed manually.
- """
-
- scheduler_abi = self._get_abi("WorkflowScheduler.abi")
-
- # Scheduler contract address
- scheduler_address = DEFAULT_SCHEDULER_ADDRESS
- scheduler_contract = self._blockchain.eth.contract(address=scheduler_address, abi=scheduler_abi)
-
- try:
- # Register the workflow with the scheduler
- scheduler_tx = scheduler_contract.functions.registerTask(
- contract_address, scheduler_params.end_time, scheduler_params.frequency
- ).build_transaction(
- {
- "from": self._wallet_account.address,
- "gas": 300000,
- "gasPrice": self._blockchain.eth.gas_price,
- "nonce": self._blockchain.eth.get_transaction_count(self._wallet_account.address, "pending"),
- "chainId": self._blockchain.eth.chain_id,
- }
- )
-
- signed_scheduler_tx = self._wallet_account.sign_transaction(scheduler_tx)
- scheduler_tx_hash = self._blockchain.eth.send_raw_transaction(signed_scheduler_tx.raw_transaction)
- self._blockchain.eth.wait_for_transaction_receipt(scheduler_tx_hash, timeout=REGULAR_TX_TIMEOUT)
- except Exception as e:
- print(f"❌ Error registering contract with scheduler: {str(e)}")
- print(" The workflow contract is still deployed and can be executed manually.")
-
- def read_workflow_result(self, contract_address: str) -> ModelOutput:
- """
- Reads the latest inference result from a deployed workflow contract.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
-
- Returns:
- ModelOutput: The inference result from the contract
-
- Raises:
- ContractLogicError: If the transaction fails
- Web3Error: If there are issues with the web3 connection or contract interaction
- """
- # Get the contract interface
- contract = self._blockchain.eth.contract(
- address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
- )
-
- # Get the result
- result = contract.functions.getInferenceResult().call()
-
- return convert_array_to_model_output(result)
-
- def run_workflow(self, contract_address: str) -> ModelOutput:
- """
- Triggers the run() function on a deployed workflow contract and returns the result.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
-
- Returns:
- ModelOutput: The inference result from the contract
-
- Raises:
- ContractLogicError: If the transaction fails
- Web3Error: If there are issues with the web3 connection or contract interaction
- """
- # Get the contract interface
- contract = self._blockchain.eth.contract(
- address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
- )
-
- # Call run() function
- nonce = self._blockchain.eth.get_transaction_count(self._wallet_account.address, "pending")
-
- run_function = contract.functions.run()
- transaction = run_function.build_transaction(
- {
- "from": self._wallet_account.address,
- "nonce": nonce,
- "gas": 30000000,
- "gasPrice": self._blockchain.eth.gas_price,
- "chainId": self._blockchain.eth.chain_id,
- }
- )
-
- signed_txn = self._wallet_account.sign_transaction(transaction)
- tx_hash = self._blockchain.eth.send_raw_transaction(signed_txn.raw_transaction)
- tx_receipt = self._blockchain.eth.wait_for_transaction_receipt(tx_hash, timeout=INFERENCE_TX_TIMEOUT)
-
- if tx_receipt.status == 0:
- raise ContractLogicError(f"Run transaction failed. Receipt: {tx_receipt}")
-
- # Get the inference result from the contract
- result = contract.functions.getInferenceResult().call()
-
- return convert_array_to_model_output(result)
-
- def read_workflow_history(self, contract_address: str, num_results: int) -> List[ModelOutput]:
- """
- Gets historical inference results from a workflow contract.
-
- Retrieves the specified number of most recent inference results from the contract's
- storage, with the most recent result first.
-
- Args:
- contract_address (str): Address of the deployed workflow contract
- num_results (int): Number of historical results to retrieve
-
- Returns:
- List[ModelOutput]: List of historical inference results
- """
- contract = self._blockchain.eth.contract(
- address=Web3.to_checksum_address(contract_address), abi=self._get_abi("PriceHistoryInference.abi")
- )
-
- results = contract.functions.getLastInferenceResults(num_results).call()
- return [convert_array_to_model_output(result) for result in results]
-
  def _get_inference_result_from_node(self, inference_id: str, inference_mode: InferenceMode) -> Dict:
  """
  Get the inference result from node.
opengradient/defaults.py CHANGED
@@ -1,5 +1,5 @@
  # Default variables
- DEFAULT_RPC_URL = "https://eth-devnet.opengradient.ai"
+ DEFAULT_RPC_URL = "https://ogevmdevnet.opengradient.ai"
  DEFAULT_API_URL = "https://sdk-devnet.opengradient.ai"
  DEFAULT_OG_FAUCET_URL = "https://faucet.opengradient.ai/?address="
  DEFAULT_HUB_SIGNUP_URL = "https://hub.opengradient.ai/signup"
opengradient/workflow_models/utils.py CHANGED
@@ -26,7 +26,7 @@ def read_workflow_wrapper(contract_address: str, format_function: Callable[...,
  format_function (Callable): Function for formatting the result returned by read_workflow
  """
  try:
- result = og.read_workflow_result(contract_address)
+ result = og.alpha.read_workflow_result(contract_address)
 
  formatted_result = format_function(result)
  block_explorer_link = create_block_explorer_link_smart_contract(contract_address)
opengradient/x402_auth.py CHANGED
@@ -1,3 +1,10 @@
+ """
+ X402 Authentication handler for httpx streaming requests.
+
+ This module provides an httpx Auth class that handles x402 payment protocol
+ authentication for streaming responses.
+ """
+
  import httpx
  import typing
  import logging
@@ -7,7 +14,18 @@ from x402.types import x402PaymentRequiredResponse, PaymentRequirements
 
 
  class X402Auth(httpx.Auth):
- """Auth class for handling x402 payment requirements."""
+ """
+ httpx Auth handler for x402 payment protocol.
+
+ This class implements the httpx Auth interface to handle 402 Payment Required
+ responses by automatically creating and attaching payment headers.
+
+ Example:
+ async with httpx.AsyncClient(auth=X402Auth(account=wallet_account)) as client:
+ response = await client.get("https://api.example.com/paid-resource")
+ """
+
+ requires_response_body = True
 
  def __init__(
  self,
@@ -24,16 +42,36 @@ class X402Auth(httpx.Auth):
  PaymentRequirements,
  ]
  ] = None,
+ network_filter: typing.Optional[str] = None,
  ):
+ """
+ Initialize X402Auth with an Ethereum account for signing payments.
+
+ Args:
+ account: eth_account LocalAccount instance for signing payments
+ max_value: Optional maximum allowed payment amount in base units
+ network_filter: Optional network filter for selecting payment requirements
+ scheme_filter: Optional scheme filter for selecting payment requirements
+ """
  self.x402_client = x402Client(
  account,
  max_value=max_value,
  payment_requirements_selector=payment_requirements_selector, # type: ignore
  )
+ self.network_filter = network_filter
 
  async def async_auth_flow(
  self, request: httpx.Request
  ) -> typing.AsyncGenerator[httpx.Request, httpx.Response]:
+ """
+ Handle authentication flow for x402 payment protocol.
+
+ Args:
+ request: httpx Request object to be authenticated
+
+ Yields:
+ httpx Request object with authentication headers attached
+ """
  response = yield request
 
  if response.status_code == 402:
@@ -44,7 +82,8 @@ class X402Auth(httpx.Auth):
  payment_response = x402PaymentRequiredResponse(**data)
 
  selected_requirements = self.x402_client.select_payment_requirements(
- payment_response.accepts
+ payment_response.accepts,
+ self.network_filter,
  )
 
  payment_header = self.x402_client.create_payment_header(
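The new `network_filter` argument is threaded through to `select_payment_requirements`, matching how `client.py` now constructs the auth handler with `DEFAULT_NETWORK_FILTER`. A hedged sketch of standalone use, assuming a funded `eth_account` wallet and a placeholder key and URL:

```python
import asyncio

import httpx
from eth_account import Account

from opengradient.defaults import DEFAULT_NETWORK_FILTER
from opengradient.x402_auth import X402Auth


async def main() -> None:
    wallet = Account.from_key("<private_key>")  # placeholder private key
    auth = X402Auth(account=wallet, network_filter=DEFAULT_NETWORK_FILTER)

    # On a 402 Payment Required response, X402Auth selects the payment
    # requirements matching the network filter and creates a payment header
    # for the retried request.
    async with httpx.AsyncClient(auth=auth) as client:
        response = await client.get("https://api.example.com/paid-resource")  # placeholder URL
        print(response.status_code)


asyncio.run(main())
```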
opengradient-0.5.9.dist-info/METADATA → opengradient-0.5.11.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: opengradient
- Version: 0.5.9
+ Version: 0.5.11
  Summary: Python SDK for OpenGradient decentralized model management & inference services
  Author-email: OpenGradient <kyle@vannalabs.ai>
  License-Expression: MIT
@@ -35,6 +35,7 @@ A Python SDK for decentralized model management and inference services on the Op
  - Model management and versioning
  - Decentralized model inference
  - Support for LLM inference with various models
+ - **Trusted Execution Environment (TEE) inference** with cryptographic attestation
  - End-to-end verified AI execution
  - Command-line interface (CLI) for direct access
 
@@ -46,7 +47,6 @@ Browse and discover AI models on our [Model Hub](https://hub.opengradient.ai/).
  - Direct integration with the SDK
 
  ## Installation
-
  ```bash
  pip install opengradient
  ```
@@ -62,7 +62,6 @@ You'll need two accounts to use the SDK:
  - **OpenGradient account**: Use an existing Ethereum-compatible wallet or create a new one via SDK
 
  The easiest way to set up your accounts is through our configuration wizard:
-
  ```bash
  opengradient config init
  ```
@@ -73,7 +72,6 @@ This wizard will:
  - Direct you to our Test Faucet for devnet tokens
 
  ### 2. Initialize the SDK
-
  ```python
  import opengradient as og
  og.init(private_key="<private_key>", email="<email>", password="<password>")
@@ -82,8 +80,6 @@ og.init(private_key="<private_key>", email="<email>", password="<password>")
  ### 3. Basic Usage
 
  Browse available models on our [Model Hub](https://hub.opengradient.ai/) or create and upload your own:
-
-
  ```python
  # Create and upload a model
  og.create_model(
@@ -101,20 +97,41 @@ result = og.infer(
  )
  ```
 
- ### 4. Examples
+ ### 4. TEE (Trusted Execution Environment) Inference
+
+ OpenGradient supports secure, verifiable inference through TEE for leading LLM providers. Access models from OpenAI, Anthropic, Google, and xAI with cryptographic attestation:
+ ```python
+ from opengradient import TEE_LLM
+
+ # Use TEE-enabled models for verifiable AI execution
+ result = og.infer(
+ model_cid=TEE_LLM.CLAUDE_3_7_SONNET, # or any other TEE_LLM model
+ model_inputs={"prompt": "Your prompt here"},
+ inference_mode=og.InferenceMode.TEE
+ )
+ ```
+
+ **Available TEE Models:**
+ The SDK includes models from multiple providers accessible via the `TEE_LLM` enum:
+ - **OpenAI**: GPT-4.1, GPT-4o, o4-mini
+ - **Anthropic**: Claude 3.7 Sonnet, Claude 3.5 Haiku, Claude 4.0 Sonnet
+ - **Google**: Gemini 2.5 Flash, Gemini 2.5 Pro, and more
+ - **xAI**: Grok 3 Beta, Grok 4.1 Fast, and other Grok variants
+
+ For the complete list of available models, check the `TEE_LLM` enum in your IDE autocomplete or see the [API documentation](https://docs.opengradient.ai/).
+
+ ### 5. Examples
 
  See code examples under [examples](./examples).
 
  ## CLI Usage
 
  The SDK includes a command-line interface for quick operations. First, verify your configuration:
-
  ```bash
  opengradient config show
  ```
 
  Run a test inference:
-
  ```bash
  opengradient infer -m QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ \
  --input '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10}'
@@ -124,7 +141,9 @@ opengradient infer -m QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ \
 
  1. **Off-chain Applications**: Use OpenGradient as a decentralized alternative to centralized AI providers like HuggingFace and OpenAI.
 
- 2. **Model Development**: Manage models on the Model Hub and integrate directly into your development workflow.
+ 2. **Verifiable AI Execution**: Leverage TEE inference for cryptographically attested AI outputs, enabling trustless AI applications.
+
+ 3. **Model Development**: Manage models on the Model Hub and integrate directly into your development workflow.
 
  ## Documentation
 
@@ -140,4 +159,4 @@ If you use [Claude Code](https://claude.ai/code), copy [docs/CLAUDE_SDK_USERS.md
 
  - Run `opengradient --help` for CLI command reference
  - Visit our [documentation](https://docs.opengradient.ai/) for detailed guides
- - Join our [community](https://.opengradient.ai/) for support
+ - Join our [community](https://opengradient.ai/) for support
opengradient-0.5.9.dist-info/RECORD → opengradient-0.5.11.dist-info/RECORD
@@ -1,18 +1,19 @@
- opengradient/__init__.py,sha256=1PSbDRGe4ft_0FYoPS3XpeajnRPOTkmx8aZZxcOeztQ,13455
+ opengradient/__init__.py,sha256=hRGex2VGwAQ-lqBNphW93D4R3wPHQpQrTkwgGLe2MoA,10255
  opengradient/account.py,sha256=5wrYpws_1lozjOFjLCTHtxgoxK-LmObDAaVy9eDcJY4,1145
+ opengradient/alpha.py,sha256=WAtL1GGbEpoeLO89rOMd8-YAgAFJYM1UJlICm6YGsPs,15195
  opengradient/cli.py,sha256=pfgyLfD1MIDifKmGLFsJqBlgvqIcnsIh3zzg7PaIeH4,33670
- opengradient/client.py,sha256=KDkFxcZ-vGyriFW-ydWTnitgV6rYfxtnNzchWca-8u8,74009
- opengradient/defaults.py,sha256=YOtFDq8HiwEkgMXlV4Zf3YgkopfKUkkx0CpgNuY_Mxk,796
+ opengradient/client.py,sha256=KQZI1WZaMqyIHRWFH4HA1fFd1JtSdkrUYa7RX_bIwJA,65289
+ opengradient/defaults.py,sha256=8faLPwvp_BQdErY_SEjBzvmGVuOBdZ2zKcoryD8SCXk,797
  opengradient/exceptions.py,sha256=88tfegboGtlehQcwhxsl6ZzhLJWZWlkf_bkHTiCtXpo,3391
  opengradient/types.py,sha256=bADakUM6WwdMORGC5HvQvWCezNwIlVc7l0zodPapbhQ,14622
  opengradient/utils.py,sha256=ZUq4OBIml2vsC0tRqus4Zwb_e3g4woo00apByrafuVw,8058
- opengradient/x402_auth.py,sha256=Jmj-40OybugOXIt_qHzN1qy4x7U3QuM1MKNmPzoEKwc,1920
+ opengradient/x402_auth.py,sha256=fuSsgFmvrlZ3X6uAlUnqtkQ485NECxHfBr3IFADpKAg,3309
  opengradient/abi/InferencePrecompile.abi,sha256=reepTHg6Q01UrFP0Gexc-JayplsvOLPfG7jrEZ-cV28,10197
  opengradient/abi/PriceHistoryInference.abi,sha256=ZB3fZdx1kaFlp2wt1vTbTZZG1k8HPvmNtkG5Q8Bnajw,5098
  opengradient/abi/WorkflowScheduler.abi,sha256=yEGs76qO4S1z980KL5hBdfyXiJ6k-kERcB1O_o73AEU,416
  opengradient/abi/inference.abi,sha256=MR5u9npZ-Yx2EqRW17_M-UnGgFF3mMEMepOwaZ-Bkgc,7040
  opengradient/alphasense/__init__.py,sha256=Ah6IpoPTb6UkY7ImOWLJs3tjlxDJx6vZVR7p5IwP_Ks,292
- opengradient/alphasense/read_workflow_tool.py,sha256=ojCf-eMO6e0ib77nqjgEJtXxTxdLZmc_-MvyRemYFY0,3216
+ opengradient/alphasense/read_workflow_tool.py,sha256=Y_MKRpBR1dNvCu9gNxcSnT3E_IqxZUAkE-_XziJ0BVY,3222
  opengradient/alphasense/run_model_tool.py,sha256=wlDqXVHa1xpqQy_hmht_wWegxtqdYgYBXNbRP3qbfwM,6945
  opengradient/alphasense/types.py,sha256=uxk4JQKbaS2cM3ZiKpdHQb234OJ5ylprNR5vi01QFzA,220
  opengradient/bin/PriceHistoryInference.bin,sha256=nU2FZpGHIKBZ7NSK9Sr-p9lr-nXja_40ISPN9yckDq8,41276
@@ -26,11 +27,11 @@ opengradient/proto/infer_pb2_grpc.py,sha256=q42_eZ7OZCMTXdWocYA4Ka3B0c3B74dOhfqd
  opengradient/workflow_models/__init__.py,sha256=pAGRaMZOXmuPnqG5gAQB1FeFLYxQ4F4bmYdrqXIez7c,826
  opengradient/workflow_models/constants.py,sha256=viIkb_LGcfVprqQNaA80gBTj6cfYam0r6b6MHW9XGFA,740
  opengradient/workflow_models/types.py,sha256=Z22hF6c8Y4D2GlzVEIBODGwsqSjSrQvUcpZ7R-mIJdI,409
- opengradient/workflow_models/utils.py,sha256=ySfpuiOBqLTlfto6ZxZf2vc7K6RGIja0l4eaVm5AOzY,1503
+ opengradient/workflow_models/utils.py,sha256=aL2-Hp5J5qlJft6-wx4GnZNOXZ1vjYaTF1uEgYavWdI,1509
  opengradient/workflow_models/workflow_models.py,sha256=d4C_gs39DAfy4cdY9Ee6GMXpPfzwvKFpmxzK1A7LNgU,3900
- opengradient-0.5.9.dist-info/licenses/LICENSE,sha256=xEcvQ3AxZOtDkrqkys2Mm6Y9diEnaSeQRKvxi-JGnNA,1069
- opengradient-0.5.9.dist-info/METADATA,sha256=kSTyBctZ-r4h3ilq7DRgxvxQhYO4ejUl3KbwcDX1Ygs,4215
- opengradient-0.5.9.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- opengradient-0.5.9.dist-info/entry_points.txt,sha256=yUKTaJx8RXnybkob0J62wVBiCp_1agVbgw9uzsmaeJc,54
- opengradient-0.5.9.dist-info/top_level.txt,sha256=oC1zimVLa2Yi1LQz8c7x-0IQm92milb5ax8gHBHwDqU,13
- opengradient-0.5.9.dist-info/RECORD,,
+ opengradient-0.5.11.dist-info/licenses/LICENSE,sha256=xEcvQ3AxZOtDkrqkys2Mm6Y9diEnaSeQRKvxi-JGnNA,1069
+ opengradient-0.5.11.dist-info/METADATA,sha256=qJZT1jnWcmpeBQsLN1YrmPRCfMzD4nk7saMguFYNmyc,5437
+ opengradient-0.5.11.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ opengradient-0.5.11.dist-info/entry_points.txt,sha256=yUKTaJx8RXnybkob0J62wVBiCp_1agVbgw9uzsmaeJc,54
+ opengradient-0.5.11.dist-info/top_level.txt,sha256=oC1zimVLa2Yi1LQz8c7x-0IQm92milb5ax8gHBHwDqU,13
+ opengradient-0.5.11.dist-info/RECORD,,