opengradient 0.5.8.tar.gz → 0.5.10.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. {opengradient-0.5.8/src/opengradient.egg-info → opengradient-0.5.10}/PKG-INFO +31 -12
  2. {opengradient-0.5.8 → opengradient-0.5.10}/README.md +29 -10
  3. {opengradient-0.5.8 → opengradient-0.5.10}/pyproject.toml +2 -2
  4. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/__init__.py +12 -94
  5. opengradient-0.5.10/src/opengradient/alpha.py +375 -0
  6. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/alphasense/read_workflow_tool.py +1 -1
  7. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/cli.py +89 -4
  8. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/client.py +337 -244
  9. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/defaults.py +4 -2
  10. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/types.py +191 -1
  11. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/workflow_models/utils.py +1 -1
  12. opengradient-0.5.10/src/opengradient/x402_auth.py +102 -0
  13. {opengradient-0.5.8 → opengradient-0.5.10/src/opengradient.egg-info}/PKG-INFO +31 -12
  14. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient.egg-info/SOURCES.txt +2 -0
  15. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient.egg-info/requires.txt +1 -1
  16. {opengradient-0.5.8 → opengradient-0.5.10}/LICENSE +0 -0
  17. {opengradient-0.5.8 → opengradient-0.5.10}/setup.cfg +0 -0
  18. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/abi/InferencePrecompile.abi +0 -0
  19. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/abi/PriceHistoryInference.abi +0 -0
  20. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/abi/WorkflowScheduler.abi +0 -0
  21. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/abi/inference.abi +0 -0
  22. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/account.py +0 -0
  23. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/alphasense/__init__.py +0 -0
  24. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/alphasense/run_model_tool.py +0 -0
  25. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/alphasense/types.py +0 -0
  26. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/bin/PriceHistoryInference.bin +0 -0
  27. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/exceptions.py +0 -0
  28. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/llm/__init__.py +0 -0
  29. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/llm/og_langchain.py +0 -0
  30. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/llm/og_openai.py +0 -0
  31. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/proto/__init__.py +0 -0
  32. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/proto/infer.proto +0 -0
  33. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/proto/infer_pb2.py +0 -0
  34. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/proto/infer_pb2_grpc.py +0 -0
  35. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/utils.py +0 -0
  36. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/workflow_models/__init__.py +0 -0
  37. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/workflow_models/constants.py +0 -0
  38. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/workflow_models/types.py +0 -0
  39. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/workflow_models/workflow_models.py +0 -0
  40. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient.egg-info/dependency_links.txt +0 -0
  41. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient.egg-info/entry_points.txt +0 -0
  42. {opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient.egg-info/top_level.txt +0 -0
{opengradient-0.5.8/src/opengradient.egg-info → opengradient-0.5.10}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: opengradient
- Version: 0.5.8
+ Version: 0.5.10
  Summary: Python SDK for OpenGradient decentralized model management & inference services
  Author-email: OpenGradient <kyle@vannalabs.ai>
  License-Expression: MIT
@@ -23,7 +23,7 @@ Requires-Dist: requests>=2.32.3
  Requires-Dist: langchain>=0.3.7
  Requires-Dist: openai>=1.58.1
  Requires-Dist: pydantic>=2.9.2
- Requires-Dist: og-test-x402==0.0.1
+ Requires-Dist: og-test-x402==0.0.9
  Dynamic: license-file
  
  # OpenGradient Python SDK
@@ -35,6 +35,7 @@ A Python SDK for decentralized model management and inference services on the Op
  - Model management and versioning
  - Decentralized model inference
  - Support for LLM inference with various models
+ - **Trusted Execution Environment (TEE) inference** with cryptographic attestation
  - End-to-end verified AI execution
  - Command-line interface (CLI) for direct access
  
@@ -46,7 +47,6 @@ Browse and discover AI models on our [Model Hub](https://hub.opengradient.ai/).
  - Direct integration with the SDK
  
  ## Installation
-
  ```bash
  pip install opengradient
  ```
@@ -62,7 +62,6 @@ You'll need two accounts to use the SDK:
  - **OpenGradient account**: Use an existing Ethereum-compatible wallet or create a new one via SDK
  
  The easiest way to set up your accounts is through our configuration wizard:
-
  ```bash
  opengradient config init
  ```
@@ -73,7 +72,6 @@ This wizard will:
  - Direct you to our Test Faucet for devnet tokens
  
  ### 2. Initialize the SDK
-
  ```python
  import opengradient as og
  og.init(private_key="<private_key>", email="<email>", password="<password>")
@@ -82,8 +80,6 @@ og.init(private_key="<private_key>", email="<email>", password="<password>")
  ### 3. Basic Usage
  
  Browse available models on our [Model Hub](https://hub.opengradient.ai/) or create and upload your own:
-
-
  ```python
  # Create and upload a model
  og.create_model(
@@ -101,20 +97,41 @@ result = og.infer(
  )
  ```
  
- ### 4. Examples
+ ### 4. TEE (Trusted Execution Environment) Inference
+
+ OpenGradient supports secure, verifiable inference through TEE for leading LLM providers. Access models from OpenAI, Anthropic, Google, and xAI with cryptographic attestation:
+ ```python
+ from opengradient import TEE_LLM
+
+ # Use TEE-enabled models for verifiable AI execution
+ result = og.infer(
+     model_cid=TEE_LLM.CLAUDE_3_7_SONNET,  # or any other TEE_LLM model
+     model_inputs={"prompt": "Your prompt here"},
+     inference_mode=og.InferenceMode.TEE
+ )
+ ```
+
+ **Available TEE Models:**
+ The SDK includes models from multiple providers accessible via the `TEE_LLM` enum:
+ - **OpenAI**: GPT-4.1, GPT-4o, o4-mini
+ - **Anthropic**: Claude 3.7 Sonnet, Claude 3.5 Haiku, Claude 4.0 Sonnet
+ - **Google**: Gemini 2.5 Flash, Gemini 2.5 Pro, and more
+ - **xAI**: Grok 3 Beta, Grok 4.1 Fast, and other Grok variants
+
+ For the complete list of available models, check the `TEE_LLM` enum in your IDE autocomplete or see the [API documentation](https://docs.opengradient.ai/).
+
+ ### 5. Examples
  
  See code examples under [examples](./examples).
  
  ## CLI Usage
  
  The SDK includes a command-line interface for quick operations. First, verify your configuration:
-
  ```bash
  opengradient config show
  ```
  
  Run a test inference:
-
  ```bash
  opengradient infer -m QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ \
      --input '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10}'
@@ -124,7 +141,9 @@ opengradient infer -m QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ \
  
  1. **Off-chain Applications**: Use OpenGradient as a decentralized alternative to centralized AI providers like HuggingFace and OpenAI.
  
- 2. **Model Development**: Manage models on the Model Hub and integrate directly into your development workflow.
+ 2. **Verifiable AI Execution**: Leverage TEE inference for cryptographically attested AI outputs, enabling trustless AI applications.
+
+ 3. **Model Development**: Manage models on the Model Hub and integrate directly into your development workflow.
  
  ## Documentation
  
@@ -140,4 +159,4 @@ If you use [Claude Code](https://claude.ai/code), copy [docs/CLAUDE_SDK_USERS.md
  - Run `opengradient --help` for CLI command reference
  - Visit our [documentation](https://docs.opengradient.ai/) for detailed guides
- - Join our [community](https://.opengradient.ai/) for support
+ - Join our [community](https://opengradient.ai/) for support
{opengradient-0.5.8 → opengradient-0.5.10}/README.md

@@ -7,6 +7,7 @@ A Python SDK for decentralized model management and inference services on the Op
  - Model management and versioning
  - Decentralized model inference
  - Support for LLM inference with various models
+ - **Trusted Execution Environment (TEE) inference** with cryptographic attestation
  - End-to-end verified AI execution
  - Command-line interface (CLI) for direct access
  
@@ -18,7 +19,6 @@ Browse and discover AI models on our [Model Hub](https://hub.opengradient.ai/).
  - Direct integration with the SDK
  
  ## Installation
-
  ```bash
  pip install opengradient
  ```
@@ -34,7 +34,6 @@ You'll need two accounts to use the SDK:
  - **OpenGradient account**: Use an existing Ethereum-compatible wallet or create a new one via SDK
  
  The easiest way to set up your accounts is through our configuration wizard:
-
  ```bash
  opengradient config init
  ```
@@ -45,7 +44,6 @@ This wizard will:
  - Direct you to our Test Faucet for devnet tokens
  
  ### 2. Initialize the SDK
-
  ```python
  import opengradient as og
  og.init(private_key="<private_key>", email="<email>", password="<password>")
@@ -54,8 +52,6 @@ og.init(private_key="<private_key>", email="<email>", password="<password>")
  ### 3. Basic Usage
  
  Browse available models on our [Model Hub](https://hub.opengradient.ai/) or create and upload your own:
-
-
  ```python
  # Create and upload a model
  og.create_model(
@@ -73,20 +69,41 @@ result = og.infer(
  )
  ```
  
- ### 4. Examples
+ ### 4. TEE (Trusted Execution Environment) Inference
+
+ OpenGradient supports secure, verifiable inference through TEE for leading LLM providers. Access models from OpenAI, Anthropic, Google, and xAI with cryptographic attestation:
+ ```python
+ from opengradient import TEE_LLM
+
+ # Use TEE-enabled models for verifiable AI execution
+ result = og.infer(
+     model_cid=TEE_LLM.CLAUDE_3_7_SONNET,  # or any other TEE_LLM model
+     model_inputs={"prompt": "Your prompt here"},
+     inference_mode=og.InferenceMode.TEE
+ )
+ ```
+
+ **Available TEE Models:**
+ The SDK includes models from multiple providers accessible via the `TEE_LLM` enum:
+ - **OpenAI**: GPT-4.1, GPT-4o, o4-mini
+ - **Anthropic**: Claude 3.7 Sonnet, Claude 3.5 Haiku, Claude 4.0 Sonnet
+ - **Google**: Gemini 2.5 Flash, Gemini 2.5 Pro, and more
+ - **xAI**: Grok 3 Beta, Grok 4.1 Fast, and other Grok variants
+
+ For the complete list of available models, check the `TEE_LLM` enum in your IDE autocomplete or see the [API documentation](https://docs.opengradient.ai/).
+
+ ### 5. Examples
  
  See code examples under [examples](./examples).
  
  ## CLI Usage
  
  The SDK includes a command-line interface for quick operations. First, verify your configuration:
-
  ```bash
  opengradient config show
  ```
  
  Run a test inference:
-
  ```bash
  opengradient infer -m QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ \
      --input '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10}'
@@ -96,7 +113,9 @@ opengradient infer -m QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ \
  
  1. **Off-chain Applications**: Use OpenGradient as a decentralized alternative to centralized AI providers like HuggingFace and OpenAI.
  
- 2. **Model Development**: Manage models on the Model Hub and integrate directly into your development workflow.
+ 2. **Verifiable AI Execution**: Leverage TEE inference for cryptographically attested AI outputs, enabling trustless AI applications.
+
+ 3. **Model Development**: Manage models on the Model Hub and integrate directly into your development workflow.
  
  ## Documentation
  
@@ -112,4 +131,4 @@ If you use [Claude Code](https://claude.ai/code), copy [docs/CLAUDE_SDK_USERS.md
  - Run `opengradient --help` for CLI command reference
  - Visit our [documentation](https://docs.opengradient.ai/) for detailed guides
- - Join our [community](https://.opengradient.ai/) for support
+ - Join our [community](https://opengradient.ai/) for support
{opengradient-0.5.8 → opengradient-0.5.10}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
  
  [project]
  name = "opengradient"
- version = "0.5.8"
+ version = "0.5.10"
  description = "Python SDK for OpenGradient decentralized model management & inference services"
  authors = [{name = "OpenGradient", email = "kyle@vannalabs.ai"}]
  readme = "README.md"
@@ -29,7 +29,7 @@ dependencies = [
      "langchain>=0.3.7",
      "openai>=1.58.1",
      "pydantic>=2.9.2",
-     "og-test-x402==0.0.1",
+     "og-test-x402==0.0.9",
  ]
  
  [project.scripts]
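
The `pyproject.toml` hunks mirror the `PKG-INFO` metadata changes: the package version moves to 0.5.10 and the `og-test-x402` pin moves from the exact version 0.0.1 to 0.0.9. A minimal sketch for confirming both resolve after upgrading (assumes you have already run `pip install --upgrade opengradient==0.5.10`; uses only the standard library):

```python
from importlib.metadata import version

# Both checks reflect the pins in the diff above.
print(version("opengradient"))   # expected: 0.5.10
print(version("og-test-x402"))   # expected: 0.0.9, per the exact (==) pin
```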
{opengradient-0.5.8 → opengradient-0.5.10}/src/opengradient/__init__.py

@@ -17,14 +17,19 @@ from .types import (
      InferenceResult,
      LlmInferenceMode,
      TextGenerationOutput,
+     TextGenerationStream,
      ModelOutput,
      ModelRepository,
      FileUploadResult,
      x402SettlementMode,
  )
+ from .alpha import _AlphaNamespace
  
  from . import llm, alphasense
  
+ # Module-level alpha namespace for workflow/ML execution features (Alpha Testnet only)
+ alpha = _AlphaNamespace()
+
  _client = None
  
  
@@ -225,7 +230,8 @@ def llm_chat(
      tool_choice: Optional[str] = None,
      max_retries: Optional[int] = None,
      x402_settlement_mode: Optional[x402SettlementMode] = x402SettlementMode.SETTLE_BATCH,
- ) -> TextGenerationOutput:
+     stream: Optional[bool] = False,
+ ) -> Union[TextGenerationOutput, TextGenerationStream]:
      """Have a chat conversation with an LLM.
  
      Args:
@@ -239,9 +245,10 @@ def llm_chat(
          tool_choice: Optional specific tool to use
          max_retries: Maximum number of retries for failed transactions
          x402_settlement_mode: Settlement modes for x402 payment protocol transactions (enum x402SettlementMode)
+         stream: Optional boolean to enable streaming
  
      Returns:
-         TextGenerationOutput
+         TextGenerationOutput or TextGenerationStream
  
      Raises:
          RuntimeError: If SDK is not initialized
@@ -258,7 +265,8 @@ def llm_chat(
          tools=tools,
          tool_choice=tool_choice,
          max_retries=max_retries,
-         x402_settlement_mode=x402_settlement_mode
+         x402_settlement_mode=x402_settlement_mode,
+         stream=stream,
      )
  
 
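The three hunks above add a `stream` flag to the module-level `llm_chat` and forward it to the client, so the return type becomes a union. A minimal sketch of both call shapes; the `model_cid`/`messages` argument names, the model choice, and the chunk handling are illustrative assumptions, since the hunks show only the tail of the signature and `TextGenerationStream`'s interface is not part of this diff:

```python
import opengradient as og

og.init(private_key="<private_key>", email="<email>", password="<password>")

messages = [{"role": "user", "content": "Explain TEE attestation in one sentence."}]

# Default path, unchanged from 0.5.8: a single TextGenerationOutput.
output = og.llm_chat(model_cid=og.TEE_LLM.CLAUDE_3_7_SONNET, messages=messages)

# New in 0.5.10: stream=True returns a TextGenerationStream instead.
stream = og.llm_chat(
    model_cid=og.TEE_LLM.CLAUDE_3_7_SONNET,
    messages=messages,
    stream=True,
)
for chunk in stream:  # assuming the stream is iterable, yielding incremental text chunks
    print(chunk, end="", flush=True)
```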
@@ -280,93 +288,6 @@ def list_files(model_name: str, version: str) -> List[Dict]:
      return _client.list_files(model_name, version)
  
  
- def new_workflow(
-     model_cid: str,
-     input_query: HistoricalInputQuery,
-     input_tensor_name: str,
-     scheduler_params: Optional[SchedulerParams] = None,
- ) -> str:
-     """
-     Deploy a new workflow contract with the specified parameters.
-
-     This function deploys a new workflow contract and optionally registers it with
-     the scheduler for automated execution. If scheduler_params is not provided,
-     the workflow will be deployed without automated execution scheduling.
-
-     Args:
-         model_cid: IPFS CID of the model
-         input_query: HistoricalInputQuery containing query parameters
-         input_tensor_name: Name of the input tensor
-         scheduler_params: Optional scheduler configuration as SchedulerParams instance
-             If not provided, the workflow will be deployed without scheduling.
-
-     Returns:
-         str: Deployed contract address. If scheduler_params was provided, the workflow
-             will be automatically executed according to the specified schedule.
-     """
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init(...) first.")
-
-     return _client.new_workflow(
-         model_cid=model_cid, input_query=input_query, input_tensor_name=input_tensor_name, scheduler_params=scheduler_params
-     )
-
-
- def read_workflow_result(contract_address: str) -> ModelOutput:
-     """
-     Reads the latest inference result from a deployed workflow contract.
-
-     This function retrieves the most recent output from a deployed model executor contract.
-     It includes built-in retry logic to handle blockchain state delays.
-
-     Args:
-         contract_address (str): Address of the deployed workflow contract
-
-     Returns:
-         Dict[str, Union[str, Dict]]: A dictionary containing:
-             - status: "success" or "error"
-             - result: The model output data if successful
-             - error: Error message if status is "error"
-
-     Raises:
-         RuntimeError: If OpenGradient client is not initialized
-     """
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.read_workflow_result(contract_address)
-
-
- def run_workflow(contract_address: str) -> ModelOutput:
-     """
-     Executes the workflow by calling run() on the contract to pull latest data and perform inference.
-
-     Args:
-         contract_address (str): Address of the deployed workflow contract
-
-     Returns:
-         Dict[str, Union[str, Dict]]: Status of the run operation
-     """
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.run_workflow(contract_address)
-
-
- def read_workflow_history(contract_address: str, num_results: int) -> List[ModelOutput]:
-     """
-     Gets historical inference results from a workflow contract.
-
-     Args:
-         contract_address (str): Address of the deployed workflow contract
-         num_results (int): Number of historical results to retrieve
-
-     Returns:
-         List[Dict]: List of historical inference results
-     """
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.read_workflow_history(contract_address, num_results)
-
-
  __all__ = [
      "list_files",
      "login",
@@ -379,10 +300,7 @@ __all__ = [
      "init",
      "LLM",
      "TEE_LLM",
-     "new_workflow",
-     "read_workflow_result",
-     "run_workflow",
-     "read_workflow_history",
+     "alpha",
      "InferenceMode",
      "LlmInferenceMode",
      "HistoricalInputQuery",