opengradient 0.2.8__tar.gz → 0.3.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. {opengradient-0.2.8/src/opengradient.egg-info → opengradient-0.3.1}/PKG-INFO +40 -19
  2. opengradient-0.3.1/README.md +93 -0
  3. {opengradient-0.2.8 → opengradient-0.3.1}/pyproject.toml +2 -2
  4. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient/__init__.py +22 -9
  5. opengradient-0.3.1/src/opengradient/abi/llm.abi +1 -0
  6. opengradient-0.3.1/src/opengradient/account.py +37 -0
  7. opengradient-0.3.1/src/opengradient/cli.py +390 -0
  8. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient/client.py +51 -6
  9. opengradient-0.3.1/src/opengradient/defaults.py +6 -0
  10. {opengradient-0.2.8 → opengradient-0.3.1/src/opengradient.egg-info}/PKG-INFO +40 -19
  11. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient.egg-info/SOURCES.txt +2 -3
  12. opengradient-0.2.8/README.md +0 -72
  13. opengradient-0.2.8/src/opengradient/cli.py +0 -195
  14. opengradient-0.2.8/src/opengradient/defaults.py +0 -7
  15. opengradient-0.2.8/tests/test_api.py +0 -110
  16. opengradient-0.2.8/tests/test_exceptions.py +0 -25
  17. opengradient-0.2.8/tests/test_integration.py +0 -20
  18. {opengradient-0.2.8 → opengradient-0.3.1}/LICENSE +0 -0
  19. {opengradient-0.2.8 → opengradient-0.3.1}/setup.cfg +0 -0
  20. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient/abi/inference.abi +0 -0
  21. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient/exceptions.py +0 -0
  22. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient/types.py +0 -0
  23. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient/utils.py +0 -0
  24. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient.egg-info/dependency_links.txt +0 -0
  25. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient.egg-info/entry_points.txt +0 -0
  26. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient.egg-info/requires.txt +0 -0
  27. {opengradient-0.2.8 → opengradient-0.3.1}/src/opengradient.egg-info/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: opengradient
3
- Version: 0.2.8
4
- Summary: A Python SDK for OpenGradient inference services
3
+ Version: 0.3.1
4
+ Summary: Python SDK for OpenGradient decentralized model management & inference services
5
5
  Author-email: OpenGradient <oliver@opengradient.ai>
6
6
  License: MIT License
7
7
 
@@ -136,66 +136,85 @@ Requires-Dist: xattr==1.1.0
136
136
  Requires-Dist: yarl==1.13.1
137
137
 
138
138
  # OpenGradient Python SDK
139
-
140
- Python SDK for OpenGradient inference services.
139
+ The Python SDK for the OpenGradient platform provides decentralized model management & inference services, allowing programmatic access to our model repository and decentralized AI infrastructure.
141
140
 
142
141
  ## Installation
142
+
143
+ To install the Python SDK and CLI, run the following command:
143
144
  ```bash
144
145
  pip install opengradient
145
146
  ```
146
147
 
147
148
  ## Quick Start
149
+
150
+ To get started, run:
151
+
148
152
  ```python
149
153
  import opengradient as og
150
- og.init(private_key="x", rpc_url="y", contract_address="z")
154
+ og.init(private_key="<private_key>", email="<email>", password="<password>")
151
155
  ```
152
156
 
153
- ### Sign in with Email
157
+ The following commands show how to use the Python SDK.
158
+
159
+ ### Create a Model
154
160
  ```python
155
- og.login(email="you@opengradient.ai", password="xyz")
161
+ og.create_model(model_name="<model_name>", model_desc="<model_description>")
156
162
  ```
157
163
 
158
- ### Create a Model
164
+ ### Create a Model (with file upload)
159
165
  ```python
160
- og.create_model(model_name="test-network-model", model_desc="testing upload to sdk")
166
+ og.create_model(model_name="<model_name>", model_desc="<model_description>", model_path="<model_path>")
161
167
  ```
162
168
 
163
169
  ### Create a Version of a Model
164
170
  ```python
165
- og.create_version(model_name="test-network-model", notes="test notes")
171
+ og.create_version(model_name="<model_name>", notes="<model_notes>")
166
172
  ```
167
173
 
168
174
  ### Upload Files to a Model
169
175
  ```python
170
- og.upload(model_path="local_path_to_your_model.onnx", model_name="test-network-model", version="0.01")
176
+ og.upload(model_path="<model_path>", model_name="<model_name>", version="<version>")
171
177
  ```
172
178
 
173
- ### Run Inference
179
+ ### List Files of a Model Version
174
180
  ```python
175
- inference_mode = og.InferenceMode.VANILLA
176
- inference_cid = og.infer(model_cid, model_inputs, inference_mode)
181
+ og.list_files(model_name="<model_name>", version="<version>")
177
182
  ```
178
183
 
184
+ ### Run Inference
179
185
  ```python
180
- og.infer(model_id, inference_mode, model_input)
186
+ inference_mode = og.InferenceMode.VANILLA
187
+ og.infer(model_cid, inference_mode, model_inputs)
181
188
  ```
189
+ - inference mode can be `VANILLA`, `ZKML`, or `TEE`
190
+
182
191
 
183
192
  ## Using the CLI
184
193
 
185
- #### Creating a Model
186
194
  ```bash
187
- opengradient create_model "<model_name>" "<description>"
195
+ export OPENGRADIENT_EMAIL="<email>"
196
+ export OPENGRADIENT_PASSWORD="<password>"
197
+ ```
198
+
199
+ #### Creating a Model Repo
200
+ ```bash
201
+ opengradient create_model_repo "<model_name>" "<description>"
188
202
  ```
189
203
  - creating a model automatically initializes version `v0.01`
190
204
 
191
205
  #### Creating a Version
192
206
  ```bash
193
- opengradient create_model "<model_name>" "<description>"
207
+ opengradient create_version "<model_name>" "<notes>"
194
208
  ```
195
209
 
196
210
  #### Upload a File
197
211
  ```bash
198
- opengradient upload "path/to/model.onnx" "<model_name>" "<version>"
212
+ opengradient upload "<model_path>" "<model_name>" "<version>"
213
+ ```
214
+
215
+ #### List Files of a Model Version
216
+ ```bash
217
+ opengradient list_files "<model_name>" "<version>"
199
218
  ```
200
219
 
201
220
  #### CLI infer using string
@@ -207,3 +226,5 @@ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num
207
226
  ```bash
208
227
  opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
209
228
  ```
229
+
230
+ For more information read the OpenGradient [documentation](https://docs.opengradient.ai/).
@@ -0,0 +1,93 @@
1
+ # OpenGradient Python SDK
2
+ The Python SDK for the OpenGradient platform provides decentralized model management & inference services, allowing programmatic access to our model repository and decentralized AI infrastructure.
3
+
4
+ ## Installation
5
+
6
+ To install the Python SDK and CLI, run the following command:
7
+ ```bash
8
+ pip install opengradient
9
+ ```
10
+
11
+ ## Quick Start
12
+
13
+ To get started, run:
14
+
15
+ ```python
16
+ import opengradient as og
17
+ og.init(private_key="<private_key>", email="<email>", password="<password>")
18
+ ```
19
+
20
+ The following commands show how to use the Python SDK.
21
+
22
+ ### Create a Model
23
+ ```python
24
+ og.create_model(model_name="<model_name>", model_desc="<model_description>")
25
+ ```
26
+
27
+ ### Create a Model (with file upload)
28
+ ```python
29
+ og.create_model(model_name="<model_name>", model_desc="<model_description>", model_path="<model_path>")
30
+ ```
31
+
32
+ ### Create a Version of a Model
33
+ ```python
34
+ og.create_version(model_name="<model_name>", notes="<model_notes>")
35
+ ```
36
+
37
+ ### Upload Files to a Model
38
+ ```python
39
+ og.upload(model_path="<model_path>", model_name="<model_name>", version="<version>")
40
+ ```
41
+
42
+ ### List Files of a Model Version
43
+ ```python
44
+ og.list_files(model_name="<model_name>", version="<version>")
45
+ ```
46
+
47
+ ### Run Inference
48
+ ```python
49
+ inference_mode = og.InferenceMode.VANILLA
50
+ og.infer(model_cid, inference_mode, model_inputs)
51
+ ```
52
+ - inference mode can be `VANILLA`, `ZKML`, or `TEE`
53
+
54
+
55
+ ## Using the CLI
56
+
57
+ ```bash
58
+ export OPENGRADIENT_EMAIL="<email>"
59
+ export OPENGRADIENT_PASSWORD="<password>"
60
+ ```
61
+
62
+ #### Creating a Model Repo
63
+ ```bash
64
+ opengradient create_model_repo "<model_name>" "<description>"
65
+ ```
66
+ - creating a model automatically initializes version `v0.01`
67
+
68
+ #### Creating a Version
69
+ ```bash
70
+ opengradient create_version "<model_name>" "<notes>"
71
+ ```
72
+
73
+ #### Upload a File
74
+ ```bash
75
+ opengradient upload "<model_path>" "<model_name>" "<version>"
76
+ ```
77
+
78
+ #### List Files of a Model Version
79
+ ```bash
80
+ opengradient list_files "<model_name>" "<version>"
81
+ ```
82
+
83
+ #### CLI infer using string
84
+ ```bash
85
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10, "str_input1":["hello", "ONNX"], "str_input2":" world"}'
86
+ ```
87
+
88
+ #### CLI infer using file path input
89
+ ```bash
90
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
91
+ ```
92
+
93
+ For more information read the OpenGradient [documentation](https://docs.opengradient.ai/).
@@ -4,8 +4,8 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "opengradient"
7
- version = "0.2.8"
8
- description = "A Python SDK for OpenGradient inference services"
7
+ version = "0.3.1"
8
+ description = "Python SDK for OpenGradient decentralized model management & inference services"
9
9
  authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
10
10
  license = {file = "LICENSE"}
11
11
  readme = "README.md"
@@ -1,16 +1,16 @@
1
1
  from .client import Client
2
2
  from .defaults import *
3
3
  from .types import InferenceMode
4
-
5
- __version__ = "0.2.8"
4
+ from typing import List, Dict
5
+ __version__ = "0.3.1"
6
6
 
7
7
  _client = None
8
8
 
9
- def init(private_key=DEFAULT_PRIVATE_KEY,
9
+ def init(email: str,
10
+ password: str,
11
+ private_key: str,
10
12
  rpc_url=DEFAULT_RPC_URL,
11
- contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS,
12
- email=DEFAULT_HUB_EMAIL,
13
- password=DEFAULT_HUB_PASSWORD):
13
+ contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS):
14
14
  global _client
15
15
  _client = Client(private_key=private_key, rpc_url=rpc_url, contract_address=contract_address, email=email, password=password)
16
16
 
@@ -19,10 +19,18 @@ def upload(model_path, model_name, version):
19
19
  raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
20
20
  return _client.upload(model_path, model_name, version)
21
21
 
22
- def create_model(model_name, model_desc):
22
+ def create_model(model_name: str, model_desc: str, model_path: str = None):
23
23
  if _client is None:
24
24
  raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
25
- return _client.create_model(model_name, model_desc)
25
+
26
+ result = _client.create_model(model_name, model_desc)
27
+
28
+ if model_path:
29
+ version = "0.01"
30
+ upload_result = _client.upload(model_path, model_name, version)
31
+ result["upload"] = upload_result
32
+
33
+ return result
26
34
 
27
35
  def create_version(model_name, notes=None, is_major=False):
28
36
  if _client is None:
@@ -37,4 +45,9 @@ def infer(model_cid, inference_mode, model_input):
37
45
  def login(email: str, password: str):
38
46
  if _client is None:
39
47
  raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
40
- return _client.login(email, password)
48
+ return _client.login(email, password)
49
+
50
+ def list_files(model_name: str, version: str) -> List[Dict]:
51
+ if _client is None:
52
+ raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
53
+ return _client.list_files(model_name, version)
@@ -0,0 +1 @@
1
+ [{"anonymous":false,"inputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"indexed":false,"internalType":"struct ModelOutput","name":"output","type":"tuple"}],"name":"InferenceResult","type":"event"},{"anonymous":false,"inputs":[{"components":[{"internalType":"string","name":"answer","type":"string"}],"indexed":false,"internalType":"struct LlmResponse","name":"response","type":"tuple"}],"name":"LLMResult","type":"event"},{"inputs":[{"internalType":"string","name":"modelId","type":"string"},{"internalType":"enum ModelInferenceMode","name":"inferenceMode","type":"uint8"},{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"}],"internalType":"struct 
ModelInput","name":"modelInput","type":"tuple"}],"name":"run","outputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"internalType":"struct ModelOutput","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"components":[{"internalType":"enum LlmInferenceMode","name":"mode","type":"uint8"},{"internalType":"string","name":"modelCID","type":"string"},{"internalType":"string","name":"prompt","type":"string"},{"internalType":"uint32","name":"max_tokens","type":"uint32"},{"internalType":"string[]","name":"stop_sequence","type":"string[]"},{"internalType":"uint32","name":"temperature","type":"uint32"}],"internalType":"struct LlmInferenceRequest","name":"request","type":"tuple"}],"name":"runLLM","outputs":[{"components":[{"internalType":"string","name":"answer","type":"string"}],"internalType":"struct LlmResponse","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"}]
@@ -0,0 +1,37 @@
1
+ from eth_account import Account
2
+ import secrets
3
+ from collections import namedtuple
4
+ import os
5
+ import hashlib
6
+
7
+ EthAccount = namedtuple('EthAccount', ['address', 'private_key'])
8
+
9
+
10
+ def generate_eth_account() -> EthAccount:
11
+ user_seed = _get_user_random_seed()
12
+ private_key = _generate_secure_private_key(user_seed)
13
+
14
+ # derive account
15
+ account = Account.from_key(private_key)
16
+
17
+ # get the public key (address)
18
+ public_key = account.address
19
+
20
+ return EthAccount(address=public_key, private_key=private_key)
21
+
22
+ def _get_user_random_seed():
23
+ print("Please type a random string of characters (the longer and more random, the better):")
24
+ print("> ", end="") # Add a '>' prompt on a new line
25
+ return input().encode()
26
+
27
+ def _generate_secure_private_key(user_input):
28
+ # Combine multiple sources of entropy
29
+ system_random = secrets.token_bytes(32)
30
+ os_urandom = os.urandom(32)
31
+ timestamp = str(secrets.randbits(256)).encode()
32
+
33
+ # Add user input to the entropy sources
34
+ combined = system_random + os_urandom + timestamp + user_input
35
+
36
+ # Hash the combined entropy
37
+ return hashlib.sha256(combined).hexdigest()