opengradient 0.2.7__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. {opengradient-0.2.7/src/opengradient.egg-info → opengradient-0.3.0}/PKG-INFO +58 -19
  2. opengradient-0.3.0/README.md +83 -0
  3. {opengradient-0.2.7 → opengradient-0.3.0}/pyproject.toml +2 -2
  4. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient/__init__.py +25 -12
  5. opengradient-0.3.0/src/opengradient/abi/inference.abi +1 -0
  6. opengradient-0.3.0/src/opengradient/abi/llm.abi +1 -0
  7. opengradient-0.3.0/src/opengradient/account.py +37 -0
  8. opengradient-0.3.0/src/opengradient/cli.py +390 -0
  9. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient/client.py +57 -6
  10. opengradient-0.3.0/src/opengradient/defaults.py +7 -0
  11. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient/utils.py +37 -9
  12. {opengradient-0.2.7 → opengradient-0.3.0/src/opengradient.egg-info}/PKG-INFO +58 -19
  13. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient.egg-info/SOURCES.txt +3 -3
  14. opengradient-0.2.7/README.md +0 -44
  15. opengradient-0.2.7/src/opengradient/abi/inference.abi +0 -1
  16. opengradient-0.2.7/src/opengradient/cli.py +0 -194
  17. opengradient-0.2.7/tests/test_api.py +0 -110
  18. opengradient-0.2.7/tests/test_exceptions.py +0 -25
  19. opengradient-0.2.7/tests/test_integration.py +0 -20
  20. {opengradient-0.2.7 → opengradient-0.3.0}/LICENSE +0 -0
  21. {opengradient-0.2.7 → opengradient-0.3.0}/setup.cfg +0 -0
  22. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient/exceptions.py +0 -0
  23. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient/types.py +0 -0
  24. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient.egg-info/dependency_links.txt +0 -0
  25. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient.egg-info/entry_points.txt +0 -0
  26. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient.egg-info/requires.txt +0 -0
  27. {opengradient-0.2.7 → opengradient-0.3.0}/src/opengradient.egg-info/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: opengradient
3
- Version: 0.2.7
4
- Summary: A Python SDK for OpenGradient inference services
3
+ Version: 0.3.0
4
+ Summary: Python SDK for OpenGradient decentralized model management & inference services
5
5
  Author-email: OpenGradient <oliver@opengradient.ai>
6
6
  License: MIT License
7
7
 
@@ -135,47 +135,86 @@ Requires-Dist: websockets==13.1
135
135
  Requires-Dist: xattr==1.1.0
136
136
  Requires-Dist: yarl==1.13.1
137
137
 
138
- # OpenGradient Python SDK
138
+ # OpenGradient SDK
139
139
 
140
- Python SDK for OpenGradient inference services.
140
+ Python SDK for OpenGradient decentralized model management & inference services.
141
141
 
142
142
  ## Installation
143
- ```
143
+ ```bash
144
144
  pip install opengradient
145
145
  ```
146
146
 
147
147
  ## Quick Start
148
- ```
148
+ ```python
149
149
  import opengradient as og
150
- og.init(private_key="x", rpc_url="y", contract_address="z")
151
- ```
152
-
153
- ### Sign in with Email
154
- ```
155
- og.login(email="you@opengradient.ai", password="xyz")
150
+ og.init(email="<email>", password="<password>")
156
151
  ```
157
152
 
158
153
  ### Create a Model
154
+ ```python
155
+ og.create_model(model_name="<model_name>", model_desc="<model_description>")
159
156
  ```
160
- og.create_model(model_name="test-network-model", model_desc="testing upload to sdk")
157
+
158
+ ### Create a Model (with file upload)
159
+ ```python
160
+ og.create_model(model_name="<model_name>", model_desc="<model_description>", model_path="<model_path>")
161
161
  ```
162
162
 
163
163
  ### Create a Version of a Model
164
- ```
165
- og.create_version(model_name="test-network-model", notes="test notes")
164
+ ```python
165
+ og.create_version(model_name="<model_name>", notes="<model_notes>")
166
166
  ```
167
167
 
168
168
  ### Upload Files to a Model
169
+ ```python
170
+ og.upload(model_path="<model_path>", model_name="<model_name>", version="<version>")
169
171
  ```
170
- og.upload(model_path="local_path_to_your_model.onnx", model_name="test-network-model", version="0.01")
172
+
173
+ ### List Files of a Model Version
174
+ ```python
175
+ og.list_files(model_name="<model_name>", version="<version>")
171
176
  ```
172
177
 
173
178
  ### Run Inference
174
- ```
179
+ ```python
175
180
  inference_mode = og.InferenceMode.VANILLA
176
- inference_cid = og.infer(model_cid, model_inputs, inference_mode)
181
+ og.infer(model_cid, inference_mode, model_inputs)
177
182
  ```
178
183
 
184
+ ## Using the CLI
185
+
186
+ ```bash
187
+ export OPENGRADIENT_EMAIL="<email>"
188
+ export OPENGRADIENT_PASSWORD="<password>"
179
189
  ```
180
- og.infer(model_id, inference_mode, model_input)
190
+
191
+ #### Creating a Model
192
+ ```bash
193
+ opengradient create_model "<model_name>" "<description>"
194
+ ```
195
+ - creating a model automatically initializes version `v0.01`
196
+
197
+ #### Creating a Version
198
+ ```bash
199
+ opengradient create_version "<model_name>" "<notes>"
200
+ ```
201
+
202
+ #### Upload a File
203
+ ```bash
204
+ opengradient upload "<model_path>" "<model_name>" "<version>"
205
+ ```
206
+
207
+ #### List Files of a Model Version
208
+ ```bash
209
+ opengradient list_files "<model_name>" "<version>"
210
+ ```
211
+
212
+ #### CLI infer using string
213
+ ```bash
214
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10, "str_input1":["hello", "ONNX"], "str_input2":" world"}'
215
+ ```
216
+
217
+ #### CLI infer using file path input
218
+ ```bash
219
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
181
220
  ```
@@ -0,0 +1,83 @@
1
+ # OpenGradient SDK
2
+
3
+ Python SDK for OpenGradient decentralized model management & inference services.
4
+
5
+ ## Installation
6
+ ```bash
7
+ pip install opengradient
8
+ ```
9
+
10
+ ## Quick Start
11
+ ```python
12
+ import opengradient as og
13
+ og.init(email="<email>", password="<password>")
14
+ ```
15
+
16
+ ### Create a Model
17
+ ```python
18
+ og.create_model(model_name="<model_name>", model_desc="<model_description>")
19
+ ```
20
+
21
+ ### Create a Model (with file upload)
22
+ ```python
23
+ og.create_model(model_name="<model_name>", model_desc="<model_description>", model_path="<model_path>")
24
+ ```
25
+
26
+ ### Create a Version of a Model
27
+ ```python
28
+ og.create_version(model_name="<model_name>", notes="<model_notes>")
29
+ ```
30
+
31
+ ### Upload Files to a Model
32
+ ```python
33
+ og.upload(model_path="<model_path>", model_name="<model_name>", version="<version>")
34
+ ```
35
+
36
+ ### List Files of a Model Version
37
+ ```python
38
+ og.list_files(model_name="<model_name>", version="<version>")
39
+ ```
40
+
41
+ ### Run Inference
42
+ ```python
43
+ inference_mode = og.InferenceMode.VANILLA
44
+ og.infer(model_cid, inference_mode, model_inputs)
45
+ ```
46
+
47
+ ## Using the CLI
48
+
49
+ ```bash
50
+ export OPENGRADIENT_EMAIL="<email>"
51
+ export OPENGRADIENT_PASSWORD="<password>"
52
+ ```
53
+
54
+ #### Creating a Model
55
+ ```bash
56
+ opengradient create_model "<model_name>" "<description>"
57
+ ```
58
+ - creating a model automatically initializes version `v0.01`
59
+
60
+ #### Creating a Version
61
+ ```bash
62
+ opengradient create_version "<model_name>" "<notes>"
63
+ ```
64
+
65
+ #### Upload a File
66
+ ```bash
67
+ opengradient upload "<model_path>" "<model_name>" "<version>"
68
+ ```
69
+
70
+ #### List Files of a Model Version
71
+ ```bash
72
+ opengradient list_files "<model_name>" "<version>"
73
+ ```
74
+
75
+ #### CLI infer using string
76
+ ```bash
77
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10, "str_input1":["hello", "ONNX"], "str_input2":" world"}'
78
+ ```
79
+
80
+ #### CLI infer using file path input
81
+ ```bash
82
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
83
+ ```
@@ -4,8 +4,8 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "opengradient"
7
- version = "0.2.7"
8
- description = "A Python SDK for OpenGradient inference services"
7
+ version = "0.3.0"
8
+ description = "Python SDK for OpenGradient decentralized model management & inference services"
9
9
  authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
10
10
  license = {file = "LICENSE"}
11
11
  readme = "README.md"
@@ -1,16 +1,16 @@
1
1
  from .client import Client
2
- from .exceptions import OpenGradientError, FileNotFoundError, UploadError, InferenceError, ResultRetrievalError
3
- from .types import ModelInput, InferenceMode, Number, NumberTensor, StringTensor, ModelOutput
4
-
5
- __version__ = "0.2.7"
2
+ from .defaults import *
3
+ from .types import InferenceMode
4
+ from typing import List, Dict
5
+ __version__ = "0.3.0"
6
6
 
7
7
  _client = None
8
8
 
9
- def init(private_key="cd09980ef6e280afc3900d2d6801f9e9c5d858a5deaeeab74a65643f5ff1a4c1",
10
- rpc_url="http://18.218.115.248:8545",
11
- contract_address="0x350E0A430b2B1563481833a99523Cfd17a530e4e",
12
- email="test@test.com",
13
- password="Test-123"):
9
+ def init(email: str,
10
+ password: str,
11
+ private_key=DEFAULT_PRIVATE_KEY,
12
+ rpc_url=DEFAULT_RPC_URL,
13
+ contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS):
14
14
  global _client
15
15
  _client = Client(private_key=private_key, rpc_url=rpc_url, contract_address=contract_address, email=email, password=password)
16
16
 
@@ -19,10 +19,18 @@ def upload(model_path, model_name, version):
19
19
  raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
20
20
  return _client.upload(model_path, model_name, version)
21
21
 
22
- def create_model(model_name, model_desc):
22
+ def create_model(model_name: str, model_desc: str, model_path: str = None):
23
23
  if _client is None:
24
24
  raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
25
- return _client.create_model(model_name, model_desc)
25
+
26
+ result = _client.create_model(model_name, model_desc)
27
+
28
+ if model_path:
29
+ version = "0.01"
30
+ upload_result = _client.upload(model_path, model_name, version)
31
+ result["upload"] = upload_result
32
+
33
+ return result
26
34
 
27
35
  def create_version(model_name, notes=None, is_major=False):
28
36
  if _client is None:
@@ -37,4 +45,9 @@ def infer(model_cid, inference_mode, model_input):
37
45
  def login(email: str, password: str):
38
46
  if _client is None:
39
47
  raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
40
- return _client.login(email, password)
48
+ return _client.login(email, password)
49
+
50
+ def list_files(model_name: str, version: str) -> List[Dict]:
51
+ if _client is None:
52
+ raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
53
+ return _client.list_files(model_name, version)
@@ -0,0 +1 @@
1
+ [{"anonymous":false,"inputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"indexed":false,"internalType":"struct ModelOutput","name":"output","type":"tuple"}],"name":"InferenceResult","type":"event"},{"inputs":[{"internalType":"string","name":"modelId","type":"string"},{"internalType":"enum ModelInferenceMode","name":"inferenceMode","type":"uint8"},{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"}],"internalType":"struct 
ModelInput","name":"modelInput","type":"tuple"}],"name":"run","outputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"internalType":"struct ModelOutput","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"}]
@@ -0,0 +1 @@
1
+ [{"anonymous":false,"inputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"indexed":false,"internalType":"struct ModelOutput","name":"output","type":"tuple"}],"name":"InferenceResult","type":"event"},{"anonymous":false,"inputs":[{"components":[{"internalType":"string","name":"answer","type":"string"}],"indexed":false,"internalType":"struct LlmResponse","name":"response","type":"tuple"}],"name":"LLMResult","type":"event"},{"inputs":[{"internalType":"string","name":"modelId","type":"string"},{"internalType":"enum ModelInferenceMode","name":"inferenceMode","type":"uint8"},{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"}],"internalType":"struct 
ModelInput","name":"modelInput","type":"tuple"}],"name":"run","outputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"internalType":"struct ModelOutput","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"components":[{"internalType":"enum LlmInferenceMode","name":"mode","type":"uint8"},{"internalType":"string","name":"modelCID","type":"string"},{"internalType":"string","name":"prompt","type":"string"},{"internalType":"uint32","name":"max_tokens","type":"uint32"},{"internalType":"string[]","name":"stop_sequence","type":"string[]"},{"internalType":"uint32","name":"temperature","type":"uint32"}],"internalType":"struct LlmInferenceRequest","name":"request","type":"tuple"}],"name":"runLLM","outputs":[{"components":[{"internalType":"string","name":"answer","type":"string"}],"internalType":"struct LlmResponse","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"}]
@@ -0,0 +1,37 @@
1
+ from eth_account import Account
2
+ import secrets
3
+ from collections import namedtuple
4
+ import os
5
+ import hashlib
6
+
7
+ EthAccount = namedtuple('EthAccount', ['address', 'private_key'])
8
+
9
+
10
+ def generate_eth_account() -> EthAccount:
11
+ user_seed = _get_user_random_seed()
12
+ private_key = _generate_secure_private_key(user_seed)
13
+
14
+ # derive account
15
+ account = Account.from_key(private_key)
16
+
17
+ # get the public key (address)
18
+ public_key = account.address
19
+
20
+ return EthAccount(address=public_key, private_key=private_key)
21
+
22
+ def _get_user_random_seed():
23
+ print("Please type a random string of characters (the longer and more random, the better):")
24
+ print("> ", end="") # Add a '>' prompt on a new line
25
+ return input().encode()
26
+
27
+ def _generate_secure_private_key(user_input):
28
+ # Combine multiple sources of entropy
29
+ system_random = secrets.token_bytes(32)
30
+ os_urandom = os.urandom(32)
31
+ timestamp = str(secrets.randbits(256)).encode()
32
+
33
+ # Add user input to the entropy sources
34
+ combined = system_random + os_urandom + timestamp + user_input
35
+
36
+ # Hash the combined entropy
37
+ return hashlib.sha256(combined).hexdigest()