opengradient 0.2.7__tar.gz → 0.2.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {opengradient-0.2.7/src/opengradient.egg-info → opengradient-0.2.8}/PKG-INFO +37 -9
  2. {opengradient-0.2.7 → opengradient-0.2.8}/README.md +36 -8
  3. {opengradient-0.2.7 → opengradient-0.2.8}/pyproject.toml +1 -1
  4. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/__init__.py +8 -8
  5. opengradient-0.2.8/src/opengradient/abi/inference.abi +1 -0
  6. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/cli.py +19 -18
  7. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/client.py +6 -0
  8. opengradient-0.2.8/src/opengradient/defaults.py +7 -0
  9. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/utils.py +37 -9
  10. {opengradient-0.2.7 → opengradient-0.2.8/src/opengradient.egg-info}/PKG-INFO +37 -9
  11. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient.egg-info/SOURCES.txt +1 -0
  12. opengradient-0.2.7/src/opengradient/abi/inference.abi +0 -1
  13. {opengradient-0.2.7 → opengradient-0.2.8}/LICENSE +0 -0
  14. {opengradient-0.2.7 → opengradient-0.2.8}/setup.cfg +0 -0
  15. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/exceptions.py +0 -0
  16. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/types.py +0 -0
  17. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient.egg-info/dependency_links.txt +0 -0
  18. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient.egg-info/entry_points.txt +0 -0
  19. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient.egg-info/requires.txt +0 -0
  20. {opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient.egg-info/top_level.txt +0 -0
  21. {opengradient-0.2.7 → opengradient-0.2.8}/tests/test_api.py +0 -0
  22. {opengradient-0.2.7 → opengradient-0.2.8}/tests/test_exceptions.py +0 -0
  23. {opengradient-0.2.7 → opengradient-0.2.8}/tests/test_integration.py +0 -0
{opengradient-0.2.7/src/opengradient.egg-info → opengradient-0.2.8}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: opengradient
- Version: 0.2.7
+ Version: 0.2.8
  Summary: A Python SDK for OpenGradient inference services
  Author-email: OpenGradient <oliver@opengradient.ai>
  License: MIT License
@@ -140,42 +140,70 @@ Requires-Dist: yarl==1.13.1
  Python SDK for OpenGradient inference services.

  ## Installation
- ```
+ ```python
  pip install opengradient
  ```

  ## Quick Start
- ```
+ ```python
  import opengradient as og
  og.init(private_key="x", rpc_url="y", contract_address="z")
  ```

  ### Sign in with Email
- ```
+ ```python
  og.login(email="you@opengradient.ai", password="xyz")
  ```

  ### Create a Model
- ```
+ ```python
  og.create_model(model_name="test-network-model", model_desc="testing upload to sdk")
  ```

  ### Create a Version of a Model
- ```
+ ```python
  og.create_version(model_name="test-network-model", notes="test notes")
  ```

  ### Upload Files to a Model
- ```
+ ```python
  og.upload(model_path="local_path_to_your_model.onnx", model_name="test-network-model", version="0.01")
  ```

  ### Run Inference
- ```
+ ```python
  inference_mode = og.InferenceMode.VANILLA
  inference_cid = og.infer(model_cid, model_inputs, inference_mode)
  ```

- ```
+ ```python
  og.infer(model_id, inference_mode, model_input)
  ```
+
+ ## Using the CLI
+
+ #### Creating a Model
+ ```bash
+ opengradient create_model "<model_name>" "<description>"
+ ```
+ - creating a model automatically initializes version `v0.01`
+
+ #### Creating a Version
+ ```bash
+ opengradient create_model "<model_name>" "<description>"
+ ```
+
+ #### Upload a File
+ ```bash
+ opengradient upload "path/to/model.onnx" "<model_name>" "<version>"
+ ```
+
+ #### CLI infer using string
+ ```bash
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10, "str_input1":["hello", "ONNX"], "str_input2":" world"}'
+ ```
+
+ #### CLI infer using file path input
+ ```bash
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
+ ```
{opengradient-0.2.7 → opengradient-0.2.8}/README.md

@@ -3,42 +3,70 @@
  Python SDK for OpenGradient inference services.

  ## Installation
- ```
+ ```python
  pip install opengradient
  ```

  ## Quick Start
- ```
+ ```python
  import opengradient as og
  og.init(private_key="x", rpc_url="y", contract_address="z")
  ```

  ### Sign in with Email
- ```
+ ```python
  og.login(email="you@opengradient.ai", password="xyz")
  ```

  ### Create a Model
- ```
+ ```python
  og.create_model(model_name="test-network-model", model_desc="testing upload to sdk")
  ```

  ### Create a Version of a Model
- ```
+ ```python
  og.create_version(model_name="test-network-model", notes="test notes")
  ```

  ### Upload Files to a Model
- ```
+ ```python
  og.upload(model_path="local_path_to_your_model.onnx", model_name="test-network-model", version="0.01")
  ```

  ### Run Inference
- ```
+ ```python
  inference_mode = og.InferenceMode.VANILLA
  inference_cid = og.infer(model_cid, model_inputs, inference_mode)
  ```

- ```
+ ```python
  og.infer(model_id, inference_mode, model_input)
+ ```
+
+ ## Using the CLI
+
+ #### Creating a Model
+ ```bash
+ opengradient create_model "<model_name>" "<description>"
+ ```
+ - creating a model automatically initializes version `v0.01`
+
+ #### Creating a Version
+ ```bash
+ opengradient create_model "<model_name>" "<description>"
+ ```
+
+ #### Upload a File
+ ```bash
+ opengradient upload "path/to/model.onnx" "<model_name>" "<version>"
+ ```
+
+ #### CLI infer using string
+ ```bash
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10, "str_input1":["hello", "ONNX"], "str_input2":" world"}'
+ ```
+
+ #### CLI infer using file path input
+ ```bash
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
  ```
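The Run Inference snippet in the updated README references `model_inputs` without defining it, and the two `og.infer` lines show different argument orders. The CLI examples in the same README imply a plain dict of tensor names mapped to numbers, lists, or strings, so a call assembled on that assumption (using the first argument order) might look like this; treat it as a sketch rather than documented API:

```python
import opengradient as og

og.init(private_key="x", rpc_url="y", contract_address="z")

# Input layout mirrors the CLI example: tensor names mapped to lists, scalars, or strings
model_cid = "QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ"
model_inputs = {
    "num_input1": [1.0, 2.0, 3.0],
    "num_input2": 10,
    "str_input1": ["hello", "ONNX"],
    "str_input2": " world",
}

inference_cid = og.infer(model_cid, model_inputs, og.InferenceMode.VANILLA)
```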
{opengradient-0.2.7 → opengradient-0.2.8}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "opengradient"
- version = "0.2.7"
+ version = "0.2.8"
  description = "A Python SDK for OpenGradient inference services"
  authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
  license = {file = "LICENSE"}
{opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/__init__.py

@@ -1,16 +1,16 @@
  from .client import Client
- from .exceptions import OpenGradientError, FileNotFoundError, UploadError, InferenceError, ResultRetrievalError
- from .types import ModelInput, InferenceMode, Number, NumberTensor, StringTensor, ModelOutput
+ from .defaults import *
+ from .types import InferenceMode

- __version__ = "0.2.7"
+ __version__ = "0.2.8"

  _client = None

- def init(private_key="cd09980ef6e280afc3900d2d6801f9e9c5d858a5deaeeab74a65643f5ff1a4c1",
- rpc_url="http://18.218.115.248:8545",
- contract_address="0x350E0A430b2B1563481833a99523Cfd17a530e4e",
- email="test@test.com",
- password="Test-123"):
+ def init(private_key=DEFAULT_PRIVATE_KEY,
+ rpc_url=DEFAULT_RPC_URL,
+ contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS,
+ email=DEFAULT_HUB_EMAIL,
+ password=DEFAULT_HUB_PASSWORD):
  global _client
  _client = Client(private_key=private_key, rpc_url=rpc_url, contract_address=contract_address, email=email, password=password)

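With the hard-coded credentials moved into `defaults.py`, every `init()` parameter now has a named fallback, so the SDK can be initialized without arguments. A minimal sketch of the expected call patterns, assuming the defaults shown in the new module:

```python
import opengradient as og

# Every parameter falls back to a DEFAULT_* constant from opengradient.defaults
og.init()

# Individual values can still be overridden explicitly (values here are placeholders)
og.init(rpc_url="http://localhost:8545", private_key="<your-private-key>")
```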
opengradient-0.2.8/src/opengradient/abi/inference.abi (new file)

@@ -0,0 +1 @@
+ [{"anonymous":false,"inputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"indexed":false,"internalType":"struct ModelOutput","name":"output","type":"tuple"}],"name":"InferenceResult","type":"event"},{"inputs":[{"internalType":"string","name":"modelId","type":"string"},{"internalType":"enum ModelInferenceMode","name":"inferenceMode","type":"uint8"},{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"}],"internalType":"struct ModelInput","name":"modelInput","type":"tuple"}],"name":"run","outputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct TensorLib.Number[]","name":"values","type":"tuple[]"},{"internalType":"uint32[]","name":"shape","type":"uint32[]"}],"internalType":"struct TensorLib.MultiDimensionalNumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct TensorLib.StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"internalType":"struct ModelOutput","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"}]
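The replacement ABI switches number tensors from `NumberTensor` to `TensorLib.MultiDimensionalNumberTensor`, which adds a `uint32[] shape` field next to the fixed-point `(value, decimals)` pairs; that field is what the reshaping logic in `utils.py` below depends on. A small sketch for inspecting the file (the local path is illustrative, not how the client resolves it):

```python
import json

# Illustrative path; the client itself resolves the ABI relative to its own __file__
with open("src/opengradient/abi/inference.abi") as abi_file:
    abi = json.load(abi_file)

# The ABI defines the InferenceResult event and the run function
for entry in abi:
    print(entry["type"], entry.get("name"))

# The modelInput tuple's number tensors now carry a name, (value, decimals) pairs, and a uint32[] shape
run = next(entry for entry in abi if entry.get("name") == "run")
print(json.dumps(run["inputs"][2]["components"][0], indent=2))
```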
{opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/cli.py

@@ -5,10 +5,11 @@ import json
  import ast
  from pathlib import Path
  from .client import Client
- from opengradient.types import InferenceMode, ModelInput
+ from .defaults import *
+ from .types import InferenceMode, ModelInput

  # Environment variable names
- API_KEY_ENV = 'OPENGRADIENT_API_KEY'
+ PRIVATE_KEY_ENV = 'OPENGRADIENT_PRIVATE_KEY'
  RPC_URL_ENV = 'OPENGRADIENT_RPC_URL'
  CONTRACT_ADDRESS_ENV = 'OPENGRADIENT_CONTRACT_ADDRESS'
  EMAIL_ENV = 'OPENGRADIENT_EMAIL'
@@ -39,48 +40,48 @@ Dict = DictParamType()

  # Support inference modes
  InferenceModes = {
- "VANILLA": opengradient.InferenceMode.VANILLA,
- "ZKML": opengradient.InferenceMode.ZKML,
- "TEE": opengradient.InferenceMode.TEE,
+ "VANILLA": InferenceMode.VANILLA,
+ "ZKML": InferenceMode.ZKML,
+ "TEE": InferenceMode.TEE,
  }

  # TODO (Kyle): Once we're farther into development, we should remove the defaults for these options
  @click.group()
- @click.option('--api_key',
- envvar=API_KEY_ENV,
+ @click.option('--private_key',
+ envvar=PRIVATE_KEY_ENV,
  help='Your OpenGradient private key',
- default="cd09980ef6e280afc3900d2d6801f9e9c5d858a5deaeeab74a65643f5ff1a4c1")
+ default=DEFAULT_PRIVATE_KEY)
  @click.option('--rpc_url',
  envvar=RPC_URL_ENV,
  help='OpenGradient RPC URL address',
- default="http://18.218.115.248:8545")
+ default=DEFAULT_RPC_URL)
  @click.option('--contract_address',
  envvar=CONTRACT_ADDRESS_ENV,
  help='OpenGradient inference contract address',
- default="0x350E0A430b2B1563481833a99523Cfd17a530e4e")
+ default=DEFAULT_INFERENCE_CONTRACT_ADDRESS)
  @click.option('--email',
  envvar=EMAIL_ENV,
  help='Your OpenGradient Hub email address -- not required for inference',
- default="test@test.com")
+ default=DEFAULT_HUB_EMAIL)
  @click.option('--password',
  envvar=PASSWORD_ENV,
  help='Your OpenGradient Hub password -- not required for inference',
- default="Test-123")
+ default=DEFAULT_HUB_PASSWORD)
  @click.pass_context
- def cli(ctx, api_key, rpc_url, contract_address, email, password):
+ def cli(ctx, private_key, rpc_url, contract_address, email, password):
  """CLI for OpenGradient SDK"""
- if not api_key:
- click.echo("Please provide an API key via flag or setting environment variable OPENGRADIENT_API_KEY")
+ if not private_key:
+ click.echo("Please provide a private key via flag or setting environment variable OPENGRADIENT_PRIVATE_KEY")
  if not rpc_url:
  click.echo("Please provide a RPC URL via flag or setting environment variable OPENGRADIENT_RPC_URL")
  if not contract_address:
  click.echo("Please provide a contract address via flag or setting environment variable OPENGRADIENT_CONTRACT_ADDRESS")
- if not api_key or not rpc_url or not contract_address:
+ if not private_key or not rpc_url or not contract_address:
  ctx.exit(1)
  return

  try:
- ctx.obj = Client(private_key=api_key,
+ ctx.obj = Client(private_key=private_key,
  rpc_url=rpc_url,
  contract_address=contract_address,
  email=email,
@@ -98,7 +99,7 @@ def client_settings(ctx):
  ctx.exit(1)

  click.echo("Settings for OpenGradient client:")
- click.echo(f"\tAPI key ({API_KEY_ENV}): {client.private_key}")
+ click.echo(f"\tPrivate key ({PRIVATE_KEY_ENV}): {client.private_key}")
  click.echo(f"\tRPC URL ({RPC_URL_ENV}): {client.rpc_url}")
  click.echo(f"\tContract address ({CONTRACT_ADDRESS_ENV}): {client.contract_address}")
  if client.user:
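Renaming the option does not change click's resolution order: an explicit `--private_key` flag takes precedence over the `OPENGRADIENT_PRIVATE_KEY` environment variable, which takes precedence over the default. A standalone sketch of that behaviour (a hypothetical command, not the package's CLI):

```python
import click
from click.testing import CliRunner

PRIVATE_KEY_ENV = "OPENGRADIENT_PRIVATE_KEY"

@click.command()
@click.option("--private_key", envvar=PRIVATE_KEY_ENV, default="<default-key>")
def show(private_key):
    click.echo(private_key)

runner = CliRunner()
print(runner.invoke(show, []).output, end="")                                      # <default-key>
print(runner.invoke(show, [], env={PRIVATE_KEY_ENV: "from-env"}).output, end="")   # from-env
print(runner.invoke(show, ["--private_key", "from-flag"],
                    env={PRIVATE_KEY_ENV: "from-env"}).output, end="")             # from-flag
```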
{opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/client.py

@@ -43,6 +43,12 @@ class Client:
  self.firebase_app = firebase.initialize_app(self.FIREBASE_CONFIG)
  self.auth = self.firebase_app.auth()
  self.user = None
+
+ logging.debug("Initialized client with parameters:\n"
+ "private key: %s\n"
+ "RPC URL: %s\n"
+ "Contract Address: %s\n",
+ private_key, rpc_url, contract_address)

  abi_path = os.path.join(os.path.dirname(__file__), 'abi', 'inference.abi')
  with open(abi_path, 'r') as abi_file:
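The new `logging.debug` call is only emitted when the application enables DEBUG logging, and it prints the private key, so it is best kept out of shared logs. A minimal way to surface it, assuming standard-library logging and the `og.init()` entry point shown above:

```python
import logging

# Raise the root logger to DEBUG so the client's logging.debug(...) output is emitted
logging.basicConfig(level=logging.DEBUG)

import opengradient as og

og.init()  # now logs the private key, RPC URL, and contract address at DEBUG level
```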
opengradient-0.2.8/src/opengradient/defaults.py (new file)

@@ -0,0 +1,7 @@
+
+ # Default variables
+ DEFAULT_PRIVATE_KEY="cd09980ef6e280afc3900d2d6801f9e9c5d858a5deaeeab74a65643f5ff1a4c1"
+ DEFAULT_RPC_URL="http://18.218.115.248:8545"
+ DEFAULT_INFERENCE_CONTRACT_ADDRESS="0x75D0266DAb643417e9FFD828A1A31C1E039a966c"
+ DEFAULT_HUB_EMAIL="test@test.com"
+ DEFAULT_HUB_PASSWORD="Test-123"
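Because `__init__.py` now does `from .defaults import *` and the new module defines no `__all__`, these constants should also be reachable from the top-level package; a quick check under that assumption:

```python
import opengradient as og

print(og.DEFAULT_RPC_URL)                     # http://18.218.115.248:8545
print(og.DEFAULT_INFERENCE_CONTRACT_ADDRESS)  # 0x75D0266DAb643417e9FFD828A1A31C1E039a966c
```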
{opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient/utils.py

@@ -40,11 +40,10 @@ def convert_to_model_input(inputs: Dict[str, np.ndarray]) -> Tuple[List[Tuple[st
  """
  Expect SDK input to be a dict with the format
  key: tensor name
- value: np.array
+ value: np.array / list

- Note: np.array types must be float or string. Ints currently not supported.
-
- Return a tuple of (number tensors, string tensors) depending on the input type
+ Return a tuple of (number tensors, string tensors) depending on the input type. Each number and string tensor converted
+ to a numpy array and flattened and the shape saved.
  """
  logging.debug("Converting the following input dictionary to ModelInput: %s", inputs)
  number_tensors = []
@@ -59,19 +58,37 @@ def convert_to_model_input(inputs: Dict[str, np.ndarray]) -> Tuple[List[Tuple[st
  logging.debug(f"\tConverting single entry {tensor_data} to a list")
  tensor_data = np.array([tensor_data])

+ # Check if type is np array
+ if not isinstance(tensor_data, np.ndarray):
+ raise TypeError("Inference input must be list, numpy array, or type (str, int, float): %s" % type(tensor_data))
+
+ # Flatten list and retain shape
+ shape = tensor_data.shape
+ flat_data = tensor_data.flatten()
+ logging.debug("Shape and flattened data: %s, %s", shape, flat_data)
+
  # Parse into number and string tensors
  if issubclass(tensor_data.dtype.type, np.floating):
- input = (tensor_name, [convert_to_fixed_point(i) for i in tensor_data])
+ # Convert to fixed-point tuples
+ data_type = np.dtype([('value', int), ('decimal', int)])
+ converted_tensor_data = np.array([convert_to_fixed_point(i) for i in flat_data], dtype=data_type)
+
+ input = (tensor_name, converted_tensor_data.tolist(), shape)
  logging.debug("\tFloating tensor input: %s", input)

  number_tensors.append(input)
  elif issubclass(tensor_data.dtype.type, np.integer):
- input = (tensor_name, [convert_to_fixed_point(int(i)) for i in tensor_data])
+ # Convert to fixed-point tuples
+ data_type = np.dtype([('value', int), ('decimal', int)])
+ converted_tensor_data = np.array([convert_to_fixed_point(int(i)) for i in flat_data], dtype=data_type)
+
+ input = (tensor_name, converted_tensor_data.tolist(), shape)
  logging.debug("\tInteger tensor input: %s", input)

  number_tensors.append(input)
  elif issubclass(tensor_data.dtype.type, np.str_):
- input = (tensor_name, [s for s in tensor_data])
+ # TODO (Kyle): Add shape into here as well
+ input = (tensor_name, [s for s in flat_data])
  logging.debug("\tString tensor input: %s", input)

  string_tensors.append(input)
@@ -85,6 +102,15 @@ def convert_to_model_input(inputs: Dict[str, np.ndarray]) -> Tuple[List[Tuple[st
  return number_tensors, string_tensors

  def convert_to_model_output(event_data: AttributeDict) -> Dict[str, np.ndarray]:
+ """
+ Converts inference output into a user-readable output.
+ Expects the inference node to return a dict with the format:
+ key: output_name (str)
+ value: (output_array (list), shape (list)) (tuple)
+
+ We need to reshape each output array using the shape parameter in order to get the array
+ back into its original shape.
+ """
  logging.debug(f"Parsing event data: {event_data}")

  output_dict = {}
@@ -98,6 +124,7 @@ def convert_to_model_output(event_data: AttributeDict) -> Dict[str, np.ndarray]:
  logging.debug(f"Processing number tensor: {tensor}")
  if isinstance(tensor, AttributeDict):
  name = tensor.get('name')
+ shape = tensor.get('shape')
  values = []
  # Convert from fixed point back into np.float32
  for v in tensor.get('values', []):
@@ -105,7 +132,7 @@ def convert_to_model_output(event_data: AttributeDict) -> Dict[str, np.ndarray]:
  values.append(convert_to_float32(value=int(v.get('value')), decimals=int(v.get('decimals'))))
  else:
  logging.warning(f"Unexpected number type: {type(v)}")
- output_dict[name] = np.array(values)
+ output_dict[name] = np.array(values).reshape(shape)
  else:
  logging.warning(f"Unexpected tensor type: {type(tensor)}")

@@ -114,8 +141,9 @@ def convert_to_model_output(event_data: AttributeDict) -> Dict[str, np.ndarray]:
  logging.debug(f"Processing string tensor: {tensor}")
  if isinstance(tensor, AttributeDict):
  name = tensor.get('name')
+ shape = tensor.get('shape')
  values = tensor.get('values', [])
- output_dict[name] = values
+ output_dict[name] = np.array(values).reshape(shape)
  else:
  logging.warning(f"Unexpected tensor type: {type(tensor)}")
  else:
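The heart of the `utils.py` change is a flatten-on-input / reshape-on-output round trip, with floats carried as `(value, decimals)` fixed-point pairs. The sketch below mirrors that flow; `convert_to_fixed_point` and `convert_to_float32` are stand-ins, since the diff does not show the real helpers (a base-10 scale with a fixed number of decimals is assumed):

```python
import numpy as np

def convert_to_fixed_point(x: float, decimals: int = 6):
    # Assumed convention: scale by 10**decimals and keep (value, decimals)
    return int(round(x * 10**decimals)), decimals

def convert_to_float32(value: int, decimals: int) -> np.float32:
    return np.float32(value / 10**decimals)

tensor = np.array([[1.5, 2.25], [3.0, 4.125]])

# Input side: remember the shape, flatten, convert each element to fixed point
shape = tensor.shape
fixed = [convert_to_fixed_point(x) for x in tensor.flatten()]

# Output side: convert each pair back to float32 and restore the saved shape
restored = np.array([convert_to_float32(value, decimals) for value, decimals in fixed]).reshape(shape)

assert restored.shape == tensor.shape
assert np.allclose(restored, tensor)
```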
{opengradient-0.2.7 → opengradient-0.2.8/src/opengradient.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: opengradient
- Version: 0.2.7
+ Version: 0.2.8
  Summary: A Python SDK for OpenGradient inference services
  Author-email: OpenGradient <oliver@opengradient.ai>
  License: MIT License
@@ -140,42 +140,70 @@ Requires-Dist: yarl==1.13.1
  Python SDK for OpenGradient inference services.

  ## Installation
- ```
+ ```python
  pip install opengradient
  ```

  ## Quick Start
- ```
+ ```python
  import opengradient as og
  og.init(private_key="x", rpc_url="y", contract_address="z")
  ```

  ### Sign in with Email
- ```
+ ```python
  og.login(email="you@opengradient.ai", password="xyz")
  ```

  ### Create a Model
- ```
+ ```python
  og.create_model(model_name="test-network-model", model_desc="testing upload to sdk")
  ```

  ### Create a Version of a Model
- ```
+ ```python
  og.create_version(model_name="test-network-model", notes="test notes")
  ```

  ### Upload Files to a Model
- ```
+ ```python
  og.upload(model_path="local_path_to_your_model.onnx", model_name="test-network-model", version="0.01")
  ```

  ### Run Inference
- ```
+ ```python
  inference_mode = og.InferenceMode.VANILLA
  inference_cid = og.infer(model_cid, model_inputs, inference_mode)
  ```

- ```
+ ```python
  og.infer(model_id, inference_mode, model_input)
  ```
+
+ ## Using the CLI
+
+ #### Creating a Model
+ ```bash
+ opengradient create_model "<model_name>" "<description>"
+ ```
+ - creating a model automatically initializes version `v0.01`
+
+ #### Creating a Version
+ ```bash
+ opengradient create_model "<model_name>" "<description>"
+ ```
+
+ #### Upload a File
+ ```bash
+ opengradient upload "path/to/model.onnx" "<model_name>" "<version>"
+ ```
+
+ #### CLI infer using string
+ ```bash
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num_input1":[1.0, 2.0, 3.0], "num_input2":10, "str_input1":["hello", "ONNX"], "str_input2":" world"}'
+ ```
+
+ #### CLI infer using file path input
+ ```bash
+ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
+ ```
{opengradient-0.2.7 → opengradient-0.2.8}/src/opengradient.egg-info/SOURCES.txt

@@ -4,6 +4,7 @@ pyproject.toml
  src/opengradient/__init__.py
  src/opengradient/cli.py
  src/opengradient/client.py
+ src/opengradient/defaults.py
  src/opengradient/exceptions.py
  src/opengradient/types.py
  src/opengradient/utils.py
opengradient-0.2.7/src/opengradient/abi/inference.abi (deleted file)

@@ -1 +0,0 @@
- [{"anonymous":false,"inputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct Number[]","name":"values","type":"tuple[]"}],"internalType":"struct NumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"indexed":false,"internalType":"struct ModelOutput","name":"output","type":"tuple"}],"name":"InferenceResult","type":"event"},{"inputs":[{"internalType":"string","name":"modelId","type":"string"},{"internalType":"enum IInference.ModelInferenceMode","name":"inferenceMode","type":"uint8"},{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct Number[]","name":"values","type":"tuple[]"}],"internalType":"struct NumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct StringTensor[]","name":"strings","type":"tuple[]"}],"internalType":"struct ModelInput","name":"modelInput","type":"tuple"}],"name":"run","outputs":[{"components":[{"components":[{"internalType":"string","name":"name","type":"string"},{"components":[{"internalType":"int128","name":"value","type":"int128"},{"internalType":"int128","name":"decimals","type":"int128"}],"internalType":"struct Number[]","name":"values","type":"tuple[]"}],"internalType":"struct NumberTensor[]","name":"numbers","type":"tuple[]"},{"components":[{"internalType":"string","name":"name","type":"string"},{"internalType":"string[]","name":"values","type":"string[]"}],"internalType":"struct StringTensor[]","name":"strings","type":"tuple[]"},{"internalType":"bool","name":"is_simulation_result","type":"bool"}],"internalType":"struct ModelOutput","name":"","type":"tuple"}],"stateMutability":"nonpayable","type":"function"}]