opengradient 0.3.14.tar.gz → 0.3.16.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (20)
  1. {opengradient-0.3.14 → opengradient-0.3.16}/PKG-INFO +73 -2
  2. {opengradient-0.3.14 → opengradient-0.3.16}/README.md +72 -1
  3. {opengradient-0.3.14 → opengradient-0.3.16}/pyproject.toml +1 -1
  4. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/__init__.py +1 -1
  5. {opengradient-0.3.14 → opengradient-0.3.16}/.gitignore +0 -0
  6. {opengradient-0.3.14 → opengradient-0.3.16}/LICENSE +0 -0
  7. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/abi/inference.abi +0 -0
  8. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/account.py +0 -0
  9. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/cli.py +0 -0
  10. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/client.py +0 -0
  11. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/defaults.py +0 -0
  12. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/exceptions.py +0 -0
  13. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/llm/__init__.py +0 -0
  14. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/llm/chat.py +0 -0
  15. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/proto/__init__.py +0 -0
  16. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/proto/infer.proto +0 -0
  17. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/proto/infer_pb2.py +0 -0
  18. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/proto/infer_pb2_grpc.py +0 -0
  19. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/types.py +0 -0
  20. {opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/utils.py +0 -0

{opengradient-0.3.14 → opengradient-0.3.16}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: opengradient
-Version: 0.3.14
+Version: 0.3.16
 Summary: Python SDK for OpenGradient decentralized model management & inference services
 Project-URL: Homepage, https://opengradient.ai
 Author-email: OpenGradient <oliver@opengradient.ai>
@@ -183,6 +183,50 @@ og.infer(model_cid, model_inputs, inference_mode)
 ```
 - inference mode can be `VANILLA`, `ZKML`, or `TEE`
 
+### LLM Inference
+#### LLM Completion
+```python
+tx_hash, response = og.llm_completion(
+    model_cid='meta-llama/Meta-Llama-3-8B-Instruct',
+    prompt="Translate the following English text to French: 'Hello, how are you?'",
+    max_tokens=50,
+    temperature=0.0
+)
+```
+
+#### LLM Chat
+```python
+# create messages history
+messages = [
+    {
+        "role": "system",
+        "content": "You are a helpful AI assistant.",
+        "name": "HAL"
+    },
+    {
+        "role": "user",
+        "content": "Hello! How are you doing? Can you repeat my name?",
+    }]
+
+# run LLM inference
+tx_hash, finish_reason, message = og.llm_chat(
+    model_cid=og.LLM.MISTRAL_7B_INSTRUCT_V3,
+    messages=messages
+)
+```
+
+### Image Generation
+```python
+tx_hash, image_data = og.generate_image(
+    model="stabilityai/stable-diffusion-xl-base-1.0",
+    prompt="A beautiful sunset over mountains",
+    width=1024,
+    height=1024
+)
+
+with open("generated_image.png", "wb") as f:
+    f.write(image_data)
+```
 
 ## Using the CLI
 
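A note on the new chat API: the example above returns the transaction hash, a finish reason, and the model's reply, but leaves multi-turn usage implicit. Below is a minimal sketch of a second turn, assuming (this diff does not confirm it) that the returned `message` is a role/content dict compatible with the `messages` list, and that the client has already been initialized as described elsewhere in the README:

```python
import opengradient as og

# Conversation history in the format shown in the README example.
messages = [
    {"role": "system", "content": "You are a helpful AI assistant.", "name": "HAL"},
    {"role": "user", "content": "Hello! Can you repeat my name?"},
]

# First turn, exactly as documented above.
tx_hash, finish_reason, message = og.llm_chat(
    model_cid=og.LLM.MISTRAL_7B_INSTRUCT_V3,
    messages=messages,
)

# Assumption: the returned `message` can be appended to the history as-is.
messages.append(message)
messages.append({"role": "user", "content": "And what did I ask you first?"})

# Second turn reuses the accumulated history.
tx_hash, finish_reason, message = og.llm_chat(
    model_cid=og.LLM.MISTRAL_7B_INSTRUCT_V3,
    messages=messages,
)
```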
@@ -223,8 +267,35 @@ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --inpu
 ```
 
 #### Run LLM Inference
+We also have explicit support for LLMs through the `completion` and `chat` commands in the CLI.
+
+For example, you can run a completion inference with Llama-3 using the following command:
+
+```bash
+opengradient completion --model "meta-llama/Meta-Llama-3-8B-Instruct" --prompt "hello who are you?" --max-tokens 50
+```
+
+Or you can use files instead of inline text to simplify your command:
+
 ```bash
-opengradient llm --model "meta-llama/Meta-Llama-3-8B-Instruct" --prompt "Translate to French: Hello, how are you?" --max-tokens 50 --temperature 0.7
+opengradient chat --model "mistralai/Mistral-7B-Instruct-v0.3" --messages-file messages.json --tools-file tools.json --max-tokens 200
 ```
 
+### Image Generation
+```bash
+opengradient generate-image \
+    --model "stabilityai/stable-diffusion-xl-base-1.0" \
+    --prompt "A beautiful sunset over mountains" \
+    --output-path sunset.png \
+    --width 1024 \
+    --height 1024
+```
+
+Options:
+- `--model`, `-m`: Model identifier for image generation (required)
+- `--prompt`, `-p`: Text prompt for generating the image (required)
+- `--output-path`, `-o`: Output file path for the generated image (required)
+- `--width`: Output image width in pixels (default: 1024)
+- `--height`: Output image height in pixels (default: 1024)
+
 For more information read the OpenGradient [documentation](https://docs.opengradient.ai/).
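The `chat` command above references `messages.json` and `tools.json`, but the diff does not show their contents. A reasonable sketch for `messages.json`, mirroring the message dicts from the Python chat example, is to serialize the same list to disk; the `tools.json` schema is not shown here, so this sketch deliberately leaves it out:

```python
import json

# Same message structure as the Python `llm_chat` example above.
messages = [
    {"role": "system", "content": "You are a helpful AI assistant.", "name": "HAL"},
    {"role": "user", "content": "Hello! How are you doing? Can you repeat my name?"},
]

# Write the history to the file consumed by `opengradient chat --messages-file`.
with open("messages.json", "w") as f:
    json.dump(messages, f, indent=2)
```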

{opengradient-0.3.14 → opengradient-0.3.16}/README.md
@@ -51,6 +51,50 @@ og.infer(model_cid, model_inputs, inference_mode)
 ```
 - inference mode can be `VANILLA`, `ZKML`, or `TEE`
 
+### LLM Inference
+#### LLM Completion
+```python
+tx_hash, response = og.llm_completion(
+    model_cid='meta-llama/Meta-Llama-3-8B-Instruct',
+    prompt="Translate the following English text to French: 'Hello, how are you?'",
+    max_tokens=50,
+    temperature=0.0
+)
+```
+
+#### LLM Chat
+```python
+# create messages history
+messages = [
+    {
+        "role": "system",
+        "content": "You are a helpful AI assistant.",
+        "name": "HAL"
+    },
+    {
+        "role": "user",
+        "content": "Hello! How are you doing? Can you repeat my name?",
+    }]
+
+# run LLM inference
+tx_hash, finish_reason, message = og.llm_chat(
+    model_cid=og.LLM.MISTRAL_7B_INSTRUCT_V3,
+    messages=messages
+)
+```
+
+### Image Generation
+```python
+tx_hash, image_data = og.generate_image(
+    model="stabilityai/stable-diffusion-xl-base-1.0",
+    prompt="A beautiful sunset over mountains",
+    width=1024,
+    height=1024
+)
+
+with open("generated_image.png", "wb") as f:
+    f.write(image_data)
+```
 
 ## Using the CLI
 
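Both copies of the docs note that the inference mode can be `VANILLA`, `ZKML`, or `TEE`, and the `__init__.py` hunk further down shows that `InferenceMode` is exported at package level. Here is a minimal sketch of selecting a mode for `og.infer` (the CID is taken from the CLI example; the input layout and enum member name are assumptions, since neither is spelled out in this diff):

```python
import opengradient as og

model_cid = "QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ"  # CID from the CLI example
model_inputs = {"input": [1.0, 2.0, 3.0]}  # hypothetical input layout

# Assumption: InferenceMode exposes members matching the documented mode names.
result = og.infer(model_cid, model_inputs, og.InferenceMode.TEE)
```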
@@ -91,8 +135,35 @@ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --inpu
 ```
 
 #### Run LLM Inference
+We also have explicit support for LLMs through the `completion` and `chat` commands in the CLI.
+
+For example, you can run a completion inference with Llama-3 using the following command:
+
+```bash
+opengradient completion --model "meta-llama/Meta-Llama-3-8B-Instruct" --prompt "hello who are you?" --max-tokens 50
+```
+
+Or you can use files instead of inline text to simplify your command:
+
 ```bash
-opengradient llm --model "meta-llama/Meta-Llama-3-8B-Instruct" --prompt "Translate to French: Hello, how are you?" --max-tokens 50 --temperature 0.7
+opengradient chat --model "mistralai/Mistral-7B-Instruct-v0.3" --messages-file messages.json --tools-file tools.json --max-tokens 200
 ```
 
+### Image Generation
+```bash
+opengradient generate-image \
+    --model "stabilityai/stable-diffusion-xl-base-1.0" \
+    --prompt "A beautiful sunset over mountains" \
+    --output-path sunset.png \
+    --width 1024 \
+    --height 1024
+```
+
+Options:
+- `--model`, `-m`: Model identifier for image generation (required)
+- `--prompt`, `-p`: Text prompt for generating the image (required)
+- `--output-path`, `-o`: Output file path for the generated image (required)
+- `--width`: Output image width in pixels (default: 1024)
+- `--height`: Output image height in pixels (default: 1024)
+
 For more information read the OpenGradient [documentation](https://docs.opengradient.ai/).
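For scripting around the CLI rather than the SDK, the documented `generate-image` flags can also be driven from Python's standard library. A small sketch, assuming the `opengradient` executable is on `PATH`:

```python
import subprocess

# Shell out to the CLI using only flags from the options list above.
subprocess.run(
    [
        "opengradient", "generate-image",
        "--model", "stabilityai/stable-diffusion-xl-base-1.0",
        "--prompt", "A beautiful sunset over mountains",
        "--output-path", "sunset.png",
        "--width", "1024",
        "--height", "1024",
    ],
    check=True,  # raise CalledProcessError if the CLI exits non-zero
)
```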

{opengradient-0.3.14 → opengradient-0.3.16}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "opengradient"
-version = "0.3.14"
+version = "0.3.16"
 description = "Python SDK for OpenGradient decentralized model management & inference services"
 authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
 license = {file = "LICENSE"}
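A quick way to verify that an installed copy picked up this version bump is to compare the distribution metadata against the package's own `__version__` (a sketch, assuming Python 3.8+ for `importlib.metadata`):

```python
from importlib.metadata import version

import opengradient as og

# Both should reflect the 0.3.16 bump made in pyproject.toml and __init__.py.
assert version("opengradient") == og.__version__ == "0.3.16"
```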

{opengradient-0.3.14 → opengradient-0.3.16}/src/opengradient/__init__.py
@@ -5,7 +5,7 @@ from .defaults import DEFAULT_INFERENCE_CONTRACT_ADDRESS, DEFAULT_RPC_URL
 from .types import InferenceMode, LLM
 from . import llm
 
-__version__ = "0.3.14"
+__version__ = "0.3.16"
 
 _client = None
 
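The `_client = None` context line suggests a lazily initialized, module-level client behind the SDK's top-level functions. The sketch below illustrates that pattern only; the real constructor and any `init`-style entry point are not shown in this diff, so the names and signature here are hypothetical:

```python
from typing import Optional

class Client:
    """Hypothetical stand-in; the real client class is not shown in this diff."""

    def __init__(self, private_key: str) -> None:  # hypothetical signature
        self.private_key = private_key

_client: Optional[Client] = None

def init(private_key: str) -> Client:
    """Create the module-level client once, then reuse it (hypothetical API)."""
    global _client
    if _client is None:
        _client = Client(private_key)
    return _client
```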