opengradient 0.3.0-py3-none-any.whl → 0.3.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
opengradient/__init__.py CHANGED
@@ -2,13 +2,13 @@ from .client import Client
 from .defaults import *
 from .types import InferenceMode
 from typing import List, Dict
-__version__ = "0.3.0"
+__version__ = "0.3.2"
 
 _client = None
 
 def init(email: str,
          password: str,
-         private_key=DEFAULT_PRIVATE_KEY,
+         private_key: str,
          rpc_url=DEFAULT_RPC_URL,
          contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS):
     global _client
@@ -42,6 +42,15 @@ def infer(model_cid, inference_mode, model_input):
         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
     return _client.infer(model_cid, inference_mode, model_input)
 
+def infer_llm(model_cid: str,
+              prompt: str,
+              max_tokens: int = 100,
+              stop_sequence: Optional[List[str]] = None,
+              temperature: float = 0.0) -> Tuple[str, str]:
+    if _client is None:
+        raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+    return _client.infer_llm(model_cid, prompt, max_tokens, stop_sequence, temperature)
+
 def login(email: str, password: str):
     if _client is None:
         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
opengradient/cli.py CHANGED
@@ -5,6 +5,7 @@ import ast
 from pathlib import Path
 import logging
 from pprint import pformat
+from typing import List
 import webbrowser
 import sys
 
@@ -97,7 +98,13 @@ def initialize_config(ctx):
 @click.group()
 @click.pass_context
 def cli(ctx):
-    """CLI for OpenGradient SDK. Visit https://docs.opengradient.ai/developers/python_sdk/ for more documentation."""
+    """
+    CLI for OpenGradient SDK.
+
+    Run 'opengradient config show' to make sure you have configs set up.
+
+    Visit https://docs.opengradient.ai/developers/python_sdk/ for more documentation.
+    """
     # Load existing config
     ctx.obj = load_og_config()
 
@@ -149,7 +156,7 @@ def show(ctx):
     click.echo("Current config:")
     for key, value in ctx.obj.items():
         if key != 'client':  # Don't display the client object
-            if key == 'password' and value is not None:
+            if (key == 'password' or key == 'private_key') and value is not None:
                 click.echo(f"{key}: {'*' * len(value)}")  # Mask the password
             elif value is None:
                 click.echo(f"{key}: Not set")
@@ -294,7 +301,6 @@ def infer(ctx, model_cid: str, inference_mode: str, input_data, input_file: Path
         with input_file.open('r') as file:
             model_input = json.load(file)
 
-        # Parse input data from string to dict
         click.echo(f"Running {inference_mode} inference for model \"{model_cid}\"\n")
         tx_hash, model_output = client.infer(model_cid=model_cid, inference_mode=InferenceModes[inference_mode], model_input=model_input)
 
@@ -307,6 +313,41 @@ def infer(ctx, model_cid: str, inference_mode: str, input_data, input_file: Path
     except Exception as e:
         click.echo(f"Error running inference: {str(e)}")
 
+@cli.command()
+@click.option('--model', '-m', 'model_cid', required=True, help='CID of the LLM model to run inference on')
+@click.option('--prompt', '-p', required=True, help='Input prompt for the LLM')
+@click.option('--max-tokens', type=int, default=100, help='Maximum number of tokens for LLM output')
+@click.option('--stop-sequence', multiple=True, help='Stop sequences for LLM')
+@click.option('--temperature', type=float, default=0.0, help='Temperature for LLM inference (0.0 to 1.0)')
+@click.pass_context
+def llm(ctx, model_cid: str, prompt: str, max_tokens: int, stop_sequence: List[str], temperature: float):
+    """
+    Run inference on an LLM model.
+
+    This command runs inference on the specified LLM model using the provided prompt and parameters.
+
+    Example usage:
+
+    \b
+    opengradient llm --model Qm... --prompt "Hello, how are you?" --max-tokens 50 --temperature 0.7
+    opengradient llm -m Qm... -p "Translate to French: Hello world" --stop-sequence "." --stop-sequence "\n"
+    """
+    client: Client = ctx.obj['client']
+    try:
+        click.echo(f"Running LLM inference for model \"{model_cid}\"\n")
+        tx_hash, llm_output = client.infer_llm(
+            model_cid=model_cid,
+            prompt=prompt,
+            max_tokens=max_tokens,
+            stop_sequence=list(stop_sequence),
+            temperature=temperature
+        )
+
+        click.secho("Success!", fg="green")
+        click.echo(f"Transaction hash: {tx_hash}")
+        click.echo(f"LLM output:\n{llm_output}")
+    except Exception as e:
+        click.echo(f"Error running LLM inference: {str(e)}")
 
 
 @cli.command()
 def create_account():
opengradient/client.py CHANGED
@@ -7,7 +7,7 @@ from opengradient.types import InferenceMode
 from opengradient import utils
 import numpy as np
 import logging
-from typing import Dict, Tuple, Union, List
+from typing import Dict, Optional, Tuple, Union, List
 from web3.exceptions import ContractLogicError
 import firebase
 
@@ -32,6 +32,8 @@ class Client:
             email (str, optional): Email for authentication. Defaults to "test@test.com".
             password (str, optional): Password for authentication. Defaults to "Test-123".
         """
+        self.email = email
+        self.password = password
         self.private_key = private_key
         self.rpc_url = rpc_url
         self.contract_address = contract_address
@@ -41,8 +43,6 @@ class Client:
         self.firebase_app = firebase.initialize_app(self.FIREBASE_CONFIG)
         self.auth = self.firebase_app.auth()
         self.user = None
-        self.email = email
-        self.password = password
 
         logging.debug("Initialized client with parameters:\n"
                       "private key: %s\n"
@@ -58,6 +58,14 @@ class Client:
         if email is not None:
             self.login(email, password)
 
+    def login(self, email, password):
+        try:
+            self.user = self.auth.sign_in_with_email_and_password(email, password)
+            return self.user
+        except Exception as e:
+            logging.error(f"Authentication failed: {str(e)}")
+            raise
+
     def _initialize_web3(self):
         """
         Initialize the Web3 instance if it is not already initialized.
@@ -402,13 +410,97 @@ class Client:
             logging.error(f"Error in infer method: {str(e)}", exc_info=True)
             raise OpenGradientError(f"Inference failed: {str(e)}")
 
-    def login(self, email, password):
+    def infer_llm(self,
+                  model_cid: str,
+                  prompt: str,
+                  max_tokens: int = 100,
+                  stop_sequence: Optional[List[str]] = None,
+                  temperature: float = 0.0) -> Tuple[str, str]:
+        """
+        Perform inference on an LLM model using completions.
+
+        Args:
+            model_cid (str): The unique content identifier for the model.
+            prompt (str): The input prompt for the LLM.
+            max_tokens (int): Maximum number of tokens for LLM output. Default is 100.
+            stop_sequence (List[str], optional): List of stop sequences for LLM. Default is None.
+            temperature (float): Temperature for LLM inference, between 0 and 1. Default is 0.0.
+
+        Returns:
+            Tuple[str, str]: The transaction hash and the LLM output.
+
+        Raises:
+            OpenGradientError: If the inference fails.
+        """
         try:
-            self.user = self.auth.sign_in_with_email_and_password(email, password)
-            return self.user
+            self._initialize_web3()
+
+            abi_path = os.path.join(os.path.dirname(__file__), 'abi', 'llm.abi')
+            with open(abi_path, 'r') as abi_file:
+                llm_abi = json.load(abi_file)
+            contract = self._w3.eth.contract(address=self.contract_address, abi=llm_abi)
+
+            # Prepare LLM input
+            llm_request = {
+                "mode": InferenceMode.VANILLA,
+                "modelCID": model_cid,
+                "prompt": prompt,
+                "max_tokens": max_tokens,
+                "stop_sequence": stop_sequence or [],
+                "temperature": int(temperature * 100)  # Scale to 0-100 range
+            }
+            logging.debug(f"Prepared LLM request: {llm_request}")
+
+            # Prepare run function
+            run_function = contract.functions.runLLM(llm_request)
+
+            # Build transaction
+            nonce = self._w3.eth.get_transaction_count(self.wallet_address)
+            estimated_gas = run_function.estimate_gas({'from': self.wallet_address})
+            gas_limit = int(estimated_gas * 1.2)
+
+            transaction = run_function.build_transaction({
+                'from': self.wallet_address,
+                'nonce': nonce,
+                'gas': gas_limit,
+                'gasPrice': self._w3.eth.gas_price,
+            })
+
+            # Sign and send transaction
+            signed_tx = self._w3.eth.account.sign_transaction(transaction, self.private_key)
+            tx_hash = self._w3.eth.send_raw_transaction(signed_tx.raw_transaction)
+            logging.debug(f"Transaction sent. Hash: {tx_hash.hex()}")
+
+            # Wait for transaction receipt
+            tx_receipt = self._w3.eth.wait_for_transaction_receipt(tx_hash)
+
+            if tx_receipt['status'] == 0:
+                raise ContractLogicError(f"Transaction failed. Receipt: {tx_receipt}")
+
+            # Process the LLMResult event
+            llm_result = None
+            for log in tx_receipt['logs']:
+                try:
+                    decoded_log = contract.events.LLMResult().process_log(log)
+                    llm_result = decoded_log['args']['response']['answer']
+                    break
+                except:
+                    continue
+
+            if llm_result is None:
+                raise OpenGradientError("LLMResult event not found in transaction logs")
+
+            logging.debug(f"LLM output: {llm_result}")
+
+            return tx_hash.hex(), llm_result
+
+        except ContractLogicError as e:
+            logging.error(f"Contract logic error: {str(e)}", exc_info=True)
+            raise OpenGradientError(f"LLM inference failed due to contract logic error: {str(e)}")
         except Exception as e:
-            logging.error(f"Authentication failed: {str(e)}")
-            raise
+            logging.error(f"Error in infer_llm method: {str(e)}", exc_info=True)
+            raise OpenGradientError(f"LLM inference failed: {str(e)}")
+
 
     def list_files(self, model_name: str, version: str) -> List[Dict]:
         """
opengradient/defaults.py CHANGED
@@ -1,7 +1,6 @@
 
 # Default variables
-DEFAULT_PRIVATE_KEY="cd09980ef6e280afc3900d2d6801f9e9c5d858a5deaeeab74a65643f5ff1a4c1"
 DEFAULT_RPC_URL="http://18.218.115.248:8545"
 DEFAULT_OG_FAUCET_URL="http://18.218.115.248:8080/?address="
 DEFAULT_HUB_SIGNUP_URL="https://hub.opengradient.ai/signup"
-DEFAULT_INFERENCE_CONTRACT_ADDRESS="0x75D0266DAb643417e9FFD828A1A31C1E039a966c"
+DEFAULT_INFERENCE_CONTRACT_ADDRESS="0x350E0A430b2B1563481833a99523Cfd17a530e4e"
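With `DEFAULT_PRIVATE_KEY` removed, 0.3.2 no longer falls back to a bundled key; callers have to supply their own. A minimal sketch of wiring one in from the environment; the `OPENGRADIENT_PRIVATE_KEY` variable name is hypothetical and chosen only for illustration:

```python
import os
import opengradient as og

# Hypothetical env var name for this sketch; any secret source works.
# The point: 0.3.2 removed the hardcoded DEFAULT_PRIVATE_KEY, so the key
# must now come from the caller.
private_key = os.environ["OPENGRADIENT_PRIVATE_KEY"]

og.init(email="<email>", password="<password>", private_key=private_key)
```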
opengradient-0.3.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opengradient
-Version: 0.3.0
+Version: 0.3.2
 Summary: Python SDK for OpenGradient decentralized model management & inference services
 Author-email: OpenGradient <oliver@opengradient.ai>
 License: MIT License
@@ -135,21 +135,27 @@ Requires-Dist: websockets==13.1
 Requires-Dist: xattr==1.1.0
 Requires-Dist: yarl==1.13.1
 
-# OpenGradient SDK
-
-Python SDK for OpenGradient decentralized model management & inference services.
+# OpenGradient Python SDK
+Python SDK for the OpenGradient platform provides decentralized model management & inference services. Python SDK allows programmatic access to our model repository and decentralized AI infrastructure.
 
 ## Installation
+
+To install Python SDK and CLI, run the following command:
 ```python
 pip install opengradient
 ```
 
 ## Quick Start
+
+To get started, run:
+
 ```python
 import opengradient as og
-og.init(email="<email>", password="<password>")
+og.init(private_key="<private_key>", email="<email>", password="<password>")
 ```
 
+The following commands show how to use Python SDK.
+
 ### Create a Model
 ```python
 og.create_model(model_name="<model_name>", model_desc="<model_description>")
@@ -180,6 +186,8 @@ og.list_files(model_name="<model_name>", version="<version>")
 inference_mode = og.InferenceMode.VANILLA
 og.infer(model_cid, model_inputs, inference_mode)
 ```
+- inference mode can be `VANILLA`, `ZKML`, or `TEE`
+
 
 ## Using the CLI
 
@@ -188,15 +196,15 @@ export OPENGRADIENT_EMAIL="<email>"
 export OPENGRADIENT_PASSWORD="<password>"
 ```
 
-#### Creating a Model
+#### Creating a Model Repo
 ```bash
-opengradient create_model "<model_name>" "<description>"
+opengradient create_model_repo "<model_name>" "<description>"
 ```
 - creating a model automatically initializes version `v0.01`
 
 #### Creating a Version
 ```bash
-opengradient create_model "<model_name>" "<description>"
+opengradient create_model_repo "<model_name>" "<description>"
 ```
 
 #### Upload a File
@@ -218,3 +226,5 @@ opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA '{"num
 ```bash
 opengradient infer QmbUqS93oc4JTLMHwpVxsE39mhNxy6hpf6Py3r9oANr8aZ VANILLA --input_file input.json
 ```
+
+For more information read the OpenGradient [documentation](https://docs.opengradient.ai/).
opengradient-0.3.2.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+opengradient/__init__.py,sha256=f1f7RSil3XAhjeTtcLuKuVyovgw7uIal1Xm3SyFrceA,2430
+opengradient/account.py,sha256=s1C4hAtc8vcHObWjwxwlYJA041S6DTbr7-rK6qiWPsQ,1149
+opengradient/cli.py,sha256=T59Z2S3AsMVS6TLgsSgxW9esssvYP5ZmVJrYE6p4oW4,16105
+opengradient/client.py,sha256=DCDp2EWPF62ZQnx2_cM0wPghRxgn213VnR65R8yZBVY,23964
+opengradient/defaults.py,sha256=pDfsmPoUzdLG55n-hwh0CMBFxKR2rdNcjqCcwTWc6iw,267
+opengradient/exceptions.py,sha256=v4VmUGTvvtjhCZAhR24Ga42z3q-DzR1Y5zSqP_yn2Xk,3366
+opengradient/types.py,sha256=EoJN-DkQrJ2WTUv8OenlrlWJWFY2jPGTl-T8C_OVjp8,1849
+opengradient/utils.py,sha256=F1Nj-GMNFQFxCtbGgWQq1RP4TSurbpQxJV3yKeEo1b0,6482
+opengradient/abi/inference.abi,sha256=u8FsW0s1YeRjUb9eLS1k_qh_5f_cwOdr0bii-tAdxh0,2683
+opengradient/abi/llm.abi,sha256=zhiPFyBT09EI3QU5DVoKHo7e8T9PFcfIQ3RHDYetm4M,3609
+opengradient-0.3.2.dist-info/LICENSE,sha256=xEcvQ3AxZOtDkrqkys2Mm6Y9diEnaSeQRKvxi-JGnNA,1069
+opengradient-0.3.2.dist-info/METADATA,sha256=x0ItAXOKTD_3D6cTscWUBDloysJNgZCCZ2YruhjJwDE,7620
+opengradient-0.3.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+opengradient-0.3.2.dist-info/entry_points.txt,sha256=yUKTaJx8RXnybkob0J62wVBiCp_1agVbgw9uzsmaeJc,54
+opengradient-0.3.2.dist-info/top_level.txt,sha256=oC1zimVLa2Yi1LQz8c7x-0IQm92milb5ax8gHBHwDqU,13
+opengradient-0.3.2.dist-info/RECORD,,
opengradient-0.3.0.dist-info/RECORD DELETED
@@ -1,16 +0,0 @@
-opengradient/__init__.py,sha256=JtRa1lL5qn5slM2egGm76gVuft-qEHc9gtU6tbOPRG8,2028
-opengradient/account.py,sha256=s1C4hAtc8vcHObWjwxwlYJA041S6DTbr7-rK6qiWPsQ,1149
-opengradient/cli.py,sha256=YKctHMZhT_Y1fANWDnGo68QpIVLXqz5ifH5kQXIxD8A,14412
-opengradient/client.py,sha256=__KtU-i6EQqZNj8xX-yKoqb_V8Cdzgr9zHBY0m02Xfw,20141
-opengradient/defaults.py,sha256=5JNcTNfOmoP3DymEvBqZPQHUYCIslvxvLRtJLon0MkM,354
-opengradient/exceptions.py,sha256=v4VmUGTvvtjhCZAhR24Ga42z3q-DzR1Y5zSqP_yn2Xk,3366
-opengradient/types.py,sha256=EoJN-DkQrJ2WTUv8OenlrlWJWFY2jPGTl-T8C_OVjp8,1849
-opengradient/utils.py,sha256=F1Nj-GMNFQFxCtbGgWQq1RP4TSurbpQxJV3yKeEo1b0,6482
-opengradient/abi/inference.abi,sha256=u8FsW0s1YeRjUb9eLS1k_qh_5f_cwOdr0bii-tAdxh0,2683
-opengradient/abi/llm.abi,sha256=zhiPFyBT09EI3QU5DVoKHo7e8T9PFcfIQ3RHDYetm4M,3609
-opengradient-0.3.0.dist-info/LICENSE,sha256=xEcvQ3AxZOtDkrqkys2Mm6Y9diEnaSeQRKvxi-JGnNA,1069
-opengradient-0.3.0.dist-info/METADATA,sha256=A6qo4CPFnnCw1N_PK5yfxKS9hCxFUbpvKEiBWma8byw,7168
-opengradient-0.3.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-opengradient-0.3.0.dist-info/entry_points.txt,sha256=yUKTaJx8RXnybkob0J62wVBiCp_1agVbgw9uzsmaeJc,54
-opengradient-0.3.0.dist-info/top_level.txt,sha256=oC1zimVLa2Yi1LQz8c7x-0IQm92milb5ax8gHBHwDqU,13
-opengradient-0.3.0.dist-info/RECORD,,