opengradient-0.3.17.tar.gz → opengradient-0.3.20.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {opengradient-0.3.17 → opengradient-0.3.20}/PKG-INFO +99 -97
  2. opengradient-0.3.20/pyproject.toml +148 -0
  3. opengradient-0.3.20/src/opengradient/__init__.py +267 -0
  4. opengradient-0.3.20/src/opengradient/llm/__init__.py +38 -0
  5. opengradient-0.3.20/src/opengradient/llm/og_openai.py +121 -0
  6. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/types.py +4 -0
  7. opengradient-0.3.17/pyproject.toml +0 -158
  8. opengradient-0.3.17/src/opengradient/__init__.py +0 -127
  9. opengradient-0.3.17/src/opengradient/llm/__init__.py +0 -5
  10. {opengradient-0.3.17 → opengradient-0.3.20}/.gitignore +0 -0
  11. {opengradient-0.3.17 → opengradient-0.3.20}/LICENSE +0 -0
  12. {opengradient-0.3.17 → opengradient-0.3.20}/README.md +0 -0
  13. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/abi/inference.abi +0 -0
  14. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/account.py +0 -0
  15. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/cli.py +0 -0
  16. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/client.py +0 -0
  17. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/defaults.py +0 -0
  18. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/exceptions.py +0 -0
  19. /opengradient-0.3.17/src/opengradient/llm/chat.py → /opengradient-0.3.20/src/opengradient/llm/og_langchain.py +0 -0
  20. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/proto/__init__.py +0 -0
  21. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/proto/infer.proto +0 -0
  22. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/proto/infer_pb2.py +0 -0
  23. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/proto/infer_pb2_grpc.py +0 -0
  24. {opengradient-0.3.17 → opengradient-0.3.20}/src/opengradient/utils.py +0 -0
@@ -1,6 +1,6 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: opengradient
- Version: 0.3.17
+ Version: 0.3.20
  Summary: Python SDK for OpenGradient decentralized model management & inference services
  Project-URL: Homepage, https://opengradient.ai
  Author-email: OpenGradient <oliver@opengradient.ai>
@@ -25,6 +25,7 @@ License: MIT License
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
+ License-File: LICENSE
  Classifier: Development Status :: 3 - Alpha
  Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: MIT License
@@ -32,102 +33,103 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Requires-Python: >=3.10
- Requires-Dist: aiohappyeyeballs==2.4.3
- Requires-Dist: aiohttp==3.10.8
- Requires-Dist: aiosignal==1.3.1
- Requires-Dist: annotated-types==0.7.0
- Requires-Dist: attrs==24.2.0
- Requires-Dist: bitarray==2.9.2
- Requires-Dist: cachecontrol==0.14.0
- Requires-Dist: cachetools==5.5.0
- Requires-Dist: certifi==2024.8.30
- Requires-Dist: cffi==1.17.1
- Requires-Dist: charset-normalizer==3.3.2
- Requires-Dist: ckzg==2.0.1
- Requires-Dist: cleo==2.1.0
- Requires-Dist: click==8.1.7
- Requires-Dist: cramjam==2.8.4
- Requires-Dist: crashtest==0.4.1
- Requires-Dist: cryptography==43.0.1
- Requires-Dist: cytoolz==0.12.3
- Requires-Dist: distlib==0.3.8
- Requires-Dist: dulwich==0.21.7
- Requires-Dist: eth-abi==5.1.0
- Requires-Dist: eth-account==0.13.4
- Requires-Dist: eth-hash==0.7.0
- Requires-Dist: eth-keyfile==0.8.1
- Requires-Dist: eth-keys==0.5.1
- Requires-Dist: eth-rlp==2.1.0
- Requires-Dist: eth-typing==5.0.0
- Requires-Dist: eth-utils==5.0.0
- Requires-Dist: fastjsonschema==2.20.0
- Requires-Dist: fastparquet==2024.5.0
- Requires-Dist: filelock==3.16.1
- Requires-Dist: firebase-rest-api==1.11.0
- Requires-Dist: frozenlist==1.4.1
- Requires-Dist: fsspec==2024.9.0
- Requires-Dist: google-api-core==2.20.0
- Requires-Dist: google-auth==2.35.0
- Requires-Dist: google-cloud-core==2.4.1
- Requires-Dist: google-cloud-firestore==2.19.0
- Requires-Dist: google-cloud-storage==2.18.2
- Requires-Dist: google-crc32c==1.6.0
- Requires-Dist: google-resumable-media==2.7.2
- Requires-Dist: googleapis-common-protos==1.65.0
- Requires-Dist: grpcio-tools==1.66.2
- Requires-Dist: grpcio==1.66.2
- Requires-Dist: hexbytes==1.2.1
- Requires-Dist: idna==3.10
- Requires-Dist: jaraco-classes==3.4.0
- Requires-Dist: jwcrypto==1.5.6
- Requires-Dist: keyring==24.3.1
- Requires-Dist: langchain==0.3.7
- Requires-Dist: more-itertools==10.5.0
- Requires-Dist: msgpack==1.1.0
- Requires-Dist: multidict==6.1.0
- Requires-Dist: packaging==24.1
- Requires-Dist: pandas==2.2.3
- Requires-Dist: parsimonious==0.10.0
- Requires-Dist: pathlib==1.0.1
- Requires-Dist: pexpect==4.9.0
- Requires-Dist: pkce==1.0.3
- Requires-Dist: pkginfo==1.11.1
- Requires-Dist: platformdirs==4.3.6
- Requires-Dist: proto-plus==1.24.0
- Requires-Dist: protobuf==5.28.2
+ Requires-Dist: aiohappyeyeballs>=2.4.3
+ Requires-Dist: aiohttp>=3.10.8
+ Requires-Dist: aiosignal>=1.3.1
+ Requires-Dist: annotated-types>=0.7.0
+ Requires-Dist: attrs>=24.2.0
+ Requires-Dist: bitarray>=2.9.2
+ Requires-Dist: cachecontrol>=0.14.0
+ Requires-Dist: cachetools>=5.5.0
+ Requires-Dist: certifi>=2024.8.30
+ Requires-Dist: cffi>=1.17.1
+ Requires-Dist: charset-normalizer>=3.3.2
+ Requires-Dist: ckzg>=2.0.1
+ Requires-Dist: cleo>=2.1.0
+ Requires-Dist: click>=8.1.7
+ Requires-Dist: cramjam>=2.8.4
+ Requires-Dist: crashtest>=0.4.1
+ Requires-Dist: cryptography>=43.0.1
+ Requires-Dist: cytoolz>=0.12.3
+ Requires-Dist: distlib>=0.3.8
+ Requires-Dist: dulwich>=0.21.7
+ Requires-Dist: eth-abi>=5.1.0
+ Requires-Dist: eth-account>=0.13.4
+ Requires-Dist: eth-hash>=0.7.0
+ Requires-Dist: eth-keyfile>=0.8.1
+ Requires-Dist: eth-keys>=0.5.1
+ Requires-Dist: eth-rlp>=2.1.0
+ Requires-Dist: eth-typing>=5.0.0
+ Requires-Dist: eth-utils>=5.0.0
+ Requires-Dist: fastjsonschema>=2.20.0
+ Requires-Dist: fastparquet>=2024.5.0
+ Requires-Dist: filelock>=3.16.1
+ Requires-Dist: firebase-rest-api>=1.11.0
+ Requires-Dist: frozenlist>=1.4.1
+ Requires-Dist: fsspec>=2024.9.0
+ Requires-Dist: google-api-core>=2.20.0
+ Requires-Dist: google-auth>=2.35.0
+ Requires-Dist: google-cloud-core>=2.4.1
+ Requires-Dist: google-cloud-firestore>=2.19.0
+ Requires-Dist: google-cloud-storage>=2.18.2
+ Requires-Dist: google-crc32c>=1.6.0
+ Requires-Dist: google-resumable-media>=2.7.2
+ Requires-Dist: googleapis-common-protos>=1.65.0
+ Requires-Dist: grpcio-tools>=1.66.2
+ Requires-Dist: grpcio>=1.66.2
+ Requires-Dist: hexbytes>=1.2.1
+ Requires-Dist: idna>=3.10
+ Requires-Dist: jaraco-classes>=3.4.0
+ Requires-Dist: jwcrypto>=1.5.6
+ Requires-Dist: keyring>=24.3.1
+ Requires-Dist: langchain>=0.3.7
+ Requires-Dist: more-itertools>=10.5.0
+ Requires-Dist: msgpack>=1.1.0
+ Requires-Dist: multidict>=6.1.0
+ Requires-Dist: openai>=1.58.1
+ Requires-Dist: packaging>=24.1
+ Requires-Dist: pandas>=2.2.3
+ Requires-Dist: parsimonious>=0.10.0
+ Requires-Dist: pathlib>=1.0.1
+ Requires-Dist: pexpect>=4.9.0
+ Requires-Dist: pkce>=1.0.3
+ Requires-Dist: pkginfo>=1.11.1
+ Requires-Dist: platformdirs>=4.3.6
+ Requires-Dist: proto-plus>=1.24.0
  Requires-Dist: protobuf>=4.24.0
- Requires-Dist: ptyprocess==0.7.0
- Requires-Dist: pyarrow==17.0.0
- Requires-Dist: pyasn1-modules==0.4.1
- Requires-Dist: pyasn1==0.6.1
- Requires-Dist: pycparser==2.22
- Requires-Dist: pycryptodome==3.21.0
- Requires-Dist: pydantic-core==2.23.4
- Requires-Dist: pydantic==2.9.2
- Requires-Dist: pyproject-hooks==1.2.0
- Requires-Dist: python-dateutil==2.9.0.post0
- Requires-Dist: python-jwt==4.1.0
- Requires-Dist: pytz==2024.2
- Requires-Dist: pyunormalize==16.0.0
- Requires-Dist: rapidfuzz==3.10.0
- Requires-Dist: regex==2024.9.11
- Requires-Dist: requests-toolbelt==1.0.0
- Requires-Dist: requests==2.32.3
- Requires-Dist: rlp==4.0.1
- Requires-Dist: rsa==4.9
- Requires-Dist: shellingham==1.5.4
- Requires-Dist: six==1.16.0
- Requires-Dist: tomlkit==0.13.2
- Requires-Dist: toolz==0.12.1
- Requires-Dist: trove-classifiers==2024.9.12
- Requires-Dist: types-requests==2.32.0.20240914
- Requires-Dist: typing-extensions==4.12.2
- Requires-Dist: tzdata==2024.2
- Requires-Dist: urllib3==2.2.3
- Requires-Dist: web3==7.3.0
- Requires-Dist: websockets==13.1
- Requires-Dist: xattr==1.1.0
- Requires-Dist: yarl==1.13.1
+ Requires-Dist: protobuf>=5.28.2
+ Requires-Dist: ptyprocess>=0.7.0
+ Requires-Dist: pyarrow>=17.0.0
+ Requires-Dist: pyasn1-modules>=0.4.1
+ Requires-Dist: pyasn1>=0.6.1
+ Requires-Dist: pycparser>=2.22
+ Requires-Dist: pycryptodome>=3.21.0
+ Requires-Dist: pydantic-core>=2.23.4
+ Requires-Dist: pydantic>=2.9.2
+ Requires-Dist: pyproject-hooks>=1.2.0
+ Requires-Dist: python-dateutil>=2.9.0.post0
+ Requires-Dist: python-jwt>=4.1.0
+ Requires-Dist: pytz>=2024.2
+ Requires-Dist: pyunormalize>=16.0.0
+ Requires-Dist: rapidfuzz>=3.10.0
+ Requires-Dist: regex>=2024.9.11
+ Requires-Dist: requests-toolbelt>=1.0.0
+ Requires-Dist: requests>=2.32.3
+ Requires-Dist: rlp>=4.0.1
+ Requires-Dist: rsa>=4.9
+ Requires-Dist: shellingham>=1.5.4
+ Requires-Dist: six>=1.16.0
+ Requires-Dist: tomlkit>=0.13.2
+ Requires-Dist: toolz>=0.12.1
+ Requires-Dist: trove-classifiers>=2024.9.12
+ Requires-Dist: types-requests>=2.32.0.20240914
+ Requires-Dist: typing-extensions>=4.12.2
+ Requires-Dist: tzdata>=2024.2
+ Requires-Dist: urllib3>=2.2.3
+ Requires-Dist: web3>=7.3.0
+ Requires-Dist: websockets>=13.1
+ Requires-Dist: xattr>=1.1.0
+ Requires-Dist: yarl>=1.13.1
  Description-Content-Type: text/markdown

  # OpenGradient Python SDK
@@ -0,0 +1,148 @@
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+ [project]
+ name = "opengradient"
+ version = "0.3.20"
+ description = "Python SDK for OpenGradient decentralized model management & inference services"
+ authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
+ license = {file = "LICENSE"}
+ readme = "README.md"
+ requires-python = ">=3.10"
+ classifiers = [
+     "Development Status :: 3 - Alpha",
+     "Intended Audience :: Developers",
+     "License :: OSI Approved :: MIT License",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+ ]
+ dependencies = [
+     "aiohappyeyeballs>=2.4.3",
+     "aiohttp>=3.10.8",
+     "aiosignal>=1.3.1",
+     "annotated-types>=0.7.0",
+     "attrs>=24.2.0",
+     "bitarray>=2.9.2",
+     "CacheControl>=0.14.0",
+     "cachetools>=5.5.0",
+     "certifi>=2024.8.30",
+     "cffi>=1.17.1",
+     "charset-normalizer>=3.3.2",
+     "ckzg>=2.0.1",
+     "cleo>=2.1.0",
+     "click>=8.1.7",
+     "cramjam>=2.8.4",
+     "crashtest>=0.4.1",
+     "cryptography>=43.0.1",
+     "cytoolz>=0.12.3",
+     "distlib>=0.3.8",
+     "dulwich>=0.21.7",
+     "eth-account>=0.13.4",
+     "eth-hash>=0.7.0",
+     "eth-keyfile>=0.8.1",
+     "eth-keys>=0.5.1",
+     "eth-rlp>=2.1.0",
+     "eth-typing>=5.0.0",
+     "eth-utils>=5.0.0",
+     "eth_abi>=5.1.0",
+     "fastjsonschema>=2.20.0",
+     "fastparquet>=2024.5.0",
+     "filelock>=3.16.1",
+     "firebase-rest-api>=1.11.0",
+     "frozenlist>=1.4.1",
+     "fsspec>=2024.9.0",
+     "google-api-core>=2.20.0",
+     "google-auth>=2.35.0",
+     "google-cloud-core>=2.4.1",
+     "google-cloud-firestore>=2.19.0",
+     "google-cloud-storage>=2.18.2",
+     "google-crc32c>=1.6.0",
+     "google-resumable-media>=2.7.2",
+     "googleapis-common-protos>=1.65.0",
+     "grpcio>=1.66.2",
+     "grpcio-tools>=1.66.2",
+     "protobuf>=4.24.0",
+     "hexbytes>=1.2.1",
+     "idna>=3.10",
+     "jaraco.classes>=3.4.0",
+     "jwcrypto>=1.5.6",
+     "keyring>=24.3.1",
+     "langchain>=0.3.7",
+     "more-itertools>=10.5.0",
+     "msgpack>=1.1.0",
+     "multidict>=6.1.0",
+     "openai>=1.58.1",
+     "packaging>=24.1",
+     "pandas>=2.2.3",
+     "parsimonious>=0.10.0",
+     "pathlib>=1.0.1",
+     "pexpect>=4.9.0",
+     "pkce>=1.0.3",
+     "pkginfo>=1.11.1",
+     "platformdirs>=4.3.6",
+     "proto-plus>=1.24.0",
+     "protobuf>=5.28.2",
+     "ptyprocess>=0.7.0",
+     "pyarrow>=17.0.0",
+     "pyasn1>=0.6.1",
+     "pyasn1_modules>=0.4.1",
+     "pycparser>=2.22",
+     "pycryptodome>=3.21.0",
+     "pydantic>=2.9.2",
+     "pydantic_core>=2.23.4",
+     "pyproject_hooks>=1.2.0",
+     "python-dateutil>=2.9.0.post0",
+     "python-jwt>=4.1.0",
+     "pytz>=2024.2",
+     "pyunormalize>=16.0.0",
+     "RapidFuzz>=3.10.0",
+     "regex>=2024.9.11",
+     "requests>=2.32.3",
+     "requests-toolbelt>=1.0.0",
+     "rlp>=4.0.1",
+     "rsa>=4.9",
+     "shellingham>=1.5.4",
+     "six>=1.16.0",
+     "tomlkit>=0.13.2",
+     "toolz>=0.12.1",
+     "trove-classifiers>=2024.9.12",
+     "types-requests>=2.32.0.20240914",
+     "typing_extensions>=4.12.2",
+     "tzdata>=2024.2",
+     "urllib3>=2.2.3",
+     "web3>=7.3.0",
+     "websockets>=13.1",
+     "xattr>=1.1.0",
+     "yarl>=1.13.1",
+ ]
+ [project.scripts]
+ opengradient = "opengradient.cli:cli"
+ [project.urls]
+ Homepage = "https://opengradient.ai"
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ include-package-data = true
+ [tool.setuptools.packages.find]
+ where = ["src"]
+ exclude = ["tests*", "stresstest*"]
+ [tool.setuptools.package-data]
+ "*" = ["*.py", "*.abi"]
+ "opengradient" = ["abi/*.abi"]
+ [tool.setuptools.exclude-package-data]
+ "*" = ["*.ipynb", "*.pyc", "*.pyo", ".gitignore", "requirements.txt", "conftest.py"]
+ [tool.ruff]
+ line-length = 140
+ target-version = "py310" # Specify your Python version
+ select = ["E", "F", "I", "N"]
+ ignore = []
+ [tool.ruff.mccabe]
+ max-complexity = 10
+ [tool.hatch.build]
+ include = [
+     "src/opengradient/**/*.py",
+     "src/opengradient/proto/*.proto",
+     "src/opengradient/abi/*.abi"
+ ]
+ [tool.hatch.build.targets.wheel]
+ packages = ["src/opengradient"]
@@ -0,0 +1,267 @@
+ """
+ OpenGradient Python SDK for interacting with AI models and infrastructure.
+ """
+
+ from typing import Dict, List, Optional, Tuple
+
+ from .client import Client
+ from .defaults import DEFAULT_INFERENCE_CONTRACT_ADDRESS, DEFAULT_RPC_URL
+ from .types import InferenceMode, LlmInferenceMode, LLM, TEE_LLM
+ from . import llm
+
+ __version__ = "0.3.20"
+
+ _client = None
+
+ def init(email: str,
+          password: str,
+          private_key: str,
+          rpc_url=DEFAULT_RPC_URL,
+          contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS):
+     """Initialize the OpenGradient SDK with authentication and network settings.
+
+     Args:
+         email: User's email address for authentication
+         password: User's password for authentication
+         private_key: Ethereum private key for blockchain transactions
+         rpc_url: Optional RPC URL for the blockchain network, defaults to mainnet
+         contract_address: Optional inference contract address
+     """
+     global _client
+     _client = Client(private_key=private_key, rpc_url=rpc_url, contract_address=contract_address, email=email, password=password)
+
+ def upload(model_path, model_name, version):
+     """Upload a model file to OpenGradient.
+
+     Args:
+         model_path: Path to the model file on local filesystem
+         model_name: Name of the model repository
+         version: Version string for this model upload
+
+     Returns:
+         dict: Upload response containing file metadata
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.upload(model_path, model_name, version)
+
+ def create_model(model_name: str, model_desc: str, model_path: str = None):
+     """Create a new model repository.
+
+     Args:
+         model_name: Name for the new model repository
+         model_desc: Description of the model
+         model_path: Optional path to model file to upload immediately
+
+     Returns:
+         dict: Creation response with model metadata and optional upload results
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+
+     result = _client.create_model(model_name, model_desc)
+
+     if model_path:
+         version = "0.01"
+         upload_result = _client.upload(model_path, model_name, version)
+         result["upload"] = upload_result
+
+     return result
+
+ def create_version(model_name, notes=None, is_major=False):
+     """Create a new version for an existing model.
+
+     Args:
+         model_name: Name of the model repository
+         notes: Optional release notes for this version
+         is_major: If True, creates a major version bump instead of minor
+
+     Returns:
+         dict: Version creation response with version metadata
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.create_version(model_name, notes, is_major)
+
+ def infer(model_cid, inference_mode, model_input, max_retries: Optional[int] = None):
+     """Run inference on a model.
+
+     Args:
+         model_cid: CID of the model to use
+         inference_mode: Mode of inference (e.g. VANILLA)
+         model_input: Input data for the model
+         max_retries: Maximum number of retries for failed transactions
+
+     Returns:
+         Tuple[str, Any]: Transaction hash and model output
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.infer(model_cid, inference_mode, model_input, max_retries=max_retries)
+
+ def llm_completion(model_cid: LLM,
+                    prompt: str,
+                    inference_mode: str = LlmInferenceMode.VANILLA,
+                    max_tokens: int = 100,
+                    stop_sequence: Optional[List[str]] = None,
+                    temperature: float = 0.0,
+                    max_retries: Optional[int] = None) -> Tuple[str, str]:
+     """Generate text completion using an LLM.
+
+     Args:
+         model_cid: CID of the LLM model to use
+         prompt: Text prompt for completion
+         inference_mode: Mode of inference, defaults to VANILLA
+         max_tokens: Maximum tokens to generate
+         stop_sequence: Optional list of sequences where generation should stop
+         temperature: Sampling temperature (0.0 = deterministic, 1.0 = creative)
+         max_retries: Maximum number of retries for failed transactions
+
+     Returns:
+         Tuple[str, str]: Transaction hash and generated text
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.llm_completion(model_cid=model_cid,
+                                   inference_mode=inference_mode,
+                                   prompt=prompt,
+                                   max_tokens=max_tokens,
+                                   stop_sequence=stop_sequence,
+                                   temperature=temperature,
+                                   max_retries=max_retries)
+
+ def llm_chat(model_cid: LLM,
+              messages: List[Dict],
+              inference_mode: str = LlmInferenceMode.VANILLA,
+              max_tokens: int = 100,
+              stop_sequence: Optional[List[str]] = None,
+              temperature: float = 0.0,
+              tools: Optional[List[Dict]] = None,
+              tool_choice: Optional[str] = None,
+              max_retries: Optional[int] = None) -> Tuple[str, str, Dict]:
+     """Have a chat conversation with an LLM.
+
+     Args:
+         model_cid: CID of the LLM model to use
+         messages: List of chat messages, each with 'role' and 'content'
+         inference_mode: Mode of inference, defaults to VANILLA
+         max_tokens: Maximum tokens to generate
+         stop_sequence: Optional list of sequences where generation should stop
+         temperature: Sampling temperature (0.0 = deterministic, 1.0 = creative)
+         tools: Optional list of tools the model can use
+         tool_choice: Optional specific tool to use
+         max_retries: Maximum number of retries for failed transactions
+
+     Returns:
+         Tuple[str, str, Dict]: Transaction hash, model response, and metadata
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.llm_chat(model_cid=model_cid,
+                             inference_mode=inference_mode,
+                             messages=messages,
+                             max_tokens=max_tokens,
+                             stop_sequence=stop_sequence,
+                             temperature=temperature,
+                             tools=tools,
+                             tool_choice=tool_choice,
+                             max_retries=max_retries)
+
+ def login(email: str, password: str):
+     """Login to OpenGradient.
+
+     Args:
+         email: User's email address
+         password: User's password
+
+     Returns:
+         dict: Login response with authentication tokens
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.login(email, password)
+
+ def list_files(model_name: str, version: str) -> List[Dict]:
+     """List files in a model repository version.
+
+     Args:
+         model_name: Name of the model repository
+         version: Version string to list files from
+
+     Returns:
+         List[Dict]: List of file metadata dictionaries
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.list_files(model_name, version)
+
+ def generate_image(model: str, prompt: str, height: Optional[int] = None, width: Optional[int] = None) -> bytes:
+     """Generate an image from a text prompt.
+
+     Args:
+         model: Model identifier (e.g. "stabilityai/stable-diffusion-xl-base-1.0")
+         prompt: Text description of the desired image
+         height: Optional height of the generated image in pixels
+         width: Optional width of the generated image in pixels
+
+     Returns:
+         bytes: Raw image data as bytes
+
+     Raises:
+         RuntimeError: If SDK is not initialized
+         OpenGradientError: If image generation fails
+     """
+     if _client is None:
+         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
+     return _client.generate_image(model, prompt, height=height, width=width)
+
+ __all__ = [
+     'generate_image',
+     'list_files',
+     'login',
+     'llm_chat',
+     'llm_completion',
+     'infer',
+     'create_version',
+     'create_model',
+     'upload',
+     'init',
+     'LLM',
+     'TEE_LLM'
+ ]
+
+ __pdoc__ = {
+     'account': False,
+     'cli': False,
+     'client': False,
+     'defaults': False,
+     'exceptions': False,
+     'llm': True,
+     'proto': False,
+     'types': False,
+     'utils': False
+ }
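For context, a minimal usage sketch of the module-level API added in this file (not part of the diff). The email, password, private key, and prompt below are placeholders; the function names, keyword arguments, and the `LLM` enum come from the code above.

```python
import opengradient as og

# Placeholder credentials and key -- supply real values before running.
og.init(
    email="user@example.com",
    password="example-password",
    private_key="0x...",
)

# Per the docstring above, llm_chat returns a 3-tuple:
# (transaction hash, model response, metadata).
tx_hash, response, metadata = og.llm_chat(
    model_cid=og.LLM.META_LLAMA_3_1_70B_INSTRUCT,
    messages=[{"role": "user", "content": "Hello, OpenGradient!"}],
    max_tokens=50,
)
print(tx_hash, response)
```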
@@ -0,0 +1,38 @@
+ """
+ OpenGradient LLM Adapters
+
+ This module provides adapter interfaces to use OpenGradient LLMs with popular AI frameworks
+ like LangChain and OpenAI. These adapters allow seamless integration of OpenGradient models
+ into existing applications and agent frameworks.
+ """
+
+ from .og_langchain import *
+ from .og_openai import *
+
+ def langchain_adapter(private_key: str, model_cid: str, max_tokens: int = 300) -> OpenGradientChatModel:
+     """
+     Returns an OpenGradient LLM that implements LangChain's LLM interface
+     and can be plugged into LangChain agents.
+     """
+     return OpenGradientChatModel(
+         private_key=private_key,
+         model_cid=model_cid,
+         max_tokens=max_tokens)
+
+ def openai_adapter(private_key: str) -> OpenGradientOpenAIClient:
+     """
+     Returns a generic OpenAI LLM client that can be plugged into Swarm and can
+     be used with any LLM model on OpenGradient. The LLM is usually defined in the
+     agent.
+     """
+     return OpenGradientOpenAIClient(private_key=private_key)
+
+ __all__ = [
+     'langchain_adapter',
+     'openai_adapter',
+ ]
+
+ __pdoc__ = {
+     'og_langchain': False,
+     'og_openai': False
+ }
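A brief sketch of how the two adapter factories added above might be wired up (illustrative only). The private key is a placeholder, and the model CID string is one of the values from the `LLM` enum in `types.py`.

```python
from opengradient.llm import langchain_adapter, openai_adapter

PRIVATE_KEY = "0x..."  # placeholder Ethereum private key

# LangChain-compatible chat model backed by an OpenGradient-hosted LLM.
chat_model = langchain_adapter(
    private_key=PRIVATE_KEY,
    model_cid="meta-llama/Llama-3.1-70B-Instruct",
    max_tokens=300,
)

# Generic OpenAI-style client, e.g. for agent frameworks such as Swarm.
openai_client = openai_adapter(private_key=PRIVATE_KEY)
```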
@@ -0,0 +1,121 @@
+ from openai.types.chat import ChatCompletion
+ import opengradient as og
+ from opengradient.defaults import DEFAULT_RPC_URL, DEFAULT_INFERENCE_CONTRACT_ADDRESS
+
+ from typing import List
+ import time
+ import json
+ import uuid
+
+ class OGCompletions(object):
+     client: og.Client
+
+     def __init__(self, client: og.Client):
+         self.client = client
+
+     def create(
+             self,
+             model: str,
+             messages: List[object],
+             tools: List[object],
+             tool_choice: str,
+             stream: bool = False,
+             parallel_tool_calls: bool = False) -> ChatCompletion:
+
+         # convert OpenAI message format so it's compatible with the SDK
+         sdk_messages = OGCompletions.convert_to_abi_compatible(messages)
+
+         _, finish_reason, chat_completion = self.client.llm_chat(
+             model_cid=model,
+             messages=sdk_messages,
+             max_tokens=200,
+             tools=tools,
+             tool_choice=tool_choice,
+             temperature=0.25,
+             inference_mode=og.LlmInferenceMode.VANILLA
+         )
+
+         choice = {
+             'index': 0,  # Add missing index field
+             'finish_reason': finish_reason,
+             'message': {
+                 'role': chat_completion['role'],
+                 'content': chat_completion['content'],
+                 'tool_calls': [
+                     {
+                         'id': tool_call['id'],
+                         'type': 'function',  # Add missing type field
+                         'function': {  # Add missing function field
+                             'name': tool_call['name'],
+                             'arguments': tool_call['arguments']
+                         }
+                     }
+                     for tool_call in chat_completion.get('tool_calls', [])
+                 ]
+             }
+         }
+
+         return ChatCompletion(
+             id=str(uuid.uuid4()),
+             created=int(time.time()),
+             model=model,
+             object='chat.completion',
+             choices=[choice]
+         )
+
+     @staticmethod
+     @staticmethod
+     def convert_to_abi_compatible(messages):
+         sdk_messages = []
+
+         for message in messages:
+             role = message['role']
+             sdk_message = {
+                 'role': role
+             }
+
+             if role == 'system':
+                 sdk_message['content'] = message['content']
+             elif role == 'user':
+                 sdk_message['content'] = message['content']
+             elif role == 'tool':
+                 sdk_message['content'] = message['content']
+                 sdk_message['tool_call_id'] = message['tool_call_id']
+             elif role == 'assistant':
+                 flattened_calls = []
+                 for tool_call in message['tool_calls']:
+                     # OpenAI format
+                     flattened_call = {
+                         'id': tool_call['id'],
+                         'name': tool_call['function']['name'],
+                         'arguments': tool_call['function']['arguments']
+                     }
+                     flattened_calls.append(flattened_call)
+
+                 sdk_message['tool_calls'] = flattened_calls
+                 sdk_message['content'] = message['content']
+
+             sdk_messages.append(sdk_message)
+
+         return sdk_messages
+
+ class OGChat(object):
+     completions: OGCompletions
+
+     def __init__(self, client: og.Client):
+         self.completions = OGCompletions(client)
+
+ class OpenGradientOpenAIClient(object):
+     """OpenAI client implementation"""
+     client: og.Client
+     chat: OGChat
+
+     def __init__(self, private_key: str):
+         self.client = og.Client(
+             private_key=private_key,
+             rpc_url=DEFAULT_RPC_URL,
+             contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS,
+             email=None,
+             password=None
+         )
+         self.chat = OGChat(self.client)
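A hedged usage sketch of the OpenAI-style client defined above (not part of the package). The private key is a placeholder; `tools` and `tool_choice` have no defaults in `create()`, so they are passed explicitly, and the accepted `tool_choice` values depend on the inference backend.

```python
from opengradient.llm.og_openai import OpenGradientOpenAIClient

client = OpenGradientOpenAIClient(private_key="0x...")  # placeholder key

completion = client.chat.completions.create(
    model="meta-llama/Llama-3.1-70B-Instruct",
    messages=[{"role": "user", "content": "Summarize OpenGradient in one sentence."}],
    tools=[],            # no tools exposed in this sketch
    tool_choice="none",  # placeholder value; depends on the backend
)
print(completion.choices[0].message.content)
```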
@@ -79,6 +79,8 @@ class Abi:
          return result

  class LLM(str, Enum):
+     """Enum for available LLM models"""
+
      META_LLAMA_3_8B_INSTRUCT = "meta-llama/Meta-Llama-3-8B-Instruct"
      LLAMA_3_2_3B_INSTRUCT = "meta-llama/Llama-3.2-3B-Instruct"
      MISTRAL_7B_INSTRUCT_V3 = "mistralai/Mistral-7B-Instruct-v0.3"
@@ -86,4 +88,6 @@ class LLM(str, Enum):
      META_LLAMA_3_1_70B_INSTRUCT = "meta-llama/Llama-3.1-70B-Instruct"

  class TEE_LLM(str, Enum):
+     """Enum for LLM models available for TEE execution"""
+
      META_LLAMA_3_1_70B_INSTRUCT = "meta-llama/Llama-3.1-70B-Instruct"
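A small illustration of the str-backed enums documented above; the assertions simply restate values already present in `types.py`.

```python
from opengradient.types import LLM, TEE_LLM

# Both enums subclass str, so members can be passed anywhere a model
# identifier string is expected (e.g. as model_cid in llm_chat).
assert LLM.META_LLAMA_3_8B_INSTRUCT == "meta-llama/Meta-Llama-3-8B-Instruct"
assert TEE_LLM.META_LLAMA_3_1_70B_INSTRUCT == "meta-llama/Llama-3.1-70B-Instruct"
```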
@@ -1,158 +0,0 @@
- [build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
-
- [project]
- name = "opengradient"
- version = "0.3.17"
- description = "Python SDK for OpenGradient decentralized model management & inference services"
- authors = [{name = "OpenGradient", email = "oliver@opengradient.ai"}]
- license = {file = "LICENSE"}
- readme = "README.md"
- requires-python = ">=3.10"
- classifiers = [
-     "Development Status :: 3 - Alpha",
-     "Intended Audience :: Developers",
-     "License :: OSI Approved :: MIT License",
-     "Programming Language :: Python :: 3.10",
-     "Programming Language :: Python :: 3.11",
-     "Programming Language :: Python :: 3.12",
- ]
- dependencies = [
-     "aiohappyeyeballs==2.4.3",
-     "aiohttp==3.10.8",
-     "aiosignal==1.3.1",
-     "annotated-types==0.7.0",
-     "attrs==24.2.0",
-     "bitarray==2.9.2",
-     "CacheControl==0.14.0",
-     "cachetools==5.5.0",
-     "certifi==2024.8.30",
-     "cffi==1.17.1",
-     "charset-normalizer==3.3.2",
-     "ckzg==2.0.1",
-     "cleo==2.1.0",
-     "click==8.1.7",
-     "cramjam==2.8.4",
-     "crashtest==0.4.1",
-     "cryptography==43.0.1",
-     "cytoolz==0.12.3",
-     "distlib==0.3.8",
-     "dulwich==0.21.7",
-     "eth-account==0.13.4",
-     "eth-hash==0.7.0",
-     "eth-keyfile==0.8.1",
-     "eth-keys==0.5.1",
-     "eth-rlp==2.1.0",
-     "eth-typing==5.0.0",
-     "eth-utils==5.0.0",
-     "eth_abi==5.1.0",
-     "fastjsonschema==2.20.0",
-     "fastparquet==2024.5.0",
-     "filelock==3.16.1",
-     "firebase-rest-api==1.11.0",
-     "frozenlist==1.4.1",
-     "fsspec==2024.9.0",
-     "google-api-core==2.20.0",
-     "google-auth==2.35.0",
-     "google-cloud-core==2.4.1",
-     "google-cloud-firestore==2.19.0",
-     "google-cloud-storage==2.18.2",
-     "google-crc32c==1.6.0",
-     "google-resumable-media==2.7.2",
-     "googleapis-common-protos==1.65.0",
-     "grpcio==1.66.2",
-     "grpcio-tools==1.66.2",
-     "protobuf>=4.24.0",
-     "hexbytes==1.2.1",
-     "idna==3.10",
-     "jaraco.classes==3.4.0",
-     "jwcrypto==1.5.6",
-     "keyring==24.3.1",
-     "langchain==0.3.7",
-     "more-itertools==10.5.0",
-     "msgpack==1.1.0",
-     "multidict==6.1.0",
-     "packaging==24.1",
-     "pandas==2.2.3",
-     "parsimonious==0.10.0",
-     "pathlib==1.0.1",
-     "pexpect==4.9.0",
-     "pkce==1.0.3",
-     "pkginfo==1.11.1",
-     "platformdirs==4.3.6",
-     "proto-plus==1.24.0",
-     "protobuf==5.28.2",
-     "ptyprocess==0.7.0",
-     "pyarrow==17.0.0",
-     "pyasn1==0.6.1",
-     "pyasn1_modules==0.4.1",
-     "pycparser==2.22",
-     "pycryptodome==3.21.0",
-     "pydantic==2.9.2",
-     "pydantic_core==2.23.4",
-     "pyproject_hooks==1.2.0",
-     "python-dateutil==2.9.0.post0",
-     "python-jwt==4.1.0",
-     "pytz==2024.2",
-     "pyunormalize==16.0.0",
-     "RapidFuzz==3.10.0",
-     "regex==2024.9.11",
-     "requests==2.32.3",
-     "requests-toolbelt==1.0.0",
-     "rlp==4.0.1",
-     "rsa==4.9",
-     "shellingham==1.5.4",
-     "six==1.16.0",
-     "tomlkit==0.13.2",
-     "toolz==0.12.1",
-     "trove-classifiers==2024.9.12",
-     "types-requests==2.32.0.20240914",
-     "typing_extensions==4.12.2",
-     "tzdata==2024.2",
-     "urllib3==2.2.3",
-     "web3==7.3.0",
-     "websockets==13.1",
-     "xattr==1.1.0",
-     "yarl==1.13.1",
- ]
-
- [project.scripts]
- opengradient = "opengradient.cli:cli"
-
- [project.urls]
- Homepage = "https://opengradient.ai"
-
- [tool.setuptools]
- package-dir = {"" = "src"}
- include-package-data = true
-
- [tool.setuptools.packages.find]
- where = ["src"]
- exclude = ["tests*", "stresstest*"]
-
- [tool.setuptools.package-data]
- "*" = ["*.py", "*.abi"]
- "opengradient" = ["abi/*.abi"]
-
- [tool.setuptools.exclude-package-data]
- "*" = ["*.ipynb", "*.pyc", "*.pyo", ".gitignore", "requirements.txt", "conftest.py"]
-
- [tool.ruff]
- line-length = 140
- target-version = "py310" # Specify your Python version
- select = ["E", "F", "I", "N"]
- ignore = []
-
- [tool.ruff.mccabe]
- max-complexity = 10
-
- [tool.hatch.build]
- include = [
-     "src/opengradient/**/*.py",
-     "src/opengradient/proto/*.proto",
-     "src/opengradient/abi/*.abi"
- ]
-
- [tool.hatch.build.targets.wheel]
- packages = ["src/opengradient"]
@@ -1,127 +0,0 @@
- from typing import Dict, List, Optional, Tuple
-
- from .client import Client
- from .defaults import DEFAULT_INFERENCE_CONTRACT_ADDRESS, DEFAULT_RPC_URL
- from .types import InferenceMode, LlmInferenceMode, LLM, TEE_LLM
- from . import llm
-
- __version__ = "0.3.17"
-
- _client = None
-
- def init(email: str,
-          password: str,
-          private_key: str,
-          rpc_url=DEFAULT_RPC_URL,
-          contract_address=DEFAULT_INFERENCE_CONTRACT_ADDRESS):
-     global _client
-     _client = Client(private_key=private_key, rpc_url=rpc_url, contract_address=contract_address, email=email, password=password)
-
- def upload(model_path, model_name, version):
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.upload(model_path, model_name, version)
-
- def create_model(model_name: str, model_desc: str, model_path: str = None):
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-
-     result = _client.create_model(model_name, model_desc)
-
-     if model_path:
-         version = "0.01"
-         upload_result = _client.upload(model_path, model_name, version)
-         result["upload"] = upload_result
-
-     return result
-
- def create_version(model_name, notes=None, is_major=False):
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.create_version(model_name, notes, is_major)
-
- def infer(model_cid, inference_mode, model_input, max_retries: Optional[int] = None):
-     """
-     Perform inference on a model.
-
-     Args:
-         model_cid: Model CID to use for inference
-         inference_mode: Mode of inference (e.g. VANILLA)
-         model_input: Input data for the model
-         max_retries: Optional maximum number of retry attempts for transaction errors
-
-     Returns:
-         Tuple of (transaction hash, model output)
-     """
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.infer(model_cid, inference_mode, model_input, max_retries=max_retries)
-
- def llm_completion(model_cid: LLM,
-                    prompt: str,
-                    inference_mode: str = LlmInferenceMode.VANILLA,
-                    max_tokens: int = 100,
-                    stop_sequence: Optional[List[str]] = None,
-                    temperature: float = 0.0,
-                    max_retries: Optional[int] = None) -> Tuple[str, str]:
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.llm_completion(model_cid=model_cid,
-                                   inference_mode=inference_mode,
-                                   prompt=prompt,
-                                   max_tokens=max_tokens,
-                                   stop_sequence=stop_sequence,
-                                   temperature=temperature,
-                                   max_retries=max_retries)
-
- def llm_chat(model_cid: LLM,
-              messages: List[Dict],
-              inference_mode: str = LlmInferenceMode.VANILLA,
-              max_tokens: int = 100,
-              stop_sequence: Optional[List[str]] = None,
-              temperature: float = 0.0,
-              tools: Optional[List[Dict]] = None,
-              tool_choice: Optional[str] = None,
-              max_retries: Optional[int] = None) -> Tuple[str, str, Dict]:
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.llm_chat(model_cid=model_cid,
-                             inference_mode=inference_mode,
-                             messages=messages,
-                             max_tokens=max_tokens,
-                             stop_sequence=stop_sequence,
-                             temperature=temperature,
-                             tools=tools,
-                             tool_choice=tool_choice,
-                             max_retries=max_retries)
-
- def login(email: str, password: str):
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.login(email, password)
-
- def list_files(model_name: str, version: str) -> List[Dict]:
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.list_files(model_name, version)
-
- def generate_image(model: str, prompt: str, height: Optional[int] = None, width: Optional[int] = None) -> bytes:
-     """
-     Generate an image using the specified model and prompt.
-
-     Args:
-         model (str): The model identifier (e.g. "stabilityai/stable-diffusion-xl-base-1.0")
-         prompt (str): The text prompt to generate the image from
-         height (Optional[int]): Height of the generated image. Default is None.
-         width (Optional[int]): Width of the generated image. Default is None.
-
-     Returns:
-         bytes: The raw image data bytes
-
-     Raises:
-         RuntimeError: If the client is not initialized
-         OpenGradientError: If the image generation fails
-     """
-     if _client is None:
-         raise RuntimeError("OpenGradient client not initialized. Call og.init() first.")
-     return _client.generate_image(model, prompt, height=height, width=width)
@@ -1,5 +0,0 @@
- from .chat import *
-
- __all__ = [
-     'OpenGradientChatModel'
- ]