gravixlayer 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gravixlayer-0.0.1/.github/workflows/pypi-release.yml +40 -0
- gravixlayer-0.0.1/.gitignore +99 -0
- gravixlayer-0.0.1/CHANGELOG.md +0 -0
- gravixlayer-0.0.1/MANIFEST.in +4 -0
- gravixlayer-0.0.1/PKG-INFO +124 -0
- gravixlayer-0.0.1/README.md +94 -0
- gravixlayer-0.0.1/gravixlayer/__init__.py +22 -0
- gravixlayer-0.0.1/gravixlayer/cli.py +46 -0
- gravixlayer-0.0.1/gravixlayer/client.py +107 -0
- gravixlayer-0.0.1/gravixlayer/resources/__init__.py +1 -0
- gravixlayer-0.0.1/gravixlayer/resources/chat/__init__.py +3 -0
- gravixlayer-0.0.1/gravixlayer/resources/chat/completions.py +99 -0
- gravixlayer-0.0.1/gravixlayer/types/__init__.py +13 -0
- gravixlayer-0.0.1/gravixlayer/types/async_client.py +67 -0
- gravixlayer-0.0.1/gravixlayer/types/chat.py +30 -0
- gravixlayer-0.0.1/gravixlayer/types/exceptions.py +18 -0
- gravixlayer-0.0.1/gravixlayer.egg-info/PKG-INFO +124 -0
- gravixlayer-0.0.1/gravixlayer.egg-info/SOURCES.txt +33 -0
- gravixlayer-0.0.1/gravixlayer.egg-info/dependency_links.txt +1 -0
- gravixlayer-0.0.1/gravixlayer.egg-info/entry_points.txt +2 -0
- gravixlayer-0.0.1/gravixlayer.egg-info/requires.txt +2 -0
- gravixlayer-0.0.1/gravixlayer.egg-info/top_level.txt +1 -0
- gravixlayer-0.0.1/pyproject.toml +39 -0
- gravixlayer-0.0.1/requirements.txt +2 -0
- gravixlayer-0.0.1/scripts/bump_version.py +0 -0
- gravixlayer-0.0.1/scripts/release.sh +0 -0
- gravixlayer-0.0.1/setup.cfg +4 -0
- gravixlayer-0.0.1/setup.py +52 -0
- gravixlayer-0.0.1/tempCodeRunnerFile.py +1 -0
- gravixlayer-0.0.1/test.py +25 -0
- gravixlayer-0.0.1/test2.py +12 -0
- gravixlayer-0.0.1/tests/test_chat_completions.py +54 -0
- gravixlayer-0.0.1/tests/test_client.py +18 -0
- gravixlayer-0.0.1/tests/test_exceptions.py +24 -0
- gravixlayer-0.0.1/version.py +18 -0
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
name: Publish and Release to PyPI

on:
  push:
    tags:
      - "v*.*.*" # Triggers workflow on tags like v0.0.1

jobs:
  build-and-publish:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - name: Install build tools
        run: |
          pip install --upgrade pip
          pip install build

      - name: Build package
        run: python -m build

      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          password: ${{ secrets.PYPI_TOKEN }}

  release:
    needs: build-and-publish
    runs-on: ubuntu-latest
    # Creating a GitHub Release writes to the repository; the default
    # GITHUB_TOKEN may be read-only, so grant write access explicitly.
    permissions:
      contents: write
    steps:
      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
          generate_release_notes: true
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
# Byte-compiled / optimized / DLL files
|
|
2
|
+
__pycache__/
|
|
3
|
+
*.py[cod]
|
|
4
|
+
*$py.class
|
|
5
|
+
|
|
6
|
+
# Distribution / packaging
|
|
7
|
+
.Python
|
|
8
|
+
build/
|
|
9
|
+
develop-eggs/
|
|
10
|
+
dist/
|
|
11
|
+
downloads/
|
|
12
|
+
eggs/
|
|
13
|
+
.eggs/
|
|
14
|
+
lib/
|
|
15
|
+
lib64/
|
|
16
|
+
parts/
|
|
17
|
+
sdist/
|
|
18
|
+
var/
|
|
19
|
+
wheels/
|
|
20
|
+
*.egg-info/
|
|
21
|
+
.installed.cfg
|
|
22
|
+
*.egg
|
|
23
|
+
MANIFEST
|
|
24
|
+
|
|
25
|
+
# PyInstaller
|
|
26
|
+
*.manifest
|
|
27
|
+
*.spec
|
|
28
|
+
|
|
29
|
+
# Installer logs
|
|
30
|
+
pip-log.txt
|
|
31
|
+
pip-delete-this-directory.txt
|
|
32
|
+
|
|
33
|
+
# Unit test / coverage reports
|
|
34
|
+
htmlcov/
|
|
35
|
+
.tox/
|
|
36
|
+
.coverage
|
|
37
|
+
.coverage.*
|
|
38
|
+
.cache
|
|
39
|
+
nosetests.xml
|
|
40
|
+
coverage.xml
|
|
41
|
+
*.cover
|
|
42
|
+
.hypothesis/
|
|
43
|
+
.pytest_cache/
|
|
44
|
+
|
|
45
|
+
# Translations
|
|
46
|
+
*.mo
|
|
47
|
+
*.pot
|
|
48
|
+
|
|
49
|
+
# Django stuff:
|
|
50
|
+
*.log
|
|
51
|
+
local_settings.py
|
|
52
|
+
db.sqlite3
|
|
53
|
+
|
|
54
|
+
# Flask stuff:
|
|
55
|
+
instance/
|
|
56
|
+
.webassets-cache
|
|
57
|
+
|
|
58
|
+
# Scrapy stuff:
|
|
59
|
+
.scrapy
|
|
60
|
+
|
|
61
|
+
# Sphinx documentation
|
|
62
|
+
docs/_build/
|
|
63
|
+
|
|
64
|
+
# PyBuilder
|
|
65
|
+
target/
|
|
66
|
+
|
|
67
|
+
# Jupyter Notebook
|
|
68
|
+
.ipynb_checkpoints
|
|
69
|
+
|
|
70
|
+
# pyenv
|
|
71
|
+
.python-version
|
|
72
|
+
|
|
73
|
+
# celery beat schedule file
|
|
74
|
+
celerybeat-schedule
|
|
75
|
+
|
|
76
|
+
# SageMath parsed files
|
|
77
|
+
*.sage.py
|
|
78
|
+
|
|
79
|
+
# Environments
|
|
80
|
+
.env
|
|
81
|
+
.venv
|
|
82
|
+
env/
|
|
83
|
+
venv/
|
|
84
|
+
ENV/
|
|
85
|
+
env.bak/
|
|
86
|
+
venv.bak/
|
|
87
|
+
|
|
88
|
+
# Spyder project settings
|
|
89
|
+
.spyderproject
|
|
90
|
+
.spyproject
|
|
91
|
+
|
|
92
|
+
# Rope project settings
|
|
93
|
+
.ropeproject
|
|
94
|
+
|
|
95
|
+
# mkdocs documentation
|
|
96
|
+
/site
|
|
97
|
+
|
|
98
|
+
# mypy
|
|
99
|
+
.mypy_cache/
|
|
File without changes
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: gravixlayer
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: GravixLayer Python SDK - OpenAI Compatible
|
|
5
|
+
Home-page: https://github.com/sukrithpvs/gravixlayer-python
|
|
6
|
+
Author: Sukrith
|
|
7
|
+
Author-email: Sukrith <sukrithpvs@gmail.com>
|
|
8
|
+
License: MIT
|
|
9
|
+
Project-URL: Homepage, https://github.com/sukrithpvs/gravixlayer-python
|
|
10
|
+
Project-URL: Repository, https://github.com/sukrithpvs/gravixlayer-python
|
|
11
|
+
Project-URL: Issues, https://github.com/sukrithpvs/gravixlayer-python/issues
|
|
12
|
+
Keywords: gravixlayer,openai,llm,ai,api,sdk
|
|
13
|
+
Classifier: Development Status :: 4 - Beta
|
|
14
|
+
Classifier: Intended Audience :: Developers
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Operating System :: OS Independent
|
|
17
|
+
Classifier: Programming Language :: Python :: 3
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.7
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
23
|
+
Requires-Python: >=3.7
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
Requires-Dist: requests>=2.25.0
|
|
26
|
+
Requires-Dist: python-dotenv>=0.19.0
|
|
27
|
+
Dynamic: author
|
|
28
|
+
Dynamic: home-page
|
|
29
|
+
Dynamic: requires-python
|
|
30
|
+
|
|
31
|
+
text
|
|
32
|
+
# GravixLayer Python SDK
|
|
33
|
+
|
|
34
|
+
A Python SDK for GravixLayer API that's fully compatible with OpenAI's interface.
|
|
35
|
+
|
|
36
|
+
## Installation
|
|
37
|
+
|
|
38
|
+
pip install gravixlayer
|
|
39
|
+
|
|
40
|
+
text
|
|
41
|
+
|
|
42
|
+
## Quick Start
|
|
43
|
+
|
|
44
|
+
import os
|
|
45
|
+
from gravixlayer import GravixLayer
|
|
46
|
+
Initialize client
|
|
47
|
+
|
|
48
|
+
client = GravixLayer(
|
|
49
|
+
api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
|
|
50
|
+
)
|
|
51
|
+
Create completion
|
|
52
|
+
|
|
53
|
+
completion = client.chat.completions.create(
|
|
54
|
+
model="llama3.1:8b",
|
|
55
|
+
messages=[
|
|
56
|
+
{"role": "system", "content": "You are a helpful assistant."},
|
|
57
|
+
{"role": "user", "content": "What are the three most popular programming languages?"}
|
|
58
|
+
]
|
|
59
|
+
)
|
|
60
|
+
|
|
61
|
+
print(completion.choices[0].message.content)
|
|
62
|
+
|
|
63
|
+
text
|
|
64
|
+
|
|
65
|
+
## OpenAI Compatibility
|
|
66
|
+
|
|
67
|
+
You can use this SDK as a drop-in replacement for OpenAI:
|
|
68
|
+
|
|
69
|
+
from gravixlayer import OpenAI # This is an alias for GravixLayer
|
|
70
|
+
|
|
71
|
+
client = OpenAI(
|
|
72
|
+
base_url="https://api.gravixlayer.com/v1/inference",
|
|
73
|
+
api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
|
|
74
|
+
)
|
|
75
|
+
|
|
76
|
+
text
|
|
77
|
+
|
|
78
|
+
## Features
|
|
79
|
+
|
|
80
|
+
- **OpenAI Compatible**: Drop-in replacement for OpenAI Python SDK
|
|
81
|
+
- **Streaming Support**: Real-time response streaming
|
|
82
|
+
- **Error Handling**: Automatic retries and proper error handling
|
|
83
|
+
- **Type Hints**: Full type support for better development experience
|
|
84
|
+
- **Environment Variables**: Automatic API key detection
|
|
85
|
+
|
|
86
|
+
## API Reference
|
|
87
|
+
|
|
88
|
+
### Chat Completions
|
|
89
|
+
|
|
90
|
+
completion = client.chat.completions.create(
|
|
91
|
+
model="llama3.1:8b",
|
|
92
|
+
messages=[...],
|
|
93
|
+
temperature=0.7,
|
|
94
|
+
max_tokens=150,
|
|
95
|
+
top_p=1.0,
|
|
96
|
+
frequency_penalty=0,
|
|
97
|
+
presence_penalty=0,
|
|
98
|
+
stop=None,
|
|
99
|
+
stream=False
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
text
|
|
103
|
+
|
|
104
|
+
### Streaming
|
|
105
|
+
|
|
106
|
+
stream = client.chat.completions.create(
|
|
107
|
+
model="llama3.1:8b",
|
|
108
|
+
messages=[...],
|
|
109
|
+
stream=True
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
for chunk in stream:
|
|
113
|
+
if chunk.choices[0].message.content is not None:
|
|
114
|
+
print(chunk.choices[0].message.content, end="")
|
|
115
|
+
|
|
116
|
+
text
|
|
117
|
+
|
|
118
|
+
## Environment Variables
|
|
119
|
+
|
|
120
|
+
- `GRAVIXLAYER_API_KEY`: Your GravixLayer API key
|
|
121
|
+
|
|
122
|
+
## License
|
|
123
|
+
|
|
124
|
+
MIT License
|
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
text
|
|
2
|
+
# GravixLayer Python SDK
|
|
3
|
+
|
|
4
|
+
A Python SDK for GravixLayer API that's fully compatible with OpenAI's interface.
|
|
5
|
+
|
|
6
|
+
## Installation
|
|
7
|
+
|
|
8
|
+
pip install gravixlayer
|
|
9
|
+
|
|
10
|
+
text
|
|
11
|
+
|
|
12
|
+
## Quick Start
|
|
13
|
+
|
|
14
|
+
import os
|
|
15
|
+
from gravixlayer import GravixLayer
|
|
16
|
+
Initialize client
|
|
17
|
+
|
|
18
|
+
client = GravixLayer(
|
|
19
|
+
api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
|
|
20
|
+
)
|
|
21
|
+
Create completion
|
|
22
|
+
|
|
23
|
+
completion = client.chat.completions.create(
|
|
24
|
+
model="llama3.1:8b",
|
|
25
|
+
messages=[
|
|
26
|
+
{"role": "system", "content": "You are a helpful assistant."},
|
|
27
|
+
{"role": "user", "content": "What are the three most popular programming languages?"}
|
|
28
|
+
]
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
print(completion.choices[0].message.content)
|
|
32
|
+
|
|
33
|
+
text
|
|
34
|
+
|
|
35
|
+
## OpenAI Compatibility
|
|
36
|
+
|
|
37
|
+
You can use this SDK as a drop-in replacement for OpenAI:
|
|
38
|
+
|
|
39
|
+
from gravixlayer import OpenAI # This is an alias for GravixLayer
|
|
40
|
+
|
|
41
|
+
client = OpenAI(
|
|
42
|
+
base_url="https://api.gravixlayer.com/v1/inference",
|
|
43
|
+
api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
text
|
|
47
|
+
|
|
48
|
+
## Features
|
|
49
|
+
|
|
50
|
+
- **OpenAI Compatible**: Drop-in replacement for OpenAI Python SDK
|
|
51
|
+
- **Streaming Support**: Real-time response streaming
|
|
52
|
+
- **Error Handling**: Automatic retries and proper error handling
|
|
53
|
+
- **Type Hints**: Full type support for better development experience
|
|
54
|
+
- **Environment Variables**: Automatic API key detection
|
|
55
|
+
|
|
56
|
+
## API Reference
|
|
57
|
+
|
|
58
|
+
### Chat Completions
|
|
59
|
+
|
|
60
|
+
completion = client.chat.completions.create(
|
|
61
|
+
model="llama3.1:8b",
|
|
62
|
+
messages=[...],
|
|
63
|
+
temperature=0.7,
|
|
64
|
+
max_tokens=150,
|
|
65
|
+
top_p=1.0,
|
|
66
|
+
frequency_penalty=0,
|
|
67
|
+
presence_penalty=0,
|
|
68
|
+
stop=None,
|
|
69
|
+
stream=False
|
|
70
|
+
)
|
|
71
|
+
|
|
72
|
+
text
|
|
73
|
+
|
|
74
|
+
### Streaming
|
|
75
|
+
|
|
76
|
+
stream = client.chat.completions.create(
|
|
77
|
+
model="llama3.1:8b",
|
|
78
|
+
messages=[...],
|
|
79
|
+
stream=True
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
for chunk in stream:
|
|
83
|
+
if chunk.choices[0].message.content is not None:
|
|
84
|
+
print(chunk.choices[0].message.content, end="")
|
|
85
|
+
|
|
86
|
+
text
|
|
87
|
+
|
|
88
|
+
## Environment Variables
|
|
89
|
+
|
|
90
|
+
- `GRAVIXLAYER_API_KEY`: Your GravixLayer API key
|
|
91
|
+
|
|
92
|
+
## License
|
|
93
|
+
|
|
94
|
+
MIT License
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""
|
|
2
|
+
GravixLayer Python SDK - OpenAI Compatible
|
|
3
|
+
"""
|
|
4
|
+
__version__ = "0.0.1"
|
|
5
|
+
|
|
6
|
+
from .client import GravixLayer
|
|
7
|
+
from .types.chat import (
|
|
8
|
+
ChatCompletion,
|
|
9
|
+
ChatCompletionMessage,
|
|
10
|
+
ChatCompletionChoice,
|
|
11
|
+
ChatCompletionUsage,
|
|
12
|
+
)
|
|
13
|
+
OpenAI = GravixLayer
|
|
14
|
+
|
|
15
|
+
__all__ = [
|
|
16
|
+
"GravixLayer",
|
|
17
|
+
"OpenAI",
|
|
18
|
+
"ChatCompletion",
|
|
19
|
+
"ChatCompletionMessage",
|
|
20
|
+
"ChatCompletionChoice",
|
|
21
|
+
"ChatCompletionUsage",
|
|
22
|
+
]
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import argparse
import os
import sys

from gravixlayer import GravixLayer


def main():
    """CLI entry point: send one chat completion request and print the reply.

    Reads the API key from --api-key or the GRAVIXLAYER_API_KEY environment
    variable. Exits with status 1 on any error so shell scripts can detect
    failure.
    """
    parser = argparse.ArgumentParser(
        description="GravixLayer CLI – OpenAI-Compatible Chat Completions"
    )
    parser.add_argument("--api-key", type=str, default=None, help="API key")
    parser.add_argument("--model", required=True, help="Model name (e.g., gemma3:12b)")
    parser.add_argument("--system", default=None, help="System prompt (optional)")
    parser.add_argument("--user", required=True, help="User prompt/message")
    parser.add_argument("--temperature", type=float, default=None, help="Temperature")
    parser.add_argument("--stream", action="store_true", help="Stream output (token-by-token)")

    args = parser.parse_args()

    client = GravixLayer(api_key=args.api_key or os.environ.get("GRAVIXLAYER_API_KEY"))

    messages = []
    if args.system:
        messages.append({"role": "system", "content": args.system})
    messages.append({"role": "user", "content": args.user})

    try:
        if args.stream:
            # Print each chunk as it arrives; flush so tokens appear immediately.
            for chunk in client.chat.completions.create(
                model=args.model,
                messages=messages,
                temperature=args.temperature,
                stream=True
            ):
                print(chunk.choices[0].message.content, end="", flush=True)
            print()
        else:
            completion = client.chat.completions.create(
                model=args.model,
                messages=messages,
                temperature=args.temperature
            )
            print(completion.choices[0].message.content)
    except Exception as e:
        # Report on stderr and exit non-zero; previously the CLI printed the
        # error to stdout and exited 0, making failures invisible to scripts.
        print(f"❌ Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import os
import time
import requests
import logging
from typing import Optional, Dict, Any
from .resources.chat.completions import ChatCompletions
from .types.exceptions import (
    GravixLayerError,
    GravixLayerAuthenticationError,
    GravixLayerRateLimitError,
    GravixLayerServerError,
    GravixLayerBadRequestError,
    GravixLayerConnectionError
)


class GravixLayer:
    """
    Main GravixLayer client - OpenAI compatible.

    Holds connection settings and exposes `chat.completions` like the
    OpenAI SDK. Requests are retried with exponential backoff on rate
    limits, transient 5xx responses, and connection errors.
    """
    def __init__(
        self,
        api_key: Optional[str] = None,
        base_url: Optional[str] = None,
        timeout: float = 60.0,
        max_retries: int = 3,
        headers: Optional[Dict[str, str]] = None,
        # Fixed annotation: callers pass a Logger *instance*, not the class,
        # so the correct type is Optional[logging.Logger] (was Type[...]).
        logger: Optional[logging.Logger] = None,
        user_agent: Optional[str] = None,
    ):
        """Create a client.

        Args:
            api_key: API key; falls back to GRAVIXLAYER_API_KEY env var.
            base_url: API root; must be HTTPS. Defaults to the public endpoint.
            timeout: Per-request timeout in seconds.
            max_retries: Retry attempts for rate limits / transient errors.
            headers: Extra headers merged into every request.
            logger: Logger instance; defaults to the "gravixlayer" logger.
            user_agent: Override for the User-Agent header.

        Raises:
            ValueError: If the base URL is not HTTPS or no API key is found.
        """
        self.api_key = api_key or os.environ.get("GRAVIXLAYER_API_KEY")
        self.base_url = base_url or "https://api.gravixlayer.com/v1/inference"
        if not self.base_url.startswith("https://"):
            raise ValueError("Base URL must use HTTPS for security reasons.")
        self.timeout = timeout
        self.max_retries = max_retries
        self.custom_headers = headers or {}
        self.logger = logger or logging.getLogger("gravixlayer")
        self.logger.setLevel(logging.INFO)
        if not self.logger.hasHandlers():
            logging.basicConfig(level=logging.INFO)
        # Plain string: the previous f-string had no placeholders.
        self.user_agent = user_agent or "gravixlayer-python/0.0.1"
        if not self.api_key:
            raise ValueError("API key must be provided via argument or GRAVIXLAYER_API_KEY environment variable")
        self.chat = ChatResource(self)

    def _make_request(
        self,
        method: str,
        endpoint: str,
        data: Optional[Dict[str, Any]] = None,
        stream: bool = False,
        **kwargs
    ) -> requests.Response:
        """Send one HTTP request with auth headers and retry policy.

        Retries (up to `max_retries`) on 429 (honoring Retry-After when
        present), on 502/503/504, and on connection-level failures, sleeping
        2**attempt seconds between attempts.

        Raises:
            GravixLayerAuthenticationError: On HTTP 401.
            GravixLayerRateLimitError: On HTTP 429 after retries are exhausted.
            GravixLayerBadRequestError: On other 4xx responses.
            GravixLayerServerError: On 5xx responses after retries.
            GravixLayerConnectionError: On network failures after retries.
            GravixLayerError: If all attempts complete without a result.
        """
        url = f"{self.base_url.rstrip('/')}/{endpoint.lstrip('/')}" if endpoint else self.base_url
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "User-Agent": self.user_agent,
            **self.custom_headers,
        }
        for attempt in range(self.max_retries + 1):
            try:
                resp = requests.request(
                    method=method,
                    url=url,
                    headers=headers,
                    json=data,
                    timeout=self.timeout,
                    stream=stream,
                    **kwargs
                )
                if resp.status_code == 200:
                    return resp
                elif resp.status_code == 401:
                    raise GravixLayerAuthenticationError("Authentication failed.")
                elif resp.status_code == 429:
                    retry_after = resp.headers.get("Retry-After")
                    self.logger.warning(f"Rate limit exceeded. Retrying in {retry_after or 2**attempt}s...")
                    if attempt < self.max_retries:
                        # Prefer the server-provided backoff; otherwise exponential.
                        time.sleep(float(retry_after) if retry_after else (2 ** attempt))
                        continue
                    raise GravixLayerRateLimitError(resp.text)
                elif resp.status_code in [502, 503, 504] and attempt < self.max_retries:
                    self.logger.warning(f"Server error: {resp.status_code}. Retrying...")
                    time.sleep(2 ** attempt)
                    continue
                elif 400 <= resp.status_code < 500:
                    raise GravixLayerBadRequestError(resp.text)
                elif 500 <= resp.status_code < 600:
                    raise GravixLayerServerError(resp.text)
                else:
                    resp.raise_for_status()
            except requests.RequestException as e:
                if attempt == self.max_retries:
                    raise GravixLayerConnectionError(str(e)) from e
                self.logger.warning("Transient connection error, retrying...")
                time.sleep(2 ** attempt)
        raise GravixLayerError("Failed to complete request.")


class ChatResource:
    """Namespace object so usage mirrors OpenAI: `client.chat.completions`."""
    def __init__(self, client: "GravixLayer"):
        self.client = client
        self.completions = ChatCompletions(client)


# Drop-in alias for OpenAI-style imports.
OpenAI = GravixLayer
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Resources module
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
from typing import Dict, Any, List, Optional, Union, Iterator
import json
import time
from ...types.chat import ChatCompletion, ChatCompletionChoice, ChatCompletionMessage, ChatCompletionUsage


class ChatCompletions:
    """Chat completions resource (`client.chat.completions`)."""

    def __init__(self, client):
        # `client` must expose `_make_request(method, endpoint, data, stream=...)`.
        self.client = client

    def create(
        self,
        model: str,
        messages: List[Dict[str, str]],
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        top_p: Optional[float] = None,
        frequency_penalty: Optional[float] = None,
        presence_penalty: Optional[float] = None,
        stop: Optional[Union[str, List[str]]] = None,
        stream: bool = False,
        **kwargs
    ) -> Union[ChatCompletion, Iterator[ChatCompletion]]:
        """Create a chat completion.

        Args mirror the OpenAI API; parameters left as None are omitted from
        the request body. Extra keyword arguments are forwarded verbatim.

        Returns:
            A ChatCompletion, or an iterator of ChatCompletion chunks when
            `stream=True`.
        """
        data: Dict[str, Any] = {
            "model": model,
            "messages": messages,
            "stream": stream
        }
        # Only send optional sampling parameters the caller actually set.
        optional = {
            "temperature": temperature,
            "max_tokens": max_tokens,
            "top_p": top_p,
            "frequency_penalty": frequency_penalty,
            "presence_penalty": presence_penalty,
            "stop": stop,
        }
        data.update({k: v for k, v in optional.items() if v is not None})
        data.update(kwargs)
        return self._create_stream(data) if stream else self._create_non_stream(data)

    def _create_non_stream(self, data: Dict[str, Any]) -> ChatCompletion:
        """Send one blocking request and parse the JSON body."""
        resp = self.client._make_request("POST", "chat/completions", data)
        return self._parse_response(resp.json())

    def _create_stream(self, data: Dict[str, Any]) -> Iterator[ChatCompletion]:
        """Yield parsed chunks from a server-sent-events response."""
        resp = self.client._make_request("POST", "chat/completions", data, stream=True)
        for line in resp.iter_lines():
            if not line:
                continue
            line = line.decode("utf-8")
            if line.startswith("data: "):
                line = line[len("data: "):]
            if line.strip() == "[DONE]":
                break
            try:
                chunk = json.loads(line)
                yield self._parse_response(chunk, is_stream=True)
            except json.JSONDecodeError:
                # Skip keep-alive / malformed lines rather than aborting the stream.
                continue

    def _parse_response(self, resp_data: Dict[str, Any], is_stream: bool = False) -> ChatCompletion:
        """Normalize a raw API payload into a ChatCompletion.

        Handles full responses (content under choice["message"]), streaming
        chunks (content under choice["delta"]), and bare-string bodies.
        """
        # Normalize early: previously a string body crashed on `.get()`
        # before the isinstance check ever ran, and again on `resp_data["usage"]`.
        if isinstance(resp_data, str):
            resp_data = {"content": resp_data}

        if "choices" in resp_data:
            choices = []
            for choice in resp_data["choices"]:
                # Streaming chunks put the text under "delta", full responses
                # under "message"; the old code read only "message" and so
                # yielded empty content for every streamed chunk.
                payload = choice.get("message") or choice.get("delta") or {}
                msg = ChatCompletionMessage(
                    role=payload.get("role") or "assistant",
                    content=payload.get("content") or ""
                )
                choices.append(ChatCompletionChoice(
                    index=choice.get("index", 0),
                    message=msg,
                    finish_reason=choice.get("finish_reason")
                ))
        else:
            msg = ChatCompletionMessage(role="assistant", content=resp_data.get("content", ""))
            choices = [ChatCompletionChoice(index=0, message=msg, finish_reason="stop")]

        usage = None
        if "usage" in resp_data:
            usage = ChatCompletionUsage(
                prompt_tokens=resp_data["usage"].get("prompt_tokens", 0),
                completion_tokens=resp_data["usage"].get("completion_tokens", 0),
                total_tokens=resp_data["usage"].get("total_tokens", 0),
            )
        return ChatCompletion(
            id=resp_data.get("id", "chatcmpl-" + str(hash(str(resp_data)))),
            object="chat.completion" if not is_stream else "chat.completion.chunk",
            created=resp_data.get("created", int(time.time())),
            model=resp_data.get("model", "unknown"),
            choices=choices,
            usage=usage,
        )
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import os
import httpx
import logging
from typing import Optional, Dict, Any
from gravixlayer.resources.chat.completions import ChatCompletions

class AsyncGravixLayer:
    """
    Async client for GravixLayer
    """
    # NOTE(review): this client is only partially implemented — non-200
    # responses are not mapped to the SDK exception hierarchy (see TODO in
    # _make_request), and `stream` is accepted but never forwarded to httpx.
    def __init__(
        self,
        api_key: Optional[str] = None,
        base_url: Optional[str] = None,
        timeout: float = 60.0,
        max_retries: int = 3,
        headers: Optional[Dict[str, str]] = None,
        logger: Optional[logging.Logger] = None,
        user_agent: Optional[str] = None,
    ):
        # Same configuration surface as the sync GravixLayer client.
        self.api_key = api_key or os.environ.get("GRAVIXLAYER_API_KEY")
        self.base_url = base_url or "https://api.gravixlayer.com/v1/inference"
        if not self.base_url.startswith("https://"):
            raise ValueError("Base URL must use HTTPS for security reasons.")
        self.timeout = timeout
        self.max_retries = max_retries
        self.custom_headers = headers or {}
        self.logger = logger or logging.getLogger("gravixlayer-async")
        self.user_agent = user_agent or f"gravixlayer-python/0.0.1"
        if not self.api_key:
            raise ValueError("API key must be provided via argument or GRAVIXLAYER_API_KEY environment variable")
        # NOTE(review): unlike the sync client (`client.chat.completions.create`),
        # here `chat` IS the completions resource, so usage would be
        # `client.chat.create(...)`. Also, ChatCompletions calls _make_request
        # synchronously, but this client's _make_request is a coroutine —
        # the resource would receive an unawaited coroutine. Confirm intended.
        self.chat = ChatCompletions(self)

    async def _make_request(
        self,
        method: str,
        endpoint: str,
        data: Optional[Dict[str, Any]] = None,
        stream: bool = False,  # accepted but not used below — TODO confirm streaming support
        **kwargs
    ) -> httpx.Response:
        # Build the target URL; bare base_url is used when endpoint is empty.
        url = f"{self.base_url.rstrip('/')}/{endpoint.lstrip('/')}" if endpoint else self.base_url
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "User-Agent": self.user_agent,
            **self.custom_headers,
        }
        # One short-lived client per call; retries with exponential backoff
        # on connection-level errors only.
        async with httpx.AsyncClient(timeout=self.timeout) as client:
            for attempt in range(self.max_retries + 1):
                try:
                    resp = await client.request(
                        method=method,
                        url=url,
                        headers=headers,
                        json=data,
                        **kwargs,
                    )
                    if resp.status_code == 200:
                        return resp
                    # TODO: map errors as in sync client (add equivalent exception handling)
                    # NOTE(review): as written, a non-200 response falls through
                    # and the loop retries it, eventually raising the generic
                    # Exception below — no typed error reaches the caller.
                except httpx.RequestError as e:
                    if attempt == self.max_retries:
                        raise e
                    import asyncio
                    await asyncio.sleep(2 ** attempt)
            raise Exception("Failed async request")
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
from typing import Optional, List, Dict, Any
from dataclasses import dataclass

@dataclass
class ChatCompletionMessage:
    """One message in a chat exchange (request or response)."""
    role: str  # "system", "user", or "assistant"
    content: str  # message text
    name: Optional[str] = None  # optional author name
    function_call: Optional[Dict[str, Any]] = None  # tool/function call payload, if any

@dataclass
class ChatCompletionChoice:
    """One candidate completion within a response."""
    index: int  # position of this choice in the response
    message: ChatCompletionMessage  # the generated message
    finish_reason: Optional[str] = None  # e.g. "stop", "length"; None for stream chunks

@dataclass
class ChatCompletionUsage:
    """Token accounting for a completion."""
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int

@dataclass
class ChatCompletion:
    """Top-level response object, mirroring OpenAI's chat completion schema."""
    id: str  # server-assigned id (or a synthesized fallback)
    object: str  # "chat.completion" or "chat.completion.chunk"
    created: int  # Unix timestamp
    model: str  # model that produced the completion
    choices: List[ChatCompletionChoice]
    usage: Optional[ChatCompletionUsage] = None  # absent on stream chunks
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
class GravixLayerError(Exception):
    """Base class for every error raised by the GravixLayer SDK.

    Catch this to handle any SDK failure in one place.
    """


class GravixLayerAuthenticationError(GravixLayerError):
    """The API rejected the supplied credentials (HTTP 401)."""


class GravixLayerRateLimitError(GravixLayerError):
    """Request quota exceeded and retries were exhausted (HTTP 429)."""


class GravixLayerServerError(GravixLayerError):
    """The API returned a 5xx response."""


class GravixLayerBadRequestError(GravixLayerError):
    """The request was malformed or invalid (other 4xx responses)."""


class GravixLayerConnectionError(GravixLayerError):
    """A network-level failure prevented the request from completing."""
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: gravixlayer
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: GravixLayer Python SDK - OpenAI Compatible
|
|
5
|
+
Home-page: https://github.com/sukrithpvs/gravixlayer-python
|
|
6
|
+
Author: Sukrith
|
|
7
|
+
Author-email: Sukrith <sukrithpvs@gmail.com>
|
|
8
|
+
License: MIT
|
|
9
|
+
Project-URL: Homepage, https://github.com/sukrithpvs/gravixlayer-python
|
|
10
|
+
Project-URL: Repository, https://github.com/sukrithpvs/gravixlayer-python
|
|
11
|
+
Project-URL: Issues, https://github.com/sukrithpvs/gravixlayer-python/issues
|
|
12
|
+
Keywords: gravixlayer,openai,llm,ai,api,sdk
|
|
13
|
+
Classifier: Development Status :: 4 - Beta
|
|
14
|
+
Classifier: Intended Audience :: Developers
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Operating System :: OS Independent
|
|
17
|
+
Classifier: Programming Language :: Python :: 3
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.7
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
23
|
+
Requires-Python: >=3.7
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
Requires-Dist: requests>=2.25.0
|
|
26
|
+
Requires-Dist: python-dotenv>=0.19.0
|
|
27
|
+
Dynamic: author
|
|
28
|
+
Dynamic: home-page
|
|
29
|
+
Dynamic: requires-python
|
|
30
|
+
|
|
31
|
+
text
|
|
32
|
+
# GravixLayer Python SDK
|
|
33
|
+
|
|
34
|
+
A Python SDK for GravixLayer API that's fully compatible with OpenAI's interface.
|
|
35
|
+
|
|
36
|
+
## Installation
|
|
37
|
+
|
|
38
|
+
pip install gravixlayer
|
|
39
|
+
|
|
40
|
+
text
|
|
41
|
+
|
|
42
|
+
## Quick Start
|
|
43
|
+
|
|
44
|
+
import os
|
|
45
|
+
from gravixlayer import GravixLayer
|
|
46
|
+
Initialize client
|
|
47
|
+
|
|
48
|
+
client = GravixLayer(
|
|
49
|
+
api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
|
|
50
|
+
)
|
|
51
|
+
Create completion
|
|
52
|
+
|
|
53
|
+
completion = client.chat.completions.create(
|
|
54
|
+
model="llama3.1:8b",
|
|
55
|
+
messages=[
|
|
56
|
+
{"role": "system", "content": "You are a helpful assistant."},
|
|
57
|
+
{"role": "user", "content": "What are the three most popular programming languages?"}
|
|
58
|
+
]
|
|
59
|
+
)
|
|
60
|
+
|
|
61
|
+
print(completion.choices[0].message.content)
|
|
62
|
+
|
|
63
|
+
text
|
|
64
|
+
|
|
65
|
+
## OpenAI Compatibility
|
|
66
|
+
|
|
67
|
+
You can use this SDK as a drop-in replacement for OpenAI:
|
|
68
|
+
|
|
69
|
+
from gravixlayer import OpenAI # This is an alias for GravixLayer
|
|
70
|
+
|
|
71
|
+
client = OpenAI(
|
|
72
|
+
base_url="https://api.gravixlayer.com/v1/inference",
|
|
73
|
+
api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
|
|
74
|
+
)
|
|
75
|
+
|
|
76
|
+
text
|
|
77
|
+
|
|
78
|
+
## Features
|
|
79
|
+
|
|
80
|
+
- **OpenAI Compatible**: Drop-in replacement for OpenAI Python SDK
|
|
81
|
+
- **Streaming Support**: Real-time response streaming
|
|
82
|
+
- **Error Handling**: Automatic retries and proper error handling
|
|
83
|
+
- **Type Hints**: Full type support for better development experience
|
|
84
|
+
- **Environment Variables**: Automatic API key detection
|
|
85
|
+
|
|
86
|
+
## API Reference
|
|
87
|
+
|
|
88
|
+
### Chat Completions
|
|
89
|
+
|
|
90
|
+
completion = client.chat.completions.create(
|
|
91
|
+
model="llama3.1:8b",
|
|
92
|
+
messages=[...],
|
|
93
|
+
temperature=0.7,
|
|
94
|
+
max_tokens=150,
|
|
95
|
+
top_p=1.0,
|
|
96
|
+
frequency_penalty=0,
|
|
97
|
+
presence_penalty=0,
|
|
98
|
+
stop=None,
|
|
99
|
+
stream=False
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
text
|
|
103
|
+
|
|
104
|
+
### Streaming
|
|
105
|
+
|
|
106
|
+
stream = client.chat.completions.create(
|
|
107
|
+
model="llama3.1:8b",
|
|
108
|
+
messages=[...],
|
|
109
|
+
stream=True
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
for chunk in stream:
|
|
113
|
+
if chunk.choices[0].message.content is not None:
|
|
114
|
+
print(chunk.choices[0].message.content, end="")
|
|
115
|
+
|
|
116
|
+
text
|
|
117
|
+
|
|
118
|
+
## Environment Variables
|
|
119
|
+
|
|
120
|
+
- `GRAVIXLAYER_API_KEY`: Your GravixLayer API key
|
|
121
|
+
|
|
122
|
+
## License
|
|
123
|
+
|
|
124
|
+
MIT License
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
.gitignore
|
|
2
|
+
CHANGELOG.md
|
|
3
|
+
MANIFEST.in
|
|
4
|
+
README.md
|
|
5
|
+
pyproject.toml
|
|
6
|
+
requirements.txt
|
|
7
|
+
setup.py
|
|
8
|
+
tempCodeRunnerFile.py
|
|
9
|
+
test.py
|
|
10
|
+
test2.py
|
|
11
|
+
version.py
|
|
12
|
+
.github/workflows/pypi-release.yml
|
|
13
|
+
gravixlayer/__init__.py
|
|
14
|
+
gravixlayer/cli.py
|
|
15
|
+
gravixlayer/client.py
|
|
16
|
+
gravixlayer.egg-info/PKG-INFO
|
|
17
|
+
gravixlayer.egg-info/SOURCES.txt
|
|
18
|
+
gravixlayer.egg-info/dependency_links.txt
|
|
19
|
+
gravixlayer.egg-info/entry_points.txt
|
|
20
|
+
gravixlayer.egg-info/requires.txt
|
|
21
|
+
gravixlayer.egg-info/top_level.txt
|
|
22
|
+
gravixlayer/resources/__init__.py
|
|
23
|
+
gravixlayer/resources/chat/__init__.py
|
|
24
|
+
gravixlayer/resources/chat/completions.py
|
|
25
|
+
gravixlayer/types/__init__.py
|
|
26
|
+
gravixlayer/types/async_client.py
|
|
27
|
+
gravixlayer/types/chat.py
|
|
28
|
+
gravixlayer/types/exceptions.py
|
|
29
|
+
scripts/bump_version.py
|
|
30
|
+
scripts/release.sh
|
|
31
|
+
tests/test_chat_completions.py
|
|
32
|
+
tests/test_client.py
|
|
33
|
+
tests/test_exceptions.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
gravixlayer
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=45", "wheel", "setuptools_scm>=6.2"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "gravixlayer"
|
|
7
|
+
dynamic = ["version"]
|
|
8
|
+
description = "GravixLayer Python SDK - OpenAI Compatible"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.7"
|
|
11
|
+
license = {text = "MIT"}
|
|
12
|
+
authors = [
|
|
13
|
+
{name = "Sukrith", email = "sukrithpvs@gmail.com"},
|
|
14
|
+
]
|
|
15
|
+
keywords = ["gravixlayer", "openai", "llm", "ai", "api", "sdk"]
|
|
16
|
+
classifiers = [
|
|
17
|
+
"Development Status :: 4 - Beta",
|
|
18
|
+
"Intended Audience :: Developers",
|
|
19
|
+
"License :: OSI Approved :: MIT License",
|
|
20
|
+
"Operating System :: OS Independent",
|
|
21
|
+
"Programming Language :: Python :: 3",
|
|
22
|
+
"Programming Language :: Python :: 3.7",
|
|
23
|
+
"Programming Language :: Python :: 3.8",
|
|
24
|
+
"Programming Language :: Python :: 3.9",
|
|
25
|
+
"Programming Language :: Python :: 3.10",
|
|
26
|
+
"Programming Language :: Python :: 3.11",
|
|
27
|
+
]
|
|
28
|
+
dependencies = [
|
|
29
|
+
"requests>=2.25.0",
|
|
30
|
+
"python-dotenv>=0.19.0",
|
|
31
|
+
]
|
|
32
|
+
|
|
33
|
+
[project.scripts]
|
|
34
|
+
gravixlayer = "gravixlayer.cli:main"
|
|
35
|
+
|
|
36
|
+
[project.urls]
|
|
37
|
+
Homepage = "https://github.com/sukrithpvs/gravixlayer-python"
|
|
38
|
+
Repository = "https://github.com/sukrithpvs/gravixlayer-python"
|
|
39
|
+
Issues = "https://github.com/sukrithpvs/gravixlayer-python/issues"
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from setuptools import setup, find_packages
|
|
3
|
+
|
|
4
|
+
# Read version
|
|
5
|
+
exec(open('version.py').read())
|
|
6
|
+
|
|
7
|
+
# Read README
|
|
8
|
+
with open("README.md", "r", encoding="utf-8") as fh:
|
|
9
|
+
long_description = fh.read()
|
|
10
|
+
|
|
11
|
+
# Read requirements
|
|
12
|
+
with open("requirements.txt", "r", encoding="utf-8") as fh:
|
|
13
|
+
requirements = [line.strip() for line in fh if line.strip() and not line.startswith("#")]
|
|
14
|
+
|
|
15
|
+
setup(
|
|
16
|
+
name="gravixlayer",
|
|
17
|
+
version="0.0.1",
|
|
18
|
+
author="Sukrith",
|
|
19
|
+
author_email="sukrithpvs@gmail.com",
|
|
20
|
+
description="GravixLayer Python SDK - OpenAI Compatible",
|
|
21
|
+
long_description=long_description,
|
|
22
|
+
long_description_content_type="text/markdown",
|
|
23
|
+
url="https://github.com/sukrithpvs/gravixlayer-python",
|
|
24
|
+
packages=find_packages(),
|
|
25
|
+
classifiers=[
|
|
26
|
+
"Development Status :: 4 - Beta",
|
|
27
|
+
"Intended Audience :: Developers",
|
|
28
|
+
"License :: OSI Approved :: MIT License",
|
|
29
|
+
"Operating System :: OS Independent",
|
|
30
|
+
"Programming Language :: Python :: 3",
|
|
31
|
+
"Programming Language :: Python :: 3.7",
|
|
32
|
+
"Programming Language :: Python :: 3.8",
|
|
33
|
+
"Programming Language :: Python :: 3.9",
|
|
34
|
+
"Programming Language :: Python :: 3.10",
|
|
35
|
+
"Programming Language :: Python :: 3.11",
|
|
36
|
+
"Topic :: Software Development :: Libraries :: Python Modules",
|
|
37
|
+
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
|
|
38
|
+
],
|
|
39
|
+
python_requires=">=3.7",
|
|
40
|
+
install_requires=requirements,
|
|
41
|
+
entry_points={
|
|
42
|
+
'console_scripts': [
|
|
43
|
+
'gravixlayer=gravixlayer.cli:main',
|
|
44
|
+
],
|
|
45
|
+
},
|
|
46
|
+
keywords="gravixlayer, openai, llm, ai, api, sdk",
|
|
47
|
+
project_urls={
|
|
48
|
+
"Bug Reports": "https://github.com/sukrithpvs/gravixlayer-python/issues",
|
|
49
|
+
"Source": "https://github.com/sukrithpvs/gravixlayer-python",
|
|
50
|
+
"Documentation": "https://github.com/sukrithpvs/gravixlayer-python/blob/main/README.md",
|
|
51
|
+
},
|
|
52
|
+
)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import os
from dotenv import load_dotenv

load_dotenv()

from gravixlayer import GravixLayer

# Smoke-test script: one chat completion against the live API.
# API key comes from the environment (a local .env is honoured via dotenv).
client = GravixLayer(
    api_key=os.environ.get("GRAVIXLAYER_API_KEY"),
)

print(f"Testing SDK...")

try:
    conversation = [
        {"role": "system", "content": "You are a helpful and friendly assistant."},
        {"role": "user", "content": "What is the capital of France?"}
    ]
    completion = client.chat.completions.create(
        model="gemma3:12b",
        messages=conversation,
    )
    print("✅ Success!")
    print(f"Response: {completion.choices[0].message.content}")
except Exception as e:
    print(f"❌ Error: {e}")
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import asyncio
from gravixlayer.async_client import AsyncGravixLayer


async def main():
    # Fire one async chat completion and print the model's reply.
    client = AsyncGravixLayer()
    reply = await client.chat.completions.create(
        model="gemma3:12b",
        messages=[{"role": "user", "content": "Say something cool!"}],
    )
    print("Async response:", reply.choices[0].message.content)


asyncio.run(main())
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import unittest
from unittest.mock import MagicMock
from gravixlayer.client import GravixLayer


class TestChatCompletions(unittest.TestCase):
    """Unit tests for chat.completions against a mocked HTTP transport."""

    def setUp(self):
        # Stub the transport so no real network traffic happens.
        payload = {
            "id": "chatcmpl-1",
            "object": "chat.completion",
            "created": 1234567890,
            "model": "lorem-llama",
            "choices": [
                {
                    "index": 0,
                    "message": {"role": "assistant", "content": "Hello!"},
                    "finish_reason": "stop",
                }
            ],
            "usage": {"prompt_tokens": 5, "completion_tokens": 2, "total_tokens": 7},
        }
        self.client = GravixLayer(api_key="x")
        self.mock_response = MagicMock()
        self.mock_response.json.return_value = payload
        self.client._make_request = MagicMock(return_value=self.mock_response)

    def test_completion_create(self):
        result = self.client.chat.completions.create(
            model="lorem-llama",
            messages=[{"role": "user", "content": "Hi"}],
        )
        self.assertEqual(result.choices[0].message.content, "Hello!")

    def test_optional_parameters(self):
        self.client.chat.completions.create(
            model="abc",
            messages=[],
            temperature=0.5,
            top_p=0.7,
            max_tokens=10,
            frequency_penalty=0.2,
            presence_penalty=0.3,
        )
        call = self.client._make_request.call_args
        # data is always the third positional argument
        sent = call.args[2]
        self.assertEqual(sent["temperature"], 0.5)
        self.assertEqual(sent["top_p"], 0.7)
        self.assertEqual(sent["max_tokens"], 10)

    def test_streaming_response(self):
        # Stream mock: returns bytes like an HTTP event-stream.
        event_lines = [
            b'data: {"id":"chunk","choices":[{"message":{"role":"assistant","content":"Hey"},"index":0,"finish_reason":null}]}\n',
            b'data: [DONE]\n',
        ]
        stream_response = MagicMock()
        stream_response.iter_lines.return_value = event_lines
        self.client._make_request.return_value = stream_response

        chunks = list(
            self.client.chat.completions.create(model="x", messages=[], stream=True)
        )
        self.assertEqual(chunks[0].choices[0].message.content, "Hey")
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import os
import unittest
from unittest import mock
from gravixlayer.client import GravixLayer


class TestGravixLayerClient(unittest.TestCase):
    """Unit tests for GravixLayer client construction and configuration."""

    def test_api_key_argument(self):
        # An explicitly passed key wins.
        client = GravixLayer(api_key="test-key")
        self.assertEqual(client.api_key, "test-key")

    def test_api_key_env(self):
        # patch.dict restores the environment even when the assertion fails,
        # and does not clobber a pre-existing GRAVIXLAYER_API_KEY (the old
        # version set/deleted the variable by hand, leaking state on failure
        # and destroying any real key the developer had exported).
        with mock.patch.dict(os.environ, {"GRAVIXLAYER_API_KEY": "env-key"}):
            client = GravixLayer()
            self.assertEqual(client.api_key, "env-key")

    def test_https_enforcement(self):
        # Plain-http base URLs must be rejected.
        with self.assertRaises(ValueError):
            GravixLayer(api_key="k", base_url="http://not-secure.com")
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import unittest
from gravixlayer.client import GravixLayer
from gravixlayer.types.exceptions import (
    GravixLayerAuthenticationError, GravixLayerRateLimitError, GravixLayerServerError,
    GravixLayerBadRequestError, GravixLayerConnectionError
)


class TestExceptionHandling(unittest.TestCase):
    """Verify the client surfaces transport-layer exceptions unchanged."""

    def setUp(self):
        self.client = GravixLayer(api_key="x")

    def _stub_request_with(self, exc):
        # Replace the transport with a stub that always raises `exc`.
        def _raise(*args, **kwargs):
            raise exc
        self.client._make_request = _raise

    def test_auth_error(self):
        self._stub_request_with(GravixLayerAuthenticationError("bad key"))
        with self.assertRaises(GravixLayerAuthenticationError):
            self.client.chat.completions.create(model="x", messages=[])

    def test_rate_limit_error(self):
        self._stub_request_with(GravixLayerRateLimitError("Too many requests"))
        with self.assertRaises(GravixLayerRateLimitError):
            self.client.chat.completions.create(model="x", messages=[])
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Version information for GravixLayer SDK"""
|
|
2
|
+
|
|
3
|
+
__version__ = "0.0.1"
|
|
4
|
+
__version_info__ = tuple(int(x) for x in __version__.split('.'))
|
|
5
|
+
|
|
6
|
+
# Version history
|
|
7
|
+
VERSION_HISTORY = {
|
|
8
|
+
"0.0.1": "Initial release with OpenAI compatibility",
|
|
9
|
+
# Add future versions here
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
def get_version_info():
|
|
13
|
+
"""Get current version information"""
|
|
14
|
+
return {
|
|
15
|
+
"version": __version__,
|
|
16
|
+
"version_info": __version_info__,
|
|
17
|
+
"description": VERSION_HISTORY.get(__version__, "No description available")
|
|
18
|
+
}
|