byok 0.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- byok-0.2.0/PKG-INFO +169 -0
- byok-0.2.0/README.md +156 -0
- byok-0.2.0/byo/__init__.py +5 -0
- byok-0.2.0/byo/client.py +226 -0
- byok-0.2.0/byo/errors.py +19 -0
- byok-0.2.0/byok.egg-info/PKG-INFO +169 -0
- byok-0.2.0/byok.egg-info/SOURCES.txt +10 -0
- byok-0.2.0/byok.egg-info/dependency_links.txt +1 -0
- byok-0.2.0/byok.egg-info/requires.txt +1 -0
- byok-0.2.0/byok.egg-info/top_level.txt +1 -0
- byok-0.2.0/pyproject.toml +22 -0
- byok-0.2.0/setup.cfg +4 -0
byok-0.2.0/PKG-INFO
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: byok
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: BYO - Python SDK for BYOK
|
|
5
|
+
License: FSL-1.1-MIT
|
|
6
|
+
Project-URL: Homepage, https://github.com/treadiehq/byo
|
|
7
|
+
Project-URL: Repository, https://github.com/treadiehq/byo/tree/main/packages/sdk-python
|
|
8
|
+
Project-URL: Issues, https://github.com/treadiehq/byo/issues
|
|
9
|
+
Keywords: byo,byok,openai,anthropic,proxy,api-keys
|
|
10
|
+
Requires-Python: >=3.9
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
Requires-Dist: httpx>=0.25.0
|
|
13
|
+
|
|
14
|
+
# BYO Python SDK for BYOK
|
|
15
|
+
|
|
16
|
+
Official Python SDK for BYO — BYOK (Bring Your Own Key).
|
|
17
|
+
|
|
18
|
+
## Installation
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
pip install byok
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Quick Start
|
|
25
|
+
|
|
26
|
+
```python
|
|
27
|
+
import os
|
|
28
|
+
from byo import BYOK
|
|
29
|
+
|
|
30
|
+
byok = BYOK(api_key=os.environ["BYOK_API_KEY"])
|
|
31
|
+
|
|
32
|
+
# Proxy an OpenAI request
|
|
33
|
+
openai = byok.openai(ref_id="customer_123")
|
|
34
|
+
response = openai.responses.create(
|
|
35
|
+
model="gpt-4.1",
|
|
36
|
+
input="Hello from BYOK!"
|
|
37
|
+
)
|
|
38
|
+
print(response)
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
## OpenAI
|
|
42
|
+
|
|
43
|
+
### Responses API
|
|
44
|
+
|
|
45
|
+
```python
|
|
46
|
+
openai = byok.openai(ref_id="customer_123")
|
|
47
|
+
response = openai.responses.create(model="gpt-4.1", input="What is BYOK?")
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
### Chat Completions API
|
|
51
|
+
|
|
52
|
+
```python
|
|
53
|
+
openai = byok.openai(ref_id="customer_123")
|
|
54
|
+
response = openai.chat.completions.create(
|
|
55
|
+
model="gpt-4.1",
|
|
56
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
57
|
+
)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Anthropic
|
|
61
|
+
|
|
62
|
+
### Messages API
|
|
63
|
+
|
|
64
|
+
```python
|
|
65
|
+
claude = byok.anthropic(ref_id="customer_123")
|
|
66
|
+
response = claude.messages.create(
|
|
67
|
+
model="claude-sonnet-4-20250514",
|
|
68
|
+
max_tokens=1024,
|
|
69
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
70
|
+
)
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
## Google AI Studio
|
|
74
|
+
|
|
75
|
+
### Generate Content
|
|
76
|
+
|
|
77
|
+
```python
|
|
78
|
+
gemini = byok.google(ref_id="customer_123")
|
|
79
|
+
response = gemini.generate_content.create(
|
|
80
|
+
model="gemini-2.0-flash",
|
|
81
|
+
contents=[{"parts": [{"text": "Hello!"}]}]
|
|
82
|
+
)
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Azure OpenAI
|
|
86
|
+
|
|
87
|
+
### Chat Completions
|
|
88
|
+
|
|
89
|
+
```python
|
|
90
|
+
azure = byok.azure_openai(ref_id="customer_123")
|
|
91
|
+
response = azure.chat.completions.create(
|
|
92
|
+
model="gpt-4",
|
|
93
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
94
|
+
)
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
Azure OpenAI requires `provider_config` when connecting the key:
|
|
98
|
+
|
|
99
|
+
```python
|
|
100
|
+
byok.keys.connect(
|
|
101
|
+
provider="azure-openai",
|
|
102
|
+
ref_id="customer_123",
|
|
103
|
+
provider_key="your-azure-api-key",
|
|
104
|
+
provider_config={
|
|
105
|
+
"baseUrl": "https://your-resource.openai.azure.com",
|
|
106
|
+
"deploymentName": "gpt-4",
|
|
107
|
+
},
|
|
108
|
+
)
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## AWS Bedrock
|
|
112
|
+
|
|
113
|
+
### Converse
|
|
114
|
+
|
|
115
|
+
```python
|
|
116
|
+
bedrock = byok.bedrock(ref_id="customer_123")
|
|
117
|
+
response = bedrock.converse.create(
|
|
118
|
+
modelId="anthropic.claude-3-haiku-20240307-v1:0",
|
|
119
|
+
messages=[{"role": "user", "content": [{"text": "Hello!"}]}]
|
|
120
|
+
)
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
AWS Bedrock requires `provider_config` when connecting the key:
|
|
124
|
+
|
|
125
|
+
```python
|
|
126
|
+
byok.keys.connect(
|
|
127
|
+
provider="bedrock",
|
|
128
|
+
ref_id="customer_123",
|
|
129
|
+
provider_key="your-aws-secret-access-key",
|
|
130
|
+
provider_config={
|
|
131
|
+
"accessKeyId": "AKIA...",
|
|
132
|
+
"region": "us-east-1",
|
|
133
|
+
},
|
|
134
|
+
)
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
## Key Management
|
|
138
|
+
|
|
139
|
+
```python
|
|
140
|
+
# Connect a provider key
|
|
141
|
+
byok.keys.connect(
|
|
142
|
+
provider="openai",
|
|
143
|
+
ref_id="customer_123",
|
|
144
|
+
provider_key="sk-..."
|
|
145
|
+
)
|
|
146
|
+
|
|
147
|
+
# Validate a stored key
|
|
148
|
+
result = byok.keys.validate(provider="openai", ref_id="customer_123")
|
|
149
|
+
|
|
150
|
+
# Revoke a stored key
|
|
151
|
+
byok.keys.revoke(provider="openai", ref_id="customer_123")
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
## Error Handling
|
|
155
|
+
|
|
156
|
+
```python
|
|
157
|
+
from byo import BYOK, BYOKError, AuthenticationError
|
|
158
|
+
|
|
159
|
+
try:
|
|
160
|
+
response = openai.responses.create(model="gpt-4.1", input="Hello")
|
|
161
|
+
except AuthenticationError:
|
|
162
|
+
print("Invalid API key")
|
|
163
|
+
except BYOKError as e:
|
|
164
|
+
print(f"Error {e.status_code}: {e.message}")
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
## License
|
|
168
|
+
|
|
169
|
+
[FSL-1.1-MIT](LICENSE)
|
byok-0.2.0/README.md
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
# BYO Python SDK for BYOK
|
|
2
|
+
|
|
3
|
+
Official Python SDK for BYO — BYOK (Bring Your Own Key).
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install byok
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Quick Start
|
|
12
|
+
|
|
13
|
+
```python
|
|
14
|
+
import os
|
|
15
|
+
from byo import BYOK
|
|
16
|
+
|
|
17
|
+
byok = BYOK(api_key=os.environ["BYOK_API_KEY"])
|
|
18
|
+
|
|
19
|
+
# Proxy an OpenAI request
|
|
20
|
+
openai = byok.openai(ref_id="customer_123")
|
|
21
|
+
response = openai.responses.create(
|
|
22
|
+
model="gpt-4.1",
|
|
23
|
+
input="Hello from BYOK!"
|
|
24
|
+
)
|
|
25
|
+
print(response)
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## OpenAI
|
|
29
|
+
|
|
30
|
+
### Responses API
|
|
31
|
+
|
|
32
|
+
```python
|
|
33
|
+
openai = byok.openai(ref_id="customer_123")
|
|
34
|
+
response = openai.responses.create(model="gpt-4.1", input="What is BYOK?")
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
### Chat Completions API
|
|
38
|
+
|
|
39
|
+
```python
|
|
40
|
+
openai = byok.openai(ref_id="customer_123")
|
|
41
|
+
response = openai.chat.completions.create(
|
|
42
|
+
model="gpt-4.1",
|
|
43
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
44
|
+
)
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
## Anthropic
|
|
48
|
+
|
|
49
|
+
### Messages API
|
|
50
|
+
|
|
51
|
+
```python
|
|
52
|
+
claude = byok.anthropic(ref_id="customer_123")
|
|
53
|
+
response = claude.messages.create(
|
|
54
|
+
model="claude-sonnet-4-20250514",
|
|
55
|
+
max_tokens=1024,
|
|
56
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
57
|
+
)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Google AI Studio
|
|
61
|
+
|
|
62
|
+
### Generate Content
|
|
63
|
+
|
|
64
|
+
```python
|
|
65
|
+
gemini = byok.google(ref_id="customer_123")
|
|
66
|
+
response = gemini.generate_content.create(
|
|
67
|
+
model="gemini-2.0-flash",
|
|
68
|
+
contents=[{"parts": [{"text": "Hello!"}]}]
|
|
69
|
+
)
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
## Azure OpenAI
|
|
73
|
+
|
|
74
|
+
### Chat Completions
|
|
75
|
+
|
|
76
|
+
```python
|
|
77
|
+
azure = byok.azure_openai(ref_id="customer_123")
|
|
78
|
+
response = azure.chat.completions.create(
|
|
79
|
+
model="gpt-4",
|
|
80
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
81
|
+
)
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
Azure OpenAI requires `provider_config` when connecting the key:
|
|
85
|
+
|
|
86
|
+
```python
|
|
87
|
+
byok.keys.connect(
|
|
88
|
+
provider="azure-openai",
|
|
89
|
+
ref_id="customer_123",
|
|
90
|
+
provider_key="your-azure-api-key",
|
|
91
|
+
provider_config={
|
|
92
|
+
"baseUrl": "https://your-resource.openai.azure.com",
|
|
93
|
+
"deploymentName": "gpt-4",
|
|
94
|
+
},
|
|
95
|
+
)
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
## AWS Bedrock
|
|
99
|
+
|
|
100
|
+
### Converse
|
|
101
|
+
|
|
102
|
+
```python
|
|
103
|
+
bedrock = byok.bedrock(ref_id="customer_123")
|
|
104
|
+
response = bedrock.converse.create(
|
|
105
|
+
modelId="anthropic.claude-3-haiku-20240307-v1:0",
|
|
106
|
+
messages=[{"role": "user", "content": [{"text": "Hello!"}]}]
|
|
107
|
+
)
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
AWS Bedrock requires `provider_config` when connecting the key:
|
|
111
|
+
|
|
112
|
+
```python
|
|
113
|
+
byok.keys.connect(
|
|
114
|
+
provider="bedrock",
|
|
115
|
+
ref_id="customer_123",
|
|
116
|
+
provider_key="your-aws-secret-access-key",
|
|
117
|
+
provider_config={
|
|
118
|
+
"accessKeyId": "AKIA...",
|
|
119
|
+
"region": "us-east-1",
|
|
120
|
+
},
|
|
121
|
+
)
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
## Key Management
|
|
125
|
+
|
|
126
|
+
```python
|
|
127
|
+
# Connect a provider key
|
|
128
|
+
byok.keys.connect(
|
|
129
|
+
provider="openai",
|
|
130
|
+
ref_id="customer_123",
|
|
131
|
+
provider_key="sk-..."
|
|
132
|
+
)
|
|
133
|
+
|
|
134
|
+
# Validate a stored key
|
|
135
|
+
result = byok.keys.validate(provider="openai", ref_id="customer_123")
|
|
136
|
+
|
|
137
|
+
# Revoke a stored key
|
|
138
|
+
byok.keys.revoke(provider="openai", ref_id="customer_123")
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
## Error Handling
|
|
142
|
+
|
|
143
|
+
```python
|
|
144
|
+
from byo import BYOK, BYOKError, AuthenticationError
|
|
145
|
+
|
|
146
|
+
try:
|
|
147
|
+
response = openai.responses.create(model="gpt-4.1", input="Hello")
|
|
148
|
+
except AuthenticationError:
|
|
149
|
+
print("Invalid API key")
|
|
150
|
+
except BYOKError as e:
|
|
151
|
+
print(f"Error {e.status_code}: {e.message}")
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
## License
|
|
155
|
+
|
|
156
|
+
[FSL-1.1-MIT](LICENSE)
|
byok-0.2.0/byo/client.py
ADDED
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
from byo.errors import BYOKError, AuthenticationError, ProviderError
|
|
8
|
+
|
|
9
|
+
# NOTE(review): defaults to a local dev server — confirm this is intended for a
# published SDK; callers can override via BYOK(base_url=...).
DEFAULT_BASE_URL = "http://localhost:3000"
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class BYOK:
    """Entry point for the BYOK API.

    Wraps a single authenticated ``httpx.Client`` and hands out per-provider
    proxy clients plus key-management helpers (via :attr:`keys`). Usable as a
    context manager so the underlying HTTP connection pool is closed cleanly.
    """

    def __init__(self, api_key: str, base_url: str = DEFAULT_BASE_URL):
        if not api_key:
            raise AuthenticationError("API key is required")
        self._api_key = api_key
        self._base_url = base_url.rstrip("/")
        default_headers = {
            "Authorization": f"Bearer {self._api_key}",
            "Content-Type": "application/json",
        }
        self._client = httpx.Client(
            base_url=self._base_url,
            headers=default_headers,
            timeout=60.0,
        )
        # Key management shares the same authenticated transport.
        self.keys = KeysClient(self._client)

    def openai(self, *, ref_id: str) -> OpenAIClient:
        """Return an OpenAI proxy client scoped to *ref_id*."""
        return OpenAIClient(self._client, ref_id)

    def anthropic(self, *, ref_id: str) -> AnthropicClient:
        """Return an Anthropic proxy client scoped to *ref_id*."""
        return AnthropicClient(self._client, ref_id)

    def google(self, *, ref_id: str) -> GoogleClient:
        """Return a Google AI Studio proxy client scoped to *ref_id*."""
        return GoogleClient(self._client, ref_id)

    def azure_openai(self, *, ref_id: str) -> AzureOpenAIClient:
        """Return an Azure OpenAI proxy client scoped to *ref_id*."""
        return AzureOpenAIClient(self._client, ref_id)

    def bedrock(self, *, ref_id: str) -> BedrockClient:
        """Return an AWS Bedrock proxy client scoped to *ref_id*."""
        return BedrockClient(self._client, ref_id)

    def close(self):
        """Close the underlying HTTP client and its connection pool."""
        self._client.close()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class KeysClient:
    """Key-management endpoints: connect, validate, and revoke provider keys."""

    def __init__(self, client: httpx.Client):
        self._client = client

    def connect(
        self,
        *,
        provider: str,
        ref_id: str,
        provider_key: str,
        provider_config: dict[str, Any] | None = None,
    ) -> dict:
        """Store a provider API key under *ref_id*.

        ``provider_config`` carries provider-specific settings (e.g. Azure
        base URL / deployment name) and is only sent when supplied.
        """
        payload: dict[str, Any] = {
            "provider": provider,
            "refId": ref_id,
            "providerKey": provider_key,
        }
        if provider_config is not None:
            payload["providerConfig"] = provider_config
        return _request(self._client, "POST", "/keys/connect", json=payload)

    def validate(self, *, provider: str, ref_id: str) -> dict:
        """Check that the stored key for (*provider*, *ref_id*) still works."""
        payload = {"provider": provider, "refId": ref_id}
        return _request(self._client, "POST", "/keys/validate", json=payload)

    def revoke(self, *, provider: str, ref_id: str) -> dict:
        """Delete the stored key for (*provider*, *ref_id*)."""
        payload = {"provider": provider, "refId": ref_id}
        return _request(self._client, "DELETE", "/keys/revoke", json=payload)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class OpenAIClient:
    """OpenAI proxy surface exposing ``responses`` and ``chat.completions``."""

    def __init__(self, client: httpx.Client, ref_id: str):
        # Both sub-clients share the transport and the customer ref id.
        self.chat = _OpenAIChat(client, ref_id)
        self.responses = _OpenAIResponses(client, ref_id)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class _OpenAIChat:
    """Namespace holder mirroring the OpenAI SDK's ``client.chat`` shape."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self.completions = _OpenAIChatCompletions(client, ref_id)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class _OpenAIResponses:
    """Proxy for the OpenAI Responses API."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self._client = client
        self._ref_id = ref_id

    def create(self, **kwargs) -> dict:
        """Forward *kwargs* as the provider payload; return the unwrapped data."""
        envelope = {"refId": self._ref_id, "providerPayload": kwargs}
        return _extract_proxy_data(
            _request(self._client, "POST", "/proxy/openai/responses", json=envelope)
        )
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class _OpenAIChatCompletions:
    """Proxy for the OpenAI Chat Completions API."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self._client = client
        self._ref_id = ref_id

    def create(self, **kwargs) -> dict:
        """Forward *kwargs* as the provider payload; return the unwrapped data."""
        envelope = {"refId": self._ref_id, "providerPayload": kwargs}
        return _extract_proxy_data(
            _request(self._client, "POST", "/proxy/openai/chat/completions", json=envelope)
        )
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class AnthropicClient:
    """Anthropic proxy surface exposing ``messages``."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self.messages = _AnthropicMessages(client, ref_id)
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class _AnthropicMessages:
    """Proxy for the Anthropic Messages API."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self._client = client
        self._ref_id = ref_id

    def create(self, **kwargs) -> dict:
        """Forward *kwargs* as the provider payload; return the unwrapped data."""
        envelope = {"refId": self._ref_id, "providerPayload": kwargs}
        return _extract_proxy_data(
            _request(self._client, "POST", "/proxy/anthropic/messages", json=envelope)
        )
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
class GoogleClient:
    """Google AI Studio proxy surface exposing ``generate_content``."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self.generate_content = _GoogleGenerateContent(client, ref_id)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
class _GoogleGenerateContent:
    """Proxy for the Google generateContent API."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self._client = client
        self._ref_id = ref_id

    def create(self, **kwargs) -> dict:
        """Forward *kwargs* as the provider payload; return the unwrapped data."""
        envelope = {"refId": self._ref_id, "providerPayload": kwargs}
        return _extract_proxy_data(
            _request(self._client, "POST", "/proxy/google/generateContent", json=envelope)
        )
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
class AzureOpenAIClient:
    """Azure OpenAI proxy surface exposing ``chat.completions``."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self.chat = _AzureOpenAIChat(client, ref_id)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
class _AzureOpenAIChat:
    """Namespace holder mirroring the OpenAI SDK's ``client.chat`` shape."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self.completions = _AzureOpenAIChatCompletions(client, ref_id)
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
class _AzureOpenAIChatCompletions:
    """Proxy for Azure OpenAI chat completions."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self._client = client
        self._ref_id = ref_id

    def create(self, **kwargs) -> dict:
        """Forward *kwargs* as the provider payload; return the unwrapped data."""
        envelope = {"refId": self._ref_id, "providerPayload": kwargs}
        return _extract_proxy_data(
            _request(self._client, "POST", "/proxy/azure-openai/chat/completions", json=envelope)
        )
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
class BedrockClient:
    """AWS Bedrock proxy surface exposing ``converse``."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self.converse = _BedrockConverse(client, ref_id)
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
class _BedrockConverse:
    """Proxy for the AWS Bedrock Converse API."""

    def __init__(self, client: httpx.Client, ref_id: str):
        self._client = client
        self._ref_id = ref_id

    def create(self, **kwargs) -> dict:
        """Forward *kwargs* as the provider payload; return the unwrapped data."""
        envelope = {"refId": self._ref_id, "providerPayload": kwargs}
        return _extract_proxy_data(
            _request(self._client, "POST", "/proxy/bedrock/converse", json=envelope)
        )
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def _extract_proxy_data(resp: dict) -> dict:
|
|
202
|
+
if not resp.get("success", True):
|
|
203
|
+
error_data = resp.get("data", {})
|
|
204
|
+
if isinstance(error_data, dict):
|
|
205
|
+
msg = error_data.get("error", error_data)
|
|
206
|
+
if isinstance(msg, dict):
|
|
207
|
+
msg = msg.get("message", str(msg))
|
|
208
|
+
else:
|
|
209
|
+
msg = str(error_data)
|
|
210
|
+
raise ProviderError(str(msg), status_code=resp.get("statusCode"))
|
|
211
|
+
return resp.get("data", resp)
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _request(client: httpx.Client, method: str, path: str, **kwargs) -> dict:
    """Send a request through *client* and return the decoded JSON body.

    Raises:
        AuthenticationError: on HTTP 401, before attempting to parse the body.
        BYOKError: when the body is not valid JSON, or the status is not 2xx
            (message taken from the body's ``message`` field when present).
    """
    resp = client.request(method, path, **kwargs)
    if resp.status_code == 401:
        raise AuthenticationError()
    try:
        data = resp.json()
    except ValueError:
        # json.JSONDecodeError subclasses ValueError; keeping the catch narrow
        # (was `except Exception`) avoids swallowing unrelated programming
        # errors. `from None` drops the noisy decode traceback — the truncated
        # body text below is the useful signal.
        text = resp.text[:200] if resp.text else f"Request failed ({resp.status_code})"
        raise BYOKError(text, status_code=resp.status_code) from None
    if not resp.is_success:
        msg = data.get("message", "Request failed") if isinstance(data, dict) else str(data)
        raise BYOKError(msg, status_code=resp.status_code)
    return data
|
byok-0.2.0/byo/errors.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class BYOKError(Exception):
    """Base exception for the BYOK SDK.

    Attributes:
        message: Human-readable description (also the exception's str()).
        status_code: HTTP status associated with the error, if any.
        code: Machine-readable error code, if known.
    """

    def __init__(self, message: str, status_code: int | None = None, code: str | None = None):
        super().__init__(message)
        self.code = code
        self.status_code = status_code
        self.message = message
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class AuthenticationError(BYOKError):
    """Raised when the BYOK API key is missing or rejected (HTTP 401)."""

    def __init__(self, message: str = "Invalid or missing API key"):
        super().__init__(message, status_code=401, code="AUTHENTICATION_ERROR")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class ProviderError(BYOKError):
    """Raised when the upstream provider returns an error through the proxy."""

    def __init__(self, message: str, status_code: int | None = None):
        super().__init__(message, status_code=status_code, code="PROVIDER_ERROR")
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: byok
|
|
3
|
+
Version: 0.2.0
|
|
4
|
+
Summary: BYO - Python SDK for BYOK
|
|
5
|
+
License: FSL-1.1-MIT
|
|
6
|
+
Project-URL: Homepage, https://github.com/treadiehq/byo
|
|
7
|
+
Project-URL: Repository, https://github.com/treadiehq/byo/tree/main/packages/sdk-python
|
|
8
|
+
Project-URL: Issues, https://github.com/treadiehq/byo/issues
|
|
9
|
+
Keywords: byo,byok,openai,anthropic,proxy,api-keys
|
|
10
|
+
Requires-Python: >=3.9
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
Requires-Dist: httpx>=0.25.0
|
|
13
|
+
|
|
14
|
+
# BYO Python SDK for BYOK
|
|
15
|
+
|
|
16
|
+
Official Python SDK for BYO — BYOK (Bring Your Own Key).
|
|
17
|
+
|
|
18
|
+
## Installation
|
|
19
|
+
|
|
20
|
+
```bash
|
|
21
|
+
pip install byok
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## Quick Start
|
|
25
|
+
|
|
26
|
+
```python
|
|
27
|
+
import os
|
|
28
|
+
from byo import BYOK
|
|
29
|
+
|
|
30
|
+
byok = BYOK(api_key=os.environ["BYOK_API_KEY"])
|
|
31
|
+
|
|
32
|
+
# Proxy an OpenAI request
|
|
33
|
+
openai = byok.openai(ref_id="customer_123")
|
|
34
|
+
response = openai.responses.create(
|
|
35
|
+
model="gpt-4.1",
|
|
36
|
+
input="Hello from BYOK!"
|
|
37
|
+
)
|
|
38
|
+
print(response)
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
## OpenAI
|
|
42
|
+
|
|
43
|
+
### Responses API
|
|
44
|
+
|
|
45
|
+
```python
|
|
46
|
+
openai = byok.openai(ref_id="customer_123")
|
|
47
|
+
response = openai.responses.create(model="gpt-4.1", input="What is BYOK?")
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
### Chat Completions API
|
|
51
|
+
|
|
52
|
+
```python
|
|
53
|
+
openai = byok.openai(ref_id="customer_123")
|
|
54
|
+
response = openai.chat.completions.create(
|
|
55
|
+
model="gpt-4.1",
|
|
56
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
57
|
+
)
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Anthropic
|
|
61
|
+
|
|
62
|
+
### Messages API
|
|
63
|
+
|
|
64
|
+
```python
|
|
65
|
+
claude = byok.anthropic(ref_id="customer_123")
|
|
66
|
+
response = claude.messages.create(
|
|
67
|
+
model="claude-sonnet-4-20250514",
|
|
68
|
+
max_tokens=1024,
|
|
69
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
70
|
+
)
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
## Google AI Studio
|
|
74
|
+
|
|
75
|
+
### Generate Content
|
|
76
|
+
|
|
77
|
+
```python
|
|
78
|
+
gemini = byok.google(ref_id="customer_123")
|
|
79
|
+
response = gemini.generate_content.create(
|
|
80
|
+
model="gemini-2.0-flash",
|
|
81
|
+
contents=[{"parts": [{"text": "Hello!"}]}]
|
|
82
|
+
)
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Azure OpenAI
|
|
86
|
+
|
|
87
|
+
### Chat Completions
|
|
88
|
+
|
|
89
|
+
```python
|
|
90
|
+
azure = byok.azure_openai(ref_id="customer_123")
|
|
91
|
+
response = azure.chat.completions.create(
|
|
92
|
+
model="gpt-4",
|
|
93
|
+
messages=[{"role": "user", "content": "Hello!"}]
|
|
94
|
+
)
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
Azure OpenAI requires `provider_config` when connecting the key:
|
|
98
|
+
|
|
99
|
+
```python
|
|
100
|
+
byok.keys.connect(
|
|
101
|
+
provider="azure-openai",
|
|
102
|
+
ref_id="customer_123",
|
|
103
|
+
provider_key="your-azure-api-key",
|
|
104
|
+
provider_config={
|
|
105
|
+
"baseUrl": "https://your-resource.openai.azure.com",
|
|
106
|
+
"deploymentName": "gpt-4",
|
|
107
|
+
},
|
|
108
|
+
)
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
## AWS Bedrock
|
|
112
|
+
|
|
113
|
+
### Converse
|
|
114
|
+
|
|
115
|
+
```python
|
|
116
|
+
bedrock = byok.bedrock(ref_id="customer_123")
|
|
117
|
+
response = bedrock.converse.create(
|
|
118
|
+
modelId="anthropic.claude-3-haiku-20240307-v1:0",
|
|
119
|
+
messages=[{"role": "user", "content": [{"text": "Hello!"}]}]
|
|
120
|
+
)
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
AWS Bedrock requires `provider_config` when connecting the key:
|
|
124
|
+
|
|
125
|
+
```python
|
|
126
|
+
byok.keys.connect(
|
|
127
|
+
provider="bedrock",
|
|
128
|
+
ref_id="customer_123",
|
|
129
|
+
provider_key="your-aws-secret-access-key",
|
|
130
|
+
provider_config={
|
|
131
|
+
"accessKeyId": "AKIA...",
|
|
132
|
+
"region": "us-east-1",
|
|
133
|
+
},
|
|
134
|
+
)
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
## Key Management
|
|
138
|
+
|
|
139
|
+
```python
|
|
140
|
+
# Connect a provider key
|
|
141
|
+
byok.keys.connect(
|
|
142
|
+
provider="openai",
|
|
143
|
+
ref_id="customer_123",
|
|
144
|
+
provider_key="sk-..."
|
|
145
|
+
)
|
|
146
|
+
|
|
147
|
+
# Validate a stored key
|
|
148
|
+
result = byok.keys.validate(provider="openai", ref_id="customer_123")
|
|
149
|
+
|
|
150
|
+
# Revoke a stored key
|
|
151
|
+
byok.keys.revoke(provider="openai", ref_id="customer_123")
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
## Error Handling
|
|
155
|
+
|
|
156
|
+
```python
|
|
157
|
+
from byo import BYOK, BYOKError, AuthenticationError
|
|
158
|
+
|
|
159
|
+
try:
|
|
160
|
+
response = openai.responses.create(model="gpt-4.1", input="Hello")
|
|
161
|
+
except AuthenticationError:
|
|
162
|
+
print("Invalid API key")
|
|
163
|
+
except BYOKError as e:
|
|
164
|
+
print(f"Error {e.status_code}: {e.message}")
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
## License
|
|
168
|
+
|
|
169
|
+
[FSL-1.1-MIT](LICENSE)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
httpx>=0.25.0
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
byo
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=68.0", "wheel"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "byok"
|
|
7
|
+
version = "0.2.0"
|
|
8
|
+
description = "BYO - Python SDK for BYOK"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.9"
|
|
11
|
+
license = {text = "FSL-1.1-MIT"}
|
|
12
|
+
dependencies = ["httpx>=0.25.0"]
|
|
13
|
+
keywords = ["byo", "byok", "openai", "anthropic", "proxy", "api-keys"]
|
|
14
|
+
|
|
15
|
+
[project.urls]
|
|
16
|
+
Homepage = "https://github.com/treadiehq/byo"
|
|
17
|
+
Repository = "https://github.com/treadiehq/byo/tree/main/packages/sdk-python"
|
|
18
|
+
Issues = "https://github.com/treadiehq/byo/issues"
|
|
19
|
+
|
|
20
|
+
[tool.setuptools.packages.find]
|
|
21
|
+
where = ["."]
|
|
22
|
+
include = ["byo*"]
|
byok-0.2.0/setup.cfg
ADDED