nexttoken 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,39 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ *.egg-info/
6
+ dist/
7
+ build/
8
+ .eggs/
9
+ *.egg
10
+ .pytest_cache/
11
+ .coverage
12
+ htmlcov/
13
+ .venv/
14
+ venv/
15
+
16
+ # Node
17
+ node_modules/
18
+ npm-debug.log
19
+ yarn-error.log
20
+
21
+ # IDE/Editor
22
+ .idea/
23
+ .vscode/
24
+ *.swp
25
+ *.swo
26
+ *~
27
+
28
+ # OS
29
+ .DS_Store
30
+ Thumbs.db
31
+
32
+ # Environment
33
+ .env
34
+ .env.local
35
+ .env.*.local
36
+
37
+ # Build artifacts
38
+ *.log
39
+ *.tmp
@@ -0,0 +1,98 @@
1
+ Metadata-Version: 2.4
2
+ Name: nexttoken
3
+ Version: 0.1.0
4
+ Summary: NextToken SDK - Simple client for the NextToken APIs and Gateway
5
+ Project-URL: Homepage, https://nexttoken.co
6
+ Project-URL: Documentation, https://docs.nexttoken.co
7
+ Project-URL: Repository, https://github.com/NextTokenAI/nexttoken
8
+ Author-email: NextToken <contact@nexttoken.co>
9
+ License-Expression: MIT
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: License :: OSI Approved :: MIT License
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.8
15
+ Classifier: Programming Language :: Python :: 3.9
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Requires-Python: >=3.8
20
+ Requires-Dist: openai>=1.0.0
21
+ Description-Content-Type: text/markdown
22
+
23
+ # NextToken Python SDK
24
+
25
+ Simple Python client for the NextToken Gateway - an OpenAI-compatible LLM proxy.
26
+
27
+ ## Installation
28
+
29
+ ```bash
30
+ pip install nexttoken
31
+ ```
32
+
33
+ ## Quick Start
34
+
35
+ ```python
36
+ from nexttoken import NextToken
37
+
38
+ # Initialize with your API key
39
+ client = NextToken(api_key="your-api-key")
40
+
41
+ # Use like the OpenAI SDK
42
+ response = client.chat.completions.create(
43
+ model="gpt-4o", # or "claude-3-5-sonnet", "gemini-2.5-flash"
44
+ messages=[
45
+ {"role": "user", "content": "Hello!"}
46
+ ]
47
+ )
48
+
49
+ print(response.choices[0].message.content)
50
+ ```
51
+
52
+ ## Using with OpenAI SDK Directly
53
+
54
+ Since the Gateway is OpenAI-compatible, you can also use the OpenAI SDK:
55
+
56
+ ```python
57
+ from openai import OpenAI
58
+
59
+ client = OpenAI(
60
+ api_key="your-nexttoken-api-key",
61
+ base_url="https://gateway.nexttoken.co/v1"
62
+ )
63
+
64
+ response = client.chat.completions.create(
65
+ model="gpt-4o",
66
+ messages=[{"role": "user", "content": "Hello!"}]
67
+ )
68
+ ```
69
+
70
+ ## Available Models
71
+
72
+ - OpenAI models
73
+ - Anthropic models
74
+ - Gemini models
75
+ - OpenRouter models
76
+
77
+ ## Embeddings
78
+
79
+ ```python
80
+ from nexttoken import NextToken
81
+
82
+ client = NextToken(api_key="your-api-key")
83
+
84
+ response = client.embeddings.create(
85
+ model="text-embedding-3-small",
86
+ input="Your text to embed"
87
+ )
88
+
89
+ print(response.data[0].embedding)
90
+ ```
91
+
92
+ ## Get Your API Key
93
+
94
+ Sign up at [nexttoken.co](https://nexttoken.co) and get your API key from Settings.
95
+
96
+ ## License
97
+
98
+ MIT
@@ -0,0 +1,76 @@
1
+ # NextToken Python SDK
2
+
3
+ Simple Python client for the NextToken Gateway - an OpenAI-compatible LLM proxy.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pip install nexttoken
9
+ ```
10
+
11
+ ## Quick Start
12
+
13
+ ```python
14
+ from nexttoken import NextToken
15
+
16
+ # Initialize with your API key
17
+ client = NextToken(api_key="your-api-key")
18
+
19
+ # Use like the OpenAI SDK
20
+ response = client.chat.completions.create(
21
+ model="gpt-4o", # or "claude-3-5-sonnet", "gemini-2.5-flash"
22
+ messages=[
23
+ {"role": "user", "content": "Hello!"}
24
+ ]
25
+ )
26
+
27
+ print(response.choices[0].message.content)
28
+ ```
29
+
30
+ ## Using with OpenAI SDK Directly
31
+
32
+ Since the Gateway is OpenAI-compatible, you can also use the OpenAI SDK:
33
+
34
+ ```python
35
+ from openai import OpenAI
36
+
37
+ client = OpenAI(
38
+ api_key="your-nexttoken-api-key",
39
+ base_url="https://gateway.nexttoken.co/v1"
40
+ )
41
+
42
+ response = client.chat.completions.create(
43
+ model="gpt-4o",
44
+ messages=[{"role": "user", "content": "Hello!"}]
45
+ )
46
+ ```
47
+
48
+ ## Available Models
49
+
50
+ - OpenAI models
51
+ - Anthropic models
52
+ - Gemini models
53
+ - OpenRouter models
54
+
55
+ ## Embeddings
56
+
57
+ ```python
58
+ from nexttoken import NextToken
59
+
60
+ client = NextToken(api_key="your-api-key")
61
+
62
+ response = client.embeddings.create(
63
+ model="text-embedding-3-small",
64
+ input="Your text to embed"
65
+ )
66
+
67
+ print(response.data[0].embedding)
68
+ ```
69
+
70
+ ## Get Your API Key
71
+
72
+ Sign up at [nexttoken.co](https://nexttoken.co) and get your API key from Settings.
73
+
74
+ ## License
75
+
76
+ MIT
@@ -0,0 +1,36 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "nexttoken"
7
+ version = "0.1.0"
8
+ description = "NextToken SDK - Simple client for the NextToken APIs and Gateway"
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ requires-python = ">=3.8"
12
+ authors = [
13
+ { name = "NextToken", email = "contact@nexttoken.co" }
14
+ ]
15
+ classifiers = [
16
+ "Development Status :: 4 - Beta",
17
+ "Intended Audience :: Developers",
18
+ "License :: OSI Approved :: MIT License",
19
+ "Programming Language :: Python :: 3",
20
+ "Programming Language :: Python :: 3.8",
21
+ "Programming Language :: Python :: 3.9",
22
+ "Programming Language :: Python :: 3.10",
23
+ "Programming Language :: Python :: 3.11",
24
+ "Programming Language :: Python :: 3.12",
25
+ ]
26
+ dependencies = [
27
+ "openai>=1.0.0",
28
+ ]
29
+
30
+ [project.urls]
31
+ Homepage = "https://nexttoken.co"
32
+ Documentation = "https://docs.nexttoken.co"
33
+ Repository = "https://github.com/NextTokenAI/nexttoken"
34
+
35
+ [tool.hatch.build.targets.wheel]
36
+ packages = ["src/nexttoken"]
@@ -0,0 +1,6 @@
1
"""NextToken SDK - Simple client for the NextToken Gateway."""

from .client import NextToken

# Package version; keep in sync with the `version` field in pyproject.toml.
__version__ = "0.1.0"

# Explicit public API surface of the package.
__all__ = ["NextToken", "__version__"]
@@ -0,0 +1,57 @@
1
+ """NextToken client for the OpenAI-compatible Gateway."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from openai import OpenAI
6
+
7
+
8
class NextToken:
    """Simple client for the NextToken Gateway.

    The Gateway is an OpenAI-compatible LLM proxy that provides:
    - Access to multiple models (GPT-4, Claude, Gemini)
    - Usage tracking and cost management
    - Simple Bearer token authentication

    Example:
        >>> from nexttoken import NextToken
        >>> client = NextToken(api_key="your-api-key")
        >>> response = client.chat.completions.create(
        ...     model="gpt-4o",
        ...     messages=[{"role": "user", "content": "Hello!"}]
        ... )
        >>> print(response.choices[0].message.content)
    """

    # Production gateway endpoint, used when no custom URL is supplied.
    DEFAULT_BASE_URL = "https://gateway.nexttoken.co/v1"

    def __init__(self, api_key: str, base_url: str | None = None):
        """Initialize the NextToken client.

        Args:
            api_key: Your NextToken API key (from https://nexttoken.co/settings)
            base_url: Optional custom gateway URL (defaults to production)
        """
        # A falsy base_url (None or "") falls back to the production
        # endpoint, matching the original `or`-based selection.
        target_url = base_url if base_url else self.DEFAULT_BASE_URL
        self._client = OpenAI(api_key=api_key, base_url=target_url)

    @property
    def chat(self):
        """Chat completions, delegated to the underlying OpenAI client."""
        return self._client.chat

    @property
    def embeddings(self):
        """Embeddings, delegated to the underlying OpenAI client."""
        return self._client.embeddings

    @property
    def models(self):
        """Models listing, delegated to the underlying OpenAI client."""
        return self._client.models