paygent-sdk 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- examples/__init__.py +1 -0
- examples/advanced_usage.py +101 -0
- examples/basic_usage.py +38 -0
- examples/constants_usage.py +136 -0
- paygent_sdk/__init__.py +47 -0
- paygent_sdk/client.py +464 -0
- paygent_sdk/constants.py +217 -0
- paygent_sdk/models.py +485 -0
- paygent_sdk-1.0.0.dist-info/METADATA +383 -0
- paygent_sdk-1.0.0.dist-info/RECORD +15 -0
- paygent_sdk-1.0.0.dist-info/WHEEL +5 -0
- paygent_sdk-1.0.0.dist-info/licenses/LICENSE +21 -0
- paygent_sdk-1.0.0.dist-info/top_level.txt +3 -0
- tests/__init__.py +1 -0
- tests/test_client.py +277 -0
examples/__init__.py
ADDED
|
# Examples package (marks the examples/ directory as an importable package).
"""
Advanced usage example for the Paygent SDK with token string functionality.
"""

import sys
import os

# Make the repository root importable so the example runs without installing the SDK.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from paygent_sdk import (
    Client,
    UsageData,
    UsageDataWithStrings,
    ServiceProvider,
    OpenAIModels,
    AnthropicModels,
    GoogleDeepMindModels,
    MetaModels,
    MistralAIModels
)

def main():
    """Advanced usage example with both token counting methods."""
    # NOTE(review): the key below looks like an example placeholder aimed at a
    # local dev server — replace with your real API key and endpoint.
    sdk_client = Client.new_client_with_url(
        "pk_e0ea0d11bb7f0d174caf578d665454acff97bdb1f85c235af547ccd9a733ef35",
        "http://localhost:8080"
    )

    print("Example 1: Basic Usage with Pre-calculated Tokens")

    # Usage record whose token counts were computed by the caller.
    pretokenized = UsageData(
        service_provider=ServiceProvider.OPENAI,
        model=OpenAIModels.GPT_4_0613,
        prompt_tokens=1000,
        completion_tokens=500,
        total_tokens=1500
    )

    try:
        sdk_client.send_usage("agent-123", "customer-456", "chat-completion", pretokenized)
        print("✓ Usage data sent successfully!")
    except Exception as err:
        print(f"✗ Failed to send usage: {err}")

    print("\nExample 2: Advanced Usage with Token String Counting")

    # Raw prompt/output text — the SDK derives the token counts itself.
    string_based = UsageDataWithStrings(
        service_provider=ServiceProvider.OPENAI,
        model=OpenAIModels.GPT_3_5_TURBO,
        prompt_string="What is the capital of France? Please provide a detailed explanation.",
        output_string="The capital of France is Paris. Paris is located in the north-central part of France and is the country's largest city and economic center. It serves as the political, economic, and cultural hub of the nation."
    )

    try:
        sdk_client.send_usage_with_token_string(
            "agent-789",
            "customer-101",
            "question-answer",
            string_based
        )
        print("✓ Usage data with token strings sent successfully!")
    except Exception as err:
        print(f"✗ Failed to send usage with token strings: {err}")

    print("\nExample 3: Different AI Models")

    # (model constant, provider constant, human-readable provider name) triples.
    provider_matrix = [
        (AnthropicModels.SONNET_3_7, ServiceProvider.ANTHROPIC, "Anthropic"),
        (GoogleDeepMindModels.GEMINI_2_5_PRO, ServiceProvider.GOOGLE_DEEPMIND, "Google DeepMind"),
        (MetaModels.LLAMA_3_1_8B_INSTRUCT_TURBO, ServiceProvider.META, "Meta"),
        (MistralAIModels.MISTRAL_LARGE, ServiceProvider.MISTRAL_AI, "Mistral AI")
    ]

    for model, provider, provider_name in provider_matrix:
        sample = UsageDataWithStrings(
            service_provider=provider,
            model=model,
            prompt_string="Hello, how are you?",
            output_string="I'm doing well, thank you for asking! How can I help you today?"
        )

        try:
            sdk_client.send_usage_with_token_string(
                f"agent-{model.replace('-', '_')}",
                "customer-test",
                "greeting",
                sample
            )
            print(f"✓ {provider_name} {model} usage sent successfully!")
        except Exception as err:
            print(f"✗ Failed to send usage for {provider_name} {model}: {err}")

if __name__ == "__main__":
    main()
examples/basic_usage.py
ADDED
|
"""
Basic usage example for the Paygent SDK.
"""

import sys
import os

# Make the repository root importable so the example runs without installing the SDK.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from paygent_sdk import Client, UsageData, ServiceProvider, MetaModels

def main():
    """Basic usage example."""
    # NOTE(review): placeholder key pointed at a local dev server — substitute
    # your real credentials before running against production.
    sdk_client = Client.new_client_with_url(
        "pk_e0ea0d11bb7f0d174caf578d665454acff97bdb1f85c235af547ccd9a733ef35",
        "http://localhost:8080"
    )

    # One usage record built from the SDK's provider/model constants.
    llama_record = UsageData(
        service_provider=ServiceProvider.META,
        model=MetaModels.LLAMA_2,
        prompt_tokens=756,
        completion_tokens=244,
        total_tokens=1000
    )

    try:
        sdk_client.send_usage("agent-123", "customer-456", "email-sent", llama_record)
        print("Usage data sent successfully!")
    except Exception as err:
        print(f"Failed to send usage: {err}")

if __name__ == "__main__":
    main()
|
"""
Example demonstrating the use of model constants in the Paygent SDK.
"""

from paygent_sdk import (
    Client,
    UsageData,
    UsageDataWithStrings,
    OpenAIModels,
    AnthropicModels,
    GoogleDeepMindModels,
    MetaModels,
    DeepSeekModels,
    ServiceProvider,
    is_model_supported,
)


def main():
    """Main function demonstrating model constants usage."""
    # The client is constructed for illustration only; nothing is sent below.
    api_client = Client.new_client("your-api-key-here")

    print("=== Model Constants Example ===\n")

    # Example 1: OpenAI constants.
    print("1. Using OpenAI Model Constants:")
    gpt4o_record = UsageData(
        service_provider=ServiceProvider.OPENAI,
        model=OpenAIModels.GPT_4O,
        prompt_tokens=100,
        completion_tokens=50,
        total_tokens=150,
    )
    print(f" Model: {gpt4o_record.model}")
    print(f" Provider: {gpt4o_record.service_provider}")
    print(f" Tokens: {gpt4o_record.total_tokens}\n")

    # Example 2: Anthropic constants.
    print("2. Using Anthropic Model Constants:")
    sonnet_record = UsageData(
        service_provider=ServiceProvider.ANTHROPIC,
        model=AnthropicModels.SONNET_4_5,
        prompt_tokens=200,
        completion_tokens=100,
        total_tokens=300,
    )
    print(f" Model: {sonnet_record.model}")
    print(f" Provider: {sonnet_record.service_provider}\n")

    # Example 3: Google DeepMind constants.
    print("3. Using Google DeepMind Model Constants:")
    gemini_record = UsageData(
        service_provider=ServiceProvider.GOOGLE_DEEPMIND,
        model=GoogleDeepMindModels.GEMINI_2_5_PRO,
        prompt_tokens=150,
        completion_tokens=75,
        total_tokens=225,
    )
    print(f" Model: {gemini_record.model}")
    print(f" Provider: {gemini_record.service_provider}\n")

    # Example 4: Meta constants.
    print("4. Using Meta Model Constants:")
    maverick_record = UsageData(
        service_provider=ServiceProvider.META,
        model=MetaModels.LLAMA_4_MAVERICK,
        prompt_tokens=300,
        completion_tokens=150,
        total_tokens=450,
    )
    print(f" Model: {maverick_record.model}")
    print(f" Provider: {maverick_record.service_provider}\n")

    # Example 5: DeepSeek constants.
    print("5. Using DeepSeek Model Constants:")
    r1_record = UsageData(
        service_provider=ServiceProvider.DEEPSEEK,
        model=DeepSeekModels.DEEPSEEK_R1_GLOBAL,
        prompt_tokens=250,
        completion_tokens=125,
        total_tokens=375,
    )
    print(f" Model: {r1_record.model}")
    print(f" Provider: {r1_record.service_provider}\n")

    # Example 6: support lookup, including a deliberately unknown model name.
    print("6. Checking Model Support:")
    candidates = [
        OpenAIModels.GPT_5,
        OpenAIModels.O3,
        AnthropicModels.SONNET_4_5,
        "unknown-model",
    ]
    for candidate in candidates:
        if is_model_supported(candidate):
            verdict = "✓ Supported"
        else:
            verdict = "✗ Not Supported"
        print(f" {candidate}: {verdict}")
    print()

    # Example 7: the caller always supplies the provider explicitly.
    print("7. Service Providers (User-Provided):")
    print(f" OpenAI models use: {ServiceProvider.OPENAI}")
    print(f" Anthropic models use: {ServiceProvider.ANTHROPIC}")
    print(f" Google DeepMind models use: {ServiceProvider.GOOGLE_DEEPMIND}")
    print(f" Meta models use: {ServiceProvider.META}")
    print()

    # Example 8: string-based record — token counts come from the SDK.
    print("8. Using Constants with send_usage_with_token_string:")
    qa_record = UsageDataWithStrings(
        service_provider=ServiceProvider.OPENAI,
        model=OpenAIModels.GPT_4O_MINI,
        prompt_string="What is the capital of France?",
        output_string="The capital of France is Paris.",
    )
    print(f" Model: {qa_record.model}")
    print(f" Provider: {qa_record.service_provider}")
    print(f' Prompt: "{qa_record.prompt_string}"')
    print(f' Output: "{qa_record.output_string}"\n')

    print("=== All examples completed successfully! ===")
    print("\nNote: To actually send data to the API, uncomment the client.send_usage() calls below:\n")
    print('# client.send_usage("agent-123", "customer-456", "test", openai_usage)')
    print('# client.send_usage_with_token_string("agent-123", "customer-456", "test", string_usage)')


if __name__ == "__main__":
    main()
|
paygent_sdk/__init__.py
ADDED
|
"""
Paygent SDK for Python

A Python SDK for integrating with the Paygent API to track usage and costs for AI models.

For the Go SDK equivalent, see: https://github.com/paygent/paygent-sdk-go
"""

# Re-export the public API at package level so callers can write
# `from paygent_sdk import Client, UsageData, ...` without knowing submodules.
from .client import Client
from .models import UsageData, UsageDataWithStrings, APIRequest, ModelPricing, MODEL_PRICING
from .constants import (
    ServiceProvider,
    OpenAIModels,
    AnthropicModels,
    GoogleDeepMindModels,
    MetaModels,
    AWSModels,
    MistralAIModels,
    CohereModels,
    DeepSeekModels,
    is_model_supported
)

# Package version; keep in sync with the distribution metadata (1.0.0 wheel).
__version__ = "1.0.0"
# Explicit public surface for `from paygent_sdk import *`.
__all__ = [
    # Core classes
    "Client",
    "UsageData",
    "UsageDataWithStrings",
    "APIRequest",
    "ModelPricing",
    "MODEL_PRICING",

    # Constants
    "ServiceProvider",
    "OpenAIModels",
    "AnthropicModels",
    "GoogleDeepMindModels",
    "MetaModels",
    "AWSModels",
    "MistralAIModels",
    "CohereModels",
    "DeepSeekModels",

    # Utility functions
    "is_model_supported"
]
|