vallignus-0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vallignus/__init__.py +3 -0
- vallignus/auth.py +699 -0
- vallignus/cli.py +780 -0
- vallignus/identity/__init__.py +5 -0
- vallignus/identity/chrome.py +47 -0
- vallignus/identity/manager.py +175 -0
- vallignus/logger.py +86 -0
- vallignus/proxy.py +122 -0
- vallignus/rules.py +90 -0
- vallignus/sessions.py +529 -0
- vallignus-0.4.0.dist-info/METADATA +250 -0
- vallignus-0.4.0.dist-info/RECORD +15 -0
- vallignus-0.4.0.dist-info/WHEEL +5 -0
- vallignus-0.4.0.dist-info/entry_points.txt +2 -0
- vallignus-0.4.0.dist-info/top_level.txt +1 -0
vallignus/identity/chrome.py
ADDED
@@ -0,0 +1,47 @@
"""Chrome cookie extraction for testing automation and session persistence"""

import os
import sys
from typing import Dict

import browser_cookie3


def get_chrome_cookies(domain: str, profile: str = "Default") -> Dict[str, str]:
    """
    Extract cookies from Chrome for testing automation.

    Args:
        domain: Domain to get cookies for (e.g., "github.com")
        profile: Chrome profile name (default: "Default")

    Returns:
        Dictionary of cookie name-value pairs
    """
    # Normalize domain
    domain = domain.strip().lower().replace('https://', '').replace('http://', '').split('/')[0]

    # Get Chrome cookie file path based on OS
    if sys.platform == "darwin":
        profiles_dir = os.path.expanduser("~/Library/Application Support/Google/Chrome/")
        cookie_file = os.path.join(profiles_dir, profile, "Cookies")
    elif sys.platform == "win32":
        profiles_dir = os.path.expandvars("%LOCALAPPDATA%\\Google\\Chrome\\User Data\\")
        cookie_file = os.path.join(profiles_dir, profile, "Network", "Cookies")
    else:
        profiles_dir = os.path.expanduser("~/.config/google-chrome/")
        cookie_file = os.path.join(profiles_dir, profile, "Cookies")

    if not os.path.exists(cookie_file):
        raise FileNotFoundError(f"Chrome cookie file not found: {cookie_file}")

    # Use browser-cookie3 to get and decrypt cookies
    try:
        cj = browser_cookie3.chrome(cookie_file=cookie_file, domain_name=domain)
    except Exception as e:
        if "locked" in str(e).lower():
            raise RuntimeError("Chrome is open. Close Chrome and try again.")
        raise

    cookies = {cookie.name: cookie.value for cookie in cj}
    return cookies
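A minimal usage sketch for get_chrome_cookies, assuming Chrome is installed and closed; the domain value below is an illustrative placeholder, not something shipped by the package:

# Hypothetical example; "github.com" is a placeholder domain.
from vallignus.identity.chrome import get_chrome_cookies

try:
    cookies = get_chrome_cookies("https://github.com", profile="Default")
except (FileNotFoundError, RuntimeError) as exc:
    # Raised when the profile path is missing or Chrome still holds the cookie DB lock.
    print(f"Cookie extraction failed: {exc}")
else:
    print(f"Extracted {len(cookies)} cookies for github.com")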
vallignus/identity/manager.py
ADDED
@@ -0,0 +1,175 @@
"""Identity manager for session persistence in testing automation"""

import json
import os
from pathlib import Path
from typing import Dict, List, Optional

from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
import base64

from vallignus.identity.chrome import get_chrome_cookies


class IdentityManager:
    """
    Manages browser session persistence for testing automation.

    This class provides methods to save and restore browser sessions (cookies)
    for use in automated testing scenarios. Sessions are stored encrypted
    in the user's home directory.
    """

    def __init__(self, sessions_dir: Optional[Path] = None):
        """
        Initialize the IdentityManager.

        Args:
            sessions_dir: Optional custom path for session storage.
                Defaults to ~/.vallignus/sessions/
        """
        if sessions_dir is None:
            home = Path.home()
            sessions_dir = home / ".vallignus" / "sessions"

        self.sessions_dir = Path(sessions_dir)
        self.sessions_dir.mkdir(parents=True, exist_ok=True)

        # Initialize encryption key (derived from user's home directory)
        self._encryption_key = self._get_encryption_key()

    def _get_encryption_key(self) -> bytes:
        """
        Generate or retrieve encryption key for session storage.

        Uses a key derived from the user's home directory path for consistency.
        In production, you might want to use a user-provided password or keychain.
        """
        # Derive key from home directory (deterministic but user-specific)
        home_str = str(Path.home()).encode()
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=b'vallignus_salt',  # In production, use a random salt per user
            iterations=100000,
        )
        key = base64.urlsafe_b64encode(kdf.derive(home_str))
        return key

    def _encrypt_data(self, data: Dict) -> bytes:
        """Encrypt session data before storage"""
        f = Fernet(self._encryption_key)
        json_data = json.dumps(data).encode('utf-8')
        return f.encrypt(json_data)

    def _decrypt_data(self, encrypted_data: bytes) -> Dict:
        """Decrypt session data after retrieval"""
        f = Fernet(self._encryption_key)
        decrypted = f.decrypt(encrypted_data)
        return json.loads(decrypted.decode('utf-8'))

    def snapshot(
        self,
        domain: str,
        browser: str = "chrome",
        profile: str = "Default"
    ) -> None:
        """
        Save current browser session for a domain.

        Extracts cookies from the specified browser profile and saves them
        encrypted for later use in testing automation.

        Args:
            domain: The domain to snapshot (e.g., "github.com")
            browser: Browser name (currently supports "chrome")
            profile: Browser profile name (default: "Default")

        Raises:
            ValueError: If browser is not supported
            FileNotFoundError: If browser profile not found
        """
        if browser.lower() != "chrome":
            raise ValueError(f"Unsupported browser: {browser}. Only 'chrome' is supported.")

        # Extract cookies from browser
        cookies = get_chrome_cookies(domain, profile)

        if not cookies:
            raise ValueError(f"No cookies found for domain: {domain}")

        # Prepare session data
        session_data = {
            "domain": domain,
            "browser": browser,
            "profile": profile,
            "cookies": cookies
        }

        # Encrypt and save
        encrypted = self._encrypt_data(session_data)
        session_file = self.sessions_dir / f"{domain}.json.enc"

        with open(session_file, 'wb') as f:
            f.write(encrypted)

    def restore(self, domain: str) -> Dict[str, str]:
        """
        Restore saved session cookies for a domain.

        Returns a dictionary of cookies ready for use with requests or playwright
        in testing automation scenarios.

        Args:
            domain: The domain to restore cookies for

        Returns:
            Dictionary of cookie name-value pairs

        Raises:
            FileNotFoundError: If no saved session exists for the domain
        """
        session_file = self.sessions_dir / f"{domain}.json.enc"

        if not session_file.exists():
            raise FileNotFoundError(f"No saved session found for domain: {domain}")

        # Read and decrypt
        with open(session_file, 'rb') as f:
            encrypted = f.read()

        session_data = self._decrypt_data(encrypted)
        return session_data.get("cookies", {})

    def list_sessions(self) -> List[str]:
        """
        List all saved session domains.

        Returns:
            List of domain names that have saved sessions
        """
        sessions = []
        for file in self.sessions_dir.glob("*.json.enc"):
            # Extract domain from filename (domain.json.enc)
            domain = file.stem.replace(".json", "")
            sessions.append(domain)
        return sorted(sessions)

    def delete(self, domain: str) -> None:
        """
        Delete a saved session.

        Args:
            domain: The domain to delete the session for

        Raises:
            FileNotFoundError: If no saved session exists for the domain
        """
        session_file = self.sessions_dir / f"{domain}.json.enc"

        if not session_file.exists():
            raise FileNotFoundError(f"No saved session found for domain: {domain}")

        session_file.unlink()
vallignus/logger.py
ADDED
@@ -0,0 +1,86 @@
"""Flight recorder - logs all requests to JSON"""

import json
from datetime import datetime
from pathlib import Path
from typing import Optional


class FlightLogger:
    """Logs all HTTP requests to a JSON file"""

    def __init__(
        self,
        log_file: str = "flight_log.json",
        agent_id: Optional[str] = None,
        owner: Optional[str] = None,
        policy_id: Optional[str] = None,
        policy_version: Optional[int] = None,
        jti: Optional[str] = None
    ):
        self.log_file = Path(log_file)
        self.entries = []
        self.agent_id = agent_id
        self.owner = owner
        self.policy_id = policy_id
        self.policy_version = policy_version
        self.jti = jti

        if self.log_file.exists():
            try:
                with open(self.log_file, 'r') as f:
                    self.entries = json.load(f)
            except (json.JSONDecodeError, IOError):
                self.entries = []

    def log_request(
        self,
        method: str,
        url: str,
        status: int = None,
        blocked: bool = False,
        allowed: bool = True,
        estimated_cost: float = 0.0,
        deny_reason: Optional[str] = None,
        **kwargs
    ):
        entry = {
            "timestamp": datetime.utcnow().isoformat(),
            "method": method,
            "url": url,
            "status": status,
            "blocked": blocked,
            "allowed": allowed,
            "estimated_cost": estimated_cost,
            "decision": "deny" if blocked else "allow",
        }

        if self.agent_id:
            entry["agent_id"] = self.agent_id
        if self.owner:
            entry["owner"] = self.owner
        if self.policy_id:
            entry["policy_id"] = self.policy_id
        if self.policy_version is not None:
            entry["policy_version"] = self.policy_version
        if self.jti:
            entry["jti"] = self.jti

        if blocked and deny_reason:
            entry["deny_reason"] = deny_reason
        elif blocked:
            entry["deny_reason"] = "domain_not_allowed"

        entry.update(kwargs)
        self.entries.append(entry)
        self._save()

    def _save(self):
        try:
            with open(self.log_file, 'w') as f:
                json.dump(self.entries, f, indent=2)
        except IOError:
            pass

    def get_total_cost(self) -> float:
        return sum(entry.get("estimated_cost", 0.0) for entry in self.entries)
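As a sketch of how the flight log accumulates, the calls below (with illustrative values) append one entry per request and rewrite flight_log.json after each call:

# Hypothetical example; the agent id, URLs, and cost are placeholder values.
from vallignus.logger import FlightLogger

logger = FlightLogger(log_file="flight_log.json", agent_id="agent-123")
logger.log_request("GET", "https://api.openai.com/v1/models",
                   status=200, estimated_cost=0.002)
logger.log_request("POST", "https://blocked.example", status=403,
                   blocked=True, allowed=False)
print(logger.get_total_cost())   # 0.002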
vallignus/proxy.py
ADDED
@@ -0,0 +1,122 @@
"""mitmproxy-based HTTP interceptor"""

import threading
import time
from typing import Optional, Set
from mitmproxy import http, options
from mitmproxy.tools.dump import DumpMaster

from vallignus.rules import RulesEngine
from vallignus.logger import FlightLogger


class VallignusProxy:
    """HTTP/HTTPS proxy that intercepts and filters requests"""

    def __init__(
        self,
        allowed_domains: Set[str],
        budget: Optional[float] = None,
        logger: Optional[FlightLogger] = None,
        rules: Optional[RulesEngine] = None
    ):
        self.allowed_domains = allowed_domains
        self.budget = budget
        self.logger = logger or FlightLogger()
        self.rules = rules or RulesEngine(allowed_domains, budget)
        self.master: Optional[DumpMaster] = None
        self.proxy_thread: Optional[threading.Thread] = None
        self.is_running = False
        self.blocked_count = 0
        self.allowed_count = 0
        self._should_terminate = False

    def request(self, flow: http.HTTPFlow) -> None:
        """Intercept outgoing HTTP requests"""
        url = flow.request.pretty_url
        method = flow.request.method

        # Check if domain is allowed (don't update spending here)
        is_allowed = self.rules.is_allowed(url)

        if not is_allowed:
            self.blocked_count += 1
            flow.response = http.Response.make(
                403,
                b"[boundary] Execution blocked by Vallignus: domain not permitted by policy",
                {"Content-Type": "text/plain"}
            )
            # Log blocked request immediately
            self.logger.log_request(
                method=method,
                url=url,
                status=403,
                blocked=True,
                allowed=False,
                estimated_cost=0.0
            )
        else:
            self.allowed_count += 1
            # Don't log yet - wait for response to get final status and cost

    def response(self, flow: http.HTTPFlow) -> None:
        """Intercept HTTP responses"""
        if flow.response:
            url = flow.request.pretty_url
            method = flow.request.method
            status = flow.response.status_code

            # Only process if request wasn't blocked (blocked requests don't reach here)
            is_allowed = self.rules.is_allowed(url)
            if is_allowed:
                # Check request and update spending (this adds to total_spend)
                should_block, is_allowed_domain, estimated_cost = self.rules.check_request(
                    method, url, status
                )

                # Log the allowed request with final status and cost
                self.logger.log_request(
                    method=method,
                    url=url,
                    status=status,
                    blocked=False,
                    allowed=True,
                    estimated_cost=estimated_cost
                )

                # Check budget after response
                if self.rules.is_budget_exceeded() and not self._should_terminate:
                    self._should_terminate = True

    def start(self, port: int = 8080) -> int:
        """Start the proxy server"""
        self.is_running = True
        self._port = port

        def run_proxy():
            import asyncio

            async def run_master():
                opts = options.Options(listen_port=port)
                self.master = DumpMaster(opts)
                self.master.addons.add(self)
                await self.master.run()

            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                loop.run_until_complete(run_master())
            except Exception:
                pass

        self.proxy_thread = threading.Thread(target=run_proxy, daemon=True)
        self.proxy_thread.start()

        time.sleep(1.0)
        return port

    def stop(self):
        """Stop the proxy server"""
        self.is_running = False
        if self.master:
            self.master.shutdown()
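A minimal start/stop sketch for the proxy, assuming mitmproxy is installed and that the calling process points its HTTP clients at the listener (for example via standard proxy environment variables); that client wiring is an assumption, not something this module configures:

# Hypothetical example; the port, domain set, and budget are assumed values.
from vallignus.proxy import VallignusProxy

proxy = VallignusProxy(allowed_domains={"api.openai.com"}, budget=1.00)
port = proxy.start(port=8080)   # runs mitmproxy's DumpMaster in a daemon thread
print(f"Vallignus proxy listening on 127.0.0.1:{port}")
# ... route traffic through the proxy, e.g. HTTPS_PROXY=http://127.0.0.1:8080 ...
proxy.stop()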
vallignus/rules.py
ADDED
@@ -0,0 +1,90 @@
"""Domain allowlist, budget tracking, and request blocking logic"""

from typing import Set, Optional, Tuple
from urllib.parse import urlparse


class RulesEngine:
    """Manages domain allowlist, budget tracking, and request blocking"""

    def __init__(self, allowed_domains: Set[str], budget: float = None):
        self.allowed_domains = allowed_domains
        self.budget = budget
        self.total_spend = 0.0
        self.request_count = 0

    def is_allowed(self, url: str) -> bool:
        """Check if a URL's domain is in the allowlist"""
        try:
            parsed = urlparse(url)
            domain = parsed.netloc.lower()

            # Remove port if present
            if ':' in domain:
                domain = domain.split(':')[0]

            # Check exact match or subdomain
            if domain in self.allowed_domains:
                return True

            # Check if it's a subdomain of an allowed domain
            for allowed in self.allowed_domains:
                if domain.endswith('.' + allowed):
                    return True

            return False
        except Exception:
            return False

    def estimate_request_cost(self, method: str, url: str, status: int = None) -> float:
        """Estimate the cost of an API request"""
        # Simple heuristic: count requests to api.openai.com
        # Rough estimate: $0.002 per request (very conservative)
        try:
            parsed = urlparse(url)
            domain = parsed.netloc.lower()

            if 'api.openai.com' in domain or 'openai.azure.com' in domain:
                # Very rough estimate: $0.002 per request
                # In reality, cost depends on tokens, model, etc.
                return 0.002

            return 0.0
        except Exception:
            return 0.0

    def check_request(
        self,
        method: str,
        url: str,
        status: int = None
    ) -> Tuple[bool, bool, float]:
        """
        Check if a request should be allowed.
        Returns: (should_block, is_allowed, estimated_cost)
        """
        is_allowed_domain = self.is_allowed(url)
        estimated_cost = self.estimate_request_cost(method, url, status)

        # Block if domain not in allowlist
        should_block = not is_allowed_domain

        # Update spending if request is allowed and completed
        if is_allowed_domain and status and status < 500:
            self.total_spend += estimated_cost
            self.request_count += 1

        return should_block, is_allowed_domain, estimated_cost

    def is_budget_exceeded(self) -> bool:
        """Check if budget has been exceeded"""
        if self.budget is None:
            return False
        return self.total_spend >= self.budget

    def get_budget_status(self) -> Tuple[float, float, float]:
        """Get (spend, budget, remaining)"""
        remaining = None
        if self.budget is not None:
            remaining = max(0.0, self.budget - self.total_spend)
        return self.total_spend, self.budget, remaining