ingestr 0.13.13__py3-none-any.whl → 0.14.104__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ingestr/conftest.py +72 -0
- ingestr/main.py +134 -87
- ingestr/src/adjust/__init__.py +4 -4
- ingestr/src/adjust/adjust_helpers.py +7 -3
- ingestr/src/airtable/__init__.py +3 -2
- ingestr/src/allium/__init__.py +128 -0
- ingestr/src/anthropic/__init__.py +277 -0
- ingestr/src/anthropic/helpers.py +525 -0
- ingestr/src/applovin_max/__init__.py +6 -4
- ingestr/src/appsflyer/__init__.py +325 -0
- ingestr/src/appsflyer/client.py +49 -45
- ingestr/src/appstore/__init__.py +1 -0
- ingestr/src/arrow/__init__.py +9 -1
- ingestr/src/asana_source/__init__.py +1 -1
- ingestr/src/attio/__init__.py +102 -0
- ingestr/src/attio/helpers.py +65 -0
- ingestr/src/blob.py +37 -10
- ingestr/src/buildinfo.py +1 -1
- ingestr/src/chess/__init__.py +1 -1
- ingestr/src/clickup/__init__.py +85 -0
- ingestr/src/clickup/helpers.py +47 -0
- ingestr/src/collector/spinner.py +43 -0
- ingestr/src/couchbase_source/__init__.py +118 -0
- ingestr/src/couchbase_source/helpers.py +135 -0
- ingestr/src/cursor/__init__.py +83 -0
- ingestr/src/cursor/helpers.py +188 -0
- ingestr/src/destinations.py +508 -27
- ingestr/src/docebo/__init__.py +589 -0
- ingestr/src/docebo/client.py +435 -0
- ingestr/src/docebo/helpers.py +97 -0
- ingestr/src/elasticsearch/__init__.py +80 -0
- ingestr/src/elasticsearch/helpers.py +138 -0
- ingestr/src/errors.py +8 -0
- ingestr/src/facebook_ads/__init__.py +47 -28
- ingestr/src/facebook_ads/helpers.py +59 -37
- ingestr/src/facebook_ads/settings.py +2 -0
- ingestr/src/facebook_ads/utils.py +39 -0
- ingestr/src/factory.py +107 -2
- ingestr/src/filesystem/__init__.py +8 -3
- ingestr/src/filters.py +46 -3
- ingestr/src/fluxx/__init__.py +9906 -0
- ingestr/src/fluxx/helpers.py +209 -0
- ingestr/src/frankfurter/__init__.py +157 -0
- ingestr/src/frankfurter/helpers.py +48 -0
- ingestr/src/freshdesk/__init__.py +89 -0
- ingestr/src/freshdesk/freshdesk_client.py +137 -0
- ingestr/src/freshdesk/settings.py +9 -0
- ingestr/src/fundraiseup/__init__.py +95 -0
- ingestr/src/fundraiseup/client.py +81 -0
- ingestr/src/github/__init__.py +41 -6
- ingestr/src/github/helpers.py +5 -5
- ingestr/src/google_analytics/__init__.py +22 -4
- ingestr/src/google_analytics/helpers.py +124 -6
- ingestr/src/google_sheets/__init__.py +4 -4
- ingestr/src/google_sheets/helpers/data_processing.py +2 -2
- ingestr/src/hostaway/__init__.py +302 -0
- ingestr/src/hostaway/client.py +288 -0
- ingestr/src/http/__init__.py +35 -0
- ingestr/src/http/readers.py +114 -0
- ingestr/src/http_client.py +24 -0
- ingestr/src/hubspot/__init__.py +66 -23
- ingestr/src/hubspot/helpers.py +52 -22
- ingestr/src/hubspot/settings.py +14 -7
- ingestr/src/influxdb/__init__.py +46 -0
- ingestr/src/influxdb/client.py +34 -0
- ingestr/src/intercom/__init__.py +142 -0
- ingestr/src/intercom/helpers.py +674 -0
- ingestr/src/intercom/settings.py +279 -0
- ingestr/src/isoc_pulse/__init__.py +159 -0
- ingestr/src/jira_source/__init__.py +340 -0
- ingestr/src/jira_source/helpers.py +439 -0
- ingestr/src/jira_source/settings.py +170 -0
- ingestr/src/kafka/__init__.py +4 -1
- ingestr/src/kinesis/__init__.py +139 -0
- ingestr/src/kinesis/helpers.py +82 -0
- ingestr/src/klaviyo/{_init_.py → __init__.py} +5 -6
- ingestr/src/linear/__init__.py +634 -0
- ingestr/src/linear/helpers.py +111 -0
- ingestr/src/linkedin_ads/helpers.py +0 -1
- ingestr/src/mailchimp/__init__.py +126 -0
- ingestr/src/mailchimp/helpers.py +226 -0
- ingestr/src/mailchimp/settings.py +164 -0
- ingestr/src/masking.py +344 -0
- ingestr/src/mixpanel/__init__.py +62 -0
- ingestr/src/mixpanel/client.py +99 -0
- ingestr/src/monday/__init__.py +246 -0
- ingestr/src/monday/helpers.py +392 -0
- ingestr/src/monday/settings.py +328 -0
- ingestr/src/mongodb/__init__.py +72 -8
- ingestr/src/mongodb/helpers.py +915 -38
- ingestr/src/partition.py +32 -0
- ingestr/src/phantombuster/__init__.py +65 -0
- ingestr/src/phantombuster/client.py +87 -0
- ingestr/src/pinterest/__init__.py +82 -0
- ingestr/src/pipedrive/__init__.py +198 -0
- ingestr/src/pipedrive/helpers/__init__.py +23 -0
- ingestr/src/pipedrive/helpers/custom_fields_munger.py +102 -0
- ingestr/src/pipedrive/helpers/pages.py +115 -0
- ingestr/src/pipedrive/settings.py +27 -0
- ingestr/src/pipedrive/typing.py +3 -0
- ingestr/src/plusvibeai/__init__.py +335 -0
- ingestr/src/plusvibeai/helpers.py +544 -0
- ingestr/src/plusvibeai/settings.py +252 -0
- ingestr/src/quickbooks/__init__.py +117 -0
- ingestr/src/resource.py +40 -0
- ingestr/src/revenuecat/__init__.py +83 -0
- ingestr/src/revenuecat/helpers.py +237 -0
- ingestr/src/salesforce/__init__.py +15 -8
- ingestr/src/shopify/__init__.py +1 -17
- ingestr/src/smartsheets/__init__.py +82 -0
- ingestr/src/snapchat_ads/__init__.py +489 -0
- ingestr/src/snapchat_ads/client.py +72 -0
- ingestr/src/snapchat_ads/helpers.py +535 -0
- ingestr/src/socrata_source/__init__.py +83 -0
- ingestr/src/socrata_source/helpers.py +85 -0
- ingestr/src/socrata_source/settings.py +8 -0
- ingestr/src/solidgate/__init__.py +219 -0
- ingestr/src/solidgate/helpers.py +154 -0
- ingestr/src/sources.py +2933 -245
- ingestr/src/stripe_analytics/__init__.py +49 -21
- ingestr/src/stripe_analytics/helpers.py +286 -1
- ingestr/src/stripe_analytics/settings.py +62 -10
- ingestr/src/telemetry/event.py +10 -9
- ingestr/src/tiktok_ads/__init__.py +12 -6
- ingestr/src/tiktok_ads/tiktok_helpers.py +0 -1
- ingestr/src/trustpilot/__init__.py +48 -0
- ingestr/src/trustpilot/client.py +48 -0
- ingestr/src/wise/__init__.py +68 -0
- ingestr/src/wise/client.py +63 -0
- ingestr/src/zoom/__init__.py +99 -0
- ingestr/src/zoom/helpers.py +102 -0
- ingestr/tests/unit/test_smartsheets.py +133 -0
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/METADATA +229 -19
- ingestr-0.14.104.dist-info/RECORD +203 -0
- ingestr/src/appsflyer/_init_.py +0 -24
- ingestr-0.13.13.dist-info/RECORD +0 -115
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/WHEEL +0 -0
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/entry_points.txt +0 -0
- {ingestr-0.13.13.dist-info → ingestr-0.14.104.dist-info}/licenses/LICENSE.md +0 -0
|
@@ -0,0 +1,525 @@
|
|
|
1
|
+
"""Helper functions for the Anthropic source using common HTTP client."""
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Any, Callable, Dict, Iterator, List, Optional
|
|
5
|
+
|
|
6
|
+
import requests
|
|
7
|
+
|
|
8
|
+
from ingestr.src.http_client import create_client
|
|
9
|
+
|
|
10
|
+
# Module-level logger; named after this module so log config can target it.
logger = logging.getLogger(__name__)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class AnthropicClient:
    """HTTP client for the Anthropic Admin API.

    Wraps a retrying ``requests`` session (from ``create_client``) and adds
    the Admin API auth/version headers plus cursor-based pagination.
    """

    def __init__(self, api_key: str):
        """
        Args:
            api_key: Anthropic Admin API key (starts with ``sk-ant-admin...``).
        """
        self.api_key = api_key
        self.base_url = "https://api.anthropic.com/v1"
        self.headers = {
            "anthropic-version": "2023-06-01",
            "x-api-key": api_key,
            "User-Agent": "ingestr/1.0.0 (https://github.com/bruin-data/ingestr)",
        }
        # Create client with retry logic for common transient error codes
        self.client = create_client(retry_status_codes=[429, 502, 503, 504])

    def get(
        self, path: str, params: Optional[Dict[str, Any]] = None
    ) -> requests.Response:
        """Make a GET request to the Anthropic API."""
        url = f"{self.base_url}/{path}"
        return self.client.get(url, headers=self.headers, params=params)

    def fetch_paginated(
        self,
        path: str,
        params: Optional[Dict[str, Any]] = None,
        flatten_func: Optional[Callable] = None,
        handle_404: bool = False,
    ) -> Iterator[Dict[str, Any]]:
        """
        Fetch paginated data from the Anthropic API.

        Args:
            path: API endpoint path
            params: Query parameters
            flatten_func: Optional function to flatten records
            handle_404: If True, treat 404 as empty result instead of error

        Yields:
            Flattened records

        Raises:
            ValueError: On 401 (invalid/insufficient API key) or 400 responses.
            Exception: On any other request or decoding failure.
        """
        if params is None:
            params = {}

        # Make a copy to avoid modifying the caller's dict
        params = dict(params)

        has_more = True
        next_page = None

        while has_more:
            current_params = dict(params)

            if next_page:
                current_params["page"] = next_page
                # Remove limit from subsequent requests as page cursor includes it
                current_params.pop("limit", None)

            try:
                response = self.get(path, current_params)
                response.raise_for_status()

                data = response.json()

                # Process each record, flattening if a flattener was supplied
                for record in data.get("data", []):
                    if flatten_func:
                        yield flatten_func(record)
                    else:
                        yield record

                # Cursor pagination: keep following next_page while has_more
                has_more = data.get("has_more", False)
                next_page = data.get("next_page")

            except requests.exceptions.HTTPError as e:
                if e.response.status_code == 401:
                    raise ValueError(
                        "Invalid API key. Please ensure you're using an Admin API key "
                        "(starts with sk-ant-admin...) and have the necessary permissions."
                    ) from e
                elif e.response.status_code == 404:
                    # NOTE(review): both 404 branches stop iteration; handle_404
                    # currently only changes the log message — confirm intent.
                    if handle_404:
                        logger.info(f"No data available for {path}")
                        break
                    else:
                        logger.info(f"No data found at {path}")
                        break
                elif e.response.status_code == 400:
                    error_msg = e.response.text
                    raise ValueError(f"Bad request: {error_msg}") from e
                else:
                    # Chain the original error so the root cause is preserved
                    raise Exception(f"API request failed: {e}") from e
            except Exception as e:
                raise Exception(f"Failed to fetch data: {e}") from e

    def fetch_single(self, path: str) -> Optional[Dict[str, Any]]:
        """
        Fetch a single resource from the API.

        Args:
            path: API endpoint path

        Returns:
            The resource data or None if not found (404)

        Raises:
            ValueError: When the API key is rejected (401).
            Exception: On any other HTTP error.
        """
        try:
            response = self.get(path)
            response.raise_for_status()
            return response.json()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 401:
                raise ValueError("Invalid API key") from e
            elif e.response.status_code == 404:
                return None
            raise Exception(f"Failed to fetch resource: {e}") from e
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def flatten_usage_record(record: Dict[str, Any]) -> Dict[str, Any]:
    """Flatten a nested Claude Code usage record into a single-level dict.

    Args:
        record: Nested record from the Claude Code usage API.

    Returns:
        Flat dict with actor fields, core metrics, per-tool accept/reject
        counts, and token/cost totals aggregated across the model breakdown.
    """
    # Resolve the actor identity; the identifying field depends on actor type.
    actor = record.get("actor", {})
    kind = actor.get("type", "")
    if kind == "user_actor":
        identity = actor.get("email_address", "")
    elif kind == "api_actor":
        identity = actor.get("api_key_name", "")
    else:
        identity = ""

    out: Dict[str, Any] = {
        "date": record.get("date"),
        "actor_type": kind,
        "actor_id": identity,
        "organization_id": record.get("organization_id"),
        "customer_type": record.get("customer_type"),
        "terminal_type": record.get("terminal_type"),
    }

    # Core productivity metrics (missing values default to zero).
    metrics = record.get("core_metrics", {})
    loc = metrics.get("lines_of_code", {})
    out["num_sessions"] = metrics.get("num_sessions", 0)
    out["lines_added"] = loc.get("added", 0)
    out["lines_removed"] = loc.get("removed", 0)
    out["commits_by_claude_code"] = metrics.get("commits_by_claude_code", 0)
    out["pull_requests_by_claude_code"] = metrics.get(
        "pull_requests_by_claude_code", 0
    )

    # One accepted/rejected column pair per tool.
    for tool_name, outcome in record.get("tool_actions", {}).items():
        out[f"{tool_name}_accepted"] = outcome.get("accepted", 0)
        out[f"{tool_name}_rejected"] = outcome.get("rejected", 0)

    # Aggregate token usage and cost across the per-model breakdown.
    token_totals = {"input": 0, "output": 0, "cache_read": 0, "cache_creation": 0}
    cost_cents = 0
    model_names: List[str] = []

    for entry in record.get("model_breakdown", []):
        model_name = entry.get("model", "")
        if model_name:
            model_names.append(model_name)

        tokens = entry.get("tokens", {})
        for bucket in token_totals:
            token_totals[bucket] += tokens.get(bucket, 0)

        # Only USD amounts are summed; other currencies are skipped.
        price = entry.get("estimated_cost", {})
        if price.get("currency") == "USD":
            cost_cents += price.get("amount", 0)

    out["total_input_tokens"] = token_totals["input"]
    out["total_output_tokens"] = token_totals["output"]
    out["total_cache_read_tokens"] = token_totals["cache_read"]
    out["total_cache_creation_tokens"] = token_totals["cache_creation"]
    out["total_estimated_cost_cents"] = cost_cents
    out["models_used"] = ",".join(model_names) if model_names else None

    return out
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def fetch_claude_code_usage(
    api_key: str,
    date: str,
    limit: int = 100,
) -> Iterator[Dict[str, Any]]:
    """Yield flattened Claude Code usage records for a single day.

    Args:
        api_key: Anthropic Admin API key
        date: Day to report on, in YYYY-MM-DD format (used as both the
            start and the end of the window)
        limit: Page size; capped at the API maximum of 1000

    Yields:
        Flattened usage records (see ``flatten_usage_record``)
    """
    page_size = limit if limit < 1000 else 1000
    query = {"starting_at": date, "ending_at": date, "limit": page_size}

    yield from AnthropicClient(api_key).fetch_paginated(
        "organizations/usage_report/claude_code",
        params=query,
        flatten_func=flatten_usage_record,
        handle_404=True,
    )
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def flatten_usage_report_record(record: Dict[str, Any]) -> Dict[str, Any]:
    """Flatten a nested usage-report record into a single-level dict.

    Args:
        record: Nested usage record from the messages usage-report API.

    Returns:
        Flat dict of bucket/dimension fields (defaulted to "" so they are
        never None) plus token counts and server-tool usage counters.
    """
    token_counts = record.get("tokens", {})
    tool_usage = record.get("server_tool_usage", {})

    return {
        # Dimension fields — empty string instead of None for stability.
        "bucket": record.get("bucket", ""),
        "api_key_id": record.get("api_key_id", ""),
        "workspace_id": record.get("workspace_id", ""),
        "model": record.get("model", ""),
        "service_tier": record.get("service_tier", ""),
        "context_window": record.get("context_window", ""),
        # Token counters default to zero when absent.
        "uncached_input_tokens": token_counts.get("uncached_input_tokens", 0),
        "cached_input_tokens": token_counts.get("cached_input_tokens", 0),
        "cache_creation_tokens": token_counts.get("cache_creation_tokens", 0),
        "output_tokens": token_counts.get("output_tokens", 0),
        # Server-side tool invocation counters.
        "web_search_usage": tool_usage.get("web_search_usage", 0),
        "code_execution_usage": tool_usage.get("code_execution_usage", 0),
    }
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def fetch_usage_report(
    api_key: str,
    starting_at: str,
    ending_at: str,
    bucket_width: str = "1d",
    limit: int = 100,
    group_by: Optional[List[str]] = None,
    models: Optional[List[str]] = None,
    service_tiers: Optional[List[str]] = None,
    context_window: Optional[List[str]] = None,
    api_key_ids: Optional[List[str]] = None,
    workspace_ids: Optional[List[str]] = None,
) -> Iterator[Dict[str, Any]]:
    """Yield flattened usage records from the messages usage-report endpoint.

    Args:
        api_key: Anthropic Admin API key
        starting_at: Start date in ISO 8601 format
        ending_at: End date in ISO 8601 format
        bucket_width: Bucket width (1m, 1h, 1d)
        limit: Number of results per page (capped per bucket width)
        group_by: Fields to group by
        models: Filter by models
        service_tiers: Filter by service tiers
        context_window: Filter by context window
        api_key_ids: Filter by API key IDs
        workspace_ids: Filter by workspace IDs

    Yields:
        Flattened usage records (see ``flatten_usage_report_record``)
    """
    # The page-size cap depends on the bucket granularity.
    if bucket_width == "1d":
        cap = 31
    elif bucket_width == "1h":
        cap = 168
    else:
        cap = 1440

    query: Dict[str, Any] = {
        "starting_at": starting_at,
        "ending_at": ending_at,
        "bucket_width": bucket_width,
        "limit": min(limit, cap),
    }

    def add_indexed(name: str, values: Optional[List[str]]) -> None:
        # Encode a list filter as name[0], name[1], ... query parameters.
        if values:
            for idx, value in enumerate(values):
                query[f"{name}[{idx}]"] = value

    add_indexed("group_by", group_by)
    add_indexed("models", models)
    add_indexed("service_tiers", service_tiers)
    add_indexed("context_window", context_window)
    add_indexed("api_key_ids", api_key_ids)
    add_indexed("workspace_ids", workspace_ids)

    yield from AnthropicClient(api_key).fetch_paginated(
        "organizations/usage_report/messages",
        params=query,
        flatten_func=flatten_usage_report_record,
        handle_404=True,
    )
|
|
361
|
+
|
|
362
|
+
|
|
363
|
+
def fetch_cost_report(
    api_key: str,
    starting_at: str,
    ending_at: str,
    group_by: Optional[List[str]] = None,
    workspace_ids: Optional[List[str]] = None,
    limit: int = 31,
) -> Iterator[Dict[str, Any]]:
    """Yield flattened cost-report records.

    Args:
        api_key: Anthropic Admin API key
        starting_at: Start date in ISO 8601 format
        ending_at: End date in ISO 8601 format
        group_by: Fields to group by
        workspace_ids: Filter by workspace IDs
        limit: Number of results per page (max 31)

    Yields:
        Cost records with nullable fields defaulted
    """
    query: Dict[str, Any] = {
        "starting_at": starting_at,
        "ending_at": ending_at,
        # The cost-report endpoint caps pages at 31 buckets.
        "limit": limit if limit < 31 else 31,
    }

    # Encode list filters as indexed query parameters.
    for idx, field in enumerate(group_by or []):
        query[f"group_by[{idx}]"] = field
    for idx, ws in enumerate(workspace_ids or []):
        query[f"workspace_ids[{idx}]"] = ws

    def flatten_cost_record(record: Dict[str, Any]) -> Dict[str, Any]:
        """Flatten cost record with defaults for nullable fields."""
        return {
            "bucket": record.get("bucket", ""),
            "workspace_id": record.get("workspace_id", ""),
            "description": record.get("description", ""),
            "amount_cents": record.get("amount_cents", 0),
        }

    yield from AnthropicClient(api_key).fetch_paginated(
        "organizations/cost_report",
        params=query,
        flatten_func=flatten_cost_record,
        handle_404=True,
    )
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
def fetch_organization_info(api_key: str) -> Optional[Dict[str, Any]]:
    """Fetch the organization record for the key's organization.

    Args:
        api_key: Anthropic Admin API key

    Returns:
        Organization information, or ``None`` when not found
    """
    admin_client = AnthropicClient(api_key)
    return admin_client.fetch_single("organizations/me")
|
|
431
|
+
|
|
432
|
+
|
|
433
|
+
def fetch_workspaces(api_key: str, limit: int = 100) -> Iterator[Dict[str, Any]]:
    """Yield every workspace in the organization.

    Args:
        api_key: Anthropic Admin API key
        limit: Page size; capped at the API maximum of 100

    Yields:
        Workspace records
    """
    page_size = limit if limit < 100 else 100
    yield from AnthropicClient(api_key).fetch_paginated(
        "workspaces", params={"limit": page_size}
    )
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
def fetch_api_keys(api_key: str, limit: int = 100) -> Iterator[Dict[str, Any]]:
    """Yield every API key in the organization.

    Args:
        api_key: Anthropic Admin API key
        limit: Page size; capped at the API maximum of 100

    Yields:
        API key records
    """
    page_size = limit if limit < 100 else 100
    yield from AnthropicClient(api_key).fetch_paginated(
        "api_keys", params={"limit": page_size}
    )
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
def fetch_invites(api_key: str, limit: int = 100) -> Iterator[Dict[str, Any]]:
    """Yield all pending organization invites.

    Args:
        api_key: Anthropic Admin API key
        limit: Page size; capped at the API maximum of 100

    Yields:
        Invite records
    """
    page_size = limit if limit < 100 else 100
    yield from AnthropicClient(api_key).fetch_paginated(
        "invites", params={"limit": page_size}
    )
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
def fetch_users(api_key: str, limit: int = 100) -> Iterator[Dict[str, Any]]:
    """Yield every user in the organization.

    Args:
        api_key: Anthropic Admin API key
        limit: Page size; capped at the API maximum of 100

    Yields:
        User records
    """
    page_size = limit if limit < 100 else 100
    yield from AnthropicClient(api_key).fetch_paginated(
        "users", params={"limit": page_size}
    )
|
|
503
|
+
|
|
504
|
+
|
|
505
|
+
def fetch_workspace_members(
    api_key: str, workspace_id: Optional[str] = None, limit: int = 100
) -> Iterator[Dict[str, Any]]:
    """Yield workspace membership records.

    Args:
        api_key: Anthropic Admin API key
        workspace_id: When given, restrict results to this workspace
        limit: Page size; capped at the API maximum of 100

    Yields:
        Workspace member records
    """
    query: Dict[str, Any] = {"limit": limit if limit < 100 else 100}
    if workspace_id:
        query["workspace_id"] = workspace_id

    yield from AnthropicClient(api_key).fetch_paginated(
        "workspace_members", params=query
    )
|
|
@@ -68,7 +68,6 @@ def applovin_max_source(
|
|
|
68
68
|
|
|
69
69
|
def create_client() -> requests.Session:
|
|
70
70
|
return Client(
|
|
71
|
-
request_timeout=10.0,
|
|
72
71
|
raise_for_status=False,
|
|
73
72
|
retry_condition=retry_on_limit,
|
|
74
73
|
request_max_attempts=12,
|
|
@@ -105,11 +104,14 @@ def get_data(
|
|
|
105
104
|
if response.status_code == 404:
|
|
106
105
|
if "No Mediation App Id found for platform" in response.text:
|
|
107
106
|
return None
|
|
108
|
-
error_message =
|
|
107
|
+
error_message = (
|
|
108
|
+
f"AppLovin MAX API error (status {response.status_code}): {response.text}"
|
|
109
|
+
)
|
|
109
110
|
raise requests.HTTPError(error_message)
|
|
110
|
-
|
|
111
|
+
|
|
111
112
|
response_url = response.json().get("ad_revenue_report_url")
|
|
112
113
|
df = pd.read_csv(response_url)
|
|
113
114
|
df["Date"] = pd.to_datetime(df["Date"])
|
|
114
115
|
df["partition_date"] = df["Date"].dt.date
|
|
115
|
-
|
|
116
|
+
df["platform"] = platform
|
|
117
|
+
return df
|