vanda-api 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vanda/__init__.py +25 -0
- vanda/async_client.py +614 -0
- vanda/auth.py +33 -0
- vanda/client.py +800 -0
- vanda/errors.py +61 -0
- vanda/models.py +52 -0
- vanda/retry.py +123 -0
- vanda/utils/__init__.py +9 -0
- vanda/utils/dates.py +69 -0
- vanda/utils/io.py +89 -0
- vanda/utils/normalize.py +53 -0
- vanda_api-0.1.0.dist-info/METADATA +175 -0
- vanda_api-0.1.0.dist-info/RECORD +15 -0
- vanda_api-0.1.0.dist-info/WHEEL +4 -0
- vanda_api-0.1.0.dist-info/licenses/LICENSE +21 -0
vanda/__init__.py
ADDED
@@ -0,0 +1,25 @@
+from vanda.async_client import AsyncVandaClient
+from vanda.client import VandaClient
+from vanda.errors import (
+    AuthError,
+    NotFoundError,
+    RateLimitError,
+    ServerError,
+    TransportError,
+    ValidationError,
+    VandaError,
+)
+
+__version__ = "0.1.0"
+
+__all__ = [
+    "VandaClient",
+    "AsyncVandaClient",
+    "VandaError",
+    "AuthError",
+    "RateLimitError",
+    "NotFoundError",
+    "ValidationError",
+    "ServerError",
+    "TransportError",
+]
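For context, a minimal consumer sketch of the public API re-exported here. It is hypothetical, not part of the package diff: it assumes the synchronous VandaClient mirrors the async get_timeseries signature shown below, and that RateLimitError carries a retry_after attribute and, like the other exceptions, derives from VandaError (errors.py is not expanded in this diff).

# Hypothetical consumer code; see caveats above.
from vanda import RateLimitError, VandaClient, VandaError

client = VandaClient(token="YOUR_TOKEN_HERE")
try:
    rows = client.get_timeseries(
        "TSLA", "2025-12-01", "2025-12-31", ["retail_net_turnover"]
    )
except RateLimitError as exc:
    print("throttled, retry after:", exc.retry_after)  # assumed attribute
except VandaError as exc:  # assumed base class of the other errors
    print("API error:", exc)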
vanda/async_client.py
ADDED
@@ -0,0 +1,614 @@
+import asyncio
+import logging
+import time
+from datetime import date, datetime
+from typing import Any, Optional, Union
+
+import httpx
+
+from vanda.auth import Auth
+from vanda.errors import (
+    AuthError,
+    NotFoundError,
+    RateLimitError,
+    ServerError,
+    TransportError,
+    ValidationError,
+    VandaError,
+)
+from vanda.models import CompressionType, ExportFormat, StreamFormat
+from vanda.utils.dates import format_date, validate_date_range
+from vanda.utils.io import write_stream_to_file
+from vanda.utils.normalize import normalize_result
+
+logger = logging.getLogger(__name__)
+
+
+class AsyncVandaClient:
+    """
+    Asynchronous client for Vanda Analytics API.
+
+    Example:
+        async with AsyncVandaClient(token="YOUR_TOKEN_HERE") as client:
+            data = await client.get_timeseries("TSLA", "2025-12-01", "2025-12-31", ["retail_net_turnover"])
+    """
+
+    def __init__(
+        self,
+        token: Optional[str] = None,
+        base_url: str = "https://stg.api.vanda-analytics.com",
+        timeout: float = 600.0,
+        max_retries: int = 3,
+    ) -> None:
+        """
+        Initialize async Vanda client.
+
+        Args:
+            token: API token. If None, reads from VANDA_API_TOKEN environment variable.
+            base_url: API base URL.
+            timeout: Request timeout in seconds.
+            max_retries: Maximum retry attempts.
+
+        Raises:
+            AuthError: If token is not provided.
+        """
+        self.auth = Auth(token)
+        self.base_url = base_url.rstrip("/")
+        self.timeout = timeout
+        self.max_retries = max_retries
+        self._client: Optional[httpx.AsyncClient] = None
+
+    async def __aenter__(self) -> "AsyncVandaClient":
+        """Async context manager entry."""
+        self._client = httpx.AsyncClient(
+            base_url=self.base_url,
+            headers=self.auth.get_headers(),
+            timeout=self.timeout,
+        )
+        return self
+
+    async def __aexit__(self, *args: Any) -> None:
+        """Async context manager exit."""
+        if self._client:
+            await self._client.aclose()
+            self._client = None
+
+    @property
+    def client(self) -> httpx.AsyncClient:
+        """Get HTTP client, creating if necessary."""
+        if self._client is None:
+            self._client = httpx.AsyncClient(
+                base_url=self.base_url,
+                headers=self.auth.get_headers(),
+                timeout=self.timeout,
+            )
+        return self._client
+
+    async def close(self) -> None:
+        """Close HTTP client."""
+        if self._client:
+            await self._client.aclose()
+            self._client = None
+
+    def _handle_error(self, response: httpx.Response) -> None:
+        """Handle HTTP error responses."""
+        status_code = response.status_code
+        request_id = response.headers.get("x-request-id")
+
+        try:
+            body = response.json()
+            detail = body.get("detail", response.text)
+        except Exception:
+            detail = response.text
+            body = None
+
+        if status_code == 401 or status_code == 403:
+            raise AuthError(
+                f"Authentication failed: {detail}",
+                status_code=status_code,
+                request_id=request_id,
+                response_body=body,
+            )
+        elif status_code == 404:
+            raise NotFoundError(
+                f"Resource not found: {detail}",
+                status_code=status_code,
+                request_id=request_id,
+                response_body=body,
+            )
+        elif status_code == 422:
+            raise ValidationError(
+                f"Validation error: {detail}",
+                status_code=status_code,
+                request_id=request_id,
+                response_body=body,
+            )
+        elif status_code == 429:
+            retry_after = response.headers.get("retry-after")
+            retry_after_int = int(retry_after) if retry_after else None
+            raise RateLimitError(
+                f"Rate limit exceeded: {detail}",
+                retry_after=retry_after_int,
+                status_code=status_code,
+                request_id=request_id,
+                response_body=body,
+            )
+        elif status_code >= 500:
+            raise ServerError(
+                f"Server error: {detail}",
+                status_code=status_code,
+                request_id=request_id,
+                response_body=body,
+            )
+        else:
+            raise VandaError(
+                f"HTTP {status_code}: {detail}",
+                status_code=status_code,
+                request_id=request_id,
+                response_body=body,
+            )
+
+    async def _request(
+        self,
+        method: str,
+        path: str,
+        params: Optional[dict[str, Any]] = None,
+        json: Optional[dict[str, Any]] = None,
+    ) -> Any:
+        """Make async HTTP request with retry logic."""
+        start_time = time.time()
+
+        for attempt in range(self.max_retries + 1):
+            try:
+                logger.debug(
+                    "http_request method=%s path=%s attempt=%d headers=%s",
+                    method,
+                    path,
+                    attempt,
+                    self.auth.get_headers_safe(),
+                )
+                response = await self.client.request(method, path, params=params, json=json)
+                response.raise_for_status()
+                latency = time.time() - start_time
+                logger.info(
+                    "http_success method=%s path=%s status=%d latency=%.3f attempts=%d",
+                    method,
+                    path,
+                    response.status_code,
+                    latency,
+                    attempt + 1,
+                )
+                return response.json()
+            except httpx.HTTPStatusError as e:
+                if (
+                    e.response.status_code not in {429, 500, 502, 503, 504}
+                    or attempt == self.max_retries
+                ):
+                    latency = time.time() - start_time
+                    logger.error(
+                        "http_error method=%s path=%s status=%d latency=%.3f",
+                        method,
+                        path,
+                        e.response.status_code,
+                        latency,
+                    )
+                    self._handle_error(e.response)
+                delay = min(1.0 * (2**attempt), 32.0)
+                logger.warning(
+                    "http_retry method=%s path=%s status=%d retry=%d delay=%.2f",
+                    method,
+                    path,
+                    e.response.status_code,
+                    attempt + 1,
+                    delay,
+                )
+                await asyncio.sleep(delay)
+            except (httpx.TimeoutException, httpx.NetworkError) as e:
+                if attempt == self.max_retries:
+                    raise TransportError(f"Network error: {e}") from e
+                delay = min(1.0 * (2**attempt), 32.0)
+                logger.warning(
+                    "network_retry method=%s path=%s retry=%d delay=%.2f",
+                    method,
+                    path,
+                    attempt + 1,
+                    delay,
+                )
+                await asyncio.sleep(delay)
+
+        raise RuntimeError("Retry logic failed unexpectedly")
+
+    async def _request_stream(
+        self,
+        method: str,
+        path: str,
+        params: Optional[dict[str, Any]] = None,
+    ) -> bytes:
+        """Make async streaming HTTP request."""
+        start_time = time.time()
+
+        for attempt in range(self.max_retries + 1):
+            try:
+                response = await self.client.request(method, path, params=params)
+                response.raise_for_status()
+                latency = time.time() - start_time
+                logger.info(
+                    "http_stream_success method=%s path=%s status=%d latency=%.3f size_kb=%.2f",
+                    method,
+                    path,
+                    response.status_code,
+                    latency,
+                    len(response.content) / 1024,
+                )
+                return response.content
+            except httpx.HTTPStatusError as e:
+                if (
+                    e.response.status_code not in {429, 500, 502, 503, 504}
+                    or attempt == self.max_retries
+                ):
+                    latency = time.time() - start_time
+                    logger.error(
+                        "http_error method=%s path=%s status=%d latency=%.3f",
+                        method,
+                        path,
+                        e.response.status_code,
+                        latency,
+                    )
+                    self._handle_error(e.response)
+                delay = min(1.0 * (2**attempt), 32.0)
+                await asyncio.sleep(delay)
+            except (httpx.TimeoutException, httpx.NetworkError) as e:
+                if attempt == self.max_retries:
+                    raise TransportError(f"Network error: {e}") from e
+                delay = min(1.0 * (2**attempt), 32.0)
+                await asyncio.sleep(delay)
+
+        raise RuntimeError("Retry logic failed unexpectedly")
+
+    async def get_timeseries(
+        self,
+        symbol: str,
+        start_date: Union[str, date, datetime],
+        end_date: Union[str, date, datetime],
+        fields: list[str],
+        interval: str = "1d",
+        asset_class: str = "cash",
+        records_per_page: int = 2000,
+        page_number: int = 1,
+        order: str = "asc",
+        calculate_metrics: bool = False,
+        options_type: str = "ALL",
+        options_notional: str = "ALL",
+        options_moneyness: str = "TOTAL",
+        options_size: str = "TOTAL",
+    ) -> list[dict[str, Any]]:
+        """Get timeseries data for a symbol."""
+        start_str = format_date(start_date)
+        end_str = format_date(end_date)
+        validate_date_range(start_str, end_str)
+
+        params: dict[str, Any] = {
+            "symbol": symbol,
+            "interval": interval,
+            "start_date": start_str,
+            "end_date": end_str,
+            "fields": fields,
+            "asset_class": asset_class,
+            "records_per_page": records_per_page,
+            "page_number": page_number,
+            "order": order,
+            "calculate_metrics": str(calculate_metrics).lower(),
+        }
+
+        if asset_class == "options":
+            params.update(
+                {
+                    "type": options_type,
+                    "notional": options_notional,
+                    "moneyness": options_moneyness,
+                    "size": options_size,
+                }
+            )
+
+        data = await self._request("GET", "/series/timeseries", params=params)
+
+        if "job_id" in data:
+            result = await self.poll_job(data["job_id"])
+            return normalize_result(result)
+
+        return normalize_result(data)
+
+    async def get_timeseries_many(
+        self,
+        symbols: list[str],
+        start_date: Union[str, date, datetime],
+        end_date: Union[str, date, datetime],
+        fields: list[str],
+        **kwargs: Any,
+    ) -> list[dict[str, Any]]:
+        """Get timeseries data for multiple symbols concurrently."""
+        tasks = [
+            self.get_timeseries(symbol, start_date, end_date, fields, **kwargs)
+            for symbol in symbols
+        ]
+        results = await asyncio.gather(*tasks)
+        all_records = []
+        for records in results:
+            all_records.extend(records)
+        return all_records
+
+    async def get_leaderboard(
+        self,
+        interval: str = "1d",
+        metric: str = "retail_net_turnover",
+        records_per_page: int = 2000,
+        page_number: int = 1,
+        asset_class: str = "cash",
+        start_date: Optional[Union[str, date, datetime]] = None,
+        end_date: Optional[Union[str, date, datetime]] = None,
+        date_filter: Optional[Union[str, date, datetime]] = None,
+        sector: Optional[str] = None,
+        options_type: str = "ALL",
+        options_notional: str = "ALL",
+        options_moneyness: str = "TOTAL",
+        options_size: str = "TOTAL",
+    ) -> list[dict[str, Any]]:
+        """Get leaderboard data."""
+        params: dict[str, Any] = {
+            "interval": interval,
+            "metric": metric,
+            "records_per_page": records_per_page,
+            "page_number": page_number,
+            "asset_class": asset_class,
+        }
+
+        if start_date:
+            params["start_date"] = format_date(start_date)
+        if end_date:
+            params["end_date"] = format_date(end_date)
+        if date_filter:
+            params["date"] = format_date(date_filter)
+        if sector:
+            params["sector"] = sector
+
+        if asset_class == "options":
+            params.update(
+                {
+                    "type": options_type,
+                    "notional": options_notional,
+                    "moneyness": options_moneyness,
+                    "size": options_size,
+                }
+            )
+
+        data = await self._request("GET", "/series/timeseries/leaderboard", params=params)
+        return normalize_result(data)
+
+    async def create_bulk_securities_job(
+        self,
+        interval: str = "1d",
+        asset_class: str = "cash",
+        start_date: Optional[Union[str, date, datetime]] = None,
+        end_date: Optional[Union[str, date, datetime]] = None,
+        fields: Optional[list[str]] = None,
+        identifiers: Optional[list[str]] = None,
+        all: bool = False,
+        options_type: str = "ALL",
+        options_notional: str = "ALL",
+        options_moneyness: str = "TOTAL",
+        options_size: str = "TOTAL",
+    ) -> str:
+        """Create bulk securities job."""
+        if not all and not identifiers:
+            raise ValidationError("Must provide identifiers or set all=True")
+
+        payload: dict[str, Any] = {
+            "interval": interval,
+            "asset_class": asset_class,
+        }
+
+        if all:
+            payload["all"] = True
+        elif identifiers:
+            payload["identifiers"] = identifiers
+
+        if start_date:
+            payload["start_date"] = format_date(start_date)
+        if end_date:
+            payload["end_date"] = format_date(end_date)
+        if fields:
+            payload["fields"] = fields
+
+        if asset_class == "options":
+            payload.update(
+                {
+                    "type": options_type,
+                    "notional": options_notional,
+                    "moneyness": options_moneyness,
+                    "size": options_size,
+                }
+            )
+
+        data = await self._request("POST", "/series/bulk/securities", json=payload)
+        return data["job_id"]
+
+    async def bulk_securities(
+        self,
+        wait: bool = True,
+        poll_interval: int = 5,
+        max_wait: int = 600,
+        **kwargs: Any,
+    ) -> Union[str, list[dict[str, Any]]]:
+        """Fetch bulk securities data."""
+        job_id = await self.create_bulk_securities_job(**kwargs)
+        if not wait:
+            return job_id
+        result = await self.poll_job(job_id, poll_interval=poll_interval, max_wait=max_wait)
+        return normalize_result(result)
+
+    async def get_daily_snapshot(
+        self,
+        interval: str = "1d",
+        asset_class: str = "cash",
+        limit: int = 2000,
+        date_filter: Optional[Union[str, date, datetime]] = None,
+        fields: Optional[list[str]] = None,
+        is_active: Optional[bool] = None,
+        sector: Optional[str] = None,
+        wait: bool = True,
+        poll_interval: int = 5,
+        max_wait: int = 600,
+        options_type: str = "ALL",
+        options_notional: str = "ALL",
+        options_moneyness: str = "TOTAL",
+        options_size: str = "TOTAL",
+    ) -> Union[str, list[dict[str, Any]]]:
+        """Get daily snapshot for many securities."""
+        payload: dict[str, Any] = {
+            "interval": interval,
+            "asset_class": asset_class,
+            "limit": limit,
+        }
+
+        if date_filter:
+            payload["date"] = format_date(date_filter)
+        if fields:
+            payload["fields"] = fields
+        if is_active is not None:
+            payload["is_active"] = is_active
+        if sector:
+            payload["sector"] = sector
+
+        if asset_class == "options":
+            payload.update(
+                {
+                    "type": options_type,
+                    "notional": options_notional,
+                    "moneyness": options_moneyness,
+                    "size": options_size,
+                }
+            )
+
+        data = await self._request("POST", "/series/bulk/daily/snapshot", json=payload)
+
+        if "job_id" in data:
+            job_id = data["job_id"]
+            if not wait:
+                return job_id
+            result = await self.poll_job(job_id, poll_interval=poll_interval, max_wait=max_wait)
+            return normalize_result(result)
+
+        return normalize_result(data)
+
+    async def get_job(self, job_id: str) -> dict[str, Any]:
+        """Get job status and result."""
+        return await self._request("GET", f"/series/jobs/{job_id}")
+
+    async def get_job_status(self, job_id: str) -> dict[str, Any]:
+        """Get job status only."""
+        return await self._request("GET", f"/series/jobs/{job_id}/status")
+
+    async def poll_job(
+        self, job_id: str, poll_interval: int = 5, max_wait: int = 600
+    ) -> dict[str, Any]:
+        """Poll job until completion."""
+        start_time = time.time()
+        while time.time() - start_time < max_wait:
+            status_data = await self.get_job(job_id)
+            status = status_data.get("status", "").upper()
+
+            logger.debug("job_poll job_id=%s status=%s", job_id, status)
+
+            if status == "COMPLETED":
+                logger.info("job_completed job_id=%s", job_id)
+                return status_data.get("result", {})
+            elif status == "FAILED":
+                error = status_data.get("error", "Unknown error")
+                logger.error("job_failed job_id=%s error=%s", job_id, error)
+                raise VandaError(f"Job failed: {error}")
+
+            await asyncio.sleep(poll_interval)
+
+        raise VandaError(f"Job polling timeout after {max_wait}s")
+
+    async def wait_for_job(self, job_id: str, **kwargs: Any) -> dict[str, Any]:
+        """Alias for poll_job."""
+        return await self.poll_job(job_id, **kwargs)
+
+    async def export_job_result(
+        self,
+        job_id: str,
+        export_format: ExportFormat = "csv",
+        compression: CompressionType = "none",
+        output_path: str = "",
+    ) -> None:
+        """Export job result to file."""
+        params = {"export_format": export_format, "compression": compression}
+        content = await self._request_stream("GET", f"/series/jobs/{job_id}/export", params=params)
+        write_stream_to_file(content, output_path)
+
+    async def stream_job_result(
+        self, job_id: str, format: StreamFormat = "ndjson", output_path: str = ""
+    ) -> None:
+        """Stream job result to file."""
+        params = {"format": format}
+        content = await self._request_stream("GET", f"/series/jobs/{job_id}/stream", params=params)
+        write_stream_to_file(content, output_path)
+
+    async def export_timeseries(
+        self,
+        symbol: Optional[str] = None,
+        vanda_id: Optional[str] = None,
+        interval: str = "1d",
+        start_date: Optional[Union[str, date, datetime]] = None,
+        end_date: Optional[Union[str, date, datetime]] = None,
+        export_format: ExportFormat = "csv",
+        compression: CompressionType = "none",
+        asset_class: str = "cash",
+        output_path: str = "",
+    ) -> None:
+        """Export timeseries data to file."""
+        if not symbol and not vanda_id:
+            raise ValidationError("Must provide either symbol or vanda_id")
+        if symbol and vanda_id:
+            raise ValidationError("Cannot provide both symbol and vanda_id")
+
+        params: dict[str, Any] = {
+            "interval": interval,
+            "export_format": export_format,
+            "compression": compression,
+            "asset_class": asset_class,
+        }
+
+        if symbol:
+            params["symbol"] = symbol
+        if vanda_id:
+            params["vanda_id"] = vanda_id
+        if start_date:
+            params["start_date"] = format_date(start_date)
+        if end_date:
+            params["end_date"] = format_date(end_date)
+
+        content = await self._request_stream("GET", "/series/timeseries/export", params=params)
+        write_stream_to_file(content, output_path)
+
+    async def list_fields(self, asset_class: str = "cash") -> dict[str, Any]:
+        """List available fields for asset class."""
+        params = {"asset_class": asset_class}
+        return await self._request("GET", "/series/timeseries/fields", params=params)
+
+    async def list_intervals(self) -> dict[str, Any]:
+        """List available time intervals."""
+        return await self._request("GET", "/series/timeseries/intervals")
+
+    async def list_securities(
+        self, asset_class: str = "cash", limit: int = 2000, is_active: bool = True
+    ) -> list[dict[str, Any]]:
+        """List available securities."""
+        params = {
+            "asset_class": asset_class,
+            "limit": limit,
+            "is_active": str(is_active).lower(),
+        }
+        data = await self._request("GET", "/series/timeseries/list", params=params)
+        return normalize_result(data)
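Both retry loops above (_request and _request_stream) sleep for min(1.0 * 2**attempt, 32.0) seconds between attempts, i.e. capped exponential backoff with no jitter. A standalone sketch of that schedule, to make the timing concrete (with the default max_retries=3, a persistently failing call sleeps 1s, 2s, then 4s before the final attempt raises):

# Reproduces the delay formula from _request; not part of the package.
def retry_delay(attempt: int, base: float = 1.0, cap: float = 32.0) -> float:
    """Capped exponential backoff, as used by the client (no jitter)."""
    return min(base * (2 ** attempt), cap)

print([retry_delay(a) for a in range(6)])  # [1.0, 2.0, 4.0, 8.0, 16.0, 32.0]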
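And a runnable end-to-end sketch built only from methods shown in this file; the symbols and fields are illustrative, VANDA_API_TOKEN must be set when token= is omitted, and it assumes (as flagged earlier) that the concrete exceptions subclass VandaError:

# Illustrative usage of AsyncVandaClient; values are placeholders.
import asyncio

from vanda import AsyncVandaClient, VandaError


async def main() -> None:
    async with AsyncVandaClient() as client:  # token read from VANDA_API_TOKEN
        try:
            # get_timeseries_many fans out one get_timeseries call per symbol
            # via asyncio.gather and concatenates the normalized records.
            records = await client.get_timeseries_many(
                ["TSLA", "AAPL"],
                "2025-12-01",
                "2025-12-31",
                ["retail_net_turnover"],
            )
        except VandaError as exc:  # raised after retries are exhausted
            print("request failed:", exc)
            return
        print(f"fetched {len(records)} records")


asyncio.run(main())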
vanda/auth.py
ADDED
@@ -0,0 +1,33 @@
+import os
+from typing import Optional
+
+from vanda.errors import AuthError
+
+
+class Auth:
+    """Manages authentication tokens."""
+
+    def __init__(self, token: Optional[str] = None, env_var: str = "VANDA_API_TOKEN") -> None:
+        """
+        Initialize authentication.
+
+        Args:
+            token: API token. If None, reads from environment variable.
+            env_var: Environment variable name to read token from.
+
+        Raises:
+            AuthError: If token is not provided and not found in environment.
+        """
+        self._token = token or os.getenv(env_var)
+        if not self._token:
+            raise AuthError(
+                f"API token not provided. Pass token= or set {env_var} environment variable."
+            )
+
+    def get_headers(self) -> dict[str, str]:
+        """Return authentication headers."""
+        return {"Authorization": f"Bearer {self._token}"}
+
+    def get_headers_safe(self) -> dict[str, str]:
+        """Return headers with token redacted (for logging)."""
+        return {"Authorization": "Bearer ***"}
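A quick sketch of Auth in isolation, exercising the fallback and redaction behavior defined above; the token value is a placeholder:

# Demonstrates auth.py behavior; not part of the package.
import os

from vanda.auth import Auth
from vanda.errors import AuthError

os.environ["VANDA_API_TOKEN"] = "example-token"  # placeholder value

auth = Auth()  # no explicit token: falls back to VANDA_API_TOKEN
print(auth.get_headers())       # {'Authorization': 'Bearer example-token'}
print(auth.get_headers_safe())  # {'Authorization': 'Bearer ***'}, safe to log

try:
    Auth(env_var="SOME_UNSET_VAR")  # neither token= nor env var present
except AuthError as exc:
    print(exc)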