datamule 1.0.2__py3-none-any.whl → 1.0.6__py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registry.
- datamule/__init__.py +2 -13
- datamule/document.py +0 -1
- datamule/helper.py +85 -105
- datamule/portfolio.py +105 -29
- datamule/submission.py +0 -38
- {datamule-1.0.2.dist-info → datamule-1.0.6.dist-info}/METADATA +2 -8
- datamule-1.0.6.dist-info/RECORD +10 -0
- datamule/book/__init__.py +0 -0
- datamule/book/book.py +0 -34
- datamule/book/eftsquery.py +0 -127
- datamule/book/xbrl_retriever.py +0 -88
- datamule/data/company_former_names.csv +0 -8148
- datamule/data/company_metadata.csv +0 -10049
- datamule/data/company_tickers.csv +0 -9999
- datamule/data/sec-glossary.csv +0 -728
- datamule/data/xbrl_descriptions.csv +0 -10024
- datamule/downloader/downloader.py +0 -374
- datamule/downloader/premiumdownloader.py +0 -335
- datamule/mapping_dicts/txt_mapping_dicts.py +0 -232
- datamule/mapping_dicts/xml_mapping_dicts.py +0 -19
- datamule/monitor.py +0 -238
- datamule/mulebot/__init__.py +0 -1
- datamule/mulebot/helper.py +0 -35
- datamule/mulebot/mulebot.py +0 -130
- datamule/mulebot/mulebot_server/__init__.py +0 -1
- datamule/mulebot/mulebot_server/server.py +0 -87
- datamule/mulebot/mulebot_server/static/css/minimalist.css +0 -174
- datamule/mulebot/mulebot_server/static/scripts/artifacts.js +0 -68
- datamule/mulebot/mulebot_server/static/scripts/chat.js +0 -92
- datamule/mulebot/mulebot_server/static/scripts/filingArtifacts.js +0 -56
- datamule/mulebot/mulebot_server/static/scripts/listArtifacts.js +0 -15
- datamule/mulebot/mulebot_server/static/scripts/main.js +0 -57
- datamule/mulebot/mulebot_server/static/scripts/prefilledPrompt.js +0 -27
- datamule/mulebot/mulebot_server/static/scripts/suggestions.js +0 -47
- datamule/mulebot/mulebot_server/static/scripts/tableArtifacts.js +0 -129
- datamule/mulebot/mulebot_server/static/scripts/utils.js +0 -28
- datamule/mulebot/mulebot_server/templates/chat-minimalist.html +0 -91
- datamule/mulebot/search.py +0 -52
- datamule/mulebot/tools.py +0 -82
- datamule/packageupdater.py +0 -207
- datamule-1.0.2.dist-info/RECORD +0 -43
- {datamule-1.0.2.dist-info → datamule-1.0.6.dist-info}/WHEEL +0 -0
- {datamule-1.0.2.dist-info → datamule-1.0.6.dist-info}/top_level.txt +0 -0
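The bulk of this release is removal: the 1.0.2 `Downloader` and premium downloader, MuleBot and its web assets, the bundled CSV data files, the monitor, and the package updater are all gone, while `portfolio.py` (+105 -29) appears to absorb the download surface. The largest deleted file, `datamule/downloader/downloader.py` (374 lines), is diffed in full below. For orientation, here is a minimal usage sketch of the API being removed. The method names, parameters, and defaults are taken from the deleted source below; the ticker and dates are illustrative, and the import path assumes the module was imported directly rather than re-exported:

from datamule.downloader.downloader import Downloader  # module removed in 1.0.6

downloader = Downloader()

# Download and parse 10-K filings for one ticker over a date range.
# filing_date accepts a single "start,end" string, a (start, end) tuple,
# or a list of individual dates (see download_submissions in the diff below).
filepaths, parsed_data = downloader.download_submissions(
    output_dir='filings',
    ticker='AAPL',                              # resolved to a CIK internally
    submission_type='10-K',
    filing_date=('2020-01-01', '2023-12-31'),
    parse=True,                                 # parse SGML, drop the raw .txt
)

# Download XBRL company-concept JSON (companyfacts) for the same company.
concept_files = downloader.download_company_concepts(
    output_dir='company_concepts',
    ticker='AAPL',
)

The exact replacement API in 1.0.6 is not visible in this diff; only the summary line for `portfolio.py` hints at where it moved.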
--- a/datamule/downloader/downloader.py
+++ /dev/null
@@ -1,374 +0,0 @@
-import asyncio
-import aiohttp
-import os
-from tqdm import tqdm
-from datetime import datetime
-from urllib.parse import urlencode
-import aiofiles
-import json
-import time
-from collections import deque
-
-from ..helper import identifier_to_cik, load_package_csv, fix_filing_url, headers
-from secsgml import parse_sgml_submission
-
-class RetryException(Exception):
-    def __init__(self, url, retry_after=601):
-        self.url = url
-        self.retry_after = retry_after
-
-class PreciseRateLimiter:
-    def __init__(self, rate, interval=1.0):
-        self.rate = rate  # requests per interval
-        self.interval = interval  # in seconds
-        self.token_time = self.interval / self.rate  # time per token
-        self.last_time = time.time()
-        self.lock = asyncio.Lock()
-
-    async def acquire(self):
-        async with self.lock:
-            now = time.time()
-            wait_time = self.last_time + self.token_time - now
-            if wait_time > 0:
-                await asyncio.sleep(wait_time)
-            self.last_time = time.time()
-            return True
-
-    async def __aenter__(self):
-        await self.acquire()
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        pass
-
-class RateMonitor:
-    def __init__(self, window_size=1.0):
-        self.window_size = window_size
-        self.requests = deque()
-        self._lock = asyncio.Lock()
-
-    async def add_request(self, size_bytes):
-        async with self._lock:
-            now = time.time()
-            self.requests.append((now, size_bytes))
-            while self.requests and self.requests[0][0] < now - self.window_size:
-                self.requests.popleft()
-
-    def get_current_rates(self):
-        now = time.time()
-        while self.requests and self.requests[0][0] < now - self.window_size:
-            self.requests.popleft()
-
-        if not self.requests:
-            return 0, 0
-
-        request_count = len(self.requests)
-        byte_count = sum(size for _, size in self.requests)
-
-        requests_per_second = request_count / self.window_size
-        mb_per_second = (byte_count / 1024 / 1024) / self.window_size
-
-        return round(requests_per_second, 1), round(mb_per_second, 2)
-
-class Downloader:
-    def __init__(self):
-        self.headers = headers
-        self.limiter = PreciseRateLimiter(5)  # 5 requests per second
-        self.session = None
-        self.parse_filings = True
-        self.download_queue = asyncio.Queue()
-        self.rate_monitor = RateMonitor()
-        self.current_pbar = None
-        self.connection_semaphore = asyncio.Semaphore(5)
-
-    def update_progress_description(self):
-        if self.current_pbar:
-            reqs_per_sec, mb_per_sec = self.rate_monitor.get_current_rates()
-            self.current_pbar.set_description(
-                f"Progress [Rate: {reqs_per_sec}/s | {mb_per_sec} MB/s]"
-            )
-
-    async def __aenter__(self):
-        await self._init_session()
-        return self
-
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        await self._close()
-
-    async def _init_session(self):
-        if not self.session:
-            self.session = aiohttp.ClientSession(headers=self.headers)
-
-    async def _close(self):
-        if self.session:
-            await self.session.close()
-            self.session = None
-
-    async def _fetch_json(self, url):
-        """Fetch JSON with rate monitoring."""
-        async with self.limiter:
-            try:
-                url = fix_filing_url(url)
-                async with self.session.get(url) as response:
-                    if response.status == 429:
-                        raise RetryException(url)
-                    response.raise_for_status()
-                    content = await response.read()
-                    await self.rate_monitor.add_request(len(content))
-                    self.update_progress_description()
-                    return await response.json()
-            except aiohttp.ClientResponseError as e:
-                if e.status == 429:
-                    raise RetryException(url)
-                raise
-
-    async def _get_filing_urls_from_efts(self, base_url, submission_type=None):
-        """Fetch filing URLs from EFTS in batches with form type filtering."""
-        start = 0
-        page_size = 100
-        urls = []
-
-        data = await self._fetch_json(f"{base_url}&from=0&size=1")
-        if not data or 'hits' not in data:
-            return []
-
-        total_hits = data['hits']['total']['value']
-        if not total_hits:
-            return []
-
-        pbar = tqdm(total=total_hits, desc="Fetching URLs [Rate: 0/s | 0 MB/s]")
-        self.current_pbar = pbar
-
-        while start < total_hits:
-            try:
-                tasks = [
-                    self._fetch_json(f"{base_url}&from={start + i * page_size}&size={page_size}")
-                    for i in range(10)
-                ]
-
-                results = await asyncio.gather(*tasks)
-
-                for data in results:
-                    if data and 'hits' in data:
-                        hits = data['hits']['hits']
-                        if hits:
-                            # Filter hits based on exact form match
-                            if not submission_type or submission_type == "-0":
-                                filtered_hits = hits
-                            else:
-                                requested_forms = [submission_type] if isinstance(submission_type, str) else submission_type
-                                filtered_hits = [
-                                    hit for hit in hits
-                                    if hit['_source'].get('form', '') in requested_forms
-                                ]
-
-                            batch_urls = [
-                                f"https://www.sec.gov/Archives/edgar/data/{hit['_source']['ciks'][0]}/{hit['_id'].split(':')[0]}.txt"
-                                for hit in filtered_hits
-                            ]
-                            urls.extend(batch_urls)
-                            pbar.update(len(hits))  # Update progress based on total hits processed
-                            self.update_progress_description()
-
-                start += 10 * page_size
-
-            except RetryException as e:
-                print(f"\nRate limited. Sleeping for {e.retry_after} seconds...")
-                await asyncio.sleep(e.retry_after)
-                continue
-            except Exception as e:
-                print(f"\nError fetching URLs batch at {start}: {str(e)}")
-                break
-
-        pbar.close()
-        self.current_pbar = None
-        return urls
-
-    async def _download_file(self, url, filepath):
-        """Download single file with precise rate limiting."""
-        async with self.connection_semaphore:
-            async with self.limiter:
-                try:
-                    url = fix_filing_url(url)
-                    async with self.session.get(url) as response:
-                        if response.status == 429:
-                            raise RetryException(url)
-                        response.raise_for_status()
-                        content = await response.read()
-                        await self.rate_monitor.add_request(len(content))
-                        self.update_progress_description()
-
-                    parsed_data = None
-                    if self.parse_filings:
-                        try:
-                            os.makedirs(os.path.dirname(filepath), exist_ok=True)
-                            async with aiofiles.open(filepath, 'wb') as f:
-                                await f.write(content)
-
-                            parsed_data = parse_sgml_submission(
-                                content=content.decode(),
-                                output_dir=os.path.dirname(filepath)
-                            )
-
-                            try:
-                                os.remove(filepath)
-                            except Exception as e:
-                                print(f"\nError deleting original file {filepath}: {str(e)}")
-
-                        except Exception as e:
-                            print(f"\nError parsing {url}: {str(e)}")
-                            try:
-                                os.remove(filepath)
-                                parsed_dir = os.path.dirname(filepath) + f'/{url.split("/")[-1].split(".")[0].replace("-", "")}'
-                                if os.path.exists(parsed_dir):
-                                    import shutil
-                                    shutil.rmtree(parsed_dir)
-                            except Exception as e:
-                                print(f"\nError cleaning up files for {url}: {str(e)}")
-                    else:
-                        os.makedirs(os.path.dirname(filepath), exist_ok=True)
-                        async with aiofiles.open(filepath, 'wb') as f:
-                            await f.write(content)
-
-                    return filepath, parsed_data
-
-                except Exception as e:
-                    print(f"\nError downloading {url}: {str(e)}")
-                    return None
-
-    async def _download_worker(self, pbar):
-        """Worker to process download queue."""
-        while True:
-            try:
-                url, filepath = await self.download_queue.get()
-                result = await self._download_file(url, filepath)
-                if result:
-                    pbar.update(1)
-                self.download_queue.task_done()
-            except asyncio.CancelledError:
-                break
-            except Exception as e:
-                print(f"\nWorker error processing {url}: {str(e)}")
-                self.download_queue.task_done()
-
-    async def _download_and_process(self, urls, output_dir):
-        """Queue-based download processing."""
-        results = []
-        parsed_results = []
-
-        pbar = tqdm(total=len(urls), desc="Downloading files [Rate: 0/s | 0 MB/s]")
-        self.current_pbar = pbar
-
-        for url in urls:
-            filename = url.split('/')[-1]
-            filepath = os.path.join(output_dir, filename)
-            await self.download_queue.put((url, filepath))
-
-        workers = [asyncio.create_task(self._download_worker(pbar))
-                   for _ in range(5)]  # Match number of workers to semaphore
-
-        await self.download_queue.join()
-
-        for worker in workers:
-            worker.cancel()
-
-        await asyncio.gather(*workers, return_exceptions=True)
-
-        pbar.close()
-        self.current_pbar = None
-        return results, parsed_results
-
-    def download_submissions(self, output_dir='filings', cik=None, ticker=None, submission_type=None, filing_date=None, parse=True):
-        """Main method to download SEC filings."""
-        self.parse_filings = parse
-
-        async def _download():
-            async with self as downloader:
-                if ticker is not None:
-                    cik_value = identifier_to_cik(ticker)
-                else:
-                    cik_value = cik
-
-                params = {}
-                if cik_value:
-                    if isinstance(cik_value, list):
-                        params['ciks'] = ','.join(str(c).zfill(10) for c in cik_value)
-                    else:
-                        params['ciks'] = str(cik_value).zfill(10)
-
-                params['forms'] = ','.join(submission_type) if isinstance(submission_type, list) else submission_type if submission_type else "-0"
-
-                if isinstance(filing_date, list):
-                    dates = [(d, d) for d in filing_date]
-                elif isinstance(filing_date, tuple):
-                    dates = [filing_date]
-                else:
-                    date_str = filing_date if filing_date else f"2001-01-01,{datetime.now().strftime('%Y-%m-%d')}"
-                    start, end = date_str.split(',')
-                    dates = [(start, end)]
-
-                all_filepaths = []
-                all_parsed_data = []
-
-                for start_date, end_date in dates:
-                    params['startdt'] = start_date
-                    params['enddt'] = end_date
-                    base_url = "https://efts.sec.gov/LATEST/search-index"
-                    efts_url = f"{base_url}?{urlencode(params, doseq=True)}"
-
-                    urls = await self._get_filing_urls_from_efts(efts_url, submission_type)
-                    if urls:
-                        filepaths, parsed_data = await self._download_and_process(urls, output_dir)
-                        all_filepaths.extend(filepaths)
-                        all_parsed_data.extend(parsed_data)
-
-                return all_filepaths, all_parsed_data
-
-        return asyncio.run(_download())
-
-    def download_company_concepts(self, output_dir='company_concepts', cik=None, ticker=None):
-        """Download company concept data."""
-        async def _download_concepts():
-            async with self as downloader:
-                if ticker is not None:
-                    ciks = identifier_to_cik(ticker)
-                elif cik:
-                    ciks = [cik] if not isinstance(cik, list) else cik
-                else:
-                    company_tickers = load_package_csv('company_tickers')
-                    ciks = [company['cik'] for company in company_tickers]
-
-                os.makedirs(output_dir, exist_ok=True)
-                urls = [f'https://data.sec.gov/api/xbrl/companyfacts/CIK{str(cik).zfill(10)}.json' for cik in ciks]
-
-                pbar = tqdm(total=len(urls), desc="Downloading concepts [Rate: 0/s | 0 MB/s]")
-                self.current_pbar = pbar
-
-                for url in urls:
-                    filename = url.split('/')[-1]
-                    filepath = os.path.join(output_dir, filename)
-                    await self.download_queue.put((url, filepath))
-
-                workers = [asyncio.create_task(self._download_worker(pbar))
-                           for _ in range(5)]
-
-                await self.download_queue.join()
-
-                for worker in workers:
-                    worker.cancel()
-
-                await asyncio.gather(*workers, return_exceptions=True)
-
-                pbar.close()
-                self.current_pbar = None
-
-                results = []
-                for url in urls:
-                    filename = url.split('/')[-1]
-                    filepath = os.path.join(output_dir, filename)
-                    if os.path.exists(filepath):
-                        results.append(filepath)
-
-                return results
-
-        return asyncio.run(_download_concepts())