datamule 1.8.3__py3-none-any.whl → 1.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,7 @@ import os
 import json
 import urllib.request
 import websocket
-
+import re
 
 class SecConnector:
     def __init__(self, api_key=None, quiet=False):
@@ -17,11 +17,12 @@ class SecConnector:
         if not self.quiet:
             print("Getting JWT token...")
 
-        url = f"{self.auth_url}?api_key={self.api_key}"
+        url = self.auth_url
 
-        req = urllib.request.Request(url)
-        req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36')
+        # Send API key in Authorization header instead of POST body
+        req = urllib.request.Request(url, method='GET')
         req.add_header('Accept', 'application/json')
+        req.add_header('Authorization', f'Bearer {self.api_key}')  # API key in header
 
         with urllib.request.urlopen(req) as response:
             data = json.loads(response.read().decode())
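The token request above now carries the API key in an Authorization: Bearer header rather than in the request URL, so the key no longer shows up in URLs or access logs. A minimal standalone sketch of the same request pattern, with a hypothetical endpoint and key standing in for self.auth_url and self.api_key:

import json
import urllib.request

auth_url = "https://auth.example.com/token"   # hypothetical; the real value is SecConnector.auth_url
api_key = "YOUR_DATAMULE_API_KEY"             # hypothetical placeholder

req = urllib.request.Request(auth_url, method='GET')
req.add_header('Accept', 'application/json')
req.add_header('Authorization', f'Bearer {api_key}')  # key travels in a header, not the URL

with urllib.request.urlopen(req) as response:
    payload = json.loads(response.read().decode())
# per the surrounding code, payload is expected to contain 'token' and 'websocket_ip'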
@@ -35,8 +36,8 @@ class SecConnector:
         return data['token'], data['websocket_ip']
 
     def connect(self, data_callback=None):
-        token,websocket_ip = self._get_jwt_token_and_ip()
-        ws_url = f"ws://{websocket_ip}/ws?token={token}"
+        token, websocket_ip = self._get_jwt_token_and_ip()
+        ws_url = f"ws://{websocket_ip}/ws"
 
         if not self.quiet:
             print("Connecting to WebSocket...")
@@ -51,22 +52,41 @@ class SecConnector:
             if not self.quiet:
                 print(f"Received data: {len(data)} items")
             if data_callback:
-                data_callback(data)  # Pass just the data array
+                data_callback(data)
 
         def on_error(ws, error):
             if not self.quiet:
-                print(f"WebSocket error: {error}")
+                sanitized_error = self._sanitize_error_message(str(error))
+                print(f"WebSocket error: {sanitized_error}")
 
         def on_close(ws, close_status_code, close_msg):
            if not self.quiet:
                print("WebSocket closed")
 
+        # Use Authorization header for WebSocket connection
+        headers = {'Authorization': f'Bearer {token}'}
+
         ws = websocket.WebSocketApp(
             ws_url,
+            header=headers,
             on_open=on_open,
             on_message=on_message,
             on_error=on_error,
             on_close=on_close
         )
 
-        ws.run_forever()
+        ws.run_forever()
+
+    def _sanitize_error_message(self, error_msg):
+        sensitive_patterns = [
+            r'Bearer\s+[A-Za-z0-9\-_\.]+',      # Bearer tokens
+            r'api_key[=:]\s*[A-Za-z0-9\-_]+',   # API key patterns
+            r'token[=:]\s*[A-Za-z0-9\-_\.]+',   # Token patterns
+            r'jwt[=:]\s*[A-Za-z0-9\-_\.]+',     # JWT patterns
+        ]
+
+        sanitized = error_msg
+        for pattern in sensitive_patterns:
+            sanitized = re.sub(pattern, '[REDACTED]', sanitized, flags=re.IGNORECASE)
+
+        return sanitized
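The new _sanitize_error_message helper keeps credentials out of console output. A standalone sketch of the same redaction logic (patterns copied from the diff above):

import re

SENSITIVE_PATTERNS = [
    r'Bearer\s+[A-Za-z0-9\-_\.]+',
    r'api_key[=:]\s*[A-Za-z0-9\-_]+',
    r'token[=:]\s*[A-Za-z0-9\-_\.]+',
    r'jwt[=:]\s*[A-Za-z0-9\-_\.]+',
]

def sanitize(message: str) -> str:
    # Replace anything that looks like a credential with a placeholder.
    for pattern in SENSITIVE_PATTERNS:
        message = re.sub(pattern, '[REDACTED]', message, flags=re.IGNORECASE)
    return message

print(sanitize("handshake rejected: Bearer eyJhbGciOiJIUzI1NiJ9.payload.sig"))
# -> handshake rejected: [REDACTED]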
@@ -110,7 +110,6 @@ class Document:
         if type =='inline':
             if self.extension not in ['.htm','.html']:
                 return
-
             self.xbrl = parse_inline_xbrl(self.content)
         else:
             raise ValueError("Only inline has been implemented so far.")
datamule/portfolio.py CHANGED
@@ -13,11 +13,12 @@ from .seclibrary.downloader import download as seclibrary_download
 from .sec.xbrl.filter_xbrl import filter_xbrl
 from .sec.submissions.monitor import Monitor
 from .portfolio_compression_utils import CompressionManager
-#from .sec.xbrl.xbrlmonitor import XBRLMonitor
 from .datamule.sec_connector import SecConnector
 from secsgml.utils import bytes_to_str, calculate_documents_locations_in_tar
 import json
 import io
+import shutil
+
 
 class Portfolio:
     def __init__(self, path):
@@ -210,7 +211,10 @@ class Portfolio:
         self.accession_numbers = new_accession_numbers
 
     def download_submissions(self, cik=None, ticker=None, submission_type=None, filing_date=None, provider=None,document_type=[],
-                             requests_per_second=5,keep_filtered_metadata=False,standardize_metadata=True,skip_existing=True, **kwargs):
+                             requests_per_second=5,keep_filtered_metadata=False,standardize_metadata=True,skip_existing=True,
+                             accession_numbers=None, **kwargs):
+
+
         if provider is None:
             config = Config()
             provider = config.get_default_source()
@@ -218,7 +222,8 @@ class Portfolio:
         # Process CIK and metadata filters
         cik = _process_cik_and_metadata_filters(cik, ticker, **kwargs)
 
-        accession_numbers = self.accession_numbers if hasattr(self, 'accession_numbers') else None
+        filtered_accession_numbers = self.accession_numbers if hasattr(self, 'accession_numbers') else None
+
         skip_accession_numbers = []
         if skip_existing:
             skip_accession_numbers = [sub.accession for sub in self]
@@ -231,20 +236,22 @@ class Portfolio:
                 api_key=self.api_key,
                 submission_type=submission_type,
                 filing_date=filing_date,
-                accession_numbers=accession_numbers,
+                filtered_accession_numbers=filtered_accession_numbers,
                 keep_document_types=document_type,
                 keep_filtered_metadata=keep_filtered_metadata,
                 standardize_metadata=standardize_metadata,
-                skip_accession_numbers=skip_accession_numbers
+                skip_accession_numbers=skip_accession_numbers,
+                accession_numbers = accession_numbers
             )
         else:
+            # will later add accession_numbers arg in the free update.
            sec_download(
                 output_dir=self.path,
                 cik=cik,
                 submission_type=submission_type,
                 filing_date=filing_date,
                 requests_per_second=requests_per_second,
-                accession_numbers=accession_numbers,
+                filtered_accession_numbers=filtered_accession_numbers,
                 keep_document_types=document_type,
                 keep_filtered_metadata=keep_filtered_metadata,
                 standardize_metadata=standardize_metadata,
@@ -286,4 +293,11 @@ class Portfolio:
             document_types = [document_types]
 
         for submission in self.submissions:
-            yield from submission.document_type(document_types)
+            yield from submission.document_type(document_types)
+
+    def delete(self):
+        self._close_batch_handles()
+        shutil.rmtree(self.path)
+
+        # reinit
+        self.__dict__.update(Portfolio(self.path).__dict__)
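download_submissions now accepts an explicit accession_numbers list alongside the portfolio-level filter, which is passed on internally as filtered_accession_numbers; per the comment in the diff, the explicit list is currently honored by the datamule provider, with the SEC path to follow. A usage sketch under assumed values (the directory and accession number are made up, and a DATAMULE_API_KEY is assumed to be configured):

from datamule import Portfolio

portfolio = Portfolio("my_filings")  # hypothetical portfolio directory

# New in 1.8.4: download exactly these filings, skipping the metadata lookup.
portfolio.download_submissions(
    provider="datamule",
    accession_numbers=["0000320193-24-000123"],  # illustrative accession number
)

# Also new: remove the portfolio directory and re-initialize the object in place.
# portfolio.delete()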
@@ -4,7 +4,7 @@ from secsgml import write_sgml_file_to_tar
 from tqdm import tqdm
 
 def download(cik=None, submission_type=None, filing_date=None, location=None, name=None,
-             requests_per_second=5, output_dir="filings", accession_numbers=None,
+             requests_per_second=5, output_dir="filings", filtered_accession_numbers=None,
              quiet=False, keep_document_types=[],keep_filtered_metadata=False,standardize_metadata=True,
              skip_accession_numbers=[]):
     # Make sure output directory exists
@@ -29,7 +29,7 @@ def download(cik=None, submission_type=None, filing_date=None, location=None, na
         location=location,
         requests_per_second=requests_per_second,
         document_callback=callback_wrapper,
-        accession_numbers=accession_numbers,
+        filtered_accession_numbers=filtered_accession_numbers,
         skip_accession_numbers=skip_accession_numbers,
         quiet=quiet
     )
@@ -222,7 +222,7 @@ class Streamer(EFTSQuery):
         return results
 
 def stream(cik=None, submission_type=None, filing_date=None, location=None,
-           requests_per_second=5.0, document_callback=None, accession_numbers=None,skip_accession_numbers=[],
+           requests_per_second=5.0, document_callback=None, filtered_accession_numbers=None,skip_accession_numbers=[],
            quiet=False, name=None):
     """
     Stream EFTS results and download documents into memory.
@@ -253,14 +253,14 @@ def stream(cik=None, submission_type=None, filing_date=None, location=None,
     """
 
     # Check if acc no is empty list
-    if accession_numbers == []:
+    if filtered_accession_numbers == []:
         raise ValueError("Applied filter resulted in empty accession numbers list")
 
     async def run_stream():
         streamer = Streamer(
             requests_per_second=requests_per_second,
             document_callback=document_callback,
-            accession_numbers=accession_numbers,
+            accession_numbers=filtered_accession_numbers,
             skip_accession_numbers=skip_accession_numbers,
             quiet=quiet
         )
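For direct callers of stream, the keyword that carries a pre-filtered accession list is now filtered_accession_numbers; the Streamer still receives it under its old accession_numbers name internally. The empty-list guard is unchanged, so a small sketch of the failure mode (the check runs before any request is made):

from datamule.sec.submissions.streamer import stream

try:
    # An empty filter list still raises immediately, now under the new keyword.
    stream(cik="320193", submission_type="8-K", filtered_accession_numbers=[])
except ValueError as exc:
    print(exc)  # Applied filter resulted in empty accession numbers list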
datamule/sec/utils.py CHANGED
@@ -1,7 +1,7 @@
 import asyncio
 import time
 from collections import deque
-
+import os
 
 class RetryException(Exception):
     def __init__(self, url, retry_after=601): # SEC Rate limit is typically 10 minutes.
@@ -61,4 +61,9 @@ class RateMonitor:
 
         return round(requests_per_second, 1), round(mb_per_second, 2)
 
-headers = {'User-Agent': 'John Smith johnsmith@gmail.com'}
+
+user_agent = os.environ.get('DATAMULE_SEC_USER_AGENT')
+if user_agent is None:
+    user_agent = 'John Smith johnsmith@gmail.com'
+
+headers = {'User-Agent': user_agent}
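The shared SEC headers constant can now be overridden through the DATAMULE_SEC_USER_AGENT environment variable. Because it is evaluated at module import time, the variable has to be set before datamule.sec.utils is first imported; a small sketch:

import os

# Set the identity the SEC asks for before importing anything from datamule.
os.environ['DATAMULE_SEC_USER_AGENT'] = 'Jane Doe jane.doe@example.com'

from datamule.sec.utils import headers
print(headers)  # {'User-Agent': 'Jane Doe jane.doe@example.com'}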
@@ -18,6 +18,7 @@ from os import cpu_count
 from secsgml import parse_sgml_content_into_memory
 from secsgml.utils import bytes_to_str
 from .datamule_lookup import datamule_lookup
+from ..utils.format_accession import format_accession
 
 # Set up logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
@@ -268,7 +269,7 @@ class Downloader:
         tar_manager = self.TarManager(output_dir, num_tar_files, max_batch_size)
 
         try:
-            with tqdm(total=len(urls), desc="Processing files") as pbar:
+            with tqdm(total=len(urls), desc="Downloading files") as pbar:
                 semaphore = asyncio.Semaphore(self.MAX_CONCURRENT_DOWNLOADS)
                 decompression_pool = ThreadPoolExecutor(max_workers=self.MAX_DECOMPRESSION_WORKERS)
 
@@ -296,29 +297,35 @@ class Downloader:
         finally:
             tar_manager.close_all()
 
-    def download(self, submission_type=None, cik=None, filing_date=None, output_dir="downloads", accession_numbers=None, keep_document_types=[], keep_filtered_metadata=False, standardize_metadata=True,
-                 skip_accession_numbers=[], max_batch_size=1024*1024*1024):
+    def download(self, submission_type=None, cik=None, filing_date=None, output_dir="downloads", filtered_accession_numbers=None, keep_document_types=[], keep_filtered_metadata=False, standardize_metadata=True,
+                 skip_accession_numbers=[], max_batch_size=1024*1024*1024,accession_numbers=None):
         if self.api_key is None:
             raise ValueError("No API key found. Please set DATAMULE_API_KEY environment variable or provide api_key in constructor")
 
         logger.debug("Querying SEC filings...")
 
-        filings = datamule_lookup(cik=cik, submission_type=submission_type, filing_date=filing_date,
-                                  columns=['accessionNumber'], distinct=True, page_size=25000, quiet=False)
+        if not accession_numbers:
+            filings = datamule_lookup(cik=cik, submission_type=submission_type, filing_date=filing_date,
+                                      columns=['accessionNumber'], distinct=True, page_size=25000, quiet=False)
 
-        if accession_numbers:
-            accession_numbers = [str(int(item.replace('-',''))) for item in accession_numbers]
-            filings = [filing for filing in filings if filing['accessionNumber'] in accession_numbers]
-
-        if skip_accession_numbers:
-            skip_accession_numbers = [int(item.replace('-','')) for item in skip_accession_numbers]
-            filings = [filing for filing in filings if filing['accessionNumber'] not in skip_accession_numbers]
+            if filtered_accession_numbers:
+                filtered_accession_numbers = [str(int(item.replace('-',''))) for item in filtered_accession_numbers]
+                filings = [filing for filing in filings if filing['accessionNumber'] in filtered_accession_numbers]
+
+            if skip_accession_numbers:
+                skip_accession_numbers = [int(item.replace('-','')) for item in skip_accession_numbers]
+                filings = [filing for filing in filings if filing['accessionNumber'] not in skip_accession_numbers]
 
-        logger.debug(f"Generating URLs for {len(filings)} filings...")
-        urls = []
-        for item in filings:
-            url = f"{self.BASE_URL}{str(item['accessionNumber']).zfill(18)}.sgml"
-            urls.append(url)
+            logger.debug(f"Generating URLs for {len(filings)} filings...")
+            urls = []
+            for item in filings:
+                url = f"{self.BASE_URL}{str(item['accessionNumber']).zfill(18)}.sgml"
+                urls.append(url)
+        else:
+            urls = []
+            for accession in accession_numbers:
+                url = f"{self.BASE_URL}{format_accession(accession,'no-dash').zfill(18)}.sgml"
+                urls.append(url)
 
         if not urls:
             logger.warning("No submissions found matching the criteria")
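Both URL branches normalize the accession number into a dash-less, 18-digit string before appending ".sgml": the lookup branch zero-pads the integer form stored in the results, and the new explicit branch runs the input through format_accession(..., 'no-dash') first. A small illustration of the padding arithmetic (the base URL is hypothetical; the real value is Downloader.BASE_URL):

BASE_URL = "https://example.com/sgml/"             # hypothetical stand-in for Downloader.BASE_URL

accession = "0000320193-24-000123"                 # illustrative accession number
as_int = int(accession.replace('-', ''))           # lookup results store it this way: 32019324000123
print(f"{BASE_URL}{str(as_int).zfill(18)}.sgml")
# -> https://example.com/sgml/000032019324000123.sgml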
@@ -381,12 +388,12 @@ class Downloader:
         logger.debug(f"Processing speed: {len(urls)/elapsed_time:.2f} files/second")
 
 
-def download(submission_type=None, cik=None, filing_date=None, api_key=None, output_dir="downloads", accession_numbers=None, keep_document_types=[],keep_filtered_metadata=False,standardize_metadata=True,
-             skip_accession_numbers=[], max_batch_size=1024*1024*1024):
+def download(submission_type=None, cik=None, filing_date=None, api_key=None, output_dir="downloads", filtered_accession_numbers=None, keep_document_types=[],keep_filtered_metadata=False,standardize_metadata=True,
+             skip_accession_numbers=[], max_batch_size=1024*1024*1024,accession_numbers=None):
 
-    if accession_numbers:
-        accession_numbers = [int(str(x).replace('-', '')) for x in accession_numbers]
-    elif accession_numbers == []:
+    if filtered_accession_numbers:
+        filtered_accession_numbers = [int(str(x).replace('-', '')) for x in filtered_accession_numbers]
+    elif filtered_accession_numbers == []:
         raise ValueError("Applied filter resulted in empty accession numbers list")
     downloader = Downloader(api_key=api_key)
     downloader.download(
@@ -394,10 +401,11 @@ def download(submission_type=None, cik=None, filing_date=None, api_key=None, out
         cik=cik,
         filing_date=filing_date,
         output_dir=output_dir,
-        accession_numbers=accession_numbers,
+        filtered_accession_numbers=filtered_accession_numbers,
         keep_document_types=keep_document_types,
         keep_filtered_metadata=keep_filtered_metadata,
         standardize_metadata=standardize_metadata,
         skip_accession_numbers=skip_accession_numbers,
-        max_batch_size=max_batch_size
+        max_batch_size=max_batch_size,
+        accession_numbers=accession_numbers
     )
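The module-level download now exposes both parameters: filtered_accession_numbers still narrows the lookup results (and an empty list raises), while the new accession_numbers bypasses the lookup and fetches exactly the filings named. A usage sketch with made-up values:

from datamule.seclibrary.downloader import download

download(
    api_key="YOUR_DATAMULE_API_KEY",               # or rely on the DATAMULE_API_KEY env variable
    output_dir="downloads",
    accession_numbers=["0000320193-24-000123"],    # illustrative; skips the metadata lookup
)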
datamule/submission.py CHANGED
@@ -238,4 +238,6 @@ class Submission:
 
         for idx, doc in enumerate(self.metadata.content['documents']):
             if doc['type'] in document_types:
-                yield self._load_document_by_index(idx)
+                yield self._load_document_by_index(idx)
+
+
@@ -7,10 +7,9 @@ from concurrent.futures import ThreadPoolExecutor
 import threading
 from tqdm import tqdm
 import urllib.request
+from ..sec.utils import headers
 
-headers = {'User-Agent': 'John Smith johnsmith@gmail.com'}
-
-def process_file_batch(zip_file, filenames_batch):
+def process_file_batch(zip_file, filenames_batch, columns, mapping):
     """Process a batch of files from the zip archive"""
     batch_filings = []
 
@@ -33,19 +32,17 @@ def process_file_batch(zip_file, filenames_batch):
             else:
                 filings_data = submissions_dct['filings']['recent']
 
-            # Extract required data
-            accession_numbers = filings_data['accessionNumber']
-            filing_dates = filings_data['filingDate']
-            forms = filings_data['form']
-
+            # Extract required data using mapping
+            lst_lst = [filings_data[column] for column in columns]
+
             # Create filing records for this file
-            for j in range(len(accession_numbers)):
-                filing_record = {
-                    'accessionNumber': int(accession_numbers[j].replace('-','')),
-                    'filingDate': filing_dates[j],
-                    'submissionType': forms[j],
-                    'cik': cik
-                }
+            for j in range(len(filings_data['accessionNumber'])):
+                filing_record = {'cik': cik}
+
+                for i, column in enumerate(columns):
+                    mapped_key = mapping.get(column, column)
+                    filing_record[mapped_key] = lst_lst[i][j]
+
                 batch_filings.append(filing_record)
 
         except Exception as e:
@@ -54,24 +51,26 @@ def process_file_batch(zip_file, filenames_batch):
 
     return batch_filings
 
-def write_csv_chunk(output_path, filings_data, is_first_write, write_lock):
+def write_csv_chunk(output_path, filings_data, is_first_write, write_lock, fieldnames):
     """Thread-safe CSV writing with lock"""
     with write_lock:
         if is_first_write:
             with open(output_path, 'w', newline='') as csvfile:
-                fieldnames = ['accessionNumber', 'filingDate', 'submissionType', 'cik']
                 writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
                 writer.writeheader()
                 writer.writerows(filings_data)
         else:
             with open(output_path, 'a', newline='') as csvfile:
-                fieldnames = ['accessionNumber', 'filingDate', 'submissionType', 'cik']
                 writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
                 writer.writerows(filings_data)
 
-def construct_submissions_data(output_path, submissions_zip_path=None, max_workers=4, batch_size=100):
+def construct_submissions_data(output_path, submissions_zip_path=None, max_workers=4, batch_size=100,
+                               columns = ['accessionNumber', 'filingDate', 'form'], mapping = {'form': 'submissionType'}):
     """Creates a list of dicts of every accession number, with filing date, submission type, and ciks"""
-
+
+    # declare fieldnames
+    fieldnames = ['cik'] + [mapping.get(col, col) for col in columns]
+
 
     if submissions_zip_path is None:
         url = "https://www.sec.gov/Archives/edgar/daily-index/bulkdata/submissions.zip"
@@ -121,7 +120,7 @@ def construct_submissions_data(output_path, submissions_zip_path=None, max_worke
     with ThreadPoolExecutor(max_workers=max_workers) as executor:
         # Submit all batch jobs
         future_to_batch = {
-            executor.submit(process_file_batch, zip_file, batch): i
+            executor.submit(process_file_batch, zip_file, batch, columns, mapping): i
             for i, batch in enumerate(filename_batches)
         }
 
@@ -132,7 +131,7 @@ def construct_submissions_data(output_path, submissions_zip_path=None, max_worke
                 batch_filings = future.result()
 
                 if batch_filings:  # Only write if we have data
-                    write_csv_chunk(output_path, batch_filings, is_first_write, write_lock)
+                    write_csv_chunk(output_path, batch_filings, is_first_write, write_lock, fieldnames)
                     is_first_write = False
                     total_filings += len(batch_filings)
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamule
-Version: 1.8.3
+Version: 1.8.4
 Summary: Work with SEC submissions at scale.
 Home-page: https://github.com/john-friedman/datamule-python
 Author: John Friedman
@@ -3,15 +3,15 @@ datamule/config.py,sha256=Y--CVv7JcgrjJkMOSLrvm2S8B9ost6RMSkGviP-MKtg,883
 datamule/helper.py,sha256=KqhAmTMdvATEh3I-O4xLcAcrHB9zXQERBuwzue7zyQw,3674
 datamule/index.py,sha256=Rrcna9FJV-Oh_K6O2IuUEIDmtay_7UZ4l4jgKCi7A7I,2079
 datamule/package_updater.py,sha256=Z9zaa_y0Z5cknpRn8oPea3gg4kquFHfpfhduKKCZ6NU,958
-datamule/portfolio.py,sha256=AGbZqeofmv1mAttnaO2k0vubwMQ_ENkQOhyevWoO6wo,11847
+datamule/portfolio.py,sha256=dQrTkLBdmEc-gbxQFGxNEXUAlPquXczADUW7BbzRR_k,12221
 datamule/portfolio_compression_utils.py,sha256=8OPYEN5zAdV1FiTxgVN3S7cTKs99Elv74bwgoIJP4QY,12654
 datamule/sheet.py,sha256=V5iR9_LkuwTFxfHCfzgadO6qgB6qOhzWiCAED-y8ZJQ,22744
-datamule/submission.py,sha256=7a0FgBmWNd1BUaQQR9pL9xXGFjbn5ruzArGMNHZqQoU,11204
+datamule/submission.py,sha256=G2Y93VtvbEEokyL5ixemKrs-Gd2Q-GarOx3RHqQyAqk,11207
 datamule/data/listed_filer_metadata.csv,sha256=dT9fQ8AC5P1-Udf_UF0ZkdXJ88jNxJb_tuhi5YYL1rc,2426827
 datamule/datamule/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamule/datamule/sec_connector.py,sha256=qCDsOgSFtfp-uz-APJjX4YrRoIGnnX-xHCL_JjLmRxk,2387
+datamule/datamule/sec_connector.py,sha256=pNuXnkamW7TFC-9bzkWciM-IdRO8dvuWEFZwRFfUoo8,3186
 datamule/document/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamule/document/document.py,sha256=YGo-Iz_sBXekUeKEAoNJV0BiLDtSOgD9OXFo2FocYq8,14439
+datamule/document/document.py,sha256=5h_tCO82ZBpGAuGhTgY63OIk-db_3q4RlRWZMwtAxxg,14426
 datamule/document/processing.py,sha256=jDCEzBFDSQtq7nQxRScIsbALnFcvMPOkNkMUCa7mFxg,31921
 datamule/document/table.py,sha256=73yUJKY82ap32jhLmZeTti-jQ_lyhcJGlGwyxLtgYOg,12944
 datamule/document/mappings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -43,14 +43,14 @@ datamule/mapping_dicts/html_mapping_dicts.py,sha256=G2PWB__FNg4VH9iFJFkflM0u-qOE
 datamule/mapping_dicts/txt_mapping_dicts.py,sha256=DQPrGYbAPQxomRUtt4iiMGrwuF7BHc_LeFBQuYBzU9o,6311
 datamule/mapping_dicts/xml_mapping_dicts.py,sha256=Z22yDVwKYonUfM5foQP00dVDE8EHhhMKp0CLqVKV5OI,438
 datamule/sec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamule/sec/utils.py,sha256=JUxwijJiqRMnRJNQzVUamyF5h9ZGc7RnO_zsLOIM73g,2079
+datamule/sec/utils.py,sha256=96bavyG2Kq1t8L1YA2vwYnAHKIKdRSoVXxBO5QH1HWo,2196
 datamule/sec/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamule/sec/infrastructure/submissions_metadata.py,sha256=f1KarzFSryKm0EV8DCDNsBw5Jv0Tx5aljiGUJkk7DRk,18745
 datamule/sec/submissions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamule/sec/submissions/downloader.py,sha256=zGS0oJJI8tVF_GnVpZm20MymdYxnjrEjQioSVggw7Ck,1486
+datamule/sec/submissions/downloader.py,sha256=9Po1eQ6YEj3Yo9Qw_M5PjQM-OR8iocTNjPIyO3O8GMs,1513
 datamule/sec/submissions/eftsquery.py,sha256=mSZon8rlW8dxma7M49ZW5V02Fn-ENOdt9TNO6elBrhE,27983
 datamule/sec/submissions/monitor.py,sha256=6mE0NZFdPId69t4V53GwBb9sqtRN7HE54sU3WpU0bnY,11900
-datamule/sec/submissions/streamer.py,sha256=Qydj40CmWB_wsPv2dibefRohmCokegG2pR7iZ9C3xLQ,11584
+datamule/sec/submissions/streamer.py,sha256=A6hunG_mOuBVqA9bBCXhNMcsPaZlhslA3WhopyUwdS4,11611
 datamule/sec/submissions/textsearch.py,sha256=MKDXEz_VI_0ljl73_aw2lx4MVzJW5uDt8KxjvJBwPwM,5794
 datamule/sec/xbrl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamule/sec/xbrl/downloadcompanyfacts.py,sha256=rMWRiCF9ci_gNZMJ9MC2c_PGEd-yEthawQ0CtVwWTjM,3323
@@ -60,12 +60,12 @@ datamule/sec/xbrl/xbrlmonitor.py,sha256=TKFVfSyyUUfUgFQw4WxEVs4g8Nh-2C0tygNIRmTq
 datamule/seclibrary/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamule/seclibrary/bq.py,sha256=C8sb_rpXTvchprrFLcbRar4Qi0XWW25tnv1YsHSS5o4,18025
 datamule/seclibrary/datamule_lookup.py,sha256=-xsATUVwm58Y1nNP287c-1pHB2uttngIiJ5Zy3DRi-s,9480
-datamule/seclibrary/downloader.py,sha256=6cPPddjXekOwlzsyratUqzpCSbvdaNyRCGjQXUtVoJU,17930
+datamule/seclibrary/downloader.py,sha256=XEWz-RA3kSeRDgk5e0-2WTEDPjjIzwBFayoDGE_xpyY,18483
 datamule/seclibrary/query.py,sha256=qGuursTERRbOGfoDcYcpo4oWkW3PCBW6x1Qf1Puiak4,7352
 datamule/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamule/utils/construct_submissions_data.py,sha256=aX7ZaAp3zXHLcv4TFk_rGwjb8r7yNDQDFVg4nPf60kM,5934
+datamule/utils/construct_submissions_data.py,sha256=NB_hvfxlRXPyt4Fgc-5qA8vJRItkLhBedCSTaxwW7Jg,5887
 datamule/utils/format_accession.py,sha256=60RtqoNqoT9zSKVb1DeOv1gncJxzPTFMNW4SNOVmC_g,476
-datamule-1.8.3.dist-info/METADATA,sha256=glHeaZURF8IlDB8mBf_UwYguyMazLZwoXqPc7eK9nug,524
-datamule-1.8.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-datamule-1.8.3.dist-info/top_level.txt,sha256=iOfgmtSMFVyr7JGl_bYSTDry79JbmsG4p8zKq89ktKk,9
-datamule-1.8.3.dist-info/RECORD,,
+datamule-1.8.4.dist-info/METADATA,sha256=BayymEG7W6g3c7uKwo2aaqO50hE64DXv789wZmaGr1c,524
+datamule-1.8.4.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+datamule-1.8.4.dist-info/top_level.txt,sha256=iOfgmtSMFVyr7JGl_bYSTDry79JbmsG4p8zKq89ktKk,9
+datamule-1.8.4.dist-info/RECORD,,