datamule 0.381__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. datamule/__init__.py +46 -86
  2. datamule/book/book.py +34 -0
  3. datamule/book/eftsquery.py +127 -0
  4. datamule/book/xbrl_retriever.py +88 -0
  5. datamule/config.py +29 -0
  6. datamule/data/company_former_names.csv +8148 -8148
  7. datamule/data/company_metadata.csv +10049 -10049
  8. datamule/data/company_tickers.csv +9999 -10168
  9. datamule/data/sec-glossary.csv +728 -728
  10. datamule/data/xbrl_descriptions.csv +10024 -10024
  11. datamule/document.py +279 -0
  12. datamule/downloader/downloader.py +374 -0
  13. datamule/downloader/premiumdownloader.py +335 -0
  14. datamule/helper.py +123 -136
  15. datamule/mapping_dicts/txt_mapping_dicts.py +232 -0
  16. datamule/mapping_dicts/xml_mapping_dicts.py +19 -0
  17. datamule/monitor.py +238 -0
  18. datamule/mulebot/__init__.py +1 -1
  19. datamule/mulebot/helper.py +34 -34
  20. datamule/mulebot/mulebot.py +129 -129
  21. datamule/mulebot/mulebot_server/server.py +86 -86
  22. datamule/mulebot/mulebot_server/static/css/minimalist.css +173 -173
  23. datamule/mulebot/mulebot_server/static/scripts/artifacts.js +67 -67
  24. datamule/mulebot/mulebot_server/static/scripts/chat.js +91 -91
  25. datamule/mulebot/mulebot_server/static/scripts/filingArtifacts.js +55 -55
  26. datamule/mulebot/mulebot_server/static/scripts/listArtifacts.js +14 -14
  27. datamule/mulebot/mulebot_server/static/scripts/main.js +56 -56
  28. datamule/mulebot/mulebot_server/static/scripts/prefilledPrompt.js +26 -26
  29. datamule/mulebot/mulebot_server/static/scripts/suggestions.js +46 -46
  30. datamule/mulebot/mulebot_server/static/scripts/tableArtifacts.js +128 -128
  31. datamule/mulebot/mulebot_server/static/scripts/utils.js +27 -27
  32. datamule/mulebot/mulebot_server/templates/chat-minimalist.html +90 -90
  33. datamule/mulebot/search.py +51 -51
  34. datamule/mulebot/tools.py +82 -82
  35. datamule/packageupdater.py +207 -0
  36. datamule/portfolio.py +106 -0
  37. datamule/submission.py +76 -0
  38. datamule-1.0.2.dist-info/METADATA +27 -0
  39. datamule-1.0.2.dist-info/RECORD +43 -0
  40. {datamule-0.381.dist-info → datamule-1.0.2.dist-info}/WHEEL +1 -1
  41. datamule/data/filing_types.csv +0 -485
  42. datamule/data/ftd_locations.csv +0 -388
  43. datamule/datamule_api.py +0 -21
  44. datamule/dataset_builder/_init.py +0 -1
  45. datamule/dataset_builder/dataset_builder.py +0 -260
  46. datamule/downloader/dropbox_downloader.py +0 -225
  47. datamule/downloader/ftd.py +0 -216
  48. datamule/downloader/information_table_13f.py +0 -231
  49. datamule/downloader/sec_downloader.py +0 -635
  50. datamule/filing_viewer/__init__.py +0 -1
  51. datamule/filing_viewer/filing_viewer.py +0 -256
  52. datamule/global_vars.py +0 -202
  53. datamule/parser/__init__.py +0 -1
  54. datamule/parser/basic_10k_parser.py +0 -82
  55. datamule/parser/basic_10q_parser.py +0 -73
  56. datamule/parser/basic_13d_parser.py +0 -58
  57. datamule/parser/basic_13g_parser.py +0 -61
  58. datamule/parser/basic_8k_parser.py +0 -84
  59. datamule/parser/company_concepts_parser.py +0 -0
  60. datamule/parser/form_d_parser.py +0 -70
  61. datamule/parser/generalized_item_parser.py +0 -78
  62. datamule/parser/generalized_xml_parser.py +0 -0
  63. datamule/parser/helper.py +0 -75
  64. datamule/parser/information_table_parser_13fhr.py +0 -41
  65. datamule/parser/insider_trading_parser.py +0 -158
  66. datamule/parser/mappings.py +0 -95
  67. datamule/parser/n_port_p_parser.py +0 -70
  68. datamule/parser/sec_parser.py +0 -79
  69. datamule/parser/sgml_parser.py +0 -180
  70. datamule/sec_filing.py +0 -126
  71. datamule/sec_search.py +0 -20
  72. datamule-0.381.dist-info/METADATA +0 -132
  73. datamule-0.381.dist-info/RECORD +0 -61
  74. /datamule/{downloader → book}/__init__.py +0 -0
  75. {datamule-0.381.dist-info → datamule-1.0.2.dist-info}/top_level.txt +0 -0
datamule/__init__.py CHANGED
@@ -1,87 +1,47 @@
- # datamule/__init__.py
- import sys
- from importlib.util import find_spec
- from functools import lru_cache
-
- # Lazy load nest_asyncio only when needed
- def _setup_jupyter():
-     """Setup Jupyter-specific configurations if needed."""
-     if _is_jupyter():
-         import nest_asyncio
-         nest_asyncio.apply()
-
- def _is_jupyter():
-     """Check if the code is running in a Jupyter environment."""
-     try:
-         shell = get_ipython().__class__.__name__
-         return shell == 'ZMQInteractiveShell'
-     except NameError:
-         return False
-
- # Lazy loading for main components
- @lru_cache(None)
- def get_downloader():
-     from .downloader.sec_downloader import Downloader
-     return Downloader
-
- @lru_cache(None)
- def get_parser():
-     from .parser.sec_parser import Parser
-     return Parser
-
- @lru_cache(None)
- def get_filing():
-     from .sec_filing import Filing
-     return Filing
-
- @lru_cache(None)
- def get_dataset_builder():
-     if find_spec('pandas') is not None:
-         try:
-             from .dataset_builder.dataset_builder import DatasetBuilder
-             return DatasetBuilder
-         except ImportError:
-             return None
-     return None
-
- # Helper functions that can be imported directly
- from .datamule_api import parse_textual_filing
- from .helper import load_package_csv, load_package_dataset
- from .global_vars import *
- from .parser.sgml_parser import parse_submission
-
- # Define classes with delayed initialization
- class Downloader:
-     def __new__(cls, *args, **kwargs):
-         return get_downloader()(*args, **kwargs)
-
- class Parser:
-     def __new__(cls, *args, **kwargs):
-         return get_parser()(*args, **kwargs)
-
- class Filing:
-     def __new__(cls, *args, **kwargs):
-         return get_filing()(*args, **kwargs)
-
- class DatasetBuilder:
-     def __new__(cls, *args, **kwargs):
-         builder_cls = get_dataset_builder()
-         if builder_cls is None:
-             raise ImportError(
-                 "DatasetBuilder requires pandas. "
-                 "Install with: pip install datamule[dataset_builder]"
-             )
-         return builder_cls(*args, **kwargs)
-
- # Set up Jupyter support only when imported
- _setup_jupyter()
-
- __all__ = [
-     'Downloader',
-     'parse_textual_filing',
-     'load_package_csv',
-     'load_package_dataset',
-     'Parser',
-     'Filing',
-     'DatasetBuilder'
+ from .downloader.downloader import Downloader
+ from .downloader.premiumdownloader import PremiumDownloader
+ from .monitor import Monitor
+ from .packageupdater import PackageUpdater
+ from .submission import Submission
+ from .portfolio import Portfolio
+ from .document import Document
+ from secsgml import parse_sgml_submission
+ from .helper import load_package_csv, load_package_dataset
+ from .config import Config
+
+
+ # Keep the notebook environment setup
+ def _is_notebook_env():
+     """Check if the code is running in a Jupyter or Colab environment."""
+     try:
+         shell = get_ipython().__class__.__name__
+         return shell in ('ZMQInteractiveShell', 'Shell', 'Google.Colab')
+     except NameError:
+         return False
+
+ from functools import lru_cache
+
+ @lru_cache(maxsize=1)
+ def _setup_notebook_env():
+     """Setup Jupyter/Colab-specific configurations if needed."""
+     if _is_notebook_env():
+         import nest_asyncio
+         nest_asyncio.apply()
+
+ # Set up notebook environment
+ _setup_notebook_env()
+
+ __all__ = [
+     'Downloader',
+     'PremiumDownloader',
+     'load_package_csv',
+     'load_package_dataset',
+     'Filing',
+     'Portfolio',
+     'Monitor',
+     'PackageUpdater',
+     'Submission',
+     'Document',
+     'parse_sgml_submission',
+     'Config'
  ]
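
For orientation: 1.0.2 drops the lazy wrapper classes and imports its entry points eagerly at the package root, so they can be pulled in with a plain import. A minimal sketch, not part of the diff, assuming only the names shown in the new __init__.py:

# Sketch of the flattened 1.0.2 import surface; constructor arguments are not
# documented in this diff, so only the imports themselves are shown.
from datamule import Downloader, PremiumDownloader, Portfolio, Monitor, Config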
datamule/book/book.py ADDED
@@ -0,0 +1,34 @@
+ # Streams data rather than downloading it.
+ # additional functionality such as query by xbrl, and other db
+ # also this is basically our experimental rework of portfolio w/o disturbing existing users
+ # this is highly experimental and may not work as expected
+ # only for datamule source
+ # likely new bottleneck will be local parsing() - will be bypassed in future when we have parsed archive
+ # wow parsed archive is going to be crazy fast - like every 10k in 1 minute.
+
+ # example queries filter by sic = 7372, xbrl query = dei:operatingprofit > 0 in date range 2018-2019
+
+ # hmm do we go for sql esq or not.
+ # I think we do.
+ # i think we remove cik, ticker, sic, etc and just have a query object
+ # should be sql esq so users can use it easily w/o learnign new syntax
+
+ # WHERE submission_type = '10-K'
+ # AND us-gaap:ResearchAndDevelopmentExpense > 0
+ # AND dei:debt_to_equity < 2
+ # AND filing_date BETWEEN '2023-01-01' AND '2023-12-31'
+ # AND CIK in (123, 456, 789)
+ # AND SIC in (123, 456, 789)
+ # AND ticker in ('AAPL', 'GOOGL', 'AMZN')
+ # AND document_type = 'EX-99.1' # to select attachments
+
+ from .eftsquery import EFTSQuery
+
+
+ class Book():
+     def process_submissions(self,cik,ticker,sic,submission_type,document_type,date,
+                             xbrl_query={},
+                             metadata_callback=None,
+                             document_callback=None,):
+         # grabs data and processes it
+         pass
datamule/book/eftsquery.py ADDED
@@ -0,0 +1,127 @@
+ import asyncio
+ import aiohttp
+ from tqdm import tqdm
+ from datetime import datetime
+ from urllib.parse import urlencode
+ import time
+
+ class PreciseRateLimiter:
+     def __init__(self, rate=10, interval=1.0):
+         self.rate = rate # requests per interval
+         self.interval = interval # in seconds
+         self.token_time = self.interval / self.rate # time per token
+         self.last_time = time.time()
+         self.lock = asyncio.Lock()
+
+     async def acquire(self):
+         async with self.lock:
+             now = time.time()
+             wait_time = self.last_time + self.token_time - now
+             if wait_time > 0:
+                 await asyncio.sleep(wait_time)
+             self.last_time = time.time()
+             return True
+
+ class EFTSQuery:
+     def __init__(self):
+         self.headers = {
+             'User-Agent': 'Your Name yourname@email.com',
+             'Accept-Encoding': 'gzip, deflate',
+             'Host': 'efts.sec.gov'
+         }
+         self.session = None
+         self.limiter = PreciseRateLimiter(10)
+
+     async def __aenter__(self):
+         if not self.session:
+             self.session = aiohttp.ClientSession(headers=self.headers)
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if self.session:
+             await self.session.close()
+             self.session = None
+
+     async def _fetch_json(self, url):
+         await self.limiter.acquire()
+         try:
+             async with self.session.get(url) as response:
+                 if response.status == 429:
+                     await asyncio.sleep(61)
+                     return await self._fetch_json(url)
+                 return await response.json()
+         except Exception as e:
+             print(f"Error fetching {url}: {str(e)}")
+             return None
+
+     async def _get_accession_numbers(self, base_url):
+         data = await self._fetch_json(f"{base_url}&from=0&size=1")
+         if not data or 'hits' not in data:
+             return []
+
+         total_hits = data['hits']['total']['value']
+         if not total_hits:
+             return []
+
+         accession_numbers = []
+         start = 0
+         page_size = 100
+         batch_size = 10 # Number of concurrent requests
+
+         with tqdm(total=total_hits) as pbar:
+             while start < total_hits:
+                 tasks = []
+                 for i in range(batch_size):
+                     if start + i * page_size >= total_hits:
+                         break
+                     url = f"{base_url}&from={start + i * page_size}&size={page_size}"
+                     tasks.append(self._fetch_json(url))
+
+                 if not tasks:
+                     break
+
+                 results = await asyncio.gather(*tasks)
+
+                 for data in results:
+                     if data and 'hits' in data:
+                         hits = data['hits']['hits']
+                         batch_numbers = [
+                             f"{hit['_source']['ciks'][0]}/{hit['_id'].split(':')[0]}"
+                             for hit in hits
+                         ]
+                         accession_numbers.extend(batch_numbers)
+                         pbar.update(len(hits))
+
+                 start += batch_size * page_size
+
+         return accession_numbers
+
+     def query_efts(self, cik=None, ticker=None, submission_type=None, filing_date=None, search_text=None):
+         async def _download():
+             async with self as downloader:
+                 params = {}
+
+                 if cik:
+                     params['ciks'] = str(cik).zfill(10)
+
+                 if submission_type:
+                     params['forms'] = ','.join(submission_type) if isinstance(submission_type, list) else submission_type
+
+                 if isinstance(filing_date, list):
+                     dates = [(d, d) for d in filing_date]
+                 elif isinstance(filing_date, tuple):
+                     dates = [filing_date]
+                 else:
+                     date_str = filing_date if filing_date else f"2001-01-01,{datetime.now().strftime('%Y-%m-%d')}"
+                     start, end = date_str.split(',')
+                     dates = [(start, end)]
+
+                 params['startdt'], params['enddt'] = dates[0]
+
+                 if search_text:
+                     params['q'] = f'"{search_text}"'
+
+                 base_url = f"https://efts.sec.gov/LATEST/search-index?{urlencode(params, doseq=True)}"
+                 return await self._get_accession_numbers(base_url)
+
+         return asyncio.run(_download())
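
Taken together, EFTSQuery paginates SEC full-text search results under a 10-requests-per-second limiter and returns "cik/accession" strings. A hedged usage sketch built only from the signatures above; the CIK, forms, dates, and search text are placeholder values:

# Hypothetical usage of the EFTSQuery class added above; all values are placeholders.
from datamule.book.eftsquery import EFTSQuery

query = EFTSQuery()
accession_numbers = query.query_efts(
    cik=320193,                                # zero-padded into the 'ciks' parameter
    submission_type=["10-K", "10-Q"],          # a list is joined into the 'forms' parameter
    filing_date=("2023-01-01", "2023-12-31"),  # a (start, end) tuple becomes startdt/enddt
    search_text="cybersecurity",               # quoted into the 'q' parameter
)
print(len(accession_numbers))  # "cik/accession" strings built in _get_accession_numbers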
datamule/book/xbrl_retriever.py ADDED
@@ -0,0 +1,88 @@
+ import asyncio
+ import aiohttp
+ import time
+
+ class PreciseRateLimiter:
+     def __init__(self, rate=10, interval=1.0):
+         self.rate = rate
+         self.interval = interval
+         self.token_time = self.interval / self.rate
+         self.last_time = time.time()
+         self.lock = asyncio.Lock()
+
+     async def acquire(self):
+         async with self.lock:
+             now = time.time()
+             wait_time = self.last_time + self.token_time - now
+             if wait_time > 0:
+                 await asyncio.sleep(wait_time)
+             self.last_time = time.time()
+             return True
+
+ class XBRLRetriever:
+     def __init__(self):
+         self.base_url = "https://data.sec.gov/api/xbrl/frames"
+         self.headers = {
+             'User-Agent': 'Your Name yourname@email.com',
+             'Accept-Encoding': 'gzip, deflate',
+             'Host': 'data.sec.gov'
+         }
+         self.session = None
+         self.limiter = PreciseRateLimiter(10)
+
+     async def __aenter__(self):
+         if not self.session:
+             self.session = aiohttp.ClientSession(headers=self.headers)
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if self.session:
+             await self.session.close()
+             self.session = None
+
+     async def _fetch_json(self, url):
+         await self.limiter.acquire()
+         try:
+             async with self.session.get(url) as response:
+                 if response.status == 429:
+                     await asyncio.sleep(61)
+                     return await self._fetch_json(url)
+                 elif response.status == 200:
+                     return await response.json()
+                 else:
+                     print(f"Error {response.status} for URL: {url}")
+                     return None
+         except Exception as e:
+             print(f"Error fetching {url}: {str(e)}")
+             return None
+
+     def _build_url(self, params):
+         taxonomy = params.get('taxonomy')
+         concept = params.get('concept')
+         unit = params.get('unit')
+         period = params.get('period')
+
+         if not all([taxonomy, concept, unit, period]):
+             raise ValueError("Missing required parameters")
+
+         return f"{self.base_url}/{taxonomy}/{concept}/{unit}/{period}.json"
+
+     async def _get_xbrl_data(self, params_list):
+         tasks = []
+         urls = {}
+
+         for params in params_list:
+             url = self._build_url(params)
+             urls[url] = params
+             tasks.append(self._fetch_json(url))
+
+         results = await asyncio.gather(*tasks)
+
+         return {url: result for url, result in zip(urls.keys(), results) if result is not None}
+
+     def get_xbrl_frames(self, params_list):
+         async def _download():
+             async with self as downloader:
+                 return await self._get_xbrl_data(params_list)
+
+         return asyncio.run(_download())
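
XBRLRetriever batches concurrent requests against the SEC XBRL frames endpoint and keys the results by URL. A usage sketch under the parameter names required by _build_url; the taxonomy, concept, unit, and period values are illustrative, not taken from this diff:

# Hypothetical usage of XBRLRetriever; the frame parameters are placeholder values.
from datamule.book.xbrl_retriever import XBRLRetriever

retriever = XBRLRetriever()
frames = retriever.get_xbrl_frames([
    {"taxonomy": "us-gaap", "concept": "Revenues", "unit": "USD", "period": "CY2023Q1"},
])
for url, data in frames.items():          # only successful responses are kept
    print(url, len(data.get("data", [])))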
datamule/config.py ADDED
@@ -0,0 +1,29 @@
+ import json
+ import os
+
+ class Config:
+     def __init__(self):
+         self.config_path = os.path.expanduser("~/.datamule/config.json")
+         self._ensure_config_exists()
+
+     def _ensure_config_exists(self):
+         os.makedirs(os.path.dirname(self.config_path), exist_ok=True)
+         if not os.path.exists(self.config_path):
+             self._save_config({"default_source": None})
+
+     def _save_config(self, config):
+         with open(self.config_path, 'w') as f:
+             json.dump(config, f)
+
+     def set_default_source(self, source):
+         config = self._load_config()
+         config["default_source"] = source
+         self._save_config(config)
+
+     def get_default_source(self):
+         config = self._load_config()
+         return config.get("default_source")
+
+     def _load_config(self):
+         with open(self.config_path) as f:
+             return json.load(f)
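
Config is a thin wrapper around a JSON file in the user's home directory; usage follows directly from the methods above. The source name passed in is a placeholder:

# Usage sketch for the new Config class; "datamule" as a source name is illustrative.
from datamule import Config

config = Config()                       # creates ~/.datamule/config.json if missing
config.set_default_source("datamule")   # persisted to the JSON file
print(config.get_default_source())      # -> "datamule"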