datamule 0.429.tar.gz → 0.430.tar.gz

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (58)
  1. {datamule-0.429 → datamule-0.430}/PKG-INFO +1 -1
  2. {datamule-0.429 → datamule-0.430}/datamule/document.py +3 -0
  3. {datamule-0.429 → datamule-0.430}/datamule/downloader/premiumdownloader.py +2 -1
  4. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/helper.py +1 -1
  5. datamule-0.430/datamule/portfolio.py +95 -0
  6. {datamule-0.429 → datamule-0.430}/datamule.egg-info/PKG-INFO +1 -1
  7. {datamule-0.429 → datamule-0.430}/setup.py +1 -1
  8. datamule-0.429/datamule/portfolio.py +0 -82
  9. {datamule-0.429 → datamule-0.430}/datamule/__init__.py +0 -0
  10. {datamule-0.429 → datamule-0.430}/datamule/config.py +0 -0
  11. {datamule-0.429 → datamule-0.430}/datamule/data/company_former_names.csv +0 -0
  12. {datamule-0.429 → datamule-0.430}/datamule/data/company_metadata.csv +0 -0
  13. {datamule-0.429 → datamule-0.430}/datamule/data/company_tickers.csv +0 -0
  14. {datamule-0.429 → datamule-0.430}/datamule/data/sec-glossary.csv +0 -0
  15. {datamule-0.429 → datamule-0.430}/datamule/data/xbrl_descriptions.csv +0 -0
  16. {datamule-0.429 → datamule-0.430}/datamule/dataset_builder/dataset_builder.py +0 -0
  17. {datamule-0.429 → datamule-0.430}/datamule/downloader/downloader.py +0 -0
  18. {datamule-0.429 → datamule-0.430}/datamule/helper.py +0 -0
  19. {datamule-0.429 → datamule-0.430}/datamule/monitor.py +0 -0
  20. {datamule-0.429 → datamule-0.430}/datamule/mulebot/__init__.py +0 -0
  21. {datamule-0.429 → datamule-0.430}/datamule/mulebot/helper.py +0 -0
  22. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot.py +0 -0
  23. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/__init__.py +0 -0
  24. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/server.py +0 -0
  25. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/css/minimalist.css +0 -0
  26. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/artifacts.js +0 -0
  27. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/chat.js +0 -0
  28. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/filingArtifacts.js +0 -0
  29. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/listArtifacts.js +0 -0
  30. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/main.js +0 -0
  31. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/prefilledPrompt.js +0 -0
  32. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/suggestions.js +0 -0
  33. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/tableArtifacts.js +0 -0
  34. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/static/scripts/utils.js +0 -0
  35. {datamule-0.429 → datamule-0.430}/datamule/mulebot/mulebot_server/templates/chat-minimalist.html +0 -0
  36. {datamule-0.429 → datamule-0.430}/datamule/mulebot/search.py +0 -0
  37. {datamule-0.429 → datamule-0.430}/datamule/mulebot/tools.py +0 -0
  38. {datamule-0.429 → datamule-0.430}/datamule/packageupdater.py +0 -0
  39. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/basic_10k_parser.py +0 -0
  40. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/basic_10q_parser.py +0 -0
  41. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/basic_13d_parser.py +0 -0
  42. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/basic_13g_parser.py +0 -0
  43. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/basic_8k_parser.py +0 -0
  44. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/form_d_parser.py +0 -0
  45. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/generalized_item_parser.py +0 -0
  46. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/information_table_parser_13fhr.py +0 -0
  47. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/insider_trading_parser.py +0 -0
  48. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/mappings.py +0 -0
  49. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/n_port_p_parser.py +0 -0
  50. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/sec_parser.py +0 -0
  51. {datamule-0.429 → datamule-0.430}/datamule/parser/document_parsing/sgml_parser.py +0 -0
  52. {datamule-0.429 → datamule-0.430}/datamule/parser/sgml_parsing/sgml_parser_cy.c +0 -0
  53. {datamule-0.429 → datamule-0.430}/datamule/submission.py +0 -0
  54. {datamule-0.429 → datamule-0.430}/datamule.egg-info/SOURCES.txt +0 -0
  55. {datamule-0.429 → datamule-0.430}/datamule.egg-info/dependency_links.txt +0 -0
  56. {datamule-0.429 → datamule-0.430}/datamule.egg-info/requires.txt +3 -3
  57. {datamule-0.429 → datamule-0.430}/datamule.egg-info/top_level.txt +0 -0
  58. {datamule-0.429 → datamule-0.430}/setup.cfg +0 -0
--- datamule-0.429/PKG-INFO
+++ datamule-0.430/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamule
-Version: 0.429
+Version: 0.430
 Summary: Making it easier to use SEC filings.
 Home-page: https://github.com/john-friedman/datamule-python
 Author: John Friedman
--- datamule-0.429/datamule/document.py
+++ datamule-0.430/datamule/document.py
@@ -17,6 +17,9 @@ class Document:
         self.data = None
         self.content = None
 
+    def _load_content(self):
+        self.content = load_file_content(self.path)
+
     def contains_string(self, pattern):
         """Currently only works for .htm, .html, and .txt files"""
         if self.path.suffix in ['.htm', '.html', '.txt']:
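
The new _load_content helper gives Document a way to populate content on demand, using the same load_file_content dispatcher that the parser helpers provide. A minimal sketch of the intended use; the constructor's real signature is not shown in this diff, so the keyword argument below is an assumption:

from pathlib import Path
from datamule.document import Document  # module path per the file list above

# Hypothetical construction for illustration only
doc = Document(path=Path("filings/example-10k.htm"))
doc._load_content()            # new in 0.430: fills doc.content via load_file_content(doc.path)
print(doc.content is not None)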
--- datamule-0.429/datamule/downloader/premiumdownloader.py
+++ datamule-0.430/datamule/downloader/premiumdownloader.py
@@ -259,7 +259,8 @@ class PremiumDownloader:
             keepalive_timeout=60
         )
 
-        async with aiohttp.ClientSession(connector=connector, timeout=aiohttp.ClientTimeout(total=3600)) as session:
+        # timeout should be max 2 hours.
+        async with aiohttp.ClientSession(connector=connector, timeout=aiohttp.ClientTimeout(total=7200)) as session:
             tasks = [self.download_and_process(session, url, semaphore, decompression_pool, output_dir, processor) for url in urls]
             await asyncio.gather(*tasks, return_exceptions=True)
 
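
For reference, aiohttp's ClientTimeout(total=...) bounds the entire duration of each request made through the session, so this change lets a single bulk download run for up to two hours (7200 s rather than 3600 s) before asyncio.TimeoutError is raised. A standalone sketch of the same configuration; the URL is a placeholder:

import asyncio
import aiohttp

async def fetch(url):
    # total= caps connection setup plus the whole transfer for each request
    timeout = aiohttp.ClientTimeout(total=7200)  # two hours, matching 0.430
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as response:
            return await response.read()

# asyncio.run(fetch("https://example.com/large-archive.zst"))  # placeholder URL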
--- datamule-0.429/datamule/parser/document_parsing/helper.py
+++ datamule-0.430/datamule/parser/document_parsing/helper.py
@@ -62,7 +62,7 @@ def load_file_content(filename):
     elif filename.suffix in ['.html','.htm']:
         return load_html_content(filename)
     else:
-        raise ValueError(f"Unsupported file type: {filename}")
+        raise ValueError(f"Unsupported file type: {filename.suffix}")
 
 def clean_title(title: str) -> str:
     """Clean up section title by removing newlines, periods, and all whitespace, converting to lowercase."""
--- /dev/null
+++ datamule-0.430/datamule/portfolio.py
@@ -0,0 +1,95 @@
+from pathlib import Path
+from tqdm import tqdm
+from concurrent.futures import ThreadPoolExecutor
+from .submission import Submission
+from .downloader.premiumdownloader import PremiumDownloader
+from .downloader.downloader import Downloader
+from .config import Config
+import os
+
+class Portfolio:
+    def __init__(self, path):
+        self.path = Path(path)
+        self.submissions = []
+        self.MAX_WORKERS = os.cpu_count() - 1
+
+        if self.path.exists():
+            self._load_submissions()
+
+    def _load_submissions(self):
+        folders = [f for f in self.path.iterdir() if f.is_dir()]
+        print(f"Loading {len(folders)} submissions")
+
+        with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
+            self.submissions = list(tqdm(
+                executor.map(Submission, folders),
+                total=len(folders),
+                desc="Loading submissions"
+            ))
+
+    def process_submissions(self, callback):
+        """Process all submissions using a thread pool."""
+        with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
+            results = list(tqdm(
+                executor.map(callback, self.submissions),
+                total=len(self.submissions),
+                desc="Processing submissions"
+            ))
+        return results
+
+    def process_documents(self, callback):
+        """Process all documents using a thread pool."""
+        documents = [doc for sub in self.submissions for doc in sub]
+
+        with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
+            results = list(tqdm(
+                executor.map(callback, documents),
+                total=len(documents),
+                desc="Processing documents"
+            ))
+        return results
+
+    def download_submissions(self, cik=None, ticker=None, submission_type=None, filing_date=None, provider=None):
+        if provider is None:
+            config = Config()
+            provider = config.get_default_source()
+
+        downloader = PremiumDownloader() if provider == 'datamule' else Downloader()
+        downloader.download_submissions(
+            output_dir=self.path,
+            cik=cik,
+            ticker=ticker,
+            submission_type=submission_type,
+            filing_date=filing_date
+        )
+
+        # Reload submissions after download
+        self._load_submissions()
+
+    def __iter__(self):
+        return iter(self.submissions)
+
+    def document_type(self, document_types):
+        """Filter documents by type(s)."""
+        if isinstance(document_types, str):
+            document_types = [document_types]
+
+        for submission in self.submissions:
+            yield from submission.document_type(document_types)
+
+    def contains_string(self, pattern, document_types=None):
+        """Search for pattern in documents, with optional type filter."""
+        def check_document(document):
+            return document if document.contains_string(pattern) else None
+
+        # Get documents, filtered by type if specified
+        documents = list(self.document_type(document_types)) if document_types else [
+            doc for sub in self.submissions for doc in sub
+        ]
+
+        with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
+            results = executor.map(check_document, documents)
+
+            for doc in tqdm(results, total=len(documents), desc=f"Searching for '{pattern}'"):
+                if doc is not None:
+                    yield doc
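
Taken together, the rewritten Portfolio manages its own ThreadPoolExecutor (sized to os.cpu_count() - 1) for loading, processing, and searching, so callers no longer supply an executor. A hedged usage sketch based only on the code above; the directory, ticker, and pattern are placeholders, and the top-level import assumes datamule re-exports Portfolio:

from datamule import Portfolio  # assumption: re-exported from the package root

portfolio = Portfolio("apple_filings")  # loads any existing submission folders
portfolio.download_submissions(ticker="AAPL", submission_type="10-K")  # reloads when done

# contains_string is a generator; the search fans out across the internal thread pool
for doc in portfolio.contains_string("climate risk", document_types="10-K"):
    print(doc.path)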
--- datamule-0.429/datamule.egg-info/PKG-INFO
+++ datamule-0.430/datamule.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamule
-Version: 0.429
+Version: 0.430
 Summary: Making it easier to use SEC filings.
 Home-page: https://github.com/john-friedman/datamule-python
 Author: John Friedman
--- datamule-0.429/setup.py
+++ datamule-0.430/setup.py
@@ -55,7 +55,7 @@ extras["all"] = list(all_dependencies)
 setup(
     name="datamule",
     author="John Friedman",
-    version="0.429",
+    version="0.430",
     description="Making it easier to use SEC filings.",
     packages=find_namespace_packages(include=['datamule*']),
     url="https://github.com/john-friedman/datamule-python",
--- datamule-0.429/datamule/portfolio.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from pathlib import Path
-from tqdm import tqdm
-from concurrent.futures import ProcessPoolExecutor
-from .submission import Submission
-from .downloader.premiumdownloader import PremiumDownloader
-from .downloader.downloader import Downloader
-from .config import Config
-
-class Portfolio:
-    def create(cls, path):
-        # This method handles the process pool lifecycle
-        with ProcessPoolExecutor() as executor:
-            portfolio = cls(path, executor)
-        return portfolio
-
-    def __init__(self, path, executor=None):
-        self.path = Path(path)
-        # check if path exists
-        if self.path.exists():
-            folders = [f for f in self.path.iterdir() if f.is_dir()]
-            print(f"Loading {len(folders)} submissions")
-
-            if executor is None:
-                # Fall back to sequential loading if no executor
-                self.submissions = [Submission(f) for f in tqdm(folders, desc="Loading submissions")]
-            else:
-                # Use provided executor for parallel loading
-                self.submissions = list(tqdm(
-                    executor.map(Submission, folders),
-                    total=len(folders),
-                    desc="Loading submissions"
-                ))
-
-        else:
-            pass
-
-    def download_submissions(self, cik=None, ticker=None, submission_type=None, filing_date=None, provider=None):
-        if provider is None:
-            config = Config()
-            provider = config.get_default_source()
-
-        if provider == 'sec':
-            downloader = Downloader()
-        elif provider == 'datamule':
-            downloader = PremiumDownloader()
-
-        downloader.download_submissions(output_dir=self.path, cik=cik, ticker=ticker, submission_type=submission_type, filing_date=filing_date
-        )
-
-        # Reload submissions after download
-        self.__init__(self.path)
-
-    def __iter__(self):
-        return iter(self.submissions)
-
-    def document_type(self, document_types):
-        # Convert single document type to list for consistent handling
-        if isinstance(document_types, str):
-            document_types = [document_types]
-
-        for submission in self.submissions:
-            yield from submission.document_type(document_types)
-
-    def contains_string(self, pattern, document_types=None, executor=None):
-        def check_document(document):
-            return document if document.contains_string(pattern) else None
-
-        documents = list(self.document_type(document_types) if document_types else (
-            doc for sub in tqdm(self.submissions, desc="Collecting documents") for doc in sub
-        ))
-
-        if executor:
-            results = list(tqdm(
-                executor.map(check_document, documents),
-                total=len(documents),
-                desc=f"Searching for '{pattern}'"
-            ))
-            yield from (doc for doc in results if doc is not None)
-        else:
-            for document in tqdm(documents, desc=f"Searching for '{pattern}'"):
-                if document.contains_string(pattern):
-                    yield document
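
This removal completes the Portfolio rewrite: the 0.429 design, where create() spun up a ProcessPoolExecutor (note that create lacked a @classmethod decorator as written) and both __init__ and contains_string accepted an optional executor, is replaced by the self-contained thread-pool version added earlier in this diff. A migration sketch under those assumptions; the directory and pattern are placeholders:

from datamule.portfolio import Portfolio

# 0.429 (removed): the caller managed the pool
# with ProcessPoolExecutor() as executor:
#     portfolio = Portfolio("filings", executor)
#     matches = list(portfolio.contains_string("going concern", executor=executor))

# 0.430: the pool is internal, so call sites shrink to
portfolio = Portfolio("filings")
matches = list(portfolio.contains_string("going concern"))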
--- datamule-0.429/datamule.egg-info/requires.txt
+++ datamule-0.430/datamule.egg-info/requires.txt
@@ -11,11 +11,11 @@ pytz
 zstandard
 
 [all]
-flask
-google-generativeai
+psutil
 pandas
+google-generativeai
 openai
-psutil
+flask
 
 [dataset_builder]
 pandas