datamule 1.6.0.tar.gz → 1.6.1.tar.gz

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (75)
  1. {datamule-1.6.0 → datamule-1.6.1}/PKG-INFO +1 -1
  2. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/submissions/monitor.py +115 -75
  3. datamule-1.6.1/datamule/utils/__init__.py +0 -0
  4. datamule-1.6.1/datamule/utils/construct_submissions_data.py +150 -0
  5. {datamule-1.6.0 → datamule-1.6.1}/datamule.egg-info/PKG-INFO +1 -1
  6. {datamule-1.6.0 → datamule-1.6.1}/datamule.egg-info/SOURCES.txt +3 -1
  7. {datamule-1.6.0 → datamule-1.6.1}/setup.py +1 -1
  8. {datamule-1.6.0 → datamule-1.6.1}/datamule/__init__.py +0 -0
  9. {datamule-1.6.0 → datamule-1.6.1}/datamule/config.py +0 -0
  10. {datamule-1.6.0 → datamule-1.6.1}/datamule/data/listed_filer_metadata.csv +0 -0
  11. {datamule-1.6.0 → datamule-1.6.1}/datamule/datamule/__init__.py +0 -0
  12. {datamule-1.6.0 → datamule-1.6.1}/datamule/datamule/sec_connector.py +0 -0
  13. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/__init__.py +0 -0
  14. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/document.py +0 -0
  15. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/__init__.py +0 -0
  16. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/atsn.py +0 -0
  17. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/cfportal.py +0 -0
  18. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/d.py +0 -0
  19. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ex102_abs.py +0 -0
  20. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ex99a_sdr.py +0 -0
  21. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ex99c_sdr.py +0 -0
  22. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ex99g_sdr.py +0 -0
  23. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ex99i_sdr.py +0 -0
  24. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/information_table.py +0 -0
  25. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/nmfp.py +0 -0
  26. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/npx.py +0 -0
  27. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/onefourtyfour.py +0 -0
  28. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ownership.py +0 -0
  29. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/proxy_voting_record.py +0 -0
  30. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/sbs.py +0 -0
  31. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/sbsef.py +0 -0
  32. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/schedule13.py +0 -0
  33. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/sdr.py +0 -0
  34. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/submission_metadata.py +0 -0
  35. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/ta.py +0 -0
  36. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/thirteenfhr.py +0 -0
  37. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/twentyfivense.py +0 -0
  38. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings/twentyfourf2nt.py +0 -0
  39. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings_new/__init__.py +0 -0
  40. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings_new/mappings.py +0 -0
  41. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/mappings_new/ownership.py +0 -0
  42. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/processing.py +0 -0
  43. {datamule-1.6.0 → datamule-1.6.1}/datamule/document/table.py +0 -0
  44. {datamule-1.6.0 → datamule-1.6.1}/datamule/helper.py +0 -0
  45. {datamule-1.6.0 → datamule-1.6.1}/datamule/index.py +0 -0
  46. {datamule-1.6.0 → datamule-1.6.1}/datamule/mapping_dicts/__init__.py +0 -0
  47. {datamule-1.6.0 → datamule-1.6.1}/datamule/mapping_dicts/html_mapping_dicts.py +0 -0
  48. {datamule-1.6.0 → datamule-1.6.1}/datamule/mapping_dicts/txt_mapping_dicts.py +0 -0
  49. {datamule-1.6.0 → datamule-1.6.1}/datamule/mapping_dicts/xml_mapping_dicts.py +0 -0
  50. {datamule-1.6.0 → datamule-1.6.1}/datamule/package_updater.py +0 -0
  51. {datamule-1.6.0 → datamule-1.6.1}/datamule/portfolio.py +0 -0
  52. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/__init__.py +0 -0
  53. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/infrastructure/__init__.py +0 -0
  54. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/infrastructure/submissions_metadata.py +0 -0
  55. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/submissions/__init__.py +0 -0
  56. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/submissions/downloader.py +0 -0
  57. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/submissions/eftsquery.py +0 -0
  58. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/submissions/streamer.py +0 -0
  59. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/submissions/textsearch.py +0 -0
  60. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/utils.py +0 -0
  61. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/xbrl/__init__.py +0 -0
  62. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/xbrl/downloadcompanyfacts.py +0 -0
  63. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/xbrl/filter_xbrl.py +0 -0
  64. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/xbrl/streamcompanyfacts.py +0 -0
  65. {datamule-1.6.0 → datamule-1.6.1}/datamule/sec/xbrl/xbrlmonitor.py +0 -0
  66. {datamule-1.6.0 → datamule-1.6.1}/datamule/seclibrary/__init__.py +0 -0
  67. {datamule-1.6.0 → datamule-1.6.1}/datamule/seclibrary/bq.py +0 -0
  68. {datamule-1.6.0 → datamule-1.6.1}/datamule/seclibrary/downloader.py +0 -0
  69. {datamule-1.6.0 → datamule-1.6.1}/datamule/seclibrary/query.py +0 -0
  70. {datamule-1.6.0 → datamule-1.6.1}/datamule/sheet.py +0 -0
  71. {datamule-1.6.0 → datamule-1.6.1}/datamule/submission.py +0 -0
  72. {datamule-1.6.0 → datamule-1.6.1}/datamule.egg-info/dependency_links.txt +0 -0
  73. {datamule-1.6.0 → datamule-1.6.1}/datamule.egg-info/requires.txt +0 -0
  74. {datamule-1.6.0 → datamule-1.6.1}/datamule.egg-info/top_level.txt +0 -0
  75. {datamule-1.6.0 → datamule-1.6.1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamule
-Version: 1.6.0
+Version: 1.6.1
 Summary: Work with SEC submissions at scale.
 Home-page: https://github.com/john-friedman/datamule-python
 Author: John Friedman
@@ -9,16 +9,14 @@ from .eftsquery import EFTSQuery
 import aiohttp
 from zoneinfo import ZoneInfo
 
-async def poll_rss(limiter):
+async def poll_rss(limiter, session):
     base_url = 'https://www.sec.gov/cgi-bin/browse-edgar?count=100&action=getcurrent&output=rss'
 
-    # Create a session specifically for this RSS polling operation
-    async with aiohttp.ClientSession(headers=headers) as session:
-        # Use the rate limiter before making the request
-        async with limiter:
-            # Make the HTTP request with the session
-            async with session.get(base_url) as response:
-                content = await response.read()
+    # Use the rate limiter before making the request
+    async with limiter:
+        # Use the provided session instead of creating a new one
+        async with session.get(base_url) as response:
+            content = await response.read()
 
     # Process the content
     content_str = content.decode('utf-8')
@@ -70,12 +68,31 @@ class Monitor():
         self.ratelimiters = {'sec.gov': PreciseRateLimiter(rate=5)}
         self.efts_query = EFTSQuery(quiet=True)
         self.efts_query.limiter = self.ratelimiters['sec.gov']
+        self.session = None
+        self.session_created_at = 0
+        self.session_lifetime = 300  # 5 minutes in seconds
 
     def set_domain_rate_limit(self, domain, rate):
         self.ratelimiters[domain] = PreciseRateLimiter(rate=rate)
         if domain == 'sec.gov':
             self.efts_query.limiter = self.ratelimiters[domain]
 
+    async def _ensure_fresh_session(self):
+        """Ensure we have a fresh session, recreating if expired or missing"""
+        current_time = time.time()
+
+        # Check if we need a new session
+        if (self.session is None or
+                current_time - self.session_created_at > self.session_lifetime):
+
+            # Close old session if it exists
+            if self.session:
+                await self.session.close()
+
+            # Create new session
+            self.session = aiohttp.ClientSession(headers=headers)
+            self.session_created_at = current_time
+
     async def _async_run_efts_query(self, **kwargs):
         """Async helper method to run EFTS query without creating a new event loop"""
         # Make sure to set quiet parameter if provided in kwargs
@@ -103,83 +120,106 @@ class Monitor():
         if polling_interval is None and validation_interval is None:
             raise ValueError("At least one of polling_interval or validation_interval must be specified")
 
-        # Backfill if start_date is provided
-        if start_date is not None:
-            today_date = datetime.now(ZoneInfo("America/New_York")).strftime('%Y-%m-%d')
-            if not quiet:
-                print(f"Backfilling from {start_date} to {today_date}")
-
-            hits = clean_efts_hits(await self._async_run_efts_query(
-                filing_date=(start_date, today_date),
-                quiet=quiet
-            ))
-
-            new_hits = self._filter_new_accessions(hits)
-            if not quiet:
-                print(f"New submissions found: {len(new_hits)}")
-            if new_hits and data_callback:
-                data_callback(new_hits)
-
-        # Initialize timing variables
-        current_time = time.time()
-        last_polling_time = current_time
-        last_validation_time = current_time
-
-        # Determine which operations to perform
-        do_polling = polling_interval is not None
-        do_validation = validation_interval is not None
+        # Ensure we have a fresh session
+        await self._ensure_fresh_session()
 
-        while True:
-            current_time = time.time()
-
-            # RSS polling (if enabled)
-            if do_polling and (current_time - last_polling_time) >= polling_interval/1000:
-                if not quiet:
-                    print(f"Polling RSS feed")
-                results = await poll_rss(self.ratelimiters['sec.gov'])
-                new_results = self._filter_new_accessions(results)
-                if new_results:
-                    if not quiet:
-                        print(f"Found {len(new_results)} new submissions via RSS")
-                    if data_callback:
-                        data_callback(new_results)
-                last_polling_time = current_time
-
-            # EFTS validation (if enabled)
-            if do_validation and (current_time - last_validation_time) >= validation_interval/1000:
-                # Get submissions from the last 24 hours for validation
+        try:
+            # Backfill if start_date is provided
+            if start_date is not None:
                 today_date = datetime.now(ZoneInfo("America/New_York")).strftime('%Y-%m-%d')
                 if not quiet:
-                    print(f"Validating submissions from {today_date}")
+                    print(f"Backfilling from {start_date} to {today_date}")
 
                 hits = clean_efts_hits(await self._async_run_efts_query(
-                    filing_date=(today_date, today_date),
+                    filing_date=(start_date, today_date),
                     quiet=quiet
                 ))
-
+
                 new_hits = self._filter_new_accessions(hits)
-                if new_hits:
-                    if not quiet:
-                        print(f"Found {len(new_hits)} new submissions via EFTS validation")
-                    if data_callback:
-                        data_callback(new_hits)
-                last_validation_time = current_time
+                if not quiet:
+                    print(f"New submissions found: {len(new_hits)}")
+                if new_hits and data_callback:
+                    data_callback(new_hits)
+
+            # Initialize timing variables
+            current_time = time.time()
+            last_polling_time = current_time
+            last_validation_time = current_time
 
-            # Interval callback
-            if interval_callback:
-                interval_callback()
-
-            # Calculate next wake-up time
-            next_times = []
-            if do_polling:
-                next_times.append(last_polling_time + (polling_interval / 1000))
-            if do_validation:
-                next_times.append(last_validation_time + (validation_interval / 1000))
+            # Determine which operations to perform
+            do_polling = polling_interval is not None
+            do_validation = validation_interval is not None
 
-            next_wake_time = min(next_times)
-            current_time = time.time()
-            time_to_sleep = max(0, next_wake_time - current_time)
-            await asyncio.sleep(time_to_sleep)
+            while True:
+                current_time = time.time()
+
+                # RSS polling (if enabled)
+                if do_polling and (current_time - last_polling_time) >= polling_interval/1000:
+                    if not quiet:
+                        print(f"Polling RSS feed")
+
+                    # Ensure session is fresh before polling
+                    await self._ensure_fresh_session()
+
+                    try:
+                        results = await poll_rss(self.ratelimiters['sec.gov'], self.session)
+                        new_results = self._filter_new_accessions(results)
+                        if new_results:
+                            if not quiet:
+                                print(f"Found {len(new_results)} new submissions via RSS")
+                            if data_callback:
+                                data_callback(new_results)
+                    except Exception as e:
+                        if not quiet:
+                            print(f"RSS polling error: {e}, will recreate session on next poll")
+                        # Force session recreation on next poll
+                        if self.session:
+                            await self.session.close()
+                        self.session = None
+
+                    last_polling_time = current_time
+
+                # EFTS validation (if enabled)
+                if do_validation and (current_time - last_validation_time) >= validation_interval/1000:
+                    # Get submissions from the last 24 hours for validation
+                    today_date = datetime.now(ZoneInfo("America/New_York")).strftime('%Y-%m-%d')
+                    if not quiet:
+                        print(f"Validating submissions from {today_date}")
+
+                    hits = clean_efts_hits(await self._async_run_efts_query(
+                        filing_date=(today_date, today_date),
+                        quiet=quiet
+                    ))
+
+                    new_hits = self._filter_new_accessions(hits)
+                    if new_hits:
+                        if not quiet:
+                            print(f"Found {len(new_hits)} new submissions via EFTS validation")
+                        if data_callback:
+                            data_callback(new_hits)
+                    last_validation_time = current_time
+
+                # Interval callback
+                if interval_callback:
+                    interval_callback()
+
+                # Calculate next wake-up time
+                next_times = []
+                if do_polling:
+                    next_times.append(last_polling_time + (polling_interval / 1000))
+                if do_validation:
+                    next_times.append(last_validation_time + (validation_interval / 1000))
+
+                next_wake_time = min(next_times)
+                current_time = time.time()
+                time_to_sleep = max(0, next_wake_time - current_time)
+                await asyncio.sleep(time_to_sleep)
+
+        finally:
+            # Clean up the session when done
+            if self.session:
+                await self.session.close()
+            self.session = None
 
     def monitor_submissions(self, data_callback=None, interval_callback=None,
                             polling_interval=1000, quiet=True, start_date=None,
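
For orientation, a minimal usage sketch of the reworked monitor (not part of the diff). The module path comes from the file list above; the signature of monitor_submissions is truncated here, so the validation_interval keyword and the no-argument Monitor() constructor are assumptions.

    # Hypothetical usage sketch; names inferred from this diff, not confirmed by the package docs.
    from datamule.sec.submissions.monitor import Monitor

    def on_new_filings(hits):
        # hits: the list of newly seen filing records passed to data_callback
        for hit in hits:
            print(hit)

    monitor = Monitor()  # assumed to need no required arguments
    monitor.monitor_submissions(
        data_callback=on_new_filings,
        polling_interval=1000,        # RSS poll interval in milliseconds (per the loop above)
        validation_interval=600000,   # EFTS cross-check interval in milliseconds (assumed keyword)
        quiet=False,
    )
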
File without changes
@@ -0,0 +1,150 @@
+import zipfile
+import json
+import csv
+import os
+import tempfile
+from concurrent.futures import ThreadPoolExecutor
+import threading
+from tqdm import tqdm
+import urllib.request
+
+headers = {'User-Agent': 'John Smith johnsmith@gmail.com'}
+
+def process_file_batch(zip_file, filenames_batch):
+    """Process a batch of files from the zip archive"""
+    batch_filings = []
+
+    for filename in filenames_batch:
+        if not filename.startswith('CIK'):
+            continue
+
+        try:
+            # Extract CIK from filename
+            cik = int(filename.split('.')[0].split('-')[0][3:])
+
+            # Read raw bytes and parse JSON
+            with zip_file.open(filename) as file:
+                raw_data = file.read()
+                submissions_dct = json.loads(raw_data)
+
+            # Handle different file types
+            if 'submissions' in filename:
+                filings_data = submissions_dct
+            else:
+                filings_data = submissions_dct['filings']['recent']
+
+            # Extract required data
+            accession_numbers = filings_data['accessionNumber']
+            filing_dates = filings_data['filingDate']
+            forms = filings_data['form']
+
+            # Create filing records for this file
+            for j in range(len(accession_numbers)):
+                filing_record = {
+                    'accessionNumber': accession_numbers[j],
+                    'filingDate': filing_dates[j],
+                    'form': forms[j],
+                    'cik': cik
+                }
+                batch_filings.append(filing_record)
+
+        except Exception as e:
+            print(f"Error processing {filename}: {e}")
+            continue
+
+    return batch_filings
+
+def write_csv_chunk(output_path, filings_data, is_first_write, write_lock):
+    """Thread-safe CSV writing with lock"""
+    with write_lock:
+        if is_first_write:
+            with open(output_path, 'w', newline='') as csvfile:
+                fieldnames = ['accessionNumber', 'filingDate', 'form', 'cik']
+                writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+                writer.writeheader()
+                writer.writerows(filings_data)
+        else:
+            with open(output_path, 'a', newline='') as csvfile:
+                fieldnames = ['accessionNumber', 'filingDate', 'form', 'cik']
+                writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+                writer.writerows(filings_data)
+
+def construct_submissions_data(output_path, submissions_zip_path=None, max_workers=4, batch_size=100):
+    """Creates a list of dicts of every accession number, with filing date, submission type, and ciks"""
+
+    if submissions_zip_path is None:
+        url = "https://www.sec.gov/Archives/edgar/daily-index/bulkdata/submissions.zip"
+
+        temp_dir = tempfile.mkdtemp()
+        zip_path = os.path.join(temp_dir, 'submissions.zip')
+
+        req = urllib.request.Request(url, headers=headers)
+
+        with urllib.request.urlopen(req) as response:
+            total_size = int(response.headers.get('Content-Length', 0))
+
+            with open(zip_path, 'wb') as f, tqdm(
+                desc="Downloading",
+                total=total_size,
+                unit='B',
+                unit_scale=True,
+                unit_divisor=1024,
+            ) as pbar:
+                while True:
+                    chunk = response.read(8192)
+                    if not chunk:
+                        break
+                    f.write(chunk)
+                    pbar.update(len(chunk))
+
+        submissions_zip_path = zip_path
+
+    # Keep zip file open throughout processing
+    with zipfile.ZipFile(submissions_zip_path, 'r') as zip_file:
+        # Get all CIK filenames
+        all_filenames = [f for f in zip_file.namelist() if f.startswith('CIK')]
+
+        print(f"Processing {len(all_filenames)} files with {max_workers} workers...")
+
+        # Create batches of filenames
+        filename_batches = []
+        for i in range(0, len(all_filenames), batch_size):
+            batch = all_filenames[i:i + batch_size]
+            filename_batches.append(batch)
+
+        # Setup for threading
+        write_lock = threading.Lock()
+        total_filings = 0
+        is_first_write = True
+
+        # Process batches with thread pool
+        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+            # Submit all batch jobs
+            future_to_batch = {
+                executor.submit(process_file_batch, zip_file, batch): i
+                for i, batch in enumerate(filename_batches)
+            }
+
+            # Process results with progress bar
+            with tqdm(total=len(filename_batches), desc="Processing batches", unit="batch") as pbar:
+                for future in future_to_batch:
+                    try:
+                        batch_filings = future.result()
+
+                        if batch_filings:  # Only write if we have data
+                            write_csv_chunk(output_path, batch_filings, is_first_write, write_lock)
+                            is_first_write = False
+                            total_filings += len(batch_filings)
+
+                        pbar.update(1)
+                        pbar.set_postfix({
+                            'filings': total_filings,
+                            'files': len(filename_batches[future_to_batch[future]])
+                        })
+
+                    except Exception as e:
+                        print(f"Error processing batch: {e}")
+                        pbar.update(1)
+
+    print(f"Complete! Processed {total_filings} total filings")
+    print(f"Data saved to {output_path}")
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamule
-Version: 1.6.0
+Version: 1.6.1
 Summary: Work with SEC submissions at scale.
 Home-page: https://github.com/john-friedman/datamule-python
 Author: John Friedman
@@ -68,4 +68,6 @@ datamule/sec/xbrl/xbrlmonitor.py
 datamule/seclibrary/__init__.py
 datamule/seclibrary/bq.py
 datamule/seclibrary/downloader.py
-datamule/seclibrary/query.py
+datamule/seclibrary/query.py
+datamule/utils/__init__.py
+datamule/utils/construct_submissions_data.py
@@ -32,7 +32,7 @@ if not os.path.exists(file_path):
 setup(
     name="datamule",
     author="John Friedman",
-    version="1.6.0",
+    version="1.6.1",
     description="Work with SEC submissions at scale.",
     packages=find_packages(include=['datamule', 'datamule.*']),
     url="https://github.com/john-friedman/datamule-python",
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes