datamule 1.0.0__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
datamule/book.py → datamule/book/book.py RENAMED
@@ -6,8 +6,26 @@
  # likely new bottleneck will be local parsing() - will be bypassed in future when we have parsed archive
  # wow parsed archive is going to be crazy fast - like every 10k in 1 minute.

+ # example queries: filter by sic = 7372, xbrl query = dei:operatingprofit > 0 in date range 2018-2019
+
+ # hmm do we go for SQL-esque or not.
+ # I think we do.
+ # I think we remove cik, ticker, sic, etc. and just have a query object
+ # should be SQL-esque so users can use it easily w/o learning new syntax
+
+ # WHERE submission_type = '10-K'
+ # AND us-gaap:ResearchAndDevelopmentExpense > 0
+ # AND dei:debt_to_equity < 2
+ # AND filing_date BETWEEN '2023-01-01' AND '2023-12-31'
+ # AND CIK in (123, 456, 789)
+ # AND SIC in (123, 456, 789)
+ # AND ticker in ('AAPL', 'GOOGL', 'AMZN')
+ # AND document_type = 'EX-99.1'  # to select attachments
+
+ from .eftsquery import EFTSQuery
+
+
  class Book():
-     pass
      def process_submissions(self,cik,ticker,sic,submission_type,document_type,date,
              xbrl_query={},
              metadata_callback=None,
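
To make the design notes above concrete, here is a hypothetical sketch of the SQL-esque interface the comments describe. Nothing in this release implements it; the single-argument call is purely illustrative of the proposed query object.

# Hypothetical interface only -- sketches the query-object idea from the comments above.
query = """
WHERE submission_type = '10-K'
  AND us-gaap:ResearchAndDevelopmentExpense > 0
  AND filing_date BETWEEN '2023-01-01' AND '2023-12-31'
  AND ticker IN ('AAPL', 'GOOGL', 'AMZN')
"""
book = Book()                    # Book as defined in the diff above
book.process_submissions(query)  # hypothetical; the current method still takes cik/ticker/sic/etc.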
datamule/book/eftsquery.py ADDED
@@ -0,0 +1,127 @@
+ import asyncio
+ import aiohttp
+ from tqdm import tqdm
+ from datetime import datetime
+ from urllib.parse import urlencode
+ import time
+
+ class PreciseRateLimiter:
+     def __init__(self, rate=10, interval=1.0):
+         self.rate = rate  # requests per interval
+         self.interval = interval  # in seconds
+         self.token_time = self.interval / self.rate  # time per token
+         self.last_time = time.time()
+         self.lock = asyncio.Lock()
+
+     async def acquire(self):
+         async with self.lock:
+             now = time.time()
+             wait_time = self.last_time + self.token_time - now
+             if wait_time > 0:
+                 await asyncio.sleep(wait_time)
+             self.last_time = time.time()
+             return True
+
+ class EFTSQuery:
+     def __init__(self):
+         self.headers = {
+             'User-Agent': 'Your Name yourname@email.com',
+             'Accept-Encoding': 'gzip, deflate',
+             'Host': 'efts.sec.gov'
+         }
+         self.session = None
+         self.limiter = PreciseRateLimiter(10)
+
+     async def __aenter__(self):
+         if not self.session:
+             self.session = aiohttp.ClientSession(headers=self.headers)
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if self.session:
+             await self.session.close()
+             self.session = None
+
+     async def _fetch_json(self, url):
+         await self.limiter.acquire()
+         try:
+             async with self.session.get(url) as response:
+                 if response.status == 429:
+                     await asyncio.sleep(61)
+                     return await self._fetch_json(url)
+                 return await response.json()
+         except Exception as e:
+             print(f"Error fetching {url}: {str(e)}")
+             return None
+
+     async def _get_accession_numbers(self, base_url):
+         data = await self._fetch_json(f"{base_url}&from=0&size=1")
+         if not data or 'hits' not in data:
+             return []
+
+         total_hits = data['hits']['total']['value']
+         if not total_hits:
+             return []
+
+         accession_numbers = []
+         start = 0
+         page_size = 100
+         batch_size = 10  # Number of concurrent requests
+
+         with tqdm(total=total_hits) as pbar:
+             while start < total_hits:
+                 tasks = []
+                 for i in range(batch_size):
+                     if start + i * page_size >= total_hits:
+                         break
+                     url = f"{base_url}&from={start + i * page_size}&size={page_size}"
+                     tasks.append(self._fetch_json(url))
+
+                 if not tasks:
+                     break
+
+                 results = await asyncio.gather(*tasks)
+
+                 for data in results:
+                     if data and 'hits' in data:
+                         hits = data['hits']['hits']
+                         batch_numbers = [
+                             f"{hit['_source']['ciks'][0]}/{hit['_id'].split(':')[0]}"
+                             for hit in hits
+                         ]
+                         accession_numbers.extend(batch_numbers)
+                         pbar.update(len(hits))
+
+                 start += batch_size * page_size
+
+         return accession_numbers
+
+     def query_efts(self, cik=None, ticker=None, submission_type=None, filing_date=None, search_text=None):
+         async def _download():
+             async with self as downloader:
+                 params = {}
+
+                 if cik:
+                     params['ciks'] = str(cik).zfill(10)
+
+                 if submission_type:
+                     params['forms'] = ','.join(submission_type) if isinstance(submission_type, list) else submission_type
+
+                 if isinstance(filing_date, list):
+                     dates = [(d, d) for d in filing_date]
+                 elif isinstance(filing_date, tuple):
+                     dates = [filing_date]
+                 else:
+                     date_str = filing_date if filing_date else f"2001-01-01,{datetime.now().strftime('%Y-%m-%d')}"
+                     start, end = date_str.split(',')
+                     dates = [(start, end)]
+
+                 params['startdt'], params['enddt'] = dates[0]
+
+                 if search_text:
+                     params['q'] = f'"{search_text}"'
+
+                 base_url = f"https://efts.sec.gov/LATEST/search-index?{urlencode(params, doseq=True)}"
+                 return await self._get_accession_numbers(base_url)
+
+         return asyncio.run(_download())
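
A usage sketch based on the class above; the CIK and date range are illustrative. Note that PreciseRateLimiter(10) paces requests roughly 100 ms apart, and that query_efts currently applies only the first date range it builds.

from datamule.book.eftsquery import EFTSQuery

query = EFTSQuery()
# Returns 'cik/accession-prefix' identifiers for matching filings.
ids = query.query_efts(
    cik=320193,                           # Apple; zero-padded to 10 digits internally
    submission_type='10-K',
    filing_date='2023-01-01,2023-12-31',  # 'start,end' string form from the code above
)
print(ids[:5])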
datamule/book/xbrl_retriever.py ADDED
@@ -0,0 +1,88 @@
+ import asyncio
+ import aiohttp
+ import time
+
+ class PreciseRateLimiter:
+     def __init__(self, rate=10, interval=1.0):
+         self.rate = rate
+         self.interval = interval
+         self.token_time = self.interval / self.rate
+         self.last_time = time.time()
+         self.lock = asyncio.Lock()
+
+     async def acquire(self):
+         async with self.lock:
+             now = time.time()
+             wait_time = self.last_time + self.token_time - now
+             if wait_time > 0:
+                 await asyncio.sleep(wait_time)
+             self.last_time = time.time()
+             return True
+
+ class XBRLRetriever:
+     def __init__(self):
+         self.base_url = "https://data.sec.gov/api/xbrl/frames"
+         self.headers = {
+             'User-Agent': 'Your Name yourname@email.com',
+             'Accept-Encoding': 'gzip, deflate',
+             'Host': 'data.sec.gov'
+         }
+         self.session = None
+         self.limiter = PreciseRateLimiter(10)
+
+     async def __aenter__(self):
+         if not self.session:
+             self.session = aiohttp.ClientSession(headers=self.headers)
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if self.session:
+             await self.session.close()
+             self.session = None
+
+     async def _fetch_json(self, url):
+         await self.limiter.acquire()
+         try:
+             async with self.session.get(url) as response:
+                 if response.status == 429:
+                     await asyncio.sleep(61)
+                     return await self._fetch_json(url)
+                 elif response.status == 200:
+                     return await response.json()
+                 else:
+                     print(f"Error {response.status} for URL: {url}")
+                     return None
+         except Exception as e:
+             print(f"Error fetching {url}: {str(e)}")
+             return None
+
+     def _build_url(self, params):
+         taxonomy = params.get('taxonomy')
+         concept = params.get('concept')
+         unit = params.get('unit')
+         period = params.get('period')
+
+         if not all([taxonomy, concept, unit, period]):
+             raise ValueError("Missing required parameters")
+
+         return f"{self.base_url}/{taxonomy}/{concept}/{unit}/{period}.json"
+
+     async def _get_xbrl_data(self, params_list):
+         tasks = []
+         urls = {}
+
+         for params in params_list:
+             url = self._build_url(params)
+             urls[url] = params
+             tasks.append(self._fetch_json(url))
+
+         results = await asyncio.gather(*tasks)
+
+         return {url: result for url, result in zip(urls.keys(), results) if result is not None}
+
+     def get_xbrl_frames(self, params_list):
+         async def _download():
+             async with self as downloader:
+                 return await self._get_xbrl_data(params_list)
+
+         return asyncio.run(_download())
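
A minimal usage sketch for the class above; the concept and period values are illustrative. The URL shape follows the SEC XBRL frames API (https://data.sec.gov/api/xbrl/frames/{taxonomy}/{concept}/{unit}/{period}.json).

from datamule.book.xbrl_retriever import XBRLRetriever

retriever = XBRLRetriever()
frames = retriever.get_xbrl_frames([
    {
        'taxonomy': 'us-gaap',
        'concept': 'ResearchAndDevelopmentExpense',
        'unit': 'USD',
        'period': 'CY2023Q1',  # duration frame; instant frames take an 'I' suffix, e.g. CY2023Q1I
    },
])
# Keys are the frame URLs; only requests that returned JSON are kept.
for url, payload in frames.items():
    print(url, len(payload.get('data', [])))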
datamule/document.py CHANGED
@@ -2,7 +2,7 @@ import json
  import csv
  from .helper import convert_to_dashed_accession
  import re
- from doc2dict import xml2dict, txt2dict
+ from doc2dict import xml2dict, txt2dict, dict2dict
  from doc2dict.mapping import flatten_hierarchy
  from .mapping_dicts import txt_mapping_dicts
  from .mapping_dicts import xml_mapping_dicts
@@ -126,7 +126,8 @@ class Document:
          elif self.type == 'SC 13G':
              mapping_dict = txt_mapping_dicts.dict_13g

-         self.data = txt2dict(content=self.content, mapping_dict=mapping_dict)
+         self.data = {}
+         self.data['document'] = dict2dict(txt2dict(content=self.content, mapping_dict=mapping_dict))
          return self.data

      def write_json(self, output_filename=None):
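
The practical effect of this change is that parsed output is now nested one level deeper, under a 'document' key, after a dict2dict pass. A sketch, assuming `doc` is a Document instance for one of the mapped form types and that `parse()` is the method containing the lines above:

# Sketch, assuming `doc` is a Document for an SC 13D/13G filing
# and parse() is the method shown in the diff above.
data = doc.parse()
print(data.keys())       # dict_keys(['document'])
print(data['document'])  # the dict2dict-normalized txt2dict output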
datamule/mapping_dicts/txt_mapping_dicts.py CHANGED
@@ -29,10 +29,12 @@ dict_sgml = {
      }
  }

- item_pattern_mapping = r"^\n\n\s*(ITEM|Item)\b"
- part_pattern_mapping = r"^\n\n\s*(PART|Part)\b"
+ item_pattern_mapping = r"^\n\n\s*(ITEM|Item)\s+(\d+[a-zA-Z]?|ONE|TWO|THREE|FOUR|FIVE|SIX|SEVEN|EIGHT|NINE|TEN|ELEVEN|TWELVE|THIRTEEN|FOURTEEN|FIFTEEN|SIXTEEN|[0-9]+[a-zA-Z]?)\.?"
+ item_pattern_mapping_8k = r"^\n\n\s*(ITEM|Item)\s+(\d+(?:\.\d+)?[a-zA-Z]?|ONE|TWO|THREE|FOUR|FIVE|SIX|SEVEN|EIGHT|NINE|TEN|ELEVEN|TWELVE|THIRTEEN|FOURTEEN|FIFTEEN|SIXTEEN|[0-9]+[a-zA-Z]?)\.?"
+ part_pattern_mapping = r"^\n\n\s*(PART|Part)\s+(?:I{1,3}|IV)\.?"

  item_pattern_standardization = r"^\s*(?:ITEM|Item)\s+(\d+[a-zA-Z]?|ONE|TWO|THREE|FOUR|FIVE|SIX|SEVEN|EIGHT|NINE|TEN|ELEVEN|TWELVE|THIRTEEN|FOURTEEN|FIFTEEN|SIXTEEN|[0-9]+[a-zA-Z]?)\.?"
+ item_pattern_standardization_8k = r"^\s*(?:ITEM|Item)\s+(\d+(?:\.\d+)?[a-zA-Z]?|ONE|TWO|THREE|FOUR|FIVE|SIX|SEVEN|EIGHT|NINE|TEN|ELEVEN|TWELVE|THIRTEEN|FOURTEEN|FIFTEEN|SIXTEEN)\.?"
  part_pattern_standardization = r"^\s*(?:PART|Part)\s+([IVX]+)"


@@ -194,7 +196,7 @@ dict_8k["rules"]["mappings"].extend([
      {
          "type": "hierarchy",
          "name": "item",
-         "pattern": item_pattern_mapping,
+         "pattern": item_pattern_mapping_8k,
          "hierarchy": 0
      },
  ])
@@ -204,7 +206,7 @@ dict_8k['transformations'] = [
          "type": "standardize",
          "match": {
              "type": "item",
-             "text_pattern": item_pattern_standardization
+             "text_pattern": item_pattern_standardization_8k
          },
          "output": {
              "format": "item{}",
datamule/monitor.py CHANGED
@@ -11,6 +11,15 @@ def _get_current_eastern_date():
      eastern = pytz.timezone('America/New_York')
      return datetime.now(eastern)

+ def _parse_date(date_str):
+     """Parse YYYY-MM-DD date string to datetime object in Eastern timezone"""
+     try:
+         date = datetime.strptime(date_str, '%Y-%m-%d')
+         eastern = pytz.timezone('America/New_York')
+         return eastern.localize(date)
+     except ValueError as e:
+         raise ValueError(f"Invalid date format. Please use YYYY-MM-DD. Error: {str(e)}")
+
  class PreciseRateLimiter:
      def __init__(self, rate, interval=1.0):
          self.rate = rate  # requests per interval
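
A quick sketch of what the new helper returns (pytz is already imported by monitor.py):

from datamule.monitor import _parse_date

d = _parse_date("2024-01-02")
print(d)  # 2024-01-02 00:00:00-05:00, localized to America/New_York
# _parse_date("01/02/2024") would raise ValueError with the YYYY-MM-DD hint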
@@ -67,7 +76,8 @@ class RateMonitor:
  class Monitor:
      def __init__(self):
          self.last_total = 0
-         self.last_date = _get_current_eastern_date()
+         self.last_date = None
+         self.current_monitor_date = None
          self.submissions = []
          self.max_hits = 10000
          self.limiter = PreciseRateLimiter(5)  # 5 requests per second
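
For reference, the limiter's pacing follows directly from its constructor arguments; a sketch of the arithmetic for the instance above:

# PreciseRateLimiter(5) with the default interval of 1.0 second:
rate, interval = 5, 1.0
token_time = interval / rate
print(token_time)  # 0.2 -> acquire() spaces consecutive requests at least 200 ms apart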
@@ -91,16 +101,29 @@
          """Poll API until new submissions are found."""
          while True:
              current_date = _get_current_eastern_date()
-             date_str = current_date.strftime('%Y-%m-%d')
-             timestamp = int(time.time())  # Add this line

-             if self.last_date != current_date.strftime('%Y-%m-%d'):
-                 print(f"New date: {date_str}")
+             # If we're caught up to the current date, use it; otherwise use our tracking date
+             if self.current_monitor_date.date() >= current_date.date():
+                 self.current_monitor_date = current_date
+             else:
+                 # If we're behind the current date and haven't finished that date's
+                 # processing, continue with it
+                 if self.last_date == self.current_monitor_date.strftime('%Y-%m-%d'):
+                     pass
+                 else:
+                     # Move to the next day
+                     self.current_monitor_date += timedelta(days=1)
+
+             date_str = self.current_monitor_date.strftime('%Y-%m-%d')
+             timestamp = int(time.time())
+
+             if self.last_date != date_str:
+                 print(f"Processing date: {date_str}")
                  self.last_total = 0
                  self.submissions = []
                  self.last_date = date_str

-             poll_url = f"{base_url}&startdt={date_str}&enddt={date_str}&v={timestamp}"  # Modified this line
+             poll_url = f"{base_url}&startdt={date_str}&enddt={date_str}&v={timestamp}"
              if not quiet:
                  print(f"Polling {poll_url}")

@@ -109,10 +132,16 @@
                  if data:
                      current_total = data['hits']['total']['value']
                      if current_total > self.last_total:
-                         print(f"Found {current_total - self.last_total} new submissions")
+                         print(f"Found {current_total - self.last_total} new submissions for {date_str}")
                          self.last_total = current_total
                          return current_total, data, poll_url
                      self.last_total = current_total
+
+                     # If we have no hits and we're processing a past date,
+                     # we can move to the next day immediately
+                     if current_total == 0 and self.current_monitor_date.date() < current_date.date():
+                         continue
+
              except Exception as e:
                  print(f"Error in poll: {str(e)}")

@@ -120,7 +149,6 @@

      async def _retrieve_batch(self, session, poll_url, from_positions, quiet):
          """Retrieve a batch of submissions concurrently."""
-         # The poll_url already contains the timestamp from _poll
          tasks = [
              self._fetch_json(
                  session,
@@ -176,11 +204,17 @@

          return submissions

-     async def _monitor(self, callback, form=None, cik=None, ticker=None, poll_interval=1000, quiet=True):
+     async def _monitor(self, callback, form=None, cik=None, ticker=None, start_date=None, poll_interval=1000, quiet=True):
          """Main monitoring loop with parallel processing."""
          if poll_interval < 100:
              raise ValueError("SEC rate limit is 10 requests per second, set poll_interval to 100ms or higher")

+         # Set up initial monitoring date
+         if start_date:
+             self.current_monitor_date = _parse_date(start_date)
+         else:
+             self.current_monitor_date = _get_current_eastern_date()
+
          # Handle form parameter
          if form is None:
              form = ['-0']
@@ -233,6 +267,17 @@

          await asyncio.sleep(poll_interval / 1000)

-     def monitor_submissions(self, callback=None, form=None, cik=None, ticker=None, poll_interval=1000, quiet=True):
-         """Start the monitoring process."""
-         asyncio.run(self._monitor(callback, form, cik, ticker, poll_interval, quiet))
+     def monitor_submissions(self, callback=None, form=None, cik=None, ticker=None, start_date=None, poll_interval=1000, quiet=True):
+         """
+         Start the monitoring process.
+
+         Parameters:
+             callback (callable, optional): Function to call when new submissions are found
+             form (str or list, optional): Form type(s) to monitor
+             cik (str or list, optional): CIK(s) to monitor
+             ticker (str, optional): Ticker symbol to monitor
+             start_date (str, optional): Start date in YYYY-MM-DD format
+             poll_interval (int, optional): Polling interval in milliseconds
+             quiet (bool, optional): Suppress verbose output
+         """
+         asyncio.run(self._monitor(callback, form, cik, ticker, start_date, poll_interval, quiet))
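
Putting the new start_date parameter to use; a minimal sketch in which the callback and its payload shape are assumptions, since the diff does not show what the callback receives:

from datamule.monitor import Monitor

def on_new(submissions):  # assumed callback shape: called with the new submissions
    print(f"got {len(submissions)} new submissions")

monitor = Monitor()
monitor.monitor_submissions(
    callback=on_new,
    form="8-K",
    start_date="2024-01-02",  # backfill from this date, then roll forward day by day
    poll_interval=1000,
    quiet=False,
)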
datamule-1.0.0.dist-info/METADATA → datamule-1.0.3.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: datamule
- Version: 1.0.0
+ Version: 1.0.3
  Summary: Making it easier to use SEC filings.
  Home-page: https://github.com/john-friedman/datamule-python
  Author: John Friedman
datamule-1.0.0.dist-info/RECORD → datamule-1.0.3.dist-info/RECORD RENAMED
@@ -1,12 +1,15 @@
  datamule/__init__.py,sha256=IDVK3i5i5DxLlQJ_71aYkloGNi528JOUx8hU6bDzLXM,1255
- datamule/book.py,sha256=hzp5Ae_PfTg3c_h_LdRQOc9fU6OifapKtV0bU-SyIOw,775
  datamule/config.py,sha256=Y--CVv7JcgrjJkMOSLrvm2S8B9ost6RMSkGviP-MKtg,883
- datamule/document.py,sha256=P1UeF-GLta08T7Ur77IFOp7vJBIYcQPbz3gfuWJ3fi0,10851
+ datamule/document.py,sha256=-XT3nkRb1oCOken7n3AUCI8itQNfuKVN86cuD3rys3E,10912
  datamule/helper.py,sha256=tr3AQWus9dHNZFKpLSglWjcb8zmm5qDXjOWACMhvMxQ,4594
- datamule/monitor.py,sha256=AfhGqC_GFTYWemRKgYE85V7rIGMN_pbcpxW6kORQtpw,9273
+ datamule/monitor.py,sha256=NgzOB_RCLAZDrowYagPWIM4PGNngd1lA3K9Qplkk3Ys,11325
  datamule/packageupdater.py,sha256=vEGqlTj6FudIeVHBVJltPh2eBDEqMG9HYmnyrRVKeSU,9595
  datamule/portfolio.py,sha256=U_QRNk_CbMmi3nJ0VBIwc9SVEGq6kA8LCZHBj9nOGXs,4032
  datamule/submission.py,sha256=uioIYJbsoe-87nRPyzlo-LZ8Hp7HG7A4KPGSnw86PKY,2790
+ datamule/book/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ datamule/book/book.py,sha256=ns7wVk_gLX5KhAghnSTxKcWIB8bKg7pwsM3es5GfCyM,1422
+ datamule/book/eftsquery.py,sha256=MW-rMb3L-S1sgAR_Y-Ca3zGa1W9hclTzH52twUK87oM,4596
+ datamule/book/xbrl_retriever.py,sha256=qLzrskqrQ8guEMFudlUCWFvRqAgSthhql5FXh_E3eCo,2910
  datamule/data/company_former_names.csv,sha256=HE9cAv-_QKFX6jT-_-D0rHmaDyQuAzL4MJwank5O1U8,706380
  datamule/data/company_metadata.csv,sha256=yPovrCVjYwLWTU_hBUFJymp8iNO0NBYuq_QwOkRLoN8,3068599
  datamule/data/company_tickers.csv,sha256=GW6lOP54RiGJCx-d9N5jEBy7tGVgU3zI-5xHJXrZfSI,400363
@@ -14,7 +17,7 @@ datamule/data/sec-glossary.csv,sha256=-cN7GjiadLw5C1sv4zSeCnfeZZDYeSgJl-0ydarMAo
  datamule/data/xbrl_descriptions.csv,sha256=SQ9wUURNqG424rnTiZtopsxV2q-PvU4NMj52LqgDsvg,2621524
  datamule/downloader/downloader.py,sha256=vnMsw0oWqRa84scu6ZcywxbJxsIn38vLV0tybakx3jQ,15217
  datamule/downloader/premiumdownloader.py,sha256=YhGFwkYqjLkdc5ex2YKM-L7nBAPm5MMCdTwVVP0JO78,14314
- datamule/mapping_dicts/txt_mapping_dicts.py,sha256=Eh6qYhseuKjjnxGh0A5blHr7mbq9CigFn6Zv9xcG2zU,5783
+ datamule/mapping_dicts/txt_mapping_dicts.py,sha256=DQPrGYbAPQxomRUtt4iiMGrwuF7BHc_LeFBQuYBzU9o,6311
  datamule/mapping_dicts/xml_mapping_dicts.py,sha256=Z22yDVwKYonUfM5foQP00dVDE8EHhhMKp0CLqVKV5OI,438
  datamule/mulebot/__init__.py,sha256=YvZXV6xQ0iP-oGD8rloufjdwJL6D46P3NNr0CY9PQCA,29
  datamule/mulebot/helper.py,sha256=olztOwltfELZ-IERM2bRNLBavD04kfB6ueWTisJAleA,1080
@@ -34,7 +37,7 @@ datamule/mulebot/mulebot_server/static/scripts/suggestions.js,sha256=TCyz8OYuXeI
  datamule/mulebot/mulebot_server/static/scripts/tableArtifacts.js,sha256=UtkUpLvELNI4Ibpb7VstgVA9Tk-8jbkxXhmXsgufFa4,4437
  datamule/mulebot/mulebot_server/static/scripts/utils.js,sha256=oGPMtyT9dvuqHqrfZj33t4vLZiF8UJrMXB1hpPXRNu4,1255
  datamule/mulebot/mulebot_server/templates/chat-minimalist.html,sha256=MsTbgpnLD0JCQiKKP3XeeNJRNsRqKsRa1j_XXW7nBKw,6975
- datamule-1.0.0.dist-info/METADATA,sha256=EMOJtZUCrYTNu7YOrhfZmzapdumfM0TXsnRhDBs0x6A,732
- datamule-1.0.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- datamule-1.0.0.dist-info/top_level.txt,sha256=iOfgmtSMFVyr7JGl_bYSTDry79JbmsG4p8zKq89ktKk,9
- datamule-1.0.0.dist-info/RECORD,,
+ datamule-1.0.3.dist-info/METADATA,sha256=8PZAcyMcoQTNaV21b9N09t8cd4Uw0Kxm6aImKXlSsCo,732
+ datamule-1.0.3.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ datamule-1.0.3.dist-info/top_level.txt,sha256=iOfgmtSMFVyr7JGl_bYSTDry79JbmsG4p8zKq89ktKk,9
+ datamule-1.0.3.dist-info/RECORD,,