datamule-1.1.8-py3-none-any.whl → datamule-1.2.0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, exactly as they appear in their public registry. It is provided for informational purposes only.
datamule/document.py CHANGED
@@ -129,85 +129,123 @@ class Document:
         json.dump(self.data, f, indent=2)
 
     def to_tabular(self, accession_number=None):
+        """
+        Convert the document to a tabular format suitable for CSV output.
+
+        Args:
+            accession_number: Optional accession number to include in the output
+
+        Returns:
+            list: List of dictionaries, each representing a row in the tabular output
+        """
         self.parse()
-
-        if self.type == "INFORMATION TABLE":
-            info_table = self.data['informationTable']['infoTable']
-            if isinstance(info_table, dict):
-                info_table = [info_table]
-
-            flattened = self._flatten_dict(info_table)
-
-            # Original field names
-            original_columns = [
-                "nameOfIssuer", "titleOfClass", "cusip", "value",
-                "shrsOrPrnAmt_sshPrnamt", "shrsOrPrnAmt_sshPrnamtType",
-                "investmentDiscretion", "votingAuthority_Sole",
-                "votingAuthority_Shared", "votingAuthority_None",
-                "reportingOwnerCIK", "putCall", "otherManager", 'figi'
-            ]
-
-            # Define mapping from original to camelCase field names
-            field_mapping = {
-                "shrsOrPrnAmt_sshPrnamt": "sshPrnamt",
-                "shrsOrPrnAmt_sshPrnamtType": "sshPrnamtType",
-                "votingAuthority_Sole": "votingAuthoritySole",
-                "votingAuthority_Shared": "votingAuthorityShared",
-                "votingAuthority_None": "votingAuthorityNone"
-            }
+
+        # Common function to normalize and process dictionaries
+        def process_records(records, mapping_dict, is_derivative=None):
+            """
+            Process records into a standardized tabular format
+
+            Args:
+                records: List or single dictionary of records to process
+                mapping_dict: Dictionary mapping source keys to target keys
+                is_derivative: Boolean flag for derivative securities (or None if not applicable)
 
-            # Create the new expected columns list with mapped field names
-            expected_columns = []
-            for column in original_columns:
-                if column in field_mapping:
-                    expected_columns.append(field_mapping[column])
-                else:
-                    expected_columns.append(column)
+            Returns:
+                list: Processed records in tabular format
+            """
+            # Convert single dict to list for uniform processing
+            if isinstance(records, dict):
+                records = [records]
 
-            # Process each item in the flattened data
+            # Flatten nested dictionaries
+            flattened = self._flatten_dict(records)
+
+            # Process each record
+            result = []
             for item in flattened:
-                # Remove newlines from items
+                # Normalize whitespace in all string values
                 for key in item:
                     if isinstance(item[key], str):
                         item[key] = re.sub(r'\s+', ' ', item[key])
+
+                # Map keys according to the mapping dictionary
+                mapped_item = {}
+                for old_key, value in item.items():
+                    target_key = mapping_dict.get(old_key, old_key)
+                    mapped_item[target_key] = value
 
-                new_item = {}
-                for key, value in item.items():
-                    # Apply the mapping if the key is in our mapping dictionary
-                    if key in field_mapping:
-                        new_item[field_mapping[key]] = value
-                    else:
-                        new_item[key] = value
-
-                # Update the original item with the new keys
-                item.clear()
-                item.update(new_item)
+                # Set derivative flags if applicable
+                if is_derivative is not None:
+                    mapped_item["isDerivative"] = 1 if is_derivative else 0
+                    mapped_item["isNonDerivative"] = 0 if is_derivative else 1
 
                 # Ensure all expected columns exist
-                for column in expected_columns:
-                    if column not in item:
-                        item[column] = None
-
-                item['accession'] = accession_number
-
-            # Add this block to reorder the items to match the expected order
-            ordered_columns = ["nameOfIssuer", "titleOfClass", "cusip", "value", "sshPrnamt", "sshPrnamtType",
-                               "investmentDiscretion", "votingAuthoritySole", "votingAuthorityShared", "votingAuthorityNone",
-                               "reportingOwnerCIK", "putCall", "otherManager", "figi"]
-            if accession_number is not None:
-                ordered_columns.append("accession")
+                output_columns = list(dict.fromkeys(mapping_dict.values()))
+                ordered_item = {column: mapped_item.get(column, None) for column in output_columns}
 
-            ordered_data = []
-            for item in flattened:
-                ordered_item = {column: item.get(column, None) for column in ordered_columns}
-                ordered_data.append(ordered_item)
+                # Add accession number if provided
+                if accession_number is not None:
+                    ordered_item['accession'] = accession_number
+
+                result.append(ordered_item)
 
-            return ordered_data
+            return result
 
+        # Handle different document types
+        if self.type == "INFORMATION TABLE":
+            # Information Table mapping dictionary
+            info_table_mapping = {
+                "nameOfIssuer": "nameOfIssuer",
+                "titleOfClass": "titleOfClass",
+                "cusip": "cusip",
+                "value": "value",
+                "shrsOrPrnAmt_sshPrnamt": "sshPrnamt",
+                "shrsOrPrnAmt_sshPrnamtType": "sshPrnamtType",
+                "investmentDiscretion": "investmentDiscretion",
+                "votingAuthority_Sole": "votingAuthoritySole",
+                "votingAuthority_Shared": "votingAuthorityShared",
+                "votingAuthority_None": "votingAuthorityNone",
+                "reportingOwnerCIK": "reportingOwnerCIK",
+                "putCall": "putCall",
+                "otherManager": "otherManager",
+                "figi": "figi"
+            }
+
+            # Process the information table
+            info_table = self.data['informationTable']['infoTable']
+            return process_records(info_table, info_table_mapping)
+
+        elif self.type == "PROXY VOTING RECORD":
+            # Proxy voting record mapping dictionary
+            proxy_mapping = {
+                'meetingDate': 'meetingDate',
+                'isin': 'isin',
+                'cusip': 'cusip',
+                'issuerName': 'issuerName',
+                'voteDescription': 'voteDescription',
+                'sharesOnLoan': 'sharesOnLoan',
+                'vote_voteRecord_sharesVoted': 'sharesVoted',
+                'voteCategories_voteCategory_categoryType': 'voteCategory',
+                'vote_voteRecord': 'voteRecord',
+                'sharesVoted': 'sharesVoted',
+                'voteSource': 'voteSource',
+                'vote_voteRecord_howVoted': 'howVoted',
+                'figi': 'figi',
+                'vote_voteRecord_managementRecommendation': 'managementRecommendation'
+            }
+
+            # Process proxy voting records if they exist
+            all_results = []
+            if 'proxyVoteTable' in self.data and 'proxyTable' in self.data['proxyVoteTable'] and self.data['proxyVoteTable']['proxyTable'] is not None:
+                proxy_records = self.data['proxyVoteTable']['proxyTable']
+                proxy_results = process_records(proxy_records, proxy_mapping)
+                all_results.extend(proxy_results)
+
+            return all_results
+
         elif self.type in ["3", "4", "5"]:
-            # Master mapping dictionary - includes all possible fields
-            # The order of this dictionary will determine the output column order
-            master_mapping_dict = {
+            # Forms 3, 4, 5 mapping dictionary
+            form_345_mapping = {
                 # Flag fields (will be set programmatically)
                 "isDerivative": "isDerivative",
                 "isNonDerivative": "isNonDerivative",
@@ -270,45 +308,6 @@ class Document:
                 "underlyingSecurity_underlyingSecurityValue_footnote": "underlyingSecurityValueFootnote"
             }
 
-            # Get the unique target column names in order from the mapping dictionary
-            output_columns = []
-            for _, target_key in master_mapping_dict.items():
-                if target_key not in output_columns:
-                    output_columns.append(target_key)
-
-            # Process function that handles any table type
-            def process_table(table_data, is_derivative):
-                if isinstance(table_data, dict):
-                    table_data = [table_data]
-
-                flattened = self._flatten_dict(table_data)
-
-                # Apply mapping to the flattened data and ensure all expected columns are present
-                mapped_data = []
-                for item in flattened:
-                    mapped_item = {}
-                    # First, apply the mapping
-                    for old_key, value in item.items():
-                        target_key = master_mapping_dict.get(old_key, old_key)
-                        mapped_item[target_key] = value
-
-                    # Set the derivative/non-derivative flags
-                    mapped_item["isDerivative"] = 1 if is_derivative else 0
-                    mapped_item["isNonDerivative"] = 0 if is_derivative else 1
-
-                    # Create a new ordered dictionary with all columns
-                    ordered_item = {}
-                    for column in output_columns:
-                        ordered_item[column] = mapped_item.get(column, None)
-
-                    # Add accession_number if available
-                    if accession_number is not None:
-                        ordered_item['accession_number'] = accession_number
-
-                    mapped_data.append(ordered_item)
-
-                return mapped_data
-
             # Results container
             all_results = []
 
@@ -316,39 +315,33 @@ class Document:
             if 'nonDerivativeTable' in self.data['ownershipDocument'] and self.data['ownershipDocument']['nonDerivativeTable'] is not None:
                 if 'nonDerivativeTransaction' in self.data['ownershipDocument']['nonDerivativeTable']:
                     non_deriv_trans = self.data['ownershipDocument']['nonDerivativeTable']['nonDerivativeTransaction']
-                    non_deriv_results = process_table(non_deriv_trans, is_derivative=False)
+                    non_deriv_results = process_records(non_deriv_trans, form_345_mapping, is_derivative=False)
                     all_results.extend(non_deriv_results)
 
                 # Process non-derivative holdings (for Form 3)
                 if 'nonDerivativeHolding' in self.data['ownershipDocument']['nonDerivativeTable']:
                     non_deriv_hold = self.data['ownershipDocument']['nonDerivativeTable']['nonDerivativeHolding']
-                    non_deriv_hold_results = process_table(non_deriv_hold, is_derivative=False)
+                    non_deriv_hold_results = process_records(non_deriv_hold, form_345_mapping, is_derivative=False)
                     all_results.extend(non_deriv_hold_results)
 
             # Process derivative transactions if they exist
            if 'derivativeTable' in self.data['ownershipDocument'] and self.data['ownershipDocument']['derivativeTable'] is not None:
                 if 'derivativeTransaction' in self.data['ownershipDocument']['derivativeTable']:
                     deriv_trans = self.data['ownershipDocument']['derivativeTable']['derivativeTransaction']
-                    deriv_results = process_table(deriv_trans, is_derivative=True)
+                    deriv_results = process_records(deriv_trans, form_345_mapping, is_derivative=True)
                     all_results.extend(deriv_results)
 
                 # Process derivative holdings (for Form 3)
                 if 'derivativeHolding' in self.data['ownershipDocument']['derivativeTable']:
                     deriv_hold = self.data['ownershipDocument']['derivativeTable']['derivativeHolding']
-                    deriv_hold_results = process_table(deriv_hold, is_derivative=True)
+                    deriv_hold_results = process_records(deriv_hold, form_345_mapping, is_derivative=True)
                     all_results.extend(deriv_hold_results)
-
-            # check if any rows not in the mapping dict, raise error if so
-            for item in all_results:
-                for key in item.keys():
-                    if key not in master_mapping_dict.values() and key != 'accession_number':
-                        raise ValueError(f"Key '{key}' not found in mapping dictionary")
-
 
             return all_results
-        else:
-            raise ValueError("sorry, rejigging conversion to tabular format")
 
+        else:
+            raise ValueError(f"Document type '{self.type}' is not supported for tabular conversion")
+
     def write_csv(self, output_filename, accession_number=None):
 
         data = self.to_tabular(accession_number)
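
The reworked to_tabular dispatches on self.type and routes every supported form through the shared process_records closure, so each form only supplies a mapping dictionary. A minimal usage sketch of the new method, assuming doc is an already-downloaded datamule Document for a Form 4 (the accession number is a placeholder):

    # Hypothetical sketch: `doc` and the accession number are placeholders.
    rows = doc.to_tabular(accession_number="0000000000-25-000000")
    print(rows[0]["isDerivative"], rows[0]["accession"])

    # write_csv calls to_tabular internally and writes the rows to disk.
    doc.write_csv("form4.csv", accession_number="0000000000-25-000000")
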
datamule/seclibrary/bq.py CHANGED
@@ -3,9 +3,6 @@ import requests
 import json
 
 def get_information_table(
-    # Required parameters
-    table_type="INFORMATION_TABLE",
-
     # Optional filtering parameters
     columns=None,
     name_of_issuer=None,
@@ -37,8 +34,6 @@ def get_information_table(
 
     Parameters:
     -----------
-    table_type : str
-        The table to query (default is "INFORMATION_TABLE")
     columns : List[str], optional
         Specific columns to return. If None, all columns are returned.
 
@@ -76,7 +71,7 @@ def get_information_table(
        raise ValueError("No API key found. Please set DATAMULE_API_KEY environment variable or provide api_key parameter")
 
     # 2. Build query parameters
-    params = {'table_type': table_type}
+    params = {'table_type': 'INFORMATION_TABLE'}
 
     # Add columns parameter if provided
     if columns:
@@ -138,7 +133,198 @@ def get_information_table(
             # Exact match
             params[api_param_name] = value
 
-    # 3. Make the API request
+    # Call common function to make API request
+    return _make_api_request(params, api_key, print_cost, verbose)
+
+def get_345(
+    # Optional filtering parameters
+    columns=None,
+    is_derivative=None,
+    is_non_derivative=None,
+    security_title=None,
+    transaction_date=None,
+    document_type=None,
+    transaction_code=None,
+    equity_swap_involved=None,
+    transaction_timeliness=None,
+    transaction_shares=None,
+    transaction_price_per_share=None,
+    shares_owned_following_transaction=None,
+    ownership_type=None,
+    deemed_execution_date=None,
+    conversion_or_exercise_price=None,
+    exercise_date=None,
+    expiration_date=None,
+    underlying_security_title=None,
+    underlying_security_shares=None,
+    underlying_security_value=None,
+    accession=None,
+    reporting_owner_cik=None,
+    issuer_cik=None,
+    filing_date=None,
+
+    # API key handling
+    api_key=None,
+
+    # Additional options
+    print_cost=True,
+    verbose=False
+):
+    """
+    Query the SEC BigQuery API for Form 345 insider transaction data.
+
+    Parameters:
+    -----------
+    columns : List[str], optional
+        Specific columns to return. If None, all columns are returned.
+
+    # Filter parameters
+    is_derivative, security_title, etc. : Various filters that can be:
+        - str/bool: Exact match
+        - List[str]: Match any in list
+        - tuple: (min, max) range for numeric/date fields
+
+    reporting_owner_cik : str or List[str]
+        CIK(s) of the reporting insider(s). This is matched against an array in BigQuery.
+        Any match within the array will return the record.
+
+    issuer_cik : str or List[str]
+        CIK(s) of the company/companies
+
+    api_key : str, optional
+        SEC BigQuery API key. If None, looks for DATAMULE_API_KEY env variable.
+    print_cost : bool
+        Whether to print the query cost information
+    verbose : bool
+        Whether to print additional information about the query
+
+    Returns:
+    --------
+    List[Dict]
+        A list of dictionaries containing the query results
+
+    Raises:
+    -------
+    ValueError
+        If API key is missing or invalid
+    Exception
+        For API errors or other issues
+    """
+
+    # 1. Handle API key
+    if api_key is None:
+        api_key = os.getenv('DATAMULE_API_KEY')
+
+    if not api_key:
+        raise ValueError("No API key found. Please set DATAMULE_API_KEY environment variable or provide api_key parameter")
+
+    # 2. Build query parameters
+    params = {'table_type': 'FORM_345_TABLE'}
+
+    # Add columns parameter if provided
+    if columns:
+        if isinstance(columns, list):
+            params['columns'] = ','.join(columns)
+        else:
+            params['columns'] = columns
+
+    # Map Python parameter names to API parameter names
+    param_mapping = {
+        'is_derivative': 'isDerivative',
+        'is_non_derivative': 'isNonDerivative',
+        'security_title': 'securityTitle',
+        'transaction_date': 'transactionDate',
+        'document_type': 'documentType',
+        'transaction_code': 'transactionCode',
+        'equity_swap_involved': 'equitySwapInvolved',
+        'transaction_timeliness': 'transactionTimeliness',
+        'transaction_shares': 'transactionShares',
+        'transaction_price_per_share': 'transactionPricePerShare',
+        'shares_owned_following_transaction': 'sharesOwnedFollowingTransaction',
+        'ownership_type': 'ownershipType',
+        'deemed_execution_date': 'deemedExecutionDate',
+        'conversion_or_exercise_price': 'conversionOrExercisePrice',
+        'exercise_date': 'exerciseDate',
+        'expiration_date': 'expirationDate',
+        'underlying_security_title': 'underlyingSecurityTitle',
+        'underlying_security_shares': 'underlyingSecurityShares',
+        'underlying_security_value': 'underlyingSecurityValue',
+        'accession': 'accession',
+        'reporting_owner_cik': 'reportingOwnerCIK',
+        'issuer_cik': 'issuerCIK',
+        'filing_date': 'filingDate'
+    }
+
+    # Process all possible filter parameters
+    date_params = ['transaction_date', 'filing_date', 'deemed_execution_date', 'exercise_date', 'expiration_date']
+    boolean_params = ['is_derivative', 'is_non_derivative']
+
+    for param_name, api_param_name in param_mapping.items():
+        value = locals()[param_name]
+        if value is not None:
+            # Handle different filter types
+            if isinstance(value, list):
+                # List filter
+                params[api_param_name] = f"[{','.join(str(v) for v in value)}]"
+            elif isinstance(value, tuple):
+                # Range filter
+                if len(value) == 2:
+                    min_val, max_val = value
+                    # Handle date range specially
+                    if param_name in date_params:
+                        # Dates need to be in quotes within the parentheses
+                        if min_val is None:
+                            min_val = ''
+                        else:
+                            min_val = f"'{min_val}'"
+
+                        if max_val is None:
+                            max_val = ''
+                        else:
+                            max_val = f"'{max_val}'"
+
+                    range_str = f"({min_val},{max_val})"
+                    params[api_param_name] = range_str
+                else:
+                    raise ValueError(f"Range filter for {param_name} must be a tuple of (min, max)")
+            elif param_name in boolean_params:
+                # Boolean values
+                params[api_param_name] = str(value).lower()
+            else:
+                # Exact match
+                params[api_param_name] = value
+
+    # Call common function to make API request
+    return _make_api_request(params, api_key, print_cost, verbose)
+
+def _make_api_request(params, api_key, print_cost=True, verbose=False):
+    """
+    Common function to make API requests to the SEC BigQuery API.
+
+    Parameters:
+    -----------
+    params : dict
+        Query parameters
+    api_key : str
+        API key for authentication
+    print_cost : bool
+        Whether to print cost information
+    verbose : bool
+        Whether to print debugging information
+
+    Returns:
+    --------
+    List[Dict]
+        Data returned from the API
+
+    Raises:
+    -------
+    ValueError
+        If API key is invalid
+    Exception
+        For other API errors
+    """
+    # Make the API request
     BASE_URL = "https://sec-bq.jgfriedman99.workers.dev/"
 
     headers = {
@@ -166,7 +352,15 @@ def get_information_table(
         # Extract metadata for cost reporting
         metadata = result.get('metadata', {})
 
-        # 5. Print cost information if requested
+        # Process the data to handle array fields
+        data = result.get('data', [])
+        for row in data:
+            # Check if reportingOwnerCIK is an array that needs processing
+            if 'reportingOwnerCIK' in row and isinstance(row['reportingOwnerCIK'], list):
+                # Transform from [{'v': 'value1'}, {'v': 'value2'}] to comma-separated string
+                row['reportingOwnerCIK'] = ','.join([item['v'] for item in row['reportingOwnerCIK'] if 'v' in item])
+
+        # Print cost information if requested
         if print_cost and 'billing' in metadata:
             billing = metadata['billing']
             query_info = metadata.get('query_info', {})
@@ -181,11 +375,154 @@ def get_information_table(
             print(f"Cache Hit: {query_info.get('cache_hit', False)}")
             print("==============================\n")
 
-        # 6. Return data
-        return result.get('data', [])
+        # Return data
+        return data
 
     except requests.exceptions.RequestException as e:
-        if response.status_code == 401:
+        if hasattr(e, 'response') and e.response is not None and e.response.status_code == 401:
             raise ValueError("Authentication failed: Invalid API key")
         else:
-            raise Exception(f"Request failed: {str(e)}")
+            raise Exception(f"Request failed: {str(e)}")
+
+def get_proxy_voting_record(
+    # Optional filtering parameters
+    columns=None,
+    meeting_date=None,
+    isin=None,
+    cusip=None,
+    issuer_name=None,
+    vote_description=None,
+    shares_on_loan=None,
+    shares_voted=None,
+    vote_category=None,
+    vote_record=None,
+    vote_source=None,
+    how_voted=None,
+    figi=None,
+    management_recommendation=None,
+    accession=None,
+    reporting_owner_cik=None,
+    filing_date=None,
+
+    # API key handling
+    api_key=None,
+
+    # Additional options
+    print_cost=True,
+    verbose=False
+):
+    """
+    Query the SEC BigQuery API for NPX proxy voting record data.
+
+    Parameters:
+    -----------
+    columns : List[str], optional
+        Specific columns to return. If None, all columns are returned.
+
+    # Filter parameters
+    meeting_date, isin, cusip, etc. : Various filters that can be:
+        - str: Exact match
+        - List[str]: Match any in list
+        - tuple: (min, max) range for numeric/date fields
+
+    shares_on_loan, shares_voted : int/float or tuple
+        Numeric values or (min, max) range
+
+    filing_date : str or tuple
+        Date string in 'YYYY-MM-DD' format or (start_date, end_date) tuple
+
+    api_key : str, optional
+        SEC BigQuery API key. If None, looks for DATAMULE_API_KEY env variable.
+    print_cost : bool
+        Whether to print the query cost information
+    verbose : bool
+        Whether to print additional information about the query
+
+    Returns:
+    --------
+    List[Dict]
+        A list of dictionaries containing the query results
+
+    Raises:
+    -------
+    ValueError
+        If API key is missing or invalid
+    Exception
+        For API errors or other issues
+    """
+
+    # 1. Handle API key
+    if api_key is None:
+        api_key = os.getenv('DATAMULE_API_KEY')
+
+    if not api_key:
+        raise ValueError("No API key found. Please set DATAMULE_API_KEY environment variable or provide api_key parameter")
+
+    # 2. Build query parameters
+    params = {'table_type': 'NPX_VOTING_TABLE'}
+
+    # Add columns parameter if provided
+    if columns:
+        if isinstance(columns, list):
+            params['columns'] = ','.join(columns)
+        else:
+            params['columns'] = columns
+
+    # Map Python parameter names to API parameter names
+    param_mapping = {
+        'meeting_date': 'meetingDate',
+        'isin': 'isin',
+        'cusip': 'cusip',
+        'issuer_name': 'issuerName',
+        'vote_description': 'voteDescription',
+        'shares_on_loan': 'sharesOnLoan',
+        'shares_voted': 'sharesVoted',
+        'vote_category': 'voteCategory',
+        'vote_record': 'voteRecord',
+        'vote_source': 'voteSource',
+        'how_voted': 'howVoted',
+        'figi': 'figi',
+        'management_recommendation': 'managementRecommendation',
+        'accession': 'accession',
+        'reporting_owner_cik': 'reportingOwnerCIK',
+        'filing_date': 'filingDate'
+    }
+
+    # Process all possible filter parameters
+    date_params = ['meeting_date', 'filing_date']
+    numeric_params = ['shares_on_loan', 'shares_voted']
+
+    for param_name, api_param_name in param_mapping.items():
+        value = locals()[param_name]
+        if value is not None:
+            # Handle different filter types
+            if isinstance(value, list):
+                # List filter
+                params[api_param_name] = f"[{','.join(str(v) for v in value)}]"
+            elif isinstance(value, tuple):
+                # Range filter
+                if len(value) == 2:
+                    min_val, max_val = value
+                    # Handle date range specially
+                    if param_name in date_params:
+                        # Dates need to be in quotes within the parentheses
+                        if min_val is None:
+                            min_val = ''
+                        else:
+                            min_val = f"'{min_val}'"
+
+                        if max_val is None:
+                            max_val = ''
+                        else:
+                            max_val = f"'{max_val}'"
+
+                    range_str = f"({min_val},{max_val})"
+                    params[api_param_name] = range_str
+                else:
+                    raise ValueError(f"Range filter for {param_name} must be a tuple of (min, max)")
+            else:
+                # Exact match
+                params[api_param_name] = value
+
+    # Call common function to make API request
+    return _make_api_request(params, api_key, print_cost, verbose)
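
The new query functions share get_information_table's filter conventions: a list becomes a match-any filter, a 2-tuple becomes a (min, max) range, and date bounds are quoted inside the range parentheses before being sent to the API. A minimal usage sketch, assuming DATAMULE_API_KEY is set; the CIK, dates, and CUSIP are placeholders:

    from datamule.seclibrary.bq import get_345, get_proxy_voting_record

    # Insider transactions for one issuer, filed during January 2025.
    rows = get_345(
        issuer_cik="320193",                       # placeholder issuer CIK
        transaction_code=["P", "S"],               # list -> match any
        filing_date=("2025-01-01", "2025-01-31"),  # tuple -> date range
    )

    # Proxy votes cast against management for one security.
    votes = get_proxy_voting_record(cusip="037833100", how_voted="AGAINST")
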
datamule/sheet.py CHANGED
@@ -3,7 +3,7 @@ import csv
 import os
 from .helper import _process_cik_and_metadata_filters, load_package_dataset
 from .sec.xbrl.downloadcompanyfacts import download_company_facts
-from .seclibrary.bq import get_information_table
+from .seclibrary.bq import get_information_table, get_345, get_proxy_voting_record
 
 class Sheet:
     def __init__(self, path):
@@ -31,9 +31,6 @@ class Sheet:
 
     def get_information_table(
         self,
-        # Required parameters
-        table_type="INFORMATION_TABLE",
-
         # Optional filtering parameters
         columns=None,
         name_of_issuer=None,
@@ -65,8 +62,6 @@ class Sheet:
 
         Parameters:
         -----------
-        table_type : str
-            The table to query (default is "INFORMATION_TABLE")
         columns : List[str], optional
             Specific columns to return. If None, all columns are returned.
 
@@ -97,7 +92,6 @@ class Sheet:
         """
 
         return get_information_table(
-            table_type=table_type,
             columns=columns,
             name_of_issuer=name_of_issuer,
             title_of_class=title_of_class,
@@ -124,12 +118,164 @@ class Sheet:
             verbose=verbose
         )
 
+    def get_345(
+        self,
+        # Optional filtering parameters
+        columns=None,
+        is_derivative=None,
+        is_non_derivative=None,
+        security_title=None,
+        transaction_date=None,
+        document_type=None,
+        transaction_code=None,
+        equity_swap_involved=None,
+        transaction_timeliness=None,
+        transaction_shares=None,
+        transaction_price_per_share=None,
+        shares_owned_following_transaction=None,
+        ownership_type=None,
+        deemed_execution_date=None,
+        conversion_or_exercise_price=None,
+        exercise_date=None,
+        expiration_date=None,
+        underlying_security_title=None,
+        underlying_security_shares=None,
+        underlying_security_value=None,
+        accession=None,
+        reporting_owner_cik=None,
+        issuer_cik=None,
+        filing_date=None,
+
+        # API key handling
+        api_key=None,
+
+        # Additional options
+        print_cost=True,
+        verbose=False
+    ):
+        """
+        Query the SEC BigQuery API for Form 345 insider transaction data.
+
+        Parameters:
+        -----------
+        columns : List[str], optional
+            Specific columns to return. If None, all columns are returned.
+
+        # Filter parameters
+        is_derivative, security_title, etc. : Various filters that can be:
+            - str/bool: Exact match
+            - List[str]: Match any in list
+            - tuple: (min, max) range for numeric/date fields
+
+        reporting_owner_cik : str or List[str]
+            CIK(s) of the reporting insider(s). This is matched against an array in BigQuery.
+            Any match within the array will return the record.
+
+        issuer_cik : str or List[str]
+            CIK(s) of the company/companies
+
+        api_key : str, optional
+            SEC BigQuery API key. If None, looks for DATAMULE_API_KEY env variable.
+        print_cost : bool
+            Whether to print the query cost information
+        verbose : bool
+            Whether to print additional information about the query
+
+        Returns:
+        --------
+        List[Dict]
+            A list of dictionaries containing the query results
+
+        Raises:
+        -------
+        ValueError
+            If API key is missing or invalid
+        Exception
+            For API errors or other issues
+        """
+
+        return get_345(
+            columns=columns,
+            is_derivative=is_derivative,
+            is_non_derivative=is_non_derivative,
+            security_title=security_title,
+            transaction_date=transaction_date,
+            document_type=document_type,
+            transaction_code=transaction_code,
+            equity_swap_involved=equity_swap_involved,
+            transaction_timeliness=transaction_timeliness,
+            transaction_shares=transaction_shares,
+            transaction_price_per_share=transaction_price_per_share,
+            shares_owned_following_transaction=shares_owned_following_transaction,
+            ownership_type=ownership_type,
+            deemed_execution_date=deemed_execution_date,
+            conversion_or_exercise_price=conversion_or_exercise_price,
+            exercise_date=exercise_date,
+            expiration_date=expiration_date,
+            underlying_security_title=underlying_security_title,
+            underlying_security_shares=underlying_security_shares,
+            underlying_security_value=underlying_security_value,
+            accession=accession,
+            reporting_owner_cik=reporting_owner_cik,
+            issuer_cik=issuer_cik,
+            filing_date=filing_date,
+
+            # API key handling
+            api_key=api_key,
+
+            # Additional options
+            print_cost=print_cost,
+            verbose=verbose
+        )
+
+    def _download_to_csv(self, data, filepath, verbose=False):
+        """
+        Helper method to download data to a CSV file.
+
+        Parameters:
+        -----------
+        data : List[Dict]
+            The data to save
+        filepath : str or Path
+            Path where to save the CSV file. If relative, it will be relative to the Sheet's path.
+        verbose : bool
+            Whether to print additional information
+
+        Returns:
+        --------
+        List[Dict]
+            The input data (for method chaining)
+        """
+        # If no data returned, nothing to save
+        if not data:
+            if verbose:
+                print("No data returned from API. No file was created.")
+            return data
+
+        # Resolve filepath - if it's not absolute, make it relative to self.path
+        filepath_obj = Path(filepath)
+        if not filepath_obj.is_absolute():
+            filepath_obj = self.path / filepath_obj
+
+        # Create directory if it doesn't exist
+        os.makedirs(filepath_obj.parent, exist_ok=True)
+
+        # Get fieldnames from the first record
+        fieldnames = data[0].keys()
+
+        # Write to CSV
+        with open(filepath_obj, 'w', newline='') as csvfile:
+            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+            writer.writeheader()
+            writer.writerows(data)
+
+        if verbose:
+            print(f"Saved {len(data)} records to {filepath_obj}")
+
+
     def download_information_table(
         self,
         filepath,
-        # Required parameters
-        table_type="INFORMATION_TABLE",
-
         # Optional filtering parameters
         columns=None,
         name_of_issuer=None,
@@ -164,8 +310,6 @@ class Sheet:
         filepath : str
             Path where to save the CSV file. If relative, it will be relative to the Sheet's path.
 
-        table_type : str
-            The table to query (default is "INFORMATION_TABLE")
         columns : List[str], optional
             Specific columns to return. If None, all columns are returned.
 
@@ -196,7 +340,6 @@ class Sheet:
         """
         # Get the data from the API
         data = self.get_information_table(
-            table_type=table_type,
             columns=columns,
             name_of_issuer=name_of_issuer,
             title_of_class=title_of_class,
@@ -219,30 +362,311 @@ class Sheet:
             verbose=verbose
         )
 
-        # If no data returned, nothing to save
-        if not data:
-            if verbose:
-                print("No data returned from API. No file was created.")
-            return data
+        # Save to CSV using the helper method
+        return self._download_to_csv(data, filepath, verbose)
+
+    def download_345(
+        self,
+        filepath,
+        # Optional filtering parameters
+        columns=None,
+        is_derivative=None,
+        is_non_derivative=None,
+        security_title=None,
+        transaction_date=None,
+        document_type=None,
+        transaction_code=None,
+        equity_swap_involved=None,
+        transaction_timeliness=None,
+        transaction_shares=None,
+        transaction_price_per_share=None,
+        shares_owned_following_transaction=None,
+        ownership_type=None,
+        deemed_execution_date=None,
+        conversion_or_exercise_price=None,
+        exercise_date=None,
+        expiration_date=None,
+        underlying_security_title=None,
+        underlying_security_shares=None,
+        underlying_security_value=None,
+        accession=None,
+        reporting_owner_cik=None,
+        issuer_cik=None,
+        filing_date=None,
 
-        # Resolve filepath - if it's not absolute, make it relative to self.path
-        filepath_obj = Path(filepath)
-        if not filepath_obj.is_absolute():
-            filepath_obj = self.path / filepath_obj
+        # API key handling
+        api_key=None,
 
-        # Create directory if it doesn't exist
-        os.makedirs(filepath_obj.parent, exist_ok=True)
+        # Additional options
+        print_cost=True,
+        verbose=False
+    ):
+        """
+        Query the SEC BigQuery API for Form 345 insider transaction data and save to CSV.
 
-        # Get fieldnames from the first record
-        fieldnames = data[0].keys()
+        Parameters:
+        -----------
+        filepath : str
+            Path where to save the CSV file. If relative, it will be relative to the Sheet's path.
 
-        # Write to CSV
-        with open(filepath_obj, 'w', newline='') as csvfile:
-            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
-            writer.writeheader()
-            writer.writerows(data)
+        columns : List[str], optional
+            Specific columns to return. If None, all columns are returned.
+
+        # Filter parameters
+        is_derivative, security_title, etc. : Various filters that can be:
+            - str/bool: Exact match
+            - List[str]: Match any in list
+            - tuple: (min, max) range for numeric/date fields
 
-        if verbose:
-            print(f"Saved {len(data)} records to {filepath_obj}")
+        reporting_owner_cik : str or List[str]
+            CIK(s) of the reporting insider(s). This is matched against an array in BigQuery.
+            Any match within the array will return the record.
+
+        issuer_cik : str or List[str]
+            CIK(s) of the company/companies
+
+        api_key : str, optional
+            SEC BigQuery API key. If None, looks for DATAMULE_API_KEY env variable.
+        print_cost : bool
+            Whether to print the query cost information
+        verbose : bool
+            Whether to print additional information about the query
 
-        return data
+        Returns:
+        --------
+        List[Dict]
+            A list of dictionaries containing the query results
+
+        Raises:
+        -------
+        ValueError
+            If API key is missing or invalid
+        Exception
+            For API errors or other issues
+        """
+        # Get the data from the API
+        data = self.get_345(
+            columns=columns,
+            is_derivative=is_derivative,
+            is_non_derivative=is_non_derivative,
+            security_title=security_title,
+            transaction_date=transaction_date,
+            document_type=document_type,
+            transaction_code=transaction_code,
+            equity_swap_involved=equity_swap_involved,
+            transaction_timeliness=transaction_timeliness,
+            transaction_shares=transaction_shares,
+            transaction_price_per_share=transaction_price_per_share,
+            shares_owned_following_transaction=shares_owned_following_transaction,
+            ownership_type=ownership_type,
+            deemed_execution_date=deemed_execution_date,
+            conversion_or_exercise_price=conversion_or_exercise_price,
+            exercise_date=exercise_date,
+            expiration_date=expiration_date,
+            underlying_security_title=underlying_security_title,
+            underlying_security_shares=underlying_security_shares,
+            underlying_security_value=underlying_security_value,
+            accession=accession,
+            reporting_owner_cik=reporting_owner_cik,
+            issuer_cik=issuer_cik,
+            filing_date=filing_date,
+            api_key=api_key,
+            print_cost=print_cost,
+            verbose=verbose
+        )
+
+        # Save to CSV using the helper method
+        return self._download_to_csv(data, filepath, verbose)
+
+    def get_proxy_voting_record(
+        self,
+        # Optional filtering parameters
+        columns=None,
+        meeting_date=None,
+        isin=None,
+        cusip=None,
+        issuer_name=None,
+        vote_description=None,
+        shares_on_loan=None,
+        shares_voted=None,
+        vote_category=None,
+        vote_record=None,
+        vote_source=None,
+        how_voted=None,
+        figi=None,
+        management_recommendation=None,
+        accession=None,
+        reporting_owner_cik=None,
+        filing_date=None,
+
+        # API key handling
+        api_key=None,
+
+        # Additional options
+        print_cost=True,
+        verbose=False
+    ):
+        """
+        Query the SEC BigQuery API for NPX proxy voting record data.
+
+        Parameters:
+        -----------
+        columns : List[str], optional
+            Specific columns to return. If None, all columns are returned.
+
+        # Filter parameters
+        meeting_date, isin, cusip, etc. : Various filters that can be:
+            - str: Exact match
+            - List[str]: Match any in list
+            - tuple: (min, max) range for numeric/date fields
+
+        shares_on_loan, shares_voted : int/float or tuple
+            Numeric values or (min, max) range
+
+        filing_date : str or tuple
+            Date string in 'YYYY-MM-DD' format or (start_date, end_date) tuple
+
+        api_key : str, optional
+            SEC BigQuery API key. If None, looks for DATAMULE_API_KEY env variable.
+        print_cost : bool
+            Whether to print the query cost information
+        verbose : bool
+            Whether to print additional information about the query
+
+        Returns:
+        --------
+        List[Dict]
+            A list of dictionaries containing the query results
+
+        Raises:
+        -------
+        ValueError
+            If API key is missing or invalid
+        Exception
+            For API errors or other issues
+        """
+
+        return get_proxy_voting_record(
+            columns=columns,
+            meeting_date=meeting_date,
+            isin=isin,
+            cusip=cusip,
+            issuer_name=issuer_name,
+            vote_description=vote_description,
+            shares_on_loan=shares_on_loan,
+            shares_voted=shares_voted,
+            vote_category=vote_category,
+            vote_record=vote_record,
+            vote_source=vote_source,
+            how_voted=how_voted,
+            figi=figi,
+            management_recommendation=management_recommendation,
+            accession=accession,
+            reporting_owner_cik=reporting_owner_cik,
+            filing_date=filing_date,
+
+            # API key handling
+            api_key=api_key,
+
+            # Additional options
+            print_cost=print_cost,
+            verbose=verbose
+        )
+
+    def download_proxy_voting_record(
+        self,
+        filepath,
+        # Optional filtering parameters
+        columns=None,
+        meeting_date=None,
+        isin=None,
+        cusip=None,
+        issuer_name=None,
+        vote_description=None,
+        shares_on_loan=None,
+        shares_voted=None,
+        vote_category=None,
+        vote_record=None,
+        vote_source=None,
+        how_voted=None,
+        figi=None,
+        management_recommendation=None,
+        accession=None,
+        reporting_owner_cik=None,
+        filing_date=None,
+
+        # API key handling
+        api_key=None,
+
+        # Additional options
+        print_cost=True,
+        verbose=False
+    ):
+        """
+        Query the SEC BigQuery API for NPX proxy voting record data and save to CSV.
+
+        Parameters:
+        -----------
+        filepath : str
+            Path where to save the CSV file. If relative, it will be relative to the Sheet's path.
+
+        columns : List[str], optional
+            Specific columns to return. If None, all columns are returned.
+
+        # Filter parameters
+        meeting_date, isin, cusip, etc. : Various filters that can be:
+            - str: Exact match
+            - List[str]: Match any in list
+            - tuple: (min, max) range for numeric/date fields
+
+        shares_on_loan, shares_voted : int/float or tuple
+            Numeric values or (min, max) range
+
+        filing_date : str or tuple
+            Date string in 'YYYY-MM-DD' format or (start_date, end_date) tuple
+
+        api_key : str, optional
+            SEC BigQuery API key. If None, looks for DATAMULE_API_KEY env variable.
+        print_cost : bool
+            Whether to print the query cost information
+        verbose : bool
+            Whether to print additional information about the query
+
+        Returns:
+        --------
+        List[Dict]
+            A list of dictionaries containing the query results
+
+        Raises:
+        -------
+        ValueError
+            If API key is missing or invalid
+        Exception
+            For API errors or other issues
+        """
+        # Get the data from the API
+        data = self.get_proxy_voting_record(
+            columns=columns,
+            meeting_date=meeting_date,
+            isin=isin,
+            cusip=cusip,
+            issuer_name=issuer_name,
+            vote_description=vote_description,
+            shares_on_loan=shares_on_loan,
+            shares_voted=shares_voted,
+            vote_category=vote_category,
+            vote_record=vote_record,
+            vote_source=vote_source,
+            how_voted=how_voted,
+            figi=figi,
+            management_recommendation=management_recommendation,
+            accession=accession,
+            reporting_owner_cik=reporting_owner_cik,
+            filing_date=filing_date,
+            api_key=api_key,
+            print_cost=print_cost,
+            verbose=verbose
+        )
+
+        # Save to CSV using the helper method
+        return self._download_to_csv(data, filepath, verbose)
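
The Sheet methods are thin wrappers: each download_* call forwards its filters to the matching get_* function and hands the result to _download_to_csv, which resolves relative paths against the Sheet's directory. A minimal usage sketch with placeholder paths and filter values:

    from datamule.sheet import Sheet

    sheet = Sheet("output")  # relative CSV paths resolve under output/

    # Fetch Form 3/4/5 rows and write them to CSV in one call.
    sheet.download_345(
        "insiders/345_jan2025.csv",
        issuer_cik="320193",                       # placeholder issuer CIK
        filing_date=("2025-01-01", "2025-01-31"),
    )

    # Or fetch proxy voting records without writing a file.
    votes = sheet.get_proxy_voting_record(issuer_name="APPLE INC")
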
datamule-1.1.8.dist-info/METADATA → datamule-1.2.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamule
-Version: 1.1.8
+Version: 1.2.0
 Summary: Making it easier to use SEC filings.
 Home-page: https://github.com/john-friedman/datamule-python
 Author: John Friedman
datamule-1.1.8.dist-info/RECORD → datamule-1.2.0.dist-info/RECORD CHANGED
@@ -1,10 +1,10 @@
 datamule/__init__.py,sha256=l6YlwT5EeRxPlCtO5Jd4I8l266rSRUJyfFe97cRtSCM,991
 datamule/config.py,sha256=Y--CVv7JcgrjJkMOSLrvm2S8B9ost6RMSkGviP-MKtg,883
-datamule/document.py,sha256=qShyVKHQ1nSCNvSfrhAOMVXprOd1br1rFKLy52S9WnE,22007
+datamule/document.py,sha256=CvuyazJ1qP8Ygpv49ikMc8DyGK7N-tApTU2Ccgv57q4,21556
 datamule/helper.py,sha256=xgOVnea-lUlQ5I-U0vYUp0VeKPNZehNhqjJvegA3lYE,3342
 datamule/index.py,sha256=0txvbzPcvY1GsdxA-wGdLzAByxSeE_1VyyBp9mZEQRM,2292
 datamule/portfolio.py,sha256=yWt5gYTjV7rJsLiPUmhc6Vmr3lfvfCR5MSpLQ_6Gdp4,7104
-datamule/sheet.py,sha256=QaArtx7LpT7bwyteelJV67C-lK0RjQbGS3ka7ftdi8w,7978
+datamule/sheet.py,sha256=TvFqK9eAYuVoJ2uWdAlx5EN6vS9lke-aZf7FqtUiDBc,22304
 datamule/submission.py,sha256=LI7Zr60YbE_tU-v2N09k2dGjfztSgplKZACT3eRUkFE,4463
 datamule/mapping_dicts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamule/mapping_dicts/txt_mapping_dicts.py,sha256=DQPrGYbAPQxomRUtt4iiMGrwuF7BHc_LeFBQuYBzU9o,6311
@@ -27,10 +27,10 @@ datamule/sec/xbrl/filter_xbrl.py,sha256=g9OT4zrNS0tiUJeBIwbCs_zMisOBkpFnMR3tV4Tr
 datamule/sec/xbrl/streamcompanyfacts.py,sha256=WyJIwuy5mNMXWpx_IkhFzDMe9MOfQ-vNkWl_JzBzFmc,3323
 datamule/sec/xbrl/xbrlmonitor.py,sha256=TKFVfSyyUUfUgFQw4WxEVs4g8Nh-2C0tygNIRmTqW3Y,5848
 datamule/seclibrary/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamule/seclibrary/bq.py,sha256=C6kafFXWtm-MUjf70H1wTtpwv1Rxpcbk-Kfy8fkBPfo,6469
+datamule/seclibrary/bq.py,sha256=C8sb_rpXTvchprrFLcbRar4Qi0XWW25tnv1YsHSS5o4,18025
 datamule/seclibrary/downloader.py,sha256=Zb1TxsIz887tO3MJVP66siYVtNus89ti-g9oZ6VywrM,11500
 datamule/seclibrary/query.py,sha256=qGuursTERRbOGfoDcYcpo4oWkW3PCBW6x1Qf1Puiak4,7352
-datamule-1.1.8.dist-info/METADATA,sha256=8HRRMz6l928E5tuHXkPi1_Kf-8nfPSjWQnnfReSxdPM,512
-datamule-1.1.8.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-datamule-1.1.8.dist-info/top_level.txt,sha256=iOfgmtSMFVyr7JGl_bYSTDry79JbmsG4p8zKq89ktKk,9
-datamule-1.1.8.dist-info/RECORD,,
+datamule-1.2.0.dist-info/METADATA,sha256=IDVSWEibrVQWmrNKEXrD1oaucOpmP7Agr4f6bv6o3Kg,512
+datamule-1.2.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+datamule-1.2.0.dist-info/top_level.txt,sha256=iOfgmtSMFVyr7JGl_bYSTDry79JbmsG4p8zKq89ktKk,9
+datamule-1.2.0.dist-info/RECORD,,
datamule-1.1.8.dist-info/WHEEL → datamule-1.2.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.45.1)
+Generator: bdist_wheel (0.42.0)
 Root-Is-Purelib: true
 Tag: py3-none-any