medicafe 0.240517.0-py3-none-any.whl → 0.240716.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of medicafe might be problematic.

Files changed (37)
  1. MediBot/MediBot.bat +46 -6
  2. MediBot/MediBot.py +9 -36
  3. MediBot/MediBot_Charges.py +0 -28
  4. MediBot/MediBot_Crosswalk_Library.py +16 -8
  5. MediBot/MediBot_Post.py +0 -0
  6. MediBot/MediBot_Preprocessor.py +26 -63
  7. MediBot/MediBot_Preprocessor_lib.py +182 -43
  8. MediBot/MediBot_UI.py +2 -7
  9. MediBot/MediBot_dataformat_library.py +0 -9
  10. MediBot/MediBot_docx_decoder.py +283 -60
  11. MediLink/MediLink.py +80 -120
  12. MediLink/MediLink_837p_encoder.py +3 -28
  13. MediLink/MediLink_837p_encoder_library.py +19 -53
  14. MediLink/MediLink_API_Generator.py +246 -0
  15. MediLink/MediLink_API_v2.py +2 -0
  16. MediLink/MediLink_API_v3.py +325 -0
  17. MediLink/MediLink_APIs.py +2 -0
  18. MediLink/MediLink_ClaimStatus.py +144 -0
  19. MediLink/MediLink_ConfigLoader.py +13 -7
  20. MediLink/MediLink_DataMgmt.py +224 -68
  21. MediLink/MediLink_Decoder.py +165 -0
  22. MediLink/MediLink_Deductible.py +203 -0
  23. MediLink/MediLink_Down.py +122 -96
  24. MediLink/MediLink_Gmail.py +453 -74
  25. MediLink/MediLink_Mailer.py +0 -7
  26. MediLink/MediLink_Parser.py +193 -0
  27. MediLink/MediLink_Scan.py +0 -0
  28. MediLink/MediLink_Scheduler.py +2 -172
  29. MediLink/MediLink_StatusCheck.py +0 -4
  30. MediLink/MediLink_UI.py +54 -18
  31. MediLink/MediLink_Up.py +6 -15
  32. {medicafe-0.240517.0.dist-info → medicafe-0.240716.2.dist-info}/METADATA +4 -1
  33. medicafe-0.240716.2.dist-info/RECORD +47 -0
  34. {medicafe-0.240517.0.dist-info → medicafe-0.240716.2.dist-info}/WHEEL +1 -1
  35. medicafe-0.240517.0.dist-info/RECORD +0 -39
  36. {medicafe-0.240517.0.dist-info → medicafe-0.240716.2.dist-info}/LICENSE +0 -0
  37. {medicafe-0.240517.0.dist-info → medicafe-0.240716.2.dist-info}/top_level.txt +0 -0
MediLink/MediLink_DataMgmt.py

@@ -1,21 +1,25 @@
+# MediLink_DataMgmt.py
 import csv
 import os
 from datetime import datetime, timedelta
+import re
 import subprocess
 
 # Need this for running Medibot and MediLink
 try:
     import MediLink_ConfigLoader
+    import MediLink_UI
 except ImportError:
     from . import MediLink_ConfigLoader
+    from . import MediLink_UI
 
-# Helper function to slice and strip values
-def slice_data(data, slices):
+# Helper function to slice and strip values with optional key suffix
+def slice_data(data, slices, suffix=''):
     # Convert slices list to a tuple for slicing operation
-    return {key: data[slice(*slices[key])].strip() for key in slices}
+    return {key + suffix: data[slice(*slices[key])].strip() for key in slices}
 
 # Function to parse fixed-width Medisoft output and extract claim data
-def parse_fixed_width_data(personal_info, insurance_info, service_info, config=None):
+def parse_fixed_width_data(personal_info, insurance_info, service_info, service_info_2=None, service_info_3=None, config=None):
 
     # Make sure we have the right config
     if not config: # Checks if config is None or an empty dictionary
@@ -35,6 +39,12 @@ def parse_fixed_width_data(personal_info, insurance_info, service_info, config=N
     parsed_data.update(slice_data(insurance_info, insurance_slices))
     parsed_data.update(slice_data(service_info, service_slices))
 
+    if service_info_2:
+        parsed_data.update(slice_data(service_info_2, service_slices, suffix='_2'))
+
+    if service_info_3:
+        parsed_data.update(slice_data(service_info_3, service_slices, suffix='_3'))
+
     MediLink_ConfigLoader.log("Successfully parsed data from segments", config, level="INFO")
 
     return parsed_data
@@ -46,18 +56,32 @@ def read_fixed_width_data(file_path):
     MediLink_ConfigLoader.log("Starting to read fixed width data...")
     with open(file_path, 'r') as file:
         lines_buffer = [] # Buffer to hold lines for current patient data
+
+        def yield_record(buffer):
+            personal_info = buffer[0]
+            insurance_info = buffer[1]
+            service_info = buffer[2]
+            service_info_2 = buffer[3] if len(buffer) > 3 else None
+            service_info_3 = buffer[4] if len(buffer) > 4 else None
+            MediLink_ConfigLoader.log("Successfully read data from file: {}".format(file_path), level="INFO")
+            return personal_info, insurance_info, service_info, service_info_2, service_info_3
+
         for line in file:
             stripped_line = line.strip()
-            if stripped_line: # Only process non-empty lines
+            if stripped_line:
                 lines_buffer.append(stripped_line)
-                # Once we have 3 lines of data, yield them as a patient record
-                if len(lines_buffer) == 3:
-                    personal_info, insurance_info, service_info = lines_buffer
-                    MediLink_ConfigLoader.log("Successfully read data from file: {}".format(file_path), level="INFO")
-                    yield personal_info, insurance_info, service_info
-                    lines_buffer.clear() # Reset buffer for the next patient record
-            # If the line is blank but we have already started collecting a patient record,
-            # we continue without resetting the buffer, effectively skipping blank lines.
+                if 3 <= len(lines_buffer) <= 5:
+                    next_line = file.readline().strip()
+                    if not next_line:
+                        yield yield_record(lines_buffer)
+                        lines_buffer.clear()
+            else:
+                if len(lines_buffer) >= 3:
+                    yield yield_record(lines_buffer)
+                    lines_buffer.clear()
+
+        if lines_buffer: # Yield any remaining buffer if file ends without a blank line
+            yield yield_record(lines_buffer)
 
 # TODO (Refactor) Consider consolidating with the other read_fixed_with_data
 def read_general_fixed_width_data(file_path, slices):
@@ -68,21 +92,34 @@ def read_general_fixed_width_data(file_path, slices):
             insurance_name = {key: line[start:end].strip() for key, (start, end) in slices.items()}
             yield insurance_name, line_number
 
-def consolidate_csvs(source_directory):
+def consolidate_csvs(source_directory, file_prefix="Consolidated", interactive=False):
     """
-    This default overwrites any existing CSV for the same day. We want this for the automated runs but want to switch through
-    the user interaction option if we're running interactive. This has not been implemented, but the helper function exists.
+    Consolidate CSV files in the source directory into a single CSV file.
+
+    Parameters:
+    source_directory (str): The directory containing the CSV files to consolidate.
+    file_prefix (str): The prefix for the consolidated file's name.
+    interactive (bool): If True, prompt the user for confirmation before overwriting existing files.
+
+    Returns:
+    str: The filepath of the consolidated CSV file, or None if no files were consolidated.
     """
     today = datetime.now()
-    consolidated_filename = today.strftime("ERA_%m%d%y.csv")
+    consolidated_filename = "{}_{}.csv".format(file_prefix, today.strftime("%m%d%y"))
     consolidated_filepath = os.path.join(source_directory, consolidated_filename)
 
     consolidated_data = []
     header_saved = False
+    expected_header = None
 
     # Check if the file already exists and log the action
     if os.path.exists(consolidated_filepath):
-        MediLink_ConfigLoader.log("The file {} already exists. It will be overwritten.".format(consolidated_filename))
+        MediLink_ConfigLoader.log("The file {} already exists. It will be overwritten.".format(consolidated_filename), level="INFO")
+        if interactive:
+            overwrite = input("The file {} already exists. Do you want to overwrite it? (y/n): ".format(consolidated_filename)).strip().lower()
+            if overwrite != 'y':
+                MediLink_ConfigLoader.log("User opted not to overwrite the file {}.".format(consolidated_filename), level="INFO")
+                return None
 
     for filename in os.listdir(source_directory):
         filepath = os.path.join(source_directory, filename)
@@ -94,26 +131,38 @@ def consolidate_csvs(source_directory):
         if modification_time < today - timedelta(days=1):
             continue # Skip files not modified in the last day
 
-        # Read and append data from each CSV
-        with open(filepath, 'r', newline='') as csvfile:
-            reader = csv.reader(csvfile)
-            header = next(reader) # Assumes all CSV files have the same header
-            if not header_saved: # Save header from the first file
-                consolidated_data.append(header)
-                header_saved = True
-            consolidated_data.extend(row for row in reader)
+        try:
+            with open(filepath, 'r') as csvfile:
+                reader = csv.reader(csvfile)
+                header = next(reader) # Read the header
+                if not header_saved:
+                    expected_header = header
+                    consolidated_data.append(header)
+                    header_saved = True
+                elif header != expected_header:
+                    MediLink_ConfigLoader.log("Header mismatch in file {}. Skipping file.".format(filepath), level="WARNING")
+                    continue
 
-        # Delete the source file after its contents have been added to the consolidation list
-        os.remove(filepath)
+                consolidated_data.extend(row for row in reader)
+        except StopIteration:
+            MediLink_ConfigLoader.log("File {} is empty or contains only header. Skipping file.".format(filepath), level="WARNING")
+            continue
+        except Exception as e:
+            MediLink_ConfigLoader.log("Error processing file {}: {}".format(filepath, e), level="ERROR")
+            continue
 
-    # Write consolidated data to a new or existing CSV file, overwriting it if it exists
-    with open(consolidated_filepath, 'w', newline='') as csvfile:
-        writer = csv.writer(csvfile)
-        writer.writerows(consolidated_data)
+        os.remove(filepath)
+        MediLink_ConfigLoader.log("Deleted source file after consolidation: {}".format(filepath), level="INFO")
 
-    MediLink_ConfigLoader.log("Consolidated CSVs into {}".format(consolidated_filepath))
-
-    return consolidated_filepath
+    if consolidated_data:
+        with open(consolidated_filepath, 'w') as csvfile:
+            writer = csv.writer(csvfile)
+            writer.writerows(consolidated_data)
+        MediLink_ConfigLoader.log("Consolidated CSVs into {}".format(consolidated_filepath), level="INFO")
+        return consolidated_filepath
+    else:
+        MediLink_ConfigLoader.log("No valid CSV files were found for consolidation.", level="INFO")
+        return None
 
 def operate_winscp(operation_type, files, endpoint_config, local_storage_path, config):
     """
@@ -161,9 +210,20 @@ def operate_winscp(operation_type, files, endpoint_config, local_storage_path, c
     winscp_log_path = os.path.join(local_storage_path, log_filename)
 
     # Session and directory setup
-    session_name = endpoint_config.get('session_name', '')
-    remote_directory = endpoint_config['remote_directory_up'] if operation_type == "upload" else endpoint_config['remote_directory_down']
-
+    try:
+        session_name = endpoint_config.get('session_name', '')
+        if operation_type == "upload":
+            remote_directory = endpoint_config['remote_directory_up']
+        else:
+            remote_directory = endpoint_config['remote_directory_down']
+    except KeyError as e:
+        # Log the missing key information
+        missing_key = str(e)
+        message = "KeyError: Endpoint config is missing key: {}".format(missing_key)
+        MediLink_ConfigLoader.log(message)
+        # Set default values or handle the situation accordingly
+        session_name = ''
+        remote_directory = ''
     # Command building
     command = [
         winscp_path,
@@ -195,15 +255,18 @@ def operate_winscp(operation_type, files, endpoint_config, local_storage_path, c
         # TestMode is enabled, do not execute the command
         print("Test Mode is enabled! WinSCP Command not executed.")
         MediLink_ConfigLoader.log("Test Mode is enabled! WinSCP Command not executed.")
-        MediLink_ConfigLoader.log("TEST MODE: Simulating WinSCP Upload File List.")
+        MediLink_ConfigLoader.log("TEST MODE: Simulating WinSCP {} File List.".format(operation_type))
         uploaded_files = []
-        for file_path in files:
-            normalized_path = os.path.normpath(file_path)
-            if os.path.exists(normalized_path): # Check if the file exists before appending
-                uploaded_files.append(normalized_path)
-            else:
-                MediLink_ConfigLoader.log("TEST MODE: Failed to upload file: {} does not exist.".format(normalized_path))
-        return uploaded_files
+        if files is not None: # Check if files is not None
+            for file_path in files:
+                normalized_path = os.path.normpath(file_path)
+                if os.path.exists(normalized_path): # Check if the file exists before appending
+                    uploaded_files.append(normalized_path)
+                else:
+                    MediLink_ConfigLoader.log("TEST MODE: Failed to {} file: {} does not exist.".format(operation_type, normalized_path))
+        else:
+            MediLink_ConfigLoader.log("TEST MODE: No files to upload.")
+        return uploaded_files if files is not None else []
     else:
         # TestMode is not enabled, execute the command
         process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
@@ -233,26 +296,119 @@ def operate_winscp(operation_type, files, endpoint_config, local_storage_path, c
         MediLink_ConfigLoader.log("Failed to {} files. Details: {}".format(operation_type, stderr.decode('utf-8')))
         return [] # Return empty list to indicate failure. BUG check to make sure this doesn't break something else.
 
-# UNUSED CSV Functions
-"""
-def remove_blank_rows_from_csv(csv_file_path):
-    with open(csv_file_path, 'r') as csv_file:
-        # Read the CSV file and filter out any empty rows
-        rows = [row for row in csv.reader(csv_file) if any(field.strip() for field in row)]
+def detect_new_files(directory_path, file_extension='.DAT'):
+    """
+    Scans the specified directory for new files with a given extension and adds a timestamp if needed.
+
+    :param directory_path: Path to the directory containing files to be detected.
+    :param file_extension: Extension of the files to detect.
+    :return: A tuple containing a list of paths to new files detected in the directory and a flag indicating if a new file was just renamed.
+    """
+    MediLink_ConfigLoader.log("Scanning directory: {}".format(directory_path), level="INFO")
+    detected_file_paths = []
+    file_flagged = False
 
-    # Write the filtered rows back to the CSV file
-    with open(csv_file_path, 'w', newline='') as csv_file:
-        writer = csv.writer(csv_file)
-        writer.writerows(rows)
-
-def list_chart_numbers_in_existing_file(filepath):
-    # Lists the Chart Numbers contained in an existing CSV file.
-    chart_numbers = []
-    with open(filepath, 'r', newline='') as csvfile:
-        reader = csv.reader(csvfile)
-        next(reader) # Skip header
-        for row in reader:
-            if len(row) > 2: # Assuming Chart Number is in the 3rd column
-                chart_numbers.append(row[2])
-    return chart_numbers
-"""
+    try:
+        filenames = os.listdir(directory_path)
+        MediLink_ConfigLoader.log("Files in directory: {}".format(filenames), level="INFO")
+
+        for filename in filenames:
+            MediLink_ConfigLoader.log("Checking file: {}".format(filename), level="INFO")
+            if filename.endswith(file_extension):
+                MediLink_ConfigLoader.log("File matches extension: {}".format(file_extension), level="INFO")
+                name, ext = os.path.splitext(filename)
+                MediLink_ConfigLoader.log("File name: {}, File extension: {}".format(name, ext), level="INFO")
+
+                if not is_timestamped(name):
+                    MediLink_ConfigLoader.log("File is not timestamped: {}".format(filename), level="INFO")
+                    new_name = "{}_{}{}".format(name, datetime.now().strftime('%Y%m%d_%H%M%S'), ext)
+                    os.rename(os.path.join(directory_path, filename), os.path.join(directory_path, new_name))
+                    MediLink_ConfigLoader.log("Renamed file from {} to {}".format(filename, new_name), level="INFO")
+                    file_flagged = True
+                    filename = new_name
+                else:
+                    MediLink_ConfigLoader.log("File is already timestamped: {}".format(filename), level="INFO")
+
+                file_path = os.path.join(directory_path, filename)
+                detected_file_paths.append(file_path)
+                MediLink_ConfigLoader.log("Detected file path: {}".format(file_path), level="INFO")
+
+    except Exception as e:
+        MediLink_ConfigLoader.log("Error occurred: {}".format(str(e)), level="INFO")
+
+    MediLink_ConfigLoader.log("Detected files: {}".format(detected_file_paths), level="INFO")
+    MediLink_ConfigLoader.log("File flagged status: {}".format(file_flagged), level="INFO")
+
+    return detected_file_paths, file_flagged
+
+def is_timestamped(name):
+    """
+    Checks if the given filename has a timestamp in the expected format.
+
+    :param name: The name of the file without extension.
+    :return: True if the filename includes a timestamp, False otherwise.
+    """
+    # Regular expression to match timestamps in the format YYYYMMDD_HHMMSS
+    timestamp_pattern = re.compile(r'.*_\d{8}_\d{6}$')
+    return bool(timestamp_pattern.match(name))
+
+def organize_patient_data_by_endpoint(detailed_patient_data):
+    """
+    Organizes detailed patient data by their confirmed endpoints.
+    This simplifies processing and conversion per endpoint basis, ensuring that claims are generated and submitted
+    according to the endpoint-specific requirements.
+
+    :param detailed_patient_data: A list of dictionaries, each containing detailed patient data including confirmed endpoint.
+    :return: A dictionary with endpoints as keys and lists of detailed patient data as values for processing.
+    """
+    organized = {}
+    for data in detailed_patient_data:
+        # Retrieve confirmed endpoint from each patient's data
+        endpoint = data['confirmed_endpoint'] if 'confirmed_endpoint' in data else data['suggested_endpoint']
+        # Initialize a list for the endpoint if it doesn't exist
+        if endpoint not in organized:
+            organized[endpoint] = []
+        organized[endpoint].append(data)
+    return organized
+
+def confirm_all_suggested_endpoints(detailed_patient_data):
+    """
+    Confirms all suggested endpoints for each patient's detailed data.
+    """
+    for data in detailed_patient_data:
+        if 'confirmed_endpoint' not in data:
+            data['confirmed_endpoint'] = data['suggested_endpoint']
+    return detailed_patient_data
+
+def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
+    # Allow user to edit insurance types in a table-like format with validation
+    print("Edit Insurance Type (Enter the 2-character code). Enter 'LIST' to display available insurance types.")
+
+    for data in detailed_patient_data:
+        current_insurance_type = data['insurance_type']
+        current_insurance_description = insurance_options.get(current_insurance_type, "Unknown")
+        print("({}) {:<25} | Current Ins. Type: {} - {}".format(
+            data['patient_id'], data['patient_name'], current_insurance_type, current_insurance_description))
+
+        while True:
+            new_insurance_type = input("Enter new insurance type (or press Enter to keep current): ").upper()
+            if new_insurance_type == 'LIST':
+                MediLink_UI.display_insurance_options(insurance_options)
+            elif not new_insurance_type or new_insurance_type in insurance_options:
+                if new_insurance_type:
+                    data['insurance_type'] = new_insurance_type
+                break
+            else:
+                print("Invalid insurance type. Please enter a valid 2-character code or type 'LIST' to see options.")
+
+def review_and_confirm_changes(detailed_patient_data, insurance_options):
+    # Review and confirm changes
+    print("\nReview changes:")
+    print("{:<20} {:<10} {:<30}".format("Patient Name", "Ins. Type", "Description"))
+    print("="*65)
+    for data in detailed_patient_data:
+        insurance_type = data['insurance_type']
+        insurance_description = insurance_options.get(insurance_type, "Unknown")
+        print("{:<20} {:<10} {:<30}".format(data['patient_name'], insurance_type, insurance_description))
+    confirm = input("\nConfirm changes? (y/n): ").strip().lower()
+    return confirm in ['y', 'yes', '']
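With these changes, read_fixed_width_data yields a five-tuple per blank-line-delimited patient block (three required lines plus up to two optional service lines), and parse_fixed_width_data suffixes fields from the extra service lines with '_2'/'_3'. A minimal consumption sketch, assuming a fixed-width Medisoft export and a loaded config (the file name is a placeholder, and the exact keys depend on the configured slices):

import MediLink_ConfigLoader
import MediLink_DataMgmt

config, _ = MediLink_ConfigLoader.load_configuration()

# "claims.DAT" is a hypothetical export file, not a path shipped with medicafe.
for personal, insurance, svc, svc_2, svc_3 in MediLink_DataMgmt.read_fixed_width_data("claims.DAT"):
    parsed = MediLink_DataMgmt.parse_fixed_width_data(personal, insurance, svc, svc_2, svc_3, config=config)
    # Keys from the optional second/third service lines carry the '_2'/'_3' suffix added by slice_data.
    print(sorted(k for k in parsed if k.endswith('_2') or k.endswith('_3')))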
MediLink/MediLink_Decoder.py

@@ -0,0 +1,165 @@
+# MediLink_Decoder.py
+import os
+import sys
+import csv
+from MediLink_ConfigLoader import load_configuration, log
+from MediLink_Parser import parse_era_content, parse_277_content, parse_277IBR_content, parse_277EBR_content, parse_dpt_content, parse_ebt_content, parse_ibt_content
+
+def process_file(file_path, output_directory, return_records=False):
+    if not os.path.exists(output_directory):
+        os.makedirs(output_directory)
+
+    file_type = determine_file_type(file_path)
+    content = read_file(file_path)
+
+    if file_type == 'ERA':
+        records = parse_era_content(content)
+    elif file_type in ['277', '277IBR', '277EBR']:
+        records = parse_277_content(content) if file_type == '277' else parse_277IBR_content(content) if file_type == '277IBR' else parse_277EBR_content(content)
+    elif file_type == 'DPT':
+        records = parse_dpt_content(content)
+    elif file_type == 'EBT':
+        records = parse_ebt_content(content)
+    elif file_type == 'IBT':
+        records = parse_ibt_content(content)
+    else:
+        log("Unsupported file type: {}".format(file_type))
+        return []
+
+    formatted_records = format_records(records, file_type)
+    if not return_records:
+        display_table(formatted_records)
+    output_file_path = os.path.join(output_directory, os.path.basename(file_path) + '_decoded.csv')
+    write_records_to_csv(formatted_records, output_file_path)
+    log("Decoded data written to {}".format(output_file_path))
+    return formatted_records
+
+def determine_file_type(file_path):
+    if file_path.endswith('.era'):
+        return 'ERA'
+    elif file_path.endswith('.277'):
+        return '277'
+    elif file_path.endswith('.277ibr'):
+        return '277IBR'
+    elif file_path.endswith('.277ebr'):
+        return '277EBR'
+    elif file_path.endswith('.dpt'):
+        return 'DPT'
+    elif file_path.endswith('.ebt'):
+        return 'EBT'
+    elif file_path.endswith('.ibt'):
+        return 'IBT'
+    else:
+        log("Unsupported file type for file: {}".format(file_path))
+        return None
+
+def read_file(file_path):
+    with open(file_path, 'r') as file:
+        content = file.read()
+    return content
+
+def write_records_to_csv(records, output_file_path):
+    fieldnames = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
+    with open(output_file_path, 'w', newline='') as csvfile:
+        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+        writer.writeheader()
+        for record in records:
+            writer.writerow(record)
+
+def format_records(records, file_type):
+    formatted_records = []
+    for record in records:
+        if file_type == 'IBT':
+            formatted_record = {
+                'Claim #': record.get('Patient Control Number', ''),
+                'Status': record.get('Status', ''),
+                'Patient': record.get('Patient Name', ''),
+                'Proc.': format_date(record.get('To Date', '')),
+                'Serv.': format_date(record.get('From Date', '')),
+                'Allowed': '',
+                'Paid': '',
+                'Pt Resp': '',
+                'Charged': record.get('Charge', '')
+            }
+        else:
+            formatted_record = {
+                'Claim #': record.get('Chart Number', record.get('Claim Status Tracking #', record.get('Claim #', ''))),
+                'Status': record.get('claimStatus', record.get('Status', '')),
+                'Patient': record.get('memberInfo', {}).get('ptntFn', '') + ' ' + record.get('memberInfo', {}).get('ptntLn', '') if 'memberInfo' in record else record.get('Patient', ''),
+                'Proc.': format_date(record.get('processed_date', record.get('Received Date', ''))),
+                'Serv.': format_date(record.get('firstSrvcDt', record.get('Date of Service', ''))),
+                'Allowed': record.get('totalAllowdAmt', record.get('Allowed Amount', '')),
+                'Paid': record.get('totalPaidAmt', record.get('Amount Paid', '')),
+                'Pt Resp': record.get('totalPtntRespAmt', record.get('Patient Responsibility', '')),
+                'Charged': record.get('totalChargedAmt', record.get('Charge', ''))
+            }
+        formatted_records.append(formatted_record)
+    return formatted_records
+
+def format_date(date_str):
+    if date_str and len(date_str) >= 10:
+        return date_str[5:7] + '-' + date_str[8:10] # Assuming date format is YYYY-MM-DD, this returns MM-DD
+    return ''
+
+def display_table(records):
+    # Define the new fieldnames and their respective widths
+    new_fieldnames = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
+    col_widths = {field: len(field) for field in new_fieldnames}
+
+    # Update column widths based on records
+    for record in records:
+        for field in new_fieldnames:
+            col_widths[field] = max(col_widths[field], len(str(record.get(field, ''))))
+
+    # Create table header
+    header = " | ".join("{:<{}}".format(field, col_widths[field]) for field in new_fieldnames)
+    print(header)
+    print("-" * len(header))
+
+    # Create table rows
+    for record in records:
+        row = " | ".join("{:<{}}".format(str(record.get(field, '')), col_widths[field]) for field in new_fieldnames)
+        print(row)
+
+def display_consolidated_records(records):
+    if not records:
+        return
+
+    new_fieldnames = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
+    col_widths = {field: len(field) for field in new_fieldnames}
+
+    for record in records:
+        for field in new_fieldnames:
+            col_widths[field] = max(col_widths[field], len(str(record.get(field, ''))))
+
+    header = " | ".join("{:<{}}".format(field, col_widths[field]) for field in new_fieldnames)
+    print(header)
+    print("-" * len(header))
+
+    for record in records:
+        row = " | ".join("{:<{}}".format(str(record.get(field, '')), col_widths[field]) for field in new_fieldnames)
+        print(row)
+
+if __name__ == "__main__":
+    config, _ = load_configuration()
+
+    files = sys.argv[1:]
+    if not files:
+        log("No files provided as arguments.", 'error')
+        sys.exit(1)
+
+    output_directory = config['MediLink_Config'].get('local_storage_path')
+    all_records = []
+    for file_path in files:
+        try:
+            records = process_file(file_path, output_directory, return_records=True)
+            all_records.extend(records)
+        except Exception as e:
+            log("Failed to process {}: {}".format(file_path, e), 'error')
+
+    display_consolidated_records(all_records)
+
+    if input("Do you want to export the consolidated records to a CSV file? (y/n): ").strip().lower() == 'y':
+        consolidated_csv_path = os.path.join(output_directory, "Consolidated_Records.csv")
+        write_records_to_csv(all_records, consolidated_csv_path)
+        log("Consolidated records written to {}".format(consolidated_csv_path))