medicafe 0.240613.0__py3-none-any.whl → 0.240809.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of medicafe might be problematic; see the registry's advisory page for more details.

@@ -3,9 +3,9 @@ import os
3
3
  import sys
4
4
  import csv
5
5
  from MediLink_ConfigLoader import load_configuration, log
6
- from MediLink_Parser import parse_era_content, parse_277_content
6
+ from MediLink_Parser import parse_era_content, parse_277_content, parse_277IBR_content, parse_277EBR_content, parse_dpt_content, parse_ebt_content, parse_ibt_content
7
7
 
8
- def process_file(file_path, output_directory):
8
+ def process_file(file_path, output_directory, return_records=False):
9
9
  if not os.path.exists(output_directory):
10
10
  os.makedirs(output_directory)
11
11
 
@@ -14,50 +14,152 @@ def process_file(file_path, output_directory):
14
14
 
15
15
  if file_type == 'ERA':
16
16
  records = parse_era_content(content)
17
- fieldnames = ['Date of Service', 'Check EFT', 'Chart Number', 'Payer Address', 'Amount Paid',
18
- 'Adjustment Amount', 'Allowed Amount', 'Write Off', 'Patient Responsibility', 'Charge']
19
- elif file_type == '277':
20
- records = parse_277_content(content)
21
- fieldnames = ['Clearing House', 'Received Date', 'Claim Status Tracking #', 'Billed Amt', 'Date of Service',
22
- 'Last', 'First', 'Acknowledged Amt', 'Status']
17
+ elif file_type in ['277', '277IBR', '277EBR']:
18
+ records = parse_277_content(content) if file_type == '277' else parse_277IBR_content(content) if file_type == '277IBR' else parse_277EBR_content(content)
19
+ elif file_type == 'DPT':
20
+ records = parse_dpt_content(content)
21
+ elif file_type == 'EBT':
22
+ records = parse_ebt_content(content)
23
+ elif file_type == 'IBT':
24
+ records = parse_ibt_content(content)
23
25
  else:
24
- raise ValueError("Unsupported file type: {}".format(file_type))
26
+ log("Unsupported file type: {}".format(file_type))
27
+ return []
25
28
 
26
- output_file_path = os.path.join(output_directory, os.path.basename(file_path) + '_decoded.csv')
27
- write_records_to_csv(records, output_file_path, fieldnames)
28
- print("Decoded data written to {}".format(output_file_path))
29
+ formatted_records = format_records(records, file_type)
30
+ if not return_records:
31
+ display_table(formatted_records)
32
+ output_file_path = os.path.join(output_directory, os.path.basename(file_path) + '_decoded.csv')
33
+ write_records_to_csv(formatted_records, output_file_path)
34
+ log("Decoded data written to {}".format(output_file_path))
35
+ return formatted_records
29
36
 
30
37
def determine_file_type(file_path):
    """Map a response file's extension to its logical file-type code.

    Returns one of 'ERA', '277', '277IBR', '277EBR', 'DPT', 'EBT', 'IBT',
    or None (after logging) when the extension is not recognized.
    """
    # Suffixes are mutually exclusive, so table order does not affect results.
    suffix_map = (
        ('.era', 'ERA'),
        ('.277ibr', '277IBR'),
        ('.277ebr', '277EBR'),
        ('.277', '277'),
        ('.dpt', 'DPT'),
        ('.ebt', 'EBT'),
        ('.ibt', 'IBT'),
    )
    for suffix, file_type in suffix_map:
        if file_path.endswith(suffix):
            return file_type
    log("Unsupported file type for file: {}".format(file_path))
    return None
55
+
38
56
def read_file(file_path):
    """Return the full text of *file_path*, newlines preserved."""
    with open(file_path, 'r') as handle:
        return handle.read()
42
60
 
43
def write_records_to_csv(records, output_file_path):
    """Write formatted records to *output_file_path* as a CSV file.

    The column set is the fixed display schema shared with the table
    renderers; missing keys are emitted as empty cells.
    """
    columns = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
    with open(output_file_path, 'w', newline='') as out:
        writer = csv.DictWriter(out, fieldnames=columns)
        writer.writeheader()
        writer.writerows(records)
49
68
 
69
def format_records(records, file_type):
    """Normalize parser output into the shared display/CSV schema.

    IBT records use their own field names; every other file type falls
    back through API-style keys first, then the legacy parser keys.
    """
    normalized = []
    for record in records:
        if file_type == 'IBT':
            normalized.append({
                'Claim #': record.get('Patient Control Number', ''),
                'Status': record.get('Status', ''),
                'Patient': record.get('Patient Name', ''),
                'Proc.': format_date(record.get('To Date', '')),
                'Serv.': format_date(record.get('From Date', '')),
                'Allowed': '',
                'Paid': '',
                'Pt Resp': '',
                'Charged': record.get('Charge', ''),
            })
            continue
        # Patient name: API-style nested memberInfo wins over the flat key.
        if 'memberInfo' in record:
            member = record.get('memberInfo', {})
            patient = member.get('ptntFn', '') + ' ' + member.get('ptntLn', '')
        else:
            patient = record.get('Patient', '')
        normalized.append({
            'Claim #': record.get('Chart Number', record.get('Claim Status Tracking #', record.get('Claim #', ''))),
            'Status': record.get('claimStatus', record.get('Status', '')),
            'Patient': patient,
            'Proc.': format_date(record.get('processed_date', record.get('Received Date', ''))),
            'Serv.': format_date(record.get('firstSrvcDt', record.get('Date of Service', ''))),
            'Allowed': record.get('totalAllowdAmt', record.get('Allowed Amount', '')),
            'Paid': record.get('totalPaidAmt', record.get('Amount Paid', '')),
            'Pt Resp': record.get('totalPtntRespAmt', record.get('Patient Responsibility', '')),
            'Charged': record.get('totalChargedAmt', record.get('Charge', '')),
        })
    return normalized
98
+
99
+ def format_date(date_str):
100
+ if date_str and len(date_str) >= 10:
101
+ return date_str[5:7] + '-' + date_str[8:10] # Assuming date format is YYYY-MM-DD, this returns MM-DD
102
+ return ''
103
+
104
def display_table(records):
    """Print *records* as an aligned, pipe-separated text table."""
    columns = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
    # Each column is as wide as its header or its widest value.
    widths = {}
    for column in columns:
        cell_lengths = [len(str(record.get(column, ''))) for record in records]
        widths[column] = max([len(column)] + cell_lengths)
    header = " | ".join("{:<{}}".format(column, widths[column]) for column in columns)
    print(header)
    print("-" * len(header))
    for record in records:
        print(" | ".join("{:<{}}".format(str(record.get(column, '')), widths[column]) for column in columns))
123
+
124
def display_consolidated_records(records):
    """Print all consolidated records as one aligned table; no-op when empty."""
    if not records:
        return

    columns = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
    # Size each column to fit its header and its widest value.
    widths = {}
    for column in columns:
        cell_lengths = [len(str(record.get(column, ''))) for record in records]
        widths[column] = max([len(column)] + cell_lengths)

    header = " | ".join("{:<{}}".format(column, widths[column]) for column in columns)
    print(header)
    print("-" * len(header))

    for record in records:
        print(" | ".join("{:<{}}".format(str(record.get(column, '')), widths[column]) for column in columns))
142
+
50
143
if __name__ == "__main__":
    # Script entry point: decode every file passed on the command line,
    # show a consolidated table, and optionally export a combined CSV.
    config, _ = load_configuration()

    files = sys.argv[1:]
    if not files:
        log("No files provided as arguments.", 'error')
        sys.exit(1)

    # Decoded CSVs are written next to the configured local storage path.
    output_directory = config['MediLink_Config'].get('local_storage_path')
    all_records = []
    for file_path in files:
        try:
            # return_records=True suppresses per-file display; we show one
            # consolidated table at the end instead.
            records = process_file(file_path, output_directory, return_records=True)
            all_records.extend(records)
        except Exception as e:
            # Best-effort: a single bad file must not stop the batch.
            log("Failed to process {}: {}".format(file_path, e), 'error')

    display_consolidated_records(all_records)

    if input("Do you want to export the consolidated records to a CSV file? (y/n): ").strip().lower() == 'y':
        consolidated_csv_path = os.path.join(output_directory, "Consolidated_Records.csv")
        write_records_to_csv(all_records, consolidated_csv_path)
        log("Consolidated records written to {}".format(consolidated_csv_path))
@@ -0,0 +1,210 @@
1
+ """
2
+ # Create a summary JSON
3
+ summary = {
4
+ "Payer ID": ins_payerID,
5
+ "Provider": provider_last_name,
6
+ "Member ID": ins_memberID,
7
+ "Date of Birth": dob,
8
+ "Patient Name": patient_name,
9
+ "Patient Info": {
10
+ "DOB": dob,
11
+ "Address": "{} {}".format(patient_info.get("addressLine1", ""), patient_info.get("addressLine2", "")).strip(),
12
+ "City": patient_info.get("city", ""),
13
+ "State": patient_info.get("state", ""),
14
+ "ZIP": patient_info.get("zip", ""),
15
+ "Relationship": patient_info.get("relationship", "")
16
+ },
17
+ "Insurance Info": {
18
+ "Payer Name": insurance_info.get("payerName", ""),
19
+ "Payer ID": ins_payerID,
20
+ "Member ID": ins_memberID,
21
+ "Group Number": insurance_info.get("groupNumber", ""),
22
+ "Insurance Type": ins_insuranceType,
23
+ "Type Code": ins_insuranceTypeCode,
24
+ "Address": "{} {}".format(insurance_info.get("addressLine1", ""), insurance_info.get("addressLine2", "")).strip(),
25
+ "City": insurance_info.get("city", ""),
26
+ "State": insurance_info.get("state", ""),
27
+ "ZIP": insurance_info.get("zip", "")
28
+ },
29
+ "Policy Info": {
30
+ "Eligibility Dates": eligibilityDates,
31
+ "Policy Member ID": policy_info.get("memberId", ""),
32
+ "Policy Status": policy_status
33
+ },
34
+ "Deductible Info": {
35
+ "Remaining Amount": remaining_amount
36
+ }
37
+ }
38
+
39
+ # Print debug JSON
40
+ # Uncomment below if you need to debug later
41
+ # print("\nDebug JSON Summary:")
42
+ # print(json.dumps(summary, indent=2))
43
+ """
44
+ import MediLink_API_v3
45
+ import os
46
+ import sys
47
+ from datetime import datetime
48
+ import requests
49
+ import json
50
+
51
# Resolve MediLink_ConfigLoader whether we run from inside the package or
# as a standalone script.
try:
    from MediLink import MediLink_ConfigLoader
except ImportError:
    import MediLink_ConfigLoader

# Make the project root importable so the MediBot package can be found.
project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
sys.path.append(project_dir)

try:
    from MediBot import MediBot_Preprocessor_lib
except ImportError:
    import MediBot_Preprocessor_lib

# Load configuration (returns a (config, extra) tuple; extra is unused here).
config, _ = MediLink_ConfigLoader.load_configuration()

# Initialize the API client used for all eligibility calls below.
client = MediLink_API_v3.APIClient()

# Billing-provider identity from configuration; 'Unknown' acts as a sentinel.
provider_last_name = config['MediLink_Config'].get('default_billing_provider_last_name', 'Unknown')
npi = config['MediLink_Config'].get('default_billing_provider_npi', 'Unknown')

# Warn (but continue) when the provider name is missing from configuration.
if provider_last_name == 'Unknown':
    MediLink_ConfigLoader.log("Warning: provider_last_name was not found in the configuration.", level="WARNING")

# Payer IDs to query for eligibility (hard-coded list of supported payers).
payer_ids = ['87726', '03432', '96385', '95467', '86050', '86047', '95378', '06111', '37602']

# Load the latest patient CSV referenced by the configuration.
CSV_FILE_PATH = config.get('CSV_FILE_PATH', "")
csv_data = MediBot_Preprocessor_lib.load_csv_data(CSV_FILE_PATH)

# Keep only rows whose primary payer ID is one we can query.
# NOTE(review): assumes each CSV row has an 'Ins1 Payer ID' column -- a
# missing column would raise KeyError here; confirm against the CSV schema.
valid_rows = [row for row in csv_data if str(row['Ins1 Payer ID']) in payer_ids]
87
+
88
+ # Function to check if the date format is correct
89
def validate_and_format_date(date_str):
    """Normalize *date_str* to ISO YYYY-MM-DD.

    Tries a fixed list of accepted input formats in order and returns the
    normalized string, or None when no format matches.
    """
    accepted_formats = ('%Y-%m-%d', '%m/%d/%Y', '%d-%b-%Y', '%d-%m-%Y')
    for candidate in accepted_formats:
        try:
            parsed = datetime.strptime(date_str, candidate)
        except ValueError:
            continue
        return parsed.strftime('%Y-%m-%d')
    return None
97
+
98
# Build (DOB, member ID) pairs from the filtered CSV rows. The inner
# generator parses each 'Patient DOB' exactly once (the original parsed it
# twice per row); rows with an unparseable DOB are dropped.
patients = [
    (dob, row['Primary Policy Number'])
    for dob, row in ((validate_and_format_date(row['Patient DOB']), row) for row in valid_rows)
    if dob is not None
]
103
+
104
+ # Function to get eligibility information
105
# Function to get eligibility information
def get_eligibility_info(client, payer_id, provider_last_name, date_of_birth, member_id, npi):
    """Query eligibility for one patient via get_eligibility_v3.

    Returns the decoded eligibility payload on success, or None when the
    call fails. All parameters and the raw response are logged at DEBUG.
    """
    try:
        # Log the parameters being sent to the function
        MediLink_ConfigLoader.log("Calling get_eligibility_v3 with parameters:", level="DEBUG")
        MediLink_ConfigLoader.log("payer_id: {}".format(payer_id), level="DEBUG")
        MediLink_ConfigLoader.log("provider_last_name: {}".format(provider_last_name), level="DEBUG")
        MediLink_ConfigLoader.log("date_of_birth: {}".format(date_of_birth), level="DEBUG")
        MediLink_ConfigLoader.log("member_id: {}".format(member_id), level="DEBUG")
        MediLink_ConfigLoader.log("npi: {}".format(npi), level="DEBUG")

        # Call the get_eligibility_v3 function.
        # 'MemberIDDateOfBirth' selects the API's search mode -- TODO confirm
        # the accepted mode strings against MediLink_API_v3's contract.
        eligibility = MediLink_API_v3.get_eligibility_v3(
            client, payer_id, provider_last_name, 'MemberIDDateOfBirth', date_of_birth, member_id, npi
        )

        # Log the full response for later troubleshooting.
        MediLink_ConfigLoader.log("Eligibility response: {}".format(json.dumps(eligibility, indent=4)), level="DEBUG")

        return eligibility
    except requests.exceptions.HTTPError as e:
        # Log the HTTP error and its response body; control then falls
        # through to the final return None below.
        MediLink_ConfigLoader.log("HTTPError: {}".format(e), level="ERROR")
        MediLink_ConfigLoader.log("Response content: {}".format(e.response.content), level="ERROR")
    except Exception as e:
        # Log any other exception; both error paths return None.
        MediLink_ConfigLoader.log("Error: {}".format(e), level="ERROR")
    return None
132
+
133
+ # Function to extract required fields and display in a tabular format
134
# Function to extract required fields and display in a tabular format
def display_eligibility_info(data, dob, member_id, output_file):
    """Write one table row per medical policy in *data* to *output_file* and stdout.

    NOTE(review): this assumes the response schema implied by the key
    accesses below (memberPolicies -> policyInfo / patientInfo /
    deductibleInfo / insuranceInfo). A policy missing both 'individual'
    and 'family' deductible info would raise KeyError -- confirm against
    the API contract.
    """
    if data is None:
        return

    for policy in data["memberPolicies"]:
        # Skip non-medical policies
        if policy["policyInfo"]["coverageType"] != "Medical":
            continue

        # First patientInfo entry is used; additional entries are ignored.
        patient_info = policy["patientInfo"][0]
        lastName = patient_info.get("lastName", "")
        firstName = patient_info.get("firstName", "")
        middleName = patient_info.get("middleName", "")

        # Check if the remaining amount is per individual first, then fallback to family
        if 'individual' in policy["deductibleInfo"]:
            remaining_amount = policy["deductibleInfo"]["individual"]["inNetwork"].get("remainingAmount", "")
        else:
            remaining_amount = policy["deductibleInfo"]["family"]["inNetwork"].get("remainingAmount", "")

        insurance_info = policy["insuranceInfo"]
        ins_insuranceType = insurance_info.get("insuranceType", "")
        ins_insuranceTypeCode = insurance_info.get("insuranceTypeCode", "")
        ins_memberID = insurance_info.get("memberId", "")
        ins_payerID = insurance_info.get("payerId", "")

        policy_info = policy["policyInfo"]
        policy_status = policy_info.get("policyStatus", "")

        # Truncate to 20 chars so the fixed-width table stays aligned.
        patient_name = "{} {} {}".format(firstName, middleName, lastName).strip()[:20]

        # Display patient information in a table row format
        table_row = "{:<20} | {:<10} | {:<40} | {:<5} | {:<15} | {:<15}".format(
            patient_name, dob, ins_insuranceType, ins_payerID, policy_status, remaining_amount)
        output_file.write(table_row + "\n")
        print(table_row)  # Print to console for progressive display
170
+
171
# Print the table header once before entering the loop.
# NOTE(review): os.getenv('TEMP') and notepad.exe make this Windows-only;
# on other platforms TEMP may be unset and os.path.join would raise.
output_file_path = os.path.join(os.getenv('TEMP'), 'eligibility_report.txt')
with open(output_file_path, 'w') as output_file:
    table_header = "{:<20} | {:<10} | {:<40} | {:<5} | {:<15} | {:<15}".format(
        "Patient Name", "DOB", "Insurance Type", "PayID", "Policy Status", "Remaining Amt")
    output_file.write(table_header + "\n")
    output_file.write("-" * len(table_header) + "\n")
    print(table_header)
    print("-" * len(table_header))

    # Set to keep track of processed patients so a patient matched under one
    # payer is not re-queried under the remaining payers.
    processed_patients = set()

    # Loop through each payer_id and patient to call the API, then display
    # the eligibility information progressively as results arrive.
    errors = []
    for payer_id in payer_ids:
        for dob, member_id in patients:
            # Skip if this patient has already been processed
            if (dob, member_id) in processed_patients:
                continue
            try:
                eligibility_data = get_eligibility_info(client, payer_id, provider_last_name, dob, member_id, npi)
                if eligibility_data is not None:
                    display_eligibility_info(eligibility_data, dob, member_id, output_file)  # Display as we get the result
                    processed_patients.add((dob, member_id))  # Mark this patient as processed
            except Exception as e:
                # Collect failures; they are reported after the loop.
                errors.append((dob, member_id, str(e)))

    # Display errors if any
    if errors:
        error_msg = "\nErrors encountered during API calls:\n"
        output_file.write(error_msg)
        print(error_msg)
        for error in errors:
            error_details = "DOB: {}, Member ID: {}, Error: {}\n".format(error[0], error[1], error[2])
            output_file.write(error_details)
            print(error_details)

# Open the generated file in Notepad for the operator to review.
os.system('notepad.exe {}'.format(output_file_path))
MediLink/MediLink_Down.py CHANGED
@@ -3,22 +3,11 @@ import os
3
3
  import argparse
4
4
  import shutil
5
5
  import glob
6
+ import csv
6
7
  from MediLink_Decoder import process_file
7
- from MediLink_DataMgmt import operate_winscp, consolidate_csvs
8
- import MediLink_ConfigLoader
9
- # Import decoders for other file types
10
-
11
- """
12
- Main triaging function for handling report downloads and processing from various endpoints. This function
13
- handles downloading reports, moving files, and decoding them into a readable format. The goal is to
14
- provide detailed receipt and troubleshooting information for the claims.
15
-
16
- Key Enhancements:
17
- - Handle multiple file types (ERA, 277, etc.) and integrate respective decoders.
18
- - Support multi-endpoint processing.
19
- - Implement progress tracking for long-running operations.
20
- - Provide both consolidated CSV output and in-memory parsed data for real-time display.
21
- """
8
+ from MediLink_ConfigLoader import load_configuration, log
9
+ from MediLink_DataMgmt import operate_winscp
10
+
22
11
  def move_downloaded_files(local_storage_path, config):
23
12
  local_response_directory = os.path.join(local_storage_path, "responses")
24
13
 
@@ -26,7 +15,7 @@ def move_downloaded_files(local_storage_path, config):
26
15
  os.makedirs(local_response_directory)
27
16
 
28
17
  download_dir = config['MediLink_Config']['local_storage_path']
29
- file_extensions = ['.era', '.277'] # Extendable list of file extensions
18
+ file_extensions = ['.era', '.277', '.277ibr', '.277ebr', '.dpt', '.ebt', '.ibt', '.txt'] # Extendable list of file extensions
30
19
 
31
20
  for ext in file_extensions:
32
21
  downloaded_files = [f for f in os.listdir(download_dir) if f.endswith(ext)]
@@ -34,12 +23,13 @@ def move_downloaded_files(local_storage_path, config):
34
23
  source_path = os.path.join(download_dir, file)
35
24
  destination_path = os.path.join(local_response_directory, file)
36
25
  shutil.move(source_path, destination_path)
37
- MediLink_ConfigLoader.log("Moved '{}' to '{}'".format(file, local_response_directory))
26
+ log("Moved '{}' to '{}'".format(file, local_response_directory))
38
27
 
39
28
  def find_files(file_path_pattern):
40
29
  normalized_path = os.path.normpath(file_path_pattern)
41
-
42
- if "*" in normalized_path:
30
+ if os.path.isdir(normalized_path):
31
+ return [os.path.join(normalized_path, f) for f in os.listdir(normalized_path) if os.path.isfile(os.path.join(normalized_path, f))]
32
+ elif "*" in normalized_path:
43
33
  matching_files = glob.glob(normalized_path)
44
34
  return [os.path.normpath(file) for file in matching_files]
45
35
  else:
@@ -47,76 +37,117 @@ def find_files(file_path_pattern):
47
37
 
48
38
def translate_files(files, output_directory):
    """Decode each downloaded response file and collect the results.

    Returns a (consolidated_records, translated_files) pair: the formatted
    record dicts from every file, and the paths of the per-file CSVs.
    Also prints a per-extension tally of the files seen.
    """
    translated_files = []
    consolidated_records = []
    file_counts = {'.era': 0, '.277': 0, '.277ibr': 0, '.277ebr': 0, '.dpt': 0, '.ebt': 0, '.ibt': 0, '.txt': 0}

    for path in files:
        extension = os.path.splitext(path)[1]
        if extension in file_counts:
            file_counts[extension] += 1

        try:
            file_records = process_file(path, output_directory, return_records=True)
            consolidated_records.extend(file_records)
            decoded_path = os.path.join(output_directory, os.path.basename(path) + '_decoded.csv')
            log("Translated file to CSV: {}".format(decoded_path), level="INFO")
            translated_files.append(decoded_path)
        except ValueError:
            log("Unsupported file type: {}".format(path), level="WARNING")
        except Exception as e:
            # A single bad file must not abort the whole batch.
            log("Error processing file {}: {}".format(path, e), level="ERROR")

    print("Detected and processed file counts by type:")
    for extension, count in file_counts.items():
        print("{}: {} files detected".format(extension, count))

    return consolidated_records, translated_files
64
64
 
65
65
def display_translated_files(translated_files):
    """Print a summary list of the CSV files produced by translation."""
    print("\nTranslated Files Summary:")
    for path in translated_files:
        print(" - {}".format(path))
69
69
 
70
def main():
    """CLI entry point: translate a given path pattern, or download-and-process.

    With --file_path_pattern the named files/directory are translated
    directly; otherwise reports are downloaded from every configured
    endpoint first.
    """
    parser = argparse.ArgumentParser(description="Process files and convert them to CSV format.")
    parser.add_argument('--config_path', type=str, help='Path to the configuration JSON file', default="json/config.json")
    parser.add_argument('--file_path_pattern', type=str, help='Path pattern or directory for files to process.', default=None)
    args = parser.parse_args()

    config, _ = load_configuration(args.config_path)

    # Translated CSVs live under a dedicated subdirectory of local storage.
    storage_root = config['MediLink_Config']['local_storage_path']
    output_directory = os.path.join(storage_root, "translated_csvs")

    if args.file_path_pattern:
        process_files_by_pattern(args.file_path_pattern, output_directory)
    else:
        download_and_process_files(config, storage_root, output_directory)
85
+
86
def process_files_by_pattern(file_path_pattern, output_directory):
    """Translate every file matching *file_path_pattern*, then offer CSV export."""
    files = find_files(file_path_pattern)
    if not files:
        log("No files found matching: {}".format(file_path_pattern), level="WARNING")
        return

    log("Translating files: {}".format(', '.join(files)), level="INFO")
    consolidated_records, translated_files = translate_files(files, output_directory)
    log("Translation completed.", level="INFO")
    if consolidated_records:
        display_consolidated_records(consolidated_records)
        prompt_csv_export(consolidated_records, output_directory)
101
98
 
99
def download_and_process_files(config, local_storage_path, output_directory):
    """Download reports from all endpoints, archive them, and translate them."""
    fetched = download_files_from_endpoints(config, local_storage_path)
    # Archive the raw downloads into the responses directory before decoding.
    move_downloaded_files(local_storage_path, config)
    consolidated_records, translated_files = translate_files(fetched, output_directory)
    if consolidated_records:
        display_consolidated_records(consolidated_records)
        prompt_csv_export(consolidated_records, output_directory)
106
+
107
def download_files_from_endpoints(config, local_storage_path):
    """Download report files from every configured endpoint via WinSCP.

    Returns the combined list of downloaded file paths.
    """
    downloaded = []
    for endpoint_config in config['MediLink_Config']['endpoints'].values():
        downloaded.extend(operate_winscp("download", None, endpoint_config, local_storage_path, config))
    return downloaded
113
+
114
def display_consolidated_records(records):
    """Print all consolidated records as one aligned text table.

    Fix: return early for an empty record list instead of printing a bare
    header and divider (matches the guard in MediLink_Decoder's version).
    """
    if not records:
        return

    # Define the fieldnames and size each column to fit header and values.
    new_fieldnames = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
    col_widths = {field: len(field) for field in new_fieldnames}

    # Update column widths based on records
    for record in records:
        for field in new_fieldnames:
            col_widths[field] = max(col_widths[field], len(str(record.get(field, ''))))

    # Create table header
    header = " | ".join("{:<{}}".format(field, col_widths[field]) for field in new_fieldnames)
    print(header)
    print("-" * len(header))

    # Create table rows
    for record in records:
        row = " | ".join("{:<{}}".format(str(record.get(field, '')), col_widths[field]) for field in new_fieldnames)
        print(row)
133
+
134
def prompt_csv_export(records, output_directory):
    """Ask the user whether to export *records* to a consolidated CSV.

    No-op when there are no records; logs whether the export happened.
    """
    if not records:
        return
    answer = input("Do you want to export the consolidated records to a CSV file? (y/n): ")
    if answer.lower() != 'y':
        log("CSV export skipped by user.", level="INFO")
        return
    target = os.path.join(output_directory, "Consolidated_Records.csv")
    write_records_to_csv(records, target)
    log("Consolidated CSV file created at: {}".format(target), level="INFO")
143
+
144
def write_records_to_csv(records, output_file_path):
    """Write consolidated records to *output_file_path* using the shared schema."""
    columns = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
    with open(output_file_path, 'w', newline='') as out:
        writer = csv.DictWriter(out, fieldnames=columns)
        writer.writeheader()
        writer.writerows(records)
116
151
 
117
152
if __name__ == "__main__":
    # Script entry point; all argument parsing happens inside main().
    main()