medicafe 0.240809.0__py3-none-any.whl → 0.241015.0__py3-none-any.whl

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.

Potentially problematic release.


This version of medicafe might be problematic; consult the package registry's advisory page for more details.

MediLink/MediLink_Down.py CHANGED
@@ -1,137 +1,97 @@
1
1
  # MediLink_Down.py
2
- import os
3
- import argparse
4
- import shutil
5
- import glob
6
- import csv
7
- from MediLink_Decoder import process_file
8
- from MediLink_ConfigLoader import load_configuration, log
2
+ import os, shutil
3
+ from MediLink_Decoder import process_decoded_file, display_consolidated_records, write_records_to_csv
4
+ from MediLink_ConfigLoader import log, load_configuration
9
5
  from MediLink_DataMgmt import operate_winscp
10
6
 
11
- def move_downloaded_files(local_storage_path, config):
7
+ def handle_files(local_storage_path, downloaded_files):
8
+ """
9
+ Moves downloaded files to the appropriate directory and translates them to CSV format.
10
+ """
11
+ log("Starting to handle downloaded files.")
12
+
13
+ # Set the local response directory
12
14
  local_response_directory = os.path.join(local_storage_path, "responses")
15
+ os.makedirs(local_response_directory, exist_ok=True)
13
16
 
14
- if not os.path.exists(local_response_directory):
15
- os.makedirs(local_response_directory)
17
+ # Supported file extensions
18
+ file_extensions = ['.era', '.277', '.277ibr', '.277ebr', '.dpt', '.ebt', '.ibt', '.txt']
16
19
 
17
- download_dir = config['MediLink_Config']['local_storage_path']
18
- file_extensions = ['.era', '.277', '.277ibr', '.277ebr', '.dpt', '.ebt', '.ibt', '.txt'] # Extendable list of file extensions
20
+ files_moved = []
19
21
 
20
- for ext in file_extensions:
21
- downloaded_files = [f for f in os.listdir(download_dir) if f.endswith(ext)]
22
- for file in downloaded_files:
23
- source_path = os.path.join(download_dir, file)
22
+ for file in downloaded_files:
23
+ if any(file.lower().endswith(ext) for ext in file_extensions): # Case-insensitive match
24
+ source_path = os.path.join(local_storage_path, file)
24
25
  destination_path = os.path.join(local_response_directory, file)
25
- shutil.move(source_path, destination_path)
26
- log("Moved '{}' to '{}'".format(file, local_response_directory))
27
-
28
- def find_files(file_path_pattern):
29
- normalized_path = os.path.normpath(file_path_pattern)
30
- if os.path.isdir(normalized_path):
31
- return [os.path.join(normalized_path, f) for f in os.listdir(normalized_path) if os.path.isfile(os.path.join(normalized_path, f))]
32
- elif "*" in normalized_path:
33
- matching_files = glob.glob(normalized_path)
34
- return [os.path.normpath(file) for file in matching_files]
35
- else:
36
- return [normalized_path] if os.path.exists(normalized_path) else []
26
+
27
+ try:
28
+ shutil.move(source_path, destination_path)
29
+ log("Moved '{}' to '{}'".format(file, local_response_directory))
30
+ files_moved.append(destination_path)
31
+ except Exception as e:
32
+ log("Error moving file '{}' to '{}': {}".format(file, destination_path, e), level="ERROR")
33
+
34
+ if not files_moved:
35
+ log("No files were moved. Ensure that files with supported extensions exist in the download directory.", level="WARNING")
36
+
37
+ # Translate the files
38
+ consolidated_records, translated_files = translate_files(files_moved, local_response_directory)
39
+
40
+ return consolidated_records, translated_files
37
41
 
38
42
  def translate_files(files, output_directory):
43
+ """
44
+ Translates given files into CSV format and returns the list of translated files and consolidated records.
45
+ """
46
+ log("Translating files: {}".format(files), level="DEBUG")
47
+
48
+ if not files:
49
+ log("No files provided for translation. Exiting translate_files.", level="WARNING")
50
+ return [], []
51
+
39
52
  translated_files = []
40
53
  consolidated_records = []
41
- file_counts = {'.era': 0, '.277': 0, '.277ibr': 0, '.277ebr': 0, '.dpt': 0, '.ebt': 0, '.ibt': 0, '.txt': 0}
54
+
55
+ # Supported file extensions with selector
56
+ file_type_selector = {
57
+ '.era': False,
58
+ '.277': False,
59
+ '.277ibr': False,
60
+ '.277ebr': False,
61
+ '.dpt': False,
62
+ '.ebt': True, # Only EBT files are processed
63
+ '.ibt': False,
64
+ '.txt': False
65
+ }
66
+
67
+ file_counts = {ext: 0 for ext in file_type_selector.keys()}
42
68
 
43
69
  for file in files:
44
70
  ext = os.path.splitext(file)[1]
45
- if ext in file_counts:
71
+ if file_type_selector.get(ext, False): # Check if the file type is selected
46
72
  file_counts[ext] += 1
47
73
 
48
- try:
49
- records = process_file(file, output_directory, return_records=True)
50
- consolidated_records.extend(records)
51
- csv_file_path = os.path.join(output_directory, os.path.basename(file) + '_decoded.csv')
52
- log("Translated file to CSV: {}".format(csv_file_path), level="INFO")
53
- translated_files.append(csv_file_path)
54
- except ValueError as ve:
55
- log("Unsupported file type: {}".format(file), level="WARNING")
56
- except Exception as e:
57
- log("Error processing file {}: {}".format(file, e), level="ERROR")
58
-
59
- print("Detected and processed file counts by type:")
74
+ try:
75
+ records = process_decoded_file(os.path.join(output_directory, file), output_directory, return_records=True)
76
+ consolidated_records.extend(records)
77
+ csv_file_path = os.path.join(output_directory, os.path.basename(file) + '_decoded.csv')
78
+ translated_files.append(csv_file_path)
79
+ log("Translated file to CSV: {}".format(csv_file_path), level="INFO")
80
+ except ValueError:
81
+ log("Unsupported file type: {}".format(file), level="WARNING")
82
+ except Exception as e:
83
+ log("Error processing file {}: {}".format(file, e), level="ERROR")
84
+
85
+ log("Detected and processed file counts by type:")
60
86
  for ext, count in file_counts.items():
61
- print("{}: {} files detected".format(ext, count))
87
+ log("{}: {} files detected".format(ext, count), level="INFO")
62
88
 
63
89
  return consolidated_records, translated_files
64
90
 
65
- def display_translated_files(translated_files):
66
- print("\nTranslated Files Summary:")
67
- for file in translated_files:
68
- print(" - {}".format(file))
69
-
70
- def main():
71
- parser = argparse.ArgumentParser(description="Process files and convert them to CSV format.")
72
- parser.add_argument('--config_path', type=str, help='Path to the configuration JSON file', default="json/config.json")
73
- parser.add_argument('--file_path_pattern', type=str, help='Path pattern or directory for files to process.', default=None)
74
- args = parser.parse_args()
75
-
76
- config, _ = load_configuration(args.config_path)
77
-
78
- local_storage_path = config['MediLink_Config']['local_storage_path']
79
- output_directory = os.path.join(local_storage_path, "translated_csvs")
80
-
81
- if args.file_path_pattern:
82
- process_files_by_pattern(args.file_path_pattern, output_directory)
83
- else:
84
- download_and_process_files(config, local_storage_path, output_directory)
85
-
86
- def process_files_by_pattern(file_path_pattern, output_directory):
87
- files = find_files(file_path_pattern)
88
- if files:
89
- files_str = ', '.join(files)
90
- log("Translating files: {}".format(files_str), level="INFO")
91
- consolidated_records, translated_files = translate_files(files, output_directory)
92
- log("Translation completed.", level="INFO")
93
- if consolidated_records:
94
- display_consolidated_records(consolidated_records)
95
- prompt_csv_export(consolidated_records, output_directory)
96
- else:
97
- log("No files found matching: {}".format(file_path_pattern), level="WARNING")
98
-
99
- def download_and_process_files(config, local_storage_path, output_directory):
100
- downloaded_files = download_files_from_endpoints(config, local_storage_path)
101
- move_downloaded_files(local_storage_path, config)
102
- consolidated_records, translated_files = translate_files(downloaded_files, output_directory)
103
- if consolidated_records:
104
- display_consolidated_records(consolidated_records)
105
- prompt_csv_export(consolidated_records, output_directory)
106
-
107
- def download_files_from_endpoints(config, local_storage_path):
108
- endpoint_configs = config['MediLink_Config']['endpoints'].values()
109
- downloaded_files = []
110
- for endpoint_config in endpoint_configs:
111
- downloaded_files += operate_winscp("download", None, endpoint_config, local_storage_path, config)
112
- return downloaded_files
113
-
114
- def display_consolidated_records(records):
115
- # Define the new fieldnames and their respective widths
116
- new_fieldnames = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
117
- col_widths = {field: len(field) for field in new_fieldnames}
118
-
119
- # Update column widths based on records
120
- for record in records:
121
- for field in new_fieldnames:
122
- col_widths[field] = max(col_widths[field], len(str(record.get(field, ''))))
123
-
124
- # Create table header
125
- header = " | ".join("{:<{}}".format(field, col_widths[field]) for field in new_fieldnames)
126
- print(header)
127
- print("-" * len(header))
128
-
129
- # Create table rows
130
- for record in records:
131
- row = " | ".join("{:<{}}".format(str(record.get(field, '')), col_widths[field]) for field in new_fieldnames)
132
- print(row)
133
-
134
91
  def prompt_csv_export(records, output_directory):
92
+ """
93
+ Prompts the user to export consolidated records to a CSV file.
94
+ """
135
95
  if records:
136
96
  user_input = input("Do you want to export the consolidated records to a CSV file? (y/n): ")
137
97
  if user_input.lower() == 'y':
@@ -141,13 +101,48 @@ def prompt_csv_export(records, output_directory):
141
101
  else:
142
102
  log("CSV export skipped by user.", level="INFO")
143
103
 
144
- def write_records_to_csv(records, output_file_path):
145
- fieldnames = ['Claim #', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
146
- with open(output_file_path, 'w', newline='') as csvfile:
147
- writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
148
- writer.writeheader()
149
- for record in records:
150
- writer.writerow(record)
104
+ def main(desired_endpoint=None):
105
+ """
106
+ Main function for running MediLink_Down as a standalone script.
107
+ Simplified to handle only CLI operations and delegate the actual processing to the high-level function.
108
+ """
109
+ log("Running MediLink_Down.main with desired_endpoint={}".format(desired_endpoint))
110
+
111
+ if not desired_endpoint:
112
+ log("No specific endpoint provided. Aborting operation.", level="ERROR")
113
+ return None, None
114
+
115
+ try:
116
+ config, _ = load_configuration()
117
+ endpoint_config = config['MediLink_Config']['endpoints'].get(desired_endpoint)
118
+ if not endpoint_config or 'remote_directory_down' not in endpoint_config:
119
+ log("Configuration for endpoint '{}' is incomplete or missing 'remote_directory_down'.".format(desired_endpoint), level="ERROR")
120
+ return None, None
121
+
122
+ local_storage_path = config['MediLink_Config']['local_storage_path']
123
+ log("Local storage path set to {}".format(local_storage_path))
124
+
125
+ downloaded_files = operate_winscp("download", None, endpoint_config, local_storage_path, config)
126
+
127
+ if downloaded_files:
128
+ log("From main(), WinSCP Downloaded the following files: \n{}".format(downloaded_files))
129
+ consolidated_records, translated_files = handle_files(local_storage_path, downloaded_files)
130
+
131
+ # Convert UnifiedRecord instances to dictionaries before displaying
132
+ dict_consolidated_records = [record.to_dict() for record in consolidated_records]
133
+ display_consolidated_records(dict_consolidated_records)
134
+
135
+ # Prompt for CSV export
136
+ prompt_csv_export(consolidated_records, local_storage_path)
137
+
138
+ return consolidated_records, translated_files
139
+ else:
140
+ log("No files were downloaded for endpoint: {}. Exiting...".format(desired_endpoint), level="WARNING")
141
+ return None, None
142
+
143
+ except Exception as e:
144
+ log("An error occurred in MediLink_Down.main: {}".format(e), level="ERROR")
145
+ return None, None
151
146
 
152
147
  if __name__ == "__main__":
153
- main()
148
+ main()
@@ -1,17 +1,8 @@
1
- import sys
2
- import os
3
- import subprocess
4
- import time
5
- import webbrowser
1
+ # MediLink_Gmail.py
2
+ import sys, os, subprocess, time, webbrowser, requests, json, ssl, signal
6
3
  from MediLink_ConfigLoader import log, load_configuration
7
-
8
- import requests
9
- import json
10
4
  from http.server import BaseHTTPRequestHandler, HTTPServer
11
- import ssl
12
- import signal
13
5
  from threading import Thread, Event
14
-
15
6
  import platform
16
7
 
17
8
  config, _ = load_configuration()
@@ -218,7 +209,7 @@ class RequestHandler(BaseHTTPRequestHandler):
218
209
  self.send_header('Content-type', 'text/html')
219
210
  self.end_headers()
220
211
  self.wfile.write("Authentication successful. You can close this window now.".encode())
221
- initiate_link_retrieval() # Proceed with link retrieval
212
+ initiate_link_retrieval(config) # Pass config here
222
213
  else:
223
214
  log("Authentication failed with response: {}".format(token_response)) # Add this line
224
215
  self.send_response(400)
@@ -353,13 +344,13 @@ def open_browser_with_executable(url, browser_path=None):
353
344
  except Exception as e:
354
345
  log("Failed to open browser: {}".format(e))
355
346
 
356
- def initiate_link_retrieval():
347
+ def initiate_link_retrieval(config):
357
348
  log("Initiating browser via implicit GET.")
358
- url_get = "https://script.google.com/macros/s/AKfycbzlq8d32mDlLdtFxgL_zvLJernlGPB64ftyxyH8F1nNlr3P-VBH6Yd0NGa1pbBc5AozvQ/exec?action=get_link"
349
+ url_get = "https://script.google.com/macros/s/{}/exec?action=get_link".format(config['MediLink_Config']['webapp_deployment_id']) # Use config here
359
350
  open_browser_with_executable(url_get)
360
351
 
361
352
  log("Preparing POST call.")
362
- url = "https://script.google.com/macros/s/AKfycbzlq8d32mDlLdtFxgL_zvLJernlGPB64ftyxyH8F1nNlr3P-VBH6Yd0NGa1pbBc5AozvQ/exec"
353
+ url = "https://script.google.com/macros/s/{}/exec".format(config['MediLink_Config']['webapp_deployment_id']) # Use config here
363
354
  downloaded_emails = list(load_downloaded_emails())
364
355
  payload = {
365
356
  "downloadedEmails": downloaded_emails
@@ -446,7 +437,7 @@ def auth_and_retrieval():
446
437
  shutdown_event.wait() # Wait for the shutdown event to be set after authentication
447
438
  else:
448
439
  log("Access token found. Proceeding.")
449
- initiate_link_retrieval()
440
+ initiate_link_retrieval(config) # Pass config here
450
441
  shutdown_event.wait() # Wait for the shutdown event to be set
451
442
 
452
443
  if __name__ == "__main__":
@@ -1,6 +1,8 @@
1
1
  # MediLink_Parser.py
2
2
 
3
- def parse_era_content(content):
3
+ import re
4
+
5
+ def parse_era_content(content, debug=False):
4
6
  extracted_data = []
5
7
  normalized_content = content.replace('~\n', '~')
6
8
  lines = normalized_content.split('~')
@@ -74,13 +76,14 @@ def parse_era_content(content):
74
76
  })
75
77
  extracted_data.append(record)
76
78
 
77
- print("Parsed ERA Content:")
78
- for data in extracted_data:
79
- print(data)
79
+ if debug:
80
+ print("Parsed ERA Content:")
81
+ for data in extracted_data:
82
+ print(data)
80
83
 
81
84
  return extracted_data
82
85
 
83
- def parse_277_content(content):
86
+ def parse_277_content(content, debug=False):
84
87
  segments = content.split('~')
85
88
  records = []
86
89
  current_record = {}
@@ -115,19 +118,20 @@ def parse_277_content(content):
115
118
  if current_record:
116
119
  records.append(current_record)
117
120
 
118
- print("Parsed 277 Content:")
119
- for record in records:
120
- print(record)
121
+ if debug:
122
+ print("Parsed 277 Content:")
123
+ for record in records:
124
+ print(record)
121
125
 
122
126
  return records
123
127
 
124
- def parse_277IBR_content(content):
125
- return parse_277_content(content)
128
+ def parse_277IBR_content(content, debug=False):
129
+ return parse_277_content(content, debug)
126
130
 
127
- def parse_277EBR_content(content):
128
- return parse_277_content(content)
131
+ def parse_277EBR_content(content, debug=False):
132
+ return parse_277_content(content, debug)
129
133
 
130
- def parse_dpt_content(content):
134
+ def parse_dpt_content(content, debug=False):
131
135
  extracted_data = []
132
136
  lines = content.splitlines()
133
137
  record = {}
@@ -143,34 +147,56 @@ def parse_dpt_content(content):
143
147
  if record:
144
148
  extracted_data.append(record)
145
149
 
146
- print("Parsed DPT Content:")
147
- for data in extracted_data:
148
- print(data)
150
+ if debug:
151
+ print("Parsed DPT Content:")
152
+ for data in extracted_data:
153
+ print(data)
149
154
 
150
155
  return extracted_data
151
156
 
152
- def parse_ebt_content(content):
153
- extracted_data = []
154
- lines = content.splitlines()
155
- record = {}
157
+ def parse_ebt_content(content, debug=False):
158
+ extracted_data = [] # List to hold all extracted records
159
+ lines = content.splitlines() # Split the content into individual lines
160
+ record = {} # Dictionary to hold the current record being processed
161
+
162
+ # Regular expression pattern to match key-value pairs in the format "Key: Value"
163
+ key_value_pattern = re.compile(r'([^:]+):\s*(.+?)(?=\s{2,}[^:]+?:|$)')
164
+
156
165
  for line in lines:
166
+ # Check for the start of a new record based on the presence of 'Patient Name'
157
167
  if 'Patient Name:' in line and record:
158
- extracted_data.append(record)
159
- record = {}
160
- parts = line.split(':')
161
- if len(parts) == 2:
162
- key, value = parts[0].strip(), parts[1].strip()
163
- record[key] = value
168
+ ebt_post_processor(record) # Process the current record before adding it to the list
169
+ extracted_data.append(record) # Add the completed record to the list
170
+ record = {} # Reset the record for the next entry
171
+
172
+ # Find all key-value pairs in the current line
173
+ matches = key_value_pattern.findall(line)
174
+ for key, value in matches:
175
+ key = key.strip() # Remove leading/trailing whitespace from the key
176
+ value = value.strip() # Remove leading/trailing whitespace from the value
177
+ record[key] = value # Add the key-value pair to the current record
178
+
179
+ # Process and add the last record if it exists
164
180
  if record:
165
- extracted_data.append(record)
181
+ ebt_post_processor(record) # Final processing of the last record
182
+ extracted_data.append(record) # Add the last record to the list
166
183
 
167
- print("Parsed EBT Content:")
168
- for data in extracted_data:
169
- print(data)
184
+ # Debug output to show parsed data if debugging is enabled
185
+ if debug:
186
+ print("Parsed EBT Content:")
187
+ for data in extracted_data:
188
+ print(data)
170
189
 
171
- return extracted_data
190
+ return extracted_data # Return the list of extracted records
191
+
192
+ def ebt_post_processor(record):
193
+ # Process the 'Message Initiator' field to separate it from 'Message Type'
194
+ if 'Message Initiator' in record and 'Message Type:' in record['Message Initiator']:
195
+ parts = record['Message Initiator'].split('Message Type:') # Split the string into parts
196
+ record['Message Initiator'] = parts[0].strip() # Clean up the 'Message Initiator'
197
+ record['Message Type'] = parts[1].strip() # Clean up the 'Message Type'
172
198
 
173
- def parse_ibt_content(content):
199
+ def parse_ibt_content(content, debug=False):
174
200
  extracted_data = []
175
201
  lines = content.splitlines()
176
202
  record = {}
@@ -186,8 +212,9 @@ def parse_ibt_content(content):
186
212
  if record:
187
213
  extracted_data.append(record)
188
214
 
189
- print("Parsed IBT Content:")
190
- for data in extracted_data:
191
- print(data)
215
+ if debug:
216
+ print("Parsed IBT Content:")
217
+ for data in extracted_data:
218
+ print(data)
192
219
 
193
- return extracted_data
220
+ return extracted_data
MediLink/MediLink_UI.py CHANGED
@@ -1,9 +1,10 @@
1
+ # MediLink_UI.py
1
2
  from datetime import datetime
2
- import os
3
- import sys
3
+ import os, sys
4
4
 
5
5
  project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
6
- sys.path.append(project_dir)
6
+ if project_dir not in sys.path:
7
+ sys.path.append(project_dir)
7
8
 
8
9
  try:
9
10
  import MediLink_ConfigLoader
@@ -106,13 +107,6 @@ def display_patient_summaries(detailed_patient_data):
106
107
  print("Summary at index {} is missing key: {}".format(index, e))
107
108
  print() # add blank line for improved readability.
108
109
 
109
- def ask_for_proceeding_with_endpoints():
110
- """
111
- Asks the user if they want to proceed with all suggested endpoints.
112
- """
113
- proceed = input("Do you want to proceed with all suggested endpoints? (Y/N): ").strip().lower()
114
- return proceed == 'y'
115
-
116
110
  def display_file_summary(index, summary):
117
111
  # Ensure surgery_date is converted to a datetime object
118
112
  surgery_date = datetime.strptime(summary['surgery_date'], "%m-%d-%y")
@@ -137,26 +131,37 @@ def display_file_summary(index, summary):
137
131
  insurance_type[:2],
138
132
  summary['suggested_endpoint'][:20])
139
133
  )
140
-
134
+
141
135
  def user_select_files(file_list):
142
- """
143
- Prompt the user to select files from a list of detected files.
144
-
145
- :param file_list: List of detected files.
146
- :return: List of files selected by the user.
147
- """
148
- # Sort the files by creation time in descending order
149
- file_list = sorted(file_list, key=os.path.getctime, reverse=True)
136
+ # Sort files by creation time in descending order
137
+ file_list = sorted(file_list, key=os.path.getctime, reverse=True)[:10] # Limit to max 10 files
150
138
 
151
- print("Select claim files to submit from the following list:")
139
+ print("\nSelect the Z-form files to submit from the following list:\n")
140
+
141
+ formatted_files = []
152
142
  for i, file in enumerate(file_list):
153
- print("{}: {}".format(i+1, os.path.basename(file)))
143
+ basename = os.path.basename(file)
144
+ parts = basename.split('_')
145
+
146
+ # Try to parse the timestamp from the filename
147
+ if len(parts) > 2:
148
+ try:
149
+ timestamp_str = parts[1] + parts[2].split('.')[0]
150
+ timestamp = datetime.strptime(timestamp_str, '%Y%m%d%H%M%S')
151
+ formatted_date = timestamp.strftime('%m/%d %I:%M %p') # Changed to 12HR format with AM/PM
152
+ except ValueError:
153
+ formatted_date = basename # Fallback to original filename if parsing fails
154
+ else:
155
+ formatted_date = basename # Fallback to original filename if no timestamp
156
+
157
+ formatted_files.append((formatted_date, file))
158
+ print("{}: {}".format(i + 1, formatted_date))
154
159
 
155
160
  selected_indices = input("\nEnter the numbers of the files to process, separated by commas\n(or press Enter to select all): ")
156
161
  if not selected_indices:
157
- return file_list
162
+ return [file for _, file in formatted_files]
158
163
 
159
164
  selected_indices = [int(i.strip()) - 1 for i in selected_indices.split(',')]
160
- selected_files = [file_list[i] for i in selected_indices]
165
+ selected_files = [formatted_files[i][1] for i in selected_indices]
161
166
 
162
- return selected_files
167
+ return selected_files
MediLink/MediLink_Up.py CHANGED
@@ -1,7 +1,6 @@
1
+ # MediLink_Up.py
1
2
  from datetime import datetime
2
- import os
3
- import re
4
- import subprocess
3
+ import os, re, subprocess, traceback
5
4
  from tqdm import tqdm
6
5
  import MediLink_837p_encoder
7
6
  from MediLink_ConfigLoader import log, load_configuration
@@ -34,7 +33,7 @@ def check_internet_connection():
34
33
  print("An error occurred checking for internet connectivity:", e)
35
34
  return False
36
35
 
37
- def submit_claims(detailed_patient_data_grouped_by_endpoint, config):
36
+ def submit_claims(detailed_patient_data_grouped_by_endpoint, config, crosswalk):
38
37
  """
39
38
  Submits claims for each endpoint, either via WinSCP or API, based on configuration settings.
40
39
 
@@ -63,8 +62,9 @@ def submit_claims(detailed_patient_data_grouped_by_endpoint, config):
63
62
  # Attempt submission to each endpoint
64
63
  if True: #confirm_transmission({endpoint: patients_data}): # Confirm transmission to each endpoint with detailed overview
65
64
  if check_internet_connection():
65
+ client = MediLink_API_v3.APIClient()
66
66
  # Process files per endpoint
67
- converted_files = MediLink_837p_encoder.convert_files_for_submission(patients_data, config)
67
+ converted_files = MediLink_837p_encoder.convert_files_for_submission(patients_data, config, crosswalk, client)
68
68
  if converted_files:
69
69
  if method == 'winscp':
70
70
  # Transmit files via WinSCP
@@ -86,7 +86,6 @@ def submit_claims(detailed_patient_data_grouped_by_endpoint, config):
86
86
  # Transmit files via API
87
87
  try:
88
88
  api_responses = []
89
- client = MediLink_API_v3.APIClient()
90
89
  for file_path in converted_files:
91
90
  with open(file_path, 'r') as file:
92
91
  x12_request_data = file.read().replace('\n', '').replace('\r', '').strip()
@@ -242,10 +241,15 @@ def prepare_receipt_data(submission_results):
242
241
  # Unpack the tuple to get status and message
243
242
  status, message = file_result
244
243
  except ValueError as e:
245
- log("ValueError: {} for file_result: {}".format(e, file_result), level="ERROR")
244
+ file_result_length = len(file_result) if hasattr(file_result, '__len__') else 'Unknown'
245
+ error_msg = 'Too many values to unpack.' if 'too many values to unpack' in str(e) else \
246
+ 'Not enough values to unpack.' if 'not enough values to unpack' in str(e) else \
247
+ 'Value unpacking error.'
248
+ log("ValueError: {} for file_result: {} (Length: {})".format(error_msg, file_result, file_result_length), level="ERROR")
246
249
  continue
247
250
  except Exception as e:
248
- log("Unexpected error: {}".format(e), level="ERROR")
251
+ tb = traceback.format_exc()
252
+ log("Unexpected error: {}. Traceback: {}".format(e, tb), level="ERROR")
249
253
  continue
250
254
 
251
255
  log("Status: {}, Message: {}".format(status, message), level="DEBUG")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: medicafe
3
- Version: 0.240809.0
3
+ Version: 0.241015.0
4
4
  Summary: MediCafe
5
5
  Home-page: https://github.com/katanada2
6
6
  Author: Daniel Vidaud