medicafe 0.250722.0__py3-none-any.whl → 0.250723.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of medicafe might be problematic.

MediBot/MediBot_UI.py CHANGED
@@ -23,6 +23,8 @@ class AppControl:
  self.script_paused = False
  self.mapat_med_path = ''
  self.medisoft_shortcut = ''
+ # PERFORMANCE FIX: Add configuration caching to reduce lookup overhead
+ self._config_cache = {} # Cache for Medicare vs Private configuration lookups
  # Load initial paths from config when instance is created
  self.load_paths_from_config()
 
@@ -47,12 +49,28 @@ class AppControl:
  def load_paths_from_config(self, medicare=False):
  # Assuming `config` is a module or a globally accessible configuration dictionary
  # TODO Is this where the MAINS paths should also be set?
- if medicare:
- self.mapat_med_path = config.get('MEDICARE_MAPAT_MED_PATH', "")
- self.medisoft_shortcut = config.get('MEDICARE_SHORTCUT', "")
- else:
- self.mapat_med_path = config.get('MAPAT_MED_PATH', "")
- self.medisoft_shortcut = config.get('PRIVATE_SHORTCUT', "")
+
+ # PERFORMANCE FIX: Cache configuration lookups to reduce Medicare vs Private overhead
+ cache_key = 'medicare' if medicare else 'private'
+
+ if cache_key not in self._config_cache:
+ # Build cache entry for this configuration type
+ if medicare:
+ cached_config = {
+ 'mapat_path': config.get('MEDICARE_MAPAT_MED_PATH', ""),
+ 'shortcut': config.get('MEDICARE_SHORTCUT', "")
+ }
+ else:
+ cached_config = {
+ 'mapat_path': config.get('MAPAT_MED_PATH', ""),
+ 'shortcut': config.get('PRIVATE_SHORTCUT', "")
+ }
+ self._config_cache[cache_key] = cached_config
+
+ # Use cached values to avoid repeated config lookups
+ cached = self._config_cache[cache_key]
+ self.mapat_med_path = cached['mapat_path']
+ self.medisoft_shortcut = cached['shortcut']
 
  app_control = AppControl()
 
@@ -178,6 +196,12 @@ def display_menu_header(title):
  print("-" * 60)
 
  def handle_user_interaction(interaction_mode, error_message):
+ # Import here to avoid circular imports
+ try:
+ from MediBot import current_patient_context
+ except ImportError:
+ current_patient_context = None
+
  while True:
  # If interaction_mode is neither 'triage' nor 'error', then it's normal mode.
  title = "Error Occurred" if interaction_mode == 'error' else "Data Entry Options"
@@ -186,9 +210,19 @@ def handle_user_interaction(interaction_mode, error_message):
  if interaction_mode == 'error':
  print("\nERROR: ", error_message)
 
- # Need to tell the user which patient we're talking about because it won't be obvious anymore.
- # Also, this ERROR might be called from a location where the menu below isn't relevant like selecting patients
- # -- need a better way to handle that.
+ # PERFORMANCE FIX: Display patient context to address "won't be obvious anymore" issue
+ # Show user which patient and field they're working with for better F11 menu usability
+ if current_patient_context:
+ patient_name = current_patient_context.get('patient_name', 'Unknown Patient')
+ surgery_date = current_patient_context.get('surgery_date', 'Unknown Date')
+ last_field = current_patient_context.get('last_field', 'Unknown Field')
+ print("\nCurrent Context:")
+ print(" Patient: {}".format(patient_name))
+ print(" Surgery Date: {}".format(surgery_date))
+ print(" Last Field: {}".format(last_field))
+ print("")
+
+ # Menu options with improved context
  print("1: Retry last entry")
  print("2: Skip to next patient and continue")
  print("3: Go back two patients and redo")
@@ -668,6 +668,9 @@ def submit_uhc_claim(client, x12_request_data):
  if not transaction_id:
  raise ValueError("transactionId not found in the submission response")
 
+ # Log the transaction ID for traceability
+ MediLink_ConfigLoader.log("UHCAPI claim submission transactionId: {}".format(transaction_id), level="INFO")
+
  # Prepare the request body for the claim acknowledgement retrieval
  acknowledgement_body = {'transactionId': transaction_id}
 
@@ -1,6 +1,8 @@
  # MediLink_ClaimStatus.py
  from datetime import datetime, timedelta
  import os
+ import time
+ import json
  import MediLink_API_v3
 
  try:
@@ -21,140 +23,251 @@ start_date_str = start_date.strftime('%m/%d/%Y')
  billing_provider_tin = config['MediLink_Config'].get('billing_provider_tin')
 
  # Define the list of payer_id's to iterate over
- payer_ids = ['87726'] # Default Value
+ payer_ids = ['87726', '03432', '96385', '95467', '86050', '86047', '95378', '37602']
  # Allowed payer id's for UHC 87726, 03432, 96385, 95467, 86050, 86047, 95378, 37602. This api does not support payerId 06111.
- # payer_ids = ['87726', '03432', '96385', '95467', '86050', '86047', '95378', '37602']
- # Oddly enough, the API is completely ignoring the payerId parameter and returning the exact same dataset for all payer IDs.
 
  # Initialize the API client
  client = MediLink_API_v3.APIClient()
 
- # Function to process and display the data in a compact, tabular format
- def display_claim_summary(claim_summary, payer_id, output_file):
- claims = claim_summary.get('claims', [])
+ class ClaimCache:
+ """In-memory cache for API responses"""
+ def __init__(self):
+ self.cache = {} # {cache_key: {'data': response, 'payer_id': payer_id}}
 
- # Display header
- header = "Payer ID: {} | Start Date: {} | End Date: {}".format(payer_id, start_date_str, end_date_str)
+ def get_cache_key(self, tin, start_date, end_date, payer_id):
+ """Generate unique cache key for API call parameters"""
+ return "{}_{}_{}_{}".format(tin, start_date, end_date, payer_id)
+
+ def is_cached(self, cache_key):
+ """Check if response is cached"""
+ return cache_key in self.cache
+
+ def get_cached_response(self, cache_key):
+ """Retrieve cached response"""
+ return self.cache[cache_key]['data']
+
+ def cache_response(self, cache_key, response, payer_id):
+ """Cache API response"""
+ self.cache[cache_key] = {
+ 'data': response,
+ 'payer_id': payer_id
+ }
+
+ def clear_cache(self):
+ """Clear all cached data"""
+ self.cache.clear()
+
+ class ConsolidatedClaims:
+ """Consolidated claims data structure"""
+ def __init__(self):
+ self.claims_by_number = {} # {claim_number: {claim_data, payer_sources: [payer_ids]}}
+ self.payer_ids_checked = set()
+ self.duplicate_warnings = []
+
+ def add_claim(self, claim_data, payer_id):
+ """Add claim to consolidated data, tracking payer sources"""
+ claim_number = claim_data['claim_number']
+
+ if claim_number not in self.claims_by_number:
+ self.claims_by_number[claim_number] = {
+ 'data': claim_data,
+ 'payer_sources': [payer_id]
+ }
+ else:
+ # Check if this is a duplicate with different data
+ existing_data = self.claims_by_number[claim_number]['data']
+ if self._claims_equal(existing_data, claim_data):
+ # Same data, just add payer source
+ if payer_id not in self.claims_by_number[claim_number]['payer_sources']:
+ self.claims_by_number[claim_number]['payer_sources'].append(payer_id)
+ else:
+ # Different data - create warning
+ self.duplicate_warnings.append({
+ 'claim_number': claim_number,
+ 'existing_payers': self.claims_by_number[claim_number]['payer_sources'],
+ 'new_payer': payer_id,
+ 'existing_data': existing_data,
+ 'new_data': claim_data
+ })
+
+ self.payer_ids_checked.add(payer_id)
+
+ def _claims_equal(self, claim1, claim2):
+ """Compare two claim data structures for equality"""
+ # Compare key fields that should be identical for the same claim
+ key_fields = ['claim_status', 'patient_name', 'processed_date', 'first_service_date',
+ 'total_charged_amount', 'total_allowed_amount', 'total_paid_amount',
+ 'total_patient_responsibility_amount']
+
+ for field in key_fields:
+ if claim1.get(field) != claim2.get(field):
+ return False
+ return True
+
+ def extract_claim_data(claim):
+ """Extract standardized claim data from API response"""
+ claim_number = claim['claimNumber']
+ claim_status = claim['claimStatus']
+ patient_first_name = claim['memberInfo']['ptntFn']
+ patient_last_name = claim['memberInfo']['ptntLn']
+ processed_date = claim['claimSummary']['processedDt']
+ first_service_date = claim['claimSummary']['firstSrvcDt']
+ total_charged_amount = claim['claimSummary']['totalChargedAmt']
+ total_allowed_amount = claim['claimSummary']['totalAllowdAmt']
+ total_paid_amount = claim['claimSummary']['totalPaidAmt']
+ total_patient_responsibility_amount = claim['claimSummary']['totalPtntRespAmt']
+
120
+ patient_name = "{} {}".format(patient_first_name, patient_last_name)
121
+
122
+ return {
123
+ 'claim_number': claim_number,
124
+ 'claim_status': claim_status,
125
+ 'patient_name': patient_name,
126
+ 'processed_date': processed_date,
127
+ 'first_service_date': first_service_date,
128
+ 'total_charged_amount': total_charged_amount,
129
+ 'total_allowed_amount': total_allowed_amount,
130
+ 'total_paid_amount': total_paid_amount,
131
+ 'total_patient_responsibility_amount': total_patient_responsibility_amount,
132
+ 'claim_xwalk_data': claim['claimSummary']['clmXWalkData']
133
+ }
134
+
135
+ def process_claims_with_payer_rotation(billing_provider_tin, start_date_str, end_date_str,
136
+ payer_ids, cache, consolidated_claims):
137
+ """
138
+ Process claims across multiple payer IDs with caching and consolidation
139
+ """
140
+ client = MediLink_API_v3.APIClient()
141
+
142
+ for payer_id in payer_ids:
143
+ print("Processing Payer ID: {}".format(payer_id))
144
+
145
+ # Generate cache key
146
+ cache_key = cache.get_cache_key(billing_provider_tin, start_date_str, end_date_str, payer_id)
147
+
148
+ # Check cache first
149
+ if cache.is_cached(cache_key):
150
+ print(" Using cached response for Payer ID: {}".format(payer_id))
151
+ claim_summary = cache.get_cached_response(cache_key)
152
+ else:
153
+ print(" Making API call for Payer ID: {}".format(payer_id))
154
+ try:
155
+ claim_summary = MediLink_API_v3.get_claim_summary_by_provider(
156
+ client, billing_provider_tin, start_date_str, end_date_str, payer_id=payer_id
157
+ )
158
+ cache.cache_response(cache_key, claim_summary, payer_id)
159
+ except Exception as e:
160
+ print(" Error processing Payer ID {}: {}".format(payer_id, e))
161
+ continue
162
+
163
+ # Process claims from this payer
164
+ claims = claim_summary.get('claims', [])
165
+ for claim in claims:
166
+ claim_data = extract_claim_data(claim)
167
+ consolidated_claims.add_claim(claim_data, payer_id)
168
+
169
+ def display_consolidated_claims(consolidated_claims, output_file):
170
+ """
171
+ Display consolidated claims with payer ID header and duplicate warnings
172
+ """
173
+ # Display header with all payer IDs checked
174
+ payer_ids_str = ", ".join(sorted(consolidated_claims.payer_ids_checked))
175
+ header = "Payer IDs Checked: {} | Start Date: {} | End Date: {}".format(
176
+ payer_ids_str, start_date_str, end_date_str)
  print(header)
- output_file.write(header + "\n") # Write header to the output file
+ output_file.write(header + "\n")
  print("=" * len(header))
- output_file.write("=" * len(header) + "\n") # Write separator to the output file
-
+ output_file.write("=" * len(header) + "\n")
+
  # Table header
- table_header = "{:<10} | {:<10} | {:<20} | {:<6} | {:<6} | {:<7} | {:<7} | {:<7} | {:<7}".format(
- "Claim #", "Status", "Patient", "Proc.", "Serv.", "Allowed", "Paid", "Pt Resp", "Charged")
+ table_header = "{:<10} | {:<10} | {:<20} | {:<6} | {:<6} | {:<7} | {:<7} | {:<7} | {:<7} | {:<15}".format(
+ "Claim #", "Status", "Patient", "Proc.", "Serv.", "Allowed", "Paid", "Pt Resp", "Charged", "Payer Sources")
  print(table_header)
- output_file.write(table_header + "\n") # Write table header to the output file
+ output_file.write(table_header + "\n")
  print("-" * len(table_header))
- output_file.write("-" * len(table_header) + "\n") # Write separator to the output file
-
- # Process each claim and display it in a compact format
- claims_dict = {}
- for claim in claims:
- claim_number = claim['claimNumber'] # String: e.g., "29285698"
- claim_status = claim['claimStatus'] # String: e.g., "Finalized"
- patient_first_name = claim['memberInfo']['ptntFn'] # String: e.g., "FRANK"
- patient_last_name = claim['memberInfo']['ptntLn'] # String: e.g., "LOHR"
- processed_date = claim['claimSummary']['processedDt'] # String (Date in "MM/DD/YYYY" format): e.g., "06/10/2024"
- first_service_date = claim['claimSummary']['firstSrvcDt'] # String (Date in "MM/DD/YYYY" format): e.g., "05/13/2024"
- total_charged_amount = claim['claimSummary']['totalChargedAmt'] # String (Decimal as String): e.g., "450.00"
- total_allowed_amount = claim['claimSummary']['totalAllowdAmt'] # String (Decimal as String): e.g., "108.95"
- total_paid_amount = claim['claimSummary']['totalPaidAmt'] # String (Decimal as String): e.g., "106.78"
- total_patient_responsibility_amount = claim['claimSummary']['totalPtntRespAmt'] # String (Decimal as String): e.g., "0.00"
+ output_file.write("-" * len(table_header) + "\n")
+
+ # Sort claims by first service date
+ sorted_claims = sorted(
+ consolidated_claims.claims_by_number.items(),
+ key=lambda x: x[1]['data']['first_service_date']
+ )
+
+ # Display each claim
+ for claim_number, claim_info in sorted_claims:
+ claim_data = claim_info['data']
+ payer_sources = claim_info['payer_sources']
+
+ # Format payer sources
+ payer_sources_str = ", ".join(sorted(payer_sources))
+
+ table_row = "{:<10} | {:<10} | {:<20} | {:<6} | {:<6} | {:<7} | {:<7} | {:<7} | {:<7} | {:<15}".format(
+ claim_number, claim_data['claim_status'], claim_data['patient_name'][:20],
+ claim_data['processed_date'][:5], claim_data['first_service_date'][:5],
+ claim_data['total_allowed_amount'], claim_data['total_paid_amount'],
+ claim_data['total_patient_responsibility_amount'], claim_data['total_charged_amount'],
+ payer_sources_str
+ )
+ print(table_row)
+ output_file.write(table_row + "\n")
 
- patient_name = "{} {}".format(patient_first_name, patient_last_name)
-
- # Store claims in a dictionary to handle duplicate claim numbers
- if claim_number not in claims_dict:
- claims_dict[claim_number] = []
- claims_dict[claim_number].append({
- 'claim_status': claim_status,
- 'patient_name': patient_name,
- 'processed_date': processed_date,
- 'first_service_date': first_service_date,
- 'total_charged_amount': total_charged_amount,
- 'total_allowed_amount': total_allowed_amount,
- 'total_paid_amount': total_paid_amount,
- 'total_patient_responsibility_amount': total_patient_responsibility_amount,
- 'claim_xwalk_data': claim['claimSummary']['clmXWalkData']
- })
-
- # Sort claims by first_service_date
- sorted_claims = sorted(claims_dict.items(), key=lambda x: x[1][0]['first_service_date'])
-
- for claim_number, claim_data_list in sorted_claims:
- # Check for repeated claim numbers and validate data
- if len(claim_data_list) > 1:
- # Validate data
- unique_claims = {tuple(claim_data.items()) for claim_data in claim_data_list}
- if len(unique_claims) == 1:
- # Data is the same, only print once
- claim_data = claim_data_list[0]
- table_row = "{:<10} | {:<10} | {:<20} | {:<6} | {:<6} | {:<7} | {:<7} | {:<7} | {:<7}".format(
- claim_number, claim_data['claim_status'], claim_data['patient_name'][:20],
- claim_data['processed_date'][:5], claim_data['first_service_date'][:5],
- claim_data['total_allowed_amount'], claim_data['total_paid_amount'],
- claim_data['total_patient_responsibility_amount'], claim_data['total_charged_amount']
+ # Display crosswalk data for $0.00 claims
+ if claim_data['total_paid_amount'] == '0.00':
+ for xwalk in claim_data['claim_xwalk_data']:
+ clm507Cd = xwalk['clm507Cd']
+ clm507CdDesc = xwalk['clm507CdDesc']
+ clm508Cd = xwalk['clm508Cd']
+ clm508CdDesc = xwalk['clm508CdDesc']
+ clmIcnSufxCd = xwalk['clmIcnSufxCd']
+ print(" 507: {} ({}) | 508: {} ({}) | ICN Suffix: {}".format(
+ clm507Cd, clm507CdDesc, clm508Cd, clm508CdDesc, clmIcnSufxCd))
+
+ # Display duplicate warnings (terminal and log only, not file)
+ if consolidated_claims.duplicate_warnings:
+ print("\n" + "="*80)
+ print("DUPLICATE CLAIM WARNINGS:")
+ print("="*80)
+
+ for warning in consolidated_claims.duplicate_warnings:
+ warning_msg = (
+ "Claim {} found in multiple payers with different data:\n"
+ " Existing payers: {}\n"
+ " New payer: {}\n"
+ " Status difference: {} vs {}\n"
+ " Amount difference: ${} vs ${}".format(
+ warning['claim_number'],
+ ", ".join(warning['existing_payers']),
+ warning['new_payer'],
+ warning['existing_data']['claim_status'],
+ warning['new_data']['claim_status'],
+ warning['existing_data']['total_paid_amount'],
+ warning['new_data']['total_paid_amount']
  )
- print(table_row)
- output_file.write(table_row + "\n") # Write each row to the output file
-
- if claim_data['total_paid_amount'] == '0.00':
- for xwalk in claim_data['claim_xwalk_data']:
- clm507Cd = xwalk['clm507Cd'] # String: e.g., "F1"
- clm507CdDesc = xwalk['clm507CdDesc'] # String: e.g., "Finalized/Payment-The claim/line has been paid."
- clm508Cd = xwalk['clm508Cd'] # String: e.g., "104"
- clm508CdDesc = xwalk['clm508CdDesc'] # String: e.g., "Processed according to plan provisions..."
- clmIcnSufxCd = xwalk['clmIcnSufxCd'] # String: e.g., "01"
- print(" 507: {} ({}) | 508: {} ({}) | ICN Suffix: {}".format(clm507Cd, clm507CdDesc, clm508Cd, clm508CdDesc, clmIcnSufxCd))
- else:
- # Data is different, print all
- for claim_data in claim_data_list:
- table_row = "{:<10} | {:<10} | {:<20} | {:<6} | {:<6} | {:<7} | {:<7} | {:<7} | {:<7}".format(
- claim_number, claim_data['claim_status'], claim_data['patient_name'][:20],
- claim_data['processed_date'][:5], claim_data['first_service_date'][:5],
- claim_data['total_allowed_amount'], claim_data['total_paid_amount'],
- claim_data['total_patient_responsibility_amount'], claim_data['total_charged_amount']
- )
- print(table_row + " (Duplicate with different data)")
- output_file.write(table_row + " (Duplicate with different data)\n") # Write each row to the output file
-
- if claim_data['total_paid_amount'] == '0.00':
- for xwalk in claim_data['claim_xwalk_data']:
- clm507Cd = xwalk['clm507Cd'] # String: e.g., "F1"
- clm507CdDesc = xwalk['clm507CdDesc'] # String: e.g., "Finalized/Payment-The claim/line has been paid."
- clm508Cd = xwalk['clm508Cd'] # String: e.g., "104"
- clm508CdDesc = xwalk['clm508CdDesc'] # String: e.g., "Processed according to plan provisions..."
- clmIcnSufxCd = xwalk['clmIcnSufxCd'] # String: e.g., "01"
- print(" 507: {} ({}) | 508: {} ({}) | ICN Suffix: {}".format(clm507Cd, clm507CdDesc, clm508Cd, clm508CdDesc, clmIcnSufxCd))
- else:
- # Only one claim, print normally
- claim_data = claim_data_list[0]
- table_row = "{:<10} | {:<10} | {:<20} | {:<6} | {:<6} | {:<7} | {:<7} | {:<7} | {:<7}".format(
- claim_number, claim_data['claim_status'], claim_data['patient_name'][:20],
- claim_data['processed_date'][:5], claim_data['first_service_date'][:5],
- claim_data['total_allowed_amount'], claim_data['total_paid_amount'],
- claim_data['total_patient_responsibility_amount'], claim_data['total_charged_amount']
  )
- print(table_row)
- output_file.write(table_row + "\n") # Write each row to the output file
-
- if claim_data['total_paid_amount'] == '0.00':
- for xwalk in claim_data['claim_xwalk_data']:
- clm507Cd = xwalk['clm507Cd'] # String: e.g., "F1"
- clm507CdDesc = xwalk['clm507CdDesc'] # String: e.g., "Finalized/Payment-The claim/line has been paid."
- clm508Cd = xwalk['clm508Cd'] # String: e.g., "104"
- clm508CdDesc = xwalk['clm508CdDesc'] # String: e.g., "Processed according to plan provisions..."
- clmIcnSufxCd = xwalk['clmIcnSufxCd'] # String: e.g., "01"
- print(" 507: {} ({}) | 508: {} ({}) | ICN Suffix: {}".format(clm507Cd, clm507CdDesc, clm508Cd, clm508CdDesc, clmIcnSufxCd))
-
- # Create a temporary file to store the claim summary
+ print(warning_msg)
+
+ # Log the warning
+ MediLink_ConfigLoader.log(
+ "Duplicate claim warning: {}".format(warning_msg),
+ level="WARNING"
+ )
+
+ # Initialize cache and consolidated claims
+ cache = ClaimCache()
+ consolidated_claims = ConsolidatedClaims()
+
+ # Process claims with payer rotation
+ process_claims_with_payer_rotation(
+ billing_provider_tin, start_date_str, end_date_str, payer_ids, cache, consolidated_claims
+ )
+
+ # Display consolidated results
  output_file_path = os.path.join(os.getenv('TEMP'), 'claim_summary_report.txt')
  with open(output_file_path, 'w') as output_file:
- # Loop through each payer_id and call the API, then display the claim summary
- for payer_id in payer_ids:
- claim_summary = MediLink_API_v3.get_claim_summary_by_provider(client, billing_provider_tin, start_date_str, end_date_str, payer_id=payer_id)
- display_claim_summary(claim_summary, payer_id, output_file) # Pass output_file to the display function
+ display_consolidated_claims(consolidated_claims, output_file)
+
+ # Clear cache after consolidated table is generated
+ cache.clear_cache()
 
  # Open the generated file in Notepad
  os.startfile(output_file_path) # Use os.startfile for better handling
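The consolidation introduced in this file treats the same claim number returned under two payer IDs as a single row with multiple payer sources, and only records a duplicate warning when key fields differ. A minimal standalone sketch of that behavior (the claim values below are made up for illustration; the package itself uses the ConsolidatedClaims.add_claim method shown in the diff):

    # Illustrative only -- condensed version of the add_claim logic with hypothetical data
    claims_by_number = {}
    duplicate_warnings = []

    def add_claim(claim, payer_id, key_fields=('claim_status', 'total_paid_amount')):
        number = claim['claim_number']
        entry = claims_by_number.get(number)
        if entry is None:
            claims_by_number[number] = {'data': claim, 'payer_sources': [payer_id]}
        elif all(entry['data'].get(f) == claim.get(f) for f in key_fields):
            if payer_id not in entry['payer_sources']:
                entry['payer_sources'].append(payer_id)  # same data reported by another payer ID
        else:
            duplicate_warnings.append({'claim_number': number, 'new_payer': payer_id})

    sample = {'claim_number': '12345678', 'claim_status': 'Finalized', 'total_paid_amount': '106.78'}  # hypothetical
    add_claim(sample, '87726')
    add_claim(dict(sample), '03432')  # identical data under a second payer ID
    print(claims_by_number['12345678']['payer_sources'])  # ['87726', '03432']
    print(duplicate_warnings)  # []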