medicafe 0.250720.0-py3-none-any.whl → 0.250722.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of medicafe might be problematic.
- MediBot/MediBot.bat +2 -0
- MediBot/MediBot_Preprocessor.py +36 -21
- MediBot/MediBot_Preprocessor_lib.py +59 -22
- MediLink/MediLink_837p_cob_library.py +12 -39
- MediLink/MediLink_837p_encoder.py +27 -3
- MediLink/MediLink_837p_encoder_library.py +58 -117
- MediLink/MediLink_837p_utilities.py +264 -0
- MediLink/MediLink_DataMgmt.py +10 -5
- MediLink/MediLink_Deductible.py +64 -8
- MediLink/MediLink_Deductible_Validator.py +51 -7
- {medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/METADATA +1 -1
- {medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/RECORD +15 -14
- {medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/LICENSE +0 -0
- {medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/WHEEL +0 -0
- {medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/top_level.txt +0 -0
MediBot/MediBot.bat
CHANGED
@@ -281,9 +281,11 @@ for %%f in ("%target_folder%\!latest_csv!") do set "latest_csv_name=%%~nxf"
 
 :: Compare the paths and prompt user if necessary
 if not "!current_csv_name!"=="!latest_csv_name!" (
+    echo.
     echo ALERT: Config file CSV path differs from the latest CSV. This can happen if a new CSV is downloaded.
     echo Current CSV: !current_csv_name!
     echo Latest CSV: !latest_csv_name!
+    echo.
     set /p update_choice="Do you want to update to the latest CSV? (Y/N): "
     if /i "!update_choice!"=="Y" (
         echo Updating config file with latest CSV...
MediBot/MediBot_Preprocessor.py
CHANGED
@@ -25,7 +25,8 @@ except ImportError as e:
 
 # Load configuration
 # Should this also take args? Path for ./MediLink needed to be added for this to resolve
-
+# Use cached configuration to avoid repeated I/O operations
+config, crosswalk = MediBot_Preprocessor_lib.get_cached_configuration()
 
 # CSV Preprocessor built for Carol
 def preprocess_csv_data(csv_data, crosswalk):
@@ -57,9 +58,7 @@ def preprocess_csv_data(csv_data, crosswalk):
     # and which haven't been yet. So, if the patient 'exists' in the system, the next quetion is about claims/billing status.
     # Eventually, we really want to get out of Medisoft...
 
-    #
-    # Combine 'Patient First', 'Patient Middle', and 'Patient Last' into a single 'Patient Name' field.
-    # Combine 'Patient Address1' and 'Patient Address2' into a single 'Patient Street' field.
+    # Batch field operations: Convert dates, combine names/addresses, and apply replacements
     MediLink_ConfigLoader.log("CSV Pre-processor: Constructing Patient Name and Address for Medisoft...", level="INFO")
     MediBot_Preprocessor_lib.combine_fields(csv_data)
 
@@ -97,7 +96,8 @@ def preprocess_csv_data(csv_data, crosswalk):
 
 def check_existing_patients(selected_patient_ids, MAPAT_MED_PATH):
     existing_patients = []
-
+    # Convert to set for O(1) lookup performance
+    selected_patient_ids_set = set(selected_patient_ids)
 
     try:
         with open(MAPAT_MED_PATH, 'r') as file:
@@ -107,17 +107,17 @@ def check_existing_patients(selected_patient_ids, MAPAT_MED_PATH):
             patient_id = line[194:202].strip()  # Extract Patient ID (Columns 195-202)
             patient_name = line[9:39].strip()   # Extract Patient Name (Columns 10-39)
 
-            if patient_id in
+            if patient_id in selected_patient_ids_set:
                 existing_patients.append((patient_id, patient_name))
-                # Remove
-
+                # Remove from set for O(1) operation
+                selected_patient_ids_set.discard(patient_id)
     except FileNotFoundError:
         # Handle the case where MAPAT_MED_PATH is not found
         print("MAPAT.med was not found at location indicated in config file.")
         print("Skipping existing patient check and continuing...")
 
-    #
-    patients_to_process =
+    # Convert remaining set back to list for return
+    patients_to_process = list(selected_patient_ids_set)
 
     return existing_patients, patients_to_process
 
@@ -129,14 +129,25 @@ def intake_scan(csv_headers, field_mapping):
     MediLink_ConfigLoader.log("Intake Scan - Field Mapping: {}".format(field_mapping), level="DEBUG")
     MediLink_ConfigLoader.log("Intake Scan - CSV Headers: {}".format(csv_headers), level="DEBUG")
 
+    # Pre-compile regex patterns for better performance
+    compiled_patterns = {}
+    for medisoft_field, patterns in field_mapping.items():
+        compiled_patterns[medisoft_field] = [re.compile(pattern, re.IGNORECASE) for pattern in patterns]
+
+    # Pre-compile the alphanumeric regex for policy number validation
+    alphanumeric_pattern = re.compile("^[a-zA-Z0-9]*$")
+
     # Iterate over the Medisoft fields defined in field_mapping
     for medisoft_field in field_mapping.keys():
-
-
-
-
-
-
+        matched = False
+        for pattern in compiled_patterns[medisoft_field]:
+            # Use early termination - find first match and break
+            for header in csv_headers:
+                if pattern.search(header):
+                    identified_fields[header] = medisoft_field
+                    matched = True
+                    break
+            if matched:
                 break
         else:
             # Check if the missing field is a required field before appending the warning
@@ -160,7 +171,7 @@ def intake_scan(csv_headers, field_mapping):
         if 'Insurance Policy Number' in field:
             policy_number = identified_fields.get(header)
             MediLink_ConfigLoader.log("Checking Insurance Policy Number '{}' for alphanumeric characters.".format(policy_number), level="DEBUG")
-            if not
+            if not alphanumeric_pattern.match(policy_number):
                 missing_fields_warnings.append("WARNING: Insurance Policy Number '{}' contains invalid characters.".format(policy_number))
                 MediLink_ConfigLoader.log("Insurance Policy Number '{}' contains invalid characters.".format(policy_number), level="WARNING")
         # Additional checks can be added as needed for other fields
@@ -187,15 +198,19 @@ def main():
 
     args = parser.parse_args()
 
-    config, crosswalk = MediLink_ConfigLoader.load_configuration()
-
-    client = APIClient()
-
     # If no arguments provided, print usage instructions
     if not any(vars(args).values()):
         parser.print_help()
         return
 
+    # Load configuration only when needed
+    if args.update_crosswalk or args.init_crosswalk or args.load_csv or args.preprocess_csv or args.open_csv:
+        config, crosswalk = MediBot_Preprocessor_lib.get_cached_configuration()
+
+    # Initialize API client only when needed
+    if args.update_crosswalk or args.init_crosswalk:
+        client = APIClient()
+
     if args.update_crosswalk:
         print("Updating the crosswalk...")
         MediBot_Crosswalk_Library.crosswalk_update(client, config, crosswalk)
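The intake_scan change above compiles each Medisoft field's patterns once and exits on the first matching header instead of recompiling per comparison. The minimal, self-contained sketch below reproduces just that matching loop; the field names, patterns, and headers are invented for illustration and are not taken from the package's crosswalk.

import re

# Hypothetical field mapping: Medisoft field -> candidate header patterns (illustrative only)
field_mapping = {
    "Patient Name": [r"patient\s*name", r"^name$"],
    "Insurance Policy Number": [r"policy\s*(number|no)", r"member\s*id"],
}
csv_headers = ["Name", "DOB", "Policy Number"]

# Compile every pattern once, up front, instead of recompiling inside the header loop
compiled_patterns = {
    field: [re.compile(p, re.IGNORECASE) for p in patterns]
    for field, patterns in field_mapping.items()
}

identified_fields = {}
for field, patterns in compiled_patterns.items():
    matched = False
    for pattern in patterns:
        for header in csv_headers:
            if pattern.search(header):
                identified_fields[header] = field
                matched = True
                break  # first matching header wins
        if matched:
            break  # stop trying further patterns for this field

print(identified_fields)  # {'Name': 'Patient Name', 'Policy Number': 'Insurance Policy Number'}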
MediBot/MediBot_Preprocessor_lib.py
CHANGED
@@ -7,6 +7,10 @@ import chardet  # Ensure chardet is imported
 # Add the parent directory of the project to the Python path
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
 
+# Configuration cache to avoid repeated loading
+_config_cache = None
+_crosswalk_cache = None
+
 # Attempt to import necessary modules, falling back if they are not found
 try:
     import MediLink_ConfigLoader
@@ -44,6 +48,17 @@ def initialize(config):
     except AttributeError:
         raise InitializationError("Error: '{}' not found in config.".format(key))
 
+def get_cached_configuration():
+    """
+    Returns cached configuration and crosswalk data to avoid repeated I/O operations.
+    """
+    global _config_cache, _crosswalk_cache
+
+    if _config_cache is None or _crosswalk_cache is None:
+        _config_cache, _crosswalk_cache = MediLink_ConfigLoader.load_configuration()
+
+    return _config_cache, _crosswalk_cache
+
 def open_csv_for_editing(csv_file_path):
     try:
         # Open the CSV file with its associated application
@@ -105,13 +120,18 @@ def load_csv_data(csv_file_path):
         # Clean the headers
         cleaned_headers = clean_header(reader.fieldnames)
 
-        # Create a mapping of cleaned headers to original headers
+        # Create a mapping of cleaned headers to original headers (pre-compute once)
         header_mapping = {cleaned_headers[i]: reader.fieldnames[i] for i in range(len(cleaned_headers))}
 
-        # Process the remaining rows
+        # Process the remaining rows - optimize by pre-allocating the list
         csv_data = []
+        # Pre-allocate list size if we can estimate it (optional optimization)
+        # csv_data = [None] * estimated_size  # if we had row count
+
        for row in reader:
-
+            # Use dict() constructor with generator expression for better performance
+            cleaned_row = dict((cleaned_headers[i], row[header_mapping[cleaned_headers[i]]])
+                               for i in range(len(cleaned_headers)))
             csv_data.append(cleaned_row)
 
         return csv_data  # Return a list of dictionaries
@@ -205,12 +225,20 @@ def combine_fields(csv_data):
 
 def apply_replacements(csv_data, crosswalk):
     replacements = crosswalk.get('csv_replacements', {})
+    # Pre-define the keys to check for better performance
+    keys_to_check = ['Patient SSN', 'Primary Insurance', 'Ins1 Payer ID']
+
     for row in csv_data:
+        # Use early termination - check each replacement only if needed
        for old_value, new_value in replacements.items():
-
+            replacement_made = False
+            for key in keys_to_check:
                if row.get(key) == old_value:
                    row[key] = new_value
-
+                    replacement_made = True
+                    break  # Exit the key loop once a replacement is made
+            if replacement_made:
+                break  # Exit the replacement loop once any replacement is made
 
 import difflib
 from collections import defaultdict
@@ -230,12 +258,15 @@ def find_best_medisoft_id(insurance_name, medisoft_ids, medisoft_to_mains_names)
     best_match_ratio = 0
     best_medisoft_id = None
 
+    # Pre-process insurance name once
+    processed_insurance = ''.join(c for c in insurance_name if not c.isdigit()).upper()
+
     for medisoft_id in medisoft_ids:
         mains_names = medisoft_to_mains_names.get(medisoft_id, [])
         for mains_name in mains_names:
             # Preprocess names by extracting non-numeric characters and converting to uppercase
-
-
+            # Use more efficient string processing
+            processed_mains = ''.join(c for c in mains_name if not c.isdigit()).upper()
 
             # Log the processed names before computing the match ratio
             MediLink_ConfigLoader.log("Processing Medisoft ID '{}': Comparing processed insurance '{}' with processed mains '{}'.".format(medisoft_id, processed_insurance, processed_mains), level="DEBUG")
@@ -414,8 +445,8 @@ def update_procedure_codes(csv_data, crosswalk):
 
 def update_diagnosis_codes(csv_data):
     try:
-        #
-        config, crosswalk =
+        # Use cached configuration instead of loading repeatedly
+        config, crosswalk = get_cached_configuration()
 
         # Extract the local storage path from the configuration
         local_storage_path = config['MediLink_Config']['local_storage_path']
@@ -449,17 +480,23 @@ def update_diagnosis_codes(csv_data):
         MediLink_ConfigLoader.log("BAD IDEA: Processing DOCX files modified between {} and {}.".format(threshold_start, threshold_end), level="INFO")
 
         # Gather all relevant DOCX files in the specified directory
-
-
-
-
-
-
-
-
-
-
-
+        # Optimize by combining file gathering and filtering in one pass
+        valid_files = []
+        try:
+            for filename in os.listdir(local_storage_path):
+                if filename.endswith(".docx"):
+                    filepath = os.path.join(local_storage_path, filename)
+                    # Check modification time only once per file
+                    try:
+                        mtime = os.path.getmtime(filepath)
+                        if threshold_start <= datetime.fromtimestamp(mtime) <= threshold_end:
+                            valid_files.append(filepath)
+                    except (OSError, ValueError):
+                        # Skip files with invalid modification times
+                        continue
+        except OSError:
+            MediLink_ConfigLoader.log("Error accessing directory: {}".format(local_storage_path), level="ERROR")
+            return
 
         # Process valid DOCX files
         for filepath in valid_files:
@@ -583,8 +620,8 @@ def load_insurance_data_from_mains(config):
     Returns:
         dict: A dictionary mapping insurance names to insurance IDs.
     """
-    #
-    config, crosswalk =
+    # Use cached configuration to avoid repeated loading
+    config, crosswalk = get_cached_configuration()
 
     # Retrieve MAINS path and slicing information from the configuration
     # TODO (Low) For secondary insurance, this needs to be pulling from the correct MAINS (there are 2)
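get_cached_configuration() is a plain module-level memo of the loader result. The stand-alone sketch below shows the same idiom with a dummy loader standing in for MediLink_ConfigLoader.load_configuration(); because every caller receives the same cached objects, a mutation of the returned dicts is visible to all later callers, which is a deliberate trade-off of this pattern.

# Minimal sketch of the lazy module-level cache idiom used by get_cached_configuration().
# load_configuration() below is a stand-in, not the real MediLink_ConfigLoader.

_config_cache = None
_crosswalk_cache = None

def load_configuration():
    # Pretend this is an expensive file/JSON read
    print("loading from disk...")
    return {"MediLink_Config": {"local_storage_path": "."}}, {"csv_replacements": {}}

def get_cached_configuration():
    """Load the configuration once and reuse it on every later call."""
    global _config_cache, _crosswalk_cache
    if _config_cache is None or _crosswalk_cache is None:
        _config_cache, _crosswalk_cache = load_configuration()
    return _config_cache, _crosswalk_cache

config, crosswalk = get_cached_configuration()   # prints "loading from disk..."
config, crosswalk = get_cached_configuration()   # served from the cache, no I/O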
MediLink/MediLink_837p_cob_library.py
CHANGED
@@ -56,41 +56,16 @@ project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
 if project_dir not in sys.path:
     sys.path.append(project_dir)
 
-# Safe import
-
-
-
-    Uses dynamic imports to avoid circular dependencies while maintaining functionality.
-
-    Returns:
-    - Dictionary containing imported functions or None if import fails
-    """
+# Safe import for utility functions - works in multiple contexts
+try:
+    from .MediLink_837p_utilities import convert_date_format
+except (ImportError, SystemError):
     try:
-
-        from MediLink_837p_encoder_library import convert_date_format
-        return {
-            'convert_date_format': convert_date_format
-        }
+        from MediLink_837p_utilities import convert_date_format
     except ImportError as e:
-        # Fallback implementation for convert_date_format
-        MediLink_ConfigLoader.log("Warning: Could not import
-
-
-# Initialize encoder functions with fallback
-_encoder_functions = safe_import_encoder_functions()
-
-def get_convert_date_format():
-    """
-    Safely gets the convert_date_format function with fallback implementation.
-
-    Returns:
-    - convert_date_format function or fallback implementation
-    """
-    if _encoder_functions and 'convert_date_format' in _encoder_functions:
-        return _encoder_functions['convert_date_format']
-    else:
-        # Fallback implementation
-        def fallback_convert_date_format(date_str):
+        # Fallback implementation for convert_date_format if utilities module is not available
+        MediLink_ConfigLoader.log("Warning: Could not import utilities functions: {}".format(e), level="WARNING")
+        def convert_date_format(date_str):
             """Fallback date format conversion function"""
             try:
                 # Parse the input date string into a datetime object
@@ -101,7 +76,6 @@ def get_convert_date_format():
             except (ValueError, TypeError):
                 # Return original string if conversion fails
                 return date_str
-    return fallback_convert_date_format
 
 def create_2320_other_subscriber_segments(patient_data, config, crosswalk):
     """
@@ -253,9 +227,8 @@ def create_2430_service_line_cob_segments(patient_data, config, crosswalk):
 
         # DTP*573 segment for adjudication date
         if service.get('adjudication_date'):
-            convert_date = get_convert_date_format()
             dtp_segment = "DTP*573*D8*{}~".format(
-
+                convert_date_format(service.get('adjudication_date'))
             )
             segments.append(dtp_segment)
 
@@ -310,9 +283,8 @@ def create_2330C_other_subscriber_name_segments(patient_data, config, crosswalk)
 
     # Optional DMG segment for date of birth/gender
     if patient_data.get('subscriber_dob'):
-        convert_date = get_convert_date_format()
         dmg_segment = "DMG*D8*{}*{}~".format(
-
+            convert_date_format(patient_data.get('subscriber_dob')),
            patient_data.get('subscriber_gender', '')
        )
        segments.append(dmg_segment)
@@ -636,7 +608,8 @@ def get_enhanced_insurance_options(config):
         'MA': 'Medicare Advantage',
         'MC': 'Medicare Part C'
     }
-    enhanced_options =
+    enhanced_options = base_options.copy()
+    enhanced_options.update(medicare_options)
     return enhanced_options
 
 # Main COB processing function
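The COB library's old dynamic-import helper is replaced by the try/except import chain shown above. The sketch below reproduces that pattern in isolation so the fallback path can be run anywhere: when MediLink_837p_utilities is not importable (either as a package-relative or a plain module), a locally defined fallback takes over. The logging call from the real hunk is omitted here so the snippet stays self-contained.

# Generic sketch of the "safe import with local fallback" pattern adopted here.
from datetime import datetime

try:
    # Package-relative import (when running as part of the MediLink package)
    from .MediLink_837p_utilities import convert_date_format  # type: ignore
except (ImportError, SystemError):
    try:
        # Plain import (when running as a loose script from the same directory)
        from MediLink_837p_utilities import convert_date_format  # type: ignore
    except ImportError:
        # Last resort: define a local fallback so callers keep working
        def convert_date_format(date_str):
            input_format = "%m-%d-%Y" if len(date_str) == 10 else "%m-%d-%y"
            return datetime.strptime(date_str, input_format).strftime("%Y%m%d")

print(convert_date_format("07-22-2025"))  # 20250722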
MediLink/MediLink_837p_encoder.py
CHANGED
@@ -1,9 +1,33 @@
 # MediLink_837p_encoder.py
 import re, argparse, os
 from datetime import datetime
-
-
-
+
+# Safe import for ConfigLoader - works in multiple contexts
+try:
+    from . import MediLink_ConfigLoader
+except (ImportError, SystemError):
+    try:
+        import MediLink_ConfigLoader
+    except ImportError:
+        from MediLink import MediLink_ConfigLoader
+
+# Safe import for DataMgmt functions - works in multiple contexts
+try:
+    from .MediLink_DataMgmt import parse_fixed_width_data, read_fixed_width_data
+except (ImportError, SystemError):
+    try:
+        from MediLink_DataMgmt import parse_fixed_width_data, read_fixed_width_data
+    except ImportError:
+        from MediLink.MediLink_DataMgmt import parse_fixed_width_data, read_fixed_width_data
+
+# Safe import for encoder library - works in multiple contexts
+try:
+    from . import MediLink_837p_encoder_library
+except (ImportError, SystemError):
+    try:
+        import MediLink_837p_encoder_library
+    except ImportError:
+        from MediLink import MediLink_837p_encoder_library
 # TODO (COB ENHANCEMENT): Import COB library when implementing Medicare and secondary claim support
 # import MediLink_837p_cob_library
 #from tqdm import tqdm
MediLink/MediLink_837p_encoder_library.py
CHANGED
@@ -10,27 +10,56 @@ if project_dir not in sys.path:
 from MediBot import MediBot_Preprocessor_lib
 load_insurance_data_from_mains = MediBot_Preprocessor_lib.load_insurance_data_from_mains
 from MediBot import MediBot_Crosswalk_Library
-
-
-
-
-
-
-
-
-
-
-
-#
-
-
-
-
-
-
-
-
-
+
+# Safe import for API functions - works in multiple contexts
+try:
+    from .MediLink_API_v3 import fetch_payer_name_from_api
+except (ImportError, SystemError):
+    try:
+        from MediLink_API_v3 import fetch_payer_name_from_api
+    except ImportError:
+        import MediLink_API_v3
+        fetch_payer_name_from_api = MediLink_API_v3.fetch_payer_name_from_api
+
+# Safe import for utility functions - works in multiple contexts
+try:
+    from .MediLink_837p_utilities import (
+        convert_date_format,
+        format_datetime,
+        get_user_confirmation,
+        prompt_user_for_payer_id,
+        format_claim_number,
+        generate_segment_counts,
+        handle_validation_errors,
+        get_output_directory,
+        winscp_validate_output_directory
+    )
+except (ImportError, SystemError):
+    try:
+        from MediLink_837p_utilities import (
+            convert_date_format,
+            format_datetime,
+            get_user_confirmation,
+            prompt_user_for_payer_id,
+            format_claim_number,
+            generate_segment_counts,
+            handle_validation_errors,
+            get_output_directory,
+            winscp_validate_output_directory
+        )
+    except ImportError:
+        import MediLink_837p_utilities
+        convert_date_format = MediLink_837p_utilities.convert_date_format
+        format_datetime = MediLink_837p_utilities.format_datetime
+        get_user_confirmation = MediLink_837p_utilities.get_user_confirmation
+        prompt_user_for_payer_id = MediLink_837p_utilities.prompt_user_for_payer_id
+        format_claim_number = MediLink_837p_utilities.format_claim_number
+        generate_segment_counts = MediLink_837p_utilities.generate_segment_counts
+        handle_validation_errors = MediLink_837p_utilities.handle_validation_errors
+        get_output_directory = MediLink_837p_utilities.get_output_directory
+        winscp_validate_output_directory = MediLink_837p_utilities.winscp_validate_output_directory
+
+
 
 # Constructs the ST segment for transaction set.
 def create_st_segment(transaction_set_control_number):
@@ -152,7 +181,11 @@ def create_1000A_submitter_name_segment(patient_data, config, endpoint):
     # Submitter contact details
     contact_name = config.get('submitter_name', 'NONE')
     contact_telephone_number = config.get('submitter_tel', 'NONE')
-
+
+    # Get submitter first name to determine entity type qualifier
+    submitter_first_name = config.get('submitter_first_name', '')
+    # Determine entity_type_qualifier: '1' for individual (with first name), '2' for organization
+    entity_type_qualifier = '1' if submitter_first_name else '2'  # Make sure that this is correct. Original default was 2.
 
     # Construct NM1 segment for the submitter
     nm1_segment = "NM1*41*{}*{}*****{}*{}~".format(entity_type_qualifier, submitter_name, submitter_id_qualifier, submitter_id)  # BUG - need to check submitter_name because this is written as fixed ****** which implies a single entry and not a first and last name. This is weird.
@@ -237,15 +270,7 @@ def create_2010BB_payer_information_segment(parsed_data):
     # Build NM1 segment using provided payer name and payer ID
     return build_nm1_segment(payer_name, payer_id)
 
-
-    while True:
-        response = input(prompt_message).strip().lower()
-        if response in ['yes', 'y']:
-            return True
-        elif response in ['no', 'n']:
-            return False
-        else:
-            print("Please respond with 'yes' or 'no'.")
+
 
 def resolve_payer_name(payer_id, config, primary_endpoint, insurance_name, parsed_data, crosswalk, client):
     # Check if the payer_id is in the crosswalk with a name already attached to it.
@@ -405,18 +430,7 @@ def handle_missing_payer_id(insurance_name, config, crosswalk, client):
         MediLink_ConfigLoader.log("User did not confirm the standard insurance name. Manual intervention is required.", config, level="CRITICAL")
         return None
 
-
-    """
-    Prompts the user to input the payer ID manually and ensures that a valid alphanumeric ID is provided.
-    """
-    while True:
-        print("Manual intervention required: No payer ID found for insurance name '{}'.".format(insurance_name))
-        payer_id = input("Please enter the payer ID manually: ").strip()
-
-        if payer_id.isalnum():
-            return payer_id
-        else:
-            print("Error: Payer ID must be alphanumeric. Please try again.")
+
 
 def build_nm1_segment(payer_name, payer_id):
     # Step 1: Build NM1 segment using payer name and ID
@@ -696,15 +710,7 @@ def create_nm1_rendering_provider_segment(config, is_rendering_provider_differen
     else:
         return []
 
-
-    # Remove any non-alphanumeric characters from chart number and date
-    chart_number_alphanumeric = ''.join(filter(str.isalnum, chart_number))
-    date_of_service_alphanumeric = ''.join(filter(str.isalnum, date_of_service))
-
-    # Combine the alphanumeric components without spaces
-    formatted_claim_number = chart_number_alphanumeric + date_of_service_alphanumeric
-
-    return formatted_claim_number
+
 
 # Constructs the CLM and related segments based on parsed data and configuration.
 def create_clm_and_related_segments(parsed_data, config, crosswalk):
@@ -940,70 +946,5 @@ def create_interchange_trailer(config, num_transactions, isa13, num_functional_g
 
     return ge_segment, iea_segment
 
-# Generates segment counts for the formatted 837P transaction and updates SE segment.
-def generate_segment_counts(compiled_segments, transaction_set_control_number):
-    # Count the number of segments, not including the placeholder SE segment
-    segment_count = compiled_segments.count('~')  # + 1 Including SE segment itself, but seems to be giving errors.
-
-    # Ensure transaction set control number is correctly formatted as a string
-    formatted_control_number = str(transaction_set_control_number).zfill(4)  # Pad to ensure minimum 4 characters
-
-    # Construct the SE segment with the actual segment count and the formatted transaction set control_number
-    se_segment = "SE*{0}*{1}~".format(segment_count, formatted_control_number)
 
-    # Assuming the placeholder SE segment was the last segment added before compiling
-    # This time, we directly replace the placeholder with the correct SE segment
-    formatted_837p = compiled_segments.rsplit('SE**', 1)[0] + se_segment
-
-    return formatted_837p
 
-def handle_validation_errors(transaction_set_control_number, validation_errors, config):
-    for error in validation_errors:
-        MediLink_ConfigLoader.log("Validation error for transaction set {}: {}".format(transaction_set_control_number, error), config, level="WARNING")
-
-    print("Validation errors encountered for transaction set {}. Errors: {}".format(transaction_set_control_number, validation_errors))
-    user_input = input("Skip this patient and continue without incrementing transaction set number? (yes/no): ")
-    if user_input.lower() == 'yes':
-        print("Skipping patient...")
-        MediLink_ConfigLoader.log("Skipped processing of transaction set {} due to user decision.".format(transaction_set_control_number), config, level="INFO")
-        return True  # Skip the current patient
-    else:
-        print("Processing halted due to validation errors.")
-        MediLink_ConfigLoader.log("HALT: Processing halted at transaction set {} due to unresolved validation errors.".format(transaction_set_control_number), config, level="ERROR")
-        sys.exit()  # Optionally halt further processing
-
-def winscp_validate_output_directory(output_directory):
-    """
-    Validates the output directory path to ensure it has no spaces.
-    If spaces are found, prompts the user to input a new path.
-    If the directory doesn't exist, creates it.
-    """
-    while ' ' in output_directory:
-        print("\nWARNING: The output directory path contains spaces, which can cause issues with upload operations.")
-        print("  Current proposed path: {}".format(output_directory))
-        new_path = input("Please enter a new path for the output directory: ")
-        output_directory = new_path.strip()  # Remove leading/trailing spaces
-
-    # Check if the directory exists, if not, create it
-    if not os.path.exists(output_directory):
-        os.makedirs(output_directory)
-        print("INFO: Created output directory: {}".format(output_directory))
-
-    return output_directory
-
-def get_output_directory(config):
-    # Retrieve desired default output file path from config
-    output_directory = config.get('outputFilePath', '').strip()
-    # BUG (Low SFTP) Add WinSCP validation because of the mishandling of spaces in paths. (This shouldn't need to exist.)
-    if not output_directory:
-        print("Output file path is not specified in the configuration.")
-        output_directory = input("Please enter a valid output directory path: ").strip()
-
-    # Validate the directory path (checks for spaces and existence)
-    output_directory = winscp_validate_output_directory(output_directory)
-
-    if not os.path.isdir(output_directory):
-        print("Output directory does not exist or is not accessible. Please check the configuration.")
-        return None
-
-    return output_directory
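The 1000A change above derives the NM1 entity type qualifier from whether a submitter first name is configured. Below is a hedged, stand-alone rendering of just that segment construction: the config values, ID qualifier, and submitter ID are invented for the example, while the format string follows the one in the diff.

# Illustration of the NM1*41 submitter segment construction shown in the hunk above.
config = {
    "submitter_name": "EXAMPLE BILLING LLC",
    "submitter_first_name": "",          # empty -> treated as an organization
}

submitter_name = config.get("submitter_name", "NONE")
submitter_first_name = config.get("submitter_first_name", "")
# '1' = person (first name present), '2' = non-person entity / organization
entity_type_qualifier = "1" if submitter_first_name else "2"

submitter_id_qualifier = "46"            # illustrative qualifier value
submitter_id = "123456789"               # illustrative submitter ID

nm1_segment = "NM1*41*{}*{}*****{}*{}~".format(
    entity_type_qualifier, submitter_name, submitter_id_qualifier, submitter_id
)
print(nm1_segment)  # NM1*41*2*EXAMPLE BILLING LLC*****46*123456789~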
MediLink/MediLink_837p_utilities.py
ADDED
@@ -0,0 +1,264 @@
+# MediLink_837p_utilities.py
+"""
+837P Encoder Utility Functions
+
+This module contains utility functions extracted from MediLink_837p_encoder_library.py
+to reduce the size and complexity of the main encoder library while avoiding circular imports.
+
+Functions included:
+- Date/time formatting utilities
+- User interaction utilities
+- File/path handling utilities
+- Processing utilities
+- Validation utilities
+
+Import Strategy:
+This module only imports base Python modules and MediLink_ConfigLoader to avoid
+circular dependencies. Other modules import from this utilities module.
+"""
+
+from datetime import datetime
+import sys
+import os
+import re
+
+# Import MediLink_ConfigLoader for logging functionality
+try:
+    from MediLink import MediLink_ConfigLoader
+except ImportError:
+    import MediLink_ConfigLoader
+
+# =============================================================================
+# DATE/TIME UTILITIES
+# =============================================================================
+
+def convert_date_format(date_str):
+    """
+    Converts date format from one format to another.
+
+    Parameters:
+    - date_str: Date string in MM-DD-YYYY or MM-DD-YY format
+
+    Returns:
+    - Date string in YYYYMMDD format
+    """
+    # Parse the input date string into a datetime object using the input format
+    # Determine the input date format based on the length of the input string
+    input_format = "%m-%d-%Y" if len(date_str) == 10 else "%m-%d-%y"
+    date_obj = datetime.strptime(date_str, input_format)
+    # Format the datetime object into the desired output format and return
+    return date_obj.strftime("%Y%m%d")
+
+def format_datetime(dt=None, format_type='date'):
+    """
+    Formats date and time according to the specified format.
+
+    Parameters:
+    - dt: datetime object (defaults to current datetime if None)
+    - format_type: 'date', 'isa', or 'time'
+
+    Returns:
+    - Formatted date/time string
+    """
+    if dt is None:
+        dt = datetime.now()
+    if format_type == 'date':
+        return dt.strftime('%Y%m%d')
+    elif format_type == 'isa':
+        return dt.strftime('%y%m%d')
+    elif format_type == 'time':
+        return dt.strftime('%H%M')
+
+# =============================================================================
+# USER INTERACTION UTILITIES
+# =============================================================================
+
+def get_user_confirmation(prompt_message):
+    """
+    Prompts user for yes/no confirmation with validation.
+
+    Parameters:
+    - prompt_message: Message to display to user
+
+    Returns:
+    - Boolean: True for yes, False for no
+    """
+    while True:
+        response = input(prompt_message).strip().lower()
+        if response in ['yes', 'y']:
+            return True
+        elif response in ['no', 'n']:
+            return False
+        else:
+            print("Please respond with 'yes' or 'no'.")
+
+def prompt_user_for_payer_id(insurance_name):
+    """
+    Prompts the user to input the payer ID manually and ensures that a valid alphanumeric ID is provided.
+
+    Parameters:
+    - insurance_name: Name of the insurance for context
+
+    Returns:
+    - Valid alphanumeric payer ID
+    """
+    while True:
+        print("Manual intervention required: No payer ID found for insurance name '{}'.".format(insurance_name))
+        payer_id = input("Please enter the payer ID manually: ").strip()
+
+        if payer_id.isalnum():
+            return payer_id
+        else:
+            print("Error: Payer ID must be alphanumeric. Please try again.")
+
+# =============================================================================
+# FILE/PATH UTILITIES
+# =============================================================================
+
+def format_claim_number(chart_number, date_of_service):
+    """
+    Formats claim number by combining chart number and date of service.
+
+    Parameters:
+    - chart_number: Patient chart number
+    - date_of_service: Date of service
+
+    Returns:
+    - Formatted claim number (alphanumeric only)
+    """
+    # Remove any non-alphanumeric characters from chart number and date
+    chart_number_alphanumeric = ''.join(filter(str.isalnum, chart_number))
+    date_of_service_alphanumeric = ''.join(filter(str.isalnum, date_of_service))
+
+    # Combine the alphanumeric components without spaces
+    formatted_claim_number = chart_number_alphanumeric + date_of_service_alphanumeric
+
+    return formatted_claim_number
+
+def winscp_validate_output_directory(output_directory):
+    """
+    Validates the output directory path to ensure it has no spaces.
+    If spaces are found, prompts the user to input a new path.
+    If the directory doesn't exist, creates it.
+
+    Parameters:
+    - output_directory: Directory path to validate
+
+    Returns:
+    - Validated directory path
+    """
+    while ' ' in output_directory:
+        print("\nWARNING: The output directory path contains spaces, which can cause issues with upload operations.")
+        print("  Current proposed path: {}".format(output_directory))
+        new_path = input("Please enter a new path for the output directory: ")
+        output_directory = new_path.strip()  # Remove leading/trailing spaces
+
+    # Check if the directory exists, if not, create it
+    if not os.path.exists(output_directory):
+        os.makedirs(output_directory)
+        print("INFO: Created output directory: {}".format(output_directory))
+
+    return output_directory
+
+def get_output_directory(config):
+    """
+    Retrieves and validates output directory from configuration.
+
+    Parameters:
+    - config: Configuration dictionary
+
+    Returns:
+    - Valid output directory path or None if invalid
+    """
+    # Retrieve desired default output file path from config
+    output_directory = config.get('outputFilePath', '').strip()
+    # BUG (Low SFTP) Add WinSCP validation because of the mishandling of spaces in paths. (This shouldn't need to exist.)
+    if not output_directory:
+        print("Output file path is not specified in the configuration.")
+        output_directory = input("Please enter a valid output directory path: ").strip()
+
+    # Validate the directory path (checks for spaces and existence)
+    output_directory = winscp_validate_output_directory(output_directory)
+
+    if not os.path.isdir(output_directory):
+        print("Output directory does not exist or is not accessible. Please check the configuration.")
+        return None
+
+    return output_directory
+
+# =============================================================================
+# PROCESSING UTILITIES
+# =============================================================================
+
+def generate_segment_counts(compiled_segments, transaction_set_control_number):
+    """
+    Generates segment counts for the formatted 837P transaction and updates SE segment.
+
+    Parameters:
+    - compiled_segments: String containing compiled 837P segments
+    - transaction_set_control_number: Transaction set control number
+
+    Returns:
+    - Formatted 837P string with correct SE segment
+    """
+    # Count the number of segments, not including the placeholder SE segment
+    segment_count = compiled_segments.count('~')  # + 1 Including SE segment itself, but seems to be giving errors.
+
+    # Ensure transaction set control number is correctly formatted as a string
+    formatted_control_number = str(transaction_set_control_number).zfill(4)  # Pad to ensure minimum 4 characters
+
+    # Construct the SE segment with the actual segment count and the formatted transaction set control_number
+    se_segment = "SE*{0}*{1}~".format(segment_count, formatted_control_number)
+
+    # Assuming the placeholder SE segment was the last segment added before compiling
+    # This time, we directly replace the placeholder with the correct SE segment
+    formatted_837p = compiled_segments.rsplit('SE**', 1)[0] + se_segment
+
+    return formatted_837p
+
+# =============================================================================
+# VALIDATION UTILITIES
+# =============================================================================
+
+def handle_validation_errors(transaction_set_control_number, validation_errors, config):
+    """
+    Handles validation errors with user interaction for decision making.
+
+    Parameters:
+    - transaction_set_control_number: Current transaction set control number
+    - validation_errors: List of validation errors
+    - config: Configuration for logging
+
+    Returns:
+    - Boolean: True to skip patient, False to halt processing
+    """
+    for error in validation_errors:
+        MediLink_ConfigLoader.log("Validation error for transaction set {}: {}".format(transaction_set_control_number, error), config, level="WARNING")
+
+    print("Validation errors encountered for transaction set {}. Errors: {}".format(transaction_set_control_number, validation_errors))
+    user_input = input("Skip this patient and continue without incrementing transaction set number? (yes/no): ")
+    if user_input.lower() == 'yes':
+        print("Skipping patient...")
+        MediLink_ConfigLoader.log("Skipped processing of transaction set {} due to user decision.".format(transaction_set_control_number), config, level="INFO")
+        return True  # Skip the current patient
+    else:
+        print("Processing halted due to validation errors.")
+        MediLink_ConfigLoader.log("HALT: Processing halted at transaction set {} due to unresolved validation errors.".format(transaction_set_control_number), config, level="ERROR")
+        sys.exit()  # Optionally halt further processing
+
+# =============================================================================
+# UTILITY FUNCTION REGISTRY
+# =============================================================================
+
+# Export all utility functions for easy importing
+__all__ = [
+    'convert_date_format',
+    'format_datetime',
+    'get_user_confirmation',
+    'prompt_user_for_payer_id',
+    'format_claim_number',
+    'winscp_validate_output_directory',
+    'get_output_directory',
+    'generate_segment_counts',
+    'handle_validation_errors'
+]
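The pure helpers in the new utilities module can be exercised on their own. In the snippet below the three function bodies are copied from the diff (minus logging) so it runs without MediLink_ConfigLoader; the sample inputs are made up.

# Quick exercise of the pure helpers added in MediLink_837p_utilities.
from datetime import datetime

def convert_date_format(date_str):
    input_format = "%m-%d-%Y" if len(date_str) == 10 else "%m-%d-%y"
    return datetime.strptime(date_str, input_format).strftime("%Y%m%d")

def format_datetime(dt=None, format_type='date'):
    if dt is None:
        dt = datetime.now()
    if format_type == 'date':
        return dt.strftime('%Y%m%d')
    elif format_type == 'isa':
        return dt.strftime('%y%m%d')
    elif format_type == 'time':
        return dt.strftime('%H%M')

def generate_segment_counts(compiled_segments, transaction_set_control_number):
    segment_count = compiled_segments.count('~')
    formatted_control_number = str(transaction_set_control_number).zfill(4)
    se_segment = "SE*{0}*{1}~".format(segment_count, formatted_control_number)
    return compiled_segments.rsplit('SE**', 1)[0] + se_segment

print(convert_date_format("01-05-24"))                 # 20240105
print(format_datetime(datetime(2025, 7, 22), 'isa'))   # 250722
print(generate_segment_counts("ST*837*0001~BHT*0019~SE**", 1))  # ST*837*0001~BHT*0019~SE*2*0001~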
MediLink/MediLink_DataMgmt.py
CHANGED
@@ -88,11 +88,16 @@ def read_fixed_width_data(file_path):
 # TODO (Refactor) Consider consolidating with the other read_fixed_with_data
 def read_general_fixed_width_data(file_path, slices):
     # handle any fixed-width data based on provided slice definitions
-
-
-
-
-
+    try:
+        with open(file_path, 'r', encoding='utf-8') as file:
+            next(file)  # Skip the header
+            for line_number, line in enumerate(file, start=1):
+                insurance_name = {key: line[start:end].strip() for key, (start, end) in slices.items()}
+                yield insurance_name, line_number
+    except FileNotFoundError:
+        print("File not found: {}".format(file_path))
+        MediLink_ConfigLoader.log("File not found: {}".format(file_path), level="ERROR")
+        return
 
 def consolidate_csvs(source_directory, file_prefix="Consolidated", interactive=False):
     """
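read_general_fixed_width_data() is now a generator that slices each line by (start, end) offsets and yields one dict per record. The sketch below shows the same logic against an in-memory buffer instead of a file path so it runs stand-alone; the column layout and payer values are invented.

# Standalone sketch of the slice-driven fixed-width parsing shown above.
import io

slices = {"name": (0, 20), "payer_id": (20, 28)}   # field -> (start, end) column offsets

sample = io.StringIO(
    "NAME                PAYERID \n"      # header row, skipped
    "AETNA               60054   \n"
    "CIGNA               62308   \n"
)

def read_general_fixed_width_data(file_obj, slices):
    next(file_obj)  # skip the header
    for line_number, line in enumerate(file_obj, start=1):
        record = {key: line[start:end].strip() for key, (start, end) in slices.items()}
        yield record, line_number

for record, line_number in read_general_fixed_width_data(sample, slices):
    print(line_number, record)
# 1 {'name': 'AETNA', 'payer_id': '60054'}
# 2 {'name': 'CIGNA', 'payer_id': '62308'}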
MediLink/MediLink_Deductible.py
CHANGED
@@ -229,14 +229,32 @@ def get_eligibility_info(client, payer_id, provider_last_name, date_of_birth, me
         )
         print("\nValidation report generated: {}".format(validation_file_path))
 
-
-
-
-
-
-
-
-
+        # Log any Super Connector API errors
+        if super_connector_eligibility and "rawGraphQLResponse" in super_connector_eligibility:
+            raw_response = super_connector_eligibility.get('rawGraphQLResponse', {})
+            errors = raw_response.get('errors', [])
+            if errors:
+                print("Super Connector API returned {} error(s):".format(len(errors)))
+                for i, error in enumerate(errors):
+                    error_code = error.get('code', 'UNKNOWN')
+                    error_desc = error.get('description', 'No description')
+                    print("  Error {}: {} - {}".format(i+1, error_code, error_desc))
+
+                    # Check for data in error extensions (some APIs return data here)
+                    extensions = error.get('extensions', {})
+                    if extensions and 'details' in extensions:
+                        details = extensions.get('details', [])
+                        if details:
+                            print("  Found {} detail records in error extensions".format(len(details)))
+                            # Log first detail record for debugging
+                            if details:
+                                first_detail = details[0]
+                                print("  First detail: {}".format(first_detail))
+
+            # Check status code
+            status_code = super_connector_eligibility.get('statuscode')
+            if status_code and status_code != '200':
+                print("Super Connector API status code: {} (non-200 indicates errors)".format(status_code))
 
         # Open validation report in Notepad
         os.system('notepad.exe "{}"'.format(validation_file_path))
@@ -291,6 +309,22 @@ def extract_super_connector_patient_info(eligibility_data):
                         'firstName': member_info.get("firstName", ""),
                         'middleName': member_info.get("middleName", "")
                     }
+
+        # Check for data in error extensions (some APIs return data here despite errors)
+        errors = raw_response.get('errors', [])
+        for error in errors:
+            extensions = error.get('extensions', {})
+            if extensions and 'details' in extensions:
+                details = extensions.get('details', [])
+                if details:
+                    # Use the first detail record that has patient info
+                    for detail in details:
+                        if detail.get('lastName') or detail.get('firstName'):
+                            return {
+                                'lastName': detail.get("lastName", ""),
+                                'firstName': detail.get("firstName", ""),
+                                'middleName': detail.get("middleName", "")
+                            }
 
     # Fallback to top-level fields
     return {
@@ -446,6 +480,28 @@ def extract_super_connector_insurance_info(eligibility_data):
                         'memberId': insurance_info.get("memberId", ""),
                         'payerId': insurance_info.get("payerId", "")
                     }
+
+        # Check for data in error extensions (some APIs return data here despite errors)
+        errors = raw_response.get('errors', [])
+        for error in errors:
+            extensions = error.get('extensions', {})
+            if extensions and 'details' in extensions:
+                details = extensions.get('details', [])
+                if details:
+                    # Use the first detail record that has insurance info
+                    for detail in details:
+                        if detail.get('memberId') or detail.get('payerId'):
+                            # Try to determine insurance type from available data
+                            insurance_type = detail.get('planType', '')
+                            if not insurance_type:
+                                insurance_type = detail.get('productType', '')
+
+                            return {
+                                'insuranceType': insurance_type,
+                                'insuranceTypeCode': detail.get("productServiceCode", ""),
+                                'memberId': detail.get("memberId", ""),
+                                'payerId': detail.get("payerId", "")
+                            }
 
     # Fallback to top-level fields
     insurance_type = eligibility_data.get("planTypeDescription", "")
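Both extract_super_connector_* helpers now fall back to the details list inside GraphQL error extensions when the normal member path is empty. The toy example below mirrors that traversal on a fabricated response shape; only the field names the code reads (errors[].extensions.details[]) are kept, everything else is invented.

# Toy illustration of pulling patient details out of GraphQL error "extensions".
eligibility_data = {
    "rawGraphQLResponse": {
        "data": None,
        "errors": [
            {
                "code": "INFORMATIONAL",
                "description": "Member found via secondary search",
                "extensions": {
                    "details": [
                        {"lastName": "DOE", "firstName": "JANE", "middleName": "Q",
                         "memberId": "ABC123", "payerId": "60054"}
                    ]
                },
            }
        ],
    }
}

def patient_from_error_extensions(raw_response):
    """Return the first error-extension detail record that carries a name, else None."""
    for error in raw_response.get("errors", []):
        details = error.get("extensions", {}).get("details", [])
        for detail in details:
            if detail.get("lastName") or detail.get("firstName"):
                return {
                    "lastName": detail.get("lastName", ""),
                    "firstName": detail.get("firstName", ""),
                    "middleName": detail.get("middleName", ""),
                }
    return None

print(patient_from_error_extensions(eligibility_data["rawGraphQLResponse"]))
# {'lastName': 'DOE', 'firstName': 'JANE', 'middleName': 'Q'}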
MediLink/MediLink_Deductible_Validator.py
CHANGED
@@ -266,13 +266,57 @@ def check_data_quality_issues(super_connector_data):
         errors = raw_response.get('errors', [])
         if errors:
             for error in errors:
-
-
-
-
-
-
-
+                error_code = error.get('code', 'UNKNOWN')
+                error_desc = error.get('description', 'No description')
+
+                # Check if this is an informational error with data
+                if error_code == 'INFORMATIONAL':
+                    extensions = error.get('extensions', {})
+                    if extensions and 'details' in extensions:
+                        details = extensions.get('details', [])
+                        if details:
+                            issues.append({
+                                "type": "Informational Error with Data",
+                                "field": "rawGraphQLResponse.errors",
+                                "value": error_code,
+                                "issue": "API returned informational error but provided data in extensions: {}".format(error_desc),
+                                "recommendation": "Data available in error extensions - system will attempt to extract"
+                            })
+                        else:
+                            issues.append({
+                                "type": "API Error",
+                                "field": "rawGraphQLResponse.errors",
+                                "value": error_code,
+                                "issue": "Super Connector API returned error: {}".format(error_desc),
+                                "recommendation": "Review API implementation and error handling"
+                            })
+                    else:
+                        issues.append({
+                            "type": "API Error",
+                            "field": "rawGraphQLResponse.errors",
+                            "value": error_code,
+                            "issue": "Super Connector API returned error: {}".format(error_desc),
+                            "recommendation": "Review API implementation and error handling"
+                        })
+                else:
+                    issues.append({
+                        "type": "API Error",
+                        "field": "rawGraphQLResponse.errors",
+                        "value": error_code,
+                        "issue": "Super Connector API returned error: {}".format(error_desc),
+                        "recommendation": "Review API implementation and error handling"
+                    })
+
+        # Check status code
+        status_code = super_connector_data.get('statuscode')
+        if status_code and status_code != '200':
+            issues.append({
+                "type": "Non-200 Status Code",
+                "field": "statuscode",
+                "value": status_code,
+                "issue": "API returned status code {} instead of 200".format(status_code),
+                "recommendation": "Check API health and error handling"
+            })
 
     # Check for multiple eligibility records (this is actually good, but worth noting)
     if "rawGraphQLResponse" in super_connector_data:
{medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/RECORD
CHANGED
@@ -1,10 +1,10 @@
-MediBot/MediBot.bat,sha256=
+MediBot/MediBot.bat,sha256=anz5i-Td1k3HhRUvkCqHsw9lBLVmO6q9bt5kLTfr1Iw,13282
 MediBot/MediBot.py,sha256=KNR3Pj46W9dQaE3OH3fFAHoa6P-hS8pjJ9xB5STEqOU,19513
 MediBot/MediBot_Charges.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 MediBot/MediBot_Crosswalk_Library.py,sha256=eYFcP6KjnzOfZbAYhs6Umv4sKguRJAQkKgYQQynJ50M,49025
 MediBot/MediBot_Post.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-MediBot/MediBot_Preprocessor.py,sha256=
-MediBot/MediBot_Preprocessor_lib.py,sha256=
+MediBot/MediBot_Preprocessor.py,sha256=Lc9uQnE5SAa0dQTOREdPV1QUB2cywXTHJ1h2w-fyeeQ,13331
+MediBot/MediBot_Preprocessor_lib.py,sha256=UszeR1YKi7jKrEQ4Fdn5ORmhoCrOqCStn05AG9M-SXg,39400
 MediBot/MediBot_UI.py,sha256=tdTXLQ_nUVbtpkxUGSuKbEgYv6CFk6EsmEMAVMLL4_A,11165
 MediBot/MediBot_dataformat_library.py,sha256=JXTV-HWahqeYF_lbNn1UYxqUtZ6ZBeFXHOyRGlDq4xM,8406
 MediBot/MediBot_docx_decoder.py,sha256=z-_oVrSocu4-CenDGDHOkDeqPcKqZqm6Ao9mABgqxJU,23561
@@ -15,9 +15,10 @@ MediBot/update_json.py,sha256=9FJZb-32EujpKuSoCjyCbdTdthOIuhcMoN4Wchuzn8A,2508
 MediBot/update_medicafe.py,sha256=rx1zUvCI99JRdr8c1csMGI2uJBl3pqusvX-xr3KhmR4,11881
 MediLink/MediLink.py,sha256=O3VSLm2s5viCRBL1is7Loj_nSaLMMcFZ-weXAmVp_20,21588
 MediLink/MediLink_277_decoder.py,sha256=Z3hQK2j-YzdXjov6aDlDRc7M_auFBnl3se4OF5q6_04,4358
-MediLink/MediLink_837p_cob_library.py,sha256=
-MediLink/MediLink_837p_encoder.py,sha256=
-MediLink/MediLink_837p_encoder_library.py,sha256=
+MediLink/MediLink_837p_cob_library.py,sha256=pWWd03yXTamNJKDbPCdOCkfglW4OLXQtIN3eiMSdfAA,29934
+MediLink/MediLink_837p_encoder.py,sha256=ODdDl_hBDYCf3f683qB3I51FGCKxrMeKL3gfT0wNAFM,28073
+MediLink/MediLink_837p_encoder_library.py,sha256=y4cTt8G2yQbMm8oEmccJJTb0yOTeUj8CrcfI1IpOLxY,48688
+MediLink/MediLink_837p_utilities.py,sha256=Bi91S1aJbsEOpWXp_IOUgCQ76IPiOJNkOfXXtcirzmI,10416
 MediLink/MediLink_API_Generator.py,sha256=vBZ8moR9tvv7mb200HlZnJrk1y-bQi8E16I2r41vgVM,10345
 MediLink/MediLink_API_v2.py,sha256=mcIgLnXPS_NaUBrkKJ8mxCUaQ0AuQUeU1vG6DoplbVY,7733
 MediLink/MediLink_API_v3.py,sha256=D17yXicLRvHfEsx5c-VUNZlke5oSnclQu6cKJACzeHA,40745
@@ -25,10 +26,10 @@ MediLink/MediLink_APIs.py,sha256=jm3f9T034MJKH8A_CIootULoeuk7H8s7PazpFZRCbKI,622
 MediLink/MediLink_Azure.py,sha256=Ow70jctiHFIylskBExN7WUoRgrKOvBR6jNTnQMk6lJA,210
 MediLink/MediLink_ClaimStatus.py,sha256=DkUL5AhmuaHsdKiQG1btciJIuexl0OLXBEH40j1KFTg,9927
 MediLink/MediLink_ConfigLoader.py,sha256=u9ecB0SIN7zuJAo8KcoQys95BtyAo-8S2n4mRd0S3XU,4356
-MediLink/MediLink_DataMgmt.py,sha256=
+MediLink/MediLink_DataMgmt.py,sha256=MjCF1L-4RkQnz_vBULPB-DVsEtv0X1WHT1o9YjCGQ7s,33280
 MediLink/MediLink_Decoder.py,sha256=Suw9CmUHgoe0ZW8sJP_pIO8URBrhO5FmxFF8RcUj9lI,13318
-MediLink/MediLink_Deductible.py,sha256=
-MediLink/MediLink_Deductible_Validator.py,sha256=
+MediLink/MediLink_Deductible.py,sha256=nD9dwStQY34FYmnuqg361UgFX8vLpZk88Im0LZJ45IQ,36732
+MediLink/MediLink_Deductible_Validator.py,sha256=2g-lZd-Y5fJ1mfP87vM6oABg0t5Om-7EkEkilVvDWYY,22888
 MediLink/MediLink_Down.py,sha256=hrDODhs-zRfOKCdiRGENN5Czu-AvdtwJj4Q7grcRXME,6518
 MediLink/MediLink_ERA_decoder.py,sha256=MiOtDcXnmevPfHAahIlTLlUc14VcQWAor9Xa7clA2Ts,8710
 MediLink/MediLink_Gmail.py,sha256=OYsASNgP4YSTaSnj9XZxPPiy0cw41JC-suLIgRyNrlQ,31439
@@ -48,8 +49,8 @@ MediLink/test.py,sha256=kSvvJRL_3fWuNS3_x4hToOnUljGLoeEw6SUTHQWQRJk,3108
 MediLink/test_cob_library.py,sha256=wUMv0-Y6fNsKcAs8Z9LwfmEBRO7oBzBAfWmmzwoNd1g,13841
 MediLink/test_validation.py,sha256=FJrfdUFK--xRScIzrHCg1JeGdm0uJEoRnq6CgkP2lwM,4154
 MediLink/webapp.html,sha256=JPKT559aFVBi1r42Hz7C77Jj0teZZRumPhBev8eSOLk,19806
-medicafe-0.
-medicafe-0.
-medicafe-0.
-medicafe-0.
-medicafe-0.
+medicafe-0.250722.0.dist-info/LICENSE,sha256=65lb-vVujdQK7uMH3RRJSMwUW-WMrMEsc5sOaUn2xUk,1096
+medicafe-0.250722.0.dist-info/METADATA,sha256=FnSb5W8xyvEFPh047lU6jEWr650em4_eFgOMsD-L97E,5501
+medicafe-0.250722.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+medicafe-0.250722.0.dist-info/top_level.txt,sha256=3uOwR4q_SP8Gufk2uCHoKngAgbtdOwQC6Qjl7ViBa_c,17
+medicafe-0.250722.0.dist-info/RECORD,,
{medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/LICENSE
File without changes
{medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/WHEEL
File without changes
{medicafe-0.250720.0.dist-info → medicafe-0.250722.0.dist-info}/top_level.txt
File without changes