medicafe 0.240419.2__py3-none-any.whl → 0.240613.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of medicafe might be problematic. Click here for more details.

Files changed (38) hide show
  1. MediBot/MediBot.bat +174 -38
  2. MediBot/MediBot.py +80 -77
  3. MediBot/MediBot_Charges.py +0 -28
  4. MediBot/MediBot_Crosswalk_Library.py +281 -0
  5. MediBot/MediBot_Post.py +0 -0
  6. MediBot/MediBot_Preprocessor.py +138 -211
  7. MediBot/MediBot_Preprocessor_lib.py +496 -0
  8. MediBot/MediBot_UI.py +80 -35
  9. MediBot/MediBot_dataformat_library.py +79 -35
  10. MediBot/MediBot_docx_decoder.py +295 -0
  11. MediBot/update_medicafe.py +46 -8
  12. MediLink/MediLink.py +207 -108
  13. MediLink/MediLink_837p_encoder.py +299 -214
  14. MediLink/MediLink_837p_encoder_library.py +445 -245
  15. MediLink/MediLink_API_v2.py +174 -0
  16. MediLink/MediLink_APIs.py +139 -0
  17. MediLink/MediLink_ConfigLoader.py +44 -32
  18. MediLink/MediLink_DataMgmt.py +297 -89
  19. MediLink/MediLink_Decoder.py +63 -0
  20. MediLink/MediLink_Down.py +73 -102
  21. MediLink/MediLink_ERA_decoder.py +4 -4
  22. MediLink/MediLink_Gmail.py +479 -4
  23. MediLink/MediLink_Mailer.py +0 -0
  24. MediLink/MediLink_Parser.py +111 -0
  25. MediLink/MediLink_Scan.py +0 -0
  26. MediLink/MediLink_Scheduler.py +2 -131
  27. MediLink/MediLink_StatusCheck.py +0 -4
  28. MediLink/MediLink_UI.py +87 -27
  29. MediLink/MediLink_Up.py +301 -45
  30. MediLink/MediLink_batch.bat +1 -1
  31. MediLink/test.py +74 -0
  32. medicafe-0.240613.0.dist-info/METADATA +55 -0
  33. medicafe-0.240613.0.dist-info/RECORD +43 -0
  34. {medicafe-0.240419.2.dist-info → medicafe-0.240613.0.dist-info}/WHEEL +5 -5
  35. medicafe-0.240419.2.dist-info/METADATA +0 -19
  36. medicafe-0.240419.2.dist-info/RECORD +0 -32
  37. {medicafe-0.240419.2.dist-info → medicafe-0.240613.0.dist-info}/LICENSE +0 -0
  38. {medicafe-0.240419.2.dist-info → medicafe-0.240613.0.dist-info}/top_level.txt +0 -0
@@ -1,218 +1,90 @@
1
- import csv
2
- import subprocess
3
1
  import os
4
2
  import re
5
- from datetime import datetime
6
3
  from collections import OrderedDict # so that the field_mapping stays in order.
7
4
  import re
8
5
  import sys
6
+ import argparse
7
+ import MediBot_Crosswalk_Library
9
8
 
10
9
  # Add parent directory of the project to the Python path
11
10
  project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
12
11
  sys.path.append(project_dir)
13
12
 
14
- from MediLink import MediLink_ConfigLoader
15
- from MediLink import MediLink_DataMgmt
13
+ try:
14
+ from MediLink import MediLink_ConfigLoader
15
+ except ImportError:
16
+ import MediLink_ConfigLoader
16
17
 
17
- """
18
- Preprocessing Enhancements
19
- - [X] Preprocess Insurance Policy Numbers and Group Numbers to replace '-' with ''.
20
- - [X] De-duplicate entries in the CSV, entering each patient only once even if they appear twice in the file.
21
- - [ ] Implement dynamic field combination in CSV pre-processing for flexibility with various CSV formats.
22
- - [ ] Enhance SSN cleaning logic to handle more variations of sensitive data masking.
23
- - [ ] Optimize script startup and CSV loading to reduce initial latency.
24
-
25
- Data Integrity and Validation
26
- - [ ] Conduct a thorough CSV integrity check before processing to flag potential issues upfront.
27
- - [ ] Implement a mechanism to confirm the accuracy of entered data, potentially through a verification step or summary report.
28
- - [ ] Explore the possibility of integrating direct database queries for existing patient checks to streamline the process.
29
- - [ ] Automate the replacement of spaces with underscores ('_') in last names for Medicare entries, ensuring data consistency.
30
- - [ ] Enhance CSV integrity checks to identify and report potential issues with data format, especially concerning insurance policy numbers and special character handling.
31
-
32
- Known Issues and Bugs
33
- - [ ] Address the handling of '.' and other special characters that may disrupt parsing, especially under Windows XP.
34
- - [ ] Investigate the issue with Excel modifying long policy numbers in the CSV and provide guidance or a workaround.
35
-
36
- Future Work
37
- - [ ] Consolidate data from multiple sources (Provider_Notes.csv, Surgery_Schedule.csv, and Carols_CSV.csv) into a single table with Patient ID as the key, ensuring all data elements are aligned and duplicate entries are minimized.
38
- - [ ] Implement logic to verify and match Patient IDs across different files to ensure data integrity before consolidation.
39
- - [ ] Optimize the preprocessing of surgery dates and diagnosis codes for use in patient billing and scheduling systems.
40
- - [ ] This needs to be able to take in the Surgery Schedule doc and parse out a Patient ID : Diagnosis Code table
41
- - [ ] The Minutes & Cancellation data with logic to consolidate into one table in memory.
42
-
43
-
44
- Future Work: crosswalk_update() automates the process of updating the crosswalk.json file with new Medisoft insurance information.
45
-
46
- Development Roadmap:
47
- 1. Problem Statement:
48
- - The need to update the crosswalk.json file arises whenever a new Medisoft insurance is discovered. Automation of this process is required for accuracy and efficiency.
49
-
50
- 2. Identifying New Insurance:
51
- - New Medisoft insurances are identified based on the payer ID number.
52
- - The existence of the payer ID number is checked in the crosswalk.json under existing endpoints.
53
-
54
- 3. Adding New Insurance:
55
- - If the payer ID number does not exist in any endpoint, the tool prompts the user, assisted by endpoint APIs, to add the payer ID to a specific endpoint.
56
- - The corresponding name from Carol's spreadsheet is used as the value for the new payer ID.
57
-
58
- 4. Mapping to Main Insurance:
59
- - The tool presents the user with a list of the top 5-7 insurances, scored higher on a fuzzy search or above a certain score.
60
- - The user selects the appropriate insurance based on the identified Medisoft insurance, establishing the medisoft_insurance_to_payer_id relationship.
61
-
62
- 5. Confirming Mapping:
63
- - The tool implicitly establishes the insurance_to_endpoint_mapping based on the selected MediSoft name and endpoint.
64
- - This step is confirmed or re-evaluated to ensure accuracy.
65
-
66
- 6. User Interaction:
67
- - Unrecognized payer IDs are presented to the user.
68
- - Users can assign these payer IDs to MediSoft custom names individually.
69
- - Grouping of payer IDs may be facilitated, especially for insurances like CIGNA with multiple addresses but few payer IDs.
70
-
71
- 7. Handling Unavailable Payer IDs:
72
- - An extra endpoint named "Fax/Mail or Other" is created to handle cases where the payer ID is unavailable.
73
- - The tool retains payer IDs not existing in any endpoint, allowing users to assign them to the "Fax/Mail or Other" key in the crosswalk.
74
-
75
- 8. Implementation Considerations:
76
- - The tool should handle various scenarios, including checking for free payer IDs and determining the appropriate endpoint for assignment.
77
- - Integration of API checks to verify payer ID availability and associated information is recommended.
78
- - Validation mechanisms should be implemented to prevent incorrect mappings and ensure data integrity.
79
-
80
- NOTE: this needs to also pull from the CSV the listed address of the insurance.
81
- NOTE: La Forma Z can have the PatientID number which can link back to Carol's table which can then map the Medisoft insurance name to the payerID
82
- and payer name and address when the insurance is already selected in Medisoft so the program can learn retroactively and would know the Medisoft # from
83
- the sequencing rather than trying to feed it from the beginning. so that'll be out of ["fixedWidthSlices"]["personal_slices"]["PATID"].
84
- NOTE: Also check MAPAT because maybe the PatientID to Medisoft custom insurance name might exist there en masse + the PatientID to PayerID link from Carol's CSV
85
- gives us the Medisoft custom insurance name to Payer ID. Then, the endpoint mapping is the clearinghouse PayerID list (API?). MAPAT has the PatientID to Medisoft
86
- insurance reference number which is the MAINS offset by 1 for the header. MAPAT has columns [159,162] for insurance and [195,200] for patient ID.
87
- """
18
+ try:
19
+ import MediBot_Preprocessor_lib
20
+ except ImportError:
21
+ from MediBot import MediBot_Preprocessor_lib
88
22
 
89
23
  # Load configuration
90
24
  # Should this also take args? Path for ./MediLink needed to be added for this to resolve
91
- config, _ = MediLink_ConfigLoader.load_configuration()
92
-
93
- class InitializationError(Exception):
94
- def __init__(self, message):
95
- self.message = message
96
- super().__init__(self.message)
97
-
98
- def initialize(config):
99
- global AHK_EXECUTABLE, CSV_FILE_PATH, field_mapping, page_end_markers
100
-
101
- try:
102
- AHK_EXECUTABLE = config.get('AHK_EXECUTABLE', "")
103
- except AttributeError:
104
- raise InitializationError("Error: 'AHK_EXECUTABLE' not found in config.")
105
-
106
- try:
107
- CSV_FILE_PATH = config.get('CSV_FILE_PATH', "")
108
- except AttributeError:
109
- raise InitializationError("Error: 'CSV_FILE_PATH' not found in config.")
110
-
111
- try:
112
- field_mapping = OrderedDict(config.get('field_mapping', {}))
113
- except AttributeError:
114
- raise InitializationError("Error: 'field_mapping' not found in config.")
115
-
116
- try:
117
- page_end_markers = config.get('page_end_markers', [])
118
- except AttributeError:
119
- raise InitializationError("Error: 'page_end_markers' not found in config.")
120
-
25
+ config, crosswalk = MediLink_ConfigLoader.load_configuration()
121
26
 
122
- def open_csv_for_editing(csv_file_path):
27
+ # CSV Preprocessor built for Carol
28
+ def preprocess_csv_data(csv_data, crosswalk):
123
29
  try:
124
- # Open the CSV file in the default program
125
- subprocess.run(['open' if os.name == 'posix' else 'start', csv_file_path], check=True, shell=True)
126
- print("After saving the revised CSV, please re-run MediBot.")
127
- except subprocess.CalledProcessError as e:
128
- print("Failed to open CSV file:", e)
30
+ # Add the "Ins1 Insurance ID" and "Default Diagnosis #1" columns to the CSV data.
31
+ # This initializes the columns with empty values for each row.
32
+ columns_to_add = ['Ins1 Insurance ID', 'Default Diagnosis #1', 'Procedure Code', 'Minutes', 'Amount']
33
+ MediLink_ConfigLoader.log("CSV Pre-processor: Initializing empty columns to the CSV data...", level="INFO")
34
+ MediBot_Preprocessor_lib.add_columns(csv_data, columns_to_add)
129
35
 
130
- # Function to load and process CSV data
131
- def load_csv_data(csv_file_path):
132
- try:
133
- # Check if the file exists
134
- if not os.path.exists(csv_file_path):
135
- raise FileNotFoundError("***Error: CSV file '{}' not found.".format(csv_file_path))
36
+ # Filter out rows without a Patient ID and rows where the Primary Insurance
37
+ # is 'AETNA', 'AETNA MEDICARE', or 'HUMANA MED HMO'.
38
+ MediLink_ConfigLoader.log("CSV Pre-processor: Filtering out missing Patient IDs and 'AETNA', 'AETNA MEDICARE', or 'HUMANA MED HMO'...", level="INFO")
39
+ MediBot_Preprocessor_lib.filter_rows(csv_data)
136
40
 
137
- with open(csv_file_path, 'r') as csvfile:
138
- reader = csv.DictReader(csvfile)
139
- return [row for row in reader] # Return a list of dictionaries
140
- except FileNotFoundError as e:
141
- print(e) # Print the informative error message
142
- print("Hint: Check if CSV file is located in the expected directory or specify a different path in config file.")
143
- print("Please correct the issue and re-run MediBot.")
144
- sys.exit(1) # Halt the script
145
- except IOError as e:
146
- print("Error reading CSV file: {}. Please check the file path and permissions.".format(e))
147
- sys.exit(1) # Halt the script in case of other IO errors
148
-
149
- # CSV Preprocessor built for Carol
150
- def preprocess_csv_data(csv_data):
151
- try:
152
- # Filter out rows without a Patient ID
153
- csv_data[:] = [row for row in csv_data if row.get('Patient ID', '').strip()]
41
+ # Convert 'Surgery Date' from string format to datetime objects for sorting purposes.
42
+ MediBot_Preprocessor_lib.convert_surgery_date(csv_data)
154
43
 
155
- # Remove Patients (rows) that are Primary Insurance: 'AETNA', 'AETNA MEDICARE', or 'HUMANA MED HMO'.
156
- csv_data[:] = [row for row in csv_data if row.get('Primary Insurance', '').strip() not in ['AETNA', 'AETNA MEDICARE', 'HUMANA MED HMO']]
157
-
158
- # Convert 'Surgery Date' to datetime objects for sorting
159
- for row in csv_data:
160
- try:
161
- row['Surgery Date'] = datetime.strptime(row.get('Surgery Date', ''), '%m/%d/%Y')
162
- except ValueError:
163
- # Handle or log the error if the date is invalid
164
- row['Surgery Date'] = datetime.min # Assign a minimum datetime value for sorting purposes
165
-
166
- # Initially sort the patients first by 'Surgery Date' and then by 'Patient Last' alphabetically
167
- csv_data.sort(key=lambda x: (x['Surgery Date'], x.get('Patient Last', '').strip()))
44
+ # Update the CSV data to include only unique patient records.
45
+ # Re-sort the CSV data after deduplication to ensure the correct order.
46
+ # Sort the patients by 'Surgery Date' and then by 'Patient Last' name alphabetically.
47
+ # Deduplicate patient records based on Patient ID, keeping the entry with the earliest surgery date.
48
+ MediLink_ConfigLoader.log("CSV Pre-processor: Sorting and de-duplicating patient records...", level="INFO")
49
+ MediBot_Preprocessor_lib.sort_and_deduplicate(csv_data)
50
+ # TODO This eventually needs to be handled differently because now we're wanting to handle both surgery dates.
51
+ # Instead of deleting, maybe we need to make a secondary dataset or some kind of flag or isolate here where
52
+ # MediBot knows to skip it when entering the patient data but is ready to put Charges for the second surgery date.
53
+ # MediLink_Scheduler will have a dictionary persist somewhere that would tell us which patients were billed
54
+ # and which haven't been yet. So, if the patient 'exists' in the system, the next question is about claims/billing status.
55
+ # Eventually, we really want to get out of Medisoft...
168
56
 
169
- # Deduplicate patient records based on Patient ID, keeping the entry with the earliest surgery date
170
- unique_patients = {}
171
- for row in csv_data:
172
- patient_id = row.get('Patient ID')
173
- if patient_id not in unique_patients or row['Surgery Date'] < unique_patients[patient_id]['Surgery Date']:
174
- unique_patients[patient_id] = row
57
+ # Convert 'Surgery Date' back to string format if needed for further processing.
58
+ # Combine 'Patient First', 'Patient Middle', and 'Patient Last' into a single 'Patient Name' field.
59
+ # Combine 'Patient Address1' and 'Patient Address2' into a single 'Patient Street' field.
60
+ MediLink_ConfigLoader.log("CSV Pre-processor: Constructing Patient Name and Address for Medisoft...", level="INFO")
61
+ MediBot_Preprocessor_lib.combine_fields(csv_data)
175
62
 
176
- # Update csv_data to only include unique patient records
177
- csv_data[:] = list(unique_patients.values())
178
-
179
- # Re-sort the csv_data after deduplication to ensure correct order
180
- csv_data.sort(key=lambda x: (x['Surgery Date'], x.get('Patient Last', '').strip()))
63
+ # Retrieve replacement values from the crosswalk.
64
+ # Iterate over each key-value pair in the replacements dictionary and replace the old value
65
+ # with the new value in the corresponding fields of each row.
66
+ MediLink_ConfigLoader.log("CSV Pre-processor: Applying mandatory replacements per Crosswalk...", level="INFO")
67
+ MediBot_Preprocessor_lib.apply_replacements(csv_data, crosswalk)
181
68
 
182
- # Maybe make a dataformat_library function for this? csv_data = format_preprocessor(csv_data)?
183
- for row in csv_data:
184
- # Convert 'Surgery Date' back to string format if needed for further processing (cleanup)
185
- row['Surgery Date'] = row['Surgery Date'].strftime('%m/%d/%Y')
186
-
187
- # Combine name fields
188
- first_name = row.get('Patient First', '').strip()
189
- middle_name = row.get('Patient Middle', '').strip()
190
- last_name = row.get('Patient Last', '').strip()
191
- row['Patient Name'] = "{}, {} {}".format(last_name, first_name, middle_name).strip()
192
-
193
- # Combine address fields
194
- address1 = row.get('Patient Address1', '').strip()
195
- address2 = row.get('Patient Address2', '').strip()
196
- row['Patient Street'] = "{} {}".format(address1, address2).strip()
197
-
198
- # Probably make a data_format function for this:
199
- # Define the replacements as a dictionary
200
- replacements = {
201
- '777777777': '', # Replace '777777777' with an empty string
202
- 'RAILROAD MEDICARE': 'RAILROAD', # Replace 'RAILROAD MEDICARE' with 'RAILROAD'
203
- 'AARP MEDICARE COMPLETE': 'AARP COMPLETE' # Replace 'AARP MEDICARE COMPLETE' with 'AARP COMPLETE'
204
- }
205
-
206
- # Iterate over each key-value pair in the replacements dictionary
207
- for old_value, new_value in replacements.items():
208
- # Replace the old value with the new value if it exists in the row
209
- if row.get('Patient SSN', '') == old_value:
210
- row['Patient SSN'] = new_value
211
- elif row.get('Primary Insurance', '') == old_value:
212
- row['Primary Insurance'] = new_value
213
-
69
+ # Update the "Ins1 Insurance ID" column based on the crosswalk and the "Ins1 Payer ID" column for each row.
70
+ # If the Payer ID is not found in the crosswalk, create a placeholder entry in the crosswalk and mark the row for review.
71
+ MediLink_ConfigLoader.log("CSV Pre-processor: Populating 'Ins1 Insurance ID' based on Crosswalk...", level="INFO")
72
+ MediBot_Preprocessor_lib.update_insurance_ids(csv_data, crosswalk)
73
+
74
+ # Enrich the "Default Diagnosis #1" column based on the parsed docx for each row.
75
+ # This needs to handle the different patient dates correctly so we get the right diagnosis code assigned to the right patient on the right date of service.
76
+ # Currently, we've deleted all the second date entries for patients. As long as they exist in the system, they're just deleted.
77
+ MediLink_ConfigLoader.log("CSV Pre-processor: Populating 'Default Diagnosis #1' based on Surgery Schedule and Crosswalk...", level="INFO")
78
+ MediBot_Preprocessor_lib.update_diagnosis_codes(csv_data)
79
+
80
+ # Enrich the procedure code column based on the diagnosis code for each patient.
81
+ MediLink_ConfigLoader.log("CSV Pre-processor: Populating 'Procedure Code' based on Crosswalk...", level="INFO")
82
+ MediBot_Preprocessor_lib.update_procedure_codes(csv_data)
83
+
214
84
  except Exception as e:
215
- print("An error occurred while pre-processing CSV data. Please repair the CSV directly and try again:", e)
85
+ message = "An error occurred while pre-processing CSV data. Please repair the CSV directly and try again: {}".format(e)
86
+ MediLink_ConfigLoader.log(message, level="ERROR")
87
+ print(message)
216
88
 
217
89
  def check_existing_patients(selected_patient_ids, MAPAT_MED_PATH):
218
90
  existing_patients = []
@@ -245,6 +117,9 @@ def intake_scan(csv_headers, field_mapping):
245
117
  missing_fields_warnings = []
246
118
  required_fields = config["required_fields"]
247
119
 
120
+ # MediLink_ConfigLoader.log("Intake Scan - Field Mapping: {}".format(field_mapping))
121
+ # MediLink_ConfigLoader.log("Intake Scan - CSV Headers: {}".format(csv_headers))
122
+
248
123
  # Iterate over the Medisoft fields defined in field_mapping
249
124
  for medisoft_field in field_mapping.keys():
250
125
  for pattern in field_mapping[medisoft_field]:
@@ -252,32 +127,84 @@ def intake_scan(csv_headers, field_mapping):
252
127
  if matched_headers:
253
128
  # Assuming the first matched header is the desired one
254
129
  identified_fields[matched_headers[0]] = medisoft_field
130
+ # MediLink_ConfigLoader.log("Found Header: {}".format(identified_fields[matched_headers[0]]))
255
131
  break
256
132
  else:
257
133
  # Check if the missing field is a required field before appending the warning
258
134
  if medisoft_field in required_fields:
259
135
  missing_fields_warnings.append("WARNING: No matching CSV header found for Medisoft field '{0}'".format(medisoft_field))
260
136
 
261
- #-----------------------
262
- # CSV Integrity Check
263
- #-----------------------
264
-
265
- # This section needs to be revamped further so that it can interpret the information from here and decide
266
- # if it's significant or not.
267
- # e.g. If the 'Street' value:key is 'Address', then any warnings about City, State, Zip can be ignored.
268
- # Insurance Policy Numbers should be all alphanumeric with no other characters.
269
- # Make sure that the name field has at least one name under it (basically check for a blank or
270
- # partially blank csv with just a header)
271
-
137
+ # CSV Integrity Checks
138
+ # Check for blank or partially blank CSV
139
+ if len(csv_headers) == 0 or all(header == "" for header in csv_headers):
140
+ missing_fields_warnings.append("WARNING: The CSV appears to be blank or contains only headers without data.")
141
+
272
142
  # Display the identified fields and missing fields warnings
273
- #print("The following Medisoft fields have been identified in the CSV:\n")
143
+ #MediLink_ConfigLoader.log("The following Medisoft fields have been identified in the CSV:")
274
144
  #for header, medisoft_field in identified_fields.items():
275
- # print("{0} (CSV header: {1})".format(medisoft_field, header))
145
+ # MediLink_ConfigLoader.log("{} (CSV header: {})".format(medisoft_field, header))
146
+
147
+ # This section interprets the information from identified_fields and decides if there are significant issues.
148
+ # e.g. If the 'Street' value:key is 'Address', then any warnings about City, State, Zip can be ignored.
149
+ for header, field in identified_fields.items():
150
+ # Insurance Policy Numbers should be all alphanumeric with no other characters.
151
+ if 'Insurance Policy Number' in field:
152
+ policy_number = identified_fields.get(header)
153
+ if not bool(re.match("^[a-zA-Z0-9]*$", policy_number)):
154
+ missing_fields_warnings.append("WARNING: Insurance Policy Number '{}' contains invalid characters.".format(policy_number))
155
+ # Additional checks can be added as needed for other fields
156
+
157
+ if missing_fields_warnings:
158
+ MediLink_ConfigLoader.log("\nSome required fields could not be matched:")
159
+ for warning in missing_fields_warnings:
160
+ MediLink_ConfigLoader.log(warning)
161
+
162
+ return identified_fields
163
+
164
+ def main():
165
+ parser = argparse.ArgumentParser(description='Run MediLink Data Management Tasks')
166
+ parser.add_argument('--update-crosswalk', action='store_true',
167
+ help='Run the crosswalk update independently')
168
+ parser.add_argument('--init-crosswalk', action='store_true',
169
+ help='Initialize the crosswalk using historical data from MAPAT and Carols CSV')
170
+ parser.add_argument('--load-csv', action='store_true',
171
+ help='Load and process CSV data')
172
+ parser.add_argument('--preprocess-csv', action='store_true',
173
+ help='Preprocess CSV data based on specific rules')
174
+ parser.add_argument('--open-csv', action='store_true',
175
+ help='Open CSV for manual editing')
176
+
177
+ args = parser.parse_args()
178
+
179
+ config, crosswalk = MediLink_ConfigLoader.load_configuration()
180
+
181
+ # If no arguments provided, print usage instructions
182
+ if not any(vars(args).values()):
183
+ parser.print_help()
184
+ return
185
+
186
+ if args.update_crosswalk:
187
+ print("Updating the crosswalk...")
188
+ MediBot_Crosswalk_Library.crosswalk_update(config, crosswalk)
189
+
190
+ if args.init_crosswalk:
191
+ MediBot_Crosswalk_Library.initialize_crosswalk_from_mapat()
192
+
193
+ if args.load_csv:
194
+ print("Loading CSV data...")
195
+ csv_data = MediBot_Preprocessor_lib.load_csv_data(config['CSV_FILE_PATH'])
196
+ print("Loaded {} records from the CSV.".format(len(csv_data)))
197
+
198
+ if args.preprocess_csv:
199
+ if 'csv_data' in locals():
200
+ print("Preprocessing CSV data...")
201
+ preprocess_csv_data(csv_data, crosswalk)
202
+ else:
203
+ print("Error: CSV data needs to be loaded before preprocessing. Use --load-csv.")
276
204
 
277
- #if missing_fields_warnings:
278
- # print("\nSome required fields could not be matched:")
279
- # for warning in missing_fields_warnings:
280
- # print(warning)
205
+ if args.open_csv:
206
+ print("Opening CSV for editing...")
207
+ MediBot_Preprocessor_lib.open_csv_for_editing(config['CSV_FILE_PATH'])
281
208
 
282
- #print("Debug - Identified fields mapping (intake scan):", identified_fields)
283
- return identified_fields
209
+ if __name__ == '__main__':
210
+ main()