medicafe 0.250813.2__py3-none-any.whl → 0.250814.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
MediBot/MediBot.bat CHANGED
@@ -772,7 +772,7 @@ if "!internet_available!"=="0" (
  pause >nul
  goto troubleshooting_menu
  )
- set "rollback_version=0.250529.2"
+ set "rollback_version=0.250813.1"
  echo Forcing reinstall of %medicafe_package%==%rollback_version% with no dependencies...
  python -m pip install --no-deps --force-reinstall %medicafe_package%==%rollback_version%
  if errorlevel 1 (
MediBot/MediBot.py CHANGED
@@ -633,7 +633,7 @@ if __name__ == "__main__":
  # Display existing patients table using the enhanced display function
  MediBot_UI.display_enhanced_patient_table(
  patient_info,
- "NOTE: The following patient(s) already EXIST in the system but have new dates of service.\n Their diagnosis codes will need to be updated manually by the user to the following list:",
+ "NOTE: The following patient(s) already EXIST in the system but may have new dates of service.\n Their diagnosis codes may need to be updated manually by the user to the following list:",
  show_line_numbers=False
  )
MediBot/MediBot_Preprocessor_lib.py CHANGED
@@ -547,7 +547,18 @@ def sort_and_deduplicate(csv_data):

  # Store the surgery dates information in the first row of each patient for later access
  for patient_id, row in unique_patients.items():
- row['_all_surgery_dates'] = sorted(patient_surgery_dates[patient_id])
+ # Convert surgery dates to strings for consistent storage
+ surgery_date_strings = []
+ for date in patient_surgery_dates[patient_id]:
+ if isinstance(date, datetime):
+ if date == datetime.min:
+ surgery_date_strings.append('MISSING')
+ else:
+ surgery_date_strings.append(date.strftime('%m-%d-%Y'))
+ else:
+ surgery_date_strings.append(str(date) if date else 'MISSING')
+
+ row['_all_surgery_dates'] = sorted(surgery_date_strings)
  row['_primary_surgery_date'] = row['Surgery Date'] # Keep track of which date has the demographics

  # Convert the unique_patients dictionary back to a list and sort it
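The new block converts every stored surgery date to a display string before sorting, reusing the 'MISSING' sentinel that the preprocessor already maps to datetime.min. A minimal standalone sketch of that normalization, with an illustrative helper name (normalize_surgery_date is not part of the package):

    from datetime import datetime

    def normalize_surgery_date(value):
        # datetime.min is the preprocessor's sentinel for an unknown date
        if isinstance(value, datetime):
            return 'MISSING' if value == datetime.min else value.strftime('%m-%d-%Y')
        # Anything else is stringified; empty or None collapses to 'MISSING'
        return str(value) if value else 'MISSING'

    dates = [datetime(2023, 12, 25), datetime.min, '01-02-2024', None]
    print(sorted(normalize_surgery_date(d) for d in dates))
    # ['01-02-2024', '12-25-2023', 'MISSING', 'MISSING']

Note that the resulting sort is lexicographic over the '%m-%d-%Y' strings rather than chronological.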
@@ -565,7 +576,7 @@ def combine_fields(csv_data):
  if surgery_date == datetime.min:
  row['Surgery Date'] = 'MISSING'
  else:
- row['Surgery Date'] = surgery_date.strftime('%m/%d/%Y')
+ row['Surgery Date'] = surgery_date.strftime('%m-%d-%Y')
  elif surgery_date:
  # Already a non-empty string
  row['Surgery Date'] = str(surgery_date)
@@ -1162,17 +1173,22 @@ def update_diagnosis_codes(csv_data):
  diagnosis_code, medisoft_shorthand), level="WARNING")
  MediLink_ConfigLoader.log("Converted diagnosis code to Medisoft shorthand: {}".format(medisoft_shorthand), level="DEBUG")

- surgery_date_to_diagnosis[surgery_date] = medisoft_shorthand
+ surgery_date_to_diagnosis[surgery_date_str] = medisoft_shorthand
  else:
  MediLink_ConfigLoader.log("No matching surgery date found for Patient ID: {} on date {}.".format(patient_id, surgery_date_str), level="INFO")
- surgery_date_to_diagnosis[surgery_date] = 'N/A'
+ surgery_date_to_diagnosis[surgery_date_str] = 'N/A'

  # Store the diagnosis mapping for all surgery dates
  row['_surgery_date_to_diagnosis'] = surgery_date_to_diagnosis

  # Set the primary diagnosis code (for the main surgery date)
  primary_surgery_date = row.get('Surgery Date')
- primary_diagnosis = surgery_date_to_diagnosis.get(primary_surgery_date, 'N/A')
+ # Convert primary surgery date to string for lookup
+ if isinstance(primary_surgery_date, datetime):
+ primary_surgery_date_str = primary_surgery_date.strftime('%m-%d-%Y')
+ else:
+ primary_surgery_date_str = str(primary_surgery_date)
+ primary_diagnosis = surgery_date_to_diagnosis.get(primary_surgery_date_str, 'N/A')
  row['Default Diagnosis #1'] = primary_diagnosis

  updated_count += 1
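This hunk keys surgery_date_to_diagnosis by the formatted date string (surgery_date_str) and converts the primary surgery date to the same format before the lookup, so both sides of the .get() agree. A small illustration of the mismatch that consistent string keys avoid (the diagnosis value is made up):

    from datetime import datetime

    date_obj = datetime(2024, 1, 2)
    date_str = date_obj.strftime('%m-%d-%Y')

    keyed_by_object = {date_obj: 'H25.11'}
    keyed_by_string = {date_str: 'H25.11'}

    print(keyed_by_object.get(date_str, 'N/A'))  # N/A  (a string key misses a datetime key)
    print(keyed_by_string.get(date_str, 'N/A'))  # H25.11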
MediBot/MediBot_UI.py CHANGED
@@ -2,6 +2,7 @@
  import ctypes, time, re
  from ctypes import wintypes
  from sys import exit
+ from datetime import datetime

  # Set up paths using core utilities

@@ -62,7 +63,18 @@ def display_enhanced_patient_table(patient_info, title, show_line_numbers=True):
  for surgery_date, patient_name, patient_id, diagnosis_code, patient_row in patient_info:
  # Format surgery_date safely whether it's a datetime/date or a string
  try:
- formatted_date = surgery_date.strftime('%m-%d')
+ if isinstance(surgery_date, datetime):
+ formatted_date = surgery_date.strftime('%m-%d')
+ else:
+ # Handle string dates - this should be the Surgery Date Display field
+ formatted_date = str(surgery_date)
+ # If it's a date string like "12-25-2023", format it as "12-25"
+ if '-' in formatted_date and len(formatted_date.split('-')) == 3:
+ try:
+ parts = formatted_date.split('-')
+ formatted_date = "{}-{}".format(parts[0], parts[1])
+ except:
+ pass # Use original string if parsing fails
  except Exception:
  formatted_date = str(surgery_date)
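The widened try branch accepts either a datetime or an already formatted 'MM-DD-YYYY' string and reduces both to 'MM-DD' for the table. A standalone sketch of that formatting rule (format_display_date is an illustrative name, not a function in MediBot_UI):

    from datetime import datetime

    def format_display_date(surgery_date):
        if isinstance(surgery_date, datetime):
            return surgery_date.strftime('%m-%d')
        text = str(surgery_date)
        parts = text.split('-')
        # "12-25-2023" -> "12-25"; anything else is shown unchanged
        if len(parts) == 3:
            return "{}-{}".format(parts[0], parts[1])
        return text

    print(format_display_date(datetime(2023, 12, 25)))  # 12-25
    print(format_display_date('12-25-2023'))            # 12-25
    print(format_display_date('MISSING'))               # MISSING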
MediBot/__init__.py CHANGED
@@ -19,7 +19,7 @@ Smart Import Integration:
  medibot_main = get_components('medibot_main')
  """

- __version__ = "0.250728.9"
+ __version__ = "0.250814.3"
  __author__ = "Daniel Vidaud"
  __email__ = "daniel@personalizedtransformation.com"

MediBot/update_medicafe.py CHANGED
@@ -1,4 +1,5 @@
  #update_medicafe.py
+ # Version: 1.0.0
  import subprocess, sys, time, platform, os, shutil, random

  # Safe import for pkg_resources with fallback
@@ -305,6 +306,7 @@ def upgrade_package(package, retries=4, delay=2, target_version=None): # Update
  def get_installed_version_fresh(package):
  """Get installed version using a fresh subprocess to avoid pkg_resources cache issues."""
  try:
+ # First try pip show
  process = subprocess.Popen(
  [sys.executable, '-m', 'pip', 'show', package],
  stdout=subprocess.PIPE,
@@ -315,6 +317,21 @@ def upgrade_package(package, retries=4, delay=2, target_version=None): # Update
  for line in stdout.decode().splitlines():
  if line.startswith("Version:"):
  return line.split(":", 1)[1].strip()
+
+ # If pip show fails, try pkg_resources in a fresh subprocess
+ try:
+ import subprocess
+ process = subprocess.Popen(
+ [sys.executable, '-c', 'import pkg_resources; print(pkg_resources.get_distribution("{}").version)'.format(package)],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE
+ )
+ stdout, stderr = process.communicate()
+ if process.returncode == 0:
+ return stdout.decode().strip()
+ except Exception:
+ pass
+
  return None
  except Exception as e:
  print("Warning: Could not get fresh version: {}".format(e))
@@ -335,17 +352,40 @@ def upgrade_package(package, retries=4, delay=2, target_version=None): # Update

  if process.returncode == 0:
  print(stdout.decode().strip())
- # Add delay to allow file system to settle
- time.sleep(1)
- new_version = get_installed_version_fresh(package)
+ # Add longer delay to allow file system and package metadata to settle
+ print("Waiting for package metadata to update...")
+ time.sleep(3)
+
+ # Try multiple times to get the new version with increasing delays
+ new_version = None
+ for retry in range(3):
+ # Clear pkg_resources cache before each attempt
+ if pkg_resources:
+ try:
+ pkg_resources.working_set = pkg_resources.WorkingSet()
+ except Exception:
+ pass
+
+ new_version = get_installed_version_fresh(package)
+ if new_version:
+ print("Detected new version: {}".format(new_version))
+ break
+ print("Version detection attempt {} failed, retrying...".format(retry + 1))
+ time.sleep(2)
+
  expected_version = target_version or get_latest_version(package)

  if expected_version and new_version and compare_versions(new_version, expected_version) >= 0:
  print_status("Attempt {}: Upgrade succeeded with {}!".format(attempt, strategy_name), "SUCCESS")
  return True
+ elif new_version:
+ print_status("Upgrade may have succeeded but version mismatch. Current: {} Expected: {}".format(
+ new_version, expected_version), "WARNING")
+ # If we got a new version but it doesn't match expected, still consider it a success
+ # as the package was updated
+ return True
  else:
- print_status("Upgrade incomplete. Current version: {} Expected at least: {}".format(
- new_version or "unknown", expected_version), "WARNING")
+ print_status("Upgrade incomplete. Could not detect new version.", "WARNING")
  return False
  else:
  print(stderr.decode().strip())
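The replacement block trades the single one-second pause for a bounded retry loop that rebuilds pkg_resources.working_set before each probe, so distribution metadata cached at process start does not mask the freshly installed version. A minimal sketch of that retry pattern (detect_version stands in for whatever probe is used):

    import time

    try:
        import pkg_resources
    except ImportError:
        pkg_resources = None

    def wait_for_version(detect_version, attempts=3, delay=2):
        for _ in range(attempts):
            if pkg_resources:
                try:
                    # Rebuild the working set so newly installed metadata becomes visible
                    pkg_resources.working_set = pkg_resources.WorkingSet()
                except Exception:
                    pass
            version = detect_version()
            if version:
                return version
            time.sleep(delay)
        return None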
@@ -604,10 +644,19 @@ def main():
  if upgrade_package(package, target_version=latest_version):
  # STEP 8: Verify upgrade
  debug_step(8, "Upgrade Verification")
+
+ # Clear cache and wait for package metadata to settle
+ if pkg_resources:
+ try:
+ pkg_resources.working_set = pkg_resources.WorkingSet()
+ except Exception:
+ pass
+
+ time.sleep(2)
  new_version = get_installed_version(package)
  print("New installed version: {}".format(new_version))

- if compare_versions(new_version, latest_version) >= 0:
+ if new_version and compare_versions(new_version, latest_version) >= 0:
  print_status("Upgrade successful. New version: {}".format(new_version), "SUCCESS")

  # DEBUG STEP 9: Clear cache
@@ -619,8 +668,21 @@
  print_status("Cache clearing failed, but update was successful.", "WARNING")

  print_final_result(True, "Successfully upgraded to version {}".format(new_version))
+ elif new_version:
+ print_status("Upgrade completed but version verification unclear. New version: {}".format(new_version), "WARNING")
+ print_status("The package was updated, but version comparison failed. This may be due to caching issues.", "WARNING")
+
+ # Still clear cache and exit successfully
+ debug_step(9, "Cache Clearing")
+ print_status("Clearing Python cache to prevent import issues...", "INFO")
+ if clear_python_cache():
+ print_status("Cache cleared successfully. Update complete.", "SUCCESS")
+ else:
+ print_status("Cache clearing failed, but update was successful.", "WARNING")
+
+ print_final_result(True, "Package updated (version verification unclear)")
  else:
- print_status("Upgrade failed. Current version remains: {}".format(new_version), "ERROR")
+ print_status("Upgrade verification failed. Could not detect new version.", "ERROR")
  print_final_result(False, "Upgrade verification failed")
  else:
  print_final_result(False, "Upgrade process failed")
MediCafe/__init__.py CHANGED
@@ -27,7 +27,7 @@ Smart Import System:
  api_suite = get_api_access()
  """

- __version__ = "0.250728.9"
+ __version__ = "0.250814.3"
  __author__ = "Daniel Vidaud"
  __email__ = "daniel@personalizedtransformation.com"

MediLink/__init__.py CHANGED
@@ -22,7 +22,7 @@ Smart Import Integration:
  datamgmt = get_components('medilink_datamgmt')
  """

- __version__ = "0.250728.9"
+ __version__ = "0.250814.3"
  __author__ = "Daniel Vidaud"
  __email__ = "daniel@personalizedtransformation.com"

{medicafe-0.250813.2.dist-info → medicafe-0.250814.3.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: medicafe
- Version: 0.250813.2
+ Version: 0.250814.3
  Summary: MediCafe
  Home-page: https://github.com/katanada2/MediCafe
  Author: Daniel Vidaud
{medicafe-0.250813.2.dist-info → medicafe-0.250814.3.dist-info}/RECORD RENAMED
@@ -1,23 +1,21 @@
- MediBot/MediBot.bat,sha256=el_8wWuikLkL-cmMX63L3VC0EqcuulkIFaT4xv7suzY,26687
- MediBot/MediBot.py,sha256=9u22KWhA7S3gKEXdo_4jnUFXpKqQb2lEeqsVIVSI62k,37089
+ MediBot/MediBot.bat,sha256=67wcth3JTvS1v0ycagk6HjY4MpJ8BoFOIUfC6ZPhczI,26687
+ MediBot/MediBot.py,sha256=IiDo987RlIrot_opJVsje4ZBPVSTXZWb-ZNPXpdcZJQ,37092
  MediBot/MediBot_Charges.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  MediBot/MediBot_Crosswalk_Library.py,sha256=jIaYdoxfT9YgQ5dWZC4jmTYxRX1Y14X-AJ6YEjR58Gc,25158
  MediBot/MediBot_Crosswalk_Utils.py,sha256=KVq2budurwdHB7dglOuPZEQGup-hjD1SeSPyySLpy9M,39015
  MediBot/MediBot_Post.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  MediBot/MediBot_Preprocessor.py,sha256=zAcfyuE8wl9JRzLGsUnnXiHxAr-hbCCIB2M-Jb3LUqI,16203
- MediBot/MediBot_Preprocessor_lib.py,sha256=BiVbbDTSHXLYVxxSxr7oo7Z6-Jqjsb49YWKoTO7fzTU,76110
- MediBot/MediBot_UI.py,sha256=DjdSSljLnbaoij04AA8F55JQ5xmp5PYMrlyrnWEZ6D8,17659
+ MediBot/MediBot_Preprocessor_lib.py,sha256=zWvPVGHxEA3tXQQ9AMb9jNjMeb9kXMfrNcTXHd6zFh8,76940
+ MediBot/MediBot_UI.py,sha256=RFsa5et6nveX4_PySBgIh4sTDur7_7OEntRl2NhDzzU,18329
  MediBot/MediBot_dataformat_library.py,sha256=D46fdPtxcgfWTzaLBtSvjtozzZBNqNiODgu4vKMZrBg,10746
  MediBot/MediBot_docx_decoder.py,sha256=gn7I7Ng5khVIzU0HTTOqi31YSSn1yW8Pyk-i_P9r1oA,32472
  MediBot/MediBot_smart_import.py,sha256=Emvz7NwemHGCHvG5kZcUyXMcCheidbGKaPfOTg-YCEs,6684
- MediBot/MediPost.py,sha256=C1hZJFr65rN6F_dckjdBxFC0vL2CoqY9W3YFqU5HXtE,336
- MediBot/PDF_to_CSV_Cleaner.py,sha256=ZZphmq-5K04DkrZNlcwNAIoZPOD_ROWvS3PMkKFxeiM,8799
- MediBot/__init__.py,sha256=6IdVLXaWxV5ZdpefonWrC1R8RsJn4V26K0PmUEZ_vU8,3192
+ MediBot/__init__.py,sha256=dPKix9JOR-ka_z0eW9lY0qL_lzIKj8fa1S1_l16JTYY,3192
  MediBot/get_medicafe_version.py,sha256=uyL_UIE42MyFuJ3SRYxJp8sZx8xjTqlYZ3FdQuxLduY,728
  MediBot/update_json.py,sha256=vvUF4mKCuaVly8MmoadDO59M231fCIInc0KI1EtDtPA,3704
- MediBot/update_medicafe.py,sha256=VTcQA_tfVILSAV29DM8nG-X4RJAQYdEiXY6oaeZfy4I,29072
+ MediBot/update_medicafe.py,sha256=IDkp4CU4uwpxj89VZ_aDCwpHsH5LAIWqzKI15_aAJiU,32093
  MediCafe/MediLink_ConfigLoader.py,sha256=Ia79dZQBvgbc6CtOaNZVlFHaN-fvUmJRpmmVHz_MFv8,8205
- MediCafe/__init__.py,sha256=DF0XUu3G43AejXvEmd5aCyy0GDQahQD0pMwexmxem-E,5477
+ MediCafe/__init__.py,sha256=ipDQxLhHT1C9w8H3LwlrQUaioBV5F1rcS1of-U12n70,5477
  MediCafe/__main__.py,sha256=mRNyk3D9Ilnu2XhgVI_rut7r5Ro7UIKtwV871giAHI8,12992
  MediCafe/api_core.py,sha256=IZaBXnP4E7eHzxVbCk2HtxywiVBuhaUyHeaqss8llgY,66378
  MediCafe/api_core_backup.py,sha256=Oy_Fqt0SEvGkQN1Oqw5iUPVFxPEokyju5CuPEb9k0OY,18686
@@ -31,43 +29,32 @@ MediCafe/migration_helpers.py,sha256=48GnP4xcgvDNNlzoWsKASCpF4H0KnyveHPbz6kjQy50
  MediCafe/smart_import.py,sha256=23pttO7QTZyvOP9HR9czDIv7lUsE1sHaE2CWC94Xxxo,19800
  MediCafe/submission_index.py,sha256=35gz8Anx1dIqG1I14GvuLY0nTO4dSBr2YsZwof9aIQg,11175
  MediLink/InsuranceTypeService.py,sha256=FKWC1nRfKV_OtCDUtZustauXNhmCYDFiY9jsAGHPPUM,2178
- MediLink/MediLink.py,sha256=p91MYghOCbNf3ikTzm5P9V1Luj035yd83EDbQ-Ov6oM,33258
- MediLink/MediLink_277_decoder.py,sha256=Z3hQK2j-YzdXjov6aDlDRc7M_auFBnl3se4OF5q6_04,4358
  MediLink/MediLink_837p_cob_library.py,sha256=glc7SJBDx0taCGmwmCs81GFJJcvA_D7nycIkTfmIuwE,30650
  MediLink/MediLink_837p_encoder.py,sha256=9rMYpvfQ-KwS1Xjo1fKtg1emxdYZBMkr9QAQPP7myeg,32191
  MediLink/MediLink_837p_encoder_library.py,sha256=6NnB5yPt46dmn75DTSsMCIw284FrHR7Vf-3Wr9aVBzM,69985
  MediLink/MediLink_837p_utilities.py,sha256=28H4F6HNXgNHpdnardKWeTPuXgVSzuvu5QEPmkCGp8Q,16285
  MediLink/MediLink_API_Generator.py,sha256=UUml-PBU3BQduun8RzFH4zfUuo6-p5Ufg7b6Vic-VrY,11171
- MediLink/MediLink_API_v2.py,sha256=mcIgLnXPS_NaUBrkKJ8mxCUaQ0AuQUeU1vG6DoplbVY,7733
- MediLink/MediLink_API_v3.py,sha256=5-4ZcQLfubhIfmAMiXodVHUPvLYbjwMQg4VUbbktZvw,48279
- MediLink/MediLink_APIs.py,sha256=jm3f9T034MJKH8A_CIootULoeuk7H8s7PazpFZRCbKI,6222
  MediLink/MediLink_Azure.py,sha256=Ow70jctiHFIylskBExN7WUoRgrKOvBR6jNTnQMk6lJA,210
  MediLink/MediLink_ClaimStatus.py,sha256=cO9drHSIBtltHfLSKeEf18_m75ixpxIOao5I-TGiHiI,18100
- MediLink/MediLink_ConfigLoader.py,sha256=u9ecB0SIN7zuJAo8KcoQys95BtyAo-8S2n4mRd0S3XU,4356
  MediLink/MediLink_DataMgmt.py,sha256=dKJtq8BibgGsfnTyWmayX4cTPWB8zgFMsgwKJVb7cJ8,52369
  MediLink/MediLink_Decoder.py,sha256=1gzdybNg4Vv69s5PNbX8bPNrXT_N_kPpFpt2HpkauWA,16430
  MediLink/MediLink_Deductible.py,sha256=fLBDQHDcTk86JtJUtUwrVl-o0KfNackFrWosMxr7qHU,45559
  MediLink/MediLink_Deductible_Validator.py,sha256=2g-lZd-Y5fJ1mfP87vM6oABg0t5Om-7EkEkilVvDWYY,22888
  MediLink/MediLink_Display_Utils.py,sha256=QyHk23VU1rJtNZr_QhtL76Avo66CEc7MZU84uIs-1Lo,4187
  MediLink/MediLink_Down.py,sha256=q4ByEh1h1WSHUyRy68e8wT8pXMXP6q8NaqS1LKveMFo,28093
- MediLink/MediLink_ERA_decoder.py,sha256=MiOtDcXnmevPfHAahIlTLlUc14VcQWAor9Xa7clA2Ts,8710
  MediLink/MediLink_Gmail.py,sha256=8iQjqcJMSa_Zfr5azR0dShKAQeXqt-9C-s8seYB9pic,23961
- MediLink/MediLink_GraphQL.py,sha256=O6OCaumT0zIC7YcIAwLOOYxiQnYhoMc48UL8ilNIBec,45720
  MediLink/MediLink_Mailer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  MediLink/MediLink_Parser.py,sha256=eRVZ4ckZ5gDOrcvtCUZP3DOd3Djly66rCIk0aYXLz14,12567
  MediLink/MediLink_PatientProcessor.py,sha256=9r2w4p45d30Tn0kbXL3j5574MYOehP83tDirNOw_Aek,19977
  MediLink/MediLink_Scan.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  MediLink/MediLink_Scheduler.py,sha256=UJvxhDvHraqra2_TlQVlGeh5jRFrrfK6nCVUHnKOEMY,38
- MediLink/MediLink_StatusCheck.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  MediLink/MediLink_UI.py,sha256=6OR2obKUuBO12l3k6B53MXu1a3fCiV3FVBE2QrIYRqk,9279
  MediLink/MediLink_Up.py,sha256=QFdUtpEySc7ceZfFJ2q9XWClnhYJssG-UywFFedlv9w,34899
- MediLink/MediLink_api_utils.py,sha256=dsGLRPRvSwfXPLrrfgnkIKGDIF00wE93TrDB6HMDPQU,11857
- MediLink/MediLink_batch.bat,sha256=nqL5QwCLyRQFSPdv6kgtcV_cpky7FXSOWVl6OxjRXb4,118
  MediLink/MediLink_insurance_utils.py,sha256=g741Fj2K26cMy0JX5d_XavMw9LgkK6hjaUJYfysT7t8,9301
  MediLink/MediLink_main.py,sha256=Y26Bl_7KNIbz18lbgK-18dkqANfWK6QO4sQLFFRQGGw,23337
  MediLink/MediLink_smart_import.py,sha256=B5SfBn_4bYEWJJDolXbjnwKx_-MaqGZ76LyXQwWDV80,9838
  MediLink/Soumit_api.py,sha256=5JfOecK98ZC6NpZklZW2AkOzkjvrbYxpJpZNH3rFxDw,497
- MediLink/__init__.py,sha256=Z4Uxt4XZk4n-GwAkUoEeFiL-D7xHbttYiiWGjgKT_ng,3391
+ MediLink/__init__.py,sha256=slWM3Msb3llsp3DnDUPfkAflPeXj2t-Wr0u0z2lTBbA,3391
  MediLink/gmail_http_utils.py,sha256=gtqCCrzJC7e8JFQzMNrf7EbK8na2h4sfTu-NMaZ_UHc,4006
  MediLink/gmail_oauth_utils.py,sha256=MLuzO6awBanV7Ee2gOUrkWrxz8-Htwz2BEIFjLw9Izs,3734
  MediLink/insurance_type_integration_test.py,sha256=pz2OCXitAznqDciYn6OL9M326m9CYU7YiK-ynssdQ5g,15172
@@ -77,9 +64,9 @@ MediLink/test_cob_library.py,sha256=wUMv0-Y6fNsKcAs8Z9LwfmEBRO7oBzBAfWmmzwoNd1g,
  MediLink/test_timing.py,sha256=yH2b8QPLDlp1Zy5AhgtjzjnDHNGhAD16ZtXtZzzESZw,2042
  MediLink/test_validation.py,sha256=FJrfdUFK--xRScIzrHCg1JeGdm0uJEoRnq6CgkP2lwM,4154
  MediLink/webapp.html,sha256=JPKT559aFVBi1r42Hz7C77Jj0teZZRumPhBev8eSOLk,19806
- medicafe-0.250813.2.dist-info/LICENSE,sha256=65lb-vVujdQK7uMH3RRJSMwUW-WMrMEsc5sOaUn2xUk,1096
- medicafe-0.250813.2.dist-info/METADATA,sha256=2sZpkNcltzPkLNnx5QqEFpnZbYEd9vQT16cyBqGPWmc,3384
- medicafe-0.250813.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- medicafe-0.250813.2.dist-info/entry_points.txt,sha256=m3RBUBjr-xRwEkKJ5W4a7NlqHZP_1rllGtjZnrRqKe8,52
- medicafe-0.250813.2.dist-info/top_level.txt,sha256=U6-WBJ9RCEjyIs0BlzbQq_PwedCp_IV9n1616NNV5zA,26
- medicafe-0.250813.2.dist-info/RECORD,,
+ medicafe-0.250814.3.dist-info/LICENSE,sha256=65lb-vVujdQK7uMH3RRJSMwUW-WMrMEsc5sOaUn2xUk,1096
+ medicafe-0.250814.3.dist-info/METADATA,sha256=WefSSHIwiHEqi3UkialwKYjsVMxabDwaN9ljXODHIkI,3384
+ medicafe-0.250814.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+ medicafe-0.250814.3.dist-info/entry_points.txt,sha256=m3RBUBjr-xRwEkKJ5W4a7NlqHZP_1rllGtjZnrRqKe8,52
+ medicafe-0.250814.3.dist-info/top_level.txt,sha256=U6-WBJ9RCEjyIs0BlzbQq_PwedCp_IV9n1616NNV5zA,26
+ medicafe-0.250814.3.dist-info/RECORD,,
MediBot/MediPost.py DELETED
@@ -1,5 +0,0 @@
- """
- Takes CSV from MediLink_Down.py and utilizes config and MediBot to post the CSV to Medisoft.
- This script now also processes ERAs and responses (277CA/277A) to update claims status and finalize billing records.
- Handles parsing and cleaning of input CSV files to ensure data accuracy and compliance with Medisoft requirements.
- """
MediBot/PDF_to_CSV_Cleaner.py DELETED
@@ -1,211 +0,0 @@
- import pandas as pd
- import re
-
- def load_csv(file_path):
- # Loads a CSV file and returns a pandas DataFrame
- return pd.read_csv(file_path, header=None)
-
- def segment_data(data):
- # Segments the data into individual patient records
-
- patient_records = []
- current_record = []
-
- for line in data.itertuples(index=False):
- # Convert the tuple to a list to process it easier
- line = list(line)
- # Flatten the list and filter out NaN values
- line = [item for item in line if pd.notna(item)]
-
- if line: # Make sure there is data in the line
- # Check for the delimiter indicating a new patient record
- if 'PATIENT INFORMATION' in line[0]:
- if current_record:
- # If there's an existing record, this means we've reached a new one
- # Save the current record and start a new one
- patient_records.append(current_record)
- current_record = []
- # Add the line to the current patient record
- current_record.extend(line)
-
- # Don't forget to add the last record after exiting the loop
- if current_record:
- patient_records.append(current_record)
-
- return patient_records
-
- # Function to extract key-value pairs from a patient record segment
- def extract_patient_data(patient_record):
- patient_data = {
- "Name": None,
- "Patient ID": None,
- "Address": None,
- "Home Phone": None,
- "DOB": None,
- "Gender": None
- }
-
- # Function to extract value after a specific key in the patient record
- def extract_value_after_key(lines, key):
- for index, line in enumerate(lines):
- if key in line:
- try:
- split_line = line.split('\n')
- if len(split_line) > 1:
- # Return the value only if it exists after the newline character
- return split_line[1].strip()
- except AttributeError:
- # Handle the case where 'line' is not a string and doesn't have the 'split' method
- print("Error extracting value after key:", line)
- pass
-
- # For each key in patient_data, extract its value from the patient_record
- for key in patient_data.keys():
- patient_data[key] = extract_value_after_key(patient_record, key)
- return patient_data
-
-
- def parse_insurance_info(patient_record):
- insurance_data = {
- "Primary Insurance": None,
- "Primary Policy Number": None,
- "Primary Group Number": None,
- "Secondary Insurance": None,
- "Secondary Policy Number": None,
- "Secondary Group Number": None
- }
-
- insurance_section_started = False
- secondary_insurance_detected = False
- group_header_detected = False
-
- for element in patient_record:
- if 'INSURANCE INFORMATION' in element:
- insurance_section_started = True
- secondary_insurance_detected = False
- continue
-
- if insurance_section_started:
- split_element = element.split('\n')
- if 'Primary Insurance' in element:
- insurance_data["Primary Insurance"] = element.split('\n')[1].strip() if len(element.split('\n')) > 1 else None
- elif 'Secondary Insurance' in element and len(split_element) > 1 and split_element[1].strip():
- insurance_data["Secondary Insurance"] = element.split('\n')[1].strip() if len(element.split('\n')) > 1 else None
- secondary_insurance_detected = True
- elif 'Policy Number' in element:
- split_element = element.split('\n')
- if len(split_element) > 1:
- if not insurance_data["Primary Policy Number"]:
- insurance_data["Primary Policy Number"] = split_element[1].strip()
- elif secondary_insurance_detected and not insurance_data["Secondary Policy Number"]:
- insurance_data["Secondary Policy Number"] = split_element[1].strip()
- elif 'Group Number' in element:
- #print("Group Detected: ", element, secondary_insurance_detected)
- group_header_detected = not group_header_detected # toggle between T/F to proxy as first or second position.
- split_element = element.split('\n')
- if len(split_element) > 1:
- if not insurance_data["Primary Group Number"] and group_header_detected:
- insurance_data["Primary Group Number"] = split_element[1].strip()
- elif secondary_insurance_detected and not insurance_data["Secondary Group Number"] and not group_header_detected:
- insurance_data["Secondary Group Number"] = split_element[1].strip()
-
- return insurance_data
-
- def structure_data(patient_data_list):
- # Define the column headers based on the sample data provided earlier
- column_headers = [
- "Name",
- "Patient ID",
- "Address",
- "Home Phone",
- "DOB",
- "Gender",
- "Primary Insurance",
- "Primary Policy Number",
- "Primary Group Number",
- "Secondary Insurance",
- "Secondary Policy Number",
- "Secondary Group Number"
- ]
-
- # Initialize a list to hold structured patient records
- structured_patient_records = []
-
- # Iterate over each patient record in the list
- for patient_record in patient_data_list:
- # Extract the basic patient data
- patient_data = extract_patient_data(patient_record)
- # Extract the insurance information
- insurance_data = parse_insurance_info(patient_record)
- # Merge the two dictionaries
- full_patient_data = {**patient_data, **insurance_data}
-
- # Add the cleaned and transformed data to the list
- structured_patient_records.append(full_patient_data)
-
- # Create the DataFrame with the structured patient data
- structured_patient_df = pd.DataFrame(structured_patient_records, columns=column_headers)
-
- # Return the structured DataFrame
- return structured_patient_df
-
- def validate_data(data_frame):
- # Performing Quality Assurance and Validation checks on the structured data
-
- # Completeness Check: Check for missing values in critical fields
- missing_values_check = data_frame.isnull().sum()
-
- # Consistency Check: Ensure data formats are consistent
- date_format_check = data_frame['DOB'].apply(lambda x: bool(re.match(r'\d{4}-\d{2}-\d{2}', x)) if pd.notnull(x) else True)
- phone_format_check = data_frame['Home Phone'].apply(lambda x: bool(re.match(r'\+\d-\d{3}-\d{3}-\d{4}', x)) if pd.notnull(x) else True)
-
- # Anomaly Detection: This can be complex and domain-specific. As a basic check, we can look for outliers in data like dates.
- dob_anomalies_check = data_frame['DOB'].describe()
-
- # Compile the results of the checks
- validation_results = {
- "Missing Values Check": missing_values_check,
- "Date Format Consistency": all(date_format_check),
- "Phone Format Consistency": all(phone_format_check),
- "DOB Anomalies Check": dob_anomalies_check
- }
-
- print(validation_results) # Display validation results
- return data_frame # Return the validated DataFrame
-
-
- # Main function to orchestrate the cleaning process
- def clean_patient_data(file_path):
- # Load the CSV file
- sxpatient_data = load_csv(file_path)
-
- # Segment the data
- segmented_patient_records = segment_data(sxpatient_data)
-
- # Structure the data
- structured_data_frame = structure_data(segmented_patient_records)
-
- # Validate the data
- validated_data = validate_data(structured_data_frame)
-
- return validated_data
-
- # Path to the CSV file with escaped backslashes
- file_path_sxpatient = 'C:\\Users\\danie\\OneDrive\\Desktop\\CSV02012024.CSV'
- # Define the file path for the output CSV file
- output_file_path = 'G:\\My Drive\\CocoWave\\XP typing bot\\cleaned_FEB01SXcsv_group.csv'
-
- # Call the main function to clean the patient data
- cleaned_patient_data = clean_patient_data(file_path_sxpatient)
-
- # Display the first few rows of the cleaned and validated data to verify the output
- print(cleaned_patient_data.head())
-
- # Save the processed data to a CSV file
- cleaned_patient_data.to_csv(output_file_path, index=False)
-
- print(f"Processed data saved to {output_file_path}")
-
- # Development Roadmap
-
- # Do not delete leading zeros from insurance numbers