medicafe 0.250812.6-py3-none-any.whl → 0.250813.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
MediCafe/api_core.py CHANGED
@@ -983,18 +983,18 @@ def is_test_mode(client, body, endpoint_type):
  def submit_uhc_claim(client, x12_request_data):
  """
  Submits a UHC claim and retrieves the claim acknowledgement details.
-
+
  This function first submits the claim using the provided x12 837p data. If the client is in Test Mode,
  it returns a simulated response. If Test Mode is not enabled, it submits the claim and then retrieves
  the claim acknowledgement details using the transaction ID from the initial response.
-
+
  NOTE: This function uses endpoints that may not be available in the new swagger version:
  - /Claims/api/claim-submission/v1 (claim submission)
  - /Claims/api/claim-details/v1 (claim acknowledgement)

  If these endpoints are deprecated in the new swagger, this function will need to be updated
  to use the new available endpoints.
-
+
  :param client: An instance of APIClient
  :param x12_request_data: The x12 837p data as a string
  :return: The final response containing the claim acknowledgement details or a dummy response if in Test Mode
@@ -1014,24 +1014,24 @@ def submit_uhc_claim(client, x12_request_data):
  endpoints = medi.get('endpoints', {})
  claim_submission_url = endpoints.get(endpoint_name, {}).get('additional_endpoints', {}).get('claim_submission', '')
  claim_details_url = endpoints.get(endpoint_name, {}).get('additional_endpoints', {}).get('claim_details', '')
-
+
  MediLink_ConfigLoader.log("Claim Submission URL: {}".format(claim_submission_url), level="INFO")
  MediLink_ConfigLoader.log("Claim Details URL: {}".format(claim_details_url), level="INFO")
-
+
  # Headers for the request
  headers = {'Content-Type': 'application/json'}
-
+
  # Request body for claim submission
  claim_body = {'x12RequestData': x12_request_data}
-
+
  MediLink_ConfigLoader.log("Claim Body Keys: {}".format(list(claim_body.keys())), level="INFO")
  MediLink_ConfigLoader.log("Headers: {}".format(json.dumps(headers, indent=2)), level="INFO")
-
+
  # Check if Test Mode is enabled and return simulated response if so
  test_mode_response = is_test_mode(client, claim_body, 'claim_submission')
  if test_mode_response:
  return test_mode_response
-
+
  # Make the API call to submit the claim
  try:
  MediLink_ConfigLoader.log("Making claim submission API call...", level="INFO")
@@ -1047,16 +1047,43 @@ def submit_uhc_claim(client, x12_request_data):

  # Prepare the request body for the claim acknowledgement retrieval
  acknowledgement_body = {'transactionId': transaction_id}
-
+
  # Check if Test Mode is enabled and return simulated response if so
  test_mode_response = is_test_mode(client, acknowledgement_body, 'claim_details')
  if test_mode_response:
  return test_mode_response
-
+
  # Make the API call to retrieve the claim acknowledgement details
  acknowledgement_response = client.make_api_call(endpoint_name, 'POST', claim_details_url, data=acknowledgement_body, headers=headers)
+
+ # Persist as unified ack event (best-effort)
+ try:
+ from MediCafe.submission_index import append_ack_event, ensure_submission_index
+ cfg, _ = MediLink_ConfigLoader.load_configuration()
+ receipts_root = extract_medilink_config(cfg).get('local_claims_path', None)
+ if receipts_root:
+ ensure_submission_index(receipts_root)
+ status_text = ''
+ try:
+ # Attempt to pull a readable status from the response
+ status_text = acknowledgement_response.get('status') or acknowledgement_response.get('message') or ''
+ except Exception:
+ status_text = ''
+ append_ack_event(
+ receipts_root,
+ '', # claim_key unknown here
+ status_text,
+ 'API-277',
+ 'uhcapi',
+ {'transactionId': transaction_id},
+ 'api_ack',
+ int(time.time())
+ )
+ except Exception:
+ pass
+
  return acknowledgement_response
-
+
  except Exception as e:
  print("Error during claim processing: {}".format(e))
  raise
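For readability, here is how the positional arguments of the new append_ack_event(...) call above line up with the parameter names declared in MediCafe/submission_index.py (see the hunk that follows). This is an annotated restatement with hypothetical values, not additional package code:

    from MediCafe.submission_index import append_ack_event
    import time

    receipts_root = r'C:\MediCafe\claims'   # hypothetical; the real value comes from 'local_claims_path'
    transaction_id = 'TXN123'               # hypothetical transaction ID from the submission response
    status_text = 'Accepted'                # hypothetical readable status from the claim details response

    append_ack_event(
        receipts_root,                        # receipts_root
        '',                                   # claim_key (unknown at submission time)
        status_text,                          # status_text
        'API-277',                            # ack_type
        'uhcapi',                             # file_name (the endpoint label is reused here)
        {'transactionId': transaction_id},    # control_ids
        'api_ack',                            # source
        int(time.time())                      # ack_timestamp (epoch seconds)
    )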
MediCafe/submission_index.py CHANGED
@@ -22,6 +22,9 @@ META_FILENAME = 'submission_index_meta.json'
  INDEX_FILENAME = 'submission_index.jsonl'
  LOCK_FILENAME = 'submission_index.lock'

+ # New: ack field keys for richer timeline entries
+ ACK_FIELDS = ['ack_type', 'ack_timestamp', 'control_ids', 'source', 'file_name']
+

  def build_initial_index(receipts_root, lookback_days=200):
  """
@@ -123,6 +126,47 @@ def compute_claim_key(patient_id, payer_id, primary_insurance, date_of_service,
  ])


+ def append_ack_event(receipts_root, claim_key, status_text, ack_type, file_name, control_ids, source, ack_timestamp=None):
+ """
+ Append a lightweight ack/timeline event to the index. XP/Py3.4/ASCII-safe.
+ - claim_key may be empty if unknown. Caller should pass when available.
+ - control_ids is a dict with optional ISA/GS/ST/TRN or transactionId.
+ """
+ try:
+ _ensure_files_exist(receipts_root)
+ event = {
+ 'claim_key': claim_key or '',
+ 'patient_id': '',
+ 'payer_id': '',
+ 'primary_insurance': '',
+ 'dos': '',
+ 'endpoint': source or 'download_ack',
+ 'submitted_at': '',
+ 'receipt_file': file_name or '',
+ 'status': status_text or '',
+ 'notes': 'ack event',
+ }
+ # Attach ack fields with basic validation
+ try:
+ event['ack_type'] = ack_type or ''
+ event['ack_timestamp'] = ack_timestamp or int(time.time())
+ event['control_ids'] = control_ids or {}
+ event['source'] = source or ''
+ event['file_name'] = file_name or ''
+ except Exception:
+ pass
+ path = _index_path(receipts_root)
+ line = json.dumps(event)
+ f = open(path, 'a')
+ try:
+ f.write(line)
+ f.write("\n")
+ finally:
+ f.close()
+ except Exception:
+ pass
+
+
  # ------------------------- ASCII-safe meta/lock helpers -----------------------

  def _meta_path(root_dir):
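Each call above appends one JSON object per line to submission_index.jsonl. A minimal sketch of reading those ack events back, assuming the index file sits directly under the receipts root (the _index_path helper is not shown in this diff, so the exact location is an assumption):

    import json
    import os

    receipts_root = r'C:\MediCafe\claims'  # hypothetical receipts root
    index_path = os.path.join(receipts_root, 'submission_index.jsonl')

    # Ack events are ordinary index rows that also carry the ACK_FIELDS keys.
    with open(index_path, 'r') as f:
        for line in f:
            event = json.loads(line)
            if event.get('ack_type'):
                print(event['ack_type'], event.get('status', ''), event.get('file_name', ''))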
MediLink/MediLink_DataMgmt.py CHANGED
@@ -330,7 +330,12 @@ def operate_winscp(operation_type, files, endpoint_config, local_storage_path, c
  local_storage_path = validate_local_storage_path(local_storage_path, config)

  remote_directory = get_remote_directory(endpoint_config, operation_type)
- command = build_command(winscp_path, winscp_log_path, endpoint_config, remote_directory, operation_type, files, local_storage_path)
+ if operation_type == "download":
+ # Prefer explicit ack-focused mask if not provided by endpoint
+ filemask = endpoint_config.get('filemask') or ['era', '277', '277ibr', '277ebr', '999']
+ else:
+ filemask = None
+ command = build_command(winscp_path, winscp_log_path, endpoint_config, remote_directory, operation_type, files, local_storage_path, newer_than=None, filemask=filemask)

  if config.get("TestMode", True):
  MediLink_ConfigLoader.log("Test mode is enabled. Simulating operation.")
@@ -470,6 +475,48 @@ def get_remote_directory(endpoint_config, operation_type):
  MediLink_ConfigLoader.log("Critical Error: Endpoint config is missing key: {}".format(e))
  raise RuntimeError("Configuration error: Missing required remote directory in endpoint configuration.")

+ def normalize_filemask(filemask):
+ """
+ Normalize various filemask inputs into WinSCP-compatible string.
+ Supports list of extensions, comma-separated string, or dict with 'extensions' and other filters.
+ Falls back to '*' when input is invalid.
+ """
+ try:
+ if not filemask:
+ return '*'
+ if isinstance(filemask, list):
+ parts = []
+ for ext in filemask:
+ s = str(ext).strip().lstrip('*.').lstrip('.')
+ if s:
+ parts.append('*.{}'.format(s))
+ return '|'.join(parts) if parts else '*'
+ if isinstance(filemask, dict):
+ exts = filemask.get('extensions', [])
+ other = []
+ for k, v in filemask.items():
+ if k == 'extensions':
+ continue
+ other.append(str(v))
+ ext_part = normalize_filemask(exts)
+ other_part = ';'.join(other)
+ if ext_part and other_part:
+ return '{};{}'.format(ext_part, other_part)
+ return ext_part or other_part or '*'
+ if isinstance(filemask, str):
+ # Support comma-separated or pipe-separated lists of extensions
+ raw = filemask.replace(' ', '')
+ if any(sep in raw for sep in [',', '|']):
+ tokens = raw.replace('|', ',').split(',')
+ return normalize_filemask([t for t in tokens if t])
+ # If looks like an extension, prefix
+ s = raw.lstrip('*.').lstrip('.')
+ if s and all(ch.isalnum() for ch in s):
+ return '*.{}'.format(s)
+ return raw or '*'
+ except Exception:
+ return '*'
+
  def build_command(winscp_path, winscp_log_path, endpoint_config, remote_directory, operation_type, files, local_storage_path, newer_than=None, filemask=None):
  # Log the operation type
  MediLink_ConfigLoader.log("[Build Command] Building WinSCP command for operation type: {}".format(operation_type))
@@ -581,14 +628,7 @@ def build_command(winscp_path, winscp_log_path, endpoint_config, remote_director
  # 5. Add validation for WinSCP-compatible patterns
  # 6. Add logging for debugging filemask translations
  # 7. XP QUIRK: Prefer simple masks (e.g., *.csv|*.txt) and avoid complex AND/OR until verified on XP.
- if isinstance(filemask, list):
- filemask_str = '|'.join(['*.' + ext for ext in filemask])
- elif isinstance(filemask, dict):
- filemask_str = '|'.join(['*.' + ext for ext in filemask.keys()])
- elif isinstance(filemask, str):
- filemask_str = filemask # Assume it's already in the correct format
- else:
- filemask_str = '*' # Default to all files if filemask is None or unsupported type
+ filemask_str = normalize_filemask(filemask)
  else:
  filemask_str = '*' # Default to all files if filemask is None

@@ -659,38 +699,25 @@ def execute_winscp_command(command, operation_type, files, local_storage_path):
  MediLink_ConfigLoader.log("WinSCP {} operation completed successfully.".format(operation_type))

  if operation_type == 'download':
- downloaded_files = list_downloaded_files(local_storage_path)
- # TODO (HIGH PRIORITY - WinSCP Path Configuration Issue):
- # PROBLEM: WinSCP is not downloading files to the expected local_storage_path directory.
- # The list_downloaded_files() function is checking the wrong location.
- #
- # XP/WinSCP SUGGESTION:
- # - Add config override 'winscp_download_path' to explicitly set WinSCP's target directory.
- # - If set, prefer list_downloaded_files(config['MediLink_Config']['winscp_download_path']).
- # - Otherwise, parse the WinSCP log to detect the actual path and fall back to local_storage_path.
- #
- # INVESTIGATION STEPS:
- # 1. Check WinSCP logs to determine actual download destination:
- # - Look in config['MediLink_Config']['local_claims_path'] + "winscp_download.log"
- # - Parse log entries for "file downloaded to:" or similar patterns
- # 2. Compare actual WinSCP download path vs configured local_storage_path
- # 3. Check if WinSCP uses different path conventions (forward/backward slashes)
- #
- # IMPLEMENTATION OPTIONS:
- # Option A: Fix WinSCP command to use correct target directory
- # - Update the lcd_command generation in execute_winscp_command()
- # - Ensure local_storage_path is properly escaped for WinSCP
- # Option B: Update list_downloaded_files() to check actual WinSCP location
- # - Add function get_actual_winscp_download_path() that parses logs
- # - Call list_downloaded_files(get_actual_winscp_download_path())
- # Option C: Add configuration parameter for WinSCP-specific download path
- # - Add 'winscp_download_path' to config
- # - Default to local_storage_path if not specified
- #
- # RECOMMENDED: Option A (fix root cause) + Option C (explicit config)
- # FILES TO MODIFY: This file (execute_winscp_command, list_downloaded_files functions)
- # TESTING: Verify downloads work correctly after fix with various file types
- MediLink_ConfigLoader.log("Files currently located in local_storage_path: {}".format(downloaded_files), level="DEBUG")
+ # Prefer configured override if present
+ winscp_download_path = None
+ try:
+ from MediCafe.core_utils import extract_medilink_config
+ config, _ = MediLink_ConfigLoader.load_configuration()
+ medi = extract_medilink_config(config)
+ winscp_download_path = medi.get('winscp_download_path')
+ except Exception:
+ winscp_download_path = None
+
+ target_dir = winscp_download_path or local_storage_path
+ downloaded_files = list_downloaded_files(target_dir)
+ MediLink_ConfigLoader.log("Files currently located in target directory ({}): {}".format(target_dir, downloaded_files), level="DEBUG")
+
+ if not downloaded_files and winscp_download_path and winscp_download_path != local_storage_path:
+ # Fallback to original path if override empty
+ fallback_files = list_downloaded_files(local_storage_path)
+ MediLink_ConfigLoader.log("Fallback to local_storage_path yielded: {}".format(fallback_files), level="DEBUG")
+ downloaded_files = fallback_files

  if not downloaded_files:
  MediLink_ConfigLoader.log("No files were downloaded or an error occurred during the listing process.", level="WARNING")
@@ -727,8 +754,12 @@ def list_downloaded_files(local_storage_path):
  except Exception as e:
  MediLink_ConfigLoader.log("Error occurred while listing files in {}: {}".format(local_storage_path, e), level="ERROR")

- # Ensure that the function always returns a list
- return downloaded_files
+ # Normalize to basenames so downstream move logic in MediLink_Down works cross-platform
+ try:
+ basenames = [os.path.basename(p) for p in downloaded_files]
+ return basenames
+ except Exception:
+ return downloaded_files

  def detect_new_files(directory_path, file_extension='.DAT'):
  """
MediLink/MediLink_Decoder.py CHANGED
@@ -20,7 +20,7 @@ else:
  return {}, {}
  def log(message, level="INFO"):
  print("[{}] {}".format(level, message))
- from MediLink_Parser import parse_era_content, parse_277_content, parse_277IBR_content, parse_277EBR_content, parse_dpt_content, parse_ebt_content, parse_ibt_content
+ from MediLink_Parser import parse_era_content, parse_277_content, parse_277IBR_content, parse_277EBR_content, parse_dpt_content, parse_ebt_content, parse_ibt_content, parse_999_content

  # Define new_fieldnames globally
  new_fieldnames = ['Claim #', 'Payer', 'Status', 'Patient', 'Proc.', 'Serv.', 'Allowed', 'Paid', 'Pt Resp', 'Charged']
@@ -75,7 +75,8 @@ def process_decoded_file(file_path, output_directory, return_records=False, debu
  '277EBR': parse_277EBR_content,
  'DPT': parse_dpt_content,
  'EBT': parse_ebt_content,
- 'IBT': parse_ibt_content
+ 'IBT': parse_ibt_content,
+ '999': parse_999_content
  }

  parse_function = parse_functions.get(file_type)
@@ -143,21 +144,26 @@ def format_records(records, file_type):
  claim_number = record.get('Chart Number', '')
  elif file_type == 'EBT':
  claim_number = record.get('Patient Control Number', '')
+ elif file_type == '277':
+ claim_number = record.get('Claim #', '')
+ elif file_type == '999':
+ claim_number = '' # 999 lacks a direct claim number
  else:
  claim_number = '' # Default to empty if file type is not recognized

- # Skip records without a claim number
- if not claim_number:
+ # Skip records without a claim number, except for 999 summary/detail rows
+ if not claim_number and file_type != '999':
  log("Record {} missing claim_number. Skipping.".format(i + 1), level="WARNING")
  continue

  # Check for duplicates (within this file and across files in this run)
- if claim_number in seen_claim_numbers or claim_number in GLOBAL_SEEN_CLAIM_NUMBERS:
+ if claim_number and (claim_number in seen_claim_numbers or claim_number in GLOBAL_SEEN_CLAIM_NUMBERS):
  log("Duplicate claim_number {} found at record {}. Skipping.".format(claim_number, i + 1), level="DEBUG")
  continue

- seen_claim_numbers.add(claim_number)
- GLOBAL_SEEN_CLAIM_NUMBERS.add(claim_number) # Add to cross-file set so later files also skip
+ if claim_number:
+ seen_claim_numbers.add(claim_number)
+ GLOBAL_SEEN_CLAIM_NUMBERS.add(claim_number) # Add to cross-file set so later files also skip

  unified_record = UnifiedRecord()

@@ -189,8 +195,7 @@ def format_records(records, file_type):
  'A': 'Accepted',
  'R': 'Rejected',
  }
- # unified_record.status = status_mapping.get(message_type, message_type)
- unified_record.status = record.get('Message', '')
+ unified_record.status = record.get('Message', '') or status_mapping.get(message_type, message_type)
  unified_record.payer = record.get('Message Initiator', '')
  unified_record.patient = record.get('Patient Name', '')
  unified_record.proc_date = format_date(record.get('To Date', ''))
@@ -204,6 +209,30 @@ def format_records(records, file_type):
  log("Skipped non-claim EBT Record {}: {}".format(i + 1, record), level="DEBUG")
  continue

+ elif file_type == '277':
+ unified_record.claim_number = claim_number
+ unified_record.status = record.get('Status', '')
+ unified_record.patient = record.get('Patient', '')
+ unified_record.proc_date = format_date(record.get('Proc.', ''))
+ unified_record.serv_date = format_date(record.get('Serv.', ''))
+ unified_record.allowed = ''
+ unified_record.paid = record.get('Paid', '')
+ unified_record.pt_resp = ''
+ unified_record.charged = record.get('Charged', '')
+
+ elif file_type == '999':
+ # Show 999 summary rows; leave claim_number empty
+ unified_record.claim_number = ''
+ unified_record.status = record.get('Status', '')
+ unified_record.patient = ''
+ unified_record.payer = record.get('Functional ID', '')
+ unified_record.proc_date = ''
+ unified_record.serv_date = ''
+ unified_record.allowed = ''
+ unified_record.paid = ''
+ unified_record.pt_resp = ''
+ unified_record.charged = ''
+
  # Append the unified record to the list
  formatted_records.append(unified_record)

MediLink/MediLink_Down.py CHANGED
@@ -45,6 +45,15 @@ except ImportError:
  def tqdm(iterable, **kwargs):
  return iterable

+ try:
+ from MediCafe.submission_index import append_submission_record as _append_submission_record, ensure_submission_index as _ensure_submission_index, append_ack_event as _append_ack_event
+ except ImportError:
+ # Fallback if submission_index not available
+ _append_submission_record = None
+ _ensure_submission_index = None
+ _append_ack_event = None
+
+
  def handle_files(local_storage_path, downloaded_files):
  """
  Moves downloaded files to the appropriate directory and translates them to CSV format.
@@ -55,15 +64,16 @@ def handle_files(local_storage_path, downloaded_files):
  local_response_directory = os.path.join(local_storage_path, "responses")
  os.makedirs(local_response_directory, exist_ok=True)

- # Supported file extensions
- file_extensions = ['.era', '.277', '.277ibr', '.277ebr', '.dpt', '.ebt', '.ibt', '.txt']
+ # Supported file extensions (enable ERA/277/999; keep EBT)
+ file_extensions = ['.era', '.277', '.277ibr', '.277ebr', '.999', '.dpt', '.ebt', '.ibt', '.txt']

  files_moved = []

  for file in downloaded_files:
- if any(file.lower().endswith(ext) for ext in file_extensions): # Case-insensitive match
+ lower = file.lower()
+ if any(lower.endswith(ext) for ext in file_extensions): # Case-insensitive match
  source_path = os.path.join(local_storage_path, file)
- destination_path = os.path.join(local_response_directory, file)
+ destination_path = os.path.join(local_response_directory, os.path.basename(file))

  try:
  shutil.move(source_path, destination_path)
@@ -71,6 +81,8 @@ def handle_files(local_storage_path, downloaded_files):
  files_moved.append(destination_path)
  except Exception as e:
  log("Error moving file '{}' to '{}': {}".format(file, destination_path, e), level="ERROR")
+ else:
+ log("Skipping unsupported file '{}'.".format(file), level="DEBUG")

  if not files_moved:
  log("No files were moved. Ensure that files with supported extensions exist in the download directory.", level="WARNING")
@@ -93,14 +105,15 @@ def translate_files(files, output_directory):
  translated_files = []
  consolidated_records = []

- # Supported file extensions with selector
+ # Enable processing for ERA, 277 family, 999, and EBT
  file_type_selector = {
- '.era': False,
- '.277': False,
- '.277ibr': False,
- '.277ebr': False,
+ '.era': True,
+ '.277': True,
+ '.277ibr': True,
+ '.277ebr': True,
+ '.999': True,
  '.dpt': False,
- '.ebt': True, # Only EBT files are processed
+ '.ebt': True,
  '.ibt': False,
  '.txt': False
  }
@@ -108,12 +121,24 @@ def translate_files(files, output_directory):
  file_counts = {ext: 0 for ext in file_type_selector.keys()}

  for file in files:
- ext = os.path.splitext(file)[1]
+ ext = os.path.splitext(file)[1].lower()
  if file_type_selector.get(ext, False): # Check if the file type is selected
  file_counts[ext] += 1

  try:
- records = process_decoded_file(os.path.join(output_directory, file), output_directory, return_records=True)
+ src_path = os.path.join(output_directory, os.path.basename(file))
+ records = process_decoded_file(src_path, output_directory, return_records=True)
+ # Annotate records with source metadata for downstream persistence
+ try:
+ mtime = os.path.getmtime(src_path)
+ except Exception:
+ mtime = None
+ for r in records:
+ try:
+ setattr(r, 'source_file', src_path)
+ setattr(r, 'source_mtime', mtime)
+ except Exception:
+ pass
  consolidated_records.extend(records)
  csv_file_path = os.path.join(output_directory, os.path.basename(file) + '_decoded.csv')
  translated_files.append(csv_file_path)
@@ -122,11 +147,34 @@ def translate_files(files, output_directory):
  log("Unsupported file type: {}".format(file), level="WARNING")
  except Exception as e:
  log("Error processing file {}: {}".format(file, e), level="ERROR")
+ else:
+ log("Skipping unselected file type for '{}'.".format(file), level="DEBUG")

  log("Detected and processed file counts by type:")
  for ext, count in file_counts.items():
  log("{}: {} files detected".format(ext, count), level="INFO")

+ # Simple, elegant summary for console UI
+ try:
+ if consolidated_records:
+ total = len(consolidated_records)
+ num_rejected = 0
+ num_accepted = 0
+ for r in consolidated_records:
+ status = getattr(r, 'status', '') if hasattr(r, 'status') else r.get('Status', '')
+ if status:
+ if ('Reject' in status) or (':' in status and status.upper().startswith('R')):
+ num_rejected += 1
+ elif ('Accept' in status) or (':' in status and status.upper().startswith('A')):
+ num_accepted += 1
+ print("\nAcknowledgements Summary:")
+ print(" Total records: {}".format(total))
+ print(" Accepted: {}".format(num_accepted))
+ print(" Rejected: {}".format(num_rejected))
+ print("")
+ except Exception:
+ pass
+
  return consolidated_records, translated_files

  def prompt_csv_export(records, output_directory):
@@ -134,6 +182,50 @@ def prompt_csv_export(records, output_directory):
  Prompts the user to export consolidated records to a CSV file.
  """
  if records:
+ # Persist lightweight ack events into receipts index (optional, best-effort)
+ try:
+ config, _ = load_configuration()
+ medi = extract_medilink_config(config)
+ receipts_root = medi.get('local_claims_path', None)
+ if receipts_root and _ensure_submission_index and _append_ack_event:
+ _ensure_submission_index(receipts_root)
+ for rec in records:
+ try:
+ # rec may be UnifiedRecord; convert
+ if hasattr(rec, 'to_dict'):
+ d = rec.to_dict()
+ else:
+ d = rec
+ claim_no = d.get('Claim #', '')
+ status_text = d.get('Status', '')
+ # infer ack_type by presence of fields
+ ack_type = ''
+ if d.get('Paid', '') != '' or d.get('Allowed', '') != '':
+ ack_type = 'ERA'
+ elif status_text and ':' in status_text:
+ ack_type = '277'
+ else:
+ ack_type = 'EBT' # default for text notifications
+ # Use file metadata when available
+ file_name = os.path.basename(getattr(rec, 'source_file', '')) if hasattr(rec, 'source_file') else 'responses'
+ ts = getattr(rec, 'source_mtime', None)
+ control_ids = {}
+ if claim_no:
+ _append_ack_event(
+ receipts_root,
+ '', # claim_key unknown here
+ status_text,
+ ack_type,
+ file_name,
+ control_ids,
+ 'download_ack',
+ int(ts) if isinstance(ts, (int, float)) else None
+ )
+ except Exception:
+ continue
+ except Exception:
+ pass
+
  user_input = input("Do you want to export the consolidated records to a CSV file? (y/n): ")
  if user_input.lower() == 'y':
  output_file_path = os.path.join(output_directory, "Consolidated_Records.csv")
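The ack_type inference in the loop above can be read as a three-way rule: ERA when payment fields are present, 277 when the status text is colon-delimited, otherwise EBT. A standalone restatement of that branch (not part of the package) with a few worked cases:

    def infer_ack_type(d):
        # Mirrors the inference above; d is the record converted to a dict.
        status_text = d.get('Status', '')
        if d.get('Paid', '') != '' or d.get('Allowed', '') != '':
            return 'ERA'
        if status_text and ':' in status_text:
            return '277'
        return 'EBT'

    assert infer_ack_type({'Status': 'Processed', 'Paid': '125.00', 'Allowed': ''}) == 'ERA'
    assert infer_ack_type({'Status': 'A1:19', 'Paid': '', 'Allowed': ''}) == '277'
    assert infer_ack_type({'Status': 'Accepted', 'Paid': '', 'Allowed': ''}) == 'EBT'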
MediLink/MediLink_Parser.py CHANGED
@@ -241,4 +241,83 @@ def parse_ibt_content(content, debug=False):
  for data in extracted_data:
  print(data)

- return extracted_data
+ return extracted_data
+
+ def parse_999_content(content, debug=False):
+ """
+ Minimal 999 Implementation Acknowledgment parser.
+ Extracts overall transaction set acknowledgment (AK9) and per-set (AK5) statuses when available.
+ Returns a list with a single summary dict plus optional per-set entries.
+ """
+ records = []
+ segments = content.split('~')
+ overall_status = None
+ functional_id = None
+ control_numbers = [] # AK2 ST02 values
+ per_set_statuses = [] # List of {'set_control': str, 'status': str}
+
+ for seg in segments:
+ parts = seg.split('*')
+ if not parts or not parts[0]:
+ continue
+ tag = parts[0]
+ if tag == 'AK1' and len(parts) > 1:
+ functional_id = parts[1]
+ elif tag == 'AK2' and len(parts) > 2:
+ # Transaction Set Acknowledgment - capture ST02 control number
+ control_numbers.append(parts[2])
+ elif tag == 'AK5' and len(parts) > 1:
+ # Transaction Set Response Trailer - status code in AK5-01 (A, E, R)
+ status_code = parts[1]
+ per_set_statuses.append({'status': status_code})
+ elif tag == 'AK9' and len(parts) > 1:
+ # Functional Group Response Trailer - overall status in AK9-01
+ overall_status = parts[1]
+
+ # Map X12 codes to friendly text
+ status_map = {'A': 'Accepted', 'E': 'Accepted with Errors', 'R': 'Rejected'}
+ overall_text = status_map.get(overall_status, overall_status or '')
+
+ summary = {
+ 'Ack Type': '999',
+ 'Functional ID': functional_id or '',
+ 'Status': overall_text,
+ 'Sets Acknowledged': len(control_numbers) if control_numbers else 0,
+ }
+ records.append(summary)
+
+ # Optionally include per-set detail rows
+ for idx, st in enumerate(per_set_statuses):
+ detail = {
+ 'Ack Type': '999',
+ 'Functional ID': functional_id or '',
+ 'Set #': str(idx + 1),
+ 'Status': status_map.get(st.get('status', ''), st.get('status', '')),
+ }
+ # Claim # not available in 999; leave out
+ records.append(detail)
+
+ if debug:
+ print('Parsed 999 Content:')
+ for r in records:
+ print(r)
+ return records
+
+ def determine_file_type(file_path):
+ file_extensions = {
+ '.era': 'ERA',
+ '.277': '277',
+ '.277ibr': '277IBR',
+ '.277ebr': '277EBR',
+ '.dpt': 'DPT',
+ '.ebt': 'EBT',
+ '.ibt': 'IBT',
+ '.999': '999'
+ }
+
+ for ext, file_type in file_extensions.items():
+ if file_path.endswith(ext):
+ return file_type
+
+ log("Unsupported file type for file: {}".format(file_path))
+ return None
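A worked example of the new 999 parser, traced by hand from the implementation above; the input is a minimal, hypothetical 999 fragment and the expected records are shown as comments:

    from MediLink_Parser import parse_999_content  # assumed import path (as used by MediLink_Decoder)

    sample_999 = "AK1*HC*123~AK2*999*0001~AK5*A~AK9*A*1*1*1~"
    records = parse_999_content(sample_999)

    # records[0] (summary row):
    #   {'Ack Type': '999', 'Functional ID': 'HC', 'Status': 'Accepted', 'Sets Acknowledged': 1}
    # records[1] (per-set detail row):
    #   {'Ack Type': '999', 'Functional ID': 'HC', 'Set #': '1', 'Status': 'Accepted'}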
MediLink/MediLink_main.py CHANGED
@@ -54,6 +54,10 @@ if PERFORMANCE_LOGGING:
  # - XP note: default to console prompts; optional UI later.
  # This already happens when MediLink is opened.

+ # Simple in-process scheduler for ack polls
+ _last_ack_updated_at = None
+ _scheduled_ack_checks = [] # list of epoch timestamps
+
  def _tools_menu(config, medi):
  """Low-use maintenance tools submenu."""
  while True:
@@ -179,12 +183,63 @@ def main_menu():
  if PERFORMANCE_LOGGING:
  print("Main menu initialization completed in {:.2f} seconds".format(menu_init_end - menu_start_time))

+ # Validate the calculated date range
+ try:
+ from datetime import datetime, timedelta
+ current_date = datetime.now()
+ start_date = current_date - timedelta(days=15) # Default to 15-day range
+ end_date = current_date - timedelta(days=1)
+ def validate_date_range(start_date, end_date):
+ if start_date > end_date:
+ raise ValueError("Start date cannot be after end date.")
+ if start_date < (current_date - timedelta(days=30)): # Ensure it's not too far in the past
+ raise ValueError("Start date must be within the last 30 days.")
+ if end_date < (current_date - timedelta(days=30)): # Ensure it's not too far in the past
+ raise ValueError("End date must be within the last 30 days.")
+ except ImportError:
+ print("Date validation requires the 'datetime' module. Please ensure it's installed.")
+ # Fallback to a safe date range within 30 days
+ end_date = current_date - timedelta(days=1)
+ start_date = end_date - timedelta(days=15) # 15-day range as fallback
+
+ end_date_str = end_date.strftime('%m/%d/%Y')
+ start_date_str = start_date.strftime('%m/%d/%Y')
+
+ # Boot-time one-time ack poll (silent policy: just show summary output)
+ try:
+ print("\nChecking acknowledgements (boot-time scan)...")
+ MediLink_Down.check_for_new_remittances(config)
+ _last_ack_updated_at = int(time.time())
+ except Exception:
+ pass
+
  while True:
+ # Run any due scheduled ack checks before showing menu
+ try:
+ now_ts = int(time.time())
+ if _scheduled_ack_checks:
+ due = [t for t in _scheduled_ack_checks if t <= now_ts]
+ if due:
+ print("\nAuto-checking acknowledgements (scheduled)...")
+ MediLink_Down.check_for_new_remittances(config)
+ _last_ack_updated_at = now_ts
+ # remove executed
+ _scheduled_ack_checks = [t for t in _scheduled_ack_checks if t > now_ts]
+ except Exception:
+ pass
+
  # Define static menu options for consistent numbering
  options = ["Check for new remittances", "Submit claims", "Exit", "Tools"]

  # Display the menu options.
  menu_display_start = time.time()
+ # Show last updated info if available
+ try:
+ if _last_ack_updated_at:
+ ts_str = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(_last_ack_updated_at))
+ print("Last acknowledgements update: {}".format(ts_str))
+ except Exception:
+ pass
  MediLink_UI.display_menu(options)
  menu_display_end = time.time()
  if PERFORMANCE_LOGGING:
@@ -201,9 +256,15 @@ def main_menu():
  # Handle remittance checking.
  remittance_start = time.time()
  MediLink_Down.check_for_new_remittances(config)
+ _last_ack_updated_at = int(time.time())
  remittance_end = time.time()
  if PERFORMANCE_LOGGING:
  print("Remittance check completed in {:.2f} seconds".format(remittance_end - remittance_start))
+ # UX hint: suggest deeper United details
+ try:
+ print("Tip: For United details, run the United Claims Status option for the same date window.")
+ except Exception:
+ pass
  elif choice == '2':
  if not all_files:
  print("No files available to submit. Please check for new remittances first.")
@@ -226,6 +287,14 @@ def main_menu():

  # Process the claims submission.
  handle_submission(detailed_patient_data, config, crosswalk)
+ # Schedule ack checks for SFTP-based systems post-submit: T+90s and T+7200s
+ try:
+ now_ts2 = int(time.time())
+ _scheduled_ack_checks.append(now_ts2 + 90)
+ _scheduled_ack_checks.append(now_ts2 + 7200)
+ print("Scheduled acknowledgements checks in 1-2 minutes and again ~2 hours.")
+ except Exception:
+ pass
  submission_end = time.time()
  if PERFORMANCE_LOGGING:
  print("Claims submission flow completed in {:.2f} seconds".format(submission_end - submission_start))
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: medicafe
- Version: 0.250812.6
+ Version: 0.250813.0
  Summary: MediCafe
  Home-page: https://github.com/katanada2/MediCafe
  Author: Daniel Vidaud
@@ -19,7 +19,7 @@ MediBot/update_medicafe.py,sha256=KuoGOsSvnNis9EgpAZe_j0Ny6ExVav3fnRQCAu4evnk,28
  MediCafe/MediLink_ConfigLoader.py,sha256=Ia79dZQBvgbc6CtOaNZVlFHaN-fvUmJRpmmVHz_MFv8,8205
  MediCafe/__init__.py,sha256=DF0XUu3G43AejXvEmd5aCyy0GDQahQD0pMwexmxem-E,5477
  MediCafe/__main__.py,sha256=mRNyk3D9Ilnu2XhgVI_rut7r5Ro7UIKtwV871giAHI8,12992
- MediCafe/api_core.py,sha256=1Vvez4aqJEYp5ymk6QAgmTCech1hadqYr92NptnxC1U,65245
+ MediCafe/api_core.py,sha256=rF-8XNc6ILSsoD_YQV-L9R_nW9_XAd0D4VMgqAMY5U4,66420
  MediCafe/api_core_backup.py,sha256=Oy_Fqt0SEvGkQN1Oqw5iUPVFxPEokyju5CuPEb9k0OY,18686
  MediCafe/api_factory.py,sha256=I5AeJoyu6m7oCrjc2OvVvO_4KSBRutTsR1riiWhTZV0,12086
  MediCafe/api_utils.py,sha256=KWQB0q1k5E6frOFFlKWcFpHNcqfrS7KJ_82672wbupw,14041
@@ -29,7 +29,7 @@ MediCafe/logging_config.py,sha256=auT65LN5oDEXVhkMeLke63kJHTWxYf2o8YihAfQFgzU,54
  MediCafe/logging_demo.py,sha256=TwUhzafna5pMdN3zSKGrpUWRqX96F1JGGsSUtr3dygs,1975
  MediCafe/migration_helpers.py,sha256=48GnP4xcgvDNNlzoWsKASCpF4H0KnyveHPbz6kjQy50,17737
  MediCafe/smart_import.py,sha256=23pttO7QTZyvOP9HR9czDIv7lUsE1sHaE2CWC94Xxxo,19800
- MediCafe/submission_index.py,sha256=PPKpNSBuWZQ7hgekIU7sRSB_yLXUlGOBKutaurbHfPA,9576
+ MediCafe/submission_index.py,sha256=35gz8Anx1dIqG1I14GvuLY0nTO4dSBr2YsZwof9aIQg,11175
  MediLink/InsuranceTypeService.py,sha256=FKWC1nRfKV_OtCDUtZustauXNhmCYDFiY9jsAGHPPUM,2178
  MediLink/MediLink.py,sha256=p91MYghOCbNf3ikTzm5P9V1Luj035yd83EDbQ-Ov6oM,33258
  MediLink/MediLink_277_decoder.py,sha256=Z3hQK2j-YzdXjov6aDlDRc7M_auFBnl3se4OF5q6_04,4358
@@ -44,17 +44,17 @@ MediLink/MediLink_APIs.py,sha256=jm3f9T034MJKH8A_CIootULoeuk7H8s7PazpFZRCbKI,622
  MediLink/MediLink_Azure.py,sha256=Ow70jctiHFIylskBExN7WUoRgrKOvBR6jNTnQMk6lJA,210
  MediLink/MediLink_ClaimStatus.py,sha256=cO9drHSIBtltHfLSKeEf18_m75ixpxIOao5I-TGiHiI,18100
  MediLink/MediLink_ConfigLoader.py,sha256=u9ecB0SIN7zuJAo8KcoQys95BtyAo-8S2n4mRd0S3XU,4356
- MediLink/MediLink_DataMgmt.py,sha256=GVKOkzJNRFZpvgcJ5WZi_2zLya_YH-w1hR4hhHDLvJ0,51840
- MediLink/MediLink_Decoder.py,sha256=194pMSZDs9vhQW6_gZB_s-j_TTq7GYCXH2k0rwbZQeE,14977
+ MediLink/MediLink_DataMgmt.py,sha256=dKJtq8BibgGsfnTyWmayX4cTPWB8zgFMsgwKJVb7cJ8,52369
+ MediLink/MediLink_Decoder.py,sha256=1gzdybNg4Vv69s5PNbX8bPNrXT_N_kPpFpt2HpkauWA,16430
  MediLink/MediLink_Deductible.py,sha256=fLBDQHDcTk86JtJUtUwrVl-o0KfNackFrWosMxr7qHU,45559
  MediLink/MediLink_Deductible_Validator.py,sha256=2g-lZd-Y5fJ1mfP87vM6oABg0t5Om-7EkEkilVvDWYY,22888
  MediLink/MediLink_Display_Utils.py,sha256=QyHk23VU1rJtNZr_QhtL76Avo66CEc7MZU84uIs-1Lo,4187
- MediLink/MediLink_Down.py,sha256=-jcj4KbecSQ1kmyx0V2VCjZJcotjVnZTE5Ig6XB1x8M,11830
+ MediLink/MediLink_Down.py,sha256=us3xKM5AcGpvqnbrKkV8iEt7MmLCkSU7CDFfCoqXO4o,16201
  MediLink/MediLink_ERA_decoder.py,sha256=MiOtDcXnmevPfHAahIlTLlUc14VcQWAor9Xa7clA2Ts,8710
  MediLink/MediLink_Gmail.py,sha256=8iQjqcJMSa_Zfr5azR0dShKAQeXqt-9C-s8seYB9pic,23961
  MediLink/MediLink_GraphQL.py,sha256=O6OCaumT0zIC7YcIAwLOOYxiQnYhoMc48UL8ilNIBec,45720
  MediLink/MediLink_Mailer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- MediLink/MediLink_Parser.py,sha256=w2ZD4minjwkaMz7nzP_r8v_Ow_uM5KHjpPSY8mIHcdE,9787
+ MediLink/MediLink_Parser.py,sha256=eRVZ4ckZ5gDOrcvtCUZP3DOd3Djly66rCIk0aYXLz14,12567
  MediLink/MediLink_PatientProcessor.py,sha256=9r2w4p45d30Tn0kbXL3j5574MYOehP83tDirNOw_Aek,19977
  MediLink/MediLink_Scan.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  MediLink/MediLink_Scheduler.py,sha256=UJvxhDvHraqra2_TlQVlGeh5jRFrrfK6nCVUHnKOEMY,38
@@ -64,7 +64,7 @@ MediLink/MediLink_Up.py,sha256=QFdUtpEySc7ceZfFJ2q9XWClnhYJssG-UywFFedlv9w,34899
  MediLink/MediLink_api_utils.py,sha256=dsGLRPRvSwfXPLrrfgnkIKGDIF00wE93TrDB6HMDPQU,11857
  MediLink/MediLink_batch.bat,sha256=nqL5QwCLyRQFSPdv6kgtcV_cpky7FXSOWVl6OxjRXb4,118
  MediLink/MediLink_insurance_utils.py,sha256=g741Fj2K26cMy0JX5d_XavMw9LgkK6hjaUJYfysT7t8,9301
- MediLink/MediLink_main.py,sha256=BwB6BXDT1xnCqqE1M-6FYM_xUS8E17OObZP7KYM0TN0,18424
+ MediLink/MediLink_main.py,sha256=iyMcEToFl2aPHP6xE51OnHQqqbGBh0owRqUfFV1F01M,21745
  MediLink/MediLink_smart_import.py,sha256=B5SfBn_4bYEWJJDolXbjnwKx_-MaqGZ76LyXQwWDV80,9838
  MediLink/Soumit_api.py,sha256=5JfOecK98ZC6NpZklZW2AkOzkjvrbYxpJpZNH3rFxDw,497
  MediLink/__init__.py,sha256=Z4Uxt4XZk4n-GwAkUoEeFiL-D7xHbttYiiWGjgKT_ng,3391
@@ -77,9 +77,9 @@ MediLink/test_cob_library.py,sha256=wUMv0-Y6fNsKcAs8Z9LwfmEBRO7oBzBAfWmmzwoNd1g,
  MediLink/test_timing.py,sha256=yH2b8QPLDlp1Zy5AhgtjzjnDHNGhAD16ZtXtZzzESZw,2042
  MediLink/test_validation.py,sha256=FJrfdUFK--xRScIzrHCg1JeGdm0uJEoRnq6CgkP2lwM,4154
  MediLink/webapp.html,sha256=JPKT559aFVBi1r42Hz7C77Jj0teZZRumPhBev8eSOLk,19806
- medicafe-0.250812.6.dist-info/LICENSE,sha256=65lb-vVujdQK7uMH3RRJSMwUW-WMrMEsc5sOaUn2xUk,1096
- medicafe-0.250812.6.dist-info/METADATA,sha256=Yj5TFAW1DhsTAh-WrLFvHJCGeF1Hvx4ywj_qTnVkTag,3384
- medicafe-0.250812.6.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- medicafe-0.250812.6.dist-info/entry_points.txt,sha256=m3RBUBjr-xRwEkKJ5W4a7NlqHZP_1rllGtjZnrRqKe8,52
- medicafe-0.250812.6.dist-info/top_level.txt,sha256=U6-WBJ9RCEjyIs0BlzbQq_PwedCp_IV9n1616NNV5zA,26
- medicafe-0.250812.6.dist-info/RECORD,,
+ medicafe-0.250813.0.dist-info/LICENSE,sha256=65lb-vVujdQK7uMH3RRJSMwUW-WMrMEsc5sOaUn2xUk,1096
+ medicafe-0.250813.0.dist-info/METADATA,sha256=0dCrp1oFMB5Y1CdIUbqVjsuXrm0JFUionXfh9D7TuKk,3384
+ medicafe-0.250813.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+ medicafe-0.250813.0.dist-info/entry_points.txt,sha256=m3RBUBjr-xRwEkKJ5W4a7NlqHZP_1rllGtjZnrRqKe8,52
+ medicafe-0.250813.0.dist-info/top_level.txt,sha256=U6-WBJ9RCEjyIs0BlzbQq_PwedCp_IV9n1616NNV5zA,26
+ medicafe-0.250813.0.dist-info/RECORD,,