medicafe 0.250812.3__py3-none-any.whl → 0.250812.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,288 @@
1
+ """
2
+ submission_index.py - Centralized submission index scaffolding (Phase 3)
3
+
4
+ Purpose:
5
+ - Provide a shared, efficient index of successful claim submissions (and optionally attempts)
6
+ - Avoid repeated scanning of historical receipt files across apps (MediLink, MediBot)
7
+ - Enable fast deconfliction, window validation, and reporting
8
+
9
+ Design:
10
+ - Backed by a compact JSONL file in receipts_root (one JSON record per line)
11
+ - Claim key: (patient_id, payer_id or primary_insurance, date_of_service, service_hash)
12
+ - Fields: claim_key, patient_id, payer_id, primary_insurance, dos, endpoint, submitted_at,
13
+ receipt_file, status, checksum, notes, duplicate_override
14
+
15
+ This is an incremental implementation with JSONL; SQLite can be added later if needed.
16
+ """
17
+ import os
18
+ import json
19
+ import time
20
+
21
# File names for the shared submission-index artifacts; all live in receipts_root.
META_FILENAME = 'submission_index_meta.json'  # JSON meta/stats sidecar
INDEX_FILENAME = 'submission_index.jsonl'  # one JSON record per line (submissions)
LOCK_FILENAME = 'submission_index.lock'  # cooperative lock for single-writer upkeep
24
+
25
+
26
def build_initial_index(receipts_root, lookback_days=200):
    """Bootstrap the index/meta files for a receipts directory.

    Legacy receipts do not print patient_id, so historical claim-level keys
    cannot be backfilled; this builder only creates missing index/meta files
    and records meta statistics.  Always returns 0 (historical entries added).
    'lookback_days' is accepted for interface stability but unused here.
    """
    if not os.path.isdir(receipts_root):
        return 0
    _ensure_files_exist(receipts_root)
    file_count, newest_mtime = _get_receipt_stats(receipts_root)
    meta = _read_meta(receipts_root)
    meta.update({
        'last_indexed_mtime': newest_mtime,
        'last_indexed_count': file_count,
        'last_full_build_at': time.time(),
        'rebuild_state': 'none',
        'rebuild_progress': 0,
    })
    _write_meta(receipts_root, meta)
    return 0
47
+
48
+
49
def append_submission_record(receipts_root, record):
    """Best-effort append of one submission record (as JSON) to the JSONL index.

    Failures are deliberately swallowed: index bookkeeping must never block a
    claim submission that has already succeeded.
    """
    try:
        _ensure_files_exist(receipts_root)
        with open(_index_path(receipts_root), 'a') as handle:
            handle.write(json.dumps(record) + "\n")
    except Exception:
        pass
62
+
63
+
64
def find_by_claim_key(receipts_root, claim_key):
    """Scan the JSONL index for the first record whose 'claim_key' matches.

    Corrupt lines are skipped silently; a missing index file or any I/O error
    yields None, keeping duplicate detection strictly best-effort.
    """
    try:
        index_file = _index_path(receipts_root)
        if not os.path.exists(index_file):
            return None
        with open(index_file, 'r') as handle:
            for raw_line in handle:
                try:
                    record = json.loads(raw_line.strip())
                except Exception:
                    continue
                if isinstance(record, dict) and record.get('claim_key') == claim_key:
                    return record
    except Exception:
        return None
    return None
84
+
85
+
86
def reconcile_recent_receipts(receipts_root, since_timestamp, max_seconds):
    """Count receipt files modified after 'since_timestamp' (meta upkeep only).

    The JSONL index itself is written at submission time; this pass merely
    detects growth.  Work is bounded: after each directory, elapsed time is
    checked against 'max_seconds' and the partial count is returned once the
    budget is spent.  Non-ASCII file names are ignored (XP/ASCII safety).
    """
    started = time.time()
    new_files = 0
    try:
        for dirpath, _subdirs, filenames in os.walk(receipts_root):
            for filename in filenames:
                try:
                    filename.encode('ascii')
                except Exception:
                    continue
                try:
                    if os.path.getmtime(os.path.join(dirpath, filename)) > since_timestamp:
                        new_files += 1
                except Exception:
                    continue
            if int(time.time() - started) >= int(max_seconds):
                return new_files
    except Exception:
        return new_files
    return new_files
112
+
113
+
114
def compute_claim_key(patient_id, payer_id, primary_insurance, date_of_service, service_hash):
    """Build the deterministic '|'-joined claim key used for deconfliction.

    Key layout: patient_id | (payer_id, falling back to primary_insurance) |
    date_of_service | service_hash.  None/empty components collapse to "".
    Components are coerced with str() so callers may pass non-string values
    (e.g. integer IDs) without "|".join raising TypeError; behavior for
    string inputs is unchanged.
    """
    components = [
        patient_id or "",
        payer_id or primary_insurance or "",
        date_of_service or "",
        service_hash or "",
    ]
    return "|".join(str(part) for part in components)
124
+
125
+
126
+ # ------------------------- ASCII-safe meta/lock helpers -----------------------
127
+
128
def _meta_path(root_dir):
    # Absolute path of the JSON meta/stats sidecar inside the receipts root.
    return os.path.join(root_dir, META_FILENAME)
130
+
131
+
132
def _index_path(root_dir):
    # Absolute path of the JSONL submission index inside the receipts root.
    return os.path.join(root_dir, INDEX_FILENAME)
134
+
135
+
136
def _lock_path(root_dir):
    # Absolute path of the cooperative lock file inside the receipts root.
    return os.path.join(root_dir, LOCK_FILENAME)
138
+
139
+
140
def _ensure_files_exist(root_dir):
    """Create the meta file (with defaults) and an empty index on first use.

    Never raises; any failure leaves the directory untouched.
    """
    try:
        if not os.path.exists(_meta_path(root_dir)):
            _write_meta(root_dir, {
                'last_indexed_mtime': 0.0,
                'last_indexed_count': 0,
                'last_full_build_at': 0.0,
                'rebuild_state': 'none',
                'rebuild_progress': 0
            })
        index_file = _index_path(root_dir)
        if not os.path.exists(index_file):
            # Touch an empty JSONL file so later appends/reads succeed.
            open(index_file, 'w').close()
    except Exception:
        pass
157
+
158
+
159
def _read_meta(root_dir):
    """Load the meta dict, returning safe defaults on any read/parse failure.

    Returns:
        dict with keys last_indexed_mtime, last_indexed_count,
        last_full_build_at, rebuild_state ('none' | 'pending' | 'in_progress')
        and rebuild_progress.  If the file holds valid JSON that is not an
        object, an empty dict is returned (preserves original behavior).

    Improvement over the original: the default dict literal was repeated three
    times verbatim; it is now defined once.  A fresh dict is built per call,
    so callers may mutate the result safely.
    """
    defaults = {
        'last_indexed_mtime': 0.0,
        'last_indexed_count': 0,
        'last_full_build_at': 0.0,
        'rebuild_state': 'none',  # 'none' | 'pending' | 'in_progress'
        'rebuild_progress': 0
    }
    path = _meta_path(root_dir)
    if not os.path.exists(path):
        return defaults
    try:
        with open(path, 'r') as f:
            data = f.read()
        try:
            meta = json.loads(data)
        except Exception:
            return defaults
        return meta if isinstance(meta, dict) else {}
    except Exception:
        return defaults
191
+
192
+
193
def _write_meta(root_dir, meta):
    """Serialize 'meta' to the sidecar file; all failures are ignored."""
    try:
        serialized = json.dumps(meta)
        with open(_meta_path(root_dir), 'w') as handle:
            handle.write(serialized)
    except Exception:
        pass
199
+
200
+
201
def _try_acquire_lock(root_dir):
    """Atomically create the lock file via O_CREAT|O_EXCL.

    Returns True when this process obtained the lock, False when the lock
    already exists or creation failed for any reason.
    """
    try:
        handle = os.open(_lock_path(root_dir), os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        os.close(handle)
        return True
    except Exception:
        return False
209
+
210
+
211
def _release_lock(root_dir):
    """Delete the cooperative lock file, tolerating absence and I/O errors."""
    try:
        os.unlink(_lock_path(root_dir))
    except Exception:
        pass
216
+
217
+
218
+ def _get_receipt_stats(receipts_root):
219
+ count = 0
220
+ max_mtime = 0.0
221
+ try:
222
+ for root, dirs, files in os.walk(receipts_root):
223
+ for name in files:
224
+ try:
225
+ _ = name.encode('ascii')
226
+ except Exception:
227
+ continue
228
+ count += 1
229
+ try:
230
+ mtime = os.path.getmtime(os.path.join(root, name))
231
+ if mtime > max_mtime:
232
+ max_mtime = mtime
233
+ except Exception:
234
+ continue
235
+ except Exception:
236
+ pass
237
+ return count, max_mtime
238
+
239
+
240
+ # ------------------------- Public entry point --------------------------------
241
+
242
def ensure_submission_index(receipts_root, lookback_days=200, large_growth_threshold=0.1, max_inline_seconds=2):
    """
    XP/ASCII-safe, inline-only upkeep for the submission index.
    - No background tasks
    - Bounded work per call
    - Chunked rebuild across boots

    Args:
        receipts_root: directory holding receipts plus the index/meta files.
        lookback_days: forwarded to build_initial_index on first run.
        large_growth_threshold: fraction of last_indexed_count (floor 100
            files) above which a chunked rebuild is scheduled.
        max_inline_seconds: time budget handed to reconcile_recent_receipts.
    """
    if not receipts_root or not os.path.isdir(receipts_root):
        return

    # Ensure files exist early
    _ensure_files_exist(receipts_root)

    # Single-writer guard: if another process holds the lock, skip this pass.
    if not _try_acquire_lock(receipts_root):
        return

    try:
        meta = _read_meta(receipts_root)
        current_count, current_max_mtime = _get_receipt_stats(receipts_root)

        if meta.get('last_indexed_mtime', 0.0) == 0.0 and meta.get('last_indexed_count', 0) == 0:
            # First-time or corrupt meta: do bounded initial build
            build_initial_index(receipts_root, lookback_days)
            return

        # Incremental reconcile if new files detected by mtime
        if current_max_mtime > meta.get('last_indexed_mtime', 0.0):
            added = reconcile_recent_receipts(receipts_root, meta.get('last_indexed_mtime', 0.0), max_inline_seconds)
            meta['last_indexed_mtime'] = current_max_mtime
            meta['last_indexed_count'] = meta.get('last_indexed_count', 0) + int(added)
            _write_meta(receipts_root, meta)
            return

        # Large growth heuristic -> schedule chunked rebuild (across boots)
        last_count = meta.get('last_indexed_count', 0)
        delta = current_count - last_count
        if delta > 0 and delta >= max(100, int(large_growth_threshold * (last_count or 1))):
            if meta.get('rebuild_state') == 'none':
                meta['rebuild_state'] = 'pending'
                meta['rebuild_progress'] = 0
                _write_meta(receipts_root, meta)
            return

        # If rebuild pending, we would process a chunk here in a future implementation
        return
    finally:
        # Always release the cooperative lock, even if upkeep raised.
        _release_lock(receipts_root)
@@ -0,0 +1,57 @@
1
+ # InsuranceTypeService.py
2
+ """
3
+ InsuranceTypeService
4
+
5
+ Phase 2 scaffolding for future direct SBR09 extraction from API (GraphQL Super Connector).
6
+
7
+ - NOT ACTIVE: This module provides structure and validation only; integration is disabled
8
+ until the API provides the SBR09 code directly and the key name is known.
9
+ - Usage intent: When feature flag 'use_sbr09_direct_from_api' is enabled and the
10
+ GraphQL API returns an SBR09-compatible code in a known field, call the centralized
11
+ MediCafe.graphql_utils.extract_sbr09_direct() and use the returned value directly
12
+ after minimal validation (no internal mapping).
13
+
14
+ Implementation notes:
15
+ - Extraction and validation are centralized in MediCafe/graphql_utils.py to avoid duplication
16
+ and allow reuse across MediLink and MediBot.
17
+ """
18
+
19
# Resolve the shared config loader; fall back to an inert stub so this module
# stays importable even when MediCafe.core_utils is unavailable.
try:
    from MediCafe.core_utils import get_shared_config_loader
except Exception:
    def get_shared_config_loader():
        # Minimal stand-in matching the loader interface used below.
        class _Dummy:
            def load_configuration(self):
                # Mirrors the (config, secondary) tuple shape of the real loader.
                return {}, {}
            def log(self, *args, **kwargs):
                # No-op logger.
                pass
        return _Dummy()

# Module-level loader instance shared by InsuranceTypeService.
ConfigLoader = get_shared_config_loader()

# Centralized extractor (preferred)
try:
    from MediCafe.graphql_utils import extract_sbr09_direct as centralized_extract_sbr09
except Exception:
    centralized_extract_sbr09 = None # Fallback handled in method
37
+
38
+
39
class InsuranceTypeService(object):
    """Scaffolding service for future direct SBR09 extraction (inactive).

    Delegates to the centralized MediCafe.graphql_utils extractor when it was
    importable at module load; otherwise every lookup returns None.
    """

    def __init__(self):
        # Only the primary config mapping is retained; the second value from
        # load_configuration() is unused here.
        self.config, _ = ConfigLoader.load_configuration()

    def get_direct_sbr09_if_available(self, api_transformed_response):
        """Try to extract SBR09 directly from API response; return None if unavailable/invalid."""
        try:
            if centralized_extract_sbr09 is None:
                return None
            return centralized_extract_sbr09(api_transformed_response)
        except Exception as e:
            # Log on a best-effort basis; a broken logger must not mask the
            # None fallback.
            try:
                ConfigLoader.log("Direct SBR09 extraction error: {}".format(e), level="WARNING")
            except Exception:
                pass
            return None
@@ -843,9 +843,11 @@ def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
843
843
  patient_name = data.get('patient_name', 'Unknown')
844
844
  current_insurance_type = data.get('insurance_type', '12')
845
845
  current_insurance_description = insurance_options.get(current_insurance_type, "Unknown")
846
+ source = data.get('insurance_type_source', '')
847
+ src_disp = 'API' if source == 'API' else ('MAN' if source == 'MANUAL' else 'DEF')
846
848
 
847
- print("({}) {:<25} | Current Ins. Type: {} - {}".format(
848
- patient_id, patient_name, current_insurance_type, current_insurance_description))
849
+ print("({}) {:<25} | Src:{} | Current Ins. Type: {} - {}".format(
850
+ patient_id, patient_name, src_disp, current_insurance_type, current_insurance_description))
849
851
 
850
852
  while True:
851
853
  new_insurance_type = input("Enter new insurance type (or press Enter to keep current): ").strip().upper()
@@ -861,6 +863,7 @@ def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
861
863
  elif new_insurance_type in insurance_options:
862
864
  # Valid insurance type from config
863
865
  data['insurance_type'] = new_insurance_type
866
+ data['insurance_type_source'] = 'MANUAL'
864
867
  break
865
868
 
866
869
  else:
@@ -868,11 +871,13 @@ def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
868
871
  confirm = input("Code '{}' not found in configuration. Use it anyway? (y/n): ".format(new_insurance_type)).strip().lower()
869
872
  if confirm in ['y', 'yes']:
870
873
  data['insurance_type'] = new_insurance_type
874
+ data['insurance_type_source'] = 'MANUAL'
871
875
  break
872
876
  else:
873
877
  print("Invalid insurance type. Please enter a valid code or type 'LIST' to see options.")
874
878
  continue
875
879
 
880
+
876
881
  def review_and_confirm_changes(detailed_patient_data, insurance_options):
877
882
  # Review and confirm changes
878
883
  print("\nReview changes:")
@@ -29,12 +29,21 @@ def display_patient_summaries(detailed_patient_data):
29
29
  Displays summaries of all patients and their suggested endpoints.
30
30
  """
31
31
  print("\nSummary of patient details and suggested endpoint:")
32
- for index, summary in enumerate(detailed_patient_data, start=1):
32
+
33
+ # Sort by insurance_type_source priority for clearer grouping
34
+ priority = {'API': 0, 'MANUAL': 1, 'DEFAULT': 2, 'DEFAULT_FALLBACK': 2}
35
+ def sort_key(item):
36
+ src = item.get('insurance_type_source', '')
37
+ return (priority.get(src, 2), item.get('surgery_date', ''), item.get('patient_name', ''))
38
+ sorted_data = sorted(detailed_patient_data, key=sort_key)
39
+
40
+ for index, summary in enumerate(sorted_data, start=1):
33
41
  try:
34
42
  display_file_summary(index, summary)
35
43
  except KeyError as e:
36
44
  print("Summary at index {} is missing key: {}".format(index, e))
37
45
  print() # add blank line for improved readability.
46
+ print("Legend: Src=API (auto), MAN (manual), DEF (default) | [DUP] indicates a previously submitted matching claim")
38
47
 
39
48
  def display_file_summary(index, summary):
40
49
  # Ensure surgery_date is converted to a datetime object
@@ -42,13 +51,15 @@ def display_file_summary(index, summary):
42
51
 
43
52
  # Add header row if it's the first index
44
53
  if index == 1:
45
- print("{:<3} {:5} {:<10} {:20} {:15} {:3} {:20}".format(
46
- "No.", "Date", "ID", "Name", "Primary Ins.", "IT", "Current Endpoint"
54
+ print("{:<3} {:5} {:<10} {:<20} {:<15} {:<3} {:<5} {:<8} {:<20}".format(
55
+ "No.", "Date", "ID", "Name", "Primary Ins.", "IT", "Src", "Flag", "Current Endpoint"
47
56
  ))
48
- print("-"*82)
57
+ print("-"*100)
49
58
 
50
59
  # Check if insurance_type is available; if not, set a default placeholder (this should already be '12' at this point)
51
60
  insurance_type = summary.get('insurance_type', '--')
61
+ insurance_source = summary.get('insurance_type_source', '')
62
+ duplicate_flag = '[DUP]' if summary.get('duplicate_candidate') else ''
52
63
 
53
64
  # Get the effective endpoint (confirmed > user preference > suggestion > default)
54
65
  effective_endpoint = (summary.get('confirmed_endpoint') or
@@ -61,13 +72,24 @@ def display_file_summary(index, summary):
61
72
  else:
62
73
  insurance_display = insurance_type[:3] if insurance_type else '--'
63
74
 
64
- # Displays the summary of a file.
65
- print("{:02d}. {:5} ({:<8}) {:20} {:15} {:3} {:20}".format(
75
+ # Shorten source for compact display
76
+ if insurance_source in ['DEFAULT_FALLBACK', 'DEFAULT']:
77
+ source_display = 'DEF'
78
+ elif insurance_source == 'MANUAL':
79
+ source_display = 'MAN'
80
+ elif insurance_source == 'API':
81
+ source_display = 'API'
82
+ else:
83
+ source_display = ''
84
+
85
+ print("{:02d}. {:5} ({:<8}) {:<20} {:<15} {:<3} {:<5} {:<8} {:<20}".format(
66
86
  index,
67
87
  surgery_date.strftime("%m-%d"),
68
88
  summary['patient_id'],
69
89
  summary['patient_name'][:20],
70
90
  summary['primary_insurance'][:15],
71
91
  insurance_display,
92
+ source_display,
93
+ duplicate_flag,
72
94
  effective_endpoint[:20])
73
95
  )
@@ -11,6 +11,13 @@ MediLink_ConfigLoader = get_shared_config_loader()
11
11
  import MediLink_DataMgmt
12
12
  import MediLink_Display_Utils
13
13
 
14
+ # Optional import for submission index (duplicate detection)
15
+ try:
16
+ from MediCafe.submission_index import compute_claim_key, find_by_claim_key
17
+ except Exception:
18
+ compute_claim_key = None
19
+ find_by_claim_key = None
20
+
14
21
  # Add parent directory access for MediBot import
15
22
  project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
16
23
  if project_dir not in sys.path:
@@ -184,7 +191,7 @@ def enrich_with_insurance_type(detailed_patient_data, patient_insurance_type_map
184
191
  data['insurance_type_source'] = 'DEFAULT_FALLBACK'
185
192
 
186
193
  else:
187
- # Legacy mode (preserve existing behavior exactly)
194
+ # Legacy mode (preserve existing behavior exactly) + always set source
188
195
  MediLink_ConfigLoader.log("Using legacy insurance type enrichment", level="INFO")
189
196
  for data in detailed_patient_data:
190
197
  # FIELD NAME CLARIFICATION: Use 'patient_id' field created by extract_and_suggest_endpoint()
@@ -192,16 +199,33 @@ def enrich_with_insurance_type(detailed_patient_data, patient_insurance_type_map
192
199
  patient_id = data.get('patient_id')
193
200
  if patient_id:
194
201
  insurance_type = patient_insurance_type_mapping.get(patient_id, '12') # Default to '12' (PPO/SBR09)
202
+ data['insurance_type'] = insurance_type
203
+ # Mirror enhanced mode semantics for source
204
+ data['insurance_type_source'] = 'MANUAL' if patient_id in patient_insurance_type_mapping else 'DEFAULT'
195
205
  else:
196
206
  # Handle case where patient_id is missing or empty
197
207
  MediLink_ConfigLoader.log("No patient_id found in data record", level="WARNING")
198
208
  insurance_type = '12' # Default when no patient ID available
199
-
200
- data['insurance_type'] = insurance_type
209
+ data['insurance_type'] = insurance_type
210
+ data['insurance_type_source'] = 'DEFAULT_FALLBACK'
201
211
 
202
212
  return detailed_patient_data
203
213
 
204
214
 
215
+ def _normalize_dos_to_iso(mm_dd_yy):
216
+ """Convert date like 'MM-DD-YY' to 'YYYY-MM-DD' safely."""
217
+ try:
218
+ parts = mm_dd_yy.split('-')
219
+ if len(parts) == 3:
220
+ mm, dd, yy = parts
221
+ # Assume 20xx for YY < 50 else 19xx (adjust as needed)
222
+ century = '20' if int(yy) < 50 else '19'
223
+ return "{}-{}-{}".format(century + yy, mm.zfill(2), dd.zfill(2))
224
+ except Exception:
225
+ pass
226
+ return mm_dd_yy
227
+
228
+
205
229
  def extract_and_suggest_endpoint(file_path, config, crosswalk):
206
230
  """
207
231
  Reads a fixed-width file, extracts file details including surgery date, patient ID,
@@ -237,6 +261,13 @@ def extract_and_suggest_endpoint(file_path, config, crosswalk):
237
261
  insurance_to_id = load_insurance_data_from_mains(config)
238
262
  MediLink_ConfigLoader.log("Insurance data loaded from MAINS. {} insurance providers found.".format(len(insurance_to_id)), level="INFO")
239
263
 
264
+ # Resolve receiptsRoot for duplicate detection (optional)
265
+ try:
266
+ medi_cfg = extract_medilink_config(config)
267
+ receipts_root = medi_cfg.get('local_claims_path', None)
268
+ except Exception:
269
+ receipts_root = None
270
+
240
271
  for personal_info, insurance_info, service_info, service_info_2, service_info_3 in MediLink_DataMgmt.read_fixed_width_data(file_path):
241
272
  # Parse reserved 5-line record: 3 active lines + 2 reserved for future expansion
242
273
  try:
@@ -246,6 +277,7 @@ def extract_and_suggest_endpoint(file_path, config, crosswalk):
246
277
  parsed_data = MediLink_DataMgmt.parse_fixed_width_data(personal_info, insurance_info, service_info, service_info_2, service_info_3, cfg_for_parse)
247
278
 
248
279
  primary_insurance = parsed_data.get('INAME')
280
+ primary_procedure_code = parsed_data.get('CODEA')
249
281
 
250
282
  # Retrieve the insurance ID associated with the primary insurance
251
283
  insurance_id = insurance_to_id.get(primary_insurance)
@@ -256,7 +288,6 @@ def extract_and_suggest_endpoint(file_path, config, crosswalk):
256
288
  if insurance_id:
257
289
  for payer_id, payer_data in crosswalk.get('payer_id', {}).items():
258
290
  medisoft_ids = [str(id) for id in payer_data.get('medisoft_id', [])]
259
- # MediLink_ConfigLoader.log("Payer ID: {}, Medisoft IDs: {}".format(payer_id, medisoft_ids))
260
291
  if str(insurance_id) in medisoft_ids:
261
292
  payer_ids.append(payer_id)
262
293
  if payer_ids:
@@ -283,22 +314,52 @@ def extract_and_suggest_endpoint(file_path, config, crosswalk):
283
314
  else:
284
315
  MediLink_ConfigLoader.log("No suggested endpoint found for payer IDs: {}".format(payer_ids))
285
316
 
317
+ # Normalize DOS for keying
318
+ raw_dos = parsed_data.get('DATE')
319
+ iso_dos = _normalize_dos_to_iso(raw_dos) if raw_dos else ''
320
+
286
321
  # Enrich detailed patient data with additional information and suggested endpoint
287
322
  detailed_data = parsed_data.copy() # Copy parsed_data to avoid modifying the original dictionary
288
323
  detailed_data.update({
289
324
  'file_path': file_path,
290
- # CRITICAL FIELD MAPPING: 'CHART' field from fixed-width file becomes 'patient_id'
291
- # This is the field that enrich_with_insurance_type() will use
292
- 'patient_id': parsed_data.get('CHART'), # <- This is the key field mapping for MediLink flow
325
+ 'patient_id': parsed_data.get('CHART'),
293
326
  'surgery_date': parsed_data.get('DATE'),
327
+ 'surgery_date_iso': iso_dos,
294
328
  'patient_name': ' '.join([parsed_data.get(key, '') for key in ['FIRST', 'MIDDLE', 'LAST']]),
295
329
  'amount': parsed_data.get('AMOUNT'),
296
330
  'primary_insurance': primary_insurance,
331
+ 'primary_procedure_code': primary_procedure_code,
297
332
  'suggested_endpoint': suggested_endpoint
298
333
  })
334
+
335
+ # Compute claim_key (optional)
336
+ claim_key = None
337
+ try:
338
+ if compute_claim_key:
339
+ claim_key = compute_claim_key(
340
+ detailed_data.get('patient_id', ''),
341
+ '', # payer_id not reliably known here
342
+ detailed_data.get('primary_insurance', ''),
343
+ detailed_data.get('surgery_date_iso', ''),
344
+ detailed_data.get('primary_procedure_code', '')
345
+ )
346
+ detailed_data['claim_key'] = claim_key
347
+ except Exception:
348
+ pass
349
+
350
+ # Duplicate candidate flag (optional upstream detection)
351
+ try:
352
+ if find_by_claim_key and receipts_root and claim_key:
353
+ existing = find_by_claim_key(receipts_root, claim_key)
354
+ detailed_data['duplicate_candidate'] = bool(existing)
355
+ else:
356
+ detailed_data['duplicate_candidate'] = False
357
+ except Exception:
358
+ detailed_data['duplicate_candidate'] = False
359
+
299
360
  detailed_patient_data.append(detailed_data)
300
361
 
301
- # Return only the enriched detailed patient data, eliminating the need for a separate summary list
362
+ # Return only the enriched detailed patient data
302
363
  return detailed_patient_data
303
364
 
304
365