medicafe 0.250812.4__py3-none-any.whl → 0.250812.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,288 @@
+ """
+ submission_index.py - Centralized submission index scaffolding (Phase 3)
+
+ Purpose:
+ - Provide a shared, efficient index of successful claim submissions (and optionally attempts)
+ - Avoid repeated scanning of historical receipt files across apps (MediLink, MediBot)
+ - Enable fast deconfliction, window validation, and reporting
+
+ Design:
+ - Backed by a compact JSONL file in receipts_root (one JSON record per line)
+ - Claim key: (patient_id, payer_id or primary_insurance, date_of_service, service_hash)
+ - Fields: claim_key, patient_id, payer_id, primary_insurance, dos, endpoint, submitted_at,
+   receipt_file, status, checksum, notes, duplicate_override
+
+ This is an incremental implementation with JSONL; SQLite can be added later if needed.
+ """
+ import os
+ import json
+ import time
+
+ META_FILENAME = 'submission_index_meta.json'
+ INDEX_FILENAME = 'submission_index.jsonl'
+ LOCK_FILENAME = 'submission_index.lock'
+
+
+ def build_initial_index(receipts_root, lookback_days=200):
+     """
+     Initial index builder for legacy receipts.
+     NOTE: Legacy receipts do not include patient_id in the printed table, so we cannot
+     reliably backfill claim-level keys from historical receipts. This builder will:
+     - Create index/meta files if missing
+     - Record meta stats
+     - Return 0 (no historical claim entries)
+     """
+     if not os.path.isdir(receipts_root):
+         return 0
+     _ensure_files_exist(receipts_root)
+     count, max_mtime = _get_receipt_stats(receipts_root)
+     meta = _read_meta(receipts_root)
+     meta['last_indexed_mtime'] = max_mtime
+     meta['last_indexed_count'] = count
+     meta['last_full_build_at'] = time.time()
+     meta['rebuild_state'] = 'none'
+     meta['rebuild_progress'] = 0
+     _write_meta(receipts_root, meta)
+     return 0
+
+
+ def append_submission_record(receipts_root, record):
+     """
+     Append a new successful submission record to the index after a claim is submitted.
+     """
+     try:
+         _ensure_files_exist(receipts_root)
+         line = json.dumps(record)
+         path = _index_path(receipts_root)
+         with open(path, 'a') as f:
+             f.write(line)
+             f.write("\n")
+     except Exception:
+         pass
+
+
+ def find_by_claim_key(receipts_root, claim_key):
+     """
+     Look up a claim by its stable key to detect potential duplicates.
+     Returns the first matching record or None.
+     """
+     try:
+         path = _index_path(receipts_root)
+         if not os.path.exists(path):
+             return None
+         with open(path, 'r') as f:
+             for line in f:
+                 try:
+                     entry = json.loads(line.strip())
+                     if isinstance(entry, dict) and entry.get('claim_key') == claim_key:
+                         return entry
+                 except Exception:
+                     continue
+     except Exception:
+         return None
+     return None
+
+
+ def reconcile_recent_receipts(receipts_root, since_timestamp, max_seconds):
+     """
+     Incrementally scan receipts newer than 'since_timestamp' and update meta only.
+     JSONL index is built at submission time; this reconcile only updates meta counters.
+     Returns number of new files detected.
+     """
+     start = time.time()
+     count = 0
+     try:
+         for root, dirs, files in os.walk(receipts_root):
+             for name in files:
+                 try:
+                     _ = name.encode('ascii')
+                 except Exception:
+                     continue
+                 try:
+                     mtime = os.path.getmtime(os.path.join(root, name))
+                     if mtime > since_timestamp:
+                         count += 1
+                 except Exception:
+                     continue
+                 if int(time.time() - start) >= int(max_seconds):
+                     return count
+     except Exception:
+         return count
+     return count
+
+
+ def compute_claim_key(patient_id, payer_id, primary_insurance, date_of_service, service_hash):
+     """
+     Compute a deterministic claim key for deconfliction.
+     """
+     return "|".join([
+         (patient_id or ""),
+         (payer_id or primary_insurance or ""),
+         (date_of_service or ""),
+         (service_hash or "")
+     ])
+
+
+ # ------------------------- ASCII-safe meta/lock helpers -----------------------
+
+ def _meta_path(root_dir):
+     return os.path.join(root_dir, META_FILENAME)
+
+
+ def _index_path(root_dir):
+     return os.path.join(root_dir, INDEX_FILENAME)
+
+
+ def _lock_path(root_dir):
+     return os.path.join(root_dir, LOCK_FILENAME)
+
+
+ def _ensure_files_exist(root_dir):
+     try:
+         meta_path = _meta_path(root_dir)
+         if not os.path.exists(meta_path):
+             _write_meta(root_dir, {
+                 'last_indexed_mtime': 0.0,
+                 'last_indexed_count': 0,
+                 'last_full_build_at': 0.0,
+                 'rebuild_state': 'none',
+                 'rebuild_progress': 0
+             })
+         index_path = _index_path(root_dir)
+         if not os.path.exists(index_path):
+             with open(index_path, 'w') as f:
+                 f.write("")
+     except Exception:
+         pass
+
+
+ def _read_meta(root_dir):
+     path = _meta_path(root_dir)
+     if not os.path.exists(path):
+         return {
+             'last_indexed_mtime': 0.0,
+             'last_indexed_count': 0,
+             'last_full_build_at': 0.0,
+             'rebuild_state': 'none',  # 'none' | 'pending' | 'in_progress'
+             'rebuild_progress': 0
+         }
+     try:
+         with open(path, 'r') as f:
+             data = f.read()
+         try:
+             meta = json.loads(data)
+         except Exception:
+             return {
+                 'last_indexed_mtime': 0.0,
+                 'last_indexed_count': 0,
+                 'last_full_build_at': 0.0,
+                 'rebuild_state': 'none',
+                 'rebuild_progress': 0
+             }
+         return meta if isinstance(meta, dict) else {}
+     except Exception:
+         return {
+             'last_indexed_mtime': 0.0,
+             'last_indexed_count': 0,
+             'last_full_build_at': 0.0,
+             'rebuild_state': 'none',
+             'rebuild_progress': 0
+         }
+
+
+ def _write_meta(root_dir, meta):
+     try:
+         with open(_meta_path(root_dir), 'w') as f:
+             f.write(json.dumps(meta))
+     except Exception:
+         pass
+
+
+ def _try_acquire_lock(root_dir):
+     lock = _lock_path(root_dir)
+     try:
+         fd = os.open(lock, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+         os.close(fd)
+         return True
+     except Exception:
+         return False
+
+
+ def _release_lock(root_dir):
+     try:
+         os.remove(_lock_path(root_dir))
+     except Exception:
+         pass
+
+
+ def _get_receipt_stats(receipts_root):
+     count = 0
+     max_mtime = 0.0
+     try:
+         for root, dirs, files in os.walk(receipts_root):
+             for name in files:
+                 try:
+                     _ = name.encode('ascii')
+                 except Exception:
+                     continue
+                 count += 1
+                 try:
+                     mtime = os.path.getmtime(os.path.join(root, name))
+                     if mtime > max_mtime:
+                         max_mtime = mtime
+                 except Exception:
+                     continue
+     except Exception:
+         pass
+     return count, max_mtime
+
+
+ # ------------------------- Public entry point --------------------------------
+
+ def ensure_submission_index(receipts_root, lookback_days=200, large_growth_threshold=0.1, max_inline_seconds=2):
+     """
+     XP/ASCII-safe, inline-only upkeep for the submission index.
+     - No background tasks
+     - Bounded work per call
+     - Chunked rebuild across boots
+     """
+     if not receipts_root or not os.path.isdir(receipts_root):
+         return
+
+     # Ensure files exist early
+     _ensure_files_exist(receipts_root)
+
+     if not _try_acquire_lock(receipts_root):
+         return
+
+     try:
+         meta = _read_meta(receipts_root)
+         current_count, current_max_mtime = _get_receipt_stats(receipts_root)
+
+         if meta.get('last_indexed_mtime', 0.0) == 0.0 and meta.get('last_indexed_count', 0) == 0:
+             # First-time or corrupt meta: do bounded initial build
+             build_initial_index(receipts_root, lookback_days)
+             return
+
+         # Incremental reconcile if new files detected by mtime
+         if current_max_mtime > meta.get('last_indexed_mtime', 0.0):
+             added = reconcile_recent_receipts(receipts_root, meta.get('last_indexed_mtime', 0.0), max_inline_seconds)
+             meta['last_indexed_mtime'] = current_max_mtime
+             meta['last_indexed_count'] = meta.get('last_indexed_count', 0) + int(added)
+             _write_meta(receipts_root, meta)
+             return
+
+         # Large growth heuristic -> schedule chunked rebuild (across boots)
+         last_count = meta.get('last_indexed_count', 0)
+         delta = current_count - last_count
+         if delta > 0 and delta >= max(100, int(large_growth_threshold * (last_count or 1))):
+             if meta.get('rebuild_state') == 'none':
+                 meta['rebuild_state'] = 'pending'
+                 meta['rebuild_progress'] = 0
+                 _write_meta(receipts_root, meta)
+             return
+
+         # If rebuild pending, we would process a chunk here in a future implementation
+         return
+     finally:
+         _release_lock(receipts_root)
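For reference, a minimal, hypothetical usage sketch of this index (not part of the diff). The function names come from submission_index.py above; the import path, receipts location, and record values are illustrative assumptions.

import time
import submission_index as si  # assumed import path; the diff does not show the package location

receipts_root = "C:\\MediCafe\\receipts"  # illustrative receipts_root

si.ensure_submission_index(receipts_root)  # bounded, inline upkeep (safe to call at startup)

key = si.compute_claim_key("12345", "00850", "MEDICARE", "2025-08-01", "svc-hash")  # deterministic claim key
if si.find_by_claim_key(receipts_root, key) is None:
    # ... submit the claim, then record it for future deconfliction ...
    si.append_submission_record(receipts_root, {
        "claim_key": key,
        "patient_id": "12345",
        "payer_id": "00850",
        "dos": "2025-08-01",
        "endpoint": "EXAMPLE_ENDPOINT",  # illustrative value
        "submitted_at": time.time(),
        "status": "success",
    })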
@@ -0,0 +1,57 @@
+ # InsuranceTypeService.py
+ """
+ InsuranceTypeService
+
+ Phase 2 scaffolding for future direct SBR09 extraction from API (GraphQL Super Connector).
+
+ - NOT ACTIVE: This module provides structure and validation only; integration is disabled
+   until the API provides the SBR09 code directly and the key name is known.
+ - Usage intent: When feature flag 'use_sbr09_direct_from_api' is enabled and the
+   GraphQL API returns an SBR09-compatible code in a known field, call the centralized
+   MediCafe.graphql_utils.extract_sbr09_direct() and use the returned value directly
+   after minimal validation (no internal mapping).
+
+ Implementation notes:
+ - Extraction and validation are centralized in MediCafe/graphql_utils.py to avoid duplication
+   and allow reuse across MediLink and MediBot.
+ """
+
+ try:
+     from MediCafe.core_utils import get_shared_config_loader
+ except Exception:
+     def get_shared_config_loader():
+         class _Dummy:
+             def load_configuration(self):
+                 return {}, {}
+             def log(self, *args, **kwargs):
+                 pass
+         return _Dummy()
+
+ ConfigLoader = get_shared_config_loader()
+
+ # Centralized extractor (preferred)
+ try:
+     from MediCafe.graphql_utils import extract_sbr09_direct as centralized_extract_sbr09
+ except Exception:
+     centralized_extract_sbr09 = None  # Fallback handled in method
+
+
+ class InsuranceTypeService(object):
+     """
+     Placeholder service for future direct SBR09 integration via centralized API utilities.
+     """
+     def __init__(self):
+         self.config, _ = ConfigLoader.load_configuration()
+
+     def get_direct_sbr09_if_available(self, api_transformed_response):
+         """Try to extract SBR09 directly from API response; return None if unavailable/invalid."""
+         try:
+             if centralized_extract_sbr09 is None:
+                 return None
+             return centralized_extract_sbr09(api_transformed_response)
+         except Exception as e:
+             try:
+                 ConfigLoader.log("Direct SBR09 extraction error: {}".format(e), level="WARNING")
+             except Exception:
+                 pass
+             return None
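A minimal, hypothetical usage sketch for the scaffold above (not part of the diff, and not active until the API exposes SBR09 directly); the import path, the response shape, and the fallback branch are assumptions.

from InsuranceTypeService import InsuranceTypeService  # assumed import path

service = InsuranceTypeService()
api_response = {"sbr09": "MB"}  # illustrative transformed GraphQL payload

code = service.get_direct_sbr09_if_available(api_response)
if code:
    pass  # use the API-provided SBR09 directly (minimal validation, no internal mapping)
else:
    pass  # fall back to the existing crosswalk/default insurance-type logic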
@@ -101,6 +101,10 @@ def create_2320_other_subscriber_segments(patient_data, config, crosswalk):
      is_secondary = patient_data.get('claim_type', 'primary') == 'secondary'
 
      if is_secondary:
+         # TODO (DATA CONTRACT): If 835-derived fields are present on patient_data, prefer them:
+         # - total_paid -> AMT*D
+         # - cas_adjustments -> CAS
+         # Otherwise accept 'primary_paid_amount' and 'cas_adjustments' provided by upstream workflow.
          # SBR segment for secondary payer
          responsibility_code = "S"  # Secondary
          insurance_type = determine_medicare_payer_type(patient_data, config)
@@ -111,7 +115,9 @@ def create_2320_other_subscriber_segments(patient_data, config, crosswalk):
          segments.append(sbr_segment)
 
          # AMT*D segment for total amount paid by primary
-         total_paid = patient_data.get('primary_paid_amount', '0.00')
+         # TODO (STRICT MODE): When config['MediLink_Config']['cob_settings']['validation_level'] >= 2,
+         # require presence of a numeric total (from 'total_paid' or 'primary_paid_amount').
+         total_paid = patient_data.get('total_paid', patient_data.get('primary_paid_amount', '0.00'))
          amt_segment = "AMT*D*{}~".format(total_paid)
          segments.append(amt_segment)
 
@@ -152,6 +158,7 @@ def create_2330B_prior_payer_segments(patient_data, config, crosswalk):
      segments = []
 
      # Get prior payer information
+     # TODO (CONFIG): Resolve Medicare payer ID from config['MediLink_Config']['cob_settings']['medicare_payer_ids'] if prior_payer_id not provided.
      prior_payer_name = patient_data.get('prior_payer_name', 'MEDICARE')
      prior_payer_id = patient_data.get('prior_payer_id', '00850')
 
@@ -613,6 +613,12 @@ def process_claim(config, endpoint, patient_data_list, crosswalk, client, suffix
      document_segments = []
 
      for patient_data in patient_data_list:
+         # TODO (SECONDARY PREP): Upstream should mark secondary claims and provide Medicare prior payer info when applicable.
+         # Expected minimal keys for Medicare-secondary:
+         # - claim_type='secondary'
+         # - prior_payer_name='MEDICARE'
+         # - prior_payer_id from config cob_settings.medicare_payer_ids (default '00850')
+         # - optional: primary_paid_amount, cas_adjustments
          # Validate each patient's data before processing
          is_valid, validation_errors = validate_claim_data(patient_data, medi)
          if is_valid:
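An illustrative Medicare-secondary patient_data shape implied by the TODO above; the field names appear in the diff, while the values and the cas_adjustments entries follow the proposed {group, reason, amount} data contract and are assumptions.

patient_data = {
    "claim_type": "secondary",
    "prior_payer_name": "MEDICARE",
    "prior_payer_id": "00850",           # default noted in the diff; may come from cob_settings.medicare_payer_ids
    "total_paid": "125.00",              # 835-derived total, preferred for AMT*D
    "primary_paid_amount": "125.00",     # accepted fallback provided by the upstream workflow
    "cas_adjustments": [                 # proposed schema: {group, reason, amount}
        {"group": "PR", "reason": "2", "amount": "25.00"},
    ],
}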
@@ -980,6 +980,43 @@ def create_clm_and_related_segments(parsed_data, config, crosswalk):
      # - create_2330B_prior_payer_segments() for Medicare prior payer
      # - create_2430_service_line_cob_segments() for service-level adjudication
      # - create_2330C_other_subscriber_name_segments() when patient != subscriber
+     #
+     # Minimal, safe integration (guarded by feature flag):
+     if COB is not None:
+         cob_enabled = False
+         try:
+             # Read feature flag from configuration (expects medi['cob_settings']['enabled'])
+             from MediCafe.core_utils import extract_medilink_config
+             medi_cfg = extract_medilink_config(config)
+             cob_enabled = bool(medi_cfg.get('cob_settings', {}).get('enabled', False))
+         except Exception:
+             cob_enabled = False
+
+         # Only add COB loops when explicitly enabled and claim is secondary
+         # TODO (COB VALIDATION): When COB is enabled and claim is secondary, validate required fields:
+         # - prior_payer_id/prior_payer_name present
+         # - primary_paid_amount present when sending AMT*D (or skip AMT if not available)
+         # - cas_adjustments schema if provided (list of {group, reason, amount})
+         # If critical fields are missing, log and proceed with best-effort unless config enforces strict mode.
+         if cob_enabled and validated_data.get('claim_type') == 'secondary':
+             try:
+                 # 2320 - Other Subscriber Information (OI, AMT, CAS, etc.)
+                 segments.extend(COB.create_2320_other_subscriber_segments(validated_data, config, crosswalk))
+             except Exception as _e1:
+                 try:
+                     MediLink_ConfigLoader.log("COB 2320 insertion failed: {}".format(str(_e1)), config, level="WARNING")
+                 except Exception:
+                     pass
+
+             try:
+                 # 2330B - Prior Payer (Medicare prior payer info, e.g., 00850)
+                 segments.extend(COB.create_2330B_prior_payer_segments(validated_data, config, crosswalk))
+             except Exception as _e2:
+                 try:
+                     MediLink_ConfigLoader.log("COB 2330B insertion failed: {}".format(str(_e2)), config, level="WARNING")
+                 except Exception:
+                     pass
+
      # TODO (COB ENHANCEMENT): Optional attachment references (PWK) for non-electronic EOB handling
      # See MediLink_837p_cob_library.create_pwk_attachment_segment() for implementation
      # Example: PWK*EB*FX*123456~ for attachment control number
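An illustrative configuration fragment implied by the feature-flag read and the COB TODOs above; the key names (cob_settings, enabled, validation_level, medicare_payer_ids) appear in the diff, but their exact placement and value types in the real configuration file are assumptions.

config_fragment = {
    "MediLink_Config": {
        "cob_settings": {
            "enabled": False,            # COB loops are emitted only when True and claim_type == 'secondary'
            "validation_level": 1,       # >= 2 would enforce a numeric total for AMT*D (per the STRICT MODE TODO)
            "medicare_payer_ids": ["00850"],
        }
    }
}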
@@ -1002,6 +1039,7 @@ def create_clm_and_related_segments(parsed_data, config, crosswalk):
      segments.append("DTP*472*D8*{}~".format(convert_date_format(validated_data['DATE'])))
 
      # Is there REF - Line Item Control Number missing here? Private insurance doesn't need it, but Medicare does?
+     # TODO (MEDICARE REF): Add conditional REF*6R (line item control number) when payer is Medicare or when crosswalk/config requires it.
      # segments.append("REF*6R*1~")  # REF01, Reference Identification Qualifier; REF02, Line Item Control Number.
      # 6R - Provider Control Number (Number assigned by information provider company for tracking and billing purposes)
      # 1 - Reference information as defined for a particular Transaction Set or as specified by the Reference Identification Qualifier
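A hedged sketch of the conditional REF*6R emission described in the TODO above; the helper name, the requires_ref_6r decision, and the control-number source are assumptions, not existing code.

def maybe_add_line_item_control_ref(segments, requires_ref_6r, line_control_number):
    # REF*6R carries the line item control number; emit it only when the payer
    # (e.g., Medicare) or the crosswalk/config requires it.
    if requires_ref_6r:
        segments.append("REF*6R*{}~".format(line_control_number))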
@@ -843,9 +843,11 @@ def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
          patient_name = data.get('patient_name', 'Unknown')
          current_insurance_type = data.get('insurance_type', '12')
          current_insurance_description = insurance_options.get(current_insurance_type, "Unknown")
+         source = data.get('insurance_type_source', '')
+         src_disp = 'API' if source == 'API' else ('MAN' if source == 'MANUAL' else 'DEF')
 
-         print("({}) {:<25} | Current Ins. Type: {} - {}".format(
-             patient_id, patient_name, current_insurance_type, current_insurance_description))
+         print("({}) {:<25} | Src:{} | Current Ins. Type: {} - {}".format(
+             patient_id, patient_name, src_disp, current_insurance_type, current_insurance_description))
 
          while True:
              new_insurance_type = input("Enter new insurance type (or press Enter to keep current): ").strip().upper()
@@ -861,6 +863,7 @@ def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
              elif new_insurance_type in insurance_options:
                  # Valid insurance type from config
                  data['insurance_type'] = new_insurance_type
+                 data['insurance_type_source'] = 'MANUAL'
                  break
 
              else:
@@ -868,11 +871,13 @@ def bulk_edit_insurance_types(detailed_patient_data, insurance_options):
                  confirm = input("Code '{}' not found in configuration. Use it anyway? (y/n): ".format(new_insurance_type)).strip().lower()
                  if confirm in ['y', 'yes']:
                      data['insurance_type'] = new_insurance_type
+                     data['insurance_type_source'] = 'MANUAL'
                      break
                  else:
                      print("Invalid insurance type. Please enter a valid code or type 'LIST' to see options.")
                      continue
 
+
  def review_and_confirm_changes(detailed_patient_data, insurance_options):
      # Review and confirm changes
      print("\nReview changes:")
@@ -29,12 +29,21 @@ def display_patient_summaries(detailed_patient_data):
      Displays summaries of all patients and their suggested endpoints.
      """
      print("\nSummary of patient details and suggested endpoint:")
-     for index, summary in enumerate(detailed_patient_data, start=1):
+
+     # Sort by insurance_type_source priority for clearer grouping
+     priority = {'API': 0, 'MANUAL': 1, 'DEFAULT': 2, 'DEFAULT_FALLBACK': 2}
+     def sort_key(item):
+         src = item.get('insurance_type_source', '')
+         return (priority.get(src, 2), item.get('surgery_date', ''), item.get('patient_name', ''))
+     sorted_data = sorted(detailed_patient_data, key=sort_key)
+
+     for index, summary in enumerate(sorted_data, start=1):
          try:
              display_file_summary(index, summary)
          except KeyError as e:
              print("Summary at index {} is missing key: {}".format(index, e))
          print()  # add blank line for improved readability.
+     print("Legend: Src=API (auto), MAN (manual), DEF (default) | [DUP] indicates a previously submitted matching claim")
 
  def display_file_summary(index, summary):
      # Ensure surgery_date is converted to a datetime object
@@ -42,13 +51,15 @@ def display_file_summary(index, summary):
 
      # Add header row if it's the first index
      if index == 1:
-         print("{:<3} {:5} {:<10} {:20} {:15} {:3} {:20}".format(
-             "No.", "Date", "ID", "Name", "Primary Ins.", "IT", "Current Endpoint"
+         print("{:<3} {:5} {:<10} {:<20} {:<15} {:<3} {:<5} {:<8} {:<20}".format(
+             "No.", "Date", "ID", "Name", "Primary Ins.", "IT", "Src", "Flag", "Current Endpoint"
          ))
-         print("-"*82)
+         print("-"*100)
 
      # Check if insurance_type is available; if not, set a default placeholder (this should already be '12' at this point)
      insurance_type = summary.get('insurance_type', '--')
+     insurance_source = summary.get('insurance_type_source', '')
+     duplicate_flag = '[DUP]' if summary.get('duplicate_candidate') else ''
 
      # Get the effective endpoint (confirmed > user preference > suggestion > default)
      effective_endpoint = (summary.get('confirmed_endpoint') or
@@ -61,13 +72,24 @@
      else:
          insurance_display = insurance_type[:3] if insurance_type else '--'
 
-     # Displays the summary of a file.
-     print("{:02d}. {:5} ({:<8}) {:20} {:15} {:3} {:20}".format(
+     # Shorten source for compact display
+     if insurance_source in ['DEFAULT_FALLBACK', 'DEFAULT']:
+         source_display = 'DEF'
+     elif insurance_source == 'MANUAL':
+         source_display = 'MAN'
+     elif insurance_source == 'API':
+         source_display = 'API'
+     else:
+         source_display = ''
+
+     print("{:02d}. {:5} ({:<8}) {:<20} {:<15} {:<3} {:<5} {:<8} {:<20}".format(
          index,
          surgery_date.strftime("%m-%d"),
          summary['patient_id'],
          summary['patient_name'][:20],
          summary['primary_insurance'][:15],
          insurance_display,
+         source_display,
+         duplicate_flag,
          effective_endpoint[:20])
      )
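For reference, an illustrative rendering of the widened header and one summary row (values are made up; the format strings mirror the ones added above).

print("{:<3} {:5} {:<10} {:<20} {:<15} {:<3} {:<5} {:<8} {:<20}".format(
    "No.", "Date", "ID", "Name", "Primary Ins.", "IT", "Src", "Flag", "Current Endpoint"))
print("-" * 100)
print("{:02d}. {:5} ({:<8}) {:<20} {:<15} {:<3} {:<5} {:<8} {:<20}".format(
    1, "08-12", "12345", "DOE, JANE", "MEDICARE", "MB", "API", "[DUP]", "EXAMPLE_ENDPOINT"))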