pylantir 0.0.6__py3-none-any.whl → 0.0.8__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
pylantir/cli/run.py CHANGED
@@ -212,9 +212,6 @@ def main() -> None:
     # Extract allowed AE Titles (default to empty list if missing)
     allowed_aet = config.get("allowed_aet", [])
 
-    # Extract mri_visit_session_mapping (default to empty list if missing)
-    mri_visit_session_mapping = config.get("mri_visit_session_mapping", {})
-
     # Extract the site id
     site = config.get("site", None)
 
@@ -229,7 +226,6 @@ def main() -> None:
     with ThreadPoolExecutor(max_workers=2) as executor:
         future = executor.submit(
             sync_redcap_to_db_repeatedly,
-            mri_visit_mapping=mri_visit_session_mapping,
             site_id=site,
             protocol=protocol,
             redcap2wl=redcap2wl,
pylantir/models.py CHANGED
@@ -37,6 +37,7 @@ class WorklistItem(Base):
     procedure_description = Column(String(200))
     protocol_name = Column(String(100))
     station_name = Column(String(100))
+    hisris_coding_designator = Column(String(100))
     performed_procedure_step_status = Column(String, default="SCHEDULED")
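The new `hisris_coding_designator` column feeds the CodingSchemeDesignator of the Scheduled Protocol Code Sequence (see the mwl_server.py changes below). One hedged operational note, not something pylantir ships: SQLAlchemy's `create_all()` does not alter tables that already exist, so a database created by 0.0.6 would need the column added by hand, roughly along these lines (file and table names are assumptions):

```python
# Hedged sketch of a one-off migration for a worklist DB created by pylantir 0.0.6.
# SQLAlchemy's create_all() only creates missing tables; it does not add new columns.
# The file name and table name are assumptions; check pylantir/db_setup.py and
# pylantir/models.py for the real values before touching any database.
import sqlite3

conn = sqlite3.connect("worklist.db")  # assumed path
conn.execute(
    "ALTER TABLE worklist_items "      # assumed table name
    "ADD COLUMN hisris_coding_designator VARCHAR(100)"
)
conn.commit()
conn.close()
```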
pylantir/mwl_server.py CHANGED
@@ -45,39 +45,65 @@ def row_to_mwl_dataset(row: WorklistItem) -> Dataset:
     ds = Dataset()
 
     # Standard Patient Attributes
-    ds.PatientName = row.patient_name
-    ds.PatientID = row.patient_id
-    if row.patient_birth_date:
-        ds.PatientBirthDate = row.patient_birth_date
-    if row.patient_sex:
-        ds.PatientSex = row.patient_sex
-    if row.study_instance_uid:
-        ds.StudyInstanceUID = row.study_instance_uid
-    if row.patient_weight_lb:
-        ds.PatientWeight = row.patient_weight_lb or "100"
-    if row.study_description:
-        ds.StudyDescription = row.study_description
-
-    # Protocol-related fields
-    if row.protocol_name:
-        ds.ProtocolName = row.protocol_name # (0018,1030)
+    ds.PatientName = row.patient_name or "UNKNOWN"
+    ds.PatientID = row.patient_id or "UNKNOWN"
+    # ds.IssuerOfPatientID = row.issuer_of_patient_id
+    ds.PatientBirthDate = row.patient_birth_date or ""
+    ds.PatientSex = row.patient_sex or ""
+    # ds.OtherPatientIDs = row.other_patient_ids or ""
+    # ds.PatientAge = row.patient_age or ""
+    # ds.PatientSize = row.patient_size or "0"
+    ds.PatientWeight = row.patient_weight_lb or "100"
+    # ds.MedicalAlerts = row.medical_alerts or ""
+    # ds.Allergies = row.allergies or ""
+    # ds.AdditionalPatientHistory = row.additional_patient_history or ""
+    # ds.PregnancyStatus = row.pregnancy_status or "0"
+
+    # Study-Level Attributes
+    ds.StudyInstanceUID = row.study_instance_uid or ""
+    # ds.StudyDate = row.study_date or ""
+    # ds.StudyTime = row.study_time or ""
+    # ds.AccessionNumber = row.accession_number or ""
+    ds.ReferringPhysicianName = row.referring_physician_name or ""
+    ds.StudyDescription = row.study_description or ""
+    # ds.NameOfPhysiciansReadingStudy = row.reading_physicians or ""
+    # ds.OperatorsName = row.operators_name or ""
+
+    # Requested Procedure Attributes
+    # ds.RequestingPhysician = row.requesting_physician or ""
+    # ds.RequestedProcedureDescription = row.requested_procedure_description or "111"
+    ds.RequestedProcedureDescription = "111"
+    # ds.RequestedProcedureID = row.requested_procedure_id or ""
+    ds.RequestedProcedureID = "111"
+    # Admission & Patient State
+    # ds.AdmissionID = row.admission_id or ""
+    # ds.IssuerOfAdmissionID = row.issuer_of_admission_id or ""
+    # ds.SpecialNeeds = row.special_needs or ""
+    # ds.CurrentPatientLocation = row.current_patient_location or ""
+    # ds.PatientState = row.patient_state or ""
 
     # Scheduled Procedure Step Sequence
     sps = Dataset()
     sps.Modality = row.modality or "MR"
-    sps.ScheduledStationAETitle = row.scheduled_station_aetitle or ""
+    # sps.ScheduledStationAETitle = row.scheduled_station_aetitle or ""
     sps.ScheduledProcedureStepStartDate = row.scheduled_start_date or ""
     sps.ScheduledProcedureStepStartTime = row.scheduled_start_time or ""
-    sps.ScheduledPerformingPhysicianName = row.performing_physician or ""
-    sps.ScheduledProcedureStepDescription = row.procedure_description or "DEFAULT_PROCEDURE"
+    # sps.ScheduledPerformingPhysicianName = row.performing_physician or ""
+    sps.ScheduledProcedureStepDescription = row.protocol_name or "DEFAULT_PROCEDURE"
     sps.ScheduledStationName = row.station_name or ""
+    sps.ScheduledProcedureStepStatus = row.performed_procedure_step_status or "SCHEDULED"
 
-    # Adding Local Protocol to Scheduled Protocol Code Sequence. This populates the recomended protocol name in the MWL.
-    # TODO: improve the protocol name handling
+    # Protocol Code Sequence
+    # you need to map action code (CodeValue) and coding scheme designator (CodingSchemeDesignator) for this to work
     if row.protocol_name:
         protocol_seq = Dataset()
-        protocol_seq.CodeValue = row.protocol_name[:16] # Trim long names
-        protocol_seq.CodingSchemeDesignator = "LOCAL"
+        protocol_seq.CodeValue = row.protocol_name
+        # protocol_seq.CodeValue = "CPIP"
+
+        # protocol_seq.ActionCode = "cpipmar03"
+        # protocol_seq.CodingSchemeDesignator = "GEHC"
+        protocol_seq.CodingSchemeDesignator = row.hisris_coding_designator
+        # protocol_seq.CodeMeaning = row.protocol_name
        protocol_seq.CodeMeaning = row.protocol_name
        sps.ScheduledProtocolCodeSequence = [protocol_seq]
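The rewritten `row_to_mwl_dataset` now always emits the patient- and study-level attributes (falling back to `""`, `"UNKNOWN"`, or placeholder values instead of omitting tags) and takes both the step description and the protocol code from `protocol_name`/`hisris_coding_designator`. A minimal pydicom sketch of the resulting structure, with invented values standing in for a database row; the enclosing ScheduledProcedureStepSequence is assumed, since it is assigned outside the hunk shown:

```python
from pydicom.dataset import Dataset

ds = Dataset()
ds.PatientName = "cpip-id-0001^fa-0002"            # invented values
ds.PatientID = "sub-0001_ses-1_fam-0002_site-792"

sps = Dataset()
sps.Modality = "MR"
sps.ScheduledProcedureStepDescription = "BRAIN_MRI_3T"   # row.protocol_name
sps.ScheduledProcedureStepStatus = "SCHEDULED"

code = Dataset()
code.CodeValue = "BRAIN_MRI_3T"          # row.protocol_name
code.CodingSchemeDesignator = "GEHC"     # row.hisris_coding_designator
code.CodeMeaning = "BRAIN_MRI_3T"
sps.ScheduledProtocolCodeSequence = [code]   # Scheduled Protocol Code Sequence (0040,0008)

ds.ScheduledProcedureStepSequence = [sps]    # (0040,0100), assigned outside the hunk shown
print(ds)
```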
@@ -103,7 +129,11 @@ def handle_mwl_find(event):
 
     # Only return worklist entries that are still scheduled
     query = query.filter(
-        or_(WorklistItem.performed_procedure_step_status == "SCHEDULED", WorklistItem.performed_procedure_step_status == "IN_PROGRESS")
+        or_(
+            WorklistItem.performed_procedure_step_status == "SCHEDULED",
+            WorklistItem.performed_procedure_step_status == "IN_PROGRESS",
+            WorklistItem.performed_procedure_step_status == "DISCONTINUED"
+        )
     )
 
     results = query.all()
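The MWL C-FIND filter now also returns DISCONTINUED entries to the querying modality. The chained equality checks are equivalent to a single membership test; a sketch of that alternative form (the import path is an assumption):

```python
from pylantir.models import WorklistItem  # assumed import path

ACTIVE_STATUSES = ["SCHEDULED", "IN_PROGRESS", "DISCONTINUED"]

def active_worklist_items(session):
    # Same result set as the or_() chain above, written as a membership test.
    return (
        session.query(WorklistItem)
        .filter(WorklistItem.performed_procedure_step_status.in_(ACTIVE_STATUSES))
        .all()
    )
```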
@@ -125,85 +155,86 @@ def handle_mpps_n_create(event):
     """Handles N-CREATE for MPPS (Procedure Start)."""
     req = event.request
 
-    if req.AffectedSOPInstanceUID is None:
-        lgr.error("MPPS N-CREATE failed: Missing Affected SOP Instance UID")
-        return 0x0106, None # Invalid Attribute Value
-
-    # Prevent duplicate MPPS instances
-    if req.AffectedSOPInstanceUID in managed_instances:
-        lgr.error("MPPS N-CREATE failed: Duplicate SOP Instance UID")
-        return 0x0111, None # Duplicate SOP Instance
-
     attr_list = event.attribute_list
-
-    if "PerformedProcedureStepStatus" not in attr_list:
-        lgr.error("MPPS N-CREATE failed: Missing PerformedProcedureStepStatus")
-        return 0x0120, None # Missing Attribute
-    if attr_list.PerformedProcedureStepStatus.upper() != "IN PROGRESS":
-        lgr.error("MPPS N-CREATE failed: Invalid PerformedProcedureStepStatus")
-        return 0x0106, None # Invalid Attribute Value
-
     ds = Dataset()
     ds.SOPClassUID = ModalityPerformedProcedureStep
-    ds.SOPInstanceUID = req.AffectedSOPInstanceUID
-
-    # Copy attributes
+    ds.SOPInstanceUID = req.AffectedSOPInstanceUID or "UNKNOWN_UID"
     ds.update(attr_list)
 
     # Store MPPS instance
     managed_instances[ds.SOPInstanceUID] = ds
 
+    # Validation logic (log warnings, don't return errors to MRI scanner)
+    if not req.AffectedSOPInstanceUID:
+        lgr.warning("MPPS N-CREATE: Missing Affected SOP Instance UID")
+    elif req.AffectedSOPInstanceUID in managed_instances:
+        lgr.warning("MPPS N-CREATE: Duplicate SOP Instance UID received")
+
+    status = attr_list.get("PerformedProcedureStepStatus", "").upper()
+    if not status:
+        lgr.warning("MPPS N-CREATE: Missing PerformedProcedureStepStatus")
+    elif status != "IN PROGRESS":
+        lgr.warning(f"MPPS N-CREATE: Unexpected PerformedProcedureStepStatus = {status}")
+
     # Update database: Set status to IN_PROGRESS
-    study_uid = ds.get("StudyInstanceUID", None)
+    patient_id = ds.get("PatientID", None)
     session = Session()
-    if study_uid:
-        entry = session.query(WorklistItem).filter_by(study_instance_uid=study_uid).first()
+    if patient_id:
+        entry = session.query(WorklistItem).filter_by(patient_id=patient_id).first()
         if entry:
             entry.performed_procedure_step_status = "IN_PROGRESS"
             session.commit()
-            lgr.info(f"DB updated: StudyInstanceUID {study_uid} set to IN_PROGRESS")
-    session.close()
+            lgr.info(f"DB updated: PatientID {patient_id} set to IN_PROGRESS")
+    else:
+        lgr.warning("MPPS N-CREATE: No PatientID found in attributes. DB update skipped.")
 
-    lgr.info(f"MPPS N-CREATE success: {ds.SOPInstanceUID} set to IN PROGRESS")
-
-    return 0x0000, ds # Success
+    session.close()
+    lgr.info(f"MPPS N-CREATE processed: {ds.SOPInstanceUID}")
 
+    return 0x0000, ds # Always return Success
 
 def handle_mpps_n_set(event):
     """Handles N-SET for MPPS (Procedure Completion)."""
     req = event.request
-    if req.RequestedSOPInstanceUID not in managed_instances:
-        lgr.error("MPPS N-SET failed: SOP Instance not recognized")
-        return 0x0112, None # No Such Object Instance
+    sop_uid = req.RequestedSOPInstanceUID
 
-    ds = managed_instances[req.RequestedSOPInstanceUID]
-    mod_list = event.attribute_list
+    if sop_uid not in managed_instances:
+        lgr.warning(f"MPPS N-SET: Unknown SOP Instance UID {sop_uid}")
+        ds = Dataset()
+        ds.SOPInstanceUID = sop_uid
+        return 0x0000, ds # Still return success
 
-    # Update MPPS instance
+    ds = managed_instances[sop_uid]
+    mod_list = event.attribute_list
     ds.update(mod_list)
 
-    # Log status update
     new_status = ds.get("PerformedProcedureStepStatus", None)
-    study_uid = ds.get("StudyInstanceUID", None)
+    patient_id = ds.get("PatientID", None)
 
-    # Update database
     session = Session()
-    if study_uid and new_status:
-        entry = session.query(WorklistItem).filter_by(study_instance_uid=study_uid).first()
+    if patient_id and new_status:
+        entry = session.query(WorklistItem).filter_by(patient_id=patient_id).first()
         if entry:
             if new_status.upper() == "COMPLETED":
                 entry.performed_procedure_step_status = "COMPLETED"
                 session.commit()
-                lgr.info(f"DB updated: StudyInstanceUID {study_uid} set to COMPLETED")
+                lgr.info(f"DB updated: PatientID {patient_id} set to COMPLETED")
             elif new_status.upper() == "DISCONTINUED":
                 entry.performed_procedure_step_status = "DISCONTINUED"
                 session.commit()
-                lgr.info(f"DB updated: StudyInstanceUID {study_uid} set to DISCONTINUED")
+                lgr.info(f"DB updated: PatientID {patient_id} set to DISCONTINUED")
+            else:
+                lgr.warning(f"MPPS N-SET: Unrecognized status {new_status}")
+        else:
+            lgr.warning(f"MPPS N-SET: No DB entry found for PatientID {patient_id}")
+    else:
+        lgr.warning("MPPS N-SET: Missing PatientID or status. No DB update.")
+
     session.close()
+    lgr.info(f"MPPS N-SET processed: {sop_uid} -> {new_status}")
 
-    lgr.info(f"MPPS N-SET success: {req.RequestedSOPInstanceUID} updated to {new_status}")
+    return 0x0000, ds # Always return Success
 
-    return 0x0000, ds # Success
 
 
 # --------------------------------------------------------------------
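For context, handlers like `handle_mwl_find`, `handle_mpps_n_create`, and `handle_mpps_n_set` are bound to pynetdicom events when the SCP is started. The following is a generic sketch of that wiring, not pylantir's actual startup code; the import paths, AE title, and address are assumptions based on the README example:

```python
# Sketch only: typical registration of MWL/MPPS handlers with pynetdicom.
from pynetdicom import AE, evt
from pynetdicom.sop_class import (
    ModalityPerformedProcedureStep,
    ModalityWorklistInformationFind,
)

from pylantir.mwl_server import (  # assumed import path
    handle_mpps_n_create,
    handle_mpps_n_set,
    handle_mwl_find,
)

ae = AE(ae_title="MWL_SERVER")
ae.add_supported_context(ModalityWorklistInformationFind)
ae.add_supported_context(ModalityPerformedProcedureStep)

handlers = [
    (evt.EVT_C_FIND, handle_mwl_find),         # MWL queries
    (evt.EVT_N_CREATE, handle_mpps_n_create),  # MPPS "procedure started"
    (evt.EVT_N_SET, handle_mpps_n_set),        # MPPS "procedure completed/discontinued"
]

# Blocks and serves until interrupted.
ae.start_server(("127.0.0.1", 4242), block=True, evt_handlers=handlers)
```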
pylantir/redcap_to_db.py CHANGED
@@ -1,5 +1,6 @@
 import os
 import logging
+import pandas as pd
 from redcap import Project
 import uuid
 from sqlalchemy.orm import sessionmaker
@@ -22,7 +23,7 @@ Session = sessionmaker(bind=engine)
 
 
 def fetch_redcap_entries(redcap_fields: list) -> list:
-    """Fetch scheduled procedures from REDCap using PyCap."""
+    """Fetch REDCap entries using PyCap and return a list of filtered dicts."""
     project = Project(REDCAP_API_URL, REDCAP_API_TOKEN)
 
     if not redcap_fields:
@@ -31,8 +32,6 @@ def fetch_redcap_entries(redcap_fields: list) -> list:
 
     # Fetch metadata to get valid REDCap field names
     valid_fields = {field["field_name"] for field in project.export_metadata()}
-
-    # Only keep fields that actually exist in REDCap
     redcap_fields = [field for field in redcap_fields if field in valid_fields]
 
     if not redcap_fields:
@@ -41,19 +40,43 @@ def fetch_redcap_entries(redcap_fields: list) -> list:
 
     lgr.info(f"Fetching REDCap data for fields: {redcap_fields}")
 
-    # Fetch records
-    records = project.export_records(fields=redcap_fields, format_type="json")
-
-    # Ensure all requested fields exist in every record (fill missing fields with None)
-    for record in records:
-        record["redcap_event_name"] = record.get("redcap_event_name", "UNKNOWN_EVENT")
-        for field in redcap_fields:
-            record.setdefault(field, None) # Fill missing fields with None
+    # Export data
+    records = project.export_records(fields=redcap_fields, format_type="df")
 
-    if not records:
+    if records.empty:
         lgr.warning("No records retrieved from REDCap.")
+        return []
+
+    filtered_records = []
+
+    # Group by 'record_id' (index level 0)
+    for record_id, group in records.groupby(level=0):
+
+        # Try to get baseline (non-repeated instrument) values
+        baseline_rows = group[group['redcap_repeat_instrument'].isna()]
+        baseline_row = baseline_rows.iloc[0] if not baseline_rows.empty else {}
+
+        # Filter for valid MRI rows only
+        mri_rows = group[
+            (group["redcap_repeat_instrument"] == "mri") &
+            (group.get("mri_instance").notna()) &
+            (group.get("mri_instance") != "")
+        ]
+
+        for _, mri_row in mri_rows.iterrows():
+            record = {"record_id": record_id}
 
-    return records
+            # Merge fields from baseline and mri_row, only include requested fields
+            for field in redcap_fields:
+                record[field] = (
+                    mri_row.get(field)
+                    if pd.notna(mri_row.get(field))
+                    else baseline_row.get(field)
+                )
+
+            filtered_records.append(record)
+
+    return filtered_records
 
 # TODO: Implement age binning for paricipants
 def age_binning():
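With `format_type="df"`, PyCap returns a pandas DataFrame whose first index level is the record ID, and `fetch_redcap_entries` now merges each record's repeated "mri" rows with its baseline (non-repeating) row. A self-contained sketch of that merge on synthetic data, to show the shape the loop expects; field names follow the diff, values are invented:

```python
import numpy as np
import pandas as pd

# Synthetic stand-in for the PyCap export: one baseline row plus two "mri" repeat rows
# for a single record_id. Field names follow the diff; the values are invented.
records = pd.DataFrame(
    {
        "redcap_repeat_instrument": [np.nan, "mri", "mri"],
        "mri_instance": [np.nan, "1", "2"],
        "mri_date": [np.nan, "2024-01-10", "2024-06-12"],
        "demo_sex": ["F", np.nan, np.nan],
    },
    index=pd.Index([101, 101, 101], name="record_id"),
)

redcap_fields = ["mri_instance", "mri_date", "demo_sex"]
filtered_records = []

for record_id, group in records.groupby(level=0):
    # Baseline (non-repeating) values for this record, if any.
    baseline_rows = group[group["redcap_repeat_instrument"].isna()]
    baseline_row = baseline_rows.iloc[0] if not baseline_rows.empty else {}

    # Keep only "mri" repeat rows that actually carry an instance value.
    mri_rows = group[
        (group["redcap_repeat_instrument"] == "mri")
        & group["mri_instance"].notna()
        & (group["mri_instance"] != "")
    ]

    for _, mri_row in mri_rows.iterrows():
        record = {"record_id": record_id}
        for field in redcap_fields:
            # Prefer the repeat-row value, fall back to the baseline value.
            value = mri_row.get(field)
            record[field] = value if pd.notna(value) else baseline_row.get(field)
        filtered_records.append(record)

# One dict per MRI visit; demo_sex is carried over from the baseline row.
print(filtered_records)
```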
@@ -75,20 +98,7 @@ def convert_weight(weight, weight_unit):
     return weight, round(weight / 0.453592, 2) # (kg, lb)
 
 
-def mapping_redcap_event_name_to_ses_id(mri_visit_mapping, redcap_event):
-    """Map REDCap event name to SES ID."""
-    try:
-        ses_id = mri_visit_mapping.get(redcap_event, None)
-        if ses_id is None:
-            raise ValueError(f"SES ID not found for REDCap event: {redcap_event}")
-        return ses_id
-    except Exception as e:
-        lgr.error(f"Error mapping REDCap event name to SES ID: {e}")
-        return None
-
-
 def sync_redcap_to_db(
-    mri_visit_mapping: dict,
     site_id: str,
     protocol: dict,
     redcap2wl: dict,
@@ -100,8 +110,9 @@ def sync_redcap_to_db(
 
     session = Session()
 
+    #TODO: Implement the repeat visit mapping
     # Extract the REDCap fields that need to be pulled
-    default_fields = ["study_id", "family_id", "youth_dob_y", "t1_date", "demo_sex"]
+    default_fields = ["record_id", "study_id", "redcap_repeat_instrument", "mri_instance", "mri_date", "mri_time", "family_id", "youth_dob_y", "t1_date", "demo_sex"]
     redcap_fields = list(redcap2wl.keys())
 
     # Ensure certain default fields are always present
@@ -116,9 +127,10 @@ def sync_redcap_to_db(
         study_id = study_id.split('-')[-1] if study_id else None
         family_id = record.get("family_id")
         family_id = family_id.split('-')[-1] if family_id else None
-        ses_id = mapping_redcap_event_name_to_ses_id(
-            mri_visit_mapping, record.get("redcap_event_name")
-        )
+        repeat_id = record.get("redcap_repeat_instance") if record.get("redcap_repeat_instance") != "" else "1" # Default to 1 if not set
+        lgr.debug(f"Processing record for Study ID: {study_id} and Family ID: {family_id}")
+        lgr.debug(f"This is the repeat event {repeat_id}")
+        ses_id = record.get("mri_instance")
 
         PatientName = f"cpip-id-{study_id}^fa-{family_id}"
         PatientID = f"sub-{study_id}_ses-{ses_id}_fam-{family_id}_site-{site_id}"
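With invented REDCap values, the identifier construction above yields names of this shape:

```python
# Invented REDCap values, purely for illustration of the naming scheme.
study_id = "CPIP-0123".split('-')[-1]   # -> "0123"
family_id = "FAM-045".split('-')[-1]    # -> "045"
ses_id, site_id = "2", "792"

PatientName = f"cpip-id-{study_id}^fa-{family_id}"
PatientID = f"sub-{study_id}_ses-{ses_id}_fam-{family_id}_site-{site_id}"
# PatientName -> "cpip-id-0123^fa-045"
# PatientID   -> "sub-0123_ses-2_fam-045_site-792"
```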
@@ -144,7 +156,8 @@ def sync_redcap_to_db(
             existing_entry.patient_birth_date = record.get("youth_dob_y", "19000101")
             existing_entry.patient_sex = record.get("demo_sex")
             existing_entry.modality = record.get("modality", "MR")
-
+            existing_entry.scheduled_start_date = record.get("mri_date")
+            existing_entry.scheduled_start_time = record.get("mri_time")
             # Dynamically update DICOM worklist fields from REDCap
             for redcap_field, dicom_field in redcap2wl.items():
                 if redcap_field in record:
@@ -169,9 +182,10 @@ def sync_redcap_to_db(
                 patient_birth_date=f"{record.get('youth_dob_y', '2012')}0101",
                 patient_sex=record.get("demo_sex"),
                 modality=record.get("modality", "MR"),
-                # scheduled_start_date=record.get("scheduled_date"),
-                # scheduled_start_time=record.get("scheduled_time"),
+                scheduled_start_date=record.get("mri_date"),
+                scheduled_start_time=record.get("mri_time"),
                 protocol_name=protocol.get(site_id, "DEFAULT_PROTOCOL"),
+                hisris_coding_designator=protocol.get("mapping", "scannermapper"),
                 # patient_weight_kg=patient_weight_kg,
                 patient_weight_lb=record.get("patient_weight_lb", ""),
                 # referring_physician_name=record.get("referring_physician"),
@@ -188,7 +202,6 @@ def sync_redcap_to_db(
 
 
 def sync_redcap_to_db_repeatedly(
-    mri_visit_mapping=None,
     site_id=None,
     protocol=None,
     redcap2wl=None,
@@ -201,7 +214,6 @@ def sync_redcap_to_db_repeatedly(
     while not STOP_EVENT.is_set():
         try:
             sync_redcap_to_db(
-                mri_visit_mapping=mri_visit_mapping,
                 site_id=site_id,
                 protocol=protocol,
                 redcap2wl=redcap2wl,
@@ -220,7 +232,6 @@ if __name__ == "__main__":
     # from a signal handler or from another part of your code.
     try:
         sync_redcap_to_db_repeatedly(
-            mri_visit_mapping=None,
            site_id=None,
            protocol=None,
            redcap2wl=None,
pylantir-0.0.6.dist-info/METADATA → pylantir-0.0.8.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: pylantir
-Version: 0.0.6
+Version: 0.0.8
 Summary: Python - DICOM Modality WorkList
 Author-email: Milton Camacho <miltoncamachoicc@gmail.com>
 Requires-Python: >=3.11.1
@@ -13,12 +13,14 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+License-File: LICENSE
 Requires-Dist: pynetdicom
 Requires-Dist: sqlalchemy
 Requires-Dist: PyCap
 Requires-Dist: uuid
 Requires-Dist: coloredlogs
 Requires-Dist: python-dotenv
+Requires-Dist: pandas
 Requires-Dist: pyspark>=3.0.0 ; extra == "spark"
 Requires-Dist: bandit[toml]==1.7.5 ; extra == "test"
 Requires-Dist: black==23.3.0 ; extra == "test"
@@ -75,7 +77,7 @@ export REDCAP_API_TOKEN=<your API token>
 Start a server called with AEtitle MWL_SERVER.
 
 ```bash
-pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER
+pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER --pylantir_config Path/to/your/config.json
 ```
 
 ## Tests
@@ -132,9 +134,8 @@ usage: pylantir [-h] [--AEtitle AETITLE] [--ip IP] [--port PORT] [--pylantir_con
 - **--port PORT**: Port for the server
 - **--pylantir_config PYLANTIR_CONFIG**: Path to the configuration JSON file containing pylantir configs:
   - **allowed_aet**: List of allowed AE titles e.g. `["MRI_SCANNER", "MRI_SCANNER_2"]`
-  - **mri_visit_session_mapping**: Mapping of MRI visit to session e.g., `{"T1": "1", "T2": "2"}`
   - **site**: Site ID:string
-  - **protocol**: `{"site": "protocol_name"}`
+  - **protocol**: `{"site": "protocol_name", "mapping": "HIS/RIS mapping"}`
   - **redcap2wl**: Dictionary of REDCap fields to worklist fields mapping e.g., `{"redcap_field": "worklist_field"}`
 - **--mpps_action {create,set}**: Action to perform for MPPS either create or set
 - **--mpps_status {COMPLETED,DISCONTINUED}**: Status to set for MPPS either COMPLETED or DISCONTINUED
@@ -152,20 +153,17 @@ As a default pylantir will try to read a JSON structured file with the following
     "db_echo": "False",
     "db_update_interval": 60,
     "allowed_aet": [],
-    "mri_visit_session_mapping": {
-        "t1_arm_1": "1",
-        "t2_arm_1": "2",
-        "t3_arm_1": "3"
-    },
     "site": "792",
     "redcap2wl": {
         "study_id": "study_id",
+        "instrument": "redcap_repeat_instrument",
+        "session_id" : "mri_instance",
         "family_id": "family_id",
         "youth_dob_y": "youth_dob_y",
         "t1_date": "t1_date",
         "demo_sex": "demo_sex",
-        "scheduled_date": "scheduled_start_date",
-        "scheduled_time": "scheduled_start_time",
+        "scheduled_date": "mri_date",
+        "scheduled_time": "mri_time",
         "mri_wt_lbs": "patient_weight_lb",
         "referring_physician": "referring_physician_name",
         "performing_physician": "performing_physician",
@@ -173,7 +171,8 @@ As a default pylantir will try to read a JSON structured file with the following
         "status": "performed_procedure_step_status"
     },
     "protocol": {
-        "792": "BRAIN_MRI_3T"
+        "792": "BRAIN_MRI_3T",
+        "mapping": "GEHC"
     }
 }
 ```
pylantir-0.0.8.dist-info/RECORD ADDED
@@ -0,0 +1,15 @@
+pylantir/.env,sha256=qU4xxA3iOy2DQGT78CQG05ljTsFwKzgF2wXdnBpg8xQ,56
+pylantir/__init__.py,sha256=kl2Et644PvUIvziU4BTxiTD1W4_g7E0xBYCHgPE6RZc,363
+pylantir/db_setup.py,sha256=KTILsRrH7V5EaPqbCfOYYECM9mUB-AvOdjqjMM2H1n0,1333
+pylantir/models.py,sha256=7oK3NEl6Q-Q2kOFNXBsZaGn0rXfSpJY39CNXjKLt8Bg,1676
+pylantir/mwl_server.py,sha256=GMJDcK0u_KM3oa6UqQ87NxMVye2pvG2cdkcI9k_iExg,10338
+pylantir/populate_db.py,sha256=KIbkVA-EAuTlDArXMFOHkjMmVfjlsTApj7S1wpUu1bM,2207
+pylantir/redcap_to_db.py,sha256=l21SXfVQjFDyatrkY1xcIWK2bUP5xu3BSbKZIecoI4E,9153
+pylantir/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pylantir/cli/run.py,sha256=vqb7kbKsf39tI8-xjDceS4j5V-YJSaC_k0Lu6vlajmo,10072
+pylantir/config/mwl_config.json,sha256=1Ma2guYAEAXQh1z7959aZadAn3ORjBqnDDibSLcwv_g,851
+pylantir-0.0.8.dist-info/entry_points.txt,sha256=vxaxvfGppLqRt9_4sqNDdP6b2jlgpcHIwP7UQfrM1T0,50
+pylantir-0.0.8.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+pylantir-0.0.8.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+pylantir-0.0.8.dist-info/METADATA,sha256=Xg83-Nc9PFki3yLJmgU4bvn1yrkWUOCi5lvq1ogp7dA,7175
+pylantir-0.0.8.dist-info/RECORD,,
pylantir-0.0.6.dist-info/WHEEL → pylantir-0.0.8.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.10.1
+Generator: flit 3.11.0
 Root-Is-Purelib: true
 Tag: py3-none-any
pylantir-0.0.8.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE
pylantir-0.0.6.dist-info/RECORD REMOVED
@@ -1,14 +0,0 @@
-pylantir/.env,sha256=qU4xxA3iOy2DQGT78CQG05ljTsFwKzgF2wXdnBpg8xQ,56
-pylantir/__init__.py,sha256=kl2Et644PvUIvziU4BTxiTD1W4_g7E0xBYCHgPE6RZc,363
-pylantir/db_setup.py,sha256=KTILsRrH7V5EaPqbCfOYYECM9mUB-AvOdjqjMM2H1n0,1333
-pylantir/models.py,sha256=FURRE7lRd8UBGTmLD9ozcNVW8kJV-M9Pz9bovSRKlh8,1625
-pylantir/mwl_server.py,sha256=0NzZY2_uRI8QIeLeTFzQBuPyYxrKYrF1eHLNAnT4rXc,8662
-pylantir/populate_db.py,sha256=KIbkVA-EAuTlDArXMFOHkjMmVfjlsTApj7S1wpUu1bM,2207
-pylantir/redcap_to_db.py,sha256=ITiMAY-XB7B_-_8jUSAjdNqTngULjXBbWYjY4_Byh1U,8465
-pylantir/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pylantir/cli/run.py,sha256=aF_koq7zGo1q7pU68Bw776hWz_eNBvVkcm-nG_-vwIo,10293
-pylantir/config/mwl_config.json,sha256=1Ma2guYAEAXQh1z7959aZadAn3ORjBqnDDibSLcwv_g,851
-pylantir-0.0.6.dist-info/entry_points.txt,sha256=vxaxvfGppLqRt9_4sqNDdP6b2jlgpcHIwP7UQfrM1T0,50
-pylantir-0.0.6.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-pylantir-0.0.6.dist-info/METADATA,sha256=n3YnxTkvEpN8rVpelcMi_IwFmfdt40GQSrrm0JQ4dRc,7176
-pylantir-0.0.6.dist-info/RECORD,,