pylantir 0.0.7__tar.gz → 0.0.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pylantir-0.0.9/LICENSE +21 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/PKG-INFO +12 -13
- {pylantir-0.0.7 → pylantir-0.0.9}/README.md +8 -11
- {pylantir-0.0.7 → pylantir-0.0.9}/pyproject.toml +3 -2
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/cli/run.py +0 -4
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/models.py +4 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/mwl_server.py +42 -37
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/redcap_to_db.py +60 -37
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/.env +0 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/__init__.py +0 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/cli/__init__.py +0 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/config/mwl_config.json +0 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/db_setup.py +0 -0
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/populate_db.py +0 -0

pylantir-0.0.9/LICENSE (ADDED)

@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE

{pylantir-0.0.7 → pylantir-0.0.9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pylantir
-Version: 0.0.7
+Version: 0.0.9
 Summary: Python - DICOM Modality WorkList
 Author-email: Milton Camacho <miltoncamachoicc@gmail.com>
 Requires-Python: >=3.11.1
@@ -13,12 +13,14 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+License-File: LICENSE
 Requires-Dist: pynetdicom
 Requires-Dist: sqlalchemy
 Requires-Dist: PyCap
 Requires-Dist: uuid
 Requires-Dist: coloredlogs
 Requires-Dist: python-dotenv
+Requires-Dist: pandas
 Requires-Dist: pyspark>=3.0.0 ; extra == "spark"
 Requires-Dist: bandit[toml]==1.7.5 ; extra == "test"
 Requires-Dist: black==23.3.0 ; extra == "test"
@@ -29,7 +31,7 @@ Requires-Dist: flake8-formatter_junit_xml ; extra == "test"
 Requires-Dist: flake8 ; extra == "test"
 Requires-Dist: flake8-pyproject ; extra == "test"
 Requires-Dist: pre-commit==3.3.1 ; extra == "test"
-Requires-Dist: pylint==
+Requires-Dist: pylint==3.3.6 ; extra == "test"
 Requires-Dist: pylint_junit ; extra == "test"
 Requires-Dist: pytest-cov==4.0.0 ; extra == "test"
 Requires-Dist: pytest-mock<3.10.1 ; extra == "test"
@@ -75,7 +77,7 @@ export REDCAP_API_TOKEN=<your API token>
 Start a server called with AEtitle MWL_SERVER.
 
 ```bash
-pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER
+pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER --pylantir_config Path/to/your/config.json
 ```
 
 ## Tests
@@ -132,9 +134,8 @@ usage: pylantir [-h] [--AEtitle AETITLE] [--ip IP] [--port PORT] [--pylantir_con
 - **--port PORT**: Port for the server
 - **--pylantir_config PYLANTIR_CONFIG**: Path to the configuration JSON file containing pylantir configs:
     - **allowed_aet**: List of allowed AE titles e.g. `["MRI_SCANNER", "MRI_SCANNER_2"]`
-    - **mri_visit_session_mapping**: Mapping of MRI visit to session e.g., `{"T1": "1", "T2": "2"}`
     - **site**: Site ID:string
-    - **protocol**: `{"site": "protocol_name"}`
+    - **protocol**: `{"site": "protocol_name", "mapping": "HIS/RIS mapping"}`
    - **redcap2wl**: Dictionary of REDCap fields to worklist fields mapping e.g., `{"redcap_field": "worklist_field"}`
 - **--mpps_action {create,set}**: Action to perform for MPPS either create or set
 - **--mpps_status {COMPLETED,DISCONTINUED}**: Status to set for MPPS either COMPLETED or DISCONTINUED
@@ -152,20 +153,17 @@ As a default pylantir will try to read a JSON structured file with the following
     "db_echo": "False",
     "db_update_interval": 60,
     "allowed_aet": [],
-    "mri_visit_session_mapping": {
-        "t1_arm_1": "1",
-        "t2_arm_1": "2",
-        "t3_arm_1": "3"
-    },
     "site": "792",
     "redcap2wl": {
         "study_id": "study_id",
+        "instrument": "redcap_repeat_instrument",
+        "session_id" : "mri_instance",
         "family_id": "family_id",
         "youth_dob_y": "youth_dob_y",
         "t1_date": "t1_date",
         "demo_sex": "demo_sex",
-        "scheduled_date": "
-        "scheduled_time": "
+        "scheduled_date": "mri_date",
+        "scheduled_time": "mri_time",
         "mri_wt_lbs": "patient_weight_lb",
         "referring_physician": "referring_physician_name",
         "performing_physician": "performing_physician",
@@ -173,7 +171,8 @@ As a default pylantir will try to read a JSON structured file with the following
         "status": "performed_procedure_step_status"
     },
     "protocol": {
-        "792": "BRAIN_MRI_3T"
+        "792": "BRAIN_MRI_3T",
+        "mapping": "GEHC"
     }
 }
 ```

{pylantir-0.0.7 → pylantir-0.0.9}/README.md

@@ -30,7 +30,7 @@ export REDCAP_API_TOKEN=<your API token>
 Start a server called with AEtitle MWL_SERVER.
 
 ```bash
-pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER
+pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER --pylantir_config Path/to/your/config.json
 ```
 
 ## Tests
@@ -87,9 +87,8 @@ usage: pylantir [-h] [--AEtitle AETITLE] [--ip IP] [--port PORT] [--pylantir_con
 - **--port PORT**: Port for the server
 - **--pylantir_config PYLANTIR_CONFIG**: Path to the configuration JSON file containing pylantir configs:
     - **allowed_aet**: List of allowed AE titles e.g. `["MRI_SCANNER", "MRI_SCANNER_2"]`
-    - **mri_visit_session_mapping**: Mapping of MRI visit to session e.g., `{"T1": "1", "T2": "2"}`
     - **site**: Site ID:string
-    - **protocol**: `{"site": "protocol_name"}`
+    - **protocol**: `{"site": "protocol_name", "mapping": "HIS/RIS mapping"}`
     - **redcap2wl**: Dictionary of REDCap fields to worklist fields mapping e.g., `{"redcap_field": "worklist_field"}`
 - **--mpps_action {create,set}**: Action to perform for MPPS either create or set
 - **--mpps_status {COMPLETED,DISCONTINUED}**: Status to set for MPPS either COMPLETED or DISCONTINUED
@@ -107,20 +106,17 @@ As a default pylantir will try to read a JSON structured file with the following
     "db_echo": "False",
     "db_update_interval": 60,
     "allowed_aet": [],
-    "mri_visit_session_mapping": {
-        "t1_arm_1": "1",
-        "t2_arm_1": "2",
-        "t3_arm_1": "3"
-    },
     "site": "792",
     "redcap2wl": {
         "study_id": "study_id",
+        "instrument": "redcap_repeat_instrument",
+        "session_id" : "mri_instance",
         "family_id": "family_id",
         "youth_dob_y": "youth_dob_y",
         "t1_date": "t1_date",
         "demo_sex": "demo_sex",
-        "scheduled_date": "
-        "scheduled_time": "
+        "scheduled_date": "mri_date",
+        "scheduled_time": "mri_time",
         "mri_wt_lbs": "patient_weight_lb",
         "referring_physician": "referring_physician_name",
         "performing_physician": "performing_physician",
@@ -128,7 +124,8 @@ As a default pylantir will try to read a JSON structured file with the following
         "status": "performed_procedure_step_status"
     },
     "protocol": {
-        "792": "BRAIN_MRI_3T"
+        "792": "BRAIN_MRI_3T",
+        "mapping": "GEHC"
     }
 }
 ```
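
The README hunks above describe the 0.0.9 configuration shape: `mri_visit_session_mapping` is gone, `redcap2wl` gains `instrument` and `session_id` entries, and `protocol` gains a `mapping` key for the HIS/RIS coding scheme designator. Below is a minimal sketch of writing such a config from Python; every value is illustrative and mirrors the README sample rather than a real deployment.

```python
# Sketch of a 0.0.9-style pylantir config; keys follow the README above, values are examples.
import json

config = {
    "db_echo": "False",
    "db_update_interval": 60,
    "allowed_aet": ["MRI_SCANNER"],
    "site": "792",
    "redcap2wl": {
        "study_id": "study_id",
        "instrument": "redcap_repeat_instrument",  # new in 0.0.9
        "session_id": "mri_instance",              # new in 0.0.9
        "scheduled_date": "mri_date",
        "scheduled_time": "mri_time",
    },
    "protocol": {
        "792": "BRAIN_MRI_3T",
        "mapping": "GEHC",                         # new in 0.0.9: HIS/RIS coding scheme designator
    },
}

with open("config.json", "w") as fh:
    json.dump(config, fh, indent=4)

# Then start the server pointing at it:
#   pylantir start --ip 127.0.0.1 --port 4242 --AEtitle MWL_SERVER --pylantir_config config.json
```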

{pylantir-0.0.7 → pylantir-0.0.9}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "pylantir"
-version = "0.0.7"
+version = "0.0.9"
 authors = [
     {name = "Milton Camacho", email = "miltoncamachoicc@gmail.com"},
 ]
@@ -30,6 +30,7 @@ dependencies = [
     "uuid",
     "coloredlogs",
     "python-dotenv",
+    "pandas",
 ]
 
 [project.scripts]
@@ -49,7 +50,7 @@ test = [
     "flake8",
     "flake8-pyproject",
     "pre-commit==3.3.1",
-    "pylint==
+    "pylint==3.3.6",
     "pylint_junit",
     "pytest-cov==4.0.0",
     "pytest-mock<3.10.1",

{pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/cli/run.py

@@ -212,9 +212,6 @@ def main() -> None:
     # Extract allowed AE Titles (default to empty list if missing)
     allowed_aet = config.get("allowed_aet", [])
 
-    # Extract mri_visit_session_mapping (default to empty list if missing)
-    mri_visit_session_mapping = config.get("mri_visit_session_mapping", {})
-
     # Extract the site id
     site = config.get("site", None)
 
@@ -229,7 +226,6 @@ def main() -> None:
     with ThreadPoolExecutor(max_workers=2) as executor:
         future = executor.submit(
             sync_redcap_to_db_repeatedly,
-            mri_visit_mapping=mri_visit_session_mapping,
             site_id=site,
             protocol=protocol,
             redcap2wl=redcap2wl,
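
The run.py change above simply stops passing `mri_visit_mapping` into the background REDCap sync. For orientation, here is a minimal sketch of that background-sync pattern (a ThreadPoolExecutor worker polling until a stop event is set); `sync_repeatedly` is a stand-in, not pylantir's actual `sync_redcap_to_db_repeatedly`.

```python
# Minimal sketch of the pattern used in cli/run.py: run the REDCap sync loop in a
# worker thread while the main thread would serve the MWL. The loop body is a
# placeholder; the real loop calls sync_redcap_to_db(...) every db_update_interval.
from concurrent.futures import ThreadPoolExecutor
import threading
import time

STOP_EVENT = threading.Event()

def sync_repeatedly(site_id=None, protocol=None, redcap2wl=None, interval=5):
    while not STOP_EVENT.is_set():
        print(f"syncing worklist for site {site_id} ...")  # placeholder for the DB sync
        STOP_EVENT.wait(interval)

with ThreadPoolExecutor(max_workers=2) as executor:
    future = executor.submit(
        sync_repeatedly,
        site_id="792",
        protocol={"792": "BRAIN_MRI_3T", "mapping": "GEHC"},
        redcap2wl={},
    )
    time.sleep(1)      # the real CLI starts the DICOM server here and blocks
    STOP_EVENT.set()   # signal the sync loop to stop
    future.result()
```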

{pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/models.py

@@ -37,6 +37,10 @@ class WorklistItem(Base):
     procedure_description = Column(String(200))
     protocol_name = Column(String(100))
     station_name = Column(String(100))
+    try:
+        hisris_coding_designator = Column(String(100))
+    except:
+        lgr.warning("Could not get hisris_coding_designator check models.py ")
     performed_procedure_step_status = Column(String, default="SCHEDULED")
 
 
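
models.py gains a `hisris_coding_designator` column on `WorklistItem`, wrapped in a class-level try/except. A simplified, stand-alone sketch of the relevant columns is shown below; the table name and primary key are assumptions added for illustration, not pylantir's exact model.

```python
# Simplified sketch of the WorklistItem columns touched by this release.
# Column names follow the diff; __tablename__ and the id column are assumed here.
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class WorklistItem(Base):
    __tablename__ = "worklist"                  # assumed table name
    id = Column(Integer, primary_key=True)      # assumed surrogate key
    patient_id = Column(String(100))
    protocol_name = Column(String(100))
    station_name = Column(String(100))
    # New in 0.0.9: HIS/RIS coding scheme designator (e.g. "GEHC"), taken from the
    # config's protocol["mapping"] and echoed into ScheduledProtocolCodeSequence.
    hisris_coding_designator = Column(String(100))
    performed_procedure_step_status = Column(String, default="SCHEDULED")
```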

{pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/mwl_server.py

@@ -94,10 +94,16 @@ def row_to_mwl_dataset(row: WorklistItem) -> Dataset:
     sps.ScheduledProcedureStepStatus = row.performed_procedure_step_status or "SCHEDULED"
 
     # Protocol Code Sequence
+    # you need to map action code (CodeValue) and coding scheme designator (CodingSchemeDesignator) for this to work
     if row.protocol_name:
         protocol_seq = Dataset()
-        protocol_seq.CodeValue = row.protocol_name
-        protocol_seq.
+        protocol_seq.CodeValue = row.protocol_name
+        # protocol_seq.CodeValue = "CPIP"
+
+        # protocol_seq.ActionCode = "cpipmar03"
+        # protocol_seq.CodingSchemeDesignator = "GEHC"
+        protocol_seq.CodingSchemeDesignator = row.hisris_coding_designator
+        # protocol_seq.CodeMeaning = row.protocol_name
         protocol_seq.CodeMeaning = row.protocol_name
         sps.ScheduledProtocolCodeSequence = [protocol_seq]
 
@@ -149,34 +155,27 @@ def handle_mpps_n_create(event):
     """Handles N-CREATE for MPPS (Procedure Start)."""
     req = event.request
 
-    if req.AffectedSOPInstanceUID is None:
-        lgr.error("MPPS N-CREATE failed: Missing Affected SOP Instance UID")
-        return 0x0106, None # Invalid Attribute Value
-
-    # Prevent duplicate MPPS instances
-    if req.AffectedSOPInstanceUID in managed_instances:
-        lgr.error("MPPS N-CREATE failed: Duplicate SOP Instance UID")
-        return 0x0111, None # Duplicate SOP Instance
-
     attr_list = event.attribute_list
-
-    if "PerformedProcedureStepStatus" not in attr_list:
-        lgr.error("MPPS N-CREATE failed: Missing PerformedProcedureStepStatus")
-        return 0x0120, None # Missing Attribute
-    if attr_list.PerformedProcedureStepStatus.upper() != "IN PROGRESS":
-        lgr.error("MPPS N-CREATE failed: Invalid PerformedProcedureStepStatus")
-        return 0x0106, None # Invalid Attribute Value
-
     ds = Dataset()
     ds.SOPClassUID = ModalityPerformedProcedureStep
-    ds.SOPInstanceUID = req.AffectedSOPInstanceUID
-
-    # Copy attributes
+    ds.SOPInstanceUID = req.AffectedSOPInstanceUID or "UNKNOWN_UID"
     ds.update(attr_list)
 
     # Store MPPS instance
     managed_instances[ds.SOPInstanceUID] = ds
 
+    # Validation logic (log warnings, don't return errors to MRI scanner)
+    if not req.AffectedSOPInstanceUID:
+        lgr.warning("MPPS N-CREATE: Missing Affected SOP Instance UID")
+    elif req.AffectedSOPInstanceUID in managed_instances:
+        lgr.warning("MPPS N-CREATE: Duplicate SOP Instance UID received")
+
+    status = attr_list.get("PerformedProcedureStepStatus", "").upper()
+    if not status:
+        lgr.warning("MPPS N-CREATE: Missing PerformedProcedureStepStatus")
+    elif status != "IN PROGRESS":
+        lgr.warning(f"MPPS N-CREATE: Unexpected PerformedProcedureStepStatus = {status}")
+
     # Update database: Set status to IN_PROGRESS
     patient_id = ds.get("PatientID", None)
     session = Session()
@@ -187,32 +186,31 @@ def handle_mpps_n_create(event):
             session.commit()
             lgr.info(f"DB updated: PatientID {patient_id} set to IN_PROGRESS")
         else:
-            lgr.warning("MPPS N-
-    session.close()
-
-    lgr.info(f"MPPS N-CREATE success: {ds.SOPInstanceUID} set to IN PROGRESS")
+            lgr.warning("MPPS N-CREATE: No PatientID found in attributes. DB update skipped.")
 
-
+    session.close()
+    lgr.info(f"MPPS N-CREATE processed: {ds.SOPInstanceUID}")
 
+    return 0x0000, ds # Always return Success
 
 def handle_mpps_n_set(event):
     """Handles N-SET for MPPS (Procedure Completion)."""
     req = event.request
-
-        lgr.error("MPPS N-SET failed: SOP Instance not recognized")
-        return 0x0112, None # No Such Object Instance
+    sop_uid = req.RequestedSOPInstanceUID
 
-
-
+    if sop_uid not in managed_instances:
+        lgr.warning(f"MPPS N-SET: Unknown SOP Instance UID {sop_uid}")
+        ds = Dataset()
+        ds.SOPInstanceUID = sop_uid
+        return 0x0000, ds # Still return success
 
-
+    ds = managed_instances[sop_uid]
+    mod_list = event.attribute_list
     ds.update(mod_list)
 
-    # Log status update
     new_status = ds.get("PerformedProcedureStepStatus", None)
     patient_id = ds.get("PatientID", None)
 
-    # Update database
     session = Session()
     if patient_id and new_status:
         entry = session.query(WorklistItem).filter_by(patient_id=patient_id).first()
@@ -225,11 +223,18 @@ def handle_mpps_n_set(event):
                 entry.performed_procedure_step_status = "DISCONTINUED"
                 session.commit()
                 lgr.info(f"DB updated: PatientID {patient_id} set to DISCONTINUED")
+            else:
+                lgr.warning(f"MPPS N-SET: Unrecognized status {new_status}")
+        else:
+            lgr.warning(f"MPPS N-SET: No DB entry found for PatientID {patient_id}")
+    else:
+        lgr.warning("MPPS N-SET: Missing PatientID or status. No DB update.")
+
     session.close()
+    lgr.info(f"MPPS N-SET processed: {sop_uid} -> {new_status}")
 
-
+    return 0x0000, ds # Always return Success
 
-    return 0x0000, ds # Success
 
 
 # --------------------------------------------------------------------
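
The mwl_server.py hunks do two things: the scheduled protocol code item now carries the HIS/RIS coding scheme designator from the database, and the MPPS N-CREATE/N-SET handlers log warnings instead of returning DICOM error statuses to the scanner. Below is a minimal, self-contained pydicom sketch of the new protocol-code encoding; the literal values ("BRAIN_MRI_3T", "GEHC") are illustrative, taken from the sample config rather than any real site.

```python
# Minimal pydicom sketch of how a 0.0.9 worklist item encodes its protocol code:
# CodeValue/CodeMeaning come from protocol_name, CodingSchemeDesignator from the new
# hisris_coding_designator column (config protocol["mapping"]). Values are examples only.
from pydicom.dataset import Dataset

def build_protocol_code_item(protocol_name: str, coding_designator: str) -> Dataset:
    item = Dataset()
    item.CodeValue = protocol_name                    # e.g. "BRAIN_MRI_3T"
    item.CodingSchemeDesignator = coding_designator   # e.g. "GEHC"
    item.CodeMeaning = protocol_name
    return item

sps = Dataset()
sps.ScheduledProcedureStepStatus = "SCHEDULED"
sps.ScheduledProtocolCodeSequence = [build_protocol_code_item("BRAIN_MRI_3T", "GEHC")]
print(sps)
```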

{pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/redcap_to_db.py

@@ -1,5 +1,6 @@
 import os
 import logging
+import pandas as pd
 from redcap import Project
 import uuid
 from sqlalchemy.orm import sessionmaker
@@ -22,7 +23,7 @@ Session = sessionmaker(bind=engine)
 
 
 def fetch_redcap_entries(redcap_fields: list) -> list:
-    """Fetch
+    """Fetch REDCap entries using PyCap and return a list of filtered dicts."""
     project = Project(REDCAP_API_URL, REDCAP_API_TOKEN)
 
     if not redcap_fields:
@@ -31,8 +32,6 @@ def fetch_redcap_entries(redcap_fields: list) -> list:
 
     # Fetch metadata to get valid REDCap field names
     valid_fields = {field["field_name"] for field in project.export_metadata()}
-
-    # Only keep fields that actually exist in REDCap
     redcap_fields = [field for field in redcap_fields if field in valid_fields]
 
     if not redcap_fields:
@@ -41,19 +40,43 @@ def fetch_redcap_entries(redcap_fields: list) -> list:
 
     lgr.info(f"Fetching REDCap data for fields: {redcap_fields}")
 
-    #
-    records = project.export_records(fields=redcap_fields, format_type="
-
-    # Ensure all requested fields exist in every record (fill missing fields with None)
-    for record in records:
-        record["redcap_event_name"] = record.get("redcap_event_name", "UNKNOWN_EVENT")
-        for field in redcap_fields:
-            record.setdefault(field, None) # Fill missing fields with None
+    # Export data
+    records = project.export_records(fields=redcap_fields, format_type="df")
 
-    if
+    if records.empty:
         lgr.warning("No records retrieved from REDCap.")
+        return []
+
+    filtered_records = []
+
+    # Group by 'record_id' (index level 0)
+    for record_id, group in records.groupby(level=0):
+
+        # Try to get baseline (non-repeated instrument) values
+        baseline_rows = group[group['redcap_repeat_instrument'].isna()]
+        baseline_row = baseline_rows.iloc[0] if not baseline_rows.empty else {}
+
+        # Filter for valid MRI rows only
+        mri_rows = group[
+            (group["redcap_repeat_instrument"] == "mri") &
+            (group.get("mri_instance").notna()) &
+            (group.get("mri_instance") != "")
+        ]
+
+        for _, mri_row in mri_rows.iterrows():
+            record = {"record_id": record_id}
+
+            # Merge fields from baseline and mri_row, only include requested fields
+            for field in redcap_fields:
+                record[field] = (
+                    mri_row.get(field)
+                    if pd.notna(mri_row.get(field))
+                    else baseline_row.get(field)
+                )
+
+            filtered_records.append(record)
 
-    return
+    return filtered_records
 
 # TODO: Implement age binning for paricipants
 def age_binning():
@@ -75,20 +98,7 @@ def convert_weight(weight, weight_unit):
     return weight, round(weight / 0.453592, 2) # (kg, lb)
 
 
-def mapping_redcap_event_name_to_ses_id(mri_visit_mapping, redcap_event):
-    """Map REDCap event name to SES ID."""
-    try:
-        ses_id = mri_visit_mapping.get(redcap_event, None)
-        if ses_id is None:
-            raise ValueError(f"SES ID not found for REDCap event: {redcap_event}")
-        return ses_id
-    except Exception as e:
-        lgr.error(f"Error mapping REDCap event name to SES ID: {e}")
-        return None
-
-
 def sync_redcap_to_db(
-    mri_visit_mapping: dict,
     site_id: str,
     protocol: dict,
     redcap2wl: dict,
@@ -97,11 +107,13 @@ def sync_redcap_to_db(
 
     if not redcap2wl:
         lgr.error("No field mapping (redcap2wl) provided for syncing.")
+        return
 
     session = Session()
 
+    #TODO: Implement the repeat visit mapping
     # Extract the REDCap fields that need to be pulled
-    default_fields = ["study_id", "family_id", "youth_dob_y", "t1_date", "demo_sex"]
+    default_fields = ["record_id", "study_id", "redcap_repeat_instrument", "mri_instance", "mri_date", "mri_time", "family_id", "youth_dob_y", "t1_date", "demo_sex"]
     redcap_fields = list(redcap2wl.keys())
 
     # Ensure certain default fields are always present
@@ -116,12 +128,14 @@ def sync_redcap_to_db(
         study_id = study_id.split('-')[-1] if study_id else None
         family_id = record.get("family_id")
         family_id = family_id.split('-')[-1] if family_id else None
-
-
-        )
+        repeat_id = record.get("redcap_repeat_instance") if record.get("redcap_repeat_instance") != "" else "1" # Default to 1 if not set
+        lgr.debug(f"Processing record for Study ID: {study_id} and Family ID: {family_id}")
+        lgr.debug(f"This is the repeat event {repeat_id}")
+        ses_id = record.get("mri_instance")
 
         PatientName = f"cpip-id-{study_id}^fa-{family_id}"
-        PatientID = f"
+        PatientID = f"sub_{study_id}_ses_{ses_id}_fam_{family_id}_site_{site_id}"
+        PatientID_ = f"sub-{study_id}_ses-{ses_id}_fam-{family_id}_site-{site_id}"
 
         if not PatientID:
             lgr.warning("Skipping record due to missing Study ID.")
@@ -137,14 +151,25 @@ def sync_redcap_to_db(
             .first()
         )
 
+        existing_entry_ = (
+            session.query(WorklistItem)
+            .filter_by(patient_id=PatientID_)
+            .first()
+        )
         if existing_entry:
             logging.debug(f"Updating existing worklist entry for PatientID {PatientID}")
+        elif existing_entry_:
+            logging.debug(f"Updating existing worklist entry for PatientID {PatientID_}")
+            existing_entry = existing_entry_
+
+        if existing_entry:
             existing_entry.patient_name = PatientName
             existing_entry.patient_id = PatientID
             existing_entry.patient_birth_date = record.get("youth_dob_y", "19000101")
             existing_entry.patient_sex = record.get("demo_sex")
             existing_entry.modality = record.get("modality", "MR")
-
+            existing_entry.scheduled_start_date = record.get("mri_date")
+            existing_entry.scheduled_start_time = record.get("mri_time")
             # Dynamically update DICOM worklist fields from REDCap
             for redcap_field, dicom_field in redcap2wl.items():
                 if redcap_field in record:
@@ -169,9 +194,10 @@ def sync_redcap_to_db(
                 patient_birth_date=f"{record.get('youth_dob_y', '2012')}0101",
                 patient_sex=record.get("demo_sex"),
                 modality=record.get("modality", "MR"),
-
-
+                scheduled_start_date=record.get("mri_date"),
+                scheduled_start_time=record.get("mri_time"),
                 protocol_name=protocol.get(site_id, "DEFAULT_PROTOCOL"),
+                hisris_coding_designator=protocol.get("mapping", "scannermapper"),
                 # patient_weight_kg=patient_weight_kg,
                 patient_weight_lb=record.get("patient_weight_lb", ""),
                 # referring_physician_name=record.get("referring_physician"),
@@ -188,7 +214,6 @@ def sync_redcap_to_db(
 
 
 def sync_redcap_to_db_repeatedly(
-    mri_visit_mapping=None,
     site_id=None,
     protocol=None,
     redcap2wl=None,
@@ -201,7 +226,6 @@ def sync_redcap_to_db_repeatedly(
     while not STOP_EVENT.is_set():
         try:
             sync_redcap_to_db(
-                mri_visit_mapping=mri_visit_mapping,
                 site_id=site_id,
                 protocol=protocol,
                 redcap2wl=redcap2wl,
@@ -220,7 +244,6 @@ if __name__ == "__main__":
     # from a signal handler or from another part of your code.
     try:
         sync_redcap_to_db_repeatedly(
-            mri_visit_mapping=None,
            site_id=None,
            protocol=None,
            redcap2wl=None,
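
The reworked `fetch_redcap_entries` exports REDCap data as a pandas DataFrame and, for each record, merges baseline (non-repeated) fields into every repeated "mri" instrument row. A toy, self-contained illustration of that merge on synthetic data (not real REDCap output) follows.

```python
# Toy illustration of the baseline/repeat-instrument merge performed by the new
# fetch_redcap_entries. The DataFrame below is synthetic, standing in for the
# record_id-indexed frame that PyCap's export_records(format_type="df") returns.
import pandas as pd

records = pd.DataFrame(
    {
        "redcap_repeat_instrument": [None, "mri", "mri"],
        "mri_instance": [None, "1", "2"],
        "mri_date": [None, "2025-01-10", "2025-06-02"],
        "demo_sex": ["F", None, None],  # baseline-only field
    },
    index=pd.Index(["1001", "1001", "1001"], name="record_id"),
)

fields = ["mri_instance", "mri_date", "demo_sex"]
filtered = []
for record_id, group in records.groupby(level=0):
    baseline_rows = group[group["redcap_repeat_instrument"].isna()]
    baseline = baseline_rows.iloc[0] if not baseline_rows.empty else {}
    mri_rows = group[(group["redcap_repeat_instrument"] == "mri") & group["mri_instance"].notna()]
    for _, mri_row in mri_rows.iterrows():
        rec = {"record_id": record_id}
        for field in fields:
            value = mri_row.get(field)
            rec[field] = value if pd.notna(value) else baseline.get(field)
        filtered.append(rec)

print(filtered)
# -> two dicts for record 1001, each inheriting demo_sex "F" from the baseline row
```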

Files without changes:

- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/.env
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/__init__.py
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/cli/__init__.py
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/config/mwl_config.json
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/db_setup.py
- {pylantir-0.0.7 → pylantir-0.0.9}/src/pylantir/populate_db.py