fhir-sheets 1.2.1__tar.gz → 2.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of fhir-sheets might be problematic.

@@ -1,12 +1,14 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: fhir-sheets
- Version: 1.2.1
+ Version: 2.0.0
  Summary: FhirSheets is a command-line tool that reads an Excel file in FHIR cohort format and generates FHIR bundle JSON files from it. Each row in the template Excel file is used to create an individual JSON file, outputting them to a specified folder.
+ License-File: LICENSE
  Author: Michael Riley
  Author-email: Michael.Riley@gtri.gatech.edu
  Requires-Python: >=3.13
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
  Requires-Dist: et-xmlfile (==1.1.0)
  Requires-Dist: jsonpath-ng (==1.6.1)
  Requires-Dist: openpyxl (==3.1.5)
@@ -1,10 +1,11 @@
  [tool.poetry]
  name = "fhir-sheets"
- version = "1.2.1"
+ version = "2.0.0"
  description = "FhirSheets is a command-line tool that reads an Excel file in FHIR cohort format and generates FHIR bundle JSON files from it. Each row in the template Excel file is used to create an individual JSON file, outputting them to a specified folder."
  authors = ["Michael Riley <Michael.Riley@gtri.gatech.edu>"]
  packages = [{include = "fhir_sheets", from = "src"}]
  readme = "README.md"
+ long-description-content-type = "text/markdown"

  [tool.poetry.dependencies]
  python = ">=3.13" # Specify the compatible Python version here
@@ -16,4 +17,7 @@ ply = "3.11"

  [build-system]
  requires = ["poetry-core"]
- build-backend = "poetry.core.masonry.api"
+ build-backend = "poetry.core.masonry.api"
+
+ [pytest]
+ testpaths = ["tests"]
@@ -31,7 +31,7 @@ def main(input_file, output_folder):
  resource_definition_entities, resource_link_entities, cohort_data = read_input.read_xlsx_and_process(input_file)
  pprint(cohort_data)
  #For each index of patients
- for i in range(0,cohort_data.num_entries):
+ for i in range(0,cohort_data.get_num_patients()):
  # Construct the file path for each JSON file
  file_path = output_folder_path / f"{i}.json"
  #Create a bundle
@@ -3,7 +3,7 @@ import uuid
  from jsonpath_ng.jsonpath import Fields, Slice, Where
  from jsonpath_ng.ext import parse as parse_ext

- from .model.cohort_data_entity import CohortData, FieldEntry
+ from .model.cohort_data_entity import CohortData, CohortData
  from .model.resource_definition_entity import ResourceDefinition
  from .model.resource_link_entity import ResourceLink
  from . import fhir_formatting
@@ -32,6 +32,13 @@ def initialize_bundle():
  root_bundle = {}
  root_bundle['resourceType'] = 'Bundle'
  root_bundle['id'] = str(uuid.uuid4())
+ root_bundle['meta'] = {
+ 'security': [{
+ 'system': 'http://terminology.hl7.org/CodeSystem/v3-ActReason',
+ 'code': 'HTEST',
+ 'display': 'test health data'
+ }]
+ }
  root_bundle['type'] = 'transaction'
  root_bundle['entry'] = []
  return root_bundle
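Every generated transaction bundle (and, per the next hunk, every resource with declared profiles) is now tagged with the HL7 HTEST security label marking the output as test data. A minimal sketch of the bundle skeleton initialize_bundle() now returns, with an illustrative id in place of the runtime uuid4:

```python
# Shape of the dict returned by initialize_bundle() in 2.0.0 (id shown is illustrative).
root_bundle = {
    'resourceType': 'Bundle',
    'id': '00000000-0000-0000-0000-000000000000',  # str(uuid.uuid4()) at runtime
    'meta': {
        'security': [{
            'system': 'http://terminology.hl7.org/CodeSystem/v3-ActReason',
            'code': 'HTEST',
            'display': 'test health data',
        }],
    },
    'type': 'transaction',
    'entry': [],
}
```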
@@ -43,24 +50,39 @@ def initialize_resource(resource_definition):
  initial_resource['id'] = str(uuid.uuid4()).strip()
  if resource_definition.profiles:
  initial_resource['meta'] = {
- 'profile': resource_definition.profiles
+ 'profile': resource_definition.profiles,
+ 'security': [{
+ 'system': 'http://terminology.hl7.org/CodeSystem/v3-ActReason',
+ 'code': 'HTEST',
+ 'display': 'test health data'
+ }]
  }
  return initial_resource

  # Creates a fhir-json structure from a resource definition entity and the patient_data_sheet
  def create_fhir_resource(resource_definition: ResourceDefinition, cohort_data: CohortData, index = 0):
  resource_dict = initialize_resource(resource_definition)
- #Get field entries for this entitiy
- try:
- all_field_entries = cohort_data.entities[resource_definition.entity_name].fields
- except KeyError:
+ #Get field entries for this entity
+ header_entries_for_resourcename = [
+ headerEntry
+ for headerEntry in cohort_data.headers
+ if headerEntry.entityName == resource_definition.entity_name
+ ]
+ dataelements_for_resourcename = {
+ key: value
+ for key, value in cohort_data.patients[index].entries.items()
+ if value['entity_name'] == resource_definition.entity_name
+ }
+ if len(dataelements_for_resourcename.keys()) == 0:
  print(f"WARNING: Patient index {index} - Create Fhir Resource Error - {resource_definition.entity_name} - No columns for entity '{resource_definition.entity_name}' found for resource in 'PatientData' sheet")
  return resource_dict
+ all_field_entries = cohort_data.entities[resource_definition.entity_name].fields
  #For each field within the entity
- for field_entry_key, field_entry in all_field_entries.items():
- #Create a jsonpath from each provided json path and value for this resource
- if field_entry.values and len(field_entry.values) > index:
- create_structure_from_jsonpath(resource_dict, field_entry.jsonpath, resource_definition, field_entry, field_entry.value_type, field_entry.values[index])
+ for fieldname, dataelement in dataelements_for_resourcename.items():
+ header_element = next((header for header in header_entries_for_resourcename if header.fieldName == fieldname), None)
+ if header_element is None:
+ print(f"WARNING: Field Name {fieldname} - No Header Entry found.")
+ create_structure_from_jsonpath(resource_dict, header_element.jsonpath, resource_definition, header_element.value_type, dataelement['value'])
  return resource_dict

  #Create a resource_link for default references in the cases where only 1 resourceType of the source and destination exist
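For orientation, a hypothetical example of the two shapes the revised create_fhir_resource now works from; the entity and field names are made up, and the dictionaries mirror what read_input builds before CohortData wraps them into HeaderEntry and PatientEntry objects (defined in the new model file further down in this diff):

```python
# Hypothetical data; real names come from the Excel sheet's PatientData tab.
header = {
    'entityName': 'Patient1',                  # compared to resource_definition.entity_name
    'fieldName': 'First Name',                 # compared to the per-patient entry key below
    'jsonpath': '$.Patient.name[0].given[0]',
    'valueType': 'string',
    'valuesets': None,
}
patient_entries = {
    'First Name': {'entity_name': 'Patient1', 'value': 'Alice'},
}
# The loop above pairs each patient data element with the HeaderEntry whose fieldName
# matches, then writes the value via create_structure_from_jsonpath(resource_dict,
# header jsonpath, resource_definition, header value_type, 'Alice').
```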
@@ -192,7 +214,7 @@ def add_resource_to_transaction_bundle(root_bundle, fhir_resource):
  # resource_definition: resource description model from import
  # entity_definition: specific field entry information for this function
  # value: Actual value to assign
- def create_structure_from_jsonpath(root_struct: Dict, json_path: str, resource_definition: ResourceDefinition, field_entry: FieldEntry, dataType: str, value: Any):
+ def create_structure_from_jsonpath(root_struct: Dict, json_path: str, resource_definition: ResourceDefinition, dataType: str, value: Any):
  #Get all dot notation components as seperate
  if dataType is not None and dataType.strip().lower() == 'string':
  value = str(value)
@@ -202,10 +224,10 @@ def create_structure_from_jsonpath(root_struct: Dict, json_path: str, resource_d
  return root_struct
  #Start of top-level function which calls the enclosed recursive function
  parts = json_path.split('.')
- return build_structure(root_struct, json_path, resource_definition, field_entry, parts, value, [])
+ return build_structure(root_struct, json_path, resource_definition, dataType, parts, value, [])

  # main recursive function to drill into the json structure, assign paths, and create structure where needed
- def build_structure(current_struct, json_path, resource_definition, entity_definition, parts, value, previous_parts):
+ def build_structure(current_struct: Dict, json_path: str, resource_definition: ResourceDefinition, dataType: str, parts: List[str], value: Any, previous_parts: List[str]):
  if len(parts) == 0:
  return current_struct
  #Grab current part
@@ -213,11 +235,11 @@ def build_structure(current_struct, json_path, resource_definition, entity_defin
  #SPECIAL HANDLING CLAUSE
  matching_handler = next((handler for handler in special_values.custom_handlers if (json_path.startswith(handler) or json_path == handler)), None)
  if matching_handler is not None:
- return special_values.custom_handlers[matching_handler].assign_value(json_path, resource_definition, entity_definition, current_struct, parts[-1], value)
+ return special_values.custom_handlers[matching_handler].assign_value(json_path, resource_definition, current_struct, parts[-1], value)
  #Ignore dollar sign ($) and drill farther down
  if part == '$' or part == resource_definition.resource_type.strip():
  #Ignore the dollar sign and the resourcetype
- return build_structure_recurse(current_struct, json_path, resource_definition, entity_definition, parts, value, previous_parts, part)
+ return build_structure_recurse(current_struct, json_path, resource_definition, dataType, parts, value, previous_parts, part)

  # If parts length is one then this is the final key to access and pair
  if len(parts) == 1:
@@ -240,7 +262,7 @@ def build_structure(current_struct, json_path, resource_definition, entity_defin
  if part + 1 > len(current_struct):
  current_struct.extend({} for x in range (part + 1 - len(current_struct)))
  #Actual assigning to the path
- fhir_formatting.assign_value(current_struct, part, value, entity_definition.value_type)
+ fhir_formatting.assign_value(current_struct, part, value, dataType)
  return current_struct

  # If there is a simple qualifier with '['and ']'
@@ -262,7 +284,7 @@ def build_structure(current_struct, json_path, resource_definition, entity_defin
  if qualifier_as_number + 1 > len(current_struct):
  current_struct.extend({} for x in range (qualifier_as_number + 1 - len(current_struct)))
  inner_struct = current_struct[qualifier_as_number]
- inner_struct = build_structure_recurse(inner_struct, json_path, resource_definition, entity_definition, parts, value, previous_parts, part)
+ inner_struct = build_structure_recurse(inner_struct, json_path, resource_definition, dataType, parts, value, previous_parts, part)
  current_struct[qualifier_as_number] = inner_struct
  return current_struct
  # Create the key part in the structure
@@ -284,7 +306,7 @@ def build_structure(current_struct, json_path, resource_definition, entity_defin
  inner_struct = {qualifier_key: qualifier_value}
  current_struct[key_part].append(inner_struct)
  #Recurse into that innerstructure where the qualifier matched to continue the part traversal
- inner_struct = build_structure_recurse(inner_struct, json_path, resource_definition, entity_definition, parts, value, previous_parts, part)
+ inner_struct = build_structure_recurse(inner_struct, json_path, resource_definition, dataType, parts, value, previous_parts, part)
  return current_struct
  #If there's no qualifier condition, but an index aka '[0]', '[1]' etc, then it's a simple accessor
  elif qualifier.isdigit():
@@ -294,19 +316,19 @@ def build_structure(current_struct, json_path, resource_definition, entity_defin
  if qualifier_as_number > len(current_struct):
  current_struct[key_part].extend({} for x in range (qualifier_as_number - len(current_struct)))
  inner_struct = current_struct[key_part][qualifier_as_number]
- inner_struct = build_structure_recurse(inner_struct, json_path, resource_definition, entity_definition, parts, value, previous_parts, part)
+ inner_struct = build_structure_recurse(inner_struct, json_path, resource_definition, dataType, parts, value, previous_parts, part)
  current_struct[key_part][qualifier_as_number] = inner_struct
  return current_struct
  #None qualifier accessor
  else:
  if(part not in current_struct):
  current_struct[part] = {}
- inner_struct = build_structure_recurse(current_struct[part], json_path, resource_definition, entity_definition, parts, value, previous_parts, part)
+ inner_struct = build_structure_recurse(current_struct[part], json_path, resource_definition, dataType, parts, value, previous_parts, part)
  current_struct[part] = inner_struct
  return current_struct

  #Helper function to quickly recurse and return the next level of structure. Used by main recursive function
- def build_structure_recurse(current_struct, json_path, resource_definition, entity_definition, parts, value, previous_parts, part):
+ def build_structure_recurse(current_struct, json_path, resource_definition, dataType, parts, value, previous_parts, part):
  previous_parts.append(part)
- return_struct = build_structure(current_struct, json_path, resource_definition, entity_definition, parts[1:], value, previous_parts)
+ return_struct = build_structure(current_struct, json_path, resource_definition, dataType, parts[1:], value, previous_parts)
  return return_struct
@@ -196,7 +196,7 @@ def parse_flexible_address(address):
  country_pattern = r'(?:\s*(?P<country>[\w\s]+|))?$'

  # Compile the full pattern to match the postal code, state, and country
- full_pattern = rf'^(?P<line>.*?)\^(?P<city>.*?)\^(?P<district>.*?)\^{state_pattern}\^{postal_code_pattern}\^{country_pattern}'
+ full_pattern = rf'^(?P<line>.*?)\^(?P<city>.*?)\^(?P<district>.*?)\^{postal_code_pattern}\^{state_pattern}\^{country_pattern}'

  match = re.search(full_pattern, address)

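The swap puts the postal-code group ahead of the state group, so caret-delimited addresses are parsed as line^city^district^postalCode^state^country. A small illustrative check, using simplified stand-ins for the module's state_pattern and postal_code_pattern (the real sub-patterns are defined earlier in fhir_formatting and may differ):

```python
import re

# Simplified stand-ins for the module's sub-patterns (illustrative only).
state_pattern = r'(?P<state>[A-Z]{2})?'
postal_code_pattern = r'(?P<postalCode>\d{5}(?:-\d{4})?)?'
country_pattern = r'(?:\s*(?P<country>[\w\s]+|))?$'

full_pattern = rf'^(?P<line>.*?)\^(?P<city>.*?)\^(?P<district>.*?)\^{postal_code_pattern}\^{state_pattern}\^{country_pattern}'
match = re.search(full_pattern, '123 Main St^Atlanta^Fulton^30303^GA^USA')
print(match.group('postalCode'), match.group('state'), match.group('country'))  # 30303 GA USA
```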
@@ -0,0 +1,33 @@
+ import pprint
+ from typing import Dict, Any, List, Optional, Tuple
+
+ class HeaderEntry:
+ def __init__(self, data: Dict[str, Any]):
+ self.entityName: Optional[str] = data.get('entityName')
+ self.fieldName: Optional[str] = data.get('fieldName')
+ self.jsonpath: Optional[str] = data.get('jsonpath')
+ self.value_type: Optional[str] = data.get('valueType')
+ self.valuesets: Optional[str] = data.get('valuesets')
+
+ def __repr__(self) -> str:
+ return (f"\nHeaderEntry(entityName='{self.entityName}', \n\tfieldName='{self.fieldName}', \n\tjsonpath='{self.jsonpath}',\n\tvalue_type='{self.value_type}', "
+ f"\n\tvaluesets='{self.valuesets}')")
+
+ class PatientEntry:
+
+ def __init__(self, entries:Dict[str,str]):
+ self.entries = entries
+
+ def __repr__(self) -> str:
+ return (f"PatientEntry(\n\t'{self.entries}')")
+
+ class CohortData:
+ def __init__(self, headers: List[Dict[str,Any]], patients: List[Dict[str,str]]):
+ self.headers = [HeaderEntry(header_data) for header_data in headers]
+ self.patients = [PatientEntry(patient_data) for patient_data in patients]
+
+ def __repr__(self) -> str:
+ return (f"CohortData(\n\t-----\n\theaders='{self.headers}',\n\t-----\n\tpatients='{self.patients}')")
+
+ def get_num_patients(self):
+ return len(self.patients)
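Downstream, cli/main now sizes its loop with get_num_patients(). A minimal sketch of constructing the new model directly from raw dictionaries, as read_input does; the field names and values are hypothetical:

```python
# Hypothetical two-patient cohort built from raw dicts.
cohort = CohortData(
    headers=[{'entityName': 'Patient1', 'fieldName': 'First Name',
              'jsonpath': '$.Patient.name[0].given[0]',
              'valueType': 'string', 'valuesets': None}],
    patients=[
        {'First Name': {'entity_name': 'Patient1', 'value': 'Alice'}},
        {'First Name': {'entity_name': 'Patient1', 'value': 'Bob'}},
    ],
)
print(cohort.get_num_patients())                           # 2
print(cohort.patients[0].entries['First Name']['value'])   # Alice
```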
@@ -1,6 +1,6 @@
  import openpyxl

- from .model.cohort_data_entity import CohortData, EntityData, FieldEntry
+ from .model.cohort_data_entity import CohortData, CohortData

  from .model.resource_definition_entity import ResourceDefinition
  from .model.resource_link_entity import ResourceLink
@@ -21,7 +21,7 @@ def read_xlsx_and_process(file_path):

  if 'PatientData' in workbook.sheetnames:
  sheet = workbook['PatientData']
- cohort_data = process_sheet_patient_data(sheet, resource_definition_entities)
+ cohort_data = process_sheet_patient_data_revised(sheet, resource_definition_entities)

  return resource_definition_entities, resource_link_entities, cohort_data

@@ -104,4 +104,44 @@ def process_sheet_patient_data(sheet, resource_definition_entities):
  # Append the actual data values to the 'values' array
  cohort_data.entities[entity_name].fields[field_name].values.append(value)
  cohort_data.num_entries = num_entries
+ return cohort_data
+
+ # Function to process the "PatientData" sheet for the Revised CohortData
+ def process_sheet_patient_data_revised(sheet, resource_definition_entities):
+ headers = []
+ patients = []
+ # Initialize the dictionary to store the processed data
+ # Process the Header Entries from the first 6 rows (Entity To Query, JsonPath, etc.) and the data from the rest.
+ for col in sheet.iter_cols(min_row=1, min_col=3, values_only=True): # Start from 3rd column
+ if all(entry is None for entry in col):
+ continue
+ entity_name = col[0] # The entity name comes from the first row (Entity To Query)
+ field_name = col[5] #The "Data Element" comes from the fifth row
+ if (entity_name is None or entity_name == "") and (field_name is not None and field_name != ""):
+ print(f"WARNING: - Reading Patient Data Issue - {field_name} - 'Entity To Query' cell missing for column labelled '{field_name}', please provide entity name from the ResourceDefinitions tab.")
+
+ if entity_name not in [entry.entity_name for entry in resource_definition_entities]:
+ print(f"WARNING: - Reading Patient Data Issue - {field_name} - 'Entity To Query' cell has entity named '{entity_name}', however, the ResourceDefinition tab has no matching resource. Please provide a corresponding entry in the ResourceDefinition tab.")
+
+ # Create a header entry
+ header_data = {
+ "fieldName": field_name,
+ "entityName": entity_name,
+ "jsonpath": col[1], # JsonPath from the second row
+ "valueType": col[2], # Value Type from the third row
+ "valuesets": col[3] # Value Set from the fourth row
+ }
+ headers.append(header_data)
+ # Create a data entry
+ values = col[6:] # The values come from the 6th row and below
+ values = tuple(item for item in values if item is not None)
+ #Expand the patient dictionary set if needed
+ if len(values) > len(patients):
+ needed_count = len(values) - len(patients)
+ patients.extend([{}] * needed_count)
+ for patient_dict, value in zip(patients, values):
+ patient_dict[field_name] = {}
+ patient_dict[field_name]["entity_name"] = entity_name
+ patient_dict[field_name]["value"] = value
+ cohort_data = CohortData(headers=headers, patients=patients)
  return cohort_data
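Per the revised reader, the first six cells of every PatientData column (starting at the sheet's third column) carry the column metadata and the remaining cells carry one value per patient. A minimal sketch of that index mapping for a hypothetical column tuple as yielded by iter_cols(values_only=True):

```python
# Hypothetical column; indices follow process_sheet_patient_data_revised.
col = (
    'Patient1',                      # col[0] 'Entity To Query'
    '$.Patient.name[0].given[0]',    # col[1] JsonPath
    'string',                        # col[2] Value Type
    None,                            # col[3] Value Set
    None,                            # col[4] (not read by the parser)
    'First Name',                    # col[5] Data Element
    'Alice', 'Bob',                  # col[6:] one value per patient row
)
header_data = {'fieldName': col[5], 'entityName': col[0],
               'jsonpath': col[1], 'valueType': col[2], 'valuesets': col[3]}
values = tuple(item for item in col[6:] if item is not None)   # ('Alice', 'Bob')
```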
@@ -7,7 +7,7 @@ from abc import ABC, abstractmethod
  class AbstractCustomValueHandler(ABC):

  @abstractmethod
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  pass

  class PatientRaceExtensionValueHandler(AbstractCustomValueHandler):
@@ -67,7 +67,7 @@ class PatientRaceExtensionValueHandler(AbstractCustomValueHandler):
  "url" : "http://hl7.org/fhir/us/core/StructureDefinition/us-core-race"
  }
  #Create an ombcategory and detailed section of race extension
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the race extension if it exists; make it if it does not.
  if 'extension' not in final_struct:
  final_struct['extension'] = []
@@ -128,7 +128,7 @@ class PatientEthnicityExtensionValueHandler(AbstractCustomValueHandler):
  "url" : "http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity"
  }
  #Create an ombcategory and detailed section of ethnicitiy extension
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the ethncitiy extension if it exists; make it if it does not.
  if 'extension' not in final_struct:
  final_struct['extension'] = []
@@ -154,7 +154,7 @@ class PatientBirthSexExtensionValueHandler(AbstractCustomValueHandler):
  "valueCode" : "$value"
  }
  #Assigna birthsex extension
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the birthsex extension if it exists; make it if it does not.
  if 'extension' not in final_struct:
  final_struct['extension'] = []
@@ -182,7 +182,7 @@ class PatientMRNIdentifierValueHandler(AbstractCustomValueHandler):
  "value" : "$value"
  }
  #Assign a MRN identifier
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the MRN identifier if it exists; make it if it does not.
  target_identifier = self.patient_mrn_block
  new_identifier = True
@@ -215,7 +215,7 @@ class PatientSSNIdentifierValueHandler(AbstractCustomValueHandler):
  "value" : "$value"
  }
  #Assign a MRN identifier
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the MRN identifier if it exists; make it if it does not.
  target_identifier = self.patient_mrn_block
  new_identifier = True
@@ -238,7 +238,7 @@ class OrganizationIdentiferNPIValueHandler(AbstractCustomValueHandler):
  "value" : "$value"
  }
  #Assigna birthsex extension
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the birthsex extension if it exists; make it if it does not.
  if 'identifier' not in final_struct:
  final_struct['identifier'] = []
@@ -255,7 +255,7 @@ class OrganizationIdentiferCLIAValueHandler(AbstractCustomValueHandler):
  "value" : "$value"
  }
  #Assign a birthsex extension
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the birthsex extension if it exists; make it if it does not.
  if 'identifier' not in final_struct:
  final_struct['identifier'] = []
@@ -272,7 +272,7 @@ class PractitionerIdentiferNPIValueHandler(AbstractCustomValueHandler):
  "value" : "$value"
  }
  #Assigna birthsex extension
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Retrieve the birthsex extension if it exists; make it if it does not.
  if 'identifier' not in final_struct:
  final_struct['identifier'] = []
@@ -309,7 +309,7 @@ class ObservationComponentHandler(AbstractCustomValueHandler):
  }
  }
  #Find the appropriate component for the observaiton; then call build_structure again to continue the drill down
- def assign_value(self, json_path, resource_definition, entity_definition, final_struct, key, value):
+ def assign_value(self, json_path, resource_definition, final_struct, key, value):
  #Check to make sure the component part exists
  if 'component' not in final_struct:
  final_struct['component'] = []
@@ -330,7 +330,7 @@ class ObservationComponentHandler(AbstractCustomValueHandler):
  if target_component is self.pulse_oximetry_oxygen_concentration:
  components.append(target_component)
  #Recurse back down into
- return conversion.build_structure(target_component, '.'.join(parts[2:]), resource_definition, entity_definition, parts[2:], value, parts[:2])
+ return conversion.build_structure(target_component, '.'.join(parts[2:]), resource_definition, parts[2:], value, parts[:2])
  pass

  def utilFindExtensionWithURL(extension_block, url):
@@ -1,97 +0,0 @@
- import pprint
- from typing import Dict, Any, List, Optional
-
- class FieldEntry:
- def __init__(self, data: Dict[str, Any]):
- self.jsonpath: Optional[str] = data.get('jsonpath')
- self.value_type: Optional[str] = data.get('valueType')
- self.valuesets: Optional[str] = data.get('valuesets')
- self.values: Optional[List[str]] = data.get('values')
-
- def __repr__(self) -> str:
- return (f"FieldEntry(\n\tjsonpath='{self.jsonpath}',\n\tvalue_type='{self.value_type}', "
- f"\n\tvaluesets='{self.valuesets}', \n\tvalues={self.values})")
-
- class EntityData:
- def __init__(self, data: Dict[str, Dict[str, Any]]):
- """
- Initializes the EntityData object. Accepts either a dictionary of raw data
- or a list of FieldEntry objects.
-
- Args:
- data (Dict[str, Dict[str, Any]]): A dictionary mapping names to raw field data.
- entries (List[FieldEntry]): A list of pre-created FieldEntry objects.
- """
- self.fields: Dict[str, FieldEntry] = {}
- for name, field_data in data.items():
- self.fields[name] = FieldEntry(field_data)
-
- def __repr__(self) -> str:
- return f"EntityData(fields=\n{pprint.pformat(self.fields, indent=4)})"
-
- def insert(self, name: str, entry: FieldEntry):
- """
- Inserts a new FieldEntry into the collection.
-
- Args:
- name (str): The referential name for the field.
- entry (FieldEntry): The FieldEntry object to insert.
- """
- self.fields[name] = entry
-
- def remove(self, name: str) -> bool:
- """
- Removes a FieldEntry by its referential name.
-
- Args:
- name (str): The referential name of the field to remove.
-
- Returns:
- bool: True if the field was removed, False otherwise.
- """
- if name in self.fields:
- del self.fields[name]
- return True
- return False
-
- class CohortData:
- def __init__(self, data: Dict[str, EntityData] = None):
- """
- Initializes the CohortData object.
-
- Args:
- data (Dict[str, EntityData]): A dictionary where keys are entity names
- and values are EntityData objects.
- """
- self.entities: Dict[str, EntityData] = {}
- self.num_entries = 0
- if data:
- self.entities.update(data)
-
- def __repr__(self) -> str:
- return f"CohortData(entities={self.entities})"
-
- def insert_entity(self, name: str, entity_data: EntityData):
- """
- Inserts a new EntityData object into the cohort.
-
- Args:
- name (str): The name of the entity (e.g., 'PrimaryPatient').
- entity_data (EntityData): The EntityData object to insert.
- """
- self.entities[name] = entity_data
-
- def remove_entity(self, name: str) -> bool:
- """
- Removes an EntityData object by its name.
-
- Args:
- name (str): The name of the entity to remove.
-
- Returns:
- bool: True if the entity was removed, False otherwise.
- """
- if name in self.entities:
- del self.entities[name]
- return True
- return False