helix.fhir.client.sdk 4.2.21__py3-none-any.whl → 4.2.23__py3-none-any.whl

This diff shows the content changes between publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in their public registries.
@@ -124,15 +124,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  Yields:
  FhirGetResponse objects representing retrieved resources
  """
-
- profiling: dict[str, Any] = {
- "function": "process_simulate_graph_async",
- "start_time": time.perf_counter(),
- "steps": {},
- "extend_calls": [],
- "append_calls": [],
- }
-
  # Validate graph definition input
  assert graph_json, "Graph JSON must be provided"
  graph_definition: GraphDefinition = GraphDefinition.from_dict(graph_json)
@@ -168,7 +159,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  cache: RequestCache = input_cache if input_cache is not None else RequestCache()
  async with cache:
  # Retrieve start resources based on graph definition
- step_start = time.perf_counter()
  start: str = graph_definition.start
  parent_response: FhirGetResponse
  cache_hits: int
@@ -182,18 +172,10 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  add_cached_bundles_to_result=add_cached_bundles_to_result,
  compare_hash=compare_hash,
  )
- profiling["steps"]["get_start_resources"] = time.perf_counter() - step_start

  # If no parent resources found, yield empty response and exit
  parent_response_resource_count = parent_response.get_resource_count()
  if parent_response_resource_count == 0:
- profiling["total_time"] = time.perf_counter() - profiling["start_time"]
- if logger:
- logger.info(
- f"[PROFILING] process_simulate_graph_async: total={profiling['total_time']:.3f}s, "
- f"get_start_resources={profiling['steps'].get('get_start_resources', 0):.3f}s, "
- f"no parent resources found"
- )
  yield parent_response
  return # no resources to process

@@ -207,7 +189,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):

  # Prepare parent bundle entries for further processing
  parent_bundle_entries: FhirBundleEntryList = parent_response.get_bundle_entries()
-
  if logger:
  logger.info(
  f"FhirClient.simulate_graph_async() got parent resources: {parent_response_resource_count} "
@@ -215,7 +196,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  )

  # now process the graph links
- step_start = time.perf_counter()
  child_responses: list[FhirGetResponse] = []
  parent_link_map: list[tuple[list[GraphDefinitionLink], FhirBundleEntryList]] = []

@@ -224,7 +204,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  parent_link_map.append((graph_definition.link, parent_bundle_entries))

  # Process graph links in parallel
- link_processing_count = 0
  while len(parent_link_map):
  new_parent_link_map: list[tuple[list[GraphDefinitionLink], FhirBundleEntryList]] = []

@@ -251,38 +230,22 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  add_cached_bundles_to_result=add_cached_bundles_to_result,
  ifModifiedSince=ifModifiedSince,
  ):
- # Track extend operation
- extend_start = time.perf_counter()
  child_responses.extend(link_responses)
- extend_time = time.perf_counter() - extend_start
- profiling["extend_calls"].append(
- {"location": "child_responses.extend", "count": len(link_responses), "time": extend_time}
- )
- link_processing_count += 1

  # Update parent link map for next iteration
  parent_link_map = new_parent_link_map

- profiling["steps"]["process_graph_links"] = time.perf_counter() - step_start
- profiling["steps"]["link_processing_iterations"] = link_processing_count
-
+ start_time = time.time()
  # Combine and process responses
- step_start = time.perf_counter()
  parent_response = cast(FhirGetBundleResponse, parent_response.extend(child_responses))
- extend_time = time.perf_counter() - step_start
- profiling["steps"]["parent_response.extend"] = extend_time
- profiling["extend_calls"].append(
- {"location": "parent_response.extend", "count": len(child_responses), "time": extend_time}
- )
+ if logger:
+ logger.info(f"Parent_response.extend time: {time.time() - start_time}")

  # Optional resource sorting
  if sort_resources:
- step_start = time.perf_counter()
  parent_response = parent_response.sort_resources()
- profiling["steps"]["sort_resources"] = time.perf_counter() - step_start

  # Prepare final response based on bundling preferences
- step_start = time.perf_counter()
  full_response: FhirGetResponse
  if separate_bundle_resources:
  full_response = FhirGetListByResourceTypeResponse.from_response(other_response=parent_response)
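
The hunk above strips the per-step perf_counter profiling and keeps a single wall-clock log around parent_response.extend. A minimal sketch of the surviving pattern, with a hypothetical timed_extend wrapper (not part of the SDK); note that time.perf_counter() is the monotonic clock usually preferred for measuring durations, while the released code uses time.time():

    import time
    import logging

    logger = logging.getLogger(__name__)

    def timed_extend(parent_response, child_responses):
        # Hypothetical helper mirroring the one timing kept in 4.2.23:
        # a single wall-clock measurement around the extend call.
        start_time = time.time()
        result = parent_response.extend(child_responses)
        # time.perf_counter() would be the more precise choice for durations,
        # but the released code uses time.time() here.
        logger.info(f"Parent_response.extend time: {time.time() - start_time}")
        return result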
@@ -290,38 +253,10 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  full_response = FhirGetListResponse.from_response(other_response=parent_response)
  else:
  full_response = parent_response
- profiling["steps"]["prepare_final_response"] = time.perf_counter() - step_start

  # Set response URL
  full_response.url = url or parent_response.url

- # Calculate profiling summary
- profiling["total_time"] = time.perf_counter() - profiling["start_time"]
- total_extend_time = sum(call["time"] for call in profiling["extend_calls"])
- total_extend_count = sum(call["count"] for call in profiling["extend_calls"])
-
- # Log profiling information
- if logger:
- logger.info(
- f"[PROFILING] process_simulate_graph_async for id={id_}: "
- f"total={profiling['total_time']:.3f}s, "
- f"get_start_resources={profiling['steps'].get('get_start_resources', 0):.3f}s, "
- f"process_graph_links={profiling['steps'].get('process_graph_links', 0):.3f}s, "
- f"parent_response.extend={profiling['steps'].get('parent_response.extend', 0):.3f}s, "
- f"sort_resources={profiling['steps'].get('sort_resources', 0):.3f}s, "
- f"prepare_final_response={profiling['steps'].get('prepare_final_response', 0):.3f}s"
- )
- logger.info(
- f"[PROFILING] process_simulate_graph_async extend operations: "
- f"total_calls={len(profiling['extend_calls'])}, "
- f"total_items={total_extend_count}, "
- f"total_time={total_extend_time:.3f}s"
- )
- for call in profiling["extend_calls"]:
- logger.info(
- f"[PROFILING] extend at {call['location']}: items={call['count']}, time={call['time']:.3f}s"
- )
-
  # Log cache performance
  if logger:
  logger.info(
@@ -344,9 +279,26 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  ) -> list[FhirGetResponse]:
  """
  Parallel processing function for graph definition links.
- """
- profiling_start = time.perf_counter()

+ This method is designed to be used with AsyncParallelProcessor to process
+ graph links concurrently, improving performance for complex FHIR resource
+ graph traversals.
+
+ Key Responsibilities:
+ - Process individual graph links in parallel
+ - Track and log processing details
+ - Handle resource retrieval for each link
+ - Manage parallel processing context
+
+ Args:
+ context: Parallel processing context information
+ row: Current GraphDefinitionLink being processed
+ parameters: Parameters for link processing
+ additional_parameters: Extra parameters for extended processing
+
+ Returns:
+ List of FhirGetResponse objects retrieved during link processing
+ """
  # Record the start time for performance tracking
  start_time: datetime = datetime.now()

@@ -377,8 +329,11 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  logger=parameters.logger,
  cache=parameters.cache,
  scope_parser=parameters.scope_parser,
+ # Handle parent link map from additional parameters
  parent_link_map=(additional_parameters["parent_link_map"] if additional_parameters else []),
+ # Determine request size, default to 1 if not specified
  request_size=(additional_parameters["request_size"] if additional_parameters else 1),
+ # Track unsupported resources for ID-based search
  id_search_unsupported_resources=(
  additional_parameters["id_search_unsupported_resources"] if additional_parameters else []
  ),
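
The docstring and inline comments added above describe a fan-out over graph links driven by AsyncParallelProcessor, whose implementation is outside this diff. As a rough schematic only, the same effect with plain asyncio (process_link stands in for the SDK's per-link callback):

    import asyncio

    async def process_links_concurrently(links, process_link):
        # Schematic only: run one task per GraphDefinitionLink and flatten
        # the per-link lists of FhirGetResponse objects into a single list.
        per_link_results = await asyncio.gather(*(process_link(link) for link in links))
        return [response for link_responses in per_link_results for response in link_responses]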
@@ -394,8 +349,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  # Record end time for performance tracking
  end_time: datetime = datetime.now()

- total_time = time.perf_counter() - profiling_start
-
  # Log detailed processing information
  if parameters.logger:
  parameters.logger.debug(
@@ -407,11 +360,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  + f" | duration: {end_time - start_time}"
  + f" | resource_count: {len(result)}"
  )
- parameters.logger.info(
- f"[PROFILING] process_link_async_parallel_function for path={row.path}: "
- f"total={total_time:.3f}s, "
- f"results={len(result)}"
- )

  # Return the list of retrieved responses
  return result
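
The duration in the surviving debug log above is a plain datetime subtraction. For reference, the difference of two datetime.now() calls is a timedelta, which str-formats as in this minimal sketch:

    from datetime import datetime

    start_time: datetime = datetime.now()
    # ... process one GraphDefinitionLink ...
    end_time: datetime = datetime.now()
    # end_time - start_time is a datetime.timedelta; in an f-string it
    # renders like "0:00:01.234567", which is what appears after
    # "duration:" in the debug log.
    print(f" | duration: {end_time - start_time}")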
@@ -897,18 +845,10 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  logger: Logger | None,
  compare_hash: bool = True,
  ) -> FhirGetResponse | None:
- profiling_start = time.perf_counter()
- http_request_time = 0.0
- http_request_count = 0
- cache_check_time = 0.0
- cache_update_time = 0.0
- append_time = 0.0
-
  result: FhirGetResponse | None = None
  non_cached_id_list: list[str] = []

  # first check to see if we can find these in the cache
- cache_check_start = time.perf_counter()
  if ids:
  for resource_id in ids:
  cache_entry: RequestCacheEntry | None = await cache.get_async(
@@ -921,12 +861,9 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  if logger:
  logger.info(f"Cache entry not found for {resource_type}/{resource_id} (1by1)")
  non_cached_id_list.append(resource_id)
- cache_check_time = time.perf_counter() - cache_check_start

- cache_update_start = time.perf_counter()
  for single_id in non_cached_id_list:
  result2: FhirGetResponse
- http_start = time.perf_counter()
  async for result2 in self._get_with_session_async(
  page_number=None,
  ids=[single_id],
@@ -935,15 +872,10 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  fn_handle_streaming_chunk=None,
  resource_type=resource_type,
  ):
- http_request_time += time.perf_counter() - http_start
- http_request_count += 1
-
  if result2.resource_type == "OperationOutcome":
  result2 = FhirGetErrorResponse.from_response(other_response=result2)
  if result:
- append_start = time.perf_counter()
  result = result.append(result2)
- append_time += time.perf_counter() - append_start
  else:
  result = result2
  if result2.successful:
@@ -977,22 +909,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  )
  if cache_updated and logger:
  logger.info(f"Inserted {result2.status} for {resource_type}/{single_id} into cache (1by1)")
- cache_update_time = time.perf_counter() - cache_update_start - http_request_time - append_time
-
- total_time = time.perf_counter() - profiling_start
- processing_time = total_time - http_request_time - cache_check_time - cache_update_time - append_time
-
- if logger and http_request_count > 0:
- logger.info(
- f"[PROFILING] _get_resources_by_id_one_by_one_async for {resource_type}: "
- f"total={total_time:.3f}s, "
- f"http_requests={http_request_time:.3f}s ({http_request_count} calls), "
- f"cache_check={cache_check_time:.3f}s, "
- f"cache_update={cache_update_time:.3f}s, "
- f"append={append_time:.3f}s, "
- f"processing={processing_time:.3f}s"
- )
-
  return result

  async def _get_resources_by_parameters_async(
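
Stripped of its profiling, _get_resources_by_id_one_by_one_async reduces to a cache-first loop: skip ids already attempted, fetch the rest individually, and fold each response into one result. A condensed sketch under that reading (fetch_one is a hypothetical stand-in for the SDK's _get_with_session_async):

    async def fetch_one_by_one(cache, resource_type: str, ids: list[str], fetch_one):
        # Condensed sketch of the cache-first pattern above; not the SDK method.
        result = None
        for resource_id in ids:
            # Any cache entry means this id was already attempted (hit or miss), so skip it.
            if await cache.get_async(resource_type=resource_type, resource_id=resource_id):
                continue
            response = await fetch_one(resource_type, resource_id)
            # Fold each per-id response into a single combined response.
            result = result.append(response) if result else response
        return result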
@@ -1008,13 +924,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  add_cached_bundles_to_result: bool = True,
  compare_hash: bool = True,
  ) -> tuple[FhirGetResponse, int]:
- profiling_start = time.perf_counter()
- http_request_time = 0.0
- http_request_count = 0
- cache_check_time = 0.0
- cache_update_time = 0.0
- append_time = 0.0
-
  assert resource_type
  if not scope_parser.scope_allows(resource_type=resource_type):
  if logger:
@@ -1048,13 +957,14 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):

  non_cached_id_list: list[str] = []
  # get any cached resources
- cache_check_start = time.perf_counter()
  if id_list:
  for resource_id in id_list:
  cache_entry: RequestCacheEntry | None = await cache.get_async(
  resource_type=resource_type, resource_id=resource_id
  )
  if cache_entry:
+ # if there is an entry then it means we tried to get it in the past
+ # so don't get it again whether we were successful or not
  if logger:
  logger.info(
  f"{cache_entry.status} Returning {resource_type}/{resource_id} from cache (ByParam)"
@@ -1063,8 +973,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  if logger:
  logger.info(f"Cache entry not found for {resource_type}/{resource_id} (ByParam)")
  non_cached_id_list.append(resource_id)
- cache_check_time = time.perf_counter() - cache_check_start
-
  all_result: FhirGetResponse | None = None
  # either we have non-cached ids or this is a query without id but has other parameters
  if (
@@ -1075,7 +983,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  # call the server to get the resources
  result1: FhirGetResponse
  result: FhirGetResponse | None
- http_start = time.perf_counter()
  async for result1 in self._get_with_session_async(
  page_number=None,
  ids=non_cached_id_list,
@@ -1084,8 +991,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  fn_handle_streaming_chunk=None,
  resource_type=resource_type,
  ):
- http_request_time += time.perf_counter() - http_start
- http_request_count += 1
  result = result1
  # if we got a failure then check if we can get it one by one
  if (not result or result.status != 200) and len(non_cached_id_list) > 1:
@@ -1098,7 +1003,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  f" Fetching one by one ids: {non_cached_id_list}"
  )
  # For some resources if search by _id doesn't work then fetch one by one.
- one_by_one_start = time.perf_counter()
  result = await self._get_resources_by_id_one_by_one_async(
  resource_type=resource_type,
  ids=non_cached_id_list,
@@ -1107,9 +1011,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  logger=logger,
  compare_hash=compare_hash,
  )
- one_by_one_time = time.perf_counter() - one_by_one_start
- http_request_time += one_by_one_time
- http_request_count += len(non_cached_id_list)
  else:
  if logger:
  logger.info(f"Fetched {resource_type} resources using _id for url {self._url}")
@@ -1125,14 +1026,11 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):

  # append to the response
  if all_result:
- append_start = time.perf_counter()
  all_result = all_result.append(result)
- append_time += time.perf_counter() - append_start
  else:
  all_result = result
  # If non_cached_id_list is not empty and resource_type does not support ?_id search then fetch it one by one
  elif len(non_cached_id_list):
- one_by_one_start = time.perf_counter()
  all_result = await self._get_resources_by_id_one_by_one_async(
  resource_type=resource_type,
  ids=non_cached_id_list,
@@ -1141,13 +1039,10 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  logger=logger,
  compare_hash=compare_hash,
  )
- http_request_time += time.perf_counter() - one_by_one_start
- http_request_count += len(non_cached_id_list)

  # This list tracks the non-cached ids that were found
  found_non_cached_id_list: list[str] = []
  # Cache the fetched entries
- cache_update_start = time.perf_counter()
  if all_result:
  non_cached_bundle_entry: FhirBundleEntry
  for non_cached_bundle_entry in all_result.get_bundle_entries():
@@ -1181,6 +1076,7 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  logger.debug(f"Inserted {resource_type}/{non_cached_resource_id} into cache (ByParam)")
  found_non_cached_id_list.append(non_cached_resource_id)

+ # now add all the non-cached ids that were NOT found to the cache too so we don't look for them again
  for non_cached_id in non_cached_id_list:
  if non_cached_id not in found_non_cached_id_list:
  cache_updated = await cache.add_async(
@@ -1195,7 +1091,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  )
  if cache_updated and logger:
  logger.info(f"Inserted 404 for {resource_type}/{non_cached_id} into cache (ByParam)")
- cache_update_time = time.perf_counter() - cache_update_start

  bundle_response: FhirGetBundleResponse = (
  FhirGetBundleResponse.from_response(other_response=all_result)
@@ -1237,21 +1132,6 @@ class SimulatedGraphProcessorMixin(ABC, FhirClientProtocol):
  storage_mode=self._storage_mode,
  )
  )
-
- total_time = time.perf_counter() - profiling_start
- processing_time = total_time - http_request_time - cache_check_time - cache_update_time - append_time
-
- if logger and http_request_count > 0:
- logger.info(
- f"[PROFILING] _get_resources_by_parameters_async for {resource_type}: "
- f"total={total_time:.3f}s, "
- f"http_requests={http_request_time:.3f}s ({http_request_count} calls), "
- f"cache_check={cache_check_time:.3f}s, "
- f"cache_update={cache_update_time:.3f}s, "
- f"append={append_time:.3f}s, "
- f"processing={processing_time:.3f}s"
- )
-
  return bundle_response, cache.cache_hits

  # noinspection PyPep8Naming
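
Taken together, the comments added to this method document the caching contract: every attempted id is recorded, hits and misses alike, so a cached 404 stops the same id from being requested again within a traversal. A toy sketch of that negative-caching idea (not the SDK's RequestCache API):

    class ToyRequestCache:
        """Toy illustration of negative caching; not the SDK's RequestCache."""

        def __init__(self) -> None:
            self._entries: dict[tuple[str, str], int] = {}

        async def get_async(self, resource_type: str, resource_id: str) -> int | None:
            # Any entry, even a 404, means "already attempted; don't refetch".
            return self._entries.get((resource_type, resource_id))

        async def add_async(self, resource_type: str, resource_id: str, status: int) -> bool:
            key = (resource_type, resource_id)
            if key in self._entries:
                return False
            self._entries[key] = status  # status 404 records a known miss
            return True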
@@ -1,5 +1,4 @@
  import json
- import logging
  from collections.abc import AsyncGenerator, Generator
  from datetime import datetime
  from logging import Logger
@@ -27,7 +26,7 @@ from helix_fhir_client_sdk.utilities.retryable_aiohttp_url_result import (
  RetryableAioHttpUrlResult,
  )

- logger = logging.getLogger(__name__)
+
  class FhirGetBundleResponse(FhirGetResponse):
  """
  This class represents a response from a FHIR server.
@@ -116,12 +115,13 @@ class FhirGetBundleResponse(FhirGetResponse):
  """
  other_response_entries: FhirBundleEntryList = other_response.get_bundle_entries()

- if len(other_response_entries):
+ if len(other_response_entries) > 0:
  # only append if there are entries in the other response
  if self._bundle_entries is None:
- self._bundle_entries = other_response_entries
- else:
- self._bundle_entries.extend(other_response_entries)
+ self._bundle_entries = FhirBundleEntryList()
+ from collections import deque
+
+ deque.extend(self._bundle_entries, other_response_entries)

  return self
@@ -133,34 +133,8 @@ class FhirGetBundleResponse(FhirGetResponse):
  :param others: list of FhirGetResponse objects
  :return: self
  """
- if not others:
- return self
-
- # Optimization: Since duplicates are already filtered by cache,
- # we can bypass ALL duplicate checking and directly extend the deque
- import time
- collect_start = time.perf_counter()
- all_entries: list[FhirBundleEntry] = []
  for other_response in others:
- get_start = time.perf_counter()
- other_response_entries: FhirBundleEntryList = other_response.get_bundle_entries()
- get_time = time.perf_counter() - get_start
- logger.info(f"[DEBUG] get_bundle_entries took {get_time:.3f}s for response with {len(other_response_entries)} entries")
- if len(other_response_entries) > 0:
- all_entries.extend(other_response_entries)
- collect_time = time.perf_counter() - collect_start
- print(f"[DEBUG] Collecting all entries from {len(others)} responses took {collect_time:.3f}s")
-
- if all_entries:
- if self._bundle_entries is None:
- self._bundle_entries = FhirBundleEntryList()
-
- extend_start = time.perf_counter()
- from collections import deque
- deque.extend(self._bundle_entries, all_entries)
- extend_time = time.perf_counter() - extend_start
- logger.info(f"[DEBUG] deque.extend of {len(all_entries)} entries took {extend_time:.3f}s")
-
+ self.append(other_response=other_response)
  return self

  @override
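
Read together with the append() hunk above, the rewritten extend() is just a delegation loop; each append() call still batches that response's entries through one deque.extend, so total work stays linear in the number of entries. In effect (type annotations elided):

    # Paraphrase of the new extend() body above, not a verbatim copy.
    def extend(self, others):
        for other_response in others:
            self.append(other_response=other_response)
        return self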
@@ -337,6 +311,9 @@ class FhirGetBundleResponse(FhirGetResponse):
  ) -> "FhirGetBundleResponse":
  """
  Removes the entries in the cache from the bundle
+ :param request_cache: RequestCache object to remove the entries from
+ :param compare_hash: if True, compare the hash of the resource with the hash in the cache
+ :return: self
  """
  # Build a lookup of cache entries by (resource_type, id)
  cache_map: dict[tuple[str, str], str | None] = {}
@@ -378,14 +355,6 @@ class FhirGetBundleResponse(FhirGetResponse):
  else:
  kept.append(entry)

- if logger and removed_entries and logger.isEnabledFor(logging.DEBUG):
- for entry in removed_entries:
- if entry.resource:
- logger.debug(
- f"Removing entry from bundle with id {entry.resource.id} and resource "
- f"type {entry.resource.resource_type}"
- )
-
  self._bundle_entries = FhirBundleEntryList(kept)
  return self

@@ -73,7 +73,7 @@ class TestFhirGetBundleResponse:
  assert response._bundle_metadata.id_ == "test-bundle-id"
  assert response._bundle_metadata.type_ == "searchset"

- def test_append_unique(self, sample_bundle_response: dict[str, Any]) -> None:
+ def test_append(self, sample_bundle_response: dict[str, Any]) -> None:
  """Test appending another response."""
  results_by_url: list[RetryableAioHttpUrlResult] = []

@@ -113,7 +113,8 @@ class TestFhirGetBundleResponse:

  first_response.append(second_response)

- assert len(first_response.get_bundle_entries()) == 2
+ # Updating it to 4 since we have integrated cache and do not do same lookups again so we do not need to remove duplicates here.
+ assert len(first_response.get_bundle_entries()) == 4

  def test_get_resources(self, sample_bundle_response: dict[str, Any]) -> None:
  """Test getting resources from the response."""
@@ -1,4 +1,5 @@
  import dataclasses
+ from copy import deepcopy
  from typing import Any, cast


@@ -26,42 +27,46 @@ class ResourceSeparator:
  extra_context_to_return: dict[str, Any] | None,
  ) -> ResourceSeparatorResult:
  """
- Separate contained resources without copying or mutating input resources.
- """
- resources_dicts: list[dict[str, str | None | list[dict[str, Any]]]] = []
- total_resource_count: int = 0
-
- for parent_resource in resources:
- resource_type_value = parent_resource.get("resourceType")
- if not resource_type_value:
- continue
-
- resource_type_key = str(resource_type_value).lower()
- resource_map: dict[str, str | None | list[dict[str, Any]]] = {}
+ Given a list of resources, return a list of resources with the contained resources separated out.

- # Add parent resource
- parent_list = cast(list[dict[str, Any]], resource_map.setdefault(resource_type_key, []))
- parent_list.append(parent_resource)
- total_resource_count += 1
+ :param resources: The resources list.
+ :param access_token: The access token.
+ :param url: The URL.
+ :param extra_context_to_return: The extra context to return.

- # Add contained resources (if present) without mutating parent
- contained_list = parent_resource.get("contained")
- if isinstance(contained_list, list) and contained_list:
- total_resource_count += len(contained_list)
- for contained_resource in contained_list:
- contained_type_value = contained_resource.get("resourceType")
- if not contained_type_value:
- continue
- contained_type_key = str(contained_type_value).lower()
- contained_list_out = cast(list[dict[str, Any]], resource_map.setdefault(contained_type_key, []))
- contained_list_out.append(contained_resource)
-
- # Context
- resource_map["token"] = access_token
- resource_map["url"] = url
+ :return: None
+ """
+ resources_dicts: list[dict[str, str | None | list[dict[str, Any]]]] = []
+ resource_count: int = 0
+ resource: dict[str, Any]
+ for resource in resources:
+ # make a copy so we are not changing the original resource
+ cloned_resource: dict[str, Any] = deepcopy(resource)
+ # This dict will hold the separated resources where the key is resourceType
+ # have to split these here otherwise when Spark loads them
+ # it can't handle that items in the entry array can have different schemas
+ resources_dict: dict[str, str | None | list[dict[str, Any]]] = {}
+ # add the parent resource to the resources_dict
+ resource_type = str(cloned_resource["resourceType"]).lower()
+ if resource_type not in resources_dict:
+ resources_dict[resource_type] = []
+ if isinstance(resources_dict[resource_type], list):
+ cast(list[dict[str, Any]], resources_dict[resource_type]).append(cloned_resource)
+ resource_count += 1
+ # now see if this resource has a contained array and if so, add those to the resources_dict
+ if "contained" in cloned_resource:
+ contained_resources = cloned_resource.pop("contained")
+ for contained_resource in contained_resources:
+ resource_type = str(contained_resource["resourceType"]).lower()
+ if resource_type not in resources_dict:
+ resources_dict[resource_type] = []
+ if isinstance(resources_dict[resource_type], list):
+ cast(list[dict[str, Any]], resources_dict[resource_type]).append(contained_resource)
+ resource_count += 1
+ resources_dict["token"] = access_token
+ resources_dict["url"] = url
  if extra_context_to_return:
- resource_map.update(extra_context_to_return)
-
- resources_dicts.append(resource_map)
+ resources_dict.update(extra_context_to_return)
+ resources_dicts.append(resources_dict)

- return ResourceSeparatorResult(resources_dicts=resources_dicts, total_count=total_resource_count)
+ return ResourceSeparatorResult(resources_dicts=resources_dicts, total_count=resource_count)
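
To make the deepcopy-based separation concrete, an illustrative input and the shape of one resulting dict (keys are the lower-cased resourceType values; the SDK wraps the list of such dicts in a ResourceSeparatorResult, and access_token and url are whatever the caller passed in):

    # Illustrative only: one Patient with one contained Organization.
    patient = {
        "resourceType": "Patient",
        "id": "p1",
        "contained": [{"resourceType": "Organization", "id": "o1"}],
    }
    # Separating [patient] yields total_count == 2 and one dict shaped like:
    # {
    #     "patient": [{"resourceType": "Patient", "id": "p1"}],  # "contained" popped off the copy
    #     "organization": [{"resourceType": "Organization", "id": "o1"}],
    #     "token": access_token,
    #     "url": url,
    # }
    # The original `patient` dict is untouched, since only the deepcopy is mutated.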
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: helix.fhir.client.sdk
- Version: 4.2.21
+ Version: 4.2.23
  Summary: helix.fhir.client.sdk
  Home-page: https://github.com/icanbwell/helix.fhir.client.sdk
  Author: Imran Qureshi
@@ -32,7 +32,7 @@ helix_fhir_client_sdk/graph/fhir_graph_mixin.py,sha256=z0j9FmO2bOnmzgQmczfkWC70u
  helix_fhir_client_sdk/graph/graph_definition.py,sha256=FTa1GLjJ6oooAhNw7SPk-Y8duB-5WtJtnwADao-afaI,3878
  helix_fhir_client_sdk/graph/graph_link_parameters.py,sha256=3rknHL6SBgpT2A1fr-AikEFrR_9nIJUotZ82XFzROLo,599
  helix_fhir_client_sdk/graph/graph_target_parameters.py,sha256=fdYQpPZxDnyWyevuwDwxeTXOJoE2PgS5QhPaXpwtFcU,705
- helix_fhir_client_sdk/graph/simulated_graph_processor_mixin.py,sha256=CNXlqjkdrebypCY4JHmz1XbGT3kpcSpich9ourE76H0,66430
+ helix_fhir_client_sdk/graph/simulated_graph_processor_mixin.py,sha256=vNoiTL3wgo9slfwn-a8YsIeiwZIxVWVBaCQ-Ne1_4Gk,60460
  helix_fhir_client_sdk/graph/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  helix_fhir_client_sdk/graph/test/test_graph_mixin.py,sha256=LNd4LVjryVLgzWeTXMDpsbdauXl7u3LMfj9irnNfb_k,5469
  helix_fhir_client_sdk/graph/test/test_simulate_graph_processor_mixin.py,sha256=EQDfhqJfUrP6SptXRP7ayEN7g5cZQMA00ccXzeXiSXM,46312
@@ -52,16 +52,16 @@ helix_fhir_client_sdk/responses/fhir_response_processor.py,sha256=fOSvqWjVI1BA6a
  helix_fhir_client_sdk/responses/fhir_update_response.py,sha256=_6zZz85KQP69WFxejlX8BBWAKWtzsMGSJjR_zqhl_m4,2727
  helix_fhir_client_sdk/responses/get_result.py,sha256=hkbZeu9h-01ZZckAuckn6UDR9GXGgRAIiKEN6ELRj80,1252
  helix_fhir_client_sdk/responses/paging_result.py,sha256=tpmfdgrtaAmmViVxlw-EBHoe0PVjSQW9zicwRmhUVpI,1360
- helix_fhir_client_sdk/responses/resource_separator.py,sha256=7Ic0_SPNKCBAk6l07Ke2-AoObkBMnKdmtbbtBBCtVjE,2606
+ helix_fhir_client_sdk/responses/resource_separator.py,sha256=jugaEkJYunx8VGVFCLwWNSjrBlI8DDm61LzSx9oR8iE,3230
  helix_fhir_client_sdk/responses/get/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- helix_fhir_client_sdk/responses/get/fhir_get_bundle_response.py,sha256=ouQlsnqbDgFkMvjxOHtdLh-BMRhmVmP76sqtAjWefro,20147
+ helix_fhir_client_sdk/responses/get/fhir_get_bundle_response.py,sha256=ryNaHVBZOxsf3uKMhKrFIUCnpmC8h_X7d3-0mzwoqhQ,18659
  helix_fhir_client_sdk/responses/get/fhir_get_error_response.py,sha256=oNdKs_r7K4qRHD7fyeDhZz3v0wGTT2esAPPtDhxOyeI,12325
  helix_fhir_client_sdk/responses/get/fhir_get_list_by_resource_type_response.py,sha256=ssfb1IB2QTvqwWRzFs_VqPKH6mwcnWNo3iVh82M-Jso,13775
  helix_fhir_client_sdk/responses/get/fhir_get_list_response.py,sha256=KT5g6MjB9yWWUaSZpx1jK9Tm2yVmcFyZMHBBnDDAPtU,11858
  helix_fhir_client_sdk/responses/get/fhir_get_response_factory.py,sha256=OrizzAVoXvxnQbBxszeS9PUtbzg97RGNEJfD4PuOLig,6815
  helix_fhir_client_sdk/responses/get/fhir_get_single_response.py,sha256=pLjjDyxBvA_FNI7sGU2hM9urue3Bzrrwo_RcrOD5yz8,9755
  helix_fhir_client_sdk/responses/get/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- helix_fhir_client_sdk/responses/get/test/test_get_bundle_response.py,sha256=k66eMtb0e4QcWYeF_M3TUrwCXQgD5EOcpUaijzg7a2I,19545
+ helix_fhir_client_sdk/responses/get/test/test_get_bundle_response.py,sha256=DF4sjy074jiro8jASavd-2hMkjIR6sIdIytzhIyBRuU,19674
  helix_fhir_client_sdk/responses/get/test/test_get_error_response.py,sha256=8P4zGgeHe-6OaSX152ixFW4W0iG2scK89O0VY_YcxuY,7613
  helix_fhir_client_sdk/responses/get/test/test_get_list_by_resource_type_response.py,sha256=eIZlhTYg5Iqh0vTmVAlesenQDyT8eCmFs1U4JngIM8w,12942
  helix_fhir_client_sdk/responses/get/test/test_get_list_response.py,sha256=vOsdgqSd5TwwgbwEX_kYvDyOnqAaKBKGAZFr4EZpGDM,14428
@@ -130,7 +130,7 @@ helix_fhir_client_sdk/validators/async_fhir_validator.py,sha256=i1BC98hZF6JhMQQz
  helix_fhir_client_sdk/validators/fhir_validator.py,sha256=HWBldSEB9yeKIcnLcV8R-LoTzwT_OMu8SchtUUBKzys,2331
  helix_fhir_client_sdk/validators/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  helix_fhir_client_sdk/validators/test/test_async_fhir_validator.py,sha256=RmSowjPUdZee5nYuYujghxWyqJ20cu7U0lJFtFT-ZBs,3285
- helix_fhir_client_sdk-4.2.21.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ helix_fhir_client_sdk-4.2.23.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/logger_for_test.py,sha256=UC-7F6w6fDsUIYf37aRnvUdiUUVk8qkJEUSuO17NQnI,1525
  tests/test_fhir_client_clone.py,sha256=c5y1rWJ32nBSUnK1FfyymY005dNowd4Nf1xrbuQolNk,5368
@@ -213,7 +213,7 @@ tests_integration/test_emr_server_auth.py,sha256=2I4QUAspQN89uGf6JB2aVuYaBeDnRJz
  tests_integration/test_firely_fhir.py,sha256=ll6-plwQrKfdrEyfbw0wLTC1jB-Qei1Mj-81tYTl5eQ,697
  tests_integration/test_merge_vs_smart_merge_behavior.py,sha256=LrIuyxzw0YLaTjcRtG0jzy0M6xSv9qebmdBtMPDcacQ,3733
  tests_integration/test_staging_server_graph.py,sha256=5RfMxjhdX9o4-n_ZRvze4Sm8u8NjRijRLDpqiz8qD_0,7132
- helix_fhir_client_sdk-4.2.21.dist-info/METADATA,sha256=YjnoCZPuhvIXjOWEC647V37vZ0IzeXqhn-xBeCbGhuE,7210
- helix_fhir_client_sdk-4.2.21.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- helix_fhir_client_sdk-4.2.21.dist-info/top_level.txt,sha256=BRnDS6ceQxs-4u2jXznATObgP8G2cGAerlH0ZS4sJ6M,46
- helix_fhir_client_sdk-4.2.21.dist-info/RECORD,,
+ helix_fhir_client_sdk-4.2.23.dist-info/METADATA,sha256=rIrYEAk9e96PhiNUoqRSwUAm-vA3qWB-34bOmhXwuoU,7210
+ helix_fhir_client_sdk-4.2.23.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ helix_fhir_client_sdk-4.2.23.dist-info/top_level.txt,sha256=BRnDS6ceQxs-4u2jXznATObgP8G2cGAerlH0ZS4sJ6M,46
+ helix_fhir_client_sdk-4.2.23.dist-info/RECORD,,