helix.fhir.client.sdk 4.2.9-py3-none-any.whl → 4.2.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -119,8 +119,6 @@ class FhirMergeResourcesMixin(FhirClientProtocol):
  access_token=self._access_token,
  access_token_expiry_date=self._access_token_expiry_date,
  persistent_session=self._persistent_session,
- use_persistent_session=self._use_persistent_session,
- close_session_on_exit=self._close_session,
  ) as client:
  http_post_start = time.time()
  response: RetryableAioHttpResponse = await client.post(
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: helix.fhir.client.sdk
- Version: 4.2.9
+ Version: 4.2.11
  Summary: helix.fhir.client.sdk
  Home-page: https://github.com/icanbwell/helix.fhir.client.sdk
  Author: Imran Qureshi
@@ -6,7 +6,7 @@ helix_fhir_client_sdk/fhir_bundle_appender.py,sha256=t1hs7p_vXKC9MUFyUnN9dTuDhRF
  helix_fhir_client_sdk/fhir_client.py,sha256=vSkgWVgNlRkdu7RFPkD1-4BF9N88PnRXhCTSqUJqHlI,34930
  helix_fhir_client_sdk/fhir_delete_mixin.py,sha256=1YiKddTJTUzzrRvG7WgSisXY8rfJAHPAEXTsOevrom8,6521
  helix_fhir_client_sdk/fhir_merge_mixin.py,sha256=YTUODvc2rqXhk5_kwNMFPykm1I9_omww2WUcDExMESg,15264
- helix_fhir_client_sdk/fhir_merge_resources_mixin.py,sha256=GsVeqcYpDvV5oeNbwyHS5YQozAyayICNcIRTQTRcrY0,36130
+ helix_fhir_client_sdk/fhir_merge_resources_mixin.py,sha256=d5x41uL__yxMcaAnYDdmbpt529ZsOZcO0EBvnV1HSGI,36002
  helix_fhir_client_sdk/fhir_patch_mixin.py,sha256=YGcCPStoqVxWCYOaNI8vCKSFkAyRzP0YM_UWp4UKYro,5858
  helix_fhir_client_sdk/fhir_update_mixin.py,sha256=7psQTBsGPY1izuwn3yD4MGLjLVWQjqA_15_IeaUspew,6230
  helix_fhir_client_sdk/function_types.py,sha256=x95j6ix3Xa9b276Q741xX1jguqBuFT6EBLDw35_EoVM,3916
@@ -130,7 +130,7 @@ helix_fhir_client_sdk/validators/async_fhir_validator.py,sha256=Bgiw5atbc5YzBYpk
  helix_fhir_client_sdk/validators/fhir_validator.py,sha256=HWBldSEB9yeKIcnLcV8R-LoTzwT_OMu8SchtUUBKzys,2331
  helix_fhir_client_sdk/validators/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  helix_fhir_client_sdk/validators/test/test_async_fhir_validator.py,sha256=RmSowjPUdZee5nYuYujghxWyqJ20cu7U0lJFtFT-ZBs,3285
- helix_fhir_client_sdk-4.2.9.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ helix_fhir_client_sdk-4.2.11.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/logger_for_test.py,sha256=UC-7F6w6fDsUIYf37aRnvUdiUUVk8qkJEUSuO17NQnI,1525
  tests/test_fhir_client_clone.py,sha256=ZjC0ctog5r8Ba0ZD94ggwbnbQMeQdxk5pTt3zTpE8rQ,2832
@@ -152,6 +152,7 @@ tests/async/test_async_fhir_client_patient_update.py,sha256=Q-hNO9D4FZ_r7oa62pyB
  tests/async/test_async_real_fhir_server_get_patients.py,sha256=0oMnUJg1KEspJ5_4eBen8z12Ion5HXv-gKAzFAkyII0,3837
  tests/async/test_async_real_fhir_server_get_patients_error.py,sha256=_s7chLogAg0yKgGpsq1o9_dDHBrzGaRWBAo8agFTN6U,1914
  tests/async/test_benchmark_compress.py,sha256=q1gDG7qXvof-3uVAqJlZAW7uO8cR0vEeDfzl-iwIEtY,16470
+ tests/async/test_benchmark_merge.py,sha256=ME0Pow_IXpIaVGWvq3ii7dGltXcz-3DGxz2gGF4LmYQ,19830
  tests/async/fhir_server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/async/fhir_server/test_async_real_fhir_server_get_graph_large.py,sha256=fM2MPF47nDF2Qwj2AkdTZ2CfvgUYGN4AVIS253KC9MQ,9430
  tests/async/fhir_server/test_async_real_fhir_server_get_patients_large.py,sha256=rXRF8E8Al7XANCmef1d_WqxSA9TVQjVC7B41OZaEQlY,5583
@@ -211,7 +212,7 @@ tests_integration/test_emr_server_auth.py,sha256=2I4QUAspQN89uGf6JB2aVuYaBeDnRJz
  tests_integration/test_firely_fhir.py,sha256=ll6-plwQrKfdrEyfbw0wLTC1jB-Qei1Mj-81tYTl5eQ,697
  tests_integration/test_merge_vs_smart_merge_behavior.py,sha256=LrIuyxzw0YLaTjcRtG0jzy0M6xSv9qebmdBtMPDcacQ,3733
  tests_integration/test_staging_server_graph.py,sha256=5RfMxjhdX9o4-n_ZRvze4Sm8u8NjRijRLDpqiz8qD_0,7132
- helix_fhir_client_sdk-4.2.9.dist-info/METADATA,sha256=e-ajvH1vdbrt84TIgYqnldqWv9RzfHeuyzgSURayZyk,5841
- helix_fhir_client_sdk-4.2.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- helix_fhir_client_sdk-4.2.9.dist-info/top_level.txt,sha256=BRnDS6ceQxs-4u2jXznATObgP8G2cGAerlH0ZS4sJ6M,46
- helix_fhir_client_sdk-4.2.9.dist-info/RECORD,,
+ helix_fhir_client_sdk-4.2.11.dist-info/METADATA,sha256=VunX8xH3c-NbVsUU9tEISnIRudlQaS7QfOB1CeYf2G8,5842
+ helix_fhir_client_sdk-4.2.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ helix_fhir_client_sdk-4.2.11.dist-info/top_level.txt,sha256=BRnDS6ceQxs-4u2jXznATObgP8G2cGAerlH0ZS4sJ6M,46
+ helix_fhir_client_sdk-4.2.11.dist-info/RECORD,,
@@ -0,0 +1,506 @@
+ """
+ Benchmark tests for comparing compressed vs uncompressed FHIR client merge operations.
+
+ These tests measure the performance of:
+ - merge_async() with compress=True vs compress=False
+
+ =============================================================================
+ HOW TO RUN THESE TESTS
+ =============================================================================
+
+ 1. Start services using docker-compose:
+    docker-compose up -d mock-server
+
+ 2. First time only - rebuild dev container to include pytest-benchmark:
+    docker-compose build dev
+
+    OR install pytest-benchmark in the running container:
+    docker-compose run --rm dev pip install pytest-benchmark
+
+ 3. Run benchmark tests inside docker container:
+    docker-compose run --rm dev pytest tests/async/test_benchmark_merge.py -v --benchmark-only
+
+ 4. Or run all benchmark variations:
+    docker-compose run --rm dev pytest tests/async/test_benchmark_merge.py -v --benchmark-only --benchmark-group-by=func
+
+ 5. Save benchmark results for comparison:
+    docker-compose run --rm dev pytest tests/async/test_benchmark_merge.py -v --benchmark-autosave
+
+ 6. Compare with previous runs:
+    docker-compose run --rm dev pytest tests/async/test_benchmark_merge.py -v --benchmark-compare
+
+ 7. Run with more iterations for accuracy:
+    docker-compose run --rm dev pytest tests/async/test_benchmark_merge.py -v --benchmark-min-rounds=10
+
+ 8. To stop mock-server:
+    docker-compose down mock-server
+
+ =============================================================================
+ """
+
+ import asyncio
+ import json
+ import socket
+ from typing import Any
+
+ import pytest
+ from mockserver_client.mockserver_client import (
+     MockServerFriendlyClient,
+     mock_request,
+     mock_response,
+     times,
+ )
+
+ from helix_fhir_client_sdk.fhir_client import FhirClient
+ from helix_fhir_client_sdk.responses.fhir_merge_response import FhirMergeResponse
+
+
+ def is_mock_server_running(host: str = "mock-server", port: int = 1080) -> bool:
+     """Check if mock-server is reachable."""
+     try:
+         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+         sock.settimeout(2)
+         result = sock.connect_ex((host, port))
+         sock.close()
+         return result == 0
+     except OSError:
+         return False
+
+
+ # Skip all tests if the mock-server is not running
+ pytestmark = pytest.mark.skipif(
+     not is_mock_server_running(), reason="Mock server not running. Start with: docker-compose up -d mock-server"
+ )
+
+
+ def generate_patient_resource(index: int) -> dict[str, Any]:
+     """Generate a realistic FHIR Patient resource."""
+     return {
+         "resourceType": "Patient",
+         "id": f"patient-{index}",
+         "meta": {
+             "versionId": "1",
+             "lastUpdated": "2025-01-15T10:30:00.000Z",
+             "source": "http://example.org/fhir",
+             "profile": ["http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient"],
+         },
+         "identifier": [
+             {
+                 "use": "official",
+                 "type": {
+                     "coding": [
+                         {
+                             "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+                             "code": "MR",
+                             "display": "Medical Record Number",
+                         }
+                     ]
+                 },
+                 "system": "http://hospital.example.org/mrn",
+                 "value": f"MRN-{index:08d}",
+             },
+             {
+                 "use": "official",
+                 "type": {
+                     "coding": [
+                         {
+                             "system": "http://terminology.hl7.org/CodeSystem/v2-0203",
+                             "code": "SS",
+                             "display": "Social Security Number",
+                         }
+                     ]
+                 },
+                 "system": "http://hl7.org/fhir/sid/us-ssn",
+                 "value": f"{100 + index:03d}-{50 + index:02d}-{1000 + index:04d}",
+             },
+         ],
+         "active": True,
+         "name": [
+             {
+                 "use": "official",
+                 "family": f"TestFamily{index}",
+                 "given": [f"TestGiven{index}", f"MiddleName{index}"],
+                 "prefix": ["Mr."],
+                 "suffix": ["Jr."],
+             },
+             {
+                 "use": "nickname",
+                 "given": [f"Nick{index}"],
+             },
+         ],
+         "telecom": [
+             {"system": "phone", "value": f"555-{100 + index:03d}-{1000 + index:04d}", "use": "home"},
+             {"system": "phone", "value": f"555-{200 + index:03d}-{2000 + index:04d}", "use": "mobile"},
+             {"system": "email", "value": f"patient{index}@example.com", "use": "home"},
+         ],
+         "gender": "male" if index % 2 == 0 else "female",
+         "birthDate": f"{1950 + (index % 50)}-{(index % 12) + 1:02d}-{(index % 28) + 1:02d}",
+         "deceasedBoolean": False,
+         "address": [
+             {
+                 "use": "home",
+                 "type": "physical",
+                 "line": [f"{100 + index} Main Street", f"Apt {index}"],
+                 "city": "Boston",
+                 "state": "MA",
+                 "postalCode": f"02{100 + (index % 900):03d}",
+                 "country": "USA",
+             },
+             {
+                 "use": "work",
+                 "type": "postal",
+                 "line": [f"{200 + index} Business Ave"],
+                 "city": "Cambridge",
+                 "state": "MA",
+                 "postalCode": f"02{200 + (index % 800):03d}",
+                 "country": "USA",
+             },
+         ],
+         "maritalStatus": {
+             "coding": [
+                 {
+                     "system": "http://terminology.hl7.org/CodeSystem/v3-MaritalStatus",
+                     "code": "M" if index % 2 == 0 else "S",
+                     "display": "Married" if index % 2 == 0 else "Never Married",
+                 }
+             ]
+         },
+         "communication": [
+             {
+                 "language": {
+                     "coding": [
+                         {
+                             "system": "urn:ietf:bcp:47",
+                             "code": "en-US",
+                             "display": "English (United States)",
+                         }
+                     ]
+                 },
+                 "preferred": True,
+             }
+         ],
+         "generalPractitioner": [{"reference": f"Practitioner/practitioner-{index % 10}"}],
+         "managingOrganization": {"reference": "Organization/org-1"},
+     }
+
+
+ def generate_patient_resources_list(count: int) -> list[dict[str, Any]]:
+     """Generate a list of FHIR Patient resources."""
+     return [generate_patient_resource(i) for i in range(count)]
+
+
+ def generate_merge_response(count: int) -> list[dict[str, Any]]:
+     """Generate a merge response for the given count of resources."""
+     return [{"created": 1, "updated": 0} for _ in range(count)]
+
+
+ @pytest.fixture(scope="module")
+ def mock_server_url() -> str:
+     return "http://mock-server:1080"
+
+
+ @pytest.fixture(scope="module")
+ def mock_client(mock_server_url: str) -> MockServerFriendlyClient:
+     return MockServerFriendlyClient(base_url=mock_server_url)
+
+
+ @pytest.fixture(scope="module")
+ def setup_mock_merge_endpoints(mock_client: MockServerFriendlyClient, mock_server_url: str) -> str:
+     """Set up mock endpoints for merge operations with different payload sizes."""
+     test_name = "benchmark_merge"
+
+     mock_client.clear(f"/{test_name}/*.*")
+     mock_client.reset()
+
+     # Create payloads of different sizes for benchmarking
+     payload_sizes = {
+         "small": 10,  # 10 patients
+         "medium": 100,  # 100 patients
+         "large": 500,  # 500 patients
+     }
+
+     # Setup mock endpoints for each payload size - using regex to match any request body
+     for size, count in payload_sizes.items():
+         response_body = json.dumps(generate_merge_response(count))
+
+         # Endpoint for POST /Patient/1/$merge (single resource merge)
+         mock_client.expect(
+             request=mock_request(
+                 path=f"/{test_name}/{size}/Patient/1/$merge",
+                 method="POST",
+             ),
+             response=mock_response(body=response_body),
+             timing=times(10000),  # Allow many requests for benchmarking
+         )
+
+         # Endpoint for POST /Patient/$merge (batch merge)
+         mock_client.expect(
+             request=mock_request(
+                 path=f"/{test_name}/{size}/Patient/$merge",
+                 method="POST",
+             ),
+             response=mock_response(body=response_body),
+             timing=times(10000),
+         )
+
+     return f"{mock_server_url}/{test_name}"
+
+
+ # ============================================================================
+ # Benchmark Tests for merge_async() - Small Payload (10 patients)
+ # ============================================================================
+
+
+ def test_benchmark_merge_async_compress_false_small(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark merge_async with compress=False and small payload (10 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/small"
+     resources = generate_patient_resources_list(10)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(False)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(id_="1", json_data_list=json_data_list)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_merge_async_compress_true_small(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark merge_async with compress=True and small payload (10 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/small"
+     resources = generate_patient_resources_list(10)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(True)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(id_="1", json_data_list=json_data_list)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ # ============================================================================
+ # Benchmark Tests for merge_async() - Medium Payload (100 patients)
+ # ============================================================================
+
+
+ def test_benchmark_merge_async_compress_false_medium(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark merge_async with compress=False and medium payload (100 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/medium"
+     resources = generate_patient_resources_list(100)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(False)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(id_="1", json_data_list=json_data_list)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_merge_async_compress_true_medium(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark merge_async with compress=True and medium payload (100 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/medium"
+     resources = generate_patient_resources_list(100)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(True)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(id_="1", json_data_list=json_data_list)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ # ============================================================================
+ # Benchmark Tests for merge_async() - Large Payload (500 patients)
+ # ============================================================================
+
+
+ def test_benchmark_merge_async_compress_false_large(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark merge_async with compress=False and large payload (500 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/large"
+     resources = generate_patient_resources_list(500)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(False)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(id_="1", json_data_list=json_data_list)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_merge_async_compress_true_large(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark merge_async with compress=True and large payload (500 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/large"
+     resources = generate_patient_resources_list(500)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(True)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(id_="1", json_data_list=json_data_list)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ # ============================================================================
+ # Benchmark Tests for batch merge_async() - Multiple resources in single call
+ # ============================================================================
+
+
+ def test_benchmark_batch_merge_async_compress_false_small(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark batch merge_async with compress=False and small payload (10 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/small"
+     resources = generate_patient_resources_list(10)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(False)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(json_data_list=json_data_list, batch_size=10)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_batch_merge_async_compress_true_small(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark batch merge_async with compress=True and small payload (10 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/small"
+     resources = generate_patient_resources_list(10)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(True)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(json_data_list=json_data_list, batch_size=10)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_batch_merge_async_compress_false_medium(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark batch merge_async with compress=False and medium payload (100 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/medium"
+     resources = generate_patient_resources_list(100)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(False)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(json_data_list=json_data_list, batch_size=50)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_batch_merge_async_compress_true_medium(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark batch merge_async with compress=True and medium payload (100 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/medium"
+     resources = generate_patient_resources_list(100)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(True)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(json_data_list=json_data_list, batch_size=50)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_batch_merge_async_compress_false_large(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark batch merge_async with compress=False and large payload (500 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/large"
+     resources = generate_patient_resources_list(500)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(False)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(json_data_list=json_data_list, batch_size=100)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
+
+
+ def test_benchmark_batch_merge_async_compress_true_large(benchmark: Any, setup_mock_merge_endpoints: str) -> None:
+     """Benchmark batch merge_async with compress=True and large payload (500 patients)."""
+     base_url = f"{setup_mock_merge_endpoints}/large"
+     resources = generate_patient_resources_list(500)
+     json_data_list = [json.dumps(r) for r in resources]
+
+     async def run_merge_async() -> FhirMergeResponse | None:
+         fhir_client = FhirClient().url(base_url).resource("Patient")
+         fhir_client = fhir_client.compress(True)
+         return await FhirMergeResponse.from_async_generator(
+             fhir_client.merge_async(json_data_list=json_data_list, batch_size=100)
+         )
+
+     def run_sync() -> FhirMergeResponse | None:
+         return asyncio.run(run_merge_async())
+
+     result = benchmark(run_sync)
+     assert result is not None
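
For readers who want to try the compressed vs. uncompressed comparison outside the packaged test suite, here is a minimal standalone sketch. It reuses only calls that appear verbatim in the added test file above (FhirClient().url(...).resource(...).compress(...), merge_async(), and FhirMergeResponse.from_async_generator()); the server URL and the single Patient payload are placeholder assumptions, not part of this diff.

import asyncio
import json
import time

from helix_fhir_client_sdk.fhir_client import FhirClient
from helix_fhir_client_sdk.responses.fhir_merge_response import FhirMergeResponse

# Assumption: any FHIR server exposing the $merge operation; replace with your own.
FHIR_SERVER_URL = "http://localhost:3000/4_0_0"


async def merge_once(compress: bool) -> float:
    """Merge one Patient resource and return the elapsed wall-clock time in seconds."""
    patient = {"resourceType": "Patient", "id": "example-1", "active": True}
    fhir_client = FhirClient().url(FHIR_SERVER_URL).resource("Patient").compress(compress)
    start = time.time()
    response: FhirMergeResponse | None = await FhirMergeResponse.from_async_generator(
        fhir_client.merge_async(id_="example-1", json_data_list=[json.dumps(patient)])
    )
    assert response is not None
    return time.time() - start


async def main() -> None:
    # The same comparison the benchmark tests automate with pytest-benchmark.
    print(f"compress=False: {await merge_once(False):.3f}s")
    print(f"compress=True:  {await merge_once(True):.3f}s")


if __name__ == "__main__":
    asyncio.run(main())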