aio-sf 0.1.0b9__tar.gz → 0.1.0b10__tar.gz

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (47)
  1. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/PKG-INFO +11 -7
  2. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/README.md +10 -6
  3. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/__init__.py +3 -3
  4. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/batch.py +61 -41
  5. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/client.py +13 -13
  6. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/test_retry_and_batch.py +45 -35
  7. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/.cursor/rules/api-structure.mdc +0 -0
  8. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/.cursor/rules/async-patterns.mdc +0 -0
  9. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/.cursor/rules/project-tooling.mdc +0 -0
  10. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/.github/workflows/publish.yml +0 -0
  11. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/.github/workflows/test.yml +0 -0
  12. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/.gitignore +0 -0
  13. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/LICENSE +0 -0
  14. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/RELEASE.md +0 -0
  15. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/pyproject.toml +0 -0
  16. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/pytest.ini +0 -0
  17. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/__init__.py +0 -0
  18. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/__init__.py +0 -0
  19. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/__init__.py +0 -0
  20. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/base.py +0 -0
  21. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/client_credentials.py +0 -0
  22. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/refresh_token.py +0 -0
  23. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/sfdx_cli.py +0 -0
  24. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/static_token.py +0 -0
  25. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/bulk_v2/__init__.py +0 -0
  26. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/bulk_v2/client.py +0 -0
  27. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/bulk_v2/types.py +0 -0
  28. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/client.py +0 -0
  29. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/records.py +0 -0
  30. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/retry.py +0 -0
  31. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/types.py +0 -0
  32. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/describe/__init__.py +0 -0
  33. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/describe/client.py +0 -0
  34. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/describe/types.py +0 -0
  35. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/query/__init__.py +0 -0
  36. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/query/client.py +0 -0
  37. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/query/types.py +0 -0
  38. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/types.py +0 -0
  39. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/exporter/__init__.py +0 -0
  40. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/exporter/bulk_export.py +0 -0
  41. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/exporter/parquet_writer.py +0 -0
  42. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/__init__.py +0 -0
  43. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/conftest.py +0 -0
  44. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/test_api_clients.py +0 -0
  45. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/test_auth.py +0 -0
  46. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/test_client.py +0 -0
  47. {aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/uv.lock +0 -0
{aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aio-sf
-Version: 0.1.0b9
+Version: 0.1.0b10
 Summary: Async Salesforce library for Python
 Project-URL: Homepage, https://github.com/callawaycloud/aio-salesforce
 Project-URL: Repository, https://github.com/callawaycloud/aio-salesforce
@@ -166,15 +166,19 @@ async with SalesforceClient(auth_strategy=auth) as sf:
 
 **Advanced - With Retries, Concurrency Scaling, and Progress:**
 ```python
-from aio_sf.api.collections import ProgressInfo
+from aio_sf.api.collections import ResultInfo
 
-async def on_progress(info: ProgressInfo):
+async def on_result(info: ResultInfo):
+    # Called after each batch completes with successes and errors split
     print(
-        f"Attempt {info['current_attempt']}: "
-        f"{info['records_succeeded']} succeeded, "
-        f"{info['records_failed']} failed, "
+        f"Batch: {len(info['successes'])} succeeded, {len(info['errors'])} failed | "
+        f"Attempt {info['current_attempt']}, "
+        f"Overall: {info['records_succeeded']} OK, {info['records_failed']} failed, "
         f"{info['records_pending']} pending"
     )
+    # Inspect errors (includes both API errors and HTTP failures)
+    for error in info['errors']:
+        print(f"  Error: {error['errors']}")
 
 async with SalesforceClient(auth_strategy=auth) as sf:
     results = await sf.collections.insert(
@@ -183,7 +187,7 @@ async with SalesforceClient(auth_strategy=auth) as sf:
         batch_size=[200, 100, 25],  # Shrink batch size on retry
         max_concurrent_batches=[5, 3, 1],  # Reduce concurrency on retry
         max_attempts=5,  # Retry up to 5 times
-        on_batch_complete=on_progress,  # Progress callback
+        on_result=on_result,  # Callback with results
     )
 ```
 
{aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/README.md
@@ -103,15 +103,19 @@ async with SalesforceClient(auth_strategy=auth) as sf:
 
 **Advanced - With Retries, Concurrency Scaling, and Progress:**
 ```python
-from aio_sf.api.collections import ProgressInfo
+from aio_sf.api.collections import ResultInfo
 
-async def on_progress(info: ProgressInfo):
+async def on_result(info: ResultInfo):
+    # Called after each batch completes with successes and errors split
     print(
-        f"Attempt {info['current_attempt']}: "
-        f"{info['records_succeeded']} succeeded, "
-        f"{info['records_failed']} failed, "
+        f"Batch: {len(info['successes'])} succeeded, {len(info['errors'])} failed | "
+        f"Attempt {info['current_attempt']}, "
+        f"Overall: {info['records_succeeded']} OK, {info['records_failed']} failed, "
         f"{info['records_pending']} pending"
     )
+    # Inspect errors (includes both API errors and HTTP failures)
+    for error in info['errors']:
+        print(f"  Error: {error['errors']}")
 
 async with SalesforceClient(auth_strategy=auth) as sf:
     results = await sf.collections.insert(
@@ -120,7 +124,7 @@ async with SalesforceClient(auth_strategy=auth) as sf:
         batch_size=[200, 100, 25],  # Shrink batch size on retry
         max_concurrent_batches=[5, 3, 1],  # Reduce concurrency on retry
         max_attempts=5,  # Retry up to 5 times
-        on_batch_complete=on_progress,  # Progress callback
+        on_result=on_result,  # Callback with results
     )
 ```
 
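
Taken together, the README hunks above rename the progress hook (`on_batch_complete`/`ProgressInfo` → `on_result`/`ResultInfo`) and deliver per-batch success/error lists. A runnable sketch of the new wiring, assembled from the hunks; the `auth` strategy object and the placeholder records are assumptions, not shown in this diff:

```python
# Sketch of the 0.1.0b10 callback API, based on the README hunks above.
# Assumptions: `auth` is any auth strategy (see src/aio_sf/api/auth/);
# the records are placeholder data.
from aio_sf.api.client import SalesforceClient
from aio_sf.api.collections import ResultInfo


async def on_result(info: ResultInfo) -> None:
    # One call per completed batch; successes and errors arrive pre-split.
    print(
        f"Batch: {len(info['successes'])} ok, {len(info['errors'])} failed | "
        f"attempt {info['current_attempt']}, {info['records_pending']} pending"
    )


async def main(auth) -> None:
    records = [{"Name": f"Account {i}"} for i in range(500)]  # placeholder data
    async with SalesforceClient(auth_strategy=auth) as sf:
        await sf.collections.insert(
            records,
            sobject_type="Account",
            batch_size=[200, 100, 25],         # shrink batches on each retry
            max_concurrent_batches=[5, 3, 1],  # throttle down on each retry
            max_attempts=5,
            on_result=on_result,
        )
```

Note the behavioral change: the callback now fires once per completed batch (serialized by an internal lock, per batch.py below) rather than once per attempt.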
{aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/__init__.py
@@ -1,7 +1,7 @@
 """Salesforce Collections API module."""
 
 from .client import CollectionsAPI
-from .batch import ProgressInfo, ProgressCallback
+from .batch import ResultInfo, ResultCallback
 from .retry import ShouldRetryCallback, default_should_retry
 from .types import (
     CollectionError,
@@ -14,8 +14,8 @@ from .types import (
 
 __all__ = [
     "CollectionsAPI",
-    "ProgressInfo",
-    "ProgressCallback",
+    "ResultInfo",
+    "ResultCallback",
     "ShouldRetryCallback",
     "default_should_retry",
     "CollectionError",
{aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/batch.py
@@ -17,21 +17,25 @@ from .types import CollectionResult
 logger = logging.getLogger(__name__)
 
 
-class ProgressInfo(TypedDict):
-    """Progress information for batch operations."""
+class ResultInfo(TypedDict):
+    """Result information provided after each batch completes."""
 
+    successes: List[CollectionResult]  # Successful results from this batch
+    errors: List[
+        CollectionResult
+    ]  # Failed results from this batch (API and HTTP errors)
     total_records: int  # Total records being processed
     records_completed: int  # Records finished (succeeded or failed permanently)
-    records_succeeded: int  # Records that succeeded
-    records_failed: int  # Records that failed permanently (exhausted retries)
+    records_succeeded: int  # Records that succeeded so far
+    records_failed: int  # Records that failed permanently so far
     records_pending: int  # Records still being retried
    current_attempt: int  # Current retry attempt number (1-indexed)
     current_batch_size: int  # Batch size for current attempt
     current_concurrency: int  # Concurrency level for current attempt
 
 
-# Type alias for progress callback
-ProgressCallback = Callable[[ProgressInfo], Awaitable[None]]
+# Type alias for result callback
+ResultCallback = Callable[[ResultInfo], Awaitable[None]]
 
 
 def split_into_batches(
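
Since `ResultInfo` is a `TypedDict`, consumers read it with plain key access. A hedged sketch of an error aggregator over the new fields; the nested `statusCode` shape follows the test assertions further down in this diff, and the helper name is illustrative:

```python
from collections import Counter

from aio_sf.api.collections import ResultInfo

failure_counts: Counter = Counter()


async def collect_failures(info: ResultInfo) -> None:
    # Each entry in info["errors"] is a CollectionResult whose "errors" list
    # holds dicts carrying a "statusCode" (see the UNABLE_TO_LOCK_ROW
    # assertions in tests/test_retry_and_batch.py below).
    for result in info["errors"]:
        for err in result.get("errors", []):
            failure_counts[err.get("statusCode", "UNKNOWN")] += 1
```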
@@ -65,8 +69,9 @@ async def process_batches_concurrently(
     operation_func,
     max_concurrent_batches: int,
     total_records: int,
-    on_batch_complete: Optional[ProgressCallback] = None,
+    on_result: Optional[ResultCallback] = None,
     progress_state: Optional[Dict[str, int]] = None,
+    final_results: Optional[List] = None,
     *args,
     **kwargs,
 ) -> List[Any]:
@@ -80,8 +85,8 @@
     :param operation_func: Function to call for each batch
     :param max_concurrent_batches: Maximum number of concurrent batch operations
     :param total_records: Total number of records being processed
-    :param on_batch_complete: Optional callback invoked after each batch completes
-    :param progress_state: Dict with progress state (updated by caller)
+    :param on_result: Optional callback invoked after each batch completes with results
+    :param progress_state: Dict with progress state (to include in callback)
     :param args: Additional positional arguments for operation_func
     :param kwargs: Additional keyword arguments for operation_func
     :returns: List of results from all batches in the same order as input
@@ -91,7 +96,7 @@
         raise ValueError("max_concurrent_batches must be greater than 0")
 
     semaphore = asyncio.Semaphore(max_concurrent_batches)
-    callback_lock = asyncio.Lock() if on_batch_complete else None
+    callback_lock = asyncio.Lock() if on_result else None
 
     async def process_batch_with_semaphore(batch_index: int, batch):
         async with semaphore:
@@ -104,20 +109,52 @@
                 )
                 result = [e for _ in range(len(batch))]
 
-            # Invoke progress callback if provided
-            if on_batch_complete and callback_lock and progress_state:
+            # Invoke callback if provided, with results and progress state
+            if on_result and callback_lock and progress_state:
                 async with callback_lock:
-                    progress_info: ProgressInfo = {
+                    # Split results into successes and errors
+                    successes: List[CollectionResult] = []
+                    errors: List[CollectionResult] = []
+
+                    for item in result:
+                        # Convert exceptions to CollectionResult format
+                        if isinstance(item, Exception):
+                            error_result = convert_exception_to_result(item)
+                            errors.append(error_result)
+                        elif item.get("success", False):
+                            successes.append(item)
+                        else:
+                            errors.append(item)
+
+                    # Compute current counts dynamically from final_results
+                    if final_results:
+                        records_succeeded = sum(
+                            1
+                            for r in final_results
+                            if r is not None and r.get("success", False)
+                        )
+                        records_failed = sum(
+                            1
+                            for r in final_results
+                            if r is not None and not r.get("success", False)
+                        )
+                        records_completed = records_succeeded + records_failed
+                    else:
+                        records_succeeded = records_failed = records_completed = 0
+
+                    result_info: ResultInfo = {
+                        "successes": successes,
+                        "errors": errors,
                         "total_records": progress_state["total_records"],
-                        "records_completed": progress_state["records_completed"],
-                        "records_succeeded": progress_state["records_succeeded"],
-                        "records_failed": progress_state["records_failed"],
+                        "records_completed": records_completed,
+                        "records_succeeded": records_succeeded,
+                        "records_failed": records_failed,
                        "records_pending": progress_state["records_pending"],
                        "current_attempt": progress_state["current_attempt"],
                        "current_batch_size": progress_state["current_batch_size"],
                        "current_concurrency": progress_state["current_concurrency"],
                     }
-                    await on_batch_complete(progress_info)
+                    await on_result(result_info)
 
         return result
 
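
The hunk above leans on a common asyncio shape: a `Semaphore` bounds how many batches run at once, while a separate `Lock` serializes callback invocations so the counts in `ResultInfo` never interleave. A stripped-down, library-independent illustration of that pattern (all names here are illustrative):

```python
import asyncio
from typing import Any, Awaitable, Callable, List


async def run_batches(
    batches: List[List[dict]],
    worker: Callable[[List[dict]], Awaitable[List[Any]]],
    max_concurrent: int,
    on_done: Callable[[List[Any]], Awaitable[None]],
) -> List[List[Any]]:
    # The semaphore caps concurrent workers; the lock guarantees on_done
    # runs for one batch at a time, mirroring callback_lock above.
    semaphore = asyncio.Semaphore(max_concurrent)
    callback_lock = asyncio.Lock()

    async def run_one(batch: List[dict]) -> List[Any]:
        async with semaphore:
            result = await worker(batch)
            async with callback_lock:
                await on_done(result)
            return result

    # gather returns results in input order even if batches finish out of order
    return await asyncio.gather(*(run_one(b) for b in batches))
```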
@@ -141,7 +178,7 @@ async def process_with_retries(
     max_attempts: int,
     should_retry_callback: Optional[ShouldRetryCallback],
     max_concurrent_batches: Union[int, List[int]],
-    on_batch_complete: Optional[ProgressCallback],
+    on_result: Optional[ResultCallback],
     max_limit: int,
     *args,
     **kwargs,
@@ -155,7 +192,7 @@
     :param max_attempts: Maximum number of attempts per record
     :param should_retry_callback: Optional callback to determine if record should be retried
     :param max_concurrent_batches: Maximum concurrent batches (int or list of ints per attempt)
-    :param on_batch_complete: Progress callback
+    :param on_result: Callback invoked after each batch completes with results and progress
     :param max_limit: Maximum batch size limit for the operation
     :param args: Additional args for operation_func
     :param kwargs: Additional kwargs for operation_func
@@ -203,14 +240,15 @@
         records_to_process = [r.record for r in current_records]
         batches = split_into_batches(records_to_process, current_batch_size, max_limit)
 
-        # Process batches with current concurrency level (no callback here)
+        # Process batches - callback will be invoked for each batch with results
         batch_results = await process_batches_concurrently(
             batches,
             operation_func,
             current_concurrency,
             len(records_to_process),
-            None,  # Don't invoke callback during batch processing
-            None,
+            on_result,
+            progress_state,
+            final_results,
             *args,
             **kwargs,
         )
@@ -224,11 +262,7 @@
             final_results,
         )
 
-        # Update progress state based on results
-        # Count completed records (those not being retried)
-        records_completed_this_round = len(current_records) - len(records_to_retry)
-
-        # Count successes and failures in final_results so far
+        # Update progress state based on results for next iteration
         records_succeeded = sum(
             1 for r in final_results if r is not None and r.get("success", False)
         )
@@ -241,20 +275,6 @@
         progress_state["records_failed"] = records_failed
         progress_state["records_pending"] = len(records_to_retry)
 
-        # Invoke progress callback after we know the results
-        if on_batch_complete:
-            progress_info: ProgressInfo = {
-                "total_records": progress_state["total_records"],
-                "records_completed": progress_state["records_completed"],
-                "records_succeeded": progress_state["records_succeeded"],
-                "records_failed": progress_state["records_failed"],
-                "records_pending": progress_state["records_pending"],
-                "current_attempt": progress_state["current_attempt"],
-                "current_batch_size": progress_state["current_batch_size"],
-                "current_concurrency": progress_state["current_concurrency"],
-            }
-            await on_batch_complete(progress_info)
-
         if records_to_retry:
             logger.info(
                 f"Retrying {len(records_to_retry)} failed records "
{aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/client.py
@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING
 if TYPE_CHECKING:
     from ..client import SalesforceClient
 
-from .batch import ProgressCallback, process_with_retries
+from .batch import ResultCallback, process_with_retries
 from .records import (
     detect_record_type_and_sobject,
     prepare_records,
@@ -92,7 +92,7 @@ class CollectionsAPI:
         batch_size: Union[int, List[int]] = 200,
         max_concurrent_batches: Union[int, List[int]] = 5,
         api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
         max_attempts: int = 1,
         should_retry: Optional[ShouldRetryCallback] = None,
     ) -> CollectionInsertResponse:
@@ -109,7 +109,7 @@
         :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
         :param max_concurrent_batches: Maximum number of concurrent batch operations
         :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
         :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
         :param should_retry: Optional callback to determine if a failed record should be retried
         :returns: List of results for each record, in same order as input
@@ -139,7 +139,7 @@
             max_attempts,
             should_retry,
             max_concurrent_batches,
-            on_batch_complete,
+            on_result,
             self.MAX_RECORDS_INSERT,
             actual_sobject_type,
             all_or_none,
@@ -200,7 +200,7 @@
         batch_size: Union[int, List[int]] = 200,
         max_concurrent_batches: Union[int, List[int]] = 5,
         api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
         max_attempts: int = 1,
         should_retry: Optional[ShouldRetryCallback] = None,
     ) -> CollectionUpdateResponse:
@@ -217,7 +217,7 @@
         :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
         :param max_concurrent_batches: Maximum number of concurrent batch operations
         :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
         :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
         :param should_retry: Optional callback to determine if a failed record should be retried
         :returns: List of results for each record, in same order as input
@@ -247,7 +247,7 @@
             max_attempts,
             should_retry,
             max_concurrent_batches,
-            on_batch_complete,
+            on_result,
             self.MAX_RECORDS_UPDATE,
             actual_sobject_type,
             all_or_none,
@@ -313,7 +313,7 @@
         batch_size: Union[int, List[int]] = 200,
         max_concurrent_batches: Union[int, List[int]] = 5,
         api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
         max_attempts: int = 1,
         should_retry: Optional[ShouldRetryCallback] = None,
     ) -> CollectionUpsertResponse:
@@ -331,7 +331,7 @@
         :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
         :param max_concurrent_batches: Maximum number of concurrent batch operations
         :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
         :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
         :param should_retry: Optional callback to determine if a failed record should be retried
         :returns: List of results for each record, in same order as input
@@ -361,7 +361,7 @@
             max_attempts,
             should_retry,
             max_concurrent_batches,
-            on_batch_complete,
+            on_result,
             self.MAX_RECORDS_UPSERT,
             external_id_field,
             actual_sobject_type,
@@ -411,7 +411,7 @@
         batch_size: Union[int, List[int]] = 200,
         max_concurrent_batches: Union[int, List[int]] = 5,
         api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
         max_attempts: int = 1,
         should_retry: Optional[ShouldRetryCallback] = None,
     ) -> CollectionDeleteResponse:
@@ -427,7 +427,7 @@
         :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
         :param max_concurrent_batches: Maximum number of concurrent batch operations
         :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
         :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
         :param should_retry: Optional callback to determine if a failed record should be retried
         :returns: List of results for each record, in same order as input
@@ -453,7 +453,7 @@
             max_attempts,
             should_retry,
             max_concurrent_batches,
-            on_batch_complete,
+            on_result,
             self.MAX_RECORDS_DELETE,
             all_or_none,
             api_version,
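
Because `insert`, `update`, `upsert`, and `delete` all gained the identical `on_result` parameter, one callback can be shared across operations. A hedged sketch; the record shape and surrounding client setup are assumed, only the `on_result` keyword comes from the hunks above:

```python
from aio_sf.api.collections import ResultInfo


async def log_batch(info: ResultInfo) -> None:
    # Shared across any collections operation that accepts on_result.
    print(
        f"{len(info['successes'])} ok / {len(info['errors'])} failed "
        f"(attempt {info['current_attempt']})"
    )


async def update_accounts(sf, changed_records) -> list:
    # sf: an open SalesforceClient; changed_records: dicts including Id
    # fields (shape assumed, not shown in this diff).
    return await sf.collections.update(
        changed_records,
        sobject_type="Account",
        on_result=log_batch,
        max_attempts=3,
    )
```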
{aio_sf-0.1.0b9 → aio_sf-0.1.0b10}/tests/test_retry_and_batch.py
@@ -2,7 +2,7 @@
 
 import pytest
 import asyncio
-from aio_sf.api.collections import CollectionsAPI, ProgressInfo
+from aio_sf.api.collections import CollectionsAPI, ResultInfo
 
 
 class MockClient:
@@ -463,31 +463,38 @@ class TestProgressTracking:
             ]
         )
 
-        progress_calls = []
+        result_calls = []
 
-        async def progress_callback(progress: ProgressInfo):
-            progress_calls.append(dict(progress))
+        async def result_callback(result: ResultInfo):
+            result_calls.append(dict(result))
 
         results = await collections_api.insert(
             records,
             sobject_type="Account",
             batch_size=200,
-            on_batch_complete=progress_callback,
+            on_result=result_callback,
         )
 
         assert len(results) == 300
-        # With the new design, callback is invoked once per attempt (after all batches)
-        # Since all records succeed on first attempt, we get 1 callback
-        assert len(progress_calls) == 1
-
-        # Verify progress data - after attempt completes, all succeeded
-        assert progress_calls[0]["total_records"] == 300
-        assert progress_calls[0]["current_batch_size"] == 200
-        assert progress_calls[0]["current_concurrency"] == 5  # default
-        assert progress_calls[0]["current_attempt"] == 1
-        assert progress_calls[0]["records_succeeded"] == 300
-        assert progress_calls[0]["records_failed"] == 0
-        assert progress_calls[0]["records_pending"] == 0
+        # With the new design, callback is invoked once per batch
+        # 300 records with batch_size=200 means 2 batches
+        assert len(result_calls) == 2
+
+        # Both callbacks split successes and errors
+        assert len(result_calls[0]["successes"]) == 200
+        assert len(result_calls[0]["errors"]) == 0
+        assert len(result_calls[1]["successes"]) == 100
+        assert len(result_calls[1]["errors"]) == 0
+
+        # Verify successes are properly typed CollectionResults
+        assert all(r.get("success") for r in result_calls[0]["successes"])
+        assert all(r.get("id") is not None for r in result_calls[0]["successes"])
+
+        # Context information is provided
+        assert result_calls[0]["total_records"] == 300
+        assert result_calls[0]["current_batch_size"] == 200
+        assert result_calls[0]["current_concurrency"] == 5
+        assert result_calls[0]["current_attempt"] == 1
 
     @pytest.mark.asyncio
     async def test_progress_with_retries(self, client):
@@ -517,36 +524,39 @@ class TestProgressTracking:
             ]
         )
 
-        progress_calls = []
+        result_calls = []
 
-        async def progress_callback(progress: ProgressInfo):
-            progress_calls.append(dict(progress))
+        async def result_callback(result: ResultInfo):
+            result_calls.append(dict(result))
 
         results = await collections_api.insert(
             records,
             sobject_type="Account",
             batch_size=200,
             max_attempts=2,
-            on_batch_complete=progress_callback,
+            on_result=result_callback,
         )
 
         assert len(results) == 10
        assert all(r["success"] for r in results)
 
-        # Should have 2 progress callbacks (initial + retry)
-        assert len(progress_calls) == 2
-
-        # First attempt: all failed, all pending retry
-        assert progress_calls[0]["current_attempt"] == 1
-        assert progress_calls[0]["records_succeeded"] == 0
-        assert progress_calls[0]["records_failed"] == 0
-        assert progress_calls[0]["records_pending"] == 10
-
-        # Second attempt: all succeeded
-        assert progress_calls[1]["current_attempt"] == 2
-        assert progress_calls[1]["records_succeeded"] == 10
-        assert progress_calls[1]["records_failed"] == 0
-        assert progress_calls[1]["records_pending"] == 0
+        # Should have 2 callbacks (initial attempt + retry attempt)
+        assert len(result_calls) == 2
+
+        # First attempt: all failed - errors array contains them
+        assert result_calls[0]["current_attempt"] == 1
+        assert len(result_calls[0]["successes"]) == 0
+        assert len(result_calls[0]["errors"]) == 10
+        # Can inspect error codes in errors array
+        for error in result_calls[0]["errors"]:
+            assert not error.get("success")
+            assert error["errors"][0]["statusCode"] == "UNABLE_TO_LOCK_ROW"
+
+        # Second attempt: all succeeded - successes array contains them
+        assert result_calls[1]["current_attempt"] == 2
+        assert len(result_calls[1]["successes"]) == 10
+        assert len(result_calls[1]["errors"]) == 0
+        assert all(r.get("success") for r in result_calls[1]["successes"])
 
 
 class TestConcurrencyScaling:
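
The updated tests pin the per-batch contract: one callback per batch, successes and errors pre-split, context fields intact. A further invariant a similar test could assert, sketched here (not part of the diff, and assuming a constant non-list batch_size):

```python
# Hypothetical extra assertions in the style of the tests above.
def assert_callback_invariants(result_calls, total_records, batch_size):
    sizes = [len(c["successes"]) + len(c["errors"]) for c in result_calls]
    # No single callback reports more records than one batch can hold.
    assert all(size <= batch_size for size in sizes)
    # Retried records are reported again on later attempts, so the total
    # can exceed total_records but never undershoot it.
    assert sum(sizes) >= total_records
```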