aio-sf 0.1.0b8__tar.gz → 0.1.0b10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/PKG-INFO +30 -10
  2. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/README.md +29 -9
  3. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/__init__.py +3 -3
  4. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/batch.py +104 -45
  5. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/client.py +13 -13
  6. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/tests/test_retry_and_batch.py +48 -25
  7. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/.cursor/rules/api-structure.mdc +0 -0
  8. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/.cursor/rules/async-patterns.mdc +0 -0
  9. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/.cursor/rules/project-tooling.mdc +0 -0
  10. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/.github/workflows/publish.yml +0 -0
  11. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/.github/workflows/test.yml +0 -0
  12. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/.gitignore +0 -0
  13. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/LICENSE +0 -0
  14. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/RELEASE.md +0 -0
  15. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/pyproject.toml +0 -0
  16. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/pytest.ini +0 -0
  17. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/__init__.py +0 -0
  18. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/__init__.py +0 -0
  19. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/__init__.py +0 -0
  20. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/base.py +0 -0
  21. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/client_credentials.py +0 -0
  22. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/refresh_token.py +0 -0
  23. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/sfdx_cli.py +0 -0
  24. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/auth/static_token.py +0 -0
  25. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/bulk_v2/__init__.py +0 -0
  26. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/bulk_v2/client.py +0 -0
  27. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/bulk_v2/types.py +0 -0
  28. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/client.py +0 -0
  29. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/records.py +0 -0
  30. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/retry.py +0 -0
  31. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/collections/types.py +0 -0
  32. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/describe/__init__.py +0 -0
  33. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/describe/client.py +0 -0
  34. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/describe/types.py +0 -0
  35. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/query/__init__.py +0 -0
  36. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/query/client.py +0 -0
  37. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/query/types.py +0 -0
  38. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/api/types.py +0 -0
  39. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/exporter/__init__.py +0 -0
  40. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/exporter/bulk_export.py +0 -0
  41. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/src/aio_sf/exporter/parquet_writer.py +0 -0
  42. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/tests/__init__.py +0 -0
  43. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/tests/conftest.py +0 -0
  44. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/tests/test_api_clients.py +0 -0
  45. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/tests/test_auth.py +0 -0
  46. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/tests/test_client.py +0 -0
  47. {aio_sf-0.1.0b8 → aio_sf-0.1.0b10}/uv.lock +0 -0
--- aio_sf-0.1.0b8/PKG-INFO
+++ aio_sf-0.1.0b10/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aio-sf
-Version: 0.1.0b8
+Version: 0.1.0b10
 Summary: Async Salesforce library for Python
 Project-URL: Homepage, https://github.com/callawaycloud/aio-salesforce
 Project-URL: Repository, https://github.com/callawaycloud/aio-salesforce
@@ -153,24 +153,44 @@ asyncio.run(main())
 
 ### Collections API - Batch Operations
 
-Efficiently handle bulk operations with automatic batching and concurrency:
+Bulk operations (insert, update, upsert, delete) with automatic batching and concurrency.
 
+**Basic Usage:**
 ```python
 async with SalesforceClient(auth_strategy=auth) as sf:
     records = [{"Name": f"Account {i}"} for i in range(1000)]
 
-    # Insert with automatic batching and parallel processing
-    results = await sf.collections.insert(
-        records, sobject_type="Account",
-        batch_size=200, max_concurrent_batches=5
-    )
-
+    results = await sf.collections.insert(records, sobject_type="Account")
     # Also: update(), upsert(), delete()
 ```
 
-**Features:** Automatic batching • Concurrent processing • Order preservation • Smart retries • Progress tracking
+**Advanced - With Retries, Concurrency Scaling, and Progress:**
+```python
+from aio_sf.api.collections import ResultInfo
+
+async def on_result(info: ResultInfo):
+    # Called after each batch completes with successes and errors split
+    print(
+        f"Batch: {len(info['successes'])} succeeded, {len(info['errors'])} failed | "
+        f"Attempt {info['current_attempt']}, "
+        f"Overall: {info['records_succeeded']} OK, {info['records_failed']} failed, "
+        f"{info['records_pending']} pending"
+    )
+    # Inspect errors (includes both API errors and HTTP failures)
+    for error in info['errors']:
+        print(f" Error: {error['errors']}")
+
+async with SalesforceClient(auth_strategy=auth) as sf:
+    results = await sf.collections.insert(
+        records=records,
+        sobject_type="Account",
+        batch_size=[200, 100, 25],  # Shrink batch size on retry
+        max_concurrent_batches=[5, 3, 1],  # Reduce concurrency on retry
+        max_attempts=5,  # Retry up to 5 times
+        on_result=on_result,  # Callback with results
+    )
+```
 
-See [RETRY_GUIDE.md](RETRY_GUIDE.md) for advanced retry strategies, progress tracking, and custom error handling.
 
 ## Exporter
 
--- aio_sf-0.1.0b8/README.md
+++ aio_sf-0.1.0b10/README.md
@@ -90,24 +90,44 @@ asyncio.run(main())
 
 ### Collections API - Batch Operations
 
-Efficiently handle bulk operations with automatic batching and concurrency:
+Bulk operations (insert, update, upsert, delete) with automatic batching and concurrency.
 
+**Basic Usage:**
 ```python
 async with SalesforceClient(auth_strategy=auth) as sf:
     records = [{"Name": f"Account {i}"} for i in range(1000)]
 
-    # Insert with automatic batching and parallel processing
-    results = await sf.collections.insert(
-        records, sobject_type="Account",
-        batch_size=200, max_concurrent_batches=5
-    )
-
+    results = await sf.collections.insert(records, sobject_type="Account")
     # Also: update(), upsert(), delete()
 ```
 
-**Features:** Automatic batching • Concurrent processing • Order preservation • Smart retries • Progress tracking
+**Advanced - With Retries, Concurrency Scaling, and Progress:**
+```python
+from aio_sf.api.collections import ResultInfo
+
+async def on_result(info: ResultInfo):
+    # Called after each batch completes with successes and errors split
+    print(
+        f"Batch: {len(info['successes'])} succeeded, {len(info['errors'])} failed | "
+        f"Attempt {info['current_attempt']}, "
+        f"Overall: {info['records_succeeded']} OK, {info['records_failed']} failed, "
+        f"{info['records_pending']} pending"
+    )
+    # Inspect errors (includes both API errors and HTTP failures)
+    for error in info['errors']:
+        print(f" Error: {error['errors']}")
+
+async with SalesforceClient(auth_strategy=auth) as sf:
+    results = await sf.collections.insert(
+        records=records,
+        sobject_type="Account",
+        batch_size=[200, 100, 25],  # Shrink batch size on retry
+        max_concurrent_batches=[5, 3, 1],  # Reduce concurrency on retry
+        max_attempts=5,  # Retry up to 5 times
+        on_result=on_result,  # Callback with results
+    )
+```
 
-See [RETRY_GUIDE.md](RETRY_GUIDE.md) for advanced retry strategies, progress tracking, and custom error handling.
 
 ## Exporter
 
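The list-valued `batch_size` and `max_concurrent_batches` in the README example above are indexed by retry attempt. The resolution helper itself is not part of this diff, so the sketch below is a hypothetical illustration of the documented semantics: the name `value_for_attempt` is invented, and reusing the last entry once the list is exhausted is an assumption (the example pairs `max_attempts=5` with a three-entry list).

```python
from typing import List, Union

def value_for_attempt(value: Union[int, List[int]], attempt: int) -> int:
    """Hypothetical per-attempt lookup: a plain int applies to every attempt;
    a list is indexed by the 1-based attempt, clamped to its last entry."""
    if isinstance(value, int):
        return value
    return value[min(attempt - 1, len(value) - 1)]

# batch_size=[200, 100, 25] from the README example:
assert value_for_attempt([200, 100, 25], 1) == 200  # first attempt
assert value_for_attempt([200, 100, 25], 3) == 25   # third attempt
assert value_for_attempt([200, 100, 25], 5) == 25   # clamped (assumed behavior)
assert value_for_attempt(200, 4) == 200             # plain int: constant
```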
--- aio_sf-0.1.0b8/src/aio_sf/api/collections/__init__.py
+++ aio_sf-0.1.0b10/src/aio_sf/api/collections/__init__.py
@@ -1,7 +1,7 @@
 """Salesforce Collections API module."""
 
 from .client import CollectionsAPI
-from .batch import ProgressInfo, ProgressCallback
+from .batch import ResultInfo, ResultCallback
 from .retry import ShouldRetryCallback, default_should_retry
 from .types import (
     CollectionError,
@@ -14,8 +14,8 @@ from .types import (
 
 __all__ = [
     "CollectionsAPI",
-    "ProgressInfo",
-    "ProgressCallback",
+    "ResultInfo",
+    "ResultCallback",
     "ShouldRetryCallback",
     "default_should_retry",
     "CollectionError",
--- aio_sf-0.1.0b8/src/aio_sf/api/collections/batch.py
+++ aio_sf-0.1.0b10/src/aio_sf/api/collections/batch.py
@@ -2,7 +2,7 @@
 
 import asyncio
 import logging
-from typing import Any, Awaitable, Callable, List, Optional, TypedDict, Union
+from typing import Any, Awaitable, Callable, Dict, List, Optional, TypedDict, Union
 
 from .retry import (
     RecordWithAttempt,
@@ -17,19 +17,25 @@ from .types import CollectionResult
 logger = logging.getLogger(__name__)
 
 
-class ProgressInfo(TypedDict):
-    """Progress information for batch operations."""
+class ResultInfo(TypedDict):
+    """Result information provided after each batch completes."""
 
-    batch_index: int  # Current batch number (0-indexed)
-    total_batches: int  # Total number of batches
-    records_processed: int  # Number of records processed so far
-    total_records: int  # Total number of records to process
-    batch_size: int  # Size of the current batch
-    retry_count: int  # Number of records being retried in this operation
+    successes: List[CollectionResult]  # Successful results from this batch
+    errors: List[
+        CollectionResult
+    ]  # Failed results from this batch (API and HTTP errors)
+    total_records: int  # Total records being processed
+    records_completed: int  # Records finished (succeeded or failed permanently)
+    records_succeeded: int  # Records that succeeded so far
+    records_failed: int  # Records that failed permanently so far
+    records_pending: int  # Records still being retried
+    current_attempt: int  # Current retry attempt number (1-indexed)
+    current_batch_size: int  # Batch size for current attempt
+    current_concurrency: int  # Concurrency level for current attempt
 
 
-# Type alias for progress callback
-ProgressCallback = Callable[[ProgressInfo], Awaitable[None]]
+# Type alias for result callback
+ResultCallback = Callable[[ResultInfo], Awaitable[None]]
 
 
 def split_into_batches(
@@ -63,7 +69,9 @@ async def process_batches_concurrently(
     operation_func,
     max_concurrent_batches: int,
     total_records: int,
-    on_batch_complete: Optional[ProgressCallback] = None,
+    on_result: Optional[ResultCallback] = None,
+    progress_state: Optional[Dict[str, int]] = None,
+    final_results: Optional[List] = None,
     *args,
     **kwargs,
 ) -> List[Any]:
@@ -77,7 +85,8 @@
     :param operation_func: Function to call for each batch
     :param max_concurrent_batches: Maximum number of concurrent batch operations
     :param total_records: Total number of records being processed
-    :param on_batch_complete: Optional callback invoked after each batch completes
+    :param on_result: Optional callback invoked after each batch completes with results
+    :param progress_state: Dict with progress state (to include in callback)
     :param args: Additional positional arguments for operation_func
     :param kwargs: Additional keyword arguments for operation_func
     :returns: List of results from all batches in the same order as input
@@ -87,12 +96,9 @@
        raise ValueError("max_concurrent_batches must be greater than 0")
 
    semaphore = asyncio.Semaphore(max_concurrent_batches)
-    total_batches = len(batches)
-    callback_lock = asyncio.Lock() if on_batch_complete else None
-    records_processed = 0
+    callback_lock = asyncio.Lock() if on_result else None
 
    async def process_batch_with_semaphore(batch_index: int, batch):
-        nonlocal records_processed
        async with semaphore:
            try:
                result = await operation_func(batch, *args, **kwargs)
@@ -103,20 +109,52 @@
                )
                result = [e for _ in range(len(batch))]
 
-            # Invoke progress callback if provided
-            if on_batch_complete and callback_lock:
-                batch_size = len(batch)
+            # Invoke callback if provided, with results and progress state
+            if on_result and callback_lock and progress_state:
                async with callback_lock:
-                    records_processed += batch_size
-                    progress_info: ProgressInfo = {
-                        "batch_index": batch_index,
-                        "total_batches": total_batches,
-                        "records_processed": records_processed,
-                        "total_records": total_records,
-                        "batch_size": batch_size,
-                        "retry_count": 0,  # Set by wrapper in process_with_retries
+                    # Split results into successes and errors
+                    successes: List[CollectionResult] = []
+                    errors: List[CollectionResult] = []
+
+                    for item in result:
+                        # Convert exceptions to CollectionResult format
+                        if isinstance(item, Exception):
+                            error_result = convert_exception_to_result(item)
+                            errors.append(error_result)
+                        elif item.get("success", False):
+                            successes.append(item)
+                        else:
+                            errors.append(item)
+
+                    # Compute current counts dynamically from final_results
+                    if final_results:
+                        records_succeeded = sum(
+                            1
+                            for r in final_results
+                            if r is not None and r.get("success", False)
+                        )
+                        records_failed = sum(
+                            1
+                            for r in final_results
+                            if r is not None and not r.get("success", False)
+                        )
+                        records_completed = records_succeeded + records_failed
+                    else:
+                        records_succeeded = records_failed = records_completed = 0
+
+                    result_info: ResultInfo = {
+                        "successes": successes,
+                        "errors": errors,
+                        "total_records": progress_state["total_records"],
+                        "records_completed": records_completed,
+                        "records_succeeded": records_succeeded,
+                        "records_failed": records_failed,
+                        "records_pending": progress_state["records_pending"],
+                        "current_attempt": progress_state["current_attempt"],
+                        "current_batch_size": progress_state["current_batch_size"],
+                        "current_concurrency": progress_state["current_concurrency"],
                    }
-                    await on_batch_complete(progress_info)
+                    await on_result(result_info)
 
            return result
 
@@ -140,7 +178,7 @@ async def process_with_retries(
     max_attempts: int,
     should_retry_callback: Optional[ShouldRetryCallback],
     max_concurrent_batches: Union[int, List[int]],
-    on_batch_complete: Optional[ProgressCallback],
+    on_result: Optional[ResultCallback],
     max_limit: int,
     *args,
     **kwargs,
@@ -154,7 +192,7 @@
     :param max_attempts: Maximum number of attempts per record
     :param should_retry_callback: Optional callback to determine if record should be retried
     :param max_concurrent_batches: Maximum concurrent batches (int or list of ints per attempt)
-    :param on_batch_complete: Progress callback
+    :param on_result: Callback invoked after each batch completes with results and progress
     :param max_limit: Maximum batch size limit for the operation
     :param args: Additional args for operation_func
     :param kwargs: Additional kwargs for operation_func
@@ -163,9 +201,21 @@
    # Initialize result array with None placeholders
    max_index = max(r.original_index for r in records_with_attempts)
    final_results: List[Optional[CollectionResult]] = [None] * (max_index + 1)
+    total_records_count = max_index + 1
+
+    # Initialize progress state
+    progress_state = {
+        "total_records": total_records_count,
+        "records_completed": 0,
+        "records_succeeded": 0,
+        "records_failed": 0,
+        "records_pending": total_records_count,
+        "current_attempt": 1,
+        "current_batch_size": 0,
+        "current_concurrency": 0,
+    }
 
    current_records = records_with_attempts
-    total_retry_count = 0
 
    while current_records:
        current_attempt = current_records[0].attempt
@@ -176,6 +226,11 @@
            current_attempt, max_concurrent_batches
        )
 
+        # Update progress state for current attempt
+        progress_state["current_attempt"] = current_attempt
+        progress_state["current_batch_size"] = current_batch_size
+        progress_state["current_concurrency"] = current_concurrency
+
        logger.debug(
            f"Processing {len(current_records)} records on attempt {current_attempt} "
            f"with batch_size={current_batch_size}, concurrency={current_concurrency}"
@@ -185,23 +240,15 @@
        records_to_process = [r.record for r in current_records]
        batches = split_into_batches(records_to_process, current_batch_size, max_limit)
 
-        # Wrap progress callback to include retry count
-        wrapped_callback = None
-        if on_batch_complete:
-
-            async def progress_wrapper(progress: ProgressInfo):
-                progress["retry_count"] = total_retry_count
-                await on_batch_complete(progress)
-
-            wrapped_callback = progress_wrapper
-
-        # Process batches with current concurrency level
+        # Process batches - callback will be invoked for each batch with results
        batch_results = await process_batches_concurrently(
            batches,
            operation_func,
            current_concurrency,
            len(records_to_process),
-            wrapped_callback,
+            on_result,
+            progress_state,
+            final_results,
            *args,
            **kwargs,
        )
@@ -215,12 +262,24 @@
            final_results,
        )
 
+        # Update progress state based on results for next iteration
+        records_succeeded = sum(
+            1 for r in final_results if r is not None and r.get("success", False)
+        )
+        records_failed = sum(
+            1 for r in final_results if r is not None and not r.get("success", False)
+        )
+
+        progress_state["records_completed"] = records_succeeded + records_failed
+        progress_state["records_succeeded"] = records_succeeded
+        progress_state["records_failed"] = records_failed
+        progress_state["records_pending"] = len(records_to_retry)
+
        if records_to_retry:
            logger.info(
                f"Retrying {len(records_to_retry)} failed records "
                f"(attempt {records_to_retry[0].attempt})"
            )
-            total_retry_count += len(records_to_retry)
 
            current_records = records_to_retry
--- aio_sf-0.1.0b8/src/aio_sf/api/collections/client.py
+++ aio_sf-0.1.0b10/src/aio_sf/api/collections/client.py
@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING
 if TYPE_CHECKING:
     from ..client import SalesforceClient
 
-from .batch import ProgressCallback, process_with_retries
+from .batch import ResultCallback, process_with_retries
 from .records import (
     detect_record_type_and_sobject,
     prepare_records,
@@ -92,7 +92,7 @@ class CollectionsAPI:
        batch_size: Union[int, List[int]] = 200,
        max_concurrent_batches: Union[int, List[int]] = 5,
        api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
        max_attempts: int = 1,
        should_retry: Optional[ShouldRetryCallback] = None,
    ) -> CollectionInsertResponse:
@@ -109,7 +109,7 @@
        :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
        :param max_concurrent_batches: Maximum number of concurrent batch operations
        :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
        :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
        :param should_retry: Optional callback to determine if a failed record should be retried
        :returns: List of results for each record, in same order as input
@@ -139,7 +139,7 @@
            max_attempts,
            should_retry,
            max_concurrent_batches,
-            on_batch_complete,
+            on_result,
            self.MAX_RECORDS_INSERT,
            actual_sobject_type,
            all_or_none,
@@ -200,7 +200,7 @@
        batch_size: Union[int, List[int]] = 200,
        max_concurrent_batches: Union[int, List[int]] = 5,
        api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
        max_attempts: int = 1,
        should_retry: Optional[ShouldRetryCallback] = None,
    ) -> CollectionUpdateResponse:
@@ -217,7 +217,7 @@
        :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
        :param max_concurrent_batches: Maximum number of concurrent batch operations
        :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
        :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
        :param should_retry: Optional callback to determine if a failed record should be retried
        :returns: List of results for each record, in same order as input
@@ -247,7 +247,7 @@
            max_attempts,
            should_retry,
            max_concurrent_batches,
-            on_batch_complete,
+            on_result,
            self.MAX_RECORDS_UPDATE,
            actual_sobject_type,
            all_or_none,
@@ -313,7 +313,7 @@
        batch_size: Union[int, List[int]] = 200,
        max_concurrent_batches: Union[int, List[int]] = 5,
        api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
        max_attempts: int = 1,
        should_retry: Optional[ShouldRetryCallback] = None,
    ) -> CollectionUpsertResponse:
@@ -331,7 +331,7 @@
        :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
        :param max_concurrent_batches: Maximum number of concurrent batch operations
        :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
        :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
        :param should_retry: Optional callback to determine if a failed record should be retried
        :returns: List of results for each record, in same order as input
@@ -361,7 +361,7 @@
            max_attempts,
            should_retry,
            max_concurrent_batches,
-            on_batch_complete,
+            on_result,
            self.MAX_RECORDS_UPSERT,
            external_id_field,
            actual_sobject_type,
@@ -411,7 +411,7 @@
        batch_size: Union[int, List[int]] = 200,
        max_concurrent_batches: Union[int, List[int]] = 5,
        api_version: Optional[str] = None,
-        on_batch_complete: Optional[ProgressCallback] = None,
+        on_result: Optional[ResultCallback] = None,
        max_attempts: int = 1,
        should_retry: Optional[ShouldRetryCallback] = None,
    ) -> CollectionDeleteResponse:
@@ -427,7 +427,7 @@
        :param batch_size: Batch size (int for same size, or list of ints per attempt). Max 200.
        :param max_concurrent_batches: Maximum number of concurrent batch operations
        :param api_version: API version to use
-        :param on_batch_complete: Optional async callback invoked after each batch completes
+        :param on_result: Optional async callback invoked after each batch completes with results
        :param max_attempts: Maximum number of attempts per record (default: 1, no retries)
        :param should_retry: Optional callback to determine if a failed record should be retried
        :returns: List of results for each record, in same order as input
@@ -453,7 +453,7 @@
            max_attempts,
            should_retry,
            max_concurrent_batches,
-            on_batch_complete,
+            on_result,
            self.MAX_RECORDS_DELETE,
            all_or_none,
            api_version,
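All four operations (`insert`, `update`, `upsert`, `delete`) gain the identical `on_result` / `max_attempts` / `should_retry` parameters, so one callback can be shared across them. A sketch under that assumption; the client setup is taken from the README, and only parameters shown in this diff are used:

```python
from aio_sf.api.collections import ResultInfo

async def log_failures(info: ResultInfo) -> None:
    # Works unchanged for insert/update/upsert/delete.
    for err in info["errors"]:
        print(f"attempt {info['current_attempt']}: {err.get('errors')}")

# Assumed usage inside `async with SalesforceClient(auth_strategy=auth) as sf:`:
#
#     await sf.collections.update(
#         records,
#         sobject_type="Account",
#         batch_size=[200, 50],    # per-attempt sizes, as with insert
#         max_attempts=3,
#         on_result=log_failures,
#     )
```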
--- aio_sf-0.1.0b8/tests/test_retry_and_batch.py
+++ aio_sf-0.1.0b10/tests/test_retry_and_batch.py
@@ -2,7 +2,7 @@
 
 import pytest
 import asyncio
-from aio_sf.api.collections import CollectionsAPI, ProgressInfo
+from aio_sf.api.collections import CollectionsAPI, ResultInfo
 
 
 class MockClient:
@@ -463,26 +463,38 @@ class TestProgressTracking:
            ]
        )
 
-        progress_calls = []
+        result_calls = []
 
-        async def progress_callback(progress: ProgressInfo):
-            progress_calls.append(dict(progress))
+        async def result_callback(result: ResultInfo):
+            result_calls.append(dict(result))
 
        results = await collections_api.insert(
            records,
            sobject_type="Account",
            batch_size=200,
-            on_batch_complete=progress_callback,
+            on_result=result_callback,
        )
 
        assert len(results) == 300
-        # Should have 2 progress callbacks (one per batch)
-        assert len(progress_calls) == 2
-
-        # Verify progress data
-        assert progress_calls[0]["total_batches"] == 2
-        assert progress_calls[0]["total_records"] == 300
-        assert progress_calls[0]["retry_count"] == 0
+        # With the new design, callback is invoked once per batch
+        # 300 records with batch_size=200 means 2 batches
+        assert len(result_calls) == 2
+
+        # Both callbacks split successes and errors
+        assert len(result_calls[0]["successes"]) == 200
+        assert len(result_calls[0]["errors"]) == 0
+        assert len(result_calls[1]["successes"]) == 100
+        assert len(result_calls[1]["errors"]) == 0
+
+        # Verify successes are properly typed CollectionResults
+        assert all(r.get("success") for r in result_calls[0]["successes"])
+        assert all(r.get("id") is not None for r in result_calls[0]["successes"])
+
+        # Context information is provided
+        assert result_calls[0]["total_records"] == 300
+        assert result_calls[0]["current_batch_size"] == 200
+        assert result_calls[0]["current_concurrency"] == 5
+        assert result_calls[0]["current_attempt"] == 1
 
    @pytest.mark.asyncio
    async def test_progress_with_retries(self, client):
@@ -512,28 +524,39 @@ class TestProgressTracking:
            ]
        )
 
-        progress_calls = []
+        result_calls = []
 
-        async def progress_callback(progress: ProgressInfo):
-            progress_calls.append(dict(progress))
+        async def result_callback(result: ResultInfo):
+            result_calls.append(dict(result))
 
        results = await collections_api.insert(
            records,
            sobject_type="Account",
            batch_size=200,
            max_attempts=2,
-            on_batch_complete=progress_callback,
+            on_result=result_callback,
        )
 
        assert len(results) == 10
        assert all(r["success"] for r in results)
 
-        # Should have 2 progress callbacks (initial + retry)
-        assert len(progress_calls) == 2
-        # First has no retries
-        assert progress_calls[0]["retry_count"] == 0
-        # Second shows retries
-        assert progress_calls[1]["retry_count"] == 10
+        # Should have 2 callbacks (initial attempt + retry attempt)
+        assert len(result_calls) == 2
+
+        # First attempt: all failed - errors array contains them
+        assert result_calls[0]["current_attempt"] == 1
+        assert len(result_calls[0]["successes"]) == 0
+        assert len(result_calls[0]["errors"]) == 10
+        # Can inspect error codes in errors array
+        for error in result_calls[0]["errors"]:
+            assert not error.get("success")
+            assert error["errors"][0]["statusCode"] == "UNABLE_TO_LOCK_ROW"
+
+        # Second attempt: all succeeded - successes array contains them
+        assert result_calls[1]["current_attempt"] == 2
+        assert len(result_calls[1]["successes"]) == 10
+        assert len(result_calls[1]["errors"]) == 0
+        assert all(r.get("success") for r in result_calls[1]["successes"])
 
 
 class TestConcurrencyScaling:
@@ -646,7 +669,7 @@ class TestHTTPErrorHandling:
    async def test_default_retries_transient_http_errors(self, client):
        """Test that default behavior retries transient HTTP errors."""
        import httpx
-
+
        collections_api = CollectionsAPI(client)
 
        records = [{"Name": "Account 1"}]
@@ -681,7 +704,7 @@
    async def test_default_does_not_retry_4xx_errors(self, client):
        """Test that default behavior does NOT retry 4xx client errors."""
        import httpx
-
+
        collections_api = CollectionsAPI(client)
 
        records = [{"Name": "Account 1"}]
@@ -716,7 +739,7 @@
    async def test_http_error_converts_to_retryable_failure(self, client):
        """Test that transient HTTP errors are retried and converted to results."""
        import httpx
-
+
        collections_api = CollectionsAPI(client)
 
        records = [{"Name": "Account 1"}, {"Name": "Account 2"}]