aio-sf 0.1.0b3__tar.gz → 0.1.0b4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/PKG-INFO +2 -2
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/pyproject.toml +1 -1
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/exporter/bulk_export.py +39 -33
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/exporter/parquet_writer.py +11 -6
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/.cursor/rules/api-structure.mdc +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/.cursor/rules/async-patterns.mdc +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/.cursor/rules/project-tooling.mdc +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/.github/workflows/publish.yml +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/.github/workflows/test.yml +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/.gitignore +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/LICENSE +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/README.md +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/RELEASE.md +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/pytest.ini +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/auth/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/auth/base.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/auth/client_credentials.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/auth/refresh_token.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/auth/sfdx_cli.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/auth/static_token.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/bulk_v2/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/bulk_v2/client.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/bulk_v2/types.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/client.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/collections/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/collections/client.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/collections/types.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/describe/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/describe/client.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/describe/types.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/query/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/query/client.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/query/types.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/api/types.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/exporter/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/tests/__init__.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/tests/conftest.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/tests/test_api_clients.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/tests/test_auth.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/tests/test_client.py +0 -0
- {aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/uv.lock +0 -0
{aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/PKG-INFO

@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: aio-sf
-Version: 0.1.0b3
-Summary: Async Salesforce library for Python
+Version: 0.1.0b4
+Summary: Async Salesforce library for Python
 Project-URL: Homepage, https://github.com/callawaycloud/aio-salesforce
 Project-URL: Repository, https://github.com/callawaycloud/aio-salesforce
 Project-URL: Issues, https://github.com/callawaycloud/aio-salesforce/issues
{aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/pyproject.toml

@@ -5,7 +5,7 @@ build-backend = "hatchling.build"
 [project]
 name = "aio-sf"
 dynamic = ["version"]
-description = "Async Salesforce library for Python
+description = "Async Salesforce library for Python"
 readme = "README.md"
 license = {file = "LICENSE"}
 authors = [
{aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/exporter/bulk_export.py

@@ -2,6 +2,7 @@ import logging
 from typing import Any, Dict, List, Generator, Optional
 import csv
 import asyncio
+import io
 
 from ..api.describe.types import FieldInfo
 from ..api.client import SalesforceClient
@@ -111,37 +112,50 @@ class QueryResult:
         """
         Stream CSV response and convert to record dictionaries.
 
+        Uses proper CSV parsing to handle quotes, newlines, and special characters correctly.
+
        :param response_text: CSV response text
        :yields: Individual record dictionaries
        """
-
-
-        # Get the header row first
-        if not lines:
+        if not response_text or not response_text.strip():
             # No data in this batch
             return
 
         try:
-
-
-
-            #
-
+            # Create a StringIO object for proper CSV parsing
+            csv_buffer = io.StringIO(response_text)
+
+            # Use DictReader for proper CSV parsing with header detection
+            # This handles quotes, newlines in fields, and escaping correctly
+            csv_reader = csv.DictReader(
+                csv_buffer,
+                delimiter=",",
+                quotechar='"',
+                quoting=csv.QUOTE_MINIMAL,
+                skipinitialspace=True,
+            )
 
-
-            for line in lines[1:]:
-                if line.strip():  # Skip empty lines
+            for row_num, record in enumerate(csv_reader, start=1):
                 try:
-                    #
-
-
-
-
-
-
+                    # Convert None values to empty strings for consistency
+                    cleaned_record = {
+                        key: (value if value is not None else "")
+                        for key, value in record.items()
+                    }
+                    yield cleaned_record
+                except Exception as e:
+                    logging.warning(f"Error processing CSV record {row_num}: {e}")
+                    # Continue processing other records
                     continue
 
+        except csv.Error as e:
+            logging.error(f"CSV parsing error: {e}")
+            # If CSV parsing fails completely, don't yield any records
+            return
+        except Exception as e:
+            logging.error(f"Unexpected error parsing CSV response: {e}")
+            return
+
     async def _generate_records(self):
         """Async generator that yields individual records."""
         locator = self._query_locator
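As context for the rewritten `_parse_csv_response` above: Bulk API CSV results can contain quoted fields with embedded commas and newlines, which the removed line-splitting approach could not handle. The following standalone sketch is illustrative only and is not part of the package; the sample data is invented.

```python
import csv
import io

# Invented sample resembling a Bulk API CSV batch: the Description field
# holds an embedded newline and a comma inside a quoted value.
response_text = 'Id,Name,Description\n"001xx1","Acme","Line one\nLine two, with comma"\n'

reader = csv.DictReader(
    io.StringIO(response_text),
    delimiter=",",
    quotechar='"',
    quoting=csv.QUOTE_MINIMAL,
    skipinitialspace=True,
)

for record in reader:
    # Each row arrives as a dict keyed by the header; the quoted newline stays
    # inside the field instead of breaking the record apart.
    print(record["Id"], repr(record["Description"]))
    # -> 001xx1 'Line one\nLine two, with comma'
```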
@@ -170,7 +184,9 @@ class QueryResult:
 
             except Exception as e:
                 raise Exception(
-                    f"Error processing record {ctn}: {e}. Current Query Locator: {locator}"
+                    f"Error processing record {ctn}: {e}. Current Query Locator: {locator}. "
+                    f"This may indicate a CSV parsing issue - check if the response contains "
+                    f"malformed CSV data or fields with special characters."
                 )
 
 
@@ -296,19 +312,9 @@ def resume_from_locator(
 
 
 # Helper function to get all fields that can be queried by bulk API
-async def get_bulk_fields(
-
-) -> List[FieldInfo]:
-    """Get field metadata for queryable fields in a Salesforce object.
-
-    :param sf: Salesforce client instance
-    :param object_type: Name of the Salesforce object (e.g., 'Account', 'Contact')
-    :param api_version: API version to use (defaults to client version)
-    :returns: List of field metadata dictionaries for queryable fields
-    """
+async def get_bulk_fields(fields_metadata: List[FieldInfo]) -> List[FieldInfo]:
+    """Get field metadata for queryable fields in a Salesforce object."""
     # Use the metadata API to get object description
-    describe_data = await sf.describe.sobject(object_type, api_version)
-    fields_metadata = describe_data["fields"]
 
     # Create a set of all compound field names to exclude
     compound_field_names = {
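The `get_bulk_fields` change above moves the describe call out of the helper, so the caller now supplies the field metadata. A minimal sketch of one possible call site, assuming the `sf.describe.sobject(...)` usage shown in the removed lines; the wrapper name `bulk_fields_for` and the `sf`/`object_type` parameters are illustrative, not part of the library.

```python
from typing import List

from aio_sf.api.describe.types import FieldInfo
from aio_sf.exporter.bulk_export import get_bulk_fields


async def bulk_fields_for(sf, object_type: str) -> List[FieldInfo]:
    # The describe lookup that get_bulk_fields used to perform internally
    # (see the removed lines above) now happens at the call site.
    describe_data = await sf.describe.sobject(object_type)
    fields_metadata = describe_data["fields"]
    # The helper now only filters the supplied metadata (e.g. excluding
    # compound fields), so cached describe results can be reused.
    return await get_bulk_fields(fields_metadata)
```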
{aio_sf-0.1.0b3 → aio_sf-0.1.0b4}/src/aio_sf/exporter/parquet_writer.py

@@ -192,12 +192,17 @@ class ParquetWriter:
 
         # Apply type-specific conversions
         if pa.types.is_boolean(field.type):
-            # Convert string 'true'/'false' to boolean
-            df[field_name]
-
-
-
-
+            # Convert string 'true'/'false' to boolean, keeping original values for others
+            original_series = df[field_name]
+            mapped_series = original_series.map(
+                {"true": True, "false": False, None: None}
+            )
+            # For values that weren't mapped, keep the original values
+            # This avoids the fillna FutureWarning by using boolean indexing instead
+            mask = mapped_series.notna()
+            result_series = original_series.copy()
+            result_series.loc[mask] = mapped_series.loc[mask]
+            df[field_name] = result_series
         elif pa.types.is_integer(field.type):
             df[field_name] = pd.to_numeric(df[field_name], errors="coerce").astype(
                 "Int64"
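For reference, a self-contained pandas sketch of the boolean-conversion technique used above: map the known `'true'`/`'false'` strings, then copy the mapped values back over the original series through a boolean mask, so unrecognized values are preserved without routing a partially mapped series through `fillna()`. The sample data is invented for illustration.

```python
import pandas as pd

# Invented sample column: Bulk API style booleans plus one unexpected value.
s = pd.Series(["true", "false", None, "YES"], dtype="object")

# Map only the values we recognize; anything else becomes NaN here.
mapped = s.map({"true": True, "false": False, None: None})

# Write the mapped values back over the original via a mask, so "YES" is
# kept as-is and no fillna-based downcasting warning is triggered.
mask = mapped.notna()
result = s.copy()
result.loc[mask] = mapped.loc[mask]

print(result.tolist())  # [True, False, None, 'YES']
```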