airbyte-source-shopify 3.0.7__py3-none-any.whl → 3.0.8.dev202507101517__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/METADATA +1 -1
- {airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/RECORD +5 -5
- source_shopify/shopify_graphql/bulk/job.py +156 -5
- {airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/WHEEL +0 -0
- {airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/entry_points.txt +0 -0
{airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/RECORD

@@ -52,7 +52,7 @@ source_shopify/schemas/transactions.json,sha256=vbwscH3UcAtbSsC70mBka4oNaFR4S3S6
 source_shopify/scopes.py,sha256=N0njfMHn3Q1AQXuTj5VfjQOio10jaDarpC_oLYnWvqc,6490
 source_shopify/shopify_graphql/bulk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 source_shopify/shopify_graphql/bulk/exceptions.py,sha256=4dj7Za4xIfwL-zf8joT9svF_RSoGlE3GviMiIl1e1rs,2532
-source_shopify/shopify_graphql/bulk/job.py,sha256=
+source_shopify/shopify_graphql/bulk/job.py,sha256=Bk158tbTtX18FfBJFCvv-mzfhsSV3qHFOgvV7T4e5Qk,35914
 source_shopify/shopify_graphql/bulk/query.py,sha256=D8rnI1SDw50-Gt18lt7YwwNNdsbVMbBfxZa9xVJZbto,130981
 source_shopify/shopify_graphql/bulk/record.py,sha256=X6VGngugv7a_S8UEeDo121BkdCVLj5nWlHK76A21kyo,16898
 source_shopify/shopify_graphql/bulk/retry.py,sha256=R5rSJJE8D5zcj6mN-OmmNO2aFZEIdjAlWclDDVW5KPI,2626
@@ -64,7 +64,7 @@ source_shopify/streams/base_streams.py,sha256=FFIpHd5_-Z61W_jUucdr8D2MzUete1Y2E5
 source_shopify/streams/streams.py,sha256=D70Ik1vU75NKlmJMnS7W2-5gApA2ANq9eRnKligMTNw,14555
 source_shopify/transform.py,sha256=mn0htL812_90zc_YszGQa0hHcIZQpYYdmk8IqpZm5TI,4685
 source_shopify/utils.py,sha256=DSqEchu-MQJ7zust7CNfqOkGIv9OSR-5UUsuD-bsDa8,16224
-airbyte_source_shopify-3.0.
-airbyte_source_shopify-3.0.
-airbyte_source_shopify-3.0.
-airbyte_source_shopify-3.0.
+airbyte_source_shopify-3.0.8.dev202507101517.dist-info/METADATA,sha256=7TDtVOTJM9MamuDEQ6yxxpG-LzN39Jo8gApeqecPh5o,5325
+airbyte_source_shopify-3.0.8.dev202507101517.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+airbyte_source_shopify-3.0.8.dev202507101517.dist-info/entry_points.txt,sha256=SyTwKSsPk9MCdPf01saWpnp8hcmZOgBssVcSIvMbBeQ,57
+airbyte_source_shopify-3.0.8.dev202507101517.dist-info/RECORD,,
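For reference, each RECORD row has the form path,sha256=<digest>,<size>, where the digest is the URL-safe, unpadded base64 encoding of the file's SHA-256 hash, per the wheel RECORD format. A minimal sketch for recomputing one entry, run from an unpacked wheel root; the path is simply the one shown above:

    import base64
    import hashlib
    from pathlib import Path

    def record_digest(path: str) -> str:
        # SHA-256 of the file bytes, URL-safe base64, trailing "=" padding stripped,
        # which is exactly the format stored after "sha256=" in RECORD
        digest = hashlib.sha256(Path(path).read_bytes()).digest()
        return base64.urlsafe_b64encode(digest).decode("ascii").rstrip("=")

    # Should print Bk158tbTtX18FfBJFCvv-mzfhsSV3qHFOgvV7T4e5Qk for the 3.0.8.dev build
    print(record_digest("source_shopify/shopify_graphql/bulk/job.py"))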
source_shopify/shopify_graphql/bulk/job.py

@@ -5,6 +5,7 @@
 from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum
+from json import loads
 from time import sleep, time
 from typing import Any, Final, Iterable, List, Mapping, Optional
 
@@ -65,7 +66,7 @@ class ShopifyBulkManager:
 
     # currents: _job_id, _job_state, _job_created_at, _job_self_canceled
    _job_id: Optional[str] = field(init=False, default=None)
-    _job_state: str
+    _job_state: Optional[str] = field(init=False, default=None)  # this string is based on ShopifyBulkJobStatus
     # completed and saved Bulk Job result filename
     _job_result_filename: Optional[str] = field(init=False, default=None)
     # date-time when the Bulk Job was created on the server
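The move from a bare `_job_state: str` annotation to `field(init=False, default=None)` matters for a dataclass: a field without a default is a required __init__ argument and cannot be declared after defaulted fields. A minimal sketch of the behavior; the class name is an illustrative stand-in:

    from dataclasses import dataclass, field
    from typing import Optional

    @dataclass
    class Manager:  # stand-in for ShopifyBulkManager
        _job_id: Optional[str] = field(init=False, default=None)
        # a bare `_job_state: str` here would fail at class-creation time:
        #   TypeError: non-default argument '_job_state' follows default argument
        _job_state: Optional[str] = field(init=False, default=None)

    m = Manager()        # no constructor args required for either field
    print(m._job_state)  # None until the first status poll assigns it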
@@ -83,7 +84,9 @@ class ShopifyBulkManager:
     # the flag to adjust the next slice from the checkpointed cursor vaue
     _job_adjust_slice_from_checkpoint: bool = field(init=False, default=False)
     # keeps the last checkpointed cursor value for supported streams
-    _job_last_checkpoint_cursor_value: str
+    _job_last_checkpoint_cursor_value: Optional[str] = field(init=False, default=None)
+    # stores extracted cursor from INTERNAL_SERVER_ERROR recovery (temporary storage)
+    _job_extracted_checkpoint_cursor: Optional[str] = field(init=False, default=None)
 
     # expand slice factor
     _job_size_expand_factor: int = field(init=False, default=2)
@@ -214,6 +217,8 @@ class ShopifyBulkManager:
         self._log_job_msg_count = 0
         # set the running job object count to default
         self._job_last_rec_count = 0
+        # clear any extracted cursor from INTERNAL_SERVER_ERROR recovery
+        self._job_extracted_checkpoint_cursor = None
 
     def _set_checkpointing(self) -> None:
         # set the flag to adjust the next slice from the checkpointed cursor value
@@ -313,6 +318,24 @@ class ShopifyBulkManager:
         # fetch the collected records from CANCELED Job on checkpointing
         self._job_result_filename = self._job_get_result(response)
 
+        # Special handling: For FAILED jobs with INTERNAL_SERVER_ERROR, extract the last processed cursor
+        if response:
+            parsed_response = response.json().get("data", {}).get("node", {}) if response else {}
+            error_code = parsed_response.get("errorCode")
+            if error_code == "INTERNAL_SERVER_ERROR":
+                last_cursor = self._extract_last_cursor_from_partial_data(response)
+                if last_cursor:
+                    # Check if this cursor would cause a collision before storing it
+                    if self._checkpoint_cursor_has_collision(last_cursor):
+                        # Skip cursor extraction to avoid collision
+                        pass
+                    else:
+                        # Store the extracted cursor for later use (don't set it yet to avoid collision)
+                        self._job_extracted_checkpoint_cursor = last_cursor
+                else:
+                    # Not processing data due to insufficient records or checkpointing disabled
+                    pass
+
     def _job_update_state(self, response: Optional[requests.Response] = None) -> None:
         if response:
             self._job_state = response.json().get("data", {}).get("node", {}).get("status")
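The `.get("data", {}).get("node", {})` chain above assumes a status payload shaped roughly as follows. This is a hand-written approximation based only on the fields the connector reads (status, errorCode, objectCount, partialDataUrl), not a verbatim Shopify response:

    # Approximate status payload for a failed BULK operation (values are made up)
    status_payload = {
        "data": {
            "node": {
                "id": "gid://shopify/BulkOperation/123",
                "status": "FAILED",
                "errorCode": "INTERNAL_SERVER_ERROR",
                "objectCount": "1500",
                "url": None,
                "partialDataUrl": "https://storage.example.com/partial.jsonl",
            }
        }
    }

    node = status_payload.get("data", {}).get("node", {})
    # mirrors the checks performed in _job_get_checkpointed_result above
    print(node.get("errorCode"))                   # INTERNAL_SERVER_ERROR
    print(int(node.get("objectCount", "0")) > 0)   # True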
@@ -363,7 +386,26 @@ class ShopifyBulkManager:
     def _on_completed_job(self, response: Optional[requests.Response] = None) -> None:
         self._job_result_filename = self._job_get_result(response)
 
-    def _on_failed_job(self, response: requests.Response) -> AirbyteTracedException:
+    def _on_failed_job(self, response: requests.Response) -> Optional[AirbyteTracedException]:
+        # Special handling for FAILED jobs with INTERNAL_SERVER_ERROR that support checkpointing
+        parsed_response = response.json().get("data", {}).get("node", {}) if response else {}
+        error_code = parsed_response.get("errorCode")
+
+        if error_code == "INTERNAL_SERVER_ERROR" and self._supports_checkpointing:
+            LOGGER.info(
+                f"Stream: `{self.http_client.name}`, BULK Job: `{self._job_id}` failed with INTERNAL_SERVER_ERROR. Waiting for partial data availability..."
+            )
+            # For INTERNAL_SERVER_ERROR specifically, wait and retry to check if partial data becomes available
+            partial_response = self._wait_for_partial_data_on_failure()
+            if partial_response:
+                # Use the updated response that may contain partialDataUrl
+                response = partial_response
+                # Update the job state with the new response to ensure _job_last_rec_count is set correctly
+                self._job_update_state(response)
+                # For INTERNAL_SERVER_ERROR with partial data, extract cursor and treat as checkpointable
+                self._job_get_checkpointed_result(response)
+                return None  # Don't raise exception, we recovered the data
+
         if not self._supports_checkpointing:
             raise ShopifyBulkExceptions.BulkJobFailed(
                 f"The BULK Job: `{self._job_id}` exited with {self._job_state}, details: {response.text}",
@@ -373,6 +415,102 @@ class ShopifyBulkManager:
         # we leverage the checkpointing in this case.
         self._job_get_checkpointed_result(response)
 
+    def _wait_for_partial_data_on_failure(self) -> Optional[requests.Response]:
+        """
+        Wait for partial data to become available when a BULK job fails with INTERNAL_SERVER_ERROR.
+
+        This method is specifically designed for INTERNAL_SERVER_ERROR cases where
+        Shopify's BULK API may make partial data available (via partialDataUrl)
+        after a short wait, even though the job initially failed.
+
+        Returns:
+            Optional[requests.Response]: Updated response with potential partialDataUrl, or None if no data
+        """
+        max_wait_attempts = 10  # Maximum number of wait attempts
+        wait_interval = 10  # Wait 10 seconds between checks
+
+        for attempt in range(max_wait_attempts):
+            sleep(wait_interval)
+
+            # Check job status again to see if partial data is now available
+            try:
+                _, response = self.http_client.send_request(
+                    http_method="POST",
+                    url=self.base_url,
+                    json={"query": ShopifyBulkTemplates.status(self._job_id)},
+                    request_kwargs={},
+                )
+
+                parsed_response = response.json().get("data", {}).get("node", {}) if response else {}
+                partial_data_url = parsed_response.get("partialDataUrl")
+                object_count = parsed_response.get("objectCount", "0")
+
+                # Only stop waiting if we actually have a partialDataUrl - objectCount alone is not sufficient
+                if partial_data_url and int(object_count) > 0:
+                    LOGGER.info(f"Stream: `{self.http_client.name}`, partial data available after wait. Object count: {object_count}")
+                    return response
+                elif int(object_count) > 0:
+                    # objectCount available but no partialDataUrl yet - continue waiting
+                    continue
+
+            except Exception as e:
+                # Error during partial data check - continue waiting
+                continue
+
+        LOGGER.warning(f"Stream: `{self.http_client.name}`, no partial data became available after {max_wait_attempts} attempts")
+        return None
+
+    def _extract_last_cursor_from_partial_data(self, response: Optional[requests.Response]) -> Optional[str]:
+        """
+        Extract the last processed cursor value from partial data for INTERNAL_SERVER_ERROR recovery.
+
+        This method retrieves partial data from a failed INTERNAL_SERVER_ERROR job and extracts
+        the updatedAt value of the last record, which can be used to resume processing from that point.
+        Only used in INTERNAL_SERVER_ERROR scenarios with checkpointing support.
+
+        Args:
+            response: The response containing partial data information
+
+        Returns:
+            Optional[str]: The cursor value of the last processed record, or None if unavailable
+        """
+        if not response:
+            return None
+
+        try:
+            parsed_response = response.json().get("data", {}).get("node", {})
+            partial_data_url = parsed_response.get("partialDataUrl")
+
+            if not partial_data_url:
+                return None
+
+            # Download the partial data
+            _, partial_response = self.http_client.send_request(http_method="GET", url=partial_data_url, request_kwargs={"stream": True})
+            partial_response.raise_for_status()
+
+            last_record = None
+            # Read through the JSONL data to find the last record
+            for line in partial_response.iter_lines(decode_unicode=True):
+                if line and line.strip() and line.strip() != END_OF_FILE:
+                    try:
+                        record = loads(line)
+                        # Look for the main record types (Order, Product, etc.)
+                        if record.get("__typename") in ["Order", "Product", "Customer", "FulfillmentOrder"]:
+                            last_record = record
+                    except Exception:
+                        continue
+
+            # Extract the updatedAt cursor from the last record
+            if last_record and "updatedAt" in last_record:
+                cursor_value = last_record["updatedAt"]
+                return cursor_value
+
+        except Exception as e:
+            # Failed to extract cursor from partial data
+            pass
+
+        return None
+
     def _on_timeout_job(self, **kwargs) -> AirbyteTracedException:
         raise ShopifyBulkExceptions.BulkJobTimout(
             f"The BULK Job: `{self._job_id}` exited with {self._job_state}, please reduce the `GraphQL BULK Date Range in Days` in SOURCES > Your Shopify Source > SETTINGS.",
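For background: BULK results are delivered as JSONL, one JSON object per line followed by an end-of-file marker, and the scan above keeps the last top-level record it sees. A self-contained sketch of that scan over made-up sample lines:

    from json import loads
    from typing import Optional

    # Made-up JSONL payload mimicking a partial BULK result file
    SAMPLE_JSONL = """\
    {"__typename": "Order", "id": "gid://shopify/Order/1", "updatedAt": "2025-07-01T00:00:00Z"}
    {"__typename": "LineItem", "id": "gid://shopify/LineItem/10"}
    {"__typename": "Order", "id": "gid://shopify/Order/2", "updatedAt": "2025-07-02T00:00:00Z"}
    """

    def last_updated_at(jsonl_text: str) -> Optional[str]:
        last_record = None
        for line in jsonl_text.splitlines():
            if not line.strip():
                continue
            try:
                record = loads(line)
            except ValueError:
                continue  # skip malformed lines, as the connector does
            # keep only top-level record types; nested rows like LineItem are ignored
            if record.get("__typename") in ["Order", "Product", "Customer", "FulfillmentOrder"]:
                last_record = record
        return last_record.get("updatedAt") if last_record else None

    print(last_updated_at(SAMPLE_JSONL))  # 2025-07-02T00:00:00Z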
@@ -535,9 +673,15 @@ class ShopifyBulkManager:
         """
 
         if checkpointed_cursor:
+            # Check for collision and provide more context in the error
             if self._checkpoint_cursor_has_collision(checkpointed_cursor):
+                # For INTERNAL_SERVER_ERROR recovery, if the cursor is the same, we might need to skip ahead slightly
+                # This can happen if the failure occurred right at the boundary of what was already processed
+                if hasattr(self, "_job_extracted_checkpoint_cursor") and self._job_extracted_checkpoint_cursor == checkpointed_cursor:
+                    pass  # Collision from INTERNAL_SERVER_ERROR recovery at boundary
+
                 raise ShopifyBulkExceptions.BulkJobCheckpointCollisionError(
-                    f"The stream: `{self.http_client.name}` checkpoint collision is detected. Try to increase the `BULK Job checkpoint (rows collected)` to the bigger value. The stream will be synced again during the next sync attempt."
+                    f"The stream: `{self.http_client.name}` checkpoint collision is detected. Current cursor: {self._job_last_checkpoint_cursor_value}, New cursor: {checkpointed_cursor}. Try to increase the `BULK Job checkpoint (rows collected)` to the bigger value. The stream will be synced again during the next sync attempt."
                 )
             # set the checkpointed cursor value
             self._set_last_checkpoint_cursor_value(checkpointed_cursor)
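The collision guard exists because re-checkpointing an identical cursor would re-issue the same slice and loop forever. A toy illustration of the comparison presumably performed by `_checkpoint_cursor_has_collision`; this is a standalone sketch, not the connector's actual implementation:

    from typing import Optional

    class CheckpointGuard:
        """Toy stand-in for the manager's checkpoint-collision check."""

        def __init__(self) -> None:
            self.last_cursor: Optional[str] = None

        def has_collision(self, new_cursor: str) -> bool:
            # a new checkpoint equal to the previous one means no forward progress
            return self.last_cursor is not None and self.last_cursor == new_cursor

    guard = CheckpointGuard()
    guard.last_cursor = "2025-07-10T15:17:00Z"
    print(guard.has_collision("2025-07-10T15:17:00Z"))  # True  -> raise, resync next attempt
    print(guard.has_collision("2025-07-10T15:18:00Z"))  # False -> safe to checkpoint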
@@ -549,7 +693,14 @@ class ShopifyBulkManager:
         if self._job_adjust_slice_from_checkpoint:
             # set the checkpointing to default, before the next slice is emitted, to avoid inf.loop
             self._reset_checkpointing()
-
+            # Clear the extracted cursor after use to avoid reusing it
+            if self._job_extracted_checkpoint_cursor:
+                extracted_cursor = self._job_extracted_checkpoint_cursor
+                self._job_extracted_checkpoint_cursor = None
+                cursor_to_use = extracted_cursor
+            else:
+                cursor_to_use = checkpointed_cursor or self._job_last_checkpoint_cursor_value
+            return self._adjust_slice_end(slice_end, cursor_to_use)
 
         if self._is_long_running_job:
             self._job_size_reduce_next()
{airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/WHEEL: file without changes
{airbyte_source_shopify-3.0.7.dist-info → airbyte_source_shopify-3.0.8.dev202507101517.dist-info}/entry_points.txt: file without changes