port-ocean 0.18.5__py3-none-any.whl → 0.18.7__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
Files changed (26)
  1. port_ocean/context/ocean.py +32 -0
  2. port_ocean/core/handlers/entities_state_applier/port/applier.py +18 -2
  3. port_ocean/core/handlers/port_app_config/models.py +3 -0
  4. port_ocean/core/handlers/queue/__init__.py +4 -0
  5. port_ocean/core/handlers/queue/abstract_queue.py +28 -0
  6. port_ocean/core/handlers/queue/local_queue.py +25 -0
  7. port_ocean/core/handlers/webhook/__init__.py +0 -0
  8. port_ocean/core/handlers/webhook/abstract_webhook_processor.py +101 -0
  9. port_ocean/core/handlers/webhook/processor_manager.py +237 -0
  10. port_ocean/core/handlers/webhook/webhook_event.py +77 -0
  11. port_ocean/core/integrations/mixins/sync.py +2 -1
  12. port_ocean/core/integrations/mixins/sync_raw.py +26 -20
  13. port_ocean/exceptions/webhook_processor.py +4 -0
  14. port_ocean/ocean.py +7 -1
  15. port_ocean/tests/core/handlers/entities_state_applier/test_applier.py +86 -0
  16. port_ocean/tests/core/handlers/mixins/test_sync_raw.py +149 -124
  17. port_ocean/tests/core/handlers/queue/test_local_queue.py +90 -0
  18. port_ocean/tests/core/handlers/webhook/test_abstract_webhook_processor.py +114 -0
  19. port_ocean/tests/core/handlers/webhook/test_processor_manager.py +391 -0
  20. port_ocean/tests/core/handlers/webhook/test_webhook_event.py +65 -0
  21. port_ocean/utils/signal.py +6 -2
  22. {port_ocean-0.18.5.dist-info → port_ocean-0.18.7.dist-info}/METADATA +1 -1
  23. {port_ocean-0.18.5.dist-info → port_ocean-0.18.7.dist-info}/RECORD +26 -13
  24. {port_ocean-0.18.5.dist-info → port_ocean-0.18.7.dist-info}/LICENSE.md +0 -0
  25. {port_ocean-0.18.5.dist-info → port_ocean-0.18.7.dist-info}/WHEEL +0 -0
  26. {port_ocean-0.18.5.dist-info → port_ocean-0.18.7.dist-info}/entry_points.txt +0 -0
port_ocean/context/ocean.py

@@ -5,6 +5,7 @@ from pydantic.main import BaseModel
 from werkzeug.local import LocalProxy
 
 from port_ocean.clients.port.types import UserAgentType
+
 from port_ocean.core.models import Entity
 from port_ocean.core.ocean_types import (
     RESYNC_EVENT_LISTENER,
@@ -145,6 +146,37 @@ class PortOceanContext:
     async def sync_raw_all(self) -> None:
         await self.integration.sync_raw_all(trigger_type="manual")
 
+    def add_webhook_processor(
+        self,
+        path: str,
+        processor: type,
+        events_filter: Callable[[Any], bool] = lambda _: True,
+    ) -> None:
+        """
+        Registers a webhook processor for a specific path.
+
+        Args:
+            path: The path to register the webhook processor for.
+            processor: The processor to register.
+        Examples:
+            >>> from port_ocean.context.ocean import ocean
+            >>> from port_ocean.core.handlers.webhook import AbstractWebhookProcessor
+            >>> from port_ocean.core.handlers.webhook import WebhookEvent
+            >>> class MyWebhookProcessor(AbstractWebhookProcessor):
+            ...     async def authenticate(self, payload: EventPayload, headers: EventHeaders) -> bool:
+            ...         return True
+            ...     async def validate_payload(self, payload: EventPayload) -> bool:
+            ...         return True
+            ...     async def handle_event(self, payload: EventPayload) -> None:
+            ...         pass
+            >>> def events_filter(event: WebhookEvent) -> bool:
+            ...     return True
+            >>> ocean.add_webhook_processor('/my-webhook', MyWebhookProcessor, events_filter)
+        Raises:
+            ValueError: If the processor does not extend AbstractWebhookProcessor.
+        """
+        self.app.webhook_manager.register_processor(path, processor, events_filter)
+
 
 _port_ocean: PortOceanContext = PortOceanContext(None)
 
port_ocean/core/handlers/entities_state_applier/port/applier.py

@@ -78,6 +78,7 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
         self,
         entities: EntityDiff,
         user_agent_type: UserAgentType,
+        entity_deletion_threshold: float | None = None,
     ) -> None:
         diff = get_port_diff(entities["before"], entities["after"])
 
@@ -87,10 +88,25 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
         kept_entities = diff.created + diff.modified
 
         logger.info(
-            f"Determining entities to delete ({len(diff.deleted)}/{len(kept_entities)})"
+            f"Determining entities to delete ({len(diff.deleted)}/{len(kept_entities)})",
+            deleting_entities=len(diff.deleted),
+            keeping_entities=len(kept_entities),
+            entity_deletion_threshold=entity_deletion_threshold,
         )
 
-        await self._safe_delete(diff.deleted, kept_entities, user_agent_type)
+        deletion_rate = len(diff.deleted) / len(entities["before"])
+        if (
+            entity_deletion_threshold is not None
+            and deletion_rate <= entity_deletion_threshold
+        ):
+            await self._safe_delete(diff.deleted, kept_entities, user_agent_type)
+        else:
+            logger.info(
+                f"Skipping deletion of entities with deletion rate {deletion_rate}",
+                deletion_rate=deletion_rate,
+                deleting_entities=len(diff.deleted),
+                total_entities=len(entities),
+            )
 
     async def upsert(
         self, entities: list[Entity], user_agent_type: UserAgentType
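The guard above performs the deletion only when the share of entities that would be removed, measured against the "before" state, stays at or below the threshold; with no threshold, deletion is skipped entirely. A minimal sketch of that gate, with illustrative names that are not part of the package:

# Illustrative sketch of the gating logic above; not part of the package.
def should_delete(deleted_count: int, before_count: int, threshold: float | None) -> bool:
    # Mirrors the applier: the rate is computed against the "before" state.
    # (As written in the diff, an empty "before" list would raise ZeroDivisionError.)
    deletion_rate = deleted_count / before_count
    return threshold is not None and deletion_rate <= threshold

assert should_delete(5, 100, 0.9) is True    # 5% deleted: deletion proceeds
assert should_delete(95, 100, 0.9) is False  # 95% deleted: skipped, only logged
assert should_delete(5, 100, None) is False  # no threshold: deletion is skipped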
port_ocean/core/handlers/port_app_config/models.py

@@ -58,6 +58,9 @@ class PortAppConfig(BaseModel):
     create_missing_related_entities: bool = Field(
         alias="createMissingRelatedEntities", default=True
     )
+    entity_deletion_threshold: float = Field(
+        alias="entityDeletionThreshold", default=0.9
+    )
     resources: list[ResourceConfig] = Field(default_factory=list)
 
     def get_port_request_options(self) -> RequestOptions:
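Since PortAppConfig is a pydantic model, the new field should be populated from its camelCase alias and fall back to 0.9 when the key is absent. A small sketch, assuming pydantic v1-style parse_obj and that the model's other fields keep their defaults:

from port_ocean.core.handlers.port_app_config.models import PortAppConfig

# Populated from the "entityDeletionThreshold" alias:
config = PortAppConfig.parse_obj({"entityDeletionThreshold": 0.5})
assert config.entity_deletion_threshold == 0.5

# When the key is absent, the default applies:
assert PortAppConfig.parse_obj({}).entity_deletion_threshold == 0.9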
port_ocean/core/handlers/queue/__init__.py

@@ -0,0 +1,4 @@
+from .abstract_queue import AbstractQueue
+from .local_queue import LocalQueue
+
+__all__ = ["AbstractQueue", "LocalQueue"]
port_ocean/core/handlers/queue/abstract_queue.py

@@ -0,0 +1,28 @@
+from abc import ABC, abstractmethod
+from typing import Generic, TypeVar
+
+T = TypeVar("T")
+
+
+class AbstractQueue(ABC, Generic[T]):
+    """Abstract interface for queues"""
+
+    @abstractmethod
+    async def put(self, item: T) -> None:
+        """Put an item into the queue"""
+        pass
+
+    @abstractmethod
+    async def get(self) -> T:
+        """Get an item from the queue"""
+        pass
+
+    @abstractmethod
+    async def teardown(self) -> None:
+        """Wait for all items to be processed"""
+        pass
+
+    @abstractmethod
+    async def commit(self) -> None:
+        """Mark item as processed"""
+        pass
port_ocean/core/handlers/queue/local_queue.py

@@ -0,0 +1,25 @@
+import asyncio
+from typing import TypeVar
+
+from .abstract_queue import AbstractQueue
+
+T = TypeVar("T")
+
+
+class LocalQueue(AbstractQueue[T]):
+    """Implementation of Queue using asyncio.Queue"""
+
+    def __init__(self) -> None:
+        self._queue: asyncio.Queue[T] = asyncio.Queue()
+
+    async def put(self, item: T) -> None:
+        await self._queue.put(item)
+
+    async def get(self) -> T:
+        return await self._queue.get()
+
+    async def teardown(self) -> None:
+        await self._queue.join()
+
+    async def commit(self) -> None:
+        self._queue.task_done()
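LocalQueue maps teardown and commit onto asyncio.Queue's join() and task_done(), so a consumer must commit every item it takes before teardown can return. A runnable sketch of that contract:

import asyncio
from port_ocean.core.handlers.queue import LocalQueue

async def consumer(queue: LocalQueue[str]) -> None:
    item = await queue.get()
    print(f"processing {item}")
    await queue.commit()  # task_done(): lets the join()-based teardown() return

async def main() -> None:
    queue: LocalQueue[str] = LocalQueue()
    await queue.put("event-1")
    task = asyncio.create_task(consumer(queue))
    await queue.teardown()  # blocks until every put() item has been committed
    await task

asyncio.run(main())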
port_ocean/core/handlers/webhook/__init__.py

File without changes
port_ocean/core/handlers/webhook/abstract_webhook_processor.py

@@ -0,0 +1,101 @@
+from abc import ABC, abstractmethod
+from loguru import logger
+
+from port_ocean.exceptions.webhook_processor import RetryableError
+
+from .webhook_event import WebhookEvent, EventPayload, EventHeaders
+
+
+class AbstractWebhookProcessor(ABC):
+    """
+    Abstract base class for webhook processors
+    Extend this class to implement custom webhook processing logic
+
+    Attributes:
+        max_retries: The maximum number of retries before giving up
+        initial_retry_delay_seconds: The initial delay before the first retry
+        max_retry_delay_seconds: The maximum delay between retries
+        exponential_base_seconds: The base for exponential backoff calculations
+
+    Args:
+        event: The webhook event to process
+
+    Examples:
+        >>> from port_ocean.core.handlers.webhook import AbstractWebhookProcessor
+        >>> from port_ocean.core.handlers.webhook import WebhookEvent
+        >>> class MyWebhookProcessor(AbstractWebhookProcessor):
+        ...     async def authenticate(self, payload: EventPayload, headers: EventHeaders) -> bool:
+        ...         return True
+        ...     async def validate_payload(self, payload: EventPayload) -> bool:
+        ...         return True
+        ...     async def handle_event(self, payload: EventPayload) -> None:
+        ...         pass
+    """
+
+    max_retries: int = 3
+    initial_retry_delay_seconds: float = 1.0
+    max_retry_delay_seconds: float = 30.0
+    exponential_base_seconds: float = 2.0
+
+    def __init__(self, event: WebhookEvent) -> None:
+        self.event = event
+        self.retry_count = 0
+
+    async def on_error(self, error: Exception) -> None:
+        """Hook to handle errors during processing. Override if needed"""
+        delay = self.calculate_retry_delay()
+
+        logger.error(
+            f"Attempt {self.retry_count}/{self.max_retries} failed. "
+            f"Retrying in {delay:.2f} seconds. Error: {str(error)}"
+        )
+
+    async def cancel(self) -> None:
+        """Handle cancellation of the request. Override if needed"""
+        pass
+
+    def validate_webhook_setup(self) -> bool:
+        """Validate webhook configuration. Override if needed"""
+        return True
+
+    def should_retry(self, error: Exception) -> bool:
+        """
+        Determine if the operation should be retried based on the error
+        Override to customize retry behavior
+        """
+        return isinstance(error, RetryableError)
+
+    def calculate_retry_delay(self) -> float:
+        """
+        Calculate the delay before the next retry using exponential backoff
+        Override to customize backoff strategy
+        """
+        delay = min(
+            self.initial_retry_delay_seconds
+            * (self.exponential_base_seconds**self.retry_count),
+            self.max_retry_delay_seconds,
+        )
+        return delay
+
+    async def before_processing(self) -> None:
+        """Hook to run before processing the event"""
+        pass
+
+    async def after_processing(self) -> None:
+        """Hook to run after processing the event"""
+        pass
+
+    @abstractmethod
+    async def authenticate(self, payload: EventPayload, headers: EventHeaders) -> bool:
+        """Authenticate the request."""
+        pass
+
+    @abstractmethod
+    async def validate_payload(self, payload: EventPayload) -> bool:
+        """Validate the payload structure and content."""
+        pass
+
+    @abstractmethod
+    async def handle_event(self, payload: EventPayload) -> None:
+        """Process the event."""
+        pass
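With the defaults above, retry delays follow min(1.0 * 2**retry_count, 30.0) seconds, so they grow 1s, 2s, 4s, ... up to the 30-second cap. A hedged sketch of a concrete subclass; the header name, secret, and payload key here are invented for illustration:

from port_ocean.core.handlers.webhook.abstract_webhook_processor import (
    AbstractWebhookProcessor,
)
from port_ocean.core.handlers.webhook.webhook_event import EventHeaders, EventPayload
from port_ocean.exceptions.webhook_processor import RetryableError

class PushEventProcessor(AbstractWebhookProcessor):
    max_retries = 5  # the class attributes above can be overridden per subclass

    async def authenticate(self, payload: EventPayload, headers: EventHeaders) -> bool:
        # "x-webhook-secret" is a made-up header for this sketch
        return headers.get("x-webhook-secret") == "expected-secret"

    async def validate_payload(self, payload: EventPayload) -> bool:
        return "repository" in payload  # made-up payload shape

    async def handle_event(self, payload: EventPayload) -> None:
        # Raising RetryableError makes the default should_retry() return True,
        # so the manager's retry loop backs off and retries up to max_retries.
        raise RetryableError("upstream temporarily unavailable")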
port_ocean/core/handlers/webhook/processor_manager.py

@@ -0,0 +1,237 @@
+from typing import Dict, Type, Set, Callable
+from fastapi import APIRouter, Request
+from loguru import logger
+import asyncio
+from dataclasses import dataclass
+
+from .webhook_event import WebhookEvent, WebhookEventTimestamp
+
+
+from .abstract_webhook_processor import AbstractWebhookProcessor
+from port_ocean.utils.signal import SignalHandler
+from port_ocean.core.handlers.queue import AbstractQueue, LocalQueue
+
+
+@dataclass
+class ProcessorRegistration:
+    """Represents a registered processor with its filter"""
+
+    processor: Type[AbstractWebhookProcessor]
+    filter: Callable[[WebhookEvent], bool]
+
+
+class WebhookProcessorManager:
+    """Manages webhook processors and their routes"""
+
+    def __init__(
+        self,
+        router: APIRouter,
+        signal_handler: SignalHandler,
+        max_event_processing_seconds: float = 90.0,
+        max_wait_seconds_before_shutdown: float = 5.0,
+    ) -> None:
+        self._router = router
+        self._processors: Dict[str, list[ProcessorRegistration]] = {}
+        self._event_queues: Dict[str, AbstractQueue[WebhookEvent]] = {}
+        self._webhook_processor_tasks: Set[asyncio.Task[None]] = set()
+        self._max_event_processing_seconds = max_event_processing_seconds
+        self._max_wait_seconds_before_shutdown = max_wait_seconds_before_shutdown
+        signal_handler.register(self.shutdown)
+
+    async def start_processing_event_messages(self) -> None:
+        """Start processing events for all registered paths"""
+        loop = asyncio.get_event_loop()
+        for path in self._event_queues.keys():
+            try:
+                task = loop.create_task(self.process_queue(path))
+                self._webhook_processor_tasks.add(task)
+                task.add_done_callback(self._webhook_processor_tasks.discard)
+            except Exception as e:
+                logger.exception(f"Error starting queue processor for {path}: {str(e)}")
+
+    def _extract_matching_processors(
+        self, event: WebhookEvent, path: str
+    ) -> list[AbstractWebhookProcessor]:
+        """Find and extract the matching processors for an event"""
+        matching_processors = [
+            registration.processor
+            for registration in self._processors[path]
+            if registration.filter(event)
+        ]
+
+        if not matching_processors:
+            raise ValueError("No matching processors found")
+
+        created_processors: list[AbstractWebhookProcessor] = []
+        for processor_class in matching_processors:
+            processor = processor_class(event.clone())
+            created_processors.append(processor)
+        return created_processors
+
+    async def process_queue(self, path: str) -> None:
+        """Process events for a specific path in order"""
+        while True:
+            matching_processors: list[AbstractWebhookProcessor] = []
+            event: WebhookEvent | None = None
+            try:
+                event = await self._event_queues[path].get()
+                with logger.contextualize(webhook_path=path, trace_id=event.trace_id):
+                    matching_processors = self._extract_matching_processors(event, path)
+                    await asyncio.gather(
+                        *(
+                            self._process_single_event(processor, path)
+                            for processor in matching_processors
+                        )
+                    )
+            except asyncio.CancelledError:
+                logger.info(f"Queue processor for {path} is shutting down")
+                for processor in matching_processors:
+                    await processor.cancel()
+                    self._timestamp_event_error(processor.event)
+                break
+            except Exception as e:
+                logger.exception(
+                    f"Unexpected error in queue processor for {path}: {str(e)}"
+                )
+                for processor in matching_processors:
+                    self._timestamp_event_error(processor.event)
+            finally:
+                if event:
+                    await self._event_queues[path].commit()
+                    # Prevents committing empty events for cases where we shut down while processing
+                    event = None
+
+    def _timestamp_event_error(self, event: WebhookEvent) -> None:
+        """Timestamp an event as having an error"""
+        event.set_timestamp(WebhookEventTimestamp.FinishedProcessingWithError)
+
+    async def _process_single_event(
+        self, processor: AbstractWebhookProcessor, path: str
+    ) -> None:
+        """Process a single event with a specific processor"""
+        try:
+            logger.debug("Start processing queued webhook")
+            processor.event.set_timestamp(WebhookEventTimestamp.StartedProcessing)
+
+            await self._execute_processor(processor)
+            processor.event.set_timestamp(
+                WebhookEventTimestamp.FinishedProcessingSuccessfully
+            )
+        except Exception as e:
+            logger.exception(f"Error processing queued webhook for {path}: {str(e)}")
+            self._timestamp_event_error(processor.event)
+
+    async def _execute_processor(self, processor: AbstractWebhookProcessor) -> None:
+        """Execute a single processor within a max processing time"""
+        try:
+            await asyncio.wait_for(
+                self._process_webhook_request(processor),
+                timeout=self._max_event_processing_seconds,
+            )
+        except asyncio.TimeoutError:
+            raise TimeoutError(
+                f"Processor processing timed out after {self._max_event_processing_seconds} seconds"
+            )
+
+    async def _process_webhook_request(
+        self, processor: AbstractWebhookProcessor
+    ) -> None:
+        """Process a webhook request with retry logic
+
+        Args:
+            processor: The webhook processor to use
+        """
+        await processor.before_processing()
+
+        payload = processor.event.payload
+        headers = processor.event.headers
+
+        if not await processor.authenticate(payload, headers):
+            raise ValueError("Authentication failed")
+
+        if not await processor.validate_payload(payload):
+            raise ValueError("Invalid payload")
+
+        while True:
+            try:
+                await processor.handle_event(payload)
+                break
+
+            except Exception as e:
+                await processor.on_error(e)
+
+                if (
+                    processor.should_retry(e)
+                    and processor.retry_count < processor.max_retries
+                ):
+                    processor.retry_count += 1
+                    delay = processor.calculate_retry_delay()
+                    await asyncio.sleep(delay)
+                    continue
+
+                raise
+
+        await processor.after_processing()
+
+    def register_processor(
+        self,
+        path: str,
+        processor: Type[AbstractWebhookProcessor],
+        event_filter: Callable[[WebhookEvent], bool] = lambda _: True,
+    ) -> None:
+        """Register a webhook processor for a specific path with optional filter"""
+
+        if not issubclass(processor, AbstractWebhookProcessor):
+            raise ValueError("Processor must extend AbstractWebhookProcessor")
+
+        if path not in self._processors:
+            self._processors[path] = []
+            self._event_queues[path] = LocalQueue()
+            self._register_route(path)
+
+        self._processors[path].append(
+            ProcessorRegistration(processor=processor, filter=event_filter)
+        )
+
+    def _register_route(self, path: str) -> None:
+        """Register a route for a specific path"""
+
+        async def handle_webhook(request: Request) -> Dict[str, str]:
+            """Handle incoming webhook requests for a specific path."""
+            try:
+                event = await WebhookEvent.from_request(request)
+                event.set_timestamp(WebhookEventTimestamp.AddedToQueue)
+                await self._event_queues[path].put(event)
+                return {"status": "ok"}
+            except Exception as e:
+                logger.exception(f"Error processing webhook: {str(e)}")
+                return {"status": "error", "message": str(e)}
+
+        self._router.add_api_route(
+            path,
+            handle_webhook,
+            methods=["POST"],
+        )
+
+    async def _cancel_all_tasks(self) -> None:
+        """Cancel all webhook processor tasks"""
+        for task in self._webhook_processor_tasks:
+            task.cancel()
+
+        await asyncio.gather(*self._webhook_processor_tasks, return_exceptions=True)
+
+    async def shutdown(self) -> None:
+        """Gracefully shutdown all queue processors"""
+        logger.warning("Shutting down webhook processor manager")
+
+        try:
+            await asyncio.wait_for(
+                asyncio.gather(
+                    *(queue.teardown() for queue in self._event_queues.values())
+                ),
+                timeout=self._max_wait_seconds_before_shutdown,
+            )
+        except asyncio.TimeoutError:
+            logger.warning("Shutdown timed out waiting for queues to empty")
+
+        await self._cancel_all_tasks()
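A wiring sketch, reusing the hypothetical PushEventProcessor from the earlier sketch and assuming the manager and SignalHandler can be constructed standalone; inside an integration you would normally go through ocean.add_webhook_processor instead:

import asyncio
from fastapi import APIRouter, FastAPI
from port_ocean.core.handlers.webhook.processor_manager import WebhookProcessorManager
from port_ocean.utils.signal import SignalHandler

app = FastAPI()
router = APIRouter()
# Assumes SignalHandler() can be built directly; Ocean normally wires this up itself.
manager = WebhookProcessorManager(router, SignalHandler())
# PushEventProcessor is the sketch subclass defined after the processor hunk above.
manager.register_processor("/integration/webhook", PushEventProcessor)
app.include_router(router)

@app.on_event("startup")
async def start_consumers() -> None:
    # One long-lived task per registered path consumes its queue in order.
    await manager.start_processing_event_messages()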
port_ocean/core/handlers/webhook/webhook_event.py

@@ -0,0 +1,77 @@
+from enum import StrEnum
+from typing import Any, Dict, Type, TypeAlias
+from uuid import uuid4
+from fastapi import Request
+from loguru import logger
+
+
+EventPayload: TypeAlias = Dict[str, Any]
+EventHeaders: TypeAlias = Dict[str, str]
+
+
+class WebhookEventTimestamp(StrEnum):
+    """Enum for timestamp keys"""
+
+    AddedToQueue = "Added To Queue"
+    StartedProcessing = "Started Processing"
+    FinishedProcessingSuccessfully = "Finished Processing Successfully"
+    FinishedProcessingWithError = "Finished Processing With Error"
+
+
+class WebhookEvent:
+    """Represents a webhook event"""
+
+    def __init__(
+        self,
+        trace_id: str,
+        payload: EventPayload,
+        headers: EventHeaders,
+        original_request: Request | None = None,
+    ) -> None:
+        self.trace_id = trace_id
+        self.payload = payload
+        self.headers = headers
+        self._original_request = original_request
+
+    @classmethod
+    async def from_request(
+        cls: Type["WebhookEvent"], request: Request
+    ) -> "WebhookEvent":
+        trace_id = str(uuid4())
+        payload = await request.json()
+
+        return cls(
+            trace_id=trace_id,
+            payload=payload,
+            headers=dict(request.headers),
+            original_request=request,
+        )
+
+    @classmethod
+    def from_dict(cls: Type["WebhookEvent"], data: Dict[str, Any]) -> "WebhookEvent":
+        return cls(
+            trace_id=data["trace_id"],
+            payload=data["payload"],
+            headers=data["headers"],
+            original_request=None,
+        )
+
+    def clone(self) -> "WebhookEvent":
+        return WebhookEvent(
+            trace_id=self.trace_id,
+            payload=self.payload,
+            headers=self.headers,
+            original_request=self._original_request,
+        )
+
+    def set_timestamp(self, timestamp: WebhookEventTimestamp) -> None:
+        """Set a timestamp for a specific event"""
+        logger.info(
+            f"Webhook Event {timestamp.value}",
+            extra={
+                "trace_id": self.trace_id,
+                "payload": self.payload,
+                "headers": self.headers,
+                "timestamp_type": timestamp.value,
+            },
+        )
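from_dict, clone, and set_timestamp need no FastAPI request, so the event lifecycle can be exercised directly; a short sketch with made-up values:

from port_ocean.core.handlers.webhook.webhook_event import (
    WebhookEvent,
    WebhookEventTimestamp,
)

event = WebhookEvent.from_dict({
    "trace_id": "abc-123",           # made-up values for illustration
    "payload": {"action": "created"},
    "headers": {"content-type": "application/json"},
})
copy = event.clone()  # same trace_id/payload/headers (and original request, if any)
event.set_timestamp(WebhookEventTimestamp.AddedToQueue)  # emits a structured log line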
port_ocean/core/integrations/mixins/sync.py

@@ -96,12 +96,13 @@ class SyncMixin(HandlerMixin):
             IntegrationNotStartedException: If EntitiesStateApplier class is not initialized.
         """
         entities_at_port = await ocean.port_client.search_entities(user_agent_type)
+        app_config = await self.port_app_config_handler.get_port_app_config()
 
         modified_entities = await self.entities_state_applier.upsert(
             entities, user_agent_type
         )
         await self.entities_state_applier.delete_diff(
-            {"before": entities_at_port, "after": modified_entities}, user_agent_type
+            {"before": entities_at_port, "after": modified_entities}, user_agent_type, app_config.entity_deletion_threshold
         )
 
         logger.info("Finished syncing change")
port_ocean/core/integrations/mixins/sync_raw.py

@@ -220,26 +220,32 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         )
         modified_objects = []
 
-        try:
-            changed_entities = await self._map_entities_compared_with_port(
-                objects_diff[0].entity_selector_diff.passed,
-                resource,
-                user_agent_type
-            )
-            if changed_entities:
-                logger.info("Upserting changed entities", changed_entities=len(changed_entities),
-                            total_entities=len(objects_diff[0].entity_selector_diff.passed))
-                await self.entities_state_applier.upsert(
-                    changed_entities, user_agent_type
-                )
-            else:
-                logger.info("Entities in batch didn't change since last sync, skipping", total_entities=len(objects_diff[0].entity_selector_diff.passed))
-            modified_objects = [ocean.port_client._reduce_entity(entity) for entity in objects_diff[0].entity_selector_diff.passed]
-        except Exception as e:
-            logger.warning(f"Failed to resolve batch entities with Port, falling back to upserting all entities: {str(e)}")
-            modified_objects = await self.entities_state_applier.upsert(
-                objects_diff[0].entity_selector_diff.passed, user_agent_type
+        if event.event_type == EventType.RESYNC:
+            try:
+                changed_entities = await self._map_entities_compared_with_port(
+                    objects_diff[0].entity_selector_diff.passed,
+                    resource,
+                    user_agent_type
                 )
+                if changed_entities:
+                    logger.info("Upserting changed entities", changed_entities=len(changed_entities),
+                                total_entities=len(objects_diff[0].entity_selector_diff.passed))
+                    await self.entities_state_applier.upsert(
+                        changed_entities, user_agent_type
+                    )
+                else:
+                    logger.info("Entities in batch didn't change since last sync, skipping", total_entities=len(objects_diff[0].entity_selector_diff.passed))
+                modified_objects = [ocean.port_client._reduce_entity(entity) for entity in objects_diff[0].entity_selector_diff.passed]
+            except Exception as e:
+                logger.warning(f"Failed to resolve batch entities with Port, falling back to upserting all entities: {str(e)}")
+                modified_objects = await self.entities_state_applier.upsert(
+                    objects_diff[0].entity_selector_diff.passed, user_agent_type
+                )
+        else:
+            modified_objects = await self.entities_state_applier.upsert(
+                objects_diff[0].entity_selector_diff.passed, user_agent_type
+            )
+
 
         return CalculationResult(
             objects_diff[0].entity_selector_diff._replace(passed=modified_objects),
@@ -608,7 +614,7 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         )
         await self.entities_state_applier.delete_diff(
             {"before": entities_at_port, "after": generated_entities},
-            user_agent_type,
+            user_agent_type, app_config.entity_deletion_threshold
         )
 
         logger.info("Resync finished successfully")
port_ocean/exceptions/webhook_processor.py

@@ -0,0 +1,4 @@
+class RetryableError(Exception):
+    """Base exception class for errors that should trigger a retry."""
+
+    pass
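Integrations can subclass RetryableError to mark their own transient failures; the default AbstractWebhookProcessor.should_retry is just an isinstance check against this type. A minimal sketch with a hypothetical subclass:

from port_ocean.exceptions.webhook_processor import RetryableError

class RateLimitedError(RetryableError):
    """Hypothetical subclass: a 429 from a third-party API is worth retrying."""

# should_retry(error) in AbstractWebhookProcessor reduces to this check:
assert isinstance(RateLimitedError("slow down"), RetryableError)
assert not isinstance(ValueError("bad payload"), RetryableError)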