port-ocean 0.20.3__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of port-ocean might be problematic.

@@ -8,7 +8,7 @@ def handle_private_integration_flags():
     )
     root_dir = os.path.join("{{ cookiecutter._repo_dir }}", "../../../")
     infra_make_file = os.path.join(root_dir, "integrations/_infra/Makefile")
-    infra_dockerfile = os.path.join(root_dir, "integrations/_infra/Dockerfile.deb")
+    infra_dockerfile = os.path.join(root_dir, "integrations/_infra/Dockerfile.Deb")
     infra_dockerignore = os.path.join(
         root_dir, "integrations/_infra/Dockerfile.dockerignore"
     )
@@ -182,9 +182,14 @@ async def event_context(
             f"Skipping resync due to empty mapping: {str(e)}", exc_info=True
         )
         raise
-    except Exception as e:
+    except BaseException as e:
         success = False
-        logger.error(f"Event failed with error: {str(e)}", exc_info=True)
+        if isinstance(e, KeyboardInterrupt):
+            logger.warning("Operation interrupted by user", exc_info=True)
+        elif isinstance(e, asyncio.CancelledError):
+            logger.warning("Operation was cancelled", exc_info=True)
+        else:
+            logger.error(f"Event failed with error: {repr(e)}", exc_info=True)
         raise
     else:
         success = True
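
The handler now catches BaseException so that cancellation and Ctrl+C are logged as expected shutdown paths rather than event failures, while still re-raising. A minimal, self-contained sketch of that idiom (not the library's event_context itself; run_guarded and step are illustrative names):

    import asyncio
    from typing import Awaitable, Callable

    from loguru import logger


    async def run_guarded(step: Callable[[], Awaitable[None]]) -> None:
        # BaseException also covers KeyboardInterrupt and asyncio.CancelledError,
        # which `except Exception` would have let bypass the logging entirely.
        try:
            await step()
        except BaseException as e:
            if isinstance(e, KeyboardInterrupt):
                logger.warning("Operation interrupted by user", exc_info=True)
            elif isinstance(e, asyncio.CancelledError):
                logger.warning("Operation was cancelled", exc_info=True)
            else:
                logger.error(f"Event failed with error: {repr(e)}", exc_info=True)
            raise  # always propagate so callers still observe the interruption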
@@ -12,6 +12,8 @@ from port_ocean.core.ocean_types import (
     START_EVENT_LISTENER,
     RawEntityDiff,
     EntityDiff,
+    BEFORE_RESYNC_EVENT_LISTENER,
+    AFTER_RESYNC_EVENT_LISTENER,
 )
 from port_ocean.exceptions.context import (
     PortOceanContextNotFoundError,
@@ -80,7 +82,7 @@ class PortOceanContext:
         ) -> RESYNC_EVENT_LISTENER | None:
             if not self.app.config.event_listener.should_resync:
                 logger.debug(
-                    "Webhook only event listener is used, resync events are ignored"
+                    f"Using event listener {self.app.config.event_listener.type}, which shouldn't perform any resyncs. Skipping resyncs setup..."
                 )
                 return None
             return self.integration.on_resync(function, kind)
@@ -93,6 +95,40 @@ class PortOceanContext:

         return wrapper

+    def on_resync_start(
+        self,
+    ) -> Callable[
+        [BEFORE_RESYNC_EVENT_LISTENER | None], BEFORE_RESYNC_EVENT_LISTENER | None
+    ]:
+        def wrapper(
+            function: BEFORE_RESYNC_EVENT_LISTENER | None,
+        ) -> BEFORE_RESYNC_EVENT_LISTENER | None:
+            if not self.app.config.event_listener.should_resync:
+                logger.debug(
+                    f"Using event listener {self.app.config.event_listener.type}, which shouldn't perform any resyncs. Skipping resyncs setup..."
+                )
+                return None
+            return self.integration.on_resync_start(function)
+
+        return wrapper
+
+    def on_resync_complete(
+        self,
+    ) -> Callable[
+        [AFTER_RESYNC_EVENT_LISTENER | None], AFTER_RESYNC_EVENT_LISTENER | None
+    ]:
+        def wrapper(
+            function: AFTER_RESYNC_EVENT_LISTENER | None,
+        ) -> AFTER_RESYNC_EVENT_LISTENER | None:
+            if not self.app.config.event_listener.should_resync:
+                logger.debug(
+                    f"Using event listener {self.app.config.event_listener.type}, which shouldn't perform any resyncs. Skipping resyncs setup..."
+                )
+                return None
+            return self.integration.on_resync_complete(function)
+
+        return wrapper
+
     async def update_raw_diff(
         self,
         kind: str,
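
The two new context methods mirror the existing on_resync decorator factory: calling them returns a wrapper that registers the hook on the integration unless the configured event listener never resyncs. A hedged usage sketch, assuming an integration registers the hooks through the ocean context the same way it does for on_resync; the hook bodies are illustrative:

    from port_ocean.context.ocean import ocean


    @ocean.on_resync_start()
    async def warm_up_caches() -> None:
        # Illustrative: runs once before any kind is resynced.
        ...


    @ocean.on_resync_complete()
    async def report_resync_done() -> None:
        # Illustrative: runs after the whole resync finishes successfully.
        ...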
@@ -109,9 +109,9 @@ class AbstractWebhookProcessor(ABC):
         pass

     @abstractmethod
-    def should_process_event(self, event: WebhookEvent) -> bool:
+    async def should_process_event(self, event: WebhookEvent) -> bool:
         pass

     @abstractmethod
-    def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
         pass
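
Because should_process_event and get_matching_kinds are now coroutines, existing processors must switch to async def (the processor manager awaits them, as the next hunks show). A hedged sketch of what a subclass might look like after the change; the class name and the payload check are illustrative, and the remaining abstract methods of AbstractWebhookProcessor are omitted:

    from port_ocean.core.handlers.webhook.abstract_webhook_processor import (
        AbstractWebhookProcessor,
    )
    from port_ocean.core.handlers.webhook.webhook_event import WebhookEvent


    class RepositoryWebhookProcessor(AbstractWebhookProcessor):
        # Both hooks are awaited by the manager, so they may now perform async I/O
        # (e.g. fetch configuration) before deciding whether to handle the event.
        async def should_process_event(self, event: WebhookEvent) -> bool:
            return event.payload.get("object_kind") == "push"  # illustrative check

        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
            return ["repository"]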
@@ -47,7 +47,7 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         except Exception as e:
             logger.exception(f"Error starting queue processor for {path}: {str(e)}")

-    def _extract_matching_processors(
+    async def _extract_matching_processors(
         self, webhook_event: WebhookEvent, path: str
     ) -> list[tuple[ResourceConfig, AbstractWebhookProcessor]]:
         """Find and extract the matching processor for an event"""
@@ -56,8 +56,8 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):

         for processor_class in self._processors_classes[path]:
             processor = processor_class(webhook_event.clone())
-            if processor.should_process_event(webhook_event):
-                kinds = processor.get_matching_kinds(webhook_event)
+            if await processor.should_process_event(webhook_event):
+                kinds = await processor.get_matching_kinds(webhook_event)
                 for kind in kinds:
                     for resource in event.port_app_config.resources:
                         if resource.kind == kind:
@@ -92,26 +92,30 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
                     parent_override=webhook_event.event_context,
                 ):
                     matching_processors_with_resource = (
-                        self._extract_matching_processors(webhook_event, path)
+                        await self._extract_matching_processors(webhook_event, path)
                     )
                     webhook_event_raw_results_for_all_resources = await asyncio.gather(
                         *(
                             self._process_single_event(processor, path, resource)
                             for resource, processor in matching_processors_with_resource
-                        )
+                        ),
+                        return_exceptions=True,
                     )
-                    if webhook_event_raw_results_for_all_resources and all(
-                        webhook_event_raw_results_for_all_resources
-                    ):
+
+                    successful_raw_results: list[WebhookEventRawResults] = [
+                        result
+                        for result in webhook_event_raw_results_for_all_resources
+                        if isinstance(result, WebhookEventRawResults)
+                    ]
+
+                    if successful_raw_results:
                         logger.info(
                             "Exporting raw event results to entities",
                             webhook_event_raw_results_for_all_resources_length=len(
-                                webhook_event_raw_results_for_all_resources
+                                successful_raw_results
                             ),
                         )
-                        await self.sync_raw_results(
-                            webhook_event_raw_results_for_all_resources
-                        )
+                        await self.sync_raw_results(successful_raw_results)
             except asyncio.CancelledError:
                 logger.info(f"Queue processor for {path} is shutting down")
                 for _, processor in matching_processors_with_resource:
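
The manager now gathers the per-resource handlers with return_exceptions=True, so one failing processor no longer discards the results of the others: anything that is not a WebhookEventRawResults (i.e. a raised exception) is filtered out before export. A standalone sketch of that pattern with illustrative names:

    import asyncio
    from typing import Any, Awaitable


    async def gather_successful(
        tasks: list[Awaitable[dict[str, Any]]]
    ) -> list[dict[str, Any]]:
        # return_exceptions=True turns raised exceptions into items of the result
        # list instead of failing the whole gather.
        results = await asyncio.gather(*tasks, return_exceptions=True)
        # Keep only real results; exceptions from failed tasks are dropped here,
        # much like the isinstance(result, WebhookEventRawResults) filter above.
        return [r for r in results if not isinstance(r, BaseException)]


    async def _demo() -> None:
        async def ok() -> dict[str, Any]:
            return {"name": "repo-one"}

        async def boom() -> dict[str, Any]:
            raise ValueError("Simulated failure in processor")

        print(await gather_successful([ok(), boom()]))  # -> [{'name': 'repo-one'}]


    if __name__ == "__main__":
        asyncio.run(_demo())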
@@ -1,4 +1,5 @@
 from collections import defaultdict
+from typing import Any

 from loguru import logger

@@ -6,6 +7,8 @@ from port_ocean.core.ocean_types import (
     IntegrationEventsCallbacks,
     START_EVENT_LISTENER,
     RESYNC_EVENT_LISTENER,
+    BEFORE_RESYNC_EVENT_LISTENER,
+    AFTER_RESYNC_EVENT_LISTENER,
 )


@@ -13,55 +16,56 @@ class EventsMixin:
     """A mixin class that provides event handling capabilities for the integration class.

     This mixin allows classes to register event listeners and manage event callbacks.
-    It provides methods for attaching listeners to "start" and "resync" events.
+    It provides methods for attaching listeners to various lifecycle events.

     Attributes:
-        event_strategy (IntegrationEventsCallbacks): A dictionary storing event callbacks.
-            - "start": List of functions to be called on "start" event.
-            - "resync": Default dictionary mapping event kinds to lists of functions
-              to be called on "resync" events of the specified kind.
+        event_strategy: A dictionary storing event callbacks for different event types.
     """

     def __init__(self) -> None:
         self.event_strategy: IntegrationEventsCallbacks = {
             "start": [],
             "resync": defaultdict(list),
+            "resync_start": [],
+            "resync_complete": [],
         }

     @property
     def available_resync_kinds(self) -> list[str]:
         return list(self.event_strategy["resync"].keys())

-    def on_start(self, func: START_EVENT_LISTENER) -> START_EVENT_LISTENER:
-        """Register a function as a listener for the "start" event.
-
-        Args:
-            func (START_EVENT_LISTENER): The function to be called on the "start" event.
-
-        Returns:
-            START_EVENT_LISTENER: The input function, unchanged.
-        """
-        logger.debug(f"Registering {func} as a start event listener")
-        self.event_strategy["start"].append(func)
-        return func
+    def on_start(self, function: START_EVENT_LISTENER) -> START_EVENT_LISTENER:
+        """Register a function as a listener for the "start" event."""
+        logger.debug(f"Registering {function} as a start event listener")
+        self.event_strategy["start"].append(function)
+        return function

     def on_resync(
-        self, func: RESYNC_EVENT_LISTENER| None, kind: str | None = None
-    ) -> RESYNC_EVENT_LISTENER:
-        """Register a function as a listener for a "resync" event.
+        self, function: RESYNC_EVENT_LISTENER | None, kind: str | None = None
+    ) -> RESYNC_EVENT_LISTENER | None:
+        """Register a function as a listener for a "resync" event."""
+        if function is not None:
+            if kind is None:
+                logger.debug("Registering resync event listener any kind")
+            else:
+                logger.info(f"Registering resync event listener for kind {kind}")
+            self.event_strategy["resync"][kind].append(function)
+        return function

-        Args:
-            func (RESYNC_EVENT_LISTENER): The function to be called on the "resync" event.
-            kind (str | None, optional): The kind of "resync" event. Defaults to None.
+    def on_resync_start(
+        self, function: BEFORE_RESYNC_EVENT_LISTENER | None
+    ) -> BEFORE_RESYNC_EVENT_LISTENER | None:
+        """Register a function to be called when a resync operation starts."""
+        if function is not None:
+            logger.debug(f"Registering {function} as a resync_start event listener")
+            self.event_strategy["resync_start"].append(function)
+        return function

-        Returns:
-            RESYNC_EVENT_LISTENER: The input function, unchanged.
-        """
-        if func is None:
-            return None
-        if kind is None:
-            logger.debug(f"Registering resync event listener any kind")
-        else:
-            logger.info(f"Registering resync event listener for kind {kind}")
-        self.event_strategy["resync"][kind].append(func)
-        return func
+    def on_resync_complete(
+        self, function: AFTER_RESYNC_EVENT_LISTENER | None
+    ) -> AFTER_RESYNC_EVENT_LISTENER | None:
+        """Register a function to be called when a resync operation completes."""
+        if function is not None:
+            logger.debug(f"Registering {function} as a resync_complete event listener")
+            self.event_strategy["resync_complete"].append(function)
+        return function
@@ -550,6 +550,10 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         )
         logger.info(f"Resync will use the following mappings: {app_config.dict()}")

+        # Execute resync_start hooks
+        for resync_start_fn in self.event_strategy["resync_start"]:
+            await resync_start_fn()
+
         try:
             did_fetched_current_state = True
         except httpx.HTTPError as e:
@@ -598,7 +602,7 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
             if errors:
                 message = f"Resync failed with {len(errors)}. Skipping delete phase due to incomplete state"
                 error_group = ExceptionGroup(
-                    f"Resync failed with {len(errors)}. Skipping delete phase due to incomplete state",
+                    message,
                     errors,
                 )
                 if not silent:
@@ -618,3 +622,12 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
                 )

                 logger.info("Resync finished successfully")
+
+                # Execute resync_complete hooks
+                if "resync_complete" in self.event_strategy:
+                    logger.info("Executing resync_complete hooks")
+
+                    for resync_complete_fn in self.event_strategy["resync_complete"]:
+                        await resync_complete_fn()
+
+                    logger.info("Finished executing resync_complete hooks")
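
Both loops simply await each registered hook in registration order, and the resync_complete hooks only run once the resync has finished without raising (the new tests below verify this). A hedged sketch of a hook pair an integration might register, for example to time a full resync; the names and the timing logic are illustrative:

    import time

    from loguru import logger

    _resync_started_at: float | None = None


    async def record_resync_start() -> None:
        # Matches the BEFORE_RESYNC_EVENT_LISTENER shape: no arguments, awaitable.
        global _resync_started_at
        _resync_started_at = time.monotonic()
        logger.info("Resync starting")


    async def record_resync_complete() -> None:
        # Only invoked when the resync completed successfully.
        if _resync_started_at is not None:
            logger.info(f"Resync took {time.monotonic() - _resync_started_at:.1f}s")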
@@ -19,6 +19,9 @@ LISTENER_RESULT = Awaitable[RAW_RESULT] | ASYNC_GENERATOR_RESYNC_TYPE
 RESYNC_EVENT_LISTENER = Callable[[str], LISTENER_RESULT]
 START_EVENT_LISTENER = Callable[[], Awaitable[None]]

+BEFORE_RESYNC_EVENT_LISTENER = Callable[[], Awaitable[None]]
+AFTER_RESYNC_EVENT_LISTENER = Callable[[], Awaitable[None]]
+

 class RawEntityDiff(TypedDict):
     before: list[RAW_ITEM]
@@ -44,3 +47,5 @@ class CalculationResult(NamedTuple):
 class IntegrationEventsCallbacks(TypedDict):
     start: list[START_EVENT_LISTENER]
     resync: dict[str | None, list[RESYNC_EVENT_LISTENER]]
+    resync_start: list[BEFORE_RESYNC_EVENT_LISTENER]
+    resync_complete: list[AFTER_RESYNC_EVENT_LISTENER]
@@ -799,3 +799,177 @@ async def test_register_resource_raw_skip_event_type_http_request_upsert_called_
     mock_sync_raw_mixin._calculate_raw.assert_called_once()
     mock_sync_raw_mixin._map_entities_compared_with_port.assert_not_called()
     mock_sync_raw_mixin.entities_state_applier.upsert.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_on_resync_start_hooks_are_called(
+    mock_sync_raw_mixin: SyncRawMixin,
+    mock_port_app_config: PortAppConfig,
+) -> None:
+    # Setup
+    resync_start_called = False
+
+    async def on_resync_start() -> None:
+        nonlocal resync_start_called
+        resync_start_called = True
+
+    mock_sync_raw_mixin.on_resync_start(on_resync_start)
+
+    # Execute
+    async with event_context(EventType.RESYNC, trigger_type="machine") as event:
+        event.port_app_config = mock_port_app_config
+        await mock_sync_raw_mixin.sync_raw_all(
+            trigger_type="machine",
+            user_agent_type=UserAgentType.exporter,
+        )
+
+    # Verify
+    assert resync_start_called, "on_resync_start hook was not called"
+
+
+@pytest.mark.asyncio
+async def test_on_resync_complete_hooks_are_called_on_success(
+    mock_sync_raw_mixin: SyncRawMixin,
+    mock_port_app_config: PortAppConfig,
+    mock_ocean: Ocean,
+) -> None:
+    # Setup
+    resync_complete_called = False
+
+    async def on_resync_complete() -> None:
+        nonlocal resync_complete_called
+        resync_complete_called = True
+
+    mock_sync_raw_mixin.on_resync_complete(on_resync_complete)
+    mock_ocean.port_client.search_entities.return_value = []  # type: ignore
+
+    # Execute
+    async with event_context(EventType.RESYNC, trigger_type="machine") as event:
+        event.port_app_config = mock_port_app_config
+        await mock_sync_raw_mixin.sync_raw_all(
+            trigger_type="machine",
+            user_agent_type=UserAgentType.exporter,
+        )
+
+    # Verify
+    assert resync_complete_called, "on_resync_complete hook was not called"
+
+
+@pytest.mark.asyncio
+async def test_on_resync_complete_hooks_not_called_on_error(
+    mock_sync_raw_mixin: SyncRawMixin,
+    mock_port_app_config: PortAppConfig,
+) -> None:
+    # Setup
+    resync_complete_called = False
+
+    async def on_resync_complete() -> None:
+        nonlocal resync_complete_called
+        resync_complete_called = True
+
+    mock_sync_raw_mixin.on_resync_complete(on_resync_complete)
+    mock_sync_raw_mixin._get_resource_raw_results.side_effect = Exception("Test error")  # type: ignore
+
+    # Execute
+    async with event_context(EventType.RESYNC, trigger_type="machine") as event:
+        event.port_app_config = mock_port_app_config
+        with pytest.raises(Exception):
+            await mock_sync_raw_mixin.sync_raw_all(
+                trigger_type="machine",
+                user_agent_type=UserAgentType.exporter,
+            )
+
+    # Verify
+    assert (
+        not resync_complete_called
+    ), "on_resync_complete hook should not have been called on error"
+
+
+@pytest.mark.asyncio
+async def test_multiple_on_resync_start_on_resync_complete_hooks_called_in_order(
+    mock_sync_raw_mixin: SyncRawMixin,
+    mock_port_app_config: PortAppConfig,
+    mock_ocean: Ocean,
+) -> None:
+    # Setup
+    call_order: list[str] = []
+
+    async def on_resync_start1() -> None:
+        call_order.append("on_resync_start1")
+
+    async def on_resync_start2() -> None:
+        call_order.append("on_resync_start2")
+
+    async def on_resync_complete1() -> None:
+        call_order.append("on_resync_complete1")
+
+    async def on_resync_complete2() -> None:
+        call_order.append("on_resync_complete2")
+
+    mock_sync_raw_mixin.on_resync_start(on_resync_start1)
+    mock_sync_raw_mixin.on_resync_start(on_resync_start2)
+    mock_sync_raw_mixin.on_resync_complete(on_resync_complete1)
+    mock_sync_raw_mixin.on_resync_complete(on_resync_complete2)
+    mock_ocean.port_client.search_entities.return_value = []  # type: ignore
+
+    # Execute
+    async with event_context(EventType.RESYNC, trigger_type="machine") as event:
+        event.port_app_config = mock_port_app_config
+        await mock_sync_raw_mixin.sync_raw_all(
+            trigger_type="machine",
+            user_agent_type=UserAgentType.exporter,
+        )
+
+    # Verify
+    assert call_order == [
+        "on_resync_start1",
+        "on_resync_start2",
+        "on_resync_complete1",
+        "on_resync_complete2",
+    ], "Hooks were not called in the correct order"
+
+
+@pytest.mark.asyncio
+async def test_on_resync_start_hook_error_prevents_resync(
+    mock_sync_raw_mixin: SyncRawMixin,
+    mock_port_app_config: PortAppConfig,
+) -> None:
+    # Setup
+    resync_complete_called = False
+    resync_proceeded = False
+
+    async def on_resync_start() -> None:
+        raise Exception("Before resync error")
+
+    async def on_resync_complete() -> None:
+        nonlocal resync_complete_called
+        resync_complete_called = True
+
+    mock_sync_raw_mixin.on_resync_start(on_resync_start)
+    mock_sync_raw_mixin.on_resync_complete(on_resync_complete)
+
+    original_get_resource_raw_results = mock_sync_raw_mixin._get_resource_raw_results
+
+    async def track_resync(*args: Any, **kwargs: Any) -> Any:
+        nonlocal resync_proceeded
+        resync_proceeded = True
+        return await original_get_resource_raw_results(*args, **kwargs)
+
+    mock_sync_raw_mixin._get_resource_raw_results = track_resync  # type: ignore
+
+    # Execute
+    async with event_context(EventType.RESYNC, trigger_type="machine") as event:
+        event.port_app_config = mock_port_app_config
+        with pytest.raises(Exception, match="Before resync error"):
+            await mock_sync_raw_mixin.sync_raw_all(
+                trigger_type="machine",
+                user_agent_type=UserAgentType.exporter,
+            )
+
+    # Verify
+    assert (
+        not resync_proceeded
+    ), "Resync should not have proceeded after before_resync hook error"
+    assert (
+        not resync_complete_called
+    ), "on_resync_complete hook should not have been called after error"
@@ -32,7 +32,7 @@ class ConcreteWebhookProcessor(AbstractWebhookProcessor):
     ) -> WebhookEventRawResults:
         return WebhookEventRawResults(updated_raw_results=[{}], deleted_raw_results=[])

-    def should_process_event(self, webhook_event: WebhookEvent) -> bool:
+    async def should_process_event(self, webhook_event: WebhookEvent) -> bool:
         return True

     async def before_processing(self) -> None:
@@ -47,7 +47,7 @@ class ConcreteWebhookProcessor(AbstractWebhookProcessor):
         await super().cancel()
         self.cancel_called = True

-    def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
         return ["test"]


@@ -54,10 +54,10 @@ class MockProcessor(AbstractWebhookProcessor):
     ) -> WebhookEventRawResults:
         return WebhookEventRawResults(updated_raw_results=[], deleted_raw_results=[])

-    def should_process_event(self, event: WebhookEvent) -> bool:
+    async def should_process_event(self, event: WebhookEvent) -> bool:
         return True

-    def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
         return ["repository"]


@@ -75,10 +75,10 @@ class MockProcessorFalse(AbstractWebhookProcessor):
     ) -> WebhookEventRawResults:
         return WebhookEventRawResults(updated_raw_results=[], deleted_raw_results=[])

-    def should_process_event(self, event: WebhookEvent) -> bool:
+    async def should_process_event(self, event: WebhookEvent) -> bool:
         return False

-    def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
         return ["repository"]


@@ -110,10 +110,10 @@ class MockWebhookProcessor(AbstractWebhookProcessor):
     async def cancel(self) -> None:
         self.cancel_called = True

-    def should_process_event(self, event: WebhookEvent) -> bool:
+    async def should_process_event(self, event: WebhookEvent) -> bool:
         return True

-    def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
         return ["test"]


@@ -155,10 +155,10 @@ class MockWebhookHandlerForProcessWebhookRequest(AbstractWebhookProcessor):
         self.handled = True
         return WebhookEventRawResults(updated_raw_results=[], deleted_raw_results=[])

-    def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
         return ["repository"]

-    def should_process_event(self, event: WebhookEvent) -> bool:
+    async def should_process_event(self, event: WebhookEvent) -> bool:
         """Filter the event data before processing."""
         return True

@@ -329,16 +329,16 @@ async def test_extractMatchingProcessors_processorMatch(

     async with event_context(EventType.HTTP_REQUEST, trigger_type="request") as event:
         event.port_app_config = mock_port_app_config
-        processors = processor_manager._extract_matching_processors(
+        processors = await processor_manager._extract_matching_processors(
             webhook_event, test_path
         )

-    assert len(processors) == 1
-    config, processor = processors[0]
-    assert isinstance(processor, MockProcessor)
-    assert config.kind == "repository"
-    assert processor.event != webhook_event
-    assert processor.event.payload == webhook_event.payload
+        assert len(processors) == 1
+        config, processor = processors[0]
+        assert isinstance(processor, MockProcessor)
+        assert config.kind == "repository"
+        assert processor.event != webhook_event
+        assert processor.event.payload == webhook_event.payload


@@ -355,7 +355,9 @@ async def test_extractMatchingProcessors_noMatch(
         EventType.HTTP_REQUEST, trigger_type="request"
     ) as event:
         event.port_app_config = mock_port_app_config
-        processor_manager._extract_matching_processors(webhook_event, test_path)
+        await processor_manager._extract_matching_processors(
+            webhook_event, test_path
+        )


@@ -370,7 +372,7 @@ async def test_extractMatchingProcessors_multipleMatches(

     async with event_context(EventType.HTTP_REQUEST, trigger_type="request") as event:
         event.port_app_config = mock_port_app_config
-        processors = processor_manager._extract_matching_processors(
+        processors = await processor_manager._extract_matching_processors(
             webhook_event, test_path
         )

@@ -391,7 +393,7 @@ async def test_extractMatchingProcessors_onlyOneMatches(

     async with event_context(EventType.HTTP_REQUEST, trigger_type="request") as event:
         event.port_app_config = mock_port_app_config
-        processors = processor_manager._extract_matching_processors(
+        processors = await processor_manager._extract_matching_processors(
             webhook_event, test_path
         )

@@ -542,10 +544,10 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessed_en
             processed_events.append(event_data)
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return True

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     processing_complete = asyncio.Event()
@@ -649,10 +651,10 @@ async def test_integrationTest_postRequestSent_reachedTimeout_entityNotUpserted(
                 updated_raw_results=[], deleted_raw_results=[]
             )

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return True

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     processing_complete = asyncio.Event()
@@ -766,20 +768,20 @@ async def test_integrationTest_postRequestSent_noMatchingHandlers_entityNotUpser
             )
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return False

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     processing_complete = asyncio.Event()
     original_process_data = LiveEventsProcessorManager._extract_matching_processors

-    def patched_extract_matching_processors(
+    async def patched_extract_matching_processors(
         self: LiveEventsProcessorManager, event: WebhookEvent, path: str
     ) -> list[tuple[ResourceConfig, AbstractWebhookProcessor]]:
         try:
-            return original_process_data(self, event, path)
+            return await original_process_data(self, event, path)
         except Exception as e:
             test_state["exception_thrown"] = e  # type: ignore
             return []
@@ -882,10 +884,10 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessedFor
             processed_events.append(event_data)
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return True

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     class TestProcessorB(AbstractWebhookProcessor):
@@ -913,10 +915,10 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessedFor
             processed_events.append(event_data)
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return True

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     class TestProcessorFiltersOut(AbstractWebhookProcessor):
@@ -944,10 +946,10 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessedFor
             processed_events.append(event_data)
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return False

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     processing_complete = asyncio.Event()
@@ -1071,10 +1073,10 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessedwit
             processed_events.append(event_data)
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return True

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     processing_complete = asyncio.Event()
@@ -1195,10 +1197,10 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessedwit
             processed_events.append(event_data)
             return event_data

-        def should_process_event(self, event: WebhookEvent) -> bool:
+        async def should_process_event(self, event: WebhookEvent) -> bool:
             return True

-        def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
             return ["repository"]

     processing_complete = asyncio.Event()
@@ -1262,3 +1264,139 @@ async def test_integrationTest_postRequestSent_webhookEventRawResultProcessedwit
         mock_delete.assert_not_called()

         await mock_context.app.webhook_manager.shutdown()
+
+
+@pytest.mark.asyncio
+@patch(
+    "port_ocean.core.handlers.entities_state_applier.port.applier.HttpEntitiesStateApplier.upsert"
+)
+@patch(
+    "port_ocean.core.handlers.entities_state_applier.port.applier.HttpEntitiesStateApplier.delete"
+)
+async def test_integrationTest_postRequestSent_oneProcessorThrowsException_onlySuccessfulResultsProcessed(
+    mock_delete: AsyncMock,
+    mock_upsert: AsyncMock,
+    mock_context: PortOceanContext,
+    mock_port_app_config: PortAppConfig,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Integration test for webhook processing where one processor throws an exception"""
+
+    monkeypatch.setattr(
+        "port_ocean.core.integrations.mixins.handler.ocean", mock_context
+    )
+    monkeypatch.setattr(
+        "port_ocean.core.integrations.mixins.live_events.ocean", mock_context
+    )
+    processed_events: list[WebhookEventRawResults] = []
+    mock_upsert.return_value = [entity]
+
+    class SuccessfulProcessor(AbstractWebhookProcessor):
+        async def authenticate(
+            self, payload: Dict[str, Any], headers: Dict[str, str]
+        ) -> bool:
+            return True
+
+        async def validate_payload(self, payload: Dict[str, Any]) -> bool:
+            return True
+
+        async def handle_event(
+            self, payload: EventPayload, resource: ResourceConfig
+        ) -> WebhookEventRawResults:
+            event_data = WebhookEventRawResults(
+                updated_raw_results=[
+                    {
+                        "name": "repo-one",
+                        "links": {"html": {"href": "https://example.com/repo-one"}},
+                        "main_branch": "main",
+                    }
+                ],
+                deleted_raw_results=[],
+            )
+            processed_events.append(event_data)
+            return event_data
+
+        async def should_process_event(self, event: WebhookEvent) -> bool:
+            return True
+
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+            return ["repository"]
+
+    class FailingProcessor(AbstractWebhookProcessor):
+        async def authenticate(
+            self, payload: Dict[str, Any], headers: Dict[str, str]
+        ) -> bool:
+            return True
+
+        async def validate_payload(self, payload: Dict[str, Any]) -> bool:
+            return True
+
+        async def handle_event(
+            self, payload: EventPayload, resource: ResourceConfig
+        ) -> WebhookEventRawResults:
+            raise ValueError("Simulated failure in processor")
+
+        async def should_process_event(self, event: WebhookEvent) -> bool:
+            return True
+
+        async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
+            return ["repository"]
+
+    processing_complete = asyncio.Event()
+    original_process_data = LiveEventsMixin.sync_raw_results
+
+    async def patched_export_single_resource(
+        self: LiveEventsMixin, webhookEventRawResults: list[WebhookEventRawResults]
+    ) -> None:
+        try:
+            await original_process_data(self, webhookEventRawResults)
+        except Exception as e:
+            raise e
+        finally:
+            processing_complete.set()
+
+    monkeypatch.setattr(
+        LiveEventsMixin,
+        "sync_raw_results",
+        patched_export_single_resource,
+    )
+    test_path = "/webhook-test"
+    mock_context.app.integration = BaseIntegration(ocean)
+    mock_context.app.webhook_manager = LiveEventsProcessorManager(
+        mock_context.app.integration_router, SignalHandler()
+    )
+
+    # Register both processors
+    mock_context.app.webhook_manager.register_processor(test_path, SuccessfulProcessor)
+    mock_context.app.webhook_manager.register_processor(test_path, FailingProcessor)
+    await mock_context.app.webhook_manager.start_processing_event_messages()
+    mock_context.app.fast_api_app.include_router(
+        mock_context.app.webhook_manager._router
+    )
+    client = TestClient(mock_context.app.fast_api_app)
+
+    test_payload = {"test": "data"}
+
+    async with event_context(EventType.HTTP_REQUEST, trigger_type="request") as event:
+        mock_context.app.webhook_manager.port_app_config_handler.get_port_app_config = AsyncMock(return_value=mock_port_app_config)  # type: ignore
+        event.port_app_config = (
+            await mock_context.app.webhook_manager.port_app_config_handler.get_port_app_config()
+        )
+
+        response = client.post(
+            test_path, json=test_payload, headers={"Content-Type": "application/json"}
+        )
+
+        assert response.status_code == 200
+        assert response.json() == {"status": "ok"}
+
+        try:
+            await asyncio.wait_for(processing_complete.wait(), timeout=10.0)
+        except asyncio.TimeoutError:
+            pytest.fail("Event processing timed out")
+
+        assert len(processed_events) == 1
+        assert mock_upsert.call_count == 1
+        mock_delete.assert_not_called()
+
+        await mock_context.app.webhook_manager.shutdown()
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: port-ocean
-Version: 0.20.3
+Version: 0.21.0
 Summary: Port Ocean is a CLI tool for managing your Port projects.
 Home-page: https://app.getport.io
 Keywords: ocean,port-ocean,port
@@ -24,7 +24,7 @@ port_ocean/cli/commands/version.py,sha256=hEuIEIcm6Zkamz41Z9nxeSM_4g3oNlAgWwQyDG
 port_ocean/cli/cookiecutter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 port_ocean/cli/cookiecutter/cookiecutter.json,sha256=ie-LJjg-ek3lP2RRosY2u_q2W4y2TykXm_Gynjjt6Es,814
 port_ocean/cli/cookiecutter/extensions.py,sha256=eQNjZvy2enDkJpvMbBGil77Xk9-38f862wfnmCjdoBc,446
-port_ocean/cli/cookiecutter/hooks/post_gen_project.py,sha256=tFqtsjSbu7HMN32WIiFO37S1a_dfHezvdPwmM6MmNJk,1182
+port_ocean/cli/cookiecutter/hooks/post_gen_project.py,sha256=7DBdSv_vDI1YKW-y7y4wHenu4iF4zV1MukoUT1EgzXI,1182
 port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.env.example,sha256=ywAmZto6YBGXyhEmpG1uYsgaHr2N1ZBRjdtRNt6Vkpw,388
 port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.gitignore,sha256=32p1lDW_g5hyBz486GWfDeR9m7ikFlASVri5a8vmNoo,2698
 port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/.gitignore,sha256=kCpRPdl3S_jqYYZaOrc0-xa6-l3KqVjNRXc6jCkd_-Q,12
@@ -66,8 +66,8 @@ port_ocean/config/settings.py,sha256=PfMwhFQOI0zfK0bD32EunXqicVrlPYBkYC2A99nmZlg
 port_ocean/consumers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 port_ocean/consumers/kafka_consumer.py,sha256=N8KocjBi9aR0BOPG8hgKovg-ns_ggpEjrSxqSqF_BSo,4710
 port_ocean/context/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-port_ocean/context/event.py,sha256=QK2ben4fJtxdorq_yRroATttP0DRc4wLtlUJ1as5D58,6208
-port_ocean/context/ocean.py,sha256=Yt3G9CbcmFhxFTdxUhjkiZSfm92VKmzs7m-BMSGaU54,6316
+port_ocean/context/event.py,sha256=pdLBnHl9Ue5Qyyxk_NLVnIizsj9rjFgAt5qzpXq-2yw,6492
+port_ocean/context/ocean.py,sha256=Yt0KP3Rgc4MrLSo3dF0a40ww6ny0r2hDADZ42vTj22M,7750
 port_ocean/context/resource.py,sha256=yDj63URzQelj8zJPh4BAzTtPhpKr9Gw9DRn7I_0mJ1s,1692
 port_ocean/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 port_ocean/core/defaults/__init__.py,sha256=8qCZg8n06WAdMu9s_FiRtDYLGPGHbOuS60vapeUoAks,142
@@ -103,20 +103,20 @@ port_ocean/core/handlers/queue/local_queue.py,sha256=EzqsGIX43xbVAcePwTcCg5QDrXA
 port_ocean/core/handlers/resync_state_updater/__init__.py,sha256=kG6y-JQGpPfuTHh912L_bctIDCzAK4DN-d00S7rguWU,81
 port_ocean/core/handlers/resync_state_updater/updater.py,sha256=Yg9ET6ZV5B9GW7u6zZA6GlB_71kmvxvYX2FWgQNzMvo,3182
 port_ocean/core/handlers/webhook/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-port_ocean/core/handlers/webhook/abstract_webhook_processor.py,sha256=YRMr4M1m0XNZbxNGU0bzB3hEkPat1HCom66EU9NRffU,3879
-port_ocean/core/handlers/webhook/processor_manager.py,sha256=RoPV3cHjAv7jJ_vpC9y2fF2v9eb41h0VM6bNAPuzKdg,11779
+port_ocean/core/handlers/webhook/abstract_webhook_processor.py,sha256=5KwZkdkDd5HdVkXPzKiqabodZKl-hOtMypkTKd8Hq3M,3891
+port_ocean/core/handlers/webhook/processor_manager.py,sha256=Pmg81IT0i3MCFWC6648Ln4NfpAWvpyrPZwOFfdTPkDE,11928
 port_ocean/core/handlers/webhook/webhook_event.py,sha256=Iuw6IX3PPjwHECUeFgrJl6K249mJ-DPAGhP8OMxbc1c,4096
 port_ocean/core/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 port_ocean/core/integrations/base.py,sha256=eS0WDOfCTim1UOQQrNuP14I6hvT_fr8dof_cr1ls01s,3107
 port_ocean/core/integrations/mixins/__init__.py,sha256=FA1FEKMM6P-L2_m7Q4L20mFa4_RgZnwSRmTCreKcBVM,220
-port_ocean/core/integrations/mixins/events.py,sha256=0jKRsBw6lU8Mqs7MaQK4n-t_H6Z4NEkXZ5VWzqTrKEc,2396
+port_ocean/core/integrations/mixins/events.py,sha256=2L7P3Jhp8XBqddh2_o9Cn4N261nN1SySfrEdJoqLrIw,2714
 port_ocean/core/integrations/mixins/handler.py,sha256=mZ7-0UlG3LcrwJttFbMe-R4xcOU2H_g33tZar7PwTv8,3771
 port_ocean/core/integrations/mixins/live_events.py,sha256=8HklZmlyffYY_LeDe8xbt3Tb08rlLkqVhFF-2NQeJP4,4126
 port_ocean/core/integrations/mixins/sync.py,sha256=GHiFbnw0XrBfl7aCTH_w67f_N7EZbcUgssc-0fPujNU,4047
-port_ocean/core/integrations/mixins/sync_raw.py,sha256=7kk2p5lLKq9oivqqintZumuaIHSbcSmoUfWrE346l7g,24821
+port_ocean/core/integrations/mixins/sync_raw.py,sha256=RzIQ7fawAeRR8g-ocZ_ChAE6PhZjcTHCq4iMd0T0y3Q,25316
 port_ocean/core/integrations/mixins/utils.py,sha256=oN4Okz6xlaefpid1_Pud8HPSw9BwwjRohyNsknq-Myg,2309
 port_ocean/core/models.py,sha256=FvTp-BlpbvLbMbngE0wsiimsCfmIhUR1PvsE__Z--1I,2206
-port_ocean/core/ocean_types.py,sha256=j_-or1VxDy22whLLxwxgzIsE4wAhFLH19Xff9l4oJA8,1124
+port_ocean/core/ocean_types.py,sha256=onwYMsvdd2_9QmZ7qU6h-t2uF_PTIivpEro0ahevhdw,1354
 port_ocean/core/utils/entity_topological_sorter.py,sha256=MDUjM6OuDy4Xj68o-7InNN0w1jqjxeDfeY8U02vySNI,3081
 port_ocean/core/utils/utils.py,sha256=HmumOeH27N0NX1_OP3t4oGKt074ht9XyXhvfZ5I05s4,6474
 port_ocean/debug_cli.py,sha256=gHrv-Ey3cImKOcGZpjoHlo4pa_zfmyOl6TUM4o9VtcA,96
@@ -152,12 +152,12 @@ port_ocean/tests/core/defaults/test_common.py,sha256=sR7RqB3ZYV6Xn6NIg-c8k5K6JcG
 port_ocean/tests/core/handlers/entities_state_applier/test_applier.py,sha256=R9bqyJocUWTh0NW0s-5ttD_SYYeM5EbYILgVmgWa7qA,2776
 port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py,sha256=FnEnaDjuoAbKvKyv6xJ46n3j0ZcaT70Sg2zc7oy7HAA,13596
 port_ocean/tests/core/handlers/mixins/test_live_events.py,sha256=iAwVpr3n3PIkXQLw7hxd-iB_SR_vyfletVXJLOmyz28,12480
-port_ocean/tests/core/handlers/mixins/test_sync_raw.py,sha256=gxQ4e9hQuMS8-o5UbiUSt1I1uaK0DCO3yCFDVigpZvo,31740
+port_ocean/tests/core/handlers/mixins/test_sync_raw.py,sha256=S1m6SQtlQVr3qD_otHEojj4pR87vICyO9ELNyAEwZRs,37428
 port_ocean/tests/core/handlers/port_app_config/test_api.py,sha256=eJZ6SuFBLz71y4ca3DNqKag6d6HUjNJS0aqQPwiLMTI,1999
 port_ocean/tests/core/handlers/port_app_config/test_base.py,sha256=tdjpFUnUZ6TNMxc3trKkzmMTGTb7oKIeu3rRXv_fV3g,6872
 port_ocean/tests/core/handlers/queue/test_local_queue.py,sha256=9Ly0HzZXbs6Rbl_bstsIdInC3h2bgABU3roP9S_PnJM,2582
-port_ocean/tests/core/handlers/webhook/test_abstract_webhook_processor.py,sha256=lLOVjjUCI973d9Ps_hugWOjyxNgEiPjqZVqN3qC2Rhs,3328
-port_ocean/tests/core/handlers/webhook/test_processor_manager.py,sha256=jwoimtQqrzro2Q2wBUmtKWN2bg9013GRpm7LEPCix80,44109
+port_ocean/tests/core/handlers/webhook/test_abstract_webhook_processor.py,sha256=zKwHhPAYEZoZ5Z2UETp1t--mbkS8uyvlXThB0obZTTc,3340
+port_ocean/tests/core/handlers/webhook/test_processor_manager.py,sha256=Nb-3VXVRol9fM5pLZQ1iGyPsZcHkWSNGG3bDHWjuR3Y,49285
 port_ocean/tests/core/handlers/webhook/test_webhook_event.py,sha256=oR4dEHLO65mp6rkfNfszZcfFoRZlB8ZWee4XetmsuIk,3181
 port_ocean/tests/core/test_utils.py,sha256=Z3kdhb5V7Svhcyy3EansdTpgHL36TL6erNtU-OPwAcI,2647
 port_ocean/tests/core/utils/test_entity_topological_sorter.py,sha256=zuq5WSPy_88PemG3mOUIHTxWMR_js1R7tOzUYlgBd68,3447
@@ -184,8 +184,8 @@ port_ocean/utils/repeat.py,sha256=U2OeCkHPWXmRTVoPV-VcJRlQhcYqPWI5NfmPlb1JIbc,32
 port_ocean/utils/signal.py,sha256=mMVq-1Ab5YpNiqN4PkiyTGlV_G0wkUDMMjTZp5z3pb0,1514
 port_ocean/utils/time.py,sha256=pufAOH5ZQI7gXvOvJoQXZXZJV-Dqktoj9Qp9eiRwmJ4,1939
 port_ocean/version.py,sha256=UsuJdvdQlazzKGD3Hd5-U7N69STh8Dq9ggJzQFnu9fU,177
-port_ocean-0.20.3.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
-port_ocean-0.20.3.dist-info/METADATA,sha256=etjLtg8n350AlzlWET_M8HSbFlYiDwx2mb9vYIbihJk,6669
-port_ocean-0.20.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-port_ocean-0.20.3.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
-port_ocean-0.20.3.dist-info/RECORD,,
+port_ocean-0.21.0.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+port_ocean-0.21.0.dist-info/METADATA,sha256=Dq7R51uDPsYbhOUC1gopJD0AY3X_F8kDn-vwrwuOCKY,6669
+port_ocean-0.21.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+port_ocean-0.21.0.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
+port_ocean-0.21.0.dist-info/RECORD,,