port-ocean 0.28.19__py3-none-any.whl → 0.29.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their public registries.
- port_ocean/clients/port/authentication.py +19 -0
- port_ocean/clients/port/client.py +3 -0
- port_ocean/clients/port/mixins/actions.py +93 -0
- port_ocean/clients/port/mixins/blueprints.py +0 -12
- port_ocean/clients/port/mixins/integrations.py +5 -2
- port_ocean/config/settings.py +35 -3
- port_ocean/context/ocean.py +7 -5
- port_ocean/core/defaults/initialize.py +12 -5
- port_ocean/core/event_listener/__init__.py +7 -0
- port_ocean/core/event_listener/actions_only.py +42 -0
- port_ocean/core/event_listener/base.py +4 -1
- port_ocean/core/event_listener/factory.py +18 -9
- port_ocean/core/event_listener/http.py +4 -3
- port_ocean/core/event_listener/kafka.py +3 -2
- port_ocean/core/event_listener/once.py +5 -2
- port_ocean/core/event_listener/polling.py +4 -3
- port_ocean/core/event_listener/webhooks_only.py +3 -2
- port_ocean/core/handlers/actions/__init__.py +7 -0
- port_ocean/core/handlers/actions/abstract_executor.py +150 -0
- port_ocean/core/handlers/actions/execution_manager.py +434 -0
- port_ocean/core/handlers/webhook/abstract_webhook_processor.py +16 -0
- port_ocean/core/handlers/webhook/processor_manager.py +30 -12
- port_ocean/core/integrations/mixins/sync_raw.py +2 -2
- port_ocean/core/models.py +35 -2
- port_ocean/exceptions/execution_manager.py +22 -0
- port_ocean/ocean.py +30 -4
- port_ocean/tests/core/event_listener/test_kafka.py +14 -7
- port_ocean/tests/core/handlers/actions/test_execution_manager.py +837 -0
- port_ocean/tests/core/handlers/webhook/test_processor_manager.py +3 -1
- {port_ocean-0.28.19.dist-info → port_ocean-0.29.1.dist-info}/METADATA +3 -2
- {port_ocean-0.28.19.dist-info → port_ocean-0.29.1.dist-info}/RECORD +34 -27
- {port_ocean-0.28.19.dist-info → port_ocean-0.29.1.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.28.19.dist-info → port_ocean-0.29.1.dist-info}/WHEEL +0 -0
- {port_ocean-0.28.19.dist-info → port_ocean-0.29.1.dist-info}/entry_points.txt +0 -0
port_ocean/core/handlers/webhook/processor_manager.py
CHANGED

```diff
@@ -11,11 +11,17 @@ from port_ocean.core.handlers.queue.abstract_queue import AbstractQueue
 from port_ocean.core.integrations.mixins.events import EventsMixin
 from port_ocean.core.integrations.mixins.live_events import LiveEventsMixin
 from port_ocean.exceptions.webhook_processor import WebhookEventNotSupportedError
-from .webhook_event import
+from port_ocean.core.handlers.webhook.webhook_event import (
+    WebhookEvent,
+    WebhookEventRawResults,
+    LiveEventTimestamp,
+)
 from port_ocean.context.event import event
 
-
-
+from port_ocean.core.handlers.webhook.abstract_webhook_processor import (
+    AbstractWebhookProcessor,
+    WebhookProcessorType,
+)
 from port_ocean.utils.signal import SignalHandler
 from port_ocean.core.handlers.queue import LocalQueue
 
@@ -56,7 +62,7 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         while True:
             event = None
             matching_processors: List[
-                Tuple[ResourceConfig, AbstractWebhookProcessor]
+                Tuple[ResourceConfig | None, AbstractWebhookProcessor]
             ] = []
             try:
                 event = await queue.get()
```
```diff
@@ -133,16 +139,22 @@
 
     async def _extract_matching_processors(
         self, webhook_event: WebhookEvent, path: str
-    ) -> list[tuple[ResourceConfig, AbstractWebhookProcessor]]:
+    ) -> list[tuple[ResourceConfig | None, AbstractWebhookProcessor]]:
         """Find and extract the matching processor for an event"""
 
-        created_processors: list[
+        created_processors: list[
+            tuple[ResourceConfig | None, AbstractWebhookProcessor]
+        ] = []
         event_processor_names = []
 
         for processor_class in self._processors_classes[path]:
             processor = processor_class(webhook_event.clone())
             if await processor.should_process_event(webhook_event):
                 event_processor_names.append(processor.__class__.__name__)
+                if processor.get_processor_type() == WebhookProcessorType.ACTION:
+                    created_processors.append((None, processor))
+                    continue
+
                 kinds = await processor.get_matching_kinds(webhook_event)
                 for kind in kinds:
                     for resource in event.port_app_config.resources:
```
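The new `WebhookProcessorType.ACTION` branch short-circuits kind matching: an action processor is queued once with a `None` resource instead of once per matching `ResourceConfig`. A minimal sketch of a processor that takes this branch, using only the methods visible in this hunk (the class name and payload shape are invented, and any other abstract methods `AbstractWebhookProcessor` requires would still need implementing):

```python
from port_ocean.core.handlers.webhook.abstract_webhook_processor import (
    AbstractWebhookProcessor,
    WebhookProcessorType,
)
from port_ocean.core.handlers.webhook.webhook_event import (
    WebhookEvent,
    WebhookEventRawResults,
)


class ScaleServiceActionProcessor(AbstractWebhookProcessor):  # hypothetical
    def get_processor_type(self) -> WebhookProcessorType:
        # Routes this processor into the ACTION branch above, so it is
        # appended as (None, processor) and kind matching is skipped.
        return WebhookProcessorType.ACTION

    async def should_process_event(self, event: WebhookEvent) -> bool:
        return event.payload.get("type") == "scale-service"  # assumed payload shape

    async def get_matching_kinds(self, event: WebhookEvent) -> list[str]:
        return []  # unreachable for ACTION processors thanks to the `continue`

    async def handle_event(self, payload, resource) -> WebhookEventRawResults:
        # resource is None for ACTION processors, hence the
        # `type: ignore[arg-type]` at the call site further down.
        return WebhookEventRawResults(updated_raw_results=[], deleted_raw_results=[])
```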
```diff
@@ -179,7 +191,10 @@
             event.set_timestamp(LiveEventTimestamp.FinishedProcessingWithError)
 
     async def _process_single_event(
-        self,
+        self,
+        processor: AbstractWebhookProcessor,
+        path: str,
+        resource: ResourceConfig | None,
     ) -> WebhookEventRawResults:
         """Process a single event with a specific processor"""
         try:
@@ -199,7 +214,7 @@
             raise
 
     async def _execute_processor(
-        self, processor: AbstractWebhookProcessor, resource: ResourceConfig
+        self, processor: AbstractWebhookProcessor, resource: ResourceConfig | None
     ) -> WebhookEventRawResults:
         """Execute a single processor within a max processing time"""
         try:
@@ -213,7 +228,7 @@
         )
 
     async def _process_webhook_request(
-        self, processor: AbstractWebhookProcessor, resource: ResourceConfig
+        self, processor: AbstractWebhookProcessor, resource: ResourceConfig | None
     ) -> WebhookEventRawResults:
         """Process a webhook request with retry logic
 
@@ -235,9 +250,10 @@
         while True:
             try:
                 webhook_event_raw_results = await processor.handle_event(
-                    payload, resource
+                    payload, resource  # type: ignore[arg-type]
                 )
-
+                if resource is not None:
+                    webhook_event_raw_results.resource = resource
                 break
 
             except Exception as e:
@@ -258,7 +274,9 @@
         return webhook_event_raw_results
 
     def register_processor(
-        self,
+        self,
+        path: str,
+        processor: Type[AbstractWebhookProcessor],
     ) -> None:
         """Register a webhook processor for a specific path with optional filter
 
```
port_ocean/core/integrations/mixins/sync_raw.py
CHANGED

```diff
@@ -24,7 +24,7 @@ from port_ocean.core.integrations.mixins.utils import (
     resync_generator_wrapper,
     resync_function_wrapper,
 )
-from port_ocean.core.models import Entity, ProcessExecutionMode
+from port_ocean.core.models import Entity, IntegrationFeatureFlag, ProcessExecutionMode
 from port_ocean.core.ocean_types import (
     RAW_RESULT,
     RESYNC_RESULT,
@@ -478,7 +478,7 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
             bool: True if lakehouse data is enabled, False otherwise
         """
         flags = await ocean.port_client.get_organization_feature_flags()
-        if
+        if IntegrationFeatureFlag.LAKEHOUSE_ELIGIBLE in flags and ocean.config.lakehouse_enabled:
             return True
         return False
 
```
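The new membership check works even if `get_organization_feature_flags` returns plain strings: `StrEnum` members are `str` subclasses that compare equal to their values, so `in` matches either representation. A standalone check of that assumption:

```python
from enum import StrEnum


class IntegrationFeatureFlag(StrEnum):
    # Mirrors (part of) the enum added to port_ocean/core/models.py below.
    LAKEHOUSE_ELIGIBLE = "LAKEHOUSE_ELIGIBLE"


flags = ["USE_PROVISIONED_DEFAULTS", "LAKEHOUSE_ELIGIBLE"]  # e.g. a raw API response
assert IntegrationFeatureFlag.LAKEHOUSE_ELIGIBLE in flags  # str equality applies
```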
port_ocean/core/models.py
CHANGED

```diff
@@ -1,11 +1,19 @@
 from dataclasses import dataclass, field
 from enum import Enum, StrEnum
-from typing import Any, TypedDict
-
+from typing import Any, Literal, TypedDict
 from pydantic import BaseModel
 from pydantic.fields import Field
 
 
+class EventListenerType(StrEnum):
+    WEBHOOK = "WEBHOOK"
+    KAFKA = "KAFKA"
+    POLLING = "POLLING"
+    ONCE = "ONCE"
+    WEBHOOKS_ONLY = "WEBHOOKS_ONLY"
+    ACTIONS_ONLY = "ACTIONS_ONLY"
+
+
 class CreatePortResourcesOrigin(StrEnum):
     Ocean = "Ocean"
     Port = "Port"
```
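Because `EventListenerType` is a `StrEnum`, its members behave as plain strings in comparisons and formatting, which is why string-based configuration (and assertions like `config.type == "KAFKA"` in the updated tests below) keeps working. A quick demonstration:

```python
from port_ocean.core.models import EventListenerType

assert EventListenerType.KAFKA == "KAFKA"  # equality with raw strings
assert str(EventListenerType.POLLING) == "POLLING"  # StrEnum __str__ is the value
assert EventListenerType("ACTIONS_ONLY") is EventListenerType.ACTIONS_ONLY  # lookup
```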
```diff
@@ -121,3 +129,28 @@
     deleted: list[Entity] = field(default_factory=list)
     modified: list[Entity] = field(default_factory=list)
     created: list[Entity] = field(default_factory=list)
+
+
+class IntegrationFeatureFlag(StrEnum):
+    USE_PROVISIONED_DEFAULTS = "USE_PROVISIONED_DEFAULTS"
+    LAKEHOUSE_ELIGIBLE = "LAKEHOUSE_ELIGIBLE"
+    OCEAN_ACTIONS_PROCESSING_ENABLED = "OCEAN_ACTIONS_PROCESSING_ENABLED"
+
+
+class RunStatus(StrEnum):
+    IN_PROGRESS = "IN_PROGRESS"
+    SUCCESS = "SUCCESS"
+    FAILURE = "FAILURE"
+
+
+class IntegrationActionInvocationPayload(BaseModel):
+    type: Literal["INTEGRATION_ACTION"]
+    installationId: str
+    integrationActionType: str
+    integrationActionExecutionProperties: dict[str, Any] = Field(default_factory=dict)
+
+
+class ActionRun(BaseModel):
+    id: str
+    status: RunStatus
+    payload: IntegrationActionInvocationPayload
```
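These are ordinary pydantic models, so an action run fetched from the API validates in one step. A usage sketch (the dict literal is illustrative, not a documented API response):

```python
from port_ocean.core.models import ActionRun, RunStatus

run = ActionRun.parse_obj(  # pydantic v1 API, matching the BaseModel import above
    {
        "id": "r_123",
        "status": "IN_PROGRESS",  # coerced to RunStatus.IN_PROGRESS
        "payload": {
            "type": "INTEGRATION_ACTION",
            "installationId": "inst-1",
            "integrationActionType": "scale-service",
            "integrationActionExecutionProperties": {"replicas": 3},
        },
    }
)
assert run.status is RunStatus.IN_PROGRESS
```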
port_ocean/exceptions/execution_manager.py
ADDED

```diff
@@ -0,0 +1,22 @@
+class DuplicateActionExecutorError(Exception):
+    """
+    Raised when attempting to register an executor for an action that already has an existing executor.
+    """
+
+    pass
+
+
+class RunAlreadyAcknowledgedError(Exception):
+    """
+    Raised when attempting to acknowledge a run that has already been acknowledged.
+    """
+
+    pass
+
+
+class PartitionKeyNotFoundError(Exception):
+    """
+    Raised when attempting to extract a partition key that is not found in the invocation payload.
+    """
+
+    pass
```
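Where these get raised lives in the new execution_manager.py (+434 above). A hedged sketch of the registration guard that `DuplicateActionExecutorError` implies — the class and attribute names here are invented, not the shipped `ExecutionManager`:

```python
from port_ocean.exceptions.execution_manager import DuplicateActionExecutorError


class ExecutionManagerSketch:  # hypothetical
    def __init__(self) -> None:
        self._executors: dict[str, object] = {}  # action type -> executor

    def register_executor(self, action_type: str, executor: object) -> None:
        # One executor per integration action type; registering a second
        # one for the same type is treated as a programming error.
        if action_type in self._executors:
            raise DuplicateActionExecutorError(
                f"Executor already registered for action {action_type!r}"
            )
        self._executors[action_type] = executor
```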
port_ocean/ocean.py
CHANGED

```diff
@@ -26,6 +26,7 @@ from port_ocean.core.handlers.resync_state_updater import ResyncStateUpdater
 from port_ocean.core.handlers.webhook.processor_manager import (
     LiveEventsProcessorManager,
 )
+from port_ocean.core.handlers.actions.execution_manager import ExecutionManager
 from port_ocean.core.integrations.base import BaseIntegration
 from port_ocean.core.models import ProcessExecutionMode
 from port_ocean.log.sensetive import sensitive_log_filter
@@ -88,6 +89,16 @@
             max_wait_seconds_before_shutdown=self.config.max_wait_seconds_before_shutdown,
         )
 
+        self.execution_manager = ExecutionManager(
+            webhook_manager=self.webhook_manager,
+            signal_handler=signal_handler,
+            workers_count=self.config.actions_processor.workers_count,
+            runs_buffer_high_watermark=self.config.actions_processor.runs_buffer_high_watermark,
+            poll_check_interval_seconds=self.config.actions_processor.poll_check_interval_seconds,
+            visibility_timeout_ms=self.config.actions_processor.visibility_timeout_ms,
+            max_wait_seconds_before_shutdown=self.config.max_wait_seconds_before_shutdown,
+        )
+
         self.integration = (
             integration_class(ocean) if integration_class else BaseIntegration(ocean)
         )
```
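Everything here is read from `self.config.actions_processor`; the settings.py change (+35 in the file list) presumably adds a matching settings model. A sketch inferred purely from the field names used above — the class name and every default are invented for illustration:

```python
from pydantic import BaseModel


class ActionsProcessorSettings(BaseModel):  # hypothetical shape
    enabled: bool = False  # gates start_processing_action_runs()
    workers_count: int = 1  # concurrent action-run workers
    runs_buffer_high_watermark: int = 100  # backpressure threshold for buffered runs
    poll_check_interval_seconds: float = 5.0  # how often to poll for new runs
    visibility_timeout_ms: int = 60_000  # how long a claimed run stays hidden
```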
```diff
@@ -200,6 +211,24 @@
         )
         return None
 
+    async def _register_addons(self) -> None:
+        if self.base_url and self.config.event_listener.should_process_webhooks:
+            await self.webhook_manager.start_processing_event_messages()
+        else:
+            logger.warning(
+                "No base URL provided, or webhook processing is disabled is this event listener, skipping webhook processing"
+            )
+
+        if (
+            self.config.actions_processor.enabled
+            and self.config.event_listener.should_run_actions
+        ):
+            await self.execution_manager.start_processing_action_runs()
+        else:
+            logger.warning(
+                "Execution agent is not enabled, or actions processing is disabled in this event listener, skipping execution agent setup"
+            )
+
     def initialize_app(self) -> None:
         self.fast_api_app.include_router(self.integration_router, prefix="/integration")
         self.fast_api_app.include_router(
```
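Both branches gate on new event-listener flags, `should_process_webhooks` and `should_run_actions`. Given the `WEBHOOKS_ONLY` and `ACTIONS_ONLY` listener types added in models.py, a plausible reading is a pair of per-listener properties; this sketch is purely illustrative, and the real definitions live in the event_listener files listed above:

```python
class EventListenerSettingsSketch:  # hypothetical, not the shipped base class
    @property
    def should_process_webhooks(self) -> bool:
        return True  # an ACTIONS_ONLY listener would plausibly return False

    @property
    def should_run_actions(self) -> bool:
        return True  # a WEBHOOKS_ONLY listener would plausibly return False
```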
```diff
@@ -210,10 +239,7 @@
         async def lifecycle(_: FastAPI) -> AsyncIterator[None]:
             try:
                 await self.integration.start()
-
-                    await self.webhook_manager.start_processing_event_messages()
-                else:
-                    logger.warning("No base URL provided, skipping webhook processing")
+                await self._register_addons()
                 await self._setup_scheduled_resync()
                 yield None
             except Exception:
```
port_ocean/tests/core/event_listener/test_kafka.py
CHANGED

```diff
@@ -1,11 +1,12 @@
 from port_ocean.core.event_listener.kafka import KafkaEventListenerSettings
+from port_ocean.core.models import EventListenerType
 import pytest
 from pydantic import ValidationError
 
 
 def test_default_kafka_settings() -> None:
     """Test default values are properly set"""
-    config = KafkaEventListenerSettings(type=
+    config = KafkaEventListenerSettings(type=EventListenerType.KAFKA)
     assert config.type == "KAFKA"
     assert config.security_protocol == "SASL_SSL"
     assert config.authentication_mechanism == "SCRAM-SHA-512"
@@ -17,28 +18,32 @@ def test_default_kafka_settings() -> None:
 def test_brokers_json_array_parsing() -> None:
     """Test that JSON array strings get converted to comma-separated"""
     json_brokers = '["broker1:9092", "broker2:9092", "broker3:9092"]'
-    config = KafkaEventListenerSettings(
+    config = KafkaEventListenerSettings(
+        type=EventListenerType.KAFKA, brokers=json_brokers
+    )
     assert config.brokers == "broker1:9092,broker2:9092,broker3:9092"
 
 
 def test_brokers_regular_string_unchanged() -> None:
     """Test that regular comma-separated strings pass through unchanged"""
     regular_brokers = "broker1:9092,broker2:9092"
-    config = KafkaEventListenerSettings(
+    config = KafkaEventListenerSettings(
+        type=EventListenerType.KAFKA, brokers=regular_brokers
+    )
     assert config.brokers == regular_brokers
 
 
 def test_brokers_malformed_json_unchanged() -> None:
     """Test that malformed JSON strings don't break validation"""
     bad_json = "[broker1:9092, broker2:9092"
-    config = KafkaEventListenerSettings(type=
+    config = KafkaEventListenerSettings(type=EventListenerType.KAFKA, brokers=bad_json)
     assert config.brokers == bad_json
 
 
 def test_custom_values() -> None:
     """Test overriding default values"""
     config = KafkaEventListenerSettings(
-        type=
+        type=EventListenerType.KAFKA,
         brokers="custom:9092",
         security_protocol="PLAINTEXT",
         authentication_mechanism="PLAIN",
```
```diff
@@ -60,11 +65,13 @@ def test_type_literal_validation() -> None:
 
 def test_empty_brokers_array() -> None:
     """Test empty JSON array becomes empty string"""
-    config = KafkaEventListenerSettings(type=
+    config = KafkaEventListenerSettings(type=EventListenerType.KAFKA, brokers="[]")
     assert config.brokers == ""
 
 
 def test_single_broker_array() -> None:
     """Test single broker in JSON array"""
-    config = KafkaEventListenerSettings(
+    config = KafkaEventListenerSettings(
+        type=EventListenerType.KAFKA, brokers='["single:9092"]'
+    )
     assert config.brokers == "single:9092"
```