port-ocean 0.5.6__py3-none-any.whl → 0.17.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of port-ocean might be problematic.
- integrations/_infra/Dockerfile.Deb +56 -0
- integrations/_infra/Dockerfile.alpine +108 -0
- integrations/_infra/Dockerfile.base.builder +26 -0
- integrations/_infra/Dockerfile.base.runner +13 -0
- integrations/_infra/Dockerfile.dockerignore +94 -0
- {port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}} → integrations/_infra}/Makefile +21 -8
- integrations/_infra/grpcio.sh +18 -0
- integrations/_infra/init.sh +5 -0
- port_ocean/bootstrap.py +1 -1
- port_ocean/cli/commands/defaults/clean.py +3 -1
- port_ocean/cli/commands/new.py +42 -7
- port_ocean/cli/commands/sail.py +7 -1
- port_ocean/cli/cookiecutter/cookiecutter.json +3 -0
- port_ocean/cli/cookiecutter/hooks/post_gen_project.py +20 -3
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.env.example +6 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/blueprints.json +41 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/port-app-config.yml +16 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml +6 -7
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CHANGELOG.md +1 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CONTRIBUTING.md +7 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/changelog/.gitignore +1 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/main.py +16 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/pyproject.toml +21 -10
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/tests/test_sample.py +2 -0
- port_ocean/clients/port/authentication.py +16 -4
- port_ocean/clients/port/client.py +17 -0
- port_ocean/clients/port/mixins/blueprints.py +7 -8
- port_ocean/clients/port/mixins/entities.py +108 -53
- port_ocean/clients/port/mixins/integrations.py +23 -34
- port_ocean/clients/port/retry_transport.py +0 -5
- port_ocean/clients/port/utils.py +9 -3
- port_ocean/config/base.py +16 -16
- port_ocean/config/settings.py +79 -11
- port_ocean/context/event.py +18 -5
- port_ocean/context/ocean.py +14 -3
- port_ocean/core/defaults/clean.py +10 -3
- port_ocean/core/defaults/common.py +25 -9
- port_ocean/core/defaults/initialize.py +111 -100
- port_ocean/core/event_listener/__init__.py +8 -0
- port_ocean/core/event_listener/base.py +49 -10
- port_ocean/core/event_listener/factory.py +9 -1
- port_ocean/core/event_listener/http.py +11 -3
- port_ocean/core/event_listener/kafka.py +24 -5
- port_ocean/core/event_listener/once.py +96 -4
- port_ocean/core/event_listener/polling.py +16 -14
- port_ocean/core/event_listener/webhooks_only.py +41 -0
- port_ocean/core/handlers/__init__.py +1 -2
- port_ocean/core/handlers/entities_state_applier/base.py +4 -1
- port_ocean/core/handlers/entities_state_applier/port/applier.py +29 -87
- port_ocean/core/handlers/entities_state_applier/port/order_by_entities_dependencies.py +5 -2
- port_ocean/core/handlers/entity_processor/base.py +26 -22
- port_ocean/core/handlers/entity_processor/jq_entity_processor.py +253 -45
- port_ocean/core/handlers/port_app_config/base.py +55 -15
- port_ocean/core/handlers/port_app_config/models.py +24 -5
- port_ocean/core/handlers/resync_state_updater/__init__.py +5 -0
- port_ocean/core/handlers/resync_state_updater/updater.py +84 -0
- port_ocean/core/integrations/base.py +5 -7
- port_ocean/core/integrations/mixins/events.py +3 -1
- port_ocean/core/integrations/mixins/sync.py +4 -2
- port_ocean/core/integrations/mixins/sync_raw.py +209 -74
- port_ocean/core/integrations/mixins/utils.py +1 -1
- port_ocean/core/models.py +44 -0
- port_ocean/core/ocean_types.py +29 -11
- port_ocean/core/utils/entity_topological_sorter.py +90 -0
- port_ocean/core/utils/utils.py +109 -0
- port_ocean/debug_cli.py +5 -0
- port_ocean/exceptions/core.py +4 -0
- port_ocean/exceptions/port_defaults.py +0 -2
- port_ocean/helpers/retry.py +85 -24
- port_ocean/log/handlers.py +23 -2
- port_ocean/log/logger_setup.py +8 -1
- port_ocean/log/sensetive.py +25 -10
- port_ocean/middlewares.py +10 -2
- port_ocean/ocean.py +57 -24
- port_ocean/run.py +10 -5
- port_ocean/tests/__init__.py +0 -0
- port_ocean/tests/clients/port/mixins/test_entities.py +53 -0
- port_ocean/tests/conftest.py +4 -0
- port_ocean/tests/core/defaults/test_common.py +166 -0
- port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +350 -0
- port_ocean/tests/core/handlers/mixins/test_sync_raw.py +552 -0
- port_ocean/tests/core/test_utils.py +73 -0
- port_ocean/tests/core/utils/test_entity_topological_sorter.py +99 -0
- port_ocean/tests/helpers/__init__.py +0 -0
- port_ocean/tests/helpers/fake_port_api.py +191 -0
- port_ocean/tests/helpers/fixtures.py +46 -0
- port_ocean/tests/helpers/integration.py +31 -0
- port_ocean/tests/helpers/ocean_app.py +66 -0
- port_ocean/tests/helpers/port_client.py +21 -0
- port_ocean/tests/helpers/smoke_test.py +82 -0
- port_ocean/tests/log/test_handlers.py +71 -0
- port_ocean/tests/test_smoke.py +74 -0
- port_ocean/tests/utils/test_async_iterators.py +45 -0
- port_ocean/tests/utils/test_cache.py +189 -0
- port_ocean/utils/async_iterators.py +109 -0
- port_ocean/utils/cache.py +37 -1
- port_ocean/utils/misc.py +22 -4
- port_ocean/utils/queue_utils.py +88 -0
- port_ocean/utils/signal.py +1 -4
- port_ocean/utils/time.py +54 -0
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/METADATA +27 -19
- port_ocean-0.17.8.dist-info/RECORD +164 -0
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/WHEEL +1 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.dockerignore +0 -94
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Dockerfile +0 -15
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/config.yaml +0 -17
- port_ocean/core/handlers/entities_state_applier/port/validate_entity_relations.py +0 -40
- port_ocean/core/utils.py +0 -65
- port_ocean-0.5.6.dist-info/RECORD +0 -129
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/entry_points.txt +0 -0
port_ocean/core/defaults/initialize.py

@@ -2,23 +2,26 @@ import asyncio
 from typing import Type, Any
 
 import httpx
-from starlette import status
 from loguru import logger
 
 from port_ocean.clients.port.client import PortClient
 from port_ocean.clients.port.types import UserAgentType
 from port_ocean.config.settings import IntegrationConfiguration
 from port_ocean.context.ocean import ocean
-from port_ocean.core.defaults.common import
+from port_ocean.core.defaults.common import (
+    Defaults,
+    get_port_integration_defaults,
+)
 from port_ocean.core.handlers.port_app_config.models import PortAppConfig
 from port_ocean.core.models import Blueprint
+from port_ocean.core.utils.utils import gather_and_split_errors_from_results
 from port_ocean.exceptions.port_defaults import (
     AbortDefaultCreationError,
 )
 
 
 def deconstruct_blueprints_to_creation_steps(
-    raw_blueprints: list[dict[str, Any]]
+    raw_blueprints: list[dict[str, Any]],
 ) -> tuple[list[dict[str, Any]], ...]:
     """
     Deconstructing the blueprint into stages so the api wont fail to create a blueprint if there is a conflict
@@ -49,69 +52,98 @@ def deconstruct_blueprints_to_creation_steps(
     )
 
 
-async def
+async def _initialize_required_integration_settings(
     port_client: PortClient,
-
+    default_mapping: PortAppConfig,
     integration_config: IntegrationConfiguration,
 ) -> None:
-
-
-
-
-
-
+    try:
+        logger.info("Initializing integration at port")
+        integration = await port_client.get_current_integration(
+            should_log=False, should_raise=False
+        )
+        if not integration:
+            logger.info(
+                "Integration does not exist, Creating new integration with default mapping"
+            )
+            integration = await port_client.create_integration(
+                integration_config.integration.type,
+                integration_config.event_listener.get_changelog_destination_details(),
+                port_app_config=default_mapping,
+            )
+        elif not integration.get("config"):
+            logger.info(
+                "Encountered that the integration's mapping is empty, Initializing to default mapping"
+            )
+            integration = await port_client.patch_integration(
+                integration_config.integration.type,
+                integration_config.event_listener.get_changelog_destination_details(),
+                port_app_config=default_mapping,
+            )
+    except httpx.HTTPStatusError as err:
+        logger.error(f"Failed to apply default mapping: {err.response.text}.")
+        raise err
 
+    logger.info("Checking for diff in integration configuration")
+    changelog_destination = (
+        integration_config.event_listener.get_changelog_destination_details().get(
+            "changelog_destination"
+        )
+    )
+    if (
+        integration.get("changelogDestination") != changelog_destination
+        or integration.get("installationAppType") != integration_config.integration.type
+        or integration.get("version") != port_client.integration_version
+    ):
+        await port_client.patch_integration(
+            integration_config.integration.type, changelog_destination
+        )
+
+
+async def _create_resources(
+    port_client: PortClient,
+    defaults: Defaults,
+) -> None:
     creation_stage, *blueprint_patches = deconstruct_blueprints_to_creation_steps(
         defaults.blueprints
     )
 
-    blueprints_results = await
-
+    blueprints_results, _ = await gather_and_split_errors_from_results(
+        [
             port_client.get_blueprint(blueprint["identifier"], should_log=False)
             for blueprint in creation_stage
-
-
+        ],
+        lambda item: isinstance(item, Blueprint),
     )
 
-
-        result.identifier
-        for result in blueprints_results
-        if not isinstance(result, httpx.HTTPStatusError)
-        and isinstance(result, Blueprint)
-    ]
-
-    if existing_blueprints:
+    if blueprints_results:
         logger.info(
-            f"Blueprints already exist: {
+            f"Blueprints already exist: {[result.identifier for result in blueprints_results]}. Skipping integration default creation..."
         )
         return
 
-
-
+    created_blueprints, blueprint_errors = await gather_and_split_errors_from_results(
+        (
             port_client.create_blueprint(
                 blueprint, user_agent_type=UserAgentType.exporter
             )
             for blueprint in creation_stage
-        )
-        return_exceptions=True,
+        )
     )
 
-
-    created_blueprints = [
-        result["identifier"]
-        for result in create_results
-        if not isinstance(result, BaseException)
-    ]
+    created_blueprints_identifiers = [bp["identifier"] for bp in created_blueprints]
 
-    if
-        for error in
+    if blueprint_errors:
+        for error in blueprint_errors:
            if isinstance(error, httpx.HTTPStatusError):
                logger.warning(
                    f"Failed to create resources: {error.response.text}. Rolling back changes..."
                )
 
-        raise AbortDefaultCreationError(
-
+        raise AbortDefaultCreationError(
+            created_blueprints_identifiers, blueprint_errors
+        )
+
     try:
         for patch_stage in blueprint_patches:
             await asyncio.gather(
@@ -125,72 +157,67 @@ async def _create_resources(
                 )
             )
 
-
-
-
-
-
+    except httpx.HTTPStatusError as err:
+        logger.error(f"Failed to create resources: {err.response.text}. continuing...")
+        raise AbortDefaultCreationError(created_blueprints_identifiers, [err])
+    try:
+        created_actions, actions_errors = await gather_and_split_errors_from_results(
+            (
+                port_client.create_action(action, should_log=False)
+                for action in defaults.actions
            )
        )
 
-
-
-
-
-
+        created_scorecards, scorecards_errors = (
+            await gather_and_split_errors_from_results(
+                (
+                    port_client.create_scorecard(
+                        blueprint_scorecards["blueprint"], action, should_log=False
+                    )
+                    for blueprint_scorecards in defaults.scorecards
+                    for action in blueprint_scorecards["data"]
+                )
            )
        )
 
-
-
-            return_exceptions=True,
+        created_pages, pages_errors = await gather_and_split_errors_from_results(
+            (port_client.create_page(page, should_log=False) for page in defaults.pages)
        )
 
-
-
-            for
-            if not isinstance(result, BaseException)
-        ]
-
-        pages_errors = [
-            result for result in create_pages_result if isinstance(result, Exception)
-        ]
-
-        if pages_errors:
-            for error in pages_errors:
+        errors = actions_errors + scorecards_errors + pages_errors
+        if errors:
+            for error in errors:
                if isinstance(error, httpx.HTTPStatusError):
                    logger.warning(
-                        f"Failed to create
+                        f"Failed to create resource: {error.response.text}. continuing..."
                    )
 
-
-
-        )
-
-        await port_client.create_integration(
-            integration_config.integration.type,
-            integration_config.event_listener.to_request(),
-            port_app_config=defaults.port_app_config,
-        )
-    except httpx.HTTPStatusError as err:
-        logger.error(
-            f"Failed to create resources: {err.response.text}. Rolling back changes..."
-        )
-        raise AbortDefaultCreationError(created_blueprints, [err], created_pages)
+    except Exception as err:
+        logger.error(f"Failed to create resources: {err}. continuing...")
 
 
 async def _initialize_defaults(
     config_class: Type[PortAppConfig], integration_config: IntegrationConfiguration
 ) -> None:
     port_client = ocean.port_client
-    defaults = get_port_integration_defaults(
+    defaults = get_port_integration_defaults(
+        config_class, integration_config.resources_path
+    )
     if not defaults:
-        logger.warning("No defaults found. Skipping...")
+        logger.warning("No defaults found. Skipping initialization...")
         return None
 
+    if defaults.port_app_config:
+        await _initialize_required_integration_settings(
+            port_client, defaults.port_app_config, integration_config
+        )
+
+    if not integration_config.initialize_port_resources:
+        return
+
     try:
         logger.info("Found default resources, starting creation process")
-        await _create_resources(port_client, defaults
+        await _create_resources(port_client, defaults)
     except AbortDefaultCreationError as e:
         logger.warning(
             f"Failed to create resources. Rolling back blueprints : {e.blueprints_to_rollback}"
@@ -205,28 +232,12 @@ async def _initialize_defaults(
                 for identifier in e.blueprints_to_rollback
             )
         )
-        if e.pages_to_rollback:
-            logger.warning(
-                f"Failed to create resources. Rolling back pages : {e.pages_to_rollback}"
-            )
-            await asyncio.gather(
-                *(
-                    port_client.delete_page(
-                        identifier,
-                    )
-                    for identifier in e.pages_to_rollback
-                )
-            )
-
         raise ExceptionGroup(str(e), e.errors)
 
 
 def initialize_defaults(
     config_class: Type[PortAppConfig], integration_config: IntegrationConfiguration
 ) -> None:
-
-
-
-    )
-    except Exception as e:
-        logger.debug(f"Failed to initialize defaults, skipping... Error: {e}")
+    asyncio.new_event_loop().run_until_complete(
+        _initialize_defaults(config_class, integration_config)
+    )
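The recurring pattern above replaces asyncio.gather(..., return_exceptions=True) plus manual result filtering with the new gather_and_split_errors_from_results helper imported from port_ocean/core/utils/utils.py. That helper is not part of this excerpt; based solely on how it is called here (an iterable of awaitables plus an optional validity predicate, returning valid results and errors separately), a minimal sketch of the assumed behavior looks like this:

import asyncio
from typing import Any, Awaitable, Callable, Iterable


# Sketch only: the real helper lives in port_ocean/core/utils/utils.py and may differ in details.
async def gather_and_split_errors_from_results(
    tasks: Iterable[Awaitable[Any]],
    result_threshold_validation: Callable[[Any], bool] | None = None,
) -> tuple[list[Any], list[Any]]:
    valid_items: list[Any] = []
    errors: list[Any] = []
    for item in await asyncio.gather(*tasks, return_exceptions=True):
        if isinstance(item, BaseException):
            errors.append(item)
        elif result_threshold_validation and not result_threshold_validation(item):
            # Results that fail the predicate (e.g. a non-Blueprint response) do not count as valid.
            errors.append(item)
        else:
            valid_items.append(item)
    return valid_items, errors

Under that assumption, this is what lets _create_resources drop the hand-rolled isinstance(result, BaseException) list comprehensions and the page rollback bookkeeping that existed in 0.5.6.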
port_ocean/core/event_listener/__init__.py

@@ -16,12 +16,18 @@ from port_ocean.core.event_listener.once import (
     OnceEventListener,
 )
 
+from port_ocean.core.event_listener.webhooks_only import (
+    WebhooksOnlyEventListener,
+    WebhooksOnlyEventListenerSettings,
+)
+
 
 EventListenerSettingsType = (
     HttpEventListenerSettings
     | KafkaEventListenerSettings
     | PollingEventListenerSettings
     | OnceEventListenerSettings
+    | WebhooksOnlyEventListenerSettings
 )
 
 __all__ = [
@@ -34,4 +40,6 @@ __all__ = [
     "PollingEventListenerSettings",
     "OnceEventListener",
     "OnceEventListenerSettings",
+    "WebhooksOnlyEventListener",
+    "WebhooksOnlyEventListenerSettings",
 ]
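The new union member and __all__ entries come from port_ocean/core/event_listener/webhooks_only.py (+41 lines, not included in this excerpt). Judging from the factory case later in this diff and the new should_resync flag on EventListenerSettings, a plausible sketch of that listener is a settings class that opts out of resyncs and a listener whose _start is a no-op; treat the details below as assumptions, not the actual file contents:

from typing import Literal

from loguru import logger

from port_ocean.core.event_listener.base import (
    BaseEventListener,
    EventListenerEvents,
    EventListenerSettings,
)


class WebhooksOnlyEventListenerSettings(EventListenerSettings):
    type: Literal["WEBHOOKS_ONLY"]
    # Assumption: a webhooks-only integration never schedules its own resync.
    should_resync: bool = False


class WebhooksOnlyEventListener(BaseEventListener):
    """Listener for integrations that only react to live webhook events and never resync."""

    def __init__(
        self,
        events: EventListenerEvents,
        event_listener_config: WebhooksOnlyEventListenerSettings,
    ) -> None:
        super().__init__(events)
        self.event_listener_config = event_listener_config

    async def _start(self) -> None:
        # Nothing to schedule; webhook routes registered by the integration handle all events.
        logger.info("Starting webhooks-only event listener")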
port_ocean/core/event_listener/base.py

@@ -1,11 +1,12 @@
 from abc import abstractmethod
-from asyncio import Task
 from typing import TypedDict, Callable, Any, Awaitable
 
 from pydantic import Extra
 
 from port_ocean.config.base import BaseOceanModel
 from port_ocean.utils.signal import signal_handler
+from port_ocean.context.ocean import ocean
+from port_ocean.utils.misc import IntegrationStateStatus
 
 
 class EventListenerEvents(TypedDict):
@@ -22,7 +23,6 @@ class BaseEventListener:
         events: EventListenerEvents,
     ):
         self.events = events
-        self._tasks_to_close: list[Task[Any]] = []
 
     async def start(self) -> None:
         signal_handler.register(self._stop)
@@ -32,23 +32,62 @@ class BaseEventListener:
     async def _start(self) -> None:
         pass
 
-    def stop(self) -> None:
-        self._stop()
-        for task in self._tasks_to_close:
-            task.cancel()
-
     def _stop(self) -> None:
         """
         Can be used for event listeners that need cleanup before exiting.
         """
         pass
 
+    async def _before_resync(self) -> None:
+        """
+        Can be used for event listeners that need to perform some action before resync.
+        """
+        await ocean.app.resync_state_updater.update_before_resync()
+
+    async def _after_resync(self) -> None:
+        """
+        Can be used for event listeners that need to perform some action after resync.
+        """
+        await ocean.app.resync_state_updater.update_after_resync()
+
+    async def _on_resync_failure(self, e: Exception) -> None:
+        """
+        Can be used for event listeners that need to handle resync failures.
+        """
+        await ocean.app.resync_state_updater.update_after_resync(
+            IntegrationStateStatus.Failed
+        )
+
+    async def _resync(
+        self,
+        resync_args: dict[Any, Any],
+    ) -> None:
+        """
+        Triggers the "on_resync" event.
+        """
+        await self._before_resync()
+        try:
+            await self.events["on_resync"](resync_args)
+            await self._after_resync()
+        except Exception as e:
+            await self._on_resync_failure(e)
+            raise e
+
 
 class EventListenerSettings(BaseOceanModel, extra=Extra.allow):
     type: str
+    should_resync: bool = True
 
-    def
+    def get_changelog_destination_details(self) -> dict[str, Any]:
         """
-
+        Returns the changelog destination configuration for the event listener.
+        By default, returns an empty dict. Only KAFKA and WEBHOOK event listeners need to override this
+        to provide their specific changelog destination details.
+
+        Returns:
+            dict[str, Any]: The changelog destination configuration. For example:
+                - KAFKA returns {"type": "KAFKA"}
+                - WEBHOOK returns {"type": "WEBHOOK", "url": "https://example.com/resync"}
+                - Other event listeners return {}
         """
-        return {
+        return {}
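The new _resync wrapper is what the concrete listeners now call instead of invoking self.events["on_resync"] directly (see the await self._resync(...) changes in http.py, kafka.py and once.py below); it brackets every resync with the resync-state-updater hooks. A custom listener built on this base class only needs to route its trigger through _resync; in the hypothetical sketch below, the CRON type and schedule field are illustrative and not part of Ocean:

from typing import Any, Literal

from port_ocean.core.event_listener.base import (
    BaseEventListener,
    EventListenerEvents,
    EventListenerSettings,
)


class CronEventListenerSettings(EventListenerSettings):
    type: Literal["CRON"]  # illustrative listener type, not shipped with Ocean
    schedule: str = "0 * * * *"

    def get_changelog_destination_details(self) -> dict[str, Any]:
        # No changelog destination: Port will not push change events to this listener.
        return {}


class CronEventListener(BaseEventListener):
    def __init__(
        self, events: EventListenerEvents, config: CronEventListenerSettings
    ) -> None:
        super().__init__(events)
        self.config = config

    async def _start(self) -> None:
        # Routing the trigger through _resync (instead of self.events["on_resync"]) gives the
        # listener the _before_resync/_after_resync/_on_resync_failure state updates for free.
        await self._resync({})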
port_ocean/core/event_listener/factory.py

@@ -17,6 +17,10 @@ from port_ocean.core.event_listener.base import (
     BaseEventListener,
     EventListenerEvents,
 )
+from port_ocean.core.event_listener.webhooks_only import (
+    WebhooksOnlyEventListener,
+    WebhooksOnlyEventListenerSettings,
+)
 from port_ocean.exceptions.core import UnsupportedEventListenerTypeException
 
 
@@ -88,7 +92,11 @@ class EventListenerFactory:
                     config, OnceEventListenerSettings
                 ), assert_message.format(type(config))
                 event_listener = OnceEventListener(wrapped_events, config)
-
+            case "webhooks_only":
+                assert isinstance(
+                    config, WebhooksOnlyEventListenerSettings
+                ), assert_message.format(type(config))
+                event_listener = WebhooksOnlyEventListener(wrapped_events, config)
             case _:
                 raise UnsupportedEventListenerTypeException(
                     f"Event listener {_type} not supported"
port_ocean/core/event_listener/http.py

@@ -27,9 +27,17 @@ class HttpEventListenerSettings(EventListenerSettings):
     type: Literal["WEBHOOK"]
     app_host: AnyHttpUrl = Field(..., sensitive=True)
 
-    def
+    def get_changelog_destination_details(self) -> dict[str, Any]:
+        """
+        Returns the changelog destination configuration for the webhook event listener.
+        For webhook event listeners, this specifies the URL where changelog events should be sent.
+
+        Returns:
+            dict[str, Any]: A dictionary with the webhook URL where changelog events should be sent,
+                constructed by appending "/resync" to the app_host.
+        """
         return {
-
+            "type": self.type,
             "url": self.app_host + "/resync",
         }
 
@@ -64,6 +72,6 @@ class HttpEventListener(BaseEventListener):
 
         @target_channel_router.post("/resync")
         async def resync() -> None:
-            await self.
+            await self._resync({})
 
         ocean.app.fast_api_app.include_router(target_channel_router)
port_ocean/core/event_listener/kafka.py

@@ -38,12 +38,27 @@ class KafkaEventListenerSettings(EventListenerSettings):
     """
 
     type: Literal["KAFKA"]
-    brokers: str =
+    brokers: str = (
+        "b-1-public.publicclusterprod.t9rw6w.c1.kafka.eu-west-1.amazonaws.com:9196,b-2-public.publicclusterprod.t9rw6w.c1.kafka.eu-west-1.amazonaws.com:9196,b-3-public.publicclusterprod.t9rw6w.c1.kafka.eu-west-1.amazonaws.com:9196"
+    )
     security_protocol: str = "SASL_SSL"
     authentication_mechanism: str = "SCRAM-SHA-512"
     kafka_security_enabled: bool = True
     consumer_poll_timeout: int = 1
 
+    def get_changelog_destination_details(self) -> dict[str, Any]:
+        """
+        Returns the changelog destination configuration for the Kafka event listener.
+        For Kafka event listeners, this specifies that changelog events should be sent via Kafka.
+
+        Returns:
+            dict[str, Any]: A dictionary with type "KAFKA" to indicate that changelog events
+                should be sent through the Kafka message bus.
+        """
+        return {
+            "type": self.type,
+        }
+
 
 class KafkaEventListener(BaseEventListener):
     """
@@ -97,9 +112,13 @@ class KafkaEventListener(BaseEventListener):
             return False
 
         integration_identifier = after.get("identifier")
-        if integration_identifier
-
-
+        if integration_identifier != self.integration_identifier:
+            return False
+
+        if after.get("updatedAt") == after.get("resyncState", {}).get("updatedAt"):
+            return False
+
+        if "change.log" in topic:
             return msg_value.get("changelogDestination", {}).get("type", "") == "KAFKA"
 
         return False
@@ -120,7 +139,7 @@ class KafkaEventListener(BaseEventListener):
 
         if "change.log" in topic and message is not None:
             try:
-                await self.
+                await self._resync(message)
             except Exception as e:
                 _type, _, tb = sys.exc_info()
                 logger.opt(exception=(_type, None, tb)).error(
port_ocean/core/event_listener/once.py

@@ -1,3 +1,4 @@
+import datetime
 import signal
 from typing import Literal, Any
 
@@ -9,6 +10,9 @@ from port_ocean.core.event_listener.base import (
     EventListenerSettings,
 )
 from port_ocean.utils.repeat import repeat_every
+from port_ocean.context.ocean import ocean
+from port_ocean.utils.time import convert_str_to_utc_datetime, convert_to_minutes
+from port_ocean.utils.misc import IntegrationStateStatus
 
 
 class OnceEventListenerSettings(EventListenerSettings):
@@ -19,9 +23,6 @@ class OnceEventListenerSettings(EventListenerSettings):
 
     type: Literal["ONCE"]
 
-    def to_request(self) -> dict[str, Any]:
-        return {}
-
 
 class OnceEventListener(BaseEventListener):
     """
@@ -41,6 +42,97 @@ class OnceEventListener(BaseEventListener):
     ):
         super().__init__(events)
         self.event_listener_config = event_listener_config
+        self.cached_integration: dict[str, Any] | None = None
+
+    async def get_current_integration_cached(self) -> dict[str, Any]:
+        if self.cached_integration:
+            return self.cached_integration
+
+        self.cached_integration = await ocean.port_client.get_current_integration()
+        return self.cached_integration
+
+    async def get_saas_resync_initialization_and_interval(
+        self,
+    ) -> tuple[int | None, datetime.datetime | None]:
+        """
+        Get the scheduled resync interval and the last updated time of the integration config for the saas application.
+        interval is the saas configured resync interval time.
+        start_time is the last updated time of the integration config.
+        return: (interval, start_time)
+        """
+        if not ocean.app.is_saas():
+            return (None, None)
+
+        try:
+            integration = await self.get_current_integration_cached()
+        except Exception as e:
+            logger.exception(f"Error occurred while getting current integration {e}")
+            return (None, None)
+
+        interval_str = (
+            integration.get("spec", {})
+            .get("appSpec", {})
+            .get("scheduledResyncInterval")
+        )
+
+        if not interval_str:
+            logger.error(
+                "Unexpected: scheduledResyncInterval not found for Saas integration, Cannot predict the next resync"
+            )
+            return (None, None)
+
+        last_updated_saas_integration_config_str = integration.get(
+            "statusInfo", {}
+        ).get("updatedAt")
+
+        # we use the last updated time of the integration config as the start time since in saas application the interval is configured by the user from the portal
+        if not last_updated_saas_integration_config_str:
+            logger.error(
+                "Unexpected: updatedAt not found for Saas integration, Cannot predict the next resync"
+            )
+            return (None, None)
+
+        return (
+            convert_to_minutes(interval_str),
+            convert_str_to_utc_datetime(last_updated_saas_integration_config_str),
+        )
+
+    async def _before_resync(self) -> None:
+        if not ocean.app.is_saas():
+            # in case of non-saas, we still want to update the state before and after the resync
+            await super()._before_resync()
+            return
+
+        (interval, start_time) = (
+            await self.get_saas_resync_initialization_and_interval()
+        )
+        await ocean.app.resync_state_updater.update_before_resync(interval, start_time)
+
+    async def _after_resync(self) -> None:
+        if not ocean.app.is_saas():
+            # in case of non-saas, we still want to update the state before and after the resync
+            await super()._after_resync()
+            return
+
+        (interval, start_time) = (
+            await self.get_saas_resync_initialization_and_interval()
+        )
+        await ocean.app.resync_state_updater.update_after_resync(
+            IntegrationStateStatus.Completed, interval, start_time
+        )
+
+    async def _on_resync_failure(self, e: Exception) -> None:
+        if not ocean.app.is_saas():
+            # in case of non-saas, we still want to update the state before and after the resync
+            await super()._after_resync()
+            return
+
+        (interval, start_time) = (
+            await self.get_saas_resync_initialization_and_interval()
+        )
+        await ocean.app.resync_state_updater.update_after_resync(
+            IntegrationStateStatus.Failed, interval, start_time
+        )
 
     async def _start(self) -> None:
         """
@@ -53,7 +145,7 @@ class OnceEventListener(BaseEventListener):
         async def resync_and_exit() -> None:
             logger.info("Once event listener started")
             try:
-                await self.
+                await self._resync({})
             except Exception:
                 # we catch all exceptions here to make sure the application will exit gracefully
                 logger.exception("Error occurred while resyncing")