port-ocean 0.28.5__py3-none-any.whl → 0.29.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- integrations/_infra/Dockerfile.Deb +1 -0
- integrations/_infra/Dockerfile.local +1 -0
- port_ocean/clients/port/authentication.py +19 -0
- port_ocean/clients/port/client.py +3 -0
- port_ocean/clients/port/mixins/actions.py +93 -0
- port_ocean/clients/port/mixins/blueprints.py +0 -12
- port_ocean/clients/port/mixins/entities.py +79 -44
- port_ocean/clients/port/mixins/integrations.py +7 -2
- port_ocean/config/settings.py +35 -3
- port_ocean/context/ocean.py +7 -5
- port_ocean/core/defaults/initialize.py +12 -5
- port_ocean/core/event_listener/__init__.py +7 -0
- port_ocean/core/event_listener/actions_only.py +42 -0
- port_ocean/core/event_listener/base.py +4 -1
- port_ocean/core/event_listener/factory.py +18 -9
- port_ocean/core/event_listener/http.py +4 -3
- port_ocean/core/event_listener/kafka.py +3 -2
- port_ocean/core/event_listener/once.py +5 -2
- port_ocean/core/event_listener/polling.py +4 -3
- port_ocean/core/event_listener/webhooks_only.py +3 -2
- port_ocean/core/handlers/actions/__init__.py +7 -0
- port_ocean/core/handlers/actions/abstract_executor.py +150 -0
- port_ocean/core/handlers/actions/execution_manager.py +434 -0
- port_ocean/core/handlers/entity_processor/jq_entity_processor.py +479 -17
- port_ocean/core/handlers/entity_processor/jq_input_evaluator.py +137 -0
- port_ocean/core/handlers/port_app_config/models.py +4 -2
- port_ocean/core/handlers/webhook/abstract_webhook_processor.py +16 -0
- port_ocean/core/handlers/webhook/processor_manager.py +30 -12
- port_ocean/core/integrations/mixins/sync_raw.py +4 -4
- port_ocean/core/integrations/mixins/utils.py +250 -29
- port_ocean/core/models.py +35 -2
- port_ocean/core/utils/utils.py +16 -5
- port_ocean/exceptions/execution_manager.py +22 -0
- port_ocean/helpers/retry.py +4 -40
- port_ocean/log/logger_setup.py +2 -2
- port_ocean/ocean.py +30 -4
- port_ocean/tests/clients/port/mixins/test_entities.py +71 -5
- port_ocean/tests/core/event_listener/test_kafka.py +14 -7
- port_ocean/tests/core/handlers/actions/test_execution_manager.py +837 -0
- port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +932 -1
- port_ocean/tests/core/handlers/entity_processor/test_jq_input_evaluator.py +932 -0
- port_ocean/tests/core/handlers/webhook/test_processor_manager.py +3 -1
- port_ocean/tests/core/utils/test_get_port_diff.py +164 -0
- port_ocean/tests/helpers/test_retry.py +241 -1
- port_ocean/tests/utils/test_cache.py +240 -0
- port_ocean/utils/cache.py +45 -9
- {port_ocean-0.28.5.dist-info → port_ocean-0.29.0.dist-info}/METADATA +2 -1
- {port_ocean-0.28.5.dist-info → port_ocean-0.29.0.dist-info}/RECORD +51 -41
- {port_ocean-0.28.5.dist-info → port_ocean-0.29.0.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.28.5.dist-info → port_ocean-0.29.0.dist-info}/WHEEL +0 -0
- {port_ocean-0.28.5.dist-info → port_ocean-0.29.0.dist-info}/entry_points.txt +0 -0
port_ocean/core/models.py
CHANGED
@@ -1,11 +1,19 @@
 from dataclasses import dataclass, field
 from enum import Enum, StrEnum
-from typing import Any, TypedDict
-
+from typing import Any, Literal, TypedDict
 from pydantic import BaseModel
 from pydantic.fields import Field


+class EventListenerType(StrEnum):
+    WEBHOOK = "WEBHOOK"
+    KAFKA = "KAFKA"
+    POLLING = "POLLING"
+    ONCE = "ONCE"
+    WEBHOOKS_ONLY = "WEBHOOKS_ONLY"
+    ACTIONS_ONLY = "ACTIONS_ONLY"
+
+
 class CreatePortResourcesOrigin(StrEnum):
     Ocean = "Ocean"
     Port = "Port"
@@ -121,3 +129,28 @@ class EntityPortDiff:
     deleted: list[Entity] = field(default_factory=list)
     modified: list[Entity] = field(default_factory=list)
     created: list[Entity] = field(default_factory=list)
+
+
+class IntegrationFeatureFlag(StrEnum):
+    USE_PROVISIONED_DEFAULTS = "USE_PROVISIONED_DEFAULTS"
+    LAKEHOUSE_ELIGIBLE = "LAKEHOUSE_ELIGIBLE"
+    OCEAN_ACTIONS_PROCESSING_ENABLED = "OCEAN_ACTIONS_PROCESSING_ENABLED"
+
+
+class RunStatus(StrEnum):
+    IN_PROGRESS = "IN_PROGRESS"
+    SUCCESS = "SUCCESS"
+    FAILURE = "FAILURE"
+
+
+class IntegrationActionInvocationPayload(BaseModel):
+    type: Literal["INTEGRATION_ACTION"]
+    installationId: str
+    integrationActionType: str
+    integrationActionExecutionProperties: dict[str, Any] = Field(default_factory=dict)
+
+
+class ActionRun(BaseModel):
+    id: str
+    status: RunStatus
+    payload: IntegrationActionInvocationPayload
port_ocean/core/utils/utils.py
CHANGED
@@ -4,7 +4,7 @@ import json
 from typing import Iterable, Any, TypeVar, Callable, Awaitable

 from loguru import logger
-from pydantic import parse_obj_as, ValidationError
+from pydantic import BaseModel, parse_obj_as, ValidationError


 from port_ocean.clients.port.client import PortClient
@@ -79,6 +79,19 @@ async def gather_and_split_errors_from_results(
     return valid_items, errors


+def _get_entity_key(entity: Entity) -> tuple[str, str]:
+    identifier = entity.identifier
+    if isinstance(identifier, BaseModel):
+        identifier = identifier.dict()
+
+    key_part = (
+        json.dumps(identifier, sort_keys=True)
+        if isinstance(identifier, dict)
+        else str(identifier)
+    )
+    return key_part, entity.blueprint
+
+
 def get_port_diff(before: Iterable[Entity], after: Iterable[Entity]) -> EntityPortDiff:
     before_dict = {}
     after_dict = {}
@@ -88,12 +101,10 @@ def get_port_diff(before: Iterable[Entity], after: Iterable[Entity]) -> EntityPortDiff:

     # Create dictionaries for before and after lists
     for entity in before:
-
-        before_dict[key] = entity
+        before_dict[_get_entity_key(entity)] = entity

     for entity in after:
-
-        after_dict[key] = entity
+        after_dict[_get_entity_key(entity)] = entity

     # Find created, modified, and deleted objects
     for key, obj in after_dict.items():
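A short sketch of what the new _get_entity_key helper buys get_port_diff: dict and pydantic-model identifiers are normalised to a JSON string with sorted keys, so they become hashable, order-insensitive dictionary keys. The entities below are illustrative, not taken from the package:

from port_ocean.core.models import Entity
from port_ocean.core.utils.utils import get_port_diff

# Illustrative entities; get_port_diff keys them by (normalised identifier, blueprint).
before = [Entity(identifier="svc-1", blueprint="service")]
after = [
    Entity(identifier="svc-1", blueprint="service"),
    Entity(identifier="svc-2", blueprint="service"),
]

diff = get_port_diff(before, after)
assert [entity.identifier for entity in diff.created] == ["svc-2"]
assert diff.deleted == []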
port_ocean/exceptions/execution_manager.py
ADDED
@@ -0,0 +1,22 @@
+class DuplicateActionExecutorError(Exception):
+    """
+    Raised when attempting to register an executor for an action that already has an existing executor.
+    """
+
+    pass
+
+
+class RunAlreadyAcknowledgedError(Exception):
+    """
+    Raised when attempting to acknowledge a run that has already been acknowledged.
+    """
+
+    pass
+
+
+class PartitionKeyNotFoundError(Exception):
+    """
+    Raised when attempting to extract a partition key that is not found in the invocation payload.
+    """
+
+    pass
port_ocean/helpers/retry.py
CHANGED
@@ -257,7 +257,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
             else:
                 response = await transport.handle_async_request(request)

-            await self._log_response_size_async(request, response)
+            self._log_response_size(request, response)

            return response
        except Exception as e:
@@ -345,32 +345,6 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
            return int(content_length)
        return None

-    async def _log_response_size_async(
-        self, request: httpx.Request, response: httpx.Response
-    ) -> None:
-        """Log the size of the response."""
-        if not self._should_log_response_size(request):
-            return
-
-        # Try to get content length from headers first
-        content_length = self._get_content_length(response)
-        if content_length is not None:
-            size_info = content_length
-        else:
-            # If no Content-Length header, try to get actual content size
-            try:
-                actual_size = len(await response.aread())
-                size_info = actual_size
-            except Exception as e:
-                cast(logging.Logger, self._logger).error(
-                    f"Error getting response size: {e}"
-                )
-                return
-
-        cast(logging.Logger, self._logger).info(
-            f"Response for {request.method} {request.url} - Size: {size_info} bytes"
-        )
-
     def _log_response_size(
         self, request: httpx.Request, response: httpx.Response
     ) -> None:
@@ -378,21 +352,11 @@
            return

        content_length = self._get_content_length(response)
-        if content_length is not None:
-            size_info = content_length
-        else:
-            # If no Content-Length header, try to get actual content size
-            try:
-                actual_size = len(response.read())
-                size_info = actual_size
-            except Exception as e:
-                cast(logging.Logger, self._logger).error(
-                    f"Error getting response size: {e}"
-                )
-                return
+        if content_length is None:
+            return

        cast(logging.Logger, self._logger).info(
-            f"Response for {request.method} {request.url} - Size: {size_info} bytes"
+            f"Response for {request.method} {request.url} - Size: {content_length} bytes"
        )

    async def _should_retry_async(self, response: httpx.Response) -> bool:
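The net effect of the retry.py change is that response-size logging now relies only on the Content-Length header and no longer reads the body as a fallback. A minimal sketch of that header-only lookup (the helper below is illustrative, not the package's own method):

import httpx

def content_length_or_none(headers: httpx.Headers) -> int | None:
    # Header-only lookup: no Content-Length means no size is logged.
    value = headers.get("Content-Length")
    return int(value) if value is not None and value.isdigit() else None

assert content_length_or_none(httpx.Headers({"Content-Length": "512"})) == 512
assert content_length_or_none(httpx.Headers({})) is None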
port_ocean/log/logger_setup.py
CHANGED
@@ -61,9 +61,9 @@ def _http_loguru_handler(level: LogLevelType) -> None:

    http_memory_handler = HTTPMemoryHandler()
    signal_handler.register(
-        http_memory_handler.wait_for_lingering_threads, priority=-
+        http_memory_handler.wait_for_lingering_threads, priority=-900
    )
-    signal_handler.register(http_memory_handler.flush, priority=-
+    signal_handler.register(http_memory_handler.flush, priority=-899)

    queue_listener = QueueListener(queue, http_memory_handler)
    queue_listener.start()
port_ocean/ocean.py
CHANGED
@@ -26,6 +26,7 @@ from port_ocean.core.handlers.resync_state_updater import ResyncStateUpdater
 from port_ocean.core.handlers.webhook.processor_manager import (
     LiveEventsProcessorManager,
 )
+from port_ocean.core.handlers.actions.execution_manager import ExecutionManager
 from port_ocean.core.integrations.base import BaseIntegration
 from port_ocean.core.models import ProcessExecutionMode
 from port_ocean.log.sensetive import sensitive_log_filter
@@ -88,6 +89,16 @@ class Ocean:
             max_wait_seconds_before_shutdown=self.config.max_wait_seconds_before_shutdown,
         )

+        self.execution_manager = ExecutionManager(
+            webhook_manager=self.webhook_manager,
+            signal_handler=signal_handler,
+            workers_count=self.config.actions_processor.workers_count,
+            runs_buffer_high_watermark=self.config.actions_processor.runs_buffer_high_watermark,
+            poll_check_interval_seconds=self.config.actions_processor.poll_check_interval_seconds,
+            visibility_timeout_ms=self.config.actions_processor.visibility_timeout_ms,
+            max_wait_seconds_before_shutdown=self.config.max_wait_seconds_before_shutdown,
+        )
+
         self.integration = (
             integration_class(ocean) if integration_class else BaseIntegration(ocean)
         )
@@ -200,6 +211,24 @@
        )
        return None

+    async def _register_addons(self) -> None:
+        if self.base_url and self.config.event_listener.should_process_webhooks:
+            await self.webhook_manager.start_processing_event_messages()
+        else:
+            logger.warning(
+                "No base URL provided, or webhook processing is disabled is this event listener, skipping webhook processing"
+            )
+
+        if (
+            self.config.actions_processor.enabled
+            and self.config.event_listener.should_run_actions
+        ):
+            await self.execution_manager.start_processing_action_runs()
+        else:
+            logger.warning(
+                "Execution agent is not enabled, or actions processing is disabled in this event listener, skipping execution agent setup"
+            )
+
    def initialize_app(self) -> None:
        self.fast_api_app.include_router(self.integration_router, prefix="/integration")
        self.fast_api_app.include_router(
@@ -210,10 +239,7 @@
        async def lifecycle(_: FastAPI) -> AsyncIterator[None]:
            try:
                await self.integration.start()
-                if self.base_url:
-                    await self.webhook_manager.start_processing_event_messages()
-                else:
-                    logger.warning("No base URL provided, skipping webhook processing")
+                await self._register_addons()
                await self._setup_scheduled_resync()
                yield None
            except Exception:
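The new _register_addons hook applies two independent gates: webhook processing requires a base URL plus should_process_webhooks, while the execution manager requires actions_processor.enabled plus should_run_actions. A small sketch of that gating with stand-in config objects (the SimpleNamespace stubs are illustrative; only the attribute names come from the diff):

from types import SimpleNamespace

def addons_to_start(base_url: str | None, config: SimpleNamespace) -> list[str]:
    # Mirrors the two checks performed in Ocean._register_addons.
    started = []
    if base_url and config.event_listener.should_process_webhooks:
        started.append("webhook_manager")
    if config.actions_processor.enabled and config.event_listener.should_run_actions:
        started.append("execution_manager")
    return started

config = SimpleNamespace(
    event_listener=SimpleNamespace(should_process_webhooks=True, should_run_actions=True),
    actions_processor=SimpleNamespace(enabled=True),
)
assert addons_to_start("https://ocean.example.com", config) == ["webhook_manager", "execution_manager"]
# Action processing does not depend on a base URL, unlike webhook processing.
assert addons_to_start(None, config) == ["execution_manager"]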
port_ocean/tests/clients/port/mixins/test_entities.py
CHANGED
@@ -179,7 +179,7 @@ async def test_search_entities_uses_datasource_route_when_query_is_none(
 ) -> None:
     """Test that search_entities uses datasource route when query is None"""
     mock_response = MagicMock()
-    mock_response.json.return_value = {"entities": []}
+    mock_response.json.return_value = {"entities": [], "next": None}
     mock_response.is_error = False
     mock_response.status_code = 200
     mock_response.headers = {}
@@ -208,8 +208,74 @@
        == "https://api.getport.io/v1/blueprints/entities/datasource-entities"
    )

-
-
-
+    sent_json = call_args[1]["json"]
+    assert sent_json["datasource_prefix"] == "port-ocean/test-integration/"
+    assert sent_json["datasource_suffix"] == "/test-identifier/sync"
+
+
+async def test_search_entities_uses_datasource_route_when_query_is_none_two_pages(
+    entity_client: EntityClientMixin,
+) -> None:
+    """Test that search_entities uses datasource route when query is None"""
+    # First response with pagination token
+    mock_response_first = MagicMock()
+    mock_response_first.json.return_value = {
+        "entities": [
+            Entity(identifier="entity_1", blueprint="entity_1"),
+            Entity(identifier="entity_2", blueprint="entity_2"),
+        ],
+        "next": "next_page_token",
+    }
+    mock_response_first.is_error = False
+    mock_response_first.status_code = 200
+    mock_response_first.headers = {}
+
+    # Second response without pagination token (end of pagination)
+    mock_response_second = MagicMock()
+    mock_response_second.json.return_value = {
+        "entities": [Entity(identifier="entity_3", blueprint="entity_3")],
+        "next": None,
    }
-
+    mock_response_second.is_error = False
+    mock_response_second.status_code = 200
+    mock_response_second.headers = {}
+
+    # Mock the client to return different responses for each call
+    entity_client.client.post = AsyncMock(side_effect=[mock_response_first, mock_response_second])  # type: ignore
+    entity_client.auth.headers = AsyncMock(return_value={"Authorization": "Bearer test"})  # type: ignore
+
+    entity_client.auth.integration_type = "test-integration"
+    entity_client.auth.integration_identifier = "test-identifier"
+    entity_client.auth.api_url = "https://api.getport.io/v1"
+
+    mock_user_agent_type = MagicMock()
+    mock_user_agent_type.value = "sync"
+
+    entities = await entity_client.search_entities(
+        user_agent_type=mock_user_agent_type,
+        query=None,
+    )
+
+    # Should call the datasource-entities endpoint exactly twice for pagination
+    assert entity_client.client.post.await_count == 2
+    assert len(entities) == 3
+
+    # Check first call
+    first_call_args = entity_client.client.post.call_args_list[0]
+    assert (
+        first_call_args[0][0]
+        == "https://api.getport.io/v1/blueprints/entities/datasource-entities"
+    )
+    first_sent_json = first_call_args[1]["json"]
+    assert first_sent_json["datasource_prefix"] == "port-ocean/test-integration/"
+    assert first_sent_json["datasource_suffix"] == "/test-identifier/sync"
+
+    # Check second call
+    second_call_args = entity_client.client.post.call_args_list[1]
+    assert (
+        second_call_args[0][0]
+        == "https://api.getport.io/v1/blueprints/entities/datasource-entities"
+    )
+    second_sent_json = second_call_args[1]["json"]
+    assert second_sent_json["datasource_prefix"] == "port-ocean/test-integration/"
+    assert second_sent_json["datasource_suffix"] == "/test-identifier/sync"
port_ocean/tests/core/event_listener/test_kafka.py
CHANGED
@@ -1,11 +1,12 @@
 from port_ocean.core.event_listener.kafka import KafkaEventListenerSettings
+from port_ocean.core.models import EventListenerType
 import pytest
 from pydantic import ValidationError


 def test_default_kafka_settings() -> None:
     """Test default values are properly set"""
-    config = KafkaEventListenerSettings(type=
+    config = KafkaEventListenerSettings(type=EventListenerType.KAFKA)
     assert config.type == "KAFKA"
     assert config.security_protocol == "SASL_SSL"
     assert config.authentication_mechanism == "SCRAM-SHA-512"
@@ -17,28 +18,32 @@ def test_default_kafka_settings() -> None:
 def test_brokers_json_array_parsing() -> None:
     """Test that JSON array strings get converted to comma-separated"""
     json_brokers = '["broker1:9092", "broker2:9092", "broker3:9092"]'
-    config = KafkaEventListenerSettings(
+    config = KafkaEventListenerSettings(
+        type=EventListenerType.KAFKA, brokers=json_brokers
+    )
     assert config.brokers == "broker1:9092,broker2:9092,broker3:9092"


 def test_brokers_regular_string_unchanged() -> None:
     """Test that regular comma-separated strings pass through unchanged"""
     regular_brokers = "broker1:9092,broker2:9092"
-    config = KafkaEventListenerSettings(
+    config = KafkaEventListenerSettings(
+        type=EventListenerType.KAFKA, brokers=regular_brokers
+    )
     assert config.brokers == regular_brokers


 def test_brokers_malformed_json_unchanged() -> None:
     """Test that malformed JSON strings don't break validation"""
     bad_json = "[broker1:9092, broker2:9092"
-    config = KafkaEventListenerSettings(type=
+    config = KafkaEventListenerSettings(type=EventListenerType.KAFKA, brokers=bad_json)
     assert config.brokers == bad_json


 def test_custom_values() -> None:
     """Test overriding default values"""
     config = KafkaEventListenerSettings(
-        type=
+        type=EventListenerType.KAFKA,
         brokers="custom:9092",
         security_protocol="PLAINTEXT",
         authentication_mechanism="PLAIN",
@@ -60,11 +65,13 @@ def test_type_literal_validation() -> None:

 def test_empty_brokers_array() -> None:
     """Test empty JSON array becomes empty string"""
-    config = KafkaEventListenerSettings(type=
+    config = KafkaEventListenerSettings(type=EventListenerType.KAFKA, brokers="[]")
     assert config.brokers == ""


 def test_single_broker_array() -> None:
     """Test single broker in JSON array"""
-    config = KafkaEventListenerSettings(
+    config = KafkaEventListenerSettings(
+        type=EventListenerType.KAFKA, brokers='["single:9092"]'
+    )
     assert config.brokers == "single:9092"