port-ocean 0.28.2__py3-none-any.whl → 0.29.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. integrations/_infra/Dockerfile.Deb +6 -1
  2. integrations/_infra/Dockerfile.local +1 -0
  3. port_ocean/clients/port/authentication.py +19 -0
  4. port_ocean/clients/port/client.py +3 -0
  5. port_ocean/clients/port/mixins/actions.py +93 -0
  6. port_ocean/clients/port/mixins/blueprints.py +0 -12
  7. port_ocean/clients/port/mixins/entities.py +79 -44
  8. port_ocean/clients/port/mixins/integrations.py +7 -2
  9. port_ocean/config/settings.py +35 -3
  10. port_ocean/context/ocean.py +7 -5
  11. port_ocean/core/defaults/initialize.py +12 -5
  12. port_ocean/core/event_listener/__init__.py +7 -0
  13. port_ocean/core/event_listener/actions_only.py +42 -0
  14. port_ocean/core/event_listener/base.py +4 -1
  15. port_ocean/core/event_listener/factory.py +18 -9
  16. port_ocean/core/event_listener/http.py +4 -3
  17. port_ocean/core/event_listener/kafka.py +3 -2
  18. port_ocean/core/event_listener/once.py +5 -2
  19. port_ocean/core/event_listener/polling.py +4 -3
  20. port_ocean/core/event_listener/webhooks_only.py +3 -2
  21. port_ocean/core/handlers/actions/__init__.py +7 -0
  22. port_ocean/core/handlers/actions/abstract_executor.py +150 -0
  23. port_ocean/core/handlers/actions/execution_manager.py +434 -0
  24. port_ocean/core/handlers/entity_processor/jq_entity_processor.py +479 -17
  25. port_ocean/core/handlers/entity_processor/jq_input_evaluator.py +137 -0
  26. port_ocean/core/handlers/port_app_config/models.py +4 -2
  27. port_ocean/core/handlers/resync_state_updater/updater.py +4 -2
  28. port_ocean/core/handlers/webhook/abstract_webhook_processor.py +16 -0
  29. port_ocean/core/handlers/webhook/processor_manager.py +30 -12
  30. port_ocean/core/integrations/mixins/sync_raw.py +10 -5
  31. port_ocean/core/integrations/mixins/utils.py +250 -29
  32. port_ocean/core/models.py +35 -2
  33. port_ocean/core/utils/utils.py +16 -5
  34. port_ocean/exceptions/execution_manager.py +22 -0
  35. port_ocean/helpers/metric/metric.py +1 -1
  36. port_ocean/helpers/retry.py +4 -40
  37. port_ocean/log/logger_setup.py +2 -2
  38. port_ocean/ocean.py +31 -5
  39. port_ocean/tests/clients/port/mixins/test_entities.py +71 -5
  40. port_ocean/tests/core/event_listener/test_kafka.py +14 -7
  41. port_ocean/tests/core/handlers/actions/test_execution_manager.py +837 -0
  42. port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +932 -1
  43. port_ocean/tests/core/handlers/entity_processor/test_jq_input_evaluator.py +932 -0
  44. port_ocean/tests/core/handlers/webhook/test_processor_manager.py +3 -1
  45. port_ocean/tests/core/utils/test_get_port_diff.py +164 -0
  46. port_ocean/tests/helpers/test_retry.py +241 -1
  47. port_ocean/tests/utils/test_cache.py +240 -0
  48. port_ocean/utils/cache.py +45 -9
  49. {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/METADATA +2 -1
  50. {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/RECORD +53 -43
  51. {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/LICENSE.md +0 -0
  52. {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/WHEEL +0 -0
  53. {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/entry_points.txt +0 -0
port_ocean/core/integrations/mixins/utils.py CHANGED
@@ -1,13 +1,21 @@
+import asyncio
+import json
+import multiprocessing
+import os
+import re
+import shutil
+import stat
+import subprocess
+import tempfile
 from contextlib import contextmanager
-from typing import Awaitable, Generator, Callable, cast
+from typing import Any, AsyncGenerator, Awaitable, Callable, Generator, cast
 
+import ijson
 from loguru import logger
 
-import asyncio
-import multiprocessing
-
+from port_ocean.clients.port.utils import _http_client as _port_http_client
+from port_ocean.context.ocean import ocean
 from port_ocean.core.handlers.entity_processor.jq_entity_processor import JQEntityProcessor
-from port_ocean.core.handlers.port_app_config.models import ResourceConfig
 from port_ocean.core.ocean_types import (
     ASYNC_GENERATOR_RESYNC_TYPE,
     RAW_RESULT,
@@ -20,11 +28,58 @@ from port_ocean.exceptions.core import (
     OceanAbortException,
     KindNotImplementedException,
 )
-
-from port_ocean.utils.async_http import _http_client
-from port_ocean.clients.port.utils import _http_client as _port_http_client
 from port_ocean.helpers.metric.metric import MetricType, MetricPhase
-from port_ocean.context.ocean import ocean
+from port_ocean.utils.async_http import _http_client
+
+def _process_path_type_items(
+    result: RAW_RESULT, items_to_parse: str | None = None
+) -> RAW_RESULT:
+    """
+    Process items in the result array to check for "__type": "path" fields.
+    If found, read the file contents and load them into a "content" field.
+    Skip processing if we're on the items_to_parse branch.
+    """
+    if not isinstance(result, list):
+        return result
+
+    # Skip processing if we're on the items_to_parse branch
+    if items_to_parse:
+        return result
+
+    processed_result = []
+    for item in result:
+        if isinstance(item, dict) and item.get("__type") == "path":
+            try:
+                # Read the file content and parse as JSON
+                file_path = item.get("file", {}).get("content", {}).get("path")
+                if file_path and os.path.exists(file_path):
+                    with open(file_path, "r") as f:
+                        content = json.loads(f.read())
+                    # Create a copy of the item with the content field
+                    processed_item = item.copy()
+                    processed_item["file"]["content"] = content
+                    processed_result.append(processed_item)
+                else:
+                    # If file doesn't exist, keep the original item
+                    processed_result.append(item)
+            except (json.JSONDecodeError, IOError, OSError) as e:
+                if isinstance(item, dict) and item.get("file") is not None:
+                    content = item["file"].get("content") if isinstance(item["file"].get("content"), dict) else {}
+                    data_path = content.get("path", None)
+                    logger.warning(
+                        f"Failed to read or parse file content for path {data_path}: {e}"
+                    )
+                else:
+                    logger.warning(
+                        f"Failed to read or parse file content for unknown path: {e}. item: {json.dumps(item)}"
+                    )
+                # Keep the original item if there's an error
+                processed_result.append(item)
+        else:
+            # Keep non-path type items as is
+            processed_result.append(item)
+
+    return processed_result
 
 @contextmanager
 def resync_error_handling() -> Generator[None, None, None]:
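Note: _process_path_type_items above defines a small contract: list items shaped like {"__type": "path", "file": {"content": {"path": ...}}} get their file contents read and inlined. A minimal sketch of that contract (the temp file, payload, and the private-helper import are illustrative only, not part of this diff):

    import json
    import tempfile

    from port_ocean.core.integrations.mixins.utils import _process_path_type_items

    # Write a hypothetical raw result to disk, as an integration might.
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
        json.dump({"name": "svc-a"}, f)
        path = f.name

    items = [{"__type": "path", "file": {"content": {"path": path}}}]
    processed = _process_path_type_items(items)
    assert processed[0]["file"]["content"] == {"name": "svc-a"}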
@@ -43,15 +98,16 @@ def resync_error_handling() -> Generator[None, None, None]:
 
 
 async def resync_function_wrapper(
-    fn: Callable[[str], Awaitable[RAW_RESULT]], kind: str
+    fn: Callable[[str], Awaitable[RAW_RESULT]], kind: str, items_to_parse: str | None = None
 ) -> RAW_RESULT:
     with resync_error_handling():
         results = await fn(kind)
-        return validate_result(results)
+        validated_results = validate_result(results)
+        return _process_path_type_items(validated_results, items_to_parse)
 
 
 async def resync_generator_wrapper(
-    fn: Callable[[str], ASYNC_GENERATOR_RESYNC_TYPE], kind: str, items_to_parse: str | None = None
+    fn: Callable[[str], ASYNC_GENERATOR_RESYNC_TYPE], kind: str, items_to_parse_name: str, items_to_parse: str | None = None
 ) -> ASYNC_GENERATOR_RESYNC_TYPE:
     generator = fn(kind)
     errors = []
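For context, these wrappers run around an integration's resync handlers; a sketch of a handler whose return value would flow through resync_function_wrapper (kind and payload invented, decorator per the standard Ocean API):

    from typing import Any

    from port_ocean.context.ocean import ocean

    @ocean.on_resync()
    async def on_resync(kind: str) -> list[dict[str, Any]]:
        # The wrapper validates this result and inlines any "__type": "path" items.
        return [{"id": "svc-1", "kind": kind}]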
@@ -61,27 +117,23 @@
             with resync_error_handling():
                 result = await anext(generator)
                 if not ocean.config.yield_items_to_parse:
-                    yield validate_result(result)
+                    validated_result = validate_result(result)
+                    processed_result = _process_path_type_items(validated_result, items_to_parse)
+                    yield processed_result
                 else:
-                    batch_size = ocean.config.yield_items_to_parse_batch_size
                     if items_to_parse:
                         for data in result:
-                            items = await cast(JQEntityProcessor, ocean.app.integration.entity_processor)._search(data, items_to_parse)
-                            if not isinstance(items, list):
-                                logger.warning(
-                                    f"Failed to parse items for JQ expression {items_to_parse}, Expected list but got {type(items)}."
-                                    f" Skipping..."
-                                )
-                                yield []
-                            raw_data = [{"item": item, **data} for item in items]
-                            while True:
-                                raw_data_batch = raw_data[:batch_size]
-                                yield raw_data_batch
-                                raw_data = raw_data[batch_size:]
-                                if len(raw_data) == 0:
-                                    break
+                            data_path: str | None = None
+                            if isinstance(data, dict) and data.get("file") is not None:
+                                content = data["file"].get("content") if isinstance(data["file"].get("content"), dict) else {}
+                                data_path = content.get("path", None)
+                            bulks = get_items_to_parse_bulks(data, data_path, items_to_parse, items_to_parse_name, data.get("__base_jq", ".file.content"))
+                            async for bulk in bulks:
+                                yield bulk
                     else:
-                        yield validate_result(result)
+                        validated_result = validate_result(result)
+                        processed_result = _process_path_type_items(validated_result, items_to_parse)
+                        yield processed_result
     except OceanAbortException as error:
         errors.append(error)
         ocean.metrics.inc_metric(
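Note: the items_to_parse branch above now delegates batching to get_items_to_parse_bulks (added below) instead of slicing lists in Python. Conceptually, for a selector like .file.content.services[] it still turns one raw result into per-item rows; a simplified sketch of the reshaping (data invented, sibling-field handling approximated):

    raw = {"file": {"content": {"services": [{"name": "a"}, {"name": "b"}]}}}
    selected = raw["file"]["content"]["services"]  # what the jq selector extracts
    # Each emitted row pairs one selected item (keyed by items_to_parse_name,
    # here "item") with the remaining fields of the raw result:
    rows = [{"item": item, **raw} for item in selected]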
@@ -101,6 +153,106 @@ def is_resource_supported(
 ) -> bool:
     return bool(resync_event_mapping[kind] or resync_event_mapping[None])
 
+def _validate_jq_expression(expression: str) -> None:
+    """Validate jq expression to prevent command injection."""
+    try:
+        _ = cast(JQEntityProcessor, ocean.app.integration.entity_processor)._compile(expression)
+    except Exception as e:
+        raise ValueError(f"Invalid jq expression: {e}") from e
+    # Basic validation - reject expressions that could be dangerous
+    # Check for dangerous patterns (include, import, module)
+    dangerous_patterns = ['include', 'import', 'module', 'env', 'debug']
+    for pattern in dangerous_patterns:
+        # Use word boundary regex to match only complete words, not substrings
+        if re.search(rf'\b{re.escape(pattern)}\b', expression):
+            raise ValueError(f"Potentially dangerous pattern '{pattern}' found in jq expression")
+
+    # Special handling for 'env' - block environment variable access
+    if re.search(r'(?<!\w)\$ENV(?:\.)?', expression):
+        raise ValueError("Environment variable access '$ENV.' found in jq expression")
+    if re.search(r'\benv\.', expression):
+        raise ValueError("Environment variable access 'env.' found in jq expression")
+
+def _create_secure_temp_file(suffix: str = ".json") -> str:
+    """Create a secure temporary file with restricted permissions."""
+    # Create temp directory if it doesn't exist
+    temp_dir = "/tmp/ocean"
+    os.makedirs(temp_dir, exist_ok=True)
+
+    # Create temporary file with secure permissions
+    fd, temp_path = tempfile.mkstemp(suffix=suffix, dir=temp_dir)
+    try:
+        # Set restrictive permissions (owner read/write only)
+        os.chmod(temp_path, stat.S_IRUSR | stat.S_IWUSR)
+        return temp_path
+    finally:
+        os.close(fd)
+
+async def get_items_to_parse_bulks(raw_data: dict[Any, Any], data_path: str, items_to_parse: str, items_to_parse_name: str, base_jq: str) -> AsyncGenerator[list[dict[str, Any]], None]:
+    # Validate inputs to prevent command injection
+    _validate_jq_expression(items_to_parse)
+    items_to_parse = items_to_parse.replace(base_jq, ".") if data_path else items_to_parse
+
+    temp_data_path = None
+    temp_output_path = None
+
+    try:
+        # Create secure temporary files
+        if not data_path:
+            raw_data_serialized = json.dumps(raw_data)
+            temp_data_path = _create_secure_temp_file("_input.json")
+            with open(temp_data_path, "w") as f:
+                f.write(raw_data_serialized)
+            data_path = temp_data_path
+
+        temp_output_path = _create_secure_temp_file("_parsed.json")
+
+        delete_target = items_to_parse.split('|', 1)[0].strip() if not items_to_parse.startswith('map(') else '.'
+        base_jq_object_string = await _build_base_jq_object_string(raw_data, base_jq, delete_target)
+
+        # Build jq expression safely
+        jq_expression = f""". as $all
+            | ($all | {items_to_parse}) as $items
+            | $items
+            | map({{{items_to_parse_name}: ., {base_jq_object_string}}})"""
+
+        # Use subprocess with list arguments instead of shell=True
+        jq_path = shutil.which("jq") or "/bin/jq"
+        jq_args = [jq_path, jq_expression, data_path]
+
+        with open(temp_output_path, "w") as output_file:
+            result = subprocess.run(
+                jq_args,
+                stdout=output_file,
+                stderr=subprocess.PIPE,
+                text=True,
+                check=False  # Don't raise exception, handle errors manually
+            )
+
+        if result.returncode != 0:
+            logger.error(f"Failed to parse items for JQ expression {items_to_parse}, error: {result.stderr}")
+            yield []
+        else:
+            with open(temp_output_path, "r") as f:
+                events_stream = get_events_as_a_stream(f, 'item', ocean.config.yield_items_to_parse_batch_size)
+                for items_bulk in events_stream:
+                    yield items_bulk
+
+    except ValueError as e:
+        logger.error(f"Invalid jq expression: {e}")
+        yield []
+    except Exception as e:
+        logger.error(f"Failed to parse items for JQ expression {items_to_parse}, error: {e}")
+        yield []
+    finally:
+        # Cleanup temporary files
+        for temp_path in [temp_data_path, temp_output_path]:
+            if temp_path and os.path.exists(temp_path):
+                try:
+                    os.remove(temp_path)
+                except OSError as e:
+                    logger.warning(f"Failed to cleanup temporary file {temp_path}: {e}")
 
 def unsupported_kind_response(
     kind: str, available_resync_kinds: list[str]
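To make the assembled jq program concrete: for a hypothetical call with items_to_parse=".services[]" and items_to_parse_name="item", the expression handed to the jq binary would look roughly like the string below. The del(...) fragment is produced by _build_base_jq_object_string (next hunk), and other_field stands in for the serialized sibling fields of the raw result; both are approximated here.

    # Illustrative only; the real base_jq_object_string is derived from raw_data.
    jq_expression = """. as $all
        | ($all | .services[]) as $items
        | $items
        | map({item: ., other_field: (($all | del(.services[])) // {})})"""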
@@ -108,6 +260,44 @@ def unsupported_kind_response(
     logger.error(f"Kind {kind} is not supported in this integration")
     return [], [KindNotImplementedException(kind, available_resync_kinds)]
 
+async def _build_base_jq_object_string(raw_data: dict[Any, Any], base_jq: str, delete_target: str) -> str:
+    base_jq_object_before_parsing = await cast(JQEntityProcessor, ocean.app.integration.entity_processor)._search(raw_data, f"{base_jq} = {json.dumps("__all")}")
+    base_jq_object_before_parsing_serialized = json.dumps(base_jq_object_before_parsing)
+    base_jq_object_before_parsing_serialized = base_jq_object_before_parsing_serialized[1:-1] if len(base_jq_object_before_parsing_serialized) >= 2 else base_jq_object_before_parsing_serialized
+    base_jq_object_before_parsing_serialized = base_jq_object_before_parsing_serialized.replace("\"__all\"", f"(($all | del({delete_target})) // {{}})")
+    return base_jq_object_before_parsing_serialized
+
+
+def get_events_as_a_stream(
+    stream: Any,
+    target_items: str = "item",
+    max_buffer_size_mb: int = 1
+) -> Generator[list[dict[str, Any]], None, None]:
+    events = ijson.sendable_list()
+    coro = ijson.items_coro(events, target_items, use_float=True)
+
+    # Convert MB to bytes for the buffer size
+    buffer_size = max_buffer_size_mb * 1024 * 1024
+
+    # Read chunks from the stream until exhausted
+    while True:
+        chunk = stream.read(buffer_size)
+        if not chunk:  # End of stream
+            break
+
+        # Convert string to bytes if necessary (for text mode files)
+        if isinstance(chunk, str):
+            chunk = chunk.encode('utf-8')
+
+        coro.send(chunk)
+        yield events
+        del events[:]
+    try:
+        coro.close()
+    finally:
+        if events:
+            yield events
+            events[:] = []
 
 class ProcessWrapper(multiprocessing.Process):
     def __init__(self, *args, **kwargs):
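A minimal usage sketch for get_events_as_a_stream (input invented): it incrementally parses a top-level JSON array with ijson, yielding whatever complete elements each buffered chunk produced. Copy the yielded list if you need to keep it, since the same backing list is cleared between yields.

    import io

    payload = io.BytesIO(b'[{"id": 1}, {"id": 2}, {"id": 3}]')
    for bulk in get_events_as_a_stream(payload, target_items="item", max_buffer_size_mb=1):
        print(list(bulk))  # this small input arrives in a single chunk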
@@ -134,3 +324,34 @@
             _port_http_client.pop()
     except (RuntimeError, AttributeError):
         pass
+
+class _AiterReader:
+    """
+    Wraps an iterable of byte chunks (e.g., response.iter_bytes())
+    and exposes a .read(n) method that ijson expects.
+    """
+    def __init__(self, iterable):
+        self._iter = iter(iterable)
+        self._buf = bytearray()
+        self._eof = False
+
+    def read(self, n=-1):
+        # If n < 0, return everything until EOF
+        if n is None or n < 0:
+            chunks = [bytes(self._buf)]
+            self._buf.clear()
+            chunks.extend(self._iter)  # drain the iterator
+            return b"".join(chunks)
+
+        # Fill buffer until we have n bytes or hit EOF
+        while len(self._buf) < n and not self._eof:
+            try:
+                self._buf.extend(next(self._iter))
+            except StopIteration:
+                self._eof = True
+                break
+
+        # Serve up to n bytes
+        out = bytes(self._buf[:n])
+        del self._buf[:n]
+        return out
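A sketch of the intended pairing (URL and response handling invented): _AiterReader adapts a chunked byte iterator, such as an httpx streaming response, into the file-like object ijson expects.

    import httpx
    import ijson

    with httpx.stream("GET", "https://example.com/large-array.json") as response:
        reader = _AiterReader(response.iter_bytes())
        for obj in ijson.items(reader, "item"):  # stream top-level array elements
            ...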
port_ocean/core/models.py CHANGED
@@ -1,11 +1,19 @@
 from dataclasses import dataclass, field
 from enum import Enum, StrEnum
-from typing import Any, TypedDict
-
+from typing import Any, Literal, TypedDict
 from pydantic import BaseModel
 from pydantic.fields import Field
 
 
+class EventListenerType(StrEnum):
+    WEBHOOK = "WEBHOOK"
+    KAFKA = "KAFKA"
+    POLLING = "POLLING"
+    ONCE = "ONCE"
+    WEBHOOKS_ONLY = "WEBHOOKS_ONLY"
+    ACTIONS_ONLY = "ACTIONS_ONLY"
+
+
 class CreatePortResourcesOrigin(StrEnum):
     Ocean = "Ocean"
     Port = "Port"
@@ -121,3 +129,28 @@ class EntityPortDiff:
     deleted: list[Entity] = field(default_factory=list)
     modified: list[Entity] = field(default_factory=list)
     created: list[Entity] = field(default_factory=list)
+
+
+class IntegrationFeatureFlag(StrEnum):
+    USE_PROVISIONED_DEFAULTS = "USE_PROVISIONED_DEFAULTS"
+    LAKEHOUSE_ELIGIBLE = "LAKEHOUSE_ELIGIBLE"
+    OCEAN_ACTIONS_PROCESSING_ENABLED = "OCEAN_ACTIONS_PROCESSING_ENABLED"
+
+
+class RunStatus(StrEnum):
+    IN_PROGRESS = "IN_PROGRESS"
+    SUCCESS = "SUCCESS"
+    FAILURE = "FAILURE"
+
+
+class IntegrationActionInvocationPayload(BaseModel):
+    type: Literal["INTEGRATION_ACTION"]
+    installationId: str
+    integrationActionType: str
+    integrationActionExecutionProperties: dict[str, Any] = Field(default_factory=dict)
+
+
+class ActionRun(BaseModel):
+    id: str
+    status: RunStatus
+    payload: IntegrationActionInvocationPayload
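A minimal sketch of the new action-run models in use (all field values invented):

    run = ActionRun.parse_obj(
        {
            "id": "r_123",
            "status": "IN_PROGRESS",
            "payload": {
                "type": "INTEGRATION_ACTION",
                "installationId": "inst_1",
                "integrationActionType": "create_bucket",
            },
        }
    )
    assert run.status is RunStatus.IN_PROGRESS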
port_ocean/core/utils/utils.py CHANGED
@@ -4,7 +4,7 @@ import json
 from typing import Iterable, Any, TypeVar, Callable, Awaitable
 
 from loguru import logger
-from pydantic import parse_obj_as, ValidationError
+from pydantic import BaseModel, parse_obj_as, ValidationError
 
 
 from port_ocean.clients.port.client import PortClient
@@ -79,6 +79,19 @@
     return valid_items, errors
 
 
+def _get_entity_key(entity: Entity) -> tuple[str, str]:
+    identifier = entity.identifier
+    if isinstance(identifier, BaseModel):
+        identifier = identifier.dict()
+
+    key_part = (
+        json.dumps(identifier, sort_keys=True)
+        if isinstance(identifier, dict)
+        else str(identifier)
+    )
+    return key_part, entity.blueprint
+
+
 def get_port_diff(before: Iterable[Entity], after: Iterable[Entity]) -> EntityPortDiff:
     before_dict = {}
     after_dict = {}
@@ -88,12 +101,10 @@ def get_port_diff(before: Iterable[Entity], after: Iterable[Entity]) -> EntityPortDiff:
 
     # Create dictionaries for before and after lists
     for entity in before:
-        key = (entity.identifier, entity.blueprint)
-        before_dict[key] = entity
+        before_dict[_get_entity_key(entity)] = entity
 
     for entity in after:
-        key = (entity.identifier, entity.blueprint)
-        after_dict[key] = entity
+        after_dict[_get_entity_key(entity)] = entity
 
     # Find created, modified, and deleted objects
     for key, obj in after_dict.items():
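The point of _get_entity_key: entity identifiers may now be dicts (or pydantic models), which are unhashable and sensitive to key order; serializing with sort_keys=True yields a canonical, hashable key, so equivalent identifiers diff as equal. For example:

    import json

    a = json.dumps({"combinator": "and", "rules": [1]}, sort_keys=True)
    b = json.dumps({"rules": [1], "combinator": "and"}, sort_keys=True)
    assert a == b  # same canonical key regardless of key order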
port_ocean/exceptions/execution_manager.py ADDED
@@ -0,0 +1,22 @@
+class DuplicateActionExecutorError(Exception):
+    """
+    Raised when attempting to register an executor for an action that already has an existing executor.
+    """
+
+    pass
+
+
+class RunAlreadyAcknowledgedError(Exception):
+    """
+    Raised when attempting to acknowledge a run that has already been acknowledged.
+    """
+
+    pass
+
+
+class PartitionKeyNotFoundError(Exception):
+    """
+    Raised when attempting to extract a partition key that is not found in the invocation payload.
+    """
+
+    pass
port_ocean/helpers/metric/metric.py CHANGED
@@ -131,7 +131,7 @@ class Metrics:
         self.load_metrics()
         self._integration_version: Optional[str] = None
         self._ocean_version: Optional[str] = None
-        self.event_id = ""
+        self._event_id = ""
         self.sync_state = SyncState.PENDING
 
     @property
port_ocean/helpers/retry.py CHANGED
@@ -257,7 +257,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
             else:
                 response = await transport.handle_async_request(request)
 
-            await self._log_response_size_async(request, response)
+            self._log_response_size(request, response)
 
             return response
         except Exception as e:
@@ -345,32 +345,6 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
             return int(content_length)
         return None
 
-    async def _log_response_size_async(
-        self, request: httpx.Request, response: httpx.Response
-    ) -> None:
-        """Log the size of the response."""
-        if not self._should_log_response_size(request):
-            return
-
-        # Try to get content length from headers first
-        content_length = self._get_content_length(response)
-        if content_length is not None:
-            size_info = content_length
-        else:
-            # If no Content-Length header, try to get actual content size
-            try:
-                actual_size = len(await response.aread())
-                size_info = actual_size
-            except Exception as e:
-                cast(logging.Logger, self._logger).error(
-                    f"Error getting response size: {e}"
-                )
-                return
-
-        cast(logging.Logger, self._logger).info(
-            f"Response for {request.method} {request.url} - Size: {size_info} bytes"
-        )
-
     def _log_response_size(
         self, request: httpx.Request, response: httpx.Response
     ) -> None:
@@ -378,21 +352,11 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
             return
 
         content_length = self._get_content_length(response)
-        if content_length is not None:
-            size_info = content_length
-        else:
-            # If no Content-Length header, try to get actual content size
-            try:
-                actual_size = len(response.read())
-                size_info = actual_size
-            except Exception as e:
-                cast(logging.Logger, self._logger).error(
-                    f"Error getting response size: {e}"
-                )
-                return
+        if content_length is None:
+            return
 
         cast(logging.Logger, self._logger).info(
-            f"Response for {request.method} {request.url} - Size: {size_info} bytes"
+            f"Response for {request.method} {request.url} - Size: {content_length} bytes"
         )
 
     async def _should_retry_async(self, response: httpx.Response) -> bool:
port_ocean/log/logger_setup.py CHANGED
@@ -61,9 +61,9 @@ def _http_loguru_handler(level: LogLevelType) -> None:
 
     http_memory_handler = HTTPMemoryHandler()
     signal_handler.register(
-        http_memory_handler.wait_for_lingering_threads, priority=-200
+        http_memory_handler.wait_for_lingering_threads, priority=-900
     )
-    signal_handler.register(http_memory_handler.flush, priority=-200)
+    signal_handler.register(http_memory_handler.flush, priority=-899)
 
     queue_listener = QueueListener(queue, http_memory_handler)
     queue_listener.start()
port_ocean/ocean.py CHANGED
@@ -26,6 +26,7 @@ from port_ocean.core.handlers.resync_state_updater import ResyncStateUpdater
 from port_ocean.core.handlers.webhook.processor_manager import (
     LiveEventsProcessorManager,
 )
+from port_ocean.core.handlers.actions.execution_manager import ExecutionManager
 from port_ocean.core.integrations.base import BaseIntegration
 from port_ocean.core.models import ProcessExecutionMode
 from port_ocean.log.sensetive import sensitive_log_filter
@@ -88,6 +89,16 @@ class Ocean:
             max_wait_seconds_before_shutdown=self.config.max_wait_seconds_before_shutdown,
         )
 
+        self.execution_manager = ExecutionManager(
+            webhook_manager=self.webhook_manager,
+            signal_handler=signal_handler,
+            workers_count=self.config.actions_processor.workers_count,
+            runs_buffer_high_watermark=self.config.actions_processor.runs_buffer_high_watermark,
+            poll_check_interval_seconds=self.config.actions_processor.poll_check_interval_seconds,
+            visibility_timeout_ms=self.config.actions_processor.visibility_timeout_ms,
+            max_wait_seconds_before_shutdown=self.config.max_wait_seconds_before_shutdown,
+        )
+
         self.integration = (
             integration_class(ocean) if integration_class else BaseIntegration(ocean)
         )
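For orientation, the ExecutionManager wiring above reads its tuning from config.actions_processor (see port_ocean/config/settings.py in the file list). A sketch of that settings surface as consumed here (attribute names come from this diff; the access path and values are illustrative):

    from port_ocean.context.ocean import ocean

    cfg = ocean.config.actions_processor
    print(cfg.enabled, cfg.workers_count, cfg.runs_buffer_high_watermark)
    print(cfg.poll_check_interval_seconds, cfg.visibility_timeout_ms)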
@@ -105,7 +116,7 @@
         This ensures Port is notified that the integration was interrupted.
         """
         try:
-            if self.metrics.event_id != "":
+            if self.metrics.event_id.find("-done") == -1:
                 await self.resync_state_updater.update_after_resync(
                     IntegrationStateStatus.Aborted
                 )
@@ -200,6 +211,24 @@
             )
             return None
 
+    async def _register_addons(self) -> None:
+        if self.base_url and self.config.event_listener.should_process_webhooks:
+            await self.webhook_manager.start_processing_event_messages()
+        else:
+            logger.warning(
+                "No base URL provided, or webhook processing is disabled in this event listener, skipping webhook processing"
+            )
+
+        if (
+            self.config.actions_processor.enabled
+            and self.config.event_listener.should_run_actions
+        ):
+            await self.execution_manager.start_processing_action_runs()
+        else:
+            logger.warning(
+                "Execution agent is not enabled, or actions processing is disabled in this event listener, skipping execution agent setup"
+            )
+
     def initialize_app(self) -> None:
         self.fast_api_app.include_router(self.integration_router, prefix="/integration")
         self.fast_api_app.include_router(
203
232
  def initialize_app(self) -> None:
204
233
  self.fast_api_app.include_router(self.integration_router, prefix="/integration")
205
234
  self.fast_api_app.include_router(
@@ -210,10 +239,7 @@ class Ocean:
210
239
  async def lifecycle(_: FastAPI) -> AsyncIterator[None]:
211
240
  try:
212
241
  await self.integration.start()
213
- if self.base_url:
214
- await self.webhook_manager.start_processing_event_messages()
215
- else:
216
- logger.warning("No base URL provided, skipping webhook processing")
242
+ await self._register_addons()
217
243
  await self._setup_scheduled_resync()
218
244
  yield None
219
245
  except Exception: