port-ocean 0.22.12__py3-none-any.whl → 0.23.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of port-ocean might be problematic.

integrations/_infra/Dockerfile.local ADDED
@@ -0,0 +1,41 @@
+ ARG BASE_PYTHON_IMAGE=debian:trixie-slim
+ # debian:trixie-slim - Python 3.12
+ FROM ${BASE_PYTHON_IMAGE}
+
+ RUN apt-get update \
+     && apt-get install -y --no-install-recommends librdkafka-dev python3 \
+     && apt-get clean
+ RUN apt-get update \
+     && apt-get install -y \
+     --no-install-recommends \
+     wget \
+     g++ \
+     libssl-dev \
+     autoconf \
+     automake \
+     libtool \
+     curl \
+     librdkafka-dev \
+     python3 \
+     python3-pip \
+     python3-poetry \
+     build-essential \
+     git \
+     python3-venv \
+     && apt-get clean
+
+ ARG BUILD_CONTEXT
+
+ WORKDIR /app
+
+ COPY . .
+ RUN rm -rf .venv-docker ${BUILD_CONTEXT}/.venv-docker
+ RUN python3 -m venv .venv-docker
+ RUN python3 -m venv ${BUILD_CONTEXT}/.venv-docker
+
+
+ WORKDIR /app/${BUILD_CONTEXT}
+
+ WORKDIR /app
+
+ ENTRYPOINT ["./integrations/_infra/entry_local.sh"]
integrations/_infra/entry_local.sh ADDED
@@ -0,0 +1,27 @@
+ #!/bin/bash
+ mkdir -p /tmp/prometheus_multiproc_dir
+ export PROMETHEUS_MULTIPROC_DIR=/tmp/prometheus_multiproc_dir
+ if [ -z "$BUILD_CONTEXT" ]; then
+     echo "BUILD_CONTEXT is not set"
+     exit 1
+ fi
+
+ if [ ! -d ".venv-docker" ]; then
+     /usr/bin/python3 -m venv .venv-docker
+     source .venv-docker/bin/activate
+     python -m pip install poetry
+     python -m poetry install
+ fi
+
+ cd $BUILD_CONTEXT
+
+ if [ ! -d ".venv-docker" ]; then
+     /usr/bin/python3 -m venv .venv-docker
+     source .venv-docker/bin/activate
+     python -m pip install poetry
+     python -m poetry install
+ fi
+ source .venv-docker/bin/activate
+ python -m pip install -e ../../
+
+ ocean sail
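
Taken together, these two new files appear to support a local development flow: the image would be built with something like docker build -f integrations/_infra/Dockerfile.local --build-arg BUILD_CONTEXT=integrations/<integration-name> ., and the container run with BUILD_CONTEXT set in the environment so entry_local.sh can install the framework in editable mode and start ocean sail. The exact invocation is not part of this diff; the flags above are an assumption based on the ARG and environment checks in the files.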
port_ocean/__init__.py CHANGED
@@ -8,4 +8,9 @@ from .run import run # noqa: E402
  from .version import __integration_version__, __version__ # noqa: E402


- __all__ = ["Ocean", "run", "__version__", "__integration_version__"]
+ __all__ = [
+     "Ocean",
+     "run",
+     "__version__",
+     "__integration_version__",
+ ]
port_ocean/cache/base.py ADDED
@@ -0,0 +1,25 @@
+ from abc import ABC, abstractmethod
+ from typing import Any, Optional
+
+ from port_ocean.core.models import CachingStorageMode
+
+
+ class CacheProvider(ABC):
+     """Base class for cache providers that defines the contract for all cache implementations."""
+
+     STORAGE_TYPE: CachingStorageMode
+
+     @abstractmethod
+     async def get(self, key: str) -> Optional[Any]:
+         """Get a value from the cache."""
+         pass
+
+     @abstractmethod
+     async def set(self, key: str, value: Any) -> None:
+         """Set a value in the cache."""
+         pass
+
+     @abstractmethod
+     async def clear(self) -> None:
+         """Clear all values from the cache."""
+         pass
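
To make the contract concrete, here is a minimal sketch of a custom provider written against this base class. The class name and key-prefixing behavior are hypothetical, and STORAGE_TYPE reuses the built-in memory mode since this release only defines disk and memory:

from typing import Any, Optional

from port_ocean.cache.base import CacheProvider
from port_ocean.core.models import CachingStorageMode


class PrefixedMemoryCacheProvider(CacheProvider):
    # Hypothetical provider: stores values in a plain dict under prefixed keys.
    STORAGE_TYPE = CachingStorageMode.memory

    def __init__(self, prefix: str = "demo") -> None:
        self._prefix = prefix
        self._data: dict[str, Any] = {}

    async def get(self, key: str) -> Optional[Any]:
        return self._data.get(f"{self._prefix}:{key}")

    async def set(self, key: str, value: Any) -> None:
        self._data[f"{self._prefix}:{key}"] = value

    async def clear(self) -> None:
        self._data.clear()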
port_ocean/cache/disk.py ADDED
@@ -0,0 +1,61 @@
+ import pickle
+ from pathlib import Path
+ from typing import Any, Optional
+
+ from port_ocean.cache.base import CacheProvider
+ from port_ocean.cache.errors import FailedToReadCacheError, FailedToWriteCacheError
+ from port_ocean.core.models import CachingStorageMode
+
+
+ class FailedToReadCacheFileError(FailedToReadCacheError):
+     pass
+
+
+ class FailedToWriteCacheFileError(FailedToWriteCacheError):
+     pass
+
+
+ class DiskCacheProvider(CacheProvider):
+     STORAGE_TYPE = CachingStorageMode.disk
+
+     def __init__(self, cache_dir: str | None = None) -> None:
+         if cache_dir is None:
+             cache_dir = ".ocean_cache"
+         self._cache_dir = Path(cache_dir)
+         self._cache_dir.mkdir(parents=True, exist_ok=True)
+
+     def _get_cache_path(self, key: str) -> Path:
+         return self._cache_dir / f"{key}.pkl"
+
+     async def get(self, key: str) -> Optional[Any]:
+         cache_path = self._get_cache_path(key)
+         if not cache_path.exists():
+             return None
+
+         try:
+             with open(cache_path, "rb") as f:
+                 return pickle.load(f)
+         except (pickle.PickleError, EOFError) as e:
+             raise FailedToReadCacheFileError(
+                 f"Failed to read cache file: {cache_path}: {str(e)}"
+             )
+
+     async def set(self, key: str, value: Any) -> None:
+         cache_path = self._get_cache_path(key)
+         try:
+             with open(cache_path, "wb") as f:
+                 pickle.dump(value, f)
+         except (pickle.PickleError, IOError) as e:
+             raise FailedToWriteCacheFileError(
+                 f"Failed to write cache file: {cache_path}: {str(e)}"
+             )
+
+     async def clear(self) -> None:
+         try:
+             for cache_file in self._cache_dir.glob("*.pkl"):
+                 try:
+                     cache_file.unlink()
+                 except OSError:
+                     pass
+         except OSError:
+             pass
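
A short usage sketch of the disk provider; the directory path here is arbitrary, and values round-trip through pickle files named after the key:

import asyncio

from port_ocean.cache.disk import DiskCacheProvider


async def main() -> None:
    cache = DiskCacheProvider(cache_dir="/tmp/ocean-cache-demo")  # arbitrary path
    await cache.set("repos", [{"name": "ocean"}])  # pickled to repos.pkl
    print(await cache.get("repos"))  # [{'name': 'ocean'}]
    await cache.clear()  # deletes every *.pkl in the cache dir


asyncio.run(main())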
port_ocean/cache/errors.py ADDED
@@ -0,0 +1,10 @@
+ class CacheError(Exception):
+     pass
+
+
+ class FailedToReadCacheError(CacheError):
+     pass
+
+
+ class FailedToWriteCacheError(CacheError):
+     pass
port_ocean/cache/memory.py ADDED
@@ -0,0 +1,36 @@
+ from typing import Any, Optional
+ from port_ocean.cache.base import CacheProvider
+ from port_ocean.cache.errors import FailedToReadCacheError, FailedToWriteCacheError
+ from port_ocean.core.models import CachingStorageMode
+
+
+ class FailedToReadCacheMemoryError(FailedToReadCacheError):
+     pass
+
+
+ class FailedToWriteCacheMemoryError(FailedToWriteCacheError):
+     pass
+
+
+ class InMemoryCacheProvider(CacheProvider):
+     CACHE_KEY = "cache"
+     STORAGE_TYPE = CachingStorageMode.memory
+
+     def __init__(self, caching_storage: dict[str, Any] | None = None) -> None:
+         self._storage = caching_storage or {}
+         self._storage[self.CACHE_KEY] = self._storage.get(self.CACHE_KEY, {})
+
+     async def get(self, key: str) -> Optional[Any]:
+         try:
+             return self._storage.get(self.CACHE_KEY, {}).get(key)
+         except KeyError as e:
+             raise FailedToReadCacheMemoryError(f"Failed to read cache: {str(e)}")
+
+     async def set(self, key: str, value: Any) -> None:
+         try:
+             self._storage[self.CACHE_KEY][key] = value
+         except KeyError as e:
+             raise FailedToWriteCacheMemoryError(f"Failed to write cache: {str(e)}")
+
+     async def clear(self) -> None:
+         self._storage[self.CACHE_KEY].clear()
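
Because the provider can wrap an externally owned dict, the backing storage stays inspectable from outside; a small sketch:

import asyncio
from typing import Any

from port_ocean.cache.memory import InMemoryCacheProvider


async def main() -> None:
    backing: dict[str, Any] = {}
    cache = InMemoryCacheProvider(caching_storage=backing)
    await cache.set("token", "abc123")
    # Entries live under the provider's CACHE_KEY ("cache") in the backing dict.
    print(backing["cache"]["token"])  # abc123


asyncio.run(main())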
port_ocean/config/settings.py CHANGED
@@ -1,4 +1,4 @@
- from typing import Any, Literal, Type, cast
+ from typing import Any, Literal, Optional, Type, cast

  from pydantic import AnyHttpUrl, Extra, parse_obj_as, parse_raw_as
  from pydantic.class_validators import root_validator, validator
@@ -8,7 +8,12 @@ from pydantic.main import BaseModel

  from port_ocean.config.base import BaseOceanModel, BaseOceanSettings
  from port_ocean.core.event_listener import EventListenerSettingsType
- from port_ocean.core.models import CreatePortResourcesOrigin, Runtime
+ from port_ocean.core.models import (
+     CachingStorageMode,
+     CreatePortResourcesOrigin,
+     Runtime,
+     ProcessExecutionMode,
+ )
  from port_ocean.utils.misc import get_integration_name, get_spec_file

  LogLevelType = Literal["ERROR", "WARNING", "INFO", "DEBUG", "CRITICAL"]
@@ -93,6 +98,8 @@ class IntegrationConfiguration(BaseOceanSettings, extra=Extra.allow):
      )
      max_event_processing_seconds: float = 90.0
      max_wait_seconds_before_shutdown: float = 5.0
+     caching_storage_mode: Optional[CachingStorageMode] = Field(default=None)
+     process_execution_mode: Optional[ProcessExecutionMode] = Field(default=None)

      @validator("metrics", pre=True)
      def validate_metrics(cls, v: Any) -> MetricsSettings | dict[str, Any] | None:
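
Both new settings are optional with None defaults, so existing integration configurations keep their previous behavior. Assuming the framework's usual OCEAN__-prefixed environment mapping, they would be set as OCEAN__PROCESS_EXECUTION_MODE=multi_process and OCEAN__CACHING_STORAGE_MODE=disk; the variable names are inferred from the field names and are not shown in this diff.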
port_ocean/core/integrations/mixins/sync_raw.py CHANGED
@@ -1,12 +1,12 @@
  import asyncio
+ import uuid
  from graphlib import CycleError
  import inspect
  import typing
  from typing import Callable, Awaitable, Any
-
+ import multiprocessing
  import httpx
  from loguru import logger
-
  from port_ocean.clients.port.types import UserAgentType
  from port_ocean.context.event import TriggerType, event_context, EventType, event
  from port_ocean.context.ocean import ocean
@@ -15,12 +15,13 @@ from port_ocean.context import resource
  from port_ocean.core.handlers.port_app_config.models import ResourceConfig
  from port_ocean.core.integrations.mixins import HandlerMixin, EventsMixin
  from port_ocean.core.integrations.mixins.utils import (
+     ProcessWrapper,
      is_resource_supported,
      unsupported_kind_response,
      resync_generator_wrapper,
      resync_function_wrapper,
  )
- from port_ocean.core.models import Entity
+ from port_ocean.core.models import Entity, ProcessExecutionMode
  from port_ocean.core.ocean_types import (
      RAW_RESULT,
      RESYNC_RESULT,
@@ -33,6 +34,7 @@ from port_ocean.core.utils.utils import resolve_entities_diff, zip_and_sum, gath
  from port_ocean.exceptions.core import OceanAbortException
  from port_ocean.helpers.metric.metric import SyncState, MetricType, MetricPhase
  from port_ocean.helpers.metric.utils import TimeMetric
+ from port_ocean.utils.ipc import FileIPC

  SEND_RAW_DATA_EXAMPLES_AMOUNT = 5

@@ -267,7 +269,6 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
              objects_diff[0].entity_selector_diff.passed, user_agent_type
          )

-
      return CalculationResult(
          number_of_transformed_entities=len(objects_diff[0].entity_selector_diff.passed),
          entity_selector_diff=objects_diff[0].entity_selector_diff._replace(passed=modified_objects),
@@ -344,8 +345,8 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
                      user_agent_type,
                      send_raw_data_examples_amount=send_raw_data_examples_amount
                  )
-                 errors.extend(calculation_result.errors)
                  passed_entities.extend(calculation_result.entity_selector_diff.passed)
+                 errors.extend(calculation_result.errors)
                  number_of_transformed_entities += calculation_result.number_of_transformed_entities
          except* OceanAbortException as error:
              ocean.metrics.sync_state = SyncState.FAILED
@@ -355,6 +356,7 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
              f"Finished registering kind: {resource_config.kind}-{resource.resource.index} ,{len(passed_entities)} entities out of {number_of_raw_results} raw results"
          )

+
          ocean.metrics.set_metric(
              name=MetricType.SUCCESS_NAME,
              labels=[ocean.metrics.current_resource_kind(), MetricPhase.RESYNC],
@@ -584,6 +586,72 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
          for entity in event.entity_topological_sorter.get_entities(False):
              await self.entities_state_applier.context.port_client.upsert_entity(entity,event.port_app_config.get_port_request_options(),user_agent_type,should_raise=False)

+     def process_resource_in_subprocess(self,
+         file_ipc_map: dict[str, FileIPC],
+         resource: ResourceConfig,
+         index: int,
+         user_agent_type: UserAgentType,
+     ) -> None:
+         logger.info(f"process started successfully for {resource.kind} with index {index}")
+
+         async def process_resource_task() -> None:
+             result = await self._process_resource(
+                 resource, index, user_agent_type
+             )
+             file_ipc_map["process_resource"].save(result)
+             file_ipc_map["topological_entities"].save(
+                 event.entity_topological_sorter.entities
+             )
+
+         asyncio.run(process_resource_task())
+         logger.info(f"Process finished for {resource.kind} with index {index}")
+
+     async def process_resource(self, resource: ResourceConfig, index: int, user_agent_type: UserAgentType) -> tuple[list[Entity], list[Exception]]:
+         if ocean.app.process_execution_mode == ProcessExecutionMode.multi_process:
+             id = uuid.uuid4()
+             logger.info(f"Starting subprocess with id {id}")
+             file_ipc_map = {
+                 "process_resource": FileIPC(id, "process_resource", ([], [])),
+                 "topological_entities": FileIPC(id, "topological_entities", []),
+             }
+             process = ProcessWrapper(target=self.process_resource_in_subprocess, args=(file_ipc_map, resource, index, user_agent_type))
+             process.start()
+             await process.join_async()
+
+             event.entity_topological_sorter.entities.extend(file_ipc_map["topological_entities"].load())
+             return file_ipc_map["process_resource"].load()
+
+         else:
+             return await self._process_resource(resource, index, user_agent_type)
+
+     async def _process_resource(self, resource: ResourceConfig, index: int, user_agent_type: UserAgentType) -> tuple[list[Entity], list[Exception]]:
+         # create resource context per resource kind, so resync method could have access to the resource
+         # config as we might have multiple resources in the same event
+         async with resource_context(resource, index):
+             resource_kind_id = f"{resource.kind}-{index}"
+             ocean.metrics.sync_state = SyncState.SYNCING
+             task = asyncio.create_task(
+                 self._register_in_batches(resource, user_agent_type)
+             )
+             event.on_abort(lambda: task.cancel())
+             kind_results: tuple[list[Entity], list[Exception]] = await task
+             ocean.metrics.set_metric(
+                 name=MetricType.OBJECT_COUNT_NAME,
+                 labels=[ocean.metrics.current_resource_kind(), MetricPhase.LOAD, MetricPhase.LoadResult.LOADED],
+                 value=len(kind_results[0])
+             )
+
+             if ocean.metrics.sync_state != SyncState.FAILED:
+                 ocean.metrics.sync_state = SyncState.COMPLETED
+
+             await ocean.metrics.send_metrics_to_webhook(
+                 kind=resource_kind_id
+             )
+             # await ocean.metrics.report_kind_sync_metrics(kind=resource_kind_id) # TODO: uncomment this when end points are ready
+
+             return kind_results
+
      @TimeMetric(MetricPhase.RESYNC)
      async def sync_raw_all(
          self,
@@ -622,6 +690,9 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
          ocean.metrics.initialize_metrics(kinds)
          # await ocean.metrics.report_sync_metrics(kinds=kinds) # TODO: uncomment this when end points are ready

+         # Clear cache
+         await ocean.app.cache_provider.clear()
+
          # Execute resync_start hooks
          for resync_start_fn in self.event_strategy["resync_start"]:
              await resync_start_fn()
@@ -640,32 +711,13 @@

          creation_results: list[tuple[list[Entity], list[Exception]]] = []

-
+         multiprocessing.set_start_method('fork', True)
          try:
              for index, resource in enumerate(app_config.resources):
-                 # create resource context per resource kind, so resync method could have access to the resource
-                 # config as we might have multiple resources in the same event
-                 async with resource_context(resource, index):
-                     resource_kind_id = f"{resource.kind}-{index}"
-                     ocean.metrics.sync_state = SyncState.SYNCING
-                     # await ocean.metrics.report_kind_sync_metrics(kind=resource_kind_id) # TODO: uncomment this when end points are ready
-
-                     task = asyncio.create_task(
-                         self._register_in_batches(resource, user_agent_type)
-                     )
-
-                     event.on_abort(lambda: task.cancel())
-                     kind_results: tuple[list[Entity], list[Exception]] = await task

-                     creation_results.append(kind_results)
+                 logger.info(f"Starting processing resource {resource.kind} with index {index}")

-                     if ocean.metrics.sync_state != SyncState.FAILED:
-                         ocean.metrics.sync_state = SyncState.COMPLETED
-
-                     await ocean.metrics.send_metrics_to_webhook(
-                         kind=resource_kind_id
-                     )
-                     # await ocean.metrics.report_kind_sync_metrics(kind=resource_kind_id) # TODO: uncomment this when end points are ready
+                 creation_results.append(await self.process_resource(resource, index, user_agent_type))

              await self.sort_and_upsert_failed_entities(user_agent_type)

@@ -721,3 +773,5 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
              logger.info("Finished executing resync_complete hooks")

              return True
+         finally:
+             await ocean.app.cache_provider.clear()
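
The multi_process path above forks one worker per resource kind, runs the resync inside the child's own asyncio loop, and hands results back to the parent through pickle files. A standalone sketch of that pattern using FileIPC (added in port_ocean/utils/ipc.py later in this diff); the worker function and values are illustrative, and the 'fork' start method assumes a POSIX platform:

import asyncio
import multiprocessing
import uuid

from port_ocean.utils.ipc import FileIPC


def worker(ipc: FileIPC, x: int) -> None:
    async def task() -> int:
        await asyncio.sleep(0.1)
        return x * 2

    ipc.save(asyncio.run(task()))  # child writes its result to a pickle file


async def main() -> None:
    multiprocessing.set_start_method("fork", force=True)
    ipc = FileIPC(str(uuid.uuid4()), "result", default_return=None)
    process = multiprocessing.Process(target=worker, args=(ipc, 21))
    process.start()
    while process.exitcode is None:  # same polling idea as ProcessWrapper.join_async
        await asyncio.sleep(0.1)
    print(ipc.load())  # 42


asyncio.run(main())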
port_ocean/core/integrations/mixins/utils.py CHANGED
@@ -3,6 +3,9 @@ from typing import Awaitable, Generator, Callable

  from loguru import logger

+ import asyncio
+ import multiprocessing
+
  from port_ocean.core.ocean_types import (
      ASYNC_GENERATOR_RESYNC_TYPE,
      RAW_RESULT,
@@ -72,3 +75,16 @@ def unsupported_kind_response(
  ) -> tuple[RESYNC_RESULT, list[Exception]]:
      logger.error(f"Kind {kind} is not supported in this integration")
      return [], [KindNotImplementedException(kind, available_resync_kinds)]
+
+ class ProcessWrapper(multiprocessing.Process):
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+
+     async def join_async(self) -> None:
+         while self.exitcode is None:
+             await asyncio.sleep(2)
+         if self.exitcode != 0:
+             logger.error(f"Process {self.pid} failed with exit code {self.exitcode}")
+         else:
+             logger.info(f"Process {self.pid} finished with exit code {self.exitcode}")
+         return super().join()
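
A hedged sketch of driving ProcessWrapper from async code; join_async polls exitcode instead of calling the blocking Process.join(), which keeps the caller's event loop responsive while the child runs:

import asyncio

from port_ocean.core.integrations.mixins.utils import ProcessWrapper


def work() -> None:
    print("running in a child process")


async def main() -> None:
    process = ProcessWrapper(target=work)
    process.start()
    await process.join_async()  # checks exitcode every 2 seconds, then logs it


asyncio.run(main())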
port_ocean/core/models.py CHANGED
@@ -11,6 +11,16 @@ class CreatePortResourcesOrigin(StrEnum):
      Port = "Port"


+ class ProcessExecutionMode(StrEnum):
+     multi_process = "multi_process"
+     single_process = "single_process"
+
+
+ class CachingStorageMode(StrEnum):
+     disk = "disk"
+     memory = "memory"
+
+
  class Runtime(Enum):
      Saas = "Saas"
      OnPrem = "OnPrem"
port_ocean/helpers/metric/metric.py CHANGED
@@ -10,6 +10,7 @@ from prometheus_client import Gauge
  import prometheus_client.openmetrics
  import prometheus_client.openmetrics.exposition
  import prometheus_client.parser
+ from prometheus_client import multiprocess

  if TYPE_CHECKING:
      from port_ocean.config.settings import MetricsSettings, IntegrationSettings
@@ -108,11 +109,14 @@ class Metrics:
          metrics_settings: "MetricsSettings",
          integration_configuration: "IntegrationSettings",
          port_client: "PortClient",
+         multiprocessing_enabled: bool = False,
      ) -> None:
          self.metrics_settings = metrics_settings
          self.integration_configuration = integration_configuration
          self.port_client = port_client
          self.registry = prometheus_client.CollectorRegistry()
+         if multiprocessing_enabled:
+             multiprocess.MultiProcessCollector(self.registry)
          self.metrics: dict[str, Gauge] = {}
          self.load_metrics()
          self._integration_version: Optional[str] = None
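
For reference, prometheus_client's multiprocess mode requires the PROMETHEUS_MULTIPROC_DIR environment variable to point at a writable directory before any metrics are created (entry_local.sh above sets it to /tmp/prometheus_multiproc_dir). A minimal sketch of aggregating metrics across processes, independent of Ocean:

import os

os.environ.setdefault("PROMETHEUS_MULTIPROC_DIR", "/tmp/prometheus_multiproc_dir")
os.makedirs(os.environ["PROMETHEUS_MULTIPROC_DIR"], exist_ok=True)

from prometheus_client import CollectorRegistry, generate_latest, multiprocess

registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)  # merges the per-process .db files
print(generate_latest(registry).decode())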
port_ocean/ocean.py CHANGED
@@ -4,6 +4,10 @@ from contextlib import asynccontextmanager
  import threading
  from typing import Any, AsyncIterator, Callable, Dict, Type

+ from port_ocean.cache.base import CacheProvider
+ from port_ocean.cache.disk import DiskCacheProvider
+ from port_ocean.cache.memory import InMemoryCacheProvider
+ from port_ocean.core.models import ProcessExecutionMode
  import port_ocean.helpers.metric.metric

  from fastapi import FastAPI, APIRouter
@@ -66,11 +70,16 @@ class Ocean:
              integration_type=self.config.integration.type,
              integration_version=__integration_version__,
          )
-
+         self.cache_provider: CacheProvider = self._get_caching_provider()
+         self.process_execution_mode: ProcessExecutionMode = (
+             self._get_process_execution_mode()
+         )
          self.metrics = port_ocean.helpers.metric.metric.Metrics(
              metrics_settings=self.config.metrics,
              integration_configuration=self.config.integration,
              port_client=self.port_client,
+             multiprocessing_enabled=self.process_execution_mode
+             == ProcessExecutionMode.multi_process,
          )

          self.webhook_manager = LiveEventsProcessorManager(
@@ -90,6 +99,24 @@ class Ocean:

          self.app_initialized = False

+     def _get_process_execution_mode(self) -> ProcessExecutionMode:
+         if self.config.process_execution_mode:
+             return self.config.process_execution_mode
+         return ProcessExecutionMode.single_process
+
+     def _get_caching_provider(self) -> CacheProvider:
+         if self.config.caching_storage_mode:
+             caching_type_to_provider = {
+                 DiskCacheProvider.STORAGE_TYPE: DiskCacheProvider,
+                 InMemoryCacheProvider.STORAGE_TYPE: InMemoryCacheProvider,
+             }
+             if self.config.caching_storage_mode in caching_type_to_provider:
+                 return caching_type_to_provider[self.config.caching_storage_mode]()
+
+         if self.config.process_execution_mode == ProcessExecutionMode.multi_process:
+             return DiskCacheProvider()
+         return InMemoryCacheProvider()
+
      def is_saas(self) -> bool:
          return self.config.runtime.is_saas_runtime

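Inside a running integration, the selected provider is reachable through the app context; a small hedged sketch (key and value are placeholders). The fallback logic above picks the in-memory provider by default and the disk provider only when process_execution_mode is multi_process, so cached data survives the per-resource subprocesses:

from port_ocean.context.ocean import ocean


async def remember(key: str, value: str) -> None:
    await ocean.app.cache_provider.set(key, value)
    assert await ocean.app.cache_provider.get(key) == value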
port_ocean/run.py CHANGED
@@ -21,7 +21,6 @@ def _get_default_config_factory() -> None | Type[BaseModel]:
      config_factory = None
      if spec is not None:
          config_factory = default_config_factory(spec.get("configurations", []))
-
      return config_factory

port_ocean/tests/cache/__init__.py ADDED
@@ -0,0 +1 @@
+ """Tests for cache providers."""
port_ocean/tests/cache/test_disk_cache.py ADDED
@@ -0,0 +1,92 @@
+ import os
+ import pytest
+ from pathlib import Path
+
+ from port_ocean.cache.disk import (
+     DiskCacheProvider,
+     FailedToReadCacheFileError,
+     FailedToWriteCacheFileError,
+ )
+
+
+ @pytest.fixture
+ def disk_cache(tmp_path: Path) -> DiskCacheProvider:
+     """Fixture that provides a DiskCacheProvider with a temporary directory."""
+     return DiskCacheProvider(cache_dir=str(tmp_path))
+
+
+ @pytest.mark.asyncio
+ async def test_disk_cache_set_get(disk_cache: DiskCacheProvider) -> None:
+     """Test setting and getting values from disk cache."""
+     # Test basic set/get
+     await disk_cache.set("test_key", "test_value")
+     assert await disk_cache.get("test_key") == "test_value"
+
+     # Test with different types
+     test_data = {
+         "string": "hello",
+         "int": 42,
+         "float": 3.14,
+         "list": [1, 2, 3],
+         "dict": {"a": 1, "b": 2},
+     }
+
+     for key, value in test_data.items():
+         await disk_cache.set(key, value)
+         assert await disk_cache.get(key) == value
+
+
+ @pytest.mark.asyncio
+ async def test_disk_cache_clear(disk_cache: DiskCacheProvider) -> None:
+     """Test clearing all values from disk cache."""
+     # Add multiple values
+     for i in range(5):
+         await disk_cache.set(f"key_{i}", f"value_{i}")
+
+     # Verify values exist
+     for i in range(5):
+         assert await disk_cache.get(f"key_{i}") == f"value_{i}"
+
+     # Clear cache
+     await disk_cache.clear()
+
+     # Verify all values are gone
+     for i in range(5):
+         assert await disk_cache.get(f"key_{i}") is None
+
+
+ @pytest.mark.asyncio
+ async def test_disk_cache_nonexistent_key(disk_cache: DiskCacheProvider) -> None:
+     """Test getting a nonexistent key from disk cache."""
+     assert await disk_cache.get("nonexistent_key") is None
+
+
+ @pytest.mark.asyncio
+ async def test_disk_cache_corrupted_file(
+     disk_cache: DiskCacheProvider, tmp_path: Path
+ ) -> None:
+     """Test handling of corrupted cache files."""
+     # Create a corrupted pickle file
+     cache_path = tmp_path / "test_key.pkl"
+     with open(cache_path, "wb") as f:
+         f.write(b"invalid pickle data")
+
+     # Attempting to read should raise FailedToReadCacheFileError
+     with pytest.raises(FailedToReadCacheFileError):
+         await disk_cache.get("test_key")
+
+
+ @pytest.mark.asyncio
+ async def test_disk_cache_write_error(
+     disk_cache: DiskCacheProvider, tmp_path: Path
+ ) -> None:
+     """Test handling of write errors."""
+     # Make the cache directory read-only
+     os.chmod(tmp_path, 0o444)
+
+     # Attempting to write should raise FailedToWriteCacheFileError
+     with pytest.raises(FailedToWriteCacheFileError):
+         await disk_cache.set("test_key", "test_value")
+
+     # Restore permissions
+     os.chmod(tmp_path, 0o755)
port_ocean/tests/cache/test_memory_cache.py ADDED
@@ -0,0 +1,59 @@
+ import pytest
+
+ from port_ocean.cache.memory import (
+     InMemoryCacheProvider,
+ )
+
+
+ @pytest.fixture
+ def memory_cache() -> InMemoryCacheProvider:
+     """Fixture that provides an InMemoryCacheProvider."""
+     return InMemoryCacheProvider()
+
+
+ @pytest.mark.asyncio
+ async def test_memory_cache_set_get(memory_cache: InMemoryCacheProvider) -> None:
+     """Test setting and getting values from memory cache."""
+     # Test basic set/get
+     await memory_cache.set("test_key", "test_value")
+     assert await memory_cache.get("test_key") == "test_value"
+
+     # Test with different types
+     test_data = {
+         "string": "hello",
+         "int": 42,
+         "float": 3.14,
+         "list": [1, 2, 3],
+         "dict": {"a": 1, "b": 2},
+     }
+
+     for key, value in test_data.items():
+         await memory_cache.set(key, value)
+         assert await memory_cache.get(key) == value
+
+
+ @pytest.mark.asyncio
+ async def test_memory_cache_clear(memory_cache: InMemoryCacheProvider) -> None:
+     """Test clearing all values from memory cache."""
+     # Add multiple values
+     for i in range(5):
+         await memory_cache.set(f"key_{i}", f"value_{i}")
+
+     # Verify values exist
+     for i in range(5):
+         assert await memory_cache.get(f"key_{i}") == f"value_{i}"
+
+     # Clear cache
+     await memory_cache.clear()
+
+     # Verify all values are gone
+     for i in range(5):
+         assert await memory_cache.get(f"key_{i}") is None
+
+
+ @pytest.mark.asyncio
+ async def test_memory_cache_nonexistent_key(
+     memory_cache: InMemoryCacheProvider,
+ ) -> None:
+     """Test getting a nonexistent key from memory cache."""
+     assert await memory_cache.get("nonexistent_key") is None
port_ocean/tests/core/conftest.py CHANGED
@@ -6,6 +6,7 @@ import pytest
  from httpx import Response

  from port_ocean.clients.port.client import PortClient
+ from port_ocean.config.settings import IntegrationSettings, MetricsSettings
  from port_ocean.context.event import EventContext
  from port_ocean.context.ocean import PortOceanContext, ocean
  from port_ocean.core.handlers.entities_state_applier.port.applier import (
@@ -22,8 +23,10 @@ from port_ocean.core.handlers.port_app_config.models import (
      ResourceConfig,
      Selector,
  )
- from port_ocean.core.models import Entity
+ from port_ocean.core.models import Entity, ProcessExecutionMode
+ from port_ocean.helpers.metric.metric import Metrics
  from port_ocean.ocean import Ocean
+ from port_ocean.cache.memory import InMemoryCacheProvider


  @pytest.fixture
@@ -83,8 +86,15 @@ def mock_ocean(mock_port_client: PortClient) -> Ocean:
      ocean_mock.config.port = MagicMock()
      ocean_mock.config.port.port_app_config_cache_ttl = 60
      ocean_mock.port_client = mock_port_client
-     ocean_mock.metrics = MagicMock()
-     ocean_mock.metrics.flush = AsyncMock()
+     ocean_mock.process_execution_mode = ProcessExecutionMode.single_process
+     ocean_mock.cache_provider = InMemoryCacheProvider()
+     metrics_settings = MetricsSettings(enabled=True)
+     integration_settings = IntegrationSettings(type="test", identifier="test")
+     ocean_mock.metrics = Metrics(
+         metrics_settings=metrics_settings,
+         integration_configuration=integration_settings,
+         port_client=mock_port_client,
+     )

      return ocean_mock

port_ocean/tests/core/handlers/mixins/test_sync_raw.py CHANGED
@@ -1,7 +1,6 @@
  from graphlib import CycleError
  from typing import Any, AsyncGenerator

- from port_ocean.clients.port.client import PortClient
  from port_ocean.core.utils.entity_topological_sorter import EntityTopologicalSorter
  from port_ocean.exceptions.core import OceanAbortException
  import pytest
@@ -25,8 +24,6 @@ from port_ocean.clients.port.types import UserAgentType
  from dataclasses import dataclass
  from typing import List, Optional
  from port_ocean.tests.core.conftest import create_entity, no_op_event_context
- from port_ocean.helpers.metric.metric import Metrics
- from port_ocean.config.settings import MetricsSettings, IntegrationSettings


  @pytest.fixture
@@ -94,29 +91,6 @@ def mock_sync_raw_mixin_with_jq_processor(
      return mock_sync_raw_mixin


- @pytest.fixture
- def mock_ocean(mock_port_client: PortClient) -> Ocean:
-     with patch("port_ocean.ocean.Ocean.__init__", return_value=None):
-         ocean_mock = Ocean(
-             MagicMock(), MagicMock(), MagicMock(), MagicMock(), MagicMock()
-         )
-         ocean_mock.config = MagicMock()
-         ocean_mock.config.port = MagicMock()
-         ocean_mock.config.port.port_app_config_cache_ttl = 60
-         ocean_mock.port_client = mock_port_client
-
-         # Create real metrics instance
-         metrics_settings = MetricsSettings(enabled=True)
-         integration_settings = IntegrationSettings(type="test", identifier="test")
-         ocean_mock.metrics = Metrics(
-             metrics_settings=metrics_settings,
-             integration_configuration=integration_settings,
-             port_client=mock_port_client,
-         )
-
-         return ocean_mock
-
-
  @pytest.mark.asyncio
  async def test_sync_raw_mixin_self_dependency(
      mock_sync_raw_mixin: SyncRawMixin,
@@ -213,7 +187,7 @@ async def test_sync_raw_mixin_self_dependency(
          metric["metrics"]["phase"]["load"]["object_count_type"][
              "loaded"
          ]["object_count"]
-         == 1
+         == 2
      )

      # Verify success
@@ -341,7 +315,7 @@ async def test_sync_raw_mixin_circular_dependency(
          metric["metrics"]["phase"]["load"]["object_count_type"][
              "loaded"
          ]["object_count"]
-         == 0
+         == 2
      )

      # Verify success
port_ocean/tests/utils/test_cache.py CHANGED
@@ -1,19 +1,25 @@
  from typing import Any
  import asyncio
- from port_ocean.utils import cache  # Import the module where 'event' is used
+ from port_ocean.utils import cache
  import pytest
- from dataclasses import dataclass, field
  from typing import AsyncGenerator, AsyncIterator, List, TypeVar
+ from unittest.mock import AsyncMock
+ from port_ocean.cache.errors import FailedToReadCacheError, FailedToWriteCacheError
+ from port_ocean.cache.memory import InMemoryCacheProvider


- @dataclass
- class EventContext:
-     attributes: dict[str, Any] = field(default_factory=dict)
+ @pytest.fixture
+ def memory_cache() -> InMemoryCacheProvider:
+     return InMemoryCacheProvider()


  @pytest.fixture
- def event() -> EventContext:
-     return EventContext()
+ def mock_ocean(memory_cache: InMemoryCacheProvider) -> Any:
+     return type(
+         "MockOcean",
+         (),
+         {"app": type("MockApp", (), {"cache_provider": memory_cache})()},
+     )()


  T = TypeVar("T")
@@ -27,8 +33,8 @@ async def collect_iterator_results(iterator: AsyncIterator[List[T]]) -> List[T]:


  @pytest.mark.asyncio
- async def test_cache_iterator_result(event: EventContext, monkeypatch: Any) -> None:
-     monkeypatch.setattr(cache, "event", event)
+ async def test_cache_iterator_result(mock_ocean: Any, monkeypatch: Any) -> None:
+     monkeypatch.setattr(cache, "ocean", mock_ocean)

      call_count = 0
@@ -55,9 +61,9 @@

  @pytest.mark.asyncio
  async def test_cache_iterator_result_with_kwargs(
-     event: EventContext, monkeypatch: Any
+     mock_ocean: Any, monkeypatch: Any
  ) -> None:
-     monkeypatch.setattr(cache, "event", event)
+     monkeypatch.setattr(cache, "ocean", mock_ocean)

      call_count = 0
@@ -83,10 +89,16 @@


  @pytest.mark.asyncio
- async def test_cache_iterator_result_no_cache(
-     event: EventContext, monkeypatch: Any
+ async def test_cache_iterator_result_cache_errors(
+     mock_ocean: Any, monkeypatch: Any
  ) -> None:
-     monkeypatch.setattr(cache, "event", event)
+     # Create a mock cache provider that raises errors
+     mock_cache_provider = AsyncMock()
+     mock_cache_provider.get.side_effect = FailedToReadCacheError("fail read")
+     mock_cache_provider.set.side_effect = FailedToWriteCacheError("fail write")
+
+     mock_ocean.app.cache_provider = mock_cache_provider
+     monkeypatch.setattr(cache, "ocean", mock_ocean)

      call_count = 0
@@ -98,20 +110,20 @@
          await asyncio.sleep(0.1)
          yield [i]

+     # First call should execute the function since cache read fails
      result1 = await collect_iterator_results(sample_iterator(3))
      assert result1 == [0, 1, 2]
      assert call_count == 1

-     event.attributes.clear()
-
+     # Second call should also execute the function since cache read fails
      result2 = await collect_iterator_results(sample_iterator(3))
      assert result2 == [0, 1, 2]
      assert call_count == 2


  @pytest.mark.asyncio
- async def test_cache_coroutine_result(event: EventContext, monkeypatch: Any) -> None:
-     monkeypatch.setattr(cache, "event", event)
+ async def test_cache_coroutine_result(mock_ocean: Any, monkeypatch: Any) -> None:
+     monkeypatch.setattr(cache, "ocean", mock_ocean)

      call_count = 0
@@ -137,9 +149,9 @@

  @pytest.mark.asyncio
  async def test_cache_coroutine_result_with_kwargs(
-     event: EventContext, monkeypatch: Any
+     mock_ocean: Any, monkeypatch: Any
  ) -> None:
-     monkeypatch.setattr(cache, "event", event)
+     monkeypatch.setattr(cache, "ocean", mock_ocean)

      call_count = 0
@@ -164,10 +176,16 @@


  @pytest.mark.asyncio
- async def test_cache_coroutine_result_no_cache(
-     event: EventContext, monkeypatch: Any
+ async def test_cache_coroutine_result_cache_errors(
+     mock_ocean: Any, monkeypatch: Any
  ) -> None:
-     monkeypatch.setattr(cache, "event", event)
+     # Create a mock cache provider that raises errors
+     mock_cache_provider = AsyncMock()
+     mock_cache_provider.get.side_effect = FailedToReadCacheError("fail read")
+     mock_cache_provider.set.side_effect = FailedToWriteCacheError("fail write")
+
+     mock_ocean.app.cache_provider = mock_cache_provider
+     monkeypatch.setattr(cache, "ocean", mock_ocean)

      call_count = 0
@@ -178,12 +196,79 @@
          await asyncio.sleep(0.1)
          return x * 2

+     # First call should execute the function since cache read fails
      result1 = await sample_coroutine(2)
      assert result1 == 4
      assert call_count == 1

-     event.attributes.clear()
-
+     # Second call should also execute the function since cache read fails
      result2 = await sample_coroutine(2)
      assert result2 == 4
      assert call_count == 2
+
+
+ @pytest.mark.asyncio
+ async def test_cache_failures_dont_affect_execution(
+     mock_ocean: Any, monkeypatch: Any
+ ) -> None:
+     """Test that cache failures (both read and write) don't affect the decorated function execution."""
+     # Create a mock cache provider that raises errors
+     mock_cache_provider = AsyncMock()
+     mock_cache_provider.get.side_effect = FailedToReadCacheError("fail read")
+     mock_cache_provider.set.side_effect = FailedToWriteCacheError("fail write")
+
+     mock_ocean.app.cache_provider = mock_cache_provider
+     monkeypatch.setattr(cache, "ocean", mock_ocean)
+
+     # Test both iterator and coroutine decorators
+     iterator_call_count = 0
+     coroutine_call_count = 0
+
+     @cache.cache_iterator_result()
+     async def sample_iterator(x: int) -> AsyncGenerator[List[int], None]:
+         nonlocal iterator_call_count
+         iterator_call_count += 1
+         for i in range(x):
+             await asyncio.sleep(0.1)
+             yield [i]
+
+     @cache.cache_coroutine_result()
+     async def sample_coroutine(x: int) -> int:
+         nonlocal coroutine_call_count
+         coroutine_call_count += 1
+         await asyncio.sleep(0.1)
+         return x * 2
+
+     # Test iterator function
+     # First call - should execute function (cache read fails)
+     result1 = await collect_iterator_results(sample_iterator(3))
+     assert result1 == [0, 1, 2]
+     assert iterator_call_count == 1
+     assert mock_cache_provider.get.call_count == 1
+     assert mock_cache_provider.set.call_count == 1
+
+     # Second call - should execute function again (cache read fails)
+     result2 = await collect_iterator_results(sample_iterator(3))
+     assert result2 == [0, 1, 2]
+     assert iterator_call_count == 2
+     assert mock_cache_provider.get.call_count == 2
+     assert mock_cache_provider.set.call_count == 2
+
+     # Test coroutine function
+     # First call - should execute function (cache read fails)
+     result3 = await sample_coroutine(4)
+     assert result3 == 8
+     assert coroutine_call_count == 1
+     assert mock_cache_provider.get.call_count == 3
+     assert mock_cache_provider.set.call_count == 3
+
+     # Second call - should execute function again (cache read fails)
+     result4 = await sample_coroutine(4)
+     assert result4 == 8
+     assert coroutine_call_count == 2
+     assert mock_cache_provider.get.call_count == 4
+     assert mock_cache_provider.set.call_count == 4
+
+     # Verify that both read and write errors were raised
+     assert isinstance(mock_cache_provider.get.side_effect, FailedToReadCacheError)
+     assert isinstance(mock_cache_provider.set.side_effect, FailedToWriteCacheError)
port_ocean/utils/cache.py CHANGED
@@ -1,7 +1,10 @@
  import functools
  import hashlib
+ import base64
  from typing import Callable, AsyncIterator, Awaitable, Any
- from port_ocean.context.event import event
+ from port_ocean.cache.errors import FailedToReadCacheError, FailedToWriteCacheError
+ from port_ocean.context.ocean import ocean
+ from loguru import logger

  AsyncIteratorCallable = Callable[..., AsyncIterator[list[Any]]]
  AsyncCallable = Callable[..., Awaitable[Any]]
@@ -12,15 +15,18 @@ def hash_func(function_name: str, *args: Any, **kwargs: Any) -> str:
      kwargs_str = str(kwargs)
      concatenated_string = args_str + kwargs_str
      hash_object = hashlib.sha256(concatenated_string.encode())
-     return f"{function_name}_{hash_object.hexdigest()}"
+     short_hash = base64.urlsafe_b64encode(hash_object.digest()[:8]).decode("ascii")
+     short_hash = short_hash.rstrip("=").replace("-", "_").replace("+", "_")
+     return f"{function_name}_{short_hash}"


  def cache_iterator_result() -> Callable[[AsyncIteratorCallable], AsyncIteratorCallable]:
      """
      This decorator caches the results of an async iterator function. It checks if the result is already in the cache
-     and if not, it fetches the all the data and caches it at ocean.attributes cache the end of the iteration.
+     and if not, it fetches all the data and caches it at the end of the iteration.

      The cache will be stored in the scope of the running event and will be removed when the event is finished.
+     If a database is configured, the cache will also be stored in the database.

      For example, you can use this to cache data coming back from the third-party API to avoid making the same request
      multiple times for each kind.
@@ -39,13 +45,15 @@ def cache_iterator_result() -> Callable[[AsyncIteratorCallable], AsyncIteratorCa
      def decorator(func: AsyncIteratorCallable) -> AsyncIteratorCallable:
          @functools.wraps(func)
          async def wrapper(*args: Any, **kwargs: Any) -> Any:
-             # Create Hash key from function name, args and kwargs
              cache_key = hash_func(func.__name__, *args, **kwargs)

              # Check if the result is already in the cache
-             if cache := event.attributes.get(cache_key):
-                 yield cache
-                 return
+             try:
+                 if cache := await ocean.app.cache_provider.get(cache_key):
+                     yield cache
+                     return
+             except FailedToReadCacheError as e:
+                 logger.warning(f"Failed to read cache for {cache_key}: {str(e)}")

              # If not in cache, fetch the data
              cached_results = list()
@@ -54,7 +62,13 @@ def cache_iterator_result() -> Callable[[AsyncIteratorCallable], AsyncIteratorCa
                  yield result

              # Cache the results
-             event.attributes[cache_key] = cached_results
+             try:
+                 await ocean.app.cache_provider.set(
+                     cache_key,
+                     cached_results,
+                 )
+             except FailedToWriteCacheError as e:
+                 logger.warning(f"Failed to write cache for {cache_key}: {str(e)}")
              return

          return wrapper
@@ -71,6 +85,7 @@ def cache_coroutine_result() -> Callable[[AsyncCallable], AsyncCallable]:

      The cache is stored in the scope of the running event and is
      removed when the event is finished.
+     If a database is configured, the cache will also be stored in the database.

      Usage:
      ```python
@@ -84,12 +99,20 @@ def cache_coroutine_result() -> Callable[[AsyncCallable], AsyncCallable]:
          @functools.wraps(func)
          async def wrapper(*args: Any, **kwargs: Any) -> Any:
              cache_key = hash_func(func.__name__, *args, **kwargs)
-
-             if cache := event.attributes.get(cache_key):
-                 return cache
+             try:
+                 if cache := await ocean.app.cache_provider.get(cache_key):
+                     return cache
+             except FailedToReadCacheError as e:
+                 logger.warning(f"Failed to read cache for {cache_key}: {str(e)}")

              result = await func(*args, **kwargs)
-             event.attributes[cache_key] = result
+             try:
+                 await ocean.app.cache_provider.set(
+                     cache_key,
+                     result,
+                 )
+             except FailedToWriteCacheError as e:
+                 logger.warning(f"Failed to write cache for {cache_key}: {str(e)}")
              return result

          return wrapper
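
A short sketch of the decorators as an integration might use them; the function names and returned data are placeholders:

from typing import Any, AsyncGenerator

from port_ocean.utils.cache import cache_coroutine_result, cache_iterator_result


@cache_coroutine_result()
async def fetch_settings() -> dict[str, Any]:
    # Placeholder for a real third-party call; cached under a key derived
    # from the function name plus a short hash of its arguments.
    return {"page_size": 100}


@cache_iterator_result()
async def fetch_projects(pages: int) -> AsyncGenerator[list[dict[str, Any]], None]:
    for page in range(pages):
        yield [{"id": page}]  # the full result list is cached once iteration completes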
port_ocean/utils/ipc.py ADDED
@@ -0,0 +1,30 @@
+ import pickle
+ import os
+ from typing import Any
+
+
+ class FileIPC:
+     def __init__(self, process_id: str, name: str, default_return: Any = None):
+         self.process_id = process_id
+         self.name = name
+         self.dir_path = f"/tmp/p_{self.process_id}"
+         self.file_path = f"{self.dir_path}/{self.name}.pkl"
+         self.default_return = default_return
+         os.makedirs(self.dir_path, exist_ok=True)
+
+     def __del__(self) -> None:
+         self.delete()
+
+     def save(self, object: Any) -> None:
+         with open(self.file_path, "wb") as f:
+             pickle.dump(object, f)
+
+     def load(self) -> Any:
+         if not os.path.exists(self.file_path):
+             return self.default_return
+         with open(self.file_path, "rb") as f:
+             return pickle.load(f)
+
+     def delete(self) -> None:
+         if os.path.exists(self.file_path):
+             os.remove(self.file_path)
port_ocean-0.23.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: port-ocean
- Version: 0.22.12
+ Version: 0.23.1
  Summary: Port Ocean is a CLI tool for managing your Port projects.
  Home-page: https://app.getport.io
  Keywords: ocean,port-ocean,port
port_ocean-0.23.1.dist-info/RECORD CHANGED
@@ -3,11 +3,18 @@ integrations/_infra/Dockerfile.alpine,sha256=7E4Sb-8supsCcseerHwTkuzjHZoYcaHIyxi
  integrations/_infra/Dockerfile.base.builder,sha256=Ogp_fodUE-lD-HgcfcFJd7pr520LPoEU9GCNk6HQBxk,619
  integrations/_infra/Dockerfile.base.runner,sha256=T7Tis9EjmcOl2jUXPMSENbRXMqwoHnnFPhgRmfEMO5Y,357
  integrations/_infra/Dockerfile.dockerignore,sha256=CM1Fxt3I2AvSvObuUZRmy5BNLSGC7ylnbpWzFgD4cso,1163
+ integrations/_infra/Dockerfile.local,sha256=Aqj3y4U6XFS78i5Zz3IfyZkvVmAdB7eEAe6khQaxRxI,876
  integrations/_infra/Makefile,sha256=YgLKvuF_Dw4IA7X98Nus6zIW_3cJ60M1QFGs3imj5c4,2430
+ integrations/_infra/entry_local.sh,sha256=cH2Gd82qDnLKXvjoK1MNay9vdIZzTTF_hrhmvZuYZbg,648
  integrations/_infra/grpcio.sh,sha256=m924poYznoRZ6Tt7Ct8Cs5AV_cmmOx598yIZ3z4DvZE,616
  integrations/_infra/init.sh,sha256=nN8lTrOhB286UfFvD6sJ9YJ-9asT9zVSddQB-RAb7Z4,99
- port_ocean/__init__.py,sha256=J3Mqp7d-CkEe9eMigGG8gSEiVKICY2bf7csNEwVOXk0,294
+ port_ocean/__init__.py,sha256=uMpjg5d_cXgnyCxA_LmICR8zqBmC6Fe9Ivu9hcvJ7EY,313
  port_ocean/bootstrap.py,sha256=CN1M5pVecZ7z_Vfu86Dk2HjFMiuiwt6E_SSOLFCYRMk,1321
+ port_ocean/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ port_ocean/cache/base.py,sha256=XfsIfbE5y22pqr18sJMCCT4kS6h6BztHNpiH2aoubGM,661
+ port_ocean/cache/disk.py,sha256=f1FHP74qWwVxMs4UdOzyoTWW0mPVTwfka8rITLi3ISg,1903
+ port_ocean/cache/errors.py,sha256=KZ7c3L9k1e0Btw-BBERAxjONKkmQoH7EllTvFEiqQEA,145
+ port_ocean/cache/memory.py,sha256=w4Jhvpa56j1vQ1XQ-XFe8KQPVkVzLsF8RmgwZm4XUCA,1246
  port_ocean/cli/__init__.py,sha256=ZjTGS305llhbjC2BH2KkVj34gCASBGwqc5HZEO_0T_Q,328
  port_ocean/cli/cli.py,sha256=RvWTELEn5YFw9aM0vaNqm5YqZZrL50ILaBs27ptiGl0,57
  port_ocean/cli/commands/__init__.py,sha256=Y9Q6jeYw_ZAZ-mdfE_5DZTdS2KHhieQZoUTggk_AkwM,369
@@ -62,7 +69,7 @@ port_ocean/clients/port/utils.py,sha256=osFyAjw7Y5Qf2uVSqC7_RTCQfijiL1zS74JJM0go
  port_ocean/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  port_ocean/config/base.py,sha256=x1gFbzujrxn7EJudRT81C6eN9WsYAb3vOHwcpcpX8Tc,6370
  port_ocean/config/dynamic.py,sha256=qOFkRoJsn_BW7581omi_AoMxoHqasf_foxDQ_G11_SI,2030
- port_ocean/config/settings.py,sha256=pqDn733zNJYxUH-y2oTofHz8_WKekw-4QwhovAzO1nw,6207
+ port_ocean/config/settings.py,sha256=kVXF5_Jr93qW4xDlYXbfehDlQjpv4REjiSAQWePKfYs,6438
  port_ocean/consumers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  port_ocean/consumers/kafka_consumer.py,sha256=N8KocjBi9aR0BOPG8hgKovg-ns_ggpEjrSxqSqF_BSo,4710
  port_ocean/context/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -113,9 +120,9 @@ port_ocean/core/integrations/mixins/events.py,sha256=2L7P3Jhp8XBqddh2_o9Cn4N261n
  port_ocean/core/integrations/mixins/handler.py,sha256=mZ7-0UlG3LcrwJttFbMe-R4xcOU2H_g33tZar7PwTv8,3771
  port_ocean/core/integrations/mixins/live_events.py,sha256=8HklZmlyffYY_LeDe8xbt3Tb08rlLkqVhFF-2NQeJP4,4126
  port_ocean/core/integrations/mixins/sync.py,sha256=Vm_898pLKBwfVewtwouDWsXoxcOLicnAy6pzyqqk6U8,4053
- port_ocean/core/integrations/mixins/sync_raw.py,sha256=8RDzVXiN-8yFKycvq5g4S0bkgskMQnQZfdxz2kC1cgk,29796
- port_ocean/core/integrations/mixins/utils.py,sha256=_Pax41H3U75HZRPkrFbcSYdT4NDGb1LtdF_anuIcj1o,2347
- port_ocean/core/models.py,sha256=YpJ2XOB3Zt9_M-rcMrMjugFNzBDg2hCUKgqvEt7now0,2348
+ port_ocean/core/integrations/mixins/sync_raw.py,sha256=XIVx_Y9TM8TCMzuoNowYGNtQG98n2pLTCFFHWbkIbTo,32176
+ port_ocean/core/integrations/mixins/utils.py,sha256=0rzzFnxrFNaVLHXShfDda5zjO8WwEUBW9oPWxnDsaXQ,2878
+ port_ocean/core/models.py,sha256=MKfq69zGbFRzo0I2HRDUvSbz_pjrtcFVsD5B4Qwa3fw,2538
  port_ocean/core/ocean_types.py,sha256=4VipWFOHEh_d9LmWewQccwx1p2dtrRYW0YURVgNsAjo,1398
  port_ocean/core/utils/entity_topological_sorter.py,sha256=MDUjM6OuDy4Xj68o-7InNN0w1jqjxeDfeY8U02vySNI,3081
  port_ocean/core/utils/utils.py,sha256=XJ6ZZBR5hols19TcX4Bh49ygSNhPt3MLncLR-g41GTA,6858
@@ -131,7 +138,7 @@ port_ocean/exceptions/utils.py,sha256=gjOqpi-HpY1l4WlMFsGA9yzhxDhajhoGGdDDyGbLnq
  port_ocean/exceptions/webhook_processor.py,sha256=yQYazg53Y-ohb7HfViwq1opH_ZUuUdhHSRxcUNveFpI,114
  port_ocean/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  port_ocean/helpers/async_client.py,sha256=SRlP6o7_FCSY3UHnRlZdezppePVxxOzZ0z861vE3K40,1783
- port_ocean/helpers/metric/metric.py,sha256=F7R5JGmyGPsvqBY7wlDfOc15GX28-Kslbw99Y0aETv0,12785
+ port_ocean/helpers/metric/metric.py,sha256=iktHKXQNzkLYHgCLWA5wxRDvAMJrJIMoGQYFXV83mH0,12973
  port_ocean/helpers/metric/utils.py,sha256=Wnr-6HwVwBtYJ3so44OkhDRs8udLMSB1oduzl2-zRHo,781
  port_ocean/helpers/retry.py,sha256=gmS4YxM6N4fboFp7GSgtOzyBJemxs46bnrz4L4rDS6Y,16136
  port_ocean/log/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -139,23 +146,26 @@ port_ocean/log/handlers.py,sha256=ncVjgqrZRh6BhyRrA6DQG86Wsbxph1yWYuEC0cWfe-Q,36
  port_ocean/log/logger_setup.py,sha256=0K3zVG0YYrYOWEV8-rCGks1o-bMRxgHXlqawu9w_tSw,2656
  port_ocean/log/sensetive.py,sha256=lVKiZH6b7TkrZAMmhEJRhcl67HNM94e56x12DwFgCQk,2920
  port_ocean/middlewares.py,sha256=9wYCdyzRZGK1vjEJ28FY_DkfwDNENmXp504UKPf5NaQ,2727
- port_ocean/ocean.py,sha256=8yUw9pDBGkqBHza_PKtINz_7G69Ia6o1m8H-SvlKOOk,7462
+ port_ocean/ocean.py,sha256=h0d-lOf7FQdrRylEglD1MqNzHk-OdvwAorFWyiT6UBo,8825
  port_ocean/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- port_ocean/run.py,sha256=COoRSmLG4hbsjIW5DzhV0NYVegI9xHd1POv6sg4U1No,2217
+ port_ocean/run.py,sha256=CmKz14bxfdOooNbQ5QqH1MwX-XLYVG4NgT4KbrzFaqI,2216
  port_ocean/sonar-project.properties,sha256=X_wLzDOkEVmpGLRMb2fg9Rb0DxWwUFSvESId8qpvrPI,73
  port_ocean/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ port_ocean/tests/cache/__init__.py,sha256=mFSl2_of1u8WxZIypvvHkobHsukM3_RjUfCMZQtzZeI,33
+ port_ocean/tests/cache/test_disk_cache.py,sha256=5-GXpUepomW4yCWkm43LwzNKlMmH6BOa5cYaXccXAKQ,2702
+ port_ocean/tests/cache/test_memory_cache.py,sha256=xlwIOBU0RVLYYJU83l_aoZDzZ6QIDwz-fcKfRkR6dFk,1653
  port_ocean/tests/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  port_ocean/tests/clients/oauth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  port_ocean/tests/clients/oauth/test_oauth_client.py,sha256=2XVMQUalDpiD539Z7_dk5BK_ngXQzsTmb2lNBsfEm9c,3266
  port_ocean/tests/clients/port/mixins/test_entities.py,sha256=Zq_wKTymxJ0R8lHKztvEV6lN__3FZk8uTSIVpKCE6NA,1815
  port_ocean/tests/clients/port/mixins/test_organization_mixin.py,sha256=zzKYz3h8dl4Z5A2QG_924m0y9U6XTth1XYOfwNrd_24,914
  port_ocean/tests/conftest.py,sha256=JXASSS0IY0nnR6bxBflhzxS25kf4iNaABmThyZ0mZt8,101
- port_ocean/tests/core/conftest.py,sha256=BHfi7egDVNRpg61lHZlWj81_ohUG7DEVMdFe9yX-vkc,5517
+ port_ocean/tests/core/conftest.py,sha256=tTOxB8HlCmXgSsXhtPI6xYgdpbpemxfBhA_gBDBuACQ,6115
  port_ocean/tests/core/defaults/test_common.py,sha256=sR7RqB3ZYV6Xn6NIg-c8k5K6JcGsYZ2SCe_PYX5vLYM,5560
  port_ocean/tests/core/handlers/entities_state_applier/test_applier.py,sha256=WNg1fWZsXu0MDnz9-ahRiPb_OPofWx7E8wxBx0cyZKs,8946
  port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py,sha256=8WpMn559Mf0TFWmloRpZrVgr6yWwyA0C4n2lVHCtyq4,13596
  port_ocean/tests/core/handlers/mixins/test_live_events.py,sha256=iAwVpr3n3PIkXQLw7hxd-iB_SR_vyfletVXJLOmyz28,12480
- port_ocean/tests/core/handlers/mixins/test_sync_raw.py,sha256=ZjAWXpheHa61M9nIj4FUGKt9xMeI4Z1AvE6Nko-uru8,43482
+ port_ocean/tests/core/handlers/mixins/test_sync_raw.py,sha256=QHFigCpvYHf6sVirLBI23pMFyZCn4TcWaaFbHtUjoFA,42441
  port_ocean/tests/core/handlers/port_app_config/test_api.py,sha256=eJZ6SuFBLz71y4ca3DNqKag6d6HUjNJS0aqQPwiLMTI,1999
  port_ocean/tests/core/handlers/port_app_config/test_base.py,sha256=hSh556bJM9zuELwhwnyKSfd9z06WqWXIfe-6hCl5iKI,9799
  port_ocean/tests/core/handlers/queue/test_local_queue.py,sha256=9Ly0HzZXbs6Rbl_bstsIdInC3h2bgABU3roP9S_PnJM,2582
@@ -177,19 +187,20 @@ port_ocean/tests/test_metric.py,sha256=gDdeJcqJDQ_o3VrYrW23iZyw2NuUsyATdrygSXhcD
  port_ocean/tests/test_ocean.py,sha256=bsXKGTVEjwLSbR7-qSmI4GZ-EzDo0eBE3TNSMsWzYxM,1502
  port_ocean/tests/test_smoke.py,sha256=uix2uIg_yOm8BHDgHw2hTFPy1fiIyxBGW3ENU_KoFlo,2557
  port_ocean/tests/utils/test_async_iterators.py,sha256=3PLk1emEXekb8LcC5GgVh3OicaX15i5WyaJT_eFnu_4,1336
- port_ocean/tests/utils/test_cache.py,sha256=GzoS8xGCBDbBcPwSDbdimsMMkRvJATrBC7UmFhdW3fw,4906
+ port_ocean/tests/utils/test_cache.py,sha256=MIYzHt1DeVJ_2KNpVfnUaivNlmdzXDBC5ZeixJPPKL8,8591
  port_ocean/utils/__init__.py,sha256=KMGnCPXZJbNwtgxtyMycapkDz8tpSyw23MSYT3iVeHs,91
  port_ocean/utils/async_http.py,sha256=aDsw3gQIMwt6qLegbZtkHqD8em48tKvbITnblsrTY3g,1260
  port_ocean/utils/async_iterators.py,sha256=CPXskYWkhkZtAG-ducEwM8537t3z5usPEqXR9vcivzw,3715
- port_ocean/utils/cache.py,sha256=RgfN4SjjHrEkbqUChyboeD1mrXomolUUjsJtvbkmr3U,3353
+ port_ocean/utils/cache.py,sha256=tRwPomG2VIxx8ZNi4QYH6Yc47d9yYV1A7Hx-L_fX4Dg,4494
+ port_ocean/utils/ipc.py,sha256=BMVUxdftf0i7Z2Xp8KMFlttUjZhTE7VUCpY4SBBnoVY,896
  port_ocean/utils/misc.py,sha256=0q2cJ5psqxn_5u_56pT7vOVQ3shDM02iC1lzyWQ_zl0,2098
  port_ocean/utils/queue_utils.py,sha256=KWWl8YVnG-glcfIHhM6nefY-2sou_C6DVP1VynQwzB4,2762
  port_ocean/utils/repeat.py,sha256=U2OeCkHPWXmRTVoPV-VcJRlQhcYqPWI5NfmPlb1JIbc,3229
  port_ocean/utils/signal.py,sha256=mMVq-1Ab5YpNiqN4PkiyTGlV_G0wkUDMMjTZp5z3pb0,1514
  port_ocean/utils/time.py,sha256=pufAOH5ZQI7gXvOvJoQXZXZJV-Dqktoj9Qp9eiRwmJ4,1939
  port_ocean/version.py,sha256=UsuJdvdQlazzKGD3Hd5-U7N69STh8Dq9ggJzQFnu9fU,177
- port_ocean-0.22.12.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
- port_ocean-0.22.12.dist-info/METADATA,sha256=8OkTPjlMlUdWRx3Pk5cY8_1CkhYEjHhTgzpm1j4GCNw,6765
- port_ocean-0.22.12.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- port_ocean-0.22.12.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
- port_ocean-0.22.12.dist-info/RECORD,,
+ port_ocean-0.23.1.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+ port_ocean-0.23.1.dist-info/METADATA,sha256=0SgpycGus6heRD2nQPUfrIhkYMLSLPJ0PW2r8qxx3uA,6764
+ port_ocean-0.23.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ port_ocean-0.23.1.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
+ port_ocean-0.23.1.dist-info/RECORD,,