port-ocean 0.14.0__py3-none-any.whl → 0.14.4__py3-none-any.whl

This diff compares publicly available package versions as they were released to their public registry; it is provided for informational purposes only.

This version of port-ocean has been flagged as potentially problematic.

@@ -0,0 +1,56 @@
1
+ ARG BASE_BUILDER_PYTHON_IMAGE=ghcr.io/port-labs/port-ocean-base-builder:latest
2
+ ARG BASE_RUNNER_PYTHON_IMAGE=ghcr.io/port-labs/port-ocean-base-runner:latest
3
+
4
+ FROM ${BASE_BUILDER_PYTHON_IMAGE} AS base
5
+
6
+ ARG BUILD_CONTEXT
7
+ ARG BUILDPLATFORM
8
+
9
+ ENV LIBRDKAFKA_VERSION=1.9.2 \
10
+ PYTHONUNBUFFERED=1 \
11
+ POETRY_VIRTUALENVS_IN_PROJECT=1 \
12
+ PIP_ROOT_USER_ACTION=ignore
13
+
14
+ WORKDIR /app
15
+
16
+ COPY ./${BUILD_CONTEXT}/pyproject.toml ./${BUILD_CONTEXT}/poetry.lock /app/
17
+
18
+ RUN poetry install --without dev --no-root --no-interaction --no-ansi --no-cache
19
+
20
+ FROM ${BASE_RUNNER_PYTHON_IMAGE} AS prod
21
+
22
+ ARG INTEGRATION_VERSION
23
+ ARG BUILD_CONTEXT
24
+
25
+ ENV LIBRDKAFKA_VERSION=1.9.2
26
+
27
+ RUN apt-get update \
28
+ && apt-get install -y \
29
+ ca-certificates \
30
+ openssl \
31
+ curl \
32
+ && apt-get clean
33
+
34
+ LABEL INTEGRATION_VERSION=${INTEGRATION_VERSION}
35
+ # Used to ensure that new integrations will be public, see https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility
36
+ LABEL org.opencontainers.image.source=https://github.com/port-labs/ocean
37
+
38
+ ENV PIP_ROOT_USER_ACTION=ignore
39
+
40
+ WORKDIR /app
41
+
42
+ # Copy the application code
43
+ COPY ./${BUILD_CONTEXT} /app
44
+
45
+ # Copy dependencies from the build stage
46
+ COPY --from=base /app/.venv /app/.venv
47
+
48
+ COPY ./integrations/_infra/init.sh /app/init.sh
49
+
50
+ # Ensure that ocean is available for all in path
51
+ RUN chmod a+x /app/.venv/bin/ocean
52
+
53
+ RUN chmod a+x /app/init.sh
54
+ RUN ln -s /app/.venv/bin/ocean /usr/bin/ocean
55
+ # Run the application
56
+ CMD ["bash", "/app/init.sh"]
@@ -0,0 +1,108 @@
1
+ ARG BASE_PYTHON_IMAGE=alpine:3.20.1
2
+
3
+ FROM ${BASE_PYTHON_IMAGE} AS base
4
+
5
+ ARG BUILD_CONTEXT
6
+ ARG BUILDPLATFORM
7
+
8
+ ENV LIBRDKAFKA_VERSION=1.9.2 \
9
+ PYTHONUNBUFFERED=1 \
10
+ PIP_DISABLE_PIP_VERSION_CHECK=on \
11
+ POETRY_NO_INTERACTION=1 \
12
+ PIP_NO_CACHE_DIR=on \
13
+ PIP_DEFAULT_TIMEOUT=100 \
14
+ GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=1 \
15
+ PYTHONDONTWRITEBYTECODE=1
16
+
17
+ # Install system dependencies and libraries
18
+ RUN \
19
+ apk add --no-cache \
20
+ gcc \
21
+ g++ \
22
+ musl-dev \
23
+ build-base \
24
+ bash \
25
+ oniguruma-dev \
26
+ make \
27
+ autoconf \
28
+ automake \
29
+ libtool \
30
+ curl \
31
+ openssl-dev \
32
+ cargo \
33
+ pkgconfig \
34
+ linux-headers \
35
+ libstdc++ \
36
+ libffi-dev \
37
+ py3-grpcio \
38
+ py3-protobuf \
39
+ python3-dev \
40
+ # librdkafka-dev \
41
+ # Install community librdkafka-dev since the default in alpine is older
42
+ && echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/main" >> /etc/apk/repositories \
43
+ && echo "@edgecommunity http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories \
44
+ && apk add --no-cache \
45
+ alpine-sdk \
46
+ "librdkafka@edgecommunity>=${LIBRDKAFKA_VERSION}" \
47
+ "librdkafka-dev@edgecommunity>=${LIBRDKAFKA_VERSION}" \
48
+ && curl -sSL https://install.python-poetry.org | python3 - \
49
+ && apk upgrade gcc linux-headers libstdc++ gcc g++ --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main/ \
50
+ && ln -s /root/.local/bin/poetry /usr/bin/poetry \
51
+ && poetry config virtualenvs.in-project true
52
+
53
+ WORKDIR /app
54
+
55
+ COPY ./${BUILD_CONTEXT}/pyproject.toml ./${BUILD_CONTEXT}/poetry.lock /app/
56
+
57
+ COPY ./integrations/_infra/grpcio.sh /app/grpcio.sh
58
+
59
+ RUN chmod a+x /app/grpcio.sh && /app/grpcio.sh "${BUILDPLATFORM}"
60
+
61
+ RUN export GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=1 \
62
+ && poetry install --without dev --no-root --no-interaction --no-ansi --no-cache
63
+
64
+ FROM ${BASE_PYTHON_IMAGE} AS prod
65
+
66
+ ARG INTEGRATION_VERSION
67
+ ARG BUILD_CONTEXT
68
+
69
+ ENV LIBRDKAFKA_VERSION=1.9.2
70
+
71
+ LABEL INTEGRATION_VERSION=${INTEGRATION_VERSION}
72
+ # Used to ensure that new integrations will be public, see https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility
73
+ LABEL org.opencontainers.image.source=https://github.com/port-labs/ocean
74
+
75
+ # Install only runtime dependencies
76
+ RUN \
77
+ apk add --no-cache \
78
+ bash \
79
+ oniguruma-dev \
80
+ python3 \
81
+ # Install community librdkafka-dev since the default in alpine is older
82
+ && echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/main" >> /etc/apk/repositories \
83
+ && echo "@edgecommunity http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories \
84
+ && apk update \
85
+ && apk add --no-cache \
86
+ alpine-sdk \
87
+ "librdkafka@edgecommunity>=${LIBRDKAFKA_VERSION}" \
88
+ "librdkafka-dev@edgecommunity>=${LIBRDKAFKA_VERSION}" \
89
+ # Fix security issues
90
+ && apk upgrade busybox libcrypto3 libssl3 --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main/
91
+
92
+ WORKDIR /app
93
+
94
+ # Copy the application code
95
+ COPY ./${BUILD_CONTEXT} /app
96
+
97
+ # Copy dependencies from the build stage
98
+ COPY --from=base /app/.venv /app/.venv
99
+
100
+ COPY ./integrations/_infra/init.sh /app/init.sh
101
+
102
+ # Ensure that ocean is available for all in path
103
+ RUN chmod a+x /app/.venv/bin/ocean
104
+
105
+ RUN chmod a+x /app/init.sh
106
+ RUN ln -s /app/.venv/bin/ocean /usr/bin/ocean
107
+ # Run the application
108
+ CMD ["bash", "/app/init.sh"]
@@ -0,0 +1,26 @@
1
+ ARG BASE_PYTHON_IMAGE=debian:trixie-slim
2
+ # debian:trixie-slim - Python 3.12
3
+ FROM ${BASE_PYTHON_IMAGE}
4
+
5
+ LABEL org.opencontainers.image.source=https://github.com/port-labs/ocean
6
+
7
+ ENV LIBRDKAFKA_VERSION=1.9.2 \
8
+ PYTHONUNBUFFERED=1 \
9
+ POETRY_VIRTUALENVS_IN_PROJECT=1 \
10
+ PIP_ROOT_USER_ACTION=ignore
11
+
12
+ RUN apt-get update \
13
+ && apt-get install -y \
14
+ --no-install-recommends \
15
+ wget \
16
+ g++ \
17
+ libssl-dev \
18
+ autoconf \
19
+ automake \
20
+ libtool \
21
+ curl \
22
+ librdkafka-dev \
23
+ python3 \
24
+ python3-pip \
25
+ python3-poetry \
26
+ && apt-get clean
@@ -0,0 +1,13 @@
1
+ ARG BASE_PYTHON_IMAGE=debian:trixie-slim
2
+ # debian:trixie-slim - Python 3.12
3
+ FROM ${BASE_PYTHON_IMAGE}
4
+
5
+ LABEL org.opencontainers.image.source=https://github.com/port-labs/ocean
6
+
7
+ ENV LIBRDKAFKA_VERSION=1.9.2
8
+
9
+ ENV PIP_ROOT_USER_ACTION=ignore
10
+
11
+ RUN apt-get update \
12
+ && apt-get install -y --no-install-recommends librdkafka-dev python3 \
13
+ && apt-get clean
@@ -0,0 +1,94 @@
1
+
2
+ # Git
3
+ **/.git
4
+ **/.gitignore
5
+ **/.gitattributes
6
+
7
+
8
+ # CI
9
+ **/.codeclimate.yml
10
+ **/.travis.yml
11
+ **/.taskcluster.yml
12
+
13
+ # Docker
14
+ **/docker-compose.yml
15
+ **/Dockerfile
16
+ **/.docker
17
+ **/.dockerignore
18
+
19
+ # Byte-compiled / optimized / DLL files
20
+ **/__pycache__/
21
+ **/*.py[cod]
22
+
23
+ # C extensions
24
+ **/*.so
25
+
26
+ # Distribution / packaging
27
+ **/.Python
28
+ **/env/
29
+ **/build/
30
+ **/develop-eggs/
31
+ **/dist/
32
+ **/downloads/
33
+ **/eggs/
34
+ **/lib/
35
+ **/lib64/
36
+ **/parts/
37
+ **/sdist/
38
+ **/var/
39
+ **/*.egg-info/
40
+ **/.installed.cfg
41
+ **/*.egg
42
+
43
+ # PyInstaller
44
+ # Usually these files are written by a python script from a template
45
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
46
+ **/*.manifest
47
+ **/*.spec
48
+
49
+ # Installer logs
50
+ **/pip-log.txt
51
+ **/pip-delete-this-directory.txt
52
+
53
+ # Unit test / coverage reports
54
+ **/htmlcov/
55
+ **/.tox/
56
+ **/.coverage
57
+ **/.cache
58
+ **/nosetests.xml
59
+ **/coverage.xml
60
+
61
+ # Translations
62
+ **/*.mo
63
+ **/*.pot
64
+
65
+ # Django stuff:
66
+ **/*.log
67
+
68
+ # Sphinx documentation
69
+ **/docs/_build/
70
+
71
+ # PyBuilder
72
+ **/target/
73
+
74
+ # Virtual environment
75
+ **/.env
76
+ **/.venv/
77
+ **/venv/
78
+
79
+ # PyCharm
80
+ **/.idea
81
+
82
+ # Python mode for VIM
83
+ **/.ropeproject
84
+
85
+ # Vim swap files
86
+ **/*.swp
87
+
88
+ # VS Code
89
+ **/.vscode/
90
+
91
+ **/*.md
92
+ **/.ruff_cache
93
+ **/changelog
94
+ **/tests
@@ -0,0 +1,89 @@
1
+ ACTIVATE := . .venv/bin/activate
2
+
3
+ define run_checks
4
+ exit_code=0; \
5
+ cd $1; \
6
+ echo "Running poetry check"; \
7
+ poetry check || exit_code=$$?;\
8
+ echo "Running mypy"; \
9
+ mypy . --exclude '/\.venv/' || exit_code=$$?; \
10
+ echo "Running ruff"; \
11
+ ruff check . || exit_code=$$?; \
12
+ echo "Running black"; \
13
+ black --check . || exit_code=$$?; \
14
+ echo "Running yamllint"; \
15
+ yamllint . || exit_code=$$?; \
16
+ if [ $$exit_code -eq 1 ]; then \
17
+ echo "\033[0;31mOne or more checks failed with exit code $$exit_code\033[0m"; \
18
+ else \
19
+ echo "\033[0;32mAll checks executed successfully.\033[0m"; \
20
+ fi; \
21
+ exit $$exit_code
22
+ endef
23
+
24
+ define install_poetry
25
+ if ! command -v poetry &> /dev/null; then \
26
+ pip install --upgrade pip; \
27
+ pip install poetry; \
28
+ else \
29
+ echo "Poetry is already installed."; \
30
+ fi
31
+ endef
32
+
33
+ define deactivate_virtualenv
34
+ if [ -n "$$VIRTUAL_ENV" ]; then \
35
+ unset VIRTUAL_ENV; \
36
+ unset PYTHONHOME; \
37
+ unset -f pydoc >/dev/null 2>&1; \
38
+ OLD_PATH="$$PATH"; \
39
+ PATH=$$(echo -n "$$PATH" | awk -v RS=: -v ORS=: '/\/virtualenv\/bin$$/ {next} {print}'); \
40
+ export PATH; \
41
+ hash -r; \
42
+ echo "Deactivated the virtual environment."; \
43
+ fi
44
+ endef
45
+
46
+ .SILENT: install install/prod install/local-core lint lint/fix run test clean
47
+
48
+ install:
49
+ $(call deactivate_virtualenv) && \
50
+ $(call install_poetry) && \
51
+ poetry install --with dev
52
+
53
+ install/local-core: install
54
+ # NOTE: This is a temporary change that shouldn't be committed
55
+ $(ACTIVATE) && pip install -e ../../
56
+
57
+ install/prod:
58
+ poetry install --without dev --no-root --no-interaction --no-ansi --no-cache
59
+ $(call install_poetry) && \
60
+
61
+ lint:
62
+ $(ACTIVATE) && \
63
+ $(call run_checks,.)
64
+
65
+ lint/fix:
66
+ $(ACTIVATE) && \
67
+ black .
68
+ ruff check --fix .
69
+
70
+ run:
71
+ $(ACTIVATE) && ocean sail
72
+
73
+ test:
74
+ $(ACTIVATE) && poetry run pytest -n auto
75
+
76
+ clean:
77
+ @find . -name '.venv' -type d -exec rm -rf {} \;
78
+ @find . -name '*.pyc' -exec rm -rf {} \;
79
+ @find . -name '__pycache__' -exec rm -rf {} \;
80
+ @find . -name 'Thumbs.db' -exec rm -rf {} \;
81
+ @find . -name '*~' -exec rm -rf {} \;
82
+ rm -rf .cache
83
+ rm -rf build
84
+ rm -rf dist
85
+ rm -rf *.egg-info
86
+ rm -rf htmlcov
87
+ rm -rf .tox/
88
+ rm -rf docs/_build
89
+ rm -rf dist/
@@ -0,0 +1,18 @@
1
+ #!/usr/bin/env bash
2
+ PLATFORM=${1}
3
+
4
+ if [[ ! $(grep -q 'grpcio' ./poetry.lock) ]]; then
5
+ echo 'grpcio not present, skipping explicit build'
6
+ else
7
+ echo 'found grpcio, checking platform'
8
+ fi
9
+
10
+ if [[ "${PLATFORM}" == "linux/arm64" ]]; then
11
+ echo "On arm, need to explicitly install grpcio"
12
+ poetry env use "$(which python)"
13
+ echo "${VIRTUAL_ENV}"
14
+ poetry run pip install --upgrade pip
15
+ GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1 GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=1 poetry run pip install 'grpcio==1.66.2'
16
+ else
17
+ echo "Not on arm, no need to explicitly install grpcio"
18
+ fi
@@ -0,0 +1,5 @@
1
+ if test -e /usr/local/share/ca-certificates/cert.crt; then
2
+ update-ca-certificates
3
+ fi
4
+
5
+ ocean sail
@@ -3,17 +3,27 @@ import shutil
3
3
 
4
4
 
5
5
  def handle_private_integration_flags():
6
- infra_make_file = "../_infra/Makefile"
7
- target_link_make_file = os.path.join("./Makefile")
6
+ target_dir = os.path.join(
7
+ "{{cookiecutter._output_dir}}", "{{cookiecutter.integration_slug}}"
8
+ )
9
+ root_dir = os.path.join("{{ cookiecutter._repo_dir }}", "../../../")
10
+ infra_make_file = os.path.join(root_dir, "integrations/_infra/Makefile")
11
+ infra_dockerfile = os.path.join(root_dir, "integrations/_infra/Dockerfile.deb")
12
+ infra_dockerignore = os.path.join(
13
+ root_dir, "integrations/_infra/Dockerfile.dockerignore"
14
+ )
15
+ target_link_make_file = os.path.join(target_dir, "./Makefile")
16
+ target_link_dockerfile = os.path.join(target_dir, "./Dockerfile")
17
+ target_link_dockerignore = os.path.join(target_dir, "./.dockerignore")
8
18
 
9
19
  if "{{ cookiecutter.is_private_integration }}" == "True":
10
20
  shutil.copyfile(infra_make_file, target_link_make_file)
21
+ shutil.copyfile(infra_dockerfile, target_link_dockerfile)
22
+ shutil.copyfile(infra_dockerignore, target_link_dockerignore)
11
23
  os.remove("sonar-project.properties")
12
24
  return
13
25
 
14
26
  os.symlink(infra_make_file, target_link_make_file)
15
- os.remove("Dockerfile")
16
- os.remove(".dockerignore")
17
27
 
18
28
 
19
29
  if __name__ == "__main__":
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
16
16
  # period of time, before raising an exception.
17
17
  # The max_connections value can't be too high, as it will cause the application to run out of memory.
18
18
  # The max_keepalive_connections can't be too high, as it will cause the application to run out of available connections.
19
- PORT_HTTP_MAX_CONNECTIONS_LIMIT = 200
19
+ PORT_HTTP_MAX_CONNECTIONS_LIMIT = 100
20
20
  PORT_HTTP_MAX_KEEP_ALIVE_CONNECTIONS = 50
21
21
  PORT_HTTP_TIMEOUT = 60.0
22
22
 
@@ -28,13 +28,19 @@ PORT_HTTPX_LIMITS = httpx.Limits(
28
28
 
29
29
  _http_client: LocalStack[httpx.AsyncClient] = LocalStack()
30
30
 
31
+ FIVE_MINUETS = 60 * 5
32
+
31
33
 
32
34
  def _get_http_client_context(port_client: "PortClient") -> httpx.AsyncClient:
33
35
  client = _http_client.top
34
36
  if client is None:
35
37
  client = OceanAsyncClient(
36
38
  TokenRetryTransport,
37
- transport_kwargs={"port_client": port_client},
39
+ transport_kwargs={
40
+ "port_client": port_client,
41
+ "max_backoff_wait": FIVE_MINUETS,
42
+ "base_delay": 0.3,
43
+ },
38
44
  timeout=PORT_HTTPX_TIMEOUT,
39
45
  limits=PORT_HTTPX_LIMITS,
40
46
  )
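The hunk above halves the Port client's connection pool (PORT_HTTP_MAX_CONNECTIONS_LIMIT drops from 200 to 100) and threads a five-minute retry ceiling and a 0.3 s base delay into the retry transport. A minimal sketch of how such pool settings translate into an httpx client; the helper function is hypothetical, while `httpx.Limits`, `httpx.Timeout`, and `httpx.AsyncClient` are real httpx APIs:

```python
import httpx

# Values mirrored from the hunk above; the helper itself is illustrative.
PORT_HTTP_MAX_CONNECTIONS_LIMIT = 100
PORT_HTTP_MAX_KEEP_ALIVE_CONNECTIONS = 50
PORT_HTTP_TIMEOUT = 60.0


def build_port_http_client() -> httpx.AsyncClient:
    """Build an async client constrained by the pool limits above."""
    limits = httpx.Limits(
        max_connections=PORT_HTTP_MAX_CONNECTIONS_LIMIT,
        max_keepalive_connections=PORT_HTTP_MAX_KEEP_ALIVE_CONNECTIONS,
    )
    return httpx.AsyncClient(limits=limits, timeout=httpx.Timeout(PORT_HTTP_TIMEOUT))
```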
@@ -55,14 +55,14 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
55
55
  HTTPStatus.GATEWAY_TIMEOUT,
56
56
  ]
57
57
  )
58
- MAX_BACKOFF_WAIT = 60
58
+ MAX_BACKOFF_WAIT_IN_SECONDS = 60
59
59
 
60
60
  def __init__(
61
61
  self,
62
62
  wrapped_transport: Union[httpx.BaseTransport, httpx.AsyncBaseTransport],
63
63
  max_attempts: int = 10,
64
- max_backoff_wait: float = MAX_BACKOFF_WAIT,
65
- backoff_factor: float = 0.1,
64
+ max_backoff_wait: float = MAX_BACKOFF_WAIT_IN_SECONDS,
65
+ base_delay: float = 0.1,
66
66
  jitter_ratio: float = 0.1,
67
67
  respect_retry_after_header: bool = True,
68
68
  retryable_methods: Iterable[str] | None = None,
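Because `base_delay` replaces the public `backoff_factor` keyword, callers that construct the transport by keyword need the new name. A hedged sketch of wiring it into an httpx client, assuming the `port_ocean.helpers.retry` import path listed in the wheel RECORD; the numeric values are illustrative:

```python
import httpx
from port_ocean.helpers.retry import RetryTransport  # path per the wheel RECORD

# Illustrative values; only the parameter names come from the signature above.
retry_transport = RetryTransport(
    wrapped_transport=httpx.AsyncHTTPTransport(),
    max_attempts=5,
    base_delay=0.3,        # formerly `backoff_factor`
    max_backoff_wait=300,  # cap individual waits at five minutes
)
client = httpx.AsyncClient(transport=retry_transport)
```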
@@ -81,7 +81,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
81
81
  max_backoff_wait (float, optional):
82
82
  The maximum amount of time (in seconds) to wait before retrying a request.
83
83
  Defaults to 60.
84
- backoff_factor (float, optional):
84
+ base_delay (float, optional):
85
85
  The factor by which the waiting time will be multiplied in each retry attempt.
86
86
  Defaults to 0.1.
87
87
  jitter_ratio (float, optional):
@@ -105,7 +105,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
105
105
  )
106
106
 
107
107
  self._max_attempts = max_attempts
108
- self._backoff_factor = backoff_factor
108
+ self._base_delay = base_delay
109
109
  self._respect_retry_after_header = respect_retry_after_header
110
110
  self._retryable_methods = (
111
111
  frozenset(retryable_methods)
@@ -132,13 +132,18 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
132
132
  httpx.Response: The response received.
133
133
 
134
134
  """
135
- transport: httpx.BaseTransport = self._wrapped_transport # type: ignore
136
- if request.method in self._retryable_methods:
137
- send_method = partial(transport.handle_request)
138
- response = self._retry_operation(request, send_method)
139
- else:
140
- response = transport.handle_request(request)
141
- return response
135
+ try:
136
+ transport: httpx.BaseTransport = self._wrapped_transport # type: ignore
137
+ if request.method in self._retryable_methods:
138
+ send_method = partial(transport.handle_request)
139
+ response = self._retry_operation(request, send_method)
140
+ else:
141
+ response = transport.handle_request(request)
142
+ return response
143
+ except Exception as e:
144
+ if not self._is_retryable_method(request) and self._logger is not None:
145
+ self._logger.exception(f"{repr(e)} - {request.url}", exc_info=e)
146
+ raise e
142
147
 
143
148
  async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
144
149
  """Sends an HTTP request, possibly with retries.
@@ -150,13 +155,19 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
150
155
  The response.
151
156
 
152
157
  """
153
- transport: httpx.AsyncBaseTransport = self._wrapped_transport # type: ignore
154
- if self._is_retryable_method(request):
155
- send_method = partial(transport.handle_async_request)
156
- response = await self._retry_operation_async(request, send_method)
157
- else:
158
- response = await transport.handle_async_request(request)
159
- return response
158
+ try:
159
+ transport: httpx.AsyncBaseTransport = self._wrapped_transport # type: ignore
160
+ if self._is_retryable_method(request):
161
+ send_method = partial(transport.handle_async_request)
162
+ response = await self._retry_operation_async(request, send_method)
163
+ else:
164
+ response = await transport.handle_async_request(request)
165
+ return response
166
+ except Exception as e:
167
+ # Retryable methods are logged via _log_error
168
+ if not self._is_retryable_method(request) and self._logger is not None:
169
+ self._logger.exception(f"{repr(e)} - {request.url}", exc_info=e)
170
+ raise e
160
171
 
161
172
  async def aclose(self) -> None:
162
173
  """
@@ -255,7 +266,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
255
266
  except ValueError:
256
267
  pass
257
268
 
258
- backoff = self._backoff_factor * (2 ** (attempts_made - 1))
269
+ backoff = self._base_delay * (2 ** (attempts_made - 1))
259
270
  jitter = (backoff * self._jitter_ratio) * random.choice([1, -1])
260
271
  total_backoff = backoff + jitter
261
272
  return min(total_backoff, self._max_backoff_wait)
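The renamed attribute leaves the backoff formula itself unchanged: attempt n waits `base_delay * 2 ** (n - 1)` seconds, nudged by ±`jitter_ratio`, and capped at `max_backoff_wait`. A standalone sketch of that schedule, reimplemented here for illustration rather than taken from the library:

```python
import random


def backoff_schedule(
    attempts: int,
    base_delay: float = 0.1,
    jitter_ratio: float = 0.1,
    max_backoff_wait: float = 60.0,
) -> list[float]:
    """Reproduce the wait times implied by the formula in the hunk above."""
    waits = []
    for attempt in range(1, attempts + 1):
        backoff = base_delay * (2 ** (attempt - 1))
        jitter = backoff * jitter_ratio * random.choice([1, -1])
        waits.append(min(backoff + jitter, max_backoff_wait))
    return waits


# With the values passed by the Port client hunk (base_delay=0.3, 300 s cap),
# ten attempts grow roughly 0.3 s, 0.6 s, 1.2 s, ... up to about 150 s.
print(backoff_schedule(10, base_delay=0.3, max_backoff_wait=300.0))
```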
@@ -0,0 +1,189 @@
1
+ from typing import Any
2
+ import asyncio
3
+ from port_ocean.utils import cache # Import the module where 'event' is used
4
+ import pytest
5
+ from dataclasses import dataclass, field
6
+ from typing import AsyncGenerator, AsyncIterator, List, TypeVar
7
+
8
+
9
+ @dataclass
10
+ class EventContext:
11
+ attributes: dict[str, Any] = field(default_factory=dict)
12
+
13
+
14
+ @pytest.fixture
15
+ def event() -> EventContext:
16
+ return EventContext()
17
+
18
+
19
+ T = TypeVar("T")
20
+
21
+
22
+ async def collect_iterator_results(iterator: AsyncIterator[List[T]]) -> List[T]:
23
+ results = []
24
+ async for item in iterator:
25
+ results.extend(item)
26
+ return results
27
+
28
+
29
+ @pytest.mark.asyncio
30
+ async def test_cache_iterator_result(event: EventContext, monkeypatch: Any) -> None:
31
+ monkeypatch.setattr(cache, "event", event)
32
+
33
+ call_count = 0
34
+
35
+ @cache.cache_iterator_result()
36
+ async def sample_iterator(x: int) -> AsyncGenerator[List[int], None]:
37
+ nonlocal call_count
38
+ call_count += 1
39
+ for i in range(x):
40
+ await asyncio.sleep(0.1)
41
+ yield [i]
42
+
43
+ result1 = await collect_iterator_results(sample_iterator(3))
44
+ assert result1 == [0, 1, 2]
45
+ assert call_count == 1
46
+
47
+ result2 = await collect_iterator_results(sample_iterator(3))
48
+ assert result2 == [0, 1, 2]
49
+ assert call_count == 1
50
+
51
+ result3 = await collect_iterator_results(sample_iterator(4))
52
+ assert result3 == [0, 1, 2, 3]
53
+ assert call_count == 2
54
+
55
+
56
+ @pytest.mark.asyncio
57
+ async def test_cache_iterator_result_with_kwargs(
58
+ event: EventContext, monkeypatch: Any
59
+ ) -> None:
60
+ monkeypatch.setattr(cache, "event", event)
61
+
62
+ call_count = 0
63
+
64
+ @cache.cache_iterator_result()
65
+ async def sample_iterator(x: int, y: int = 1) -> AsyncGenerator[List[int], None]:
66
+ nonlocal call_count
67
+ call_count += 1
68
+ for i in range(x * y):
69
+ await asyncio.sleep(0.1)
70
+ yield [i]
71
+
72
+ result1 = await collect_iterator_results(sample_iterator(2, y=2))
73
+ assert result1 == [0, 1, 2, 3]
74
+ assert call_count == 1
75
+
76
+ result2 = await collect_iterator_results(sample_iterator(2, y=2))
77
+ assert result2 == [0, 1, 2, 3]
78
+ assert call_count == 1
79
+
80
+ result3 = await collect_iterator_results(sample_iterator(2, y=3))
81
+ assert result3 == [0, 1, 2, 3, 4, 5]
82
+ assert call_count == 2
83
+
84
+
85
+ @pytest.mark.asyncio
86
+ async def test_cache_iterator_result_no_cache(
87
+ event: EventContext, monkeypatch: Any
88
+ ) -> None:
89
+ monkeypatch.setattr(cache, "event", event)
90
+
91
+ call_count = 0
92
+
93
+ @cache.cache_iterator_result()
94
+ async def sample_iterator(x: int) -> AsyncGenerator[List[int], None]:
95
+ nonlocal call_count
96
+ call_count += 1
97
+ for i in range(x):
98
+ await asyncio.sleep(0.1)
99
+ yield [i]
100
+
101
+ result1 = await collect_iterator_results(sample_iterator(3))
102
+ assert result1 == [0, 1, 2]
103
+ assert call_count == 1
104
+
105
+ event.attributes.clear()
106
+
107
+ result2 = await collect_iterator_results(sample_iterator(3))
108
+ assert result2 == [0, 1, 2]
109
+ assert call_count == 2
110
+
111
+
112
+ @pytest.mark.asyncio
113
+ async def test_cache_coroutine_result(event: EventContext, monkeypatch: Any) -> None:
114
+ monkeypatch.setattr(cache, "event", event)
115
+
116
+ call_count = 0
117
+
118
+ @cache.cache_coroutine_result()
119
+ async def sample_coroutine(x: int) -> int:
120
+ nonlocal call_count
121
+ call_count += 1
122
+ await asyncio.sleep(0.1)
123
+ return x * 2
124
+
125
+ result1 = await sample_coroutine(2)
126
+ assert result1 == 4
127
+ assert call_count == 1
128
+
129
+ result2 = await sample_coroutine(2)
130
+ assert result2 == 4
131
+ assert call_count == 1
132
+
133
+ result3 = await sample_coroutine(3)
134
+ assert result3 == 6
135
+ assert call_count == 2
136
+
137
+
138
+ @pytest.mark.asyncio
139
+ async def test_cache_coroutine_result_with_kwargs(
140
+ event: EventContext, monkeypatch: Any
141
+ ) -> None:
142
+ monkeypatch.setattr(cache, "event", event)
143
+
144
+ call_count = 0
145
+
146
+ @cache.cache_coroutine_result()
147
+ async def sample_coroutine(x: int, y: int = 1) -> int:
148
+ nonlocal call_count
149
+ call_count += 1
150
+ await asyncio.sleep(0.1)
151
+ return x * y
152
+
153
+ result1 = await sample_coroutine(2, y=3)
154
+ assert result1 == 6
155
+ assert call_count == 1
156
+
157
+ result2 = await sample_coroutine(2, y=3)
158
+ assert result2 == 6
159
+ assert call_count == 1
160
+
161
+ result3 = await sample_coroutine(2, y=4)
162
+ assert result3 == 8
163
+ assert call_count == 2
164
+
165
+
166
+ @pytest.mark.asyncio
167
+ async def test_cache_coroutine_result_no_cache(
168
+ event: EventContext, monkeypatch: Any
169
+ ) -> None:
170
+ monkeypatch.setattr(cache, "event", event)
171
+
172
+ call_count = 0
173
+
174
+ @cache.cache_coroutine_result()
175
+ async def sample_coroutine(x: int) -> int:
176
+ nonlocal call_count
177
+ call_count += 1
178
+ await asyncio.sleep(0.1)
179
+ return x * 2
180
+
181
+ result1 = await sample_coroutine(2)
182
+ assert result1 == 4
183
+ assert call_count == 1
184
+
185
+ event.attributes.clear()
186
+
187
+ result2 = await sample_coroutine(2)
188
+ assert result2 == 4
189
+ assert call_count == 2
port_ocean/utils/cache.py CHANGED
@@ -1,9 +1,10 @@
1
1
  import functools
2
2
  import hashlib
3
- from typing import Callable, AsyncIterator, Any
3
+ from typing import Callable, AsyncIterator, Awaitable, Any
4
4
  from port_ocean.context.event import event
5
5
 
6
6
  AsyncIteratorCallable = Callable[..., AsyncIterator[list[Any]]]
7
+ AsyncCallable = Callable[..., Awaitable[Any]]
7
8
 
8
9
 
9
10
  def hash_func(function_name: str, *args: Any, **kwargs: Any) -> str:
@@ -59,3 +60,38 @@ def cache_iterator_result() -> Callable[[AsyncIteratorCallable], AsyncIteratorCa
59
60
  return wrapper
60
61
 
61
62
  return decorator
63
+
64
+
65
+ def cache_coroutine_result() -> Callable[[AsyncCallable], AsyncCallable]:
66
+ """Coroutine version of `cache_iterator_result` from port_ocean.utils.cache
67
+
68
+ Decorator that caches the result of a coroutine function.
69
+ It checks if the result is already in the cache, and if not,
70
+ fetches the result, caches it, and returns the cached value.
71
+
72
+ The cache is stored in the scope of the running event and is
73
+ removed when the event is finished.
74
+
75
+ Usage:
76
+ ```python
77
+ @cache_coroutine_result()
78
+ async def my_coroutine_function():
79
+ # Your code here
80
+ ```
81
+ """
82
+
83
+ def decorator(func: AsyncCallable) -> AsyncCallable:
84
+ @functools.wraps(func)
85
+ async def wrapper(*args: Any, **kwargs: Any) -> Any:
86
+ cache_key = hash_func(func.__name__, *args, **kwargs)
87
+
88
+ if cache := event.attributes.get(cache_key):
89
+ return cache
90
+
91
+ result = await func(*args, **kwargs)
92
+ event.attributes[cache_key] = result
93
+ return result
94
+
95
+ return wrapper
96
+
97
+ return decorator
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: port-ocean
3
- Version: 0.14.0
3
+ Version: 0.14.4
4
4
  Summary: Port Ocean is a CLI tool for managing your Port projects.
5
5
  Home-page: https://app.getport.io
6
6
  Keywords: ocean,port-ocean,port
@@ -1,3 +1,11 @@
1
+ integrations/_infra/Dockerfile.Deb,sha256=iEyen_5YTa0syesJWS7NKuE-Oo4rM34Wwgjq9JzEjdA,1519
2
+ integrations/_infra/Dockerfile.alpine,sha256=iauglyEzz5uEPBxsN-9SLFr6qca3Tf4b0DPXKFFeKq4,3482
3
+ integrations/_infra/Dockerfile.base.builder,sha256=LwKLfJvQfKksMqacAT_aDQxFMC2Ty5fFKIa0Eu4QcCc,619
4
+ integrations/_infra/Dockerfile.base.runner,sha256=dsjTWgLQFm4x5gcm-IPhwkDv-M6VRKwdf-qct457h2c,357
5
+ integrations/_infra/Dockerfile.dockerignore,sha256=CM1Fxt3I2AvSvObuUZRmy5BNLSGC7ylnbpWzFgD4cso,1163
6
+ integrations/_infra/Makefile,sha256=vnuFuDau8AYiNgif3gfx0I17X1HqiPxcIS6uUqE9q1Y,2186
7
+ integrations/_infra/grpcio.sh,sha256=m924poYznoRZ6Tt7Ct8Cs5AV_cmmOx598yIZ3z4DvZE,616
8
+ integrations/_infra/init.sh,sha256=nN8lTrOhB286UfFvD6sJ9YJ-9asT9zVSddQB-RAb7Z4,99
1
9
  port_ocean/__init__.py,sha256=J3Mqp7d-CkEe9eMigGG8gSEiVKICY2bf7csNEwVOXk0,294
2
10
  port_ocean/bootstrap.py,sha256=CN1M5pVecZ7z_Vfu86Dk2HjFMiuiwt6E_SSOLFCYRMk,1321
3
11
  port_ocean/cli/__init__.py,sha256=ZjTGS305llhbjC2BH2KkVj34gCASBGwqc5HZEO_0T_Q,328
@@ -16,8 +24,7 @@ port_ocean/cli/commands/version.py,sha256=hEuIEIcm6Zkamz41Z9nxeSM_4g3oNlAgWwQyDG
16
24
  port_ocean/cli/cookiecutter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
25
  port_ocean/cli/cookiecutter/cookiecutter.json,sha256=N5UrAP2e5JbgEDz_WTQFIZlzSveME6x32sHeA7idjh0,481
18
26
  port_ocean/cli/cookiecutter/extensions.py,sha256=eQNjZvy2enDkJpvMbBGil77Xk9-38f862wfnmCjdoBc,446
19
- port_ocean/cli/cookiecutter/hooks/post_gen_project.py,sha256=xZbDPSmfP-ZXNlPaqQDsYLuNfdhFpLX9fIshiAd94Qg,535
20
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.dockerignore,sha256=9Mz_WI7XBpKzlJ7ILb4vlcuzYkh98Ql3bP_5GHN1sRY,1034
27
+ port_ocean/cli/cookiecutter/hooks/post_gen_project.py,sha256=tFqtsjSbu7HMN32WIiFO37S1a_dfHezvdPwmM6MmNJk,1182
21
28
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.env.example,sha256=LnNPRe3RnzjWPL4tNLYEQiMvFEZHSy3ceqwQEapcpwE,92
22
29
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.gitignore,sha256=32p1lDW_g5hyBz486GWfDeR9m7ikFlASVri5a8vmNoo,2698
23
30
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/.gitignore,sha256=kCpRPdl3S_jqYYZaOrc0-xa6-l3KqVjNRXc6jCkd_-Q,12
@@ -26,7 +33,6 @@ port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/po
26
33
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml,sha256=ie8bI_QOZnJJVG-N1e4KlMebdYts4LUNO_kKw8nGdhA,531
27
34
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CHANGELOG.md,sha256=XVSgyxfXJZoZmtwaGbQ8XmCapIthe4E7flfuJub-m_s,338
28
35
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CONTRIBUTING.md,sha256=ZQwD3K35q0wugHZmb1z5wnynmn0uuzwGFSpjm7GieZU,259
29
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Dockerfile,sha256=LsH3vZqqEJkzeQG44cE7JkvPAuh_WPSqYam4YoMvG3M,328
30
36
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/README.md,sha256=5VZmgDRW9gO4d8UuzkujslOIDfIDBiAGL2Hd74HK770,468
31
37
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/changelog/.gitignore,sha256=JAo-DTfS6GthQGP1NH6wLU-ZymwlTea4KHH_jZVTKn0,14
32
38
  port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/debug.py,sha256=_TRsA2s6GV2E3CTI8CHcsH-ZuH4_Eh5-juDXWaET0ho,65
@@ -48,7 +54,7 @@ port_ocean/clients/port/mixins/integrations.py,sha256=t8OSa7Iopnpp8IOEcp3a7WgwOc
48
54
  port_ocean/clients/port/mixins/migrations.py,sha256=A6896oJF6WbFL2WroyTkMzr12yhVyWqGoq9dtLNSKBY,1457
49
55
  port_ocean/clients/port/retry_transport.py,sha256=PtIZOAZ6V-ncpVysRUsPOgt8Sf01QLnTKB5YeKBxkJk,1861
50
56
  port_ocean/clients/port/types.py,sha256=nvlgiAq4WH5_F7wQbz_GAWl-faob84LVgIjZ2Ww5mTk,451
51
- port_ocean/clients/port/utils.py,sha256=5B6rHgiVrtiL4YWh7Eq7_ncIeDwrDsB7jIvRik5xH8c,2373
57
+ port_ocean/clients/port/utils.py,sha256=SjhgmJXAqH2JqXfGy8GoGwzUYiJvUhWDrJyxQcenxZc,2512
52
58
  port_ocean/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
53
59
  port_ocean/config/base.py,sha256=x1gFbzujrxn7EJudRT81C6eN9WsYAb3vOHwcpcpX8Tc,6370
54
60
  port_ocean/config/dynamic.py,sha256=qOFkRoJsn_BW7581omi_AoMxoHqasf_foxDQ_G11_SI,2030
@@ -109,7 +115,7 @@ port_ocean/exceptions/port_defaults.py,sha256=45Bno5JEB-GXztvKsy8mw7TrydQmw13-4J
109
115
  port_ocean/exceptions/utils.py,sha256=gjOqpi-HpY1l4WlMFsGA9yzhxDhajhoGGdDDyGbLnqI,197
110
116
  port_ocean/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
111
117
  port_ocean/helpers/async_client.py,sha256=SRlP6o7_FCSY3UHnRlZdezppePVxxOzZ0z861vE3K40,1783
112
- port_ocean/helpers/retry.py,sha256=IQ0RfQ2T5o6uoZh2WW2nrFH5TT6K_k3y2Im0HDp5j9Y,15059
118
+ port_ocean/helpers/retry.py,sha256=WO4yDFUd9NexZ0kESqxeTxBxNabU7_utKzgTj2-kMaM,15632
113
119
  port_ocean/log/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
114
120
  port_ocean/log/handlers.py,sha256=ncVjgqrZRh6BhyRrA6DQG86Wsbxph1yWYuEC0cWfe-Q,3631
115
121
  port_ocean/log/logger_setup.py,sha256=CoEDowe5OwNOL_5clU6Z4faktfh0VWaOTS0VLmyhHjw,2404
@@ -133,18 +139,19 @@ port_ocean/tests/helpers/smoke_test.py,sha256=_9aJJFRfuGJEg2D2YQJVJRmpreS6gEPHHQ
133
139
  port_ocean/tests/log/test_handlers.py,sha256=bTOGnuj8fMIEXepwYblRvcg0FKqApCdyCBtAQZ2BlXM,2115
134
140
  port_ocean/tests/test_smoke.py,sha256=uix2uIg_yOm8BHDgHw2hTFPy1fiIyxBGW3ENU_KoFlo,2557
135
141
  port_ocean/tests/utils/test_async_iterators.py,sha256=3PLk1emEXekb8LcC5GgVh3OicaX15i5WyaJT_eFnu_4,1336
142
+ port_ocean/tests/utils/test_cache.py,sha256=GzoS8xGCBDbBcPwSDbdimsMMkRvJATrBC7UmFhdW3fw,4906
136
143
  port_ocean/utils/__init__.py,sha256=KMGnCPXZJbNwtgxtyMycapkDz8tpSyw23MSYT3iVeHs,91
137
144
  port_ocean/utils/async_http.py,sha256=arnH458TExn2Dju_Sy6pHas_vF5RMWnOp-jBz5WAAcE,1226
138
145
  port_ocean/utils/async_iterators.py,sha256=CPXskYWkhkZtAG-ducEwM8537t3z5usPEqXR9vcivzw,3715
139
- port_ocean/utils/cache.py,sha256=3KItZDE2yVrbVDr-hoM8lNna8s2dlpxhP4ICdLjH4LQ,2231
146
+ port_ocean/utils/cache.py,sha256=RgfN4SjjHrEkbqUChyboeD1mrXomolUUjsJtvbkmr3U,3353
140
147
  port_ocean/utils/misc.py,sha256=0q2cJ5psqxn_5u_56pT7vOVQ3shDM02iC1lzyWQ_zl0,2098
141
148
  port_ocean/utils/queue_utils.py,sha256=KWWl8YVnG-glcfIHhM6nefY-2sou_C6DVP1VynQwzB4,2762
142
149
  port_ocean/utils/repeat.py,sha256=0EFWM9d8lLXAhZmAyczY20LAnijw6UbIECf5lpGbOas,3231
143
150
  port_ocean/utils/signal.py,sha256=K-6kKFQTltcmKDhtyZAcn0IMa3sUpOHGOAUdWKgx0_E,1369
144
151
  port_ocean/utils/time.py,sha256=pufAOH5ZQI7gXvOvJoQXZXZJV-Dqktoj9Qp9eiRwmJ4,1939
145
152
  port_ocean/version.py,sha256=UsuJdvdQlazzKGD3Hd5-U7N69STh8Dq9ggJzQFnu9fU,177
146
- port_ocean-0.14.0.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
147
- port_ocean-0.14.0.dist-info/METADATA,sha256=A_jE7vo5o-cE6dE8fLzRZGFh7wOJFNquJN5LyZ_ygCA,6673
148
- port_ocean-0.14.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
149
- port_ocean-0.14.0.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
150
- port_ocean-0.14.0.dist-info/RECORD,,
153
+ port_ocean-0.14.4.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
154
+ port_ocean-0.14.4.dist-info/METADATA,sha256=I8hKNbKejXTSk8cm5WKrjCZb1ElaKrNtuvETQoRVh9k,6673
155
+ port_ocean-0.14.4.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
156
+ port_ocean-0.14.4.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
157
+ port_ocean-0.14.4.dist-info/RECORD,,
@@ -1,94 +0,0 @@
1
- # Git
2
- .git
3
- .gitignore
4
- .gitattributes
5
-
6
-
7
- # CI
8
- .codeclimate.yml
9
- .travis.yml
10
- .taskcluster.yml
11
-
12
- # Docker
13
- docker-compose.yml
14
- Dockerfile
15
- .docker
16
- .dockerignore
17
-
18
- # Byte-compiled / optimized / DLL files
19
- **/__pycache__/
20
- **/*.py[cod]
21
-
22
- # C extensions
23
- *.so
24
-
25
- # Distribution / packaging
26
- .Python
27
- env/
28
- build/
29
- develop-eggs/
30
- dist/
31
- downloads/
32
- eggs/
33
- lib/
34
- lib64/
35
- parts/
36
- sdist/
37
- var/
38
- *.egg-info/
39
- .installed.cfg
40
- *.egg
41
-
42
- # PyInstaller
43
- # Usually these files are written by a python script from a template
44
- # before PyInstaller builds the exe, so as to inject date/other infos into it.
45
- *.manifest
46
- *.spec
47
-
48
- # Installer logs
49
- pip-log.txt
50
- pip-delete-this-directory.txt
51
-
52
- # Unit test / coverage reports
53
- htmlcov/
54
- .tox/
55
- .coverage
56
- .cache
57
- nosetests.xml
58
- coverage.xml
59
-
60
- # Translations
61
- *.mo
62
- *.pot
63
-
64
- # Django stuff:
65
- *.log
66
-
67
- # Sphinx documentation
68
- docs/_build/
69
-
70
- # PyBuilder
71
- target/
72
-
73
- # Virtual environment
74
- .env
75
- .venv/
76
- venv/
77
-
78
- # PyCharm
79
- .idea
80
-
81
- # Python mode for VIM
82
- .ropeproject
83
- **/.ropeproject
84
-
85
- # Vim swap files
86
- **/*.swp
87
-
88
- # VS Code
89
- .vscode/
90
-
91
- *.md
92
- **/.ruff_cache
93
- **/cahangelog
94
- **/tests
@@ -1,15 +0,0 @@
1
- FROM python:3.11-slim-bookworm
2
-
3
- ENV LIBRDKAFKA_VERSION 1.9.2
4
-
5
- WORKDIR /app
6
-
7
- RUN apt update && \
8
- apt install -y wget make g++ libssl-dev autoconf automake libtool curl librdkafka-dev && \
9
- apt-get clean
10
-
11
- COPY . /app
12
-
13
- RUN export POETRY_VIRTUALENVS_CREATE=false && make install/prod && pip cache purge
14
-
15
- ENTRYPOINT ocean sail