port-ocean 0.5.5__py3-none-any.whl → 0.17.8__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release.
This version of port-ocean might be problematic.
- integrations/_infra/Dockerfile.Deb +56 -0
- integrations/_infra/Dockerfile.alpine +108 -0
- integrations/_infra/Dockerfile.base.builder +26 -0
- integrations/_infra/Dockerfile.base.runner +13 -0
- integrations/_infra/Dockerfile.dockerignore +94 -0
- {port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}} → integrations/_infra}/Makefile +21 -8
- integrations/_infra/grpcio.sh +18 -0
- integrations/_infra/init.sh +5 -0
- port_ocean/bootstrap.py +1 -1
- port_ocean/cli/commands/defaults/clean.py +3 -1
- port_ocean/cli/commands/new.py +42 -7
- port_ocean/cli/commands/sail.py +7 -1
- port_ocean/cli/cookiecutter/cookiecutter.json +3 -0
- port_ocean/cli/cookiecutter/hooks/post_gen_project.py +20 -3
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.env.example +6 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/blueprints.json +41 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/port-app-config.yml +16 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml +6 -7
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CHANGELOG.md +1 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CONTRIBUTING.md +7 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/changelog/.gitignore +1 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/main.py +16 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/pyproject.toml +21 -10
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/tests/test_sample.py +2 -0
- port_ocean/clients/port/authentication.py +16 -4
- port_ocean/clients/port/client.py +17 -0
- port_ocean/clients/port/mixins/blueprints.py +7 -8
- port_ocean/clients/port/mixins/entities.py +108 -53
- port_ocean/clients/port/mixins/integrations.py +23 -34
- port_ocean/clients/port/retry_transport.py +0 -5
- port_ocean/clients/port/utils.py +9 -3
- port_ocean/config/base.py +16 -16
- port_ocean/config/dynamic.py +2 -0
- port_ocean/config/settings.py +79 -11
- port_ocean/context/event.py +18 -5
- port_ocean/context/ocean.py +14 -3
- port_ocean/core/defaults/clean.py +10 -3
- port_ocean/core/defaults/common.py +25 -9
- port_ocean/core/defaults/initialize.py +111 -100
- port_ocean/core/event_listener/__init__.py +8 -0
- port_ocean/core/event_listener/base.py +49 -10
- port_ocean/core/event_listener/factory.py +9 -1
- port_ocean/core/event_listener/http.py +11 -3
- port_ocean/core/event_listener/kafka.py +24 -5
- port_ocean/core/event_listener/once.py +96 -4
- port_ocean/core/event_listener/polling.py +16 -14
- port_ocean/core/event_listener/webhooks_only.py +41 -0
- port_ocean/core/handlers/__init__.py +1 -2
- port_ocean/core/handlers/entities_state_applier/base.py +4 -1
- port_ocean/core/handlers/entities_state_applier/port/applier.py +29 -87
- port_ocean/core/handlers/entities_state_applier/port/order_by_entities_dependencies.py +5 -2
- port_ocean/core/handlers/entity_processor/base.py +26 -22
- port_ocean/core/handlers/entity_processor/jq_entity_processor.py +253 -45
- port_ocean/core/handlers/port_app_config/base.py +55 -15
- port_ocean/core/handlers/port_app_config/models.py +24 -5
- port_ocean/core/handlers/resync_state_updater/__init__.py +5 -0
- port_ocean/core/handlers/resync_state_updater/updater.py +84 -0
- port_ocean/core/integrations/base.py +5 -7
- port_ocean/core/integrations/mixins/events.py +3 -1
- port_ocean/core/integrations/mixins/sync.py +4 -2
- port_ocean/core/integrations/mixins/sync_raw.py +209 -74
- port_ocean/core/integrations/mixins/utils.py +1 -1
- port_ocean/core/models.py +44 -0
- port_ocean/core/ocean_types.py +29 -11
- port_ocean/core/utils/entity_topological_sorter.py +90 -0
- port_ocean/core/utils/utils.py +109 -0
- port_ocean/debug_cli.py +5 -0
- port_ocean/exceptions/core.py +4 -0
- port_ocean/exceptions/port_defaults.py +0 -2
- port_ocean/helpers/retry.py +85 -24
- port_ocean/log/handlers.py +23 -2
- port_ocean/log/logger_setup.py +8 -1
- port_ocean/log/sensetive.py +25 -10
- port_ocean/middlewares.py +10 -2
- port_ocean/ocean.py +57 -24
- port_ocean/run.py +10 -5
- port_ocean/tests/__init__.py +0 -0
- port_ocean/tests/clients/port/mixins/test_entities.py +53 -0
- port_ocean/tests/conftest.py +4 -0
- port_ocean/tests/core/defaults/test_common.py +166 -0
- port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +350 -0
- port_ocean/tests/core/handlers/mixins/test_sync_raw.py +552 -0
- port_ocean/tests/core/test_utils.py +73 -0
- port_ocean/tests/core/utils/test_entity_topological_sorter.py +99 -0
- port_ocean/tests/helpers/__init__.py +0 -0
- port_ocean/tests/helpers/fake_port_api.py +191 -0
- port_ocean/tests/helpers/fixtures.py +46 -0
- port_ocean/tests/helpers/integration.py +31 -0
- port_ocean/tests/helpers/ocean_app.py +66 -0
- port_ocean/tests/helpers/port_client.py +21 -0
- port_ocean/tests/helpers/smoke_test.py +82 -0
- port_ocean/tests/log/test_handlers.py +71 -0
- port_ocean/tests/test_smoke.py +74 -0
- port_ocean/tests/utils/test_async_iterators.py +45 -0
- port_ocean/tests/utils/test_cache.py +189 -0
- port_ocean/utils/async_iterators.py +109 -0
- port_ocean/utils/cache.py +37 -1
- port_ocean/utils/misc.py +22 -4
- port_ocean/utils/queue_utils.py +88 -0
- port_ocean/utils/signal.py +1 -4
- port_ocean/utils/time.py +54 -0
- {port_ocean-0.5.5.dist-info → port_ocean-0.17.8.dist-info}/METADATA +27 -19
- port_ocean-0.17.8.dist-info/RECORD +164 -0
- {port_ocean-0.5.5.dist-info → port_ocean-0.17.8.dist-info}/WHEEL +1 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.dockerignore +0 -94
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Dockerfile +0 -15
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/config.yaml +0 -17
- port_ocean/core/handlers/entities_state_applier/port/validate_entity_relations.py +0 -40
- port_ocean/core/utils.py +0 -65
- port_ocean-0.5.5.dist-info/RECORD +0 -129
- {port_ocean-0.5.5.dist-info → port_ocean-0.17.8.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.5.5.dist-info → port_ocean-0.17.8.dist-info}/entry_points.txt +0 -0
port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/main.py CHANGED

@@ -15,6 +15,21 @@ async def on_resync(kind: str) -> list[dict[Any, Any]]:
     # return [{"some_project_key": "someProjectValue", ...}]
     # if kind == "issues":
     # return [{"some_issue_key": "someIssueValue", ...}]
+
+    # Initial stub to show complete flow, replace this with your own logic
+    if kind == "{{ cookiecutter.integration_slug }}-example-kind":
+        return [
+            {
+                "my_custom_id": f"id_{x}",
+                "my_custom_text": f"very long text with {x} in it",
+                "my_special_score": x * 32 % 3,
+                "my_component": f"component-{x}",
+                "my_service": f"service-{x %2}",
+                "my_enum": "VALID" if x % 2 == 0 else "FAILED",
+            }
+            for x in range(25)
+        ]
+
     return []


@@ -38,4 +53,4 @@ async def on_resync(kind: str) -> list[dict[Any, Any]]:
 async def on_start() -> None:
     # Something to do when the integration starts
     # For example create a client to query 3rd party services - GitHub, Jira, etc...
-    print("Starting integration")
+    print("Starting {{ cookiecutter.integration_slug }} integration")
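For reference, the added stub is a plain list comprehension, so each raw record it returns for the example kind can be written out directly. A sketch of the record produced for `x = 3` (the rendered `{{ cookiecutter.integration_slug }}` value depends on the generated project):

```python
# One record produced by the new example-kind stub, evaluated for x = 3
record = {
    "my_custom_id": "id_3",
    "my_custom_text": "very long text with 3 in it",
    "my_special_score": 3 * 32 % 3,  # == 0
    "my_component": "component-3",
    "my_service": "service-1",       # 3 % 2 == 1
    "my_enum": "FAILED",             # odd x -> "FAILED"
}
```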
port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/pyproject.toml CHANGED

@@ -1,25 +1,31 @@
 [tool.poetry]
-name = "{{cookiecutter.
-version = "0.1.0"
+name = "{{cookiecutter.integration_slug}}"
+version = "0.1.0-beta"
 description = "{{cookiecutter.integration_short_description}}"
 authors = ["{{cookiecutter.full_name}} <{{cookiecutter.email}}>"]

 [tool.poetry.dependencies]
-python = "^3.
+python = "^3.12"
 port_ocean = { version = "^{% version %}", extras = ["cli"] }

 [tool.poetry.group.dev.dependencies]
-
-
+# Uncomment this if you want to debug the ocean core together with your integration
+# port_ocean = { path = '../../', develop = true, extras = ['all'] }
+black = "^24.4.2"
 mypy = "^1.3.0"
-
-
+pylint = ">=2.17.4,<4.0.0"
+pytest = ">=8.2,<9.0"
+pytest-asyncio = ">=0.24.0"
+pytest-httpx = ">=0.30.0"
+pytest-xdist = "^3.6.1"
+ruff = "^0.6.3"
 towncrier = "^23.6.0"

 [tool.towncrier]
 directory = "changelog"
 filename = "CHANGELOG.md"
-
+title_format = "## {version} ({project_date})"
+underlines = [""]

 [[tool.towncrier.type]]
 directory = "breaking"

@@ -52,8 +58,8 @@ package = "port_ocean"
 showcontent = true

 [build-system]
-requires = ["poetry>=0.
-build-backend = "poetry.masonry.api"
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"

 [tool.mypy]
 exclude = [

@@ -100,3 +106,8 @@ exclude = '''
     |\.venv
 )/
 '''
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"
+addopts = "-vv -n auto ./tests"
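With `asyncio_mode = "auto"`, pytest-asyncio collects bare `async def` tests without an explicit marker, and `-n auto` spreads them across pytest-xdist workers. A minimal sketch of a test the generated `tests/` directory could hold (the module path and test name are hypothetical; the template's actual `tests/test_sample.py` is not shown in this excerpt):

```python
# tests/test_sample.py - no @pytest.mark.asyncio needed, because
# asyncio_mode = "auto" is set in [tool.pytest.ini_options].
import asyncio


async def test_event_loop_is_available() -> None:
    # Hypothetical check: auto mode runs this coroutine on a managed event loop.
    await asyncio.sleep(0)
    assert asyncio.get_running_loop() is not None
```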
port_ocean/clients/port/authentication.py CHANGED

@@ -1,3 +1,4 @@
+import re
 from typing import Any

 import httpx

@@ -13,11 +14,11 @@ class TokenResponse(BaseModel):
     access_token: str = Field(alias="accessToken")
     expires_in: int = Field(alias="expiresIn")
     token_type: str = Field(alias="tokenType")
-    _retrieved_time: int = PrivateAttr(get_time())
+    _retrieved_time: int = PrivateAttr(default_factory=lambda: int(get_time()))

     @property
     def expired(self) -> bool:
-        return self._retrieved_time + self.expires_in
+        return self._retrieved_time + self.expires_in <= get_time()

     @property
     def full_token(self) -> str:

@@ -46,10 +47,16 @@ class PortAuthentication:

     async def _get_token(self, client_id: str, client_secret: str) -> TokenResponse:
         logger.info(f"Fetching access token for clientId: {client_id}")
-
+        if self._is_personal_token(client_id):
+            logger.warning(
+                "Integration is using personal credentials, make sure to use machine credentials. "
+                "Usage of personal credentials might impose unexpected integration behavior."
+            )
         credentials = {"clientId": client_id, "clientSecret": client_secret}
         response = await self.client.post(
-            f"{self.api_url}/auth/access_token",
+            f"{self.api_url}/auth/access_token",
+            json=credentials,
+            extensions={"retryable": True},
         )
         handle_status_code(response)
         return TokenResponse(**response.json())

@@ -80,3 +87,8 @@ class PortAuthentication:
                 self.client_id, self.client_secret
             )
         return self.last_token_object.full_token
+
+    @staticmethod
+    def _is_personal_token(client_id: str) -> bool:
+        email_regex = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"
+        return re.match(email_regex, client_id) is not None
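The new `_is_personal_token` check is simply an anchored email regex over the client id, so the warning fires for email-style (personal) credentials and stays quiet for machine credentials. A small illustration; the sample ids below are invented:

```python
import re

email_regex = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"

# Hypothetical client ids, only used to illustrate the check
assert re.match(email_regex, "jane.doe@example.com") is not None   # personal -> warning logged
assert re.match(email_regex, "p0rT-Client-1234567890") is None     # machine  -> no warning
```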
port_ocean/clients/port/client.py CHANGED

@@ -13,6 +13,7 @@ from port_ocean.clients.port.utils import (
     get_internal_http_client,
 )
 from port_ocean.exceptions.clients import KafkaCredentialsNotFound
+from typing import Any


 class PortClient(

@@ -75,3 +76,19 @@ class PortClient(
         handle_status_code(response)

         return response.json()["organization"]["id"]
+
+    async def update_integration_state(
+        self, state: dict[str, Any], should_raise: bool = True, should_log: bool = True
+    ) -> dict[str, Any]:
+        if should_log:
+            logger.debug(f"Updating integration resync state with: {state}")
+        response = await self.client.patch(
+            f"{self.api_url}/integration/{self.integration_identifier}/resync-state",
+            headers=await self.auth.headers(),
+            json=state,
+        )
+        handle_status_code(response, should_raise, should_log)
+        if response.is_success and should_log:
+            logger.info("Integration resync state updated successfully")
+
+        return response.json().get("integration", {})
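A hedged sketch of how the new `update_integration_state` helper might be called. The shape of the state payload is not defined in this file (the new `resync_state_updater` handler builds it), so the dictionary and field names below are illustrative only:

```python
from typing import Any


async def report_resync_state(port_client: Any) -> None:
    # Illustrative only: the real payload is assembled by the new
    # port_ocean/core/handlers/resync_state_updater/updater.py; these
    # field names are hypothetical.
    state = {"status": "completed", "lastResyncEnd": "2024-01-01T00:00:00Z"}
    await port_client.update_integration_state(state, should_raise=False)
```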
port_ocean/clients/port/mixins/blueprints.py CHANGED

@@ -64,7 +64,6 @@ class BlueprintClientMixin:
             f"Deleting blueprint with id: {identifier} with all entities: {delete_entities}"
         )
         headers = await self.auth.headers(user_agent_type)
-        response = None

         if not delete_entities:
             response = await self.client.delete(

@@ -83,21 +82,22 @@ class BlueprintClientMixin:
         return response.json().get("migrationId", "")

     async def create_action(
-        self,
+        self, action: dict[str, Any], should_log: bool = True
     ) -> None:
         logger.info(f"Creating action: {action}")
         response = await self.client.post(
-            f"{self.auth.api_url}/
+            f"{self.auth.api_url}/actions",
             json=action,
             headers=await self.auth.headers(),
         )

-        handle_status_code(response)
+        handle_status_code(response, should_log=should_log)

     async def create_scorecard(
         self,
         blueprint_identifier: str,
         scorecard: dict[str, Any],
+        should_log: bool = True,
     ) -> None:
         logger.info(f"Creating scorecard: {scorecard}")
         response = await self.client.post(

@@ -106,11 +106,10 @@ class BlueprintClientMixin:
             headers=await self.auth.headers(),
         )

-        handle_status_code(response)
+        handle_status_code(response, should_log=should_log)

     async def create_page(
-        self,
-        page: dict[str, Any],
+        self, page: dict[str, Any], should_log: bool = True
     ) -> dict[str, Any]:
         logger.info(f"Creating page: {page}")
         response = await self.client.post(

@@ -119,7 +118,7 @@ class BlueprintClientMixin:
             headers=await self.auth.headers(),
         )

-        handle_status_code(response)
+        handle_status_code(response, should_log=should_log)
         return page

     async def delete_page(
port_ocean/clients/port/mixins/entities.py CHANGED

@@ -1,4 +1,5 @@
 import asyncio
+from typing import Any, Literal
 from urllib.parse import quote_plus

 import httpx

@@ -10,7 +11,8 @@ from port_ocean.clients.port.utils import (
     handle_status_code,
     PORT_HTTP_MAX_CONNECTIONS_LIMIT,
 )
-from port_ocean.core.models import Entity
+from port_ocean.core.models import Entity, PortAPIErrorMessage
+from starlette import status


 class EntityClientMixin:

@@ -28,10 +30,30 @@ class EntityClientMixin:
         request_options: RequestOptions,
         user_agent_type: UserAgentType | None = None,
         should_raise: bool = True,
-    ) -> None:
+    ) -> Entity | None | Literal[False]:
+        """
+        This function upserts an entity into Port.
+
+        Usage:
+        ```python
+            upsertedEntity = await self.context.port_client.upsert_entity(
+                entity,
+                event.port_app_config.get_port_request_options(),
+                user_agent_type,
+                should_raise=False,
+            )
+        ```
+        :param entity: An Entity to be upserted
+        :param request_options: A dictionary specifying how to upsert the entity
+        :param user_agent_type: a UserAgentType specifying who is preforming the action
+        :param should_raise: A boolean specifying whether the error should be raised or handled silently
+        :return: [Entity] if the upsert occured successfully
+        :return: [None] will be returned if entity is using search identifier
+        :return: [False] will be returned if upsert failed because of unmet dependency
+        """
         validation_only = request_options["validation_only"]
         async with self.semaphore:
-            logger.
+            logger.debug(
                 f"{'Validating' if validation_only else 'Upserting'} entity: {entity.identifier} of blueprint: {entity.blueprint}"
             )
             headers = await self.auth.headers(user_agent_type)

@@ -47,15 +69,50 @@ class EntityClientMixin:
                     ).lower(),
                     "validation_only": str(validation_only).lower(),
                 },
+                extensions={"retryable": True},
             )
-
         if response.is_error:
             logger.error(
                 f"Error {'Validating' if validation_only else 'Upserting'} "
                 f"entity: {entity.identifier} of "
                 f"blueprint: {entity.blueprint}"
             )
+            result = response.json()
+
+            if (
+                response.status_code == status.HTTP_404_NOT_FOUND
+                and not result.get("ok")
+                and result.get("error") == PortAPIErrorMessage.NOT_FOUND.value
+            ):
+                # Return false to differentiate from `result_entity.is_using_search_identifier`
+                return False
         handle_status_code(response, should_raise)
+        result = response.json()
+
+        result_entity = (
+            Entity.parse_obj(result["entity"]) if result.get("entity") else entity
+        )
+
+        # Happens when upsert fails and search identifier is defined.
+        # We return None to ignore the entity later in the delete process
+        if result_entity.is_using_search_identifier:
+            return None
+
+        # In order to save memory we'll keep only the identifier, blueprint and relations of the
+        # upserted entity result for later calculations
+        reduced_entity = Entity(
+            identifier=result_entity.identifier, blueprint=result_entity.blueprint
+        )
+
+        # Turning dict typed relations (raw search relations) is required
+        # for us to be able to successfully calculate the participation related entities
+        # and ignore the ones that don't as they weren't upserted
+        reduced_entity.relations = {
+            key: None if isinstance(relation, dict) else relation
+            for key, relation in result_entity.relations.items()
+        }
+
+        return reduced_entity

     async def batch_upsert_entities(
         self,

@@ -63,8 +120,8 @@ class EntityClientMixin:
         request_options: RequestOptions,
         user_agent_type: UserAgentType | None = None,
         should_raise: bool = True,
-    ) ->
-        await asyncio.gather(
+    ) -> list[Entity]:
+        modified_entities_results = await asyncio.gather(
             *(
                 self.upsert_entity(
                     entity,

@@ -76,6 +133,17 @@ class EntityClientMixin:
             ),
             return_exceptions=True,
         )
+        entity_results = [
+            entity for entity in modified_entities_results if isinstance(entity, Entity)
+        ]
+        if not should_raise:
+            return entity_results
+
+        for entity_result in modified_entities_results:
+            if isinstance(entity_result, Exception):
+                raise entity_result
+
+        return entity_results

     async def delete_entity(
         self,

@@ -133,23 +201,10 @@ class EntityClientMixin:
             return_exceptions=True,
         )

-    async def
-
-
-
-            f"{self.auth.api_url}/blueprints/{blueprint}/entities/{identifier}",
-            headers=await self.auth.headers(),
-        )
-        if response.is_error:
-            logger.error(
-                f"Error validating "
-                f"entity: {identifier} of "
-                f"blueprint: {blueprint}"
-            )
-        handle_status_code(response)
-
-    async def search_entities(self, user_agent_type: UserAgentType) -> list[Entity]:
-        query = {
+    async def search_entities(
+        self, user_agent_type: UserAgentType, query: dict[Any, Any] | None = None
+    ) -> list[Entity]:
+        default_query = {
             "combinator": "and",
             "rules": [
                 {

@@ -165,6 +220,11 @@ class EntityClientMixin:
             ],
         }

+        if query is None:
+            query = default_query
+        elif query.get("rules"):
+            query["rules"].append(default_query)
+
         logger.info(f"Searching entities with query {query}")
         response = await self.client.post(
             f"{self.auth.api_url}/entities/search",

@@ -174,43 +234,38 @@ class EntityClientMixin:
                 "exclude_calculated_properties": "true",
                 "include": ["blueprint", "identifier"],
             },
+            extensions={"retryable": True},
         )
         handle_status_code(response)
         return [Entity.parse_obj(result) for result in response.json()["entities"]]

-    async def
-
-
-
+    async def search_batch_entities(
+        self, user_agent_type: UserAgentType, entities_to_search: list[Entity]
+    ) -> list[Entity]:
+        search_rules = []
+        for entity in entities_to_search:
+            search_rules.append(
                 {
-                    "
-                    "
-
-
+                    "combinator": "and",
+                    "rules": [
+                        {
+                            "property": "$identifier",
+                            "operator": "=",
+                            "value": entity.identifier,
+                        },
+                        {
+                            "property": "$blueprint",
+                            "operator": "=",
+                            "value": entity.blueprint,
+                        },
+                    ],
                 }
-
-        }
-
-        logger.info(f"Searching dependent entity with body {body}")
-        response = await self.client.post(
-            f"{self.auth.api_url}/entities/search",
-            headers=await self.auth.headers(),
-            json=body,
-        )
-        handle_status_code(response)
-
-        return [Entity.parse_obj(result) for result in response.json()["entities"]]
+            )

-
-
-    ) -> None:
-        logger.info(f"Validating entity {entity.identifier}")
-        await self.upsert_entity(
-            entity,
+        return await self.search_entities(
+            user_agent_type,
             {
-                "
-                "
-                "delete_dependent_entities": False,
-                "validation_only": True,
+                "combinator": "and",
+                "rules": [{"combinator": "or", "rules": search_rules}],
             },
         )
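The widened return type of `upsert_entity` (`Entity | None | Literal[False]`) is worth spelling out: an `Entity` is a slimmed-down copy of the upserted entity, `None` means the entity used a search identifier, and `False` means the upsert failed on an unmet dependency (HTTP 404 with a not-found error body). A sketch of a caller distinguishing the three outcomes; the surrounding `client`, `entity`, `request_options`, and `user_agent_type` objects are assumed to exist:

```python
from typing import Any


async def upsert_and_classify(
    client: Any, entity: Any, request_options: Any, user_agent_type: Any
) -> str:
    # Sketch only: the return values follow the docstring added in this diff.
    result = await client.upsert_entity(
        entity, request_options, user_agent_type, should_raise=False
    )
    if result is False:
        return "unmet dependency (404) - retry after its related entities exist"
    if result is None:
        return "search-identifier entity - excluded from later delete calculations"
    return f"upserted: {result.identifier} ({result.blueprint})"
```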
port_ocean/clients/port/mixins/integrations.py CHANGED

@@ -1,11 +1,11 @@
 from typing import Any, TYPE_CHECKING, Optional, TypedDict
+from urllib.parse import quote_plus

 import httpx
 from loguru import logger
-from starlette import status
-
 from port_ocean.clients.port.authentication import PortAuthentication
 from port_ocean.clients.port.utils import handle_status_code
+from port_ocean.log.sensetive import sensitive_log_filter

 if TYPE_CHECKING:
     from port_ocean.core.handlers.port_app_config.models import PortAppConfig

@@ -42,7 +42,7 @@ class IntegrationClientMixin:
     ) -> dict[str, Any]:
         response = await self._get_current_integration()
         handle_status_code(response, should_raise, should_log)
-        return response.json()
+        return response.json().get("integration", {})

     async def get_log_attributes(self) -> LogAttributes:
         if self._log_attributes is None:

@@ -55,7 +55,7 @@ class IntegrationClientMixin:
         _type: str,
         changelog_destination: dict[str, Any],
         port_app_config: Optional["PortAppConfig"] = None,
-    ) ->
+    ) -> dict:
         logger.info(f"Creating integration with id: {self.integration_identifier}")
         headers = await self.auth.headers()
         json = {

@@ -71,13 +71,14 @@ class IntegrationClientMixin:
             f"{self.auth.api_url}/integration", headers=headers, json=json
         )
         handle_status_code(response)
+        return response.json()["integration"]

     async def patch_integration(
         self,
         _type: str | None = None,
         changelog_destination: dict[str, Any] | None = None,
         port_app_config: Optional["PortAppConfig"] = None,
-    ) ->
+    ) -> dict:
         logger.info(f"Updating integration with id: {self.integration_identifier}")
         headers = await self.auth.headers()
         json: dict[str, Any] = {}

@@ -95,34 +96,7 @@ class IntegrationClientMixin:
             json=json,
         )
         handle_status_code(response)
-
-    async def initialize_integration(
-        self,
-        _type: str,
-        changelog_destination: dict[str, Any],
-        port_app_config: Optional["PortAppConfig"] = None,
-    ) -> None:
-        logger.info(f"Initiating integration with id: {self.integration_identifier}")
-        response = await self._get_current_integration()
-        if response.status_code == status.HTTP_404_NOT_FOUND:
-            await self.create_integration(_type, changelog_destination, port_app_config)
-        else:
-            handle_status_code(response)
-
-            integration = response.json()["integration"]
-            logger.info("Checking for diff in integration configuration")
-            if (
-                integration["changelogDestination"] != changelog_destination
-                or integration["installationAppType"] != _type
-                or integration.get("version") != self.integration_version
-            ):
-                await self.patch_integration(
-                    _type, changelog_destination, port_app_config
-                )
-
-        logger.info(
-            f"Integration with id: {self.integration_identifier} successfully registered"
-        )
+        return response.json()["integration"]

     async def ingest_integration_logs(self, logs: list[dict[str, Any]]) -> None:
         logger.debug("Ingesting logs")

@@ -135,5 +109,20 @@ class IntegrationClientMixin:
                 "logs": logs,
             },
         )
-        handle_status_code(response)
+        handle_status_code(response, should_log=False)
         logger.debug("Logs successfully ingested")
+
+    async def ingest_integration_kind_examples(
+        self, kind: str, data: list[dict[str, Any]], should_log: bool = True
+    ):
+        logger.debug(f"Ingesting examples for kind: {kind}")
+        headers = await self.auth.headers()
+        response = await self.client.post(
+            f"{self.auth.api_url}/integration/{quote_plus(self.integration_identifier)}/kinds/{quote_plus(kind)}/examples",
+            headers=headers,
+            json={
+                "examples": sensitive_log_filter.mask_object(data, full_hide=True),
+            },
+        )
+        handle_status_code(response, should_log=should_log)
+        logger.debug(f"Examples for kind {kind} successfully ingested")
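The new `ingest_integration_kind_examples` method runs the raw kind examples through `sensitive_log_filter.mask_object(..., full_hide=True)` before they leave the process. A hedged sketch of a caller; the kind name and raw records below are invented for illustration:

```python
from typing import Any


async def publish_examples(port_client: Any) -> None:
    # Invented sample records; masking of secret-looking values is handled
    # inside ingest_integration_kind_examples via sensitive_log_filter.
    raw_results = [
        {"id": "repo-1", "token": "ghp_example123"},  # hypothetical data
        {"id": "repo-2", "token": "ghp_example456"},
    ]
    await port_client.ingest_integration_kind_examples(
        "repositories", raw_results[:5], should_log=False
    )
```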
port_ocean/clients/port/retry_transport.py CHANGED

@@ -15,11 +15,6 @@ class TokenRetryTransport(RetryTransport):
         super().__init__(**kwargs)
         self.port_client = port_client

-    def _is_retryable_method(self, request: httpx.Request) -> bool:
-        return super()._is_retryable_method(request) or request.url.path.endswith(
-            "/auth/access_token"
-        )
-
     async def _handle_unauthorized(self, response: httpx.Response) -> None:
         token = await self.port_client.auth.token
         response.headers["Authorization"] = f"Bearer {token}"
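Note that the removed `_is_retryable_method` override, which hard-coded the `/auth/access_token` path as retryable, is replaced by per-request `extensions={"retryable": True}` flags on the auth and entity calls shown above. The retry logic itself lives in `port_ocean/helpers/retry.py`, which is not part of this excerpt; the snippet below is only a sketch of the underlying httpx mechanism, not the library's actual implementation:

```python
import httpx


def request_opts_in_retryable(request: httpx.Request) -> bool:
    # Sketch: callers mark individual requests with extensions={"retryable": True};
    # a custom transport can read that flag back from request.extensions instead
    # of hard-coding retryable URL paths.
    return bool(request.extensions.get("retryable", False))
```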
port_ocean/clients/port/utils.py CHANGED

@@ -16,9 +16,9 @@ if TYPE_CHECKING:
 # period of time, before raising an exception.
 # The max_connections value can't be too high, as it will cause the application to run out of memory.
 # The max_keepalive_connections can't be too high, as it will cause the application to run out of available connections.
-PORT_HTTP_MAX_CONNECTIONS_LIMIT =
+PORT_HTTP_MAX_CONNECTIONS_LIMIT = 100
 PORT_HTTP_MAX_KEEP_ALIVE_CONNECTIONS = 50
-PORT_HTTP_TIMEOUT =
+PORT_HTTP_TIMEOUT = 60.0

 PORT_HTTPX_TIMEOUT = httpx.Timeout(PORT_HTTP_TIMEOUT)
 PORT_HTTPX_LIMITS = httpx.Limits(

@@ -28,13 +28,19 @@ PORT_HTTPX_LIMITS = httpx.Limits(

 _http_client: LocalStack[httpx.AsyncClient] = LocalStack()

+FIVE_MINUETS = 60 * 5
+

 def _get_http_client_context(port_client: "PortClient") -> httpx.AsyncClient:
     client = _http_client.top
     if client is None:
         client = OceanAsyncClient(
             TokenRetryTransport,
-            transport_kwargs={
+            transport_kwargs={
+                "port_client": port_client,
+                "max_backoff_wait": FIVE_MINUETS,
+                "base_delay": 0.3,
+            },
             timeout=PORT_HTTPX_TIMEOUT,
             limits=PORT_HTTPX_LIMITS,
         )
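The transport is now tuned with `base_delay=0.3` and a five-minute cap on backoff. Assuming the common exponential-backoff shape (delay = base_delay * 2**attempt, clamped to max_backoff_wait) - the exact policy lives in `port_ocean/helpers/retry.py`, which is not part of this excerpt and may add jitter or honour Retry-After headers - the wait schedule would look roughly like this:

```python
# Assumed exponential backoff shape; see the caveat above.
BASE_DELAY = 0.3
MAX_BACKOFF_WAIT = 60 * 5  # FIVE_MINUETS in the diff above


def backoff_delay(attempt: int) -> float:
    # Delay before the given retry attempt, capped at the five-minute ceiling.
    return min(MAX_BACKOFF_WAIT, BASE_DELAY * 2**attempt)


print([backoff_delay(n) for n in range(6)])  # [0.3, 0.6, 1.2, 2.4, 4.8, 9.6]
```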