port-ocean 0.5.10__py3-none-any.whl → 0.5.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of port-ocean might be problematic.
- port_ocean/clients/port/authentication.py +3 -1
- port_ocean/clients/port/mixins/entities.py +36 -47
- port_ocean/clients/port/retry_transport.py +0 -5
- port_ocean/core/handlers/entities_state_applier/port/applier.py +13 -75
- port_ocean/core/handlers/entity_processor/base.py +17 -7
- port_ocean/core/handlers/entity_processor/jq_entity_processor.py +42 -41
- port_ocean/core/integrations/mixins/sync_raw.py +80 -51
- port_ocean/core/ocean_types.py +16 -11
- port_ocean/helpers/retry.py +49 -4
- port_ocean/utils/async_iterators.py +49 -0
- {port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/METADATA +6 -5
- {port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/RECORD +15 -15
- port_ocean/core/handlers/entities_state_applier/port/validate_entity_relations.py +0 -40
- {port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/WHEEL +0 -0
- {port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/entry_points.txt +0 -0
port_ocean/clients/port/authentication.py
CHANGED

@@ -49,7 +49,9 @@ class PortAuthentication:
 
         credentials = {"clientId": client_id, "clientSecret": client_secret}
         response = await self.client.post(
-            f"{self.api_url}/auth/access_token",
+            f"{self.api_url}/auth/access_token",
+            json=credentials,
+            extensions={"retryable": True},
         )
         handle_status_code(response)
         return TokenResponse(**response.json())
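Note: the `extensions={"retryable": True}` argument added above pairs with the `_is_retryable_method` change to `port_ocean/helpers/retry.py` further down in this diff. httpx passes per-request `extensions` through to the transport, so a POST that would normally not be retried can opt in explicitly. A minimal sketch of that mechanism, assuming a hypothetical wrapper transport (`OptInRetryTransport` is illustrative, not part of the package):

```python
import httpx


class OptInRetryTransport(httpx.AsyncBaseTransport):
    """Illustrative wrapper: retries requests marked retryable via extensions."""

    def __init__(self, wrapped: httpx.AsyncBaseTransport, attempts: int = 3) -> None:
        self._wrapped = wrapped
        self._attempts = attempts

    async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
        # Idempotent methods retry by default; the extension opts other requests in.
        retryable = request.method in ("GET", "HEAD") or request.extensions.get(
            "retryable", False
        )
        attempts = self._attempts if retryable else 1
        last_error: Exception | None = None
        for _ in range(attempts):
            try:
                return await self._wrapped.handle_async_request(request)
            except httpx.TimeoutException as exc:
                last_error = exc
        assert last_error is not None
        raise last_error


# Usage sketch: the POST becomes retryable even though POST is not idempotent.
# client = httpx.AsyncClient(transport=OptInRetryTransport(httpx.AsyncHTTPTransport()))
# await client.post(url, json=credentials, extensions={"retryable": True})
```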
port_ocean/clients/port/mixins/entities.py
CHANGED

@@ -1,4 +1,5 @@
 import asyncio
+from typing import Any
 from urllib.parse import quote_plus
 
 import httpx
@@ -133,23 +134,10 @@ class EntityClientMixin:
             return_exceptions=True,
         )
 
-    async def
-
-
-
-            f"{self.auth.api_url}/blueprints/{blueprint}/entities/{identifier}",
-            headers=await self.auth.headers(),
-        )
-        if response.is_error:
-            logger.error(
-                f"Error validating "
-                f"entity: {identifier} of "
-                f"blueprint: {blueprint}"
-            )
-        handle_status_code(response)
-
-    async def search_entities(self, user_agent_type: UserAgentType) -> list[Entity]:
-        query = {
+    async def search_entities(
+        self, user_agent_type: UserAgentType, query: dict[Any, Any] | None = None
+    ) -> list[Entity]:
+        default_query = {
             "combinator": "and",
             "rules": [
                 {
@@ -165,6 +153,11 @@ class EntityClientMixin:
             ],
         }
 
+        if query is None:
+            query = default_query
+        elif query.get("rules"):
+            query["rules"].append(default_query)
+
         logger.info(f"Searching entities with query {query}")
         response = await self.client.post(
             f"{self.auth.api_url}/entities/search",
@@ -174,43 +167,39 @@ class EntityClientMixin:
                 "exclude_calculated_properties": "true",
                 "include": ["blueprint", "identifier"],
             },
+            extensions={"retryable": True},
+            timeout=30,
         )
         handle_status_code(response)
         return [Entity.parse_obj(result) for result in response.json()["entities"]]
 
-    async def
-
-
-
+    async def search_batch_entities(
+        self, user_agent_type: UserAgentType, entities_to_search: list[Entity]
+    ) -> list[Entity]:
+        search_rules = []
+        for entity in entities_to_search:
+            search_rules.append(
                 {
-            "
-            "
-
-
+                    "combinator": "and",
+                    "rules": [
+                        {
+                            "property": "$identifier",
+                            "operator": "=",
+                            "value": entity.identifier,
+                        },
+                        {
+                            "property": "$blueprint",
+                            "operator": "=",
+                            "value": entity.blueprint,
+                        },
+                    ],
                 }
-
-        }
-
-        logger.info(f"Searching dependent entity with body {body}")
-        response = await self.client.post(
-            f"{self.auth.api_url}/entities/search",
-            headers=await self.auth.headers(),
-            json=body,
-        )
-        handle_status_code(response)
-
-        return [Entity.parse_obj(result) for result in response.json()["entities"]]
+            )
 
-
-
-    ) -> None:
-        logger.info(f"Validating entity {entity.identifier}")
-        await self.upsert_entity(
-            entity,
+        return await self.search_entities(
+            user_agent_type,
             {
-                "
-                "
-                "delete_dependent_entities": False,
-                "validation_only": True,
+                "combinator": "and",
+                "rules": [{"combinator": "or", "rules": search_rules}],
             },
         )
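Note: to make the new search flow concrete, `search_batch_entities` builds one `$identifier`/`$blueprint` rule pair per entity, OR-s them together, and hands the result to `search_entities`, which then merges in its default query (whose rules are not shown in this hunk). A standalone sketch of the body construction with plain dicts and an illustrative `Entity` stand-in, no Port client involved:

```python
from dataclasses import dataclass


@dataclass
class Entity:
    identifier: str
    blueprint: str


def build_batch_search_query(entities: list[Entity]) -> dict:
    # One exact-match rule pair per entity, OR-ed together.
    search_rules = [
        {
            "combinator": "and",
            "rules": [
                {"property": "$identifier", "operator": "=", "value": e.identifier},
                {"property": "$blueprint", "operator": "=", "value": e.blueprint},
            ],
        }
        for e in entities
    ]
    return {"combinator": "and", "rules": [{"combinator": "or", "rules": search_rules}]}


query = build_batch_search_query([Entity("svc-a", "service"), Entity("svc-b", "service")])
# search_entities() would then append its default query into query["rules"].
print(query)
```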
port_ocean/clients/port/retry_transport.py
CHANGED

@@ -15,11 +15,6 @@ class TokenRetryTransport(RetryTransport):
         super().__init__(**kwargs)
         self.port_client = port_client
 
-    def _is_retryable_method(self, request: httpx.Request) -> bool:
-        return super()._is_retryable_method(request) or request.url.path.endswith(
-            "/auth/access_token"
-        )
-
     async def _handle_unauthorized(self, response: httpx.Response) -> None:
         token = await self.port_client.auth.token
         response.headers["Authorization"] = f"Bearer {token}"
port_ocean/core/handlers/entities_state_applier/port/applier.py
CHANGED

@@ -1,6 +1,3 @@
-import asyncio
-from itertools import chain
-
 from loguru import logger
 
 from port_ocean.clients.port.types import UserAgentType
@@ -14,14 +11,9 @@ from port_ocean.core.handlers.entities_state_applier.port.get_related_entities i
 from port_ocean.core.handlers.entities_state_applier.port.order_by_entities_dependencies import (
     order_by_entities_dependencies,
 )
-from port_ocean.core.handlers.entities_state_applier.port.validate_entity_relations import (
-    validate_entity_relations,
-)
-from port_ocean.core.handlers.entity_processor.base import EntityPortDiff
 from port_ocean.core.models import Entity
 from port_ocean.core.ocean_types import EntityDiff
-from port_ocean.core.utils import is_same_entity,
-from port_ocean.exceptions.core import RelationValidationException
+from port_ocean.core.utils import is_same_entity, get_port_diff
 
 
 class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
@@ -32,63 +24,17 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
     through HTTP requests.
     """
 
-    async def
-        logger.info("Validated deleted entities")
-        if not event.port_app_config.delete_dependent_entities:
-            dependent_entities = await asyncio.gather(
-                *(
-                    self.context.port_client.search_dependent_entities(entity)
-                    for entity in entities
-                )
-            )
-            new_dependent_entities = get_unique(
-                [
-                    entity
-                    for entity in chain.from_iterable(dependent_entities)
-                    if not any(is_same_entity(item, entity) for item in entities)
-                ]
-            )
-
-            if new_dependent_entities:
-                raise RelationValidationException(
-                    f"Must enable delete_dependent_entities flag or delete all dependent entities: "
-                    f" {[(dep.blueprint, dep.identifier) for dep in new_dependent_entities]}"
-                )
-
-    async def _validate_entity_diff(self, diff: EntityPortDiff) -> None:
-        config = event.port_app_config
-        await self._validate_delete_dependent_entities(diff.deleted)
-        modified_or_created_entities = diff.modified + diff.created
-
-        if modified_or_created_entities and not config.create_missing_related_entities:
-            logger.info("Validating modified or created entities")
-
-            await asyncio.gather(
-                *(
-                    self.context.port_client.validate_entity_payload(
-                        entity,
-                        config.enable_merge_entity,
-                        create_missing_related_entities=config.create_missing_related_entities,
-                    )
-                    for entity in modified_or_created_entities
-                )
-            )
-
-        if not event.port_app_config.delete_dependent_entities:
-            logger.info("Validating no relation blocks the operation")
-            await validate_entity_relations(diff, self.context.port_client)
-
-    async def _delete_diff(
+    async def _safe_delete(
         self,
         entities_to_delete: list[Entity],
-
+        entities_to_protect: list[Entity],
         user_agent_type: UserAgentType,
     ) -> None:
         if not entities_to_delete:
             return
 
         related_entities = await get_related_entities(
-
+            entities_to_protect, self.context.port_client
         )
 
         allowed_entities_to_delete = []
@@ -98,7 +44,8 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
                 is_same_entity(entity, entity_to_delete) for entity in related_entities
             )
             is_part_of_created = any(
-                is_same_entity(entity, entity_to_delete)
+                is_same_entity(entity, entity_to_delete)
+                for entity in entities_to_protect
             )
             if is_part_of_related:
                 if event.port_app_config.create_missing_related_entities:
@@ -119,21 +66,14 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
         user_agent_type: UserAgentType,
     ) -> None:
         diff = get_port_diff(entities["before"], entities["after"])
+        kept_entities = diff.created + diff.modified
 
         logger.info(
             f"Updating entity diff (created: {len(diff.created)}, deleted: {len(diff.deleted)}, modified: {len(diff.modified)})"
         )
-        await self.
+        await self.upsert(kept_entities, user_agent_type)
 
-
-        await self.upsert(diff.created, user_agent_type)
-        logger.info("Upserting modified entities")
-        await self.upsert(diff.modified, user_agent_type)
-
-        logger.info("Deleting diff entities")
-        await self._delete_diff(
-            diff.deleted, diff.created + diff.modified, user_agent_type
-        )
+        await self._safe_delete(diff.deleted, kept_entities, user_agent_type)
 
     async def delete_diff(
         self,
@@ -145,15 +85,13 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
         if not diff.deleted:
             return
 
+        kept_entities = diff.created + diff.modified
+
         logger.info(
-            f"
+            f"Determining entities to delete ({len(diff.deleted)}/{len(kept_entities)})"
         )
-        await self._validate_entity_diff(diff)
 
-
-        await self._delete_diff(
-            diff.deleted, diff.created + diff.modified, user_agent_type
-        )
+        await self._safe_delete(diff.deleted, kept_entities, user_agent_type)
 
     async def upsert(
         self, entities: list[Entity], user_agent_type: UserAgentType
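Note: the net effect of the applier changes is one pattern: compute the diff, upsert the created and modified entities, and delete only entities that are neither kept nor related to kept ones. A simplified, self-contained sketch of that selection step, using `(identifier, blueprint)` equality as a stand-in for `is_same_entity` and ignoring the `create_missing_related_entities` branch shown above (all names here are illustrative):

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class Entity:
    identifier: str
    blueprint: str


def same(a: Entity, b: Entity) -> bool:
    # Stand-in for is_same_entity: identity is (identifier, blueprint).
    return (a.identifier, a.blueprint) == (b.identifier, b.blueprint)


def safe_delete_candidates(
    to_delete: list[Entity],
    to_protect: list[Entity],
    related_to_protected: list[Entity],
) -> list[Entity]:
    allowed = []
    for entity in to_delete:
        is_related = any(same(entity, other) for other in related_to_protected)
        is_kept = any(same(entity, other) for other in to_protect)
        if not is_related and not is_kept:
            allowed.append(entity)
    return allowed


kept = [Entity("svc-a", "service")]
deleted = [Entity("svc-a", "service"), Entity("svc-old", "service")]
print(safe_delete_candidates(deleted, kept, related_to_protected=[]))
# only svc-old remains a deletion candidate
```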
port_ocean/core/handlers/entity_processor/base.py
CHANGED

@@ -6,7 +6,10 @@ from loguru import logger
 from port_ocean.core.handlers.base import BaseHandler
 from port_ocean.core.handlers.port_app_config.models import ResourceConfig
 from port_ocean.core.models import Entity
-from port_ocean.core.ocean_types import
+from port_ocean.core.ocean_types import (
+    RAW_ITEM,
+    EntitySelectorDiff,
+)
 
 
 @dataclass
@@ -33,21 +36,28 @@ class BaseEntityProcessor(BaseHandler):
 
     @abstractmethod
     async def _parse_items(
-        self,
-
+        self,
+        mapping: ResourceConfig,
+        raw_data: list[RAW_ITEM],
+        parse_all: bool = False,
+    ) -> EntitySelectorDiff:
         pass
 
     async def parse_items(
-        self,
-
+        self,
+        mapping: ResourceConfig,
+        raw_data: list[RAW_ITEM],
+        parse_all: bool = False,
+    ) -> EntitySelectorDiff:
         """Public method to parse raw entity data and map it to an EntityDiff.
 
         Args:
             mapping (ResourceConfig): The configuration for entity mapping.
-            raw_data (
+            raw_data (list[RawEntity]): The raw data to be parsed.
+            parse_all (bool): Whether to parse all data or just data that passed the selector.
 
         Returns:
             EntityDiff: The parsed entity differences.
         """
         with logger.contextualize(kind=mapping.kind):
-            return await self._parse_items(mapping, raw_data)
+            return await self._parse_items(mapping, raw_data, parse_all)
port_ocean/core/handlers/entity_processor/jq_entity_processor.py
CHANGED

@@ -9,7 +9,10 @@ import pyjq as jq  # type: ignore
 from port_ocean.core.handlers.entity_processor.base import BaseEntityProcessor
 from port_ocean.core.handlers.port_app_config.models import ResourceConfig
 from port_ocean.core.models import Entity
-from port_ocean.core.ocean_types import
+from port_ocean.core.ocean_types import (
+    RAW_ITEM,
+    EntitySelectorDiff,
+)
 from port_ocean.exceptions.core import EntityProcessorException
 
 
@@ -68,16 +71,19 @@ class JQEntityProcessor(BaseEntityProcessor):
 
         return result
 
-    async def
+    async def _get_mapped_entity(
         self,
         data: dict[str, Any],
         raw_entity_mappings: dict[str, Any],
         selector_query: str,
-
+        parse_all: bool = False,
+    ) -> tuple[dict[str, Any], bool]:
         should_run = await self._search_as_bool(data, selector_query)
-        if should_run:
-
-
+        if parse_all or should_run:
+            mapped_entity = await self._search_as_object(data, raw_entity_mappings)
+            return mapped_entity, should_run
+
+        return {}, False
 
     async def _calculate_entity(
         self,
@@ -85,16 +91,18 @@ class JQEntityProcessor(BaseEntityProcessor):
         raw_entity_mappings: dict[str, Any],
         items_to_parse: str | None,
         selector_query: str,
-
+        parse_all: bool = False,
+    ) -> list[tuple[dict[str, Any], bool]]:
         if items_to_parse:
             items = await self._search(data, items_to_parse)
             if isinstance(items, list):
                 return await asyncio.gather(
                     *[
-                        self.
+                        self._get_mapped_entity(
                             {"item": item, **data},
                             raw_entity_mappings,
                             selector_query,
+                            parse_all,
                         )
                         for item in items
                     ]
@@ -105,51 +113,44 @@ class JQEntityProcessor(BaseEntityProcessor):
             )
         else:
             return [
-                await self.
-                    data, raw_entity_mappings, selector_query
+                await self._get_mapped_entity(
+                    data, raw_entity_mappings, selector_query, parse_all
                 )
             ]
-        return [{}]
+        return [({}, False)]
 
-    async def
-        self,
-
+    async def _parse_items(
+        self,
+        mapping: ResourceConfig,
+        raw_results: list[RAW_ITEM],
+        parse_all: bool = False,
+    ) -> EntitySelectorDiff:
         raw_entity_mappings: dict[str, Any] = mapping.port.entity.mappings.dict(
             exclude_unset=True
         )
-
+        calculate_entities_tasks = [
             asyncio.create_task(
                 self._calculate_entity(
                     data,
                     raw_entity_mappings,
                     mapping.port.items_to_parse,
                     mapping.selector.query,
+                    parse_all,
                 )
             )
-            for data in
-            ]
-        entities = await asyncio.gather(*entities_tasks)
-
-        return [
-            Entity.parse_obj(entity_data)
-            for flatten in entities
-            for entity_data in filter(
-                lambda entity: entity.get("identifier") and entity.get("blueprint"),
-                flatten,
-            )
+            for data in raw_results
         ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            }
+        calculate_entities_results = await asyncio.gather(*calculate_entities_tasks)
+
+        passed_entities = []
+        failed_entities = []
+        for entities_results in calculate_entities_results:
+            for entity, did_entity_pass_selector in entities_results:
+                if entity.get("identifier") and entity.get("blueprint"):
+                    parsed_entity = Entity.parse_obj(entity)
+                    if did_entity_pass_selector:
+                        passed_entities.append(parsed_entity)
+                    else:
+                        failed_entities.append(parsed_entity)
+
+        return EntitySelectorDiff(passed=passed_entities, failed=failed_entities)
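Note: the reworked `_parse_items` no longer drops raw items that fail the selector outright. With `parse_all=True` it still maps them and returns them under `failed`, so callers can later decide to delete them. A small illustration of the pass/fail split, using a plain Python selector function and dicts in place of jq queries and Entity models (everything here is illustrative):

```python
from typing import Any, Callable, NamedTuple


class EntitySelectorDiff(NamedTuple):
    passed: list[dict[str, Any]]
    failed: list[dict[str, Any]]


def split_by_selector(
    raw_items: list[dict[str, Any]],
    selector: Callable[[dict[str, Any]], bool],
    parse_all: bool = False,
) -> EntitySelectorDiff:
    passed: list[dict[str, Any]] = []
    failed: list[dict[str, Any]] = []
    for item in raw_items:
        should_run = selector(item)
        if not (parse_all or should_run):
            continue  # with parse_all=False, non-matching items are skipped entirely
        mapped = {"identifier": item["name"], "blueprint": "service"}
        (passed if should_run else failed).append(mapped)
    return EntitySelectorDiff(passed=passed, failed=failed)


items = [{"name": "svc-a", "env": "prod"}, {"name": "svc-b", "env": "dev"}]
diff = split_by_selector(items, lambda i: i["env"] == "prod", parse_all=True)
print(diff.passed)  # [{'identifier': 'svc-a', 'blueprint': 'service'}]
print(diff.failed)  # [{'identifier': 'svc-b', 'blueprint': 'service'}]
```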
port_ocean/core/integrations/mixins/sync_raw.py
CHANGED

@@ -22,8 +22,9 @@ from port_ocean.core.ocean_types import (
     RAW_RESULT,
     RESYNC_RESULT,
     RawEntityDiff,
-    EntityDiff,
     ASYNC_GENERATOR_RESYNC_TYPE,
+    RAW_ITEM,
+    EntitySelectorDiff,
 )
 from port_ocean.core.utils import zip_and_sum
 from port_ocean.exceptions.core import OceanAbortException
@@ -120,12 +121,13 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         return results, errors
 
     async def _calculate_raw(
-        self,
-
-
+        self,
+        raw_diff: list[tuple[ResourceConfig, list[RAW_ITEM]]],
+        parse_all: bool = False,
+    ) -> list[EntitySelectorDiff]:
         return await asyncio.gather(
             *(
-                self.entity_processor.parse_items(mapping, results)
+                self.entity_processor.parse_items(mapping, results, parse_all)
                 for mapping, results in raw_diff
             )
         )
@@ -135,42 +137,24 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         resource: ResourceConfig,
         results: list[dict[Any, Any]],
         user_agent_type: UserAgentType,
-
-
-
-
-
-                {
-                    "before": [],
-                    "after": results,
-                },
-            )
-        ]
+        parse_all: bool = False,
+    ) -> EntitySelectorDiff:
+        objects_diff = await self._calculate_raw([(resource, results)], parse_all)
+        await self.entities_state_applier.upsert(
+            objects_diff[0].passed, user_agent_type
         )
 
-
-        await self.entities_state_applier.upsert(entities_after, user_agent_type)
-        return entities_after
+        return objects_diff[0]
 
     async def _unregister_resource_raw(
         self,
         resource: ResourceConfig,
-        results: list[
+        results: list[RAW_ITEM],
         user_agent_type: UserAgentType,
     ) -> list[Entity]:
-        objects_diff = await self._calculate_raw(
-            [
-                (
-                    resource,
-                    {
-                        "before": results,
-                        "after": [],
-                    },
-                )
-            ]
-        )
+        objects_diff = await self._calculate_raw([(resource, results)])
 
-        entities_after: list[Entity] = objects_diff[0]
+        entities_after: list[Entity] = objects_diff[0].passed
         await self.entities_state_applier.delete(entities_after, user_agent_type)
         logger.info("Finished unregistering change")
         return entities_after
@@ -187,17 +171,21 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
             else:
                 async_generators.append(result)
 
-        entities =
-
-
+        entities = (
+            await self._register_resource_raw(
+                resource_config, raw_results, user_agent_type
+            )
+        ).passed
 
         for generator in async_generators:
             try:
                 async for items in generator:
                     entities.extend(
-
-
-
+                        (
+                            await self._register_resource_raw(
+                                resource_config, items, user_agent_type
+                            )
+                        ).passed
                     )
             except* OceanAbortException as error:
                 errors.append(error)
@@ -231,13 +219,44 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
             resource for resource in config.resources if resource.kind == kind
         ]
 
-
+        diffs: list[EntitySelectorDiff] = await asyncio.gather(
             *(
-                self._register_resource_raw(resource, results, user_agent_type)
+                self._register_resource_raw(resource, results, user_agent_type, True)
                 for resource in resource_mappings
             )
         )
 
+        registered_entities, entities_to_delete = zip_and_sum(
+            (entities_diff.passed, entities_diff.failed) for entities_diff in diffs
+        )
+
+        registered_entities_attributes = {
+            (entity.identifier, entity.blueprint) for entity in registered_entities
+        }
+
+        filtered_entities_to_delete: list[Entity] = (
+            await ocean.port_client.search_batch_entities(
+                user_agent_type,
+                [
+                    entity
+                    for entity in entities_to_delete
+                    if (entity.identifier, entity.blueprint)
+                    not in registered_entities_attributes
+                ],
+            )
+        )
+
+        if filtered_entities_to_delete:
+            logger.info(
+                f"Deleting {len(filtered_entities_to_delete)} entities that didn't pass any of the selectors"
+            )
+
+            await self.entities_state_applier.delete(
+                filtered_entities_to_delete, user_agent_type
+            )
+
+        return registered_entities
+
     async def unregister_raw(
         self,
         kind: str,
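Note: `zip_and_sum` is used above to flatten the per-resource diffs into two combined lists: all `passed` entities and all `failed` ones. Assuming it behaves like "transpose the tuples, then concatenate each column" (the helper's actual implementation is not shown in this diff), the shape is roughly:

```python
from typing import Iterable, TypeVar

T = TypeVar("T")


def zip_and_sum(collection: Iterable[tuple[list[T], ...]]) -> tuple[list[T], ...]:
    # Assumed behavior: transpose the (passed, failed) tuples and concatenate each column.
    return tuple(sum(column, []) for column in zip(*collection))


diffs = [
    (["svc-a"], ["svc-old"]),  # resource 1: (passed, failed)
    (["svc-b", "svc-c"], []),  # resource 2
]
registered, to_delete = zip_and_sum(diffs)
print(registered)  # ['svc-a', 'svc-b', 'svc-c']
print(to_delete)   # ['svc-old']
```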
@@ -293,19 +312,28 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         with logger.contextualize(kind=kind):
             logger.info(f"Found {len(resource_mappings)} resources for {kind}")
 
-
-                [
+            entities_before = await self._calculate_raw(
+                [
+                    (mapping, raw_desired_state["before"])
+                    for mapping in resource_mappings
+                ]
+            )
+
+            entities_after = await self._calculate_raw(
+                [(mapping, raw_desired_state["after"]) for mapping in resource_mappings]
             )
 
-
-            (
-
-
-
+            entities_before_flatten: list[Entity] = sum(
+                (entities_diff.passed for entities_diff in entities_before), []
+            )
+
+            entities_after_flatten: list[Entity] = sum(
+                (entities_diff.passed for entities_diff in entities_after), []
             )
 
             await self.entities_state_applier.apply_diff(
-                {"before":
+                {"before": entities_before_flatten, "after": entities_after_flatten},
+                user_agent_type,
             )
 
     async def sync_raw_all(
@@ -333,8 +361,6 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
         ):
             app_config = await self.port_app_config_handler.get_port_app_config()
 
-            entities_at_port = await ocean.port_client.search_entities(user_agent_type)
-
             creation_results: list[tuple[list[Entity], list[Exception]]] = []
 
             try:
@@ -369,8 +395,11 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
 
                 logger.error(message, exc_info=error_group)
             else:
+                entities_at_port = await ocean.port_client.search_entities(
+                    user_agent_type
+                )
                 logger.info(
-                    f"Running resync diff calculation, number of entities at Port
+                    f"Running resync diff calculation, number of entities found at Port: {len(entities_at_port)}, number of entities found during sync: {len(flat_created_entities)}"
                 )
                 await self.entities_state_applier.delete_diff(
                     {"before": entities_at_port, "after": flat_created_entities},
port_ocean/core/ocean_types.py
CHANGED
@@ -1,18 +1,8 @@
-from typing import TypedDict, Any, AsyncIterator, Callable, Awaitable
+from typing import TypedDict, Any, AsyncIterator, Callable, Awaitable, NamedTuple
 
 from port_ocean.core.models import Entity
 
 
-class RawEntityDiff(TypedDict):
-    before: list[dict[Any, Any]]
-    after: list[dict[Any, Any]]
-
-
-class EntityDiff(TypedDict):
-    before: list[Entity]
-    after: list[Entity]
-
-
 RAW_ITEM = dict[Any, Any]
 RAW_RESULT = list[RAW_ITEM]
 ASYNC_GENERATOR_RESYNC_TYPE = AsyncIterator[RAW_RESULT]
@@ -23,6 +13,21 @@ RESYNC_EVENT_LISTENER = Callable[[str], LISTENER_RESULT]
 START_EVENT_LISTENER = Callable[[], Awaitable[None]]
 
 
+class RawEntityDiff(TypedDict):
+    before: list[RAW_ITEM]
+    after: list[RAW_ITEM]
+
+
+class EntityDiff(TypedDict):
+    before: list[Entity]
+    after: list[Entity]
+
+
+class EntitySelectorDiff(NamedTuple):
+    passed: list[Entity]
+    failed: list[Entity]
+
+
 class IntegrationEventsCallbacks(TypedDict):
     start: list[START_EVENT_LISTENER]
     resync: dict[str | None, list[RESYNC_EVENT_LISTENER]]
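Note: because `EntitySelectorDiff` is a `NamedTuple`, results can be read by attribute (as in `objects_diff[0].passed` above) or unpacked as a tuple. A quick illustration with strings standing in for `Entity` objects:

```python
from typing import NamedTuple


class EntitySelectorDiff(NamedTuple):
    passed: list[str]  # list[Entity] in the package; strings keep the example small
    failed: list[str]


diff = EntitySelectorDiff(passed=["svc-a"], failed=["svc-old"])
print(diff.passed)     # attribute access
passed, failed = diff  # tuple unpacking also works
print(failed)
```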
port_ocean/helpers/retry.py
CHANGED
@@ -179,12 +179,35 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
             transport.close()
 
     def _is_retryable_method(self, request: httpx.Request) -> bool:
-        return request.method in self._retryable_methods
+        return request.method in self._retryable_methods or request.extensions.get(
+            "retryable", False
+        )
 
     def _should_retry(self, response: httpx.Response) -> bool:
         return response.status_code in self._retry_status_codes
 
-    def
+    def _log_error(
+        self,
+        request: httpx.Request,
+        error: Exception | None,
+    ) -> None:
+        if not self._logger:
+            return
+
+        if isinstance(error, httpx.ConnectTimeout):
+            self._logger.error(
+                f"Request {request.method} {request.url} failed to connect: {str(error)}"
+            )
+        elif isinstance(error, httpx.TimeoutException):
+            self._logger.error(
+                f"Request {request.method} {request.url} failed with a timeout exception: {str(error)}"
+            )
+        elif isinstance(error, httpx.HTTPError):
+            self._logger.error(
+                f"Request {request.method} {request.url} failed with an HTTP error: {str(error)}"
+            )
+
+    def _log_before_retry(
         self,
         request: httpx.Request,
         sleep_time: float,
@@ -249,7 +272,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
         while True:
             if attempts_made > 0:
                 sleep_time = self._calculate_sleep(attempts_made, {})
-                self.
+                self._log_before_retry(request, sleep_time, response, error)
                 await asyncio.sleep(sleep_time)
 
             error = None
@@ -262,9 +285,20 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
                 ):
                     return response
                 await response.aclose()
+            except httpx.ConnectTimeout as e:
+                error = e
+                if remaining_attempts < 1:
+                    self._log_error(request, error)
+                    raise
+            except httpx.TimeoutException as e:
+                error = e
+                if remaining_attempts < 1:
+                    self._log_error(request, error)
+                    raise
             except httpx.HTTPError as e:
                 error = e
                 if remaining_attempts < 1:
+                    self._log_error(request, error)
                     raise
             attempts_made += 1
             remaining_attempts -= 1
@@ -281,7 +315,7 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
         while True:
             if attempts_made > 0:
                 sleep_time = self._calculate_sleep(attempts_made, {})
-                self.
+                self._log_before_retry(request, sleep_time, response, error)
                 time.sleep(sleep_time)
 
             error = None
@@ -292,9 +326,20 @@ class RetryTransport(httpx.AsyncBaseTransport, httpx.BaseTransport):
                 if remaining_attempts < 1 or not self._should_retry(response):
                     return response
                 response.close()
+            except httpx.ConnectTimeout as e:
+                error = e
+                if remaining_attempts < 1:
+                    self._log_error(request, error)
+                    raise
+            except httpx.TimeoutException as e:
+                error = e
+                if remaining_attempts < 1:
+                    self._log_error(request, error)
+                    raise
             except httpx.HTTPError as e:
                 error = e
                 if remaining_attempts < 1:
+                    self._log_error(request, error)
                     raise
             attempts_made += 1
             remaining_attempts -= 1
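Note: ordering matters in the new `except` blocks. In httpx, `ConnectTimeout` is a subclass of `TimeoutException`, which in turn derives from `HTTPError`, so the most specific handler has to come first or the broader ones would swallow it. This can be checked directly:

```python
import httpx

# ConnectTimeout -> TimeoutException -> ... -> HTTPError in httpx's exception hierarchy,
# so except clauses must go from most to least specific.
assert issubclass(httpx.ConnectTimeout, httpx.TimeoutException)
assert issubclass(httpx.TimeoutException, httpx.HTTPError)
print("exception hierarchy confirmed")
```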
port_ocean/utils/async_iterators.py
ADDED

@@ -0,0 +1,49 @@
+import typing
+
+import aiostream
+
+
+async def stream_async_iterators_tasks(
+    *tasks: typing.AsyncIterable[typing.Any],
+) -> typing.AsyncIterable[typing.Any]:
+    """
+    This function takes a list of async iterators and streams the results of each iterator as they are available.
+    By using this function you can combine multiple async iterators into a single stream of results, instead of waiting
+    for each iterator to finish before starting the next one.
+
+    Usage:
+    ```python
+    async def async_iterator1():
+        for i in range(10):
+            yield i
+            await asyncio.sleep(1)
+
+    async def async_iterator2():
+        for i in range(10, 20):
+            yield i
+            await asyncio.sleep(1)
+
+    async def main():
+        async for result in stream_async_iterators_tasks([async_iterator1(), async_iterator2()]):
+            print(result)
+    ```
+
+    Caution - Before using this function, make sure that the third-party API you are calling allows the number of
+    concurrent requests you are making. If the API has a rate limit, you may need to adjust the number of concurrent
+    requests to avoid hitting the rate limit.
+
+    :param tasks: A list of async iterators
+    :return: A stream of results
+    """
+    if not tasks:
+        raise StopAsyncIteration("No tasks provided")
+
+    if len(tasks) == 1:
+        async for batch_items in tasks[0]:
+            yield batch_items
+        return
+
+    combine = aiostream.stream.merge(tasks[0], *tasks[1:])
+    async with combine.stream() as streamer:
+        async for batch_items in streamer:
+            yield batch_items
{port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: port-ocean
-Version: 0.5.
+Version: 0.5.14
 Summary: Port Ocean is a CLI tool for managing your Port projects.
 Home-page: https://app.getport.io
 Keywords: ocean,port-ocean,port
@@ -21,11 +21,12 @@ Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: Utilities
 Provides-Extra: cli
+Requires-Dist: aiostream (>=0.5.2,<0.6.0)
 Requires-Dist: click (>=8.1.3,<9.0.0) ; extra == "cli"
 Requires-Dist: confluent-kafka (>=2.1.1,<3.0.0)
 Requires-Dist: cookiecutter (>=2.1.1,<3.0.0) ; extra == "cli"
-Requires-Dist: fastapi (>=0.100,<0.
-Requires-Dist: httpx (>=0.24.1,<0.
+Requires-Dist: fastapi (>=0.100,<0.111)
+Requires-Dist: httpx (>=0.24.1,<0.28.0)
 Requires-Dist: jinja2-time (>=0.2.0,<0.3.0) ; extra == "cli"
 Requires-Dist: loguru (>=0.7.0,<0.8.0)
 Requires-Dist: pydantic (>=1.10.8,<2.0.0)
@@ -36,8 +37,8 @@ Requires-Dist: pyyaml (>=6.0,<7.0)
 Requires-Dist: rich (>=13.4.1,<14.0.0) ; extra == "cli"
 Requires-Dist: six (>=1.16.0,<2.0.0)
 Requires-Dist: tomli (>=2.0.1,<3.0.0)
-Requires-Dist: urllib3 (>=1.26.16,<
-Requires-Dist: uvicorn (>=0.22
+Requires-Dist: urllib3 (>=1.26.16,<3.0.0)
+Requires-Dist: uvicorn (>=0.22,<0.30)
 Requires-Dist: werkzeug (>=2.3.4,<4.0.0)
 Project-URL: Repository, https://github.com/port-labs/Port-Ocean
 Description-Content-Type: text/markdown
{port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/RECORD
CHANGED

@@ -36,14 +36,14 @@ port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/tests/__init__.py,
 port_ocean/cli/utils.py,sha256=IUK2UbWqjci-lrcDdynZXqVP5B5TcjF0w5CpEVUks-k,54
 port_ocean/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 port_ocean/clients/port/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-port_ocean/clients/port/authentication.py,sha256=
+port_ocean/clients/port/authentication.py,sha256=t3z6h4vld-Tzkpth15sstaMJg0rccX-pXXjNtOa-nCY,2949
 port_ocean/clients/port/client.py,sha256=3GYCM0ZkX3pB6sNoOb-7_6dm0Jr5_vqhflD9iltf_As,2640
 port_ocean/clients/port/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 port_ocean/clients/port/mixins/blueprints.py,sha256=BiqkhvDFdkySWgL1NHI-LAQ9ieZWazZAojPo9E8d7U4,4575
-port_ocean/clients/port/mixins/entities.py,sha256=
+port_ocean/clients/port/mixins/entities.py,sha256=Lg5Sa6jQuhDTQKLURVavqXlBQt4-XPUUigB9JqQ1X0k,7364
 port_ocean/clients/port/mixins/integrations.py,sha256=GJRweeibfKhukOkWB07pb09G6pp06FOiXPJU-pOV7M8,5155
 port_ocean/clients/port/mixins/migrations.py,sha256=A6896oJF6WbFL2WroyTkMzr12yhVyWqGoq9dtLNSKBY,1457
-port_ocean/clients/port/retry_transport.py,sha256=
+port_ocean/clients/port/retry_transport.py,sha256=PtIZOAZ6V-ncpVysRUsPOgt8Sf01QLnTKB5YeKBxkJk,1861
 port_ocean/clients/port/types.py,sha256=nvlgiAq4WH5_F7wQbz_GAWl-faob84LVgIjZ2Ww5mTk,451
 port_ocean/clients/port/utils.py,sha256=O9mBu6zp4TfpS4SQ3qCPpn9ZVyYF8GKnji4UnYhM3yg,2373
 port_ocean/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -73,13 +73,12 @@ port_ocean/core/handlers/base.py,sha256=cTarblazu8yh8xz2FpB-dzDKuXxtoi143XJgPbV_
 port_ocean/core/handlers/entities_state_applier/__init__.py,sha256=kgLZDCeCEzi4r-0nzW9k78haOZNf6PX7mJOUr34A4c8,173
 port_ocean/core/handlers/entities_state_applier/base.py,sha256=FMsrBOVgaO4o7B1klLDY8fobTUDvyrerCKCICyYtkXs,2193
 port_ocean/core/handlers/entities_state_applier/port/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-port_ocean/core/handlers/entities_state_applier/port/applier.py,sha256=
+port_ocean/core/handlers/entities_state_applier/port/applier.py,sha256=G6RyPgBAyUcP41ySKaiXhKQhdrz8NixEEwsnU5q_-0c,5225
 port_ocean/core/handlers/entities_state_applier/port/get_related_entities.py,sha256=1zncwCbE-Gej0xaWKlzZgoXxOBe9bgs_YxlZ8QW3NdI,1751
 port_ocean/core/handlers/entities_state_applier/port/order_by_entities_dependencies.py,sha256=82BvU8t5w9uhsxX8hbnwuRPuWhW3cMeuT_5sVIkip1I,1550
-port_ocean/core/handlers/entities_state_applier/port/validate_entity_relations.py,sha256=nKuQ-RlalGG07olxm6l5NHeOuQT9dEZLoMpD-AN5nq0,1392
 port_ocean/core/handlers/entity_processor/__init__.py,sha256=FvFCunFg44wNQoqlybem9MthOs7p1Wawac87uSXz9U8,156
-port_ocean/core/handlers/entity_processor/base.py,sha256=
-port_ocean/core/handlers/entity_processor/jq_entity_processor.py,sha256=
+port_ocean/core/handlers/entity_processor/base.py,sha256=4JVCAAohEKtl8FdlnuyIxJ1afSXk3o2-e_m4LSy7vmw,1952
+port_ocean/core/handlers/entity_processor/jq_entity_processor.py,sha256=R5PwTcsJYRVU-lwoYZa1Af8kserp2BXIpyzVtRdEqA8,5617
 port_ocean/core/handlers/port_app_config/__init__.py,sha256=8AAT5OthiVM7KCcM34iEgEeXtn2pRMrT4Dze5r1Ixbk,134
 port_ocean/core/handlers/port_app_config/api.py,sha256=6VbKPwFzsWG0IYsVD81hxSmfqtHUFqrfUuj1DBX5g4w,853
 port_ocean/core/handlers/port_app_config/base.py,sha256=nnMZ4jH6a-4Of9Cn-apMsU0CgNLD9avd5q0gRmc7nZ8,1495
@@ -90,10 +89,10 @@ port_ocean/core/integrations/mixins/__init__.py,sha256=FA1FEKMM6P-L2_m7Q4L20mFa4
 port_ocean/core/integrations/mixins/events.py,sha256=Ddfx2L4FpghV38waF8OfVeOV0bHBxNIgjU-q5ffillI,2341
 port_ocean/core/integrations/mixins/handler.py,sha256=mZ7-0UlG3LcrwJttFbMe-R4xcOU2H_g33tZar7PwTv8,3771
 port_ocean/core/integrations/mixins/sync.py,sha256=TKqRytxXONVhuCo3CB3rDvWNbITnZz33TYTDs3SWWVk,3880
-port_ocean/core/integrations/mixins/sync_raw.py,sha256
+port_ocean/core/integrations/mixins/sync_raw.py,sha256=-JhaWt7N4R4vYe73cmJ00PEeIHUYJq-x8Pf89_-WgZ0,14992
 port_ocean/core/integrations/mixins/utils.py,sha256=7y1rGETZIjOQadyIjFJXIHKkQFKx_SwiP-TrAIsyyLY,2303
 port_ocean/core/models.py,sha256=bDO_I4Yd33TEZIh2QSV8UwXQIuwE7IgrINkYDHI0dkc,714
-port_ocean/core/ocean_types.py,sha256=
+port_ocean/core/ocean_types.py,sha256=ltnn22eRuDMFW02kIgmIAu6S06-i9jJV2NJ-MZcwwj0,879
 port_ocean/core/utils.py,sha256=B040Wokk28g9tQj_06qk_uvm85RIXc6XGXysZV6gtQw,1957
 port_ocean/exceptions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
99
98
|
port_ocean/exceptions/api.py,sha256=TLmTMqn4uHGaHgZK8PMIJ0TVJlPB4iP7xl9rx7GtCyY,426
|
|
@@ -105,7 +104,7 @@ port_ocean/exceptions/port_defaults.py,sha256=pJ5im06ziDX2IVstSlYIjVHhs4JW-N0Vr0
|
|
|
105
104
|
port_ocean/exceptions/utils.py,sha256=gjOqpi-HpY1l4WlMFsGA9yzhxDhajhoGGdDDyGbLnqI,197
|
|
106
105
|
port_ocean/helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
107
106
|
port_ocean/helpers/async_client.py,sha256=SRlP6o7_FCSY3UHnRlZdezppePVxxOzZ0z861vE3K40,1783
|
|
108
|
-
port_ocean/helpers/retry.py,sha256=
|
|
107
|
+
port_ocean/helpers/retry.py,sha256=hcfVmt2Fp53EQViYiOWA-FTWD80jvS-mWKhwMHiBM7Q,14869
|
|
109
108
|
port_ocean/log/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
110
109
|
port_ocean/log/handlers.py,sha256=k9G_Mb4ga2-Jke9irpdlYqj6EYiwv0gEsh4TgyqqOmI,2853
|
|
111
110
|
port_ocean/log/logger_setup.py,sha256=NGU9EdRTPLloiHdgSw0JYaYGr9mx1hYJUEPFHP6BA14,2175
|
|
@@ -117,13 +116,14 @@ port_ocean/run.py,sha256=vyShtqg_jEiE6M4SJpci6c4oRD9k2ztesAXEx_6Sc9M,1906
 port_ocean/sonar-project.properties,sha256=X_wLzDOkEVmpGLRMb2fg9Rb0DxWwUFSvESId8qpvrPI,73
 port_ocean/utils/__init__.py,sha256=KMGnCPXZJbNwtgxtyMycapkDz8tpSyw23MSYT3iVeHs,91
 port_ocean/utils/async_http.py,sha256=arnH458TExn2Dju_Sy6pHas_vF5RMWnOp-jBz5WAAcE,1226
+port_ocean/utils/async_iterators.py,sha256=buFBiPdsqkNMCk91h6ZG8hJa181j7RjgHajbfgeB8A8,1608
 port_ocean/utils/cache.py,sha256=3KItZDE2yVrbVDr-hoM8lNna8s2dlpxhP4ICdLjH4LQ,2231
 port_ocean/utils/misc.py,sha256=2XmO8W0SgPjV0rd9HZvrHhoMlHprIwmMFsINxlAmgyw,1723
 port_ocean/utils/repeat.py,sha256=0EFWM9d8lLXAhZmAyczY20LAnijw6UbIECf5lpGbOas,3231
 port_ocean/utils/signal.py,sha256=Fab0049Cjs69TPTQgvEvilaVZKACQr6tGkRdySjNCi8,1515
 port_ocean/version.py,sha256=UsuJdvdQlazzKGD3Hd5-U7N69STh8Dq9ggJzQFnu9fU,177
-port_ocean-0.5.
-port_ocean-0.5.
-port_ocean-0.5.
-port_ocean-0.5.
-port_ocean-0.5.
+port_ocean-0.5.14.dist-info/LICENSE.md,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+port_ocean-0.5.14.dist-info/METADATA,sha256=5DHXN2mLFMECLS5hooI0bnSMhqLa0FiTTyA65V9ozO8,6554
+port_ocean-0.5.14.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+port_ocean-0.5.14.dist-info/entry_points.txt,sha256=F_DNUmGZU2Kme-8NsWM5LLE8piGMafYZygRYhOVtcjA,54
+port_ocean-0.5.14.dist-info/RECORD,,
port_ocean/core/handlers/entities_state_applier/port/validate_entity_relations.py
DELETED

@@ -1,40 +0,0 @@
-import asyncio
-
-from port_ocean.clients.port.client import PortClient
-from port_ocean.core.handlers.entities_state_applier.port.get_related_entities import (
-    get_related_entities,
-)
-from port_ocean.core.handlers.entity_processor.base import EntityPortDiff
-from port_ocean.core.utils import is_same_entity
-from port_ocean.exceptions.core import RelationValidationException
-
-
-async def validate_entity_relations(
-    diff: EntityPortDiff, port_client: PortClient
-) -> None:
-    modified_or_created_entities = diff.modified + diff.created
-    related_entities = await get_related_entities(
-        modified_or_created_entities, port_client
-    )
-
-    required_entities = []
-
-    for entity in related_entities:
-        if any(is_same_entity(item, entity) for item in diff.deleted):
-            raise RelationValidationException(
-                f"Cant delete entity {entity} of blueprint {entity.blueprint} "
-                f"because it was specified as relation target of entity {entity} "
-                f"of blueprint {entity.blueprint}"
-            )
-
-        if not any(
-            is_same_entity(item, entity) for item in modified_or_created_entities
-        ):
-            required_entities.append(entity)
-
-    await asyncio.gather(
-        *(
-            port_client.validate_entity_exist(item.identifier, item.blueprint)
-            for item in required_entities
-        )
-    )
{port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/LICENSE.md
File without changes

{port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/WHEEL
File without changes

{port_ocean-0.5.10.dist-info → port_ocean-0.5.14.dist-info}/entry_points.txt
File without changes