port-ocean 0.5.6__py3-none-any.whl → 0.17.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- integrations/_infra/Dockerfile.Deb +56 -0
- integrations/_infra/Dockerfile.alpine +108 -0
- integrations/_infra/Dockerfile.base.builder +26 -0
- integrations/_infra/Dockerfile.base.runner +13 -0
- integrations/_infra/Dockerfile.dockerignore +94 -0
- {port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}} → integrations/_infra}/Makefile +21 -8
- integrations/_infra/grpcio.sh +18 -0
- integrations/_infra/init.sh +5 -0
- port_ocean/bootstrap.py +1 -1
- port_ocean/cli/commands/defaults/clean.py +3 -1
- port_ocean/cli/commands/new.py +42 -7
- port_ocean/cli/commands/sail.py +7 -1
- port_ocean/cli/cookiecutter/cookiecutter.json +3 -0
- port_ocean/cli/cookiecutter/hooks/post_gen_project.py +20 -3
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.env.example +6 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/blueprints.json +41 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/port-app-config.yml +16 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml +6 -7
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CHANGELOG.md +1 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CONTRIBUTING.md +7 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/changelog/.gitignore +1 -0
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/main.py +16 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/pyproject.toml +21 -10
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/tests/test_sample.py +2 -0
- port_ocean/clients/port/authentication.py +16 -4
- port_ocean/clients/port/client.py +17 -0
- port_ocean/clients/port/mixins/blueprints.py +7 -8
- port_ocean/clients/port/mixins/entities.py +108 -53
- port_ocean/clients/port/mixins/integrations.py +23 -34
- port_ocean/clients/port/retry_transport.py +0 -5
- port_ocean/clients/port/utils.py +9 -3
- port_ocean/config/base.py +16 -16
- port_ocean/config/settings.py +79 -11
- port_ocean/context/event.py +18 -5
- port_ocean/context/ocean.py +14 -3
- port_ocean/core/defaults/clean.py +10 -3
- port_ocean/core/defaults/common.py +25 -9
- port_ocean/core/defaults/initialize.py +111 -100
- port_ocean/core/event_listener/__init__.py +8 -0
- port_ocean/core/event_listener/base.py +49 -10
- port_ocean/core/event_listener/factory.py +9 -1
- port_ocean/core/event_listener/http.py +11 -3
- port_ocean/core/event_listener/kafka.py +24 -5
- port_ocean/core/event_listener/once.py +96 -4
- port_ocean/core/event_listener/polling.py +16 -14
- port_ocean/core/event_listener/webhooks_only.py +41 -0
- port_ocean/core/handlers/__init__.py +1 -2
- port_ocean/core/handlers/entities_state_applier/base.py +4 -1
- port_ocean/core/handlers/entities_state_applier/port/applier.py +29 -87
- port_ocean/core/handlers/entities_state_applier/port/order_by_entities_dependencies.py +5 -2
- port_ocean/core/handlers/entity_processor/base.py +26 -22
- port_ocean/core/handlers/entity_processor/jq_entity_processor.py +253 -45
- port_ocean/core/handlers/port_app_config/base.py +55 -15
- port_ocean/core/handlers/port_app_config/models.py +24 -5
- port_ocean/core/handlers/resync_state_updater/__init__.py +5 -0
- port_ocean/core/handlers/resync_state_updater/updater.py +84 -0
- port_ocean/core/integrations/base.py +5 -7
- port_ocean/core/integrations/mixins/events.py +3 -1
- port_ocean/core/integrations/mixins/sync.py +4 -2
- port_ocean/core/integrations/mixins/sync_raw.py +209 -74
- port_ocean/core/integrations/mixins/utils.py +1 -1
- port_ocean/core/models.py +44 -0
- port_ocean/core/ocean_types.py +29 -11
- port_ocean/core/utils/entity_topological_sorter.py +90 -0
- port_ocean/core/utils/utils.py +109 -0
- port_ocean/debug_cli.py +5 -0
- port_ocean/exceptions/core.py +4 -0
- port_ocean/exceptions/port_defaults.py +0 -2
- port_ocean/helpers/retry.py +85 -24
- port_ocean/log/handlers.py +23 -2
- port_ocean/log/logger_setup.py +8 -1
- port_ocean/log/sensetive.py +25 -10
- port_ocean/middlewares.py +10 -2
- port_ocean/ocean.py +57 -24
- port_ocean/run.py +10 -5
- port_ocean/tests/__init__.py +0 -0
- port_ocean/tests/clients/port/mixins/test_entities.py +53 -0
- port_ocean/tests/conftest.py +4 -0
- port_ocean/tests/core/defaults/test_common.py +166 -0
- port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +350 -0
- port_ocean/tests/core/handlers/mixins/test_sync_raw.py +552 -0
- port_ocean/tests/core/test_utils.py +73 -0
- port_ocean/tests/core/utils/test_entity_topological_sorter.py +99 -0
- port_ocean/tests/helpers/__init__.py +0 -0
- port_ocean/tests/helpers/fake_port_api.py +191 -0
- port_ocean/tests/helpers/fixtures.py +46 -0
- port_ocean/tests/helpers/integration.py +31 -0
- port_ocean/tests/helpers/ocean_app.py +66 -0
- port_ocean/tests/helpers/port_client.py +21 -0
- port_ocean/tests/helpers/smoke_test.py +82 -0
- port_ocean/tests/log/test_handlers.py +71 -0
- port_ocean/tests/test_smoke.py +74 -0
- port_ocean/tests/utils/test_async_iterators.py +45 -0
- port_ocean/tests/utils/test_cache.py +189 -0
- port_ocean/utils/async_iterators.py +109 -0
- port_ocean/utils/cache.py +37 -1
- port_ocean/utils/misc.py +22 -4
- port_ocean/utils/queue_utils.py +88 -0
- port_ocean/utils/signal.py +1 -4
- port_ocean/utils/time.py +54 -0
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/METADATA +27 -19
- port_ocean-0.17.8.dist-info/RECORD +164 -0
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/WHEEL +1 -1
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.dockerignore +0 -94
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/Dockerfile +0 -15
- port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/config.yaml +0 -17
- port_ocean/core/handlers/entities_state_applier/port/validate_entity_relations.py +0 -40
- port_ocean/core/utils.py +0 -65
- port_ocean-0.5.6.dist-info/RECORD +0 -129
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.5.6.dist-info → port_ocean-0.17.8.dist-info}/entry_points.txt +0 -0
port_ocean/core/handlers/entity_processor/jq_entity_processor.py
@@ -1,15 +1,39 @@
 import asyncio
-import
+from asyncio import Task
+from dataclasses import dataclass, field
 from functools import lru_cache
-from typing import Any
-
-
+from typing import Any, Optional
+import jq  # type: ignore
+from loguru import logger
 
+from port_ocean.context.ocean import ocean
 from port_ocean.core.handlers.entity_processor.base import BaseEntityProcessor
 from port_ocean.core.handlers.port_app_config.models import ResourceConfig
 from port_ocean.core.models import Entity
-from port_ocean.core.ocean_types import
+from port_ocean.core.ocean_types import (
+    RAW_ITEM,
+    EntitySelectorDiff,
+    CalculationResult,
+)
+from port_ocean.core.utils.utils import (
+    gather_and_split_errors_from_results,
+    zip_and_sum,
+)
 from port_ocean.exceptions.core import EntityProcessorException
+from port_ocean.utils.queue_utils import process_in_queue
+
+
+@dataclass
+class MappedEntity:
+    """Represents the entity after applying the mapping
+
+    This class holds the mapping entity along with the selector boolean value and optionally the raw data.
+    """
+
+    entity: dict[str, Any] = field(default_factory=dict)
+    did_entity_pass_selector: bool = False
+    raw_data: Optional[dict[str, Any]] = None
+    misconfigurations: dict[str, str] = field(default_factory=dict)
 
 
 class JQEntityProcessor(BaseEntityProcessor):
@@ -22,38 +46,102 @@ class JQEntityProcessor(BaseEntityProcessor):
 
     @lru_cache
     def _compile(self, pattern: str) -> Any:
+        if not ocean.config.allow_environment_variables_jq_access:
+            pattern = "def env: {}; {} as $ENV | " + pattern
         return jq.compile(pattern)
 
+    @staticmethod
+    def _stop_iterator_handler(func: Any) -> Any:
+        """
+        Wrap the function to handle StopIteration exceptions.
+        Prevents StopIteration from stopping the thread and skipping further queue processing.
+        """
+
+        def inner() -> Any:
+            try:
+                return func()
+            except StopIteration:
+                return None
+
+        return inner
+
+    @staticmethod
+    def _notify_mapping_issues(
+        entity_misconfigurations: dict[str, str],
+        missing_required_fields: bool,
+        entity_mapping_fault_counter: int,
+    ) -> None:
+
+        if len(entity_misconfigurations) > 0:
+            logger.info(
+                f"Unable to find valid data for: {entity_misconfigurations} (null, missing, or misconfigured)"
+            )
+        if missing_required_fields:
+            logger.info(
+                f"{entity_mapping_fault_counter} transformations of batch failed due to empty, null or missing values"
+            )
+
     async def _search(self, data: dict[str, Any], pattern: str) -> Any:
         try:
             loop = asyncio.get_event_loop()
             compiled_pattern = self._compile(pattern)
-
-            return await loop.run_in_executor(
-
+            func = compiled_pattern.input_value(data)
+            return await loop.run_in_executor(
+                None, self._stop_iterator_handler(func.first)
+            )
+        except Exception as exc:
+            logger.debug(
+                f"Search failed for pattern '{pattern}' in data: {data}, Error: {exc}"
+            )
             return None
 
     async def _search_as_bool(self, data: dict[str, Any], pattern: str) -> bool:
         loop = asyncio.get_event_loop()
+
         compiled_pattern = self._compile(pattern)
-
-        value = await loop.run_in_executor(None, first_value_callable)
+        func = compiled_pattern.input_value(data)
 
+        value = await loop.run_in_executor(
+            None, self._stop_iterator_handler(func.first)
+        )
         if isinstance(value, bool):
             return value
-
         raise EntityProcessorException(
-            f"Expected boolean value, got {type(value)} instead"
+            f"Expected boolean value, got value:{value} of type: {type(value)} instead"
         )
 
     async def _search_as_object(
-        self,
+        self,
+        data: dict[str, Any],
+        obj: dict[str, Any],
+        misconfigurations: dict[str, str] | None = None,
     ) -> dict[str, Any | None]:
-
+        """
+        Identify and extract the relevant value for the chosen key and populate it into the entity
+        :param data: the property itself that holds the key and the value, it is being passed to the task and we get back a task item,
+            if the data is a dict, we will recursively call this function again.
+        :param obj: the key that we want its value to be mapped into our entity.
+        :param misconfigurations: due to the recursive nature of this function,
+            we aim to have a dict that represents all of the misconfigured properties and when used recursively,
+            we pass this reference to misconfigured object to add the relevant misconfigured keys.
+        :return: Mapped object with found value.
+        """
+
+        search_tasks: dict[
+            str, Task[dict[str, Any | None]] | list[Task[dict[str, Any | None]]]
+        ] = {}
         for key, value in obj.items():
-            if isinstance(value,
+            if isinstance(value, list):
+                search_tasks[key] = [
+                    asyncio.create_task(
+                        self._search_as_object(data, obj, misconfigurations)
+                    )
+                    for obj in value
+                ]
+
+            elif isinstance(value, dict):
                 search_tasks[key] = asyncio.create_task(
-                    self._search_as_object(data, value)
+                    self._search_as_object(data, value, misconfigurations)
                 )
             else:
                 search_tasks[key] = asyncio.create_task(self._search(data, value))
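The `_compile` change above is the basis of the new `allow_environment_variables_jq_access` setting: when access is disallowed, every pattern is prefixed so that jq's `env` builtin and the `$ENV` variable resolve to an empty object. A rough standalone sketch of that effect (not from the package), assuming the bundled jq bindings expose the `env` builtin and the `input_value`/`first` API used in the diff:

import os
import jq  # the same bindings the processor uses

os.environ["SECRET_TOKEN"] = "hunter2"
guard = "def env: {}; {} as $ENV | "  # prefix added by _compile when access is disallowed

# Without the guard the expression can read the process environment.
print(jq.compile("env.SECRET_TOKEN").input_value({}).first())          # expected: "hunter2"
# With the guard, `env` is shadowed by an empty object, so nothing leaks.
print(jq.compile(guard + "env.SECRET_TOKEN").input_value({}).first())  # expected: None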
@@ -61,45 +149,165 @@ class JQEntityProcessor(BaseEntityProcessor):
         result: dict[str, Any | None] = {}
         for key, task in search_tasks.items():
             try:
-
+                if isinstance(task, list):
+                    result_list = []
+                    for task in task:
+                        task_result = await task
+                        if task_result is None and misconfigurations is not None:
+                            misconfigurations[key] = obj[key]
+                        result_list.append(task_result)
+                    result[key] = result_list
+                else:
+                    task_result = await task
+                    if task_result is None and misconfigurations is not None:
+                        misconfigurations[key] = obj[key]
+                    result[key] = task_result
             except Exception:
                 result[key] = None
-
         return result
 
-    async def
-        self,
-
-
-
-
-
-
+    async def _get_mapped_entity(
+        self,
+        data: dict[str, Any],
+        raw_entity_mappings: dict[str, Any],
+        selector_query: str,
+        parse_all: bool = False,
+    ) -> MappedEntity:
+        should_run = await self._search_as_bool(data, selector_query)
+        if parse_all or should_run:
+            misconfigurations: dict[str, str] = {}
+            mapped_entity = await self._search_as_object(
+                data, raw_entity_mappings, misconfigurations
+            )
+            return MappedEntity(
+                mapped_entity,
+                did_entity_pass_selector=should_run,
+                raw_data=data if should_run else None,
+                misconfigurations=misconfigurations,
+            )
+
+        return MappedEntity()
+
+    async def _calculate_entity(
+        self,
+        data: dict[str, Any],
+        raw_entity_mappings: dict[str, Any],
+        items_to_parse: str | None,
+        selector_query: str,
+        parse_all: bool = False,
+    ) -> tuple[list[MappedEntity], list[Exception]]:
+        raw_data = [data.copy()]
+        if items_to_parse:
+            items = await self._search(data, items_to_parse)
+            if not isinstance(items, list):
+                logger.warning(
+                    f"Failed to parse items for JQ expression {items_to_parse}, Expected list but got {type(items)}."
+                    f" Skipping..."
                 )
-
+                return [], []
+            raw_data = [{"item": item, **data} for item in items]
 
-
-
+        entities, errors = await gather_and_split_errors_from_results(
+            [
+                self._get_mapped_entity(
+                    raw,
+                    raw_entity_mappings,
+                    selector_query,
+                    parse_all,
+                )
+                for raw in raw_data
+            ]
+        )
+        if errors:
+            logger.error(
+                f"Failed to calculate entities with {len(errors)} errors. errors: {errors}"
+            )
+        return entities, errors
 
-
-
-
-
-
+    @staticmethod
+    async def _send_examples(data: list[dict[str, Any]], kind: str) -> None:
+        try:
+            if data:
+                await ocean.port_client.ingest_integration_kind_examples(
+                    kind, data, should_log=False
+                )
+        except Exception as ex:
+            logger.warning(
+                f"Failed to send raw data example {ex}",
+                exc_info=True,
             )
-        ]
 
     async def _parse_items(
-        self,
-
-
-
+        self,
+        mapping: ResourceConfig,
+        raw_results: list[RAW_ITEM],
+        parse_all: bool = False,
+        send_raw_data_examples_amount: int = 0,
+    ) -> CalculationResult:
+        raw_entity_mappings: dict[str, Any] = mapping.port.entity.mappings.dict(
+            exclude_unset=True
+        )
+        logger.info(f"Parsing {len(raw_results)} raw results into entities")
+        calculated_entities_results, errors = zip_and_sum(
+            await process_in_queue(
+                raw_results,
+                self._calculate_entity,
+                raw_entity_mappings,
+                mapping.port.items_to_parse,
+                mapping.selector.query,
+                parse_all,
+            )
         )
-
-
+        logger.debug(
+            f"Finished parsing raw results into entities with {len(errors)} errors. errors: {errors}"
         )
 
-
-
-
-        }
+        passed_entities = []
+        failed_entities = []
+        examples_to_send: list[dict[str, Any]] = []
+        entity_misconfigurations: dict[str, str] = {}
+        missing_required_fields: bool = False
+        entity_mapping_fault_counter: int = 0
+
+        for result in calculated_entities_results:
+            if len(result.misconfigurations) > 0:
+                entity_misconfigurations |= result.misconfigurations
+
+            if result.entity.get("identifier") and result.entity.get("blueprint"):
+                parsed_entity = Entity.parse_obj(result.entity)
+                if result.did_entity_pass_selector:
+                    passed_entities.append(parsed_entity)
+                    if (
+                        len(examples_to_send) < send_raw_data_examples_amount
+                        and result.raw_data is not None
+                    ):
+                        examples_to_send.append(result.raw_data)
+                else:
+                    failed_entities.append(parsed_entity)
+            else:
+                missing_required_fields = True
+                entity_mapping_fault_counter += 1
+
+        self._notify_mapping_issues(
+            entity_misconfigurations,
+            missing_required_fields,
+            entity_mapping_fault_counter,
+        )
+
+        if (
+            not calculated_entities_results
+            and raw_results
+            and send_raw_data_examples_amount > 0
+        ):
+            logger.warning(
+                f"No entities were parsed from {len(raw_results)} raw results, sending raw data examples"
+            )
+            examples_to_send = raw_results[:send_raw_data_examples_amount]
+
+        await self._send_examples(examples_to_send, mapping.kind)
+
+        return CalculationResult(
+            EntitySelectorDiff(passed=passed_entities, failed=failed_entities),
+            errors,
+            misonfigured_entity_keys=entity_misconfigurations,
+        )
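For context on the new `items_to_parse` handling in `_calculate_entity`: a single raw result is fanned out into one record per selected item, and each record keeps the original payload alongside an `item` key. A minimal, package-free illustration of that fan-out (field names are invented for the example):

# Hypothetical raw API response; ".issues" stands in for an itemsToParse expression.
raw_result = {"project": "backend", "issues": [{"id": 1}, {"id": 2}]}

items = raw_result["issues"]  # what `_search(data, items_to_parse)` would select
raw_data = [{"item": item, **raw_result} for item in items]  # same fan-out as the diff

# Each mapping (e.g. an identifier of ".item.id") now runs once per issue.
assert raw_data[0]["item"] == {"id": 1}
assert raw_data[1]["item"] == {"id": 2}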
port_ocean/core/handlers/port_app_config/base.py
@@ -5,8 +5,37 @@ from loguru import logger
 from pydantic import ValidationError
 
 from port_ocean.context.event import event
+from port_ocean.context.ocean import PortOceanContext
 from port_ocean.core.handlers.base import BaseHandler
 from port_ocean.core.handlers.port_app_config.models import PortAppConfig
+from port_ocean.utils.misc import get_time
+
+
+class PortAppConfigCache:
+    _port_app_config: PortAppConfig | None
+    _retrieval_time: float
+
+    def __init__(self, cache_ttl: int):
+        self._port_app_config = None
+        self._cache_ttl = cache_ttl
+
+    @property
+    def port_app_config(self) -> PortAppConfig:
+        if self._port_app_config is None:
+            raise ValueError("Port app config is not set")
+        return self._port_app_config
+
+    @port_app_config.setter
+    def port_app_config(self, value: PortAppConfig) -> None:
+        self._retrieval_time = get_time()
+        self._port_app_config = value
+
+    @property
+    def is_cache_invalid(self) -> bool:
+        return (
+            not self._port_app_config
+            or self._retrieval_time + self._cache_ttl < get_time()
+        )
 
 
 class BasePortAppConfig(BaseHandler):
@@ -21,24 +50,35 @@ class BasePortAppConfig(BaseHandler):
 
     CONFIG_CLASS: Type[PortAppConfig] = PortAppConfig
 
+    def __init__(self, context: PortOceanContext):
+        super().__init__(context)
+        self._app_config_cache = PortAppConfigCache(
+            self.context.config.port.port_app_config_cache_ttl
+        )
+
     @abstractmethod
     async def _get_port_app_config(self) -> dict[str, Any]:
         pass
 
-    async def get_port_app_config(self) -> PortAppConfig:
-        """
+    async def get_port_app_config(self, use_cache: bool = True) -> PortAppConfig:
+        """
+        Retrieve and parse the port application configuration.
 
-
-
+        :param use_cache: Determines whether to use the cached port-app-config if it exists, or to fetch it regardless
+        :return: The parsed port application configuration.
         """
-
-
-
-
-
-
-
-
-
-
-
+        if not use_cache or self._app_config_cache.is_cache_invalid:
+            raw_config = await self._get_port_app_config()
+            try:
+                self._app_config_cache.port_app_config = self.CONFIG_CLASS.parse_obj(
+                    raw_config
+                )
+            except ValidationError:
+                logger.error(
+                    "Invalid port app config found. Please check that the integration has been configured correctly."
+                )
+                logger.warning(f"Invalid port app config: {raw_config}")
+                raise
+
+        event.port_app_config = self._app_config_cache.port_app_config
+        return self._app_config_cache.port_app_config
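`get_port_app_config` now serves the parsed config from `PortAppConfigCache` until `port_app_config_cache_ttl` (taken from the Port client settings) expires, or refetches immediately when called with `use_cache=False`. A self-contained sketch of the same invalidation rule, assuming `get_time` returns epoch seconds:

import time

class TTLSketch:
    """Illustration only: the invalidation rule used by PortAppConfigCache."""

    def __init__(self, ttl_seconds: float) -> None:
        self._value: object | None = None
        self._retrieval_time = 0.0
        self._ttl = ttl_seconds

    def store(self, value: object) -> None:
        self._retrieval_time = time.time()
        self._value = value

    @property
    def is_cache_invalid(self) -> bool:
        # Invalid when nothing is cached yet or the TTL window has passed.
        return self._value is None or self._retrieval_time + self._ttl < time.time()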
port_ocean/core/handlers/port_app_config/models.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Any
 
 from pydantic import BaseModel, Field
@@ -5,20 +7,37 @@ from pydantic import BaseModel, Field
 from port_ocean.clients.port.types import RequestOptions
 
 
+class Rule(BaseModel):
+    property: str
+    operator: str
+    value: str
+
+
+class IngestSearchQuery(BaseModel):
+    combinator: str
+    rules: list[Rule | IngestSearchQuery]
+
+
 class EntityMapping(BaseModel):
-    identifier: str
+    identifier: str | IngestSearchQuery
     title: str | None
     blueprint: str
     team: str | None
     properties: dict[str, str] = Field(default_factory=dict)
-    relations: dict[str, str] = Field(default_factory=dict)
+    relations: dict[str, str | IngestSearchQuery] = Field(default_factory=dict)
 
+    @property
+    def is_using_search_identifier(self) -> bool:
+        return isinstance(self.identifier, dict)
 
-class PortResourceConfig(BaseModel):
-    class MappingsConfig(BaseModel):
-        mappings: EntityMapping
 
+class MappingsConfig(BaseModel):
+    mappings: EntityMapping
+
+
+class PortResourceConfig(BaseModel):
     entity: MappingsConfig
+    items_to_parse: str | None = Field(alias="itemsToParse")
 
 
 class Selector(BaseModel):
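The new `Rule`/`IngestSearchQuery` models let an identifier or a relation be expressed as a search query instead of a plain jq string. An illustrative mapping (field values invented, not from the package) parsed with the models above; every leaf string is still a jq expression that `_search_as_object` evaluates against the raw item:

from port_ocean.core.handlers.port_app_config.models import EntityMapping

mapping = EntityMapping.parse_obj(
    {
        "identifier": ".name",
        "title": ".name",
        "blueprint": '"service"',
        "team": None,
        "properties": {"url": ".links.html.href"},
        # A relation resolved via a search query rather than a direct jq value.
        "relations": {
            "project": {
                "combinator": '"and"',
                "rules": [
                    {"property": '"project_key"', "operator": '"="', "value": ".project.key"}
                ],
            }
        },
    }
)
print(mapping.is_using_search_identifier)  # False: the identifier here is a plain jq string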
port_ocean/core/handlers/resync_state_updater/updater.py
@@ -0,0 +1,84 @@
+import datetime
+from typing import Any, Literal
+from port_ocean.clients.port.client import PortClient
+from port_ocean.utils.misc import IntegrationStateStatus
+from port_ocean.utils.time import get_next_occurrence
+
+
+class ResyncStateUpdater:
+    def __init__(self, port_client: PortClient, scheduled_resync_interval: int | None):
+        self.port_client = port_client
+        self.initiated_at = datetime.datetime.now(tz=datetime.timezone.utc)
+        self.scheduled_resync_interval = scheduled_resync_interval
+
+        # This is used to differ between integration changes that require a full resync and state changes
+        # So that the polling event-listener can decide whether to perform a full resync or not
+        # TODO: remove this once we separate the state from the integration
+        self.last_integration_state_updated_at: str = ""
+
+    def _calculate_next_scheduled_resync(
+        self,
+        interval: int | None = None,
+        custom_start_time: datetime.datetime | None = None,
+    ) -> str | None:
+        if interval is None:
+            return None
+        return get_next_occurrence(
+            interval * 60, custom_start_time or self.initiated_at
+        ).isoformat()
+
+    async def update_before_resync(
+        self,
+        interval: int | None = None,
+        custom_start_time: datetime.datetime | None = None,
+    ) -> None:
+        _interval = interval or self.scheduled_resync_interval
+        nest_resync = self._calculate_next_scheduled_resync(
+            _interval, custom_start_time
+        )
+        state: dict[str, Any] = {
+            "status": IntegrationStateStatus.Running.value,
+            "lastResyncEnd": None,
+            "lastResyncStart": datetime.datetime.now(
+                tz=datetime.timezone.utc
+            ).isoformat(),
+            "nextResync": nest_resync,
+            "intervalInMinuets": _interval,
+        }
+
+        integration = await self.port_client.update_integration_state(
+            state, should_raise=False
+        )
+        if integration:
+            self.last_integration_state_updated_at = integration["resyncState"][
+                "updatedAt"
+            ]
+
+    async def update_after_resync(
+        self,
+        status: Literal[
+            IntegrationStateStatus.Completed, IntegrationStateStatus.Failed
+        ] = IntegrationStateStatus.Completed,
+        interval: int | None = None,
+        custom_start_time: datetime.datetime | None = None,
+    ) -> None:
+        _interval = interval or self.scheduled_resync_interval
+        nest_resync = self._calculate_next_scheduled_resync(
+            _interval, custom_start_time
+        )
+        state: dict[str, Any] = {
+            "status": status.value,
+            "lastResyncEnd": datetime.datetime.now(
+                tz=datetime.timezone.utc
+            ).isoformat(),
+            "nextResync": nest_resync,
+            "intervalInMinuets": _interval,
+        }
+
+        integration = await self.port_client.update_integration_state(
+            state, should_raise=False
+        )
+        if integration:
+            self.last_integration_state_updated_at = integration["resyncState"][
+                "updatedAt"
+            ]
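The updater is wired into the resync flow elsewhere (ocean.py and the event listeners, not shown in this excerpt). A rough usage sketch of the lifecycle it implements, with `port_client` and the hypothetical `do_resync` coroutine supplied by the caller:

from port_ocean.clients.port.client import PortClient
from port_ocean.core.handlers.resync_state_updater.updater import ResyncStateUpdater
from port_ocean.utils.misc import IntegrationStateStatus


async def run_resync(port_client: PortClient, do_resync) -> None:
    # Interval appears to be in minutes, per the "intervalInMinuets" state field above.
    updater = ResyncStateUpdater(port_client, scheduled_resync_interval=60)

    # Mark the integration state as Running and schedule the next occurrence.
    await updater.update_before_resync()
    try:
        await do_resync()
    except Exception:
        await updater.update_after_resync(IntegrationStateStatus.Failed)
        raise
    await updater.update_after_resync(IntegrationStateStatus.Completed)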
port_ocean/core/integrations/base.py
@@ -54,24 +54,22 @@ class BaseIntegration(SyncRawMixin, SyncMixin):
         """
         Initializes handlers, establishes integration at the specified port, and starts the event listener.
         """
-        logger.info(
+        logger.info(
+            "Starting integration",
+            integration_type=self.context.config.integration.type,
+        )
         if self.started:
             raise IntegrationAlreadyStartedException("Integration already started")
 
         if (
             not self.event_strategy["resync"]
             and self.__class__._on_resync == BaseIntegration._on_resync
+            and self.context.config.event_listener.should_resync
         ):
             raise NotImplementedError("on_resync is not implemented")
 
         await self.initialize_handlers()
 
-        logger.info("Initializing integration at port")
-        await self.context.port_client.initialize_integration(
-            self.context.config.integration.type,
-            self.context.config.event_listener.to_request(),
-        )
-
         self.started = True
 
         async with event_context(
port_ocean/core/integrations/mixins/events.py
@@ -46,7 +46,7 @@ class EventsMixin:
         return func
 
     def on_resync(
-        self, func: RESYNC_EVENT_LISTENER, kind: str | None = None
+        self, func: RESYNC_EVENT_LISTENER | None, kind: str | None = None
     ) -> RESYNC_EVENT_LISTENER:
         """Register a function as a listener for a "resync" event.
 
@@ -57,6 +57,8 @@
         Returns:
             RESYNC_EVENT_LISTENER: The input function, unchanged.
         """
+        if func is None:
+            return None
         if kind is None:
             logger.debug(f"Registering resync event listener any kind")
         else:
port_ocean/core/integrations/mixins/sync.py
@@ -97,9 +97,11 @@ class SyncMixin(HandlerMixin):
         """
         entities_at_port = await ocean.port_client.search_entities(user_agent_type)
 
-        await self.entities_state_applier.upsert(
+        modified_entities = await self.entities_state_applier.upsert(
+            entities, user_agent_type
+        )
         await self.entities_state_applier.delete_diff(
-            {"before": entities_at_port, "after":
+            {"before": entities_at_port, "after": modified_entities}, user_agent_type
         )
 
         logger.info("Finished syncing change")