port-ocean 0.28.2__py3-none-any.whl → 0.29.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- integrations/_infra/Dockerfile.Deb +6 -1
- integrations/_infra/Dockerfile.local +1 -0
- port_ocean/clients/port/authentication.py +19 -0
- port_ocean/clients/port/client.py +3 -0
- port_ocean/clients/port/mixins/actions.py +93 -0
- port_ocean/clients/port/mixins/blueprints.py +0 -12
- port_ocean/clients/port/mixins/entities.py +79 -44
- port_ocean/clients/port/mixins/integrations.py +7 -2
- port_ocean/config/settings.py +35 -3
- port_ocean/context/ocean.py +7 -5
- port_ocean/core/defaults/initialize.py +12 -5
- port_ocean/core/event_listener/__init__.py +7 -0
- port_ocean/core/event_listener/actions_only.py +42 -0
- port_ocean/core/event_listener/base.py +4 -1
- port_ocean/core/event_listener/factory.py +18 -9
- port_ocean/core/event_listener/http.py +4 -3
- port_ocean/core/event_listener/kafka.py +3 -2
- port_ocean/core/event_listener/once.py +5 -2
- port_ocean/core/event_listener/polling.py +4 -3
- port_ocean/core/event_listener/webhooks_only.py +3 -2
- port_ocean/core/handlers/actions/__init__.py +7 -0
- port_ocean/core/handlers/actions/abstract_executor.py +150 -0
- port_ocean/core/handlers/actions/execution_manager.py +434 -0
- port_ocean/core/handlers/entity_processor/jq_entity_processor.py +479 -17
- port_ocean/core/handlers/entity_processor/jq_input_evaluator.py +137 -0
- port_ocean/core/handlers/port_app_config/models.py +4 -2
- port_ocean/core/handlers/resync_state_updater/updater.py +4 -2
- port_ocean/core/handlers/webhook/abstract_webhook_processor.py +16 -0
- port_ocean/core/handlers/webhook/processor_manager.py +30 -12
- port_ocean/core/integrations/mixins/sync_raw.py +10 -5
- port_ocean/core/integrations/mixins/utils.py +250 -29
- port_ocean/core/models.py +35 -2
- port_ocean/core/utils/utils.py +16 -5
- port_ocean/exceptions/execution_manager.py +22 -0
- port_ocean/helpers/metric/metric.py +1 -1
- port_ocean/helpers/retry.py +4 -40
- port_ocean/log/logger_setup.py +2 -2
- port_ocean/ocean.py +31 -5
- port_ocean/tests/clients/port/mixins/test_entities.py +71 -5
- port_ocean/tests/core/event_listener/test_kafka.py +14 -7
- port_ocean/tests/core/handlers/actions/test_execution_manager.py +837 -0
- port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +932 -1
- port_ocean/tests/core/handlers/entity_processor/test_jq_input_evaluator.py +932 -0
- port_ocean/tests/core/handlers/webhook/test_processor_manager.py +3 -1
- port_ocean/tests/core/utils/test_get_port_diff.py +164 -0
- port_ocean/tests/helpers/test_retry.py +241 -1
- port_ocean/tests/utils/test_cache.py +240 -0
- port_ocean/utils/cache.py +45 -9
- {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/METADATA +2 -1
- {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/RECORD +53 -43
- {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/LICENSE.md +0 -0
- {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/WHEEL +0 -0
- {port_ocean-0.28.2.dist-info → port_ocean-0.29.0.dist-info}/entry_points.txt +0 -0
port_ocean/core/handlers/entity_processor/jq_input_evaluator.py (new file)
@@ -0,0 +1,137 @@
+import re
+from enum import Enum
+
+# This file is used to classify the input that a jq expression to run on.
+# It is used to determine if the jq expression can be executed without providing any JSON input (const expressions)
+# or on a single item (in items to parse situation)
+# or on all the data
+
+
+class InputClassifyingResult(Enum):
+    NONE = 1
+    SINGLE = 2
+    ALL = 3
+
+
+# Functions/filters that (even without ".") still require/assume input
+_INPUT_DEPENDENT_FUNCS = r"""
+\b(
+map|select|reverse|sort|sort_by|unique|unique_by|group_by|flatten|transpose|
+split|explode|join|add|length|has|in|index|indices|contains|
+paths|leaf_paths|keys|keys_unsorted|values|to_entries|with_entries|from_entries|
+del|delpaths|walk|reduce|foreach|input|inputs|limit|first|last|nth|
+while|until|recurse|recurse_down|bsearch|combinations|permutations
+)\b
+"""
+
+_INPUT_DEPENDENT_RE = re.compile(_INPUT_DEPENDENT_FUNCS, re.VERBOSE)
+
+
+# String literal handling (jq uses double quotes for strings)
+_STRING_LITERAL_RE = re.compile(r'"(?:\\.|[^"\\])*"')
+_STRING_ONLY_RE = re.compile(r'^\s*"(?:\\.|[^"\\])*"\s*$')
+_NUMBER_ONLY_RE = re.compile(r"^\s*-?\d+(\.\d+)?\s*$")
+
+
+def _mask_strings(expr: str) -> str:
+    """
+    Replace string literals with 'S' strings so '.' inside quotes don't count.
+    Example:
+    - '"this is a string"' ---> 'S'
+    - '"sting" + .field'. ---> 'S + .field'
+    """
+    return _STRING_LITERAL_RE.sub("S", expr)
+
+
+def _mask_numbers(expr: str) -> str:
+    """
+    Replace number literals with 'N' so decimal points in numbers don't count as input references.
+    Example:
+    - '3.14' ---> 'N'
+    - '42 + 3.14' ---> 'N + N'
+    """
+    # Pattern to match numbers (integers and decimals, with optional sign)
+    number_pattern = re.compile(r"[-+]?\d+(?:\.\d+)?")
+    return number_pattern.sub("N", expr)
+
+
+def can_expression_run_with_no_input(selector_query: str) -> bool:
+    """
+    Returns True if the jq expression can be executed without providing any JSON input.
+    Rules:
+    - Whitespace-only => No Input Required
+    - A pure string literal => No Input Required (even if it contains '.')
+    - After masking strings, if it contains '.' => Input Required
+    - Disallow known input-dependent functions (functions that require input)
+    - After masking strings, if it contains only operators and numbers and 'S' => No Input Required
+    - Allow null/true/false/number/range/empty, and array/object literals that
+      don't reference input (no '.' after masking strings) => No Input Required
+    Example:
+    - blueprint: '"newRelicService"' in mapping, selector_query param would be '"newRelicService"' => No Input Required
+    """
+    s = selector_query.strip()
+    if s == "":
+        return True  # whitespace-only
+
+    # Pure string literal is nullary
+    if _STRING_ONLY_RE.match(s):
+        return True
+
+    # First mask strings, then mask numbers to prevent decimal points in numbers from being treated as input references
+    masked = _mask_strings(s).strip()
+    masked = _mask_numbers(masked).strip()
+
+    # If it contains any known input-dependent functions, don't shortcut
+    if _INPUT_DEPENDENT_RE.search(masked):
+        return False
+
+    # If it contains only operators and 'S'/'N', it can be executed with no input
+    # Example:
+    # - '"abc" + "def"' ---> 'S + S' => No Input Required
+    # - '3.14 + 2.5' ---> 'N + N' => No Input Required
+    # if re.fullmatch(
+    #     r"(?:S|N)(?:\s*[+\-*/]\s*(?:S|N))*",
+    #     masked,
+    # ):
+    #     return True
+
+    if "." not in masked:
+        return True
+
+    return False
+
+
+def _can_expression_run_on_single_item(expr: str, key: str) -> bool:
+    """
+    Detect `.key` outside of quotes, as a standalone path segment beginning
+    after a non-word boundary (start, space, |, (, [, {, , or :) and not part
+    of `.something.key`.
+    assuming key = 'item'
+    Examples:
+    - .item.yaeli => true
+    - map(.item.yaeli) => true
+    - .body.item => false
+    """
+    if not key:
+        return False
+
+    masked = _mask_strings(expr)
+    masked = _mask_numbers(masked)
+    pattern = re.compile(rf"(?<![A-Za-z0-9_])\.{re.escape(key)}(?![A-Za-z0-9_])")
+    return bool(pattern.search(masked))
+
+
+def classify_input(
+    selector_query: str, single_item_key: str | None = None
+) -> InputClassifyingResult:
+    """
+    Returns the input evaluation result for the jq expression.
+    Conservative: requires NO '.' and must match a known nullary-safe pattern.
+    """
+    if can_expression_run_with_no_input(selector_query):
+        return InputClassifyingResult.NONE
+    if single_item_key and _can_expression_run_on_single_item(
+        selector_query, single_item_key
+    ):
+        return InputClassifyingResult.SINGLE
+    return InputClassifyingResult.ALL
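For orientation, a minimal sketch of how the new classifier behaves, assuming the module path from the file list above; the selector values are illustrative:

```python
from port_ocean.core.handlers.entity_processor.jq_input_evaluator import (
    InputClassifyingResult,
    classify_input,
)

# A constant expression (no '.' outside string literals) needs no JSON input.
assert classify_input('"newRelicService"') == InputClassifyingResult.NONE

# An expression that only touches the itemsToParse key can run on a single parsed item.
assert (
    classify_input(".item.name", single_item_key="item")
    == InputClassifyingResult.SINGLE
)

# Anything else falls back to running against the full data.
assert (
    classify_input(".body.repositories", single_item_key="item")
    == InputClassifyingResult.ALL
)
```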
port_ocean/core/handlers/port_app_config/models.py
@@ -29,7 +29,9 @@ class EntityMapping(BaseModel):
 
     @property
     def is_using_search_identifier(self) -> bool:
-        return isinstance(self.identifier, dict)
+        return isinstance(self.identifier, dict) or isinstance(
+            self.identifier, IngestSearchQuery
+        )
 
 
 class MappingsConfig(BaseModel):
@@ -39,7 +41,7 @@ class MappingsConfig(BaseModel):
 class PortResourceConfig(BaseModel):
     entity: MappingsConfig
     items_to_parse: str | None = Field(alias="itemsToParse")
-    items_to_parse_name: str
+    items_to_parse_name: str = Field(alias="itemsToParseName", default="item")
 
 
 class Selector(BaseModel):
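A stripped-down sketch of what the `itemsToParseName` change means when a mapping is parsed. `_PortResourceConfigSketch` below is a stand-in that replicates only the two changed fields (it is not the real `PortResourceConfig`), assuming pydantic `Field` aliases behave as in the hunk above:

```python
from pydantic import BaseModel, Field


class _PortResourceConfigSketch(BaseModel):
    # Stand-in for the changed fields only; not the real PortResourceConfig.
    items_to_parse: str | None = Field(alias="itemsToParse", default=None)
    items_to_parse_name: str = Field(alias="itemsToParseName", default="item")


# Existing mappings that only set itemsToParse keep working: each parsed
# element is exposed under the default key "item".
cfg = _PortResourceConfigSketch.parse_obj({"itemsToParse": ".body.issues"})
assert cfg.items_to_parse_name == "item"

# New mappings can rename the per-item key explicitly.
cfg = _PortResourceConfigSketch.parse_obj(
    {"itemsToParse": ".body.issues", "itemsToParseName": "issue"}
)
assert cfg.items_to_parse_name == "issue"
```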
port_ocean/core/handlers/resync_state_updater/updater.py
@@ -60,7 +60,9 @@ class ResyncStateUpdater:
     async def update_after_resync(
         self,
         status: Literal[
-            IntegrationStateStatus.Completed,
+            IntegrationStateStatus.Completed,
+            IntegrationStateStatus.Failed,
+            IntegrationStateStatus.Aborted,
         ] = IntegrationStateStatus.Completed,
         interval: int | None = None,
         custom_start_time: datetime.datetime | None = None,
@@ -100,4 +102,4 @@ class ResyncStateUpdater:
             await ocean.metrics.report_sync_metrics(
                 kinds=[ocean.metrics.current_resource_kind()]
             )
-        ocean.metrics.event_id = ""
+        ocean.metrics.event_id = f"{ocean.metrics.event_id}-done"
port_ocean/core/handlers/webhook/abstract_webhook_processor.py
@@ -1,4 +1,5 @@
 from abc import ABC, abstractmethod
+from enum import StrEnum
 from loguru import logger
 
 from port_ocean.core.handlers.port_app_config.models import ResourceConfig
@@ -12,6 +13,17 @@ from .webhook_event import (
 )
 
 
+class WebhookProcessorType(StrEnum):
+    """Type of webhook processor"""
+
+    # For action-related webhooks
+    # (e.g. update finished action using the workflow runs webhook)
+    ACTION = "action"
+    # For regular webhooks
+    # (e.g. repository events that should be reflected as Entities in Port)
+    WEBHOOK = "webhook"
+
+
 class AbstractWebhookProcessor(ABC):
     """
     Abstract base class for webhook processors
@@ -47,6 +59,10 @@ class AbstractWebhookProcessor(ABC):
         self.event = event
         self.retry_count = 0
 
+    @classmethod
+    def get_processor_type(cls) -> WebhookProcessorType:
+        return WebhookProcessorType.WEBHOOK
+
     async def on_error(self, error: Exception) -> None:
         """Hook to handle errors during processing. Override if needed"""
         delay = self.calculate_retry_delay()
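To illustrate the new hook, a hypothetical subclass (the class name and docstring are invented for this sketch; only `AbstractWebhookProcessor`, `WebhookProcessorType`, and `get_processor_type` come from the hunks above):

```python
from port_ocean.core.handlers.webhook.abstract_webhook_processor import (
    AbstractWebhookProcessor,
    WebhookProcessorType,
)


class WorkflowRunProcessor(AbstractWebhookProcessor):
    """Hypothetical action-webhook processor (e.g. workflow-run completion)."""

    @classmethod
    def get_processor_type(cls) -> WebhookProcessorType:
        # Declaring itself as ACTION makes the processor manager skip
        # kind/resource matching and pass resource=None, as shown in the
        # processor_manager hunks below.
        return WebhookProcessorType.ACTION

    # The base class's other abstract methods (should_process_event,
    # get_matching_kinds, handle_event, ...) are omitted from this sketch.
```

Processors that keep the default `WEBHOOK` type retain the existing resource-matching behaviour.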
port_ocean/core/handlers/webhook/processor_manager.py
@@ -11,11 +11,17 @@ from port_ocean.core.handlers.queue.abstract_queue import AbstractQueue
 from port_ocean.core.integrations.mixins.events import EventsMixin
 from port_ocean.core.integrations.mixins.live_events import LiveEventsMixin
 from port_ocean.exceptions.webhook_processor import WebhookEventNotSupportedError
-from .webhook_event import
+from port_ocean.core.handlers.webhook.webhook_event import (
+    WebhookEvent,
+    WebhookEventRawResults,
+    LiveEventTimestamp,
+)
 from port_ocean.context.event import event
 
-
-
+from port_ocean.core.handlers.webhook.abstract_webhook_processor import (
+    AbstractWebhookProcessor,
+    WebhookProcessorType,
+)
 from port_ocean.utils.signal import SignalHandler
 from port_ocean.core.handlers.queue import LocalQueue
 
@@ -56,7 +62,7 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         while True:
             event = None
             matching_processors: List[
-                Tuple[ResourceConfig, AbstractWebhookProcessor]
+                Tuple[ResourceConfig | None, AbstractWebhookProcessor]
             ] = []
             try:
                 event = await queue.get()
@@ -133,16 +139,22 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
 
     async def _extract_matching_processors(
         self, webhook_event: WebhookEvent, path: str
-    ) -> list[tuple[ResourceConfig, AbstractWebhookProcessor]]:
+    ) -> list[tuple[ResourceConfig | None, AbstractWebhookProcessor]]:
         """Find and extract the matching processor for an event"""
 
-        created_processors: list[
+        created_processors: list[
+            tuple[ResourceConfig | None, AbstractWebhookProcessor]
+        ] = []
         event_processor_names = []
 
         for processor_class in self._processors_classes[path]:
            processor = processor_class(webhook_event.clone())
            if await processor.should_process_event(webhook_event):
                event_processor_names.append(processor.__class__.__name__)
+                if processor.get_processor_type() == WebhookProcessorType.ACTION:
+                    created_processors.append((None, processor))
+                    continue
+
                kinds = await processor.get_matching_kinds(webhook_event)
                for kind in kinds:
                    for resource in event.port_app_config.resources:
@@ -179,7 +191,10 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
             event.set_timestamp(LiveEventTimestamp.FinishedProcessingWithError)
 
     async def _process_single_event(
-        self,
+        self,
+        processor: AbstractWebhookProcessor,
+        path: str,
+        resource: ResourceConfig | None,
     ) -> WebhookEventRawResults:
         """Process a single event with a specific processor"""
         try:
@@ -199,7 +214,7 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
             raise
 
     async def _execute_processor(
-        self, processor: AbstractWebhookProcessor, resource: ResourceConfig
+        self, processor: AbstractWebhookProcessor, resource: ResourceConfig | None
     ) -> WebhookEventRawResults:
         """Execute a single processor within a max processing time"""
         try:
@@ -213,7 +228,7 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         )
 
     async def _process_webhook_request(
-        self, processor: AbstractWebhookProcessor, resource: ResourceConfig
+        self, processor: AbstractWebhookProcessor, resource: ResourceConfig | None
     ) -> WebhookEventRawResults:
         """Process a webhook request with retry logic
 
@@ -235,9 +250,10 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         while True:
             try:
                 webhook_event_raw_results = await processor.handle_event(
-                    payload, resource
+                    payload, resource  # type: ignore[arg-type]
                 )
-
+                if resource is not None:
+                    webhook_event_raw_results.resource = resource
                 break
 
             except Exception as e:
@@ -258,7 +274,9 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         return webhook_event_raw_results
 
     def register_processor(
-        self,
+        self,
+        path: str,
+        processor: Type[AbstractWebhookProcessor],
     ) -> None:
         """Register a webhook processor for a specific path with optional filter
 
port_ocean/core/integrations/mixins/sync_raw.py
@@ -1,4 +1,5 @@
 import asyncio
+import sys
 import uuid
 from graphlib import CycleError
 import inspect
@@ -23,7 +24,7 @@ from port_ocean.core.integrations.mixins.utils import (
     resync_generator_wrapper,
     resync_function_wrapper,
 )
-from port_ocean.core.models import Entity, ProcessExecutionMode
+from port_ocean.core.models import Entity, IntegrationFeatureFlag, ProcessExecutionMode
 from port_ocean.core.ocean_types import (
     RAW_RESULT,
     RESYNC_RESULT,
@@ -116,13 +117,13 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
                 logger.info(
                     f"Found async generator function for {resource_config.kind} name: {task.__qualname__}"
                 )
-                results.append(resync_generator_wrapper(task, resource_config.kind,resource_config.port.items_to_parse))
+                results.append(resync_generator_wrapper(task, resource_config.kind, resource_config.port.items_to_parse_name, resource_config.port.items_to_parse))
             else:
                 logger.info(
                     f"Found sync function for {resource_config.kind} name: {task.__qualname__}"
                 )
                 task = typing.cast(Callable[[str], Awaitable[RAW_RESULT]], task)
-                tasks.append(resync_function_wrapper(task, resource_config.kind))
+                tasks.append(resync_function_wrapper(task, resource_config.kind, resource_config.port.items_to_parse))
 
         logger.info(
             f"Found {len(tasks) + len(results)} resync tasks for {resource_config.kind}"
@@ -477,7 +478,7 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
             bool: True if lakehouse data is enabled, False otherwise
         """
         flags = await ocean.port_client.get_organization_feature_flags()
-        if
+        if IntegrationFeatureFlag.LAKEHOUSE_ELIGIBLE in flags and ocean.config.lakehouse_enabled:
             return True
         return False
 
@@ -1002,7 +1003,11 @@ class SyncRawMixin(HandlerMixin, EventsMixin):
 
         creation_results: list[tuple[list[Entity], list[Exception]]] = []
 
-
+        if sys.platform.startswith("win"):
+            # fork is not supported on windows
+            multiprocessing.set_start_method("spawn", True)
+        else:
+            multiprocessing.set_start_method("fork", True)
         try:
             for index, resource in enumerate(app_config.resources):
                 logger.info(
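One detail worth noting about the new start-method selection: the second positional argument of `multiprocessing.set_start_method` is `force`, so the choice can be applied even when a context was already configured. A small standalone sketch:

```python
import multiprocessing

# force=True (the bare `True` in the hunk above) lets the start method be set
# again without raising RuntimeError if one was already chosen.
multiprocessing.set_start_method("spawn", True)
multiprocessing.set_start_method("spawn", True)  # no error thanks to force=True
print(multiprocessing.get_start_method())  # -> "spawn"
```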