malevich-coretools 0.3.54__tar.gz → 0.3.60__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of malevich-coretools might be problematic.
- {malevich_coretools-0.3.54/malevich_coretools.egg-info → malevich_coretools-0.3.60}/PKG-INFO +1 -1
- malevich_coretools-0.3.60/VERSION +1 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/abstract/abstract.py +46 -5
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/funcs/funcs.py +27 -3
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/funcs/helpers.py +2 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/const.py +5 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/helpers.py +72 -30
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/utils.py +163 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60/malevich_coretools.egg-info}/PKG-INFO +1 -1
- malevich_coretools-0.3.54/VERSION +0 -1
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/LICENSE +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/MANIFEST.in +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/README.md +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/abstract/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/abstract/operations.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/abstract/pipeline.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/abstract/statuses.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/admin/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/admin/utils.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/batch/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/batch/utils.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/funcs/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/funcs/checks.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/config.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/kafka_utils.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/tools/__init__.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/tools/abstract.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/tools/vast.py +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools.egg-info/SOURCES.txt +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools.egg-info/dependency_links.txt +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools.egg-info/requires.txt +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools.egg-info/top_level.txt +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/pyproject.toml +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/requirements.txt +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/setup.cfg +0 -0
- {malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/setup.py +0 -0
malevich_coretools-0.3.60/VERSION ADDED
@@ -0,0 +1 @@
+0.3.60
{malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/abstract/abstract.py
RENAMED
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Set, Tuple, Union
 
 from pydantic import BaseModel
 
@@ -197,6 +197,7 @@ class MainTask(BaseModel):
     profileMode: Optional[str] = None
     withLogs: bool = False  # use only in prepare
     saveFails: bool = True
+    clearDagLogs: bool = True
     scaleCount: int = 1
     scaleInfo: List[ScaleInfo]
     component: TaskComponent
@@ -228,6 +229,7 @@ class MainPipeline(BaseModel):
     run: bool = True
     synthetic: bool = False
     saveFails: bool = True
+    clearDagLogs: bool = True
     scaleCount: int = 1
     tags: Optional[Dict[str, str]] = None
 
@@ -241,6 +243,7 @@ class RunTask(Operation):
     profileMode: Optional[str] = None
     withLogs: bool = False
     schedule: Optional[Schedule] = None
+    broadcast: bool = False
 
 
 class AppManage(Operation):
@@ -385,8 +388,8 @@ class CollectionMetadata(BaseModel):
 
 class LogsResult(BaseModel):
     data: str
-    logs: Dict[str, str]
-    userLogs: Dict[str, str] = {}
+    logs: Optional[Dict[str, str]] = {}
+    userLogs: Optional[Dict[str, str]] = {}
 
 
 class AppLog(BaseModel):
@@ -603,7 +606,7 @@ class UserLimits(BaseModel):
     defaultGpuDisk: int
 
 
-class BasePlatformSettings(BaseModel):
+class BasePlatformSettingsMain(BaseModel):
     memoryRequest: Optional[int] = None
     memoryLimit: Optional[int] = None
     cpuRequest: Optional[int] = None
@@ -613,7 +616,11 @@ class BasePlatformSettings(BaseModel):
     kubeconfig: Optional[str] = None
 
 
-class
+class BasePlatformSettings(BasePlatformSettingsMain):
+    allowKafka: bool = False
+
+
+class Limits(BasePlatformSettingsMain):
     gpuDisk: Optional[int] = None
 
 
@@ -675,3 +682,37 @@ class MCPToolCall(BaseModel):
 class RunsFilter(BaseModel):
     data: Optional[Dict[str, str]] = None
     withTags: bool = False
+
+
+class AppLocalScheme(BaseModel):
+    keys: List[str]
+    optionalKeys: Set[str]
+
+
+class AppErrorInfo(BaseModel):
+    operationId: str
+    runId: str
+    bindId: str
+    funId: str
+    iteration: int
+    isProcessor: bool = True
+    trace: str
+    errType: str
+    errArgs: List[str]
+    isMalevichErr: bool
+    cfg: Optional[Dict[str, Any]]
+    schemes: Optional[Dict[str, AppLocalScheme]] = None
+    args: List[List[Union[Union[str, List[str]], List[Union[str, List[str]]]]]] = None  # mb empty for send info structure
+    argsNames: List[str]
+
+
+class AppErrorInfos(BaseModel):
+    data: List[AppErrorInfo]
+
+
+class AppErrorInfoFilter(BaseModel):
+    operationId: str
+    runId: Optional[str] = None
+    bindId: Optional[str] = None
+    errType: Optional[str] = None
+    isMalevichErr: bool = False
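The new error-info models are plain pydantic classes; below is a minimal, hypothetical sketch of how they might be instantiated, with field names and types taken from the diff above and every value invented for illustration:

# Hypothetical usage of the new models from abstract.py (all values invented).
from malevich_coretools.abstract.abstract import (
    AppErrorInfo,
    AppErrorInfoFilter,
    AppErrorInfos,
)

err = AppErrorInfo(
    operationId="op-123",
    runId="run-1",
    bindId="app-bind",
    funId="process",
    iteration=0,
    trace="Traceback (most recent call last): ...",
    errType="ValueError",
    errArgs=["bad input"],
    isMalevichErr=False,
    cfg=None,
    args=[],        # "mb empty for send info structure", per the field comment
    argsNames=[],
)
infos = AppErrorInfos(data=[err])

# Filter used to query stored errors; only operationId is required.
flt = AppErrorInfoFilter(operationId="op-123", errType="ValueError")
print(infos.model_dump_json(), flt.model_dump_json())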
{malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/funcs/funcs.py
RENAMED
@@ -1578,6 +1578,30 @@ async def delete_mcp_tools_async(wait: bool, *args, **kwargs) -> Alias.Info:
     return await send_to_core_modify_async(MCP_TOOLS_ALL(wait), *args, **kwargs, is_post=False)
 
 
+def get_app_error_info(data: AppErrorInfoFilter, *args, **kwargs) -> AppErrorInfos:
+    return model_from_json(send_to_core_modify(APP_ERROR_INFO(None), data, *args, **kwargs), AppErrorInfos)
+
+
+async def get_app_error_info_async(data: AppErrorInfoFilter, *args, **kwargs) -> AppErrorInfos:
+    return model_from_json(await send_to_core_modify_async(APP_ERROR_INFO(None), data, *args, **kwargs), AppErrorInfos)
+
+
+def delete_app_error_info(id: str, wait: bool, *args, **kwargs) -> Alias.Info:
+    return send_to_core_modify(APP_ERROR_INFO_ID(id, wait), *args, **kwargs, is_post=False)
+
+
+async def delete_app_error_info_async(id: str, wait: bool, *args, **kwargs) -> Alias.Info:
+    return await send_to_core_modify_async(APP_ERROR_INFO_ID(id, wait), *args, **kwargs, is_post=False)
+
+
+def delete_app_error_infos(wait: bool, *args, **kwargs) -> Alias.Info:
+    return send_to_core_modify(APP_ERROR_INFO(wait), *args, **kwargs, is_post=False)
+
+
+async def delete_app_error_infos_async(wait: bool, *args, **kwargs) -> Alias.Info:
+    return await send_to_core_modify_async(APP_ERROR_INFO(wait), *args, **kwargs, is_post=False)
+
+
 async def kafka_send(data: KafkaMsg, *args, **kwargs) -> Union[Alias.Info, KafkaMsg]:
     result = await send_to_core_post_async(KAFKA_SEND, data, *args, **kwargs)
     try:
@@ -1687,7 +1711,7 @@ async def send_to_core_get_async(path: str, with_auth=True, show_func: Optional[
     auth = (Config.CORE_USERNAME, Config.CORE_PASSWORD) if with_auth else None
     if auth is not None:
         auth = aiohttp.BasicAuth(login=auth[0], password=auth[1], encoding='utf-8')
-    async with async_session or aiohttp.ClientSession(auth=auth, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+    async with async_session or aiohttp.ClientSession(auth=auth, connector=aiohttp.TCPConnector(verify_ssl=False), timeout=aiohttp.ClientTimeout(total=None)) as session:
         async with session.get(f"{host}{path}", headers=HEADERS) as response:
             await __async_check_response(response, show_func, f"{host}{path}")
             if response.status == HTTPStatus.NO_CONTENT:
@@ -1738,7 +1762,7 @@ async def send_to_core_modify_async(path: str, operation: Optional[Any] = None,
     if operation is not None:
         operation = json.dumps(operation.model_dump())
 
-    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False), timeout=aiohttp.ClientTimeout(total=None)) as session:
         if is_post:
             response_cm = session.post(f"{host}{path}", data=operation, headers=HEADERS)
         else:
@@ -1792,7 +1816,7 @@ async def send_to_core_modify_raw_async(path: str, data: bytes, with_auth: bool=
     auth = (Config.CORE_USERNAME, Config.CORE_PASSWORD)
     auth = aiohttp.BasicAuth(login=auth[0], password=auth[1], encoding='utf-8')
 
-    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False), timeout=aiohttp.ClientTimeout(total=None)) as session:
         if is_post:
             response_cm = session.post(f"{host}{path}", data=data, headers=HEADERS_RAW)
         else:
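Apart from the new error-info senders, the only change to the three async request helpers is the explicit aiohttp.ClientTimeout(total=None), which removes the client-wide total timeout (aiohttp otherwise applies a default total timeout of roughly five minutes). A standalone sketch of what that argument does, independent of the package; the URL is a placeholder:

# Illustration only (not package code): a session created with total=None never
# aborts a request because of the overall timeout, unlike the default session.
import asyncio
import aiohttp

async def fetch_status(url: str) -> int:
    no_total_timeout = aiohttp.ClientTimeout(total=None)  # what 0.3.60 now passes explicitly
    async with aiohttp.ClientSession(
        timeout=no_total_timeout,
        connector=aiohttp.TCPConnector(verify_ssl=False),  # mirrors the package's connector setup
    ) as session:
        async with session.get(url) as response:
            return response.status

# asyncio.run(fetch_status("https://example.com"))  # example URL, not a Malevich endpoint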
{malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/funcs/helpers.py
RENAMED
@@ -439,6 +439,7 @@ def base_settings(
     storage_request: Optional[int] = None,
     storage_limit: Optional[int] = None,
     kubeconfig: Optional[str] = None,
+    allow_kafka: bool = False,
 ) -> str:
     return BasePlatformSettings(
         memoryRequest=memory_request,
@@ -448,4 +449,5 @@ def base_settings(
         storageRequest=storage_request,
         storageLimit=storage_limit,
         kubeconfig=kubeconfig,
+        allowKafka=allow_kafka,
     ).model_dump_json()
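base_settings simply forwards the new allow_kafka flag into BasePlatformSettings and serializes the result. A hedged example call; the import path and the memory_request parameter name are inferred from the file listing and the function body rather than shown verbatim in this diff:

# Hypothetical call of the updated helper (parameter names inferred from the diff).
from malevich_coretools.funcs.helpers import base_settings  # assumed import path

settings_json = base_settings(memory_request=512, allow_kafka=True)
print(settings_json)  # expected to include "memoryRequest": 512 and "allowKafka": true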
{malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/const.py
RENAMED
@@ -227,6 +227,11 @@ MCP_TOOLS_ALL = lambda wait: with_wait(f"{MCP_TOOLS_MAIN}/all", wait)
 MCP_TOOLS_LIST = f"{MCP_TOOLS_MAIN}/list"
 MCP_TOOLS_CALL = f"{MCP_TOOLS_MAIN}/call"
 
+## AppErrorInfoController
+APP_ERROR_INFO_MAIN = f"{API_VERSION}/errors"
+APP_ERROR_INFO = lambda wait: with_wait(f"{APP_ERROR_INFO_MAIN}/", wait)
+APP_ERROR_INFO_ID = lambda operationId, wait: with_wait(f"{APP_ERROR_INFO_MAIN}/{operationId}", wait)
+
 ### Kafka
 KAFKA_SEND = f"{MANAGER_MAIN}/kafkaMsg"
 
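Assuming with_wait merely appends a wait flag to the path (its definition is not part of this diff), the new constants expand roughly as sketched below; API_VERSION's value and the query formatting are placeholders:

# Rough, self-contained expansion of the new constants (illustrative only).
from typing import Optional

API_VERSION = "api/v1"  # placeholder value

def with_wait(path: str, wait: Optional[bool]) -> str:
    # Assumed behaviour: append a wait query parameter when one is given.
    return path if wait is None else f"{path}?wait={str(wait).lower()}"

APP_ERROR_INFO_MAIN = f"{API_VERSION}/errors"
APP_ERROR_INFO = lambda wait: with_wait(f"{APP_ERROR_INFO_MAIN}/", wait)
APP_ERROR_INFO_ID = lambda operationId, wait: with_wait(f"{APP_ERROR_INFO_MAIN}/{operationId}", wait)

print(APP_ERROR_INFO(None))               # api/v1/errors/
print(APP_ERROR_INFO_ID("op-123", True))  # api/v1/errors/op-123?wait=true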
{malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/secondary/helpers.py
RENAMED
@@ -7,6 +7,7 @@ from pydantic import BaseModel
 
 from malevich_coretools.abstract.abstract import (
     Alias,
+    AppLog,
     AppLogs,
     FlattenAppLogsWithResults,
     LogsResult,
@@ -56,17 +57,36 @@ def bool_to_str(b: bool) -> str:
     return "true" if b else "false"
 
 
-def __show_logs_result(res: LogsResult):  # noqa: ANN202
+def __show_logs_result(res: LogsResult, i: Optional[int]):  # noqa: ANN202
     if len(res.data) > 0:
-        print("------- main:")
+        print(f"------- main ({i}):" if i is not None else "------- main:")
         print(res.data)
+    if res.logs is None:
+        res.logs = {}
+    if res.userLogs is None:
+        res.userLogs = {}
     for run_id, logs in res.logs.items():
-        print(f"------- {run_id}:")
         userLogs = res.userLogs.get(run_id, "")
-
+        print(f"------- {run_id} ({i}):" if i is not None else f"------- {run_id}:")
+        if len(logs) == 0:
             print(userLogs)
-
-
+        else:
+            if len(userLogs) > 0:
+                print(userLogs)
+                print("-------")
+            print(logs)
+    for run_id, logs in res.userLogs.items():
+        if run_id not in res.logs:
+            print(f"------- {run_id} ({i}):" if i is not None else f"------- {run_id}:")
+            print(logs)
+
+
+def __logs_parts(app_log: AppLog) -> Dict[int, LogsResult]:
+    parts = {}
+    for i, log_res in enumerate(app_log.data):
+        if len(log_res.data) > 0 or (log_res.logs is not None and len(log_res.logs) > 0) or (log_res.userLogs is not None and len(log_res.userLogs) > 0):
+            parts[i] = log_res
+    return parts
 
 
 def show_logs(app_logs: AppLogs, err: bool = False) -> None:  # noqa: ANN202
@@ -75,59 +95,81 @@ def show_logs(app_logs: AppLogs, err: bool = False) -> None:  # noqa: ANN202
     if app_logs.error is not None:
         show(f"error: {app_logs.error}")
         print(__delimiter)
-
-
+    if len(app_logs.dagLogs) > 0:
+        print("------- dm logs -------")
+        print(app_logs.dagLogs)
     for app_name, app_log in app_logs.data.items():
+        parts = __logs_parts(app_log)
+        if len(parts) == 0:
+            continue
         print(f"------- {app_name} -------")
         if len(app_log.data) == 1:
             __show_logs_result(app_log.data[0])
         else:
-
-
-            __show_logs_result(log_res)
-
+            c = len(parts)
+            for i, log_res in parts.items():
+                __show_logs_result(log_res, i)
+                c -= 1
+                if c != 0:
+                    print(__mini__delimiter)
         print(__delimiter)
 
 
 def show_logs_colored(app_logs: AppLogs, colors_dict: Optional[Dict[str, str]] = None) -> None:
     """colors_dict - should be unique for all app_logs by operation_id"""
-    def format(log, color: Optional[str]) -> None:
+    def format(log, color: Optional[str] = None) -> None:
         if color is None:
             Config.logger.warning(log)
         else:
             Config.logger.warning(color + log + __color_reset)
 
     def get_color(name: str) -> str:
-        if colors_dict is None:
-            return None
         color = colors_dict.get(name, None)
         if color is None:
            color = __colors[len(colors_dict) % len(__colors)]
            colors_dict[name] = color
         return color
 
+    if colors_dict is None:
+        colors_dict = {}
+
+    format(f"operation_id = {app_logs.operationId}")
     if app_logs.error is not None:
-
+        color = get_color("error")
+        format(f"error: {app_logs.error}", color)
+        format(__delimiter, color)
     if len(app_logs.dagLogs) > 0:
-        color = get_color("
+        color = get_color("dm")
+        format("------- dm logs -------", color)
         for line in app_logs.dagLogs.splitlines():
-            format(f"
+            format(f"dm: {line}", color)
     for app_name, app_log in app_logs.data.items():
-
-
-
+        parts = __logs_parts(app_log)
+        if len(parts) == 0:
+            continue
+
+        color = get_color(app_name)
+        for i, logs_result in parts.items():
+            app_name_prefix = f"{app_name}${i}" if i != 0 or len(parts) > 1 else app_name
             if len(logs_result.data) > 0:
                 for line in logs_result.data.splitlines():
                     format(f"{app_name_prefix}$main: {line}", color)
-            if
-
-
-
-
-
-
-
+            if logs_result.logs is None:
+                logs_result.logs = {}
+            if logs_result.userLogs is None:
+                logs_result.userLogs = {}
+            for run_id, logs in logs_result.logs.items():
+                user_logs = logs_result.userLogs.get(run_id, "")
+                if len(user_logs) > 0:
+                    for line in user_logs.splitlines():
+                        format(f"{app_name_prefix}${run_id}$user: {line}", color)
+                if len(logs) > 0:
+                    for line in logs.splitlines():
+                        format(f"{app_name_prefix}${run_id}: {line}", color)
+            for run_id, user_logs in logs_result.userLogs.items():
+                if run_id not in logs_result.logs:
+                    for line in user_logs.splitlines():
+                        format(f"{app_name_prefix}${run_id}$user: {line}", color)
 
 
 def show_logs_func(data: str, err: bool = False):  # noqa: ANN201
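The reworked log helpers now skip empty log parts and fall back to userLogs when logs is absent. A self-contained sketch of that selection logic, using plain dicts instead of the package's AppLog/LogsResult models:

# Self-contained sketch of the __logs_parts filtering idea (not package code).
from typing import Dict, List

def logs_parts(data: List[dict]) -> Dict[int, dict]:
    """Keep only entries that actually carry something to print."""
    parts = {}
    for i, log_res in enumerate(data):
        if len(log_res.get("data", "")) > 0 or log_res.get("logs") or log_res.get("userLogs"):
            parts[i] = log_res
    return parts

app_log_data = [
    {"data": "", "logs": {}, "userLogs": {}},                         # skipped: nothing to show
    {"data": "", "logs": None, "userLogs": {"run-1": "hello"}},       # kept: user logs only
    {"data": "stdout", "logs": {"run-1": "system"}, "userLogs": {}},  # kept: main output + logs
]
print(sorted(logs_parts(app_log_data)))  # -> [1, 2]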
{malevich_coretools-0.3.54 → malevich_coretools-0.3.60}/malevich_coretools/utils.py
RENAMED
@@ -6844,6 +6844,7 @@ def task_full(
     single_request: bool = False,
     profile_mode: Optional[str] = None,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     with_show: bool = True,
     long: bool = False,
     long_timeout: Optional[int] = WAIT_RESULT_TIMEOUT,
@@ -6906,6 +6907,7 @@ def task_full(
         profileMode=profile_mode,
         withLogs=True,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         scaleInfo=scaleInfo,
         component=component,
         policy=policy,
@@ -7019,6 +7021,7 @@ def task_prepare(
     with_logs: bool = False,
     profile_mode: Optional[str] = None,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     with_show: bool = None,
     long: bool = False,
     long_timeout: int = WAIT_RESULT_TIMEOUT,
@@ -7091,6 +7094,7 @@ def task_prepare(
         waitRuns=wait_runs,
         withLogs=with_logs,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         profileMode=profile_mode,
         scaleInfo=scaleInfo,
         component=component,
@@ -7135,6 +7139,7 @@ def task_run(
     long_timeout: int = WAIT_RESULT_TIMEOUT,
     with_logs: bool = False,
     schedule: Optional[Schedule] = None,
+    broadcast: bool = False,
     wait: bool = True,
     *,
     auth: Optional[AUTH] = None,
@@ -7159,6 +7164,7 @@ def task_run(
     long_timeout: int = WAIT_RESULT_TIMEOUT,
     with_logs: bool = False,
     schedule: Optional[Schedule] = None,
+    broadcast: bool = False,
     wait: bool = True,
     *,
     auth: Optional[AUTH] = None,
@@ -7182,6 +7188,7 @@ def task_run(
     long_timeout: int = WAIT_RESULT_TIMEOUT,
     with_logs: bool = False,
     schedule: Optional[Schedule] = None,
+    broadcast: bool = False,
     wait: bool = True,
     *,
     auth: Optional[AUTH] = None,
@@ -7204,6 +7211,7 @@ def task_run(
     long_timeout (Optional[int]): default timeout for long run (hour by default). If 'long=False' ignored. If None, then there is no limit. Doesn't stop the task, just stops trying to get the run result
     with_logs (bool): return run logs if True after end
     schedule: (Optional[Schedule]): schedule task runs settings - return scheduleId instead of operationId
+    broadcast: run scale pipeline (for new scale)
     wait (bool): is it worth waiting for the result or immediately return `operation_id`
     auth (Optional[AUTH]): redefined auth if not None"""
     if schedule is not None:
@@ -7224,6 +7232,7 @@ def task_run(
         profileMode=profile_mode,
         withLogs=with_logs,
         schedule=schedule,
+        broadcast=broadcast,
     )
     if batcher is not None:
         return batcher.add("sendTaskRun", data=data, result_model=AppLogs if with_logs or schedule is not None else None)
@@ -7322,6 +7331,7 @@ def pipeline_full(
     restrictions: Optional[Restrictions] = None,
     scaleInfo: List[ScaleInfo] = None,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     tags: Optional[Dict[str, str]] = None,
     with_show: bool = True,
     long: bool = False,
@@ -7367,6 +7377,7 @@ def pipeline_full(
         kafkaModeUrl=None,
         run=True,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         scaleCount=1,
         tags=tags,
     )
@@ -7489,6 +7500,7 @@ def pipeline_prepare(
     kafka_mode_url_response: Optional[str] = None,
     synthetic: bool = False,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     scale_count: int = 1,
     tags: Optional[Dict[str, str]] = None,
     with_show: bool = True,
@@ -7533,6 +7545,7 @@ def pipeline_prepare(
         run=False,
         synthetic=synthetic,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         scaleCount=scale_count,
         tags=tags,
     )
@@ -9263,6 +9276,156 @@ def delete_mcp_tools(
     return f.delete_mcp_tools(wait=wait, auth=auth, conn_url=conn_url)
 
 
+# app error info
+
+
+@overload
+def get_app_error_info(
+    operation_id: str,
+    run_id: Optional[str] = None,
+    bind_id: Optional[str] = None,
+    err_type: Optional[str] = None,
+    is_malevich_err: bool = False,
+    *,
+    with_auth: bool = True,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[False] = False,
+) -> AppErrorInfos:
+    pass
+
+
+@overload
+def get_app_error_info(
+    operation_id: str,
+    run_id: Optional[str] = None,
+    bind_id: Optional[str] = None,
+    err_type: Optional[str] = None,
+    is_malevich_err: bool = False,
+    *,
+    with_auth: bool = True,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[True],
+) -> Coroutine[Any, Any, AppErrorInfos]:
+    pass
+
+
+def get_app_error_info(
+    operation_id: str,
+    run_id: Optional[str] = None,
+    bind_id: Optional[str] = None,
+    err_type: Optional[str] = None,
+    is_malevich_err: bool = False,
+    *,
+    with_auth: bool = True,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: bool = False,
+) -> Union[AppErrorInfos, Coroutine[Any, Any, AppErrorInfos]]:
+    """return mcp tool struct by `id` or `name` """
+    assert id is not None or name is not None, "id or name should be set"
+    data = AppErrorInfoFilter(operationId=operation_id, runId=run_id, bindId=bind_id, errType=err_type, isMalevichErr=is_malevich_err)
+    if batcher is None:
+        batcher = Config.BATCHER
+    if batcher is not None:
+        return batcher.add("getAppErrorInfo", data=data, result_model=AppErrorInfos)
+    if is_async:
+        return f.get_app_error_info_async(data, with_auth=with_auth, auth=auth, conn_url=conn_url)
+    return f.get_app_error_info(data, with_auth=with_auth, auth=auth, conn_url=conn_url)
+
+
+@overload
+def delete_app_error_info(
+    operation_id: str,
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[False] = False,
+) -> Alias.Info:
+    pass
+
+
+@overload
+def delete_app_error_info(
+    operation_id: str,
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[True],
+) -> Coroutine[Any, Any, Alias.Info]:
+    pass
+
+
+def delete_app_error_info(
+    operation_id: str,
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: bool = False,
+) -> Union[Alias.Info, Coroutine[Any, Any, Alias.Info]]:
+    """delete app error info by `operationId` """
+    assert id is not None or name is not None, "id or name should be set"
+    if batcher is None:
+        batcher = Config.BATCHER
+    if batcher is not None:
+        return batcher.add("deleteAppErrorInfoByOperationId", vars={"operationId": operation_id})
+    if is_async:
+        return f.delete_app_error_info_async(operation_id, wait=wait, auth=auth, conn_url=conn_url)
+    return f.delete_app_error_info(operation_id, wait=wait, auth=auth, conn_url=conn_url)
+
+
+@overload
+def delete_app_error_infos(
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[False] = False,
+) -> Alias.Info:
+    pass
+
+
+@overload
+def delete_app_error_infos(
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[True],
+) -> Coroutine[Any, Any, Alias.Info]:
+    pass
+
+
+def delete_app_error_infos(
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: bool = False,
+) -> Union[Alias.Info, Coroutine[Any, Any, Alias.Info]]:
+    """delete all app error infos"""
+    if batcher is None:
+        batcher = Config.BATCHER
+    if batcher is not None:
+        return batcher.add("deleteAppErrorInfos")
+    if is_async:
+        return f.delete_app_error_infos_async(wait=wait, auth=auth, conn_url=conn_url)
+    return f.delete_app_error_infos(wait=wait, auth=auth, conn_url=conn_url)
+
+
 # kafka
 
 
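Taken together, utils.py now exposes clear_dag_logs on task and pipeline preparation, broadcast on task_run, and a small client for the stored error info. A hedged usage sketch of the error-info calls, whose full signatures appear above; the credentials, URL and ids are invented, and note that the docstring and assert inside get_app_error_info and delete_app_error_info still reference `id`/`name`, apparently carried over from the MCP-tool helpers:

# Hypothetical usage of the new error-info API (auth, URL and ids invented).
from malevich_coretools.utils import delete_app_error_info, get_app_error_info

AUTH = ("user", "password")          # placeholder credentials
CORE = "https://core.example.com"    # placeholder Core URL
OPERATION_ID = "op-123"              # invented id of a prepared task/pipeline

# Synchronous call: returns AppErrorInfos filtered by operation and error type.
errors = get_app_error_info(OPERATION_ID, err_type="ValueError", auth=AUTH, conn_url=CORE)
for err in errors.data:
    print(err.bindId, err.runId, err.errType, err.errArgs)

# The same call with is_async=True returns a coroutine instead (see the overloads above):
# errors = await get_app_error_info(OPERATION_ID, is_async=True, auth=AUTH, conn_url=CORE)

# Clean up the stored errors for this operation once they have been inspected.
delete_app_error_info(OPERATION_ID, auth=AUTH, conn_url=CORE)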
malevich_coretools-0.3.54/VERSION DELETED
@@ -1 +0,0 @@
-0.3.54
All remaining files in the listing above (those marked +0 -0: LICENSE, MANIFEST.in, README.md, the package __init__ and submodule files, the egg-info metadata files, pyproject.toml, requirements.txt, setup.cfg and setup.py) were only renamed with the version bump; their contents are unchanged.