malevich-coretools 0.3.54__py3-none-any.whl → 0.3.61__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of malevich-coretools might be problematic.

@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Dict, List, Optional, Set, Tuple, Union
 
 from pydantic import BaseModel
 
@@ -197,6 +197,7 @@ class MainTask(BaseModel):
     profileMode: Optional[str] = None
     withLogs: bool = False # use only in prepare
     saveFails: bool = True
+    clearDagLogs: bool = True
     scaleCount: int = 1
     scaleInfo: List[ScaleInfo]
     component: TaskComponent
@@ -228,6 +229,7 @@ class MainPipeline(BaseModel):
     run: bool = True
     synthetic: bool = False
     saveFails: bool = True
+    clearDagLogs: bool = True
     scaleCount: int = 1
     tags: Optional[Dict[str, str]] = None
 
@@ -241,6 +243,7 @@ class RunTask(Operation):
     profileMode: Optional[str] = None
     withLogs: bool = False
     schedule: Optional[Schedule] = None
+    broadcast: bool = False
 
 
 class AppManage(Operation):
@@ -385,8 +388,8 @@ class CollectionMetadata(BaseModel):
 
 class LogsResult(BaseModel):
     data: str
-    logs: Dict[str, str]
-    userLogs: Dict[str, str] = {}
+    logs: Optional[Dict[str, str]] = {}
+    userLogs: Optional[Dict[str, str]] = {}
 
 
 class AppLog(BaseModel):
@@ -603,7 +606,7 @@ class UserLimits(BaseModel):
     defaultGpuDisk: int
 
 
-class BasePlatformSettings(BaseModel):
+class BasePlatformSettingsMain(BaseModel):
     memoryRequest: Optional[int] = None
     memoryLimit: Optional[int] = None
     cpuRequest: Optional[int] = None
@@ -613,7 +616,11 @@ class BasePlatformSettings(BaseModel):
     kubeconfig: Optional[str] = None
 
 
-class Limits(BasePlatformSettings):
+class BasePlatformSettings(BasePlatformSettingsMain):
+    allowKafka: bool = False
+
+
+class Limits(BasePlatformSettingsMain):
     gpuDisk: Optional[int] = None
 
 
@@ -675,3 +682,37 @@ class MCPToolCall(BaseModel):
 class RunsFilter(BaseModel):
     data: Optional[Dict[str, str]] = None
     withTags: bool = False
+
+
+class AppLocalScheme(BaseModel):
+    keys: List[str]
+    optionalKeys: Set[str]
+
+
+class AppErrorInfo(BaseModel):
+    operationId: str
+    runId: str
+    bindId: str
+    funId: str
+    iteration: int
+    isProcessor: bool = True
+    trace: str
+    errType: str
+    errArgs: List[str]
+    isMalevichErr: bool
+    cfg: Optional[Dict[str, Any]]
+    schemes: Optional[Dict[str, AppLocalScheme]] = None
+    args: List[List[Union[Union[str, List[str]], List[Union[str, List[str]]]]]] = None # mb empty for send info structure
+    argsNames: List[str]
+
+
+class AppErrorInfos(BaseModel):
+    data: List[AppErrorInfo]
+
+
+class AppErrorInfoFilter(BaseModel):
+    operationId: str
+    runId: Optional[str] = None
+    bindId: Optional[str] = None
+    errType: Optional[str] = None
+    isMalevichErr: bool = False
@@ -1222,14 +1222,22 @@ async def get_image_info_async(data: JsonImage, parse: bool, *args, **kwargs) ->
     return res
 
 
-def get_task_schedules(data: Operation, with_show: bool, *args, **kwargs) -> Schedules:
+def get_task_schedules(*args, **kwargs) -> ResultIds:
+    return model_from_json(send_to_core_get(MANAGER_TASK_SCHEDULES, *args, **kwargs), ResultIds)
+
+
+async def get_task_schedules_async(*args, **kwargs) -> ResultIds:
+    return model_from_json(await send_to_core_get_async(MANAGER_TASK_SCHEDULES, *args, **kwargs), ResultIds)
+
+
+def post_task_schedules(data: Operation, with_show: bool, *args, **kwargs) -> Schedules:
     res = model_from_json(send_to_core_modify(MANAGER_TASK_SCHEDULES, data, with_show=False, *args, **kwargs), Schedules)
     if with_show:
         Config.logger.info(res)
     return res
 
 
-async def get_task_schedules_async(data: Operation, with_show: bool, *args, **kwargs) -> Schedules:
+async def post_task_schedules_async(data: Operation, with_show: bool, *args, **kwargs) -> Schedules:
     res = model_from_json(await send_to_core_modify_async(MANAGER_TASK_SCHEDULES, data, with_show=False, *args, **kwargs), Schedules)
     if with_show:
         Config.logger.info(res)
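
Note that this is a rename in the low-level module, not just an addition: callers that imported get_task_schedules from the funcs layer and passed it an Operation must switch to post_task_schedules (or its async twin), while the new zero-argument get_task_schedules returns ResultIds. A hedged sketch of the two call shapes; the module path malevich_coretools.funcs.funcs is inferred from the wheel RECORD, and the import location of Operation is an assumption (it is referenced but not defined in this diff).

# Sketch only: import paths are inferred/assumed, not confirmed by this diff.
from malevich_coretools.funcs import funcs as f
from malevich_coretools.abstract.abstract import Operation

schedule_ids = f.get_task_schedules()  # 0.3.61: no payload, returns ResultIds
schedules = f.post_task_schedules(     # replaces the old get_task_schedules(data, ...)
    Operation(operationId="op-123"),
    with_show=True,
)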
@@ -1578,6 +1586,30 @@ async def delete_mcp_tools_async(wait: bool, *args, **kwargs) -> Alias.Info:
     return await send_to_core_modify_async(MCP_TOOLS_ALL(wait), *args, **kwargs, is_post=False)
 
 
+def get_app_error_info(data: AppErrorInfoFilter, *args, **kwargs) -> AppErrorInfos:
+    return model_from_json(send_to_core_modify(APP_ERROR_INFO(None), data, *args, **kwargs), AppErrorInfos)
+
+
+async def get_app_error_info_async(data: AppErrorInfoFilter, *args, **kwargs) -> AppErrorInfos:
+    return model_from_json(await send_to_core_modify_async(APP_ERROR_INFO(None), data, *args, **kwargs), AppErrorInfos)
+
+
+def delete_app_error_info(id: str, wait: bool, *args, **kwargs) -> Alias.Info:
+    return send_to_core_modify(APP_ERROR_INFO_ID(id, wait), *args, **kwargs, is_post=False)
+
+
+async def delete_app_error_info_async(id: str, wait: bool, *args, **kwargs) -> Alias.Info:
+    return await send_to_core_modify_async(APP_ERROR_INFO_ID(id, wait), *args, **kwargs, is_post=False)
+
+
+def delete_app_error_infos(wait: bool, *args, **kwargs) -> Alias.Info:
+    return send_to_core_modify(APP_ERROR_INFO(wait), *args, **kwargs, is_post=False)
+
+
+async def delete_app_error_infos_async(wait: bool, *args, **kwargs) -> Alias.Info:
+    return await send_to_core_modify_async(APP_ERROR_INFO(wait), *args, **kwargs, is_post=False)
+
+
 async def kafka_send(data: KafkaMsg, *args, **kwargs) -> Union[Alias.Info, KafkaMsg]:
     result = await send_to_core_post_async(KAFKA_SEND, data, *args, **kwargs)
     try:
@@ -1687,7 +1719,7 @@ async def send_to_core_get_async(path: str, with_auth=True, show_func: Optional[
     auth = (Config.CORE_USERNAME, Config.CORE_PASSWORD) if with_auth else None
     if auth is not None:
         auth = aiohttp.BasicAuth(login=auth[0], password=auth[1], encoding='utf-8')
-    async with async_session or aiohttp.ClientSession(auth=auth, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+    async with async_session or aiohttp.ClientSession(auth=auth, connector=aiohttp.TCPConnector(verify_ssl=False), timeout=aiohttp.ClientTimeout(total=None)) as session:
         async with session.get(f"{host}{path}", headers=HEADERS) as response:
             await __async_check_response(response, show_func, f"{host}{path}")
             if response.status == HTTPStatus.NO_CONTENT:
@@ -1738,7 +1770,7 @@ async def send_to_core_modify_async(path: str, operation: Optional[Any] = None,
     if operation is not None:
         operation = json.dumps(operation.model_dump())
 
-    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False), timeout=aiohttp.ClientTimeout(total=None)) as session:
         if is_post:
             response_cm = session.post(f"{host}{path}", data=operation, headers=HEADERS)
         else:
@@ -1792,7 +1824,7 @@ async def send_to_core_modify_raw_async(path: str, data: bytes, with_auth: bool=
     auth = (Config.CORE_USERNAME, Config.CORE_PASSWORD)
     auth = aiohttp.BasicAuth(login=auth[0], password=auth[1], encoding='utf-8')
 
-    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+    async with async_session or aiohttp.ClientSession(auth=auth if with_auth else None, connector=aiohttp.TCPConnector(verify_ssl=False), timeout=aiohttp.ClientTimeout(total=None)) as session:
         if is_post:
             response_cm = session.post(f"{host}{path}", data=data, headers=HEADERS_RAW)
         else:
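
The only transport change is the explicit timeout=aiohttp.ClientTimeout(total=None) on every internally created session, which lifts aiohttp's default 5-minute total timeout so long-running core requests are not aborted client-side. A standalone illustration of the pattern (not the library's actual session setup):

import asyncio
import aiohttp

async def fetch(url: str) -> str:
    # total=None disables the overall deadline (aiohttp defaults to 300 s).
    timeout = aiohttp.ClientTimeout(total=None)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url) as response:
            return await response.text()

# asyncio.run(fetch("https://example.com"))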
@@ -439,6 +439,7 @@ def base_settings(
     storage_request: Optional[int] = None,
     storage_limit: Optional[int] = None,
     kubeconfig: Optional[str] = None,
+    allow_kafka: bool = False,
 ) -> str:
     return BasePlatformSettings(
         memoryRequest=memory_request,
@@ -448,4 +449,5 @@ def base_settings(
         storageRequest=storage_request,
         storageLimit=storage_limit,
         kubeconfig=kubeconfig,
+        allowKafka=allow_kafka,
     ).model_dump_json()
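
With the extra keyword, Kafka access can be toggled when building the platform-settings JSON. A minimal sketch based on the signature above; the import path malevich_coretools.funcs.helpers is inferred from the wheel RECORD and may differ if the helper is re-exported elsewhere.

# Import path inferred from the wheel RECORD; adjust if base_settings is re-exported.
from malevich_coretools.funcs.helpers import base_settings

settings_json = base_settings(
    memory_request=512,
    memory_limit=1024,
    allow_kafka=True,  # new in 0.3.61, maps to BasePlatformSettings.allowKafka
)
print(settings_json)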
@@ -39,13 +39,13 @@ def with_key_values(url: str, key_values: Dict[str, Optional[str]]) -> str:
 
 ## DocsController
 DOCS_MAIN = f"{API_VERSION}/docs"
-DOCS = lambda wait: with_wait(f"{DOCS_MAIN}/", wait)
+DOCS = lambda wait: with_wait(DOCS_MAIN, wait)
 DOCS_ID = lambda id, wait: with_wait(f"{DOCS_MAIN}/{id}", wait)
 DOCS_NAME = lambda name, wait: with_wait(f"{DOCS_MAIN}/name/{name}", wait)
 
 ## CollectionsController
 COLLECTIONS_MAIN = f"{API_VERSION}/collections"
-COLLECTIONS = lambda wait: with_wait(f"{COLLECTIONS_MAIN}/", wait)
+COLLECTIONS = lambda wait: with_wait(COLLECTIONS_MAIN, wait)
 COLLECTIONS_IDS_NAME = lambda name, operation_id, run_id: with_key_values(f"{COLLECTIONS_MAIN}/ids/name/{urllib.parse.quote(str(name), safe='')}", {"operationId": operation_id, "runId": run_id})
 COLLECTIONS_NAME = lambda name, operation_id, run_id, offset, limit: with_key_values(f"{COLLECTIONS_MAIN}/name/{urllib.parse.quote(str(name), safe='')}", {"operationId": operation_id, "runId": run_id, "offset": offset, "limit": limit})
 COLLECTIONS_IDS_GROUP_NAME = lambda name, operation_id, run_id: with_key_values(f"{COLLECTIONS_MAIN}/ids/groupName/{urllib.parse.quote(str(name), safe='')}", {"operationId": operation_id, "runId": run_id})
@@ -67,7 +67,7 @@ COLLECTIONS_METADATA = lambda id, wait: with_wait(f"{COLLECTIONS_MAIN}/{urllib.p
 COLLECTION_OBJECTS_MAIN = f"{API_VERSION}/collectionObjects"
 COLLECTION_OBJECTS_ALL_GET = lambda path, recursive: with_key_values(f"{COLLECTION_OBJECTS_MAIN}/all", {"path": path, "recursive": recursive})
 COLLECTION_OBJECTS_ALL = lambda wait: with_wait(f"{COLLECTION_OBJECTS_MAIN}/all", wait)
-COLLECTION_OBJECTS_PATH = lambda path, wait, zip: with_key_values(f"{COLLECTION_OBJECTS_MAIN}/", {"path": path, "wait": None if wait is None else bool_to_str(wait), "zip": None if zip is None else bool_to_str(zip)})
+COLLECTION_OBJECTS_PATH = lambda path, wait, zip: with_key_values(COLLECTION_OBJECTS_MAIN, {"path": path, "wait": None if wait is None else bool_to_str(wait), "zip": None if zip is None else bool_to_str(zip)})
 COLLECTION_OBJECTS_PRESIGN_PUT = lambda path, callback_url, expires_in, wait: with_key_values(f"{COLLECTION_OBJECTS_MAIN}/presign/put", {"path": path, "callback_url": callback_url, "expiresIn": expires_in, "wait": bool_to_str(wait)})
 COLLECTION_OBJECTS_PRESIGN_GET = lambda path, callback_url, expires_in, wait: with_key_values(f"{COLLECTION_OBJECTS_MAIN}/presign/get", {"path": path, "callback_url": callback_url, "expiresIn": expires_in, "wait": bool_to_str(wait)})
 COLLECTION_OBJECTS_PRESIGN = lambda signature, zip: with_key_values(f"{COLLECTION_OBJECTS_MAIN}/presign", {"signature": signature, "zip": None if zip is None else bool_to_str(zip)})
@@ -84,7 +84,7 @@ ENDPOINTS_RESUME = lambda hash, wait: with_wait(f"{ENDPOINTS_MAIN}/resume/{urlli
 
 ## SchemeController
 SCHEMES_MAIN = f"{API_VERSION}/schemes"
-SCHEMES = lambda wait: with_wait(f"{SCHEMES_MAIN}/", wait)
+SCHEMES = lambda wait: with_wait(SCHEMES_MAIN, wait)
 SCHEMES_ID = lambda id, wait: with_wait(f"{SCHEMES_MAIN}/{urllib.parse.quote(str(id), safe='')}", wait)
 SCHEMES_ID_RAW = lambda id: f"{SCHEMES_MAIN}/{urllib.parse.quote(str(id), safe='')}/raw"
 SCHEMES_MAPPING = lambda wait: with_wait(f"{SCHEMES_MAIN}/mapping", wait)
@@ -100,7 +100,7 @@ PING = "ping"
 
 ## UserShareController
 SHARE_MAIN = f"{API_VERSION}/share"
-SHARE = lambda wait: with_wait(f"{SHARE_MAIN}/", wait)
+SHARE = lambda wait: with_wait(SHARE_MAIN, wait)
 SHARE_COLLECTION_ID = lambda id, wait: with_wait(f"{SHARE_MAIN}/collection/{urllib.parse.quote(str(id), safe='')}", wait)
 SHARE_SCHEME_ID = lambda id, wait: with_wait(f"{SHARE_MAIN}/scheme/{urllib.parse.quote(str(id), safe='')}", wait)
 SHARE_USER_APP_ID = lambda id, wait: with_wait(f"{SHARE_MAIN}/userApp/{urllib.parse.quote(str(id), safe='')}", wait)
@@ -109,13 +109,13 @@ SHARE_ALL = lambda wait: with_wait(f"{SHARE_MAIN}/all", wait)
 
 ## RegistrationController
 REGISTER_MAIN = f"{API_VERSION}/register"
-REGISTER = f"{REGISTER_MAIN}/"
+REGISTER = REGISTER_MAIN
 REGISTER_LOGIN = lambda login, wait: with_wait(f"{REGISTER_MAIN}/login/{urllib.parse.quote(str(login), safe='')}", wait)
 REGISTER_ALL = f"{REGISTER_MAIN}/all"
 
 ## UserAppsController
 USER_APPS_MAIN = f"{API_VERSION}/userApps"
-USER_APPS = lambda wait: with_wait(f"{USER_APPS_MAIN}/", wait)
+USER_APPS = lambda wait: with_wait(USER_APPS_MAIN, wait)
 USER_APPS_REAL_IDS = f"{USER_APPS_MAIN}/realIds"
 USER_APPS_MAP_IDS = f"{USER_APPS_MAIN}/mapIds"
 USER_APPS_MAP_ID = lambda id: f"{USER_APPS_MAIN}/mapIds/{urllib.parse.quote(str(id), safe='')}"
@@ -124,7 +124,7 @@ USER_APPS_REAL_ID = lambda id: f"{USER_APPS_MAIN}/realIds/{urllib.parse.quote(st
 
 ## UserTasksController
 USER_TASKS_MAIN = f"{API_VERSION}/userTasks"
-USER_TASKS = lambda wait: with_wait(f"{USER_TASKS_MAIN}/", wait)
+USER_TASKS = lambda wait: with_wait(USER_TASKS_MAIN, wait)
 USER_TASKS_REAL_IDS = f"{USER_TASKS_MAIN}/realIds"
 USER_TASKS_MAP_IDS = f"{USER_TASKS_MAIN}/mapIds"
 USER_TASKS_MAP_ID = lambda id: f"{USER_TASKS_MAIN}/mapIds/{urllib.parse.quote(str(id), safe='')}"
@@ -133,7 +133,7 @@ USER_TASKS_REAL_ID = lambda id: f"{USER_TASKS_MAIN}/realIds/{urllib.parse.quote(
 
 ## UserPipelinesController
 USER_PIPELINES_MAIN = f"{API_VERSION}/userPipelines"
-USER_PIPELINES = lambda wait: with_wait(f"{USER_PIPELINES_MAIN}/", wait)
+USER_PIPELINES = lambda wait: with_wait(USER_PIPELINES_MAIN, wait)
 USER_PIPELINES_REAL_IDS = f"{USER_PIPELINES_MAIN}/realIds"
 USER_PIPELINES_MAP_IDS = f"{USER_PIPELINES_MAIN}/mapIds"
 USER_PIPELINES_MAP_ID = lambda id: f"{USER_PIPELINES_MAIN}/mapIds/{urllib.parse.quote(str(id), safe='')}"
@@ -144,7 +144,7 @@ USER_PIPELINES_TAG_REAL_IDS = lambda tag: f"{USER_PIPELINES_MAIN}/realIds/tagSea
 
 ## UserCfgsController
 USER_CFGS_MAIN = f"{API_VERSION}/userCfgs"
-USER_CFGS = lambda wait: with_wait(f"{USER_CFGS_MAIN}/", wait)
+USER_CFGS = lambda wait: with_wait(USER_CFGS_MAIN, wait)
 USER_CFGS_REAL_IDS = f"{USER_CFGS_MAIN}/realIds"
 USER_CFGS_MAP_IDS = f"{USER_CFGS_MAIN}/mapIds"
 USER_CFGS_MAP_ID = lambda id: f"{USER_CFGS_MAIN}/mapIds/{urllib.parse.quote(str(id), safe='')}"
@@ -153,7 +153,7 @@ USER_CFGS_REAL_ID = lambda id: f"{USER_CFGS_MAIN}/realIds/{urllib.parse.quote(st
 
 ## OperationResultsController
 OPERATION_RESULTS_MAIN = f"{API_VERSION}/operationResults"
-OPERATION_RESULTS = lambda wait: with_wait(f"{OPERATION_RESULTS_MAIN}/", wait)
+OPERATION_RESULTS = lambda wait: with_wait(OPERATION_RESULTS_MAIN, wait)
 OPERATION_RESULTS_ID = lambda id, wait: with_wait(f"{OPERATION_RESULTS_MAIN}/{urllib.parse.quote(str(id), safe='')}", wait)
 
 ## TempRunController
@@ -195,16 +195,16 @@ MANAGER_APP_PAUSE = lambda wait: with_wait(f"{MANAGER_MAIN}/app/pause", wait)
 
 ## LimitsController
 LIMITS_MAIN = f"{API_VERSION}/limits"
-LIMITS = lambda wait: with_wait(f"{LIMITS_MAIN}/", wait)
+LIMITS = lambda wait: with_wait(LIMITS_MAIN, wait)
 LIMITS_USER = lambda wait: with_wait(f"{LIMITS_MAIN}/user", wait)
 
 ## HandlerUrlController
 HANDLER_URL_MAIN = f"{API_VERSION}/handlerUrls"
-HANDLER_URL = lambda url, wait: with_key_values(f"{HANDLER_URL_MAIN}/", {"url": url, "wait": bool_to_str(wait)})
+HANDLER_URL = lambda url, wait: with_key_values(HANDLER_URL_MAIN, {"url": url, "wait": bool_to_str(wait)})
 
 ## AnalyticsController
 ANALYTICS_MAIN = f"{API_VERSION}/analytics"
-ANALYTICS = lambda wait: with_wait(f"{ANALYTICS_MAIN}/", wait)
+ANALYTICS = lambda wait: with_wait(ANALYTICS_MAIN, wait)
 ANALYTICS_MANY = lambda wait: with_wait(f"{ANALYTICS_MAIN}/many", wait)
 ANALYTICS_ID = lambda id, wait: with_wait(f"{ANALYTICS_MAIN}/{id}", wait)
 ANALYTICS_NAME = lambda name, wait: with_wait(f"{ANALYTICS_MAIN}/name/{name}", wait)
@@ -216,20 +216,25 @@ RUNS_INFO_LAST_FAILED = lambda count: with_key_values(f"{RUNS_INFO_MAIN}/lastFai
 
 ## WSAppsController
 WS_APPS_MAIN = f"{API_VERSION}/ws/apps"
-WS_APPS = lambda only_active, full: with_key_values(f"{WS_APPS_MAIN}/", {"onlyActive": only_active, "full": full})
-WS_APPS_ = lambda only_not_active, wait: with_key_values(f"{WS_APPS_MAIN}/", {"onlyNotActive": only_not_active, "wait": wait})
+WS_APPS = lambda only_active, full: with_key_values(WS_APPS_MAIN, {"onlyActive": only_active, "full": full})
+WS_APPS_ = lambda only_not_active, wait: with_key_values(WS_APPS_MAIN, {"onlyNotActive": only_not_active, "wait": wait})
 WS_APPS_ID = lambda id, wait: with_wait(f"{WS_APPS_MAIN}/{id}", wait)
 
 ## McpToolController
 MCP_TOOLS_MAIN = f"{API_VERSION}/tools"
-MCP_TOOLS = lambda id, name, wait: with_key_values(f"{MCP_TOOLS_MAIN}/", {"id": id, "name": name, "wait": wait})
+MCP_TOOLS = lambda id, name, wait: with_key_values(MCP_TOOLS_MAIN, {"id": id, "name": name, "wait": wait})
 MCP_TOOLS_ALL = lambda wait: with_wait(f"{MCP_TOOLS_MAIN}/all", wait)
 MCP_TOOLS_LIST = f"{MCP_TOOLS_MAIN}/list"
 MCP_TOOLS_CALL = f"{MCP_TOOLS_MAIN}/call"
 
+## AppErrorInfoController
+APP_ERROR_INFO_MAIN = f"{API_VERSION}/errors"
+APP_ERROR_INFO = lambda wait: with_wait(APP_ERROR_INFO_MAIN, wait)
+APP_ERROR_INFO_ID = lambda operationId, wait: with_wait(f"{APP_ERROR_INFO_MAIN}/{operationId}", wait)
+
 ### Kafka
 KAFKA_SEND = f"{MANAGER_MAIN}/kafkaMsg"
 
 ## BatchController
 BATCH_MAIN = f"{API_VERSION}/batch"
-BATCH = f"{BATCH_MAIN}/"
+BATCH = BATCH_MAIN
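
The route changes are mechanical: every collection-style base path loses its trailing slash (for example .../docs/ becomes .../docs), and a new /errors controller is registered for the AppErrorInfo endpoints. A toy illustration of the slash change with a stand-in with_wait; the real helper and the value of API_VERSION live in secondary/const.py and are not shown in this diff.

# Toy illustration only; with_wait and API_VERSION below are stand-ins,
# not the library's definitions.
API_VERSION = "api/v1"  # placeholder value

def with_wait(url: str, wait: bool) -> str:
    return f"{url}?wait={'true' if wait else 'false'}"

DOCS_MAIN = f"{API_VERSION}/docs"
old_docs = with_wait(f"{DOCS_MAIN}/", True)  # 0.3.54 -> "api/v1/docs/?wait=true"
new_docs = with_wait(DOCS_MAIN, True)        # 0.3.61 -> "api/v1/docs?wait=true"
print(old_docs, new_docs)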
@@ -7,6 +7,7 @@ from pydantic import BaseModel
 
 from malevich_coretools.abstract.abstract import (
     Alias,
+    AppLog,
     AppLogs,
     FlattenAppLogsWithResults,
     LogsResult,
@@ -56,17 +57,36 @@ def bool_to_str(b: bool) -> str:
     return "true" if b else "false"
 
 
-def __show_logs_result(res: LogsResult): # noqa: ANN202
+def __show_logs_result(res: LogsResult, i: Optional[int]): # noqa: ANN202
     if len(res.data) > 0:
-        print("------- main:")
+        print(f"------- main ({i}):" if i is not None else "------- main:")
         print(res.data)
+    if res.logs is None:
+        res.logs = {}
+    if res.userLogs is None:
+        res.userLogs = {}
     for run_id, logs in res.logs.items():
-        print(f"------- {run_id}:")
         userLogs = res.userLogs.get(run_id, "")
-        if len(userLogs) > 0:
+        print(f"------- {run_id} ({i}):" if i is not None else f"------- {run_id}:")
+        if len(logs) == 0:
             print(userLogs)
-        print("-------")
-        print(logs)
+        else:
+            if len(userLogs) > 0:
+                print(userLogs)
+                print("-------")
+            print(logs)
+    for run_id, logs in res.userLogs.items():
+        if run_id not in res.logs:
+            print(f"------- {run_id} ({i}):" if i is not None else f"------- {run_id}:")
+            print(logs)
+
+
+def __logs_parts(app_log: AppLog) -> Dict[int, LogsResult]:
+    parts = {}
+    for i, log_res in enumerate(app_log.data):
+        if len(log_res.data) > 0 or (log_res.logs is not None and len(log_res.logs) > 0) or (log_res.userLogs is not None and len(log_res.userLogs) > 0):
+            parts[i] = log_res
+    return parts
 
 
 def show_logs(app_logs: AppLogs, err: bool = False) -> None: # noqa: ANN202
@@ -75,59 +95,81 @@ def show_logs(app_logs: AppLogs, err: bool = False) -> None: # noqa: ANN202
     if app_logs.error is not None:
         show(f"error: {app_logs.error}")
     print(__delimiter)
-    print("------- dag logs -------")
-    print(app_logs.dagLogs)
+    if len(app_logs.dagLogs) > 0:
+        print("------- dm logs -------")
+        print(app_logs.dagLogs)
     for app_name, app_log in app_logs.data.items():
+        parts = __logs_parts(app_log)
+        if len(parts) == 0:
+            continue
         print(f"------- {app_name} -------")
         if len(app_log.data) == 1:
             __show_logs_result(app_log.data[0])
         else:
-            for i, log_res in enumerate(app_log.data):
-                print(f"------- {i}:")
-                __show_logs_result(log_res)
-                print(__mini__delimiter)
+            c = len(parts)
+            for i, log_res in parts.items():
+                __show_logs_result(log_res, i)
+                c -= 1
+                if c != 0:
+                    print(__mini__delimiter)
         print(__delimiter)
 
 
 def show_logs_colored(app_logs: AppLogs, colors_dict: Optional[Dict[str, str]] = None) -> None:
     """colors_dict - should be unique for all app_logs by operation_id"""
-    def format(log, color: Optional[str]) -> None:
+    def format(log, color: Optional[str] = None) -> None:
         if color is None:
             Config.logger.warning(log)
         else:
             Config.logger.warning(color + log + __color_reset)
 
     def get_color(name: str) -> str:
-        if colors_dict is None:
-            return None
         color = colors_dict.get(name, None)
         if color is None:
             color = __colors[len(colors_dict) % len(__colors)]
             colors_dict[name] = color
        return color
 
+    if colors_dict is None:
+        colors_dict = {}
+
+    format(f"operation_id = {app_logs.operationId}")
     if app_logs.error is not None:
-        format(f"error: {app_logs.error}", get_color("error"))
+        color = get_color("error")
+        format(f"error: {app_logs.error}", color)
+        format(__delimiter, color)
     if len(app_logs.dagLogs) > 0:
-        color = get_color("dagLogs")
+        color = get_color("dm")
+        format("------- dm logs -------", color)
         for line in app_logs.dagLogs.splitlines():
-            format(f"dag: {line}", color)
+            format(f"dm: {line}", color)
     for app_name, app_log in app_logs.data.items():
-        color = get_color(f"${app_name}")
-        for i, logs_result in enumerate(app_log.data):
-            app_name_prefix = f"{app_name}${i}" if i != 0 else app_name
+        parts = __logs_parts(app_log)
+        if len(parts) == 0:
+            continue
+
+        color = get_color(app_name)
+        for i, logs_result in parts.items():
+            app_name_prefix = f"{app_name}${i}" if i != 0 or len(parts) > 1 else app_name
             if len(logs_result.data) > 0:
                 for line in logs_result.data.splitlines():
                     format(f"{app_name_prefix}$main: {line}", color)
-            if len(logs_result.logs) > 0:
-                for run_id, logs in logs_result.logs.items():
-                    user_logs = logs_result.userLogs.get(run_id, "")
-                    if len(user_logs) > 0:
-                        for line in user_logs.splitlines():
-                            format(f"{app_name_prefix}${run_id}: {line}", color)
-                    if len(logs) > 0:
-                        for line in logs.splitlines():
-                            format(f"{app_name_prefix}${run_id}: {line}", color)
+            if logs_result.logs is None:
+                logs_result.logs = {}
+            if logs_result.userLogs is None:
+                logs_result.userLogs = {}
+            for run_id, logs in logs_result.logs.items():
+                user_logs = logs_result.userLogs.get(run_id, "")
+                if len(user_logs) > 0:
+                    for line in user_logs.splitlines():
+                        format(f"{app_name_prefix}${run_id}$user: {line}", color)
+                if len(logs) > 0:
+                    for line in logs.splitlines():
+                        format(f"{app_name_prefix}${run_id}: {line}", color)
+            for run_id, user_logs in logs_result.userLogs.items():
+                if run_id not in logs_result.logs:
+                    for line in user_logs.splitlines():
+                        format(f"{app_name_prefix}${run_id}$user: {line}", color)
 
 
 def show_logs_func(data: str, err: bool = False): # noqa: ANN201
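
Because LogsResult.logs and LogsResult.userLogs may now be None, both printers normalise them to {} before iterating, skip app parts that carry no output, print user-only runs, and label scheduler output as "dm" instead of "dag"; show_logs_colored additionally tolerates colors_dict=None by creating its own dict. A hedged usage sketch with a hand-built AppLogs; the field names and import paths are taken from this diff and the wheel RECORD, and any other required fields of AppLogs/AppLog are not shown here.

# Sketch: exercising the updated printers with a hand-built AppLogs.
# Field names come from this diff; adjust if AppLogs/AppLog require more fields.
from malevich_coretools.abstract.abstract import AppLog, AppLogs, LogsResult
from malevich_coretools.secondary.helpers import show_logs, show_logs_colored

app_logs = AppLogs(
    operationId="op-123",
    dagLogs="scheduler started\nscheduler finished",
    data={
        "my_app": AppLog(data=[
            LogsResult(
                data="stdout of the app",
                logs={"run-1": "internal log line"},
                userLogs={"run-1": "user log line"},
            ),
        ]),
    },
)

show_logs(app_logs)          # plain printer; empty parts are skipped
show_logs_colored(app_logs)  # colors_dict now defaults to an internal {}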
@@ -6735,7 +6735,7 @@ def get_task_schedules(
     conn_url: Optional[str] = None,
     batcher: Optional[Batcher] = None,
     is_async: Literal[False] = False,
-) -> Schedules:
+) -> Union[Schedules, ResultIds]:
     pass
 
 
@@ -6748,32 +6748,39 @@ def get_task_schedules(
     conn_url: Optional[str] = None,
     batcher: Optional[Batcher] = None,
     is_async: Literal[True],
-) -> Coroutine[Any, Any, Schedules]:
+) -> Coroutine[Any, Any, Union[Schedules, ResultIds]]:
     pass
 
 
 def get_task_schedules(
-    operation_id: str,
+    operation_id: Optional[str] = None,
     with_show: bool = True,
     *,
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
     batcher: Optional[Batcher] = None,
     is_async: bool = False,
-) -> Union[Schedules, Coroutine[Any, Any, Schedules]]:
+) -> Union[ResultIds, Schedules, Coroutine[Any, Any, Schedules], Coroutine[Any, Any, ResultIds]]:
     """return schedule ids by `operation_id` """
     if batcher is None:
         batcher = Config.BATCHER
-    data = Operation(operationId=operation_id)
-    if batcher is not None:
-        return batcher.add("sendTaskSchedules", data=data, result_model=Schedules)
-    if is_async:
-        return f.get_task_schedules_async(
+    if operation_id is None:
+        if batcher is not None:
+            return batcher.add("getTaskSchedules", result_model=ResultIds)
+        if is_async:
+            return f.get_task_schedules_async(auth=auth, conn_url=conn_url)
+        return f.get_task_schedules(auth=auth, conn_url=conn_url)
+    else:
+        data = Operation(operationId=operation_id)
+        if batcher is not None:
+            return batcher.add("sendTaskSchedules", data=data, result_model=Schedules)
+        if is_async:
+            return f.post_task_schedules_async(
+                data, with_show=with_show, auth=auth, conn_url=conn_url
+            )
+        return f.post_task_schedules(
            data, with_show=with_show, auth=auth, conn_url=conn_url
        )
-    return f.get_task_schedules(
-        data, with_show=with_show, auth=auth, conn_url=conn_url
-    )
 
 
 @overload
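
At the utils level the same entry point now serves both endpoints: called without operation_id it lists every schedule id (ResultIds, via the new GET route), and with an operation_id it keeps the old behaviour and returns Schedules. A usage sketch, assuming the helper is re-exported at the package root; otherwise import it from malevich_coretools.utils.

# Sketch of the two call modes in 0.3.61 (ids invented).
import malevich_coretools as mct

all_schedule_ids = mct.get_task_schedules()        # ResultIds: every known schedule
one_operation = mct.get_task_schedules("op-123")   # Schedules for a single operation

# Async variant: await mct.get_task_schedules("op-123", is_async=True)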
@@ -6834,7 +6841,6 @@ def task_full(
     pass
 
 
-# FIXME check component
 def task_full(
     task_id: str,
     cfg_id: str,
@@ -6844,6 +6850,7 @@ def task_full(
     single_request: bool = False,
     profile_mode: Optional[str] = None,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     with_show: bool = True,
     long: bool = False,
     long_timeout: Optional[int] = WAIT_RESULT_TIMEOUT,
@@ -6906,6 +6913,7 @@ def task_full(
         profileMode=profile_mode,
         withLogs=True,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         scaleInfo=scaleInfo,
         component=component,
         policy=policy,
@@ -7019,6 +7027,7 @@ def task_prepare(
     with_logs: bool = False,
     profile_mode: Optional[str] = None,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     with_show: bool = None,
     long: bool = False,
     long_timeout: int = WAIT_RESULT_TIMEOUT,
@@ -7091,6 +7100,7 @@ def task_prepare(
         waitRuns=wait_runs,
         withLogs=with_logs,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         profileMode=profile_mode,
         scaleInfo=scaleInfo,
         component=component,
@@ -7135,6 +7145,7 @@ def task_run(
     long_timeout: int = WAIT_RESULT_TIMEOUT,
     with_logs: bool = False,
     schedule: Optional[Schedule] = None,
+    broadcast: bool = False,
     wait: bool = True,
     *,
     auth: Optional[AUTH] = None,
@@ -7159,6 +7170,7 @@ def task_run(
     long_timeout: int = WAIT_RESULT_TIMEOUT,
     with_logs: bool = False,
     schedule: Optional[Schedule] = None,
+    broadcast: bool = False,
     wait: bool = True,
     *,
     auth: Optional[AUTH] = None,
@@ -7182,6 +7194,7 @@ def task_run(
     long_timeout: int = WAIT_RESULT_TIMEOUT,
     with_logs: bool = False,
     schedule: Optional[Schedule] = None,
+    broadcast: bool = False,
     wait: bool = True,
     *,
     auth: Optional[AUTH] = None,
@@ -7204,6 +7217,7 @@ def task_run(
        long_timeout (Optional[int]): default timeout for long run (hour by default). If 'long=False' ignored. If None, then there is no limit. Doesn't stop the task, just stops trying to get the run result
        with_logs (bool): return run logs if True after end
        schedule: (Optional[Schedule]): schedule task runs settings - return scheduleId instead of operationId
+       broadcast: run scale pipeline (for new scale)
        wait (bool): is it worth waiting for the result or immediately return `operation_id`
        auth (Optional[AUTH]): redefined auth if not None"""
     if schedule is not None:
@@ -7224,6 +7238,7 @@ def task_run(
         profileMode=profile_mode,
         withLogs=with_logs,
         schedule=schedule,
+        broadcast=broadcast,
     )
     if batcher is not None:
         return batcher.add("sendTaskRun", data=data, result_model=AppLogs if with_logs or schedule is not None else None)
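
task_run simply forwards the new flag into RunTask.broadcast; per the docstring it makes the run target the scaled pipeline. A hedged sketch of a run call: the keyword names come from the signatures in this diff, the first positional argument is assumed to be the operation id returned by task_prepare, and the id itself is invented.

# Sketch: passing the new broadcast flag (operation id invented).
import malevich_coretools as mct

op_id = "operation-id-from-task-prepare"  # assumed to be task_run's first argument
mct.task_run(
    op_id,
    with_logs=True,
    broadcast=True,  # new in 0.3.61: run the scaled pipeline
)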
@@ -7322,6 +7337,7 @@ def pipeline_full(
     restrictions: Optional[Restrictions] = None,
     scaleInfo: List[ScaleInfo] = None,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     tags: Optional[Dict[str, str]] = None,
     with_show: bool = True,
     long: bool = False,
@@ -7367,6 +7383,7 @@ def pipeline_full(
         kafkaModeUrl=None,
         run=True,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         scaleCount=1,
         tags=tags,
     )
@@ -7489,6 +7506,7 @@ def pipeline_prepare(
     kafka_mode_url_response: Optional[str] = None,
     synthetic: bool = False,
     save_fails: bool = True,
+    clear_dag_logs: bool = True,
     scale_count: int = 1,
     tags: Optional[Dict[str, str]] = None,
     with_show: bool = True,
@@ -7533,6 +7551,7 @@ def pipeline_prepare(
         run=False,
         synthetic=synthetic,
         saveFails=save_fails,
+        clearDagLogs=clear_dag_logs,
         scaleCount=scale_count,
         tags=tags,
     )
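
clear_dag_logs follows the same route as save_fails: the snake_case keyword on task_full, task_prepare, pipeline_full and pipeline_prepare is copied into the clearDagLogs field of MainTask/MainPipeline and defaults to True. The new wire-model fields can be checked directly with pydantic's v2 introspection; the model import path matches the import hunk earlier in this diff.

# Inspecting the fields added in 0.3.61 via pydantic v2 model_fields.
from malevich_coretools.abstract.abstract import MainPipeline, MainTask, RunTask

print(MainTask.model_fields["clearDagLogs"].default)      # True
print(MainPipeline.model_fields["clearDagLogs"].default)  # True
print(RunTask.model_fields["broadcast"].default)          # False

# At the utils level these surface as keyword arguments, e.g.
# task_prepare(..., clear_dag_logs=False) or pipeline_prepare(..., clear_dag_logs=False).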
@@ -9263,6 +9282,156 @@ def delete_mcp_tools(
     return f.delete_mcp_tools(wait=wait, auth=auth, conn_url=conn_url)
 
 
+# app error info
+
+
+@overload
+def get_app_error_info(
+    operation_id: str,
+    run_id: Optional[str] = None,
+    bind_id: Optional[str] = None,
+    err_type: Optional[str] = None,
+    is_malevich_err: bool = False,
+    *,
+    with_auth: bool = True,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[False] = False,
+) -> AppErrorInfos:
+    pass
+
+
+@overload
+def get_app_error_info(
+    operation_id: str,
+    run_id: Optional[str] = None,
+    bind_id: Optional[str] = None,
+    err_type: Optional[str] = None,
+    is_malevich_err: bool = False,
+    *,
+    with_auth: bool = True,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[True],
+) -> Coroutine[Any, Any, AppErrorInfos]:
+    pass
+
+
+def get_app_error_info(
+    operation_id: str,
+    run_id: Optional[str] = None,
+    bind_id: Optional[str] = None,
+    err_type: Optional[str] = None,
+    is_malevich_err: bool = False,
+    *,
+    with_auth: bool = True,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: bool = False,
+) -> Union[AppErrorInfos, Coroutine[Any, Any, AppErrorInfos]]:
+    """return mcp tool struct by `id` or `name` """
+    assert id is not None or name is not None, "id or name should be set"
+    data = AppErrorInfoFilter(operationId=operation_id, runId=run_id, bindId=bind_id, errType=err_type, isMalevichErr=is_malevich_err)
+    if batcher is None:
+        batcher = Config.BATCHER
+    if batcher is not None:
+        return batcher.add("getAppErrorInfo", data=data, result_model=AppErrorInfos)
+    if is_async:
+        return f.get_app_error_info_async(data, with_auth=with_auth, auth=auth, conn_url=conn_url)
+    return f.get_app_error_info(data, with_auth=with_auth, auth=auth, conn_url=conn_url)
+
+
+@overload
+def delete_app_error_info(
+    operation_id: str,
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[False] = False,
+) -> Alias.Info:
+    pass
+
+
+@overload
+def delete_app_error_info(
+    operation_id: str,
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[True],
+) -> Coroutine[Any, Any, Alias.Info]:
+    pass
+
+
+def delete_app_error_info(
+    operation_id: str,
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: bool = False,
+) -> Union[Alias.Info, Coroutine[Any, Any, Alias.Info]]:
+    """delete app error info by `operationId` """
+    assert id is not None or name is not None, "id or name should be set"
+    if batcher is None:
+        batcher = Config.BATCHER
+    if batcher is not None:
+        return batcher.add("deleteAppErrorInfoByOperationId", vars={"operationId": operation_id})
+    if is_async:
+        return f.delete_app_error_info_async(operation_id, wait=wait, auth=auth, conn_url=conn_url)
+    return f.delete_app_error_info(operation_id, wait=wait, auth=auth, conn_url=conn_url)
+
+
+@overload
+def delete_app_error_infos(
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[False] = False,
+) -> Alias.Info:
+    pass
+
+
+@overload
+def delete_app_error_infos(
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: Literal[True],
+) -> Coroutine[Any, Any, Alias.Info]:
+    pass
+
+
+def delete_app_error_infos(
+    wait: bool = True,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+    batcher: Optional[Batcher] = None,
+    is_async: bool = False,
+) -> Union[Alias.Info, Coroutine[Any, Any, Alias.Info]]:
+    """delete all app error infos"""
+    if batcher is None:
+        batcher = Config.BATCHER
+    if batcher is not None:
+        return batcher.add("deleteAppErrorInfos")
+    if is_async:
+        return f.delete_app_error_infos_async(wait=wait, auth=auth, conn_url=conn_url)
+    return f.delete_app_error_infos(wait=wait, auth=auth, conn_url=conn_url)
+
+
 # kafka
 
 
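The high-level wrappers follow the usual utils pattern: build an AppErrorInfoFilter, hand it to the funcs layer, and optionally route through a batcher or the async path. A usage sketch; only operation_id is required, the ids below are invented, and the helpers are assumed to be re-exported at the package root like the rest of utils.

# Sketch: querying and clearing app error info (ids invented).
import malevich_coretools as mct

errors = mct.get_app_error_info("op-123", err_type="ValueError")
for err in errors.data:
    print(err.bindId, err.funId, err.errType, err.errArgs)

mct.delete_app_error_info("op-123")  # drop the records for one operation
mct.delete_app_error_infos()         # or drop everything

# Async variant: await mct.get_app_error_info("op-123", is_async=True)
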
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: malevich-coretools
-Version: 0.3.54
+Version: 0.3.61
 Author: Andrew Pogrebnoj
 Author-email: andrew@onjulius.co
 License-File: LICENSE
@@ -1,7 +1,7 @@
 malevich_coretools/__init__.py,sha256=DJtPESxkCZD2SbTZTrR_x0TKDQ4MJpmBqGw5YpKYidM,134
-malevich_coretools/utils.py,sha256=NlYAn3qCiCfhCpvkOyJBsrY-2duTw-0PJlxnX_JZQwg,270535
+malevich_coretools/utils.py,sha256=9k2I7YMrLtSlfHWkhVuYWdtljZgWKnfJvFDdqexhx-I,275747
 malevich_coretools/abstract/__init__.py,sha256=6vQ08c8HPYyT_pPkKlc-EwQKE8xG3HTEo2p_GiI5rik,142
-malevich_coretools/abstract/abstract.py,sha256=CZRNn-pqg_eORgJh3XXhpf6hSb2ubR718DdHRz4p05c,16463
+malevich_coretools/abstract/abstract.py,sha256=S5yYRhzpqsiIEN083LqUJlvu0MZ9Z__DdOLhS5ZsPdc,17475
 malevich_coretools/abstract/operations.py,sha256=cWlo2xzW-rzkTInzpDjBYeL68KfLYqSpZJRzCQ4OzjA,3070
 malevich_coretools/abstract/pipeline.py,sha256=HwhYp5G9yaZYaeDypChfpNd2W-kmJQfM9I54uek0B9k,7914
 malevich_coretools/abstract/statuses.py,sha256=9ISSw_evsylBshLXoU44TCoFOrZm4bXIxyAFFDqdUWc,333
@@ -11,18 +11,18 @@ malevich_coretools/batch/__init__.py,sha256=taxyZl8YOZd2EBd3leN6slzMkejUtjQ64Na3
 malevich_coretools/batch/utils.py,sha256=FRmCYU-zr-RjgT1Mo3CUNcB2mW1t_gKCJazcMx6aIW4,7719
 malevich_coretools/funcs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 malevich_coretools/funcs/checks.py,sha256=Q5pRtRevQrGv_-SMbn2GgYnulhclDLBXdRtbw2QOYKU,223
-malevich_coretools/funcs/funcs.py,sha256=ZbVsmd-4ypBK5yaKS5WQIgw-iu2bL3GyD2C7YYw-D50,83669
-malevich_coretools/funcs/helpers.py,sha256=0zkHyuuBGWHmoLFJiLQHXkcmtoFrK3gDvNWiU9oKvHw,13767
+malevich_coretools/funcs/funcs.py,sha256=FgBrMRbGrZylVGzew48fUfy2VxRMK9gH-RXmJdyG7F4,85252
+malevich_coretools/funcs/helpers.py,sha256=3-2s5-8hiBeslir8Qa1IgKkHwgYnjPG5w7Kog1e7dHI,13830
 malevich_coretools/secondary/__init__.py,sha256=048HqvG36_1WdDVZK_RuECmaf14Iq2fviUysG1inlaE,78
 malevich_coretools/secondary/config.py,sha256=hRlSJuPQnhKyt1wmOAJX_XmcliaO0fPGbW94AE_Mazs,463
-malevich_coretools/secondary/const.py,sha256=raoPX6aFuk_Rj9t4aYyLoqzEv6N8aHzIfdaF9QIeijE,15187
-malevich_coretools/secondary/helpers.py,sha256=lbLgHJeDQvn38DnKuXMaaNUD6PCxjzCusZO8DcLXqDk,6384
+malevich_coretools/secondary/const.py,sha256=r_P9x0F8YFDFmPHF4oCUQJ4Va3v39CYG_Isg16CDpmo,15321
+malevich_coretools/secondary/helpers.py,sha256=9zRU_EjESks_DbDxs6PjaubtKeJWTqOI_fCoTsEsP8w,7979
 malevich_coretools/secondary/kafka_utils.py,sha256=SIUnBFyfwsquN6MAUrEkKCw-1l7979Znl7OTQSX2UKo,989
 malevich_coretools/tools/__init__.py,sha256=jDxlCa5Dr6Y43qlI7JwsRAlBkKmFeTHTEnjNUvu-0iw,46
 malevich_coretools/tools/abstract.py,sha256=B1RW1FeNHrQ6r1k-cQZ4k4noCRXkIGt-JUwVoXEDkAg,4466
 malevich_coretools/tools/vast.py,sha256=63tvy70qQV9vnK0eWytlgjBGSnfA7l3kSIDgACBbMMs,12893
-malevich_coretools-0.3.54.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-malevich_coretools-0.3.54.dist-info/METADATA,sha256=CFIWe_2rqGcnDQJOmB3GDRjoPToxQbmhvYlmXJnkA7c,347
-malevich_coretools-0.3.54.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-malevich_coretools-0.3.54.dist-info/top_level.txt,sha256=wDX3s1Tso0otBPNrFRfXqyNpm48W4Bp5v6JfbITO2Z8,19
-malevich_coretools-0.3.54.dist-info/RECORD,,
+malevich_coretools-0.3.61.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+malevich_coretools-0.3.61.dist-info/METADATA,sha256=Itxpu3ReVN0qcFy1YQuozsf4zOZAT6NIY4wmaYe6NNQ,347
+malevich_coretools-0.3.61.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+malevich_coretools-0.3.61.dist-info/top_level.txt,sha256=wDX3s1Tso0otBPNrFRfXqyNpm48W4Bp5v6JfbITO2Z8,19
+malevich_coretools-0.3.61.dist-info/RECORD,,