malevich-coretools 0.2.6__tar.gz → 0.2.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of malevich-coretools might be problematic.
- {malevich-coretools-0.2.6/malevich_coretools.egg-info → malevich-coretools-0.2.7}/PKG-INFO +1 -1
- malevich-coretools-0.2.7/VERSION +1 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/abstract/abstract.py +4 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/funcs/funcs.py +8 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/secondary/const.py +3 -1
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/utils.py +153 -94
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7/malevich_coretools.egg-info}/PKG-INFO +1 -1
- malevich-coretools-0.2.6/VERSION +0 -1
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/LICENSE +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/MANIFEST.in +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/README.md +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/__init__.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/abstract/__init__.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/admin/__init__.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/admin/utils.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/funcs/__init__.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/funcs/helpers.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/secondary/__init__.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/secondary/config.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/secondary/helpers.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/tools/__init__.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/tools/vast.py +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools.egg-info/SOURCES.txt +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools.egg-info/dependency_links.txt +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools.egg-info/requires.txt +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools.egg-info/top_level.txt +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/pyproject.toml +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/requirements.txt +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/setup.cfg +0 -0
- {malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/setup.py +0 -0

malevich-coretools-0.2.7/VERSION ADDED
@@ -0,0 +1 @@
+0.2.7
{malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/funcs/funcs.py
@@ -54,6 +54,14 @@ def get_collection_name(name: str, operation_id: Optional[str], run_id: Optional
     return model_from_json(send_to_core_get(COLLECTIONS_NAME(name, operation_id, run_id, offset, limit), *args, **kwargs), ResultCollection)
 
 
+def get_collections_ids_groupName(name: str, operation_id: str, run_id: str, *args, **kwargs) -> ResultIds:
+    return model_from_json(send_to_core_get(COLLECTIONS_IDS_GROUP_NAME(name, operation_id, run_id), *args, **kwargs), ResultIds)
+
+
+def get_collections_groupName(name: str, operation_id: str, run_id: str, *args, **kwargs) -> ResultCollections:
+    return model_from_json(send_to_core_get(COLLECTIONS_GROUP_NAME(name, operation_id, run_id), *args, **kwargs), ResultCollections)
+
+
 def get_collections_id(id: str, offset: int, limit: int, *args, **kwargs) -> ResultCollection:
     return model_from_json(send_to_core_get(COLLECTIONS_ID(id, offset, limit), *args, **kwargs), ResultCollection)
 
{malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/secondary/const.py
@@ -19,7 +19,7 @@ SCHEME_PATTERN = r"[a-zA-Z_]\w+"
 def with_wait(url, wait) -> str:
     return url if wait is None else f"{url}?wait={bool_to_str(wait)}" # always first
 
-def with_key_values(url: str, key_values: Dict[
+def with_key_values(url: str, key_values: Dict[str, Optional[str]]) -> str:
     sep = "?"
     for key, value in key_values.items():
         if value is not None:
@@ -38,6 +38,8 @@ COLLECTIONS_MAIN = f"{API_VERSION}/collections"
 COLLECTIONS = lambda wait: with_wait(f"{COLLECTIONS_MAIN}/", wait)
 COLLECTIONS_IDS_NAME = lambda name, operation_id, run_id: with_key_values(f"{COLLECTIONS_MAIN}/ids/name/{name}", {"operationId": operation_id, "runId": run_id})
 COLLECTIONS_NAME = lambda name, operation_id, run_id, offset, limit: with_key_values(f"{COLLECTIONS_MAIN}/name/{name}", {"operationId": operation_id, "runId": run_id, "offset": offset, "limit": limit})
+COLLECTIONS_IDS_GROUP_NAME = lambda name, operation_id, run_id: with_key_values(f"{COLLECTIONS_MAIN}/ids/groupName/{name}", {"operationId": operation_id, "runId": run_id})
+COLLECTIONS_GROUP_NAME = lambda name, operation_id, run_id: with_key_values(f"{COLLECTIONS_MAIN}/groupName/{name}", {"operationId": operation_id, "runId": run_id})
 COLLECTIONS_ID = lambda id, offset, limit: with_key_values(f"{COLLECTIONS_MAIN}/{id}", {"offset": offset, "limit": limit})
 COLLECTIONS_ID_MODIFY = lambda id, wait: with_wait(f"{COLLECTIONS_MAIN}/{id}", wait)
 COLLECTIONS_ID_S3 = lambda id, wait: with_wait(f"{COLLECTIONS_MAIN}/s3/{id}", wait)
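
For orientation, a hedged illustration (not part of the package) of the request paths the new constants are expected to build, assuming with_key_values appends each non-None entry as a query parameter and joins later parameters with "&":

    # Hypothetical values for illustration only.
    url = COLLECTIONS_IDS_GROUP_NAME("my_group", "op-123", None)
    # -> f"{COLLECTIONS_MAIN}/ids/groupName/my_group?operationId=op-123"   (runId=None is skipped)
    url = COLLECTIONS_GROUP_NAME("my_group", "op-123", "run-1")
    # -> f"{COLLECTIONS_MAIN}/groupName/my_group?operationId=op-123&runId=run-1"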
{malevich-coretools-0.2.6 → malevich-coretools-0.2.7}/malevich_coretools/utils.py
@@ -1,6 +1,7 @@
 import json
 import os
 import re
+import subprocess
 from typing import Union
 
 import pandas as pd
@@ -16,18 +17,20 @@ from malevich_coretools.secondary.const import (
 )
 from malevich_coretools.secondary.helpers import rand_str
 
+__unique_digest_substring = "@sha256:"
+
 # config
 
 
 def set_host_port(host_port: str) -> None:
-    """update host and port for malevich-core, example:
+    """update host and port for malevich-core, example: `http://localhost:8080/` """
     assert len(host_port) > 0, "empty host port"
     host_port = host_port if host_port[-1] == "/" else f"{host_port}/"
     Config.HOST_PORT = host_port
 
 
 def set_conn_url(conn_url: str) -> None:
-    """analogue set_host_port; update conn_url for malevich-core, example:
+    """analogue set_host_port; update `conn_url` for malevich-core, example: `http://localhost:8080/` """
     set_host_port(conn_url)
 
 
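
A minimal usage sketch of the configuration helpers whose docstrings were completed above; the URL is just the docstring's example value, and the direct import path from utils is assumed:

    from malevich_coretools.utils import set_host_port, set_conn_url

    set_host_port("http://localhost:8080")  # a trailing "/" is appended automatically
    set_conn_url("http://localhost:8080/")  # analogue of set_host_port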
@@ -57,20 +60,48 @@ def update_core_credentials(username: USERNAME, password: PASSWORD) -> None:
     Config.CORE_PASSWORD = password
 
 
+def digest_by_image(image_ref: str, username: Optional[str] = None, password: Optional[str] = None) -> Optional[str]:
+    """return image in digest format by `image_ref`, if fail - return None and log error. If `image_ref` in digest format - return itself without check
+
+    Args:
+        image_ref (str): image_ref
+        username (Optional[str], optional): username if necessary. Defaults to None.
+        password (Optional[str], optional): password if necessary. Defaults to None.
+
+    Returns:
+        Optional[str]: image_ref in digest format or None if failed
+    """
+    if __unique_digest_substring in image_ref:
+        return image_ref
+    if username is None and password is None:
+        cmd = ["skopeo", "inspect", "--no-creds", f"docker://{image_ref}"]
+    else:
+        cmd = ["skopeo", "inspect", "--username", username, "--password", password, f"docker://{image_ref}"]
+    result = subprocess.run(cmd, capture_output=True)
+    info = result.stdout.decode("utf-8")
+    if info != "":
+        digest = json.loads(info)["Digest"]
+        return f"{image_ref[:len(image_ref) if (index := image_ref.rfind(':')) == -1 else index]}@{digest}"
+    else:
+        info = result.stderr.decode("utf-8")
+        Config.logger.error(info)
+        return None
+
+
 # Docs
 
 
 def get_docs(
     *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultIds:
-    """return list ids"""
+    """return list ids """
     return f.get_docs(auth=auth, conn_url=conn_url)
 
 
 def get_doc(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultDoc:
-    """return doc by
+    """return doc by `id` """
     return f.get_docs_id(id, auth=auth, conn_url=conn_url)
 
 
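
A hedged usage sketch of the new digest_by_image helper; it shells out to the skopeo CLI, so skopeo must be installed, and the image reference, credentials, and import path below are placeholders/assumptions:

    from malevich_coretools.utils import digest_by_image

    ref = digest_by_image("registry.example.com/myapp:latest")  # anonymous: runs `skopeo inspect --no-creds`
    # ref = digest_by_image("registry.example.com/myapp:latest", username="user", password="secret")
    # On success: "registry.example.com/myapp@sha256:<digest>"; on failure: None (the error is logged).
    # A reference that already contains "@sha256:" is returned unchanged.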
@@ -82,7 +113,7 @@ def create_doc(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """save doc with
+    """save doc with `data` and `name`, return `id` """
     return f.post_docs(
         DocWithName(data=data, name=name), wait=wait, auth=auth, conn_url=conn_url
     )
@@ -97,7 +128,7 @@ def update_doc(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """update doc by
+    """update doc by `id`, return `id` """
     return f.post_docs_id(
         id, DocWithName(data=data, name=name), wait=wait, auth=auth, conn_url=conn_url
     )
@@ -110,7 +141,7 @@ def delete_doc(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete doc by
+    """delete doc by `id` """
     return f.delete_docs_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -139,7 +170,7 @@ def get_collections_by_name(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> ResultOwnAndSharedIds:
-    """return 2 list: own collections ids and shared collections ids by
+    """return 2 list: own collections ids and shared collections ids by `name` and mb also `operation_id` and `run_id` with which it was saved"""
     assert not (
         operation_id is None and run_id is not None
     ), "if run_id set, operation_id should be set too"
@@ -156,7 +187,7 @@ def get_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> ResultCollection:
-    """return collection by
+    """return collection by `id`, pagination: unlimited - `limit` < 0"""
     return f.get_collections_id(id, offset, limit, auth=auth, conn_url=conn_url)
 
 
@@ -170,7 +201,7 @@ def get_collection_by_name(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> ResultCollection:
-    """return collection by
+    """return collection by `name` and mb also `operation_id` and `run_id` with which it was saved. raise if there are multiple collections, pagination: unlimited - `limit` < 0"""
     assert not (
         operation_id is None and run_id is not None
     ), "if run_id set, operation_id should be set too"
@@ -179,6 +210,34 @@ def get_collection_by_name(
     )
 
 
+def get_collections_ids_by_group_name(
+    group_name: str,
+    operation_id: str,
+    run_id: Optional[str] = None,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+) -> ResultIds:
+    """return collections ids by `group_name`, `operation_id` and `run_id` with which it was saved"""
+    return f.get_collections_ids_groupName(
+        group_name, operation_id, run_id, auth=auth, conn_url=conn_url
+    )
+
+
+def get_collections_by_group_name(
+    group_name: str,
+    operation_id: str,
+    run_id: Optional[str] = None,
+    *,
+    auth: Optional[AUTH] = None,
+    conn_url: Optional[str] = None,
+) -> ResultCollections:
+    """return collections by `group_name`, `operation_id` and `run_id` with which it was saved"""
+    return f.get_collections_groupName(
+        group_name, operation_id, run_id, auth=auth, conn_url=conn_url
+    )
+
+
 def create_collection(
     ids: List[str],
     name: Optional[str] = None,
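
A minimal sketch of calling the new group-name wrappers; the group name, operation id, and run id below are placeholders, the direct import path from utils is assumed, and auth/conn_url overrides work as for the other wrappers:

    from malevich_coretools.utils import (
        get_collections_by_group_name,
        get_collections_ids_by_group_name,
    )

    ids = get_collections_ids_by_group_name("my_group", "op-123", run_id="run-1")      # -> ResultIds
    colls = get_collections_by_group_name("my_group", "op-123", run_id="run-1")        # -> ResultCollections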
@@ -188,7 +247,7 @@ def create_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """save collection by list docs
+    """save collection by list docs `ids`, return id"""
     return f.post_collections(
         DocsCollection(data=ids, name=name, metadata=metadata),
         wait=wait,
@@ -207,7 +266,7 @@ def update_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """update collection with
+    """update collection with `id` by list docs `ids`, return `id` """
     return f.post_collections_id(
         id,
         DocsCollection(data=ids, name=name, metadata=metadata),
@@ -245,7 +304,7 @@ def create_collection_by_docs(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """save collection by
+    """save collection by `docs`, return `id` """
     return f.post_collections_data(
         DocsDataCollection(data=docs, name=name, metadata=metadata),
         wait=wait,
@@ -262,7 +321,7 @@ def add_to_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """add to collection with
+    """add to collection with `id` docs with `ids` """
     return f.post_collections_id_add(
         id, DocsCollectionChange(data=ids), wait=wait, auth=auth, conn_url=conn_url
     )
@@ -276,7 +335,7 @@ def copy_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """copy collection with
+    """copy collection with `id`, if not `full_copy` docs same as in collection with `id` """
     return f.post_collections_id_copy(
         id, full_copy=full_copy, wait=wait, auth=auth, conn_url=conn_url
     )
@@ -291,7 +350,7 @@ def apply_scheme(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
-    """apply scheme with
+    """apply scheme with `scheme_name` to collection with `coll_id` return new collection with another `coll_id` """
     return f.post_collections_id_applyScheme(
         coll_id,
         FixScheme(schemeName=scheme_name, mode=mode),
@@ -310,7 +369,7 @@ def fix_scheme(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """optimization to core (not necessary call), sets the schema with
+    """optimization to core (not necessary call), sets the schema with `scheme_name` for the collection with `coll_id` """
     fix_scheme_data = FixScheme(schemeName=scheme_name, mode=mode)
     return f.post_collections_id_fixScheme(
         coll_id, fix_scheme_data, wait=wait, auth=auth, conn_url=conn_url
@@ -324,7 +383,7 @@ def unfix_scheme(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """unfix scheme for collection with
+    """unfix scheme for collection with `coll_id` """
     return f.post_collections_id_unfixScheme(
         coll_id, wait=wait, auth=auth, conn_url=conn_url
     )
@@ -338,7 +397,7 @@ def update_metadata(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """update metadata for collection with
+    """update `metadata` for collection with `coll_id` """
     collection_metadata = CollectionMetadata(data=metadata)
     return f.post_collections_metadata(
         coll_id, collection_metadata, wait=wait, auth=auth, conn_url=conn_url
@@ -359,7 +418,7 @@ def delete_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete collection with
+    """delete collection with `id` """
     return f.delete_collections_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -370,7 +429,7 @@ def s3_delete_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete collection from s3 by key (that =id if not specified in s3_save_collection)"""
+    """delete collection from s3 by `key` (that =id if not specified in s3_save_collection)"""
     return f.delete_collections_id_s3(key, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -382,7 +441,7 @@ def delete_from_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete docs with
+    """delete docs with `ids` from collection with `id` """
     return f.delete_collections_id_del(
         id, DocsCollectionChange(data=ids), wait=wait, auth=auth, conn_url=conn_url
     )
@@ -401,14 +460,14 @@ def get_schemes(
 def get_scheme(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultScheme:
-    """return scheme by id"""
+    """return scheme by `id` """
     return f.get_schemes_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_scheme_raw(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> Alias.Json:
-    """return raw scheme data by id"""
+    """return raw scheme data by `id` """
     return f.get_schemes_id_raw(id, auth=auth, conn_url=conn_url)
 
 
@@ -434,8 +493,8 @@ def create_scheme(
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
     """create scheme\n
-
-    return
+    `scheme_data` must be json or dict
+    return `id` """
     assert re.fullmatch(SCHEME_PATTERN, name) is not None, f"wrong scheme name: {name}"
     scheme_json = to_json(scheme_data)
     scheme = SchemeWithName(data=scheme_json, name=name)
@@ -452,8 +511,8 @@ def update_scheme(
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
     """update scheme\n
-
-    return
+    `scheme_data` must be json or dict
+    return `id` """
     assert re.fullmatch(SCHEME_PATTERN, name) is not None, f"wrong scheme name: {name}"
     scheme_json = to_json(scheme_data)
     scheme = SchemeWithName(data=scheme_json, name=name)
@@ -494,7 +553,7 @@ def delete_scheme(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete scheme by id"""
+    """delete scheme by `id` """
     return f.delete_schemes_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -526,7 +585,7 @@ def check_auth(
 
 
 def ping() -> Alias.Info:
-    """return
+    """return `pong` """
     return f.get_ping()
 
 
@@ -537,17 +596,17 @@ def ping() -> Alias.Info:
 
 
 # def get_mapping(id: str, *, auth: Optional[AUTH]=None, conn_url: Optional[str]=None) -> ResultMapping:
-# """return mapping by
+# """return mapping by `id`"""
 # return f.get_mapping_id(id, auth=auth, conn_url=conn_url)
 
 
 # def create_mapping(docs_ids: List[str], scheme_id: str, wait: bool = True, *, auth: Optional[AUTH]=None, conn_url: Optional[str]=None) -> Alias.Info:
-# """try to do and save mapping docs with
+# """try to do and save mapping docs with `docs_ids` with scheme with `scheme_id`, ignore if failed"""
 # return f.post_mapping(DocsAndScheme(docsIds=docs_ids, schemeId=scheme_id), wait=wait, auth=auth, conn_url=conn_url)
 
 
 # def delete_mapping(doc_id: str, wait: bool = True, *, auth: Optional[AUTH]=None, conn_url: Optional[str]=None) -> Alias.Info:
-# """delete mappings for doc with
+# """delete mappings for doc with `doc_id`"""
 # return f.delete_mapping_id(doc_id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -561,28 +620,28 @@ def ping() -> Alias.Info:
 def get_shared_collection(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultLogins:
-    """return list logins to which user has given access to the collection with
+    """return list logins to which user has given access to the collection with `id` """
     return f.get_share_collection_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_shared_scheme(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultLogins:
-    """return list logins to which user has given access to the scheme with
+    """return list logins to which user has given access to the scheme with `id` """
     return f.get_share_scheme_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_shared_app(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultLogins:
-    """return list logins to which user has given access to the app with
+    """return list logins to which user has given access to the app with `id` """
     return f.get_share_userApp_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_shared_by_login(
     login: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultSharedForLogin:
-    """return structure with all info about share to user with
+    """return structure with all info about share to user with `login` """
     return f.get_share_login(login, auth=auth, conn_url=conn_url)
 
 
@@ -594,7 +653,7 @@ def share_collection(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """gives access to the collection with
+    """gives access to the collection with `id` to all users with `user_logins` """
     return f.post_share_collection_id(
         id,
         SharedWithUsers(userLogins=user_logins),
@@ -612,7 +671,7 @@ def share_scheme(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """gives access to the scheme with
+    """gives access to the scheme with `id` to all users with `user_logins` """
     return f.post_share_scheme_id(
         id,
         SharedWithUsers(userLogins=user_logins),
@@ -630,7 +689,7 @@ def share_app(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """gives access to the app with
+    """gives access to the app with `id` to all users with `user_logins` """
     return f.post_share_userApp_id(
         id,
         SharedWithUsers(userLogins=user_logins),
@@ -650,7 +709,7 @@ def share(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """gives access to everything listed to all users with
+    """gives access to everything listed to all users with `user_logins` """
     assert user_logins is not None, '"user_logins" is empty'
     collections_ids = [] if collections_ids is None else collections_ids
     schemes_ids = [] if schemes_ids is None else schemes_ids
@@ -678,7 +737,7 @@ def delete_shared(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """removes access to everything listed to all users with
+    """removes access to everything listed to all users with `user_logins` """
     assert user_logins is not None, '"user_logins" is empty'
     collections_ids = [] if collections_ids is None else collections_ids
     schemes_ids = [] if schemes_ids is None else schemes_ids
@@ -741,7 +800,7 @@ def delete_user_login(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> None:
-    """admin: delete user by login"""
+    """admin: delete user by `login` """
     f.delete_register_login(login, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -772,14 +831,14 @@ def get_apps_map(
 def get_app(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> UserApp:
-    """return app by
+    """return app by `id` """
     return f.get_userApps_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_app_real(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> UserApp:
-    """return app by
+    """return app by real `id` """
     return f.get_userApps_realId(id, auth=auth, conn_url=conn_url)
 
 
@@ -802,9 +861,9 @@ def create_app(
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
     """create app\n
-
-
-    return
+    `app_cfg` must be json or dict or None\n
+    `image_ref` automatically generated by id, but this is not always True, it is better to set it\n
+    return `id` """
     assert (
         platform in POSSIBLE_APPS_PLATFORMS
     ), f"wrong platform: {platform}, possible platforms: {POSSIBLE_APPS_PLATFORMS}"
@@ -852,9 +911,9 @@ def update_app(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """update app by
-
-
+    """update app by `id`\n
+    `app_cfg` must be json or dict or None\n
+    `image_ref` automatically generated by id, but this is not always True, it is better to set it"""
     assert (
         platform in POSSIBLE_APPS_PLATFORMS
     ), f"wrong platform: {platform}, possible platforms: {POSSIBLE_APPS_PLATFORMS}"
@@ -897,7 +956,7 @@ def delete_app(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete user app by
+    """delete user app by `id` """
     return f.delete_userApps_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -928,14 +987,14 @@ def get_tasks_map(
 def get_task(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> UserTask:
-    """return task by
+    """return task by `id` """
     return f.get_userTasks_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_task_real(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> UserTask:
-    """return task by
+    """return task by real `id` """
     return f.get_userTasks_realId(id, auth=auth, conn_url=conn_url)
 
 
@@ -980,7 +1039,7 @@ def update_task(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """update task by
+    """update task by `id` """
     if apps_depends is None:
         apps_depends = []
     if tasks_depends is None:
@@ -1008,7 +1067,7 @@ def delete_task(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete user task by
+    """delete user task by `id` """
     return f.delete_userTasks_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -1039,14 +1098,14 @@ def get_cfgs_map(
 def get_cfg(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultUserCfg:
-    """return cfg by
+    """return cfg by `id` """
     return f.get_userCfgs_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_cfg_real(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> ResultUserCfg:
-    """return cfg by
+    """return cfg by real `id` """
     return f.get_userCfgs_realId(id, auth=auth, conn_url=conn_url)
 
 
@@ -1059,8 +1118,8 @@ def create_cfg(
     conn_url: Optional[str] = None,
 ) -> Alias.Id:
     """create configuration file\n
-
-    return
+    `cfg` must be json or dict or Cfg\n
+    return `id` """
     if isinstance(cfg, Cfg):
         cfg_json = cfg.json()
     else:
@@ -1079,7 +1138,7 @@ def update_cfg(
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
     """update configuration file\n
-
+    `cfg` must be json or dict or Cfg"""
     if isinstance(cfg, Cfg):
         cfg_json = cfg.json()
     else:
@@ -1102,7 +1161,7 @@ def delete_cfg(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete user cfg by
+    """delete user cfg by `id` """
     return f.delete_userCfgs_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -1119,7 +1178,7 @@ def get_operations_results(
 def get_operation_result(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> str:
-    """return result by operation
+    """return result by operation `id` if operation status is `OK` """
     return f.get_operationResults_id(id, auth=auth, conn_url=conn_url)
 
 
@@ -1137,7 +1196,7 @@ def delete_operation_result(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """delete operation result by
+    """delete operation result by `id` """
     return f.delete_operationResults_id(id, wait=wait, auth=auth, conn_url=conn_url)
 
 
@@ -1147,7 +1206,7 @@ def delete_operation_result(
 def get_run_condition(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> Condition:
-    """return run condition by operation
+    """return run condition by operation `id` for running task"""
     return f.get_run_condition(id, auth=auth, conn_url=conn_url)
 
 
@@ -1161,7 +1220,7 @@ def get_run_active_runs(
 def get_run_main_task_cfg(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> MainTaskCfg:
-    """return mainTaskCfg by operation
+    """return mainTaskCfg by operation `id` for running task"""
     return f.get_run_mainTaskCfg(id, auth=auth, conn_url=conn_url)
 
 
@@ -1172,7 +1231,7 @@ def get_task_runs(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> ResultIds:
-    """return list running operationIds with
+    """return list running operationIds with `task_id` and `cfg_id` if specified"""
     return f.get_run_operationsIds(task_id, cfg_id, auth=auth, conn_url=conn_url)
 
 
@@ -1188,7 +1247,7 @@ def logs(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> AppLogs:
-    """return task logs by operation
+    """return task logs by operation `id` and `run_id` """
     task = LogsTask(operationId=id, runId=run_id)
     return f.get_manager_logs(task, with_show=with_show, auth=auth, conn_url=conn_url)
 
@@ -1204,7 +1263,7 @@ def logs_app(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> AppLogs:
-    """return app logs by operation
+    """return app logs by operation `id`, `run_id`, `task_id` (that "null" if not exist) and `app_id` """
     task = LogsTask(
         operationId=id, runId=run_id, appId=app_id, taskId=task_id, force=force
     )
@@ -1221,14 +1280,14 @@ def logs_clickhouse(
 def logs_clickhouse_id(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> Alias.Json:
-    """return clickhouse logs by operation
+    """return clickhouse logs by operation `id` """
     return f.get_clickhouse_id(id, auth=auth, conn_url=conn_url)
 
 
 def get_dag_key_value(
     id: str, *, auth: Optional[AUTH] = None, conn_url: Optional[str] = None
 ) -> Alias.Json:
-    """return key-value cfg from dag by operation
+    """return key-value cfg from dag by operation `id` """
     return f.get_manager_dagKeyValue_operationId(id, auth=auth, conn_url=conn_url)
 
 
@@ -1240,7 +1299,7 @@ def update_dag_key_value(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> None:
-    """update key-value cfg from dag by operation
+    """update key-value cfg from dag by operation `id` and `data` """
     return f.post_manager_dagKeyValue(
         KeysValues(data=data, operationId=operationId),
         wait=wait,
@@ -1256,7 +1315,7 @@ def get_app_info(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Union[Alias.Json, AppFunctionsInfo]:
-    """return json with functions app info, id is appId"""
+    """return json with functions app info, `id` is appId"""
     return f.get_app_info(id, parse=parse, auth=auth, conn_url=conn_url)
 
 
@@ -1267,7 +1326,7 @@ def get_app_info_by_real_id(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Union[Alias.Json, AppFunctionsInfo]:
-    """return json with functions app info, id is real id for app"""
+    """return json with functions app info, `id` is real id for app"""
     return f.get_app_info_by_real_id(id, parse=parse, auth=auth, conn_url=conn_url)
 
 
@@ -1296,7 +1355,7 @@ def get_task_schedules(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Schedules:
-    """return schedule ids by
+    """return schedule ids by `operation_id` """
     operation = Operation(operationId=operation_id)
     return f.get_task_schedules(
         operation, with_show=with_show, auth=auth, conn_url=conn_url
@@ -1325,7 +1384,7 @@ def task_full(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> AppLogs:
-    """prepare, run and stop task by
+    """prepare, run and stop task by `task_id`, `cfg_id` and other
 
     Args:
         task_id (str): task id
@@ -1343,7 +1402,7 @@ def task_full(
         policy: (TaskPolicy): policy for task
         schedule: (Optional[Schedule]): schedule task settings - return scheduleId instead of operationId
         restrictions: (Optional[Restrictions]): permissions to handle deployment
-        wait (bool): is it worth waiting for the result or immediately return operation_id
+        wait (bool): is it worth waiting for the result or immediately return `operation_id`
         auth (Optional[AUTH]): redefined auth if not None"""
     if scaleInfo is None:
         scaleInfo = []
@@ -1409,7 +1468,7 @@ def task_prepare(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> AppLogs:
-    """prepare task by
+    """prepare task by `task_id`, `cfg_id` and other, return `operation_id`
 
     Args:
         task_id (str): task id
@@ -1432,7 +1491,7 @@ def task_prepare(
         component: (TaskComponent): which component should run it (dag id, base id - None)
         policy: (TaskPolicy): policy for task
         restrictions: (Optional[Restrictions]): permissions to handle deployment
-        wait (bool): is it worth waiting for the result or immediately return operation_id
+        wait (bool): is it worth waiting for the result or immediately return `operation_id`
         auth (Optional[AUTH]): redefined auth if not None"""
     if kafka_mode_url_response is not None and kafka_mode is False:
         Config.logger.info(
@@ -1498,10 +1557,10 @@ def task_run(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Optional[AppLogs]:
-    """run prepared task by
+    """run prepared task by `operation_id` with `cfg_id` and other overridden parameters
 
     Args:
-        operation_id (str): operation_id
+        operation_id (str): `operation_id`, that returned from 'task_prepare'
         cfg_id (Optional[str]): cfg id, override default cfg id (from 'task_prepare') if exist
         info_url (Optional[str]): Rewrite for this run if exist
         debug_mode (Optional[bool]): Rewrite for this run if exist
@@ -1513,7 +1572,7 @@ def task_run(
         long_timeout (Optional[int]): default timeout for long run (hour by default). If 'long=False' ignored. If None, then there is no limit. Doesn't stop the task, just stops trying to get the run result
         with_logs (bool): return run logs if True after end
         schedule: (Optional[Schedule]): schedule task runs settings - return scheduleId instead of operationId
-        wait (bool): is it worth waiting for the result or immediately return operation_id
+        wait (bool): is it worth waiting for the result or immediately return `operation_id`
         auth (Optional[AUTH]): redefined auth if not None"""
     if run_id is None:
         run_id = rand_str(15)
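
Since these docstrings describe the prepare/run/stop lifecycle, here is a hedged sketch of that flow; the ids are placeholders, the argument order is assumed from the Args lists above, and the direct import path from utils is an assumption, so treat it as illustrative rather than definitive:

    from malevich_coretools.utils import task_prepare, task_run, task_stop

    task_prepare("my-task-id", "my-cfg-id")                   # prepare the task, yielding an operation_id
    task_run("operation-id-from-prepare", with_logs=True)     # run the prepared task (cfg_id etc. can be overridden)
    task_stop("operation-id-from-prepare", with_logs=True)    # stop it and fetch the run logs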
@@ -1549,7 +1608,7 @@ def task_unschedule(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Info:
-    """unschedule task by
+    """unschedule task by `schedule_id` """
     operation = UnscheduleOperation(scheduleId=schedule_id)
     return f.post_manager_task_unschedule(
         operation, with_show=with_show, wait=wait, auth=auth, conn_url=conn_url
@@ -1566,14 +1625,14 @@ def task_stop(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Optional[AppLogs]:
-    """stop task by
+    """stop task by `operation_id`
 
     Args:
-        operation_id (str): operation_id
+        operation_id (str): `operation_id`, that returned from 'task_prepare', or just operation_id for runned task
         with_logs (bool): return logs for task if True
         info_url (Optional[str]): send also result to info_url if it exist and 'with_logs=True'
         with_show (bool): show result (like for each operation, default equals with_logs arg)
-        wait (bool): is it worth waiting for the result or immediately return operation_id
+        wait (bool): is it worth waiting for the result or immediately return `operation_id`
         auth (Optional[AUTH]): redefined auth if not None"""
     if with_show is None:
         with_show = with_logs
@@ -1607,7 +1666,7 @@ def task_resume(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Empty:
-    """resume task by
+    """resume task by `operation_id` """
     task = Operation(operationId=operation_id)
     return f.post_manager_task_resume(
         task, with_show=with_show, wait=wait, auth=auth, conn_url=conn_url
@@ -1622,7 +1681,7 @@ def task_pause(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Empty:
-    """pause task by
+    """pause task by `operation_id` """
     task = Operation(operationId=operation_id)
     return f.post_manager_task_pause(
         task, with_show=with_show, wait=wait, auth=auth, conn_url=conn_url
@@ -1640,7 +1699,7 @@ def app_stop(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Empty:
-    """stop app by
+    """stop app by `operation_id`, `task_id` and `app_id` """
     app_manage = AppManage(
         operationId=operation_id, taskId=task_id, appId=app_id, runId=run_id
     )
@@ -1660,7 +1719,7 @@ def app_resume(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Empty:
-    """resume app by
+    """resume app by `operation_id`, `task_id` and `app_id` """
     app_manage = AppManage(
         operationId=operation_id, taskId=task_id, appId=app_id, runId=run_id
     )
@@ -1680,7 +1739,7 @@ def app_pause(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Alias.Empty:
-    """pause app by
+    """pause app by `operation_id`, `task_id` and `app_id` """
     app_manage = AppManage(
         operationId=operation_id, taskId=task_id, appId=app_id, runId=run_id
     )
@@ -1702,7 +1761,7 @@ async def kafka_send(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> Union[Alias.Info, KafkaMsg]: # TODO add tl
-    """send msg to kafka for task by
+    """send msg to kafka for task by `operation_id`, `run_id` and `data` """
     assert data is not None, "data should exists in kafka_send"
     if run_id is None:
         run_id = rand_str(15)
@@ -1766,7 +1825,7 @@ def get_collection_to_df(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> pd.DataFrame:
-    """return df from collection by
+    """return df from collection by `id`, pagination: unlimited - `limit` < 0"""
     collection = get_collection(id, offset, limit, auth=auth, conn_url=conn_url)
     records = list(map(lambda x: json.loads(x.data), collection.docs))
     return pd.DataFrame.from_records(records)
@@ -1782,7 +1841,7 @@ def get_collection_by_name_to_df(
     auth: Optional[AUTH] = None,
     conn_url: Optional[str] = None,
 ) -> pd.DataFrame:
-    """return df from collection by
+    """return df from collection by `name` and mb also `operation_id` and `run_id` with which it was saved. raise if there are multiple collections, pagination: unlimited - `limit` < 0"""
     collection = get_collection_by_name(
         name, operation_id, run_id, offset, limit, auth=auth, conn_url=conn_url
     )
malevich-coretools-0.2.6/VERSION DELETED
@@ -1 +0,0 @@
-0.2.6