cmem-cmemc 23.1.3__py3-none-any.whl → 23.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cmem/cmemc/cli/__init__.py +9 -3
- cmem/cmemc/cli/_cmemc.zsh +44 -0
- cmem/cmemc/cli/commands/__init__.py +3 -0
- cmem/cmemc/cli/commands/admin.py +3 -1
- cmem/cmemc/cli/commands/client.py +173 -0
- cmem/cmemc/cli/commands/config.py +1 -1
- cmem/cmemc/cli/commands/dataset.py +161 -70
- cmem/cmemc/cli/commands/graph.py +10 -10
- cmem/cmemc/cli/commands/metrics.py +3 -3
- cmem/cmemc/cli/commands/project.py +90 -27
- cmem/cmemc/cli/commands/python.py +110 -29
- cmem/cmemc/cli/commands/query.py +6 -6
- cmem/cmemc/cli/commands/resource.py +5 -5
- cmem/cmemc/cli/commands/scheduler.py +4 -4
- cmem/cmemc/cli/commands/store.py +34 -31
- cmem/cmemc/cli/commands/user.py +27 -10
- cmem/cmemc/cli/commands/variable.py +364 -0
- cmem/cmemc/cli/commands/vocabulary.py +5 -5
- cmem/cmemc/cli/commands/workflow.py +118 -55
- cmem/cmemc/cli/commands/workspace.py +5 -5
- cmem/cmemc/cli/completion.py +393 -154
- cmem/cmemc/cli/context.py +20 -3
- cmem/cmemc/cli/manual_helper/multi_page.py +11 -6
- cmem/cmemc/cli/utils.py +80 -0
- {cmem_cmemc-23.1.3.dist-info → cmem_cmemc-23.3.0.dist-info}/METADATA +6 -8
- cmem_cmemc-23.3.0.dist-info/RECORD +35 -0
- {cmem_cmemc-23.1.3.dist-info → cmem_cmemc-23.3.0.dist-info}/WHEEL +1 -1
- cmem_cmemc-23.1.3.dist-info/RECORD +0 -32
- {cmem_cmemc-23.1.3.dist-info → cmem_cmemc-23.3.0.dist-info}/LICENSE +0 -0
- {cmem_cmemc-23.1.3.dist-info → cmem_cmemc-23.3.0.dist-info}/entry_points.txt +0 -0
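The bulk of the change set is the rewrite of the shell-completion helpers in cmem/cmemc/cli/completion.py (full diff below): completion callbacks now use Click 8's (ctx, param, incomplete) signature and return click.shell_completion.CompletionItem objects instead of plain strings or tuples, and new callbacks are added for project variables, OpenID clients and transformation/linking tasks. A minimal sketch of this completion style — the command, option and candidate values are hypothetical and not part of cmem-cmemc:

import click
from click.shell_completion import CompletionItem


def color_names(ctx, param, incomplete):
    """Offer completion items whose value matches the typed prefix."""
    known = [("red", "warm color"), ("green", "cool color"), ("blue", "cool color")]
    return [
        CompletionItem(value=name, help=description)
        for name, description in known
        if name.startswith(incomplete)
    ]


@click.command()
@click.option("--color", shell_complete=color_names)  # hypothetical option
def paint(color):
    """Hypothetical command used only to illustrate the callback wiring."""
    click.echo(color)

Registered this way, the shell calls color_names() on tab completion and can show the help text next to each candidate, which is how the rewritten callbacks below surface labels for datasets, graphs, projects and variables.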
cmem/cmemc/cli/completion.py
CHANGED
@@ -4,9 +4,9 @@ import os
 from collections import Counter
 from contextlib import suppress
 
-import requests
-from bs4 import BeautifulSoup
 from click import Context
+from click.parser import split_arg_string
+from click.shell_completion import CompletionItem
 from natsort import natsorted, ns
 from prometheus_client.parser import text_string_to_metric_families
 
@@ -14,10 +14,11 @@ from cmem.cmemc.cli.context import CONTEXT
 from cmem.cmemc.cli.utils import (
     get_graphs,
     metric_get_labels,
-    metrics_get_dict, struct_to_table
+    metrics_get_dict, struct_to_table, get_published_packages
 )
 from cmem.cmempy.dp.admin import get_prometheus_data
 from cmem.cmempy.health import get_complete_status_info
+from cmem.cmempy.keycloak.client import list_open_id_clients
 from cmem.cmempy.keycloak.group import list_groups
 from cmem.cmempy.keycloak.user import list_users, get_user_by_username, user_groups
 from cmem.cmempy.plugins.marshalling import get_marshalling_plugins
@@ -30,9 +31,12 @@ from cmem.cmempy.workspace import (
 )
 from cmem.cmempy.workspace.projects.project import get_projects
 from cmem.cmempy.workspace.projects.resources import get_all_resources
+from cmem.cmempy.workspace.projects.variables import get_all_variables
 from cmem.cmempy.workspace.python import list_packages
 from cmem.cmempy.workspace.search import list_items
 
+from cmem.cmempy.workspace.projects.datasets.dataset import get_dataset
+
 SORT_BY_KEY = 0
 SORT_BY_DESC = 1
 
@@ -68,7 +72,6 @@ def _finalize_completion(
         return candidates
     # remove duplicates
     candidates = list(set(candidates))
-
     if isinstance(candidates[0], str):
         # list of strings filtering and sorting
         filtered_candidates = [
@@ -96,22 +99,56 @@ def _finalize_completion(
             or str(element[1]).lower().find(incomplete) != -1
         ]
         if nat_sort:
-
+            sorted_list = natsorted(
                 seq=filtered_candidates,
                 key=lambda k: k[sort_by],  # type: ignore
                 alg=ns.IGNORECASE,
                 reverse=reverse
             )
-
-
-
-
-
+        else:
+            sorted_list = sorted(
+                filtered_candidates,
+                key=lambda x: (str(x[sort_by]).casefold(), str(x[sort_by])),
+                reverse=reverse
+            )
+        return [
+            CompletionItem(
+                value=element[0].replace(":", r"\:"),
+                help=element[1]
+            ) for element in sorted_list
+        ]
     raise ValueError(
         "candidates should be a list of strings or a list of tuples."
     )
 
 
+def _get_completion_args(incomplete):
+    """get completion args
+
+    This is a workaround to get partial tuple options in a completion function
+    see https://github.com/pallets/click/issues/2597
+    """
+    args = split_arg_string(os.environ["COMP_WORDS"])
+    if incomplete and len(args) > 0 and args[len(args)-1] == incomplete:
+        args.pop()
+    return args
+
+
+def _ignore_option(option, params):
+    """
+    Check if the given 'option' is present in the 'params' dictionary
+    or any of its values.
+    """
+    ignore_project_id = False
+    for _ in params:
+        if hasattr(params[_], '__iter__') and option in params[_]:
+            ignore_project_id = True
+        elif option == params[_]:
+            ignore_project_id = True
+
+    return ignore_project_id
+
+
 def add_metadata_parameter(list_=None):
     """Extend a list with metadata keys and key descriptions."""
     if list_ is None:
@@ -127,19 +164,56 @@ def add_metadata_parameter(list_=None):
     return list_
 
 
-def
+def add_read_only_and_uri_property_parameters(list_=None):
+    """Extend a list with readonly/uriProperty keys and key descriptions."""
+    if list_ is None:
+        list_ = []
+    list_.append(
+        (
+            "readOnly",
+            "Read-only: If enabled, all write operations using this dataset object "
+            "will fail, e.g. when used as output in workflows or transform/linking "
+            "executions. This will NOT protect the underlying resource in general, "
+            "e.g. files, databases or knowledge graphs could still be changed "
+            "externally."
+        )
+    )
+    list_.append(
+        (
+            "uriProperty",
+            "URI attribute: When reading data from the dataset, the specified "
+            "attribute will be used to get the URIs of the entities. "
+            "When writing to a dataset, the specified attribute will be automatically "
+            "added to the schema as well as the generated entity URIs will be added as "
+            "values for each entity. If the entered value is not a valid URI, "
+            "it will be converted to a valid URI."
+        )
+    )
+    return list_
+
+
+def dataset_parameter(ctx, param, incomplete):
     """Prepare a list of dataset parameters for a dataset type."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
+    args = _get_completion_args(incomplete)
     incomplete = incomplete.lower()
     # look if cursor is in value position of the -p option and
     # return nothing in case it is (values are not completed atm)
     if args[len(args) - 2] in ("-p", "--parameter"):
         return []
     # try to determine the dataset type
-    dataset_type =
-
-
-
+    dataset_type = ctx.params.get('dataset_type')
+    if dataset_type is None:
+        try:
+            dataset_id = ctx.args[0]
+            project = get_dataset(
+                project_name=dataset_id.split(":")[0],
+                dataset_name=dataset_id.split(":")[1]
+            )
+            dataset_type = project["data"]["type"]
+        except IndexError:
+            pass
+
     # without type, we know nothing
     if dataset_type is None:
         return []
@@ -151,6 +225,8 @@ def dataset_parameter(ctx, args, incomplete)
         description = properties[key]["description"]
         option = f"{title}: {description}"
         options.append((key, option))
+
+    options = add_read_only_and_uri_property_parameters(options)
     # sorting: metadata on top, then parameter per key
     options = sorted(options, key=lambda k: k[0].lower())
     options = add_metadata_parameter(options)
@@ -161,12 +237,13 @@ def dataset_parameter(ctx, args, incomplete):
             or key[1].lower().find(incomplete.lower()) != -1
         )
     ]
-
+
+    return [CompletionItem(value=option[0], help=option[1]) for option in options]
 
 
-def dataset_types(ctx,
+def dataset_types(ctx, param, incomplete):
     """Prepare a list of dataset types."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     incomplete = incomplete.lower()
     options = []
     plugins = get_task_plugins()
@@ -184,31 +261,33 @@ def dataset_types(ctx, args, incomplete):
                 option
             )
         )
-
-
+    return _finalize_completion(
+        candidates=options,
+        incomplete=incomplete,
+        sort_by=SORT_BY_DESC
+    )
 
 
-def dataset_ids(ctx,
+def dataset_ids(ctx, param, incomplete):
     """Prepare a list of projectid:datasetid dataset identifier."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     results = list_items(item_type="dataset")
     datasets = results["results"]
     for _ in datasets:
         options.append(
             (
-                _[
+                _['projectId'] + ":" + _['id'],
                 _["label"]
             )
         )
     return _finalize_completion(
         candidates=options,
-        incomplete=incomplete
-        sort_by=SORT_BY_DESC
+        incomplete=incomplete
     )
 
 
-def dataset_list_filter(ctx,
+def dataset_list_filter(ctx, param, incomplete):
     """Prepare a list of filter names and values for dataset list filter."""
     filter_names = [
         (
@@ -239,17 +318,18 @@ def dataset_list_filter(ctx, args, incomplete):
         )
     ]
     options = []
+    args = _get_completion_args(incomplete)
     if args[len(args) - 1] == "--filter":
         options = _finalize_completion(
             candidates=filter_names,
             incomplete=incomplete
         )
     if args[len(args) - 1] == "type":
-        options = dataset_types(ctx,
+        options = dataset_types(ctx, param, incomplete)
     if args[len(args) - 1] == "project":
-        options = project_ids(ctx,
+        options = project_ids(ctx, param, incomplete)
    if args[len(args) - 1] == "tag":
-        options = tag_labels(ctx,
+        options = tag_labels(ctx, param, incomplete, "dataset")
     if args[len(args) - 1] == "regex":
         options = _finalize_completion(
             candidates=filter_regex,
@@ -258,9 +338,9 @@ def dataset_list_filter(ctx, args, incomplete):
     return options
 
 
-def resource_ids(ctx,
+def resource_ids(ctx, param, incomplete):
     """Prepare a list of projectid:resourceid resource identifier."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     for _ in get_all_resources():
         options.append(
@@ -275,9 +355,9 @@ def resource_ids(ctx, args, incomplete):
     )
 
 
-def scheduler_ids(ctx,
+def scheduler_ids(ctx, param, incomplete):
     """Prepare a list of projectid:schedulerid scheduler identifier."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     schedulers = list_items(
         item_type="task",
@@ -288,11 +368,11 @@ def scheduler_ids(ctx, args, incomplete):
         }]
     )["results"]
     for _ in schedulers:
-        if _["projectId"] + ":" + _["id"]
+        if _ignore_option(_["projectId"] + ":" + _["id"], ctx.params):
             continue
         options.append(
             (
-                _["projectId"] +
+                _["projectId"] + ":" + _["id"],
                 _["label"]
             )
         )
@@ -303,9 +383,9 @@ def scheduler_ids(ctx, args, incomplete):
     )
 
 
-def metric_ids(ctx,
+def metric_ids(ctx, param, incomplete):
     """Prepare a list of metric identifier."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
 
     data = get_prometheus_data().text
@@ -323,13 +403,13 @@ def metric_ids(ctx, args, incomplete):
     )
 
 
-def metric_label_filter(ctx,
+def metric_label_filter(ctx, param, incomplete):
     """Prepare a list of label name or values."""
-
-
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
+    args = _get_completion_args(incomplete)
     incomplete = incomplete.lower()
     options = []
-    metric_id = args[
+    metric_id = ctx.args[0]
     labels = metric_get_labels(metrics_get_dict()[metric_id])
     if args[len(args) - 1] in "--filter":
         # we are in the name position
@@ -345,9 +425,9 @@ def metric_label_filter(ctx, args, incomplete):
     )
 
 
-def vocabularies(ctx,
+def vocabularies(ctx, param, incomplete, filter_="all"):
     """Prepare a list of vocabulary graphs for auto-completion."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     try:
         vocabs = get_vocabularies(filter_=filter_)
@@ -356,9 +436,9 @@ def vocabularies(ctx, args, incomplete, filter_="all"):
         return []
     for _ in vocabs:
         url = _["iri"]
-        if url
+        if _ignore_option(url, ctx.params):
             continue
-        url = _["iri"]
+        url = _["iri"]
         try:
             label = _["label"]["title"]
         except (KeyError, TypeError):
@@ -371,14 +451,14 @@ def vocabularies(ctx, args, incomplete, filter_="all"):
     )
 
 
-def installed_vocabularies(ctx,
+def installed_vocabularies(ctx, param, incomplete):
     """Prepare a list of installed vocabulary graphs."""
-    return vocabularies(ctx,
+    return vocabularies(ctx, param, incomplete, filter_="installed")
 
 
-def installable_vocabularies(ctx,
+def installable_vocabularies(ctx, param, incomplete):
     """Prepare a list of installable vocabulary graphs."""
-    return vocabularies(ctx,
+    return vocabularies(ctx, param, incomplete, filter_="installable")
 
 
 def file_list(incomplete="", suffix="", description="", prefix=""):
@@ -397,12 +477,12 @@ def file_list(incomplete="", suffix="", description="", prefix=""):
     )
 
 
-def workflow_io_ids(ctx,
+def workflow_io_ids(ctx, param, incomplete):
     """Prepare a list of io workflows."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     for _ in get_workflows_io():
-        workflow_id = _["projectId"] +
+        workflow_id = _["projectId"] + ":" + _["id"]
         label = _["label"]
         options.append((workflow_id, label))
     return _finalize_completion(
@@ -412,7 +492,7 @@ def workflow_io_ids(ctx, args, incomplete):
     )
 
 
-def replay_files(ctx,
+def replay_files(ctx, param, incomplete):
     """Prepare a list of JSON replay files."""
     return file_list(
         incomplete=incomplete,
@@ -421,9 +501,9 @@ def replay_files(ctx, args, incomplete):
     )
 
 
-def installed_package_names(ctx,
+def installed_package_names(ctx, param, incomplete):
     """Prepare a list of installed packages."""
-    CONTEXT.set_connection_from_args(
+    CONTEXT.set_connection_from_args(ctx.find_root().params)
     options = []
     packages = list_packages()
     for _ in packages:
@@ -440,31 +520,14 @@ def installed_package_names(ctx, args, incomplete):
     )
 
 
-def published_package_names(ctx,
+def published_package_names(ctx, param, incomplete):
     """List of plugin packages scraped from pypi.org."""
     options = []
-
-    soup = BeautifulSoup(
-        requests.get(url, timeout=5).content,
-        "html.parser"
-    )
-    packages = soup.find_all("a", class_="package-snippet")
-    for package in packages:
-        name = package.findChildren(
-            class_="package-snippet__name"
-        )[0].getText()
-        if name == "cmem-plugin-base":
-            continue
-        description = package.findChildren(
-            class_="package-snippet__description"
-        )[0].getText()
-        version = package.findChildren(
-            class_="package-snippet__version"
-        )[0].getText()
+    for _ in get_published_packages():
         options.append(
             (
-                name,
-                f"{version}: {description}"
+                _.name,
+                f"{_.version}: {_.description}"
             )
         )
 
@@ -475,7 +538,7 @@ def published_package_names(ctx, args, incomplete):
     )
 
 
-def python_package_files(ctx,
+def python_package_files(ctx, param, incomplete):
     """Prepare a list of acceptable python package files."""
     return file_list(
         incomplete=incomplete,
@@ -485,14 +548,14 @@ def python_package_files(ctx, args, incomplete):
     )
 
 
-def installable_packages(ctx,
+def installable_packages(ctx, param, incomplete):
     """Installable packages from files and pypi.org."""
     return python_package_files(
-        ctx,
-        ctx,
+        ctx, param, incomplete) + published_package_names(
+        ctx, param, incomplete)
 
 
-def workflow_io_input_files(ctx,
+def workflow_io_input_files(ctx, param, incomplete):
     """Prepare a list of acceptable workflow io input files."""
     return file_list(
         incomplete=incomplete,
@@ -506,10 +569,51 @@ def workflow_io_input_files(ctx, args, incomplete):
         incomplete=incomplete,
         suffix=".json",
         description="JSON Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".xlsx",
+        description="Excel Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".txt",
+        description="Text Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".zip",
+        description="Multi CSV Dataset resource"
     )
 
 
-def
+def workflow_io_input_mimetypes(ctx, args, incomplete):
+    """Prepare a list of acceptable workflow io input mimetypes."""
+    return file_list(
+        incomplete=incomplete,
+        suffix=".csv",
+        description="CSV Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".xml",
+        description="XML Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".json",
+        description="JSON Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".xlsx",
+        description="Excel Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".txt",
+        description="Text Dataset resource"
+    ) + file_list(
+        incomplete=incomplete,
+        suffix=".zip",
+        description="Multi CSV Dataset resource"
+    )
+
+
+def workflow_io_output_files(ctx, param, incomplete):
     """Prepare a list of acceptable workflow io output files."""
     return file_list(
         incomplete=incomplete,
@@ -538,7 +642,7 @@ def workflow_io_output_files(ctx, args, incomplete):
     )
 
 
-def dataset_files(ctx,
+def dataset_files(ctx, param, incomplete):
     """Prepare a list of SPARQL files."""
     return file_list(
         incomplete=incomplete,
@@ -571,7 +675,7 @@ def dataset_files(ctx, args, incomplete):
     )
 
 
-def graph_backup_files(ctx,
+def graph_backup_files(ctx, param, incomplete):
     """Prepare a list of workspace files."""
     return file_list(
         incomplete=incomplete,
@@ -580,7 +684,7 @@ def graph_backup_files(ctx, args, incomplete):
     )
 
 
-def project_files(ctx,
+def project_files(ctx, param, incomplete):
     """Prepare a list of workspace files."""
     return file_list(
         incomplete=incomplete,
@@ -589,7 +693,7 @@ def project_files(ctx, args, incomplete):
     )
 
 
-def ini_files(ctx,
+def ini_files(ctx, param, incomplete):
     """Prepare a list of workspace files."""
     return file_list(
        incomplete=incomplete,
@@ -598,7 +702,7 @@ def ini_files(ctx, args, incomplete):
     )
 
 
-def workspace_files(ctx,
+def workspace_files(ctx, param, incomplete):
     """Prepare a list of workspace files."""
     return file_list(
         incomplete=incomplete,
@@ -607,7 +711,7 @@ def workspace_files(ctx, args, incomplete):
     )
 
 
-def sparql_files(ctx,
+def sparql_files(ctx, param, incomplete):
     """Prepare a list of SPARQL files."""
     return file_list(
         incomplete=incomplete,
@@ -620,7 +724,7 @@ def sparql_files(ctx, args, incomplete):
     )
 
 
-def triple_files(ctx,
+def triple_files(ctx, param, incomplete):
     """Prepare a list of triple files."""
     return file_list(
         incomplete=incomplete,
@@ -633,14 +737,15 @@ def triple_files(ctx, args, incomplete):
     )
 
 
-def placeholder(ctx,
+def placeholder(ctx, param, incomplete):
     """Prepare a list of placeholder from the to-be executed queries."""
     # look if cursor is in value position of the -p option and
     # return nothing in case it is (values are not completed atm)
+    args = _get_completion_args(incomplete)
     if args[len(args) - 2] in ("-p", "--parameter"):
         return []
     # setup configuration
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     # extract placeholder from given queries in the command line
     options = []
     for num, arg in enumerate(args):
@@ -660,12 +765,12 @@ def placeholder(ctx, args, incomplete):
     )
 
 
-def remote_queries(ctx,
+def remote_queries(ctx, param, incomplete):
     """Prepare a list of query URIs."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     for _, query in QUERY_CATALOG.get_queries().items():
-        url = query.short_url
+        url = query.short_url
         label = query.label
         options.append((url, label))
     return _finalize_completion(
@@ -675,24 +780,24 @@ def remote_queries(ctx, args, incomplete):
     )
 
 
-def remote_queries_and_sparql_files(ctx,
+def remote_queries_and_sparql_files(ctx, param, incomplete):
     """Prepare a list of named queries, query files and directories."""
-    remote = remote_queries(ctx,
-    files = sparql_files(ctx,
+    remote = remote_queries(ctx, param, incomplete)
+    files = sparql_files(ctx, param, incomplete)
     return remote + files
 
 
-def workflow_ids(ctx,
+def workflow_ids(ctx, param, incomplete):
     """Prepare a list of projectid:taskid workflow identifier."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     workflows = list_items(item_type="workflow")["results"]
     options = []
     for _ in workflows:
         workflow = _["projectId"] + ":" + _["id"]
         label = _["label"]
-        if workflow
+        if _ignore_option(workflow, ctx.params):
             continue
-        options.append((workflow
+        options.append((workflow, label))
     return _finalize_completion(
         candidates=options,
         incomplete=incomplete,
@@ -700,19 +805,26 @@ def workflow_ids(ctx, args, incomplete):
     )
 
 
-def marshalling_plugins(ctx,
+def marshalling_plugins(ctx, param, incomplete):
     """Prepare a list of supported workspace/project import/export plugins."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = get_marshalling_plugins()
     if "description" in options[0].keys():
-
-
-
+        final_options = [(_["id"], _["description"]) for _ in options]
+    else:
+        # in case, no descriptions are available, labels are fine as well
+        final_options = [(_["id"], _["label"]) for _ in options]
+
+    return _finalize_completion(
+        candidates=final_options,
+        incomplete=incomplete,
+        sort_by=SORT_BY_DESC
+    )
 
 
-def project_ids(ctx,
+def project_ids(ctx, param, incomplete):
     """Prepare a list of project IDs for auto-completion."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     try:
         projects = get_projects()
     except Exception:
@@ -723,7 +835,7 @@ def project_ids(ctx, args, incomplete):
         project_id = _["name"]
         label = _["metaData"]["label"]
         # do not add project if already in the command line
-        if project_id
+        if _ignore_option(project_id, ctx.params):
             continue
         options.append((project_id, label))
     return _finalize_completion(
@@ -733,9 +845,9 @@ def project_ids(ctx, args, incomplete):
     )
 
 
-def graph_uris(ctx,
+def graph_uris(ctx, param, incomplete, writeable=True, readonly=True):
     """Prepare a list of graphs for auto-completion."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     try:
         graphs = get_graphs()
     except Exception:
@@ -746,9 +858,9 @@ def graph_uris(ctx, args, incomplete, writeable=True, readonly=True):
         iri = _["iri"]
         label = _["label"]["title"]
         # do not add graph if already in the command line
-        if iri
+        if _ignore_option(iri, ctx.params):
             continue
-        options.append((iri
+        options.append((iri, label))
     return _finalize_completion(
         candidates=options,
         incomplete=incomplete,
@@ -756,15 +868,15 @@ def graph_uris(ctx, args, incomplete, writeable=True, readonly=True):
     )
 
 
-def writable_graph_uris(ctx,
+def writable_graph_uris(ctx, param, incomplete):
     """Prepare a list of writable graphs for auto-completion."""
-    return graph_uris(ctx,
+    return graph_uris(ctx, param, incomplete, writeable=True, readonly=False)
 
 
-def connections(ctx,
+def connections(ctx, param, incomplete):
     """Prepare a list of config connections for auto-completion."""
     # since ctx does not have an obj here, we re-create the object
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     for section in CONTEXT.config.sections():
         options.append(section)
@@ -774,7 +886,7 @@ def connections(ctx, args, incomplete):
     )
 
 
-def graph_export_templates(ctx,
+def graph_export_templates(ctx, param, incomplete):
     """Prepare a list of example templates for the graph export command."""
     examples = [
         (
@@ -800,7 +912,7 @@ def graph_export_templates(ctx, args, incomplete):
     )
 
 
-def project_export_templates(ctx,
+def project_export_templates(ctx, param, incomplete):
     """Prepare a list of example templates for the project export command."""
     examples = [
         (
@@ -821,7 +933,7 @@ def project_export_templates(ctx, args, incomplete):
     )
 
 
-def workspace_export_templates(ctx,
+def workspace_export_templates(ctx, param, incomplete):
     """Prepare a list of example templates for the workspace export command."""
     examples = [
         (
@@ -842,7 +954,7 @@ def workspace_export_templates(ctx, args, incomplete):
     )
 
 
-def query_status_filter(ctx,
+def query_status_filter(ctx, param, incomplete):
     """Prepare a list of filter names and values for query status filter."""
     filter_names = [
         (
@@ -963,7 +1075,7 @@ def query_status_filter(ctx, args, incomplete):
             "Queries of unknown type."
         )
     ]
-
+    args = _get_completion_args(incomplete)
     last_argument = args[len(args) - 1]
     options = None
     if last_argument == "--filter":
@@ -993,11 +1105,11 @@ def query_status_filter(ctx, args, incomplete):
             incomplete=incomplete
         )
     if last_argument == "user":
-        options = query_account_iris(ctx,
+        options = query_account_iris(ctx, param, incomplete)
     if last_argument == "trace-id":
-        options = query_trace_ids(ctx,
+        options = query_trace_ids(ctx, param, incomplete)
     if last_argument == "graph":
-        options = query_graphs(ctx,
+        options = query_graphs(ctx, param, incomplete)
 
     if not options:
         raise ValueError(
@@ -1006,9 +1118,9 @@ def query_status_filter(ctx, args, incomplete):
     return options
 
 
-def query_account_iris(ctx,
+def query_account_iris(ctx, param, incomplete):
     """Prepare a list account IRIs from the query status."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     accounts = {}
     for _ in get_query_status():
         if _["user"] in accounts:
@@ -1016,7 +1128,7 @@ def query_account_iris(ctx, args, incomplete):
         else:
             accounts[_["user"]] = 1
     options = [
-        (account
+        (account, f"{count} queries")
         for account, count in accounts.items()
     ]
     return _finalize_completion(
@@ -1025,9 +1137,9 @@ def query_account_iris(ctx, args, incomplete):
     )
 
 
-def query_trace_ids(ctx,
+def query_trace_ids(ctx, param, incomplete):
     """Prepare a list trace IDs from the query status."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = Counter(
         [query["traceId"] for query in get_query_status()]
     ).most_common()
@@ -1038,9 +1150,9 @@ def query_trace_ids(ctx, args, incomplete):
     )
 
 
-def query_graphs(ctx,
+def query_graphs(ctx, param, incomplete):
     """Prepare a list graph URLs from the query status."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = Counter()
     for query in get_query_status():
         for graph in query.get("affectedGraphs", []):
@@ -1052,7 +1164,7 @@ def query_graphs(ctx, args, incomplete):
     )
 
 
-def graph_list_filter(ctx,
+def graph_list_filter(ctx, param, incomplete):
     """Prepare a list of filter names and values for graph list filter."""
     filter_names = [
         (
@@ -1076,7 +1188,7 @@ def graph_list_filter(ctx, args, incomplete):
             "List only graphs which ARE writeable for the current user."
         )
     ]
-
+    args = _get_completion_args(incomplete)
     options = []
     if args[len(args) - 1] == "--filter":
         options = _finalize_completion(
@@ -1089,11 +1201,72 @@ def graph_list_filter(ctx, args, incomplete):
             incomplete=incomplete
         )
     if args[len(args) - 1] == "imported-by":
-        options = graph_uris(ctx,
+        options = graph_uris(ctx, param, incomplete)
     return options
 
 
-def
+def variable_ids(ctx, param, incomplete):
+    """Prepare a list of variables IDs for auto-completion."""
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
+    try:
+        variables = get_all_variables()
+    except Exception:
+        # if something went wrong, die silently
+        return []
+    options = []
+    for _ in variables:
+        variable_id = _["id"]
+        label = _.get("description", "").partition('\n')[0]
+        if label == "":
+            label = f"Current value: {_['value']}"
+        # do not add project if already in the command line
+        if _ignore_option(variable_id, ctx.params):
+            continue
+        options.append((variable_id, label))
+    return _finalize_completion(
+        candidates=options,
+        incomplete=incomplete,
+        sort_by=SORT_BY_KEY
+    )
+
+
+def variable_list_filter(ctx, param, incomplete):
+    """Prepare a list of filter names and values for variable list filter."""
+    filter_names = [
+        (
+            "project",
+            "Filter for variables from a specific project."
+        ),
+        (
+            "regex",
+            "Filter for variables with a regular expression search over "
+            "id, value and description."
+        )
+    ]
+    filter_values_regex = [
+        (
+            "ending$",
+            "Variables name ends with 'ending'."
+        ),
+        (
+            "^starting",
+            "Variables name starts with 'starting'."
+        )
+    ]
+    args = _get_completion_args(incomplete)
+    if args[len(args) - 1] == "--filter":
+        return [CompletionItem(value=f[0], help=f[1]) for f in filter_names]
+    if args[len(args) - 1] == "regex":
+        return _finalize_completion(
+            candidates=filter_values_regex,
+            incomplete=incomplete
+        )
+    if args[len(args) - 1] == "project":
+        return project_ids(ctx, param, incomplete)
+    return []
+
+
+def resource_list_filter(ctx, param, incomplete):
     """Prepare a list of filter names and values for resource list filter."""
     filter_names = [
         (
@@ -1116,12 +1289,12 @@ def resource_list_filter(ctx, args, incomplete):
             "File resources which name has a date from 2021-10 in it"
         )
     ]
-
+    args = _get_completion_args(incomplete)
     if args[len(args) - 1] == "--filter":
-        return filter_names
+        return [CompletionItem(value=f[0], help=f[1]) for f in filter_names]
     if args[len(args) - 1] == "project":
         return _finalize_completion(
-            candidates=project_ids(ctx,
+            candidates=project_ids(ctx, param, incomplete),
            incomplete=incomplete
         )
     if args[len(args) - 1] == "regex":
@@ -1132,7 +1305,7 @@ def resource_list_filter(ctx, args, incomplete):
     return []
 
 
-def workflow_list_filter(ctx,
+def workflow_list_filter(ctx, param, incomplete):
     """Prepare a list of filter names and values for workflow list filter."""
     filter_names = [
         (
@@ -1181,23 +1354,28 @@ def workflow_list_filter(ctx, args, incomplete):
         )
     ]
     options = []
+    args = _get_completion_args(incomplete)
     if args[len(args) - 1] == "--filter":
         options = filter_names
     if args[len(args) - 1] == "io":
         options = filter_values_io
     if args[len(args) - 1] == "project":
-        options = project_ids(ctx,
+        options = project_ids(ctx, param, incomplete)
     if args[len(args) - 1] == "tag":
-        options = tag_labels(ctx,
+        options = tag_labels(ctx, param, incomplete, "workflow")
     if args[len(args) - 1] == "regex":
         options = filter_regex
+
+    if len(options) > 0 and isinstance(options[0], CompletionItem):
+        return options
+
     return _finalize_completion(
         candidates=options,
         incomplete=incomplete
     )
 
 
-def tag_labels(ctx,
+def tag_labels(ctx, param, incomplete, item_type):
     """Prepare a list of tag labels for a item_type."""
     datasets = list_items(item_type=item_type)
     options = []
@@ -1218,7 +1396,7 @@ def tag_labels(ctx, args, incomplete, item_type):
     )
 
 
-def status_keys(ctx,
+def status_keys(ctx, param, incomplete):
     """Prepare a list of status keys for the admin status command."""
     options = ["all"]
     os.environ["CMEMPY_IS_CHATTY"] = "false"
@@ -1231,9 +1409,9 @@ def status_keys(ctx, args, incomplete):
     )
 
 
-def user_ids(ctx,
+def user_ids(ctx, param, incomplete):
     """Prepare a list of username for admin update/delete/password command."""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     options = []
     for _ in list_users():
         options.append(
@@ -1248,17 +1426,17 @@ def user_ids(ctx, args, incomplete):
     )
 
 
-def user_group_ids(ctx: Context,
+def user_group_ids(ctx: Context, param, incomplete):
     """Prepare a list of group name for admin user update
     --unassign-group/--assign-group parameter"""
-    CONTEXT.
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
     if not ctx.args:
         return []
     users = get_user_by_username(username=str(ctx.args[0]))
     if not users:
         return []
 
-    if
+    if param.name == "unassign_group":
         groups = user_groups(user_id=users[0]["id"])
     else:
         user_group_names = (
@@ -1274,10 +1452,71 @@ def user_group_ids(ctx: Context, args, incomplete):
             _["name"]
         )
     )
-
-
-
-
+
+    for arg in ctx.params["assign_group"]:
+        with suppress(ValueError):
+            options.remove(arg)
+    for arg in ctx.params["unassign_group"]:
+        with suppress(ValueError):
+            options.remove(arg)
+
+    return _finalize_completion(
+        candidates=options,
+        incomplete=incomplete,
+        sort_by=SORT_BY_DESC
+    )
+
+
+def client_ids(ctx, param, incomplete):
+    """Prepare a list of client ids for admin secret and update command."""
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
+    options = []
+    for _ in list_open_id_clients():
+        options.append(
+            (
+                _["clientId"]
+            )
+        )
+    return _finalize_completion(
+        candidates=options,
+        incomplete=incomplete,
+        sort_by=SORT_BY_DESC
+    )
+
+
+def transformation_task_ids(ctx, param, incomplete):
+    """Prepare a list of projectId:transformation task identifier."""
+    CONTEXT.set_connection_from_params(ctx.find_root().params)
+    options = []
+    results = list_items(item_type="transform")
+    datasets = results["results"]
+    for _ in datasets:
+        options.append(
+            (
+                _["projectId"] + ":" + _["id"],
+                _["label"]
+            )
+        )
+    return _finalize_completion(
+        candidates=options,
+        incomplete=incomplete,
+        sort_by=SORT_BY_DESC
+    )
+
+
+def linking_task_ids(ctx, args, incomplete):
+    """Prepare a list of projectId:linking task identifier."""
+    CONTEXT.set_connection_from_args(args)
+    options = []
+    results = list_items(item_type="linking")
+    datasets = results["results"]
+    for _ in datasets:
+        options.append(
+            (
+                _["projectId"] + r"\:" + _["id"],
+                _["label"]
+            )
+        )
     return _finalize_completion(
         candidates=options,
         incomplete=incomplete,
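Two new private helpers carry most of the rewritten callbacks: _get_completion_args() re-parses the shell's COMP_WORDS environment variable with split_arg_string() to recover the words already typed (its docstring points to https://github.com/pallets/click/issues/2597), and _ignore_option() drops candidates that already appear among the command's parameters. The pypi.org HTML scraping with requests/BeautifulSoup is gone as well; published_package_names() now relies on get_published_packages() from cmem.cmemc.cli.utils. A hedged sketch of how a callback can apply the COMP_WORDS idea — the callback name and the suggested keys are illustrative, not taken from the package:

import os

from click.parser import split_arg_string


def completion_args(incomplete):
    """Words typed so far, without the trailing token that is being completed."""
    args = split_arg_string(os.environ.get("COMP_WORDS", ""))  # hedged: .get() instead of direct lookup
    if incomplete and args and args[-1] == incomplete:
        args.pop()
    return args


def parameter_keys(ctx, param, incomplete):
    """Illustrative callback: no suggestions while a -p value is being typed."""
    args = completion_args(incomplete)
    if args and args[-1] in ("-p", "--parameter"):
        return []  # cursor is in the value position of -p/--parameter
    keys = ["readOnly", "uriProperty"]  # keys shown in the diff above
    return [key for key in keys if key.lower().startswith(incomplete.lower())]

As the docstring in the diff notes, re-parsing COMP_WORDS works around Click not exposing partially entered tuple options to a completion function, which is why the filter-style callbacks (dataset, graph, resource, variable and workflow list filters) all call the helper before deciding which candidates to offer.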