cmem-cmemc 25.5.0rc1__py3-none-any.whl → 26.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cmem_cmemc/cli.py +11 -6
- cmem_cmemc/command.py +1 -1
- cmem_cmemc/command_group.py +59 -31
- cmem_cmemc/commands/acl.py +403 -26
- cmem_cmemc/commands/admin.py +10 -10
- cmem_cmemc/commands/client.py +12 -5
- cmem_cmemc/commands/config.py +106 -12
- cmem_cmemc/commands/dataset.py +163 -172
- cmem_cmemc/commands/file.py +509 -0
- cmem_cmemc/commands/graph.py +200 -72
- cmem_cmemc/commands/graph_imports.py +12 -5
- cmem_cmemc/commands/graph_insights.py +157 -53
- cmem_cmemc/commands/metrics.py +15 -9
- cmem_cmemc/commands/migration.py +12 -4
- cmem_cmemc/commands/package.py +548 -0
- cmem_cmemc/commands/project.py +157 -22
- cmem_cmemc/commands/python.py +9 -5
- cmem_cmemc/commands/query.py +119 -25
- cmem_cmemc/commands/scheduler.py +6 -4
- cmem_cmemc/commands/store.py +2 -1
- cmem_cmemc/commands/user.py +124 -24
- cmem_cmemc/commands/validation.py +15 -10
- cmem_cmemc/commands/variable.py +264 -61
- cmem_cmemc/commands/vocabulary.py +31 -17
- cmem_cmemc/commands/workflow.py +21 -11
- cmem_cmemc/completion.py +126 -109
- cmem_cmemc/context.py +40 -10
- cmem_cmemc/exceptions.py +8 -2
- cmem_cmemc/manual_helper/graph.py +2 -2
- cmem_cmemc/manual_helper/multi_page.py +5 -7
- cmem_cmemc/object_list.py +234 -7
- cmem_cmemc/placeholder.py +2 -2
- cmem_cmemc/string_processor.py +153 -4
- cmem_cmemc/title_helper.py +50 -0
- cmem_cmemc/utils.py +9 -8
- {cmem_cmemc-25.5.0rc1.dist-info → cmem_cmemc-26.1.0rc1.dist-info}/METADATA +7 -6
- cmem_cmemc-26.1.0rc1.dist-info/RECORD +62 -0
- {cmem_cmemc-25.5.0rc1.dist-info → cmem_cmemc-26.1.0rc1.dist-info}/WHEEL +1 -1
- cmem_cmemc/commands/resource.py +0 -220
- cmem_cmemc-25.5.0rc1.dist-info/RECORD +0 -61
- {cmem_cmemc-25.5.0rc1.dist-info → cmem_cmemc-26.1.0rc1.dist-info}/entry_points.txt +0 -0
- {cmem_cmemc-25.5.0rc1.dist-info → cmem_cmemc-26.1.0rc1.dist-info}/licenses/LICENSE +0 -0
cmem_cmemc/commands/config.py
CHANGED

@@ -1,34 +1,128 @@
 """configuration commands for cmem command line interface."""
 
 import click
-from click import
+from click import Context
 
 from cmem_cmemc.command import CmemcCommand
 from cmem_cmemc.command_group import CmemcGroup
-from cmem_cmemc.context import KNOWN_CONFIG_KEYS, ApplicationContext
+from cmem_cmemc.context import KNOWN_CONFIG_KEYS, ApplicationContext, build_caption
+from cmem_cmemc.exceptions import CmemcError
+from cmem_cmemc.object_list import (
+    DirectValuePropertyFilter,
+    ObjectList,
+    compare_regex,
+)
+
+
+def get_connections(ctx: Context) -> list[dict]:
+    """Get connections for object list"""
+    app = ctx.obj
+    if app is not None:
+        config = app.get_config()
+    else:
+        # when in completion mode, obj is not set :-(
+        # return empty list
+        return []
+
+    connections = []
+
+    for section_name in config.sections():
+        if section_name != "DEFAULT":
+            section = config[section_name]
+            connections.append(
+                {
+                    "name": section_name,
+                    "base_uri": section.get("CMEM_BASE_URI", "-"),
+                    "grant_type": section.get("OAUTH_GRANT_TYPE", "-"),
+                }
+            )
+
+    return connections
+
+
+config_list = ObjectList(
+    name="connections",
+    get_objects=get_connections,
+    filters=[
+        DirectValuePropertyFilter(
+            name="regex",
+            description="Filter by regex matching the connection name.",
+            property_key="name",
+            compare=compare_regex,
+            completion_method="none",
+        ),
+    ],
+)
 
 
 @click.command(cls=CmemcCommand, name="list")
-@click.
-
+@click.option(
+    "--filter",
+    "filter_",
+    type=(str, str),
+    multiple=True,
+    help=config_list.get_filter_help_text(),
+    shell_complete=config_list.complete_values,
+)
+@click.option(
+    "--id-only",
+    is_flag=True,
+    help="Lists only connection names. "
+    "This is useful for piping the names into other cmemc commands.",
+)
+@click.pass_context
+def list_command(ctx: Context, filter_: tuple[tuple[str, str]], id_only: bool) -> None:
     """List configured connections.
 
-    This command lists all configured
-
+    This command lists all configured connections from the currently used config file.
+    Each connection is listed with its name, base URI, and grant type.
 
     The connection identifier can be used with the --connection option
     in order to use a specific Corporate Memory instance.
 
+    You can use the --filter option to filter connections by regex matching
+    the connection name.
+
     In order to apply commands on more than one instance, you need to use
     typical unix gear such as xargs or parallel.
 
-    Example: cmemc config list
+    Example: cmemc config list
+
+    Example: cmemc config list --id-only | xargs -I % sh -c 'cmemc -c % admin status'
 
-    Example: cmemc config list | parallel --jobs 5 cmemc -c {} admin status
+    Example: cmemc config list --id-only | parallel --jobs 5 cmemc -c {} admin status
     """
-
-
-
+    app = ctx.obj
+    connections = config_list.apply_filters(ctx=ctx, filter_=filter_)
+
+    # Sort connections case-insensitively by name
+    connections = sorted(connections, key=lambda c: c["name"].casefold())
+
+    if id_only:
+        for connection in connections:
+            app.echo_result(connection["name"])
+        return
+
+    table = [
+        (
+            connection["name"],
+            connection["base_uri"],
+            connection["grant_type"],
+        )
+        for connection in connections
+    ]
+
+    filtered = len(filter_) > 0
+    app.echo_info_table(
+        table,
+        headers=["Connection", "Base URI", "Grant Type"],
+        caption=build_caption(
+            len(table), "connection", instance=str(app.config_file), filtered=filtered
+        ),
+        empty_table_message="No connections found for these filters."
+        if filtered
+        else "No connections found.",
+    )
 
 
 @click.command(cls=CmemcCommand, name="edit")
@@ -60,7 +154,7 @@ def get_command(app: ApplicationContext, key: str) -> None:
     value = KNOWN_CONFIG_KEYS[key]()
     app.echo_debug(f"Type of {key} value is {type(value)}")
     if value is None:
-        raise
+        raise CmemcError(f"Configuration key {key} is not used in this configuration.")
     app.echo_info(str(value))
 
 
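The reworked `config list` command above is backed by the new ObjectList machinery and gains `--filter` and `--id-only` options. A rough usage sketch (the connection name pattern `^my-.*` is made up for illustration; `regex` is the only filter name defined in this file):

    # show only connections whose name matches a regular expression
    cmemc config list --filter regex "^my-.*"

    # print connection names only, e.g. to run a command against every connection
    cmemc config list --id-only | xargs -I % sh -c 'cmemc -c % admin status'

The second pipeline is the same pattern the updated docstring uses as an example.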
cmem_cmemc/commands/dataset.py
CHANGED

@@ -1,11 +1,10 @@
 """dataset commands for cmem command line interface."""
 
 import json
-import re
 
 import click
 import requests.exceptions
-from click import
+from click import Context, UsageError
 from cmem.cmempy.config import get_cmem_base_uri
 from cmem.cmempy.workspace import get_task_plugin_description, get_task_plugins
 from cmem.cmempy.workspace.projects.datasets.dataset import (
@@ -16,79 +15,77 @@ from cmem.cmempy.workspace.projects.datasets.dataset import (
     update_dataset,
 )
 from cmem.cmempy.workspace.projects.resources.resource import (
-    create_resource,
     get_resource_response,
-    resource_exist,
 )
 from cmem.cmempy.workspace.search import list_items
 
 from cmem_cmemc import completion
 from cmem_cmemc.command import CmemcCommand
 from cmem_cmemc.command_group import CmemcGroup
-from cmem_cmemc.commands.
+from cmem_cmemc.commands.file import _upload_file_resource, resource
 from cmem_cmemc.completion import get_dataset_file_mapping
-from cmem_cmemc.context import ApplicationContext
+from cmem_cmemc.context import ApplicationContext, build_caption
 from cmem_cmemc.exceptions import CmemcError
+from cmem_cmemc.object_list import (
+    DirectListPropertyFilter,
+    DirectMultiValuePropertyFilter,
+    DirectValuePropertyFilter,
+    ObjectList,
+    compare_regex,
+    transform_extract_labels,
+)
 from cmem_cmemc.parameter_types.path import ClickSmartPath
 from cmem_cmemc.smart_path import SmartPath as Path
+from cmem_cmemc.string_processor import DatasetLink, DatasetTypeLink
+from cmem_cmemc.title_helper import DatasetTypeTitleHelper, ProjectTitleHelper
 from cmem_cmemc.utils import check_or_select_project, struct_to_table
 
-DATASET_FILTER_TYPES = sorted(["project", "regex", "tag", "type"])
-DATASET_LIST_FILTER_HELP_TEXT = (
-    "Filter datasets based on metadata. First parameter"
-    f" can be one of the following values: {', '.join(DATASET_FILTER_TYPES)}."
-    " The options for the second parameter depend on the first parameter."
-)
-DATASET_DELETE_FILTER_HELP_TEXT = (
-    "Delete datasets based on metadata. First parameter --filter"
-    f" CHOICE can be one of {DATASET_FILTER_TYPES!s}."
-    " The second parameter is based on CHOICE."
-)
-
-
-def _get_dataset_tag_labels(dataset_: dict) -> list[str]:
-    """Output a list of tag labels from a single dataset."""
-    return [_["label"] for _ in dataset_["tags"]]
-
 
-def
-datasets
-
-"""
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+def get_datasets(ctx: Context) -> list[dict]:
+    """Get datasets for object list."""
+    _ = ctx
+    return list_items(item_type="dataset")["results"]  # type: ignore[no-any-return]
+
+
+dataset_list = ObjectList(
+    name="datasets",
+    get_objects=get_datasets,
+    filters=[
+        DirectValuePropertyFilter(
+            name="project",
+            description="Filter by project ID.",
+            property_key="projectId",
+            completion_method="values",
+            title_helper=ProjectTitleHelper(),
+        ),
+        DirectValuePropertyFilter(
+            name="regex",
+            description="Filter by regex matching the dataset label.",
+            property_key="label",
+            compare=compare_regex,
+            completion_method="none",
+        ),
+        DirectValuePropertyFilter(
+            name="type",
+            description="Filter by dataset type.",
+            property_key="pluginId",
+            compare=compare_regex,
+            completion_method="values",
+            title_helper=DatasetTypeTitleHelper(),
+        ),
+        DirectListPropertyFilter(
+            name="tag",
+            description="Filter by tag label.",
+            property_key="tags",
+            transform=transform_extract_labels,
+        ),
+        DirectMultiValuePropertyFilter(
+            name="ids",
+            description="Internal filter for multiple dataset IDs.",
+            property_key="id",
+        ),
+    ],
+)
 
 
 def _validate_and_split_dataset_id(dataset_id: str) -> tuple[str, str]:
@@ -107,13 +104,64 @@ def _validate_and_split_dataset_id(dataset_id: str) -> tuple[str, str]:
         project_part = dataset_id.split(":")[0]
         dataset_part = dataset_id.split(":")[1]
     except IndexError as error:
-        raise
+        raise CmemcError(
             f"{dataset_id} is not a valid dataset ID. Use the "
             "'dataset list' command to get a list of existing datasets."
         ) from error
     return project_part, dataset_part
 
 
+def _validate_dataset_ids(dataset_ids: tuple[str, ...]) -> None:
+    """Validate that all provided dataset IDs exist."""
+    if not dataset_ids:
+        return
+    all_datasets = list_items(item_type="dataset")["results"]
+    all_dataset_ids = [_["projectId"] + ":" + _["id"] for _ in all_datasets]
+    for dataset_id in dataset_ids:
+        if dataset_id not in all_dataset_ids:
+            raise CmemcError(
+                f"Dataset {dataset_id} not available. Use the 'dataset list' "
+                "command to get a list of existing datasets."
+            )
+
+
+def _get_datasets_to_delete(
+    ctx: Context,
+    dataset_ids: tuple[str, ...],
+    all_: bool,
+    filter_: tuple[tuple[str, str], ...],
+) -> list[str]:
+    """Get the list of dataset IDs to delete based on selection method."""
+    if all_:
+        # Get all datasets
+        datasets = list_items(item_type="dataset")["results"]
+        return [_["projectId"] + ":" + _["id"] for _ in datasets]
+
+    # Validate provided IDs exist before proceeding
+    _validate_dataset_ids(dataset_ids)
+
+    # Build filter list
+    filter_to_apply = list(filter_) if filter_ else []
+
+    # Add IDs if provided (using internal multi-value filter)
+    if dataset_ids:
+        # Extract just the dataset ID part (after the colon) for filtering
+        dataset_id_parts = [_.split(":")[1] for _ in dataset_ids]
+        filter_to_apply.append(("ids", ",".join(dataset_id_parts)))
+
+    # Apply filters
+    datasets = dataset_list.apply_filters(ctx=ctx, filter_=filter_to_apply)
+
+    # Build full dataset IDs
+    result = [_["projectId"] + ":" + _["id"] for _ in datasets]
+
+    # Validation: ensure we found datasets
+    if not result and not dataset_ids:
+        raise CmemcError("No datasets found matching the provided filters.")
+
+    return result
+
+
 def _post_file_resource(
     app: ApplicationContext,
     project_id: str,
@@ -147,57 +195,6 @@ def _post_file_resource(
     app.echo_success("done")
 
 
-def _upload_file_resource(
-    app: ApplicationContext,
-    project_id: str,
-    local_file_name: str,
-    remote_file_name: str,
-    replace: bool,
-) -> None:
-    """Upload a local file as a dataset resource to a project.
-
-    Args:
-    ----
-        app: the click cli app context.
-        project_id: The project ID in the workspace.
-        local_file_name: The path to the local file name
-        remote_file_name: The remote file name
-        replace: Replace resource if needed.
-
-    Raises:
-    ------
-        ValueError: if resource exists and no replace
-
-    """
-    exist = resource_exist(project_name=project_id, resource_name=remote_file_name)
-    if exist and not replace:
-        raise ClickException(
-            f"A file resource with the name '{remote_file_name}' already "
-            "exists in this project. \n"
-            "Please rename the file or use the '--replace' "
-            "parameter in order to overwrite the remote file."
-        )
-    if exist:
-        app.echo_info(
-            f"Replace content of {remote_file_name} with content from "
-            f"{local_file_name} in project {project_id} ... ",
-            nl=False,
-        )
-    else:
-        app.echo_info(
-            f"Upload {local_file_name} as a file resource "
-            f"{remote_file_name} to project {project_id} ... ",
-            nl=False,
-        )
-    create_resource(
-        project_name=project_id,
-        resource_name=remote_file_name,
-        file_resource=ClickSmartPath.open(local_file_name),
-        replace=replace,
-    )
-    app.echo_success("done")
-
-
 def _get_metadata_out_of_parameter(parameter_dict: dict) -> dict:
     """Extract metadata keys out of the parameter dict.
 
@@ -235,7 +232,7 @@ def _get_read_only_out_of_parameter(parameter_dict: dict) -> bool:
         return True
     if read_only in ("false", False, "False"):
        return False
-    raise
+    raise CmemcError(f"readOnly parameter should be 'true' or 'false' - was {read_only!r}")
 
 
 def _extend_parameter_with_metadata(
@@ -338,7 +335,12 @@ def _show_parameter_list(app: ApplicationContext, dataset_type: str) -> None:
     # metadata always on top, then sorted by key
     table = sorted(table, key=lambda k: k[0].lower())
     table = completion.add_metadata_parameter(table)
-    app.echo_info_table(
+    app.echo_info_table(
+        table,
+        headers=["Parameter", "Description"],
+        caption=build_caption(len(table), f"{dataset_type} dataset parameter"),
+        empty_table_message="No parameters found for this dataset type.",
+    )
 
 
 def _show_type_list(app: ApplicationContext) -> None:
@@ -362,7 +364,13 @@ def _show_type_list(app: ApplicationContext) -> None:
             f"{title}: {description}",
         ]
         table.append(row)
-    app.echo_info_table(
+    app.echo_info_table(
+        table,
+        headers=["Dataset Type", "Description"],
+        sort_column=1,
+        caption=build_caption(len(table), "dataset type"),
+        empty_table_message="No dataset types found.",
+    )
 
 
 def _check_or_select_dataset_type(app: ApplicationContext, dataset_type: str) -> tuple[str, dict]:
@@ -386,7 +394,7 @@ def _check_or_select_dataset_type(app: ApplicationContext, dataset_type: str) ->
         app.echo_debug(f"check type {dataset_type}")
         plugin = get_task_plugin_description(dataset_type)
     except requests.exceptions.HTTPError as error:
-        raise CmemcError(
+        raise CmemcError(f"Unknown dataset type: {dataset_type}.") from error
     else:
         return dataset_type, plugin
 
@@ -397,8 +405,8 @@ def _check_or_select_dataset_type(app: ApplicationContext, dataset_type: str) ->
     "filter_",
     type=(str, str),
     multiple=True,
-    shell_complete=
-    help=
+    shell_complete=dataset_list.complete_values,
+    help=dataset_list.get_filter_help_text(),
 )
 @click.option(
     "--raw", is_flag=True, help="Outputs raw JSON objects of the dataset search API response."
@@ -409,19 +417,15 @@ def _check_or_select_dataset_type(app: ApplicationContext, dataset_type: str) ->
     help="Lists only dataset IDs and no labels or other metadata. "
     "This is useful for piping the IDs into other cmemc commands.",
 )
-@click.
-def list_command(
-    app: ApplicationContext, filter_: tuple[tuple[str, str]], raw: bool, id_only: bool
-) -> None:
+@click.pass_context
+def list_command(ctx: Context, filter_: tuple[tuple[str, str]], raw: bool, id_only: bool) -> None:
     """List available datasets.
 
     Output and filter a list of available datasets. Each dataset is listed
     with its ID, type and label.
     """
-
-
-        filter_type, filter_name = _
-        datasets = _get_datasets_filtered(datasets, filter_type, filter_name)
+    app = ctx.obj
+    datasets = dataset_list.apply_filters(ctx=ctx, filter_=filter_)
 
     if raw:
         app.echo_info_json(datasets)
@@ -431,18 +435,21 @@ def list_command(
     else:
         table = []
         for _ in datasets:
-            row
-
-
-
-            ]
+            # Build row with dataset ID; the Label column will be transformed by DatasetLink
+            dataset_id = _["projectId"] + ":" + _["id"]
+            row = [dataset_id, _["pluginId"], dataset_id]
+
            table.append(row)
+        filtered = len(filter_) > 0
         app.echo_info_table(
             table,
             headers=["Dataset ID", "Type", "Label"],
             sort_column=2,
-
-            "
+            cell_processing={2: DatasetLink(), 1: DatasetTypeLink()},
+            caption=build_caption(len(table), "dataset", filtered=filtered),
+            empty_table_message="No datasets found for these filters."
+            if filtered
+            else "No datasets found. Use the `dataset create` command to create a new dataset.",
         )
 
 
@@ -454,28 +461,18 @@ def list_command(
     is_flag=True,
     help="Delete all datasets. " "This is a dangerous option, so use it with care.",
 )
-@click.option(
-    "--project",
-    "project_id",
-    type=click.STRING,
-    shell_complete=completion.project_ids,
-    help="In combination with the '--all' flag, this option allows for "
-    "deletion of all datasets of a certain project. The behaviour is "
-    "similar to the 'dataset list --project' command.",
-)
 @click.option(
     "--filter",
     "filter_",
     type=(str, str),
     multiple=True,
-    shell_complete=
-    help=
+    shell_complete=dataset_list.complete_values,
+    help=dataset_list.get_filter_help_text(),
 )
 @click.argument("dataset_ids", nargs=-1, type=click.STRING, shell_complete=completion.dataset_ids)
-@click.
+@click.pass_context
 def delete_command(
-
-    project_id: str,
+    ctx: Context,
     all_: bool,
     filter_: tuple[tuple[str, str]],
     dataset_ids: tuple[str],
@@ -489,12 +486,10 @@ def delete_command(
 
     Note: Datasets can be listed by using the `dataset list` command.
     """
-
-
-
-
-    )
-    if dataset_ids == () and not all_ and not filter_:
+    app = ctx.obj
+
+    # Validation: require at least one selection method
+    if not dataset_ids and not all_ and not filter_:
         raise UsageError(
             "Either specify at least one dataset ID"
             " or use a --filter option,"
@@ -504,25 +499,21 @@ def delete_command(
     if dataset_ids and (all_ or filter_):
         raise UsageError("Either specify a dataset ID OR" " use a --filter or the --all option.")
 
-
-
-        dataset_ids = []
-        datasets = list_items(item_type="dataset", project=project_id)["results"]
-        for _ in filter_:
-            filter_type, filter_name = _
-            datasets = _get_datasets_filtered(datasets, filter_type, filter_name)
-        for _ in datasets:
-            dataset_ids.append(_["projectId"] + ":" + _["id"])
+    # Get datasets to delete based on selection method
+    datasets_to_delete = _get_datasets_to_delete(ctx, dataset_ids, all_, filter_)
 
-
-
-
-
-
+    # Avoid double removal as well as sort IDs
+    processed_ids = sorted(set(datasets_to_delete), key=lambda v: v.lower())
+    count = len(processed_ids)
+
+    # Delete each dataset
+    for current, dataset_id in enumerate(processed_ids, start=1):
+        current_string = str(current).zfill(len(str(count)))
+        app.echo_info(f"Delete dataset {current_string}/{count}: {dataset_id} ... ", nl=False)
+        project_part, dataset_part = _validate_and_split_dataset_id(dataset_id)
         app.echo_debug(f"Project ID is {project_part}, dataset ID is {dataset_part}")
         delete_dataset(project_part, dataset_part)
-        app.echo_success("
-        current = current + 1
+        app.echo_success("deleted")
 
 
 @click.command(cls=CmemcCommand, name="download")
@@ -564,7 +555,7 @@ def download_command(
         file = project["data"]["parameters"]["file"]
     except KeyError as no_file_resource:
         raise CmemcError(
-
+            f"The dataset {dataset_id} has no associated file resource."
         ) from no_file_resource
     if Path(output_path).exists() and replace is not True:
         raise UsageError(
@@ -897,7 +888,7 @@ def open_command(app: ApplicationContext, dataset_ids: tuple[str]) -> None:
         app.echo_debug(f"Open {_}: {full_url}")
         click.launch(full_url)
     else:
-        raise
+        raise CmemcError(f"Dataset '{_}' not found.")
 
 
 @click.group(cls=CmemcGroup)
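`dataset list` and `dataset delete` now share the ObjectList-based filtering defined in `dataset_list` above, so both accept repeated `--filter NAME VALUE` pairs with the filter names `project`, `regex`, `type`, and `tag` (the `ids` filter is an internal helper used by the delete path). A rough usage sketch with made-up project, regex, and tag values:

    # list datasets of one project whose label starts with "Test"
    cmemc dataset list --filter project my-project --filter regex "^Test"

    # delete all datasets carrying a given tag label
    cmemc dataset delete --filter tag deprecated

Per the validation in `delete_command`, explicit dataset IDs cannot be combined with `--filter` or `--all`; a deletion request is either a list of IDs or a filter/all expression.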