cognite-toolkit 0.6.84__py3-none-any.whl → 0.6.86__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of cognite-toolkit might be problematic.
- cognite_toolkit/_cdf_tk/apps/_purge.py +84 -4
- cognite_toolkit/_cdf_tk/commands/_download.py +19 -17
- cognite_toolkit/_cdf_tk/commands/_purge.py +171 -348
- cognite_toolkit/_cdf_tk/storageio/_asset_centric.py +21 -1
- cognite_toolkit/_cdf_tk/storageio/_base.py +16 -0
- cognite_toolkit/_cdf_tk/utils/aggregators.py +15 -4
- cognite_toolkit/_cdf_tk/utils/producer_worker.py +3 -3
- cognite_toolkit/_cdf_tk/utils/validate_access.py +205 -43
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.6.84.dist-info → cognite_toolkit-0.6.86.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.6.84.dist-info → cognite_toolkit-0.6.86.dist-info}/RECORD +17 -17
- {cognite_toolkit-0.6.84.dist-info → cognite_toolkit-0.6.86.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.6.84.dist-info → cognite_toolkit-0.6.86.dist-info}/entry_points.txt +0 -0
- {cognite_toolkit-0.6.84.dist-info → cognite_toolkit-0.6.86.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_cdf_tk/apps/_purge.py

@@ -17,7 +17,7 @@ from cognite_toolkit._cdf_tk.storageio.selectors import (
 )
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._cdf_tk.utils.cli_args import parse_view_str
-from cognite_toolkit._cdf_tk.utils.interactive_select import DataModelingSelect
+from cognite_toolkit._cdf_tk.utils.interactive_select import AssetInteractiveSelect, DataModelingSelect
 
 
 class InstanceTypeEnum(str, Enum):
@@ -39,8 +39,8 @@ class PurgeApp(typer.Typer):
         if ctx.invoked_subcommand is None:
             print("Use [bold yellow]cdf purge --help[/] for more information.")
 
+    @staticmethod
     def purge_dataset(
-        self,
         ctx: typer.Context,
         external_id: Annotated[
             str | None,
@@ -53,7 +53,46 @@ class PurgeApp(typer.Typer):
             typer.Option(
                 "--include-dataset",
                 "-i",
-                help="
+                help="Whether to archive the dataset itself after purging its contents.",
+                hidden=Flags.v07.is_enabled(),
+            ),
+        ] = False,
+        archive_dataset: Annotated[
+            bool,
+            typer.Option(
+                "--archive-dataset",
+                help="Whether to archive the dataset itself after purging its contents.",
+                hidden=not Flags.v07.is_enabled(),
+            ),
+        ] = False,
+        skip_data: Annotated[
+            bool,
+            typer.Option(
+                "--skip-data",
+                "-s",
+                help="Skip deleting the data in the dataset, only delete configurations. The resources that are "
+                "considered data are: time series, event, files, assets, sequences, relationships, "
+                "labels, and 3D Models",
+                hidden=not Flags.v07.is_enabled(),
+            ),
+        ] = False,
+        include_configurations: Annotated[
+            bool,
+            typer.Option(
+                "--include-configurations",
+                "-c",
+                help="Include configurations, workflows, extraction pipelines and transformations in the purge.",
+                hidden=not Flags.v07.is_enabled(),
+            ),
+        ] = False,
+        asset_recursive: Annotated[
+            bool,
+            typer.Option(
+                "--asset-recursive",
+                "-a",
+                help="When deleting assets, delete all child assets recursively. CAVEAT: This can lead to assets"
+                " not in the selected dataset being deleted if they are children of assets in the dataset.",
+                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         dry_run: Annotated[
@@ -84,11 +123,52 @@
         """This command will delete the contents of the specified dataset"""
         cmd = PurgeCommand()
         client = EnvironmentVariables.create_from_environment().get_client()
+
+        if external_id is None:
+            # Is Interactive
+            interactive = AssetInteractiveSelect(client, operation="purge")
+            external_id = interactive.select_data_set(allow_empty=False)
+            if Flags.v07.is_enabled():
+                skip_data = not questionary.confirm(
+                    "Delete data in the dataset (time series, events, files, assets, sequences, relationships, labels, 3D models)?",
+                    default=True,
+                ).ask()
+                include_configurations = questionary.confirm(
+                    "Delete configurations (workflows, extraction pipelines and transformations) in the dataset?",
+                    default=False,
+                ).ask()
+                asset_recursive = questionary.confirm(
+                    "When deleting assets, delete all child assets recursively? (WARNING: This can lead "
+                    "to assets not in the selected dataset being deleted if they are children of assets in the dataset.)",
+                    default=False,
+                ).ask()
+            archive_dataset = questionary.confirm("Archive the dataset itself after purging?", default=False).ask()
+            dry_run = questionary.confirm("Dry run?", default=True).ask()
+            verbose = questionary.confirm("Verbose?", default=True).ask()
+
+            user_options = [archive_dataset, dry_run, verbose]
+            if Flags.v07.is_enabled():
+                user_options.extend([skip_data, include_configurations, asset_recursive])
+
+            if any(selected is None for selected in user_options):
+                raise typer.Abort("Aborted by user.")
+
+        else:
+            archive_dataset = archive_dataset if Flags.v07.is_enabled() else include_dataset
+
+        if not Flags.v07.is_enabled():
+            skip_data = False
+            include_configurations = True
+            asset_recursive = False
+
         cmd.run(
             lambda: cmd.dataset(
                 client,
                 external_id,
-
+                archive_dataset,
+                not skip_data,
+                include_configurations,
+                asset_recursive,
                 dry_run,
                 auto_yes,
                 verbose,
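The new options are gated behind the v07 feature flag via typer's hidden= parameter: --include-dataset is hidden once the flag is on, while --archive-dataset, --skip-data, --include-configurations, and --asset-recursive are hidden until it is. For illustration only, a minimal standalone sketch of this pattern (V07_ENABLED is a stand-in for Flags.v07.is_enabled(); this is not the toolkit's actual code):

from typing import Annotated

import typer

V07_ENABLED = True  # assumption: stand-in for Flags.v07.is_enabled()

app = typer.Typer()


@app.command()
def purge_dataset(
    include_dataset: Annotated[
        bool,
        # Legacy flag: shown in --help only while the feature flag is off.
        typer.Option("--include-dataset", "-i", hidden=V07_ENABLED),
    ] = False,
    archive_dataset: Annotated[
        bool,
        # Replacement flag: shown only once the feature flag is on.
        typer.Option("--archive-dataset", hidden=not V07_ENABLED),
    ] = False,
) -> None:
    # Fold the legacy flag into the new one, mirroring the diff's
    # `archive_dataset if Flags.v07.is_enabled() else include_dataset`.
    archive = archive_dataset if V07_ENABLED else include_dataset
    typer.echo(f"archive={archive}")


if __name__ == "__main__":
    app()

Note also that the interactive branch relies on questionary.confirm(...).ask() returning None when the user cancels a prompt, which is what the `any(selected is None for selected in user_options)` check catches before raising typer.Abort.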
cognite_toolkit/_cdf_tk/commands/_download.py

@@ -1,4 +1,4 @@
-from collections.abc import Iterable
+from collections.abc import Callable, Iterable
 from functools import partial
 from pathlib import Path
 
@@ -42,7 +42,7 @@ class DownloadCommand(ToolkitCommand):
 
         console = io.client.console
         for selector in selectors:
-            target_dir = output_dir / selector.group
+            target_dir = output_dir / sanitize_filename(selector.group)
             if verbose:
                 console.print(f"Downloading {selector.display_name} '{selector!s}' to {target_dir.as_posix()!r}")
 
@@ -57,9 +57,10 @@ class DownloadCommand(ToolkitCommand):
 
             selector.dump_to_file(target_dir)
             columns: list[SchemaColumn] | None = None
-
+            is_table = file_format in TABLE_WRITE_CLS_BY_FORMAT
+            if is_table and isinstance(io, TableStorageIO):
                 columns = io.get_schema(selector)
-            elif
+            elif is_table:
                 raise ToolkitValueError(
                     f"Cannot download {selector.kind} in {file_format!r} format. The {selector.kind!r} storage type does not support table schemas."
                 )
@@ -69,7 +70,7 @@
             ) as writer:
                 executor = ProducerWorkerExecutor[Page[T_CogniteResource], list[dict[str, JsonVal]]](
                     download_iterable=io.stream_data(selector, limit),
-                    process=
+                    process=self.create_data_process(io=io, selector=selector, is_table=is_table),
                     write=partial(writer.write_chunks, filestem=filestem),
                     iteration_count=iteration_count,
                     # Limit queue size to avoid filling up memory before the workers can write to disk.
@@ -124,19 +125,20 @@
         return False
 
     @staticmethod
-    def
-        data_page: Page[T_CogniteResource],
+    def create_data_process(
        io: StorageIO[T_Selector, T_CogniteResource],
        selector: T_Selector,
-
-
+        is_table: bool,
+    ) -> Callable[[Page[T_CogniteResource]], list[dict[str, JsonVal]]]:
+        """Creates a data processing function based on the IO type and whether the output is a table."""
+        if is_table and isinstance(io, TableStorageIO):
 
-
-
-        io: The StorageIO instance that defines how to process the data.
-        selector: The selection criteria used to identify the data.
+            def row_data_process(chunk: Page[T_CogniteResource]) -> list[dict[str, JsonVal]]:
+                return io.data_to_row(chunk.items, selector)
 
-
-
-
-
+            return row_data_process
+
+        def chunk_data_process(data_page: Page[T_CogniteResource]) -> list[dict[str, JsonVal]]:
+            return io.data_to_json_chunk(data_page.items, selector)
+
+        return chunk_data_process