cognite-toolkit 0.6.118__py3-none-any.whl → 0.7.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- cognite_toolkit/_cdf.py +2 -5
- cognite_toolkit/_cdf_tk/apps/_core_app.py +3 -6
- cognite_toolkit/_cdf_tk/apps/_data_app.py +1 -1
- cognite_toolkit/_cdf_tk/apps/_dump_app.py +35 -129
- cognite_toolkit/_cdf_tk/apps/_landing_app.py +1 -15
- cognite_toolkit/_cdf_tk/apps/_modules_app.py +0 -3
- cognite_toolkit/_cdf_tk/apps/_purge.py +15 -43
- cognite_toolkit/_cdf_tk/apps/_run.py +1 -3
- cognite_toolkit/_cdf_tk/apps/_upload_app.py +19 -11
- cognite_toolkit/_cdf_tk/commands/__init__.py +0 -2
- cognite_toolkit/_cdf_tk/commands/auth.py +0 -11
- cognite_toolkit/_cdf_tk/commands/build_cmd.py +5 -7
- cognite_toolkit/_cdf_tk/commands/deploy.py +1 -2
- cognite_toolkit/_cdf_tk/commands/init.py +2 -6
- cognite_toolkit/_cdf_tk/commands/modules.py +1 -2
- cognite_toolkit/_cdf_tk/cruds/__init__.py +1 -4
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py +2 -7
- cognite_toolkit/_cdf_tk/feature_flags.py +0 -44
- cognite_toolkit/_cdf_tk/plugins.py +1 -1
- cognite_toolkit/_cdf_tk/resource_classes/functions.py +3 -1
- cognite_toolkit/_cdf_tk/utils/fileio/_writers.py +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +5 -4
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.6.118.dist-info → cognite_toolkit-0.7.1.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.6.118.dist-info → cognite_toolkit-0.7.1.dist-info}/RECORD +30 -32
- cognite_toolkit/_cdf_tk/commands/dump_data.py +0 -489
- cognite_toolkit/_cdf_tk/utils/table_writers.py +0 -434
- {cognite_toolkit-0.6.118.dist-info → cognite_toolkit-0.7.1.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.6.118.dist-info → cognite_toolkit-0.7.1.dist-info}/entry_points.txt +0 -0
- {cognite_toolkit-0.6.118.dist-info → cognite_toolkit-0.7.1.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_cdf.py
CHANGED
@@ -31,7 +31,6 @@ from cognite_toolkit._cdf_tk.apps import (
     MigrateApp,
     ModulesApp,
     ProfileApp,
-    PurgeApp,
     RepoApp,
     RunApp,
 )
@@ -99,10 +98,8 @@ if Plugins.run.value.is_enabled():
 if Plugins.dump.value.is_enabled():
     _app.add_typer(DumpApp(**default_typer_kws), name="dump")

-if Plugins.purge.value.is_enabled() and not Flags.v07.is_enabled():
-    _app.add_typer(PurgeApp(**default_typer_kws), name="purge")

-if Plugins.dev.value.is_enabled()
+if Plugins.dev.value.is_enabled():
     _app.add_typer(DevApp(**default_typer_kws), name="dev")

 if Flags.PROFILE.is_enabled():
@@ -111,7 +108,7 @@ if Flags.PROFILE.is_enabled():
 if Flags.MIGRATE.is_enabled():
     _app.add_typer(MigrateApp(**default_typer_kws), name="migrate")

-if
+if Plugins.data.value.is_enabled():
     _app.add_typer(DataApp(**default_typer_kws), name="data")

 _app.add_typer(ModulesApp(**default_typer_kws), name="modules")
cognite_toolkit/_cdf_tk/apps/_core_app.py
CHANGED
@@ -15,7 +15,6 @@ from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.commands import BuildCommand, CleanCommand, DeployCommand
 from cognite_toolkit._cdf_tk.commands.clean import AVAILABLE_DATA_TYPES
 from cognite_toolkit._cdf_tk.exceptions import ToolkitFileNotFoundError
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.utils import get_cicd_environment
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._version import __version__ as current_version
@@ -193,7 +192,6 @@ class CoreApp(typer.Typer):
                 "--exit-non-zero-on-warning",
                 "-w",
                 help="Exit with non-zero code on warning.",
-                hidden=not Flags.EXIT_ON_WARNING.is_enabled(),
             ),
         ] = False,
     ) -> None:
@@ -206,7 +204,7 @@ class CoreApp(typer.Typer):
         client = EnvironmentVariables.create_from_environment().get_client()

         print_warning = True
-        if
+        if exit_on_warning:
             print_warning = False

         cmd = BuildCommand(print_warning=print_warning)
@@ -223,7 +221,7 @@ class CoreApp(typer.Typer):
             )
         )

-        if
+        if exit_on_warning and cmd.warning_list:
             print("\n[bold red]Warnings raised during the build process:[/]\n")

             for warning in cmd.warning_list:
@@ -354,7 +352,6 @@ class CoreApp(typer.Typer):
                 "--module",
                 "-m",
                 help="Specify name or path of the module to clean",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = None,
         verbose: Annotated[
@@ -379,6 +376,6 @@ class CoreApp(typer.Typer):
                 include,
                 module,
                 verbose,
-                all_modules=
+                all_modules=False,
             )
         )
cognite_toolkit/_cdf_tk/apps/_data_app.py
CHANGED
@@ -18,7 +18,7 @@ class DataApp(typer.Typer):

     @staticmethod
     def main(ctx: typer.Context) -> None:
-        """
+        """Plugin to work with data in CDF"""
         if ctx.invoked_subcommand is None:
             print("Use [bold yellow]cdf data --help[/] for more information.")
             return None
cognite_toolkit/_cdf_tk/apps/_dump_app.py
CHANGED
@@ -7,13 +7,7 @@ from cognite.client.data_classes.data_modeling import DataModelId, ViewId
 from rich import print

 from cognite_toolkit._cdf_tk.client.data_classes.search_config import ViewId as SearchConfigViewId
-from cognite_toolkit._cdf_tk.commands import
-from cognite_toolkit._cdf_tk.commands.dump_data import (
-    AssetFinder,
-    EventFinder,
-    FileMetadataFinder,
-    TimeSeriesFinder,
-)
+from cognite_toolkit._cdf_tk.commands import DumpResourceCommand
 from cognite_toolkit._cdf_tk.commands.dump_resource import (
     AgentFinder,
     DataModelFinder,
@@ -32,45 +26,32 @@ from cognite_toolkit._cdf_tk.commands.dump_resource import (
 from cognite_toolkit._cdf_tk.exceptions import ToolkitRequiredValueError
 from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
-from cognite_toolkit._cdf_tk.utils.interactive_select import (
-    AssetInteractiveSelect,
-    EventInteractiveSelect,
-    FileMetadataInteractiveSelect,
-    TimeSeriesInteractiveSelect,
-)


 class DumpApp(typer.Typer):
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
         self.callback(invoke_without_command=True)(self.dump_main)
-
-            self.add_typer(DumpDataApp(*args, **kwargs), name="data")
-            self.add_typer(DumpConfigApp(*args, **kwargs), name="config")
-        else:
-            self.command("datamodel")(DumpConfigApp.dump_datamodel_cmd)
+        self.command("datamodel")(DumpConfigApp.dump_datamodel_cmd)

-
-
+        self.command("asset")(DumpDataApp.dump_asset_cmd)
+        self.command("timeseries")(DumpDataApp.dump_timeseries_cmd)

-
-
-
-
-
+        self.command("workflow")(DumpConfigApp.dump_workflow)
+        self.command("transformation")(DumpConfigApp.dump_transformation)
+        self.command("group")(DumpConfigApp.dump_group)
+        self.command("node")(DumpConfigApp.dump_node)
+        self.command("spaces")(DumpConfigApp.dump_spaces)

-
-
-
-
-
-            self.command("streamlit")(DumpConfigApp.dump_streamlit)
+        self.command("location-filter")(DumpConfigApp.dump_location_filters)
+        self.command("extraction-pipeline")(DumpConfigApp.dump_extraction_pipeline)
+        self.command("functions")(DumpConfigApp.dump_functions)
+        self.command("datasets")(DumpConfigApp.dump_datasets)
+        self.command("streamlit")(DumpConfigApp.dump_streamlit)

-
-            self.command("agents")(DumpConfigApp.dump_agents)
+        self.command("agents")(DumpConfigApp.dump_agents)

-
-            self.command("search-config")(DumpConfigApp.dump_search_config)
+        self.command("search-config")(DumpConfigApp.dump_search_config)

     @staticmethod
     def dump_main(ctx: typer.Context) -> None:
@@ -91,18 +72,13 @@ class DumpConfigApp(typer.Typer):
         self.command("group")(self.dump_group)
         self.command("node")(self.dump_node)
         self.command("spaces")(self.dump_spaces)
-
-
-
-
-
-
-
-        if Flags.AGENTS.is_enabled():
-            self.command("agents")(self.dump_agents)
-
-        if Flags.SEARCH_CONFIG.is_enabled():
-            self.command("search-config")(self.dump_search_config)
+        self.command("location-filters")(self.dump_location_filters)
+        self.command("extraction-pipeline")(self.dump_extraction_pipeline)
+        self.command("datasets")(DumpConfigApp.dump_datasets)
+        self.command("functions")(self.dump_functions)
+        self.command("streamlit")(DumpConfigApp.dump_streamlit)
+        self.command("agents")(self.dump_agents)
+        self.command("search-config")(self.dump_search_config)

     @staticmethod
     def dump_config_main(ctx: typer.Context) -> None:
@@ -862,30 +838,11 @@ class DumpDataApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will dump the selected assets in the selected format to the folder specified, defaults to /tmp."""
-        if Flags.
-            print(
-                "[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download assets` instead."
-            )
-            return None
-        elif Flags.v08:
+        if Flags.v08:
             raise ValueError(
                 "The `cdf dump data asset` command has been removed. Please use `cdf data download assets` instead."
             )
-
-        client = EnvironmentVariables.create_from_environment().get_client()
-        if hierarchy is None and data_set is None:
-            hierarchy, data_set = AssetInteractiveSelect(client, "dump").select_hierarchies_and_data_sets()
-
-        cmd.run(
-            lambda: cmd.dump_table(
-                AssetFinder(client, hierarchy or [], data_set or []),
-                output_dir,
-                clean,
-                limit,
-                format_,
-                verbose,
-            )
-        )
+        print("[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download assets` instead.")

     @staticmethod
     def dump_files_cmd(
@@ -949,30 +906,12 @@ class DumpDataApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will dump the selected events to the selected format in the folder specified, defaults to /tmp."""
-        if Flags.
-            print(
-                "[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download files` instead."
-            )
-            return None
-        elif Flags.v08:
+        if Flags.v08:
             raise ValueError(
                 "The `cdf dump data files-metadata` command has been removed. Please use `cdf data download files` instead."
             )
-
-
-        client = EnvironmentVariables.create_from_environment().get_client()
-        if hierarchy is None and data_set is None:
-            hierarchy, data_set = FileMetadataInteractiveSelect(client, "dump").select_hierarchies_and_data_sets()
-        cmd.run(
-            lambda: cmd.dump_table(
-                FileMetadataFinder(client, hierarchy or [], data_set or []),
-                output_dir,
-                clean,
-                limit,
-                format_,
-                verbose,
-            )
-        )
+        print("[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download files` instead.")
+        return None

     @staticmethod
     def dump_timeseries_cmd(
@@ -1036,29 +975,14 @@ class DumpDataApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will dump the selected timeseries to the selected format in the folder specified, defaults to /tmp."""
-        if Flags.
-            print(
-                "[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download timeseries` instead."
-            )
-            return None
-        elif Flags.v08:
+        if Flags.v08:
             raise ValueError(
                 "The `cdf dump data timeseries` command has been removed. Please use `cdf data download timeseries` instead."
             )
-
-
-        if hierarchy is None and data_set is None:
-            hierarchy, data_set = TimeSeriesInteractiveSelect(client, "dump").select_hierarchies_and_data_sets()
-        cmd.run(
-            lambda: cmd.dump_table(
-                TimeSeriesFinder(client, hierarchy or [], data_set or []),
-                output_dir,
-                clean,
-                limit,
-                format_,
-                verbose,
-            )
+        print(
+            "[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download timeseries` instead."
         )
+        return None

     @staticmethod
     def dump_event_cmd(
@@ -1122,27 +1046,9 @@ class DumpDataApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will dump the selected events to the selected format in the folder specified, defaults to /tmp."""
-        if Flags.
-            print(
-                "[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download events` instead."
-            )
-            return None
-        elif Flags.v08:
+        if Flags.v08:
             raise ValueError(
                 "The `cdf dump data event` command has been removed. Please use `cdf data download events` instead."
             )
-
-
-        client = EnvironmentVariables.create_from_environment().get_client()
-        if hierarchy is None and data_set is None:
-            hierarchy, data_set = EventInteractiveSelect(client, "dump").select_hierarchies_and_data_sets()
-        cmd.run(
-            lambda: cmd.dump_table(
-                EventFinder(client, hierarchy or [], data_set or []),
-                output_dir,
-                clean,
-                limit,
-                format_,
-                verbose,
-            )
-        )
+        print("[bold yellow]Warning:[/] This command has been removed. Please use `cdf data download events` instead.")
+        return None
cognite_toolkit/_cdf_tk/apps/_landing_app.py
CHANGED
@@ -2,9 +2,7 @@ from typing import Annotated

 import typer

-from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.commands import InitCommand
-from cognite_toolkit._cdf_tk.feature_flags import Flags


 class LandingApp(typer.Typer):
@@ -21,22 +19,10 @@ class LandingApp(typer.Typer):
                 help="Whether to do a dry-run, do dry-run if present.",
             ),
         ] = False,
-        # TODO: this is a temporary solution to be able to test the functionality
-        # in a new environment, assuming that the toml file doesn't exist yet.
-        # remove this once v.07 is released
-        v7: Annotated[
-            bool,
-            typer.Option(
-                "--seven",
-                "-s",
-                help="Emulate v0.7",
-                hidden=(Flags.v07.is_enabled() or not CDFToml.load().is_loaded_from_file),
-            ),
-        ] = False,
     ) -> None:
         """Getting started checklist"""
         cmd = InitCommand()
         # Tracking command with the usual lambda run construct
         # is intentionally left out because we don't want to expose the user to the warning
         # before they've had the chance to opt in (which is something they'll do later using this command).
-        cmd.execute(dry_run=dry_run
+        cmd.execute(dry_run=dry_run)
cognite_toolkit/_cdf_tk/apps/_modules_app.py
CHANGED
@@ -6,7 +6,6 @@ from rich import print

 from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.commands import ModulesCommand, PullCommand
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._version import __version__

@@ -58,7 +57,6 @@ class ModulesApp(typer.Typer):
                 "--library-url",
                 "-u",
                 help="URL of the library to add to the project.",
-                hidden=not Flags.EXTERNAL_LIBRARIES.is_enabled(),
             ),
         ] = None,
         library_checksum: Annotated[
@@ -67,7 +65,6 @@ class ModulesApp(typer.Typer):
                 "--library-checksum",
                 "-c",
                 help="Checksum of the library to add to the project.",
-                hidden=not Flags.EXTERNAL_LIBRARIES.is_enabled(),
             ),
         ] = None,
         verbose: Annotated[
cognite_toolkit/_cdf_tk/apps/_purge.py
CHANGED
@@ -8,7 +8,6 @@ from rich import print

 from cognite_toolkit._cdf_tk.commands import PurgeCommand
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.storageio.selectors import (
     InstanceFileSelector,
     InstanceSelector,
@@ -31,8 +30,7 @@ class PurgeApp(typer.Typer):
         self.callback(invoke_without_command=True)(self.main)
         self.command("dataset")(self.purge_dataset)
         self.command("space")(self.purge_space)
-
-            self.command("instances")(self.purge_instances)
+        self.command("instances")(self.purge_instances)

     def main(self, ctx: typer.Context) -> None:
         """Commands deleting data from Cognite Data Fusion."""
@@ -48,21 +46,11 @@ class PurgeApp(typer.Typer):
                 help="External id of the dataset to purge. If not provided, interactive mode will be used.",
             ),
         ] = None,
-        include_dataset: Annotated[
-            bool,
-            typer.Option(
-                "--include-dataset",
-                "-i",
-                help="Whether to archive the dataset itself after purging its contents.",
-                hidden=Flags.v07.is_enabled(),
-            ),
-        ] = False,
         archive_dataset: Annotated[
             bool,
             typer.Option(
                 "--archive-dataset",
                 help="Whether to archive the dataset itself after purging its contents.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         skip_data: Annotated[
@@ -73,7 +61,6 @@ class PurgeApp(typer.Typer):
                 help="Skip deleting the data in the dataset, only delete configurations. The resources that are "
                 "considered data are: time series, event, files, assets, sequences, relationships, "
                 "labels, and 3D Models",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         include_configurations: Annotated[
@@ -82,7 +69,6 @@ class PurgeApp(typer.Typer):
                 "--include-configurations",
                 "-c",
                 help="Include configurations, workflows, extraction pipelines and transformations in the purge.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         asset_recursive: Annotated[
@@ -92,7 +78,6 @@ class PurgeApp(typer.Typer):
                 "-a",
                 help="When deleting assets, delete all child assets recursively. CAVEAT: This can lead to assets"
                 " not in the selected dataset being deleted if they are children of assets in the dataset.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         dry_run: Annotated[
@@ -128,39 +113,28 @@ class PurgeApp(typer.Typer):
             # Is Interactive
             interactive = AssetInteractiveSelect(client, operation="purge")
             external_id = interactive.select_data_set(allow_empty=False)
-
-
-
-
-
-
-
-
-
-
-
-
-
-            ).ask()
+            skip_data = not questionary.confirm(
+                "Delete data in the dataset (time series, events, files, assets, sequences, relationships, labels, 3D models)?",
+                default=True,
+            ).ask()
+            include_configurations = questionary.confirm(
+                "Delete configurations (workflows, extraction pipelines and transformations) in the dataset?",
+                default=False,
+            ).ask()
+            asset_recursive = questionary.confirm(
+                "When deleting assets, delete all child assets recursively? (WARNING: This can lead "
+                "to assets not in the selected dataset being deleted if they are children of assets in the dataset.)",
+                default=False,
+            ).ask()
             archive_dataset = questionary.confirm("Archive the dataset itself after purging?", default=False).ask()
             dry_run = questionary.confirm("Dry run?", default=True).ask()
             verbose = questionary.confirm("Verbose?", default=True).ask()

-            user_options = [archive_dataset, dry_run, verbose]
-            if Flags.v07.is_enabled():
-                user_options.extend([skip_data, include_configurations, asset_recursive])
+            user_options = [archive_dataset, dry_run, verbose, skip_data, include_configurations, asset_recursive]

             if any(selected is None for selected in user_options):
                 raise typer.Abort("Aborted by user.")

-        else:
-            archive_dataset = archive_dataset if Flags.v07.is_enabled() else include_dataset
-
-            if not Flags.v07.is_enabled():
-                skip_data = False
-                include_configurations = True
-                asset_recursive = False
-
         cmd.run(
             lambda: cmd.dataset(
                 client,
@@ -197,7 +171,6 @@ class PurgeApp(typer.Typer):
             typer.Option(
                 "--delete-datapoints",
                 help="Delete datapoints linked to CogniteTimeSeries nodes in the space.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         delete_file_content: Annotated[
@@ -205,7 +178,6 @@ class PurgeApp(typer.Typer):
             typer.Option(
                 "--delete-file-content",
                 help="Delete file content linked to CogniteFile nodes in the space.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         dry_run: Annotated[
cognite_toolkit/_cdf_tk/apps/_run.py
CHANGED
@@ -10,7 +10,6 @@ from cognite_toolkit._cdf_tk.commands import (
     RunTransformationCommand,
     RunWorkflowCommand,
 )
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables

 CDF_TOML = CDFToml.load(Path.cwd())
@@ -27,8 +26,7 @@ class RunApp(typer.Typer):
     @staticmethod
     def _print_deprecation_warning() -> None:
         """Print deprecation warning for the run plugin."""
-
-            print("The run plugin is deprecated and will be replaced by the dev plugin in v0.8.0.")
+        print("The run plugin is deprecated and will be replaced by the dev plugin in v0.8.0.")

     @staticmethod
     def main(ctx: typer.Context) -> None:
cognite_toolkit/_cdf_tk/apps/_upload_app.py
CHANGED
@@ -15,10 +15,18 @@ DEFAULT_INPUT_DIR = Path.cwd() / DATA_DEFAULT_DIR
 class UploadApp(typer.Typer):
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
-        self.
+        self.callback(invoke_without_command=True)(self.upload_main)
+        self.command("dir")(self.upload_dir)

     @staticmethod
-    def upload_main(
+    def upload_main(ctx: typer.Context) -> None:
+        """Commands to upload data to CDF."""
+        if ctx.invoked_subcommand is None:
+            print("Use [bold yellow]cdf upload --help[/] for more information.")
+            return None
+
+    @staticmethod
+    def upload_dir(
         ctx: typer.Context,
         input_dir: Annotated[
             Path | None,
@@ -87,13 +95,13 @@ class UploadApp(typer.Typer):
             typer.echo("No selection made for deploying resources. Exiting.")
             raise typer.Exit(code=1)

-
-
-
-
-
-
-
-
-            )
+        client = EnvironmentVariables.create_from_environment().get_client()
+        cmd.run(
+            lambda: cmd.upload(
+                input_dir=input_dir,
+                dry_run=dry_run,
+                verbose=verbose,
+                deploy_resources=deploy_resources,
+                client=client,
             )
+        )
cognite_toolkit/_cdf_tk/commands/__init__.py
CHANGED
@@ -11,7 +11,6 @@ from .build_cmd import BuildCommand
 from .clean import CleanCommand
 from .collect import CollectCommand
 from .deploy import DeployCommand
-from .dump_data import DumpDataCommand
 from .dump_resource import DumpResourceCommand
 from .featureflag import FeatureFlagCommand
 from .init import InitCommand
@@ -27,7 +26,6 @@ __all__ = [
     "CollectCommand",
     "DeployCommand",
     "DownloadCommand",
-    "DumpDataCommand",
     "DumpResourceCommand",
     "FeatureFlagCommand",
     "InitCommand",
cognite_toolkit/_cdf_tk/commands/auth.py
CHANGED
@@ -53,7 +53,6 @@ from cognite_toolkit._cdf_tk.exceptions import (
     ResourceDeleteError,
     ToolkitMissingValueError,
 )
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.tk_warnings import (
     HighSeverityWarning,
     LowSeverityWarning,
@@ -96,16 +95,6 @@ class AuthCommand(ToolkitCommand):
             raise AuthenticationError(f"Unable to verify the credentials.\n{e}")

         print("[green]The credentials are valid.[/green]")
-        if no_verify or Flags.v07.is_enabled():
-            return
-        print(
-            Panel(
-                "Running verification, 'cdf auth verify'...",
-                title="",
-                expand=False,
-            )
-        )
-        self.verify(client, dry_run)

     def _store_dotenv(self, env_vars: EnvironmentVariables) -> None:
         new_env_file = env_vars.create_dotenv_file()
cognite_toolkit/_cdf_tk/commands/build_cmd.py
CHANGED
@@ -65,7 +65,6 @@ from cognite_toolkit._cdf_tk.exceptions import (
     ToolkitMissingModuleError,
     ToolkitYAMLFormatError,
 )
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.hints import Hint, ModuleDefinition, verify_module_directory
 from cognite_toolkit._cdf_tk.tk_warnings import (
     DuplicatedItemWarning,
@@ -361,13 +360,12 @@ class BuildCommand(ToolkitCommand):

         if resource_name == "data_models":
             resource_name = "data_modeling"
-
-
-
-
-                    "Please rename the folder to 'data_modeling'."
-                )
+            self.warn(
+                MediumSeverityWarning(
+                    "The resource folder 'data_models' is deprecated and will be removed in v1.0. "
+                    "Please rename the folder to 'data_modeling'."
                 )
+            )

         builder = self._get_builder(build_dir, resource_name)

cognite_toolkit/_cdf_tk/commands/deploy.py
CHANGED
@@ -44,7 +44,6 @@ from cognite_toolkit._cdf_tk.exceptions import (
     ToolkitFileNotFoundError,
     ToolkitNotADirectoryError,
 )
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.protocols import (
     T_ResourceRequest,
     T_ResourceRequestList,
@@ -295,7 +294,7 @@ class DeployCommand(ToolkitCommand):
         read_modules = build.read_modules
         output_results = DeployResults([], "deploy", dry_run=dry_run) if results is None else results
         for loader_cls in ordered_loaders:
-            if issubclass(loader_cls, DataCRUD)
+            if issubclass(loader_cls, DataCRUD):
                 self.warn(
                     HighSeverityWarning(
                         f"Uploading {loader_cls.kind} data is deprecated and will be removed in v0.8. "
|