cognite-toolkit 0.6.97__py3-none-any.whl → 0.7.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (136)
  1. cognite_toolkit/_cdf.py +16 -17
  2. cognite_toolkit/_cdf_tk/apps/__init__.py +2 -0
  3. cognite_toolkit/_cdf_tk/apps/_core_app.py +13 -5
  4. cognite_toolkit/_cdf_tk/apps/_data_app.py +1 -1
  5. cognite_toolkit/_cdf_tk/apps/_dev_app.py +86 -0
  6. cognite_toolkit/_cdf_tk/apps/_download_app.py +692 -24
  7. cognite_toolkit/_cdf_tk/apps/_dump_app.py +43 -101
  8. cognite_toolkit/_cdf_tk/apps/_landing_app.py +18 -4
  9. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +249 -9
  10. cognite_toolkit/_cdf_tk/apps/_modules_app.py +0 -3
  11. cognite_toolkit/_cdf_tk/apps/_purge.py +15 -43
  12. cognite_toolkit/_cdf_tk/apps/_run.py +11 -0
  13. cognite_toolkit/_cdf_tk/apps/_upload_app.py +45 -6
  14. cognite_toolkit/_cdf_tk/builders/__init__.py +2 -2
  15. cognite_toolkit/_cdf_tk/builders/_base.py +28 -42
  16. cognite_toolkit/_cdf_tk/cdf_toml.py +20 -1
  17. cognite_toolkit/_cdf_tk/client/_toolkit_client.py +23 -3
  18. cognite_toolkit/_cdf_tk/client/api/extended_functions.py +6 -9
  19. cognite_toolkit/_cdf_tk/client/api/infield.py +93 -1
  20. cognite_toolkit/_cdf_tk/client/api/migration.py +175 -1
  21. cognite_toolkit/_cdf_tk/client/api/streams.py +84 -0
  22. cognite_toolkit/_cdf_tk/client/api/three_d.py +50 -0
  23. cognite_toolkit/_cdf_tk/client/data_classes/base.py +25 -1
  24. cognite_toolkit/_cdf_tk/client/data_classes/canvas.py +46 -3
  25. cognite_toolkit/_cdf_tk/client/data_classes/charts.py +3 -3
  26. cognite_toolkit/_cdf_tk/client/data_classes/charts_data.py +95 -213
  27. cognite_toolkit/_cdf_tk/client/data_classes/infield.py +32 -18
  28. cognite_toolkit/_cdf_tk/client/data_classes/migration.py +10 -2
  29. cognite_toolkit/_cdf_tk/client/data_classes/streams.py +90 -0
  30. cognite_toolkit/_cdf_tk/client/data_classes/three_d.py +47 -0
  31. cognite_toolkit/_cdf_tk/client/testing.py +18 -2
  32. cognite_toolkit/_cdf_tk/commands/__init__.py +6 -6
  33. cognite_toolkit/_cdf_tk/commands/_changes.py +3 -42
  34. cognite_toolkit/_cdf_tk/commands/_download.py +21 -11
  35. cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py +0 -2
  36. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +22 -20
  37. cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +133 -91
  38. cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +73 -22
  39. cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +311 -43
  40. cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py +5 -5
  41. cognite_toolkit/_cdf_tk/commands/_migrate/issues.py +33 -0
  42. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +157 -8
  43. cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py +9 -4
  44. cognite_toolkit/_cdf_tk/commands/_purge.py +27 -28
  45. cognite_toolkit/_cdf_tk/commands/_questionary_style.py +16 -0
  46. cognite_toolkit/_cdf_tk/commands/_upload.py +109 -86
  47. cognite_toolkit/_cdf_tk/commands/about.py +221 -0
  48. cognite_toolkit/_cdf_tk/commands/auth.py +19 -12
  49. cognite_toolkit/_cdf_tk/commands/build_cmd.py +15 -61
  50. cognite_toolkit/_cdf_tk/commands/clean.py +63 -16
  51. cognite_toolkit/_cdf_tk/commands/deploy.py +20 -17
  52. cognite_toolkit/_cdf_tk/commands/dump_resource.py +6 -4
  53. cognite_toolkit/_cdf_tk/commands/init.py +225 -3
  54. cognite_toolkit/_cdf_tk/commands/modules.py +20 -44
  55. cognite_toolkit/_cdf_tk/commands/pull.py +6 -19
  56. cognite_toolkit/_cdf_tk/commands/resources.py +179 -0
  57. cognite_toolkit/_cdf_tk/constants.py +20 -1
  58. cognite_toolkit/_cdf_tk/cruds/__init__.py +19 -5
  59. cognite_toolkit/_cdf_tk/cruds/_base_cruds.py +14 -70
  60. cognite_toolkit/_cdf_tk/cruds/_data_cruds.py +8 -17
  61. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/__init__.py +4 -1
  62. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/agent.py +11 -9
  63. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/auth.py +4 -14
  64. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/classic.py +44 -43
  65. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py +4 -11
  66. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/data_organization.py +4 -13
  67. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py +205 -66
  68. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py +5 -17
  69. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py +116 -27
  70. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py +6 -27
  71. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/function.py +9 -28
  72. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/hosted_extractors.py +12 -30
  73. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/industrial_tool.py +3 -7
  74. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/location.py +3 -15
  75. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/migration.py +4 -12
  76. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/raw.py +4 -10
  77. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/relationship.py +3 -8
  78. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/robotics.py +15 -44
  79. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/streams.py +94 -0
  80. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/three_d_model.py +3 -7
  81. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/timeseries.py +5 -15
  82. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/transformation.py +39 -31
  83. cognite_toolkit/_cdf_tk/cruds/_resource_cruds/workflow.py +20 -40
  84. cognite_toolkit/_cdf_tk/cruds/_worker.py +24 -36
  85. cognite_toolkit/_cdf_tk/feature_flags.py +16 -36
  86. cognite_toolkit/_cdf_tk/plugins.py +2 -1
  87. cognite_toolkit/_cdf_tk/resource_classes/__init__.py +4 -0
  88. cognite_toolkit/_cdf_tk/resource_classes/capabilities.py +12 -0
  89. cognite_toolkit/_cdf_tk/resource_classes/functions.py +3 -1
  90. cognite_toolkit/_cdf_tk/resource_classes/infield_cdm_location_config.py +109 -0
  91. cognite_toolkit/_cdf_tk/resource_classes/migration.py +8 -17
  92. cognite_toolkit/_cdf_tk/resource_classes/streams.py +29 -0
  93. cognite_toolkit/_cdf_tk/storageio/__init__.py +9 -21
  94. cognite_toolkit/_cdf_tk/storageio/_annotations.py +19 -16
  95. cognite_toolkit/_cdf_tk/storageio/_applications.py +338 -26
  96. cognite_toolkit/_cdf_tk/storageio/_asset_centric.py +67 -104
  97. cognite_toolkit/_cdf_tk/storageio/_base.py +61 -29
  98. cognite_toolkit/_cdf_tk/storageio/_datapoints.py +276 -20
  99. cognite_toolkit/_cdf_tk/storageio/_file_content.py +436 -0
  100. cognite_toolkit/_cdf_tk/storageio/_instances.py +34 -2
  101. cognite_toolkit/_cdf_tk/storageio/_raw.py +26 -0
  102. cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py +62 -4
  103. cognite_toolkit/_cdf_tk/storageio/selectors/_base.py +14 -2
  104. cognite_toolkit/_cdf_tk/storageio/selectors/_canvas.py +14 -0
  105. cognite_toolkit/_cdf_tk/storageio/selectors/_charts.py +14 -0
  106. cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py +23 -3
  107. cognite_toolkit/_cdf_tk/storageio/selectors/_file_content.py +164 -0
  108. cognite_toolkit/_cdf_tk/tk_warnings/other.py +4 -0
  109. cognite_toolkit/_cdf_tk/tracker.py +2 -2
  110. cognite_toolkit/_cdf_tk/utils/dtype_conversion.py +9 -3
  111. cognite_toolkit/_cdf_tk/utils/fileio/__init__.py +2 -0
  112. cognite_toolkit/_cdf_tk/utils/fileio/_base.py +5 -1
  113. cognite_toolkit/_cdf_tk/utils/fileio/_readers.py +112 -20
  114. cognite_toolkit/_cdf_tk/utils/fileio/_writers.py +15 -15
  115. cognite_toolkit/_cdf_tk/utils/http_client/_client.py +284 -18
  116. cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py +50 -4
  117. cognite_toolkit/_cdf_tk/utils/http_client/_data_classes2.py +187 -0
  118. cognite_toolkit/_cdf_tk/utils/interactive_select.py +9 -14
  119. cognite_toolkit/_cdf_tk/utils/sql_parser.py +2 -3
  120. cognite_toolkit/_cdf_tk/utils/useful_types.py +6 -2
  121. cognite_toolkit/_cdf_tk/validation.py +79 -1
  122. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  123. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  124. cognite_toolkit/_resources/cdf.toml +5 -4
  125. cognite_toolkit/_version.py +1 -1
  126. cognite_toolkit/config.dev.yaml +13 -0
  127. {cognite_toolkit-0.6.97.dist-info → cognite_toolkit-0.7.30.dist-info}/METADATA +24 -24
  128. {cognite_toolkit-0.6.97.dist-info → cognite_toolkit-0.7.30.dist-info}/RECORD +153 -143
  129. cognite_toolkit-0.7.30.dist-info/WHEEL +4 -0
  130. {cognite_toolkit-0.6.97.dist-info → cognite_toolkit-0.7.30.dist-info}/entry_points.txt +1 -0
  131. cognite_toolkit/_cdf_tk/commands/_migrate/canvas.py +0 -201
  132. cognite_toolkit/_cdf_tk/commands/dump_data.py +0 -489
  133. cognite_toolkit/_cdf_tk/commands/featureflag.py +0 -27
  134. cognite_toolkit/_cdf_tk/utils/table_writers.py +0 -434
  135. cognite_toolkit-0.6.97.dist-info/WHEEL +0 -4
  136. cognite_toolkit-0.6.97.dist-info/licenses/LICENSE +0 -18
cognite_toolkit/_cdf_tk/apps/_purge.py

@@ -8,7 +8,6 @@ from rich import print
 
 from cognite_toolkit._cdf_tk.commands import PurgeCommand
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
-from cognite_toolkit._cdf_tk.feature_flags import Flags
 from cognite_toolkit._cdf_tk.storageio.selectors import (
     InstanceFileSelector,
     InstanceSelector,
@@ -31,8 +30,7 @@ class PurgeApp(typer.Typer):
         self.callback(invoke_without_command=True)(self.main)
         self.command("dataset")(self.purge_dataset)
         self.command("space")(self.purge_space)
-        if Flags.PURGE_INSTANCES.is_enabled() or Flags.v07.is_enabled():
-            self.command("instances")(self.purge_instances)
+        self.command("instances")(self.purge_instances)
 
     def main(self, ctx: typer.Context) -> None:
         """Commands deleting data from Cognite Data Fusion."""
@@ -48,21 +46,11 @@ class PurgeApp(typer.Typer):
                 help="External id of the dataset to purge. If not provided, interactive mode will be used.",
             ),
         ] = None,
-        include_dataset: Annotated[
-            bool,
-            typer.Option(
-                "--include-dataset",
-                "-i",
-                help="Whether to archive the dataset itself after purging its contents.",
-                hidden=Flags.v07.is_enabled(),
-            ),
-        ] = False,
         archive_dataset: Annotated[
             bool,
             typer.Option(
                 "--archive-dataset",
                 help="Whether to archive the dataset itself after purging its contents.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         skip_data: Annotated[
@@ -73,7 +61,6 @@ class PurgeApp(typer.Typer):
                 help="Skip deleting the data in the dataset, only delete configurations. The resources that are "
                 "considered data are: time series, event, files, assets, sequences, relationships, "
                 "labels, and 3D Models",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         include_configurations: Annotated[
@@ -82,7 +69,6 @@ class PurgeApp(typer.Typer):
                 "--include-configurations",
                 "-c",
                 help="Include configurations, workflows, extraction pipelines and transformations in the purge.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         asset_recursive: Annotated[
@@ -92,7 +78,6 @@ class PurgeApp(typer.Typer):
                 "-a",
                 help="When deleting assets, delete all child assets recursively. CAVEAT: This can lead to assets"
                 " not in the selected dataset being deleted if they are children of assets in the dataset.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         dry_run: Annotated[
@@ -128,39 +113,28 @@ class PurgeApp(typer.Typer):
             # Is Interactive
             interactive = AssetInteractiveSelect(client, operation="purge")
             external_id = interactive.select_data_set(allow_empty=False)
-            if Flags.v07.is_enabled():
-                skip_data = not questionary.confirm(
-                    "Delete data in the dataset (time series, events, files, assets, sequences, relationships, labels, 3D models)?",
-                    default=True,
-                ).ask()
-                include_configurations = questionary.confirm(
-                    "Delete configurations (workflows, extraction pipelines and transformations) in the dataset?",
-                    default=False,
-                ).ask()
-                asset_recursive = questionary.confirm(
-                    "When deleting assets, delete all child assets recursively? (WARNING: This can lead "
-                    "to assets not in the selected dataset being deleted if they are children of assets in the dataset.)",
-                    default=False,
-                ).ask()
+            skip_data = not questionary.confirm(
+                "Delete data in the dataset (time series, events, files, assets, sequences, relationships, labels, 3D models)?",
+                default=True,
+            ).ask()
+            include_configurations = questionary.confirm(
+                "Delete configurations (workflows, extraction pipelines and transformations) in the dataset?",
+                default=False,
+            ).ask()
+            asset_recursive = questionary.confirm(
+                "When deleting assets, delete all child assets recursively? (WARNING: This can lead "
+                "to assets not in the selected dataset being deleted if they are children of assets in the dataset.)",
+                default=False,
+            ).ask()
             archive_dataset = questionary.confirm("Archive the dataset itself after purging?", default=False).ask()
             dry_run = questionary.confirm("Dry run?", default=True).ask()
             verbose = questionary.confirm("Verbose?", default=True).ask()
 
-            user_options = [archive_dataset, dry_run, verbose]
-            if Flags.v07.is_enabled():
-                user_options.extend([skip_data, include_configurations, asset_recursive])
+            user_options = [archive_dataset, dry_run, verbose, skip_data, include_configurations, asset_recursive]
 
             if any(selected is None for selected in user_options):
                 raise typer.Abort("Aborted by user.")
 
-        else:
-            archive_dataset = archive_dataset if Flags.v07.is_enabled() else include_dataset
-
-            if not Flags.v07.is_enabled():
-                skip_data = False
-                include_configurations = True
-                asset_recursive = False
-
         cmd.run(
             lambda: cmd.dataset(
                 client,
@@ -197,7 +171,6 @@ class PurgeApp(typer.Typer):
             typer.Option(
                 "--delete-datapoints",
                 help="Delete datapoints linked to CogniteTimeSeries nodes in the space.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         delete_file_content: Annotated[
@@ -205,7 +178,6 @@ class PurgeApp(typer.Typer):
             typer.Option(
                 "--delete-file-content",
                 help="Delete file content linked to CogniteFile nodes in the space.",
-                hidden=not Flags.v07.is_enabled(),
             ),
         ] = False,
         dry_run: Annotated[
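
Note (editor's sketch, not part of the diff): the new interactive flow relies on questionary's convention that .ask() returns None when the user cancels a prompt, which is why the collected answers are None-checked before anything is deleted. A minimal standalone illustration of the pattern, using a hypothetical helper name:

import questionary
import typer


def ask_purge_options() -> tuple[bool, bool]:
    # questionary returns None (instead of a bool) if the user cancels,
    # e.g. with Ctrl-C, so every answer must be checked before use.
    archive_dataset = questionary.confirm("Archive the dataset itself after purging?", default=False).ask()
    dry_run = questionary.confirm("Dry run?", default=True).ask()
    if any(answer is None for answer in (archive_dataset, dry_run)):
        raise typer.Abort()  # mirrors the "Aborted by user." path above
    return archive_dataset, dry_run
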
cognite_toolkit/_cdf_tk/apps/_run.py

@@ -23,9 +23,15 @@ class RunApp(typer.Typer):
         self.command("workflow")(self.run_workflow)
         self.add_typer(RunFunctionApp(*args, **kwargs), name="function")
 
+    @staticmethod
+    def _print_deprecation_warning() -> None:
+        """Print deprecation warning for the run plugin."""
+        print("The run plugin is deprecated and will be replaced by the dev plugin in v0.8.0.")
+
     @staticmethod
     def main(ctx: typer.Context) -> None:
         """Commands to execute processes in CDF."""
+        RunApp._print_deprecation_warning()
         if ctx.invoked_subcommand is None:
             print("Use [bold yellow]cdf run --help[/] for more information.")
 
@@ -51,6 +57,7 @@ class RunApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will run the specified transformation using a one-time session."""
+        RunApp._print_deprecation_warning()
         cmd = RunTransformationCommand()
         client = EnvironmentVariables.create_from_environment().get_client()
         cmd.run(lambda: cmd.run_transformation(client, external_id))
@@ -108,6 +115,7 @@ class RunApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will run the specified workflow."""
+        RunApp._print_deprecation_warning()
         cmd = RunWorkflowCommand()
         env_vars = EnvironmentVariables.create_from_environment()
         cmd.run(lambda: cmd.run_workflow(env_vars, organization_dir, env_name, external_id, version, wait))
@@ -123,6 +131,7 @@ class RunFunctionApp(typer.Typer):
     @staticmethod
     def main(ctx: typer.Context) -> None:
         """Commands to execute function."""
+        RunApp._print_deprecation_warning()
         if ctx.invoked_subcommand is None:
             print("Use [bold yellow]cdf run function --help[/] for more information.")
 
@@ -178,6 +187,7 @@ class RunFunctionApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will run the specified function locally."""
+        RunApp._print_deprecation_warning()
         cmd = RunFunctionCommand()
         env_vars = EnvironmentVariables.create_from_environment()
         cmd.run(
@@ -243,6 +253,7 @@ class RunFunctionApp(typer.Typer):
         ] = False,
     ) -> None:
         """This command will run the specified function (assuming it is deployed) in CDF."""
+        RunApp._print_deprecation_warning()
         cmd = RunFunctionCommand()
         env_vars = EnvironmentVariables.create_from_environment()
         cmd.run(lambda: cmd.run_cdf(env_vars, organization_dir, env_name, external_id, schedule, wait))
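
Note (editor's sketch): the pattern above, one static _print_deprecation_warning helper invoked at the top of every `run` entry point, can be reproduced standalone as follows (hypothetical app, not the package's actual wiring):

import typer
from rich import print

app = typer.Typer()


def _print_deprecation_warning() -> None:
    # Same message the toolkit emits for every `cdf run ...` invocation.
    print("The run plugin is deprecated and will be replaced by the dev plugin in v0.8.0.")


@app.command()
def transformation(external_id: str) -> None:
    """Run the specified transformation."""
    _print_deprecation_warning()
    # ... the actual command logic would follow here
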
cognite_toolkit/_cdf_tk/apps/_upload_app.py

@@ -1,10 +1,12 @@
 from pathlib import Path
 from typing import Annotated, Any
 
+import questionary
 import typer
+from questionary import Choice
 
 from cognite_toolkit._cdf_tk.commands import UploadCommand
-from cognite_toolkit._cdf_tk.constants import DATA_DEFAULT_DIR
+from cognite_toolkit._cdf_tk.constants import DATA_DEFAULT_DIR, DATA_MANIFEST_SUFFIX, DATA_RESOURCE_DIR
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 
 DEFAULT_INPUT_DIR = Path.cwd() / DATA_DEFAULT_DIR
@@ -14,20 +16,28 @@ class UploadApp(typer.Typer):
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
         self.callback(invoke_without_command=True)(self.upload_main)
+        self.command("dir")(self.upload_dir)
 
     @staticmethod
-    def upload_main(
+    def upload_main(ctx: typer.Context) -> None:
+        """Commands to upload data to CDF."""
+        if ctx.invoked_subcommand is None:
+            print("Use [bold yellow]cdf upload --help[/] for more information.")
+        return None
+
+    @staticmethod
+    def upload_dir(
         ctx: typer.Context,
         input_dir: Annotated[
-            Path,
+            Path | None,
             typer.Argument(
-                help="The directory containing the data to upload.",
+                help="The directory containing the data to upload. If not specified, an interactive prompt will ask for the directory.",
                 exists=True,
                 file_okay=False,
                 dir_okay=True,
                 resolve_path=True,
             ),
-        ],
+        ] = None,
         dry_run: Annotated[
             bool,
             typer.Option(
@@ -43,7 +53,7 @@ class UploadApp(typer.Typer):
                 "-r",
                 help="If set, the command will look for resource configuration files in adjacent folders and create them if they do not exist.",
             ),
-        ] = True,
+        ] = False,
         verbose: Annotated[
             bool,
             typer.Option(
@@ -55,6 +65,35 @@ class UploadApp(typer.Typer):
     ) -> None:
         """Commands to upload data to CDF."""
         cmd = UploadCommand()
+        if input_dir is None:
+            input_candidate = sorted({p.parent for p in DEFAULT_INPUT_DIR.rglob(f"**/*{DATA_MANIFEST_SUFFIX}")})
+            if not input_candidate:
+                typer.echo(f"No data manifests found in default directory: {DEFAULT_INPUT_DIR}")
+                raise typer.Exit(code=1)
+            input_dir = questionary.select(
+                "Select the input directory containing the data to upload:",
+                choices=[Choice(str(option.name), value=option) for option in input_candidate],
+            ).ask()
+            if input_dir is None:
+                typer.echo("No input directory selected. Exiting.")
+                raise typer.Exit(code=1)
+            dry_run = questionary.confirm("Proceed with dry run?", default=dry_run).ask()
+            if dry_run is None:
+                typer.echo("No selection made for dry run. Exiting.")
+                raise typer.Exit(code=1)
+            resource_dir = Path(input_dir) / DATA_RESOURCE_DIR
+            if resource_dir.exists():
+                if resource_dir.is_relative_to(Path.cwd()):
+                    display_name = resource_dir.relative_to(Path.cwd()).as_posix()
+                else:
+                    display_name = resource_dir.as_posix()
+
+                deploy_resources = questionary.confirm(
+                    f"Deploy resources found in {display_name!r}?", default=deploy_resources
+                ).ask()
+                if deploy_resources is None:
+                    typer.echo("No selection made for deploying resources. Exiting.")
+                    raise typer.Exit(code=1)
 
         client = EnvironmentVariables.create_from_environment().get_client()
         cmd.run(
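
Note (editor's sketch): the interactive branch above discovers upload candidates by collecting the unique parent directories of every manifest file under the default input directory. Standalone, with the suffix value assumed (the real constant lives in constants.py):

from pathlib import Path

DATA_MANIFEST_SUFFIX = ".manifest.yaml"  # assumption; see cognite_toolkit/_cdf_tk/constants.py


def find_upload_candidates(root: Path) -> list[Path]:
    # Unique parents of all manifest files, sorted so the questionary prompt
    # presents them in a stable order.
    return sorted({p.parent for p in root.rglob(f"*{DATA_MANIFEST_SUFFIX}")})
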
cognite_toolkit/_cdf_tk/builders/__init__.py

@@ -3,7 +3,7 @@ from pathlib import Path
 
 from cognite_toolkit._cdf_tk.tk_warnings import ToolkitWarning
 
-from ._base import Builder, DefaultBuilder, get_loader
+from ._base import Builder, DefaultBuilder, get_resource_crud
 from ._datamodels import DataModelBuilder
 from ._file import FileBuilder
 from ._function import FunctionBuilder
@@ -36,5 +36,5 @@ __all__ = [
     "StreamlitBuilder",
     "TransformationBuilder",
     "create_builder",
-    "get_loader",
+    "get_resource_crud",
 ]
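
Note: downstream code importing the old name must move to the new re-export, e.g.:

# 0.6.x: from cognite_toolkit._cdf_tk.builders import get_loader
from cognite_toolkit._cdf_tk.builders import get_resource_crud  # 0.7.x
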
cognite_toolkit/_cdf_tk/builders/_base.py

@@ -2,14 +2,12 @@ import difflib
 from abc import ABC, abstractmethod
 from collections.abc import Callable, Iterable, Sequence
 from pathlib import Path
-from typing import Any, ClassVar, cast
+from typing import Any, ClassVar
 
 from cognite_toolkit._cdf_tk.constants import INDEX_PATTERN
 from cognite_toolkit._cdf_tk.cruds import (
-    CRUDS_BY_FOLDER_NAME,
+    RESOURCE_CRUD_BY_FOLDER_NAME,
     GroupCRUD,
-    RawDatabaseCRUD,
-    RawTableCRUD,
     ResourceCRUD,
 )
 from cognite_toolkit._cdf_tk.data_classes import (
@@ -31,7 +29,6 @@ from cognite_toolkit._cdf_tk.tk_warnings.fileread import (
 )
 from cognite_toolkit._cdf_tk.utils import (
     humanize_collection,
-    safe_read,
 )
 
 
@@ -103,34 +100,31 @@ class Builder(ABC):
         return destination_path
 
     def _get_loader(self, source_path: Path) -> tuple[None, ToolkitWarning] | tuple[type[ResourceCRUD], None]:
-        return get_loader(source_path, self.resource_folder)
+        return get_resource_crud(source_path, self.resource_folder)
 
 
-def get_loader(
-    source_path: Path,
-    resource_folder: str,
-    force_pattern: bool = False,
+def get_resource_crud(
+    source_path: Path, resource_folder: str
 ) -> tuple[None, ToolkitWarning] | tuple[type[ResourceCRUD], None]:
-    folder_loaders = CRUDS_BY_FOLDER_NAME.get(resource_folder, [])
-    if not folder_loaders:
+    """Get the appropriate CRUD class for the given source file and resource folder."""
+    folder_cruds = RESOURCE_CRUD_BY_FOLDER_NAME.get(resource_folder, [])
+    if not folder_cruds:
         return None, ToolkitNotSupportedWarning(
             f"resource of type {resource_folder!r} in {source_path.name}.",
-            details=f"Available resources are: {', '.join(CRUDS_BY_FOLDER_NAME.keys())}",
+            details=f"Available resources are: {humanize_collection(RESOURCE_CRUD_BY_FOLDER_NAME.keys())}",
         )
 
-    loaders = [
-        loader for loader in folder_loaders if loader.is_supported_file(source_path, force_pattern=force_pattern)
-    ]
-    if len(loaders) == 0:
+    crud_candidates = [crud_cls for crud_cls in folder_cruds if crud_cls.is_supported_file(source_path)]
+    if len(crud_candidates) == 0:
         suggestion: str | None = None
         if "." in source_path.stem:
             core, kind = source_path.stem.rsplit(".", 1)
-            match = difflib.get_close_matches(kind, [loader.kind for loader in folder_loaders])
+            match = difflib.get_close_matches(kind, [crud_cls.kind for crud_cls in folder_cruds])
            if match:
                suggested_name = f"{core}.{match[0]}{source_path.suffix}"
                suggestion = f"Did you mean to call the file {suggested_name!r}?"
        else:
-            kinds = [loader.kind for loader in folder_loaders]
+            kinds = [crud.kind for crud in folder_cruds]
            if len(kinds) == 1:
                suggestion = f"Did you mean to call the file '{source_path.stem}.{kinds[0]}{source_path.suffix}'?"
            else:
@@ -139,30 +133,22 @@ def get_loader(
                     f"the resource type. Supported types are: {humanize_collection(kinds)}."
                 )
         return None, UnknownResourceTypeWarning(source_path, suggestion)
-    elif len(loaders) > 1 and all(loader.folder_name == "raw" for loader in loaders):
-        # Raw files can be ambiguous, so we need to check the content.
-        # If there is a tableName field, it is a table, otherwise it is a database.
-        if any(
-            line.strip().startswith("tableName:") or line.strip().startswith("- tableName:")
-            for line in safe_read(source_path).splitlines()
-        ):
-            return RawTableCRUD, None
-        else:
-            return RawDatabaseCRUD, None
-    elif len(loaders) > 1 and all(issubclass(loader, GroupCRUD) for loader in loaders):
-        # There are two group loaders, one for resource scoped and one for all scoped.
+    elif len(crud_candidates) > 1 and all(issubclass(loader, GroupCRUD) for loader in crud_candidates):
+        # There are two group cruds, one for resource scoped and one for all scoped.
         return GroupCRUD, None
-    elif len(loaders) > 1:
-        names = humanize_collection(
-            [f"'{source_path.stem}.{loader.kind}{source_path.suffix}'" for loader in loaders], bind_word="or"
-        )
-        raise AmbiguousResourceFileError(
-            f"Ambiguous resource file {source_path.name} in {resource_folder} folder. "
-            f"Unclear whether it is {humanize_collection([loader.kind for loader in loaders], bind_word='or')}."
-            f"\nPlease name the file {names}."
-        )
-
-    return cast(type[ResourceCRUD], loaders[0]), None
+    elif len(crud_candidates) == 1:
+        return crud_candidates[0], None
+
+    # This is unreachable with our current ResourceCRUD classes. We have tests that is exhaustive over
+    # all ResourceCRUDs to ensure this.
+    names = humanize_collection(
+        [f"'{source_path.stem}.{loader.kind}{source_path.suffix}'" for loader in crud_candidates], bind_word="or"
+    )
+    raise AmbiguousResourceFileError(
+        f"Ambiguous resource file {source_path.name} in {resource_folder} folder. "
+        f"Unclear whether it is {humanize_collection([crud_cls.kind for crud_cls in crud_candidates], bind_word='or')}."
+        f"\nPlease name the file {names}."
    )
 
 
 class DefaultBuilder(Builder):
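
Note (editor's sketch): get_resource_crud keeps the (crud_cls, None) | (None, warning) return shape of the old get_loader, so callers branch on the second element. Hypothetical file path and folder name:

from pathlib import Path

from cognite_toolkit._cdf_tk.builders import get_resource_crud

crud_cls, warning = get_resource_crud(
    Path("modules/my_module/transformations/my.Transformation.yaml"), "transformations"
)
if warning is not None:
    print(warning)  # e.g. an UnknownResourceTypeWarning with a filename suggestion
else:
    print(f"Resolved CRUD class: {crud_cls.__name__}")
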
cognite_toolkit/_cdf_tk/cdf_toml.py

@@ -9,7 +9,7 @@ from typing import Any, ClassVar
 from rich import print
 
 from cognite_toolkit import _version
-from cognite_toolkit._cdf_tk.constants import clean_name
+from cognite_toolkit._cdf_tk.constants import RESOURCES_PATH, EnvType, clean_name
 from cognite_toolkit._cdf_tk.exceptions import (
     ToolkitRequiredValueError,
     ToolkitTOMLFormatError,
@@ -176,6 +176,25 @@ class CDFToml:
             is_loaded_from_file=False,
         )
 
+    @classmethod
+    def write(cls, organization_dir: Path, env: EnvType = "dev", version: str = _version.__version__) -> None:
+        destination = Path.cwd() / CDFToml.file_name
+        if destination.exists():
+            print("cdf.toml file already exists. Skipping creation.")
+            return
+        cdf_toml_content = (RESOURCES_PATH / CDFToml.file_name).read_text(encoding="utf-8")
+        cdf_toml_content = cdf_toml_content.replace("0.0.0", version)
+        if organization_dir != Path.cwd():
+            cdf_toml_content = cdf_toml_content.replace(
+                "#<PLACEHOLDER>",
+                f'''
+default_organization_dir = "{organization_dir.name}"''',
+            )
+        else:
+            cdf_toml_content = cdf_toml_content.replace("#<PLACEHOLDER>", "")
+        cdf_toml_content = cdf_toml_content.replace("<DEFAULT_ENV_PLACEHOLDER>", env)
+        destination.write_text(cdf_toml_content, encoding="utf-8")
+
 
 def _read_toml(file_path: Path) -> dict[str, Any]:
     # TOML files are required to be UTF-8 encoded
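
Note (editor's sketch): the new CDFToml.write copies the bundled cdf.toml template into the current working directory, substituting the version and default environment, and adding a default_organization_dir entry when the organization directory is not the cwd. Hypothetical directory name:

from pathlib import Path

from cognite_toolkit._cdf_tk.cdf_toml import CDFToml

# No-op (with a printed notice) if cdf.toml already exists in the cwd.
CDFToml.write(organization_dir=Path.cwd() / "my_org", env="dev")
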
cognite_toolkit/_cdf_tk/client/_toolkit_client.py

@@ -19,23 +19,42 @@ from .api.migration import MigrationAPI
 from .api.project import ProjectAPI
 from .api.robotics import RoboticsAPI
 from .api.search import SearchAPI
+from .api.streams import StreamsAPI
+from .api.three_d import ThreeDAPI
 from .api.token import TokenAPI
 from .api.verify import VerifyAPI
 from .config import ToolkitClientConfig
 
 
+class ToolAPI:
+    """This is reimplemented CogniteAPIs in Toolkit"""
+
+    def __init__(self, http_client: HTTPClient, console: Console) -> None:
+        self.http_client = http_client
+        self.three_d = ThreeDAPI(http_client, console)
+
+
 class ToolkitClient(CogniteClient):
-    def __init__(self, config: ToolkitClientConfig | None = None, enable_set_pending_ids: bool = False) -> None:
+    def __init__(
+        self,
+        config: ToolkitClientConfig | None = None,
+        enable_set_pending_ids: bool = False,
+        console: Console | None = None,
+    ) -> None:
         super().__init__(config=config)
         http_client = HTTPClient(self.config)
+        self.http_client = http_client
         toolkit_config = ToolkitClientConfig.from_client_config(self.config)
-        self.console = Console()
+        self.console = console or Console()
+        self.tool = ToolAPI(http_client, self.console)
         self.search = SearchAPI(self._config, self._API_VERSION, self)
         self.robotics = RoboticsAPI(self._config, self._API_VERSION, self)
         self.dml = DMLAPI(self._config, self._API_VERSION, self)
         self.verify = VerifyAPI(self._config, self._API_VERSION, self)
         self.lookup = LookUpGroup(self._config, self._API_VERSION, self, self.console)
-        self.functions: ExtendedFunctionsAPI = ExtendedFunctionsAPI(toolkit_config, self._API_VERSION, self)
+        self.functions: ExtendedFunctionsAPI = ExtendedFunctionsAPI(
+            toolkit_config, self._API_VERSION, self, self.console
+        )
         self.data_modeling: ExtendedDataModelingAPI = ExtendedDataModelingAPI(self._config, self._API_VERSION, self)
         if enable_set_pending_ids:
             self.time_series: ExtendedTimeSeriesAPI = ExtendedTimeSeriesAPI(self._config, self._API_VERSION, self)
@@ -47,6 +66,7 @@ class ToolkitClient(CogniteClient):
         self.charts = ChartsAPI(self._config, self._API_VERSION, self)
         self.project = ProjectAPI(config=toolkit_config, cognite_client=self)
         self.infield = InfieldAPI(http_client, self.console)
+        self.streams = StreamsAPI(http_client, self.console)
 
     @property
     def config(self) -> ToolkitClientConfig:
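
Note (editor's sketch, import paths assumed): the constructor now accepts a shared rich Console, and the reimplemented APIs are grouped under client.tool, with streams exposed directly:

from rich.console import Console

from cognite_toolkit._cdf_tk.client import ToolkitClient, ToolkitClientConfig


def build_client(config: ToolkitClientConfig) -> ToolkitClient:
    client = ToolkitClient(config=config, console=Console())
    _ = client.tool.three_d  # ThreeDAPI, backed by the shared HTTP client
    _ = client.streams       # new StreamsAPI
    return client
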
cognite_toolkit/_cdf_tk/client/api/extended_functions.py

@@ -17,15 +17,16 @@ class ExtendedFunctionsAPI(FunctionsAPI):
         config: ToolkitClientConfig,
         api_version: str | None,
         cognite_client: CogniteClient,
+        console: Console | None = None,
     ) -> None:
         """
         Extended Functions API to include custom headers and payload preparation.
         """
         super().__init__(config, api_version, cognite_client)
         self._toolkit_config = config
-        self._toolkit_http_client = HTTPClient(config, max_retries=global_config.max_retries)
+        self._toolkit_http_client = HTTPClient(config, max_retries=global_config.max_retries, console=console)
 
-    def create_with_429_retry(self, function: FunctionWrite, console: Console | None = None) -> Function:
+    def create_with_429_retry(self, function: FunctionWrite) -> Function:
         """Create a function with manual retry handling for 429 Too Many Requests responses.
 
         This method is a workaround for scenarios where the function creation API is temporarily unavailable
@@ -43,16 +44,13 @@ class ExtendedFunctionsAPI(FunctionsAPI):
                 endpoint_url=self._toolkit_config.create_api_url("/functions"),
                 method="POST",
                 body_content={"items": [function.dump(camel_case=True)]},
-            ),
-            console=console,
+            )
         )
         result.raise_for_status()
         # We assume the API response is one item on a successful creation
         return Function._load(result.get_first_body()["items"][0], cognite_client=self._cognite_client)  # type: ignore[arg-type,index]
 
-    def delete_with_429_retry(
-        self, external_id: SequenceNotStr[str], ignore_unknown_ids: bool = False, console: Console | None = None
-    ) -> None:
+    def delete_with_429_retry(self, external_id: SequenceNotStr[str], ignore_unknown_ids: bool = False) -> None:
         """Delete one or more functions with retry handling for 429 Too Many Requests responses.
 
         This method is an improvement over the standard delete method in the FunctionsAPI.
@@ -77,7 +75,6 @@ class ExtendedFunctionsAPI(FunctionsAPI):
                 endpoint_url=self._toolkit_config.create_api_url("/functions/delete"),
                 method="POST",
                 body_content=body_content,
-            ),
-            console=console,
+            )
         ).raise_for_status()
         return None
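
Note (editor's sketch): the console keyword moves from the two retry methods to the API constructor, so 0.7.x call sites simply drop it. Hypothetical function object:

from cognite.client.data_classes import FunctionWrite

from cognite_toolkit._cdf_tk.client import ToolkitClient


def redeploy(client: ToolkitClient, function: FunctionWrite) -> None:
    # 0.6.x passed console=... to both calls; 0.7.x injects the console once
    # via the ExtendedFunctionsAPI constructor instead.
    client.functions.delete_with_429_retry([function.external_id], ignore_unknown_ids=True)
    client.functions.create_with_429_retry(function)
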
cognite_toolkit/_cdf_tk/client/api/infield.py

@@ -4,7 +4,11 @@ from typing import Any, cast
 from rich.console import Console
 
 from cognite_toolkit._cdf_tk.client.data_classes.api_classes import PagedResponse, QueryResponse
-from cognite_toolkit._cdf_tk.client.data_classes.infield import DataExplorationConfig, InfieldLocationConfig
+from cognite_toolkit._cdf_tk.client.data_classes.infield import (
+    DataExplorationConfig,
+    InFieldCDMLocationConfig,
+    InfieldLocationConfig,
+)
 from cognite_toolkit._cdf_tk.client.data_classes.instance_api import (
     InstanceResponseItem,
     InstanceResult,
@@ -150,7 +154,95 @@ class InfieldConfigAPI:
         return result
 
 
+class InFieldCDMConfigAPI:
+    ENDPOINT = "/models/instances"
+    LOCATION_REF = "cdmLocationConfig"
+
+    def __init__(self, http_client: HTTPClient, console: Console) -> None:
+        self._http_client = http_client
+        self._console = console
+        self._config = http_client.config
+
+    def apply(self, items: Sequence[InFieldCDMLocationConfig]) -> list[InstanceResult]:
+        if len(items) > 500:
+            raise ValueError("Cannot apply more than 500 InFieldCDMLocationConfig items at once.")
+
+        request_items = [item.as_request_item() for item in items]
+        responses = self._http_client.request_with_retries(
+            ItemsRequest(
+                endpoint_url=self._config.create_api_url(self.ENDPOINT),
+                method="POST",
+                items=request_items,  # type: ignore[arg-type]
+            )
+        )
+        responses.raise_for_status()
+        return PagedResponse[InstanceResult].model_validate(responses.get_first_body()).items
+
+    def retrieve(self, items: Sequence[NodeIdentifier]) -> list[InFieldCDMLocationConfig]:
+        if len(items) > 100:
+            raise ValueError("Cannot retrieve more than 100 InFieldCDMLocationConfig items at once.")
+        if not items:
+            return []
+        responses = self._http_client.request_with_retries(
+            SimpleBodyRequest(
+                endpoint_url=self._config.create_api_url(f"{self.ENDPOINT}/query"),
+                method="POST",
+                body_content=self._retrieve_query(items),
+            )
+        )
+        responses.raise_for_status()
+        parsed_response = QueryResponse[InstanceResponseItem].model_validate(responses.get_first_body())
+        return self._parse_retrieve_response(parsed_response)
+
+    def delete(self, items: Sequence[InFieldCDMLocationConfig]) -> list[NodeIdentifier]:
+        if len(items) > 500:
+            raise ValueError("Cannot delete more than 500 InFieldCDMLocationConfig items at once.")
+
+        identifiers: Sequence = [item.as_id() for item in items]
+        responses = self._http_client.request_with_retries(
+            ItemsRequest(
+                endpoint_url=self._config.create_api_url(f"{self.ENDPOINT}/delete"),
+                method="POST",
+                items=identifiers,  # type: ignore[arg-type]
+            )
+        )
+        responses.raise_for_status()
+        return PagedResponse[NodeIdentifier].model_validate(responses.get_first_body()).items
+
+    @classmethod
+    def _retrieve_query(cls, items: Sequence[NodeIdentifier]) -> dict[str, Any]:
+        return {
+            "with": {
+                cls.LOCATION_REF: {
+                    "limit": len(items),
+                    "nodes": {
+                        "filter": {
+                            "instanceReferences": [
+                                {"space": item.space, "externalId": item.external_id} for item in items
+                            ]
+                        },
+                    },
+                },
+            },
+            "select": {
+                cls.LOCATION_REF: {
+                    "sources": [{"source": InFieldCDMLocationConfig.VIEW_ID.dump(), "properties": ["*"]}],
+                },
+            },
+        }
+
+    def _parse_retrieve_response(
+        self, parsed_response: QueryResponse[InstanceResponseItem]
+    ) -> list[InFieldCDMLocationConfig]:
+        result: list[InFieldCDMLocationConfig] = []
+        for item in parsed_response.items[self.LOCATION_REF]:
+            properties = item.get_properties_for_source(InFieldCDMLocationConfig.VIEW_ID, include_identifier=True)
+            result.append(InFieldCDMLocationConfig.model_validate(properties))
+        return result
+
+
 class InfieldAPI:
     def __init__(self, http_client: HTTPClient, console: Console) -> None:
         self._http_client = http_client
         self.config = InfieldConfigAPI(http_client, console)
+        self.cdm_config = InFieldCDMConfigAPI(http_client, console)
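
Note (editor's sketch, identifiers hypothetical and the NodeIdentifier import path assumed): the new client.infield.cdm_config API caps retrieve at 100 ids per call and apply/delete at 500 items:

from cognite_toolkit._cdf_tk.client import ToolkitClient
from cognite_toolkit._cdf_tk.client.data_classes.instance_api import NodeIdentifier


def roundtrip(client: ToolkitClient) -> None:
    ids = [NodeIdentifier(space="my_space", external_id="my_location_config")]
    configs = client.infield.cdm_config.retrieve(ids)  # at most 100 ids per call
    client.infield.cdm_config.apply(configs)           # upsert, at most 500 items
    client.infield.cdm_config.delete(configs)          # returns the deleted node ids
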