cognite-toolkit 0.6.110__py3-none-any.whl → 0.6.112__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. cognite_toolkit/_cdf_tk/apps/_download_app.py +307 -25
  2. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +67 -1
  3. cognite_toolkit/_cdf_tk/client/data_classes/base.py +25 -1
  4. cognite_toolkit/_cdf_tk/client/data_classes/infield.py +6 -21
  5. cognite_toolkit/_cdf_tk/client/data_classes/streams.py +10 -39
  6. cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +5 -1
  7. cognite_toolkit/_cdf_tk/commands/_upload.py +15 -28
  8. cognite_toolkit/_cdf_tk/constants.py +1 -0
  9. cognite_toolkit/_cdf_tk/storageio/__init__.py +4 -16
  10. cognite_toolkit/_cdf_tk/storageio/_asset_centric.py +4 -23
  11. cognite_toolkit/_cdf_tk/storageio/_base.py +3 -1
  12. cognite_toolkit/_cdf_tk/storageio/_datapoints.py +3 -1
  13. cognite_toolkit/_cdf_tk/storageio/_file_content.py +149 -0
  14. cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py +13 -1
  15. cognite_toolkit/_cdf_tk/storageio/selectors/_base.py +14 -2
  16. cognite_toolkit/_cdf_tk/storageio/selectors/_file_content.py +95 -0
  17. cognite_toolkit/_cdf_tk/utils/fileio/_readers.py +11 -3
  18. cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py +19 -1
  19. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  20. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  21. cognite_toolkit/_resources/cdf.toml +1 -1
  22. cognite_toolkit/_version.py +1 -1
  23. {cognite_toolkit-0.6.110.dist-info → cognite_toolkit-0.6.112.dist-info}/METADATA +1 -1
  24. {cognite_toolkit-0.6.110.dist-info → cognite_toolkit-0.6.112.dist-info}/RECORD +27 -25
  25. {cognite_toolkit-0.6.110.dist-info → cognite_toolkit-0.6.112.dist-info}/WHEEL +0 -0
  26. {cognite_toolkit-0.6.110.dist-info → cognite_toolkit-0.6.112.dist-info}/entry_points.txt +0 -0
  27. {cognite_toolkit-0.6.110.dist-info → cognite_toolkit-0.6.112.dist-info}/licenses/LICENSE +0 -0

cognite_toolkit/_cdf_tk/apps/_download_app.py
@@ -2,7 +2,9 @@ from enum import Enum
 from pathlib import Path
 from typing import Annotated, Any
 
+import questionary
 import typer
+from questionary import Choice
 from rich import print
 
 from cognite_toolkit._cdf_tk.client.data_classes.raw import RawTable
@@ -11,12 +13,14 @@ from cognite_toolkit._cdf_tk.constants import DATA_DEFAULT_DIR
 from cognite_toolkit._cdf_tk.storageio import (
     AssetIO,
     ChartIO,
+    EventIO,
+    FileMetadataIO,
     HierarchyIO,
     InstanceIO,
     RawIO,
+    TimeSeriesIO,
 )
 from cognite_toolkit._cdf_tk.storageio.selectors import (
-    AssetCentricSelector,
     AssetSubtreeSelector,
     ChartExternalIdSelector,
     ChartSelector,
@@ -28,11 +32,16 @@ from cognite_toolkit._cdf_tk.storageio.selectors import (
 )
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._cdf_tk.utils.interactive_select import (
+    AssetCentricInteractiveSelect,
     AssetInteractiveSelect,
     DataModelingSelect,
+    EventInteractiveSelect,
+    FileMetadataInteractiveSelect,
     InteractiveChartSelect,
     RawTableInteractiveSelect,
+    TimeSeriesInteractiveSelect,
 )
+from cognite_toolkit._cdf_tk.utils.useful_types import AssetCentricKind
 
 
 class RawFormats(str, Enum):
@@ -77,6 +86,9 @@ class DownloadApp(typer.Typer):
         self.callback(invoke_without_command=True)(self.download_main)
         self.command("raw")(self.download_raw_cmd)
         self.command("assets")(self.download_assets_cmd)
+        self.command("timeseries")(self.download_timeseries_cmd)
+        self.command("events")(self.download_events_cmd)
+        self.command("files")(self.download_files_cmd)
         self.command("hierarchy")(self.download_hierarchy_cmd)
         self.command("instances")(self.download_instances_cmd)
         self.command("charts")(self.download_charts_cmd)
@@ -178,23 +190,15 @@
             )
         )
 
-    @staticmethod
     def download_assets_cmd(
+        self,
         ctx: typer.Context,
         data_sets: Annotated[
             list[str] | None,
             typer.Option(
                 "--data-set",
                 "-d",
-                help="List of data sets to download assets from. If this and hierarchy are not provided, an interactive selection will be made.",
-            ),
-        ] = None,
-        hierarchy: Annotated[
-            list[str] | None,
-            typer.Option(
-                "--hierarchy",
-                "-r",
-                help="List of asset hierarchies to download assets from. If this and data sets are not provided, an interactive selection will be made.",
+                help="List of data sets to download assets from. If this is not provided, an interactive selection will be made.",
             ),
         ] = None,
         file_format: Annotated[
@@ -241,20 +245,17 @@
     ) -> None:
         """This command will download assets from CDF into a temporary directory."""
         client = EnvironmentVariables.create_from_environment().get_client()
-        is_interactive = not data_sets and not hierarchy
-        if is_interactive:
-            interactive = AssetInteractiveSelect(client, "download assets")
-            selector_type = interactive.select_hierarchies_or_data_sets()
-            if selector_type == "Data Set":
-                data_sets = interactive.select_data_sets()
-            else:
-                hierarchy = interactive.select_hierarchies()
-
-        selectors: list[AssetCentricSelector] = []
-        if data_sets:
-            selectors.extend([DataSetSelector(data_set_external_id=ds, kind="Assets") for ds in data_sets])
-        if hierarchy:
-            selectors.extend([AssetSubtreeSelector(hierarchy=h, kind="Assets") for h in hierarchy])
+        if data_sets is None:
+            data_sets, file_format, compression, output_dir, limit = self._asset_centric_interactive(
+                AssetInteractiveSelect(client, "download"),
+                file_format,
+                compression,
+                output_dir,
+                limit,
+                "Assets",
+            )
+
+        selectors = [DataSetSelector(kind="Assets", data_set_external_id=data_set) for data_set in data_sets]
         cmd = DownloadCommand()
         cmd.run(
             lambda: cmd.download(
@@ -268,6 +269,287 @@
             )
         )
 
+    @classmethod
+    def _asset_centric_interactive(
+        cls,
+        selector: AssetCentricInteractiveSelect,
+        file_format: AssetCentricFormats,
+        compression: CompressionFormat,
+        output_dir: Path,
+        limit: int,
+        kind: AssetCentricKind,
+    ) -> tuple[list[str], AssetCentricFormats, CompressionFormat, Path, int]:
+        data_sets = selector.select_data_sets()
+        display_name = kind.casefold() + "s"
+        file_format = questionary.select(
+            f"Select format to download the {display_name} in:",
+            choices=[Choice(title=format_.value, value=format_) for format_ in AssetCentricFormats],
+            default=file_format,
+        ).ask()
+        compression = questionary.select(
+            f"Select compression format to use when downloading the {display_name}:",
+            choices=[Choice(title=comp.value, value=comp) for comp in CompressionFormat],
+            default=compression,
+        ).ask()
+        output_dir = Path(
+            questionary.path(
+                "Where to download the assets:",
+                default=str(output_dir),
+                only_directories=True,
+            ).ask()
+        )
+        while True:
+            limit_str = questionary.text(
+                f"The maximum number of {display_name} to download from each dataset. Use -1 to download all {display_name}.",
+                default=str(limit),
+            ).ask()
+            if limit_str is None:
+                raise typer.Abort()
+            try:
+                limit = int(limit_str)
+                break
+            except ValueError:
+                print("[red]Please enter a valid integer for the limit.[/]")
+        return data_sets, file_format, compression, output_dir, limit
+
+    def download_timeseries_cmd(
+        self,
+        ctx: typer.Context,
+        data_sets: Annotated[
+            list[str] | None,
+            typer.Option(
+                "--data-set",
+                "-d",
+                help="List of data sets to download time series from. If this is not provided, an interactive selection will be made.",
+            ),
+        ] = None,
+        file_format: Annotated[
+            AssetCentricFormats,
+            typer.Option(
+                "--format",
+                "-f",
+                help="Format to download the time series in.",
+            ),
+        ] = AssetCentricFormats.csv,
+        compression: Annotated[
+            CompressionFormat,
+            typer.Option(
+                "--compression",
+                "-z",
+                help="Compression format to use when downloading the time series.",
+            ),
+        ] = CompressionFormat.none,
+        output_dir: Annotated[
+            Path,
+            typer.Option(
+                "--output-dir",
+                "-o",
+                help="Where to download the time series.",
+                allow_dash=True,
+            ),
+        ] = DEFAULT_DOWNLOAD_DIR,
+        limit: Annotated[
+            int,
+            typer.Option(
+                "--limit",
+                "-l",
+                help="The maximum number of time series to download from each dataset. Use -1 to download all time series.",
+            ),
+        ] = 100_000,
+        verbose: Annotated[
+            bool,
+            typer.Option(
+                "--verbose",
+                "-v",
+                help="Turn on to get more verbose output when running the command",
+            ),
+        ] = False,
+    ) -> None:
+        """This command will download time series from CDF into a temporary directory."""
+        client = EnvironmentVariables.create_from_environment().get_client()
+        if data_sets is None:
+            data_sets, file_format, compression, output_dir, limit = self._asset_centric_interactive(
+                TimeSeriesInteractiveSelect(client, "download"),
+                file_format,
+                compression,
+                output_dir,
+                limit,
+                "TimeSeries",
+            )
+
+        selectors = [DataSetSelector(kind="TimeSeries", data_set_external_id=data_set) for data_set in data_sets]
+        cmd = DownloadCommand()
+        cmd.run(
+            lambda: cmd.download(
+                selectors=selectors,
+                io=TimeSeriesIO(client),
+                output_dir=output_dir,
+                file_format=f".{file_format.value}",
+                compression=compression.value,
+                limit=limit if limit != -1 else None,
+                verbose=verbose,
+            )
+        )
+
+    def download_events_cmd(
+        self,
+        ctx: typer.Context,
+        data_sets: Annotated[
+            list[str] | None,
+            typer.Option(
+                "--data-set",
+                "-d",
+                help="List of data sets to download events from. If this is not provided, an interactive selection will be made.",
+            ),
+        ] = None,
+        file_format: Annotated[
+            AssetCentricFormats,
+            typer.Option(
+                "--format",
+                "-f",
+                help="Format to download the events in.",
+            ),
+        ] = AssetCentricFormats.csv,
+        compression: Annotated[
+            CompressionFormat,
+            typer.Option(
+                "--compression",
+                "-z",
+                help="Compression format to use when downloading the events.",
+            ),
+        ] = CompressionFormat.none,
+        output_dir: Annotated[
+            Path,
+            typer.Option(
+                "--output-dir",
+                "-o",
+                help="Where to download the events.",
+                allow_dash=True,
+            ),
+        ] = DEFAULT_DOWNLOAD_DIR,
+        limit: Annotated[
+            int,
+            typer.Option(
+                "--limit",
+                "-l",
+                help="The maximum number of events to download from each dataset. Use -1 to download all events.",
+            ),
+        ] = 100_000,
+        verbose: Annotated[
+            bool,
+            typer.Option(
+                "--verbose",
+                "-v",
+                help="Turn on to get more verbose output when running the command",
+            ),
+        ] = False,
+    ) -> None:
+        """This command will download events from CDF into a temporary directory."""
+        client = EnvironmentVariables.create_from_environment().get_client()
+        if data_sets is None:
+            data_sets, file_format, compression, output_dir, limit = self._asset_centric_interactive(
+                EventInteractiveSelect(client, "download"),
+                file_format,
+                compression,
+                output_dir,
+                limit,
+                "Events",
+            )
+
+        selectors = [DataSetSelector(kind="Events", data_set_external_id=data_set) for data_set in data_sets]
+        cmd = DownloadCommand()
+
+        cmd.run(
+            lambda: cmd.download(
+                selectors=selectors,
+                io=EventIO(client),
+                output_dir=output_dir,
+                file_format=f".{file_format.value}",
+                compression=compression.value,
+                limit=limit if limit != -1 else None,
+                verbose=verbose,
+            )
+        )
+
+    def download_files_cmd(
+        self,
+        ctx: typer.Context,
+        data_sets: Annotated[
+            list[str] | None,
+            typer.Option(
+                "--data-set",
+                "-d",
+                help="List of data sets to download file metadata from. If this is not provided, an interactive selection will be made.",
+            ),
+        ] = None,
+        file_format: Annotated[
+            AssetCentricFormats,
+            typer.Option(
+                "--format",
+                "-f",
+                help="Format to download the file metadata in.",
+            ),
+        ] = AssetCentricFormats.csv,
+        compression: Annotated[
+            CompressionFormat,
+            typer.Option(
+                "--compression",
+                "-z",
+                help="Compression format to use when downloading the file metadata.",
+            ),
+        ] = CompressionFormat.none,
+        output_dir: Annotated[
+            Path,
+            typer.Option(
+                "--output-dir",
+                "-o",
+                help="Where to download the file metadata.",
+                allow_dash=True,
+            ),
+        ] = DEFAULT_DOWNLOAD_DIR,
+        limit: Annotated[
+            int,
+            typer.Option(
+                "--limit",
+                "-l",
+                help="The maximum number of file metadata to download from each dataset. Use -1 to download all file metadata.",
+            ),
+        ] = 100_000,
+        verbose: Annotated[
+            bool,
+            typer.Option(
+                "--verbose",
+                "-v",
+                help="Turn on to get more verbose output when running the command",
+            ),
+        ] = False,
+    ) -> None:
+        """This command will download file metadata from CDF into a temporary directory."""
+        client = EnvironmentVariables.create_from_environment().get_client()
+        if data_sets is None:
+            data_sets, file_format, compression, output_dir, limit = self._asset_centric_interactive(
+                FileMetadataInteractiveSelect(client, "download"),
+                file_format,
+                compression,
+                output_dir,
+                limit,
+                "FileMetadata",
+            )
+
+        selectors = [DataSetSelector(kind="FileMetadata", data_set_external_id=data_set) for data_set in data_sets]
+        cmd = DownloadCommand()
+        cmd.run(
+            lambda: cmd.download(
+                selectors=selectors,
+                io=FileMetadataIO(client),
+                output_dir=output_dir,
+                file_format=f".{file_format.value}",
+                compression=compression.value,
+                limit=limit if limit != -1 else None,
+                verbose=verbose,
+            )
+        )
+
     @staticmethod
     def download_hierarchy_cmd(
         ctx: typer.Context,
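
The three new subcommands reuse one interactive helper and hand the result to the existing download pipeline, so the non-interactive path is just selector + IO class + DownloadCommand. A minimal sketch of that path for time series, pieced together only from the calls visible in this diff; the import location of DownloadCommand, the placeholder data set external ID, and the literal ".csv"/"none" format and compression strings are assumptions, not taken from the package:

    # Sketch only: import paths and literal values marked below are assumed, not confirmed by this diff.
    from pathlib import Path

    from cognite_toolkit._cdf_tk.commands import DownloadCommand  # assumed import location
    from cognite_toolkit._cdf_tk.storageio import TimeSeriesIO
    from cognite_toolkit._cdf_tk.storageio.selectors import DataSetSelector
    from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables

    client = EnvironmentVariables.create_from_environment().get_client()
    # "my_data_set" is a placeholder; the CLI collects these via --data-set or the questionary prompts.
    selectors = [DataSetSelector(kind="TimeSeries", data_set_external_id="my_data_set")]

    cmd = DownloadCommand()
    cmd.run(
        lambda: cmd.download(
            selectors=selectors,
            io=TimeSeriesIO(client),
            output_dir=Path("tmp"),
            file_format=".csv",   # assumed to match AssetCentricFormats.csv
            compression="none",   # assumed to match CompressionFormat.none
            limit=None,           # None downloads everything, mirroring limit == -1 in the command
            verbose=False,
        )
    )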

cognite_toolkit/_cdf_tk/apps/_migrate_app.py
@@ -18,7 +18,7 @@ from cognite_toolkit._cdf_tk.commands._migrate.creators import (
     InstanceSpaceCreator,
     SourceSystemCreator,
 )
-from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import AssetCentricMapper
+from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import AssetCentricMapper, ChartMapper
 from cognite_toolkit._cdf_tk.commands._migrate.migration_io import (
     AnnotationMigrationIO,
     AssetCentricMigrationIO,
@@ -28,12 +28,15 @@ from cognite_toolkit._cdf_tk.commands._migrate.selectors import (
     MigrateDataSetSelector,
     MigrationCSVFileSelector,
 )
+from cognite_toolkit._cdf_tk.storageio import ChartIO
+from cognite_toolkit._cdf_tk.storageio.selectors import ChartExternalIdSelector
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._cdf_tk.utils.cli_args import parse_view_str
 from cognite_toolkit._cdf_tk.utils.interactive_select import (
     AssetInteractiveSelect,
     DataModelingSelect,
     FileMetadataInteractiveSelect,
+    InteractiveChartSelect,
     ResourceViewMappingInteractiveSelect,
 )
 from cognite_toolkit._cdf_tk.utils.useful_types import AssetCentricKind
@@ -54,6 +57,7 @@ class MigrateApp(typer.Typer):
         self.command("files")(self.files)
         self.command("annotations")(self.annotations)
         self.command("canvas")(self.canvas)
+        self.command("charts")(self.charts)
         # Uncomment when infield v2 config migration is ready
         # self.command("infield-configs")(self.infield_configs)
 
@@ -887,6 +891,68 @@
             )
         )
 
+    @staticmethod
+    def charts(
+        ctx: typer.Context,
+        external_id: Annotated[
+            list[str] | None,
+            typer.Argument(
+                help="The external ID of the Chart to migrate. If not provided, an interactive selection will be "
+                "performed to select the Charts to migrate."
+            ),
+        ] = None,
+        log_dir: Annotated[
+            Path,
+            typer.Option(
+                "--log-dir",
+                "-l",
+                help="Path to the directory where migration logs will be stored.",
+            ),
+        ] = Path(f"migration_logs_{TODAY}"),
+        dry_run: Annotated[
+            bool,
+            typer.Option(
+                "--dry-run",
+                "-d",
+                help="If set, the migration will not be executed, but only a report of "
+                "what would be done is printed. This is useful for checking that all time series referenced by the Charts "
+                "have been migrated to the new data modeling resources in CDF.",
+            ),
+        ] = False,
+        verbose: Annotated[
+            bool,
+            typer.Option(
+                "--verbose",
+                "-v",
+                help="Turn on to get more verbose output when running the command",
+            ),
+        ] = False,
+    ) -> None:
+        """Migrate Charts from time series references to data modeling in CDF.
+
+        This command expects that the CogniteMigration data model is already deployed, and that the Mapping view
+        is populated with the mapping from time series to the new data modeling resources.
+        """
+        client = EnvironmentVariables.create_from_environment().get_client()
+
+        selected_external_ids: list[str]
+        if external_id:
+            selected_external_ids = external_id
+        else:
+            selected_external_ids = InteractiveChartSelect(client).select_external_ids()
+
+        cmd = MigrationCommand()
+        cmd.run(
+            lambda: cmd.migrate(
+                selected=ChartExternalIdSelector(external_ids=tuple(selected_external_ids)),
+                data=ChartIO(client),
+                mapper=ChartMapper(client),
+                log_dir=log_dir,
+                dry_run=dry_run,
+                verbose=verbose,
+            )
+        )
+
     @staticmethod
     def infield_configs(
         ctx: typer.Context,
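
For the new charts migration, the wiring is the same selector/IO/mapper triple handed to MigrationCommand. A dry-run sketch under stated assumptions: MigrationCommand's import location is not shown in these hunks and is assumed, and "my-chart" is a placeholder external ID; the keyword arguments themselves mirror the migrate() call in the diff:

    # Sketch only: MigrationCommand import path is assumed; "my-chart" is a placeholder.
    from pathlib import Path

    from cognite_toolkit._cdf_tk.commands import MigrationCommand  # assumed import location
    from cognite_toolkit._cdf_tk.commands._migrate.data_mapper import ChartMapper
    from cognite_toolkit._cdf_tk.storageio import ChartIO
    from cognite_toolkit._cdf_tk.storageio.selectors import ChartExternalIdSelector
    from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables

    client = EnvironmentVariables.create_from_environment().get_client()
    cmd = MigrationCommand()
    cmd.run(
        lambda: cmd.migrate(
            selected=ChartExternalIdSelector(external_ids=("my-chart",)),
            data=ChartIO(client),
            mapper=ChartMapper(client),
            log_dir=Path("migration_logs"),
            dry_run=True,   # report only; nothing is written until the run looks correct
            verbose=False,
        )
    )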

cognite_toolkit/_cdf_tk/client/data_classes/base.py
@@ -1,10 +1,14 @@
 import sys
 from abc import ABC, abstractmethod
-from typing import Any, Generic, TypeVar
+from collections import UserList
+from typing import TYPE_CHECKING, Any, Generic, TypeVar
 
 from pydantic import BaseModel, ConfigDict
 from pydantic.alias_generators import to_camel
 
+if TYPE_CHECKING:
+    from cognite.client import CogniteClient
+
 if sys.version_info >= (3, 11):
     from typing import Self
 else:
@@ -61,3 +65,23 @@ class Identifier(BaseModel):
 
     def as_id(self) -> Self:
         return self
+
+
+T_Resource = TypeVar("T_Resource", bound=RequestResource | ResponseResource)
+
+
+class BaseResourceList(UserList[T_Resource]):
+    """Base class for resource lists."""
+
+    _RESOURCE: type[T_Resource]
+
+    def __init__(self, initlist: list[T_Resource] | None = None, **_: Any) -> None:
+        super().__init__(initlist or [])
+
+    def dump(self, camel_case: bool = True) -> list[dict[str, Any]]:
+        return [item.dump(camel_case) for item in self.data]
+
+    @classmethod
+    def load(cls, data: list[dict[str, Any]], cognite_client: "CogniteClient | None" = None) -> Self:
+        items = [cls._RESOURCE.model_validate(item) for item in data]
+        return cls(items)  # type: ignore[arg-type]
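
The point of BaseResourceList is that concrete list classes now only declare _RESOURCE and inherit __init__, dump() and load(); the infield.py and streams.py hunks below simply delete their duplicated copies. A hypothetical subclass, sketched under the assumption that RequestResource only needs an as_id() implementation beyond its pydantic fields (its full interface is not part of this diff); WidgetRequest and WidgetRequestList are illustrative names, not part of the package:

    from cognite_toolkit._cdf_tk.client.data_classes.base import BaseResourceList, RequestResource


    class WidgetRequest(RequestResource):  # hypothetical resource, for illustration only
        external_id: str

        def as_id(self) -> str:
            return self.external_id


    class WidgetRequestList(BaseResourceList[WidgetRequest]):
        """Only _RESOURCE is declared; list construction, dump() and load() come from the base class."""

        _RESOURCE = WidgetRequest


    # load() validates each dict with WidgetRequest.model_validate; dump() is expected to
    # round-trip to camelCase dicts given the to_camel alias generator in base.py.
    widgets = WidgetRequestList.load([{"externalId": "w-1"}, {"externalId": "w-2"}])
    print(widgets.dump(camel_case=True))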

cognite_toolkit/_cdf_tk/client/data_classes/infield.py
@@ -1,15 +1,16 @@
 import sys
-from collections import UserList
 from typing import Any, ClassVar, Literal
 
-from cognite.client import CogniteClient
 from pydantic import JsonValue, field_validator
 from pydantic_core.core_schema import ValidationInfo
 
-from cognite_toolkit._cdf_tk.protocols import ResourceRequestListProtocol, ResourceResponseListProtocol
+from cognite_toolkit._cdf_tk.protocols import (
+    ResourceRequestListProtocol,
+    ResourceResponseListProtocol,
+)
 from cognite_toolkit._cdf_tk.utils.text import sanitize_instance_external_id
 
-from .base import ResponseResource
+from .base import BaseResourceList, ResponseResource
 from .instance_api import InstanceRequestResource, ViewReference
 
 if sys.version_info >= (3, 11):
@@ -74,29 +75,13 @@ class InfieldLocationConfig(
 
 
 class InfieldLocationConfigList(
-    UserList[InfieldLocationConfig],
+    BaseResourceList[InfieldLocationConfig],
     ResourceResponseListProtocol,
     ResourceRequestListProtocol,
 ):
     """A list of InfieldLocationConfig objects."""
 
     _RESOURCE = InfieldLocationConfig
-    data: list[InfieldLocationConfig]
-
-    def __init__(self, initlist: list[InfieldLocationConfig] | None = None, **_: Any) -> None:
-        super().__init__(initlist or [])
-
-    def dump(self, camel_case: bool = True) -> list[dict[str, Any]]:
-        """Serialize the list of InfieldLocationConfig objects to a list of dictionaries."""
-        return [item.dump(camel_case) for item in self.data]
-
-    @classmethod
-    def load(
-        cls, data: list[dict[str, Any]], cognite_client: CogniteClient | None = None
-    ) -> "InfieldLocationConfigList":
-        """Deserialize a list of dictionaries to an InfieldLocationConfigList."""
-        items = [InfieldLocationConfig.model_validate(item) for item in data]
-        return cls(items)
 
     def as_write(self) -> Self:
         return self

cognite_toolkit/_cdf_tk/client/data_classes/streams.py
@@ -1,18 +1,12 @@
-import sys
-from collections import UserList
-from typing import Any, Literal
-
-from cognite.client import CogniteClient
+from typing import Literal
 
 from cognite_toolkit._cdf_tk.constants import StreamTemplateName
-from cognite_toolkit._cdf_tk.protocols import ResourceRequestListProtocol, ResourceResponseListProtocol
-
-from .base import BaseModelObject, RequestResource, ResponseResource
+from cognite_toolkit._cdf_tk.protocols import (
+    ResourceRequestListProtocol,
+    ResourceResponseListProtocol,
+)
 
-if sys.version_info >= (3, 11):
-    from typing import Self
-else:
-    from typing_extensions import Self
+from .base import BaseModelObject, BaseResourceList, RequestResource, ResponseResource
 
 
 class StreamRequest(RequestResource):
@@ -25,22 +19,11 @@ class StreamRequest(RequestResource):
         return self.external_id
 
 
-class StreamRequestList(UserList[StreamRequest], ResourceRequestListProtocol):
+class StreamRequestList(BaseResourceList[StreamRequest], ResourceRequestListProtocol):
     """List of Stream request resources."""
 
     _RESOURCE = StreamRequest
 
-    def __init__(self, initlist: list[StreamRequest] | None = None, **_: Any) -> None:
-        super().__init__(initlist or [])
-
-    def dump(self, camel_case: bool = True) -> list[dict[str, Any]]:
-        return [item.dump(camel_case) for item in self.data]
-
-    @classmethod
-    def load(cls, data: list[dict[str, Any]], cognite_client: CogniteClient | None = None) -> "StreamRequestList":
-        items = [StreamRequest.model_validate(item) for item in data]
-        return cls(items)
-
 
 class LifecycleObject(BaseModelObject):
     """Lifecycle object."""
@@ -98,22 +81,10 @@ class StreamResponse(ResponseResource["StreamRequest"]):
         )
 
 
-class StreamResponseList(UserList[StreamResponse], ResourceResponseListProtocol):
+class StreamResponseList(BaseResourceList[StreamResponse], ResourceResponseListProtocol):
     """List of Stream response resources."""
 
     _RESOURCE = StreamResponse
-    data: list[StreamResponse]
-
-    def __init__(self, initlist: list[StreamResponse] | None = None, **_: Any) -> None:
-        super().__init__(initlist or [])
-
-    def dump(self, camel_case: bool = True) -> list[dict[str, Any]]:
-        return [item.dump(camel_case) for item in self.data]
-
-    @classmethod
-    def load(cls, data: list[dict[str, Any]], cognite_client: CogniteClient | None = None) -> "StreamResponseList":
-        items = [StreamResponse.model_validate(item) for item in data]
-        return cls(items)
 
-    def as_write(self) -> Self:
-        return self
+    def as_write(self) -> StreamRequestList:
+        return StreamRequestList([item.as_write() for item in self.data])

cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py
@@ -17,7 +17,11 @@ from cognite.client.data_classes.data_modeling import (
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.client.data_classes.charts import Chart, ChartWrite
-from cognite_toolkit._cdf_tk.client.data_classes.charts_data import ChartCoreTimeseries, ChartSource, ChartTimeseries
+from cognite_toolkit._cdf_tk.client.data_classes.charts_data import (
+    ChartCoreTimeseries,
+    ChartSource,
+    ChartTimeseries,
+)
 from cognite_toolkit._cdf_tk.client.data_classes.migration import ResourceViewMappingApply
 from cognite_toolkit._cdf_tk.commands._migrate.conversion import DirectRelationCache, asset_centric_to_dm
 from cognite_toolkit._cdf_tk.commands._migrate.data_classes import AssetCentricMapping