cognite-toolkit 0.6.78__py3-none-any.whl → 0.6.80__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of cognite-toolkit might be problematic.
Files changed (35)
  1. cognite_toolkit/_builtin_modules/cdf.toml +1 -1
  2. cognite_toolkit/_cdf.py +0 -4
  3. cognite_toolkit/_cdf_tk/apps/__init__.py +0 -2
  4. cognite_toolkit/_cdf_tk/apps/_migrate_app.py +134 -21
  5. cognite_toolkit/_cdf_tk/apps/_modules_app.py +27 -0
  6. cognite_toolkit/_cdf_tk/commands/__init__.py +0 -6
  7. cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py +0 -4
  8. cognite_toolkit/_cdf_tk/commands/_migrate/command.py +6 -8
  9. cognite_toolkit/_cdf_tk/commands/_migrate/creators.py +26 -2
  10. cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +42 -8
  11. cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +4 -6
  12. cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py +1 -0
  13. cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +203 -0
  14. cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py +66 -0
  15. cognite_toolkit/_cdf_tk/commands/modules.py +59 -14
  16. cognite_toolkit/_cdf_tk/constants.py +3 -0
  17. cognite_toolkit/_cdf_tk/feature_flags.py +0 -4
  18. cognite_toolkit/_cdf_tk/storageio/_asset_centric.py +6 -6
  19. cognite_toolkit/_cdf_tk/storageio/_base.py +2 -5
  20. cognite_toolkit/_cdf_tk/utils/useful_types.py +3 -1
  21. cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
  22. cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
  23. cognite_toolkit/_resources/cdf.toml +13 -0
  24. cognite_toolkit/_version.py +1 -1
  25. {cognite_toolkit-0.6.78.dist-info → cognite_toolkit-0.6.80.dist-info}/METADATA +1 -1
  26. {cognite_toolkit-0.6.78.dist-info → cognite_toolkit-0.6.80.dist-info}/RECORD +29 -32
  27. cognite_toolkit/_cdf_tk/apps/_populate_app.py +0 -80
  28. cognite_toolkit/_cdf_tk/commands/_migrate/adapter.py +0 -368
  29. cognite_toolkit/_cdf_tk/commands/_migrate/assets.py +0 -0
  30. cognite_toolkit/_cdf_tk/commands/_migrate/files.py +0 -165
  31. cognite_toolkit/_cdf_tk/commands/_migrate/timeseries.py +0 -165
  32. cognite_toolkit/_cdf_tk/commands/_populate.py +0 -306
  33. {cognite_toolkit-0.6.78.dist-info → cognite_toolkit-0.6.80.dist-info}/WHEEL +0 -0
  34. {cognite_toolkit-0.6.78.dist-info → cognite_toolkit-0.6.80.dist-info}/entry_points.txt +0 -0
  35. {cognite_toolkit-0.6.78.dist-info → cognite_toolkit-0.6.80.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py (new file)
@@ -0,0 +1,203 @@
+ from collections.abc import Iterator, Mapping, Sequence
+ from typing import ClassVar, cast
+
+ from cognite.client.data_classes._base import (
+     T_WritableCogniteResource,
+ )
+ from cognite.client.data_classes.data_modeling import InstanceApply, NodeId
+
+ from cognite_toolkit._cdf_tk.client import ToolkitClient
+ from cognite_toolkit._cdf_tk.client.data_classes.pending_instances_ids import PendingInstanceId
+ from cognite_toolkit._cdf_tk.constants import MISSING_EXTERNAL_ID, MISSING_INSTANCE_SPACE
+ from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
+ from cognite_toolkit._cdf_tk.storageio import (
+     HierarchyIO,
+     InstanceIO,
+     UploadableStorageIO,
+ )
+ from cognite_toolkit._cdf_tk.storageio._base import Page, UploadItem
+ from cognite_toolkit._cdf_tk.utils.collection import chunker_sequence
+ from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, HTTPMessage, ItemsRequest, SuccessResponseItems
+ from cognite_toolkit._cdf_tk.utils.useful_types import (
+     AssetCentricKind,
+     AssetCentricType,
+     JsonVal,
+ )
+
+ from .data_classes import AssetCentricMapping, AssetCentricMappingList, MigrationMapping, MigrationMappingList
+ from .data_model import INSTANCE_SOURCE_VIEW_ID
+ from .selectors import AssetCentricMigrationSelector, MigrateDataSetSelector, MigrationCSVFileSelector
+
+
+ class AssetCentricMigrationIO(
+     UploadableStorageIO[AssetCentricMigrationSelector, AssetCentricMapping[T_WritableCogniteResource], InstanceApply]
+ ):
+     KIND = "AssetCentricMigration"
+     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".parquet", ".csv", ".ndjson"})
+     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
+     SUPPORTED_READ_FORMATS = frozenset({".parquet", ".csv", ".ndjson", ".yaml", ".yml"})
+     CHUNK_SIZE = 1000
+     UPLOAD_ENDPOINT = InstanceIO.UPLOAD_ENDPOINT
+
+     PENDING_INSTANCE_ID_ENDPOINT_BY_KIND: ClassVar[Mapping[AssetCentricKind, str]] = {
+         "TimeSeries": "/timeseries/set-pending-instance-ids",
+         "FileMetadata": "/files/set-pending-instance-ids",
+     }
+
+     def __init__(self, client: ToolkitClient, skip_linking: bool = True) -> None:
+         super().__init__(client)
+         self.hierarchy = HierarchyIO(client)
+         self.skip_linking = skip_linking
+
+     def as_id(self, item: AssetCentricMapping) -> str:
+         return f"{item.mapping.resource_type}_{item.mapping.id}"
+
+     def stream_data(self, selector: AssetCentricMigrationSelector, limit: int | None = None) -> Iterator[Page]:
+         if isinstance(selector, MigrationCSVFileSelector):
+             iterator = self._stream_from_csv(selector, limit)
+         elif isinstance(selector, MigrateDataSetSelector):
+             iterator = self._stream_given_dataset(selector, limit)
+         else:
+             raise ToolkitNotImplementedError(f"Selector {type(selector)} is not supported for stream_data")
+         yield from (Page(worker_id="main", items=items) for items in iterator)
+
+     def _stream_from_csv(
+         self, selector: MigrationCSVFileSelector, limit: int | None = None
+     ) -> Iterator[Sequence[AssetCentricMapping[T_WritableCogniteResource]]]:
+         items = selector.items
+         if limit is not None:
+             items = MigrationMappingList(items[:limit])
+         chunk: list[AssetCentricMapping[T_WritableCogniteResource]] = []
+         for current_batch in chunker_sequence(items, self.CHUNK_SIZE):
+             resources = self.hierarchy.get_resource_io(selector.kind).retrieve(current_batch.get_ids())
+             for mapping, resource in zip(current_batch, resources, strict=True):
+                 chunk.append(AssetCentricMapping(mapping=mapping, resource=resource))
+             if chunk:
+                 yield chunk
+                 chunk = []
+
+     def count(self, selector: AssetCentricMigrationSelector) -> int | None:
+         if isinstance(selector, MigrationCSVFileSelector):
+             return len(selector.items)
+         elif isinstance(selector, MigrateDataSetSelector):
+             return self.hierarchy.count(selector.as_asset_centric_selector())
+         else:
+             raise ToolkitNotImplementedError(f"Selector {type(selector)} is not supported for count")
+
+     def _stream_given_dataset(
+         self, selector: MigrateDataSetSelector, limit: int | None = None
+     ) -> Iterator[Sequence[AssetCentricMapping[T_WritableCogniteResource]]]:
+         asset_centric_selector = selector.as_asset_centric_selector()
+         for data_chunk in self.hierarchy.stream_data(asset_centric_selector, limit):
+             mapping_list = AssetCentricMappingList[T_WritableCogniteResource]([])
+             for resource in data_chunk.items:
+                 # We know data_set_id is here as we are using a DataSetSelector
+                 data_set_id = cast(int, resource.data_set_id)
+                 space_source = self.client.migration.space_source.retrieve(data_set_id=data_set_id)
+                 instance_space = space_source.instance_space if space_source else None
+                 if instance_space is None:
+                     instance_space = MISSING_INSTANCE_SPACE
+                 external_id = resource.external_id
+                 if external_id is None:
+                     external_id = MISSING_EXTERNAL_ID.format(project=self.client.config.project, id=resource.id)
+                 mapping = MigrationMapping(
+                     resource_type=self._kind_to_resource_type(selector.kind),
+                     instance_id=NodeId(
+                         space=instance_space,
+                         external_id=external_id,
+                     ),
+                     id=resource.id,
+                     data_set_id=resource.data_set_id,
+                     ingestion_view=selector.ingestion_mapping,
+                     preferred_consumer_view=selector.preferred_consumer_view,
+                 )
+                 mapping_list.append(AssetCentricMapping(mapping=mapping, resource=resource))
+             yield mapping_list
+
+     @staticmethod
+     def _kind_to_resource_type(kind: AssetCentricKind) -> AssetCentricType:
+         mapping: dict[AssetCentricKind, AssetCentricType] = {
+             "Assets": "asset",
+             "Events": "event",
+             "TimeSeries": "timeseries",
+             "FileMetadata": "file",
+         }
+         try:
+             return mapping[kind]
+         except KeyError as e:
+             raise ToolkitNotImplementedError(f"Kind '{kind}' is not supported") from e
+
+     def data_to_json_chunk(
+         self,
+         data_chunk: Sequence[AssetCentricMapping[T_WritableCogniteResource]],
+         selector: AssetCentricMigrationSelector | None = None,
+     ) -> list[dict[str, JsonVal]]:
+         return [item.dump() for item in data_chunk]
+
+     def json_to_resource(self, item_json: dict[str, JsonVal]) -> InstanceApply:
+         raise NotImplementedError()
+
+     def upload_items(
+         self,
+         data_chunk: Sequence[UploadItem[InstanceApply]],
+         http_client: HTTPClient,
+         selector: AssetCentricMigrationSelector | None = None,
+     ) -> Sequence[HTTPMessage]:
+         """Upload items by first linking them using files/set-pending-instance-ids and then uploading the instances."""
+         if self.skip_linking:
+             return list(super().upload_items(data_chunk, http_client, None))
+         elif selector is None:
+             raise ToolkitNotImplementedError(f"Selector must be provided for uploading {self.KIND} items.")
+         elif selector.kind not in self.PENDING_INSTANCE_ID_ENDPOINT_BY_KIND:
+             return list(super().upload_items(data_chunk, http_client, None))
+
+         pending_instance_id_endpoint = self.PENDING_INSTANCE_ID_ENDPOINT_BY_KIND[selector.kind]
+         results: list[HTTPMessage] = []
+         to_upload = self.link_asset_centric(data_chunk, http_client, results, pending_instance_id_endpoint)
+         if to_upload:
+             results.extend(list(super().upload_items(to_upload, http_client, None)))
+         return results
+
+     @classmethod
+     def link_asset_centric(
+         cls,
+         data_chunk: Sequence[UploadItem[InstanceApply]],
+         http_client: HTTPClient,
+         results: list[HTTPMessage],
+         pending_instance_id_endpoint: str,
+     ) -> Sequence[UploadItem[InstanceApply]]:
+         """Links asset-centric resources to their (uncreated) instances using the pending-instance-ids endpoint."""
+         config = http_client.config
+         successful_linked: set[str] = set()
+         for batch in chunker_sequence(data_chunk, cls.CHUNK_SIZE):
+             batch_results = http_client.request_with_retries(
+                 message=ItemsRequest(
+                     endpoint_url=config.create_api_url(pending_instance_id_endpoint),
+                     method="POST",
+                     api_version="alpha",
+                     items=[
+                         UploadItem(source_id=item.source_id, item=cls.as_pending_instance_id(item.item))
+                         for item in batch
+                     ],
+                 )
+             )
+             for res in batch_results:
+                 if isinstance(res, SuccessResponseItems):
+                     successful_linked.update(res.ids)
+             results.extend(batch_results)
+         to_upload = [item for item in data_chunk if item.source_id in successful_linked]
+         return to_upload
+
+     @staticmethod
+     def as_pending_instance_id(item: InstanceApply) -> PendingInstanceId:
+         """Convert an InstanceApply to a PendingInstanceId for linking."""
+         source = next((source for source in item.sources if source.source == INSTANCE_SOURCE_VIEW_ID), None)
+         if source is None:
+             raise ValueError(f"Cannot extract ID from item of type {type(item).__name__!r}")
+         if not isinstance(source.properties["id"], int):
+             raise ValueError(f"Unexpected ID type: {type(source.properties['id']).__name__!r}")
+         id_ = source.properties["id"]
+         return PendingInstanceId(
+             pending_instance_id=NodeId(item.space, item.external_id),
+             id=id_,
+         )
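
For orientation, here is a minimal sketch of how this new IO class could be driven end to end. The class and selector names come from the diff above; the client construction, CSV path, and printed values are illustrative assumptions.

from pathlib import Path

from cognite_toolkit._cdf_tk.client import ToolkitClient
from cognite_toolkit._cdf_tk.commands._migrate.migration_io import AssetCentricMigrationIO
from cognite_toolkit._cdf_tk.commands._migrate.selectors import MigrationCSVFileSelector

client = ToolkitClient()  # assumes ambient CDF credentials; illustrative only
# skip_linking=False enables the two-phase flow: link each resource via the
# set-pending-instance-ids alpha endpoint, then create the instances.
io = AssetCentricMigrationIO(client, skip_linking=False)
selector = MigrationCSVFileSelector(kind="TimeSeries", datafile=Path("mappings.csv"))

print(f"Mappings to migrate: {io.count(selector)}")
for page in io.stream_data(selector, limit=100):
    for item in page.items:
        print(io.as_id(item))  # e.g. "timeseries_123"
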
cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py (new file)
@@ -0,0 +1,66 @@
+ from abc import ABC, abstractmethod
+ from functools import cached_property
+ from pathlib import Path
+ from typing import Literal
+
+ from cognite.client.data_classes.data_modeling import ViewId
+
+ from cognite_toolkit._cdf_tk.commands._migrate.data_classes import MigrationMappingList
+ from cognite_toolkit._cdf_tk.storageio import DataSelector
+ from cognite_toolkit._cdf_tk.storageio.selectors import DataSetSelector
+ from cognite_toolkit._cdf_tk.utils.useful_types import AssetCentricKind
+
+
+ class AssetCentricMigrationSelector(DataSelector, ABC):
+     kind: AssetCentricKind
+
+     @abstractmethod
+     def get_ingestion_mappings(self) -> list[str]:
+         raise NotImplementedError()
+
+
+ class MigrationCSVFileSelector(AssetCentricMigrationSelector):
+     type: Literal["migrationCSVFile"] = "migrationCSVFile"
+     datafile: Path
+
+     @property
+     def group(self) -> str:
+         return f"Migration_{self.kind}"
+
+     def __str__(self) -> str:
+         return f"file_{self.datafile.name}"
+
+     def get_ingestion_mappings(self) -> list[str]:
+         views = {item.get_ingestion_view() for item in self.items}
+         return sorted(views)
+
+     @cached_property
+     def items(self) -> MigrationMappingList:
+         return MigrationMappingList.read_csv_file(self.datafile, resource_type=self.kind)
+
+
+ class MigrateDataSetSelector(AssetCentricMigrationSelector):
+     type: Literal["migrateDataSet"] = "migrateDataSet"
+     kind: AssetCentricKind
+     data_set_external_id: str
+     ingestion_mapping: str | None = None
+     preferred_consumer_view: ViewId | None = None
+
+     @property
+     def group(self) -> str:
+         return f"DataSet_{self.data_set_external_id}"
+
+     def __str__(self) -> str:
+         return self.kind
+
+     def get_schema_spaces(self) -> list[str] | None:
+         return None
+
+     def get_instance_spaces(self) -> list[str] | None:
+         return None
+
+     def get_ingestion_mappings(self) -> list[str]:
+         return [self.ingestion_mapping] if self.ingestion_mapping else []
+
+     def as_asset_centric_selector(self) -> DataSetSelector:
+         return DataSetSelector(data_set_external_id=self.data_set_external_id, kind=self.kind)
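
A short, hypothetical construction of the new data-set selector; the field names are taken from the class definitions above, while the concrete values are made up.

from cognite.client.data_classes.data_modeling import ViewId

from cognite_toolkit._cdf_tk.commands._migrate.selectors import MigrateDataSetSelector

selector = MigrateDataSetSelector(
    kind="TimeSeries",
    data_set_external_id="ds_pump_data",
    preferred_consumer_view=ViewId("cdf_cdm", "CogniteTimeSeries", "v1"),
)
assert selector.group == "DataSet_ds_pump_data"
assert str(selector) == "TimeSeries"
# Counting and streaming delegate to the existing asset-centric machinery:
asset_centric = selector.as_asset_centric_selector()
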
cognite_toolkit/_cdf_tk/commands/modules.py
@@ -4,7 +4,6 @@ import tempfile
  import zipfile
  from collections import Counter
  from hashlib import sha256
- from importlib import resources
  from pathlib import Path
  from types import TracebackType
  from typing import Any, Literal
@@ -23,8 +22,7 @@ from rich.rule import Rule
  from rich.table import Table
  from rich.tree import Tree

- import cognite_toolkit
- from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
+ from cognite_toolkit._cdf_tk.cdf_toml import CDFToml, Library
  from cognite_toolkit._cdf_tk.commands import _cli_commands as CLICommands
  from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
  from cognite_toolkit._cdf_tk.commands._changes import (
@@ -37,8 +35,8 @@ from cognite_toolkit._cdf_tk.commands._changes import (
      UpdateModuleVersion,
  )
  from cognite_toolkit._cdf_tk.constants import (
-     BUILTIN_MODULES,
      MODULES,
+     RESOURCES_PATH,
      SUPPORT_MODULE_UPGRADE_FROM_VERSION,
      EnvType,
  )
@@ -92,9 +90,15 @@ _FILE_DOWNLOADERS_BY_TYPE: dict[str, type[FileDownloader]] = {


  class ModulesCommand(ToolkitCommand):
-     def __init__(self, print_warning: bool = True, skip_tracking: bool = False, silent: bool = False):
+     def __init__(
+         self,
+         print_warning: bool = True,
+         skip_tracking: bool = False,
+         silent: bool = False,
+         module_source_dir: Path | None = None,
+     ):
          super().__init__(print_warning, skip_tracking, silent)
-         self._builtin_modules_path = Path(resources.files(cognite_toolkit.__name__)) / BUILTIN_MODULES  # type: ignore [arg-type]
+         self._module_source_dir: Path | None = module_source_dir
          self._temp_download_dir = Path(tempfile.gettempdir()) / MODULES
          if not self._temp_download_dir.exists():
              self._temp_download_dir.mkdir(parents=True, exist_ok=True)
@@ -282,7 +286,7 @@ class ModulesCommand(ToolkitCommand):
          destination.write_text(cdf_toml_content, encoding="utf-8")

      def create_cdf_toml(self, organization_dir: Path, env: EnvType = "dev") -> str:
-         cdf_toml_content = safe_read(self._builtin_modules_path / CDFToml.file_name)
+         cdf_toml_content = safe_read(RESOURCES_PATH / CDFToml.file_name)
          if organization_dir != Path.cwd():
              cdf_toml_content = cdf_toml_content.replace(
                  "#<PLACEHOLDER>",
@@ -302,6 +306,8 @@ default_organization_dir = "{organization_dir.name}"''',
          user_select: str | None = None,
          user_environments: list[str] | None = None,
          user_download_data: bool | None = None,
+         library_url: str | None = None,
+         library_checksum: str | None = None,
      ) -> None:
          if not organization_dir:
              new_line = "\n "
@@ -315,7 +321,16 @@ default_organization_dir = "{organization_dir.name}"''',

          modules_root_dir = organization_dir / MODULES

-         packages, modules_source_path = self._get_available_packages()
+         if library_url:
+             if not library_checksum:
+                 raise ToolkitRequiredValueError(
+                     "The '--library-checksum' is required when '--library-url' is provided."
+                 )
+
+             user_library = Library(url=library_url, checksum=library_checksum)
+             packages, modules_source_path = self._get_available_packages(user_library)
+         else:
+             packages, modules_source_path = self._get_available_packages()

          if select_all:
              print(Panel("Instantiating all available modules"))
@@ -745,16 +760,22 @@ default_organization_dir = "{organization_dir.name}"''',
              modules_source_path=modules_source_path,
          )

-     def _get_available_packages(self) -> tuple[Packages, Path]:
+     def _get_available_packages(self, user_library: Library | None = None) -> tuple[Packages, Path]:
          """
          Returns a list of available packages, either from the CDF TOML file or from external libraries if the feature flag is enabled.
          If the feature flag is not enabled and no libraries are specified, it returns the built-in modules.
          """

          cdf_toml = CDFToml.load()
-         if Flags.EXTERNAL_LIBRARIES.is_enabled() and cdf_toml.libraries:
-             for library_name, library in cdf_toml.libraries.items():
+         if Flags.EXTERNAL_LIBRARIES.is_enabled() or user_library:
+             libraries = {"userdefined": user_library} if user_library else cdf_toml.libraries
+
+             for library_name, library in libraries.items():
                  try:
+                     additional_tracking_info = self._additional_tracking_info.setdefault("downloadedLibraryIds", [])
+                     if library_name not in additional_tracking_info:
+                         additional_tracking_info.append(library_name)
+
                      print(f"[green]Adding library {library_name} from {library.url}[/]")
                      # Extract filename from URL, fallback to library_name.zip if no filename found
                      from urllib.parse import urlparse
@@ -772,6 +793,18 @@ default_organization_dir = "{organization_dir.name}"''',
                      for warning in packages.warnings:
                          self.warn(warning)
                      self._validate_packages(packages, f"library {library_name}")
+
+                     # Track deployment pack download for each package and module
+                     for package in packages.values():
+                         downloaded_package_ids = self._additional_tracking_info.setdefault("downloadedPackageIds", [])
+                         if package.id and package.id not in downloaded_package_ids:
+                             downloaded_package_ids.append(package.id)
+
+                         downloaded_module_ids = self._additional_tracking_info.setdefault("downloadedModuleIds", [])
+                         for module in package.modules:
+                             if module.module_id and module.module_id not in downloaded_module_ids:
+                                 downloaded_module_ids.append(module.module_id)
+
                      return packages, file_path.parent
                  except Exception as e:
                      if isinstance(e, ToolkitError):
@@ -785,9 +818,21 @@ default_organization_dir = "{organization_dir.name}"''',
              # If no libraries are specified or the flag is not enabled, load the built-in modules
              raise ValueError("No valid libraries found.")
          else:
-             packages = Packages.load(self._builtin_modules_path)
-             self._validate_packages(packages, "built-in modules")
-             return packages, self._builtin_modules_path
+             if user_library:
+                 self.warn(
+                     MediumSeverityWarning(
+                         "External library provided but not enabled in cdf.toml. Please enable the feature flag and try again."
+                     )
+                 )
+
+             if self._module_source_dir is None:
+                 self.warn(
+                     MediumSeverityWarning("No external libraries and no local module source directory specified.")
+                 )
+                 return Packages(), Path(".")
+             packages = Packages.load(self._module_source_dir)
+             self._validate_packages(packages, self._module_source_dir.name)
+             return packages, self._module_source_dir

      def _validate_packages(self, packages: Packages, source_name: str) -> None:
          """
cognite_toolkit/_cdf_tk/constants.py
@@ -38,6 +38,7 @@ MODULES = "modules"
  REPO_FILES_DIR = "_repo_files"
  DOCKER_IMAGE_NAME = "cognite/toolkit"
  BUILD_FOLDER_ENCODING = "utf-8"
+ RESOURCES = "_resources"

  ROOT_MODULES = [MODULES, CUSTOM_MODULES, COGNITE_MODULES, EXTERNAL_PACKAGE]
  MODULE_PATH_SEP = "/"
@@ -61,6 +62,7 @@ ROOT_PATH = Path(__file__).parent.parent
  COGNITE_MODULES_PATH = ROOT_PATH / COGNITE_MODULES
  MODULES_PATH = ROOT_PATH / MODULES
  BUILTIN_MODULES_PATH = ROOT_PATH / BUILTIN_MODULES
+ RESOURCES_PATH = ROOT_PATH / RESOURCES
  SUPPORT_MODULE_UPGRADE_FROM_VERSION = "0.2.0"
  # This is used in the build directory to keep track of order and flatten the
  # module directory structure with accounting for duplicated names.
@@ -168,3 +170,4 @@ DATA_MANIFEST_STEM = "Manifest"

  # Migration Constants
  MISSING_INSTANCE_SPACE = "<InstanceSpaceMissing>"
+ MISSING_EXTERNAL_ID = "INTERNAL_ID_project_{project}_id_{id}"
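
The new MISSING_EXTERNAL_ID template is filled in by _stream_given_dataset (in the new migration_io.py above) when a resource lacks an external ID of its own:

MISSING_EXTERNAL_ID = "INTERNAL_ID_project_{project}_id_{id}"
print(MISSING_EXTERNAL_ID.format(project="my-project", id=42))
# INTERNAL_ID_project_my-project_id_42
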
cognite_toolkit/_cdf_tk/feature_flags.py
@@ -29,10 +29,6 @@ class Flags(Enum):
          visible=True,
          description="Enables the support for repeating modules in the config file",
      )
-     POPULATE = FlagMetadata(
-         visible=True,
-         description="Enables support for the populate command",
-     )
      AGENTS = FlagMetadata(
          visible=True,
          description="Enables support for Atlas Agents and Agent Tools",
cognite_toolkit/_cdf_tk/storageio/_asset_centric.py
@@ -508,20 +508,20 @@ class HierarchyIO(ConfigurableStorageIO[AssetCentricSelector, AssetCentricResour
      def stream_data(
          self, selector: AssetCentricSelector, limit: int | None = None
      ) -> Iterable[Page[AssetCentricResource]]:
-         yield from self._get_io(selector).stream_data(selector, limit)
+         yield from self.get_resource_io(selector.kind).stream_data(selector, limit)

      def count(self, selector: AssetCentricSelector) -> int | None:
-         return self._get_io(selector).count(selector)
+         return self.get_resource_io(selector.kind).count(selector)

      def data_to_json_chunk(
          self, data_chunk: Sequence[AssetCentricResource], selector: AssetCentricSelector | None = None
      ) -> list[dict[str, JsonVal]]:
          if selector is None:
              raise ValueError(f"Selector must be provided to convert data to JSON chunk for {type(self).__name__}.)")
-         return self._get_io(selector).data_to_json_chunk(data_chunk, selector)
+         return self.get_resource_io(selector.kind).data_to_json_chunk(data_chunk, selector)

      def configurations(self, selector: AssetCentricSelector) -> Iterable[StorageIOConfig]:
-         yield from self._get_io(selector).configurations(selector)
+         yield from self.get_resource_io(selector.kind).configurations(selector)

-     def _get_io(self, selector: AssetCentricSelector) -> BaseAssetCentricIO:
-         return self._io_by_kind[selector.kind]
+     def get_resource_io(self, kind: str) -> BaseAssetCentricIO:
+         return self._io_by_kind[kind]
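
The effect of the rename in a small hypothetical snippet, assuming a configured ToolkitClient:

from cognite_toolkit._cdf_tk.client import ToolkitClient
from cognite_toolkit._cdf_tk.storageio import HierarchyIO

hierarchy = HierarchyIO(ToolkitClient())
# Previously private and selector-based: hierarchy._get_io(selector).
# Now public and keyed by kind, as AssetCentricMigrationIO uses it above:
timeseries_io = hierarchy.get_resource_io("TimeSeries")
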
cognite_toolkit/_cdf_tk/storageio/_base.py
@@ -3,13 +3,13 @@ from collections.abc import Iterable, Mapping, Sequence, Sized
  from dataclasses import dataclass
  from typing import ClassVar, Generic, TypeVar

- from cognite.client.data_classes._base import CogniteObject, T_CogniteResource
+ from cognite.client.data_classes._base import T_CogniteResource

  from cognite_toolkit._cdf_tk.client import ToolkitClient
  from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
  from cognite_toolkit._cdf_tk.utils.fileio import SchemaColumn
  from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, HTTPMessage, ItemsRequest
- from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
+ from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal, T_WriteCogniteResource

  from .selectors import DataSelector

@@ -35,9 +35,6 @@ class Page(Generic[T_CogniteResource], Sized):
          return len(self.items)


- T_WriteCogniteResource = TypeVar("T_WriteCogniteResource", bound=CogniteObject)
-
-
  @dataclass
  class UploadItem(Generic[T_WriteCogniteResource]):
      """An item to be uploaded to CDF, consisting of a source ID and the writable Cognite resource.
cognite_toolkit/_cdf_tk/utils/useful_types.py
@@ -3,7 +3,7 @@ from datetime import date, datetime
  from typing import Any, Literal, TypeAlias, TypeVar, get_args

  from cognite.client.data_classes import Asset, Event, FileMetadata, Sequence, TimeSeries
- from cognite.client.data_classes._base import WriteableCogniteResourceList
+ from cognite.client.data_classes._base import CogniteObject, WriteableCogniteResourceList

  JsonVal: TypeAlias = None | str | int | float | bool | dict[str, "JsonVal"] | list["JsonVal"]

@@ -20,3 +20,5 @@ T_ID = TypeVar("T_ID", bound=Hashable)
  T_WritableCogniteResourceList = TypeVar("T_WritableCogniteResourceList", bound=WriteableCogniteResourceList)
  T_Value = TypeVar("T_Value")
  PrimitiveType: TypeAlias = str | int | float | bool
+
+ T_WriteCogniteResource = TypeVar("T_WriteCogniteResource", bound=CogniteObject)
cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml
@@ -12,7 +12,7 @@ jobs:
      environment: dev
      name: Deploy
      container:
-       image: cognite/toolkit:0.6.78
+       image: cognite/toolkit:0.6.80
        env:
          CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
          CDF_PROJECT: ${{ vars.CDF_PROJECT }}
cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml
@@ -10,7 +10,7 @@ jobs:
      environment: dev
      name: Deploy Dry Run
      container:
-       image: cognite/toolkit:0.6.78
+       image: cognite/toolkit:0.6.80
        env:
          CDF_CLUSTER: ${{ vars.CDF_CLUSTER }}
          CDF_PROJECT: ${{ vars.CDF_PROJECT }}
cognite_toolkit/_resources/cdf.toml (new file)
@@ -0,0 +1,13 @@
+ [cdf]#<PLACEHOLDER>
+ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
+
+ [modules]
+ # This is the version of the modules. It should not be changed manually.
+ # It will be updated by the 'cdf modules upgrade' command.
+ version = "0.6.80"
+
+
+ [plugins]
+ run = true
+ pull = false
+ dump = false
cognite_toolkit/_version.py
@@ -1 +1 @@
- __version__ = "0.6.78"
+ __version__ = "0.6.80"
{cognite_toolkit-0.6.78.dist-info → cognite_toolkit-0.6.80.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cognite_toolkit
- Version: 0.6.78
+ Version: 0.6.80
  Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
  Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
  Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases