cognite-toolkit 0.6.105__py3-none-any.whl → 0.6.106__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py +6 -3
- cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py +17 -1
- cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py +7 -4
- cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py +5 -5
- cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py +44 -4
- cognite_toolkit/_cdf_tk/commands/_upload.py +39 -44
- cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py +6 -4
- cognite_toolkit/_cdf_tk/storageio/_annotations.py +16 -14
- cognite_toolkit/_cdf_tk/storageio/_base.py +3 -4
- cognite_toolkit/_cdf_tk/storageio/_datapoints.py +3 -4
- cognite_toolkit/_cdf_tk/utils/fileio/__init__.py +2 -0
- cognite_toolkit/_cdf_tk/utils/fileio/_base.py +5 -1
- cognite_toolkit/_cdf_tk/utils/fileio/_readers.py +61 -18
- cognite_toolkit/_cdf_tk/utils/fileio/_writers.py +14 -14
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/RECORD +23 -23
- {cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/entry_points.txt +0 -0
- {cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py
CHANGED
@@ -15,7 +15,10 @@ from cognite.client.data_classes.data_modeling.views import ViewProperty
 from cognite.client.utils._identifier import InstanceId
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.data_classes.migration import
+from cognite_toolkit._cdf_tk.client.data_classes.migration import (
+    AssetCentricId,
+    ResourceViewMappingApply,
+)
 from cognite_toolkit._cdf_tk.utils.collection import flatten_dict_json_path
 from cognite_toolkit._cdf_tk.utils.dtype_conversion import (
     asset_centric_convert_to_primary_property,
@@ -163,7 +166,7 @@ class DirectRelationCache:
 def asset_centric_to_dm(
     resource: AssetCentricResourceExtended,
     instance_id: InstanceId,
-    view_source:
+    view_source: ResourceViewMappingApply,
     view_properties: dict[str, ViewProperty],
     direct_relation_cache: DirectRelationCache,
 ) -> tuple[NodeApply | EdgeApply | None, ConversionIssue]:
@@ -172,7 +175,7 @@ def asset_centric_to_dm(
     Args:
         resource (CogniteResource): The asset-centric resource to convert.
         instance_id (NodeId | EdgeApply): The ID of the instance to create or update.
-        view_source (
+        view_source (ResourceViewMappingApply): The view source defining how to map the resource to the data model.
         view_properties (dict[str, ViewProperty]): The defined properties referenced in the view source mapping.
         direct_relation_cache (DirectRelationCache): Cache for direct relation references.
 
cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py
CHANGED
@@ -14,7 +14,11 @@ from pydantic import BaseModel, BeforeValidator, field_validator, model_validator
 from cognite_toolkit._cdf_tk.client.data_classes.instances import InstanceApplyList
 from cognite_toolkit._cdf_tk.client.data_classes.migration import AssetCentricId
 from cognite_toolkit._cdf_tk.client.data_classes.pending_instances_ids import PendingInstanceId
-from cognite_toolkit._cdf_tk.commands._migrate.default_mappings import
+from cognite_toolkit._cdf_tk.commands._migrate.default_mappings import (
+    ASSET_ANNOTATIONS_ID,
+    FILE_ANNOTATIONS_ID,
+    create_default_mappings,
+)
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
 from cognite_toolkit._cdf_tk.storageio._data_classes import ModelList
 from cognite_toolkit._cdf_tk.utils.useful_types import (
@@ -182,6 +186,18 @@ class FileMapping(MigrationMapping):
 class AnnotationMapping(MigrationMapping):
     resource_type: Literal["annotation"] = "annotation"
     instance_id: EdgeId
+    annotation_type: Literal["diagrams.AssetLink", "diagrams.FileLink"] | None = None
+
+    def get_ingestion_view(self) -> str:
+        """Get the ingestion view for the mapping. If not specified, return the default ingestion view."""
+        if self.ingestion_view:
+            return self.ingestion_view
+        elif self.annotation_type == "diagrams.AssetLink":
+            return ASSET_ANNOTATIONS_ID
+        elif self.annotation_type == "diagrams.FileLink":
+            return FILE_ANNOTATIONS_ID
+        else:
+            raise ToolkitValueError("Cannot determine default ingestion view for annotation without annotation_type")
 
     @field_validator("instance_id", mode="before")
     def _validate_instance_id(cls, v: Any) -> Any:
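The new `get_ingestion_view` fallback is small but central to the annotation migration: an explicitly configured `ingestion_view` always wins, otherwise the default is chosen from `annotation_type`. A standalone sketch of that chain (the two ID constants are placeholders here, not the toolkit's real identifiers):

    # Sketch of AnnotationMapping.get_ingestion_view's fallback chain.
    # The two *_ANNOTATIONS_ID values are placeholders, not the toolkit's real IDs.
    ASSET_ANNOTATIONS_ID = "assetAnnotations"  # placeholder
    FILE_ANNOTATIONS_ID = "fileAnnotations"  # placeholder

    def get_ingestion_view(ingestion_view: str | None, annotation_type: str | None) -> str:
        if ingestion_view:  # an explicitly configured view always wins
            return ingestion_view
        if annotation_type == "diagrams.AssetLink":
            return ASSET_ANNOTATIONS_ID
        if annotation_type == "diagrams.FileLink":
            return FILE_ANNOTATIONS_ID
        raise ValueError("Cannot determine default ingestion view without annotation_type")

    assert get_ingestion_view(None, "diagrams.FileLink") == FILE_ANNOTATIONS_ID
    assert get_ingestion_view("myView", "diagrams.AssetLink") == "myView"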
cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py
CHANGED
@@ -14,9 +14,10 @@ from cognite.client.data_classes.data_modeling import (
 )
 
 from cognite_toolkit._cdf_tk.client import ToolkitClient
-from cognite_toolkit._cdf_tk.client.data_classes.migration import
+from cognite_toolkit._cdf_tk.client.data_classes.migration import ResourceViewMappingApply
 from cognite_toolkit._cdf_tk.commands._migrate.conversion import DirectRelationCache, asset_centric_to_dm
 from cognite_toolkit._cdf_tk.commands._migrate.data_classes import AssetCentricMapping
+from cognite_toolkit._cdf_tk.commands._migrate.default_mappings import create_default_mappings
 from cognite_toolkit._cdf_tk.commands._migrate.issues import ConversionIssue, MigrationIssue
 from cognite_toolkit._cdf_tk.commands._migrate.selectors import AssetCentricMigrationSelector
 from cognite_toolkit._cdf_tk.constants import MISSING_INSTANCE_SPACE
@@ -61,20 +62,22 @@ class AssetCentricMapper(
     def __init__(self, client: ToolkitClient) -> None:
         self.client = client
         self._ingestion_view_by_id: dict[ViewId, View] = {}
-        self._view_mapping_by_id: dict[str,
+        self._view_mapping_by_id: dict[str, ResourceViewMappingApply] = {}
         self._direct_relation_cache = DirectRelationCache(client)
 
     def prepare(self, source_selector: AssetCentricMigrationSelector) -> None:
         ingestion_view_ids = source_selector.get_ingestion_mappings()
         ingestion_views = self.client.migration.resource_view_mapping.retrieve(ingestion_view_ids)
-
+        defaults = {mapping.external_id: mapping for mapping in create_default_mappings()}
+        # Custom mappings from CDF override the default mappings
+        self._view_mapping_by_id = defaults | {view.external_id: view.as_write() for view in ingestion_views}
         missing_mappings = set(ingestion_view_ids) - set(self._view_mapping_by_id.keys())
         if missing_mappings:
             raise ToolkitValueError(
                 f"The following ingestion views were not found: {humanize_collection(missing_mappings)}"
             )
 
-        view_ids = list({
+        view_ids = list({mapping.view_id for mapping in self._view_mapping_by_id.values()})
         views = self.client.data_modeling.views.retrieve(view_ids)
         self._ingestion_view_by_id = {view.as_id(): view for view in views}
         missing_views = set(view_ids) - set(self._ingestion_view_by_id.keys())
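The merge in `prepare` relies on `dict` union semantics (PEP 584): on duplicate keys the right-hand operand wins, which is what makes mappings retrieved from CDF override the built-in defaults. A toy illustration:

    defaults = {"assetAnnotations": "built-in", "fileAnnotations": "built-in"}
    from_cdf = {"fileAnnotations": "customized"}  # mapping retrieved from CDF
    merged = defaults | from_cdf  # right-hand side wins on key clashes
    assert merged == {"assetAnnotations": "built-in", "fileAnnotations": "customized"}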
cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py
CHANGED
@@ -93,7 +93,7 @@ def create_default_mappings() -> list[ResourceViewMappingApply]:
         view_id=ViewId("cdf_cdm", "CogniteDiagramAnnotation", "v1"),
         property_mapping={
             # We are ignoring the symbol region in the default mapping.
-            "
+            "annotatedResourceId": "edge.startNode",
             "annotationType": "edge.type.externalId",
             "creatingUser": "sourceCreatedUser",
             "creatingApp": "sourceId",
@@ -114,16 +114,16 @@ def create_default_mappings() -> list[ResourceViewMappingApply]:
     ResourceViewMappingApply(
         external_id=FILE_ANNOTATIONS_ID,
         resource_type="fileAnnotation",
-        view_id=ViewId("cdf_cdm", "
+        view_id=ViewId("cdf_cdm", "CogniteDiagramAnnotation", "v1"),
         property_mapping={
-            "
+            "annotatedResourceId": "edge.startNode",
             "annotationType": "edge.type.externalId",
             "creatingUser": "sourceCreatedUser",
             "creatingApp": "sourceId",
             "creatingAppVersion": "sourceContext",
             "status": "status",
-            "data.fileRef.id": "edge.
-            "data.fileRef.externalId": "edge.
+            "data.fileRef.id": "edge.endNode",
+            "data.fileRef.externalId": "edge.endNode",
             "data.description": "description",
             "data.pageNumber": "startNodePageNumber",
             "data.textRegion.confidence": "confidence",
cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py
CHANGED
@@ -33,6 +33,7 @@ from .data_classes import (
     MigrationMappingList,
 )
 from .data_model import INSTANCE_SOURCE_VIEW_ID
+from .default_mappings import ASSET_ANNOTATIONS_ID, FILE_ANNOTATIONS_ID
 from .selectors import AssetCentricMigrationSelector, MigrateDataSetSelector, MigrationCSVFileSelector
 
 
@@ -213,6 +214,16 @@ class AssetCentricMigrationIO(
 class AnnotationMigrationIO(
     UploadableStorageIO[AssetCentricMigrationSelector, AssetCentricMapping[Annotation], InstanceApply]
 ):
+    """IO class for migrating Annotations.
+
+    Args:
+        client: The ToolkitClient to use for CDF interactions.
+        instance_space: The instance space to use for the migrated annotations.
+        default_asset_annotation_mapping: The default ingestion mapping to use for asset-linked annotations.
+        default_file_annotation_mapping: The default ingestion mapping to use for file-linked annotations.
+
+    """
+
     KIND = "AnnotationMigration"
     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".parquet", ".csv", ".ndjson"})
     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
@@ -220,17 +231,28 @@ class AnnotationMigrationIO(
     CHUNK_SIZE = 1000
     UPLOAD_ENDPOINT = InstanceIO.UPLOAD_ENDPOINT
 
-    def __init__(
+    def __init__(
+        self,
+        client: ToolkitClient,
+        instance_space: str | None = None,
+        default_asset_annotation_mapping: str = ASSET_ANNOTATIONS_ID,
+        default_file_annotation_mapping: str = FILE_ANNOTATIONS_ID,
+    ) -> None:
         super().__init__(client)
         self.annotation_io = AnnotationIO(client)
         self.instance_space = instance_space
+        self.default_asset_annotation_mapping = default_asset_annotation_mapping
+        self.default_file_annotation_mapping = default_file_annotation_mapping
 
     def as_id(self, item: AssetCentricMapping[Annotation]) -> str:
         return f"Annotation_{item.mapping.id}"
 
     def count(self, selector: AssetCentricMigrationSelector) -> int | None:
-
-
+        if isinstance(selector, MigrationCSVFileSelector):
+            return len(selector.items)
+        else:
+            # There is no efficient way to count annotations in CDF.
+            return None
 
     def stream_data(self, selector: AssetCentricMigrationSelector, limit: int | None = None) -> Iterable[Page]:
         if isinstance(selector, MigrateDataSetSelector):
@@ -253,8 +275,10 @@ class AnnotationMigrationIO(
             mapping = AnnotationMapping(
                 instance_id=EdgeId(space=self.instance_space, external_id=f"annotation_{resource.id!r}"),
                 id=resource.id,
-                ingestion_view=selector.ingestion_mapping,
+                ingestion_view=self._get_mapping(selector.ingestion_mapping, resource),
                 preferred_consumer_view=selector.preferred_consumer_view,
+                # The PySDK is poorly typed.
+                annotation_type=resource.annotation_type,  # type: ignore[arg-type]
             )
             mapping_list.append(AssetCentricMapping(mapping=mapping, resource=resource))
         yield mapping_list
@@ -275,6 +299,7 @@ class AnnotationMigrationIO(
             if resource is None:
                 not_found += 1
                 continue
+            mapping.ingestion_view = self._get_mapping(mapping.ingestion_view, resource)
             chunk.append(AssetCentricMapping(mapping=mapping, resource=resource))
         if chunk:
             yield chunk
@@ -284,6 +309,21 @@ class AnnotationMigrationIO(
                 f"Could not find {not_found} annotations referenced in the CSV file. They will be skipped during migration."
             ).print_warning(include_timestamp=True, console=self.client.console)
 
+    def _get_mapping(self, current_mapping: str | None, resource: Annotation) -> str:
+        try:
+            return (
+                current_mapping
+                or {
+                    "diagrams.AssetLink": self.default_asset_annotation_mapping,
+                    "diagrams.FileLink": self.default_file_annotation_mapping,
+                }[resource.annotation_type]
+            )
+        except KeyError as e:
+            raise ToolkitValueError(
+                f"Could not determine default ingestion view for annotation type '{resource.annotation_type}'. "
+                "Please specify the ingestion view explicitly in the CSV file."
+            ) from e
+
     def json_to_resource(self, item_json: dict[str, JsonVal]) -> InstanceApply:
         raise NotImplementedError("Deserializing Annotation Migrations from JSON is not supported.")
cognite_toolkit/_cdf_tk/commands/_upload.py
CHANGED
@@ -27,7 +27,7 @@ from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning, MediumSeverityWarning
 from cognite_toolkit._cdf_tk.tk_warnings.fileread import ResourceFormatWarning
 from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables
 from cognite_toolkit._cdf_tk.utils.file import read_yaml_file
-from cognite_toolkit._cdf_tk.utils.fileio import
+from cognite_toolkit._cdf_tk.utils.fileio import MultiFileReader
 from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, ItemMessage, SuccessResponseItems
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
 from cognite_toolkit._cdf_tk.utils.progress_tracker import ProgressTracker
@@ -87,7 +87,7 @@ class UploadCommand(ToolkitCommand):
         ├── datafile2.Manifest.yaml  # Manifest file for datafile2
         └── ...
         """
-        console =
+        console = client.console
         data_files_by_selector = self._find_data_files(input_dir, kind)
 
         self._deploy_resource_folder(input_dir / DATA_RESOURCE_DIR, deploy_resources, client, console, dry_run, verbose)
@@ -225,49 +225,44 @@ class UploadCommand(ToolkitCommand):
             io = self._create_selected_io(selector, datafiles[0], client)
             if io is None:
                 continue
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    upload_client=upload_client,
-                    io=io,
-                    dry_run=dry_run,
-                    selector=selector,
-                    tracker=tracker,
-                    console=console,
-                ),
-                iteration_count=None,
-                max_queue_size=self._MAX_QUEUE_SIZE,
-                download_description=f"Reading {file_count:,}/{total_file_count + 1:,}: {file_display.as_posix()!s}",
-                process_description="Processing",
-                write_description=f"{action} {selector.display_name!r}",
+            reader = MultiFileReader(datafiles)
+            if reader.is_table and not isinstance(io, TableUploadableStorageIO):
+                raise ToolkitValueError(f"{selector.display_name} does not support {reader.format!r} files.")
+            tracker = ProgressTracker[str]([self._UPLOAD])
+            executor = ProducerWorkerExecutor[list[tuple[str, dict[str, JsonVal]]], Sequence[UploadItem]](
+                download_iterable=io.read_chunks(reader),
+                process=partial(io.rows_to_data, selector=selector)
+                if reader.is_table and isinstance(io, TableUploadableStorageIO)
+                else io.json_chunk_to_data,
+                write=partial(
+                    self._upload_items,
+                    upload_client=upload_client,
+                    io=io,
+                    dry_run=dry_run,
+                    selector=selector,
+                    tracker=tracker,
                     console=console,
-            )
-
-
-
-
-
-
-
-
-
-
-
-
-
+                ),
+                iteration_count=None,
+                max_queue_size=self._MAX_QUEUE_SIZE,
+                download_description=f"Reading {selector.display_name!r} files",
+                process_description="Processing",
+                write_description=f"{action} {selector.display_name!r}",
+                console=console,
+            )
+            executor.run()
+            file_count += len(datafiles)
+            executor.raise_on_error()
+            final_action = "Uploaded" if not dry_run else "Would upload"
+            suffix = " successfully" if not dry_run else ""
+            results = tracker.aggregate()
+            success = results.get((self._UPLOAD, "success"), 0)
+            failed = results.get((self._UPLOAD, "failed"), 0)
+            if failed > 0:
+                suffix += f", {failed:,} failed"
+            console.print(
+                f"{final_action} {success:,} {selector.display_name} from {len(datafiles)} files{suffix}."
+            )
 
     @staticmethod
     def _path_as_display_name(input_path: Path, cwd: Path = Path.cwd()) -> Path:
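The rewritten loop wires three stages into `ProducerWorkerExecutor`: a reader producing chunks, a converter chosen by whether the input is tabular, and an uploader. A stdlib-only sketch of that download/process/write shape (the executor itself is toolkit-internal; all names below are illustrative):

    # Toy pipeline mirroring the download -> process -> write split above.
    # ProducerWorkerExecutor is toolkit-internal; this only shows the shape.
    from functools import partial

    def read_chunks(rows: list[str], chunk_size: int = 2):
        for i in range(0, len(rows), chunk_size):
            yield rows[i : i + chunk_size]  # the "download" stage

    def to_items(chunk: list[str], selector: str) -> list[str]:
        return [f"{selector}:{row}" for row in chunk]  # the "process" stage

    def upload(items: list[str], dry_run: bool) -> None:
        prefix = "Would upload" if dry_run else "Uploaded"  # the "write" stage
        print(f"{prefix}: {', '.join(items)}")

    process = partial(to_items, selector="timeseries")  # hypothetical selector name
    write = partial(upload, dry_run=True)
    for chunk in read_chunks(["a", "b", "c"]):
        write(process(chunk))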
cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py
CHANGED
@@ -328,6 +328,7 @@ class ContainerCRUD(ResourceContainerCRUD[ContainerId, ContainerApply, Container
 
     def dump_resource(self, resource: Container, local: dict[str, Any] | None = None) -> dict[str, Any]:
         dumped = resource.as_write().dump()
+        has_local = local is not None
         local = local or {}
         for key in ["constraints", "indexes"]:
             if not dumped.get(key) and key not in local:
@@ -339,15 +340,16 @@ class ContainerCRUD(ResourceContainerCRUD[ContainerId, ContainerApply, Container
                 continue
             local_prop = local_prop_by_id[prop_id]
             for key, default in [("immutable", False), ("autoIncrement", False), ("nullable", True)]:
-                if cdf_prop.get(key) is default and key not in local_prop:
+                if has_local and cdf_prop.get(key) is default and key not in local_prop:
                     cdf_prop.pop(key, None)
             cdf_type = cdf_prop.get("type", {})
             local_type = local_prop.get("type", {})
             for key, type_default in [("list", False), ("collation", "ucs_basic")]:
-                if cdf_type.get(key) == type_default and key not in local_type:
+                if has_local and cdf_type.get(key) == type_default and key not in local_type:
                     cdf_type.pop(key, None)
-
-
+        if has_local and "usedFor" not in local and dumped.get("usedFor") == "node":
+            # Only drop if set to default by server.
+            dumped.pop("usedFor", None)
         return dumped
 
     def create(self, items: Sequence[ContainerApply]) -> ContainerList:
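The `has_local` flag distinguishes "no local file to compare against" from "local file that omits defaults": server-side defaults are now only stripped when a local resource exists. A minimal sketch of the guard, using a hypothetical `nullable` property:

    def dump_prop(cdf_prop: dict, local_prop: dict | None) -> dict:
        has_local = local_prop is not None
        local_prop = local_prop or {}
        out = dict(cdf_prop)
        # Only drop the server-side default when there is a local file to align with.
        if has_local and out.get("nullable") is True and "nullable" not in local_prop:
            out.pop("nullable", None)
        return out

    assert dump_prop({"type": "text", "nullable": True}, None) == {"type": "text", "nullable": True}
    assert dump_prop({"type": "text", "nullable": True}, {"type": "text"}) == {"type": "text"}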
cognite_toolkit/_cdf_tk/storageio/_annotations.py
CHANGED
@@ -26,21 +26,23 @@ class AnnotationIO(StorageIO[AssetCentricSelector, Annotation]):
     def stream_data(self, selector: AssetCentricSelector, limit: int | None = None) -> Iterable[Page[Annotation]]:
         total = 0
         for file_chunk in FileMetadataIO(self.client).stream_data(selector, None):
-
-
-
-
-
+            for annotation_type in ["diagrams.AssetLink", "diagrams.FileLink"]:
+                # Todo Support pagination. This is missing in the SDK.
+                results = self.client.annotations.list(
+                    filter=AnnotationFilter(
+                        annotated_resource_type="file",
+                        annotated_resource_ids=[{"id": file_metadata.id} for file_metadata in file_chunk.items],
+                        annotation_type=annotation_type,
+                    )
                 )
-
-
-
-
-
-                total
-
-                break
+                if limit is not None and total + len(results) > limit:
+                    results = results[: limit - total]
+
+                for chunk in chunker_sequence(results, self.CHUNK_SIZE):
+                    yield Page(worker_id="main", items=chunk)
+                    total += len(chunk)
+                    if limit is not None and total >= limit:
+                        break
 
     def count(self, selector: AssetCentricSelector) -> int | None:
         """There is no efficient way to count annotations in CDF."""
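The limit handling trims the batch that would overshoot to `limit - total` before chunking, so at most `limit` annotations are yielded in total. A standalone sketch of that arithmetic (chunking is inlined here; the toolkit uses `chunker_sequence`):

    def stream(batches: list[list[int]], limit: int | None = None, chunk_size: int = 2):
        total = 0
        for results in batches:
            if limit is not None and total + len(results) > limit:
                results = results[: limit - total]  # trim the batch that would overshoot
            for i in range(0, len(results), chunk_size):
                chunk = results[i : i + chunk_size]
                yield chunk
                total += len(chunk)
                if limit is not None and total >= limit:
                    return

    assert sum(len(c) for c in stream([[1, 2, 3], [4, 5, 6]], limit=4)) == 4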
cognite_toolkit/_cdf_tk/storageio/_base.py
CHANGED
@@ -8,8 +8,7 @@ from cognite.client.data_classes._base import T_CogniteResource
 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
 from cognite_toolkit._cdf_tk.utils.collection import chunker
-from cognite_toolkit._cdf_tk.utils.fileio import
-from cognite_toolkit._cdf_tk.utils.fileio._readers import TableReader
+from cognite_toolkit._cdf_tk.utils.fileio import MultiFileReader, SchemaColumn
 from cognite_toolkit._cdf_tk.utils.http_client import HTTPClient, HTTPMessage, ItemsRequest
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal, T_WriteCogniteResource
 
@@ -217,8 +216,8 @@ class UploadableStorageIO(
         raise NotImplementedError()
 
     @classmethod
-    def read_chunks(cls, reader:
-        data_name = "row" if
+    def read_chunks(cls, reader: MultiFileReader) -> Iterable[list[tuple[str, dict[str, JsonVal]]]]:
+        data_name = "row" if reader.is_table else "line"
         # Include name of line for better error messages
         iterable = ((f"{data_name} {line_no}", item) for line_no, item in reader.read_chunks_with_line_numbers())
 
cognite_toolkit/_cdf_tk/storageio/_datapoints.py
CHANGED
@@ -19,8 +19,7 @@ from cognite_toolkit._cdf_tk.utils.dtype_conversion import (
     _TextConverter,
     _ValueConverter,
 )
-from cognite_toolkit._cdf_tk.utils.fileio import
-from cognite_toolkit._cdf_tk.utils.fileio._readers import TableReader
+from cognite_toolkit._cdf_tk.utils.fileio._readers import MultiFileReader
 from cognite_toolkit._cdf_tk.utils.http_client import DataBodyRequest, HTTPClient, HTTPMessage
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
 
@@ -165,8 +164,8 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointList
     )
 
     @classmethod
-    def read_chunks(cls, reader:
-        if not
+    def read_chunks(cls, reader: MultiFileReader) -> Iterable[list[tuple[str, dict[str, JsonVal]]]]:
+        if not reader.is_table:
             raise RuntimeError("DatapointsIO can only read from TableReader instances.")
         iterator = iter(reader.read_chunks_with_line_numbers())
         try:
cognite_toolkit/_cdf_tk/utils/fileio/__init__.py
CHANGED
@@ -12,6 +12,7 @@ from ._readers import (
     CSVReader,
     FailedParsing,
     FileReader,
+    MultiFileReader,
     NDJsonReader,
     ParquetReader,
     YAMLReader,
@@ -45,6 +46,7 @@ __all__ = [
     "FileReader",
     "FileWriter",
     "GzipCompression",
+    "MultiFileReader",
     "NDJsonReader",
     "NDJsonWriter",
     "ParquetReader",
cognite_toolkit/_cdf_tk/utils/fileio/_readers.py
CHANGED
@@ -1,5 +1,6 @@
 import csv
 import json
+import re
 from abc import ABC, abstractmethod
 from collections import Counter, defaultdict
 from collections.abc import Callable, Iterator, Mapping, Sequence
@@ -40,7 +41,7 @@ class FileReader(FileIO, ABC):
         raise NotImplementedError("This method should be implemented in subclasses.")
 
     @classmethod
-    def from_filepath(cls, filepath: Path) -> "FileReader":
+    def from_filepath(cls, filepath: Path) -> "type[FileReader]":
         if len(filepath.suffixes) == 0:
             raise ToolkitValueError(
                 f"File has no suffix. Available formats: {humanize_collection(FILE_READ_CLS_BY_FORMAT.keys())}."
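Note the corrected return annotation: `from_filepath` hands back the reader class for a suffix, which the caller instantiates per file (as `MultiFileReader` does in the next hunk). A simplified sketch of the lookup (the registry and class here are illustrative, not the toolkit's):

    from pathlib import Path

    class NdjsonReader:  # illustrative reader class
        FORMAT = ".ndjson"

    READERS: dict[str, type] = {NdjsonReader.FORMAT: NdjsonReader}

    def from_filepath(filepath: Path) -> type:
        suffix = filepath.suffixes[0]  # first suffix; compression suffixes follow it
        try:
            return READERS[suffix]  # return the class, not an instance
        except KeyError:
            raise ValueError(f"Unknown file format: {suffix}") from None

    assert from_filepath(Path("rows.ndjson")) is NdjsonReader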
@@ -55,15 +56,57 @@ class FileReader(FileIO, ABC):
         )
 
         if suffix in FILE_READ_CLS_BY_FORMAT:
-            return FILE_READ_CLS_BY_FORMAT[suffix]
+            return FILE_READ_CLS_BY_FORMAT[suffix]
 
         raise ToolkitValueError(
             f"Unknown file format: {suffix}. Available formats: {humanize_collection(FILE_READ_CLS_BY_FORMAT.keys())}."
         )
 
 
+class MultiFileReader(FileReader):
+    """Reads multiple files and yields chunks from each file sequentially.
+
+    Args:
+        input_files (Sequence[Path]): The list of file paths to read.
+    """
+
+    PART_PATTERN = re.compile(r"part-(\d{4})$")
+
+    def __init__(self, input_files: Sequence[Path]) -> None:
+        super().__init__(input_file=input_files[0])
+        self.input_files = input_files
+        reader_classes = Counter([FileReader.from_filepath(input_file) for input_file in self.input_files])
+        if len(reader_classes) > 1:
+            raise ToolkitValueError(
+                "All input files must be of the same format. "
+                f"Found formats: {humanize_collection([cls.FORMAT for cls in reader_classes.keys()])}."
+            )
+        self.reader_class = reader_classes.most_common(1)[0][0]
+
+    @property
+    def is_table(self) -> bool:
+        return issubclass(self.reader_class, TableReader)
+
+    @property
+    def format(self) -> str:
+        return self.reader_class.FORMAT
+
+    def read_chunks(self) -> Iterator[dict[str, JsonVal]]:
+        for input_file in sorted(self.input_files, key=self._part_no):
+            yield from self.reader_class(input_file).read_chunks()
+
+    def _part_no(self, path: Path) -> int:
+        match = self.PART_PATTERN.search(path.stem)
+        if match:
+            return int(match.group(1))
+        return 99999
+
+    def _read_chunks_from_file(self, file: TextIOWrapper) -> Iterator[dict[str, JsonVal]]:
+        raise NotImplementedError("This method is not used in MultiFileReader.")
+
+
 class NDJsonReader(FileReader):
-
+    FORMAT = ".ndjson"
 
     def _read_chunks_from_file(self, file: TextIOWrapper) -> Iterator[dict[str, JsonVal]]:
         for line in file:
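`MultiFileReader.read_chunks` orders files by a `part-NNNN` suffix before reading, so multi-part exports are replayed in write order. The sort key in isolation (same regex as the diff; the file names are hypothetical):

    import re
    from pathlib import Path

    PART_PATTERN = re.compile(r"part-(\d{4})$")  # same pattern as in the diff

    def part_no(path: Path) -> int:
        match = PART_PATTERN.search(path.stem)
        return int(match.group(1)) if match else 99999  # unnumbered files sort last

    files = [Path("rows.part-0002.csv"), Path("rows.part-0001.csv"), Path("extra.csv")]
    assert [p.name for p in sorted(files, key=part_no)] == [
        "rows.part-0001.csv",
        "rows.part-0002.csv",
        "extra.csv",
    ]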
@@ -77,11 +120,11 @@ class YAMLBaseReader(FileReader, ABC):
 
 
 class YAMLReader(YAMLBaseReader):
-
+    FORMAT = ".yaml"
 
 
 class YMLReader(YAMLBaseReader):
-
+    FORMAT = ".yml"
 
 
 @dataclass
@@ -171,8 +214,8 @@ class TableReader(FileReader, ABC):
 
         if not input_file.exists():
             raise ToolkitFileNotFoundError(f"File not found: {input_file.as_posix()!r}.")
-        if input_file.suffix != cls.
-            raise ToolkitValueError(f"Expected a {cls.
+        if input_file.suffix != cls.FORMAT:
+            raise ToolkitValueError(f"Expected a {cls.FORMAT} file got a {input_file.suffix!r} file instead.")
 
         column_names, sample_rows = cls._read_sample_rows(input_file, sniff_rows)
         cls._check_column_names(column_names)
@@ -213,7 +256,7 @@ class TableReader(FileReader, ABC):
 class CSVReader(TableReader):
     """Reads CSV files and yields each row as a dictionary."""
 
-
+    FORMAT = ".csv"
 
     def _read_chunks_from_file(self, file: TextIOWrapper) -> Iterator[dict[str, JsonVal]]:
         if self.keep_failed_cells and self.failed_cell:
@@ -257,7 +300,7 @@ class CSVReader(TableReader):
 
 
 class ParquetReader(TableReader):
-
+    FORMAT = ".parquet"
 
     def __init__(self, input_file: Path) -> None:
         # Parquet files have their own schema, so we don't need to sniff or provide one.
@@ -312,19 +355,19 @@ class ParquetReader(TableReader):
 FILE_READ_CLS_BY_FORMAT: Mapping[str, type[FileReader]] = {}
 TABLE_READ_CLS_BY_FORMAT: Mapping[str, type[TableReader]] = {}
 for subclass in get_concrete_subclasses(FileReader):  # type: ignore[type-abstract]
-    if not getattr(subclass, "
+    if not getattr(subclass, "FORMAT", None):
         continue
-    if subclass.
+    if subclass.FORMAT in FILE_READ_CLS_BY_FORMAT:
         raise TypeError(
-            f"Duplicate file format {subclass.
-            f"{FILE_READ_CLS_BY_FORMAT[subclass.
+            f"Duplicate file format {subclass.FORMAT!r} found for classes "
+            f"{FILE_READ_CLS_BY_FORMAT[subclass.FORMAT].__name__!r} and {subclass.__name__!r}."
         )
     # We know we have a dict, but we want to expose FILE_READ_CLS_BY_FORMAT as a Mapping
-    FILE_READ_CLS_BY_FORMAT[subclass.
+    FILE_READ_CLS_BY_FORMAT[subclass.FORMAT] = subclass  # type: ignore[index]
     if issubclass(subclass, TableReader):
-        if subclass.
+        if subclass.FORMAT in TABLE_READ_CLS_BY_FORMAT:
             raise TypeError(
-                f"Duplicate table file format {subclass.
-                f"{TABLE_READ_CLS_BY_FORMAT[subclass.
+                f"Duplicate table file format {subclass.FORMAT!r} found for classes "
+                f"{TABLE_READ_CLS_BY_FORMAT[subclass.FORMAT].__name__!r} and {subclass.__name__!r}."
             )
-        TABLE_READ_CLS_BY_FORMAT[subclass.
+        TABLE_READ_CLS_BY_FORMAT[subclass.FORMAT] = subclass  # type: ignore[index]
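The module-level loop above builds a format-to-class registry from concrete subclasses and fails fast on duplicate `FORMAT` values. A generic, self-contained sketch of the pattern (class names illustrative):

    class Reader:
        FORMAT: str = ""  # concrete subclasses set a real suffix

    class CsvReader(Reader):
        FORMAT = ".csv"

    class NdjsonReader(Reader):
        FORMAT = ".ndjson"

    READ_CLS_BY_FORMAT: dict[str, type[Reader]] = {}
    for subclass in Reader.__subclasses__():
        if not subclass.FORMAT:
            continue  # skip intermediate classes without a format
        if subclass.FORMAT in READ_CLS_BY_FORMAT:
            raise TypeError(f"Duplicate file format {subclass.FORMAT!r}")
        READ_CLS_BY_FORMAT[subclass.FORMAT] = subclass

    assert READ_CLS_BY_FORMAT[".csv"] is CsvReader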
cognite_toolkit/_cdf_tk/utils/fileio/_writers.py
CHANGED
@@ -154,7 +154,7 @@ class TableWriter(FileWriter[T_IO], ABC):
 
 
 class NDJsonWriter(FileWriter[TextIOWrapper]):
-
+    FORMAT = ".ndjson"
 
     class _DateTimeEncoder(json.JSONEncoder):
         def default(self, obj: object) -> object:
@@ -181,15 +181,15 @@ class YAMLBaseWriter(FileWriter[TextIOWrapper], ABC):
 
 
 class YAMLWriter(YAMLBaseWriter):
-
+    FORMAT = ".yaml"
 
 
 class YMLWriter(YAMLBaseWriter):
-
+    FORMAT = ".yml"
 
 
 class CSVWriter(TableWriter[TextIOWrapper]):
-
+    FORMAT = ".csv"
 
     def __init__(
         self,
@@ -241,7 +241,7 @@ class CSVWriter(TableWriter[TextIOWrapper]):
 
 
 class ParquetWriter(TableWriter["pq.ParquetWriter"]):
-
+    FORMAT = ".parquet"
 
     def _create_writer(self, filepath: Path) -> "pq.ParquetWriter":
         import pyarrow.parquet as pq
@@ -411,19 +411,19 @@ class ParquetWriter(TableWriter["pq.ParquetWriter"]):
 FILE_WRITE_CLS_BY_FORMAT: Mapping[str, type[FileWriter]] = {}
 TABLE_WRITE_CLS_BY_FORMAT: Mapping[str, type[TableWriter]] = {}
 for subclass in get_concrete_subclasses(FileWriter):  # type: ignore[type-abstract]
-    if not getattr(subclass, "
+    if not getattr(subclass, "FORMAT", None):
         continue
-    if subclass.
+    if subclass.FORMAT in FILE_WRITE_CLS_BY_FORMAT:
         raise TypeError(
-            f"Duplicate file format {subclass.
-            f"{FILE_WRITE_CLS_BY_FORMAT[subclass.
+            f"Duplicate file format {subclass.FORMAT!r} found for classes "
+            f"{FILE_WRITE_CLS_BY_FORMAT[subclass.FORMAT].__name__!r} and {subclass.__name__!r}."
         )
     # We know we have a dict, but we want to expose FILE_WRITE_CLS_BY_FORMAT as a Mapping
-    FILE_WRITE_CLS_BY_FORMAT[subclass.
+    FILE_WRITE_CLS_BY_FORMAT[subclass.FORMAT] = subclass  # type: ignore[index]
     if issubclass(subclass, TableWriter):
-        if subclass.
+        if subclass.FORMAT in TABLE_WRITE_CLS_BY_FORMAT:
             raise TypeError(
-                f"Duplicate table file format {subclass.
-                f"{TABLE_WRITE_CLS_BY_FORMAT[subclass.
+                f"Duplicate table file format {subclass.FORMAT!r} found for classes "
+                f"{TABLE_WRITE_CLS_BY_FORMAT[subclass.FORMAT].__name__!r} and {subclass.__name__!r}."
             )
-        TABLE_WRITE_CLS_BY_FORMAT[subclass.
+        TABLE_WRITE_CLS_BY_FORMAT[subclass.FORMAT] = subclass  # type: ignore[index]
cognite_toolkit/_resources/cdf.toml
CHANGED
@@ -4,7 +4,7 @@ default_env = "<DEFAULT_ENV_PLACEHOLDER>"
 [modules]
 # This is the version of the modules. It should not be changed manually.
 # It will be updated by the 'cdf modules upgrade' command.
-version = "0.6.105"
+version = "0.6.106"
 
 [alpha_flags]
 external-libraries = true
cognite_toolkit/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.6.105"
+__version__ = "0.6.106"
{cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.6.105
+Version: 0.6.106
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
{cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf.py,sha256=0abeQr1Tfk4lkGaoXyrnFC28wDSlR_8UGrh10noGduQ,6085
-cognite_toolkit/_version.py,sha256=
+cognite_toolkit/_version.py,sha256=Yr4MUMyngYF8CK8gD1i_5CFjzdbc9C-zEFAB5ViLDNk,24
 cognite_toolkit/_cdf_tk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf_tk/cdf_toml.py,sha256=VSWV9h44HusWIaKpWgjrOMrc3hDoPTTXBXlp6-NOrIM,9079
 cognite_toolkit/_cdf_tk/constants.py,sha256=Gi7iGGzdUrOnBeIK6ix3XiBieHIwzLJO5BWjDI3a6l4,7082
@@ -108,7 +108,7 @@ cognite_toolkit/_cdf_tk/commands/_download.py,sha256=OBKPM_HGGA1i32th1SAgkQM_81C
 cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=_4iX3AHAI6eLmRVUlWXCSvVHx1BZW2yDr_i2i9ECg6U,43120
 cognite_toolkit/_cdf_tk/commands/_purge.py,sha256=RadQHsmkPez3fZ5HCP9b82o2_fBx8P_-bTo7prkvWXU,32525
 cognite_toolkit/_cdf_tk/commands/_questionary_style.py,sha256=h-w7fZKkGls3TrzIGBKjsZSGoXJJIYchgD1StfA40r8,806
-cognite_toolkit/_cdf_tk/commands/_upload.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_upload.py,sha256=iwLRK53pXP68vmgIGBx7I1ODtlH6bks7_TanHpr04zw,13900
 cognite_toolkit/_cdf_tk/commands/_utils.py,sha256=UxMJW5QYKts4om5n6x2Tq2ihvfO9gWjhQKeqZNFTlKg,402
 cognite_toolkit/_cdf_tk/commands/_virtual_env.py,sha256=GFAid4hplixmj9_HkcXqU5yCLj-fTXm4cloGD6U2swY,2180
 cognite_toolkit/_cdf_tk/commands/auth.py,sha256=PLjfxfJJKaqux1eB2fycIRlwwSMCbM3qxWDnFX-blJU,31720
@@ -127,14 +127,14 @@ cognite_toolkit/_cdf_tk/commands/run.py,sha256=JyX9jLEQej9eRrHVCCNlw4GuF80qETSol
 cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=i5ldcTah59K0E4fH5gHTV0GRvtDCEvVses9WQzn9Lno,226
 cognite_toolkit/_cdf_tk/commands/_migrate/canvas.py,sha256=R-z0yfOFcJZj-zRLhN-7z_-SLxqzSmONMgrbzNF9dGs,8843
 cognite_toolkit/_cdf_tk/commands/_migrate/command.py,sha256=jNoqqq81lbdfDTAQ5w2ctaYSUueLhZe0qjUKjCezk6s,14234
-cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py,sha256=Ew9JRYrd-Ol9G9csTzpnhXAgCFnX67MwDYOTsdJLP3E,16803
 cognite_toolkit/_cdf_tk/commands/_migrate/creators.py,sha256=FTu7w3G8KyPY8pagG3KdPpOmpLcjehaAg2auEy6iM7A,9605
-cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=
-cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=_vMS_qAPj4yup1VnmmojPVigAZtyPQH7PM0Raby5tao,10619
+cognite_toolkit/_cdf_tk/commands/_migrate/data_mapper.py,sha256=Q1K-zuRe04AyKHm8nYp5qVxB4eeHC0poKT3Ob1pNL-c,5646
 cognite_toolkit/_cdf_tk/commands/_migrate/data_model.py,sha256=i1eUsNX6Dueol9STIEwyksBnBsWUk13O8qHIjW964pM,7860
-cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_migrate/default_mappings.py,sha256=ERn3qFrJFXdtXaMjHq3Gk7MxH03MGFk3FrtWCOBJQts,5544
 cognite_toolkit/_cdf_tk/commands/_migrate/issues.py,sha256=lWSnuS3CfRDbA7i1g12gJ2reJnQcLmZWxHDK19-Wxkk,5772
-cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_migrate/migration_io.py,sha256=UtmfXMjajSM-oNu8ORNPCVnAHtZBmvT92i-ebYvuB-Q,16247
 cognite_toolkit/_cdf_tk/commands/_migrate/prepare.py,sha256=RfqaNoso5CyBwc-p6ckwcYqBfZXKhdJgdGIyd0TATaI,2635
 cognite_toolkit/_cdf_tk/commands/_migrate/selectors.py,sha256=N1H_-rBpPUD6pbrlcofn1uEK1bA694EUXEe1zIXeqyo,2489
 cognite_toolkit/_cdf_tk/cruds/__init__.py,sha256=kxiB8gZo0Y4TyttWHGTLPCW5R1DUkN1uTZewTvaZRjo,6298
@@ -147,7 +147,7 @@ cognite_toolkit/_cdf_tk/cruds/_resource_cruds/auth.py,sha256=iGG2_btpEqip3o6OKpc
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/classic.py,sha256=7RdiWvh6MLI1lLmt3gcqDQj61xbwREhsvoyjFuJn2F0,26402
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/configuration.py,sha256=KrL7bj8q5q18mGB2V-NDkW5U5nfseZOyorXiUbp2uLw,6100
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/data_organization.py,sha256=iXn9iAtwA8mhH-7j9GF-MlLomTcaw3GhEbFY28Wx0iA,9927
-cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py,sha256=
+cognite_toolkit/_cdf_tk/cruds/_resource_cruds/datamodel.py,sha256=XaVUjYKfa2ceOicAgE_mPknMdQm_ldwiaNIs3wWjFA0,65823
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/extraction_pipeline.py,sha256=zv36HPO9goRmU3NM_i1wOvWQEdsgpQTI4bcAl-eis1g,18232
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/fieldops.py,sha256=SnQMbxiZ3SSYkTLXQ_vIu2HVf_WyD1jplNRJuoeOUfA,16723
 cognite_toolkit/_cdf_tk/cruds/_resource_cruds/file.py,sha256=F3n2FOWAPder4z3OTYs81VB-6C6r3oUzJsHvigdhaD0,15500
@@ -239,12 +239,12 @@ cognite_toolkit/_cdf_tk/resource_classes/robotics/frame.py,sha256=XmDqJ0pAxe_vAP
 cognite_toolkit/_cdf_tk/resource_classes/robotics/location.py,sha256=dbc9HT-bc2Qt15hHoR63SM7pg321BhNuTNjI7HHCwSA,468
 cognite_toolkit/_cdf_tk/resource_classes/robotics/map.py,sha256=j77z7CzCMiMj8r94BdUKCum9EuZRUjaSlUAy9K9DL_Q,942
 cognite_toolkit/_cdf_tk/storageio/__init__.py,sha256=SSMV-W_uqMwS9I0xazBfAyNRqKWlAuLlABropMBEa50,2434
-cognite_toolkit/_cdf_tk/storageio/_annotations.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/_annotations.py,sha256=JI_g18_Y9S7pbc9gm6dZMyo3Z-bCndJXF9C2lOva0bQ,4848
 cognite_toolkit/_cdf_tk/storageio/_applications.py,sha256=CdqJueM9ZmXVh8RUme2lAgNasjAM8QTQDAfeJMm2ZYo,7026
 cognite_toolkit/_cdf_tk/storageio/_asset_centric.py,sha256=DbTvIneN8Hw3ByhdH1kXkS7Gw68oXEWtIqlZGZgLMg0,33704
-cognite_toolkit/_cdf_tk/storageio/_base.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/_base.py,sha256=S52TFdNZuXXkMU_jUobnjW4COwkDwn47FFrKeSv64xs,12120
 cognite_toolkit/_cdf_tk/storageio/_data_classes.py,sha256=s3TH04BJ1q7rXndRhEbVMEnoOXjxrGg4n-w9Z5uUL-o,3480
-cognite_toolkit/_cdf_tk/storageio/_datapoints.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/_datapoints.py,sha256=1Cfch0lVPc4dyXH7RGfGaXudB1O3-f56_cfGw-e-ya0,8593
 cognite_toolkit/_cdf_tk/storageio/_instances.py,sha256=t9fNpHnT6kCk8LDoPj3qZXmHpyDbPF5BZ6pI8ziTyFw,10810
 cognite_toolkit/_cdf_tk/storageio/_raw.py,sha256=5WjAFiVR0KKRhMqCy1IRy1TQFWj86D7nGu5WSFNLp6U,3869
 cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py,sha256=kvk7zdI_N2VobkrWTYRDuq1fSpy2Z99MsJp1sBa_KrQ,1715
@@ -285,11 +285,11 @@ cognite_toolkit/_cdf_tk/utils/text.py,sha256=1-LQMo633_hEhNhishQo7Buj-7np5Pe4qKk
 cognite_toolkit/_cdf_tk/utils/thread_safe_dict.py,sha256=NbRHcZvWpF9xHP5OkOMGFpxrPNbi0Q3Eea6PUNbGlt4,3426
 cognite_toolkit/_cdf_tk/utils/useful_types.py,sha256=oK88W6G_aK3hebORSQKZjWrq7jG-pO2lkLWSWYMlngM,1872
 cognite_toolkit/_cdf_tk/utils/validate_access.py,sha256=1puswcpgEDNCwdk91dhLqCBSu_aaUAd3Hsw21d-YVFs,21955
-cognite_toolkit/_cdf_tk/utils/fileio/__init__.py,sha256=
-cognite_toolkit/_cdf_tk/utils/fileio/_base.py,sha256=
+cognite_toolkit/_cdf_tk/utils/fileio/__init__.py,sha256=0rJsL3jClj_smxh_Omqchf0K9xTi1DlKgmCDjBqJ38I,1243
+cognite_toolkit/_cdf_tk/utils/fileio/_base.py,sha256=eC6mRIwSD4LjyFa83BoBnhO0t3l-ctQMW295LIyxXLk,827
 cognite_toolkit/_cdf_tk/utils/fileio/_compression.py,sha256=8BAPgg5OKc3vkEEkqOvYsuyh12iXVNuEmC0omWwyJNQ,2355
-cognite_toolkit/_cdf_tk/utils/fileio/_readers.py,sha256=
-cognite_toolkit/_cdf_tk/utils/fileio/_writers.py,sha256=
+cognite_toolkit/_cdf_tk/utils/fileio/_readers.py,sha256=plDxxRVipcddjhF8JOaVoDFeNFUmtl4fKMX8FLAU_eI,15527
+cognite_toolkit/_cdf_tk/utils/fileio/_writers.py,sha256=4buAPp73Qfc0hw_LMyFI3g2DhdM4hbrasXuwMCiAcCQ,17732
 cognite_toolkit/_cdf_tk/utils/http_client/__init__.py,sha256=G8b7Bg4yIet5R4Igh3dS2SntWzE6I0iTGBeNlNsSxkQ,857
 cognite_toolkit/_cdf_tk/utils/http_client/_client.py,sha256=NTRfloXkCiS_rl5Vl1D_hsyTTowMKWDsiIR4oGwTADI,11208
 cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py,sha256=PIlSmv3spObHeoylpSzz2fqUFAjIoE89qzvc0uSOGw0,12975
@@ -300,13 +300,13 @@ cognite_toolkit/_repo_files/.gitignore,sha256=ip9kf9tcC5OguF4YF4JFEApnKYw0nG0vPi
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=brULcs8joAeBC_w_aoWjDDUHs3JheLMIR9ajPUK96nc,693
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=OBFDhFWK1mlT4Dc6mDUE2Es834l8sAlYG50-5RxRtHk,723
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=
-cognite_toolkit/_resources/cdf.toml,sha256=
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=1jeTnCHshDXK-_BpeGjx-DcYIyJXP4JobQOsrd1YdIg,668
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=2uWDnr4F_8dy6F2dwTwae5_pkKMF9pUB4tMeV8BsN7Y,2431
+cognite_toolkit/_resources/cdf.toml,sha256=C8EeKYvi8E10kX1WeG0difC8zHWuFseZo4vy6-bt1rI,488
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=6xKBUQpXZXGQ3fJ5f7nj7oT0s2n7OTAGIa17ZlKHZ5U,8052
-cognite_toolkit-0.6.105.dist-info/METADATA,sha256=
-cognite_toolkit-0.6.105.dist-info/WHEEL,sha256=
-cognite_toolkit-0.6.105.dist-info/entry_points.txt,sha256=
-cognite_toolkit-0.6.105.dist-info/licenses/LICENSE,sha256=
-cognite_toolkit-0.6.105.dist-info/RECORD,,
+cognite_toolkit-0.6.106.dist-info/METADATA,sha256=KvtAg2Al4AexHjxlR3MsdRVkqApWm7BWVxSbuQCdYI4,4502
+cognite_toolkit-0.6.106.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognite_toolkit-0.6.106.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.6.106.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.6.106.dist-info/RECORD,,
{cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/WHEEL
File without changes
{cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/entry_points.txt
File without changes
{cognite_toolkit-0.6.105.dist-info → cognite_toolkit-0.6.106.dist-info}/licenses/LICENSE
File without changes