cognite-toolkit 0.7.15__py3-none-any.whl → 0.7.17__py3-none-any.whl
This diff shows the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- cognite_toolkit/_cdf.py +0 -12
- cognite_toolkit/_cdf_tk/apps/_download_app.py +158 -0
- cognite_toolkit/_cdf_tk/commands/_download.py +13 -2
- cognite_toolkit/_cdf_tk/commands/_migrate/command.py +5 -1
- cognite_toolkit/_cdf_tk/feature_flags.py +1 -1
- cognite_toolkit/_cdf_tk/storageio/_applications.py +10 -2
- cognite_toolkit/_cdf_tk/storageio/_datapoints.py +272 -17
- cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py +5 -0
- cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py +23 -3
- cognite_toolkit/_cdf_tk/tracker.py +2 -2
- cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py +18 -3
- cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml +1 -1
- cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml +1 -1
- cognite_toolkit/_resources/cdf.toml +1 -1
- cognite_toolkit/_version.py +1 -1
- {cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/METADATA +1 -1
- {cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/RECORD +20 -20
- {cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/WHEEL +0 -0
- {cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/entry_points.txt +0 -0
- {cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/licenses/LICENSE +0 -0
cognite_toolkit/_cdf.py
CHANGED
@@ -37,7 +37,6 @@ from cognite_toolkit._cdf_tk.apps import (
 from cognite_toolkit._cdf_tk.cdf_toml import CDFToml
 from cognite_toolkit._cdf_tk.commands import (
     AboutCommand,
-    CollectCommand,
 )
 from cognite_toolkit._cdf_tk.constants import HINT_LEAD_TEXT, URL, USE_SENTRY
 from cognite_toolkit._cdf_tk.exceptions import (
@@ -155,17 +154,6 @@ def app() -> NoReturn:
     raise SystemExit(0)


-@_app.command("collect", hidden=True)
-def collect(
-    action: str = typer.Argument(
-        help="Whether to explicitly opt-in or opt-out of usage data collection. [opt-in, opt-out]"
-    ),
-) -> None:
-    """Collect usage information for the toolkit."""
-    cmd = CollectCommand()
-    cmd.run(lambda: cmd.execute(action))  # type: ignore [arg-type]
-
-
 @user_app.callback(invoke_without_command=True)
 def user_main(ctx: typer.Context) -> None:
     """Commands to give information about the toolkit."""
cognite_toolkit/_cdf_tk/apps/_download_app.py
CHANGED

@@ -15,6 +15,7 @@ from cognite_toolkit._cdf_tk.storageio import (
     AssetIO,
     CanvasIO,
     ChartIO,
+    DatapointsIO,
     DataSelector,
     EventIO,
     FileContentIO,
@@ -31,6 +32,7 @@ from cognite_toolkit._cdf_tk.storageio.selectors import (
     CanvasSelector,
     ChartExternalIdSelector,
     ChartSelector,
+    DataPointsDataSetSelector,
     DataSetSelector,
     FileIdentifierSelector,
     InstanceSpaceSelector,
@@ -73,6 +75,16 @@ class HierarchyFormats(str, Enum):
     ndjson = "ndjson"


+class DatapointFormats(str, Enum):
+    csv = "csv"
+    parquet = "parquet"
+
+
+class DatapointsDataTypes(str, Enum):
+    numeric = "numeric"
+    string = "string"
+
+
 class InstanceFormats(str, Enum):
     ndjson = "ndjson"

@@ -108,6 +120,8 @@ class DownloadApp(typer.Typer):
         self.command("events")(self.download_events_cmd)
         self.command("files")(self.download_files_cmd)
         self.command("hierarchy")(self.download_hierarchy_cmd)
+        if Flags.EXTEND_DOWNLOAD.is_enabled():
+            self.command("datapoints")(self.download_datapoints_cmd)
         self.command("instances")(self.download_instances_cmd)
         self.command("charts")(self.download_charts_cmd)
         self.command("canvas")(self.download_canvas_cmd)
@@ -841,6 +855,150 @@
             )
         )

+    @staticmethod
+    def download_datapoints_cmd(
+        dataset: Annotated[
+            str | None,
+            typer.Argument(
+                help="The dataset to download timeseries from. If not provided, an interactive selection will be made.",
+            ),
+        ] = None,
+        start_time: Annotated[
+            str | None,
+            typer.Option(
+                "--start-time",
+                "-s",
+                help="The start time for the datapoints to download. Can be in RFC3339 format or as a relative time (e.g., '1d-ago'). If not provided, all datapoints from the beginning will be downloaded.",
+            ),
+        ] = None,
+        end_time: Annotated[
+            str | None,
+            typer.Option(
+                "--end-time",
+                "-e",
+                help="The end time for the datapoints to download. Can be in RFC3339 format or as a relative time (e.g., '1d-ago'). If not provided, all datapoints up to the latest will be downloaded.",
+            ),
+        ] = None,
+        datapoint_type: Annotated[
+            DatapointsDataTypes,
+            typer.Option(
+                "--data-type",
+                "-d",
+                help="The type of datapoints to download.",
+            ),
+        ] = DatapointsDataTypes.numeric,
+        file_format: Annotated[
+            DatapointFormats,
+            typer.Option(
+                "--format",
+                "-f",
+                help="Format for downloading the datapoints.",
+            ),
+        ] = DatapointFormats.csv,
+        output_dir: Annotated[
+            Path,
+            typer.Option(
+                "--output-dir",
+                "-o",
+                help="Where to download the datapoints.",
+                allow_dash=True,
+            ),
+        ] = DEFAULT_DOWNLOAD_DIR,
+        limit: Annotated[
+            int,
+            typer.Option(
+                "--limit",
+                "-l",
+                help="The maximum number of timeseries to download datapoints from. Use -1 to download all timeseries."
+                "The maximum number of datapoints in total is 10 million and 100 000 per timeseries.",
+                max=10_000_000,
+            ),
+        ] = 1000,
+        verbose: Annotated[
+            bool,
+            typer.Option(
+                "--verbose",
+                "-v",
+                help="Turn on to get more verbose output when running the command",
+            ),
+        ] = False,
+    ) -> None:
+        """This command will download Datapoints from CDF into a temporary ."""
+        client = EnvironmentVariables.create_from_environment().get_client()
+        if dataset is None:
+            interactive = TimeSeriesInteractiveSelect(client, "download datapoints")
+            dataset = interactive.select_data_set(allow_empty=False)
+
+            datapoint_type = questionary.select(
+                "Select the type of datapoints to download:",
+                choices=[Choice(title=dt.value, value=dt) for dt in DatapointsDataTypes],
+                default=datapoint_type,
+            ).ask()
+
+            start_time = (
+                questionary.text(
+                    "Enter the start time for the datapoints to download (RFC3339 format or relative time, e.g., '1d-ago'). Leave empty to download from the beginning.",
+                    default=start_time or "",
+                ).ask()
+                or None
+            )
+            end_time = (
+                questionary.text(
+                    "Enter the end time for the datapoints to download (RFC3339 format or relative time, e.g., '1d-ago'). Leave empty to download up to the latest.",
+                    default=end_time or "",
+                ).ask()
+                or None
+            )
+            file_format = questionary.select(
+                "Select format to download the datapoints in:",
+                choices=[Choice(title=format_.value, value=format_) for format_ in DatapointFormats],
+                default=file_format,
+            ).ask()
+            output_dir = Path(
+                questionary.path(
+                    "Where to download the datapoints:", default=str(output_dir), only_directories=True
+                ).ask()
+            )
+            while True:
+                limit_str = questionary.text(
+                    "The maximum number of timeseries to download datapoints from. Use -1 to download all timeseries."
+                    "The maximum number of datapoints in total is 10 million and 100 000 per timeseries.",
+                    default=str(limit),
+                ).ask()
+                if limit_str is None:
+                    raise typer.Abort()
+                try:
+                    limit = int(limit_str)
+                except ValueError:
+                    print("[red]Please enter a valid integer for the limit.[/]")
+                else:
+                    if limit != -1 and limit < 1:
+                        print("[red]Please enter a valid integer greater than 0 or -1 for unlimited.[/]")
+                    else:
+                        break
+            verbose = questionary.confirm(
+                "Turn on to get more verbose output when running the command?", default=verbose
+            ).ask()
+
+        cmd = DownloadCommand()
+        selector = DataPointsDataSetSelector(
+            data_set_external_id=dataset,
+            start=start_time,
+            end=end_time,
+            data_type=datapoint_type.value,
+        )
+        cmd.run(
+            lambda: cmd.download(
+                selectors=[selector],
+                io=DatapointsIO(client),
+                output_dir=output_dir,
+                file_format=f".{file_format.value}",
+                compression="none",
+                limit=limit if limit != -1 else None,
+                verbose=verbose,
+            )
+        )
+
     @staticmethod
     def download_charts_cmd(
         ctx: typer.Context,
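The new datapoints subcommand above wires a DataPointsDataSetSelector into the generic download pipeline. For orientation, a minimal sketch that drives the same code path programmatically; the import locations of DownloadCommand and EnvironmentVariables are assumptions (only the class names appear in this diff), and the dataset external ID is hypothetical:

from pathlib import Path

# Assumed import paths; only the class names are shown in the diff above.
from cognite_toolkit._cdf_tk.commands import DownloadCommand
from cognite_toolkit._cdf_tk.storageio import DatapointsIO
from cognite_toolkit._cdf_tk.storageio.selectors import DataPointsDataSetSelector
from cognite_toolkit._cdf_tk.utils.auth import EnvironmentVariables

client = EnvironmentVariables.create_from_environment().get_client()
selector = DataPointsDataSetSelector(
    data_set_external_id="my_dataset",  # hypothetical dataset external ID
    start="30d-ago",
    data_type="numeric",
)
cmd = DownloadCommand()
# The CLI wraps this in cmd.run(...) for tracking and error handling;
# the download call itself mirrors the command body above.
cmd.download(
    selectors=[selector],
    io=DatapointsIO(client),
    output_dir=Path("tmp"),
    file_format=".csv",
    compression="none",
    limit=1000,
    verbose=False,
)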
cognite_toolkit/_cdf_tk/commands/_download.py
CHANGED

@@ -5,10 +5,21 @@ from pathlib import Path
 from cognite_toolkit._cdf_tk.constants import DATA_MANIFEST_STEM, DATA_RESOURCE_DIR
 from cognite_toolkit._cdf_tk.exceptions import ToolkitValueError
 from cognite_toolkit._cdf_tk.protocols import T_ResourceResponse
-from cognite_toolkit._cdf_tk.storageio import
+from cognite_toolkit._cdf_tk.storageio import (
+    ConfigurableStorageIO,
+    Page,
+    StorageIO,
+    T_Selector,
+    TableStorageIO,
+)
 from cognite_toolkit._cdf_tk.tk_warnings import LowSeverityWarning
 from cognite_toolkit._cdf_tk.utils.file import safe_write, sanitize_filename, yaml_safe_dump
-from cognite_toolkit._cdf_tk.utils.fileio import
+from cognite_toolkit._cdf_tk.utils.fileio import (
+    TABLE_WRITE_CLS_BY_FORMAT,
+    Compression,
+    FileWriter,
+    SchemaColumn,
+)
 from cognite_toolkit._cdf_tk.utils.producer_worker import ProducerWorkerExecutor
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal
cognite_toolkit/_cdf_tk/commands/_migrate/command.py
CHANGED

@@ -188,7 +188,11 @@ class MigrationCommand(ToolkitCommand):
             return None
         responses: Sequence[HTTPMessage]
         if dry_run:
-            responses = [
+            responses = [
+                SuccessResponseItems(
+                    status_code=200, body="", content=b"", ids=[item.source_id for item in data_item]
+                )
+            ]
         else:
             responses = target.upload_items(data_chunk=data_item, http_client=write_client, selector=selected)

cognite_toolkit/_cdf_tk/feature_flags.py
CHANGED

@@ -55,7 +55,7 @@ class Flags(Enum):
     )
     EXTEND_DOWNLOAD = FlagMetadata(
         visible=True,
-        description="Enables extended download to support downloading file content",
+        description="Enables extended download to support downloading file content and datapoints",
     )

     def is_enabled(self) -> bool:
cognite_toolkit/_cdf_tk/storageio/_applications.py
CHANGED

@@ -214,7 +214,7 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
         self.client.lookup.files.external_id(list(file_ids))

     def _dump_resource(self, canvas: IndustrialCanvas) -> dict[str, JsonVal]:
-        dumped = canvas.as_write().dump()
+        dumped = canvas.as_write().dump(keep_existing_version=False)
         references = dumped.get("containerReferences", [])
         if not isinstance(references, list):
             return dumped
@@ -230,10 +230,18 @@ class CanvasIO(UploadableStorageIO[CanvasSelector, IndustrialCanvas, IndustrialC
             properties = source["properties"]
             if not isinstance(properties, dict):
                 continue
+            reference_type = properties.get("containerReferenceType")
+            if (
+                reference_type
+                in {
+                    "charts",
+                    "dataGrid",
+                }
+            ):  # These container reference types are special cases with a resourceId statically set to -1, which is why we skip them
+                continue
             resource_id = properties.pop("resourceId", None)
             if not isinstance(resource_id, int):
                 continue
-            reference_type = properties.get("containerReferenceType")
             if reference_type == "asset":
                 external_id = self.client.lookup.assets.external_id(resource_id)
             elif reference_type == "timeseries":
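A note on the CanvasIO change above: "charts" and "dataGrid" container references carry a placeholder resourceId of -1 and are now skipped before the numeric-ID-to-externalId lookup. A standalone sketch of that filter, with hypothetical sample data:

SPECIAL_REFERENCE_TYPES = {"charts", "dataGrid"}

references = [
    {"containerReferenceType": "asset", "resourceId": 123},
    {"containerReferenceType": "charts", "resourceId": -1},  # skipped: placeholder ID
    {"containerReferenceType": "timeseries", "resourceId": 456},
]

# Only references outside the special set are candidates for the lookup.
to_look_up = [
    ref
    for ref in references
    if ref["containerReferenceType"] not in SPECIAL_REFERENCE_TYPES
    and isinstance(ref["resourceId"], int)
]
assert [ref["resourceId"] for ref in to_look_up] == [123, 456]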
cognite_toolkit/_cdf_tk/storageio/_datapoints.py
CHANGED

@@ -1,4 +1,5 @@
 from collections.abc import Iterable, Mapping, Sequence
+from itertools import groupby
 from typing import Any, ClassVar, cast

 from cognite.client._proto.data_point_insertion_request_pb2 import DataPointInsertionItem, DataPointInsertionRequest
@@ -9,33 +10,51 @@ from cognite.client._proto.data_points_pb2 import (
     StringDatapoint,
     StringDatapoints,
 )
+from cognite.client.data_classes import TimeSeriesFilter
+from cognite.client.data_classes.filters import Exists
+from cognite.client.data_classes.time_series import TimeSeriesProperty

 from cognite_toolkit._cdf_tk.client import ToolkitClient
 from cognite_toolkit._cdf_tk.exceptions import ToolkitNotImplementedError
 from cognite_toolkit._cdf_tk.tk_warnings import HighSeverityWarning
+from cognite_toolkit._cdf_tk.utils import humanize_collection
 from cognite_toolkit._cdf_tk.utils.dtype_conversion import (
     _EpochConverter,
     _Float64Converter,
     _TextConverter,
     _ValueConverter,
 )
+from cognite_toolkit._cdf_tk.utils.fileio import SchemaColumn
 from cognite_toolkit._cdf_tk.utils.fileio._readers import MultiFileReader
-from cognite_toolkit._cdf_tk.utils.http_client import
+from cognite_toolkit._cdf_tk.utils.http_client import (
+    DataBodyRequest,
+    HTTPClient,
+    HTTPMessage,
+    SimpleBodyRequest,
+    SuccessResponse,
+)
 from cognite_toolkit._cdf_tk.utils.useful_types import JsonVal

-from ._base import Page, TableUploadableStorageIO, UploadItem
-from .selectors import DataPointsFileSelector
+from ._base import Page, TableStorageIO, TableUploadableStorageIO, UploadItem
+from .selectors import DataPointsDataSetSelector, DataPointsFileSelector, DataPointsSelector


-class DatapointsIO(
+class DatapointsIO(
+    TableStorageIO[DataPointsSelector, DataPointListResponse],
+    TableUploadableStorageIO[DataPointsSelector, DataPointListResponse, DataPointInsertionRequest],
+):
     SUPPORTED_DOWNLOAD_FORMATS = frozenset({".csv"})
     SUPPORTED_COMPRESSIONS = frozenset({".gz"})
     CHUNK_SIZE = 10_000
-
+    DOWNLOAD_CHUNK_SIZE = 100
+    BASE_SELECTOR = DataPointsSelector
     KIND = "Datapoints"
     SUPPORTED_READ_FORMATS = frozenset({".csv"})
     UPLOAD_ENDPOINT = "/timeseries/data"
     UPLOAD_EXTRA_ARGS: ClassVar[Mapping[str, JsonVal] | None] = None
+    MAX_TOTAL_DATAPOINTS = 10_000_000
+    MAX_PER_REQUEST_DATAPOINTS = 100_000
+    MAX_PER_REQUEST_DATAPOINTS_AGGREGATION = 10_000

     def __init__(self, client: ToolkitClient) -> None:
         super().__init__(client)
@@ -47,24 +66,181 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointLis
     def as_id(self, item: DataPointListResponse) -> str:
         raise NotImplementedError()

-    def
-
+    def get_schema(self, selector: DataPointsSelector) -> list[SchemaColumn]:
+        return [
+            SchemaColumn(name="externalId", type="string"),
+            SchemaColumn(name="timestamp", type="epoch"),
+            SchemaColumn(
+                name="value",
+                type="string"
+                if isinstance(selector, DataPointsDataSetSelector) and selector.data_type == "string"
+                else "float",
+            ),
+        ]

-    def
-
+    def stream_data(
+        self, selector: DataPointsSelector, limit: int | None = None
+    ) -> Iterable[Page[DataPointListResponse]]:
+        if not isinstance(selector, DataPointsDataSetSelector):
+            raise RuntimeError(
+                f"{type(self).__name__} only supports streaming data for DataPointsDataSetSelector selectors. Got {type(selector).__name__}."
+            )
+        timeseries_count = self.count(selector)
+        if limit is not None:
+            timeseries_count = min(timeseries_count or 0, limit)
+        limit_per_timeseries = (
+            (self.MAX_TOTAL_DATAPOINTS // timeseries_count) if timeseries_count else self.MAX_PER_REQUEST_DATAPOINTS
+        )
+        limit_per_timeseries = min(limit_per_timeseries, self.MAX_PER_REQUEST_DATAPOINTS)
+        config = self.client.config
+        for timeseries in self.client.time_series(
+            data_set_external_ids=[selector.data_set_external_id],
+            chunk_size=self.DOWNLOAD_CHUNK_SIZE,
+            is_string=True if selector.data_type == "string" else False,
+            advanced_filter=Exists(TimeSeriesProperty.external_id),
+            limit=limit,
+            # We cannot use partitions here as it is not thread safe. This spawn multiple threads
+            # that are not shut down until all data is downloaded. We need to be able to abort.
+            partitions=None,
+        ):
+            if not timeseries:
+                continue
+            # Aggregation of datapoints per timeseries
+            items = [
+                {
+                    "id": ts.id,
+                    "start": selector.start,
+                    "end": selector.end,
+                    "limit": self.MAX_PER_REQUEST_DATAPOINTS_AGGREGATION // len(timeseries),
+                    "aggregates": ["count"],
+                    "granularity": "1200mo",
+                }
+                for ts in timeseries
+            ]
+            responses = self.client.http_client.request_with_retries(
+                SimpleBodyRequest(
+                    endpoint_url=config.create_api_url("/timeseries/data/list"),
+                    method="POST",
+                    accept="application/protobuf",
+                    content_type="application/json",
+                    body_content={"items": items},  # type: ignore[dict-item]
+                )
+            )
+            first_success = next((resp for resp in responses if isinstance(resp, SuccessResponse)), None)
+            if first_success is None:
+                continue
+            aggregate_response: DataPointListResponse = DataPointListResponse.FromString(first_success.content)
+            timeseries_ids_with_data: dict[int, int] = {}
+            for dp in aggregate_response.items:
+                if dp.aggregateDatapoints.datapoints:
+                    ts_datapoint_count = int(sum(agg.count for agg in dp.aggregateDatapoints.datapoints))
+                    timeseries_ids_with_data[dp.id] = ts_datapoint_count
+            total_datapoints = int(sum(timeseries_ids_with_data.values()))
+            if total_datapoints == 0:
+                continue
+
+            batch: list[dict[str, Any]] = []
+            batch_count = 0
+            for ts_id, count in timeseries_ids_with_data.items():
+                count = min(count, limit_per_timeseries)
+                ts_limit = count
+                left_over = 0
+                if (batch_count + ts_limit) > self.MAX_PER_REQUEST_DATAPOINTS:
+                    ts_limit = self.MAX_PER_REQUEST_DATAPOINTS - batch_count
+                    left_over = count - ts_limit
+                batch.append(
+                    {
+                        "id": ts_id,
+                        "start": selector.start,
+                        "end": selector.end,
+                        "limit": ts_limit,
+                    }
+                )
+                batch_count += ts_limit
+                if batch_count >= self.MAX_PER_REQUEST_DATAPOINTS:
+                    if page := self._fetch_datapoints_batch(batch, config):
+                        yield page
+                    batch = []

+                if left_over > 0:
+                    batch.append(
+                        {
+                            "id": ts_id,
+                            "start": selector.start,
+                            "end": selector.end,
+                            "limit": left_over,
+                        }
+                    )
+                    batch_count += left_over
+            if batch and (page := self._fetch_datapoints_batch(batch, config)):
+                yield page
+
+    def _fetch_datapoints_batch(self, batch: list[dict[str, Any]], config: Any) -> Page[DataPointListResponse] | None:
+        responses = self.client.http_client.request_with_retries(
+            SimpleBodyRequest(
+                endpoint_url=config.create_api_url("/timeseries/data/list"),
+                method="POST",
+                accept="application/protobuf",
+                content_type="application/json",
+                body_content={"items": batch},  # type: ignore[dict-item]
+            )
+        )
+        first_success = next((resp for resp in responses if isinstance(resp, SuccessResponse)), None)
+        if first_success is None:
+            return None
+        data_response: DataPointListResponse = DataPointListResponse.FromString(first_success.content)
+        return Page("Main", [data_response])
+
+    def count(self, selector: DataPointsSelector) -> int | None:
+        if isinstance(selector, DataPointsDataSetSelector):
+            return self.client.time_series.aggregate_count(
+                filter=TimeSeriesFilter(
+                    data_set_ids=[{"externalId": selector.data_set_external_id}],
+                    is_string=True if selector.data_type == "string" else False,
+                ),
+                # We only want time series that have externalID set.
+                advanced_filter=Exists(TimeSeriesProperty.external_id),
+            )
+        return None

     def data_to_json_chunk(
-        self, data_chunk: Sequence[DataPointListResponse], selector:
+        self, data_chunk: Sequence[DataPointListResponse], selector: DataPointsSelector | None = None
     ) -> list[dict[str, JsonVal]]:
         raise ToolkitNotImplementedError(
             f"Download of {type(DatapointsIO).__name__.removesuffix('IO')} does not support json format."
         )

+    def data_to_row(
+        self, data_chunk: Sequence[DataPointListResponse], selector: DataPointsSelector | None = None
+    ) -> list[dict[str, JsonVal]]:
+        output: list[dict[str, JsonVal]] = []
+        for response in data_chunk:
+            for item in response.items:
+                if item.numericDatapoints.datapoints:
+                    for dp in item.numericDatapoints.datapoints:
+                        output.append(
+                            {
+                                "externalId": item.externalId,
+                                "timestamp": dp.timestamp,
+                                "value": dp.value,
+                            }
+                        )
+                if item.stringDatapoints.datapoints:
+                    for dp in item.stringDatapoints.datapoints:
+                        output.append(
+                            {
+                                "externalId": item.externalId,
+                                "timestamp": dp.timestamp,
+                                "value": dp.value,
+                            }
+                        )
+        return output
+
     def upload_items(
         self,
         data_chunk: Sequence[UploadItem[DataPointInsertionRequest]],
         http_client: HTTPClient,
-        selector:
+        selector: DataPointsSelector | None = None,
     ) -> Sequence[HTTPMessage]:
         results: list[HTTPMessage] = []
         for item in data_chunk:
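The stream_data method above packs per-timeseries datapoint counts into /timeseries/data/list requests of at most MAX_PER_REQUEST_DATAPOINTS (100 000) datapoints, splitting a timeseries across two requests when it crosses the boundary. A simplified, self-contained sketch of that packing strategy (this version resets its counter for each batch and assumes counts are already capped at 100 000 per timeseries, as stream_data guarantees):

MAX_PER_REQUEST = 100_000


def pack_batches(counts: dict[int, int]) -> list[list[dict[str, int]]]:
    batches: list[list[dict[str, int]]] = []
    batch: list[dict[str, int]] = []
    batch_count = 0
    for ts_id, count in counts.items():
        ts_limit, left_over = count, 0
        if batch_count + ts_limit > MAX_PER_REQUEST:
            # This timeseries overflows the current request: take what fits,
            # carry the remainder into the next batch.
            ts_limit = MAX_PER_REQUEST - batch_count
            left_over = count - ts_limit
        batch.append({"id": ts_id, "limit": ts_limit})
        batch_count += ts_limit
        if batch_count >= MAX_PER_REQUEST:
            batches.append(batch)
            batch, batch_count = [], 0
        if left_over:
            batch.append({"id": ts_id, "limit": left_over})
            batch_count += left_over
    if batch:
        batches.append(batch)
    return batches


# One timeseries with 90_000 points crosses the 100_000 boundary and is split:
assert pack_batches({1: 60_000, 2: 90_000}) == [
    [{"id": 1, "limit": 60_000}, {"id": 2, "limit": 40_000}],
    [{"id": 2, "limit": 50_000}],
]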
@@ -80,12 +256,25 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointLis
         return results

     def row_to_resource(
-        self, source_id: str, row: dict[str, JsonVal], selector:
+        self, source_id: str, row: dict[str, JsonVal], selector: DataPointsSelector | None = None
     ) -> DataPointInsertionRequest:
         if selector is None:
             raise ValueError("Selector must be provided to convert row to DataPointInsertionItem.")
         # We assume that the row was read using the read_chunks method.
         rows = cast(dict[str, list[Any]], row)
+        if isinstance(selector, DataPointsFileSelector):
+            datapoints_items = self._rows_to_datapoint_items_file_selector(rows, selector, source_id)
+        elif isinstance(selector, DataPointsDataSetSelector):
+            datapoints_items = self._rows_to_datapoint_items_data_set_selector(rows, selector, source_id)
+        else:
+            raise RuntimeError(
+                f"Unsupported selector type {type(selector).__name__} for {type(self).__name__}. Trying to transform {source_id!r} from rows to DataPointInsertionRequest."
+            )
+        return DataPointInsertionRequest(items=datapoints_items)
+
+    def _rows_to_datapoint_items_file_selector(
+        self, rows: dict[str, list[Any]], selector: DataPointsFileSelector, source_id: str
+    ) -> list[DataPointInsertionItem]:
         if selector.timestamp_column not in rows:
             raise RuntimeError(f"Timestamp column '{selector.timestamp_column}' not found.")

@@ -133,7 +322,66 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointLis
             raise RuntimeError(f"Unsupported dtype {column.dtype} for column {col}.")

         datapoints_items.append(DataPointInsertionItem(**args))
-
+
+        return datapoints_items
+
+    def _rows_to_datapoint_items_data_set_selector(
+        self, rows: dict[str, list[Any]], selector: DataPointsDataSetSelector, source_id: str
+    ) -> list[DataPointInsertionItem]:
+        if "externalId" not in rows:
+            raise RuntimeError("Column 'externalId' not found.")
+        if "value" not in rows:
+            raise RuntimeError("Column 'value' not found.")
+        if "timestamp" not in rows:
+            raise RuntimeError("Column 'timestamp' not found.")
+
+        external_ids = rows["externalId"]
+        timestamps = list(
+            self._convert_values(
+                rows["timestamp"],
+                self._epoc_converter,
+                "timestamps (column 'timestamp')",
+                source_id,
+            )
+        )
+        values = list(
+            self._convert_values(
+                rows["value"],
+                self._numeric_converter if selector.data_type == "numeric" else self._string_converter,
+                "values (column 'value')",
+                source_id,
+            )
+        )
+        sorted_datapoints = sorted(zip(external_ids, timestamps, values), key=lambda x: x[0])
+        datapoints_items: list[DataPointInsertionItem] = []
+        if selector.data_type == "numeric":
+            for external_id, datapoints in groupby(sorted_datapoints, key=lambda x: x[0]):
+                datapoints_items.append(
+                    DataPointInsertionItem(
+                        externalId=external_id,
+                        numericDatapoints=NumericDatapoints(
+                            datapoints=[
+                                NumericDatapoint(timestamp=timestamp, value=value) for _, timestamp, value in datapoints
+                            ]
+                        ),
+                    )
+                )
+        elif selector.data_type == "string":
+            for external_id, datapoints in groupby(sorted_datapoints, key=lambda x: x[0]):
+                datapoints_items.append(
+                    DataPointInsertionItem(
+                        externalId=external_id,
+                        stringDatapoints=StringDatapoints(
+                            datapoints=[
+                                StringDatapoint(timestamp=timestamp, value=value) for _, timestamp, value in datapoints
+                            ]
+                        ),
+                    )
+                )
+        else:
+            raise RuntimeError(f"Unsupported data_type {selector.data_type} for DataPointsDataSetSelector.")
+
+        return datapoints_items

     def _convert_values(
         self, values: list[Any], converter: _ValueConverter, name: str, source_id: str
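The method above sorts the rows by externalId before grouping because itertools.groupby only merges adjacent equal keys. A minimal illustration with hypothetical rows:

from itertools import groupby

rows = [("ts_b", 1, 1.0), ("ts_a", 2, 2.0), ("ts_b", 3, 3.0)]

# Without sorting, ts_b is split into two separate insertion items.
unsorted_keys = [key for key, _ in groupby(rows, key=lambda r: r[0])]
assert unsorted_keys == ["ts_b", "ts_a", "ts_b"]

# After sorting, each timeseries yields exactly one group.
sorted_keys = [key for key, _ in groupby(sorted(rows, key=lambda r: r[0]), key=lambda r: r[0])]
assert sorted_keys == ["ts_a", "ts_b"]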
@@ -165,10 +413,11 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointLis

     @classmethod
     def read_chunks(
-        cls, reader: MultiFileReader, selector:
+        cls, reader: MultiFileReader, selector: DataPointsSelector
     ) -> Iterable[list[tuple[str, dict[str, JsonVal]]]]:
         if not reader.is_table:
-            raise RuntimeError("
+            raise RuntimeError(f"{cls.__name__} can only read from TableReader instances.")
+
         iterator = iter(reader.read_chunks_with_line_numbers())
         try:
             start_row, first = next(iterator)
@@ -176,6 +425,12 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointLis
             # Empty file
             return
         column_names = list(first.keys())
+        if isinstance(selector, DataPointsDataSetSelector):
+            if set(column_names) != selector.required_columns:
+                raise RuntimeError(
+                    "When uploading datapoints using a dataset manifest for datapoints, you must have exacatly the "
+                    f"columns: {humanize_collection(selector.required_columns)} in the file. Got {humanize_collection(column_names)}. "
+                )
         batch: dict[str, list[Any]] = {col: [value] for col, value in first.items()}
         last_row = start_row
         for row_no, chunk in iterator:
@@ -189,5 +444,5 @@ class DatapointsIO(TableUploadableStorageIO[DataPointsFileSelector, DataPointLis
             start_row = row_no + 1
             batch = {col: [] for col in column_names}
             last_row = row_no
-
-
+        if any(batch.values()):
+            yield [(f"rows {start_row} to{last_row}", batch)]  # type: ignore[list-item]
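read_chunks now rejects dataset-based datapoint uploads whose table does not contain exactly the columns externalId, timestamp, and value. A sketch of producing a conforming CSV with the standard library (file name and values are hypothetical):

import csv
from pathlib import Path

rows = [
    {"externalId": "my_timeseries", "timestamp": 1700000000000, "value": 21.5},
    {"externalId": "my_timeseries", "timestamp": 1700000060000, "value": 21.7},
]
# Exactly these three columns, matching DataPointsDataSetSelector.required_columns.
with Path("datapoints.csv").open("w", newline="") as f:
    writer = csv.DictWriter(f, fieldnames=["externalId", "timestamp", "value"])
    writer.writeheader()
    writer.writerows(rows)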
cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py
CHANGED

@@ -7,7 +7,9 @@ from ._base import DataSelector
 from ._canvas import CanvasExternalIdSelector, CanvasSelector
 from ._charts import AllChartsSelector, ChartExternalIdSelector, ChartOwnerSelector, ChartSelector
 from ._datapoints import (
+    DataPointsDataSetSelector,
     DataPointsFileSelector,
+    DataPointsSelector,
     ExternalIdColumn,
     InstanceColumn,
     InternalIdColumn,
@@ -40,6 +42,7 @@ Selector = Annotated[
     | AssetSubtreeSelector
     | AssetCentricFileSelector
     | DataSetSelector
+    | DataPointsDataSetSelector
     | DataPointsFileSelector
     | ChartExternalIdSelector
     | CanvasExternalIdSelector
@@ -62,7 +65,9 @@ __all__ = [
     "ChartExternalIdSelector",
     "ChartOwnerSelector",
     "ChartSelector",
+    "DataPointsDataSetSelector",
     "DataPointsFileSelector",
+    "DataPointsSelector",
     "DataSelector",
     "DataSetSelector",
     "ExternalIdColumn",
cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py
CHANGED

@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from functools import cached_property
-from typing import Annotated, Any, Literal
+from typing import Annotated, Any, ClassVar, Literal

 from cognite.client._proto.data_points_pb2 import (
     InstanceId,
@@ -50,10 +50,13 @@ TimeSeriesColumn = Annotated[
 ]


-class
-    type: Literal["datapointsFile"] = "datapointsFile"
+class DataPointsSelector(DataSelector, ABC):
     kind: Literal["Datapoints"] = "Datapoints"

+
+class DataPointsFileSelector(DataPointsSelector):
+    type: Literal["datapointsFile"] = "datapointsFile"
+
     timestamp_column: str
     columns: tuple[TimeSeriesColumn, ...]

@@ -67,3 +70,20 @@ class DataPointsFileSelector(DataSelector):
     @cached_property
     def id_by_column(self) -> dict[str, Column]:
         return {col.column: col for col in self.columns}
+
+
+class DataPointsDataSetSelector(DataPointsSelector):
+    required_columns: ClassVar[frozenset[str]] = frozenset({"externalId", "timestamp", "value"})
+    type: Literal["datapointsDataSet"] = "datapointsDataSet"
+
+    data_set_external_id: str
+    start: int | str | None = None
+    end: int | str | None = None
+    data_type: Literal["numeric", "string"] = "numeric"
+
+    @property
+    def group(self) -> str:
+        return f"DataSet_{self.data_set_external_id}"
+
+    def __str__(self) -> str:
+        return f"datapoints_dataset_{self.data_set_external_id}"
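Assuming DataPointsDataSetSelector behaves like the other pydantic selector models in this package (only the field and method declarations appear above), a minimal usage sketch with a hypothetical dataset ID:

from cognite_toolkit._cdf_tk.storageio.selectors import DataPointsDataSetSelector

selector = DataPointsDataSetSelector(data_set_external_id="my_dataset")
assert selector.data_type == "numeric"  # default
assert selector.group == "DataSet_my_dataset"
assert str(selector) == "datapoints_dataset_my_dataset"
assert selector.required_columns == {"externalId", "timestamp", "value"}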
cognite_toolkit/_cdf_tk/tracker.py
CHANGED

@@ -38,11 +38,11 @@ class Tracker:

     @property
     def opted_out(self) -> bool:
-        return
+        return False

     @property
     def opted_in(self) -> bool:
-        return
+        return True

     def track_cli_command(
         self,
cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py
CHANGED

@@ -115,6 +115,13 @@ class RequestMessage(HTTPMessage):
 @dataclass
 class SuccessResponse(ResponseMessage):
     body: str
+    content: bytes
+
+    def dump(self) -> dict[str, JsonVal]:
+        output = super().dump()
+        # We cannot serialize bytes, so we indicate its presence instead
+        output["content"] = "<bytes>" if self.content else None
+        return output


 @dataclass
@@ -134,7 +141,7 @@ class SimpleRequest(RequestMessage):

     @classmethod
     def create_success_response(cls, response: httpx.Response) -> Sequence[ResponseMessage]:
-        return [SuccessResponse(status_code=response.status_code, body=response.text)]
+        return [SuccessResponse(status_code=response.status_code, body=response.text, content=response.content)]

     @classmethod
     def create_failure_response(cls, response: httpx.Response) -> Sequence[HTTPMessage]:
@@ -309,7 +316,11 @@ class ItemsRequest(Generic[T_COVARIANT_ID], BodyRequest):

     def create_success_response(self, response: httpx.Response) -> Sequence[HTTPMessage]:
         ids = [item.as_id() for item in self.items]
-        return [
+        return [
+            SuccessResponseItems(
+                status_code=response.status_code, ids=ids, body=response.text, content=response.content
+            )
+        ]

     def create_failure_response(self, response: httpx.Response) -> Sequence[HTTPMessage]:
         error = ErrorDetails.from_response(response)
@@ -369,7 +380,11 @@ class ResponseList(UserList[ResponseMessage | FailedRequestMessage]):
         results: list[ResponseMessage | FailedRequestMessage] = []
         for message in self.data:
             if isinstance(message, SuccessResponse):
-                results.append(
+                results.append(
+                    SuccessResponseItems(
+                        status_code=message.status_code, content=message.content, ids=[item_id], body=message.body
+                    )
+                )
             elif isinstance(message, FailedResponse):
                 results.append(
                     FailedResponseItems(
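A sketch of the new SuccessResponse.dump() behavior, constructing the dataclass with the same arguments create_success_response passes above; any further fields on ResponseMessage are out of scope here and assumed absent:

from cognite_toolkit._cdf_tk.utils.http_client import SuccessResponse

response = SuccessResponse(status_code=200, body='{"items": []}', content=b"\x08\x01")
# Raw protobuf bytes are redacted with a marker rather than serialized.
assert response.dump()["content"] == "<bytes>"

empty = SuccessResponse(status_code=200, body="", content=b"")
assert empty.dump()["content"] is None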
cognite_toolkit/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.7.15"
+__version__ = "0.7.17"
{cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognite_toolkit
-Version: 0.7.15
+Version: 0.7.17
 Summary: Official Cognite Data Fusion tool for project templates and configuration deployment
 Project-URL: Homepage, https://docs.cognite.com/cdf/deploy/cdf_toolkit/
 Project-URL: Changelog, https://github.com/cognitedata/toolkit/releases
{cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/RECORD
CHANGED

@@ -1,23 +1,23 @@
 cognite_toolkit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cognite_toolkit/_cdf.py,sha256=
-cognite_toolkit/_version.py,sha256=
+cognite_toolkit/_cdf.py,sha256=sefGD2JQuOTBZhEqSj_ECbNZ7nTRN4AwGwX1pSUhoow,5636
+cognite_toolkit/_version.py,sha256=OaW7Nqbk6j_afc_1rZbViV0T5t8K2yQ3XoJdd50o-lc,23
 cognite_toolkit/config.dev.yaml,sha256=M33FiIKdS3XKif-9vXniQ444GTZ-bLXV8aFH86u9iUQ,332
 cognite_toolkit/_cdf_tk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognite_toolkit/_cdf_tk/cdf_toml.py,sha256=VSWV9h44HusWIaKpWgjrOMrc3hDoPTTXBXlp6-NOrIM,9079
 cognite_toolkit/_cdf_tk/constants.py,sha256=TplKm2J9pGRHq7nAnLI0caTMHetS04OIz3hfq-jvGzo,7236
 cognite_toolkit/_cdf_tk/exceptions.py,sha256=xG0jMwi5A20nvPvyo6sCyz_cyKycynPyIzpYiGR4gcU,6064
-cognite_toolkit/_cdf_tk/feature_flags.py,sha256=
+cognite_toolkit/_cdf_tk/feature_flags.py,sha256=ESxrtl2hbe0Fr6e1535MAjSIM326Zc6W7d2PtfjAWkk,2071
 cognite_toolkit/_cdf_tk/hints.py,sha256=UI1ymi2T5wCcYOpEbKbVaDnlyFReFy8TDtMVt-5E1h8,6493
 cognite_toolkit/_cdf_tk/plugins.py,sha256=0V14rceAWLZQF8iWdyL5QmK7xB796YaDEtb9RIj5AOc,836
 cognite_toolkit/_cdf_tk/protocols.py,sha256=Lc8XnBfmDZN6dwmSopmK7cFE9a9jZ2zdUryEeCXn27I,3052
-cognite_toolkit/_cdf_tk/tracker.py,sha256=
+cognite_toolkit/_cdf_tk/tracker.py,sha256=jhxzI8LOSZw3zDBPsTLW3zC2YcQK2abp_aVtRKcUIwE,5913
 cognite_toolkit/_cdf_tk/validation.py,sha256=KFdPgnNIbVM0yjFF0cqmpBB8MI8e-U-YbBYrP4IiClE,8441
 cognite_toolkit/_cdf_tk/apps/__init__.py,sha256=KKmhbpvPKTwqQS2g_XqAC2yvtPsvdl8wV5TgJA3zqhs,702
 cognite_toolkit/_cdf_tk/apps/_auth_app.py,sha256=ER7uYb3ViwsHMXiQEZpyhwU6TIjKaB9aEy32VI4MPpg,3397
 cognite_toolkit/_cdf_tk/apps/_core_app.py,sha256=YK0MOK7Tv3cDSe5_6o9GtM5n_6sE7I0Wm-Se4eJnyNM,13744
 cognite_toolkit/_cdf_tk/apps/_data_app.py,sha256=LeplXlxXtyIymRPgbatQrRFodU4VZBFxI0bqDutLSbg,806
 cognite_toolkit/_cdf_tk/apps/_dev_app.py,sha256=FaY67PFdKwdiMKgJbTcjHT1X2Xfbog2PKL6T-kcawyc,2818
-cognite_toolkit/_cdf_tk/apps/_download_app.py,sha256=
+cognite_toolkit/_cdf_tk/apps/_download_app.py,sha256=2nPn9P_9br9poynSpKKSZF7WYTYT--BfxlxXkSEeH-8,41156
 cognite_toolkit/_cdf_tk/apps/_dump_app.py,sha256=EPq6fWSaScj9ncKfRY253rRJ37er47PIM60IFgkQK_k,37127
 cognite_toolkit/_cdf_tk/apps/_landing_app.py,sha256=YR9z83OY7PhhgBVC5gmRLgo9iTXoGoZfRhOU3gd_r2o,888
 cognite_toolkit/_cdf_tk/apps/_migrate_app.py,sha256=g4S_53kbIgk57ziPLdRMuR6xUe434gkMqa69VmVm5Vg,39619
@@ -107,7 +107,7 @@ cognite_toolkit/_cdf_tk/commands/__init__.py,sha256=HQIHw18EU09fdo7bxbDXi8-0Lc5t
 cognite_toolkit/_cdf_tk/commands/_base.py,sha256=1gl8Y-yqfedRMfdbwM3iPTIUIZriX1UvC1deLsJSJwM,2667
 cognite_toolkit/_cdf_tk/commands/_changes.py,sha256=sU0KaTtPVSJgAZcaZ1Tkcajj36pmhd13kh7V8QbIED8,22987
 cognite_toolkit/_cdf_tk/commands/_cli_commands.py,sha256=TK6U_rm6VZT_V941kTyHMoulWgJzbDC8YIIQDPJ5x3w,1011
-cognite_toolkit/_cdf_tk/commands/_download.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_download.py,sha256=dVddH9t7oGx1kdQ3CCYYQb96Uxxy-xC8Opph98lo46U,6869
 cognite_toolkit/_cdf_tk/commands/_profile.py,sha256=_4iX3AHAI6eLmRVUlWXCSvVHx1BZW2yDr_i2i9ECg6U,43120
 cognite_toolkit/_cdf_tk/commands/_purge.py,sha256=HqN2T0OrHG53BfQrolYLi0kFa-_KuJjbZldFtQdMdnY,32914
 cognite_toolkit/_cdf_tk/commands/_questionary_style.py,sha256=h-w7fZKkGls3TrzIGBKjsZSGoXJJIYchgD1StfA40r8,806
@@ -129,7 +129,7 @@ cognite_toolkit/_cdf_tk/commands/resources.py,sha256=NeHVA1b1TMsP-2wgd5u1vif_N6n
 cognite_toolkit/_cdf_tk/commands/run.py,sha256=JyX9jLEQej9eRrHVCCNlw4GuF80qETSol3-T5CCofgw,37331
 cognite_toolkit/_cdf_tk/commands/_migrate/__init__.py,sha256=i5ldcTah59K0E4fH5gHTV0GRvtDCEvVses9WQzn9Lno,226
 cognite_toolkit/_cdf_tk/commands/_migrate/canvas.py,sha256=R-z0yfOFcJZj-zRLhN-7z_-SLxqzSmONMgrbzNF9dGs,8843
-cognite_toolkit/_cdf_tk/commands/_migrate/command.py,sha256=
+cognite_toolkit/_cdf_tk/commands/_migrate/command.py,sha256=l2P0Em05aEJvNZH4WkEIm-QfO3TAjG1rc_YxELuQIQM,14214
 cognite_toolkit/_cdf_tk/commands/_migrate/conversion.py,sha256=Ew9JRYrd-Ol9G9csTzpnhXAgCFnX67MwDYOTsdJLP3E,16803
 cognite_toolkit/_cdf_tk/commands/_migrate/creators.py,sha256=FTu7w3G8KyPY8pagG3KdPpOmpLcjehaAg2auEy6iM7A,9605
 cognite_toolkit/_cdf_tk/commands/_migrate/data_classes.py,sha256=_vMS_qAPj4yup1VnmmojPVigAZtyPQH7PM0Raby5tao,10619
@@ -244,20 +244,20 @@ cognite_toolkit/_cdf_tk/resource_classes/robotics/location.py,sha256=dbc9HT-bc2Q
 cognite_toolkit/_cdf_tk/resource_classes/robotics/map.py,sha256=j77z7CzCMiMj8r94BdUKCum9EuZRUjaSlUAy9K9DL_Q,942
 cognite_toolkit/_cdf_tk/storageio/__init__.py,sha256=h5Wr4i7zNIgsslrsRJxmp7ls4bNRKl0uZzQ7GLRMP7g,1920
 cognite_toolkit/_cdf_tk/storageio/_annotations.py,sha256=JI_g18_Y9S7pbc9gm6dZMyo3Z-bCndJXF9C2lOva0bQ,4848
-cognite_toolkit/_cdf_tk/storageio/_applications.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/_applications.py,sha256=VlTRHqp8jVu16SW7LtrN05BNYZHSPtJ_wf9EsBAAsvE,16419
 cognite_toolkit/_cdf_tk/storageio/_asset_centric.py,sha256=GZZSQ8NLCP8tSQKOc8BUb2NCZAvw_BoCVcA1Og7vnIs,30821
 cognite_toolkit/_cdf_tk/storageio/_base.py,sha256=ElvqhIEBnhcz0yY1Ds164wVN0_7CFNK-uT0-z7LcR9U,13067
 cognite_toolkit/_cdf_tk/storageio/_data_classes.py,sha256=s3TH04BJ1q7rXndRhEbVMEnoOXjxrGg4n-w9Z5uUL-o,3480
-cognite_toolkit/_cdf_tk/storageio/_datapoints.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/_datapoints.py,sha256=xE1YgoP98-mJjIeF5536KwChzhVY90KYl-bW5sRVhFQ,20206
 cognite_toolkit/_cdf_tk/storageio/_file_content.py,sha256=qcJDk7YGUZ7YmVTYUDAi8eOK2sozEoxmehpmWrA45Ak,17127
 cognite_toolkit/_cdf_tk/storageio/_instances.py,sha256=t9fNpHnT6kCk8LDoPj3qZXmHpyDbPF5BZ6pI8ziTyFw,10810
 cognite_toolkit/_cdf_tk/storageio/_raw.py,sha256=pgZN5MbqDwMZl9Ow1KouDJUO2Ngga8_b6hwv7H31SVQ,5161
-cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/selectors/__init__.py,sha256=VUK1A76zsu4a25A3oaPUrQEEuRcCpUBK6o8UMMKw7qg,2458
 cognite_toolkit/_cdf_tk/storageio/selectors/_asset_centric.py,sha256=7Iv_ccVX6Vzt3ZLFZ0Er3hN92iEsFTm9wgF-yermOWE,1467
 cognite_toolkit/_cdf_tk/storageio/selectors/_base.py,sha256=hjFkbmNGsK3QIW-jnJV_8YNmvVROERxzG82qIZhU7SM,3065
 cognite_toolkit/_cdf_tk/storageio/selectors/_canvas.py,sha256=E9S-wr-JUqRosI_2cSCfR0tF8MdIFTrMxDItuWRcuO4,597
 cognite_toolkit/_cdf_tk/storageio/selectors/_charts.py,sha256=lQHuNtF3i6SEIMPAlziMm0QlqRcvZJ7MKIug6HMTDrs,1012
-cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py,sha256=
+cognite_toolkit/_cdf_tk/storageio/selectors/_datapoints.py,sha256=qdR9wttPUoHZIRQjt2RiLO0cPH8C4CD09GsH1KA5KF4,2343
 cognite_toolkit/_cdf_tk/storageio/selectors/_file_content.py,sha256=A2ikNImKTC-WuG5c-WLEJ6LfaB7FytXS57-D2ORuA1k,5326
 cognite_toolkit/_cdf_tk/storageio/selectors/_instances.py,sha256=NCFSJrAw52bNX6UTfOali8PvNjlqHnvxzL0hYBr7ZmA,4934
 cognite_toolkit/_cdf_tk/storageio/selectors/_raw.py,sha256=sZq9C4G9DMe3S46_usKet0FphQ6ow7cWM_PfXrEAakk,503
@@ -297,7 +297,7 @@ cognite_toolkit/_cdf_tk/utils/fileio/_readers.py,sha256=IjOSHyW0GiID_lKdgAwQZao9
 cognite_toolkit/_cdf_tk/utils/fileio/_writers.py,sha256=mc23m0kJgl57FUDvwLmS7yR3xVZWQguPJa_63-qQ_L0,17731
 cognite_toolkit/_cdf_tk/utils/http_client/__init__.py,sha256=G8b7Bg4yIet5R4Igh3dS2SntWzE6I0iTGBeNlNsSxkQ,857
 cognite_toolkit/_cdf_tk/utils/http_client/_client.py,sha256=NTRfloXkCiS_rl5Vl1D_hsyTTowMKWDsiIR4oGwTADI,11208
-cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py,sha256=
+cognite_toolkit/_cdf_tk/utils/http_client/_data_classes.py,sha256=6A4qinERG2MhnmNlYogfi0La4Y7Kk19_Mn4QDmg5QHQ,14779
 cognite_toolkit/_cdf_tk/utils/http_client/_exception.py,sha256=fC9oW6BN0HbUe2AkYABMP7Kj0-9dNYXVFBY5RQztq2c,126
 cognite_toolkit/_cdf_tk/utils/http_client/_tracker.py,sha256=EBBnd-JZ7nc_jYNFJokCHN2UZ9sx0McFLZvlceUYYic,1215
 cognite_toolkit/_repo_files/.env.tmpl,sha256=UmgKZVvIp-OzD8oOcYuwb_6c7vSJsqkLhuFaiVgK7RI,972
@@ -305,13 +305,13 @@ cognite_toolkit/_repo_files/.gitignore,sha256=ip9kf9tcC5OguF4YF4JFEApnKYw0nG0vPi
 cognite_toolkit/_repo_files/AzureDevOps/.devops/README.md,sha256=OLA0D7yCX2tACpzvkA0IfkgQ4_swSd-OlJ1tYcTBpsA,240
 cognite_toolkit/_repo_files/AzureDevOps/.devops/deploy-pipeline.yml,sha256=brULcs8joAeBC_w_aoWjDDUHs3JheLMIR9ajPUK96nc,693
 cognite_toolkit/_repo_files/AzureDevOps/.devops/dry-run-pipeline.yml,sha256=OBFDhFWK1mlT4Dc6mDUE2Es834l8sAlYG50-5RxRtHk,723
-cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=
-cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=
-cognite_toolkit/_resources/cdf.toml,sha256=
+cognite_toolkit/_repo_files/GitHub/.github/workflows/deploy.yaml,sha256=EUzITcPyar_jQHyPpumj-oz8Wh29jxcxyuHGWgEacyI,667
+cognite_toolkit/_repo_files/GitHub/.github/workflows/dry-run.yaml,sha256=8dZ8qd7ULG37B1OZSolqH1YWjRlQnOIozjz9d633z5A,2430
+cognite_toolkit/_resources/cdf.toml,sha256=MCLdFpOFpEYVnJyvVUyEQp-oOnKFrsnCkxIBxsmakjw,475
 cognite_toolkit/demo/__init__.py,sha256=-m1JoUiwRhNCL18eJ6t7fZOL7RPfowhCuqhYFtLgrss,72
 cognite_toolkit/demo/_base.py,sha256=6xKBUQpXZXGQ3fJ5f7nj7oT0s2n7OTAGIa17ZlKHZ5U,8052
-cognite_toolkit-0.7.
-cognite_toolkit-0.7.
-cognite_toolkit-0.7.
-cognite_toolkit-0.7.
-cognite_toolkit-0.7.
+cognite_toolkit-0.7.17.dist-info/METADATA,sha256=KYWmME0vzKZxaYwIG8MBMW9V746d9Ndb1RTiTd_KqMY,4501
+cognite_toolkit-0.7.17.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+cognite_toolkit-0.7.17.dist-info/entry_points.txt,sha256=JlR7MH1_UMogC3QOyN4-1l36VbrCX9xUdQoHGkuJ6-4,83
+cognite_toolkit-0.7.17.dist-info/licenses/LICENSE,sha256=CW0DRcx5tL-pCxLEN7ts2S9g2sLRAsWgHVEX4SN9_Mc,752
+cognite_toolkit-0.7.17.dist-info/RECORD,,
{cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/WHEEL
File without changes

{cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/entry_points.txt
File without changes

{cognite_toolkit-0.7.15.dist-info → cognite_toolkit-0.7.17.dist-info}/licenses/LICENSE
File without changes