cognite-extractor-utils 7.6.0__py3-none-any.whl → 7.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -16,7 +16,7 @@
  Cognite extractor utils is a Python package that simplifies the development of new extractors.
  """

- __version__ = "7.6.0"
+ __version__ = "7.7.0"
  from .base import Extractor

  __all__ = ["Extractor"]
@@ -71,6 +71,7 @@ from .events import EventUploadQueue
  from .files import BytesUploadQueue, FileUploadQueue, IOFileUploadQueue
  from .raw import RawUploadQueue
  from .time_series import (
+     CDMTimeSeriesUploadQueue,
      DataPoint,
      DataPointList,
      SequenceUploadQueue,
@@ -81,6 +82,7 @@ from .time_series import (
  __all__ = [
      "AssetUploadQueue",
      "BytesUploadQueue",
+     "CDMTimeSeriesUploadQueue",
      "DataPoint",
      "DataPointList",
      "EventUploadQueue",
@@ -19,7 +19,7 @@ import math
  from collections.abc import Callable
  from datetime import datetime
  from types import TracebackType
- from typing import Any
+ from typing import Any, Generic, Literal, TypedDict, TypeVar

  from cognite.client import CogniteClient
  from cognite.client.data_classes import (
@@ -29,6 +29,9 @@ from cognite.client.data_classes import (
      StatusCode,
      TimeSeries,
  )
+ from cognite.client.data_classes.data_modeling import NodeId
+ from cognite.client.data_classes.data_modeling.extractor_extensions.v1 import CogniteExtractorTimeSeriesApply
+ from cognite.client.data_classes.data_modeling.instances import DirectRelationReference
  from cognite.client.exceptions import CogniteDuplicatedError, CogniteNotFoundError
  from cognite.extractorutils.threading import CancellationToken
  from cognite.extractorutils.uploader._base import (
@@ -62,6 +65,18 @@ DataPointWithStatus = tuple[TimeStamp, float, FullStatusCode] | tuple[TimeStamp,
  DataPoint = DataPointWithoutStatus | DataPointWithStatus
  DataPointList = list[DataPoint]

+ TQueue = TypeVar("TQueue", bound="BaseTimeSeriesUploadQueue")
+ IdType = TypeVar("IdType", EitherId, NodeId)
+
+
+ class CdmDatapointsPayload(TypedDict):
+     """
+     Represents a payload for CDF datapoints, linking them to a specific instance.
+     """
+
+     instanceId: NodeId
+     datapoints: DataPointList
+

  def default_time_series_factory(external_id: str, datapoints: DataPointList) -> TimeSeries:
      """
@@ -82,9 +97,9 @@ def default_time_series_factory(external_id: str, datapoints: DataPointList) ->
      return TimeSeries(external_id=external_id, is_string=is_string)


- class TimeSeriesUploadQueue(AbstractUploadQueue):
+ class BaseTimeSeriesUploadQueue(AbstractUploadQueue, Generic[IdType]):
      """
-     Upload queue for time series.
+     Abstract base upload queue for time series.

      Args:
          cdf_client: Cognite Data Fusion client to use
@@ -96,12 +111,6 @@ class TimeSeriesUploadQueue(AbstractUploadQueue):
              methods).
          trigger_log_level: Log level to log upload triggers to.
          thread_name: Thread name of uploader thread.
-         create_missing: Create missing time series if possible (ie, if external id is used). Either given as a boolean
-             (True would auto-create a time series with nothing but an external ID), or as a factory function taking an
-             external ID and a list of datapoints about to be inserted and returning a TimeSeries object.
-         data_set_id: Data set id passed to create_missing. Does nothing if create_missing is False.
-             If a custom timeseries creation method is set in create_missing, this is used as fallback if
-             that method does not set data set id on its own.
      """

      def __init__(
@@ -112,8 +121,6 @@ class TimeSeriesUploadQueue(AbstractUploadQueue):
          max_upload_interval: int | None = None,
          trigger_log_level: str = "DEBUG",
          thread_name: str | None = None,
-         create_missing: Callable[[str, DataPointList], TimeSeries] | bool = False,
-         data_set_id: int | None = None,
          cancellation_token: CancellationToken | None = None,
      ):
          # Super sets post_upload and threshold
@@ -127,21 +134,11 @@ class TimeSeriesUploadQueue(AbstractUploadQueue):
              cancellation_token,
          )

-         self.missing_factory: Callable[[str, DataPointList], TimeSeries]
-
-         if isinstance(create_missing, bool):
-             self.create_missing = create_missing
-             self.missing_factory = default_time_series_factory
-         else:
-             self.create_missing = True
-             self.missing_factory = create_missing
-
-         self.upload_queue: dict[EitherId, DataPointList] = {}
+         self.upload_queue: dict[IdType, DataPointList] = {}

          self.points_queued = TIMESERIES_UPLOADER_POINTS_QUEUED
          self.points_written = TIMESERIES_UPLOADER_POINTS_WRITTEN
          self.queue_size = TIMESERIES_UPLOADER_QUEUE_SIZE
-         self.data_set_id = data_set_id

      def _verify_datapoint_time(self, time: int | float | datetime | str) -> bool:
          if isinstance(time, int | float):
@@ -171,6 +168,109 @@ class TimeSeriesUploadQueue(AbstractUploadQueue):
          else:
              return True

+     def _sanitize_datapoints(self, datapoints: DataPointList | None) -> DataPointList:
+         datapoints = datapoints or []
+         old_len = len(datapoints)
+         datapoints = list(filter(self._is_datapoint_valid, datapoints))
+
+         new_len = len(datapoints)
+
+         if old_len > new_len:
+             diff = old_len - new_len
+             self.logger.warning(f"Discarding {diff} datapoints due to bad timestamp or value")
+             TIMESERIES_UPLOADER_POINTS_DISCARDED.inc(diff)
+
+         return datapoints
+
+     def __exit__(
+         self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+     ) -> None:
+         """
+         Wraps around stop method, for use as context manager.
+
+         Args:
+             exc_type: Exception type
+             exc_val: Exception value
+             exc_tb: Traceback
+         """
+         self.stop()
+
+     def __len__(self) -> int:
+         """
+         The size of the upload queue.
+
+         Returns:
+             Number of data points in queue
+         """
+         return self.upload_queue_size
+
+     def __enter__(self: TQueue) -> TQueue:
+         """
+         Wraps around start method, for use as context manager.
+
+         Returns:
+             self
+         """
+         self.start()
+         return self
+
+
+ class TimeSeriesUploadQueue(BaseTimeSeriesUploadQueue[EitherId]):
+     """
+     Upload queue for time series.
+
+     Args:
+         cdf_client: Cognite Data Fusion client to use
+         post_upload_function: A function that will be called after each upload. The function will be given one argument:
+             A list of dicts containing the datapoints that were uploaded (on the same format as the kwargs in
+             datapoints upload in the Cognite SDK).
+         max_queue_size: Maximum size of upload queue. Defaults to no max size.
+         max_upload_interval: Automatically trigger an upload each m seconds when run as a thread (use start/stop
+             methods).
+         trigger_log_level: Log level to log upload triggers to.
+         thread_name: Thread name of uploader thread.
+         create_missing: Create missing time series if possible (ie, if external id is used). Either given as a boolean
+             (True would auto-create a time series with nothing but an external ID), or as a factory function taking an
+             external ID and a list of datapoints about to be inserted and returning a TimeSeries object.
+         data_set_id: Data set id passed to create_missing. Does nothing if create_missing is False.
+             If a custom timeseries creation method is set in create_missing, this is used as fallback if
+             that method does not set data set id on its own.
+     """
+
+     def __init__(
+         self,
+         cdf_client: CogniteClient,
+         post_upload_function: Callable[[list[dict[str, str | DataPointList]]], None] | None = None,
+         max_queue_size: int | None = None,
+         max_upload_interval: int | None = None,
+         trigger_log_level: str = "DEBUG",
+         thread_name: str | None = None,
+         create_missing: Callable[[str, DataPointList], TimeSeries] | bool = False,
+         data_set_id: int | None = None,
+         cancellation_token: CancellationToken | None = None,
+     ):
+         # Super sets post_upload and threshold
+         super().__init__(
+             cdf_client,
+             post_upload_function,
+             max_queue_size,
+             max_upload_interval,
+             trigger_log_level,
+             thread_name,
+             cancellation_token,
+         )
+
+         self.missing_factory: Callable[[str, DataPointList], TimeSeries]
+
+         if isinstance(create_missing, bool):
+             self.create_missing = create_missing
+             self.missing_factory = default_time_series_factory
+         else:
+             self.create_missing = True
+             self.missing_factory = create_missing
+
+         self.data_set_id = data_set_id
+
      def add_to_upload_queue(
          self,
          *,
@@ -188,16 +288,7 @@ class TimeSeriesUploadQueue(AbstractUploadQueue):
              external_id: External ID of time series. Either this or external_id must be set.
              datapoints: list of data points to add
          """
-         datapoints = datapoints or []
-         old_len = len(datapoints)
-         datapoints = list(filter(self._is_datapoint_valid, datapoints))
-
-         new_len = len(datapoints)
-
-         if old_len > new_len:
-             diff = old_len - new_len
-             self.logger.warning(f"Discarding {diff} datapoints due to bad timestamp or value")
-             TIMESERIES_UPLOADER_POINTS_DISCARDED.inc(diff)
+         datapoints = self._sanitize_datapoints(datapoints)

          either_id = EitherId(id=id, external_id=external_id)

@@ -310,37 +401,213 @@ class TimeSeriesUploadQueue(AbstractUploadQueue):
          self.upload_queue_size = 0
          self.queue_size.set(self.upload_queue_size)

-     def __enter__(self) -> "TimeSeriesUploadQueue":
+
+ class CDMTimeSeriesUploadQueue(BaseTimeSeriesUploadQueue[NodeId]):
+     """
+     Upload queue for CDM time series.
+
+     Args:
+         cdf_client: Cognite Data Fusion client to use
+         post_upload_function: A function that will be called after each upload. The function will be given one argument:
+             A list of dicts containing the datapoints that were uploaded (on the same format as the kwargs in
+             datapoints upload in the Cognite SDK).
+         max_queue_size: Maximum size of upload queue. Defaults to no max size.
+         max_upload_interval: Automatically trigger an upload each m seconds when run as a thread (use start/stop
+             methods).
+         trigger_log_level: Log level to log upload triggers to.
+         thread_name: Thread name of uploader thread.
+     """
+
+     def __init__(
+         self,
+         cdf_client: CogniteClient,
+         post_upload_function: Callable[[list[dict[str, str | DataPointList]]], None] | None = None,
+         max_queue_size: int | None = None,
+         max_upload_interval: int | None = None,
+         trigger_log_level: str = "DEBUG",
+         thread_name: str | None = None,
+         create_missing: Callable[[NodeId, DataPointList], CogniteExtractorTimeSeriesApply] | bool = False,
+         cancellation_token: CancellationToken | None = None,
+         source: DirectRelationReference | None = None,
+     ):
+         super().__init__(
+             cdf_client,
+             post_upload_function,
+             max_queue_size,
+             max_upload_interval,
+             trigger_log_level,
+             thread_name,
+             cancellation_token,
+         )
+
+         self.missing_factory: Callable[[NodeId, DataPointList], CogniteExtractorTimeSeriesApply]
+         self.source = source
+
+         if isinstance(create_missing, bool):
+             self.create_missing = create_missing
+             self.missing_factory = self.default_cdm_time_series_factory
+         else:
+             self.create_missing = True
+             self.missing_factory = create_missing
+
+     def default_cdm_time_series_factory(
+         self, instance_id: NodeId, datapoints: DataPointList
+     ) -> CogniteExtractorTimeSeriesApply:
          """
-         Wraps around start method, for use as context manager.
+         Default CDM time series factory used when create_missing in a CDMTimeSeriesUploadQueue is given as a boolean.

+         Args:
+             instance_id: Instance ID of time series to create
+             datapoints: The list of datapoints that were tried to be inserted
+             source: The source of the time series, used for creating the DirectRelationReference
          Returns:
-             self
+             A CogniteExtractorTimeSeriesApply object with instance_id set, and the is_string automatically detected
          """
-         self.start()
-         return self
+         is_string = (
+             isinstance(datapoints[0].get("value"), str)
+             if isinstance(datapoints[0], dict)
+             else isinstance(datapoints[0][1], str)
+         )

-     def __exit__(
-         self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+         time_series_type: Literal["numeric", "string"] = "string" if is_string else "numeric"
+
+         return CogniteExtractorTimeSeriesApply(
+             space=instance_id.space,
+             external_id=instance_id.external_id,
+             is_step=False,
+             time_series_type=time_series_type,
+             source=self.source,
+         )
+
+     def add_to_upload_queue(
+         self,
+         *,
+         instance_id: NodeId,
+         datapoints: DataPointList | None = None,
      ) -> None:
          """
-         Wraps around stop method, for use as context manager.
+         Add data points to upload queue.
+
+         The queue will be uploaded if the queue size is larger than the threshold specified in the __init__.

          Args:
-             exc_type: Exception type
-             exc_val: Exception value
-             exc_tb: Traceback
+             instance_id: The identifier for the time series to which the datapoints belong.
+             datapoints: list of data points to add
          """
-         self.stop()
+         datapoints = self._sanitize_datapoints(datapoints)

-     def __len__(self) -> int:
-         """
-         The size of the upload queue.
+         with self.lock:
+             if instance_id not in self.upload_queue:
+                 self.upload_queue[instance_id] = []

-         Returns:
-             Number of data points in queue
+             self.upload_queue[instance_id].extend(datapoints)
+             self.points_queued.inc(len(datapoints))
+             self.upload_queue_size += len(datapoints)
+             self.queue_size.set(self.upload_queue_size)
+
+             self._check_triggers()
+
+     def upload(self) -> None:
          """
-         return self.upload_queue_size
+         Trigger an upload of the queue, clears queue afterwards.
+         """
+
+         @retry(
+             exceptions=cognite_exceptions(),
+             cancellation_token=self.cancellation_token,
+             tries=RETRIES,
+             delay=RETRY_DELAY,
+             max_delay=RETRY_MAX_DELAY,
+             backoff=RETRY_BACKOFF_FACTOR,
+         )
+         def _upload_batch(upload_this: list[CdmDatapointsPayload], retries: int = 5) -> list[CdmDatapointsPayload]:
+             if len(upload_this) == 0:
+                 return upload_this
+
+             try:
+                 self.cdf_client.time_series.data.insert_multiple(upload_this)  # type: ignore[arg-type]
+             except CogniteNotFoundError as ex:
+                 if not retries:
+                     raise ex
+
+                 if not self.create_missing:
+                     self.logger.error("Could not upload data points to %s: %s", str(ex.not_found), str(ex))
+
+                 # Get IDs of time series that exists, but failed because of the non-existing time series
+                 retry_these = [
+                     NodeId(id_dict["instanceId"]["space"], id_dict["instanceId"]["externalId"])
+                     for id_dict in ex.failed
+                     if id_dict not in ex.not_found
+                 ]
+
+                 if self.create_missing:
+                     # Get the time series that can be created
+                     create_these_ids = {
+                         NodeId(id_dict["instanceId"]["space"], id_dict["instanceId"]["externalId"])
+                         for id_dict in ex.not_found
+                     }
+                     self.logger.info(f"Creating {len(create_these_ids)} time series")
+
+                     datapoints_lists: dict[NodeId, DataPointList] = {
+                         ts_dict["instanceId"]: ts_dict["datapoints"]
+                         for ts_dict in upload_this
+                         if ts_dict["instanceId"] in create_these_ids
+                     }
+
+                     to_create: list[CogniteExtractorTimeSeriesApply] = [
+                         self.missing_factory(instance_id, datapoints_lists[instance_id])
+                         for instance_id in create_these_ids
+                     ]
+
+                     instance_result = self.cdf_client.data_modeling.instances.apply(to_create)
+                     retry_these.extend([node.as_id() for node in instance_result.nodes])
+
+                     if len(ex.not_found) != len(create_these_ids):
+                         missing = [
+                             id_dict
+                             for id_dict in ex.not_found
+                             if NodeId(id_dict["instanceId"]["space"], id_dict["instanceId"]["externalId"])
+                             not in retry_these
+                         ]
+                         missing_num = len(ex.not_found) - len(create_these_ids)
+                         self.logger.error(
+                             f"{missing_num} time series not found, and could not be created automatically: "
+                             + str(missing)
+                             + " Data will be dropped"
+                         )
+
+                 # Remove entries with non-existing time series from upload queue
+                 upload_this = [entry for entry in upload_this if entry["instanceId"] in retry_these]
+
+                 # Upload remaining
+                 _upload_batch(upload_this, retries - 1)
+
+             return upload_this
+
+         if len(self.upload_queue) == 0:
+             return
+
+         with self.lock:
+             upload_this = _upload_batch(
+                 [
+                     {"instanceId": instance_id, "datapoints": list(datapoints)}
+                     for instance_id, datapoints in self.upload_queue.items()
+                     if len(datapoints) > 0
+                 ]
+             )
+
+             for datapoints in self.upload_queue.values():
+                 self.points_written.inc(len(datapoints))
+
+             try:
+                 self._post_upload(upload_this)
+             except Exception as e:
+                 self.logger.error("Error in upload callback: %s", str(e))
+
+             self.upload_queue.clear()
+             self.logger.info(f"Uploaded {self.upload_queue_size} datapoints")
+             self.upload_queue_size = 0
+             self.queue_size.set(self.upload_queue_size)


  class SequenceUploadQueue(AbstractUploadQueue):
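
The hunk above adds CDMTimeSeriesUploadQueue, a variant of TimeSeriesUploadQueue that keys datapoints on data modeling NodeIds and, when create_missing is enabled, creates missing instances as CogniteExtractorTimeSeriesApply nodes before retrying the insert. A rough usage sketch, assuming a CogniteClient has already been configured (the space and external ID values are placeholders):

    from cognite.client import CogniteClient
    from cognite.client.data_classes.data_modeling import NodeId
    from cognite.extractorutils.uploader import CDMTimeSeriesUploadQueue

    client = CogniteClient()  # assumes a default client configuration is set up elsewhere

    # Upload whenever 50 000 points are queued or every 30 seconds
    with CDMTimeSeriesUploadQueue(
        client,
        max_queue_size=50_000,
        max_upload_interval=30,
        create_missing=True,  # missing instances are built by default_cdm_time_series_factory
    ) as queue:
        queue.add_to_upload_queue(
            instance_id=NodeId("my-space", "my-time-series"),
            datapoints=[(1700000000000, 21.3), (1700000060000, 21.7)],
        )
        queue.upload()  # optional; the size and interval triggers also flush the queue
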
@@ -31,8 +31,13 @@ from cognite.extractorutils.configtools import BaseConfig, TimeIntervalConfig
  from cognite.extractorutils.metrics import BaseMetrics
  from cognite.extractorutils.statestore import AbstractStateStore
  from cognite.extractorutils.threading import CancellationToken
- from cognite.extractorutils.uploader import EventUploadQueue, RawUploadQueue, TimeSeriesUploadQueue
- from cognite.extractorutils.uploader_types import CdfTypes, Event, InsertDatapoints, RawRow
+ from cognite.extractorutils.uploader import (
+     CDMTimeSeriesUploadQueue,
+     EventUploadQueue,
+     RawUploadQueue,
+     TimeSeriesUploadQueue,
+ )
+ from cognite.extractorutils.uploader_types import CdfTypes, Event, InsertCDMDatapoints, InsertDatapoints, RawRow


  @dataclass
@@ -153,6 +158,10 @@ class UploaderExtractor(Extractor[UploaderExtractorConfigClass]):
                      self.time_series_queue.add_to_upload_queue(
                          id=dp.id, external_id=dp.external_id, datapoints=dp.datapoints
                      )
+         elif isinstance(peek, InsertCDMDatapoints):
+             for dp in peekable_output:
+                 if isinstance(dp, InsertCDMDatapoints):
+                     self.cdm_time_series_queue.add_to_upload_queue(instance_id=dp.instance_id, datapoints=dp.datapoints)
          else:
              raise ValueError(f"Unexpected type: {type(peek)}")

@@ -187,7 +196,12 @@ class UploaderExtractor(Extractor[UploaderExtractorConfigClass]):
              trigger_log_level="INFO",
              create_missing=True,
          ).__enter__()
-
+         self.cdm_time_series_queue = CDMTimeSeriesUploadQueue(
+             self.cognite_client,
+             max_queue_size=queue_config.timeseries_size,
+             max_upload_interval=queue_config.upload_interval.seconds,
+             trigger_log_level="INFO",
+         ).__enter__()
          return self

      def __exit__(
@@ -199,4 +213,5 @@ class UploaderExtractor(Extractor[UploaderExtractorConfigClass]):
          self.event_queue.__exit__(exc_type, exc_val, exc_tb)
          self.raw_queue.__exit__(exc_type, exc_val, exc_tb)
          self.time_series_queue.__exit__(exc_type, exc_val, exc_tb)
+         self.cdm_time_series_queue.__exit__(exc_type, exc_val, exc_tb)
          return super().__exit__(exc_type, exc_val, exc_tb)
@@ -9,6 +9,7 @@ from typing import TypeAlias

  from cognite.client.data_classes import Event as _Event
  from cognite.client.data_classes import Row as _Row
+ from cognite.client.data_classes.data_modeling import NodeId
  from cognite.extractorutils.uploader.time_series import DataPoint


@@ -23,6 +24,16 @@ class InsertDatapoints:
          self.datapoints = datapoints


+ class InsertCDMDatapoints:
+     """
+     A class representing a batch of datapoints to be inserted into a cdm time series.
+     """
+
+     def __init__(self, *, instance_id: NodeId, datapoints: list[DataPoint]):
+         self.instance_id = instance_id
+         self.datapoints = datapoints
+
+
  class RawRow:
      """
      A class representing a row of data to be inserted into a RAW table.
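
With InsertCDMDatapoints added to the uploader types, an UploaderExtractor can now receive CDM datapoints alongside Event, RawRow and InsertDatapoints values, routing them to its cdm_time_series_queue as shown in the extractor hunk above. A hedged sketch of building such a batch (the NodeId and datapoint values are placeholders):

    from cognite.client.data_classes.data_modeling import NodeId
    from cognite.extractorutils.uploader_types import InsertCDMDatapoints

    # Hypothetical batch produced by an extractor's source reader
    batch = InsertCDMDatapoints(
        instance_id=NodeId("my-space", "my-time-series"),
        datapoints=[(1700000000000, 21.3), (1700000060000, 21.7)],
    )
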
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cognite-extractor-utils
- Version: 7.6.0
+ Version: 7.7.0
  Summary: Utilities for easier development of extractors for CDF
  Project-URL: repository, https://github.com/cognitedata/python-extractor-utils
  Author-email: Mathias Lohne <mathias.lohne@cognite.com>
@@ -1,12 +1,12 @@
- cognite/extractorutils/__init__.py,sha256=U1YH1j-q2xL7GZf-1I0pvGe7gE5L01XYq_fRj4fqBcI,764
+ cognite/extractorutils/__init__.py,sha256=Wn0kPh1z4ahQxBmba_sEQKfDuquoNoSIok9yk7kN5g4,764
  cognite/extractorutils/_inner_util.py,sha256=v0SvTyFqwjWkJLGoYh2-i5jry3I43BFoRkhj2MjMSQ0,1780
  cognite/extractorutils/base.py,sha256=Kabgxd269K_aw-P5EdCTP45mrFaIA4YBj5SZjlBVRnY,18605
  cognite/extractorutils/exceptions.py,sha256=VAGAO5sH6-2DgJZGCMeoCojdRDf9o3g1r3__8REFv-Y,1361
  cognite/extractorutils/metrics.py,sha256=gLoDWQlXNOOVLr4uQzw_l7RE85l4ljHweakzFbrx6-Y,15621
  cognite/extractorutils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cognite/extractorutils/threading.py,sha256=GybbefZkxIqxUgxQHAiGZyEcMz-Cc3k1peILcDiG1G8,4555
- cognite/extractorutils/uploader_extractor.py,sha256=ZqBgDkNcwdWKWzSC_Gq6CCtYl0m7VHQDyo5KHuiUcgw,8326
- cognite/extractorutils/uploader_types.py,sha256=bMvaFMCd5B1QDuvHhwb-v7i5QPanme52jIoiyzp5BkI,1302
+ cognite/extractorutils/uploader_extractor.py,sha256=ZPy-tc9F9fOG6GS2oOdWodolbcFa-4gHXIY8IWaWraA,9015
+ cognite/extractorutils/uploader_types.py,sha256=86wPnkEBE3_3r__4KHjxcF0unqdUXHaNrUU1bi0bOjY,1648
  cognite/extractorutils/util.py,sha256=hs0jKwQZO3Owvrld2wzA8HUDjz4uAAcjnJ8hfLYqHv4,22887
  cognite/extractorutils/configtools/__init__.py,sha256=oK0VmZ5hBEuTRwJebVPl_3zlbf6P_Cf5G3VU7u6Shic,3533
  cognite/extractorutils/configtools/_util.py,sha256=NVGaUjCwUCKFMFOKZxco02q6jwdVU4vHYk67i6R_Zxc,4829
@@ -34,7 +34,7 @@ cognite/extractorutils/unstable/core/tasks.py,sha256=8R0kvKTydpnGaGLlgvhp8_uaq9J
  cognite/extractorutils/unstable/scheduling/__init__.py,sha256=NGVNw-dq7eYwm8jLwb8ChPTSyfAnAKMzi3_QDsh46yw,809
  cognite/extractorutils/unstable/scheduling/_scheduler.py,sha256=xfAKAI_pyahdFo7zPOmvAQ_16MFE46zFIYSopzwpLag,3741
  cognite/extractorutils/unstable/scheduling/_schedules.py,sha256=y0NVeXYZOFcAyzBgAe8jqK0W-SZL5m99UwXAacGzqIw,677
- cognite/extractorutils/uploader/__init__.py,sha256=Kf71QoyIhR4tF7R92jnQ0SC87z8WQDEK2NoEzn7OGxc,3396
+ cognite/extractorutils/uploader/__init__.py,sha256=U6feg43wlrB3K5lhGXBSDJJWOdtuHIse0G1ZsP3MdxM,3458
  cognite/extractorutils/uploader/_base.py,sha256=ZcCVAC7rCe2klCapbKUg4q7p_9KqQg890MvUmHt6j1M,5230
  cognite/extractorutils/uploader/_metrics.py,sha256=J2LJXb19L_SLSJ_voNIQHYLp0pjxUKevpH1q_xKX6Hk,3247
  cognite/extractorutils/uploader/assets.py,sha256=Dio6m-KFZ4EgFfNKET36K3vL-O6Mp9aHVEC3GPfo6J8,5753
@@ -42,9 +42,9 @@ cognite/extractorutils/uploader/data_modeling.py,sha256=ncSbUsybirOg7WqPqhJKe5n-
  cognite/extractorutils/uploader/events.py,sha256=YSU0wRfN0dLKMmhmyVrZ1afJcX7kT0wxDLgQ7gBpoco,5676
  cognite/extractorutils/uploader/files.py,sha256=NIPg6CpUp2y5fRMDHQXt1igSxyJkdN-K1ANGTv0r98E,28680
  cognite/extractorutils/uploader/raw.py,sha256=KBvlBBwKNfaoY9RgOsyKq7ylHjQCxhIznlQ-lQS_XM4,6754
- cognite/extractorutils/uploader/time_series.py,sha256=PR-Wj-k9P4xltTrspQexjnKIVVS-BOs5EADTDHbvvIs,26546
+ cognite/extractorutils/uploader/time_series.py,sha256=LV_z2XLiY3oyQwnkgTF4QkPSrvIQ40C1mbDYLh86NW8,37324
  cognite/extractorutils/uploader/upload_failure_handler.py,sha256=nXw9IVcOxxuywd_17ybXijdpo0omtY0Bkb9fT_fWYFM,3048
- cognite_extractor_utils-7.6.0.dist-info/METADATA,sha256=uQityPrYAtrEXqt4gSNE7aZx4YNnvXGVSKYRJo5Gb64,4888
- cognite_extractor_utils-7.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- cognite_extractor_utils-7.6.0.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
- cognite_extractor_utils-7.6.0.dist-info/RECORD,,
+ cognite_extractor_utils-7.7.0.dist-info/METADATA,sha256=_HrMZjtxJ_fnHfguavXWF6pi8-QI8eZRD1re5jM0NMg,4888
+ cognite_extractor_utils-7.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ cognite_extractor_utils-7.7.0.dist-info/licenses/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
+ cognite_extractor_utils-7.7.0.dist-info/RECORD,,