cognite-extractor-utils 7.2.3__py3-none-any.whl → 7.4.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.

--- a/cognite/extractorutils/__init__.py
+++ b/cognite/extractorutils/__init__.py
@@ -16,5 +16,5 @@
 Cognite extractor utils is a Python package that simplifies the development of new extractors.
 """
 
-__version__ = "7.2.3"
+__version__ = "7.3.0"
 from .base import Extractor

--- /dev/null
+++ b/cognite/extractorutils/unstable/__init__.py
@@ -0,0 +1,8 @@
+"""
+The unstable package contains experimental functions and classes currently
+deemed unstable. The contents of this package are subject to change without
+notice, even in minor or patch releases.
+
+Whenever you import anything from the unstable package, you should make sure to
+run a type checker such as mypy to help catch these changes.
+"""

--- a/cognite/extractorutils/uploader/files.py
+++ b/cognite/extractorutils/uploader/files.py
@@ -18,12 +18,16 @@ from io import BytesIO, RawIOBase
 from math import ceil
 from os import PathLike
 from types import TracebackType
-from typing import Any, BinaryIO, Callable, Dict, List, Optional, Tuple, Type, Union
+from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
 
+from httpx import URL, Client, Headers, Request, StreamConsumed, SyncByteStream
 from requests.utils import super_len
 
 from cognite.client import CogniteClient
 from cognite.client.data_classes import FileMetadata
+from cognite.client.data_classes.data_modeling import NodeId
+from cognite.client.data_classes.data_modeling.extractor_extensions.v1 import CogniteExtractorFileApply
+from cognite.client.utils._identifier import IdentifierSequence
 from cognite.extractorutils.threading import CancellationToken
 from cognite.extractorutils.uploader._base import (
     RETRIES,
@@ -47,6 +51,10 @@ _MAX_SINGLE_CHUNK_FILE_SIZE = 5 * 1024 * 1024 * 1024
 # 4000 MiB
 _MAX_FILE_CHUNK_SIZE = 4 * 1024 * 1024 * 1000
 
+_CDF_ALPHA_VERSION_HEADER = {"cdf-version": "alpha"}
+
+FileMetadataOrCogniteExtractorFile = Union[FileMetadata, CogniteExtractorFileApply]
+
 
 class ChunkedStream(RawIOBase, BinaryIO):
     """
@@ -140,6 +148,22 @@ class ChunkedStream(RawIOBase, BinaryIO):
         return True
 
 
+class IOByteStream(SyncByteStream):
+    CHUNK_SIZE = 65_536
+
+    def __init__(self, stream: BinaryIO) -> None:
+        self._stream = stream
+        self._is_stream_consumed = False
+
+    def __iter__(self) -> Iterator[bytes]:
+        if self._is_stream_consumed:
+            raise StreamConsumed()
+        chunk = self._stream.read(self.CHUNK_SIZE)
+        while chunk:
+            yield chunk
+            chunk = self._stream.read(self.CHUNK_SIZE)
+
+
 class IOFileUploadQueue(AbstractUploadQueue):
     """
     Upload queue for files using BinaryIO
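
The new IOByteStream class adapts a plain BinaryIO into an httpx SyncByteStream, so request bodies can be streamed in 64 KiB chunks instead of being read into memory up front. A minimal usage sketch, assuming the class is importable from the cognite.extractorutils.uploader.files module this diff modifies; the upload URL is a placeholder:

from io import BytesIO

import httpx

from cognite.extractorutils.uploader.files import IOByteStream

# Wrap an in-memory buffer; a file opened in binary mode works the same way.
stream = IOByteStream(BytesIO(b"file contents"))

# httpx iterates the stream chunk by chunk when the request is sent.
request = httpx.Request(method="PUT", url="https://example.com/upload", stream=stream)
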
@@ -161,7 +185,7 @@ class IOFileUploadQueue(AbstractUploadQueue):
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[FileMetadata]], None]] = None,
+        post_upload_function: Optional[Callable[[List[FileMetadataOrCogniteExtractorFile]], None]] = None,
         max_queue_size: Optional[int] = None,
         trigger_log_level: str = "DEBUG",
         thread_name: Optional[str] = None,
@@ -205,6 +229,8 @@ class IOFileUploadQueue(AbstractUploadQueue):
 
         self._full_queue = threading.Condition()
 
+        self._httpx_client = Client(follow_redirects=True)
+
         global _QUEUES, _QUEUES_LOCK
         with _QUEUES_LOCK:
             self._pool = ThreadPoolExecutor(
@@ -219,9 +245,81 @@ class IOFileUploadQueue(AbstractUploadQueue):
 
             self.cancellation_token.wait(5)
 
+    def _apply_cognite_file(self, file_apply: CogniteExtractorFileApply) -> NodeId:
+        instance_result = self.cdf_client.data_modeling.instances.apply(file_apply)
+        node = instance_result.nodes[0]
+        return node.as_id()
+
+    def _upload_empty(
+        self, meta_or_apply: FileMetadataOrCogniteExtractorFile
+    ) -> tuple[FileMetadataOrCogniteExtractorFile, str]:
+        if isinstance(meta_or_apply, CogniteExtractorFileApply):
+            node_id = self._apply_cognite_file(meta_or_apply)
+            meta_or_apply, url = self._create_cdm(instance_id=node_id)
+        else:
+            meta_or_apply, url = self.cdf_client.files.create(
+                file_metadata=meta_or_apply, overwrite=self.overwrite_existing
+            )
+        return meta_or_apply, url
+
+    def _upload_bytes(self, size: int, file: BinaryIO, meta_or_apply: FileMetadataOrCogniteExtractorFile) -> None:
+        meta_or_apply, url = self._upload_empty(meta_or_apply)
+        resp = self._httpx_client.send(self._get_file_upload_request(url, file, size))
+        resp.raise_for_status()
+
+    def _upload_multipart(self, size: int, file: BinaryIO, meta_or_apply: FileMetadataOrCogniteExtractorFile) -> None:
+        chunks = ChunkedStream(file, self.max_file_chunk_size, size)
+        self.logger.debug(
+            f"File {meta_or_apply.external_id} is larger than 5GiB ({size})"
+            f", uploading in {chunks.chunk_count} chunks"
+        )
+
+        returned_file_metadata = self._create_multi_part(meta_or_apply, chunks)
+        upload_urls = returned_file_metadata["uploadUrls"]
+        upload_id = returned_file_metadata["uploadId"]
+        file_meta = FileMetadata.load(returned_file_metadata)
+
+        for url in upload_urls:
+            chunks.next_chunk()
+            resp = self._httpx_client.send(self._get_file_upload_request(url, chunks, len(chunks)))
+            resp.raise_for_status()
+
+        completed_headers = (
+            _CDF_ALPHA_VERSION_HEADER if isinstance(meta_or_apply, CogniteExtractorFileApply) else None
+        )
+
+        res = self.cdf_client.files._post(
+            url_path="/files/completemultipartupload",
+            json={"id": file_meta.id, "uploadId": upload_id},
+            headers=completed_headers,
+        )
+        res.raise_for_status()
+
+    def _create_multi_part(self, meta_or_apply: FileMetadataOrCogniteExtractorFile, chunks: ChunkedStream) -> dict:
+        if isinstance(meta_or_apply, CogniteExtractorFileApply):
+            node_id = self._apply_cognite_file(meta_or_apply)
+            identifiers = IdentifierSequence.load(instance_ids=node_id).as_singleton()
+            self.cdf_client.files._warn_alpha()
+            res = self.cdf_client.files._post(
+                url_path="/files/multiuploadlink",
+                json={"items": identifiers.as_dicts()},
+                params={"parts": chunks.chunk_count},
+                headers=_CDF_ALPHA_VERSION_HEADER,
+            )
+            res.raise_for_status()
+            return res.json()["items"][0]
+        else:
+            res = self.cdf_client.files._post(
+                url_path="/files/initmultipartupload",
+                json=meta_or_apply.dump(camel_case=True),
+                params={"overwrite": self.overwrite_existing, "parts": chunks.chunk_count},
+            )
+            res.raise_for_status()
+            return res.json()
+
     def add_io_to_upload_queue(
         self,
-        file_meta: FileMetadata,
+        meta_or_apply: FileMetadataOrCogniteExtractorFile,
         read_file: Callable[[], BinaryIO],
         extra_retries: Optional[
             Union[Tuple[Type[Exception], ...], Dict[Type[Exception], Callable[[Any], bool]]]
@@ -229,7 +327,7 @@ class IOFileUploadQueue(AbstractUploadQueue):
     ) -> None:
         """
         Add file to upload queue. The file will start uploading immediately. If the size of the queue is larger than
-        the specified max size, this call will block until it's
+        the specified max size, this call will block until the upload is completed.
 
         Args:
             file_meta: File metadata-object
@@ -239,7 +337,7 @@ class IOFileUploadQueue(AbstractUploadQueue):
         """
         retries = cognite_exceptions()
         if isinstance(extra_retries, tuple):
-            retries.update({exc: lambda _e: True for exc in extra_retries or []})
+            retries.update({exc: lambda _: True for exc in extra_retries or []})
         elif isinstance(extra_retries, dict):
             retries.update(extra_retries)
 
@@ -251,72 +349,36 @@ class IOFileUploadQueue(AbstractUploadQueue):
             max_delay=RETRY_MAX_DELAY,
             backoff=RETRY_BACKOFF_FACTOR,
         )
-        def upload_file(read_file: Callable[[], BinaryIO], file_meta: FileMetadata) -> None:
+        def upload_file(read_file: Callable[[], BinaryIO], meta_or_apply: FileMetadataOrCogniteExtractorFile) -> None:
             with read_file() as file:
                 size = super_len(file)
                 if size == 0:
                     # upload just the file metadata without data
-                    file_meta, _url = self.cdf_client.files.create(
-                        file_metadata=file_meta, overwrite=self.overwrite_existing
-                    )
+                    meta_or_apply, _ = self._upload_empty(meta_or_apply)
                 elif size >= self.max_single_chunk_file_size:
                     # The minimum chunk size is 4000MiB.
-                    chunks = ChunkedStream(file, self.max_file_chunk_size, size)
-                    self.logger.debug(
-                        f"File {file_meta.external_id} is larger than 5GiB ({size})"
-                        f", uploading in {chunks.chunk_count} chunks"
-                    )
-                    with self.cdf_client.files.multipart_upload_session(
-                        file_meta.name if file_meta.name is not None else "",
-                        parts=chunks.chunk_count,
-                        overwrite=self.overwrite_existing,
-                        external_id=file_meta.external_id,
-                        source=file_meta.source,
-                        mime_type=file_meta.mime_type,
-                        metadata=file_meta.metadata,
-                        directory=file_meta.directory,
-                        asset_ids=file_meta.asset_ids,
-                        data_set_id=file_meta.data_set_id,
-                        labels=file_meta.labels,
-                        geo_location=file_meta.geo_location,
-                        source_created_time=file_meta.source_created_time,
-                        source_modified_time=file_meta.source_modified_time,
-                        security_categories=file_meta.security_categories,
-                    ) as session:
-                        while chunks.next_chunk():
-                            session.upload_part(chunks.current_chunk, chunks)
-                        file_meta = session.file_metadata
+                    self._upload_multipart(size, file, meta_or_apply)
+
                 else:
-                    file_meta = self.cdf_client.files.upload_bytes(
-                        file,
-                        file_meta.name if file_meta.name is not None else "",
-                        overwrite=self.overwrite_existing,
-                        external_id=file_meta.external_id,
-                        source=file_meta.source,
-                        mime_type=file_meta.mime_type,
-                        metadata=file_meta.metadata,
-                        directory=file_meta.directory,
-                        asset_ids=file_meta.asset_ids,
-                        data_set_id=file_meta.data_set_id,
-                        labels=file_meta.labels,
-                        geo_location=file_meta.geo_location,
-                        source_created_time=file_meta.source_created_time,
-                        source_modified_time=file_meta.source_modified_time,
-                        security_categories=file_meta.security_categories,
-                    )
+                    self._upload_bytes(size, file, meta_or_apply)
+
+                if isinstance(meta_or_apply, CogniteExtractorFileApply):
+                    meta_or_apply.is_uploaded = True
 
             if self.post_upload_function:
                 try:
-                    self.post_upload_function([file_meta])
+                    self.post_upload_function([meta_or_apply])
                 except Exception as e:
                     self.logger.error("Error in upload callback: %s", str(e))
 
-        def wrapped_upload(read_file: Callable[[], BinaryIO], file_meta: FileMetadata) -> None:
+        def wrapped_upload(
+            read_file: Callable[[], BinaryIO], meta_or_apply: FileMetadataOrCogniteExtractorFile
+        ) -> None:
             try:
-                upload_file(read_file, file_meta)
+                upload_file(read_file, meta_or_apply)
 
             except Exception as e:
-                self.logger.exception(f"Unexpected error while uploading file: {file_meta.external_id}")
+                self.logger.exception(f"Unexpected error while uploading file: {meta_or_apply.external_id}")
                 self.errors.append(e)
 
             finally:
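
Taken together, these changes let the same queue accept either a classic FileMetadata or a data-modeling CogniteExtractorFileApply: empty, single-chunk, and multipart uploads are all routed through the new helper methods, and applied instances are marked is_uploaded once the bytes are in place. A hedged usage sketch, where the client configuration, space, and external IDs are illustrative placeholders:

from io import BytesIO

from cognite.client import CogniteClient
from cognite.client.data_classes.data_modeling.extractor_extensions.v1 import CogniteExtractorFileApply

from cognite.extractorutils.uploader.files import IOFileUploadQueue

client = CogniteClient()  # assumes client configuration is set up elsewhere
queue = IOFileUploadQueue(client)

# A file modeled as a data-modeling instance (alpha functionality in CDF).
file_apply = CogniteExtractorFileApply(
    space="my-space",        # placeholder space
    external_id="my-file",   # placeholder external ID
    name="my-file.txt",
)

queue.add_io_to_upload_queue(file_apply, lambda: BytesIO(b"hello, world"))
queue.upload()  # block until all queued uploads have finished
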
@@ -333,11 +395,42 @@ class IOFileUploadQueue(AbstractUploadQueue):
                 pass
 
         with self.lock:
-            self.upload_queue.append(self._pool.submit(wrapped_upload, read_file, file_meta))
+            self.upload_queue.append(self._pool.submit(wrapped_upload, read_file, meta_or_apply))
             self.upload_queue_size += 1
             self.files_queued.inc()
             self.queue_size.set(self.upload_queue_size)
 
+    def _get_file_upload_request(self, url_str: str, stream: BinaryIO, size: int) -> Request:
+        url = URL(url_str)
+        headers = Headers(self._httpx_client.headers)
+        headers.update(
+            {
+                "Accept": "*/*",
+                "Content-Length": str(size),
+                "Host": url.netloc.decode("ascii"),
+                "x-cdp-app": self.cdf_client._config.client_name,
+            }
+        )
+
+        return Request(
+            method="PUT",
+            url=url,
+            stream=IOByteStream(stream),
+            headers=headers,
+        )
+
+    def _create_cdm(self, instance_id: NodeId) -> tuple[FileMetadata, str]:
+        self.cdf_client.files._warn_alpha()
+        identifiers = IdentifierSequence.load(instance_ids=instance_id).as_singleton()
+        res = self.cdf_client.files._post(
+            url_path="/files/uploadlink",
+            json={"items": identifiers.as_dicts()},
+            headers=_CDF_ALPHA_VERSION_HEADER,
+        )
+        res.raise_for_status()
+        resp_json = res.json()["items"][0]
+        return FileMetadata.load(resp_json), resp_json["uploadUrl"]
+
     def upload(self, fail_on_errors: bool = True, timeout: Optional[float] = None) -> None:
         """
         Wait for all uploads to finish
@@ -402,7 +495,7 @@ class FileUploadQueue(IOFileUploadQueue):
     def __init__(
        self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[FileMetadata]], None]] = None,
+        post_upload_function: Optional[Callable[[List[FileMetadataOrCogniteExtractorFile]], None]] = None,
         max_queue_size: Optional[int] = None,
         max_upload_interval: Optional[int] = None,
         trigger_log_level: str = "DEBUG",
@@ -421,7 +514,9 @@ class FileUploadQueue(IOFileUploadQueue):
             cancellation_token,
         )
 
-    def add_to_upload_queue(self, file_meta: FileMetadata, file_name: Union[str, PathLike]) -> None:
+    def add_to_upload_queue(
+        self, meta_or_apply: FileMetadataOrCogniteExtractorFile, file_name: Union[str, PathLike]
+    ) -> None:
         """
         Add file to upload queue. The queue will be uploaded if the queue size is larger than the threshold
         specified in the __init__.
@@ -435,7 +530,7 @@ class FileUploadQueue(IOFileUploadQueue):
         def load_file_from_path() -> BinaryIO:
             return open(file_name, "rb")
 
-        self.add_io_to_upload_queue(file_meta, load_file_from_path)
+        self.add_io_to_upload_queue(meta_or_apply, load_file_from_path)
 
 
 class BytesUploadQueue(IOFileUploadQueue):
@@ -455,7 +550,7 @@ class BytesUploadQueue(IOFileUploadQueue):
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[FileMetadata]], None]] = None,
+        post_upload_function: Optional[Callable[[List[FileMetadataOrCogniteExtractorFile]], None]] = None,
         max_queue_size: Optional[int] = None,
         trigger_log_level: str = "DEBUG",
         thread_name: Optional[str] = None,
@@ -472,7 +567,7 @@ class BytesUploadQueue(IOFileUploadQueue):
             cancellation_token,
         )
 
-    def add_to_upload_queue(self, content: bytes, metadata: FileMetadata) -> None:
+    def add_to_upload_queue(self, content: bytes, meta_or_apply: FileMetadataOrCogniteExtractorFile) -> None:
         """
         Add object to upload queue. The queue will be uploaded if the queue size is larger than the threshold
         specified in the __init__.
@@ -484,4 +579,4 @@ class BytesUploadQueue(IOFileUploadQueue):
         def get_byte_io() -> BinaryIO:
             return BytesIO(content)
 
-        self.add_io_to_upload_queue(metadata, get_byte_io)
+        self.add_io_to_upload_queue(meta_or_apply, get_byte_io)

--- a/cognite/extractorutils/util.py
+++ b/cognite/extractorutils/util.py
@@ -19,6 +19,7 @@ extractors.
 
 import logging
 import random
+from datetime import datetime, timezone
 from functools import partial, wraps
 from threading import Thread
 from time import time
@@ -501,3 +502,11 @@ def cognite_exceptions(
         return True
 
     return {CogniteException: handle_cognite_errors}
+
+
+def datetime_to_timestamp(dt: datetime) -> int:
+    return int(dt.timestamp() * 1000)
+
+
+def timestamp_to_datetime(ts: int) -> datetime:
+    return datetime.fromtimestamp(ts / 1000, tz=timezone.utc)
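
The two new helpers convert between timezone-aware datetimes and the millisecond Unix timestamps used throughout CDF. A quick round trip, assuming the helpers live in cognite.extractorutils.util (consistent with the util.py entry in the RECORD changes below):

from datetime import datetime, timezone

from cognite.extractorutils.util import datetime_to_timestamp, timestamp_to_datetime

dt = datetime(2024, 6, 1, 12, 0, tzinfo=timezone.utc)
ts = datetime_to_timestamp(dt)  # 1717243200000, milliseconds since the epoch
assert timestamp_to_datetime(ts) == dt  # round-trips for UTC datetimes
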

--- a/cognite_extractor_utils-7.2.3.dist-info/METADATA
+++ b/cognite_extractor_utils-7.4.0.dist-info/METADATA
@@ -1,15 +1,14 @@
 Metadata-Version: 2.1
 Name: cognite-extractor-utils
-Version: 7.2.3
+Version: 7.4.0
 Summary: Utilities for easier development of extractors for CDF
 Home-page: https://github.com/cognitedata/python-extractor-utils
 License: Apache-2.0
 Author: Mathias Lohne
 Author-email: mathias.lohne@cognite.com
-Requires-Python: >=3.8.0,<4.0.0
+Requires-Python: >=3.9.0,<4.0.0
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
@@ -18,9 +17,10 @@ Provides-Extra: experimental
 Requires-Dist: arrow (>=1.0.0,<2.0.0)
 Requires-Dist: azure-identity (>=1.14.0,<2.0.0)
 Requires-Dist: azure-keyvault-secrets (>=4.7.0,<5.0.0)
-Requires-Dist: cognite-sdk (>=7.43.3,<8.0.0)
+Requires-Dist: cognite-sdk (>=7.54.17,<8.0.0)
 Requires-Dist: dacite (>=1.6.0,<2.0.0)
 Requires-Dist: decorator (>=5.1.1,<6.0.0)
+Requires-Dist: httpx (>=0.27.0,<0.28.0)
 Requires-Dist: more-itertools (>=10.0.0,<11.0.0)
 Requires-Dist: orjson (>=3.10.3,<4.0.0)
 Requires-Dist: prometheus-client (>0.7.0,<=1.0.0)

--- a/cognite_extractor_utils-7.2.3.dist-info/RECORD
+++ b/cognite_extractor_utils-7.4.0.dist-info/RECORD
@@ -1,4 +1,4 @@
-cognite/extractorutils/__init__.py,sha256=No8QzFZvAX7DY63jX70dTCPGuk7k0Rd0k99nU_sqeTo,739
+cognite/extractorutils/__init__.py,sha256=xQex_pCl54rp85pvtXkNBPlRtgkgs5dfw7G_CBOPtpM,739
 cognite/extractorutils/_inner_util.py,sha256=gmz6aqS7jDNsg8z4RHgJjMFohDLOMiaU4gMWBhg3xcE,1558
 cognite/extractorutils/base.py,sha256=q6NU2bPec3WOasVnnIFoh-aUJudVZWZ2R6emz3IRj8Q,16391
 cognite/extractorutils/configtools/__init__.py,sha256=L-daaqInIsmHcjb2forJeY0fW8tz1mlteOUo7IsWnrU,3059
@@ -13,19 +13,20 @@ cognite/extractorutils/statestore/_base.py,sha256=PM4C-bz41tldA5Lx8rD0AzgXJciAZc
 cognite/extractorutils/statestore/hashing.py,sha256=o-efTv21_ATQnyxYmple3MF7r5Afy-7qZsdZhR47emw,8083
 cognite/extractorutils/statestore/watermark.py,sha256=c_lcmJfo8bOvWyCJ9iRbbE4BlqRVulom4TpHb2pOnkE,16755
 cognite/extractorutils/threading.py,sha256=2Hke5cFvP-wA45Crvh58JahoKXB64P3tr7R4y_BhBqM,3605
+cognite/extractorutils/unstable/__init__.py,sha256=L6nqJHjylpk67CE-PbXJyb_TBI4yjhEYEz9J9WShDfM,341
 cognite/extractorutils/uploader/__init__.py,sha256=W22u6QHA4cR0j78LN5LTL5YGbfC-uTApagTyP5ab7uQ,3110
 cognite/extractorutils/uploader/_base.py,sha256=wktbV8dpb8zBOsNaECZkBNoJSpOz437NlNMER3-a3xQ,5304
 cognite/extractorutils/uploader/_metrics.py,sha256=J2LJXb19L_SLSJ_voNIQHYLp0pjxUKevpH1q_xKX6Hk,3247
 cognite/extractorutils/uploader/assets.py,sha256=2E90N1kxsaA6Ah4h0_r_dTVhDYY_68ItRWrHYkkltJw,5628
 cognite/extractorutils/uploader/data_modeling.py,sha256=w35Ix5mu0Cgfn4ywnDyif4VVjo04LVTlkMEevk6ztUs,3639
 cognite/extractorutils/uploader/events.py,sha256=NZP2tMoU_rh_rb-EZiUBsOT5KdNABHN4c9Oddk0OsdE,5680
-cognite/extractorutils/uploader/files.py,sha256=31kPS4fwz8ZSXWss-CKmYTM6ZLVx9LtsDe7LHT7Wy98,18329
+cognite/extractorutils/uploader/files.py,sha256=jEZ_QwUnXTsfQ5Xsm03j_vNWTlYBg2gmSxE3MOyoC6s,21765
 cognite/extractorutils/uploader/raw.py,sha256=wFjF90PFTjmByOWx_Y4_YfDJ2w2jl0EQJ2Tjx2MP2PM,6738
 cognite/extractorutils/uploader/time_series.py,sha256=HBtQdsQoIOaL-EG5lMsaY-ORwVb0kGiXG86VjE5-_Bg,26815
 cognite/extractorutils/uploader_extractor.py,sha256=E-mpVvbPg_Tk90U4S9JybV0duptJ2SXE88HB6npE3zI,7732
 cognite/extractorutils/uploader_types.py,sha256=wxfrsiKPTzG5lmoYtQsxt8Xyj-s5HnaLl8WDzJNrazg,1020
-cognite/extractorutils/util.py,sha256=UA6mUZ1caHd6vtA45gZXrk6cxo5cSB2PZ32bMwfEU0M,17229
-cognite_extractor_utils-7.2.3.dist-info/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
-cognite_extractor_utils-7.2.3.dist-info/METADATA,sha256=u-YGjhv1yLXR0t8OYCx07D7vp1_8Vg_yFwMke0HThZ8,5486
-cognite_extractor_utils-7.2.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-cognite_extractor_utils-7.2.3.dist-info/RECORD,,
+cognite/extractorutils/util.py,sha256=T6ef5b7aYJ8yq9swQwybYaLe3YGr3hElsJQy8E-d5Rs,17469
+cognite_extractor_utils-7.4.0.dist-info/LICENSE,sha256=psuoW8kuDP96RQsdhzwOqi6fyWv0ct8CR6Jr7He_P_k,10173
+cognite_extractor_utils-7.4.0.dist-info/METADATA,sha256=eD0--_YZWCF4Vj9oOcVvPIM2hiAOBrGsbZwAc5lRr9Q,5477
+cognite_extractor_utils-7.4.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+cognite_extractor_utils-7.4.0.dist-info/RECORD,,