cognite-extractor-utils 6.1.0__tar.gz → 6.2.0__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of cognite-extractor-utils has been flagged by the registry.

Files changed (27):
  1. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/PKG-INFO +1 -1
  2. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/__init__.py +1 -1
  3. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/configtools/elements.py +42 -8
  4. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/__init__.py +1 -1
  5. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/files.py +105 -103
  6. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/pyproject.toml +2 -2
  7. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/LICENSE +0 -0
  8. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/README.md +0 -0
  9. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/_inner_util.py +0 -0
  10. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/base.py +0 -0
  11. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/configtools/__init__.py +0 -0
  12. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/configtools/_util.py +0 -0
  13. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/configtools/loaders.py +0 -0
  14. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/exceptions.py +0 -0
  15. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/metrics.py +0 -0
  16. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/middleware.py +0 -0
  17. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/py.typed +0 -0
  18. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/statestore.py +0 -0
  19. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/_base.py +0 -0
  20. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/_metrics.py +0 -0
  21. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/assets.py +0 -0
  22. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/events.py +0 -0
  23. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/raw.py +0 -0
  24. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader/time_series.py +0 -0
  25. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader_extractor.py +0 -0
  26. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/uploader_types.py +0 -0
  27. {cognite_extractor_utils-6.1.0 → cognite_extractor_utils-6.2.0}/cognite/extractorutils/util.py +0 -0
--- cognite_extractor_utils-6.1.0/PKG-INFO
+++ cognite_extractor_utils-6.2.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cognite-extractor-utils
-Version: 6.1.0
+Version: 6.2.0
 Summary: Utilities for easier development of extractors for CDF
 Home-page: https://github.com/cognitedata/python-extractor-utils
 License: Apache-2.0
--- cognite_extractor_utils-6.1.0/cognite/extractorutils/__init__.py
+++ cognite_extractor_utils-6.2.0/cognite/extractorutils/__init__.py
@@ -16,5 +16,5 @@
 Cognite extractor utils is a Python package that simplifies the development of new extractors.
 """
 
-__version__ = "6.1.0"
+__version__ = "6.2.0"
 from .base import Extractor
--- cognite_extractor_utils-6.1.0/cognite/extractorutils/configtools/elements.py
+++ cognite_extractor_utils-6.2.0/cognite/extractorutils/configtools/elements.py
@@ -27,12 +27,25 @@ import yaml
 from prometheus_client import REGISTRY, start_http_server
 
 from cognite.client import ClientConfig, CogniteClient
-from cognite.client.credentials import CredentialProvider, OAuthClientCertificate, OAuthClientCredentials
+from cognite.client.credentials import (
+    CredentialProvider,
+    OAuthClientCertificate,
+    OAuthClientCredentials,
+)
 from cognite.client.data_classes import Asset, DataSet, ExtractionPipeline
 from cognite.extractorutils.configtools._util import _load_certificate_data
 from cognite.extractorutils.exceptions import InvalidConfigError
-from cognite.extractorutils.metrics import AbstractMetricsPusher, CognitePusher, PrometheusPusher
-from cognite.extractorutils.statestore import AbstractStateStore, LocalStateStore, NoStateStore, RawStateStore
+from cognite.extractorutils.metrics import (
+    AbstractMetricsPusher,
+    CognitePusher,
+    PrometheusPusher,
+)
+from cognite.extractorutils.statestore import (
+    AbstractStateStore,
+    LocalStateStore,
+    NoStateStore,
+    RawStateStore,
+)
 from cognite.extractorutils.util import EitherId
 
 _logger = logging.getLogger(__name__)
@@ -93,6 +106,14 @@ class TimeIntervalConfig(yaml.YAMLObject):
     def __init__(self, expression: str) -> None:
         self._interval, self._expression = TimeIntervalConfig._parse_expression(expression)
 
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, TimeIntervalConfig):
+            return NotImplemented
+        return self._interval == other._interval
+
+    def __hash__(self) -> int:
+        return hash(self._interval)
+
     @classmethod
     def _parse_expression(cls, expression: str) -> Tuple[int, str]:
         # First, try to parse pure number and assume seconds (for backwards compatibility)
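The new __eq__/__hash__ pair compares only the parsed interval, so two configs written with different expressions for the same duration are now equal and hash alike. A minimal sketch, assuming the usual suffixes (a bare number is seconds, m is minutes):

    from cognite.extractorutils.configtools.elements import TimeIntervalConfig

    # Both parse to 120 seconds, so they compare equal and collapse in a set
    assert TimeIntervalConfig("120") == TimeIntervalConfig("2m")
    assert len({TimeIntervalConfig("120"), TimeIntervalConfig("2m")}) == 1

Returning NotImplemented (rather than False) for non-TimeIntervalConfig operands lets Python fall back to the other operand's comparison, which is the idiomatic choice.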
@@ -170,7 +191,10 @@ class FileSizeConfig(yaml.YAMLObject):
         expression_lower = expression.lower()
         for size in sizes:
             if expression_lower.endswith(size):
-                return int(float(expression_lower.replace(size, "")) * sizes[size]), expression
+                return (
+                    int(float(expression_lower.replace(size, "")) * sizes[size]),
+                    expression,
+                )
         else:
             raise InvalidConfigError(f"Invalid unit for file size: {expression}. Valid units: {sizes.keys()}")
 
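The else in this hunk belongs to the for loop, not the if: Python runs a loop's else branch only when the loop finishes without break (or, as here, without returning). A standalone illustration of that semantics:

    # for-else: the else-branch runs only if no unit matched,
    # i.e. the loop was never exited early
    for unit in ("kb", "mb", "gb"):
        if "5 bananas".endswith(unit):
            break
    else:
        raise ValueError("no matching unit")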
@@ -241,7 +265,10 @@ class CogniteConfig:
     host: str = "https://api.cognitedata.com"
 
     def get_cognite_client(
-        self, client_name: str, token_custom_args: Optional[Dict[str, str]] = None, use_experimental_sdk: bool = False
+        self,
+        client_name: str,
+        token_custom_args: Optional[Dict[str, str]] = None,
+        use_experimental_sdk: bool = False,
     ) -> CogniteClient:
         from cognite.client.config import global_config
 
@@ -264,7 +291,8 @@ class CogniteConfig:
         else:
             raise InvalidConfigError("Either authority-url or tenant is required for certificate authentication")
         (thumprint, key) = _load_certificate_data(
-            self.idp_authentication.certificate.path, self.idp_authentication.certificate.password
+            self.idp_authentication.certificate.path,
+            self.idp_authentication.certificate.password,
         )
         credential_provider = OAuthClientCertificate(
             authority_url=authority_url,
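For context, these hunks all sit inside get_cognite_client, which turns a parsed CogniteConfig into a ready-to-use SDK client. A hedged usage sketch (load_yaml and BaseConfig are the configtools loaders as documented; the file name is hypothetical):

    from cognite.extractorutils.configtools import BaseConfig, load_yaml

    with open("config.yaml") as stream:
        config = load_yaml(stream, BaseConfig)
    client = config.cognite.get_cognite_client(client_name="my-extractor")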
@@ -479,7 +507,10 @@ class MetricsConfig:
         asset = None
 
         if self.cognite.asset_name is not None and self.cognite.asset_external_id:
-            asset = Asset(name=self.cognite.asset_name, external_id=self.cognite.asset_external_id)
+            asset = Asset(
+                name=self.cognite.asset_name,
+                external_id=self.cognite.asset_external_id,
+            )
 
         pusher = CognitePusher(
             cdf_client=cdf_client,
@@ -586,7 +617,10 @@ class StateStoreConfig:
         )
 
         if self.local:
-            return LocalStateStore(file_path=self.local.path, save_interval=self.local.save_interval.seconds)
+            return LocalStateStore(
+                file_path=self.local.path,
+                save_interval=self.local.save_interval.seconds,
+            )
 
         if default_to_local:
             return LocalStateStore(file_path="states.json")
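The selection order in this method is: RAW-backed store if configured, then local file store, then the default_to_local fallback. A hedged sketch of exercising the local branch (LocalStateStoreConfig and the create_state_store signature are assumed from the library's configtools API; the path is hypothetical):

    from cognite.extractorutils.configtools import LocalStateStoreConfig, StateStoreConfig

    config = StateStoreConfig(local=LocalStateStoreConfig(path="states.json"))
    store = config.create_state_store(cdf_client=None, default_to_local=True)
    store.initialize()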
--- cognite_extractor_utils-6.1.0/cognite/extractorutils/uploader/__init__.py
+++ cognite_extractor_utils-6.2.0/cognite/extractorutils/uploader/__init__.py
@@ -66,7 +66,7 @@ instead. If both are used, the condition being met first will trigger the upload
 
 from .assets import AssetUploadQueue
 from .events import EventUploadQueue
-from .files import BytesUploadQueue, FileUploadQueue
+from .files import BytesUploadQueue, FileUploadQueue, IOFileUploadQueue
 from .raw import RawUploadQueue
 from .time_series import (
     DataPoint,
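The new base class is re-exported next to the existing queues, so downstream extractors can import all three from the package:

    from cognite.extractorutils.uploader import (
        BytesUploadQueue,
        FileUploadQueue,
        IOFileUploadQueue,
    )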
--- cognite_extractor_utils-6.1.0/cognite/extractorutils/uploader/files.py
+++ cognite_extractor_utils-6.2.0/cognite/extractorutils/uploader/files.py
@@ -13,10 +13,11 @@
 # limitations under the License.
 
 import threading
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import Future, ThreadPoolExecutor
+from io import BytesIO
 from os import PathLike
 from types import TracebackType
-from typing import Any, Callable, List, Optional, Tuple, Type, Union
+from typing import Any, BinaryIO, Callable, List, Optional, Tuple, Type, Union
 
 from requests import ConnectionError
 
@@ -31,9 +32,6 @@ from cognite.extractorutils.uploader._base import (
     AbstractUploadQueue,
 )
 from cognite.extractorutils.uploader._metrics import (
-    BYTES_UPLOADER_QUEUE_SIZE,
-    BYTES_UPLOADER_QUEUED,
-    BYTES_UPLOADER_WRITTEN,
     FILES_UPLOADER_QUEUE_SIZE,
     FILES_UPLOADER_QUEUED,
     FILES_UPLOADER_WRITTEN,
@@ -41,9 +39,12 @@ from cognite.extractorutils.uploader._metrics import (
 from cognite.extractorutils.util import retry
 
 
-class FileUploadQueue(AbstractUploadQueue):
+class IOFileUploadQueue(AbstractUploadQueue):
     """
-    Upload queue for files
+    Upload queue for files using BinaryIO
+
+    Note that if the upload fails, the stream needs to be restarted, so
+    the enqueued callback needs to produce a new IO object for each call.
 
     Args:
         cdf_client: Cognite Data Fusion client to use
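The restart note above is the central contract of the new class: callers enqueue a factory, not an open stream, so a retried upload can reopen the source. A hedged sketch (client, the path, and the metadata are hypothetical):

    from typing import BinaryIO

    from cognite.client.data_classes import FileMetadata
    from cognite.extractorutils.uploader import IOFileUploadQueue

    queue = IOFileUploadQueue(client)  # client: a configured CogniteClient (assumed)

    def read_file() -> BinaryIO:
        # A fresh handle per call; a single open stream would be exhausted on retry
        return open("/data/report.pdf", "rb")

    queue.add_io_to_upload_queue(FileMetadata(name="report.pdf"), read_file)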
@@ -54,6 +55,10 @@ class FileUploadQueue(AbstractUploadQueue):
         methods).
         trigger_log_level: Log level to log upload triggers to.
         thread_name: Thread name of uploader thread.
+        max_parallelism: Maximum number of parallel uploads. If this is greater than 0,
+            the largest of this and client.config.max_workers is used to limit the number
+            of parallel uploads. This may be important if the IO objects being processed
+            also load data from an external system.
     """
 
     def __init__(
@@ -66,6 +71,7 @@
         thread_name: Optional[str] = None,
         overwrite_existing: bool = False,
         cancellation_token: threading.Event = threading.Event(),
+        max_parallelism: int = 0,
     ):
         # Super sets post_upload and threshold
         super().__init__(
@@ -78,14 +84,20 @@ class FileUploadQueue(AbstractUploadQueue):
         )
 
 
-        self.upload_queue: List[Tuple[FileMetadata, Union[str, PathLike]]] = []
+        self.upload_queue: List[Tuple[FileMetadata, Union[str, Callable[[], BinaryIO]]]] = []
         self.overwrite_existing = overwrite_existing
 
+        self.parallelism = self.cdf_client.config.max_workers
+        if max_parallelism > 0 and max_parallelism < self.parallelism:
+            self.parallelism = max_parallelism
+        if self.parallelism <= 0:
+            self.parallelism = 4
+
         self.files_queued = FILES_UPLOADER_QUEUED
         self.files_written = FILES_UPLOADER_WRITTEN
         self.queue_size = FILES_UPLOADER_QUEUE_SIZE
 
-    def add_to_upload_queue(self, file_meta: FileMetadata, file_name: Union[str, PathLike]) -> None:
+    def add_io_to_upload_queue(self, file_meta: FileMetadata, read_file: Callable[[], BinaryIO]) -> None:
         """
         Add file to upload queue. The queue will be uploaded if the queue size is larger than the threshold
         specified in the __init__.
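Note that the clamp in this hunk keeps the smaller of max_parallelism and client.config.max_workers, with 4 as a fallback floor; the docstring's "the largest of this and client.config.max_workers" reads inverted relative to this guard. An equivalent, more explicit formulation:

    def effective_parallelism(max_workers: int, max_parallelism: int) -> int:
        # Start from the SDK's worker count; a positive max_parallelism may only lower it
        parallelism = max_workers
        if 0 < max_parallelism < parallelism:
            parallelism = max_parallelism
        return parallelism if parallelism > 0 else 4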
@@ -96,7 +108,7 @@ class FileUploadQueue(AbstractUploadQueue):
             If none, the file object will still be created, but no data is uploaded
         """
         with self.lock:
-            self.upload_queue.append((file_meta, file_name))
+            self.upload_queue.append((file_meta, read_file))
             self.upload_queue_size += 1
             self.files_queued.inc()
             self.queue_size.set(self.upload_queue_size)
@@ -131,21 +143,41 @@ class FileUploadQueue(AbstractUploadQueue):
         max_delay=RETRY_MAX_DELAY,
         backoff=RETRY_BACKOFF_FACTOR,
     )
-    def _upload_single(self, index: int, file_name: Union[str, PathLike], file_meta: FileMetadata) -> None:
+    def _upload_single(self, index: int, read_file: Callable[[], BinaryIO], file_meta: FileMetadata) -> None:
         # Upload file
-        file_meta = self.cdf_client.files.upload(str(file_name), overwrite=self.overwrite_existing, **file_meta.dump())  # type: ignore
+        with read_file() as file:
+            file_meta = self.cdf_client.files.upload_bytes(
+                file,
+                file_meta.name if file_meta.name is not None else "",
+                overwrite=self.overwrite_existing,
+                external_id=file_meta.external_id,
+                source=file_meta.source,
+                mime_type=file_meta.mime_type,
+                metadata=file_meta.metadata,
+                directory=file_meta.directory,
+                asset_ids=file_meta.asset_ids,
+                data_set_id=file_meta.data_set_id,
+                labels=file_meta.labels,
+                geo_location=file_meta.geo_location,
+                source_created_time=file_meta.source_created_time,
+                source_modified_time=file_meta.source_modified_time,
+                security_categories=file_meta.security_categories,
+            )
 
         # Update meta-object in queue
-        self.upload_queue[index] = (file_meta, file_name)
+        self.upload_queue[index] = (file_meta, read_file)
 
     def _upload_batch(self) -> None:
         # Concurrently execute file-uploads
 
-        with ThreadPoolExecutor(self.cdf_client.config.max_workers) as pool:
+        futures: List[Future] = []
+        with ThreadPoolExecutor(self.parallelism) as pool:
             for i, (file_meta, file_name) in enumerate(self.upload_queue):
-                pool.submit(self._upload_single, i, file_name, file_meta)
+                futures.append(pool.submit(self._upload_single, i, file_name, file_meta))
+        for fut in futures:
+            fut.result(0.0)
 
-    def __enter__(self) -> "FileUploadQueue":
+    def __enter__(self) -> "IOFileUploadQueue":
         """
         Wraps around start method, for use as context manager
 
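Collecting the futures and draining them with result(0.0) makes worker exceptions visible to the caller, which the old fire-and-forget submit never did. The zero timeout is safe when the drain happens after the with block, since ThreadPoolExecutor.__exit__ calls shutdown(wait=True) and every future is already done. A standalone sketch of the pattern:

    from concurrent.futures import Future, ThreadPoolExecutor
    from typing import List

    futures: List[Future] = []
    with ThreadPoolExecutor(2) as pool:
        futures.append(pool.submit(lambda: 1 / 0))
    for fut in futures:
        fut.result(0.0)  # re-raises the worker's ZeroDivisionError here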
@@ -178,9 +210,9 @@ class FileUploadQueue(AbstractUploadQueue):
         return self.upload_queue_size
 
 
-class BytesUploadQueue(AbstractUploadQueue):
+class FileUploadQueue(IOFileUploadQueue):
     """
-    Upload queue for bytes
+    Upload queue for files
 
     Args:
         cdf_client: Cognite Data Fusion client to use
@@ -191,20 +223,20 @@ class BytesUploadQueue(AbstractUploadQueue):
         methods).
         trigger_log_level: Log level to log upload triggers to.
         thread_name: Thread name of uploader thread.
-        overwrite_existing: If 'overwrite' is set to true, fields for the files found for externalIds can be overwritten
     """
 
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[Any]], None]] = None,
+        post_upload_function: Optional[Callable[[List[Event]], None]] = None,
         max_queue_size: Optional[int] = None,
         max_upload_interval: Optional[int] = None,
         trigger_log_level: str = "DEBUG",
         thread_name: Optional[str] = None,
        overwrite_existing: bool = False,
         cancellation_token: threading.Event = threading.Event(),
-    ) -> None:
+    ):
+        # Super sets post_upload and threshold
         super().__init__(
             cdf_client,
             post_upload_function,
@@ -212,105 +244,75 @@
             max_queue_size,
             max_upload_interval,
             trigger_log_level,
+            overwrite_existing,
             cancellation_token,
         )
-        self.upload_queue: List[Tuple[bytes, FileMetadata]] = []
-        self.overwrite_existing = overwrite_existing
-        self.upload_queue_size = 0
-
-        self.bytes_queued = BYTES_UPLOADER_QUEUED
-        self.queue_size = BYTES_UPLOADER_QUEUE_SIZE
-        self.bytes_written = BYTES_UPLOADER_WRITTEN
 
-    def add_to_upload_queue(self, content: bytes, metadata: FileMetadata) -> None:
+    def add_to_upload_queue(self, file_meta: FileMetadata, file_name: Union[str, PathLike]) -> None:
         """
-        Add object to upload queue. The queue will be uploaded if the queue size is larger than the threshold
+        Add file to upload queue. The queue will be uploaded if the queue size is larger than the threshold
         specified in the __init__.
-        Args:
-            content: bytes object to upload
-            metadata: metadata for the given bytes object
-        """
-        with self.lock:
-            self.upload_queue.append((content, metadata))
-            self.upload_queue_size += 1
-            self.bytes_queued.inc()
-            self.queue_size.set(self.upload_queue_size)
 
-    def upload(self) -> None:
-        """
-        Trigger an upload of the queue, clears queue afterwards
+        Args:
+            file_meta: File metadata-object
+            file_name: Path to file to be uploaded.
+                If none, the file object will still be created, but no data is uploaded
         """
-        if len(self.upload_queue) == 0:
-            return
 
-        with self.lock:
-            # Upload frames in batches
-            self._upload_batch()
+        def load_file_from_path() -> BinaryIO:
+            return open(file_name, "rb")
 
-            # Log stats
-            self.bytes_written.inc(self.upload_queue_size)
+        self.add_io_to_upload_queue(file_meta, load_file_from_path)
 
-            try:
-                self._post_upload(self.upload_queue)
-            except Exception as e:
-                self.logger.error("Error in upload callback: %s", str(e))
 
-            # Clear queue
-            self.upload_queue.clear()
-            self.upload_queue_size = 0
-            self.logger.info(f"Uploaded {self.upload_queue_size} files")
-            self.queue_size.set(self.upload_queue_size)
+class BytesUploadQueue(IOFileUploadQueue):
+    """
+    Upload queue for bytes
 
-    def _upload_batch(self) -> None:
-        # Concurrently execute bytes-uploads
-        with ThreadPoolExecutor(self.cdf_client.config.max_workers) as pool:
-            for i, (frame, metadata) in enumerate(self.upload_queue):
-                pool.submit(self._upload_single, i, frame, metadata)
+    Args:
+        cdf_client: Cognite Data Fusion client to use
+        post_upload_function: A function that will be called after each upload. The function will be given one argument:
+            A list of the events that were uploaded.
+        max_queue_size: Maximum size of upload queue. Defaults to no max size.
+        max_upload_interval: Automatically trigger an upload each m seconds when run as a thread (use start/stop
+            methods).
+        trigger_log_level: Log level to log upload triggers to.
+        thread_name: Thread name of uploader thread.
+        overwrite_existing: If 'overwrite' is set to true, fields for the files found for externalIds can be overwritten
+    """
 
-    @retry(
-        exceptions=(CogniteAPIError, ConnectionError),
-        tries=RETRIES,
-        delay=RETRY_DELAY,
-        max_delay=RETRY_MAX_DELAY,
-        backoff=RETRY_BACKOFF_FACTOR,
-    )
-    def _upload_single(self, index: int, content: bytes, metadata: FileMetadata) -> None:
-        # Upload object
-        file_meta_data: FileMetadata = self.cdf_client.files.upload_bytes(
-            content, overwrite=self.overwrite_existing, **metadata.dump()
+    def __init__(
+        self,
+        cdf_client: CogniteClient,
+        post_upload_function: Optional[Callable[[List[Any]], None]] = None,
+        max_queue_size: Optional[int] = None,
+        max_upload_interval: Optional[int] = None,
+        trigger_log_level: str = "DEBUG",
+        thread_name: Optional[str] = None,
+        overwrite_existing: bool = False,
+        cancellation_token: threading.Event = threading.Event(),
+    ) -> None:
+        super().__init__(
+            cdf_client,
+            post_upload_function,
+            max_queue_size,
+            max_upload_interval,
+            trigger_log_level,
+            thread_name,
+            overwrite_existing,
+            cancellation_token,
         )
 
-        # Update meta-object in queue
-        self.upload_queue[index] = (content, file_meta_data)
-
-    def __enter__(self) -> "BytesUploadQueue":
-        """
-        Wraps around start method, for use as context manager
-
-        Returns:
-            self
-        """
-        self.start()
-        return self
-
-    def __exit__(
-        self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
-    ) -> None:
+    def add_to_upload_queue(self, content: bytes, metadata: FileMetadata) -> None:
         """
-        Wraps around stop method, for use as context manager
-
+        Add object to upload queue. The queue will be uploaded if the queue size is larger than the threshold
+        specified in the __init__.
         Args:
-            exc_type: Exception type
-            exc_val: Exception value
-            exc_tb: Traceback
+            content: bytes object to upload
+            metadata: metadata for the given bytes object
         """
-        self.stop()
 
-    def __len__(self) -> int:
-        """
-        The size of the upload queue
+        def get_byte_io() -> BinaryIO:
+            return BytesIO(content)
 
-        Returns:
-            Number of events in queue
-        """
-        return self.upload_queue_size
+        self.add_io_to_upload_queue(metadata, get_byte_io)
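After this refactor both FileUploadQueue and BytesUploadQueue are thin wrappers that wrap their input in an IO factory and delegate to IOFileUploadQueue; note that the docstrings they carry over still say "events" where files are meant, a leftover from the event-queue template. A hedged usage sketch, assuming a configured CogniteClient named client:

    from cognite.client.data_classes import FileMetadata
    from cognite.extractorutils.uploader import BytesUploadQueue

    with BytesUploadQueue(client, max_upload_interval=30) as queue:
        queue.add_to_upload_queue(
            content=b"hello world",
            metadata=FileMetadata(external_id="greeting", name="greeting.txt"),
        )
    # Leaving the context stops the uploader thread and flushes remaining items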
--- cognite_extractor_utils-6.1.0/pyproject.toml
+++ cognite_extractor_utils-6.2.0/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cognite-extractor-utils"
-version = "6.1.0"
+version = "6.2.0"
 description = "Utilities for easier development of extractors for CDF"
 authors = ["Mathias Lohne <mathias.lohne@cognite.com>"]
 license = "Apache-2.0"
@@ -67,7 +67,7 @@ jq = [{version = "^1.3.0", platform = "darwin"}, {version = "^1.3.0", platform =
 experimental = ["cognite-sdk-experimental"]
 
 [tool.poetry.group.dev.dependencies]
-mypy = "1.7.1"
+mypy = "1.8.0"
 ruff = "^0.1.0"
 pytest = "^7.0.0"
 pytest-cov = "^4.0.0"