cognite-extractor-utils: 7.5.6-py3-none-any.whl → 7.5.8-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

This release of cognite-extractor-utils has been flagged as potentially problematic.

Files changed (36)
  1. cognite/extractorutils/__init__.py +1 -1
  2. cognite/extractorutils/_inner_util.py +1 -1
  3. cognite/extractorutils/base.py +4 -3
  4. cognite/extractorutils/configtools/_util.py +2 -1
  5. cognite/extractorutils/configtools/elements.py +1 -1
  6. cognite/extractorutils/configtools/loaders.py +10 -9
  7. cognite/extractorutils/exceptions.py +1 -1
  8. cognite/extractorutils/metrics.py +7 -6
  9. cognite/extractorutils/statestore/hashing.py +6 -6
  10. cognite/extractorutils/statestore/watermark.py +13 -13
  11. cognite/extractorutils/threading.py +1 -1
  12. cognite/extractorutils/unstable/configuration/exceptions.py +2 -5
  13. cognite/extractorutils/unstable/configuration/loaders.py +8 -8
  14. cognite/extractorutils/unstable/configuration/models.py +12 -12
  15. cognite/extractorutils/unstable/core/base.py +6 -9
  16. cognite/extractorutils/unstable/core/errors.py +1 -1
  17. cognite/extractorutils/unstable/core/restart_policy.py +1 -1
  18. cognite/extractorutils/unstable/core/runtime.py +10 -55
  19. cognite/extractorutils/unstable/core/tasks.py +1 -1
  20. cognite/extractorutils/unstable/scheduling/_scheduler.py +1 -1
  21. cognite/extractorutils/uploader/_base.py +2 -1
  22. cognite/extractorutils/uploader/assets.py +3 -2
  23. cognite/extractorutils/uploader/data_modeling.py +3 -2
  24. cognite/extractorutils/uploader/events.py +2 -2
  25. cognite/extractorutils/uploader/files.py +13 -18
  26. cognite/extractorutils/uploader/raw.py +3 -2
  27. cognite/extractorutils/uploader/time_series.py +9 -8
  28. cognite/extractorutils/uploader/upload_failure_handler.py +2 -2
  29. cognite/extractorutils/uploader_extractor.py +7 -6
  30. cognite/extractorutils/uploader_types.py +2 -1
  31. cognite/extractorutils/util.py +9 -8
  32. {cognite_extractor_utils-7.5.6.dist-info → cognite_extractor_utils-7.5.8.dist-info}/METADATA +30 -36
  33. cognite_extractor_utils-7.5.8.dist-info/RECORD +49 -0
  34. {cognite_extractor_utils-7.5.6.dist-info → cognite_extractor_utils-7.5.8.dist-info}/WHEEL +1 -1
  35. cognite_extractor_utils-7.5.6.dist-info/RECORD +0 -49
  36. {cognite_extractor_utils-7.5.6.dist-info → cognite_extractor_utils-7.5.8.dist-info/licenses}/LICENSE +0 -0
cognite/extractorutils/__init__.py
@@ -16,7 +16,7 @@
 Cognite extractor utils is a Python package that simplifies the development of new extractors.
 """
 
-__version__ = "7.5.6"
+__version__ = "7.5.8"
 from .base import Extractor
 
 __all__ = ["Extractor"]
cognite/extractorutils/_inner_util.py
@@ -40,7 +40,7 @@ class _DecimalEncoder(json.JSONEncoder):
     def default(self, obj: Any) -> dict[str, str]:
         if isinstance(obj, Decimal):
             return {"type": "decimal_encoded", "value": str(obj)}
-        return super(_DecimalEncoder, self).default(obj)
+        return super().default(obj)
 
 
 class _DecimalDecoder(json.JSONDecoder):
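
Reviewer context: these two helpers implement a tagged round-trip so Decimal values survive JSON serialization in the state stores. A minimal, self-contained sketch of the same pattern (illustrative, not the package's exact code):

import json
from decimal import Decimal
from typing import Any


class DecimalEncoder(json.JSONEncoder):
    """Encode Decimal as a tagged dict so it survives JSON."""

    def default(self, obj: Any) -> Any:
        if isinstance(obj, Decimal):
            return {"type": "decimal_encoded", "value": str(obj)}
        return super().default(obj)  # fall back for unsupported types


def decode_hook(obj: dict) -> Any:
    """Turn tagged dicts back into Decimal on load."""
    if obj.get("type") == "decimal_encoded":
        return Decimal(obj["value"])
    return obj


payload = json.dumps({"high": Decimal("1.23")}, cls=DecimalEncoder)
assert json.loads(payload, object_hook=decode_hook)["high"] == Decimal("1.23")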
cognite/extractorutils/base.py
@@ -15,11 +15,12 @@
 import logging
 import os
 import sys
+from collections.abc import Callable
 from dataclasses import is_dataclass
 from enum import Enum
 from threading import Thread
 from types import TracebackType
-from typing import Any, Callable, Generic, Type, TypeVar
+from typing import Any, Generic, TypeVar
 
 from dotenv import find_dotenv, load_dotenv
 
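
These import changes are the typing cleanup that recurs throughout this release: Callable and Iterable move to collections.abc, Type[X] becomes the builtin type[X] (PEP 585), and Optional/Union give way to X | None (PEP 604). A small before/after sketch of the convention (Widget and make are made-up names for illustration):

# Before (deprecated typing aliases):
# from typing import Callable, List, Optional, Type
# def make(cls: Type[Widget], hooks: Optional[List[Callable[[], None]]] = None) -> Widget: ...

# After (PEP 585 / PEP 604 style, Python 3.10+):
from collections.abc import Callable


class Widget: ...


def make(cls: type[Widget], hooks: list[Callable[[], None]] | None = None) -> Widget:
    for hook in hooks or []:
        hook()  # run registered callbacks before handing back the instance
    return cls()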
@@ -79,7 +80,7 @@ class Extractor(Generic[CustomConfigClass]):
         description: str,
         version: str | None = None,
         run_handle: RunHandle | None = None,
-        config_class: Type[CustomConfigClass],
+        config_class: type[CustomConfigClass],
         metrics: BaseMetrics | None = None,
         use_default_state_store: bool = True,
         cancellation_token: CancellationToken | None = None,
@@ -322,7 +323,7 @@ class Extractor(Generic[CustomConfigClass]):
         return self
 
     def __exit__(
-        self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
     ) -> bool:
         """
         Shuts down the extractor. Makes sure states are preserved, that all uploads of data and metrics are done, etc.
cognite/extractorutils/configtools/_util.py
@@ -13,8 +13,9 @@
 # limitations under the License.
 import base64
 import re
+from collections.abc import Callable
 from pathlib import Path
-from typing import Any, Callable
+from typing import Any
 
 from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.primitives import serialization as serialization
cognite/extractorutils/configtools/elements.py
@@ -770,7 +770,7 @@ class CastableInt(int):
     floats and other types supported by standard int.
     """
 
-    if not isinstance(value, (int, str, bytes)):
+    if not isinstance(value, int | str | bytes):
        raise ValueError(f"CastableInt cannot be created form value {value!r} of type {type(value)!r}.")
 
     return super().__new__(cls, value)
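
Since Python 3.10, isinstance accepts PEP 604 union types directly, so the tuple form and the union form are interchangeable. A quick illustration:

value = b"42"

# Equivalent checks on Python 3.10+:
assert isinstance(value, (int, str, bytes))
assert isinstance(value, int | str | bytes)

# The union form raises TypeError on older interpreters,
# so this change implies a Python >= 3.10 floor.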
cognite/extractorutils/configtools/loaders.py
@@ -19,10 +19,11 @@ import logging
 import os
 import re
 import sys
+from collections.abc import Callable, Iterable
 from enum import Enum
 from hashlib import sha256
 from pathlib import Path
-from typing import Any, Callable, Generic, Iterable, TextIO, Type, TypeVar, cast
+from typing import Any, Generic, TextIO, TypeVar, cast
 
 import dacite
 import yaml
@@ -211,7 +212,7 @@ def _load_yaml_dict(
 
 def _load_yaml(
     source: TextIO | str,
-    config_type: Type[CustomConfigClass],
+    config_type: type[CustomConfigClass],
     case_style: str = "hyphen",
     expand_envvars: bool = True,
     dict_manipulator: Callable[[dict[str, Any]], dict[str, Any]] = lambda x: x,
@@ -243,13 +244,13 @@ def _load_yaml(
     else:
         path = None
 
-    def name(type_: Type) -> str:
+    def name(type_: type) -> str:
         return type_.__name__ if hasattr(type_, "__name__") else str(type_)
 
-    def all_types(type_: Type) -> Iterable[Type]:
+    def all_types(type_: type) -> Iterable[type]:
         return type_.__args__ if hasattr(type_, "__args__") else [type_]
 
-    if isinstance(e, (dacite.WrongTypeError, dacite.UnionMatchError)) and e.value is not None:
+    if isinstance(e, dacite.WrongTypeError | dacite.UnionMatchError) and e.value is not None:
         got_type = name(type(e.value))
         need_type = ", ".join(name(t) for t in all_types(e.field_type))
 
@@ -268,7 +269,7 @@ def _load_yaml(
 
 def load_yaml(
     source: TextIO | str,
-    config_type: Type[CustomConfigClass],
+    config_type: type[CustomConfigClass],
     case_style: str = "hyphen",
     expand_envvars: bool = True,
     keyvault_loader: KeyVaultLoader | None = None,
@@ -346,7 +347,7 @@ def compile_patterns(ignore_patterns: list[str | IgnorePattern]) -> list[re.Patt
 
 
 class ConfigResolver(Generic[CustomConfigClass]):
-    def __init__(self, config_path: str, config_type: Type[CustomConfigClass]):
+    def __init__(self, config_path: str, config_type: type[CustomConfigClass]):
         self.config_path = config_path
         self.config_type = config_type
 
@@ -356,7 +357,7 @@ class ConfigResolver(Generic[CustomConfigClass]):
         self._cognite_client: CogniteClient | None = None
 
     def _reload_file(self) -> None:
-        with open(self.config_path, "r") as stream:
+        with open(self.config_path) as stream:
             self._config_text = stream.read()
 
     @property
@@ -401,7 +402,7 @@ class ConfigResolver(Generic[CustomConfigClass]):
 
     @classmethod
     def from_cli(
-        cls, name: str, description: str, version: str, config_type: Type[CustomConfigClass]
+        cls, name: str, description: str, version: str, config_type: type[CustomConfigClass]
    ) -> "ConfigResolver":
        argument_parser = argparse.ArgumentParser(sys.argv[0], description=description)
        argument_parser.add_argument(
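
The public surface of these loaders is unchanged: load_yaml still takes a source and a dataclass-based config type. A hedged usage sketch (MyConfig is a made-up config class; this assumes load_yaml remains importable from cognite.extractorutils.configtools as in earlier releases):

from dataclasses import dataclass

from cognite.extractorutils.configtools import load_yaml


@dataclass
class MyConfig:
    name: str
    parallelism: int


# Keys are hyphen-cased by default (case_style="hyphen")
config = load_yaml("name: demo\nparallelism: 4", MyConfig)
assert config.parallelism == 4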
cognite/extractorutils/exceptions.py
@@ -23,7 +23,7 @@ class InvalidConfigError(Exception):
     """
 
     def __init__(self, message: str, details: list[str] | None = None):
-        super(InvalidConfigError, self).__init__()
+        super().__init__()
         self.message = message
         self.details = details
 
cognite/extractorutils/metrics.py
@@ -41,9 +41,10 @@ import logging
 import os
 import threading
 from abc import ABC, abstractmethod
+from collections.abc import Callable
 from time import sleep
 from types import TracebackType
-from typing import Any, Callable, Type, TypeVar
+from typing import Any, TypeVar
 
 import arrow
 import psutil
@@ -65,7 +66,7 @@ _metrics_singularities = {}
 T = TypeVar("T")
 
 
-def safe_get(cls: Type[T], *args: Any, **kwargs: Any) -> T:
+def safe_get(cls: type[T], *args: Any, **kwargs: Any) -> T:
     """
     A factory for instances of metrics collections.
 
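
safe_get is a per-class singleton factory: Prometheus collectors may only be registered once, so repeated calls must hand back the same instance. A minimal sketch of that pattern, assuming it keys a module-level cache by class (names below are illustrative, not the package's exact internals):

from typing import Any, TypeVar

T = TypeVar("T")

_singletons: dict[type, Any] = {}  # one instance per metrics class


def safe_get(cls: type[T], *args: Any, **kwargs: Any) -> T:
    # Create the instance on first request, reuse it afterwards so
    # Prometheus collectors are never registered twice.
    if cls not in _singletons:
        _singletons[cls] = cls(*args, **kwargs)
    return _singletons[cls]


class MyMetrics:  # stand-in for a BaseMetrics subclass
    pass


assert safe_get(MyMetrics) is safe_get(MyMetrics)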
@@ -232,7 +233,7 @@ class AbstractMetricsPusher(ABC):
         return self
 
     def __exit__(
-        self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
     ) -> None:
         """
         Wraps around stop method, for use as context manager
@@ -269,7 +270,7 @@ class PrometheusPusher(AbstractMetricsPusher):
         thread_name: str | None = None,
         cancellation_token: CancellationToken | None = None,
     ):
-        super(PrometheusPusher, self).__init__(push_interval, thread_name, cancellation_token)
+        super().__init__(push_interval, thread_name, cancellation_token)
 
         self.username = username
         self.job_name = job_name
@@ -345,7 +346,7 @@ class CognitePusher(AbstractMetricsPusher):
         thread_name: str | None = None,
         cancellation_token: CancellationToken | None = None,
     ):
-        super(CognitePusher, self).__init__(push_interval, thread_name, cancellation_token)
+        super().__init__(push_interval, thread_name, cancellation_token)
 
         self.cdf_client = cdf_client
         self.asset = asset
@@ -409,7 +410,7 @@ class CognitePusher(AbstractMetricsPusher):
         datapoints: list[dict[str, str | int | list[Any] | Datapoints | DatapointsArray]] = []
 
         for metric in REGISTRY.collect():
-            if type(metric) == Metric and metric.type in ["gauge", "counter"]:
+            if isinstance(metric, Metric) and metric.type in ["gauge", "counter"]:
                 if len(metric.samples) == 0:
                     continue
 
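
The last change swaps an exact-type comparison for isinstance, which is what linters ask for (e.g. ruff's E721). Note it is also a slight behavioral widening: subclasses of Metric now pass the filter too. In isolation:

class Metric: ...


class CustomMetric(Metric): ...


m = CustomMetric()

assert type(m) is not Metric  # exact-type check rejects subclasses
assert isinstance(m, Metric)  # isinstance accepts them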
cognite/extractorutils/statestore/hashing.py
@@ -1,8 +1,9 @@
 import hashlib
 import json
 from abc import ABC
+from collections.abc import Iterable, Iterator
 from types import TracebackType
-from typing import Any, Iterable, Iterator, Type
+from typing import Any
 
 import orjson
 
@@ -66,8 +67,7 @@ class AbstractHashStateStore(_BaseStateStore, ABC):
 
     def __iter__(self) -> Iterator[str]:
         with self.lock:
-            for key in self._local_state:
-                yield key
+            yield from self._local_state
 
 
 class RawHashStateStore(AbstractHashStateStore):
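
yield from delegates to the sub-iterable and replaces the manual loop one-for-one; here it keeps the generator holding the lock while clients iterate. The equivalence, in isolation:

def keys_loop(state: dict[str, int]):
    for key in state:  # manual delegation
        yield key


def keys_delegate(state: dict[str, int]):
    yield from state  # same iteration, one line


state = {"a": 1, "b": 2}
assert list(keys_loop(state)) == list(keys_delegate(state)) == ["a", "b"]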
@@ -169,7 +169,7 @@ class RawHashStateStore(AbstractHashStateStore):
 
     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
@@ -214,7 +214,7 @@ class LocalHashStateStore(AbstractHashStateStore):
 
         with self.lock:
             try:
-                with open(self._file_path, "r") as f:
+                with open(self._file_path) as f:
                     self._local_state = json.load(f, cls=_DecimalDecoder)
             except FileNotFoundError:
                 pass
@@ -243,7 +243,7 @@ class LocalHashStateStore(AbstractHashStateStore):
 
     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
cognite/extractorutils/statestore/watermark.py
@@ -87,8 +87,9 @@ You can set a state store to automatically update on upload triggers from an upl
 
 import json
 from abc import ABC
+from collections.abc import Callable, Iterator
 from types import TracebackType
-from typing import Any, Callable, Dict, Iterator, List, Tuple, Type, Union
+from typing import Any
 
 from cognite.client import CogniteClient
 from cognite.client.exceptions import CogniteAPIError
@@ -126,10 +127,10 @@ class AbstractStateStore(_BaseStateStore, ABC):
             cancellation_token=cancellation_token,
         )
 
-        self._local_state: Dict[str, Dict[str, Any]] = {}
-        self._deleted: List[str] = []
+        self._local_state: dict[str, dict[str, Any]] = {}
+        self._deleted: list[str] = []
 
-    def get_state(self, external_id: Union[str, List[str]]) -> Union[Tuple[Any, Any], List[Tuple[Any, Any]]]:
+    def get_state(self, external_id: str | list[str]) -> tuple[Any, Any] | list[tuple[Any, Any]]:
         """
         Get state(s) for external ID(s)
 
@@ -192,7 +193,7 @@ class AbstractStateStore(_BaseStateStore, ABC):
         self._local_state.pop(external_id, None)
         self._deleted.append(external_id)
 
-    def post_upload_handler(self) -> Callable[[List[Dict[str, Union[str, DataPointList]]]], None]:
+    def post_upload_handler(self) -> Callable[[list[dict[str, str | DataPointList]]], None]:
         """
         Get a callable suitable for passing to a time series upload queue as post_upload_function, that will
         automatically update the states in this state store when that upload queue is uploading.
@@ -201,7 +202,7 @@ class AbstractStateStore(_BaseStateStore, ABC):
         A function that expands the current states with the values given
         """
 
-        def callback(uploaded_points: List[Dict[str, Union[str, DataPointList]]]) -> None:
+        def callback(uploaded_points: list[dict[str, str | DataPointList]]) -> None:
             for time_series in uploaded_points:
                 # Use CDF timestamps
                 data_points = time_series["datapoints"]
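
As the docstring above describes, the returned callback is meant to be handed to a time series upload queue so watermarks advance automatically after each successful upload. A hedged usage sketch (constructor arguments abbreviated; consult the package docs for the exact signatures):

from cognite.client import CogniteClient
from cognite.extractorutils.statestore import LocalStateStore
from cognite.extractorutils.uploader import TimeSeriesUploadQueue

client = CogniteClient()  # assumes ambient client configuration

states = LocalStateStore("states.json")
states.initialize()

# The state store updates low/high watermarks whenever the queue uploads
queue = TimeSeriesUploadQueue(
    client,
    post_upload_function=states.post_upload_handler(),
)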
@@ -238,10 +239,10 @@ class AbstractStateStore(_BaseStateStore, ABC):
 
         return False
 
-    def __getitem__(self, external_id: str) -> Tuple[Any, Any]:
+    def __getitem__(self, external_id: str) -> tuple[Any, Any]:
         return self.get_state(external_id)  # type: ignore # will not be list if input is single str
 
-    def __setitem__(self, key: str, value: Tuple[Any, Any]) -> None:
+    def __setitem__(self, key: str, value: tuple[Any, Any]) -> None:
         self.set_state(external_id=key, low=value[0], high=value[1])
 
     def __contains__(self, external_id: str) -> bool:
@@ -251,8 +252,7 @@ class AbstractStateStore(_BaseStateStore, ABC):
         return len(self._local_state)
 
     def __iter__(self) -> Iterator[str]:
-        for key in self._local_state:
-            yield key
+        yield from self._local_state
 
 
 class RawStateStore(AbstractStateStore):
@@ -380,7 +380,7 @@ class RawStateStore(AbstractStateStore):
         return self
 
     def __exit__(
-        self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
     ) -> None:
         """
         Wraps around stop method, for use as context manager
@@ -430,7 +430,7 @@ class LocalStateStore(AbstractStateStore):
 
         with self.lock:
             try:
-                with open(self._file_path, "r") as f:
+                with open(self._file_path) as f:
                     self._local_state = json.load(f, cls=_DecimalDecoder)
             except FileNotFoundError:
                 pass
@@ -460,7 +460,7 @@ class LocalStateStore(AbstractStateStore):
 
     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> None:
cognite/extractorutils/threading.py
@@ -17,7 +17,7 @@ class CancellationToken:
     def __init__(self, condition: Condition | None = None) -> None:
         self._cv: Condition = condition or Condition()
         self._is_cancelled_int: bool = False
-        self._parent: "CancellationToken" | None = None
+        self._parent: CancellationToken | None = None
 
     def __repr__(self) -> str:
         cls = self.__class__
cognite/extractorutils/unstable/configuration/exceptions.py
@@ -1,6 +1,3 @@
-from typing import List, Optional
-
-
 class InvalidConfigError(Exception):
     """
     Exception thrown from ``load_yaml`` and ``load_yaml_dict`` if config file is invalid. This can be due to
@@ -10,8 +7,8 @@ class InvalidConfigError(Exception):
     * Unkown fields
     """
 
-    def __init__(self, message: str, details: Optional[List[str]] = None):
-        super(InvalidConfigError, self).__init__()
+    def __init__(self, message: str, details: list[str] | None = None):
+        super().__init__()
         self.message = message
         self.details = details
 
cognite/extractorutils/unstable/configuration/loaders.py
@@ -2,7 +2,7 @@ import json
 from enum import Enum
 from io import StringIO
 from pathlib import Path
-from typing import Dict, Optional, TextIO, Tuple, Type, TypeVar, Union
+from typing import TextIO, TypeVar
 
 from pydantic import ValidationError
 
@@ -23,7 +23,7 @@ class ConfigFormat(Enum):
     YAML = "yaml"
 
 
-def load_file(path: Path, schema: Type[_T]) -> _T:
+def load_file(path: Path, schema: type[_T]) -> _T:
     if path.suffix in [".yaml", ".yml"]:
         format = ConfigFormat.YAML
     elif path.suffix == ".json":
@@ -31,14 +31,14 @@ def load_file(path: Path, schema: Type[_T]) -> _T:
     else:
         raise InvalidConfigError(f"Unknown file type {path.suffix}")
 
-    with open(path, "r") as stream:
+    with open(path) as stream:
         return load_io(stream, format, schema)
 
 
 def load_from_cdf(
-    cognite_client: CogniteClient, external_id: str, schema: Type[_T], revision: Optional[int] = None
-) -> Tuple[_T, int]:
-    params: Dict[str, Union[str, int]] = {"externalId": external_id}
+    cognite_client: CogniteClient, external_id: str, schema: type[_T], revision: int | None = None
+) -> tuple[_T, int]:
+    params: dict[str, str | int] = {"integration": external_id}
     if revision:
         params["revision"] = revision
     response = cognite_client.get(
@@ -61,7 +61,7 @@ def load_from_cdf(
         raise new_e from e
 
 
-def load_io(stream: TextIO, format: ConfigFormat, schema: Type[_T]) -> _T:
+def load_io(stream: TextIO, format: ConfigFormat, schema: type[_T]) -> _T:
     if format == ConfigFormat.JSON:
         data = json.load(stream)
 
@@ -97,7 +97,7 @@ def _make_loc_str(loc: tuple) -> str:
     return loc_str
 
 
-def load_dict(data: dict, schema: Type[_T]) -> _T:
+def load_dict(data: dict, schema: type[_T]) -> _T:
     try:
         return schema.model_validate(data)
 
cognite/extractorutils/unstable/configuration/models.py
@@ -2,7 +2,7 @@ import re
 from datetime import timedelta
 from enum import Enum
 from pathlib import Path
-from typing import Annotated, Any, Dict, List, Literal, Optional, Union
+from typing import Annotated, Any, Literal
 
 from humps import kebabize
 from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler
@@ -49,21 +49,21 @@ class _ClientCredentialsConfig(ConfigModel):
     client_id: str
     client_secret: str
     token_url: str
-    scopes: List[str]
-    resource: Optional[str] = None
-    audience: Optional[str] = None
+    scopes: list[str]
+    resource: str | None = None
+    audience: str | None = None
 
 
 class _ClientCertificateConfig(ConfigModel):
     type: Literal["client-certificate"]
     client_id: str
     path: Path
-    password: Optional[str] = None
+    password: str | None = None
     authority_url: str
-    scopes: List[str]
+    scopes: list[str]
 
 
-AuthenticationConfig = Annotated[Union[_ClientCredentialsConfig, _ClientCertificateConfig], Field(discriminator="type")]
+AuthenticationConfig = Annotated[_ClientCredentialsConfig | _ClientCertificateConfig, Field(discriminator="type")]
 
 
 class TimeIntervalConfig:
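
AuthenticationConfig is a pydantic discriminated union: the literal type field selects which model validates the payload, and pydantic accepts PEP 604 unions here just like typing.Union. A self-contained sketch of the mechanism (simplified models, not the package's full definitions):

from typing import Annotated, Literal

from pydantic import BaseModel, Field, TypeAdapter


class ClientCredentials(BaseModel):
    type: Literal["client-credentials"]
    client_id: str
    client_secret: str


class ClientCertificate(BaseModel):
    type: Literal["client-certificate"]
    client_id: str
    path: str


Auth = Annotated[ClientCredentials | ClientCertificate, Field(discriminator="type")]

# The "type" field routes validation to the matching model
auth = TypeAdapter(Auth).validate_python(
    {"type": "client-credentials", "client_id": "id", "client_secret": "secret"}
)
assert isinstance(auth, ClientCredentials)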
@@ -76,7 +76,7 @@ class TimeIntervalConfig:
 
     @classmethod
     def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
-        return core_schema.no_info_after_validator_function(cls, handler(Union[str, int]))
+        return core_schema.no_info_after_validator_function(cls, handler(str | int))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, TimeIntervalConfig):
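
__get_pydantic_core_schema__ with no_info_after_validator_function tells pydantic v2 to first validate the input as str | int, then pass the result through the class constructor. A minimal custom type built the same way (Seconds is illustrative, with a toy parser, not the package's TimeIntervalConfig):

from typing import Any

from pydantic import BaseModel, GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema


class Seconds:
    def __init__(self, value: str | int) -> None:
        # Toy parser: accept ints, or strings like "30s" (seconds only)
        self.seconds = value if isinstance(value, int) else int(str(value).rstrip("s"))

    @classmethod
    def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
        # Validate as str | int first, then run the result through Seconds(...)
        return core_schema.no_info_after_validator_function(cls, handler(str | int))


class Config(BaseModel):
    timeout: Seconds


assert Config(timeout="30s").timeout.seconds == 30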
@@ -140,13 +140,13 @@ class TimeIntervalConfig:
 
 class _ConnectionParameters(ConfigModel):
     gzip_compression: bool = False
-    status_forcelist: List[int] = Field(default_factory=lambda: [429, 502, 503, 504])
+    status_forcelist: list[int] = Field(default_factory=lambda: [429, 502, 503, 504])
     max_retries: int = 10
     max_retries_connect: int = 3
     max_retry_backoff: TimeIntervalConfig = Field(default_factory=lambda: TimeIntervalConfig("30s"))
     max_connection_pool_size: int = 50
     ssl_verify: bool = True
-    proxies: Dict[str, str] = Field(default_factory=dict)
+    proxies: dict[str, str] = Field(default_factory=dict)
     timeout: TimeIntervalConfig = Field(default_factory=lambda: TimeIntervalConfig("30s"))
 
 
@@ -253,9 +253,9 @@ LogHandlerConfig = Annotated[LogFileHandlerConfig | LogConsoleHandlerConfig, Fie
 
 
 # Mypy BS
-def _log_handler_default() -> List[LogHandlerConfig]:
+def _log_handler_default() -> list[LogHandlerConfig]:
     return [LogConsoleHandlerConfig(type="console", level=LogLevel.INFO)]
 
 
 class ExtractorConfig(ConfigModel):
-    log_handlers: List[LogHandlerConfig] = Field(default_factory=_log_handler_default)
+    log_handlers: list[LogHandlerConfig] = Field(default_factory=_log_handler_default)
cognite/extractorutils/unstable/core/base.py
@@ -8,7 +8,7 @@ from multiprocessing import Queue
 from threading import RLock, Thread
 from traceback import format_exception
 from types import TracebackType
-from typing import Generic, Literal, Type, TypeVar
+from typing import Generic, Literal, TypeVar
 
 from humps import pascalize
 from typing_extensions import Self, assert_never
@@ -45,12 +45,10 @@ class FullConfig(Generic[_T]):
         connection_config: ConnectionConfig,
         application_config: _T,
         current_config_revision: ConfigRevision,
-        newest_config_revision: ConfigRevision,
     ) -> None:
         self.connection_config = connection_config
         self.application_config = application_config
         self.current_config_revision = current_config_revision
-        self.newest_config_revision = newest_config_revision
 
 
 class Extractor(Generic[ConfigType]):
@@ -59,7 +57,7 @@ class Extractor(Generic[ConfigType]):
     DESCRIPTION: str
     VERSION: str
 
-    CONFIG_TYPE: Type[ConfigType]
+    CONFIG_TYPE: type[ConfigType]
 
     RESTART_POLICY: RestartPolicy = WHEN_CONTINUOUS_TASKS_CRASHES
 
@@ -70,7 +68,6 @@
         self.connection_config = config.connection_config
         self.application_config = config.application_config
         self.current_config_revision = config.current_config_revision
-        self.newest_config_revision = config.newest_config_revision
 
         self.cognite_client = self.connection_config.get_cognite_client(f"{self.EXTERNAL_ID}-{self.VERSION}")
 
@@ -158,7 +155,7 @@
         self._errors.clear()
 
         res = self.cognite_client.post(
-            f"/api/v1/projects/{self.cognite_client.config.project}/odin/checkin",
+            f"/api/v1/projects/{self.cognite_client.config.project}/integrations/checkin",
             json={
                 "externalId": self.connection_config.integration,
                 "taskEvents": task_updates,
@@ -171,7 +168,7 @@
         if (
             new_config_revision
             and self.current_config_revision != "local"
-            and new_config_revision > self.newest_config_revision
+            and new_config_revision > self.current_config_revision
        ):
            self.restart()
 
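
Two substantive changes land in this file alongside the typing cleanup: the internal endpoints move from odin/... to integrations/... (matching the "integration" parameter the config loader now sends), and with newest_config_revision removed from FullConfig, the check-in handler compares the revision reported by CDF against current_config_revision directly. A sketch of the simplified restart condition (names mirror the diff, but this is an illustration, not the package's code):

from typing import Literal

ConfigRevision = Literal["local"] | int


def should_restart(current: ConfigRevision, new_revision: int | None) -> bool:
    # Only CDF-managed configs (integer revisions) can be outdated;
    # "local" file-based configs never trigger a restart from a check-in.
    return new_revision is not None and current != "local" and new_revision > current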
@@ -272,7 +269,7 @@
 
     def _report_extractor_info(self) -> None:
         self.cognite_client.post(
-            f"/api/v1/projects/{self.cognite_client.config.project}/odin/extractorinfo",
+            f"/api/v1/projects/{self.cognite_client.config.project}/integrations/extractorinfo",
             json={
                 "externalId": self.connection_config.integration,
                 "activeConfigRevision": self.current_config_revision,
@@ -305,7 +302,7 @@
 
     def __exit__(
         self,
-        exc_type: Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> bool:
cognite/extractorutils/unstable/core/errors.py
@@ -64,7 +64,7 @@ class Error:
 
     def __exit__(
         self,
-        exc_type: typing.Type[BaseException] | None,
+        exc_type: type[BaseException] | None,
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> bool:
cognite/extractorutils/unstable/core/restart_policy.py
@@ -1,4 +1,4 @@
-from typing import Callable
+from collections.abc import Callable
 
 from cognite.extractorutils.unstable.core.tasks import ContinuousTask, Task
 