cognite-extractor-utils 7.5.4__py3-none-any.whl → 7.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cognite-extractor-utils might be problematic.

Files changed (42)
  1. cognite/extractorutils/__init__.py +3 -1
  2. cognite/extractorutils/_inner_util.py +14 -3
  3. cognite/extractorutils/base.py +14 -15
  4. cognite/extractorutils/configtools/__init__.py +25 -0
  5. cognite/extractorutils/configtools/_util.py +7 -9
  6. cognite/extractorutils/configtools/elements.py +58 -49
  7. cognite/extractorutils/configtools/loaders.py +29 -26
  8. cognite/extractorutils/configtools/validators.py +2 -3
  9. cognite/extractorutils/exceptions.py +1 -4
  10. cognite/extractorutils/metrics.py +18 -18
  11. cognite/extractorutils/statestore/_base.py +3 -4
  12. cognite/extractorutils/statestore/hashing.py +24 -24
  13. cognite/extractorutils/statestore/watermark.py +17 -14
  14. cognite/extractorutils/threading.py +4 -4
  15. cognite/extractorutils/unstable/configuration/exceptions.py +24 -0
  16. cognite/extractorutils/unstable/configuration/loaders.py +18 -7
  17. cognite/extractorutils/unstable/configuration/models.py +25 -3
  18. cognite/extractorutils/unstable/core/_dto.py +10 -0
  19. cognite/extractorutils/unstable/core/base.py +179 -29
  20. cognite/extractorutils/unstable/core/errors.py +72 -0
  21. cognite/extractorutils/unstable/core/restart_policy.py +29 -0
  22. cognite/extractorutils/unstable/core/runtime.py +170 -26
  23. cognite/extractorutils/unstable/core/tasks.py +2 -0
  24. cognite/extractorutils/unstable/scheduling/_scheduler.py +4 -4
  25. cognite/extractorutils/uploader/__init__.py +14 -0
  26. cognite/extractorutils/uploader/_base.py +8 -8
  27. cognite/extractorutils/uploader/assets.py +15 -9
  28. cognite/extractorutils/uploader/data_modeling.py +13 -13
  29. cognite/extractorutils/uploader/events.py +9 -9
  30. cognite/extractorutils/uploader/files.py +127 -31
  31. cognite/extractorutils/uploader/raw.py +10 -10
  32. cognite/extractorutils/uploader/time_series.py +56 -58
  33. cognite/extractorutils/uploader/upload_failure_handler.py +64 -0
  34. cognite/extractorutils/uploader_extractor.py +11 -11
  35. cognite/extractorutils/uploader_types.py +4 -12
  36. cognite/extractorutils/util.py +21 -23
  37. {cognite_extractor_utils-7.5.4.dist-info → cognite_extractor_utils-7.5.5.dist-info}/METADATA +3 -2
  38. cognite_extractor_utils-7.5.5.dist-info/RECORD +49 -0
  39. {cognite_extractor_utils-7.5.4.dist-info → cognite_extractor_utils-7.5.5.dist-info}/WHEEL +1 -1
  40. cognite/extractorutils/unstable/core/__main__.py +0 -31
  41. cognite_extractor_utils-7.5.4.dist-info/RECORD +0 -46
  42. {cognite_extractor_utils-7.5.4.dist-info → cognite_extractor_utils-7.5.5.dist-info}/LICENSE +0 -0
cognite/extractorutils/unstable/core/runtime.py
@@ -6,15 +6,23 @@ from argparse import ArgumentParser, Namespace
 from multiprocessing import Process, Queue
 from pathlib import Path
 from typing import Any, Generic, Type, TypeVar
+from uuid import uuid4
 
+from requests.exceptions import ConnectionError
 from typing_extensions import assert_never
 
+from cognite.client.exceptions import CogniteAPIError, CogniteAuthError, CogniteConnectionError
 from cognite.extractorutils.threading import CancellationToken
+from cognite.extractorutils.unstable.configuration.exceptions import InvalidConfigError
 from cognite.extractorutils.unstable.configuration.loaders import load_file, load_from_cdf
 from cognite.extractorutils.unstable.configuration.models import ConnectionConfig
+from cognite.extractorutils.unstable.core._dto import Error
+from cognite.extractorutils.util import now
 
 from ._messaging import RuntimeMessage
-from .base import ConfigRevision, ConfigType, Extractor
+from .base import ConfigRevision, ConfigType, Extractor, FullConfig
+
+__all__ = ["Runtime", "ExtractorType"]
 
 ExtractorType = TypeVar("ExtractorType", bound=Extractor)
 
@@ -59,13 +67,18 @@ class Runtime(Generic[ExtractorType]):
             default=None,
             help="Include to use a local application configuration instead of fetching it from CDF",
         )
+        argparser.add_argument(
+            "--skip-init-checks",
+            action="store_true",
+            help="Skip any checks during startup. Useful for debugging, not recommended for production deployments.",
+        )
 
         return argparser
 
     def _setup_logging(self) -> None:
         # TODO: Figure out file logging for runtime
         fmt = logging.Formatter(
-            "%(asctime)s.%(msecs)03d UTC [%(levelname)-8s] %(threadName)s - %(message)s",
+            "%(asctime)s.%(msecs)03d UTC [%(levelname)-8s] %(process)d %(threadName)s - %(message)s",
            "%Y-%m-%d %H:%M:%S",
        )
        # Set logging to UTC
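Note: the new --skip-init-checks flag bypasses the startup verification added in this release (see _verify_connection_config below). As an illustration, with a hypothetical extractor entry point named my-extractor (the spellings of the connection-config and local-override arguments are assumed from the parsed attribute names, since their definitions lie outside this hunk):

    my-extractor --connection-config connection.yaml --local-override config.yaml --skip-init-checks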
@@ -82,16 +95,10 @@ class Runtime(Generic[ExtractorType]):
     def _inner_run(
         self,
         message_queue: Queue,
-        connection_config: ConnectionConfig,
-        application_config: ConfigType,
-        current_config_revision: ConfigRevision,
+        config: FullConfig,
     ) -> None:
         # This code is run inside the new extractor process
-        extractor = self._extractor_class.init_from_runtime(
-            connection_config,
-            application_config,
-            current_config_revision,
-        )
+        extractor = self._extractor_class._init_from_runtime(config)
         extractor._set_runtime_message_queue(message_queue)
 
         try:
@@ -104,56 +111,193 @@ class Runtime(Generic[ExtractorType]):
 
     def _spawn_extractor(
         self,
-        connection_config: ConnectionConfig,
-        application_config: ConfigType,
-        current_config_revision: ConfigRevision,
+        config: FullConfig,
     ) -> Process:
         self._message_queue = Queue()
         process = Process(
             target=self._inner_run,
-            args=(self._message_queue, connection_config, application_config, current_config_revision),
+            args=(self._message_queue, config),
         )
 
         process.start()
-        self.logger.info(f"Started extractor as {process.pid}")
+        self.logger.info(f"Started extractor with PID {process.pid}")
         return process
 
     def _get_application_config(
         self,
         args: Namespace,
         connection_config: ConnectionConfig,
-    ) -> tuple[ConfigType, ConfigRevision]:
+    ) -> tuple[ConfigType, ConfigRevision, ConfigRevision]:
         current_config_revision: ConfigRevision
+        newest_config_revision: ConfigRevision
+
         if args.local_override:
+            self.logger.info("Loading local application config")
+
             current_config_revision = "local"
-            application_config = load_file(args.local_override[0], self._extractor_class.CONFIG_TYPE)
+            newest_config_revision = "local"
+            try:
+                application_config = load_file(args.local_override[0], self._extractor_class.CONFIG_TYPE)
+            except InvalidConfigError as e:
+                self.logger.critical(str(e))
+                raise e
+            except FileNotFoundError as e:
+                self.logger.critical(str(e))
+                raise InvalidConfigError(str(e)) from e
+
         else:
+            self.logger.info("Loading application config from CDF")
             client = connection_config.get_cognite_client(
                 f"{self._extractor_class.EXTERNAL_ID}-{self._extractor_class.VERSION}"
             )
-            application_config, current_config_revision = load_from_cdf(
-                client,
-                connection_config.extraction_pipeline,
-                self._extractor_class.CONFIG_TYPE,
+
+            errors: list[Error] = []
+
+            revision: int | None = None
+            try:
+                while True:
+                    try:
+                        application_config, current_config_revision = load_from_cdf(
+                            client,
+                            connection_config.integration,
+                            self._extractor_class.CONFIG_TYPE,
+                            revision=revision,
+                        )
+                        break
+
+                    except InvalidConfigError as e:
+                        if e.attempted_revision is None:
+                            # Should never happen, attempted_revision is set in every handler in load_from_cdf, but it's
+                            # needed for type checks to pass
+                            raise e
+
+                        self.logger.error(f"Revision {e.attempted_revision} is invalid: {e.message}")
+
+                        t = now()
+                        errors.append(
+                            Error(
+                                external_id=str(uuid4()),
+                                level="error",
+                                description=f"Revision {e.attempted_revision} is invalid",
+                                details=e.message,
+                                start_time=t,
+                                end_time=t,
+                                task=None,
+                            )
+                        )
+
+                        if revision is None:
+                            revision = e.attempted_revision - 1
+                            newest_config_revision = e.attempted_revision
+                        else:
+                            revision -= 1
+
+                        if revision > 0:
+                            self.logger.info(f"Falling back to revision {revision}")
+                        else:
+                            self.logger.critical("No more revisions to fall back to")
+                            raise e
+
+            finally:
+                if errors:
+                    client.post(
+                        f"/api/v1/projects/{client.config.project}/odin/checkin",
+                        json={
+                            "externalId": connection_config.integration,
+                            "errors": [e.model_dump() for e in errors],
+                        },
+                        headers={"cdf-version": "alpha"},
+                    )
+
+        return application_config, current_config_revision, newest_config_revision
+
+    def _verify_connection_config(self, connection_config: ConnectionConfig) -> bool:
+        client = connection_config.get_cognite_client(
+            f"{self._extractor_class.EXTERNAL_ID}-{self._extractor_class.VERSION}"
+        )
+        try:
+            client.post(
+                f"/api/v1/projects/{client.config.project}/odin/checkin",
+                json={
+                    "externalId": connection_config.integration,
+                },
+                headers={"cdf-version": "alpha"},
             )
 
-        return application_config, current_config_revision
+        except CogniteConnectionError as e:
+            if e.__cause__ is not None:
+                self.logger.error(str(e.__cause__))
+            self.logger.critical("Could not connect to CDF. Please check your configuration.")
+            return False
+
+        except CogniteAuthError as e:
+            # Error while fetching auth token
+            self.logger.error(str(e))
+            self.logger.critical("Could not get an access token. Please check your configuration.")
+            return False
+
+        except CogniteAPIError as e:
+            # Error response from the CDF API
+            if e.code == 401:
+                self.logger.critical(
+                    "Got a 401 error from CDF. Please check your configuration. "
+                    "Make sure the credentials and project is correct."
+                )
+
+            elif e.message:
+                self.logger.critical(str(e.message))
+
+            else:
+                self.logger.critical(f"Error while connecting to CDF {str(e)}")
+
+            return False
+
+        except ConnectionError as e:
+            # This is sometime thrown, I've seen it when trying to get an auth token but it might happen elsewhere too
+            self.logger.error(str(e))
+            self.logger.critical("Could not initiate connection. Please check your configuration.")
+            return False
+
+        return True
 
     def run(self) -> None:
         argparser = self._create_argparser()
         args = argparser.parse_args()
 
-        self.logger.info(f"Started runtime as {os.getpid()}")
+        self.logger.info(f"Started runtime with PID {os.getpid()}")
+
+        try:
+            connection_config = load_file(args.connection_config[0], ConnectionConfig)
+        except InvalidConfigError as e:
+            self.logger.error(str(e))
+            self.logger.critical("Could not load connection config")
+            sys.exit(1)
 
-        connection_config = load_file(args.connection_config[0], ConnectionConfig)
+        if not args.skip_init_checks and not self._verify_connection_config(connection_config):
+            sys.exit(1)
 
         # This has to be Any. We don't know the type of the extractors' config at type checking since the sel doesn't
         # exist yet, and I have not found a way to represent it in a generic way that isn't just an Any in disguise.
         application_config: Any
         while not self._cancellation_token.is_cancelled:
-            application_config, current_config_revision = self._get_application_config(args, connection_config)
+            try:
+                application_config, current_config_revision, newest_config_revision = self._get_application_config(
+                    args, connection_config
+                )
+
+            except InvalidConfigError:
+                self.logger.critical("Could not get a valid application config file. Shutting down")
+                sys.exit(1)
+
             # Start extractor in separate process, and wait for it to end
-            process = self._spawn_extractor(connection_config, application_config, current_config_revision)
+            process = self._spawn_extractor(
+                FullConfig(
+                    connection_config=connection_config,
+                    application_config=application_config,
+                    current_config_revision=current_config_revision,
+                    newest_config_revision=newest_config_revision,
+                )
+            )
             process.join()
 
             # Check if we are asked to restart the extractor, shut down otherwise
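The core of the new _get_application_config is a fallback loop: start from the newest configuration revision and, when a revision fails validation, record the error and retry the next-oldest revision until one loads or none remain. A minimal standalone sketch of that strategy, where load_revision and InvalidRevision are hypothetical stand-ins for load_from_cdf and InvalidConfigError (the real code also reports each failure to the checkin endpoint and remembers the newest revision it saw):

    from typing import Callable

    class InvalidRevision(Exception):
        """Hypothetical stand-in for InvalidConfigError."""

    def load_with_fallback(load_revision: Callable[[int], dict], newest: int) -> tuple[dict, int]:
        # Walk backwards from the newest revision until one parses,
        # mirroring the while-loop in _get_application_config above.
        revision = newest
        while revision > 0:
            try:
                return load_revision(revision), revision
            except InvalidRevision as err:
                print(f"Revision {revision} is invalid: {err}")
                revision -= 1
        raise RuntimeError("No more revisions to fall back to")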
cognite/extractorutils/unstable/core/tasks.py
@@ -4,6 +4,8 @@ from typing import Callable
 
 from cognite.extractorutils.unstable.configuration.models import ScheduleConfig
 
+__all__ = ["ScheduledTask", "ContinuousTask", "StartupTask", "Task"]
+
 
 @dataclass
 class _Task(ABC):
cognite/extractorutils/unstable/scheduling/_scheduler.py
@@ -56,7 +56,7 @@ class TaskScheduler:
     def _run_job(self, job: Job) -> bool:
         with self._running_lock:
             if job in self._running:
-                self._logger.warning(f"Job {job.name} already running")
+                self._logger.warning(f"Job '{job.name}' already running")
                 return False
 
         def wrap() -> None:
@@ -65,13 +65,13 @@ class TaskScheduler:
             try:
                 job.call()
 
-                self._logger.info(f"Job {job.name} done. Next run at {arrow.get(job.schedule.next()).isoformat()}")
+                self._logger.info(f"Job '{job.name}' done. Next run at {arrow.get(job.schedule.next()).isoformat()}")
 
             finally:
                 with self._running_lock:
                     self._running.remove(job)
 
-        Thread(target=wrap, name=f"Run{pascalize(job.name)}").start()
+        Thread(target=wrap, name=f"{pascalize(job.name)}").start()
         return True
 
     def trigger(self, name: str) -> bool:
@@ -97,7 +97,7 @@ class TaskScheduler:
                 break
 
             for job in next_runs:
-                self._logger.info(f"Starting job {job.name}")
+                self._logger.info(f"Starting job '{job.name}'")
                 self._run_job(job)
 
     def stop(self) -> None:
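The dropped "Run" prefix in the thread name pairs with the runtime's new log format, which now includes the process id and thread name. A minimal, self-contained illustration of how the thread name surfaces in log lines (the job name "MyJob" is a placeholder):

    import logging
    import threading

    # Same shape as the runtime's new formatter: process id + thread name.
    logging.basicConfig(
        format="%(asctime)s.%(msecs)03d UTC [%(levelname)-8s] %(process)d %(threadName)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO,
    )

    def work() -> None:
        logging.info("job running")

    # With 7.5.5 the scheduler names the thread after the pascalized job
    # name itself, so %(threadName)s prints "MyJob" instead of "RunMyJob".
    threading.Thread(target=work, name="MyJob").start()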
cognite/extractorutils/uploader/__init__.py
@@ -75,3 +75,17 @@ from .time_series import (
     TimeSeriesUploadQueue,
     default_time_series_factory,
 )
+
+__all__ = [
+    "AssetUploadQueue",
+    "EventUploadQueue",
+    "BytesUploadQueue",
+    "FileUploadQueue",
+    "IOFileUploadQueue",
+    "RawUploadQueue",
+    "DataPoint",
+    "DataPointList",
+    "SequenceUploadQueue",
+    "TimeSeriesUploadQueue",
+    "default_time_series_factory",
+]
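Defining __all__ makes the package's re-exported surface explicit and pins down what a wildcard import exposes. Explicit imports are unaffected, e.g.:

    # Only the names listed in __all__ are pulled in by a wildcard import:
    #   from cognite.extractorutils.uploader import *
    # Explicit imports work as before:
    from cognite.extractorutils.uploader import EventUploadQueue, RawUploadQueue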
cognite/extractorutils/uploader/_base.py
@@ -16,7 +16,7 @@ import logging
 import threading
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
-from typing import Any, Callable, List, Optional
+from typing import Any, Callable
 
 from arrow import Arrow
 
@@ -43,12 +43,12 @@ class AbstractUploadQueue(ABC):
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[Any]], None]] = None,
-        max_queue_size: Optional[int] = None,
-        max_upload_interval: Optional[int] = None,
+        post_upload_function: Callable[[list[Any]], None] | None = None,
+        max_queue_size: int | None = None,
+        max_upload_interval: int | None = None,
         trigger_log_level: str = "DEBUG",
-        thread_name: Optional[str] = None,
-        cancellation_token: Optional[CancellationToken] = None,
+        thread_name: str | None = None,
+        cancellation_token: CancellationToken | None = None,
     ):
         self.cdf_client = cdf_client
 
@@ -81,12 +81,12 @@ class AbstractUploadQueue(ABC):
 
         return None
 
-    def _post_upload(self, uploaded: List[Any]) -> None:
+    def _post_upload(self, uploaded: list[Any]) -> None:
         """
         Perform post_upload_function to uploaded data, if applicable
 
         Args:
-            uploaded: List of uploaded data
+            uploaded: list of uploaded data
         """
         if self.post_upload_function is not None:
             try:
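The typing changes here (and in the uploader modules below) replace typing.Optional and typing.List with PEP 604 unions and PEP 585 builtin generics. The two spellings are equivalent to a type checker, but the X | None syntax requires Python 3.10+ when evaluated at runtime (or from __future__ import annotations on older versions). A side-by-side sketch:

    from typing import Any, Callable, List, Optional

    # 7.5.4 spelling:
    old_style: Optional[Callable[[List[Any]], None]] = None

    # 7.5.5 spelling (builtin generics + union syntax):
    new_style: Callable[[list[Any]], None] | None = None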
cognite/extractorutils/uploader/assets.py
@@ -12,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Any, Callable, List, Optional, Type
+from types import TracebackType
+from typing import Any, Callable, Type
 
 from cognite.client import CogniteClient
 from cognite.client.data_classes.assets import Asset
@@ -52,12 +53,12 @@ class AssetUploadQueue(AbstractUploadQueue):
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[Any]], None]] = None,
-        max_queue_size: Optional[int] = None,
-        max_upload_interval: Optional[int] = None,
+        post_upload_function: Callable[[list[Any]], None] | None = None,
+        max_queue_size: int | None = None,
+        max_upload_interval: int | None = None,
         trigger_log_level: str = "DEBUG",
-        thread_name: Optional[str] = None,
-        cancellation_token: Optional[CancellationToken] = None,
+        thread_name: str | None = None,
+        cancellation_token: CancellationToken | None = None,
     ):
         super().__init__(
             cdf_client,
@@ -68,7 +69,7 @@ class AssetUploadQueue(AbstractUploadQueue):
             thread_name,
             cancellation_token,
         )
-        self.upload_queue: List[Asset] = []
+        self.upload_queue: list[Asset] = []
         self.assets_queued = ASSETS_UPLOADER_QUEUED
         self.assets_written = ASSETS_UPLOADER_WRITTEN
         self.queue_size = ASSETS_UPLOADER_QUEUE_SIZE
@@ -106,7 +107,7 @@ class AssetUploadQueue(AbstractUploadQueue):
                 self.cdf_client.assets.create(self.upload_queue)
             except CogniteDuplicatedError as e:
                 duplicated_ids = set([dup["externalId"] for dup in e.duplicated if "externalId" in dup])
-                failed: List[Asset] = [e for e in e.failed]
+                failed: list[Asset] = [e for e in e.failed]
                 to_create = []
                 to_update = []
                 for asset in failed:
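Only the annotation changed in this hunk, but for context: the surrounding handler implements an upsert, splitting assets that failed with CogniteDuplicatedError into creates and updates. A simplified, hypothetical sketch of that pattern (not the library's exact code):

    from cognite.client import CogniteClient
    from cognite.client.data_classes.assets import Asset
    from cognite.client.exceptions import CogniteDuplicatedError

    def upsert_assets(client: CogniteClient, assets: list[Asset]) -> None:
        # Try a plain create first; on duplicates, retry failed items as
        # updates (known externalIds) or creates (everything else).
        try:
            client.assets.create(assets)
        except CogniteDuplicatedError as e:
            duplicated = {dup["externalId"] for dup in e.duplicated if "externalId" in dup}
            to_update = [a for a in e.failed if a.external_id in duplicated]
            to_create = [a for a in e.failed if a.external_id not in duplicated]
            if to_create:
                client.assets.create(to_create)
            if to_update:
                client.assets.update(to_update)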
@@ -144,7 +145,12 @@ class AssetUploadQueue(AbstractUploadQueue):
         self.start()
         return self
 
-    def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException]) -> None:
+    def __exit__(
+        self,
+        exc_type: Type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
         """
         Wraps around stop method, for use as context manager
 
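This fix matters for context-manager use: the with statement always calls __exit__(exc_type, exc_val, exc_tb), so the old two-argument signature raised a TypeError when the block exited. A usage sketch, assuming credentials are configured for the client and that add_to_upload_queue accepts a single Asset:

    from cognite.client import CogniteClient
    from cognite.client.data_classes.assets import Asset
    from cognite.extractorutils.uploader import AssetUploadQueue

    client = CogniteClient()  # assumes ambient SDK configuration

    # __enter__ starts the periodic upload thread; __exit__ (now with the
    # full three-argument signature) flushes and stops it on the way out.
    with AssetUploadQueue(client, max_upload_interval=30) as queue:
        queue.add_to_upload_queue(Asset(external_id="example-asset", name="Example"))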
cognite/extractorutils/uploader/data_modeling.py
@@ -1,5 +1,5 @@
 from types import TracebackType
-from typing import Any, Callable, List, Optional, Type
+from typing import Any, Callable, Type
 
 from cognite.client import CogniteClient
 from cognite.client.data_classes.data_modeling import EdgeApply, NodeApply
@@ -18,12 +18,12 @@ class InstanceUploadQueue(AbstractUploadQueue):
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[Any]], None]] = None,
-        max_queue_size: Optional[int] = None,
-        max_upload_interval: Optional[int] = None,
+        post_upload_function: Callable[[list[Any]], None] | None = None,
+        max_queue_size: int | None = None,
+        max_upload_interval: int | None = None,
         trigger_log_level: str = "DEBUG",
-        thread_name: Optional[str] = None,
-        cancellation_token: Optional[CancellationToken] = None,
+        thread_name: str | None = None,
+        cancellation_token: CancellationToken | None = None,
         auto_create_start_nodes: bool = True,
         auto_create_end_nodes: bool = True,
         auto_create_direct_relations: bool = True,
@@ -42,14 +42,14 @@ class InstanceUploadQueue(AbstractUploadQueue):
         self.auto_create_end_nodes = auto_create_end_nodes
         self.auto_create_direct_relations = auto_create_direct_relations
 
-        self.node_queue: List[NodeApply] = []
-        self.edge_queue: List[EdgeApply] = []
+        self.node_queue: list[NodeApply] = []
+        self.edge_queue: list[EdgeApply] = []
 
     def add_to_upload_queue(
         self,
         *,
-        node_data: Optional[List[NodeApply]] = None,
-        edge_data: Optional[List[EdgeApply]] = None,
+        node_data: list[NodeApply] | None = None,
+        edge_data: list[EdgeApply] | None = None,
     ) -> None:
         if node_data:
             with self.lock:
@@ -100,9 +100,9 @@ class InstanceUploadQueue(AbstractUploadQueue):
 
     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: Type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         """
         Wraps around stop method, for use as context manager
cognite/extractorutils/uploader/events.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 from types import TracebackType
-from typing import Callable, List, Optional, Type
+from typing import Callable, Type
 
 from cognite.client import CogniteClient
 from cognite.client.data_classes import Event
@@ -52,12 +52,12 @@ class EventUploadQueue(AbstractUploadQueue):
     def __init__(
         self,
         cdf_client: CogniteClient,
-        post_upload_function: Optional[Callable[[List[Event]], None]] = None,
-        max_queue_size: Optional[int] = None,
-        max_upload_interval: Optional[int] = None,
+        post_upload_function: Callable[[list[Event]], None] | None = None,
+        max_queue_size: int | None = None,
+        max_upload_interval: int | None = None,
         trigger_log_level: str = "DEBUG",
-        thread_name: Optional[str] = None,
-        cancellation_token: Optional[CancellationToken] = None,
+        thread_name: str | None = None,
+        cancellation_token: CancellationToken | None = None,
     ):
         # Super sets post_upload and threshold
         super().__init__(
@@ -70,7 +70,7 @@ class EventUploadQueue(AbstractUploadQueue):
             cancellation_token,
         )
 
-        self.upload_queue: List[Event] = []
+        self.upload_queue: list[Event] = []
 
         self.events_queued = EVENTS_UPLOADER_QUEUED
         self.events_written = EVENTS_UPLOADER_WRITTEN
@@ -110,7 +110,7 @@ class EventUploadQueue(AbstractUploadQueue):
                 self.cdf_client.events.create([e for e in self.upload_queue])
             except CogniteDuplicatedError as e:
                 duplicated_ids = set([dup["externalId"] for dup in e.duplicated if "externalId" in dup])
-                failed: List[Event] = [e for e in e.failed]
+                failed: list[Event] = [e for e in e.failed]
                 to_create = []
                 to_update = []
                 for evt in failed:
@@ -151,7 +151,7 @@ class EventUploadQueue(AbstractUploadQueue):
         return self
 
     def __exit__(
-        self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
+        self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
     ) -> None:
         """
         Wraps around stop method, for use as context manager