airbyte-cdk 6.26.0.dev4107__py3-none-any.whl → 6.26.0.dev4109__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registries.
Files changed (54)
  1. airbyte_cdk/cli/source_declarative_manifest/_run.py +3 -3
  2. airbyte_cdk/connector_builder/connector_builder_handler.py +2 -2
  3. airbyte_cdk/sources/declarative/async_job/job_orchestrator.py +7 -7
  4. airbyte_cdk/sources/declarative/auth/jwt.py +17 -11
  5. airbyte_cdk/sources/declarative/auth/oauth.py +22 -13
  6. airbyte_cdk/sources/declarative/auth/token.py +3 -8
  7. airbyte_cdk/sources/declarative/auth/token_provider.py +4 -5
  8. airbyte_cdk/sources/declarative/checks/check_dynamic_stream.py +19 -9
  9. airbyte_cdk/sources/declarative/concurrent_declarative_source.py +71 -34
  10. airbyte_cdk/sources/declarative/declarative_component_schema.yaml +33 -4
  11. airbyte_cdk/sources/declarative/declarative_stream.py +3 -1
  12. airbyte_cdk/sources/declarative/incremental/concurrent_partition_cursor.py +93 -27
  13. airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +7 -6
  14. airbyte_cdk/sources/declarative/manifest_declarative_source.py +5 -3
  15. airbyte_cdk/sources/declarative/models/declarative_component_schema.py +22 -5
  16. airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +138 -38
  17. airbyte_cdk/sources/declarative/partition_routers/async_job_partition_router.py +5 -5
  18. airbyte_cdk/sources/declarative/partition_routers/list_partition_router.py +4 -2
  19. airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py +49 -25
  20. airbyte_cdk/sources/declarative/requesters/error_handlers/http_response_filter.py +4 -4
  21. airbyte_cdk/sources/declarative/requesters/http_requester.py +5 -1
  22. airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py +6 -5
  23. airbyte_cdk/sources/declarative/requesters/request_option.py +83 -4
  24. airbyte_cdk/sources/declarative/requesters/request_options/datetime_based_request_options_provider.py +7 -6
  25. airbyte_cdk/sources/declarative/retrievers/async_retriever.py +6 -12
  26. airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +4 -1
  27. airbyte_cdk/sources/declarative/schema/__init__.py +2 -0
  28. airbyte_cdk/sources/declarative/schema/dynamic_schema_loader.py +44 -5
  29. airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py +1 -17
  30. airbyte_cdk/sources/file_based/config/validate_config_transfer_modes.py +20 -9
  31. airbyte_cdk/sources/file_based/file_based_source.py +9 -6
  32. airbyte_cdk/sources/file_based/file_based_stream_reader.py +28 -2
  33. airbyte_cdk/sources/file_based/schema_helpers.py +0 -25
  34. airbyte_cdk/sources/file_based/stream/__init__.py +2 -2
  35. airbyte_cdk/sources/file_based/stream/default_file_based_stream.py +2 -3
  36. airbyte_cdk/sources/file_based/stream/identities_stream.py +8 -57
  37. airbyte_cdk/sources/specs/transfer_modes.py +26 -0
  38. airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py +22 -13
  39. airbyte_cdk/sources/streams/core.py +6 -6
  40. airbyte_cdk/sources/streams/http/http.py +1 -2
  41. airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py +231 -62
  42. airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py +166 -83
  43. airbyte_cdk/sources/streams/permissions/identities.py +77 -0
  44. airbyte_cdk/sources/types.py +4 -2
  45. airbyte_cdk/sources/utils/transform.py +23 -2
  46. airbyte_cdk/utils/datetime_helpers.py +499 -0
  47. airbyte_cdk/utils/mapping_helpers.py +86 -27
  48. airbyte_cdk/utils/slice_hasher.py +8 -1
  49. airbyte_cdk-6.26.0.dev4109.dist-info/LICENSE_SHORT +1 -0
  50. {airbyte_cdk-6.26.0.dev4107.dist-info → airbyte_cdk-6.26.0.dev4109.dist-info}/METADATA +5 -5
  51. {airbyte_cdk-6.26.0.dev4107.dist-info → airbyte_cdk-6.26.0.dev4109.dist-info}/RECORD +54 -50
  52. {airbyte_cdk-6.26.0.dev4107.dist-info → airbyte_cdk-6.26.0.dev4109.dist-info}/WHEEL +1 -1
  53. {airbyte_cdk-6.26.0.dev4107.dist-info → airbyte_cdk-6.26.0.dev4109.dist-info}/LICENSE.txt +0 -0
  54. {airbyte_cdk-6.26.0.dev4107.dist-info → airbyte_cdk-6.26.0.dev4109.dist-info}/entry_points.txt +0 -0
airbyte_cdk/sources/file_based/file_based_source.py +9 -6

@@ -58,7 +58,7 @@ from airbyte_cdk.sources.file_based.schema_validation_policies import (
 from airbyte_cdk.sources.file_based.stream import (
     AbstractFileBasedStream,
     DefaultFileBasedStream,
-    IdentitiesStream,
+    FileIdentities,
 )
 from airbyte_cdk.sources.file_based.stream.concurrent.adapters import FileBasedStreamFacade
 from airbyte_cdk.sources.file_based.stream.concurrent.cursor import (
@@ -67,7 +67,6 @@ from airbyte_cdk.sources.file_based.stream.concurrent.cursor import (
     FileBasedFinalStateCursor,
 )
 from airbyte_cdk.sources.file_based.stream.cursor import AbstractFileBasedCursor
-from airbyte_cdk.sources.file_based.stream.identities_stream import IDENTITIES_STREAM_NAME
 from airbyte_cdk.sources.message.repository import InMemoryMessageRepository, MessageRepository
 from airbyte_cdk.sources.streams import Stream
 from airbyte_cdk.sources.streams.concurrent.cursor import CursorField
@@ -169,8 +168,12 @@ class FileBasedSource(ConcurrentSourceAdapter, ABC):
         errors = []
         tracebacks = []
         for stream in streams:
-            if isinstance(stream, IdentitiesStream):
-                # Probably need to check identities endpoint/api access but will skip for now.
+            if isinstance(stream, FileIdentities):
+                identity = next(iter(stream.load_identity_groups()))
+                if not identity:
+                    errors.append(
+                        "Unable to get identities for current configuration, please check your credentials"
+                    )
                 continue
             if not isinstance(stream, AbstractFileBasedStream):
                 raise ValueError(f"Stream {stream} is not a file-based stream.")
@@ -337,8 +340,8 @@ class FileBasedSource(ConcurrentSourceAdapter, ABC):
     def _make_identities_stream(
         self,
     ) -> Stream:
-        return IdentitiesStream(
-            catalog_schema=self.stream_schemas.get(IDENTITIES_STREAM_NAME),
+        return FileIdentities(
+            catalog_schema=self.stream_schemas.get(FileIdentities.IDENTITIES_STREAM_NAME),
             stream_reader=self.stream_reader,
             discovery_policy=self.discovery_policy,
             errors_collector=self.errors_collector,
airbyte_cdk/sources/file_based/file_based_stream_reader.py +28 -2

@@ -191,7 +191,9 @@ class AbstractFileBasedStreamReader(ABC):
         This is required for connectors that will support syncing
         ACL Permissions from files.
         """
-        ...
+        raise NotImplementedError(
+            f"{self.__class__.__name__} does not implement get_file_acl_permissions(). To support ACL permissions, implement this method and update file_permissions_schema."
+        )

     @abstractmethod
     def load_identity_groups(self, logger: logging.Logger) -> Iterable[Dict[str, Any]]:
@@ -199,4 +201,28 @@ class AbstractFileBasedStreamReader(ABC):
         This is required for connectors that will support syncing
         identities.
         """
-        ...
+        raise NotImplementedError(
+            f"{self.__class__.__name__} does not implement load_identity_groups(). To support identities, implement this method and update identities_schema."
+        )
+
+    @property
+    @abstractmethod
+    def file_permissions_schema(self) -> Dict[str, Any]:
+        """
+        This is required for connectors that will support syncing
+        ACL Permissions from files.
+        """
+        raise NotImplementedError(
+            f"{self.__class__.__name__} does not implement file_permissions_schema, please return json schema for your permissions streams."
+        )
+
+    @property
+    @abstractmethod
+    def identities_schema(self) -> Dict[str, Any]:
+        """
+        This is required for connectors that will support syncing
+        identities.
+        """
+        raise NotImplementedError(
+            f"{self.__class__.__name__} does not implement identities_schema, please return json schema for your identities stream."
+        )
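
For context, a minimal connector-side sketch (not part of this diff) of how a stream reader might satisfy the new abstract members above. The class name and the example schemas are illustrative; the schemas mirror the remote_file_permissions_schema / remote_file_identity_schema dictionaries removed from schema_helpers.py below. The reader's other abstract members (e.g. open_file, get_matching_files) are omitted, so the class is not instantiable as written.

import logging
from typing import Any, Dict, Iterable

from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader


class ExampleStreamReader(AbstractFileBasedStreamReader):
    # Hypothetical reader: only the members introduced in this diff are shown.

    @property
    def file_permissions_schema(self) -> Dict[str, Any]:
        # JSON schema for the records returned by get_file_acl_permissions().
        return {
            "type": "object",
            "properties": {
                "id": {"type": "string"},
                "file_path": {"type": "string"},
                "allowed_identity_remote_ids": {"type": "array", "items": {"type": "string"}},
                "publicly_accessible": {"type": "boolean"},
            },
        }

    @property
    def identities_schema(self) -> Dict[str, Any]:
        # JSON schema for the records yielded by load_identity_groups().
        return {
            "type": "object",
            "properties": {"id": {"type": "string"}, "remote_id": {"type": "string"}},
        }

    def load_identity_groups(self, logger: logging.Logger) -> Iterable[Dict[str, Any]]:
        # A real connector would page through the source's users/groups API here.
        yield {"id": "1", "remote_id": "user@example.com"}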
airbyte_cdk/sources/file_based/schema_helpers.py +0 -25

@@ -23,31 +23,6 @@ file_transfer_schema = {
     "properties": {"data": {"type": "object"}, "file": {"type": "object"}},
 }

-remote_file_permissions_schema = {
-    "type": "object",
-    "properties": {
-        "id": {"type": "string"},
-        "file_path": {"type": "string"},
-        "allowed_identity_remote_ids": {"type": "array", "items": {"type": "string"}},
-        "publicly_accessible": {"type": "boolean"},
-    },
-}
-
-remote_file_identity_schema = {
-    "type": "object",
-    "properties": {
-        "id": {"type": "string"},
-        "remote_id": {"type": "string"},
-        "parent_id": {"type": ["null", "string"]},
-        "name": {"type": ["null", "string"]},
-        "description": {"type": ["null", "string"]},
-        "email_address": {"type": ["null", "string"]},
-        "member_email_addresses": {"type": ["null", "array"]},
-        "type": {"type": "string"},
-        "modified_at": {"type": "string"},
-    },
-}
-

 @total_ordering
 class ComparableType(Enum):
airbyte_cdk/sources/file_based/stream/__init__.py +2 -2

@@ -1,5 +1,5 @@
 from airbyte_cdk.sources.file_based.stream.abstract_file_based_stream import AbstractFileBasedStream
 from airbyte_cdk.sources.file_based.stream.default_file_based_stream import DefaultFileBasedStream
-from airbyte_cdk.sources.file_based.stream.identities_stream import IdentitiesStream
+from airbyte_cdk.sources.file_based.stream.identities_stream import FileIdentities

-__all__ = ["AbstractFileBasedStream", "DefaultFileBasedStream", "IdentitiesStream"]
+__all__ = ["AbstractFileBasedStream", "DefaultFileBasedStream", "FileIdentities"]
airbyte_cdk/sources/file_based/stream/default_file_based_stream.py +2 -3

@@ -29,7 +29,6 @@ from airbyte_cdk.sources.file_based.schema_helpers import (
     SchemaType,
     file_transfer_schema,
     merge_schemas,
-    remote_file_permissions_schema,
     schemaless_schema,
 )
 from airbyte_cdk.sources.file_based.stream import AbstractFileBasedStream
@@ -111,7 +110,7 @@ class DefaultFileBasedStream(AbstractFileBasedStream, IncrementalMixin):
                 },
             }
         elif self.use_permissions_transfer:
-            return remote_file_permissions_schema
+            return self.stream_reader.file_permissions_schema
         else:
             return super()._filter_schema_invalid_properties(configured_catalog_json_schema)

@@ -315,7 +314,7 @@ class DefaultFileBasedStream(AbstractFileBasedStream, IncrementalMixin):
         if self.use_file_transfer:
             return file_transfer_schema
         elif self.use_permissions_transfer:
-            return remote_file_permissions_schema
+            return self.stream_reader.file_permissions_schema
         elif self.config.input_schema:
             return self.config.get_input_schema()  # type: ignore
         elif self.config.schemaless:
airbyte_cdk/sources/file_based/stream/identities_stream.py +8 -57

@@ -2,30 +2,18 @@
 # Copyright (c) 2024 Airbyte, Inc., all rights reserved.
 #

-import traceback
 from functools import cache
-from typing import Any, Iterable, List, Mapping, MutableMapping, Optional
+from typing import Any, Dict, Iterable, Mapping, MutableMapping, Optional

-from airbyte_protocol_dataclasses.models import SyncMode
-
-from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level
-from airbyte_cdk.models import Type as MessageType
 from airbyte_cdk.sources.file_based.config.file_based_stream_config import PrimaryKeyType
 from airbyte_cdk.sources.file_based.discovery_policy import AbstractDiscoveryPolicy
-from airbyte_cdk.sources.file_based.exceptions import FileBasedErrorsCollector, FileBasedSourceError
+from airbyte_cdk.sources.file_based.exceptions import FileBasedErrorsCollector
 from airbyte_cdk.sources.file_based.file_based_stream_reader import AbstractFileBasedStreamReader
-from airbyte_cdk.sources.file_based.schema_helpers import remote_file_identity_schema
-from airbyte_cdk.sources.file_based.types import StreamSlice
-from airbyte_cdk.sources.streams import Stream
-from airbyte_cdk.sources.streams.checkpoint import Cursor
 from airbyte_cdk.sources.streams.core import JsonSchema
-from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message
-from airbyte_cdk.utils.traced_exception import AirbyteTracedException
-
-IDENTITIES_STREAM_NAME = "identities"
+from airbyte_cdk.sources.streams.permissions.identities import Identities


-class IdentitiesStream(Stream):
+class FileIdentities(Identities):
     """
     The identities stream. A full refresh stream to sync identities from a certain domain.
     The stream reader manage the logic to get such data, which is implemented on connector side.
@@ -39,7 +27,7 @@ class IdentitiesStream(Stream):
         stream_reader: AbstractFileBasedStreamReader,
         discovery_policy: AbstractDiscoveryPolicy,
         errors_collector: FileBasedErrorsCollector,
-    ):
+    ) -> None:
         super().__init__()
         self.catalog_schema = catalog_schema
         self.stream_reader = stream_reader
@@ -47,50 +35,13 @@ class IdentitiesStream(Stream):
         self.errors_collector = errors_collector
         self._cursor: MutableMapping[str, Any] = {}

-    @property
-    def state(self) -> MutableMapping[str, Any]:
-        return self._cursor
-
-    @state.setter
-    def state(self, value: MutableMapping[str, Any]) -> None:
-        """State setter, accept state serialized by state getter."""
-        self._cursor = value
-
     @property
     def primary_key(self) -> PrimaryKeyType:
         return None

-    def read_records(
-        self,
-        sync_mode: SyncMode,
-        cursor_field: Optional[List[str]] = None,
-        stream_slice: Optional[StreamSlice] = None,
-        stream_state: Optional[Mapping[str, Any]] = None,
-    ) -> Iterable[Mapping[str, Any] | AirbyteMessage]:
-        try:
-            identity_groups = self.stream_reader.load_identity_groups(logger=self.logger)
-            for record in identity_groups:
-                yield stream_data_to_airbyte_message(self.name, record)
-        except AirbyteTracedException as exc:
-            # Re-raise the exception to stop the whole sync immediately as this is a fatal error
-            raise exc
-        except Exception:
-            yield AirbyteMessage(
-                type=MessageType.LOG,
-                log=AirbyteLogMessage(
-                    level=Level.ERROR,
-                    message=f"{FileBasedSourceError.ERROR_PARSING_RECORD.value} stream={self.name}",
-                    stack_trace=traceback.format_exc(),
-                ),
-            )
+    def load_identity_groups(self) -> Iterable[Dict[str, Any]]:
+        return self.stream_reader.load_identity_groups(logger=self.logger)

     @cache
     def get_json_schema(self) -> JsonSchema:
-        return remote_file_identity_schema
-
-    @property
-    def name(self) -> str:
-        return IDENTITIES_STREAM_NAME
-
-    def get_cursor(self) -> Optional[Cursor]:
-        return None
+        return self.stream_reader.identities_schema
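
The new Identities base class lives in airbyte_cdk/sources/streams/permissions/identities.py (added in this release; its body is not shown in this section). Assuming its abstract surface matches what FileIdentities overrides above (load_identity_groups and get_json_schema), a non-file connector could expose an identities stream the same way. This is a hypothetical sketch, not part of the diff:

from typing import Any, Dict, Iterable

from airbyte_cdk.sources.streams.core import JsonSchema
from airbyte_cdk.sources.streams.permissions.identities import Identities


class ExampleApiIdentities(Identities):
    # Hypothetical: any further abstract members of the base class are omitted here.

    def load_identity_groups(self) -> Iterable[Dict[str, Any]]:
        # A real connector would page through its users/groups API here.
        yield {"id": "1", "remote_id": "user@example.com", "type": "user"}

    def get_json_schema(self) -> JsonSchema:
        return {
            "type": "object",
            "properties": {
                "id": {"type": "string"},
                "remote_id": {"type": "string"},
                "type": {"type": "string"},
            },
        }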
airbyte_cdk/sources/specs/transfer_modes.py +26 -0 (new file)

@@ -0,0 +1,26 @@
+#
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+#
+
+from typing import Literal
+
+from pydantic.v1 import AnyUrl, BaseModel, Field
+
+from airbyte_cdk import OneOfOptionConfig
+
+
+class DeliverPermissions(BaseModel):
+    class Config(OneOfOptionConfig):
+        title = "Replicate Permissions ACL"
+        description = "Sends one identity stream and one for more permissions (ACL) streams to the destination. This data can be used in downstream systems to recreate permission restrictions mirroring the original source."
+        discriminator = "delivery_type"
+
+    delivery_type: Literal["use_permissions_transfer"] = Field(
+        "use_permissions_transfer", const=True
+    )
+
+    include_identities_stream: bool = Field(
+        title="Include Identity Stream",
+        description="This data can be used in downstream systems to recreate permission restrictions mirroring the original source",
+        default=True,
+    )
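
DeliverPermissions is a plain pydantic (v1) spec fragment, so a connector can surface it as a delivery-mode option and read the flags off the parsed config. How a given connector wires it into its spec is not shown in this section; the sketch below only exercises the model exactly as defined above:

from airbyte_cdk.sources.specs.transfer_modes import DeliverPermissions

mode = DeliverPermissions()                       # delivery_type is fixed via const=True
print(mode.delivery_type)                         # "use_permissions_transfer"
print(mode.include_identities_stream)             # True by default
print(DeliverPermissions.schema()["properties"])  # JSON-schema fragment a connector spec would embed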
airbyte_cdk/sources/streams/concurrent/state_converters/datetime_stream_state_converter.py +22 -13

@@ -6,9 +6,6 @@ from abc import abstractmethod
 from datetime import datetime, timedelta, timezone
 from typing import Any, Callable, List, MutableMapping, Optional, Tuple

-import pendulum
-from pendulum.datetime import DateTime
-
 # FIXME We would eventually like the Concurrent package do be agnostic of the declarative package. However, this is a breaking change and
 # the goal in the short term is only to fix the issue we are seeing for source-declarative-manifest.
 from airbyte_cdk.sources.declarative.datetime.datetime_parser import DatetimeParser
@@ -17,6 +14,7 @@ from airbyte_cdk.sources.streams.concurrent.state_converters.abstract_stream_sta
     AbstractStreamStateConverter,
     ConcurrencyCompatibleStateType,
 )
+from airbyte_cdk.utils.datetime_helpers import AirbyteDateTime, ab_datetime_now, ab_datetime_parse


 class DateTimeStreamStateConverter(AbstractStreamStateConverter):
@@ -36,7 +34,7 @@ class DateTimeStreamStateConverter(AbstractStreamStateConverter):

     @classmethod
     def get_end_provider(cls) -> Callable[[], datetime]:
-        return lambda: datetime.now(timezone.utc)
+        return ab_datetime_now

     @abstractmethod
     def increment(self, timestamp: datetime) -> datetime: ...
@@ -136,10 +134,10 @@ class EpochValueConcurrentStreamStateConverter(DateTimeStreamStateConverter):
         return int(timestamp.timestamp())

     def parse_timestamp(self, timestamp: int) -> datetime:
-        dt_object = pendulum.from_timestamp(timestamp)
-        if not isinstance(dt_object, DateTime):
+        dt_object = AirbyteDateTime.fromtimestamp(timestamp, timezone.utc)
+        if not isinstance(dt_object, AirbyteDateTime):
             raise ValueError(
-                f"DateTime object was expected but got {type(dt_object)} from pendulum.parse({timestamp})"
+                f"AirbyteDateTime object was expected but got {type(dt_object)} from AirbyteDateTime.fromtimestamp({timestamp})"
             )
         return dt_object

@@ -169,14 +167,25 @@ class IsoMillisConcurrentStreamStateConverter(DateTimeStreamStateConverter):
     def increment(self, timestamp: datetime) -> datetime:
         return timestamp + self._cursor_granularity

-    def output_format(self, timestamp: datetime) -> Any:
-        return timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
+    def output_format(self, timestamp: datetime) -> str:
+        """Format datetime with milliseconds always included.
+
+        Args:
+            timestamp: The datetime to format.
+
+        Returns:
+            str: ISO8601/RFC3339 formatted string with milliseconds.
+        """
+        dt = AirbyteDateTime.from_datetime(timestamp)
+        # Always include milliseconds, even if zero
+        millis = dt.microsecond // 1000 if dt.microsecond else 0
+        return f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d}T{dt.hour:02d}:{dt.minute:02d}:{dt.second:02d}.{millis:03d}Z"

     def parse_timestamp(self, timestamp: str) -> datetime:
-        dt_object = pendulum.parse(timestamp)
-        if not isinstance(dt_object, DateTime):
+        dt_object = ab_datetime_parse(timestamp)
+        if not isinstance(dt_object, AirbyteDateTime):
             raise ValueError(
-                f"DateTime object was expected but got {type(dt_object)} from pendulum.parse({timestamp})"
+                f"AirbyteDateTime object was expected but got {type(dt_object)} from parse({timestamp})"
             )
         return dt_object

@@ -184,7 +193,7 @@ class IsoMillisConcurrentStreamStateConverter(DateTimeStreamStateConverter):
 class CustomFormatConcurrentStreamStateConverter(IsoMillisConcurrentStreamStateConverter):
     """
     Datetime State converter that emits state according to the supplied datetime format. The converter supports reading
-    incoming state in any valid datetime format via Pendulum.
+    incoming state in any valid datetime format using AirbyteDateTime parsing utilities.
     """

     def __init__(
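
For orientation, a minimal sketch (not part of this diff) of the pendulum-to-AirbyteDateTime swap above. It uses only the helpers that appear in these hunks (ab_datetime_parse, ab_datetime_now, AirbyteDateTime) and repeats the millisecond rule from output_format(); the sample values are illustrative:

from airbyte_cdk.utils.datetime_helpers import AirbyteDateTime, ab_datetime_now, ab_datetime_parse

dt = ab_datetime_parse("2024-01-01T12:30:45Z")  # AirbyteDateTime, replacing pendulum.parse
now = ab_datetime_now()                          # replaces lambda: datetime.now(timezone.utc)

# Same millisecond formatting rule as IsoMillisConcurrentStreamStateConverter.output_format():
millis = dt.microsecond // 1000 if dt.microsecond else 0
print(f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d}T{dt.hour:02d}:{dt.minute:02d}:{dt.second:02d}.{millis:03d}Z")
# -> 2024-01-01T12:30:45.000Z
print(now)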
airbyte_cdk/sources/streams/core.py +6 -6

@@ -223,17 +223,17 @@ class Stream(ABC):
                     record_counter += 1

                     checkpoint_interval = self.state_checkpoint_interval
-                    checkpoint = checkpoint_reader.get_checkpoint()
                     if (
                         should_checkpoint
                         and checkpoint_interval
                         and record_counter % checkpoint_interval == 0
-                        and checkpoint is not None
                     ):
-                        airbyte_state_message = self._checkpoint_state(
-                            checkpoint, state_manager=state_manager
-                        )
-                        yield airbyte_state_message
+                        checkpoint = checkpoint_reader.get_checkpoint()
+                        if checkpoint:
+                            airbyte_state_message = self._checkpoint_state(
+                                checkpoint, state_manager=state_manager
+                            )
+                            yield airbyte_state_message

                     if internal_config.is_limit_reached(record_counter):
                         break
airbyte_cdk/sources/streams/http/http.py +1 -2

@@ -423,8 +423,6 @@ class HttpStream(Stream, CheckpointMixin, ABC):
         stream_slice: Optional[Mapping[str, Any]] = None,
         stream_state: Optional[Mapping[str, Any]] = None,
     ) -> Iterable[StreamData]:
-        partition, _, _ = self._extract_slice_fields(stream_slice=stream_slice)
-
         stream_state = stream_state or {}
         pagination_complete = False
         next_page_token = None
@@ -438,6 +436,7 @@ class HttpStream(Stream, CheckpointMixin, ABC):

         cursor = self.get_cursor()
         if cursor and isinstance(cursor, SubstreamResumableFullRefreshCursor):
+            partition, _, _ = self._extract_slice_fields(stream_slice=stream_slice)
             # Substreams checkpoint state by marking an entire parent partition as completed so that on the subsequent attempt
             # after a failure, completed parents are skipped and the sync can make progress
             cursor.close_slice(StreamSlice(cursor_slice={}, partition=partition))