airbyte-cdk 6.48.7.dev2__py3-none-any.whl → 6.48.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. airbyte_cdk/cli/airbyte_cdk/_connector.py +18 -20
  2. airbyte_cdk/cli/airbyte_cdk/_image.py +16 -18
  3. airbyte_cdk/cli/airbyte_cdk/_secrets.py +14 -33
  4. airbyte_cdk/destinations/destination.py +50 -78
  5. airbyte_cdk/models/__init__.py +0 -4
  6. airbyte_cdk/models/airbyte_protocol_serializers.py +3 -2
  7. airbyte_cdk/sources/declarative/models/base_model_with_deprecations.py +6 -1
  8. airbyte_cdk/sources/declarative/schema/dynamic_schema_loader.py +12 -49
  9. airbyte_cdk/test/catalog_builder.py +1 -9
  10. airbyte_cdk/test/entrypoint_wrapper.py +4 -0
  11. airbyte_cdk/test/mock_http/request.py +1 -5
  12. airbyte_cdk/test/standard_tests/_job_runner.py +9 -6
  13. airbyte_cdk/test/standard_tests/connector_base.py +22 -15
  14. airbyte_cdk/test/standard_tests/declarative_sources.py +8 -4
  15. airbyte_cdk/test/standard_tests/models/scenario.py +14 -3
  16. airbyte_cdk/test/standard_tests/source_base.py +24 -0
  17. airbyte_cdk/test/standard_tests/util.py +1 -1
  18. airbyte_cdk/utils/connector_paths.py +223 -0
  19. airbyte_cdk/utils/docker.py +116 -29
  20. {airbyte_cdk-6.48.7.dev2.dist-info → airbyte_cdk-6.48.8.dist-info}/METADATA +2 -2
  21. {airbyte_cdk-6.48.7.dev2.dist-info → airbyte_cdk-6.48.8.dist-info}/RECORD +25 -27
  22. airbyte_cdk/cli/airbyte_cdk/_util.py +0 -69
  23. airbyte_cdk/test/standard_tests/test_resources.py +0 -69
  24. airbyte_cdk/utils/docker_image_templates.py +0 -136
  25. {airbyte_cdk-6.48.7.dev2.dist-info → airbyte_cdk-6.48.8.dist-info}/LICENSE.txt +0 -0
  26. {airbyte_cdk-6.48.7.dev2.dist-info → airbyte_cdk-6.48.8.dist-info}/LICENSE_SHORT +0 -0
  27. {airbyte_cdk-6.48.7.dev2.dist-info → airbyte_cdk-6.48.8.dist-info}/WHEEL +0 -0
  28. {airbyte_cdk-6.48.7.dev2.dist-info → airbyte_cdk-6.48.8.dist-info}/entry_points.txt +0 -0
@@ -44,11 +44,14 @@ from types import ModuleType
44
44
 
45
45
  import rich_click as click
46
46
 
47
- # from airbyte_cdk.test.standard_tests import pytest_hooks
48
- from airbyte_cdk.cli.airbyte_cdk._util import resolve_connector_name_and_directory
49
- from airbyte_cdk.test.standard_tests.test_resources import find_connector_root_from_name
50
47
  from airbyte_cdk.test.standard_tests.util import create_connector_test_suite
51
48
 
49
+ # from airbyte_cdk.test.standard_tests import pytest_hooks
50
+ from airbyte_cdk.utils.connector_paths import (
51
+ find_connector_root_from_name,
52
+ resolve_connector_name_and_directory,
53
+ )
54
+
52
55
  click.rich_click.TEXT_MARKUP = "markdown"
53
56
 
54
57
  pytest: ModuleType | None
@@ -63,7 +66,7 @@ except ImportError:
63
66
 
64
67
  TEST_FILE_TEMPLATE = '''
65
68
  # Copyright (c) 2025 Airbyte, Inc., all rights reserved.
66
- """FAST Airbyte Standard Tests for the source_pokeapi_w_components source."""
69
+ """FAST Airbyte Standard Tests for the {connector_name} source."""
67
70
 
68
71
  #from airbyte_cdk.test.standard_tests import {base_class_name}
69
72
  from airbyte_cdk.test.standard_tests.util import create_connector_test_suite
@@ -78,7 +81,7 @@ TestSuite = create_connector_test_suite(
78
81
  )
79
82
 
80
83
  # class TestSuite({base_class_name}):
81
- # """Test suite for the source_pokeapi_w_components source.
84
+ # """Test suite for the {connector_name} source.
82
85
 
83
86
  # This class inherits from SourceTestSuiteBase and implements all of the tests in the suite.
84
87
 
@@ -98,15 +101,11 @@ def connector_cli_group() -> None:
98
101
 
99
102
 
100
103
  @connector_cli_group.command()
101
- @click.option(
102
- "--connector-name",
104
+ @click.argument(
105
+ "connector",
106
+ required=False,
103
107
  type=str,
104
- help="Name of the connector to test. Ignored if --connector-directory is provided.",
105
- )
106
- @click.option(
107
- "--connector-directory",
108
- type=click.Path(exists=True, file_okay=False, path_type=Path),
109
- help="Path to the connector directory.",
108
+ metavar="[CONNECTOR]",
110
109
  )
111
110
  @click.option(
112
111
  "--collect-only",
@@ -115,8 +114,7 @@ def connector_cli_group() -> None:
115
114
  help="Only collect tests, do not run them.",
116
115
  )
117
116
  def test(
118
- connector_name: str | None = None,
119
- connector_directory: Path | None = None,
117
+ connector: str | Path | None = None,
120
118
  *,
121
119
  collect_only: bool = False,
122
120
  ) -> None:
@@ -124,6 +122,9 @@ def test(
124
122
 
125
123
  This command runs the standard connector tests for a specific connector.
126
124
 
125
+ [CONNECTOR] can be a connector name (e.g. 'source-pokeapi'), a path to a connector directory, or omitted to use the current working directory.
126
+ If a string containing '/' is provided, it is treated as a path. Otherwise, it is treated as a connector name.
127
+
127
128
  If no connector name or directory is provided, we will look within the current working
128
129
  directory. If the current working directory is not a connector directory (e.g. starting
129
130
  with 'source-') and no connector name or path is provided, the process will fail.
@@ -133,10 +134,7 @@ def test(
133
134
  "pytest is not installed. Please install pytest to run the connector tests."
134
135
  )
135
136
  click.echo("Connector test command executed.")
136
- connector_name, connector_directory = resolve_connector_name_and_directory(
137
- connector_name=connector_name,
138
- connector_directory=connector_directory,
139
- )
137
+ connector_name, connector_directory = resolve_connector_name_and_directory(connector)
140
138
 
141
139
  connector_test_suite = create_connector_test_suite(
142
140
  connector_name=connector_name if not connector_directory else None,
@@ -152,7 +150,7 @@ def test(
152
150
 
153
151
  file_text = TEST_FILE_TEMPLATE.format(
154
152
  base_class_name=connector_test_suite.__bases__[0].__name__,
155
- connector_directory=str(connector_directory),
153
+ connector_name=connector_name,
156
154
  )
157
155
  test_file_path = Path() / ".tmp" / "integration_tests/test_airbyte_standards.py"
158
156
  test_file_path = test_file_path.resolve().absolute()
@@ -10,8 +10,8 @@ from pathlib import Path
10
10
 
11
11
  import rich_click as click
12
12
 
13
- from airbyte_cdk.cli.airbyte_cdk._util import resolve_connector_name_and_directory
14
13
  from airbyte_cdk.models.connector_metadata import MetadataFile
14
+ from airbyte_cdk.utils.connector_paths import resolve_connector_name_and_directory
15
15
  from airbyte_cdk.utils.docker import (
16
16
  ConnectorImageBuildError,
17
17
  build_connector_image,
@@ -28,30 +28,30 @@ def image_cli_group() -> None:
28
28
 
29
29
 
30
30
  @image_cli_group.command()
31
- @click.option(
32
- "--connector-name",
31
+ @click.argument(
32
+ "connector",
33
+ required=False,
33
34
  type=str,
34
- help="Name of the connector to test. Ignored if --connector-directory is provided.",
35
- )
36
- @click.option(
37
- "--connector-directory",
38
- type=click.Path(exists=True, file_okay=False, path_type=Path),
39
- help="Path to the connector directory.",
35
+ metavar="[CONNECTOR]",
40
36
  )
41
37
  @click.option("--tag", default="dev", help="Tag to apply to the built image (default: dev)")
42
38
  @click.option("--no-verify", is_flag=True, help="Skip verification of the built image")
39
+ @click.option(
40
+ "--dockerfile",
41
+ type=click.Path(exists=True, file_okay=True, path_type=Path),
42
+ help="Optional. Override the Dockerfile used for building the image.",
43
+ )
43
44
  def build(
44
- connector_name: str | None = None,
45
- connector_directory: Path | None = None,
45
+ connector: str | None = None,
46
46
  *,
47
47
  tag: str = "dev",
48
48
  no_verify: bool = False,
49
+ dockerfile: Path | None = None,
49
50
  ) -> None:
50
51
  """Build a connector Docker image.
51
52
 
52
- This command builds a Docker image for a connector, using either
53
- the connector's Dockerfile or a base image specified in the metadata.
54
- The image is built for both AMD64 and ARM64 architectures.
53
+ [CONNECTOR] can be a connector name (e.g. 'source-pokeapi'), a path to a connector directory, or omitted to use the current working directory.
54
+ If a string containing '/' is provided, it is treated as a path. Otherwise, it is treated as a connector name.
55
55
  """
56
56
  if not verify_docker_installation():
57
57
  click.echo(
@@ -59,10 +59,7 @@ def build(
59
59
  )
60
60
  sys.exit(1)
61
61
 
62
- connector_name, connector_directory = resolve_connector_name_and_directory(
63
- connector_name=connector_name,
64
- connector_directory=connector_directory,
65
- )
62
+ connector_name, connector_directory = resolve_connector_name_and_directory(connector)
66
63
 
67
64
  metadata_file_path: Path = connector_directory / "metadata.yaml"
68
65
  try:
@@ -81,6 +78,7 @@ def build(
81
78
  metadata=metadata,
82
79
  tag=tag,
83
80
  no_verify=no_verify,
81
+ dockerfile_override=dockerfile or None,
84
82
  )
85
83
  except ConnectorImageBuildError as e:
86
84
  click.echo(
@@ -43,7 +43,7 @@ from click import style
43
43
  from rich.console import Console
44
44
  from rich.table import Table
45
45
 
46
- from airbyte_cdk.cli.airbyte_cdk._util import (
46
+ from airbyte_cdk.utils.connector_paths import (
47
47
  resolve_connector_name,
48
48
  resolve_connector_name_and_directory,
49
49
  )
@@ -73,15 +73,11 @@ def secrets_cli_group() -> None:
73
73
 
74
74
 
75
75
  @secrets_cli_group.command()
76
- @click.option(
77
- "--connector-name",
76
+ @click.argument(
77
+ "connector",
78
+ required=False,
78
79
  type=str,
79
- help="Name of the connector to fetch secrets for. Ignored if --connector-directory is provided.",
80
- )
81
- @click.option(
82
- "--connector-directory",
83
- type=click.Path(exists=True, file_okay=False, path_type=Path),
84
- help="Path to the connector directory.",
80
+ metavar="[CONNECTOR]",
85
81
  )
86
82
  @click.option(
87
83
  "--gcp-project-id",
@@ -97,8 +93,7 @@ def secrets_cli_group() -> None:
97
93
  default=False,
98
94
  )
99
95
  def fetch(
100
- connector_name: str | None = None,
101
- connector_directory: Path | None = None,
96
+ connector: str | Path | None = None,
102
97
  gcp_project_id: str = AIRBYTE_INTERNAL_GCP_PROJECT,
103
98
  print_ci_secrets_masks: bool = False,
104
99
  ) -> None:
@@ -107,6 +102,9 @@ def fetch(
107
102
  This command fetches secrets for a connector from Google Secret Manager and writes them
108
103
  to the connector's secrets directory.
109
104
 
105
+ [CONNECTOR] can be a connector name (e.g. 'source-pokeapi'), a path to a connector directory, or omitted to use the current working directory.
106
+ If a string containing '/' is provided, it is treated as a path. Otherwise, it is treated as a connector name.
107
+
110
108
  If no connector name or directory is provided, we will look within the current working
111
109
  directory. If the current working directory is not a connector directory (e.g. starting
112
110
  with 'source-') and no connector name or path is provided, the process will fail.
@@ -114,17 +112,14 @@ def fetch(
114
112
  The `--print-ci-secrets-masks` option will print the GitHub CI mask for the secrets.
115
113
  This is useful for masking secrets in CI logs.
116
114
 
117
- WARNING: This action causes the secrets to be printed in clear text to `STDOUT`. For security
118
- reasons, this function will only execute if the `CI` environment variable is set. Otherwise,
119
- masks will not be printed.
115
+ WARNING: The `--print-ci-secrets-masks` option causes the secrets to be printed in clear text to
116
+ `STDOUT`. For security reasons, this argument will be ignored if the `CI` environment
117
+ variable is not set.
120
118
  """
121
119
  click.echo("Fetching secrets...", err=True)
122
120
 
123
121
  client = _get_gsm_secrets_client()
124
- connector_name, connector_directory = resolve_connector_name_and_directory(
125
- connector_name=connector_name,
126
- connector_directory=connector_directory,
127
- )
122
+ connector_name, connector_directory = resolve_connector_name_and_directory(connector)
128
123
  secrets_dir = _get_secrets_dir(
129
124
  connector_directory=connector_directory,
130
125
  connector_name=connector_name,
@@ -289,21 +284,7 @@ def _get_secrets_dir(
289
284
  connector_name: str,
290
285
  ensure_exists: bool = True,
291
286
  ) -> Path:
292
- try:
293
- connector_name, connector_directory = resolve_connector_name_and_directory(
294
- connector_name=connector_name,
295
- connector_directory=connector_directory,
296
- )
297
- except FileNotFoundError as e:
298
- raise FileNotFoundError(
299
- f"Could not find connector directory for '{connector_name}'. "
300
- "Please provide the --connector-directory option with the path to the connector. "
301
- "Note: This command requires either running from within a connector directory, "
302
- "being in the airbyte monorepo, or explicitly providing the connector directory path."
303
- ) from e
304
- except ValueError as e:
305
- raise ValueError(str(e))
306
-
287
+ _ = connector_name # Unused, but it may be used in the future for logging
307
288
  secrets_dir = connector_directory / "secrets"
308
289
  if ensure_exists:
309
290
  secrets_dir.mkdir(parents=True, exist_ok=True)
@@ -10,18 +10,15 @@ from abc import ABC, abstractmethod
10
10
  from typing import Any, Iterable, List, Mapping
11
11
 
12
12
  import orjson
13
- from airbyte_protocol_dataclasses.models import (
14
- AirbyteMessage,
15
- ConfiguredAirbyteCatalog,
16
- DestinationCatalog,
17
- Type,
18
- )
19
13
 
20
14
  from airbyte_cdk.connector import Connector
21
15
  from airbyte_cdk.exception_handler import init_uncaught_exception_handler
22
16
  from airbyte_cdk.models import (
17
+ AirbyteMessage,
23
18
  AirbyteMessageSerializer,
19
+ ConfiguredAirbyteCatalog,
24
20
  ConfiguredAirbyteCatalogSerializer,
21
+ Type,
25
22
  )
26
23
  from airbyte_cdk.sources.utils.schema_helpers import check_config_against_spec_or_exit
27
24
  from airbyte_cdk.utils.traced_exception import AirbyteTracedException
@@ -29,74 +26,8 @@ from airbyte_cdk.utils.traced_exception import AirbyteTracedException
29
26
  logger = logging.getLogger("airbyte")
30
27
 
31
28
 
32
- def parse_args(args: List[str]) -> argparse.Namespace:
33
- """
34
- :param args: commandline arguments
35
- :return:
36
- """
37
-
38
- parent_parser = argparse.ArgumentParser(add_help=False)
39
- parent_parser.add_argument(
40
- "--debug", action="store_true", help="enables detailed debug logs related to the sync"
41
- )
42
- main_parser = argparse.ArgumentParser()
43
- subparsers = main_parser.add_subparsers(title="commands", dest="command")
44
-
45
- # spec
46
- subparsers.add_parser(
47
- "spec", help="outputs the json configuration specification", parents=[parent_parser]
48
- )
49
-
50
- # check
51
- check_parser = subparsers.add_parser(
52
- "check", help="checks the config can be used to connect", parents=[parent_parser]
53
- )
54
- required_check_parser = check_parser.add_argument_group("required named arguments")
55
- required_check_parser.add_argument(
56
- "--config", type=str, required=True, help="path to the json configuration file"
57
- )
58
-
59
- # discover
60
- discover_parser = subparsers.add_parser(
61
- "discover",
62
- help="discover the objects available in the destination",
63
- parents=[parent_parser],
64
- )
65
- required_discover_parser = discover_parser.add_argument_group("required named arguments")
66
- required_discover_parser.add_argument(
67
- "--config", type=str, required=True, help="path to the json configuration file"
68
- )
69
-
70
- # write
71
- write_parser = subparsers.add_parser(
72
- "write", help="Writes data to the destination", parents=[parent_parser]
73
- )
74
- write_required = write_parser.add_argument_group("required named arguments")
75
- write_required.add_argument(
76
- "--config", type=str, required=True, help="path to the JSON configuration file"
77
- )
78
- write_required.add_argument(
79
- "--catalog", type=str, required=True, help="path to the configured catalog JSON file"
80
- )
81
-
82
- parsed_args = main_parser.parse_args(args)
83
- cmd = parsed_args.command
84
- if not cmd:
85
- raise Exception("No command entered. ")
86
- elif cmd not in ["spec", "check", "discover", "write"]:
87
- # This is technically dead code since parse_args() would fail if this was the case
88
- # But it's non-obvious enough to warrant placing it here anyways
89
- raise Exception(f"Unknown command entered: {cmd}")
90
-
91
- return parsed_args
92
-
93
-
94
29
  class Destination(Connector, ABC):
95
- VALID_CMDS = {"spec", "check", "discover", "write"}
96
-
97
- def discover(self) -> DestinationCatalog:
98
- """Implement to define what objects are available in the destination"""
99
- raise NotImplementedError("Discover method is not implemented")
30
+ VALID_CMDS = {"spec", "check", "write"}
100
31
 
101
32
  @abstractmethod
102
33
  def write(
@@ -137,9 +68,52 @@ class Destination(Connector, ABC):
137
68
  )
138
69
  logger.info("Writing complete.")
139
70
 
140
- @staticmethod
141
- def parse_args(args: List[str]) -> argparse.Namespace:
142
- return parse_args(args)
71
+ def parse_args(self, args: List[str]) -> argparse.Namespace:
72
+ """
73
+ :param args: commandline arguments
74
+ :return:
75
+ """
76
+
77
+ parent_parser = argparse.ArgumentParser(add_help=False)
78
+ main_parser = argparse.ArgumentParser()
79
+ subparsers = main_parser.add_subparsers(title="commands", dest="command")
80
+
81
+ # spec
82
+ subparsers.add_parser(
83
+ "spec", help="outputs the json configuration specification", parents=[parent_parser]
84
+ )
85
+
86
+ # check
87
+ check_parser = subparsers.add_parser(
88
+ "check", help="checks the config can be used to connect", parents=[parent_parser]
89
+ )
90
+ required_check_parser = check_parser.add_argument_group("required named arguments")
91
+ required_check_parser.add_argument(
92
+ "--config", type=str, required=True, help="path to the json configuration file"
93
+ )
94
+
95
+ # write
96
+ write_parser = subparsers.add_parser(
97
+ "write", help="Writes data to the destination", parents=[parent_parser]
98
+ )
99
+ write_required = write_parser.add_argument_group("required named arguments")
100
+ write_required.add_argument(
101
+ "--config", type=str, required=True, help="path to the JSON configuration file"
102
+ )
103
+ write_required.add_argument(
104
+ "--catalog", type=str, required=True, help="path to the configured catalog JSON file"
105
+ )
106
+
107
+ parsed_args = main_parser.parse_args(args)
108
+ cmd = parsed_args.command
109
+ if not cmd:
110
+ raise Exception("No command entered. ")
111
+ elif cmd not in ["spec", "check", "write"]:
112
+ # This is technically dead code since parse_args() would fail if this was the case
113
+ # But it's non-obvious enough to warrant placing it here anyways
114
+ raise Exception(f"Unknown command entered: {cmd}")
115
+
116
+ return parsed_args
143
117
 
144
118
  def run_cmd(self, parsed_args: argparse.Namespace) -> Iterable[AirbyteMessage]:
145
119
  cmd = parsed_args.command
@@ -163,8 +137,6 @@ class Destination(Connector, ABC):
163
137
 
164
138
  if cmd == "check":
165
139
  yield self._run_check(config=config)
166
- elif cmd == "discover":
167
- yield AirbyteMessage(type=Type.DESTINATION_CATALOG, destination_catalog=self.discover())
168
140
  elif cmd == "write":
169
141
  # Wrap in UTF-8 to override any other input encodings
170
142
  wrapped_stdin = io.TextIOWrapper(sys.stdin.buffer, encoding="utf-8")
@@ -35,10 +35,6 @@ from .airbyte_protocol import (
35
35
  ConfiguredAirbyteCatalog,
36
36
  ConfiguredAirbyteStream,
37
37
  ConnectorSpecification,
38
- DestinationCatalog,
39
- DestinationObject,
40
- DestinationObjectProperty,
41
- DestinationOperation,
42
38
  DestinationSyncMode,
43
39
  EstimateType,
44
40
  FailureType,
@@ -1,7 +1,9 @@
1
1
  # Copyright (c) 2024 Airbyte, Inc., all rights reserved.
2
2
  from typing import Any, Dict
3
3
 
4
- from airbyte_protocol_dataclasses.models import ( # type: ignore[attr-defined] # all classes are imported to airbyte_protocol via *
4
+ from serpyco_rs import CustomType, Serializer
5
+
6
+ from .airbyte_protocol import ( # type: ignore[attr-defined] # all classes are imported to airbyte_protocol via *
5
7
  AirbyteMessage,
6
8
  AirbyteStateBlob,
7
9
  AirbyteStateMessage,
@@ -10,7 +12,6 @@ from airbyte_protocol_dataclasses.models import ( # type: ignore[attr-defined]
10
12
  ConfiguredAirbyteStream,
11
13
  ConnectorSpecification,
12
14
  )
13
- from serpyco_rs import CustomType, Serializer
14
15
 
15
16
 
16
17
  class AirbyteStateBlobType(CustomType[AirbyteStateBlob, Dict[str, Any]]):
@@ -4,6 +4,10 @@
4
4
  # WHEN DEPRECATED FIELDS ARE ACCESSED
5
5
 
6
6
  import warnings
7
+
8
+ # ignore the SyntaxWarning in the Airbyte log messages, during the string evaluation
9
+ warnings.filterwarnings("ignore", category=SyntaxWarning)
10
+
7
11
  from typing import Any, List
8
12
 
9
13
  from pydantic.v1 import BaseModel
@@ -12,9 +16,10 @@ from airbyte_cdk.connector_builder.models import LogMessage as ConnectorBuilderL
12
16
 
13
17
  # format the warning message
14
18
  warnings.formatwarning = (
15
- lambda message, category, *args, **kwargs: f"{category.__name__}: {message}"
19
+ lambda message, category, *args, **kwargs: f"{category.__name__}: {message}\n"
16
20
  )
17
21
 
22
+
18
23
  FIELDS_TAG = "__fields__"
19
24
  DEPRECATED = "deprecated"
20
25
  DEPRECATION_MESSAGE = "deprecation_message"
@@ -1,10 +1,11 @@
1
1
  #
2
2
  # Copyright (c) 2024 Airbyte, Inc., all rights reserved.
3
3
  #
4
- from abc import ABC, abstractmethod
4
+
5
+
5
6
  from copy import deepcopy
6
7
  from dataclasses import InitVar, dataclass, field
7
- from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Union
8
+ from typing import Any, List, Mapping, MutableMapping, Optional, Union
8
9
 
9
10
  import dpath
10
11
  from typing_extensions import deprecated
@@ -15,7 +16,7 @@ from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever
15
16
  from airbyte_cdk.sources.declarative.schema.schema_loader import SchemaLoader
16
17
  from airbyte_cdk.sources.declarative.transformations import RecordTransformation
17
18
  from airbyte_cdk.sources.source import ExperimentalClassWarning
18
- from airbyte_cdk.sources.types import Config
19
+ from airbyte_cdk.sources.types import Config, StreamSlice, StreamState
19
20
 
20
21
  AIRBYTE_DATA_TYPES: Mapping[str, MutableMapping[str, Any]] = {
21
22
  "string": {"type": ["null", "string"]},
@@ -113,38 +114,6 @@ class SchemaTypeIdentifier:
113
114
  )
114
115
 
115
116
 
116
- @deprecated("This class is experimental. Use at your own risk.", category=ExperimentalClassWarning)
117
- class AdditionalPropertyFieldsInferrer(ABC):
118
- """
119
- Infers additional fields to be added to each property. For example, if this inferrer returns {"toto": "tata"}, a property that would have looked like this:
120
- ```
121
- "properties": {
122
- "Id": {
123
- "type": ["null", "string"],
124
- },
125
- <...>
126
- }
127
- ```
128
- ... will look like this:
129
- ```
130
- "properties": {
131
- "Id": {
132
- "type": ["null", "string"],
133
- "toto": "tata"
134
- },
135
- <...>
136
- }
137
- ```
138
- """
139
-
140
- @abstractmethod
141
- def infer(self, property_definition: MutableMapping[str, Any]) -> MutableMapping[str, Any]:
142
- """
143
- Infers additional property fields from the given property definition.
144
- """
145
- pass
146
-
147
-
148
117
  @deprecated("This class is experimental. Use at your own risk.", category=ExperimentalClassWarning)
149
118
  @dataclass
150
119
  class DynamicSchemaLoader(SchemaLoader):
@@ -157,8 +126,6 @@ class DynamicSchemaLoader(SchemaLoader):
157
126
  parameters: InitVar[Mapping[str, Any]]
158
127
  schema_type_identifier: SchemaTypeIdentifier
159
128
  schema_transformations: List[RecordTransformation] = field(default_factory=lambda: [])
160
- additional_property_fields_inferrer: Optional[AdditionalPropertyFieldsInferrer] = None
161
- allow_additional_properties: bool = True
162
129
 
163
130
  def get_json_schema(self) -> Mapping[str, Any]:
164
131
  """
@@ -182,26 +149,22 @@ class DynamicSchemaLoader(SchemaLoader):
182
149
  property_definition,
183
150
  self.schema_type_identifier.type_pointer,
184
151
  )
185
-
186
- value.update(
187
- self.additional_property_fields_inferrer.infer(property_definition)
188
- if self.additional_property_fields_inferrer
189
- else {}
190
- )
191
152
  properties[key] = value
192
153
 
193
- transformed_properties = self._transform(properties)
154
+ transformed_properties = self._transform(properties, {})
194
155
 
195
156
  return {
196
157
  "$schema": "https://json-schema.org/draft-07/schema#",
197
158
  "type": "object",
198
- "additionalProperties": self.allow_additional_properties,
159
+ "additionalProperties": True,
199
160
  "properties": transformed_properties,
200
161
  }
201
162
 
202
163
  def _transform(
203
164
  self,
204
165
  properties: Mapping[str, Any],
166
+ stream_state: StreamState,
167
+ stream_slice: Optional[StreamSlice] = None,
205
168
  ) -> Mapping[str, Any]:
206
169
  for transformation in self.schema_transformations:
207
170
  transformation.transform(
@@ -227,7 +190,7 @@ class DynamicSchemaLoader(SchemaLoader):
227
190
  self,
228
191
  raw_schema: MutableMapping[str, Any],
229
192
  field_type_path: Optional[List[Union[InterpolatedString, str]]],
230
- ) -> Dict[str, Any]:
193
+ ) -> Union[Mapping[str, Any], List[Mapping[str, Any]]]:
231
194
  """
232
195
  Determines the JSON Schema type for a field, supporting nullable and combined types.
233
196
  """
@@ -257,7 +220,7 @@ class DynamicSchemaLoader(SchemaLoader):
257
220
  f"Invalid data type. Available string or two items list of string. Got {mapped_field_type}."
258
221
  )
259
222
 
260
- def _resolve_complex_type(self, complex_type: ComplexFieldType) -> Dict[str, Any]:
223
+ def _resolve_complex_type(self, complex_type: ComplexFieldType) -> Mapping[str, Any]:
261
224
  if not complex_type.items:
262
225
  return self._get_airbyte_type(complex_type.field_type)
263
226
 
@@ -292,14 +255,14 @@ class DynamicSchemaLoader(SchemaLoader):
292
255
  return field_type
293
256
 
294
257
  @staticmethod
295
- def _get_airbyte_type(field_type: str) -> Dict[str, Any]:
258
+ def _get_airbyte_type(field_type: str) -> MutableMapping[str, Any]:
296
259
  """
297
260
  Maps a field type to its corresponding Airbyte type definition.
298
261
  """
299
262
  if field_type not in AIRBYTE_DATA_TYPES:
300
263
  raise ValueError(f"Invalid Airbyte data type: {field_type}")
301
264
 
302
- return deepcopy(AIRBYTE_DATA_TYPES[field_type]) # type: ignore # a copy of a dict should be a dict, not a MutableMapping
265
+ return deepcopy(AIRBYTE_DATA_TYPES[field_type])
303
266
 
304
267
  def _extract_data(
305
268
  self,
@@ -2,8 +2,6 @@
2
2
 
3
3
  from typing import Any, Dict, List, Union, overload
4
4
 
5
- from airbyte_protocol_dataclasses.models import DestinationSyncMode
6
-
7
5
  from airbyte_cdk.models import (
8
6
  ConfiguredAirbyteCatalog,
9
7
  ConfiguredAirbyteStream,
@@ -34,12 +32,6 @@ class ConfiguredAirbyteStreamBuilder:
34
32
  self._stream["sync_mode"] = sync_mode.name
35
33
  return self
36
34
 
37
- def with_destination_sync_mode(
38
- self, sync_mode: DestinationSyncMode
39
- ) -> "ConfiguredAirbyteStreamBuilder":
40
- self._stream["destination_sync_mode"] = sync_mode.name
41
- return self
42
-
43
35
  def with_primary_key(self, pk: List[List[str]]) -> "ConfiguredAirbyteStreamBuilder":
44
36
  self._stream["primary_key"] = pk
45
37
  self._stream["stream"]["source_defined_primary_key"] = pk # type: ignore # we assume that self._stream["stream"] is a Dict[str, Any]
@@ -66,7 +58,7 @@ class CatalogBuilder:
66
58
  def with_stream(
67
59
  self,
68
60
  name: Union[str, ConfiguredAirbyteStreamBuilder],
69
- sync_mode: SyncMode = SyncMode.full_refresh,
61
+ sync_mode: Union[SyncMode, None] = None,
70
62
  ) -> "CatalogBuilder":
71
63
  # As we are introducing a fully fledge ConfiguredAirbyteStreamBuilder, we would like to deprecate the previous interface
72
64
  # with_stream(str, SyncMode)
@@ -82,6 +82,10 @@ class EntrypointOutput:
82
82
  def state_messages(self) -> List[AirbyteMessage]:
83
83
  return self._get_message_by_types([Type.STATE])
84
84
 
85
+ @property
86
+ def spec_messages(self) -> List[AirbyteMessage]:
87
+ return self._get_message_by_types([Type.SPEC])
88
+
85
89
  @property
86
90
  def connection_status_messages(self) -> List[AirbyteMessage]:
87
91
  return self._get_message_by_types([Type.CONNECTION_STATUS])
@@ -72,11 +72,7 @@ class HttpRequest:
72
72
  elif isinstance(body, bytes):
73
73
  return json.loads(body.decode()) # type: ignore # assumes return type of Mapping[str, Any]
74
74
  elif isinstance(body, str):
75
- try:
76
- return json.loads(body) # type: ignore # assumes return type of Mapping[str, Any]
77
- except json.JSONDecodeError:
78
- # one of the body is a mapping while the other isn't so comparison should fail anyway
79
- return None
75
+ return json.loads(body) # type: ignore # assumes return type of Mapping[str, Any]
80
76
  return None
81
77
 
82
78
  @staticmethod