airbyte-internal-ops 0.5.0__py3-none-any.whl → 0.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.2.dist-info}/METADATA +2 -1
  2. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.2.dist-info}/RECORD +18 -87
  3. airbyte_ops_mcp/cli/cloud.py +12 -8
  4. airbyte_ops_mcp/cloud_admin/api_client.py +51 -51
  5. airbyte_ops_mcp/constants.py +58 -0
  6. airbyte_ops_mcp/{_legacy/airbyte_ci/metadata_service/docker_hub.py → docker_hub.py} +16 -10
  7. airbyte_ops_mcp/mcp/cloud_connector_versions.py +44 -23
  8. airbyte_ops_mcp/mcp/prod_db_queries.py +128 -4
  9. airbyte_ops_mcp/mcp/regression_tests.py +10 -5
  10. airbyte_ops_mcp/{_legacy/airbyte_ci/metadata_service/validators/metadata_validator.py → metadata_validator.py} +18 -12
  11. airbyte_ops_mcp/prod_db_access/queries.py +51 -0
  12. airbyte_ops_mcp/prod_db_access/sql.py +76 -0
  13. airbyte_ops_mcp/regression_tests/__init__.py +2 -0
  14. airbyte_ops_mcp/regression_tests/connection_fetcher.py +16 -5
  15. airbyte_ops_mcp/regression_tests/connection_secret_retriever.py +25 -5
  16. airbyte_ops_mcp/regression_tests/models.py +2 -2
  17. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/README.md +0 -91
  18. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/bundle-schemas.js +0 -48
  19. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/generate-metadata-models.sh +0 -36
  20. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ActorDefinitionResourceRequirements.py +0 -54
  21. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AirbyteInternal.py +0 -22
  22. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AllowedHosts.py +0 -18
  23. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBreakingChanges.py +0 -65
  24. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBuildOptions.py +0 -15
  25. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorIPCOptions.py +0 -25
  26. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetadataDefinitionV0.json +0 -897
  27. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetadataDefinitionV0.py +0 -478
  28. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetrics.py +0 -24
  29. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorPackageInfo.py +0 -12
  30. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryDestinationDefinition.py +0 -407
  31. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryReleases.py +0 -406
  32. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistrySourceDefinition.py +0 -407
  33. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryV0.py +0 -413
  34. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorReleases.py +0 -98
  35. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorTestSuiteOptions.py +0 -58
  36. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/GeneratedFields.py +0 -62
  37. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/GitInfo.py +0 -31
  38. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/JobType.py +0 -23
  39. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/NormalizationDestinationDefinitionConfig.py +0 -24
  40. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RegistryOverrides.py +0 -111
  41. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ReleaseStage.py +0 -15
  42. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RemoteRegistries.py +0 -23
  43. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ResourceRequirements.py +0 -18
  44. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RolloutConfiguration.py +0 -29
  45. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/Secret.py +0 -34
  46. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SecretStore.py +0 -22
  47. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SourceFileInfo.py +0 -16
  48. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SuggestedStreams.py +0 -18
  49. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SupportLevel.py +0 -15
  50. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/TestConnections.py +0 -14
  51. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/__init__.py +0 -31
  52. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/airbyte-connector-metadata-schema.json +0 -0
  53. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ActorDefinitionResourceRequirements.yaml +0 -30
  54. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/AirbyteInternal.yaml +0 -32
  55. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/AllowedHosts.yaml +0 -13
  56. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorBreakingChanges.yaml +0 -65
  57. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorBuildOptions.yaml +0 -10
  58. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorIPCOptions.yaml +0 -29
  59. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorMetadataDefinitionV0.yaml +0 -172
  60. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorMetrics.yaml +0 -30
  61. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorPackageInfo.yaml +0 -9
  62. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryDestinationDefinition.yaml +0 -90
  63. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryReleases.yaml +0 -35
  64. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistrySourceDefinition.yaml +0 -92
  65. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryV0.yaml +0 -18
  66. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorReleases.yaml +0 -16
  67. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorTestSuiteOptions.yaml +0 -28
  68. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/GeneratedFields.yaml +0 -16
  69. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/GitInfo.yaml +0 -21
  70. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/JobType.yaml +0 -14
  71. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/NormalizationDestinationDefinitionConfig.yaml +0 -21
  72. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RegistryOverrides.yaml +0 -38
  73. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ReleaseStage.yaml +0 -11
  74. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RemoteRegistries.yaml +0 -25
  75. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ResourceRequirements.yaml +0 -16
  76. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RolloutConfiguration.yaml +0 -29
  77. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/Secret.yaml +0 -19
  78. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SecretStore.yaml +0 -16
  79. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SourceFileInfo.yaml +0 -17
  80. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SuggestedStreams.yaml +0 -13
  81. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SupportLevel.yaml +0 -10
  82. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/TestConnections.yaml +0 -17
  83. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/package-lock.json +0 -62
  84. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/package.json +0 -12
  85. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/transform.py +0 -71
  86. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.2.dist-info}/WHEEL +0 -0
  87. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.2.dist-info}/entry_points.txt +0 -0

airbyte_ops_mcp/regression_tests/connection_fetcher.py

@@ -96,7 +96,7 @@ def fetch_connection_data(
     )
 
     access_token = _get_access_token(client_id, client_secret)
-    api_root = constants.CLOUD_API_ROOT
+    public_api_root = constants.CLOUD_API_ROOT
     headers = {
         "Authorization": f"Bearer {access_token}",
         "Content-Type": "application/json",
@@ -104,7 +104,7 @@ def fetch_connection_data(
 
     # Get connection details
     conn_response = requests.get(
-        f"{api_root}/connections/{connection_id}",
+        f"{public_api_root}/connections/{connection_id}",
         headers=headers,
         timeout=30,
     )
@@ -120,7 +120,7 @@ def fetch_connection_data(
 
     # Get source details (includes config)
     source_response = requests.get(
-        f"{api_root}/sources/{source_id}",
+        f"{public_api_root}/sources/{source_id}",
         headers=headers,
         timeout=30,
     )
@@ -160,7 +160,9 @@ def fetch_connection_data(
     stream_names = [s["name"] for s in streams_config]
 
     # Build Airbyte protocol catalog format
-    catalog = _build_configured_catalog(streams_config, source_id, headers, api_root)
+    catalog = _build_configured_catalog(
+        streams_config, source_id, headers, public_api_root
+    )
 
     return ConnectionData(
         connection_id=connection_id,
@@ -179,12 +181,21 @@ def _build_configured_catalog(
     streams_config: list[dict[str, Any]],
     source_id: str,
     headers: dict[str, str],
-    api_root: str,
+    public_api_root: str,
 ) -> dict[str, Any]:
     """Build a configured catalog from connection stream configuration.
 
     This creates a catalog in the Airbyte protocol format that can be used
     with connector commands.
+
+    Args:
+        streams_config: List of stream configuration dicts from the connection.
+        source_id: The source ID.
+        headers: HTTP headers for API requests.
+        public_api_root: The Public API root URL (e.g., CLOUD_API_ROOT).
+
+    Returns:
+        A configured catalog dict in Airbyte protocol format.
     """
     # For now, create a minimal catalog structure
     # A full implementation would fetch the source's discovered catalog
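
For context, `_build_configured_catalog` is documented above to return a catalog in the Airbyte protocol format. A rough sketch of that shape is shown below; the stream name, empty schema, and sync modes are illustrative placeholders, not values taken from this diff.

```python
# Rough sketch of the Airbyte-protocol configured catalog shape that
# _build_configured_catalog is documented to return. The stream name, empty
# json_schema, and sync modes are illustrative placeholders.
minimal_configured_catalog: dict = {
    "streams": [
        {
            "stream": {
                "name": "example_stream",  # hypothetical stream name
                "json_schema": {},  # minimal placeholder schema
                "supported_sync_modes": ["full_refresh"],
            },
            "sync_mode": "full_refresh",
            "destination_sync_mode": "overwrite",
        }
    ]
}
```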
airbyte_ops_mcp/regression_tests/connection_secret_retriever.py

@@ -107,9 +107,18 @@ def retrieve_unmasked_config(
         return None
 
 
+class SecretRetrievalError(Exception):
+    """Raised when secret retrieval fails.
+
+    This exception is raised when USE_CONNECTION_SECRET_RETRIEVER is enabled
+    but secrets cannot be retrieved (e.g., EU data residency restrictions).
+    """
+
+
 def enrich_config_with_secrets(
     connection_data: ConnectionData,
     retrieval_reason: str = "MCP live tests",
+    raise_on_failure: bool = True,
 ) -> ConnectionData:
     """Enrich connection data with unmasked secrets from internal retriever.
 
@@ -120,10 +129,16 @@ def enrich_config_with_secrets(
     Args:
         connection_data: The connection data to enrich.
         retrieval_reason: Reason for retrieval (for audit logging).
+        raise_on_failure: If True (default), raise SecretRetrievalError when
+            secrets cannot be retrieved. If False, return the original
+            connection_data with masked secrets (legacy behavior).
 
     Returns:
-        A new ConnectionData with unmasked config, or the original if
-        retrieval fails or is not available.
+        A new ConnectionData with unmasked config.
+
+    Raises:
+        SecretRetrievalError: If raise_on_failure is True and secrets cannot
+            be retrieved (e.g., due to EU data residency restrictions).
     """
     unmasked_config = retrieve_unmasked_config(
         connection_id=connection_data.connection_id,
@@ -131,10 +146,15 @@ def enrich_config_with_secrets(
     )
 
     if unmasked_config is None:
-        logger.info(
-            f"Could not retrieve unmasked config for {connection_data.connection_id}, "
-            "using masked config from Cloud API"
+        error_msg = (
+            "Could not retrieve unmasked secrets for connection "
+            f"{connection_data.connection_id}. This may be due to EU data "
+            "residency restrictions or database connectivity issues. "
+            "The connection's credentials cannot be used for regression testing."
         )
+        logger.warning(error_msg)
+        if raise_on_failure:
+            raise SecretRetrievalError(error_msg)
         return connection_data
 
     logger.info(
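
The new `raise_on_failure` flag (default `True`) changes `enrich_config_with_secrets` from silently falling back to the masked config to raising `SecretRetrievalError`. Below is a hedged sketch of how a caller might adopt the stricter behavior; the wrapper function and its skip-on-failure policy are illustrative, not part of the package.

```python
# Sketch of adopting the stricter failure behavior in 0.5.2. The wrapper name
# and the skip-on-failure policy are illustrative; import paths follow the
# module layout shown in the file list above.
from airbyte_ops_mcp.regression_tests.connection_secret_retriever import (
    SecretRetrievalError,
    enrich_config_with_secrets,
)


def enrich_or_skip(connection_data):
    """Return enriched connection data, or None when secrets are unavailable.

    `connection_data` is assumed to be a ConnectionData produced by the
    connection fetcher; its construction is not shown here.
    """
    try:
        return enrich_config_with_secrets(
            connection_data,
            retrieval_reason="MCP live tests",
            raise_on_failure=True,  # 0.5.2 default: raise instead of returning masked config
        )
    except SecretRetrievalError as exc:
        # e.g. EU data residency restrictions or database connectivity issues
        print(f"Skipping connection {connection_data.connection_id}: {exc}")
        return None
```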
airbyte_ops_mcp/regression_tests/models.py

@@ -250,7 +250,7 @@ class ExecutionResult:
         messages_by_type: dict[str, list[str]] = defaultdict(list)
         for message in self.airbyte_messages:
             type_name = message.type.value.lower()
-            messages_by_type[type_name].append(message.json())
+            messages_by_type[type_name].append(message.model_dump_json())
 
         for type_name, messages in messages_by_type.items():
             file_path = airbyte_messages_dir / f"{type_name}.jsonl"
@@ -259,7 +259,7 @@ class ExecutionResult:
         # Save configured catalog (input) if available
         if self.configured_catalog is not None:
             catalog_path = output_dir / "configured_catalog.json"
-            catalog_path.write_text(self.configured_catalog.json(indent=2))
+            catalog_path.write_text(self.configured_catalog.model_dump_json(indent=2))
             self.logger.info(f"Saved configured catalog to {catalog_path}")
 
         self.logger.info(f"Artifacts saved to {output_dir}")
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/README.md (deleted)

@@ -1,91 +0,0 @@
-# Connector Metadata Service Library
-
-This submodule is responsible for managing all the logic related to validating, uploading, and managing connector metadata.
-
-## Installation
-
-To use this submodule, it is recommended that you use Poetry to manage dependencies.
-
-```
-poetry install
-```
-
-### Node.js Requirement
-
-The model generation process also requires Node.js to bundle JSON schemas. Install Node.js:
-
-- On macOS: `brew install node`
-- On Ubuntu/Debian: `sudo apt-get install nodejs npm`
-- On other systems: https://nodejs.org/
-
-Node.js dependencies will be automatically installed when running `poetry run poe generate-models`.
-
-## Generating Models
-
-This submodule includes a tool for generating Python models from JSON Schema specifications. To generate the models, we use the library [datamodel-code-generator](https://github.com/koxudaxi/datamodel-code-generator). The generated models are stored in `models/generated`.
-
-To generate the models, run the following command:
-
-```bash
-poetry run poe generate-models
-
-```
-
-This will read the JSON Schema specifications in `models/src` and generate Python models in `models/generated`.
-
-## Running Tests
-
-```bash
-poetry run pytest
-```
-
-## Changelog
-
-### 0.24.1
-Update Python version requirement from 3.10 to 3.11.
-
-## Validating Metadata Files
-
-To be considered valid, a connector must have a metadata.yaml file which must conform to the [ConnectorMetadataDefinitionV0](./metadata_service/models/src/ConnectorMetadataDefinitionV0.yaml) schema, and a documentation file.
-
-The paths to both files must be passed to the validate command.
-
-```bash
-poetry run metadata_service validate tests/fixtures/metadata_validate/valid/metadata_simple.yaml tests/fixtures/doc.md
-```
-
-## Useful Commands
-
-### Replicate Production Data in your Development Bucket
-
-This will replicate all the production data to your development bucket. This is useful for testing the metadata service with real up to date data.
-
-_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_
-
-_⚠️ Warning: Its important to know that this will remove ANY files you have in your destination buckets as it calls `gsutil rsync` with `-d` enabled._
-
-```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> poetry run poe replicate-prod
-```
-
-### Copy specific connector version to your Development Bucket
-
-This will copy the specified connector version to your development bucket. This is useful for testing the metadata service with a specific version of a connector.
-
-_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_
-
-```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-preview.ea013c8" poetry run poe copy-connector-from-prod
-```
-
-### Promote Connector Version to Latest
-
-This will promote the specified connector version to the latest version in the registry. This is useful for creating a mocked registry in which a prerelease connector is treated as if it was already published.
-
-_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_
-
-_⚠️ Warning: Its important to know that this will remove ANY existing files in the latest folder that are not in the versioned folder as it calls `gsutil rsync` with `-d` enabled._
-
-```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-preview.ea013c8" poetry run poe promote-connector-to-latest
-```
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/bundle-schemas.js (deleted)

@@ -1,48 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * Bundle JSON schemas using @apidevtools/json-schema-ref-parser
- * This script resolves all $ref references in the schema files and creates a single bundled schema.
- */
-
-const $RefParser = require('@apidevtools/json-schema-ref-parser');
-const fs = require('fs');
-const path = require('path');
-
-const YAML_DIR = 'metadata_service/models/src';
-const OUTPUT_DIR = 'metadata_service/models/generated';
-const ENTRY_SCHEMA = path.join(YAML_DIR, 'ConnectorMetadataDefinitionV0.yaml');
-const BUNDLE_OUTPUT = path.join(OUTPUT_DIR, 'ConnectorMetadataDefinitionV0.json');
-
-async function bundleSchemas() {
-  try {
-    console.log('📦 Bundling JSON schemas...');
-    console.log(`  Entry schema: ${ENTRY_SCHEMA}`);
-    console.log(`  Output: ${BUNDLE_OUTPUT}`);
-
-    if (!fs.existsSync(YAML_DIR)) {
-      console.error(`❌ Error: The yaml directory does not exist: ${YAML_DIR}`);
-      process.exit(1);
-    }
-
-    if (!fs.existsSync(OUTPUT_DIR)) {
-      fs.mkdirSync(OUTPUT_DIR, { recursive: true });
-    }
-
-    const schema = await $RefParser.bundle(ENTRY_SCHEMA, {
-      dereference: {
-        circular: 'ignore' // Handle circular references gracefully
-      }
-    });
-
-    fs.writeFileSync(BUNDLE_OUTPUT, JSON.stringify(schema, null, 2));
-
-    console.log('✅ Successfully bundled schema to', BUNDLE_OUTPUT);
-    console.log('   This bundled schema can be used for IDE validation and other tools.');
-  } catch (error) {
-    console.error('❌ Error bundling schemas:', error.message);
-    process.exit(1);
-  }
-}
-
-bundleSchemas();
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/generate-metadata-models.sh (deleted)

@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-
-YAML_DIR=metadata_service/models/src
-OUTPUT_DIR=metadata_service/models/generated
-
-# Ensure the yaml directory exists
-if [ ! -d "$YAML_DIR" ]; then
-  echo "The yaml directory does not exist: $YAML_DIR"
-  exit 1
-fi
-
-
-rm -rf "$OUTPUT_DIR"/*.py
-mkdir -p "$OUTPUT_DIR"
-
-echo "# generated by generate-metadata-models" > "$OUTPUT_DIR"/__init__.py
-
-for f in "$YAML_DIR"/*.yaml; do
-  filename_wo_ext=$(basename "$f" | cut -d . -f 1)
-  echo "from .$filename_wo_ext import *" >> "$OUTPUT_DIR"/__init__.py
-
-  datamodel-codegen \
-    --input "$YAML_DIR/$filename_wo_ext.yaml" \
-    --output "$OUTPUT_DIR/$filename_wo_ext.py" \
-    --use-title-as-name \
-    --use-double-quotes \
-    --enum-field-as-literal all \
-    --disable-timestamp
-done
-
-echo ""
-echo "Generating bundled JSON schema..."
-node bin/bundle-schemas.js
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ActorDefinitionResourceRequirements.py (deleted)

@@ -1,54 +0,0 @@
-# generated by datamodel-codegen:
-#   filename: ActorDefinitionResourceRequirements.yaml
-
-from __future__ import annotations
-
-from typing import List, Optional
-
-from pydantic import BaseModel, Extra, Field
-from typing_extensions import Literal
-
-
-class ResourceRequirements(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    cpu_request: Optional[str] = None
-    cpu_limit: Optional[str] = None
-    memory_request: Optional[str] = None
-    memory_limit: Optional[str] = None
-
-
-class JobType(BaseModel):
-    __root__: Literal[
-        "get_spec",
-        "check_connection",
-        "discover_schema",
-        "sync",
-        "reset_connection",
-        "connection_updater",
-        "replicate",
-    ] = Field(
-        ...,
-        description="enum that describes the different types of jobs that the platform runs.",
-        title="JobType",
-    )
-
-
-class JobTypeResourceLimit(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    jobType: JobType
-    resourceRequirements: ResourceRequirements
-
-
-class ActorDefinitionResourceRequirements(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    default: Optional[ResourceRequirements] = Field(
-        None,
-        description="if set, these are the requirements that should be set for ALL jobs run for this actor definition.",
-    )
-    jobSpecific: Optional[List[JobTypeResourceLimit]] = None
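
The removed generated models above use Pydantic v1 idioms (`class Config` with `Extra`, and `__root__` models). Purely for reference, here is a hedged sketch of how the `ResourceRequirements` and `JobType` models might look if regenerated for Pydantic v2; this code is not part of the package.

```python
# Hedged sketch: Pydantic v2 equivalents of two of the removed v1 models above.
# Not part of the package; shown only to illustrate the idiom change.
from typing import Literal, Optional

from pydantic import BaseModel, ConfigDict, RootModel


class ResourceRequirements(BaseModel):
    # v2 replaces `class Config: extra = Extra.forbid` with model_config.
    model_config = ConfigDict(extra="forbid")

    cpu_request: Optional[str] = None
    cpu_limit: Optional[str] = None
    memory_request: Optional[str] = None
    memory_limit: Optional[str] = None


# v2 replaces the `__root__` field with RootModel.
JobTypeLiteral = Literal[
    "get_spec",
    "check_connection",
    "discover_schema",
    "sync",
    "reset_connection",
    "connection_updater",
    "replicate",
]


class JobType(RootModel[JobTypeLiteral]):
    pass
```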
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AirbyteInternal.py (deleted)

@@ -1,22 +0,0 @@
-# generated by datamodel-codegen:
-#   filename: AirbyteInternal.yaml
-
-from __future__ import annotations
-
-from typing import Optional
-
-from pydantic import BaseModel, Extra, Field
-from typing_extensions import Literal
-
-
-class AirbyteInternal(BaseModel):
-    class Config:
-        extra = Extra.allow
-
-    sl: Optional[Literal[0, 100, 200, 300]] = None
-    ql: Optional[Literal[0, 100, 200, 300, 400, 500, 600]] = None
-    isEnterprise: Optional[bool] = False
-    requireVersionIncrementsInPullRequests: Optional[bool] = Field(
-        True,
-        description="When false, version increment checks will be skipped for this connector",
-    )
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AllowedHosts.py (deleted)

@@ -1,18 +0,0 @@
-# generated by datamodel-codegen:
-#   filename: AllowedHosts.yaml
-
-from __future__ import annotations
-
-from typing import List, Optional
-
-from pydantic import BaseModel, Extra, Field
-
-
-class AllowedHosts(BaseModel):
-    class Config:
-        extra = Extra.allow
-
-    hosts: Optional[List[str]] = Field(
-        None,
-        description="An array of hosts that this connector can connect to. AllowedHosts not being present for the source or destination means that access to all hosts is allowed. An empty list here means that no network access is granted.",
-    )
@@ -1,65 +0,0 @@
1
- # generated by datamodel-codegen:
2
- # filename: ConnectorBreakingChanges.yaml
3
-
4
- from __future__ import annotations
5
-
6
- from datetime import date
7
- from typing import Dict, List, Optional
8
-
9
- from pydantic import AnyUrl, BaseModel, Extra, Field, constr
10
- from typing_extensions import Literal
11
-
12
-
13
- class StreamBreakingChangeScope(BaseModel):
14
- class Config:
15
- extra = Extra.forbid
16
-
17
- scopeType: str = Field("stream", const=True)
18
- impactedScopes: List[str] = Field(
19
- ...,
20
- description="List of streams that are impacted by the breaking change.",
21
- min_items=1,
22
- )
23
-
24
-
25
- class BreakingChangeScope(BaseModel):
26
- __root__: StreamBreakingChangeScope = Field(
27
- ...,
28
- description="A scope that can be used to limit the impact of a breaking change.",
29
- )
30
-
31
-
32
- class VersionBreakingChange(BaseModel):
33
- class Config:
34
- extra = Extra.forbid
35
-
36
- upgradeDeadline: date = Field(
37
- ...,
38
- description="The deadline by which to upgrade before the breaking change takes effect.",
39
- )
40
- message: str = Field(
41
- ..., description="Descriptive message detailing the breaking change."
42
- )
43
- deadlineAction: Optional[Literal["auto_upgrade", "disable"]] = Field(
44
- None, description="Action to do when the deadline is reached."
45
- )
46
- migrationDocumentationUrl: Optional[AnyUrl] = Field(
47
- None,
48
- description="URL to documentation on how to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version}",
49
- )
50
- scopedImpact: Optional[List[BreakingChangeScope]] = Field(
51
- None,
52
- description="List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types.",
53
- min_items=1,
54
- )
55
-
56
-
57
- class ConnectorBreakingChanges(BaseModel):
58
- class Config:
59
- extra = Extra.forbid
60
-
61
- __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field(
62
- ...,
63
- description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.",
64
- title="ConnectorBreakingChanges",
65
- )
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBuildOptions.py (deleted)

@@ -1,15 +0,0 @@
-# generated by datamodel-codegen:
-#   filename: ConnectorBuildOptions.yaml
-
-from __future__ import annotations
-
-from typing import Optional
-
-from pydantic import BaseModel, Extra
-
-
-class ConnectorBuildOptions(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    baseImage: Optional[str] = None
airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorIPCOptions.py (deleted)

@@ -1,25 +0,0 @@
-# generated by datamodel-codegen:
-#   filename: ConnectorIPCOptions.yaml
-
-from __future__ import annotations
-
-from typing import List
-
-from pydantic import BaseModel, Extra
-from typing_extensions import Literal
-
-
-class DataChannel(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    version: str
-    supportedSerialization: List[Literal["JSONL", "PROTOBUF", "FLATBUFFERS"]]
-    supportedTransport: List[Literal["STDIO", "SOCKET"]]
-
-
-class ConnectorIPCOptions(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    dataChannel: DataChannel