airbyte-internal-ops 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/METADATA +2 -1
  2. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/RECORD +16 -85
  3. airbyte_ops_mcp/cli/cloud.py +4 -2
  4. airbyte_ops_mcp/cloud_admin/api_client.py +51 -51
  5. airbyte_ops_mcp/constants.py +58 -0
  6. airbyte_ops_mcp/{_legacy/airbyte_ci/metadata_service/docker_hub.py → docker_hub.py} +16 -10
  7. airbyte_ops_mcp/mcp/cloud_connector_versions.py +44 -23
  8. airbyte_ops_mcp/mcp/prod_db_queries.py +128 -4
  9. airbyte_ops_mcp/mcp/regression_tests.py +10 -5
  10. airbyte_ops_mcp/{_legacy/airbyte_ci/metadata_service/validators/metadata_validator.py → metadata_validator.py} +18 -12
  11. airbyte_ops_mcp/prod_db_access/queries.py +51 -0
  12. airbyte_ops_mcp/prod_db_access/sql.py +76 -0
  13. airbyte_ops_mcp/regression_tests/connection_fetcher.py +16 -5
  14. airbyte_ops_mcp/regression_tests/models.py +2 -2
  15. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/README.md +0 -91
  16. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/bundle-schemas.js +0 -48
  17. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/generate-metadata-models.sh +0 -36
  18. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ActorDefinitionResourceRequirements.py +0 -54
  19. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AirbyteInternal.py +0 -22
  20. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AllowedHosts.py +0 -18
  21. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBreakingChanges.py +0 -65
  22. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBuildOptions.py +0 -15
  23. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorIPCOptions.py +0 -25
  24. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetadataDefinitionV0.json +0 -897
  25. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetadataDefinitionV0.py +0 -478
  26. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetrics.py +0 -24
  27. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorPackageInfo.py +0 -12
  28. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryDestinationDefinition.py +0 -407
  29. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryReleases.py +0 -406
  30. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistrySourceDefinition.py +0 -407
  31. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryV0.py +0 -413
  32. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorReleases.py +0 -98
  33. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorTestSuiteOptions.py +0 -58
  34. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/GeneratedFields.py +0 -62
  35. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/GitInfo.py +0 -31
  36. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/JobType.py +0 -23
  37. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/NormalizationDestinationDefinitionConfig.py +0 -24
  38. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RegistryOverrides.py +0 -111
  39. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ReleaseStage.py +0 -15
  40. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RemoteRegistries.py +0 -23
  41. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ResourceRequirements.py +0 -18
  42. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RolloutConfiguration.py +0 -29
  43. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/Secret.py +0 -34
  44. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SecretStore.py +0 -22
  45. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SourceFileInfo.py +0 -16
  46. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SuggestedStreams.py +0 -18
  47. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SupportLevel.py +0 -15
  48. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/TestConnections.py +0 -14
  49. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/__init__.py +0 -31
  50. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/airbyte-connector-metadata-schema.json +0 -0
  51. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ActorDefinitionResourceRequirements.yaml +0 -30
  52. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/AirbyteInternal.yaml +0 -32
  53. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/AllowedHosts.yaml +0 -13
  54. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorBreakingChanges.yaml +0 -65
  55. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorBuildOptions.yaml +0 -10
  56. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorIPCOptions.yaml +0 -29
  57. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorMetadataDefinitionV0.yaml +0 -172
  58. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorMetrics.yaml +0 -30
  59. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorPackageInfo.yaml +0 -9
  60. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryDestinationDefinition.yaml +0 -90
  61. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryReleases.yaml +0 -35
  62. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistrySourceDefinition.yaml +0 -92
  63. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryV0.yaml +0 -18
  64. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorReleases.yaml +0 -16
  65. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorTestSuiteOptions.yaml +0 -28
  66. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/GeneratedFields.yaml +0 -16
  67. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/GitInfo.yaml +0 -21
  68. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/JobType.yaml +0 -14
  69. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/NormalizationDestinationDefinitionConfig.yaml +0 -21
  70. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RegistryOverrides.yaml +0 -38
  71. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ReleaseStage.yaml +0 -11
  72. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RemoteRegistries.yaml +0 -25
  73. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ResourceRequirements.yaml +0 -16
  74. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RolloutConfiguration.yaml +0 -29
  75. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/Secret.yaml +0 -19
  76. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SecretStore.yaml +0 -16
  77. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SourceFileInfo.yaml +0 -17
  78. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SuggestedStreams.yaml +0 -13
  79. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SupportLevel.yaml +0 -10
  80. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/TestConnections.yaml +0 -17
  81. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/package-lock.json +0 -62
  82. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/package.json +0 -12
  83. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/transform.py +0 -71
  84. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/WHEEL +0 -0
  85. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/entry_points.txt +0 -0
@@ -934,3 +934,79 @@ SELECT_DESTINATION_CONNECTION_STATS = sqlalchemy.text(
     ORDER BY total_connections DESC
     """
 )
+
+# =============================================================================
+# Stream-based Connection Queries
+# =============================================================================
+
+# Query connections by source connector type that have a specific stream enabled
+# The catalog field is JSONB with structure: {"streams": [{"stream": {"name": "..."}, ...}, ...]}
+SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM = sqlalchemy.text(
+    """
+    SELECT
+        connection.id AS connection_id,
+        connection.name AS connection_name,
+        connection.source_id,
+        connection.status AS connection_status,
+        workspace.id AS workspace_id,
+        workspace.name AS workspace_name,
+        workspace.organization_id,
+        workspace.dataplane_group_id,
+        dataplane_group.name AS dataplane_name,
+        source_actor.actor_definition_id AS source_definition_id,
+        source_actor.name AS source_name
+    FROM connection
+    JOIN actor AS source_actor
+        ON connection.source_id = source_actor.id
+        AND source_actor.tombstone = false
+    JOIN workspace
+        ON source_actor.workspace_id = workspace.id
+        AND workspace.tombstone = false
+    LEFT JOIN dataplane_group
+        ON workspace.dataplane_group_id = dataplane_group.id
+    WHERE
+        source_actor.actor_definition_id = :connector_definition_id
+        AND connection.status = 'active'
+        AND EXISTS (
+            SELECT 1 FROM jsonb_array_elements(connection.catalog->'streams') AS stream
+            WHERE stream->'stream'->>'name' = :stream_name
+        )
+    LIMIT :limit
+    """
+)
+
+# Query connections by source connector type and stream, filtered by organization
+SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM_AND_ORG = sqlalchemy.text(
+    """
+    SELECT
+        connection.id AS connection_id,
+        connection.name AS connection_name,
+        connection.source_id,
+        connection.status AS connection_status,
+        workspace.id AS workspace_id,
+        workspace.name AS workspace_name,
+        workspace.organization_id,
+        workspace.dataplane_group_id,
+        dataplane_group.name AS dataplane_name,
+        source_actor.actor_definition_id AS source_definition_id,
+        source_actor.name AS source_name
+    FROM connection
+    JOIN actor AS source_actor
+        ON connection.source_id = source_actor.id
+        AND source_actor.tombstone = false
+    JOIN workspace
+        ON source_actor.workspace_id = workspace.id
+        AND workspace.tombstone = false
+    LEFT JOIN dataplane_group
+        ON workspace.dataplane_group_id = dataplane_group.id
+    WHERE
+        source_actor.actor_definition_id = :connector_definition_id
+        AND workspace.organization_id = :organization_id
+        AND connection.status = 'active'
+        AND EXISTS (
+            SELECT 1 FROM jsonb_array_elements(connection.catalog->'streams') AS stream
+            WHERE stream->'stream'->>'name' = :stream_name
+        )
+    LIMIT :limit
+    """
+)
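
The two queries added above are parameterized `sqlalchemy.text()` clauses with bound parameters (`:connector_definition_id`, `:stream_name`, `:organization_id`, `:limit`). A minimal sketch of how such a clause might be executed against an already-configured SQLAlchemy engine; the helper name and return shape are illustrative assumptions, not part of the package:

```python
# Hypothetical usage sketch (not from the package): run the new stream-based
# query with bound parameters and return plain dicts.
from sqlalchemy.engine import Engine


def find_connections_with_stream(
    engine: Engine,
    connector_definition_id: str,
    stream_name: str,
    limit: int = 100,
) -> list[dict]:
    with engine.connect() as conn:
        result = conn.execute(
            SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM,  # defined in sql.py above
            {
                "connector_definition_id": connector_definition_id,
                "stream_name": stream_name,
                "limit": limit,
            },
        )
        # Each row carries the connection/workspace/source columns selected above.
        return [dict(row._mapping) for row in result]
```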
@@ -96,7 +96,7 @@ def fetch_connection_data(
     )

     access_token = _get_access_token(client_id, client_secret)
-    api_root = constants.CLOUD_API_ROOT
+    public_api_root = constants.CLOUD_API_ROOT
     headers = {
         "Authorization": f"Bearer {access_token}",
         "Content-Type": "application/json",
@@ -104,7 +104,7 @@ def fetch_connection_data(
 
     # Get connection details
     conn_response = requests.get(
-        f"{api_root}/connections/{connection_id}",
+        f"{public_api_root}/connections/{connection_id}",
         headers=headers,
         timeout=30,
     )
@@ -120,7 +120,7 @@ def fetch_connection_data(
 
     # Get source details (includes config)
     source_response = requests.get(
-        f"{api_root}/sources/{source_id}",
+        f"{public_api_root}/sources/{source_id}",
         headers=headers,
         timeout=30,
     )
@@ -160,7 +160,9 @@ def fetch_connection_data(
     stream_names = [s["name"] for s in streams_config]

     # Build Airbyte protocol catalog format
-    catalog = _build_configured_catalog(streams_config, source_id, headers, api_root)
+    catalog = _build_configured_catalog(
+        streams_config, source_id, headers, public_api_root
+    )

     return ConnectionData(
         connection_id=connection_id,
@@ -179,12 +181,21 @@ def _build_configured_catalog(
     streams_config: list[dict[str, Any]],
     source_id: str,
     headers: dict[str, str],
-    api_root: str,
+    public_api_root: str,
 ) -> dict[str, Any]:
     """Build a configured catalog from connection stream configuration.

     This creates a catalog in the Airbyte protocol format that can be used
     with connector commands.
+
+    Args:
+        streams_config: List of stream configuration dicts from the connection.
+        source_id: The source ID.
+        headers: HTTP headers for API requests.
+        public_api_root: The Public API root URL (e.g., CLOUD_API_ROOT).
+
+    Returns:
+        A configured catalog dict in Airbyte protocol format.
     """
     # For now, create a minimal catalog structure
     # A full implementation would fetch the source's discovered catalog
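
The docstring above describes output in the Airbyte protocol's configured-catalog shape. For orientation, a minimal configured catalog for a single full-refresh stream looks roughly like the sketch below; the stream name, empty schema, and sync modes are illustrative assumptions, and the real helper derives its values from the connection's stream configuration:

```python
# Illustrative only: minimal configured-catalog-style dict in Airbyte protocol form.
minimal_catalog = {
    "streams": [
        {
            "stream": {
                "name": "users",  # assumed; taken from streams_config in practice
                "json_schema": {},  # empty when the discovered schema is not fetched
                "supported_sync_modes": ["full_refresh"],
            },
            "sync_mode": "full_refresh",
            "destination_sync_mode": "overwrite",
        }
    ]
}
```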
@@ -250,7 +250,7 @@ class ExecutionResult:
         messages_by_type: dict[str, list[str]] = defaultdict(list)
         for message in self.airbyte_messages:
             type_name = message.type.value.lower()
-            messages_by_type[type_name].append(message.json())
+            messages_by_type[type_name].append(message.model_dump_json())

         for type_name, messages in messages_by_type.items():
             file_path = airbyte_messages_dir / f"{type_name}.jsonl"
@@ -259,7 +259,7 @@ class ExecutionResult:
         # Save configured catalog (input) if available
         if self.configured_catalog is not None:
             catalog_path = output_dir / "configured_catalog.json"
-            catalog_path.write_text(self.configured_catalog.json(indent=2))
+            catalog_path.write_text(self.configured_catalog.model_dump_json(indent=2))
             self.logger.info(f"Saved configured catalog to {catalog_path}")

         self.logger.info(f"Artifacts saved to {output_dir}")
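
The `.json()` to `.model_dump_json()` replacements above track the Pydantic v2 serialization API, where `.json()` and `.dict()` were superseded by `.model_dump_json()` and `.model_dump()`. A small self-contained illustration with a hypothetical model:

```python
from pydantic import BaseModel


class Example(BaseModel):  # hypothetical model, for illustration only
    name: str
    count: int = 0


msg = Example(name="demo")
# Pydantic v1 style (deprecated in v2): msg.json(indent=2)
# Pydantic v2 equivalent, matching the change in the diff above:
print(msg.model_dump_json(indent=2))
```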
@@ -1,91 +0,0 @@
-# Connector Metadata Service Library
-
-This submodule is responsible for managing all the logic related to validating, uploading, and managing connector metadata.
-
-## Installation
-
-To use this submodule, it is recommended that you use Poetry to manage dependencies.
-
-```
-poetry install
-```
-
-### Node.js Requirement
-
-The model generation process also requires Node.js to bundle JSON schemas. Install Node.js:
-
-- On macOS: `brew install node`
-- On Ubuntu/Debian: `sudo apt-get install nodejs npm`
-- On other systems: https://nodejs.org/
-
-Node.js dependencies will be automatically installed when running `poetry run poe generate-models`.
-
-## Generating Models
-
-This submodule includes a tool for generating Python models from JSON Schema specifications. To generate the models, we use the library [datamodel-code-generator](https://github.com/koxudaxi/datamodel-code-generator). The generated models are stored in `models/generated`.
-
-To generate the models, run the following command:
-
-```bash
-poetry run poe generate-models
-
-```
-
-This will read the JSON Schema specifications in `models/src` and generate Python models in `models/generated`.
-
-## Running Tests
-
-```bash
-poetry run pytest
-```
-
-## Changelog
-
-### 0.24.1
-Update Python version requirement from 3.10 to 3.11.
-
-## Validating Metadata Files
-
-To be considered valid, a connector must have a metadata.yaml file which must conform to the [ConnectorMetadataDefinitionV0](./metadata_service/models/src/ConnectorMetadataDefinitionV0.yaml) schema, and a documentation file.
-
-The paths to both files must be passed to the validate command.
-
-```bash
-poetry run metadata_service validate tests/fixtures/metadata_validate/valid/metadata_simple.yaml tests/fixtures/doc.md
-```
-
-## Useful Commands
-
-### Replicate Production Data in your Development Bucket
-
-This will replicate all the production data to your development bucket. This is useful for testing the metadata service with real up to date data.
-
-_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_
-
-_⚠️ Warning: Its important to know that this will remove ANY files you have in your destination buckets as it calls `gsutil rsync` with `-d` enabled._
-
-```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> poetry run poe replicate-prod
-```
-
-### Copy specific connector version to your Development Bucket
-
-This will copy the specified connector version to your development bucket. This is useful for testing the metadata service with a specific version of a connector.
-
-_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_
-
-```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-preview.ea013c8" poetry run poe copy-connector-from-prod
-```
-
-### Promote Connector Version to Latest
-
-This will promote the specified connector version to the latest version in the registry. This is useful for creating a mocked registry in which a prerelease connector is treated as if it was already published.
-
-_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_
-
-_⚠️ Warning: Its important to know that this will remove ANY existing files in the latest folder that are not in the versioned folder as it calls `gsutil rsync` with `-d` enabled._
-
-```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-preview.ea013c8" poetry run poe promote-connector-to-latest
-```
@@ -1,48 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * Bundle JSON schemas using @apidevtools/json-schema-ref-parser
- * This script resolves all $ref references in the schema files and creates a single bundled schema.
- */
-
-const $RefParser = require('@apidevtools/json-schema-ref-parser');
-const fs = require('fs');
-const path = require('path');
-
-const YAML_DIR = 'metadata_service/models/src';
-const OUTPUT_DIR = 'metadata_service/models/generated';
-const ENTRY_SCHEMA = path.join(YAML_DIR, 'ConnectorMetadataDefinitionV0.yaml');
-const BUNDLE_OUTPUT = path.join(OUTPUT_DIR, 'ConnectorMetadataDefinitionV0.json');
-
-async function bundleSchemas() {
-  try {
-    console.log('📦 Bundling JSON schemas...');
-    console.log(` Entry schema: ${ENTRY_SCHEMA}`);
-    console.log(` Output: ${BUNDLE_OUTPUT}`);
-
-    if (!fs.existsSync(YAML_DIR)) {
-      console.error(`❌ Error: The yaml directory does not exist: ${YAML_DIR}`);
-      process.exit(1);
-    }
-
-    if (!fs.existsSync(OUTPUT_DIR)) {
-      fs.mkdirSync(OUTPUT_DIR, { recursive: true });
-    }
-
-    const schema = await $RefParser.bundle(ENTRY_SCHEMA, {
-      dereference: {
-        circular: 'ignore' // Handle circular references gracefully
-      }
-    });
-
-    fs.writeFileSync(BUNDLE_OUTPUT, JSON.stringify(schema, null, 2));
-
-    console.log('✅ Successfully bundled schema to', BUNDLE_OUTPUT);
-    console.log(' This bundled schema can be used for IDE validation and other tools.');
-  } catch (error) {
-    console.error('❌ Error bundling schemas:', error.message);
-    process.exit(1);
-  }
-}
-
-bundleSchemas();
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-
-YAML_DIR=metadata_service/models/src
-OUTPUT_DIR=metadata_service/models/generated
-
-# Ensure the yaml directory exists
-if [ ! -d "$YAML_DIR" ]; then
-  echo "The yaml directory does not exist: $YAML_DIR"
-  exit 1
-fi
-
-
-rm -rf "$OUTPUT_DIR"/*.py
-mkdir -p "$OUTPUT_DIR"
-
-echo "# generated by generate-metadata-models" > "$OUTPUT_DIR"/__init__.py
-
-for f in "$YAML_DIR"/*.yaml; do
-  filename_wo_ext=$(basename "$f" | cut -d . -f 1)
-  echo "from .$filename_wo_ext import *" >> "$OUTPUT_DIR"/__init__.py
-
-  datamodel-codegen \
-    --input "$YAML_DIR/$filename_wo_ext.yaml" \
-    --output "$OUTPUT_DIR/$filename_wo_ext.py" \
-    --use-title-as-name \
-    --use-double-quotes \
-    --enum-field-as-literal all \
-    --disable-timestamp
-done
-
-echo ""
-echo "Generating bundled JSON schema..."
-node bin/bundle-schemas.js
@@ -1,54 +0,0 @@
-# generated by datamodel-codegen:
-# filename: ActorDefinitionResourceRequirements.yaml
-
-from __future__ import annotations
-
-from typing import List, Optional
-
-from pydantic import BaseModel, Extra, Field
-from typing_extensions import Literal
-
-
-class ResourceRequirements(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    cpu_request: Optional[str] = None
-    cpu_limit: Optional[str] = None
-    memory_request: Optional[str] = None
-    memory_limit: Optional[str] = None
-
-
-class JobType(BaseModel):
-    __root__: Literal[
-        "get_spec",
-        "check_connection",
-        "discover_schema",
-        "sync",
-        "reset_connection",
-        "connection_updater",
-        "replicate",
-    ] = Field(
-        ...,
-        description="enum that describes the different types of jobs that the platform runs.",
-        title="JobType",
-    )
-
-
-class JobTypeResourceLimit(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    jobType: JobType
-    resourceRequirements: ResourceRequirements
-
-
-class ActorDefinitionResourceRequirements(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    default: Optional[ResourceRequirements] = Field(
-        None,
-        description="if set, these are the requirements that should be set for ALL jobs run for this actor definition.",
-    )
-    jobSpecific: Optional[List[JobTypeResourceLimit]] = None
@@ -1,22 +0,0 @@
-# generated by datamodel-codegen:
-# filename: AirbyteInternal.yaml
-
-from __future__ import annotations
-
-from typing import Optional
-
-from pydantic import BaseModel, Extra, Field
-from typing_extensions import Literal
-
-
-class AirbyteInternal(BaseModel):
-    class Config:
-        extra = Extra.allow
-
-    sl: Optional[Literal[0, 100, 200, 300]] = None
-    ql: Optional[Literal[0, 100, 200, 300, 400, 500, 600]] = None
-    isEnterprise: Optional[bool] = False
-    requireVersionIncrementsInPullRequests: Optional[bool] = Field(
-        True,
-        description="When false, version increment checks will be skipped for this connector",
-    )
@@ -1,18 +0,0 @@
-# generated by datamodel-codegen:
-# filename: AllowedHosts.yaml
-
-from __future__ import annotations
-
-from typing import List, Optional
-
-from pydantic import BaseModel, Extra, Field
-
-
-class AllowedHosts(BaseModel):
-    class Config:
-        extra = Extra.allow
-
-    hosts: Optional[List[str]] = Field(
-        None,
-        description="An array of hosts that this connector can connect to. AllowedHosts not being present for the source or destination means that access to all hosts is allowed. An empty list here means that no network access is granted.",
-    )
@@ -1,65 +0,0 @@
-# generated by datamodel-codegen:
-# filename: ConnectorBreakingChanges.yaml
-
-from __future__ import annotations
-
-from datetime import date
-from typing import Dict, List, Optional
-
-from pydantic import AnyUrl, BaseModel, Extra, Field, constr
-from typing_extensions import Literal
-
-
-class StreamBreakingChangeScope(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    scopeType: str = Field("stream", const=True)
-    impactedScopes: List[str] = Field(
-        ...,
-        description="List of streams that are impacted by the breaking change.",
-        min_items=1,
-    )
-
-
-class BreakingChangeScope(BaseModel):
-    __root__: StreamBreakingChangeScope = Field(
-        ...,
-        description="A scope that can be used to limit the impact of a breaking change.",
-    )
-
-
-class VersionBreakingChange(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    upgradeDeadline: date = Field(
-        ...,
-        description="The deadline by which to upgrade before the breaking change takes effect.",
-    )
-    message: str = Field(
-        ..., description="Descriptive message detailing the breaking change."
-    )
-    deadlineAction: Optional[Literal["auto_upgrade", "disable"]] = Field(
-        None, description="Action to do when the deadline is reached."
-    )
-    migrationDocumentationUrl: Optional[AnyUrl] = Field(
-        None,
-        description="URL to documentation on how to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version}",
-    )
-    scopedImpact: Optional[List[BreakingChangeScope]] = Field(
-        None,
-        description="List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types.",
-        min_items=1,
-    )
-
-
-class ConnectorBreakingChanges(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field(
-        ...,
-        description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.",
-        title="ConnectorBreakingChanges",
-    )
@@ -1,15 +0,0 @@
-# generated by datamodel-codegen:
-# filename: ConnectorBuildOptions.yaml
-
-from __future__ import annotations
-
-from typing import Optional
-
-from pydantic import BaseModel, Extra
-
-
-class ConnectorBuildOptions(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    baseImage: Optional[str] = None
@@ -1,25 +0,0 @@
-# generated by datamodel-codegen:
-# filename: ConnectorIPCOptions.yaml
-
-from __future__ import annotations
-
-from typing import List
-
-from pydantic import BaseModel, Extra
-from typing_extensions import Literal
-
-
-class DataChannel(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    version: str
-    supportedSerialization: List[Literal["JSONL", "PROTOBUF", "FLATBUFFERS"]]
-    supportedTransport: List[Literal["STDIO", "SOCKET"]]
-
-
-class ConnectorIPCOptions(BaseModel):
-    class Config:
-        extra = Extra.forbid
-
-    dataChannel: DataChannel