airbyte-internal-ops 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85):
  1. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/METADATA +2 -1
  2. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/RECORD +16 -85
  3. airbyte_ops_mcp/cli/cloud.py +4 -2
  4. airbyte_ops_mcp/cloud_admin/api_client.py +51 -51
  5. airbyte_ops_mcp/constants.py +58 -0
  6. airbyte_ops_mcp/{_legacy/airbyte_ci/metadata_service/docker_hub.py → docker_hub.py} +16 -10
  7. airbyte_ops_mcp/mcp/cloud_connector_versions.py +44 -23
  8. airbyte_ops_mcp/mcp/prod_db_queries.py +128 -4
  9. airbyte_ops_mcp/mcp/regression_tests.py +10 -5
  10. airbyte_ops_mcp/{_legacy/airbyte_ci/metadata_service/validators/metadata_validator.py → metadata_validator.py} +18 -12
  11. airbyte_ops_mcp/prod_db_access/queries.py +51 -0
  12. airbyte_ops_mcp/prod_db_access/sql.py +76 -0
  13. airbyte_ops_mcp/regression_tests/connection_fetcher.py +16 -5
  14. airbyte_ops_mcp/regression_tests/models.py +2 -2
  15. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/README.md +0 -91
  16. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/bundle-schemas.js +0 -48
  17. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/bin/generate-metadata-models.sh +0 -36
  18. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ActorDefinitionResourceRequirements.py +0 -54
  19. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AirbyteInternal.py +0 -22
  20. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/AllowedHosts.py +0 -18
  21. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBreakingChanges.py +0 -65
  22. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorBuildOptions.py +0 -15
  23. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorIPCOptions.py +0 -25
  24. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetadataDefinitionV0.json +0 -897
  25. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetadataDefinitionV0.py +0 -478
  26. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorMetrics.py +0 -24
  27. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorPackageInfo.py +0 -12
  28. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryDestinationDefinition.py +0 -407
  29. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryReleases.py +0 -406
  30. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistrySourceDefinition.py +0 -407
  31. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorRegistryV0.py +0 -413
  32. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorReleases.py +0 -98
  33. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ConnectorTestSuiteOptions.py +0 -58
  34. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/GeneratedFields.py +0 -62
  35. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/GitInfo.py +0 -31
  36. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/JobType.py +0 -23
  37. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/NormalizationDestinationDefinitionConfig.py +0 -24
  38. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RegistryOverrides.py +0 -111
  39. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ReleaseStage.py +0 -15
  40. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RemoteRegistries.py +0 -23
  41. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/ResourceRequirements.py +0 -18
  42. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/RolloutConfiguration.py +0 -29
  43. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/Secret.py +0 -34
  44. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SecretStore.py +0 -22
  45. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SourceFileInfo.py +0 -16
  46. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SuggestedStreams.py +0 -18
  47. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/SupportLevel.py +0 -15
  48. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/TestConnections.py +0 -14
  49. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/__init__.py +0 -31
  50. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/generated/airbyte-connector-metadata-schema.json +0 -0
  51. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ActorDefinitionResourceRequirements.yaml +0 -30
  52. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/AirbyteInternal.yaml +0 -32
  53. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/AllowedHosts.yaml +0 -13
  54. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorBreakingChanges.yaml +0 -65
  55. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorBuildOptions.yaml +0 -10
  56. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorIPCOptions.yaml +0 -29
  57. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorMetadataDefinitionV0.yaml +0 -172
  58. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorMetrics.yaml +0 -30
  59. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorPackageInfo.yaml +0 -9
  60. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryDestinationDefinition.yaml +0 -90
  61. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryReleases.yaml +0 -35
  62. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistrySourceDefinition.yaml +0 -92
  63. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorRegistryV0.yaml +0 -18
  64. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorReleases.yaml +0 -16
  65. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ConnectorTestSuiteOptions.yaml +0 -28
  66. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/GeneratedFields.yaml +0 -16
  67. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/GitInfo.yaml +0 -21
  68. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/JobType.yaml +0 -14
  69. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/NormalizationDestinationDefinitionConfig.yaml +0 -21
  70. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RegistryOverrides.yaml +0 -38
  71. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ReleaseStage.yaml +0 -11
  72. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RemoteRegistries.yaml +0 -25
  73. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/ResourceRequirements.yaml +0 -16
  74. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/RolloutConfiguration.yaml +0 -29
  75. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/Secret.yaml +0 -19
  76. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SecretStore.yaml +0 -16
  77. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SourceFileInfo.yaml +0 -17
  78. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SuggestedStreams.yaml +0 -13
  79. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/SupportLevel.yaml +0 -10
  80. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/models/TestConnections.yaml +0 -17
  81. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/package-lock.json +0 -62
  82. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/package.json +0 -12
  83. airbyte_ops_mcp/_legacy/airbyte_ci/metadata_models/transform.py +0 -71
  84. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/WHEEL +0 -0
  85. {airbyte_internal_ops-0.5.0.dist-info → airbyte_internal_ops-0.5.1.dist-info}/entry_points.txt +0 -0
@@ -122,6 +122,64 @@ class OrganizationAliasEnum(StrEnum):
122
122
  return alias_mapping[org_id]
123
123
 
124
124
 
125
+ # =============================================================================
126
+ # Workspace ID Aliases
127
+ # =============================================================================
128
+
129
+
130
+ class WorkspaceAliasEnum(StrEnum):
131
+ """Workspace ID aliases that can be used in place of UUIDs.
132
+
133
+ Each member's name is the alias (e.g., "@devin-ai-sandbox") and its value
134
+ is the actual workspace UUID. Use `WorkspaceAliasEnum.resolve()` to
135
+ resolve aliases to actual IDs.
136
+ """
137
+
138
+ DEVIN_AI_SANDBOX = "266ebdfe-0d7b-4540-9817-de7e4505ba61"
139
+ """The Devin AI sandbox workspace for testing and development.
140
+
141
+ Alias: @devin-ai-sandbox
142
+ """
143
+
144
+ @classmethod
145
+ def resolve(cls, workspace_id: str | None) -> str | None:
146
+ """Resolve a workspace ID alias to its actual UUID.
147
+
148
+ Accepts either an alias string (e.g., "@devin-ai-sandbox") or a
149
+ WorkspaceAliasEnum enum member, and returns the actual UUID.
150
+
151
+ Returns:
152
+ The resolved workspace ID (UUID), or None if input is None.
153
+ If the input doesn't start with "@", it is returned unchanged.
154
+
155
+ Raises:
156
+ PyAirbyteInputError: If the input starts with "@" but is not a recognized alias.
157
+ """
158
+ if workspace_id is None:
159
+ return None
160
+
161
+ # Handle WorkspaceAliasEnum enum members directly
162
+ if isinstance(workspace_id, cls):
163
+ return workspace_id.value
164
+
165
+ # If it doesn't look like an alias, return as-is (assume it's a UUID)
166
+ if not workspace_id.startswith("@"):
167
+ return workspace_id
168
+
169
+ # Handle alias strings or raise an error if invalid
170
+ alias_mapping = {
171
+ "@devin-ai-sandbox": cls.DEVIN_AI_SANDBOX.value,
172
+ }
173
+ if workspace_id not in alias_mapping:
174
+ raise PyAirbyteInputError(
175
+ message=f"Unknown workspace alias: {workspace_id}",
176
+ context={
177
+ "valid_aliases": list(alias_mapping.keys()),
178
+ },
179
+ )
180
+ return alias_mapping[workspace_id]
181
+
182
+
125
183
  CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS_SECRET_ID = (
126
184
  "projects/587336813068/secrets/CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS"
127
185
  )
@@ -1,6 +1,13 @@
1
1
  #
2
2
  # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
3
3
  #
4
+ """DockerHub API client with authentication, pagination, and retry support.
5
+
6
+ Provides utilities for interacting with the DockerHub API, including fetching
7
+ image tags and digests, checking image existence, and finding the latest version.
8
+ Supports authenticated requests to avoid rate limits and includes retry logic
9
+ for handling API flakiness.
10
+ """
4
11
 
5
12
  import os
6
13
  import time
@@ -95,17 +102,16 @@ def get_docker_hub_tags_and_digests(
95
102
  for result in json_response.get("results", [])
96
103
  }
97
104
  )
98
- if paginate:
99
- if next_page_url := json_response.get("next"):
100
- tags_and_digests.update(
101
- get_docker_hub_tags_and_digests(
102
- image_name,
103
- retries=retries,
104
- wait_sec=wait_sec,
105
- next_page_url=next_page_url,
106
- tags_and_digests=tags_and_digests,
107
- )
105
+ if paginate and (next_page_url := json_response.get("next")):
106
+ tags_and_digests.update(
107
+ get_docker_hub_tags_and_digests(
108
+ image_name,
109
+ retries=retries,
110
+ wait_sec=wait_sec,
111
+ next_page_url=next_page_url,
112
+ tags_and_digests=tags_and_digests,
108
113
  )
114
+ )
109
115
  return tags_and_digests
110
116
 
111
117
 
@@ -29,6 +29,7 @@ from airbyte_ops_mcp.cloud_admin.models import (
29
29
  VersionOverrideOperationResult,
30
30
  WorkspaceVersionOverrideResult,
31
31
  )
32
+ from airbyte_ops_mcp.constants import WorkspaceAliasEnum
32
33
  from airbyte_ops_mcp.github_api import (
33
34
  GitHubAPIError,
34
35
  GitHubCommentParseError,
@@ -97,8 +98,11 @@ def _resolve_cloud_auth() -> _ResolvedCloudAuth:
97
98
  )
98
99
  def get_cloud_connector_version(
99
100
  workspace_id: Annotated[
100
- str,
101
- Field(description="The Airbyte Cloud workspace ID."),
101
+ str | WorkspaceAliasEnum,
102
+ Field(
103
+ description="The Airbyte Cloud workspace ID (UUID) or alias. "
104
+ "Accepts '@devin-ai-sandbox' as an alias for the Devin AI sandbox workspace."
105
+ ),
102
106
  ],
103
107
  actor_id: Annotated[
104
108
  str, "The ID of the deployed connector (source or destination)"
@@ -118,6 +122,9 @@ def get_cloud_connector_version(
118
122
  2. HTTP headers: X-Airbyte-Cloud-Client-Id, X-Airbyte-Cloud-Client-Secret
119
123
  3. Environment variables: AIRBYTE_CLOUD_CLIENT_ID, AIRBYTE_CLOUD_CLIENT_SECRET
120
124
  """
125
+ # Resolve workspace ID alias
126
+ resolved_workspace_id = WorkspaceAliasEnum.resolve(workspace_id)
127
+
121
128
  try:
122
129
  auth = _resolve_cloud_auth()
123
130
 
@@ -127,11 +134,11 @@ def get_cloud_connector_version(
127
134
  version_data = api_client.get_connector_version(
128
135
  connector_id=actor_id,
129
136
  connector_type=actor_type,
130
- api_root=constants.CLOUD_CONFIG_API_ROOT, # Use Config API, not public API
137
+ config_api_root=constants.CLOUD_CONFIG_API_ROOT,
131
138
  client_id=auth.client_id,
132
139
  client_secret=auth.client_secret,
133
140
  bearer_token=auth.bearer_token,
134
- workspace_id=workspace_id,
141
+ workspace_id=resolved_workspace_id,
135
142
  )
136
143
 
137
144
  # Determine if version is pinned from scoped config context (more reliable)
@@ -172,8 +179,11 @@ def get_cloud_connector_version(
172
179
  )
173
180
  def set_cloud_connector_version_override(
174
181
  workspace_id: Annotated[
175
- str,
176
- Field(description="The Airbyte Cloud workspace ID."),
182
+ str | WorkspaceAliasEnum,
183
+ Field(
184
+ description="The Airbyte Cloud workspace ID (UUID) or alias. "
185
+ "Accepts '@devin-ai-sandbox' as an alias for the Devin AI sandbox workspace."
186
+ ),
177
187
  ],
178
188
  actor_id: Annotated[
179
189
  str, "The ID of the deployed connector (source or destination)"
@@ -264,6 +274,10 @@ def set_cloud_connector_version_override(
264
274
  2. HTTP headers: X-Airbyte-Cloud-Client-Id, X-Airbyte-Cloud-Client-Secret
265
275
  3. Environment variables: AIRBYTE_CLOUD_CLIENT_ID, AIRBYTE_CLOUD_CLIENT_SECRET
266
276
  """
277
+ # Resolve workspace ID alias (workspace_id is required, so resolved value is never None)
278
+ resolved_workspace_id = WorkspaceAliasEnum.resolve(workspace_id)
279
+ assert resolved_workspace_id is not None # Type narrowing: workspace_id is required
280
+
267
281
  # Validate admin access (check env var flag)
268
282
  try:
269
283
  require_internal_admin_flag_only()
@@ -351,7 +365,7 @@ def set_cloud_connector_version_override(
351
365
  current_version_data = api_client.get_connector_version(
352
366
  connector_id=actor_id,
353
367
  connector_type=actor_type,
354
- api_root=constants.CLOUD_CONFIG_API_ROOT, # Use Config API
368
+ config_api_root=constants.CLOUD_CONFIG_API_ROOT,
355
369
  client_id=auth.client_id,
356
370
  client_secret=auth.client_secret,
357
371
  bearer_token=auth.bearer_token,
@@ -372,10 +386,10 @@ def set_cloud_connector_version_override(
372
386
  result = api_client.set_connector_version_override(
373
387
  connector_id=actor_id,
374
388
  connector_type=actor_type,
375
- api_root=constants.CLOUD_CONFIG_API_ROOT, # Use Config API
389
+ config_api_root=constants.CLOUD_CONFIG_API_ROOT,
376
390
  client_id=auth.client_id,
377
391
  client_secret=auth.client_secret,
378
- workspace_id=workspace_id,
392
+ workspace_id=resolved_workspace_id,
379
393
  version=version,
380
394
  unset=unset,
381
395
  override_reason=enhanced_override_reason,
@@ -388,7 +402,7 @@ def set_cloud_connector_version_override(
388
402
  updated_version_data = api_client.get_connector_version(
389
403
  connector_id=actor_id,
390
404
  connector_type=actor_type,
391
- api_root=constants.CLOUD_CONFIG_API_ROOT, # Use Config API
405
+ config_api_root=constants.CLOUD_CONFIG_API_ROOT,
392
406
  client_id=auth.client_id,
393
407
  client_secret=auth.client_secret,
394
408
  bearer_token=auth.bearer_token,
@@ -443,8 +457,11 @@ def set_cloud_connector_version_override(
443
457
  )
444
458
  def set_workspace_connector_version_override(
445
459
  workspace_id: Annotated[
446
- str,
447
- Field(description="The Airbyte Cloud workspace ID."),
460
+ str | WorkspaceAliasEnum,
461
+ Field(
462
+ description="The Airbyte Cloud workspace ID (UUID) or alias. "
463
+ "Accepts '@devin-ai-sandbox' as an alias for the Devin AI sandbox workspace."
464
+ ),
448
465
  ],
449
466
  connector_name: Annotated[
450
467
  str,
@@ -527,6 +544,10 @@ def set_workspace_connector_version_override(
527
544
  You must specify EXACTLY ONE of `version` OR `unset=True`, but not both.
528
545
  When setting a version, `override_reason` is required.
529
546
  """
547
+ # Resolve workspace ID alias (workspace_id is required, so resolved value is never None)
548
+ resolved_workspace_id = WorkspaceAliasEnum.resolve(workspace_id)
549
+ assert resolved_workspace_id is not None # Type narrowing: workspace_id is required
550
+
530
551
  # Validate admin access (check env var flag)
531
552
  try:
532
553
  require_internal_admin_flag_only()
@@ -534,7 +555,7 @@ def set_workspace_connector_version_override(
534
555
  return WorkspaceVersionOverrideResult(
535
556
  success=False,
536
557
  message=f"Admin authentication failed: {e}",
537
- workspace_id=workspace_id,
558
+ workspace_id=resolved_workspace_id,
538
559
  connector_name=connector_name,
539
560
  connector_type=connector_type,
540
561
  )
@@ -568,7 +589,7 @@ def set_workspace_connector_version_override(
568
589
  return WorkspaceVersionOverrideResult(
569
590
  success=False,
570
591
  message="Authorization validation failed: " + "; ".join(validation_errors),
571
- workspace_id=workspace_id,
592
+ workspace_id=resolved_workspace_id,
572
593
  connector_name=connector_name,
573
594
  connector_type=connector_type,
574
595
  )
@@ -580,7 +601,7 @@ def set_workspace_connector_version_override(
580
601
  return WorkspaceVersionOverrideResult(
581
602
  success=False,
582
603
  message=f"Failed to parse approval comment URL: {e}",
583
- workspace_id=workspace_id,
604
+ workspace_id=resolved_workspace_id,
584
605
  connector_name=connector_name,
585
606
  connector_type=connector_type,
586
607
  )
@@ -588,7 +609,7 @@ def set_workspace_connector_version_override(
588
609
  return WorkspaceVersionOverrideResult(
589
610
  success=False,
590
611
  message=f"Failed to fetch approval comment from GitHub: {e}",
591
- workspace_id=workspace_id,
612
+ workspace_id=resolved_workspace_id,
592
613
  connector_name=connector_name,
593
614
  connector_type=connector_type,
594
615
  )
@@ -596,7 +617,7 @@ def set_workspace_connector_version_override(
596
617
  return WorkspaceVersionOverrideResult(
597
618
  success=False,
598
619
  message=str(e),
599
- workspace_id=workspace_id,
620
+ workspace_id=resolved_workspace_id,
600
621
  connector_name=connector_name,
601
622
  connector_type=connector_type,
602
623
  )
@@ -616,10 +637,10 @@ def set_workspace_connector_version_override(
616
637
  auth = _resolve_cloud_auth()
617
638
 
618
639
  result = api_client.set_workspace_connector_version_override(
619
- workspace_id=workspace_id,
640
+ workspace_id=resolved_workspace_id,
620
641
  connector_name=connector_name,
621
642
  connector_type=connector_type,
622
- api_root=constants.CLOUD_CONFIG_API_ROOT,
643
+ config_api_root=constants.CLOUD_CONFIG_API_ROOT,
623
644
  client_id=auth.client_id,
624
645
  client_secret=auth.client_secret,
625
646
  bearer_token=auth.bearer_token,
@@ -641,7 +662,7 @@ def set_workspace_connector_version_override(
641
662
  return WorkspaceVersionOverrideResult(
642
663
  success=True,
643
664
  message=message,
644
- workspace_id=workspace_id,
665
+ workspace_id=resolved_workspace_id,
645
666
  connector_name=connector_name,
646
667
  connector_type=connector_type,
647
668
  version=version if not unset else None,
@@ -651,7 +672,7 @@ def set_workspace_connector_version_override(
651
672
  return WorkspaceVersionOverrideResult(
652
673
  success=False,
653
674
  message=str(e),
654
- workspace_id=workspace_id,
675
+ workspace_id=resolved_workspace_id,
655
676
  connector_name=connector_name,
656
677
  connector_type=connector_type,
657
678
  )
@@ -659,7 +680,7 @@ def set_workspace_connector_version_override(
659
680
  return WorkspaceVersionOverrideResult(
660
681
  success=False,
661
682
  message=f"Authentication failed: {e}",
662
- workspace_id=workspace_id,
683
+ workspace_id=resolved_workspace_id,
663
684
  connector_name=connector_name,
664
685
  connector_type=connector_type,
665
686
  )
@@ -848,7 +869,7 @@ def set_organization_connector_version_override(
848
869
  organization_id=organization_id,
849
870
  connector_name=connector_name,
850
871
  connector_type=connector_type,
851
- api_root=constants.CLOUD_CONFIG_API_ROOT,
872
+ config_api_root=constants.CLOUD_CONFIG_API_ROOT,
852
873
  client_id=auth.client_id,
853
874
  client_secret=auth.client_secret,
854
875
  bearer_token=auth.bearer_token,
@@ -16,12 +16,13 @@ from airbyte.exceptions import PyAirbyteInputError
16
16
  from fastmcp import FastMCP
17
17
  from pydantic import BaseModel, Field
18
18
 
19
- from airbyte_ops_mcp.constants import OrganizationAliasEnum
19
+ from airbyte_ops_mcp.constants import OrganizationAliasEnum, WorkspaceAliasEnum
20
20
  from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
21
21
  from airbyte_ops_mcp.prod_db_access.queries import (
22
22
  query_actors_pinned_to_version,
23
23
  query_connections_by_connector,
24
24
  query_connections_by_destination_connector,
25
+ query_connections_by_stream,
25
26
  query_connector_versions,
26
27
  query_dataplanes_list,
27
28
  query_destination_connection_stats,
@@ -299,8 +300,11 @@ def query_prod_dataplanes() -> list[dict[str, Any]]:
299
300
  )
300
301
  def query_prod_workspace_info(
301
302
  workspace_id: Annotated[
302
- str,
303
- Field(description="Workspace UUID to look up"),
303
+ str | WorkspaceAliasEnum,
304
+ Field(
305
+ description="Workspace UUID or alias to look up. "
306
+ "Accepts '@devin-ai-sandbox' as an alias for the Devin AI sandbox workspace."
307
+ ),
304
308
  ],
305
309
  ) -> dict[str, Any] | None:
306
310
  """Get workspace information including dataplane group.
@@ -313,7 +317,11 @@ def query_prod_workspace_info(
313
317
  dataplane_group_id, dataplane_name, created_at, tombstone
314
318
  Or None if workspace not found.
315
319
  """
316
- return query_workspace_info(workspace_id)
320
+ # Resolve workspace ID alias (workspace_id is required, so resolved value is never None)
321
+ resolved_workspace_id = WorkspaceAliasEnum.resolve(workspace_id)
322
+ assert resolved_workspace_id is not None # Type narrowing: workspace_id is required
323
+
324
+ return query_workspace_info(resolved_workspace_id)
317
325
 
318
326
 
319
327
  @mcp_tool(
@@ -868,6 +876,122 @@ def query_prod_connections_by_connector(
868
876
  ]
869
877
 
870
878
 
879
@mcp_tool(
    read_only=True,
    idempotent=True,
    open_world=True,
)
def query_prod_connections_by_stream(
    stream_name: Annotated[
        str,
        Field(
            description=(
                "Name of the stream to search for in connection catalogs. "
                "This must match the exact stream name as configured in the connection. "
                "Examples: 'global_exclusions', 'campaigns', 'users'."
            ),
        ),
    ],
    source_definition_id: Annotated[
        str | None,
        Field(
            description=(
                "Source connector definition ID (UUID) to search for. "
                "Provide this OR source_canonical_name (exactly one required). "
                "Example: 'afa734e4-3571-11ec-991a-1e0031268139' for YouTube Analytics."
            ),
            default=None,
        ),
    ],
    source_canonical_name: Annotated[
        str | None,
        Field(
            description=(
                "Canonical source connector name to search for. "
                "Provide this OR source_definition_id (exactly one required). "
                "Examples: 'source-klaviyo', 'Klaviyo', 'source-youtube-analytics'."
            ),
            default=None,
        ),
    ],
    organization_id: Annotated[
        str | OrganizationAliasEnum | None,
        Field(
            description=(
                "Optional organization ID (UUID) or alias to filter results. "
                "If provided, only connections in this organization will be returned. "
                "Accepts '@airbyte-internal' as an alias for the Airbyte internal org."
            ),
            default=None,
        ),
    ],
    limit: Annotated[
        int,
        Field(description="Maximum number of results (default: 100)", default=100),
    ],
) -> list[dict[str, Any]]:
    """Find connections that have a specific stream enabled in their catalog.

    This tool searches the connection's configured catalog (JSONB) for streams
    matching the specified name. It's particularly useful when validating
    connector fixes that affect specific streams - you can quickly find
    customer connections that use the affected stream.

    Use cases:
    - Finding connections with a specific stream enabled for regression testing
    - Validating connector fixes that affect particular streams
    - Identifying which customers use rarely-enabled streams

    Returns a list of connection dicts with workspace context and clickable Cloud UI URLs.
    """
    # Exactly one selector must be given: raise when both are None or both are set.
    if (source_definition_id is None) == (source_canonical_name is None):
        raise PyAirbyteInputError(
            message=(
                "Exactly one of source_definition_id or source_canonical_name "
                "must be provided."
            ),
        )

    # Translate a canonical name to its definition UUID when that path was chosen.
    definition_id: str
    if source_canonical_name:
        definition_id = _resolve_canonical_name_to_definition_id(
            canonical_name=source_canonical_name,
        )
    else:
        assert source_definition_id is not None
        definition_id = source_definition_id

    # Expand org aliases (e.g. '@airbyte-internal') into the real UUID, if any.
    org_filter = OrganizationAliasEnum.resolve(organization_id)

    # Shape each DB row into a flat dict with a clickable Cloud UI link.
    results: list[dict[str, Any]] = []
    for record in query_connections_by_stream(
        connector_definition_id=definition_id,
        stream_name=stream_name,
        organization_id=org_filter,
        limit=limit,
    ):
        results.append(
            {
                "organization_id": str(record.get("organization_id", "")),
                "workspace_id": str(record["workspace_id"]),
                "workspace_name": record.get("workspace_name", ""),
                "connection_id": str(record["connection_id"]),
                "connection_name": record.get("connection_name", ""),
                "connection_status": record.get("connection_status", ""),
                "connection_url": (
                    f"{CLOUD_UI_BASE_URL}/workspaces/{record['workspace_id']}"
                    f"/connections/{record['connection_id']}/status"
                ),
                "source_id": str(record["source_id"]),
                "source_name": record.get("source_name", ""),
                "source_definition_id": str(record["source_definition_id"]),
                "dataplane_group_id": str(record.get("dataplane_group_id", "")),
                "dataplane_name": record.get("dataplane_name", ""),
            }
        )
    return results
994
+
871
995
  @mcp_tool(
872
996
  read_only=True,
873
997
  idempotent=True,
@@ -31,6 +31,7 @@ from airbyte.exceptions import (
31
31
  from fastmcp import FastMCP
32
32
  from pydantic import BaseModel, Field
33
33
 
34
+ from airbyte_ops_mcp.constants import WorkspaceAliasEnum
34
35
  from airbyte_ops_mcp.github_actions import trigger_workflow_dispatch
35
36
  from airbyte_ops_mcp.github_api import GITHUB_API_BASE, resolve_github_token
36
37
  from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
@@ -341,9 +342,10 @@ def run_regression_tests(
341
342
  "Ignored if skip_compare=True.",
342
343
  ] = None,
343
344
  workspace_id: Annotated[
344
- str | None,
345
- "Optional Airbyte Cloud workspace ID. If provided with connection_id, validates "
346
- "that the connection belongs to this workspace before triggering tests.",
345
+ str | WorkspaceAliasEnum | None,
346
+ "Optional Airbyte Cloud workspace ID (UUID) or alias. If provided with connection_id, "
347
+ "validates that the connection belongs to this workspace before triggering tests. "
348
+ "Accepts '@devin-ai-sandbox' as an alias for the Devin AI sandbox workspace.",
347
349
  ] = None,
348
350
  ) -> RunRegressionTestsResponse:
349
351
  """Start a regression test run via GitHub Actions workflow.
@@ -368,6 +370,9 @@ def run_regression_tests(
368
370
  Requires GITHUB_CI_WORKFLOW_TRIGGER_PAT or GITHUB_TOKEN environment variable
369
371
  with 'actions:write' permission.
370
372
  """
373
+ # Resolve workspace ID alias
374
+ resolved_workspace_id = WorkspaceAliasEnum.resolve(workspace_id)
375
+
371
376
  # Generate a unique run ID for tracking
372
377
  run_id = str(uuid.uuid4())
373
378
 
@@ -383,9 +388,9 @@ def run_regression_tests(
383
388
  )
384
389
 
385
390
  # Validate workspace membership if workspace_id and connection_id are provided
386
- if workspace_id and connection_id:
391
+ if resolved_workspace_id and connection_id:
387
392
  try:
388
- validate_connection_workspace(connection_id, workspace_id)
393
+ validate_connection_workspace(connection_id, resolved_workspace_id)
389
394
  except (
390
395
  ValueError,
391
396
  AirbyteWorkspaceMismatchError,
@@ -1,6 +1,12 @@
1
1
  #
2
2
  # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
3
3
  #
4
+ """Connector metadata validation for Airbyte connectors.
5
+
6
+ Provides validation functions for connector metadata.yaml files, including checks
7
+ for Docker image availability, version constraints, breaking changes, and registry
8
+ configuration. Uses Pydantic models from airbyte-connector-models for schema validation.
9
+ """
4
10
 
5
11
  import logging
6
12
  import pathlib
@@ -9,14 +15,13 @@ from typing import Callable, List, Optional, Tuple, Union
9
15
 
10
16
  import semver
11
17
  import yaml
18
+ from airbyte_connector_models.metadata.v0.connector_metadata_definition_v0 import (
19
+ ConnectorMetadataDefinitionV0,
20
+ )
12
21
  from pydantic import ValidationError
13
22
  from pydash.objects import get
14
23
 
15
- from airbyte_ops_mcp._legacy.airbyte_ci.metadata_models.generated.ConnectorMetadataDefinitionV0 import (
16
- ConnectorMetadataDefinitionV0,
17
- ) # type: ignore
18
-
19
- from ..docker_hub import get_latest_version_on_dockerhub, is_image_on_docker_hub
24
+ from .docker_hub import get_latest_version_on_dockerhub, is_image_on_docker_hub
20
25
 
21
26
  logger = logging.getLogger(__name__)
22
27
 
@@ -42,7 +47,7 @@ def validate_metadata_images_in_dockerhub(
42
47
  if validator_opts.disable_dockerhub_checks:
43
48
  return True, None
44
49
 
45
- metadata_definition_dict = metadata_definition.dict()
50
+ metadata_definition_dict = metadata_definition.model_dump(exclude_unset=True)
46
51
  base_docker_image = get(metadata_definition_dict, "data.dockerRepository")
47
52
  base_docker_version = get(metadata_definition_dict, "data.dockerImageTag")
48
53
 
@@ -111,7 +116,7 @@ def validate_at_least_one_language_tag(
111
116
  ) -> ValidationResult:
112
117
  """Ensure that there is at least one tag in the data.tags field that matches language:<LANG>."""
113
118
  tags = get(metadata_definition, "data.tags", [])
114
- if not any([tag.startswith("language:") for tag in tags]):
119
+ if not any(tag.startswith("language:") for tag in tags):
115
120
  return False, "At least one tag must be of the form language:<LANG>"
116
121
 
117
122
  return True, None
@@ -145,7 +150,7 @@ def validate_major_version_bump_has_breaking_change_entry(
145
150
  _validator_opts: ValidatorOptions,
146
151
  ) -> ValidationResult:
147
152
  """Ensure that if the major version is incremented, there is a breaking change entry for that version."""
148
- metadata_definition_dict = metadata_definition.dict()
153
+ metadata_definition_dict = metadata_definition.model_dump(exclude_unset=True)
149
154
  image_tag = get(metadata_definition_dict, "data.dockerImageTag")
150
155
 
151
156
  if not is_major_version(image_tag):
@@ -169,7 +174,7 @@ def validate_major_version_bump_has_breaking_change_entry(
169
174
  )
170
175
 
171
176
  breaking_changes = get(metadata_definition_dict, "data.releases.breakingChanges")
172
- if breaking_changes is None or image_tag not in breaking_changes.keys():
177
+ if breaking_changes is None or image_tag not in breaking_changes:
173
178
  return (
174
179
  False,
175
180
  f"Major version {image_tag} needs a 'releases.breakingChanges' entry indicating what changed.",
@@ -194,7 +199,7 @@ def validate_metadata_base_images_in_dockerhub(
194
199
  if validator_opts.disable_dockerhub_checks:
195
200
  return True, None
196
201
 
197
- metadata_definition_dict = metadata_definition.dict()
202
+ metadata_definition_dict = metadata_definition.model_dump(exclude_unset=True)
198
203
 
199
204
  image_address = get(
200
205
  metadata_definition_dict, "data.connectorBuildOptions.baseImage"
@@ -365,7 +370,8 @@ PRE_UPLOAD_VALIDATORS = [
365
370
  ]
366
371
 
367
372
 
368
- POST_UPLOAD_VALIDATORS = PRE_UPLOAD_VALIDATORS + [
373
+ POST_UPLOAD_VALIDATORS = [
374
+ *PRE_UPLOAD_VALIDATORS,
369
375
  validate_metadata_images_in_dockerhub,
370
376
  ]
371
377
 
@@ -384,7 +390,7 @@ def validate_and_load(
384
390
  try:
385
391
  # Load the metadata file - this implicitly runs jsonschema validation
386
392
  metadata = yaml.safe_load(file_path.read_text())
387
- metadata_model = ConnectorMetadataDefinitionV0.parse_obj(metadata)
393
+ metadata_model = ConnectorMetadataDefinitionV0.model_validate(metadata)
388
394
  except ValidationError as e:
389
395
  return None, f"Validation error: {e}"
390
396
 
@@ -24,6 +24,8 @@ from airbyte_ops_mcp.prod_db_access.sql import (
24
24
  SELECT_CONNECTIONS_BY_CONNECTOR_AND_ORG,
25
25
  SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR,
26
26
  SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG,
27
+ SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM,
28
+ SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM_AND_ORG,
27
29
  SELECT_CONNECTOR_VERSIONS,
28
30
  SELECT_DATAPLANES_LIST,
29
31
  SELECT_DESTINATION_CONNECTION_STATS,
@@ -565,3 +567,52 @@ def query_destination_connection_stats(
565
567
  query_name="SELECT_DESTINATION_CONNECTION_STATS",
566
568
  gsm_client=gsm_client,
567
569
  )
570
+
571
+
572
def query_connections_by_stream(
    connector_definition_id: str,
    stream_name: str,
    organization_id: str | None = None,
    limit: int = 100,
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
) -> list[dict[str, Any]]:
    """Query connections by source connector type that have a specific stream enabled.

    This searches the connection's configured catalog (JSONB) for streams matching
    the specified name. Useful for finding connections that use a particular stream
    when validating connector fixes that affect specific streams.

    Args:
        connector_definition_id: Source connector definition UUID to filter by
        stream_name: Name of the stream to search for in the connection's catalog
        organization_id: Optional organization UUID to filter results by
        limit: Maximum number of results (default: 100)
        gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.

    Returns:
        List of connection records with workspace and dataplane info
    """
    # Both query variants take the same base parameters; the org-scoped variant
    # additionally filters by organization_id. Build the shared dict once instead
    # of duplicating the whole _run_sql_query call per branch.
    parameters: dict[str, Any] = {
        "connector_definition_id": connector_definition_id,
        "stream_name": stream_name,
        "limit": limit,
    }
    if organization_id is None:
        query = SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM
        query_name = "SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM"
    else:
        parameters["organization_id"] = organization_id
        query = SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM_AND_ORG
        query_name = "SELECT_CONNECTIONS_BY_SOURCE_CONNECTOR_AND_STREAM_AND_ORG"

    return _run_sql_query(
        query,
        parameters=parameters,
        query_name=query_name,
        gsm_client=gsm_client,
    )