airbyte-internal-ops 0.2.1__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: airbyte-internal-ops
- Version: 0.2.1
+ Version: 0.2.2
  Summary: MCP and API interfaces that let the agents do the admin work
  Author-email: Aaron Steers <aj@airbyte.io>
  Keywords: admin,airbyte,api,mcp
@@ -121,7 +121,23 @@ Some tools (like `list_org_connections_by_source_type_db`) require access to the
  gcloud auth application-default login
  ```

- 2. Start Cloud SQL Proxy:
+ 2. Start Cloud SQL Proxy using one of the following methods:
+
+ **Option A: Using the CLI (Recommended)**
+
+ Pre-install the CLI tool:
+ ```bash
+ uv tool install airbyte-internal-ops
+ airbyte-ops cloud db start-proxy --port=15432
+ ```
+
+ Or use as a single-step command:
+ ```bash
+ uvx --from=airbyte-internal-ops airbyte-ops cloud db start-proxy --port=15432
+ ```
+
+ **Option B: Manual startup**
+
  ```bash
  cloud-sql-proxy prod-ab-cloud-proj:us-west3:prod-pgsql-replica --port=15432
  ```
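
Once the proxy is listening (by either option), any Postgres client can reach the replica through `localhost:15432`. A minimal sketch, assuming SQLAlchemy with the `pg8000` driver the package itself uses; the username, password, and database name are placeholders, not real values (the package resolves real credentials from GCP Secret Manager):

```python
# Hypothetical connectivity check through the Cloud SQL Proxy started above.
# Credentials and database name are placeholders, not real values.
import sqlalchemy

engine = sqlalchemy.create_engine(
    "postgresql+pg8000://REPLICA_USER:REPLICA_PASSWORD@localhost:15432/airbyte"
)
with engine.connect() as conn:
    # A trivial query to confirm the proxy and credentials work end to end.
    print(conn.execute(sqlalchemy.text("SELECT 1")).scalar_one())
```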
@@ -1,6 +1,6 @@
  airbyte_ops_mcp/__init__.py,sha256=tuzdlMkfnWBnsri5KGHM2M_xuNnzFk2u_aR79mmN7Yg,772
  airbyte_ops_mcp/_annotations.py,sha256=MO-SBDnbykxxHDESG7d8rviZZ4WlZgJKv0a8eBqcEzQ,1757
- airbyte_ops_mcp/constants.py,sha256=THmvIjU3pb7kpNjn7TpRWD86gtDLmtlQwYuFnaQp_rg,3095
+ airbyte_ops_mcp/constants.py,sha256=GeZ2_WWluMSrGkyqGvqUVFCy-5PD-lyzZbQ7eO-vyUo,5192
  airbyte_ops_mcp/gcp_auth.py,sha256=5k-k145ZoYhHLjyDES8nrA8f8BBihRI0ykrdD1IcfOs,3599
  airbyte_ops_mcp/github_actions.py,sha256=hcwwew98r0yetWsM7Qmdar3ATLBJQGIn3fJfJ_n59So,8599
  airbyte_ops_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -375,18 +375,18 @@ airbyte_ops_mcp/mcp/connector_qa.py,sha256=aImpqdnqBPDrz10BS0owsV4kuIU2XdalzgbaG
  airbyte_ops_mcp/mcp/github.py,sha256=h3M3VJrq09y_F9ueQVCq3bUbVBNFuTNKprHtGU_ttio,8045
  airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=PiERpt8abo20Gz4CfXhrDNlVM4o4FOt5sweZJND2a0s,5314
  airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
- airbyte_ops_mcp/mcp/prerelease.py,sha256=LHLaSd8q0l7boAsVqTXOjFGDxAGsPZdtL3kj5_IOTEE,8852
- airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=_eNMFM1CBQ4OM_daf2iq-L7lvlytqbI_6v48m5vJdSQ,15632
+ airbyte_ops_mcp/mcp/prerelease.py,sha256=6G4zMo0KeCIYJPEIryHKHoZUiBHQMagPJU-uw-IzK94,8939
+ airbyte_ops_mcp/mcp/prod_db_queries.py,sha256=FfGoq3aEj6ZUT4ysBIs1w7LzzwBeRXTaRvPGEx62RzI,25474
  airbyte_ops_mcp/mcp/prompts.py,sha256=mJld9mdPECXYZffWXGSvNs4Xevx3rxqUGNlzGKVC2_s,1599
  airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
- airbyte_ops_mcp/mcp/regression_tests.py,sha256=b-EBYPudZ3e0dTCKP4XduHTwe-eOTxsvD5WguMlUvtQ,17653
+ airbyte_ops_mcp/mcp/regression_tests.py,sha256=S1h-5S5gcZA4WEtIZyAQ836hd04tjSRRqMiYMx0S93g,16079
  airbyte_ops_mcp/mcp/server.py,sha256=wHRjJU0oJmwvQ0JJma_aIQlI83EPBckzmxwtI-OE_XU,5219
  airbyte_ops_mcp/mcp/server_info.py,sha256=Yi4B1auW64QZGBDas5mro_vwTjvrP785TFNSBP7GhRg,2361
  airbyte_ops_mcp/prod_db_access/__init__.py,sha256=5pxouMPY1beyWlB0UwPnbaLTKTHqU6X82rbbgKY2vYU,1069
  airbyte_ops_mcp/prod_db_access/db_engine.py,sha256=VUqEWZtharJUR-Cri_pMwtGh1C4Neu4s195mbEXlm-w,9190
  airbyte_ops_mcp/prod_db_access/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- airbyte_ops_mcp/prod_db_access/queries.py,sha256=txeqRPbovgqbk7lu8ttiZXgA77abFzzeO3hql2o8c44,11228
- airbyte_ops_mcp/prod_db_access/sql.py,sha256=P6UbIHafg3ibs901DPlJxLilxsc-RrCPvnyzSwP-fMw,16300
+ airbyte_ops_mcp/prod_db_access/queries.py,sha256=BBPAQEfcing4G0Q9PEmI8C_9kN26sZc65ZGXd9WuFSw,14257
+ airbyte_ops_mcp/prod_db_access/sql.py,sha256=hTbPY4r_rrtJ34B5eVpwyuBMLotyuP--UTv0vl3ZwBw,19432
  airbyte_ops_mcp/registry/__init__.py,sha256=iEaPlt9GrnlaLbc__98TguNeZG8wuQu7S-_2QkhHcbA,858
  airbyte_ops_mcp/registry/models.py,sha256=B4L4TKr52wo0xs0CqvCBrpowqjShzVnZ5eTr2-EyhNs,2346
  airbyte_ops_mcp/registry/publish.py,sha256=VoPxsM2_0zJ829orzCRN-kjgcJtuBNyXgW4I9J680ro,12717
@@ -410,7 +410,7 @@ airbyte_ops_mcp/regression_tests/regression/comparators.py,sha256=MJkLZEKHivgrG0
  airbyte_ops_mcp/regression_tests/validation/__init__.py,sha256=MBEwGOoNuqT4_oCahtoK62OKWIjUCfWa7vZTxNj_0Ek,1532
  airbyte_ops_mcp/regression_tests/validation/catalog_validators.py,sha256=jqqVAMOk0mtdPgwu4d0hA0ZEjtsNh5gapvGydRv3_qk,12553
  airbyte_ops_mcp/regression_tests/validation/record_validators.py,sha256=RjauAhKWNwxMBTu0eNS2hMFNQVs5CLbQU51kp6FOVDk,7432
- airbyte_internal_ops-0.2.1.dist-info/METADATA,sha256=c4vegfJA-2IWLkwRY_uVHjjUR79hMLfi8ad-RQrn4Pw,5282
- airbyte_internal_ops-0.2.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- airbyte_internal_ops-0.2.1.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
- airbyte_internal_ops-0.2.1.dist-info/RECORD,,
+ airbyte_internal_ops-0.2.2.dist-info/METADATA,sha256=1ah9ZGR3rZ1676mhAJzJvrbfqOQsV_fuedRitpJG9h8,5679
+ airbyte_internal_ops-0.2.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ airbyte_internal_ops-0.2.2.dist-info/entry_points.txt,sha256=WxP0l7bRFss4Cr5uQqVj9mTEKwnRKouNuphXQF0lotA,171
+ airbyte_internal_ops-0.2.2.dist-info/RECORD,,
@@ -3,7 +3,9 @@

  from __future__ import annotations

- from enum import Enum
+ from enum import Enum, StrEnum
+
+ from airbyte.exceptions import PyAirbyteInputError

  MCP_SERVER_NAME = "airbyte-internal-ops"
  """The name of the MCP server."""
@@ -59,6 +61,64 @@ CLOUD_REGISTRY_URL = (
  )
  """URL for the Airbyte Cloud connector registry."""

+ # =============================================================================
+ # Organization ID Aliases
+ # =============================================================================
+
+
+ class OrganizationAliasEnum(StrEnum):
+     """Organization ID aliases that can be used in place of UUIDs.
+
+     Each member's name is the alias (e.g., "@airbyte-internal") and its value
+     is the actual organization UUID. Use `OrganizationAliasEnum.resolve()` to
+     resolve aliases to actual IDs.
+     """
+
+     AIRBYTE_INTERNAL = "664c690e-5263-49ba-b01f-4a6759b3330a"
+     """The Airbyte internal organization for testing and internal operations.
+
+     Alias: @airbyte-internal
+     """
+
+     @classmethod
+     def resolve(cls, org_id: str | None) -> str | None:
+         """Resolve an organization ID alias to its actual UUID.
+
+         Accepts either an alias string (e.g., "@airbyte-internal") or an
+         OrganizationAliasEnum enum member, and returns the actual UUID.
+
+         Returns:
+             The resolved organization ID (UUID), or None if input is None.
+             If the input doesn't start with "@", it is returned unchanged.
+
+         Raises:
+             PyAirbyteInputError: If the input starts with "@" but is not a recognized alias.
+         """
+         if org_id is None:
+             return None
+
+         # Handle OrganizationAliasEnum enum members directly
+         if isinstance(org_id, cls):
+             return org_id.value
+
+         # If it doesn't look like an alias, return as-is (assume it's a UUID)
+         if not org_id.startswith("@"):
+             return org_id
+
+         # Handle alias strings or raise an error if invalid
+         alias_mapping = {
+             "@airbyte-internal": cls.AIRBYTE_INTERNAL.value,
+         }
+         if org_id not in alias_mapping:
+             raise PyAirbyteInputError(
+                 message=f"Unknown organization alias: {org_id}",
+                 context={
+                     "valid_aliases": list(alias_mapping.keys()),
+                 },
+             )
+         return alias_mapping[org_id]
+
+
  CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS_SECRET_ID = (
      "projects/587336813068/secrets/CONNECTION_RETRIEVER_PG_CONNECTION_DETAILS"
  )
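
The new `resolve()` classmethod is the single entry point for alias handling. A short usage sketch of the behavior defined above (the UUID is the one hard-coded in the enum):

```python
from airbyte_ops_mcp.constants import OrganizationAliasEnum

# Aliases starting with "@" resolve to the hard-coded UUID:
assert (
    OrganizationAliasEnum.resolve("@airbyte-internal")
    == "664c690e-5263-49ba-b01f-4a6759b3330a"
)
# None passes through, and anything not starting with "@" is assumed
# to already be a UUID and is returned unchanged:
assert OrganizationAliasEnum.resolve(None) is None
assert OrganizationAliasEnum.resolve("some-uuid") == "some-uuid"
# Unknown aliases raise PyAirbyteInputError:
# OrganizationAliasEnum.resolve("@unknown-org")  # raises
```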
@@ -228,17 +228,17 @@ def publish_connector_to_airbyte_registry(
      # Guard: Check for required token
      token = resolve_github_token(PRERELEASE_TOKEN_ENV_VARS)

-     # Get the PR's head ref and SHA
+     # Get the PR's head SHA for computing the docker image tag
+     # Note: We no longer pass gitref to the workflow - it derives the ref from PR number
      head_info = _get_pr_head_info(
          DEFAULT_REPO_OWNER, DEFAULT_REPO_NAME, pr_number, token
      )

      # Prepare workflow inputs
-     # The workflow expects these inputs from slash-command-dispatch
+     # The workflow uses refs/pull/{pr}/head directly - no gitref needed
      # Note: The workflow auto-detects modified connectors from the PR
      workflow_inputs = {
          "repo": f"{DEFAULT_REPO_OWNER}/{DEFAULT_REPO_NAME}",
-         "gitref": head_info.ref,
          "pr": str(pr_number),
      }

@@ -7,28 +7,78 @@ airbyte_ops_mcp.prod_db_access.queries for use by AI agents.

  from __future__ import annotations

+ from datetime import datetime
  from typing import Annotated, Any

  import requests
  from airbyte.exceptions import PyAirbyteInputError
  from fastmcp import FastMCP
- from pydantic import Field
+ from pydantic import BaseModel, Field

+ from airbyte_ops_mcp.constants import OrganizationAliasEnum
  from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
  from airbyte_ops_mcp.prod_db_access.queries import (
      query_actors_pinned_to_version,
      query_connections_by_connector,
+     query_connections_by_destination_connector,
      query_connector_versions,
      query_dataplanes_list,
      query_failed_sync_attempts_for_connector,
      query_new_connector_releases,
      query_sync_results_for_version,
      query_workspace_info,
+     query_workspaces_by_email_domain,
  )

  # Cloud UI base URL for building connection URLs
  CLOUD_UI_BASE_URL = "https://cloud.airbyte.com"

+
+ # =============================================================================
+ # Pydantic Models for MCP Tool Responses
+ # =============================================================================
+
+
+ class WorkspaceInfo(BaseModel):
+     """Information about a workspace found by email domain search."""
+
+     organization_id: str = Field(description="The organization UUID")
+     workspace_id: str = Field(description="The workspace UUID")
+     workspace_name: str = Field(description="The name of the workspace")
+     slug: str | None = Field(
+         default=None, description="The workspace slug (URL-friendly identifier)"
+     )
+     email: str | None = Field(
+         default=None, description="The email address associated with the workspace"
+     )
+     dataplane_group_id: str | None = Field(
+         default=None, description="The dataplane group UUID (region)"
+     )
+     dataplane_name: str | None = Field(
+         default=None, description="The name of the dataplane (e.g., 'US', 'EU')"
+     )
+     created_at: datetime | None = Field(
+         default=None, description="When the workspace was created"
+     )
+
+
+ class WorkspacesByEmailDomainResult(BaseModel):
+     """Result of looking up workspaces by email domain."""
+
+     email_domain: str = Field(
+         description="The email domain that was searched for (e.g., 'motherduck.com')"
+     )
+     total_workspaces_found: int = Field(
+         description="Total number of workspaces matching the email domain"
+     )
+     unique_organization_ids: list[str] = Field(
+         description="List of unique organization IDs found"
+     )
+     workspaces: list[WorkspaceInfo] = Field(
+         description="List of workspaces matching the email domain"
+     )
+
+
  # Cloud registry URL for resolving canonical names
  CLOUD_REGISTRY_URL = (
      "https://connectors.airbyte.com/files/registries/v0/cloud_registry.json"
@@ -36,13 +86,18 @@ CLOUD_REGISTRY_URL = (


  def _resolve_canonical_name_to_definition_id(canonical_name: str) -> str:
-     """Resolve a canonical source name to a definition ID.
+     """Resolve a canonical connector name to a definition ID.
+
+     Auto-detects whether the connector is a source or destination based on the
+     canonical name prefix ("source-" or "destination-"). If no prefix is present,
+     searches both sources and destinations.

      Args:
-         canonical_name: Canonical source name (e.g., 'source-youtube-analytics').
+         canonical_name: Canonical connector name (e.g., 'source-youtube-analytics',
+             'destination-duckdb', 'YouTube Analytics', 'DuckDB').

      Returns:
-         The source definition ID (UUID).
+         The connector definition ID (UUID).

      Raises:
          PyAirbyteInputError: If the canonical name cannot be resolved.
@@ -56,31 +111,65 @@ def _resolve_canonical_name_to_definition_id(canonical_name: str) -> str:
      )

      data = response.json()
-     sources = data.get("sources", [])
-
-     # Normalize the canonical name for matching
      normalized_input = canonical_name.lower().strip()

-     # Try exact match on name field
-     for source in sources:
-         source_name = source.get("name", "").lower()
-         # The registry returns names like "YouTube Analytics"
-         # So we need to handle both formats
-         if source_name == normalized_input:
-             return source["sourceDefinitionId"]
-
-         # Also try matching against a slugified version
-         # e.g., "YouTube Analytics" -> "youtube-analytics"
-         slugified = source_name.replace(" ", "-")
-         if slugified == normalized_input or f"source-{slugified}" == normalized_input:
-             return source["sourceDefinitionId"]
+     # Determine which registries to search based on prefix
+     is_source = normalized_input.startswith("source-")
+     is_destination = normalized_input.startswith("destination-")
+
+     # Search sources if it looks like a source or has no prefix
+     if is_source or not is_destination:
+         sources = data.get("sources", [])
+         for source in sources:
+             source_name = source.get("name", "").lower()
+             if source_name == normalized_input:
+                 return source["sourceDefinitionId"]
+             slugified = source_name.replace(" ", "-")
+             if (
+                 slugified == normalized_input
+                 or f"source-{slugified}" == normalized_input
+             ):
+                 return source["sourceDefinitionId"]
+
+     # Search destinations if it looks like a destination or has no prefix
+     if is_destination or not is_source:
+         destinations = data.get("destinations", [])
+         for destination in destinations:
+             destination_name = destination.get("name", "").lower()
+             if destination_name == normalized_input:
+                 return destination["destinationDefinitionId"]
+             slugified = destination_name.replace(" ", "-")
+             if (
+                 slugified == normalized_input
+                 or f"destination-{slugified}" == normalized_input
+             ):
+                 return destination["destinationDefinitionId"]
+
+     # Build appropriate error message based on what was searched
+     if is_source:
+         connector_type = "source"
+         hint = (
+             "Use the exact canonical name (e.g., 'source-youtube-analytics') "
+             "or display name (e.g., 'YouTube Analytics')."
+         )
+     elif is_destination:
+         connector_type = "destination"
+         hint = (
+             "Use the exact canonical name (e.g., 'destination-duckdb') "
+             "or display name (e.g., 'DuckDB')."
+         )
+     else:
+         connector_type = "connector"
+         hint = (
+             "Use the exact canonical name (e.g., 'source-youtube-analytics', "
+             "'destination-duckdb') or display name (e.g., 'YouTube Analytics', 'DuckDB')."
+         )

      raise PyAirbyteInputError(
-         message=f"Could not find source definition for canonical name: {canonical_name}",
+         message=f"Could not find {connector_type} definition for canonical name: {canonical_name}",
          context={
-             "hint": "Use the exact canonical name (e.g., 'source-youtube-analytics') "
-             "or display name (e.g., 'YouTube Analytics'). "
-             "You can list available sources using the connector registry tools.",
+             "hint": hint
+             + " You can list available connectors using the connector registry tools.",
              "searched_for": canonical_name,
          },
      )
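
The matching rules are unchanged from 0.2.1, just applied to both registries now: an input matches an entry if it equals the lowercased display name, its slugified form, or the slug with the type prefix. A standalone sketch of that rule (the helper name and sample names are illustrative, not real registry data):

```python
def _name_matches(display_name: str, query: str, prefix: str) -> bool:
    """Illustrative re-statement of the matching rule used above."""
    name = display_name.lower()
    slug = name.replace(" ", "-")  # "YouTube Analytics" -> "youtube-analytics"
    return query.lower().strip() in (name, slug, f"{prefix}-{slug}")

assert _name_matches("YouTube Analytics", "source-youtube-analytics", "source")
assert _name_matches("YouTube Analytics", "youtube-analytics", "source")
assert _name_matches("DuckDB", "DuckDB", "destination")
assert not _name_matches("DuckDB", "duck", "destination")
```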
@@ -275,11 +364,12 @@ def query_prod_failed_sync_attempts_for_connector(
          ),
      ] = None,
      organization_id: Annotated[
-         str | None,
+         str | OrganizationAliasEnum | None,
          Field(
              description=(
-                 "Optional organization ID (UUID) to filter results. "
-                 "If provided, only failed attempts from this organization will be returned."
+                 "Optional organization ID (UUID) or alias to filter results. "
+                 "If provided, only failed attempts from this organization will be returned. "
+                 "Accepts '@airbyte-internal' as an alias for the Airbyte internal org."
              ),
              default=None,
          ),
@@ -327,9 +417,12 @@ def query_prod_failed_sync_attempts_for_connector(
      else:
          resolved_definition_id = source_definition_id  # type: ignore[assignment]

+     # Resolve organization ID alias
+     resolved_organization_id = OrganizationAliasEnum.resolve(organization_id)
+
      return query_failed_sync_attempts_for_connector(
          connector_definition_id=resolved_definition_id,
-         organization_id=organization_id,
+         organization_id=resolved_organization_id,
          days=days,
          limit=limit,
      )
@@ -346,7 +439,8 @@ def query_prod_connections_by_connector(
          Field(
              description=(
                  "Source connector definition ID (UUID) to search for. "
-                 "Exactly one of this or source_canonical_name is required. "
+                 "Exactly one of source_definition_id, source_canonical_name, "
+                 "destination_definition_id, or destination_canonical_name is required. "
                  "Example: 'afa734e4-3571-11ec-991a-1e0031268139' for YouTube Analytics."
              ),
              default=None,
@@ -357,18 +451,44 @@ def query_prod_connections_by_connector(
          Field(
              description=(
                  "Canonical source connector name to search for. "
-                 "Exactly one of this or source_definition_id is required. "
+                 "Exactly one of source_definition_id, source_canonical_name, "
+                 "destination_definition_id, or destination_canonical_name is required. "
                  "Examples: 'source-youtube-analytics', 'YouTube Analytics'."
              ),
              default=None,
          ),
      ] = None,
-     organization_id: Annotated[
+     destination_definition_id: Annotated[
          str | None,
          Field(
              description=(
-                 "Optional organization ID (UUID) to filter results. "
-                 "If provided, only connections in this organization will be returned."
+                 "Destination connector definition ID (UUID) to search for. "
+                 "Exactly one of source_definition_id, source_canonical_name, "
+                 "destination_definition_id, or destination_canonical_name is required. "
+                 "Example: 'e5c8e66c-a480-4a5e-9c0e-e8e5e4c5c5c5' for DuckDB."
+             ),
+             default=None,
+         ),
+     ] = None,
+     destination_canonical_name: Annotated[
+         str | None,
+         Field(
+             description=(
+                 "Canonical destination connector name to search for. "
+                 "Exactly one of source_definition_id, source_canonical_name, "
+                 "destination_definition_id, or destination_canonical_name is required. "
+                 "Examples: 'destination-duckdb', 'DuckDB'."
+             ),
+             default=None,
+         ),
+     ] = None,
+     organization_id: Annotated[
+         str | OrganizationAliasEnum | None,
+         Field(
+             description=(
+                 "Optional organization ID (UUID) or alias to filter results. "
+                 "If provided, only connections in this organization will be returned. "
+                 "Accepts '@airbyte-internal' as an alias for the Airbyte internal org."
              ),
              default=None,
          ),
@@ -378,38 +498,88 @@ def query_prod_connections_by_connector(
          Field(description="Maximum number of results (default: 1000)", default=1000),
      ] = 1000,
  ) -> list[dict[str, Any]]:
-     """Search for all connections using a specific source connector type.
+     """Search for all connections using a specific source or destination connector type.

      This tool queries the Airbyte Cloud Prod DB Replica directly for fast results.
-     It finds all connections where the source connector matches the specified type,
-     regardless of how the source is named by users.
+     It finds all connections where the source or destination connector matches the
+     specified type, regardless of how the connector is named by users.

      Optionally filter by organization_id to limit results to a specific organization.
+     Use '@airbyte-internal' as an alias for the Airbyte internal organization.

      Returns a list of connection dicts with workspace context and clickable Cloud UI URLs.
-     Each dict contains: connection_id, connection_name, connection_url, source_id,
+     For source queries, returns: connection_id, connection_name, connection_url, source_id,
      source_name, source_definition_id, workspace_id, workspace_name, organization_id,
      dataplane_group_id, dataplane_name.
+     For destination queries, returns: connection_id, connection_name, connection_url,
+     destination_id, destination_name, destination_definition_id, workspace_id,
+     workspace_name, organization_id, dataplane_group_id, dataplane_name.
      """
-     # Validate that exactly one of the two parameters is provided
-     if (source_definition_id is None) == (source_canonical_name is None):
+     # Validate that exactly one of the four connector parameters is provided
+     provided_params = [
+         source_definition_id,
+         source_canonical_name,
+         destination_definition_id,
+         destination_canonical_name,
+     ]
+     num_provided = sum(p is not None for p in provided_params)
+     if num_provided != 1:
          raise PyAirbyteInputError(
              message=(
-                 "Exactly one of source_definition_id or source_canonical_name "
-                 "must be provided, but not both."
+                 "Exactly one of source_definition_id, source_canonical_name, "
+                 "destination_definition_id, or destination_canonical_name must be provided."
              ),
          )

-     # Resolve canonical name to definition ID if needed
+     # Determine if this is a source or destination query and resolve the definition ID
+     is_source_query = (
+         source_definition_id is not None or source_canonical_name is not None
+     )
      resolved_definition_id: str
+
      if source_canonical_name:
          resolved_definition_id = _resolve_canonical_name_to_definition_id(
              canonical_name=source_canonical_name,
          )
+     elif source_definition_id:
+         resolved_definition_id = source_definition_id
+     elif destination_canonical_name:
+         resolved_definition_id = _resolve_canonical_name_to_definition_id(
+             canonical_name=destination_canonical_name,
+         )
      else:
-         resolved_definition_id = source_definition_id  # type: ignore[assignment]
-
-     # Query the database and transform rows to include connection URLs
+         resolved_definition_id = destination_definition_id  # type: ignore[assignment]
+
+     # Resolve organization ID alias
+     resolved_organization_id = OrganizationAliasEnum.resolve(organization_id)
+
+     # Query the database based on connector type
+     if is_source_query:
+         return [
+             {
+                 "organization_id": str(row.get("organization_id", "")),
+                 "workspace_id": str(row["workspace_id"]),
+                 "workspace_name": row.get("workspace_name", ""),
+                 "connection_id": str(row["connection_id"]),
+                 "connection_name": row.get("connection_name", ""),
+                 "connection_url": (
+                     f"{CLOUD_UI_BASE_URL}/workspaces/{row['workspace_id']}"
+                     f"/connections/{row['connection_id']}/status"
+                 ),
+                 "source_id": str(row["source_id"]),
+                 "source_name": row.get("source_name", ""),
+                 "source_definition_id": str(row["source_definition_id"]),
+                 "dataplane_group_id": str(row.get("dataplane_group_id", "")),
+                 "dataplane_name": row.get("dataplane_name", ""),
+             }
+             for row in query_connections_by_connector(
+                 connector_definition_id=resolved_definition_id,
+                 organization_id=resolved_organization_id,
+                 limit=limit,
+             )
+         ]
+
+     # Destination query
      return [
          {
              "organization_id": str(row.get("organization_id", "")),
@@ -421,20 +591,93 @@ def query_prod_connections_by_connector(
              "connection_url": (
                  f"{CLOUD_UI_BASE_URL}/workspaces/{row['workspace_id']}"
                  f"/connections/{row['connection_id']}/status"
              ),
-             "source_id": str(row["source_id"]),
-             "source_name": row.get("source_name", ""),
-             "source_definition_id": str(row["source_definition_id"]),
+             "destination_id": str(row["destination_id"]),
+             "destination_name": row.get("destination_name", ""),
+             "destination_definition_id": str(row["destination_definition_id"]),
              "dataplane_group_id": str(row.get("dataplane_group_id", "")),
              "dataplane_name": row.get("dataplane_name", ""),
          }
-         for row in query_connections_by_connector(
+         for row in query_connections_by_destination_connector(
              connector_definition_id=resolved_definition_id,
-             organization_id=organization_id,
+             organization_id=resolved_organization_id,
              limit=limit,
          )
      ]


+ @mcp_tool(
+     read_only=True,
+     idempotent=True,
+ )
+ def query_prod_workspaces_by_email_domain(
+     email_domain: Annotated[
+         str,
+         Field(
+             description=(
+                 "Email domain to search for (e.g., 'motherduck.com', 'fivetran.com'). "
+                 "Do not include the '@' symbol. This will find workspaces where users "
+                 "have email addresses with this domain."
+             ),
+         ),
+     ],
+     limit: Annotated[
+         int,
+         Field(
+             description="Maximum number of workspaces to return (default: 100)",
+             default=100,
+         ),
+     ] = 100,
+ ) -> WorkspacesByEmailDomainResult:
+     """Find workspaces by email domain.
+
+     This tool searches for workspaces where users have email addresses matching
+     the specified domain. This is useful for identifying workspaces belonging to
+     specific companies - for example, searching for "motherduck.com" will find
+     workspaces belonging to MotherDuck employees.
+
+     Use cases:
+     - Finding partner organization connections for testing connector fixes
+     - Identifying internal test accounts for specific integrations
+     - Locating workspaces belonging to technology partners
+
+     The returned organization IDs can be used with other tools like
+     `query_prod_connections_by_connector` to find connections within
+     those organizations for safe testing.
+     """
+     # Strip leading @ if provided
+     clean_domain = email_domain.lstrip("@")
+
+     # Query the database
+     rows = query_workspaces_by_email_domain(email_domain=clean_domain, limit=limit)
+
+     # Convert rows to Pydantic models
+     workspaces = [
+         WorkspaceInfo(
+             organization_id=str(row["organization_id"]),
+             workspace_id=str(row["workspace_id"]),
+             workspace_name=row.get("workspace_name", ""),
+             slug=row.get("slug"),
+             email=row.get("email"),
+             dataplane_group_id=str(row["dataplane_group_id"])
+             if row.get("dataplane_group_id")
+             else None,
+             dataplane_name=row.get("dataplane_name"),
+             created_at=row.get("created_at"),
+         )
+         for row in rows
+     ]
+
+     # Extract unique organization IDs
+     unique_org_ids = list(dict.fromkeys(w.organization_id for w in workspaces))
+
+     return WorkspacesByEmailDomainResult(
+         email_domain=clean_domain,
+         total_workspaces_found=len(workspaces),
+         unique_organization_ids=unique_org_ids,
+         workspaces=workspaces,
+     )
+
+
  def register_prod_db_query_tools(app: FastMCP) -> None:
      """Register prod DB query tools with the FastMCP app."""
      register_mcp_tools(app, domain=__name__)
@@ -303,67 +303,56 @@ class RunRegressionTestsResponse(BaseModel):
      open_world=True,
  )
  def run_regression_tests(
-     connection_id: Annotated[str, "The Airbyte Cloud connection ID to test"],
-     skip_read_action: Annotated[
-         bool,
-         "If True, skip the read action (run only spec, check, discover). "
-         "If False (default), run all verbs including read.",
-     ] = False,
-     workspace_id: Annotated[
+     connector_name: Annotated[
+         str,
+         "Connector name to build from source (e.g., 'source-pokeapi'). Required.",
+     ],
+     pr: Annotated[
+         int,
+         "PR number from the airbyte monorepo to checkout and build from (e.g., 70847). Required.",
+     ],
+     connection_id: Annotated[
          str | None,
-         "Optional Airbyte Cloud workspace ID. If provided, validates that the connection "
-         "belongs to this workspace before triggering tests. If omitted, no validation is done.",
+         "Airbyte Cloud connection ID to fetch config/catalog from. "
+         "If not provided, uses GSM integration test secrets.",
      ] = None,
      skip_compare: Annotated[
          bool,
          "If True, skip comparison and run single-version tests only. "
          "If False (default), run comparison tests (target vs control versions).",
      ] = False,
-     connector_image: Annotated[
-         str | None,
-         "Optional connector image with tag for single-version tests "
-         "(e.g., 'airbyte/source-github:1.0.0'). "
-         "If not provided, auto-detected from connection. Only used when skip_compare=True.",
-     ] = None,
-     target_image: Annotated[
+     skip_read_action: Annotated[
+         bool,
+         "If True, skip the read action (run only spec, check, discover). "
+         "If False (default), run all verbs including read.",
+     ] = False,
+     override_test_image: Annotated[
          str | None,
-         "Target connector image (new version) with tag for comparison tests "
-         "(e.g., 'airbyte/source-github:2.0.0'). Optional if connector_name is provided. "
-         "Only used when skip_compare=False (default).",
+         "Override test connector image with tag (e.g., 'airbyte/source-github:1.0.0'). "
+         "Ignored if skip_compare=False.",
      ] = None,
-     control_image: Annotated[
+     override_control_image: Annotated[
          str | None,
-         "Control connector image (baseline version) with tag for comparison tests "
-         "(e.g., 'airbyte/source-github:1.0.0'). Optional if connection_id is provided "
-         "(auto-detected from connection). Only used when skip_compare=False (default).",
+         "Override control connector image (baseline version) with tag. "
+         "Ignored if skip_compare=True.",
      ] = None,
-     connector_name: Annotated[
+     workspace_id: Annotated[
          str | None,
-         "Connector name to build the connector image from source "
-         "(e.g., 'source-pokeapi'). If provided, builds the image locally with tag 'dev'. "
-         "For comparison tests (default), this builds the target image while control is "
-         "auto-detected from the connection. For single-version tests (skip_compare=True), "
-         "this builds the test image.",
-     ] = None,
-     pr: Annotated[
-         int | None,
-         "PR number from the airbyte monorepo to checkout and build from "
-         "(e.g., 70847). Only used when connector_name is provided. "
-         "If not specified, builds from the default branch (master).",
+         "Optional Airbyte Cloud workspace ID. If provided with connection_id, validates "
+         "that the connection belongs to this workspace before triggering tests.",
      ] = None,
  ) -> RunRegressionTestsResponse:
      """Start a regression test run via GitHub Actions workflow.

-     This tool triggers either the comparison or single-version regression test
-     workflow depending on the skip_compare parameter:
+     This tool triggers the regression test workflow which builds the connector
+     from the specified PR and runs tests against it.

-     - skip_compare=False (default): Triggers comparison regression test workflow.
-       Compares the target connector version against a control (baseline) version.
-       Provide either target_image or connector_name to specify the target version.
+     - skip_compare=False (default): Comparison mode - compares the PR version
+       against the baseline (control) version.
+     - skip_compare=True: Single-version mode - runs tests without comparison.

-     - skip_compare=True: Triggers single-version regression test workflow.
-       Runs the specified command against the connection and validates the output.
-       No comparison is performed.
+     If connection_id is provided, config/catalog are fetched from Airbyte Cloud.
+     Otherwise, GSM integration test secrets are used.

      Returns immediately with a run_id and workflow URL. Check the workflow URL
      to monitor progress and view results.
@@ -385,8 +374,8 @@ def run_regression_tests(
          workflow_url=None,
      )

-     # Validate workspace membership if workspace_id is provided
-     if workspace_id:
+     # Validate workspace membership if workspace_id and connection_id are provided
+     if workspace_id and connection_id:
          try:
              validate_connection_workspace(connection_id, workspace_id)
          except (
@@ -401,42 +390,23 @@ def run_regression_tests(
              workflow_url=None,
          )

-     # Build workflow inputs for the unified regression test workflow
+     # Build workflow inputs - connector_name and pr are required
      workflow_inputs: dict[str, str] = {
-         "connection_id": connection_id,
+         "connector_name": connector_name,
+         "pr": str(pr),
      }

+     # Add optional inputs
+     if connection_id:
+         workflow_inputs["connection_id"] = connection_id
      if skip_compare:
-         # Single-version mode
          workflow_inputs["skip_compare"] = "true"
-         if connector_image:
-             workflow_inputs["connector_image"] = connector_image
-     else:
-         # Comparison mode (default): validate that we have enough info
-         if not target_image and not connector_name:
-             return RunRegressionTestsResponse(
-                 run_id=run_id,
-                 status=TestRunStatus.FAILED,
-                 message=(
-                     "For comparison regression tests (skip_compare=False, the default), "
-                     "provide either target_image or connector_name so the workflow can "
-                     "determine the target image."
-                 ),
-                 workflow_url=None,
-             )
-         workflow_inputs["skip_compare"] = "false"
-         if target_image:
-             workflow_inputs["target_image"] = target_image
-         if control_image:
-             workflow_inputs["control_image"] = control_image
-
-     # Common inputs for both modes
      if skip_read_action:
          workflow_inputs["skip_read_action"] = "true"
-     if connector_name:
-         workflow_inputs["connector_name"] = connector_name
-     if pr:
-         workflow_inputs["pr"] = str(pr)
+     if override_test_image:
+         workflow_inputs["override_test_image"] = override_test_image
+     if override_control_image:
+         workflow_inputs["override_control_image"] = override_control_image

      mode_description = "single-version" if skip_compare else "comparison"
      try:
@@ -460,12 +430,13 @@ def run_regression_tests(
          )

      view_url = dispatch_result.run_url or dispatch_result.workflow_url
+     connection_info = f" for connection {connection_id}" if connection_id else ""
      return RunRegressionTestsResponse(
          run_id=run_id,
          status=TestRunStatus.QUEUED,
          message=(
-             f"{mode_description.capitalize()} regression test workflow triggered for "
-             f"connection {connection_id}. View progress at: {view_url}"
+             f"{mode_description.capitalize()} regression test workflow triggered "
+             f"for {connector_name} (PR #{pr}){connection_info}. View progress at: {view_url}"
          ),
          workflow_url=dispatch_result.workflow_url,
          github_run_id=dispatch_result.run_id,
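
For reference, this is the shape of `workflow_inputs` a call under the new signature would dispatch; the values are illustrative, not from a real run:

```python
# run_regression_tests(connector_name="source-pokeapi", pr=70847,
#                      skip_read_action=True) would dispatch roughly:
workflow_inputs = {
    "connector_name": "source-pokeapi",
    "pr": "70847",
    "skip_read_action": "true",
    # plus "connection_id", "skip_compare", "override_test_image", and
    # "override_control_image" only when those arguments are provided
}
```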
@@ -22,6 +22,8 @@ from airbyte_ops_mcp.prod_db_access.sql import (
      SELECT_ACTORS_PINNED_TO_VERSION,
      SELECT_CONNECTIONS_BY_CONNECTOR,
      SELECT_CONNECTIONS_BY_CONNECTOR_AND_ORG,
+     SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR,
+     SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG,
      SELECT_CONNECTOR_VERSIONS,
      SELECT_DATAPLANES_LIST,
      SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
@@ -30,6 +32,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
      SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
      SELECT_SYNC_RESULTS_FOR_VERSION,
      SELECT_WORKSPACE_INFO,
+     SELECT_WORKSPACES_BY_EMAIL_DOMAIN,
  )

  logger = logging.getLogger(__name__)
@@ -113,6 +116,48 @@ def query_connections_by_connector(
  )


+ def query_connections_by_destination_connector(
+     connector_definition_id: str,
+     organization_id: str | None = None,
+     limit: int = 1000,
+     *,
+     gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+ ) -> list[dict[str, Any]]:
+     """Query connections by destination connector type, optionally filtered by organization.
+
+     Args:
+         connector_definition_id: Destination connector definition UUID to filter by
+         organization_id: Optional organization UUID to search within
+         limit: Maximum number of results (default: 1000)
+         gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+     Returns:
+         List of connection records with workspace and dataplane info
+     """
+     # Use separate queries to avoid pg8000 NULL parameter type issues
+     if organization_id is None:
+         return _run_sql_query(
+             SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR,
+             parameters={
+                 "connector_definition_id": connector_definition_id,
+                 "limit": limit,
+             },
+             query_name="SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR",
+             gsm_client=gsm_client,
+         )
+
+     return _run_sql_query(
+         SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG,
+         parameters={
+             "connector_definition_id": connector_definition_id,
+             "organization_id": organization_id,
+             "limit": limit,
+         },
+         query_name="SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG",
+         gsm_client=gsm_client,
+     )
+
+
  def query_connector_versions(
      connector_definition_id: str,
      *,
@@ -337,3 +382,37 @@ def query_org_workspaces(
          query_name="SELECT_ORG_WORKSPACES",
          gsm_client=gsm_client,
      )
+
+
+ def query_workspaces_by_email_domain(
+     email_domain: str,
+     limit: int = 100,
+     *,
+     gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+ ) -> list[dict[str, Any]]:
+     """Query workspaces by email domain.
+
+     This is useful for identifying workspaces based on user email domains.
+     For example, searching for "motherduck.com" will find workspaces where users have
+     @motherduck.com email addresses, which may belong to partner accounts.
+
+     Args:
+         email_domain: Email domain to search for (e.g., "motherduck.com", "fivetran.com").
+             Do not include the "@" symbol.
+         limit: Maximum number of results (default: 100)
+         gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+     Returns:
+         List of workspace records with organization_id, workspace_id, workspace_name,
+         slug, email, dataplane_group_id, dataplane_name, and created_at.
+         Results are ordered by organization_id and workspace_name.
+     """
+     # Strip leading @ if provided
+     clean_domain = email_domain.lstrip("@")
+
+     return _run_sql_query(
+         SELECT_WORKSPACES_BY_EMAIL_DOMAIN,
+         parameters={"email_domain": clean_domain, "limit": limit},
+         query_name="SELECT_WORKSPACES_BY_EMAIL_DOMAIN",
+         gsm_client=gsm_client,
+     )
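
Together, the two new query helpers support the partner-lookup workflow the MCP tool docstring describes: find an organization's workspaces by email domain, then list that organization's connections for a given destination. A hedged sketch using the plain functions above (assumes the Cloud SQL Proxy is running and GCP credentials are configured; the definition ID is a placeholder, not a real UUID):

```python
from airbyte_ops_mcp.prod_db_access.queries import (
    query_connections_by_destination_connector,
    query_workspaces_by_email_domain,
)

# Step 1: find workspaces whose users have @motherduck.com addresses.
rows = query_workspaces_by_email_domain(email_domain="motherduck.com", limit=100)
org_ids = {str(row["organization_id"]) for row in rows}

# Step 2: list each organization's connections for a given destination type.
for org_id in sorted(org_ids):
    connections = query_connections_by_destination_connector(
        connector_definition_id="<destination-definition-uuid>",  # placeholder
        organization_id=org_id,
    )
    print(org_id, len(connections))
```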
@@ -131,6 +131,61 @@ SELECT_CONNECTIONS_BY_CONNECTOR_AND_ORG = sqlalchemy.text(
      """
  )

+ # Query connections by DESTINATION connector type (no organization filter)
+ SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR = sqlalchemy.text(
+     """
+     SELECT
+         connection.id AS connection_id,
+         connection.name AS connection_name,
+         connection.destination_id,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         destination_actor.actor_definition_id AS destination_definition_id,
+         destination_actor.name AS destination_name
+     FROM connection
+     JOIN actor AS destination_actor
+         ON connection.destination_id = destination_actor.id
+     JOIN workspace
+         ON destination_actor.workspace_id = workspace.id
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         destination_actor.actor_definition_id = :connector_definition_id
+     LIMIT :limit
+     """
+ )
+
+ # Query connections by DESTINATION connector type, filtered by organization
+ SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG = sqlalchemy.text(
+     """
+     SELECT
+         connection.id AS connection_id,
+         connection.name AS connection_name,
+         connection.destination_id,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.organization_id,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         destination_actor.actor_definition_id AS destination_definition_id,
+         destination_actor.name AS destination_name
+     FROM connection
+     JOIN actor AS destination_actor
+         ON connection.destination_id = destination_actor.id
+     JOIN workspace
+         ON destination_actor.workspace_id = workspace.id
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         destination_actor.actor_definition_id = :connector_definition_id
+         AND workspace.organization_id = :organization_id
+     LIMIT :limit
+     """
+ )
+
  # =============================================================================
  # Connector Version Queries
  # =============================================================================
@@ -441,3 +496,34 @@ SELECT_ORG_WORKSPACES = sqlalchemy.text(
          workspace.name
      """
  )
+
+ # =============================================================================
+ # Workspace Lookup by Email Domain
+ # =============================================================================
+
+ # Find workspaces by email domain
+ # This is useful for identifying workspaces based on user email domains
+ # (e.g., finding partner accounts like MotherDuck by searching for "motherduck.com")
+ SELECT_WORKSPACES_BY_EMAIL_DOMAIN = sqlalchemy.text(
+     """
+     SELECT DISTINCT
+         workspace.organization_id,
+         workspace.id AS workspace_id,
+         workspace.name AS workspace_name,
+         workspace.slug,
+         workspace.email,
+         workspace.dataplane_group_id,
+         dataplane_group.name AS dataplane_name,
+         workspace.created_at
+     FROM workspace
+     LEFT JOIN dataplane_group
+         ON workspace.dataplane_group_id = dataplane_group.id
+     WHERE
+         workspace.email LIKE '%@' || :email_domain
+         AND workspace.tombstone = false
+     ORDER BY
+         workspace.organization_id,
+         workspace.name
+     LIMIT :limit
+     """
+ )
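
The `LIKE '%@' || :email_domain` predicate anchors the match at the `@`, so only exact domain suffixes qualify. The Python equivalent, for intuition (sample addresses are made up, and this ignores SQL wildcard characters appearing in the domain itself):

```python
def matches_domain(email: str, domain: str) -> bool:
    # Same semantics as: email LIKE '%@' || :email_domain
    return email.endswith("@" + domain)

assert matches_domain("ada@motherduck.com", "motherduck.com")
assert not matches_domain("ada@sub.motherduck.com", "motherduck.com")
assert not matches_domain("ada@motherduck.com.evil.com", "motherduck.com")
```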