airbyte-internal-ops 0.3.1__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -31,12 +31,10 @@ from airbyte.exceptions import (
 from fastmcp import FastMCP
 from pydantic import BaseModel, Field
 
-from airbyte_ops_mcp.github_actions import (
-    GITHUB_API_BASE,
-    resolve_github_token,
-    trigger_workflow_dispatch,
-)
+from airbyte_ops_mcp.github_actions import trigger_workflow_dispatch
+from airbyte_ops_mcp.github_api import GITHUB_API_BASE, resolve_github_token
 from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
+from airbyte_ops_mcp.mcp.prerelease import ConnectorRepo
 
 logger = logging.getLogger(__name__)
 
@@ -284,7 +282,7 @@ class RunRegressionTestsResponse(BaseModel):
     )
     github_run_id: int | None = Field(
         default=None,
-        description="GitHub Actions workflow run ID (use with check_workflow_status)",
+        description="GitHub Actions workflow run ID (use with check_ci_workflow_status)",
     )
     github_run_url: str | None = Field(
         default=None,
@@ -309,7 +307,13 @@ def run_regression_tests(
     ],
     pr: Annotated[
         int,
-        "PR number from the airbyte monorepo to checkout and build from (e.g., 70847). Required.",
+        "PR number to checkout and build from (e.g., 70847). Required. "
+        "The PR must be from the repository specified by the 'repo' parameter.",
+    ],
+    repo: Annotated[
+        ConnectorRepo,
+        "Repository where the connector PR is located. "
+        "Use 'airbyte' for OSS connectors (default) or 'airbyte-enterprise' for enterprise connectors.",
     ],
     connection_id: Annotated[
         str | None,
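
Reviewer note: the diff imports ConnectorRepo from airbyte_ops_mcp.mcp.prerelease but never shows its definition. Given the 'airbyte' / 'airbyte-enterprise' values quoted in the parameter docs, the comparison against ConnectorRepo.AIRBYTE later in the diff, and the fact that the value is passed straight into a dict[str, str] of workflow inputs, it is presumably a string-valued enum along these lines (member names are an assumption):

    from enum import Enum

    class ConnectorRepo(str, Enum):
        """Repositories that may host a connector PR; values match the GitHub repo names."""

        AIRBYTE = "airbyte"                        # OSS connectors (airbytehq/airbyte)
        AIRBYTE_ENTERPRISE = "airbyte-enterprise"  # enterprise connectors (member name assumed)
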
@@ -347,6 +351,10 @@ def run_regression_tests(
     This tool triggers the regression test workflow which builds the connector
     from the specified PR and runs tests against it.
 
+    Supports both OSS connectors (from airbytehq/airbyte) and enterprise connectors
+    (from airbytehq/airbyte-enterprise). Use the 'repo' parameter to specify which
+    repository contains the connector PR.
+
     - skip_compare=False (default): Comparison mode - compares the PR version
       against the baseline (control) version.
     - skip_compare=True: Single-version mode - runs tests without comparison.
@@ -390,10 +398,11 @@ def run_regression_tests(
         workflow_url=None,
     )
 
-    # Build workflow inputs - connector_name and pr are required
+    # Build workflow inputs - connector_name, pr, and repo are required
    workflow_inputs: dict[str, str] = {
        "connector_name": connector_name,
        "pr": str(pr),
+        "repo": repo,
    }
 
    # Add optional inputs
@@ -409,6 +418,7 @@ def run_regression_tests(
         workflow_inputs["override_control_image"] = override_control_image
 
     mode_description = "single-version" if skip_compare else "comparison"
+
     try:
         dispatch_result = trigger_workflow_dispatch(
             owner=REGRESSION_TEST_REPO_OWNER,
@@ -431,12 +441,13 @@ def run_regression_tests(
 
     view_url = dispatch_result.run_url or dispatch_result.workflow_url
     connection_info = f" for connection {connection_id}" if connection_id else ""
+    repo_info = f" from {repo}" if repo != ConnectorRepo.AIRBYTE else ""
     return RunRegressionTestsResponse(
         run_id=run_id,
         status=TestRunStatus.QUEUED,
         message=(
             f"{mode_description.capitalize()} regression test workflow triggered "
-            f"for {connector_name} (PR #{pr}){connection_info}. View progress at: {view_url}"
+            f"for {connector_name} (PR #{pr}{repo_info}){connection_info}. View progress at: {view_url}"
         ),
         workflow_url=dispatch_result.workflow_url,
         github_run_id=dispatch_result.run_id,
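
A hypothetical invocation of the updated tool, for illustration only: the connector name is made up, the PR number is the docstring's own example, the enterprise member name follows the assumed enum above, and parameters not shown (connection_id, skip_compare, etc.) are presumed to have defaults:

    response = run_regression_tests(
        connector_name="source-faker",          # hypothetical connector
        pr=70847,                               # example PR number from the parameter docs
        repo=ConnectorRepo.AIRBYTE_ENTERPRISE,  # new in 0.4.x; the docs say 'airbyte' is the default
    )
    print(response.status, response.github_run_url)
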
@@ -25,7 +25,7 @@ from airbyte_ops_mcp.mcp.cloud_connector_versions import (
     register_cloud_connector_version_tools,
 )
 from airbyte_ops_mcp.mcp.gcp_logs import register_gcp_logs_tools
-from airbyte_ops_mcp.mcp.github import register_github_tools
+from airbyte_ops_mcp.mcp.github_actions import register_github_actions_tools
 from airbyte_ops_mcp.mcp.github_repo_ops import register_github_repo_ops_tools
 from airbyte_ops_mcp.mcp.prerelease import register_prerelease_tools
 from airbyte_ops_mcp.mcp.prod_db_queries import register_prod_db_query_tools
@@ -59,7 +59,7 @@ def register_server_assets(app: FastMCP) -> None:
     """
     register_server_info_resources(app)
     register_github_repo_ops_tools(app)
-    register_github_tools(app)
+    register_github_actions_tools(app)
     register_prerelease_tools(app)
     register_cloud_connector_version_tools(app)
     register_prod_db_query_tools(app)
@@ -26,6 +26,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_CONNECTIONS_BY_DESTINATION_CONNECTOR_AND_ORG,
     SELECT_CONNECTOR_VERSIONS,
     SELECT_DATAPLANES_LIST,
+    SELECT_DESTINATION_CONNECTION_STATS,
     SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
     SELECT_NEW_CONNECTOR_RELEASES,
     SELECT_ORG_WORKSPACES,
@@ -35,6 +36,7 @@ from airbyte_ops_mcp.prod_db_access.sql import (
     SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR,
     SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR,
     SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR,
+    SELECT_SOURCE_CONNECTION_STATS,
     SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
     SELECT_SYNC_RESULTS_FOR_VERSION,
     SELECT_WORKSPACE_INFO,
@@ -497,3 +499,69 @@ def query_workspaces_by_email_domain(
         query_name="SELECT_WORKSPACES_BY_EMAIL_DOMAIN",
         gsm_client=gsm_client,
     )
+
+
+def query_source_connection_stats(
+    connector_definition_id: str,
+    days: int = 7,
+    *,
+    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+) -> list[dict[str, Any]]:
+    """Query aggregate connection stats for a SOURCE connector.
+
+    Returns counts of connections grouped by pinned version, including:
+    - Total, enabled, and active connection counts
+    - Pinned vs unpinned breakdown
+    - Latest attempt status breakdown (succeeded, failed, cancelled, running, unknown)
+
+    Args:
+        connector_definition_id: Source connector definition UUID
+        days: Number of days to look back for "active" connections (default: 7)
+        gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+    Returns:
+        List of dicts with aggregate counts grouped by pinned_version_id
+    """
+    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
+    return _run_sql_query(
+        SELECT_SOURCE_CONNECTION_STATS,
+        parameters={
+            "connector_definition_id": connector_definition_id,
+            "cutoff_date": cutoff_date,
+        },
+        query_name="SELECT_SOURCE_CONNECTION_STATS",
+        gsm_client=gsm_client,
+    )
+
+
+def query_destination_connection_stats(
+    connector_definition_id: str,
+    days: int = 7,
+    *,
+    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+) -> list[dict[str, Any]]:
+    """Query aggregate connection stats for a DESTINATION connector.
+
+    Returns counts of connections grouped by pinned version, including:
+    - Total, enabled, and active connection counts
+    - Pinned vs unpinned breakdown
+    - Latest attempt status breakdown (succeeded, failed, cancelled, running, unknown)
+
+    Args:
+        connector_definition_id: Destination connector definition UUID
+        days: Number of days to look back for "active" connections (default: 7)
+        gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+    Returns:
+        List of dicts with aggregate counts grouped by pinned_version_id
+    """
+    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
+    return _run_sql_query(
+        SELECT_DESTINATION_CONNECTION_STATS,
+        parameters={
+            "connector_definition_id": connector_definition_id,
+            "cutoff_date": cutoff_date,
+        },
+        query_name="SELECT_DESTINATION_CONNECTION_STATS",
+        gsm_client=gsm_client,
+    )
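
A sketch of how these helpers would be called, assuming the module path and using a placeholder definition UUID (the result keys mirror the column aliases in the SQL added below):

    from airbyte_ops_mcp.prod_db_access.queries import query_source_connection_stats  # module path assumed

    rows = query_source_connection_stats(
        connector_definition_id="00000000-0000-0000-0000-000000000000",  # placeholder UUID
        days=30,  # widen the "active" window from the 7-day default
    )
    for row in rows:
        print(row["pinned_version_id"], row["total_connections"], row["failed_connections"])
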
@@ -826,3 +826,111 @@ SELECT_WORKSPACES_BY_EMAIL_DOMAIN = sqlalchemy.text(
     LIMIT :limit
     """
 )
+
+# =============================================================================
+# Connector Connection Stats Queries (Aggregate Counts)
+# =============================================================================
+
+# Count connections by SOURCE connector with latest attempt status breakdown
+# Groups by pinned version and provides counts of succeeded/failed/other attempts
+# Uses a CTE to get the latest attempt per connection, then aggregates
+SELECT_SOURCE_CONNECTION_STATS = sqlalchemy.text(
+    """
+    WITH latest_attempts AS (
+        SELECT DISTINCT ON (connection.id)
+            connection.id AS connection_id,
+            connection.status AS connection_status,
+            scoped_configuration.value AS pinned_version_id,
+            attempts.status::text AS latest_attempt_status
+        FROM connection
+        JOIN actor
+            ON connection.source_id = actor.id
+            AND actor.actor_definition_id = :connector_definition_id
+            AND actor.tombstone = false
+        JOIN workspace
+            ON actor.workspace_id = workspace.id
+            AND workspace.tombstone = false
+        LEFT JOIN scoped_configuration
+            ON scoped_configuration.scope_id = actor.id
+            AND scoped_configuration.key = 'connector_version'
+            AND scoped_configuration.scope_type = 'actor'
+        LEFT JOIN jobs
+            ON jobs.scope = connection.id::text
+            AND jobs.config_type = 'sync'
+            AND jobs.updated_at >= :cutoff_date
+        LEFT JOIN attempts
+            ON attempts.job_id = jobs.id
+        WHERE
+            connection.status != 'deprecated'
+        ORDER BY
+            connection.id,
+            attempts.ended_at DESC NULLS LAST
+    )
+    SELECT
+        pinned_version_id,
+        COUNT(*) AS total_connections,
+        COUNT(*) FILTER (WHERE connection_status = 'active') AS enabled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NOT NULL) AS active_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NOT NULL) AS pinned_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NULL) AS unpinned_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'succeeded') AS succeeded_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'failed') AS failed_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'cancelled') AS cancelled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'running') AS running_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NULL) AS unknown_connections
+    FROM latest_attempts
+    GROUP BY pinned_version_id
+    ORDER BY total_connections DESC
+    """
+)
+
+# Count connections by DESTINATION connector with latest attempt status breakdown
+SELECT_DESTINATION_CONNECTION_STATS = sqlalchemy.text(
+    """
+    WITH latest_attempts AS (
+        SELECT DISTINCT ON (connection.id)
+            connection.id AS connection_id,
+            connection.status AS connection_status,
+            scoped_configuration.value AS pinned_version_id,
+            attempts.status::text AS latest_attempt_status
+        FROM connection
+        JOIN actor
+            ON connection.destination_id = actor.id
+            AND actor.actor_definition_id = :connector_definition_id
+            AND actor.tombstone = false
+        JOIN workspace
+            ON actor.workspace_id = workspace.id
+            AND workspace.tombstone = false
+        LEFT JOIN scoped_configuration
+            ON scoped_configuration.scope_id = actor.id
+            AND scoped_configuration.key = 'connector_version'
+            AND scoped_configuration.scope_type = 'actor'
+        LEFT JOIN jobs
+            ON jobs.scope = connection.id::text
+            AND jobs.config_type = 'sync'
+            AND jobs.updated_at >= :cutoff_date
+        LEFT JOIN attempts
+            ON attempts.job_id = jobs.id
+        WHERE
+            connection.status != 'deprecated'
+        ORDER BY
+            connection.id,
+            attempts.ended_at DESC NULLS LAST
+    )
+    SELECT
+        pinned_version_id,
+        COUNT(*) AS total_connections,
+        COUNT(*) FILTER (WHERE connection_status = 'active') AS enabled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NOT NULL) AS active_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NOT NULL) AS pinned_connections,
+        COUNT(*) FILTER (WHERE pinned_version_id IS NULL) AS unpinned_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'succeeded') AS succeeded_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'failed') AS failed_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'cancelled') AS cancelled_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status = 'running') AS running_connections,
+        COUNT(*) FILTER (WHERE latest_attempt_status IS NULL) AS unknown_connections
+    FROM latest_attempts
+    GROUP BY pinned_version_id
+    ORDER BY total_connections DESC
+    """
+)
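
The load-bearing construct in both queries is PostgreSQL's DISTINCT ON: combined with ORDER BY connection.id, attempts.ended_at DESC NULLS LAST, it keeps exactly one row per connection — the attempt that finished most recently — with attempt-less connections surviving as NULL-status rows. A plain-Python toy analogue of that reduction, to make the semantics concrete:

    def pick_latest(rows: list[tuple[str, str | None, str | None]]) -> dict[str, tuple]:
        """Toy analogue of DISTINCT ON (id) ... ORDER BY id, ended_at DESC NULLS LAST."""
        latest: dict[str, tuple] = {}
        for conn_id, ended_at, status in rows:
            current = latest.get(conn_id)
            newer = current is None or (
                ended_at is not None and (current[0] is None or ended_at > current[0])
            )
            if newer:
                latest[conn_id] = (ended_at, status)
        return latest

    assert pick_latest([
        ("c1", "2024-06-02", "failed"),
        ("c1", "2024-06-03", "succeeded"),
        ("c2", None, None),  # no sync attempt in the window -> counted as 'unknown'
    ]) == {"c1": ("2024-06-03", "succeeded"), "c2": (None, None)}
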
@@ -105,16 +105,19 @@ class ConnectorRunner:
         if self.config is not None:
             config_path = temp_dir / self.CONFIG_FILE
             config_path.write_text(json.dumps(self.config))
+            config_path.chmod(0o666)
             self.logger.debug(f"Wrote config to {config_path}")
 
         if self.configured_catalog is not None:
             catalog_path = temp_dir / self.CATALOG_FILE
             catalog_path.write_text(self.configured_catalog.json())
+            catalog_path.chmod(0o666)
             self.logger.debug(f"Wrote catalog to {catalog_path}")
 
         if self.state is not None:
             state_path = temp_dir / self.STATE_FILE
             state_path.write_text(json.dumps(self.state))
+            state_path.chmod(0o666)
             self.logger.debug(f"Wrote state to {state_path}")
 
     def _build_docker_command(self, temp_dir: Path) -> list[str]:
@@ -135,7 +138,7 @@ class ConnectorRunner:
             "--name",
             container_name,
             "-v",
-            f"{temp_dir}:{self.DATA_DIR}:ro",
+            f"{temp_dir}:{self.DATA_DIR}",
         ]
 
         if self.proxy_url:
@@ -168,9 +171,10 @@
 
         with tempfile.TemporaryDirectory() as temp_dir:
             temp_path = Path(temp_dir)
-            # Make temp directory world-readable so non-root container users can access it
-            # Many connector images run as non-root users (e.g., 'airbyte' user)
-            temp_path.chmod(0o755)
+            # Make temp directory world-writable so non-root container users can read/write
+            # Many connector images run as non-root users (e.g., 'airbyte' user) with
+            # different UIDs than the host user, so they need write access for config migration
+            temp_path.chmod(0o777)
             self._prepare_data_directory(temp_path)
 
             docker_cmd = self._build_docker_command(temp_path)
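
Taken together, the three ConnectorRunner hunks trade the read-only mount for a read-write one and relax file permissions so a container user with an arbitrary UID can rewrite the mounted files during config migration. A minimal standalone sketch of the resulting pattern — not the package's actual code:

    import json
    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as temp_dir:
        temp_path = Path(temp_dir)
        temp_path.chmod(0o777)  # world-writable: the container user's UID rarely matches the host's

        config_path = temp_path / "config.json"
        config_path.write_text(json.dumps({"api_key": "..."}))
        config_path.chmod(0o666)  # connector may rewrite the config in place

        # Mounted without ":ro" so those writes actually land, e.g.:
        #   docker run -v {temp_path}:/data <connector-image> check --config /data/config.json

The trade-off is that any local user can read or modify the staged files while the run is in flight, which this internal tooling evidently accepts in exchange for working config migration.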