airbyte-internal-ops 0.2.4__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -121,6 +121,7 @@ def get_cloud_connector_version(
 
         # Use vendored API client instead of connector.get_connector_version()
         # Use Config API root for version management operations
+        # Pass workspace_id to get detailed scoped configuration context
         version_data = api_client.get_connector_version(
            connector_id=actor_id,
            connector_type=actor_type,
@@ -128,13 +129,31 @@ def get_cloud_connector_version(
            client_id=auth.client_id,
            client_secret=auth.client_secret,
            bearer_token=auth.bearer_token,
+            workspace_id=workspace_id,
+        )
+
+        # Determine if version is pinned from scoped config context (more reliable)
+        # The API's isVersionOverrideApplied only returns true for USER-created pins,
+        # not system-generated pins (e.g., breaking_change origin). Check scopedConfigs
+        # for a more accurate picture of whether ANY pin exists.
+        scoped_configs = version_data.get("scopedConfigs", {})
+        has_any_pin = (
+            any(config is not None for config in scoped_configs.values())
+            if scoped_configs
+            else False
+        )
+
+        # Use scoped config existence as the source of truth for "is pinned"
+        # Fall back to API's isVersionOverrideApplied if no scoped config data
+        is_pinned = (
+            has_any_pin if scoped_configs else version_data["isVersionOverrideApplied"]
        )
 
        return ConnectorVersionInfo(
            connector_id=actor_id,
            connector_type=actor_type,
            version=version_data["dockerImageTag"],
-            is_version_pinned=version_data["isVersionOverrideApplied"],
+            is_version_pinned=is_pinned,
        )
    except CloudAuthError:
        raise
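
The hunk above swaps a single API flag for a derived value. Below is a minimal, illustrative sketch of the new decision rule using hypothetical response payloads; the field names come from the diff, but the payload shapes and the standalone helper are assumptions, not part of the package.

# Hypothetical payloads for illustration only (shapes are assumed, not from the real API).
version_data_user_pin = {
    "dockerImageTag": "1.2.3",
    "isVersionOverrideApplied": True,
    "scopedConfigs": {"workspace": {"originType": "user"}},
}
version_data_system_pin = {
    "dockerImageTag": "1.2.3",
    "isVersionOverrideApplied": False,  # system-generated pins are not reported by this flag
    "scopedConfigs": {"actor": {"originType": "breaking_change"}},
}


def is_pinned(version_data: dict) -> bool:
    """Re-statement of the logic in the diff: any non-null scoped config counts as a pin."""
    scoped_configs = version_data.get("scopedConfigs", {})
    if scoped_configs:
        return any(config is not None for config in scoped_configs.values())
    return version_data["isVersionOverrideApplied"]


assert is_pinned(version_data_user_pin) is True
assert is_pinned(version_data_system_pin) is True  # caught even though the API flag is False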
@@ -0,0 +1,92 @@
+# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
+"""MCP tools for GCP Cloud Logging operations.
+
+This module provides MCP tools for querying GCP Cloud Logging,
+particularly for looking up error details by error ID.
+"""
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from fastmcp import FastMCP
+from pydantic import Field
+
+from airbyte_ops_mcp.gcp_logs import (
+    GCPLogSearchResult,
+    GCPSeverity,
+    fetch_error_logs,
+)
+from airbyte_ops_mcp.gcp_logs.error_lookup import DEFAULT_GCP_PROJECT
+from airbyte_ops_mcp.mcp._mcp_utils import mcp_tool, register_mcp_tools
+
+
+@mcp_tool(
+    read_only=True,
+    idempotent=True,
+)
+def lookup_cloud_backend_error(
+    error_id: Annotated[
+        str,
+        Field(
+            description=(
+                "The error ID (UUID) to search for. This is typically returned "
+                "in API error responses as {'errorId': '...'}"
+            )
+        ),
+    ],
+    project: Annotated[
+        str,
+        Field(
+            default=DEFAULT_GCP_PROJECT,
+            description=(
+                "GCP project ID to search in. Defaults to 'prod-ab-cloud-proj' "
+                "(Airbyte Cloud production)."
+            ),
+        ),
+    ],
+    lookback_days: Annotated[
+        int,
+        Field(
+            default=7,
+            description="Number of days to look back in logs. Defaults to 7.",
+        ),
+    ],
+    min_severity_filter: Annotated[
+        GCPSeverity | None,
+        Field(
+            default=None,
+            description="Optional minimum severity level to filter logs.",
+        ),
+    ],
+    max_log_entries: Annotated[
+        int,
+        Field(
+            default=200,
+            description="Maximum number of log entries to return. Defaults to 200.",
+        ),
+    ],
+) -> GCPLogSearchResult:
+    """Look up error details from GCP Cloud Logging by error ID.
+
+    When an Airbyte Cloud API returns an error response with only an error ID
+    (e.g., {"errorId": "3173452e-8f22-4286-a1ec-b0f16c1e078a"}), this tool
+    fetches the full stack trace and error details from GCP Cloud Logging.
+
+    The tool searches for log entries containing the error ID and fetches
+    related entries (multi-line stack traces) from the same timestamp and pod.
+
+    Requires GCP credentials with Logs Viewer role on the target project.
+    """
+    return fetch_error_logs(
+        error_id=error_id,
+        project=project,
+        lookback_days=lookback_days,
+        min_severity_filter=min_severity_filter,
+        max_log_entries=max_log_entries,
+    )
+
+
+def register_gcp_logs_tools(app: FastMCP) -> None:
+    """Register GCP logs tools with the FastMCP app."""
+    register_mcp_tools(app)
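
For context, here is a hedged usage sketch (not part of the package) of how an MCP client could invoke the new tool once the server is running; the server URL, transport, and argument values are assumptions for illustration only.

# Illustrative client call; the URL is a placeholder and the argument values are examples.
import asyncio

from fastmcp import Client


async def main() -> None:
    async with Client("http://localhost:8000/mcp") as client:  # assumed local server address
        result = await client.call_tool(
            "lookup_cloud_backend_error",
            {
                "error_id": "3173452e-8f22-4286-a1ec-b0f16c1e078a",  # example ID from the docstring
                "lookback_days": 3,
            },
        )
        print(result)


asyncio.run(main())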
@@ -8,6 +8,7 @@ airbyte_ops_mcp.prod_db_access.queries for use by AI agents.
 from __future__ import annotations
 
 from datetime import datetime
+from enum import StrEnum
 from typing import Annotated, Any
 
 import requests
@@ -25,11 +26,21 @@ from airbyte_ops_mcp.prod_db_access.queries import (
    query_dataplanes_list,
    query_failed_sync_attempts_for_connector,
    query_new_connector_releases,
-    query_sync_results_for_version,
+    query_recent_syncs_for_connector,
+    query_syncs_for_version_pinned_connector,
    query_workspace_info,
    query_workspaces_by_email_domain,
 )
 
+
+class StatusFilter(StrEnum):
+    """Filter for job status in sync queries."""
+
+    ALL = "all"
+    SUCCEEDED = "succeeded"
+    FAILED = "failed"
+
+
 # Cloud UI base URL for building connection URLs
 CLOUD_UI_BASE_URL = "https://cloud.airbyte.com"
 
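
A short aside on the StatusFilter enum introduced above: StrEnum members behave as plain strings, so a StatusFilter value can be handed straight to query helpers that accept a str status filter. The enum body is copied from the diff; the assertions are illustrative (Python 3.11+).

from enum import StrEnum


class StatusFilter(StrEnum):
    """Filter for job status in sync queries."""

    ALL = "all"
    SUCCEEDED = "succeeded"
    FAILED = "failed"


assert StatusFilter.SUCCEEDED == "succeeded"
assert f"status={StatusFilter.FAILED}" == "status=failed"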
@@ -293,7 +304,7 @@ def query_prod_actors_by_connector_version(
    read_only=True,
    idempotent=True,
 )
-def query_prod_connector_version_sync_results(
+def query_prod_recent_syncs_for_version_pinned_connector(
    connector_version_id: Annotated[
        str,
        Field(description="Connector version UUID to find sync results for"),
@@ -314,11 +325,16 @@ def query_prod_connector_version_sync_results(
        ),
    ] = False,
 ) -> list[dict[str, Any]]:
-    """List sync job results for actors pinned to a specific connector version.
+    """List sync job results for actors PINNED to a specific connector version.
 
-    Returns sync job results for connections using actors that are pinned
-    to the specified version. Useful for monitoring rollout health and
-    identifying issues with specific connector versions.
+    IMPORTANT: This tool ONLY returns results for actors that have been explicitly
+    pinned to the specified version via scoped_configuration. Most connections run
+    unpinned and will NOT appear in these results.
+
+    Use this tool when you want to monitor rollout health for actors that have been
+    explicitly pinned to a pre-release or specific version. For finding healthy
+    connections across ALL actors using a connector type (regardless of pinning),
+    use query_prod_recent_syncs_for_connector instead.
 
    The actor_id field is the actor ID (superset of source_id/destination_id).
 
@@ -327,7 +343,7 @@ def query_prod_connector_version_sync_results(
    pin_origin_type, pin_origin, workspace_id, workspace_name, organization_id,
    dataplane_group_id, dataplane_name
    """
-    return query_sync_results_for_version(
+    return query_syncs_for_version_pinned_connector(
        connector_version_id,
        days=days,
        limit=limit,
@@ -335,6 +351,163 @@
    )
 
 
+@mcp_tool(
+    read_only=True,
+    idempotent=True,
+    open_world=True,
+)
+def query_prod_recent_syncs_for_connector(
+    source_definition_id: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Source connector definition ID (UUID) to search for. "
+                "Provide this OR source_canonical_name OR destination_definition_id "
+                "OR destination_canonical_name (exactly one required). "
+                "Example: 'afa734e4-3571-11ec-991a-1e0031268139' for YouTube Analytics."
+            ),
+            default=None,
+        ),
+    ],
+    source_canonical_name: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Canonical source connector name to search for. "
+                "Provide this OR source_definition_id OR destination_definition_id "
+                "OR destination_canonical_name (exactly one required). "
+                "Examples: 'source-youtube-analytics', 'YouTube Analytics'."
+            ),
+            default=None,
+        ),
+    ],
+    destination_definition_id: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Destination connector definition ID (UUID) to search for. "
+                "Provide this OR destination_canonical_name OR source_definition_id "
+                "OR source_canonical_name (exactly one required). "
+                "Example: '94bd199c-2ff0-4aa2-b98e-17f0acb72610' for DuckDB."
+            ),
+            default=None,
+        ),
+    ],
+    destination_canonical_name: Annotated[
+        str | None,
+        Field(
+            description=(
+                "Canonical destination connector name to search for. "
+                "Provide this OR destination_definition_id OR source_definition_id "
+                "OR source_canonical_name (exactly one required). "
+                "Examples: 'destination-duckdb', 'DuckDB'."
+            ),
+            default=None,
+        ),
+    ],
+    status_filter: Annotated[
+        StatusFilter,
+        Field(
+            description=(
+                "Filter by job status: 'all' (default), 'succeeded', or 'failed'. "
+                "Use 'succeeded' to find healthy connections with recent successful syncs. "
+                "Use 'failed' to find connections with recent failures."
+            ),
+            default=StatusFilter.ALL,
+        ),
+    ],
+    organization_id: Annotated[
+        str | OrganizationAliasEnum | None,
+        Field(
+            description=(
+                "Optional organization ID (UUID) or alias to filter results. "
+                "If provided, only syncs from this organization will be returned. "
+                "Accepts '@airbyte-internal' as an alias for the Airbyte internal org."
+            ),
+            default=None,
+        ),
+    ],
+    lookback_days: Annotated[
+        int,
+        Field(description="Number of days to look back (default: 7)", default=7),
+    ],
+    limit: Annotated[
+        int,
+        Field(description="Maximum number of results (default: 100)", default=100),
+    ],
+) -> list[dict[str, Any]]:
+    """List recent sync jobs for ALL actors using a connector type.
+
+    This tool finds all actors with the given connector definition and returns their
+    recent sync jobs, regardless of whether they have explicit version pins. It filters
+    out deleted actors, deleted workspaces, and deprecated connections.
+
+    Use this tool to:
+    - Find healthy connections with recent successful syncs (status_filter='succeeded')
+    - Investigate connector issues across all users (status_filter='failed')
+    - Get an overview of all recent sync activity (status_filter='all')
+
+    Supports both SOURCE and DESTINATION connectors. Provide exactly one of:
+    source_definition_id, source_canonical_name, destination_definition_id,
+    or destination_canonical_name.
+
+    Key fields in results:
+    - job_status: 'succeeded', 'failed', 'cancelled', etc.
+    - connection_id, connection_name: The connection that ran the sync
+    - actor_id, actor_name: The source or destination actor
+    - pin_origin_type, pin_origin, pinned_version_id: Version pin context (NULL if not pinned)
+    """
+    # Validate that exactly one connector parameter is provided
+    provided_params = [
+        source_definition_id,
+        source_canonical_name,
+        destination_definition_id,
+        destination_canonical_name,
+    ]
+    num_provided = sum(p is not None for p in provided_params)
+    if num_provided != 1:
+        raise PyAirbyteInputError(
+            message=(
+                "Exactly one of source_definition_id, source_canonical_name, "
+                "destination_definition_id, or destination_canonical_name must be provided."
+            ),
+        )
+
+    # Determine if this is a destination connector
+    is_destination = (
+        destination_definition_id is not None or destination_canonical_name is not None
+    )
+
+    # Resolve canonical name to definition ID if needed
+    resolved_definition_id: str
+    if source_canonical_name:
+        resolved_definition_id = _resolve_canonical_name_to_definition_id(
+            canonical_name=source_canonical_name,
+        )
+    elif destination_canonical_name:
+        resolved_definition_id = _resolve_canonical_name_to_definition_id(
+            canonical_name=destination_canonical_name,
+        )
+    elif source_definition_id:
+        resolved_definition_id = source_definition_id
+    else:
+        # We've validated exactly one param is provided, so this must be set
+        assert destination_definition_id is not None
+        resolved_definition_id = destination_definition_id
+
+    # Resolve organization ID alias
+    resolved_organization_id = OrganizationAliasEnum.resolve(organization_id)
+
+    return query_recent_syncs_for_connector(
+        connector_definition_id=resolved_definition_id,
+        is_destination=is_destination,
+        status_filter=status_filter,
+        organization_id=resolved_organization_id,
+        days=lookback_days,
+        limit=limit,
+    )
+
+
 @mcp_tool(
    read_only=True,
    idempotent=True,
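
The new tool enforces an "exactly one identifier" contract before resolving names. Below is a self-contained sketch of that guard; it is illustrative only, so PyAirbyteInputError is replaced with ValueError and the helper name is hypothetical.

def pick_definition_identifier(
    source_definition_id: str | None = None,
    source_canonical_name: str | None = None,
    destination_definition_id: str | None = None,
    destination_canonical_name: str | None = None,
) -> tuple[str, bool]:
    """Return (identifier, is_destination) after validating that exactly one input was given."""
    provided = [
        source_definition_id,
        source_canonical_name,
        destination_definition_id,
        destination_canonical_name,
    ]
    if sum(p is not None for p in provided) != 1:
        raise ValueError("Exactly one connector identifier must be provided.")
    is_destination = (
        destination_definition_id is not None or destination_canonical_name is not None
    )
    return next(p for p in provided if p is not None), is_destination


assert pick_definition_identifier(source_canonical_name="source-youtube-analytics") == (
    "source-youtube-analytics",
    False,
)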
@@ -24,6 +24,7 @@ from airbyte_ops_mcp.constants import MCP_SERVER_NAME
 from airbyte_ops_mcp.mcp.cloud_connector_versions import (
    register_cloud_connector_version_tools,
 )
+from airbyte_ops_mcp.mcp.gcp_logs import register_gcp_logs_tools
 from airbyte_ops_mcp.mcp.github import register_github_tools
 from airbyte_ops_mcp.mcp.github_repo_ops import register_github_repo_ops_tools
 from airbyte_ops_mcp.mcp.prerelease import register_prerelease_tools
@@ -62,6 +63,7 @@ def register_server_assets(app: FastMCP) -> None:
    register_prerelease_tools(app)
    register_cloud_connector_version_tools(app)
    register_prod_db_query_tools(app)
+    register_gcp_logs_tools(app)
    register_prompts(app)
    register_regression_tests_tools(app)
 
@@ -29,6 +29,12 @@ from airbyte_ops_mcp.prod_db_access.sql import (
    SELECT_FAILED_SYNC_ATTEMPTS_FOR_CONNECTOR,
    SELECT_NEW_CONNECTOR_RELEASES,
    SELECT_ORG_WORKSPACES,
+    SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR,
+    SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR,
+    SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR,
+    SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR,
+    SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR,
+    SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR,
    SELECT_SUCCESSFUL_SYNCS_FOR_VERSION,
    SELECT_SYNC_RESULTS_FOR_VERSION,
    SELECT_WORKSPACE_INFO,
@@ -227,7 +233,7 @@ def query_actors_pinned_to_version(
 )
 
 
-def query_sync_results_for_version(
+def query_syncs_for_version_pinned_connector(
    connector_version_id: str,
    days: int = 7,
    limit: int = 100,
@@ -320,6 +326,81 @@ def query_failed_sync_attempts_for_connector(
    return results
 
 
+def query_recent_syncs_for_connector(
+    connector_definition_id: str,
+    is_destination: bool = False,
+    status_filter: str = "all",
+    organization_id: str | None = None,
+    days: int = 7,
+    limit: int = 100,
+    *,
+    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
+) -> list[dict[str, Any]]:
+    """Query recent sync jobs for ALL actors using a connector definition.
+
+    Finds all actors with the given actor_definition_id and returns their sync jobs,
+    regardless of whether they have explicit version pins. Filters out deleted actors,
+    deleted workspaces, and deprecated connections.
+
+    This is useful for finding healthy connections with recent successful syncs,
+    or for investigating connector issues across all users.
+
+    Args:
+        connector_definition_id: Connector definition UUID to filter by
+        is_destination: If True, query destination connectors; if False, query sources
+        status_filter: Filter by job status - "all", "succeeded", or "failed"
+        organization_id: Optional organization UUID to filter results by (post-query filter)
+        days: Number of days to look back (default: 7)
+        limit: Maximum number of results (default: 100)
+        gsm_client: GCP Secret Manager client. If None, a new client will be instantiated.
+
+    Returns:
+        List of sync job records with workspace info and optional pin context
+    """
+    cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
+
+    # Select the appropriate query based on connector type and status filter
+    if is_destination:
+        if status_filter == "succeeded":
+            query = SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR
+            query_name = "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR"
+        elif status_filter == "failed":
+            query = SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR
+            query_name = "SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR"
+        else:
+            query = SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR
+            query_name = "SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR"
+    else:
+        if status_filter == "succeeded":
+            query = SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR
+            query_name = "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR"
+        elif status_filter == "failed":
+            query = SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR
+            query_name = "SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR"
+        else:
+            query = SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR
+            query_name = "SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR"
+
+    results = _run_sql_query(
+        query,
+        parameters={
+            "connector_definition_id": connector_definition_id,
+            "cutoff_date": cutoff_date,
+            "limit": limit,
+        },
+        query_name=query_name,
+        gsm_client=gsm_client,
+    )
+
+    # Post-query filter by organization_id if provided
+    if organization_id is not None:
+        results = [
+            r for r in results if str(r.get("organization_id")) == organization_id
+        ]
+
+    return results
+
+
 def query_dataplanes_list(
    *,
    gsm_client: secretmanager.SecretManagerServiceClient | None = None,
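
The nested if/elif dispatch in query_recent_syncs_for_connector could equally be expressed as a lookup table keyed on (is_destination, status_filter). The sketch below is illustrative only; the SELECT_* values are stand-in strings rather than the real SQL constants.

QUERY_NAME_BY_KIND_AND_STATUS: dict[tuple[bool, str], str] = {
    (False, "all"): "SELECT_RECENT_SYNCS_FOR_SOURCE_CONNECTOR",
    (False, "succeeded"): "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_SOURCE_CONNECTOR",
    (False, "failed"): "SELECT_RECENT_FAILED_SYNCS_FOR_SOURCE_CONNECTOR",
    (True, "all"): "SELECT_RECENT_SYNCS_FOR_DESTINATION_CONNECTOR",
    (True, "succeeded"): "SELECT_RECENT_SUCCESSFUL_SYNCS_FOR_DESTINATION_CONNECTOR",
    (True, "failed"): "SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR",
}


def select_query_name(is_destination: bool, status_filter: str) -> str:
    """Unknown statuses fall back to the 'all' query, matching the else branches above."""
    status = status_filter if status_filter in ("succeeded", "failed") else "all"
    return QUERY_NAME_BY_KIND_AND_STATUS[(is_destination, status)]


assert select_query_name(True, "failed") == "SELECT_RECENT_FAILED_SYNCS_FOR_DESTINATION_CONNECTOR"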