airbyte-internal-ops 0.1.4__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff shows the differences between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: airbyte-internal-ops
- Version: 0.1.4
+ Version: 0.1.5
  Summary: MCP and API interfaces that let the agents do the admin work
  Author-email: Aaron Steers <aj@airbyte.io>
  Keywords: admin,airbyte,api,mcp
@@ -352,16 +352,16 @@ airbyte_ops_mcp/_legacy/airbyte_ci/metadata_service/templates/render.py,sha256=g
  airbyte_ops_mcp/_legacy/airbyte_ci/metadata_service/validators/metadata_validator.py,sha256=xh3Q7XgXcFaHS4Fa9A6anDecudaEXQX-SOetjgm5L4Q,14653
  airbyte_ops_mcp/airbyte_repo/__init__.py,sha256=3yEpCOop1h33UuCtU7-1UB3kun2QsxtgvsfVEpuqN6s,1572
  airbyte_ops_mcp/airbyte_repo/bump_version.py,sha256=rtXYv105BhElj7twQmy20VjNsj6vCb0Ulrzmc4Z-1cE,15384
- airbyte_ops_mcp/airbyte_repo/list_connectors.py,sha256=IyYLtcBDsRmxe0SGJd3VHsO_0iaiTjsNMV6BqBUtZPI,14634
- airbyte_ops_mcp/airbyte_repo/utils.py,sha256=IZ7bJJz3I0BBfSxRfINY193VylMeg2GEXe0A1ZCiR80,3030
+ airbyte_ops_mcp/airbyte_repo/list_connectors.py,sha256=rMiQFPGANSQzaBncVUh4oO8zdLafU4gxxqZB9o7_FAA,15714
+ airbyte_ops_mcp/airbyte_repo/utils.py,sha256=TXlOAfhiu_hVRNjCxB4PRPVDhTWCU5lYmgqz4QG_-EA,3201
  airbyte_ops_mcp/cli/__init__.py,sha256=XpL7FyVfgabfBF2JR7u7NwJ2krlYqjd_OwLcWf-Xc7s,114
  airbyte_ops_mcp/cli/_base.py,sha256=I8tWnyQf0ks4r3J8N8h-5GZxyn37T-55KsbuHnxYlcg,415
  airbyte_ops_mcp/cli/_shared.py,sha256=jg-xMyGzTCGPqKd8VTfE_3kGPIyO_3Kx5sQbG4rPc0Y,1311
  airbyte_ops_mcp/cli/app.py,sha256=SEdBpqFUG2O8zGV5ifwptxrLGFph_dLr66-MX9d69gQ,789
- airbyte_ops_mcp/cli/cloud.py,sha256=iEkwromM8VByNa9V-2zBnAJZfr6CzCTiYSajqu4hSok,31996
+ airbyte_ops_mcp/cli/cloud.py,sha256=BMFYs5bTEgdOhxwzBrtSyYMKaHhXnMM_SGzK2hFDPBY,32076
  airbyte_ops_mcp/cli/gh.py,sha256=91b1AxFXvHQCFyXhrrym-756ZjnMCqvxFdmwCtma1zI,2046
  airbyte_ops_mcp/cli/registry.py,sha256=wHyfiysASuy-HGvLJIiU8TRguaiuqRaXQP9QJ-LC7bk,2940
- airbyte_ops_mcp/cli/repo.py,sha256=2LurE6AvRpdgYpiRZuvA33bI1-e7_e0QsFbEO7Bba_g,15780
+ airbyte_ops_mcp/cli/repo.py,sha256=G1hoQpH0XYhUH3FFOsia9xabGB0LP9o3XcwBuqvFVo0,16331
  airbyte_ops_mcp/cloud_admin/__init__.py,sha256=cqE96Q10Kp6elhH9DAi6TVsIwSUy3sooDLLrxTaktGk,816
  airbyte_ops_mcp/cloud_admin/api_client.py,sha256=4vZv1J4S2Q8ETl6gIB20X1X6KHTVV-bx__b2Ax8oqyc,17358
  airbyte_ops_mcp/cloud_admin/auth.py,sha256=j45pRR8fg6CLwVdn7Uu5KW_kTz_CjRP6ZJGUzqHj_Dk,2558
@@ -400,14 +400,14 @@ airbyte_ops_mcp/mcp/connector_analysis.py,sha256=OC4KrOSkMkKPkOisWnSv96BDDE5TQYH
  airbyte_ops_mcp/mcp/connector_qa.py,sha256=aImpqdnqBPDrz10BS0owsV4kuIU2XdalzgbaGZsbOL0,258
  airbyte_ops_mcp/mcp/github.py,sha256=5ZPsSTy4-gummS96xGoG-n2RwCgyg3-UWAvmEmxd5x4,7686
  airbyte_ops_mcp/mcp/github_repo_ops.py,sha256=D7yDtqMISFqaUzqnyA0dLE_6j6G3wHrNz8Byo8ajR8E,4929
- airbyte_ops_mcp/mcp/live_tests.py,sha256=4KTGCfct69WTCCrxZ23l-Nrz3iKZA5tuieNM_Aa4aTs,17408
+ airbyte_ops_mcp/mcp/live_tests.py,sha256=KnxZLuUNmm_3Clt0DU8H9rJ01zOKefnL_wqdSCMDjkE,17992
  airbyte_ops_mcp/mcp/metadata.py,sha256=fwGW97WknR5lfKcQnFtK6dU87aA6TmLj1NkKyqDAV9g,270
  airbyte_ops_mcp/mcp/prerelease.py,sha256=2Mr0LdCLhEc9Q7CEtmganJXHGHCLCXODKlkSapLsSsY,9484
  airbyte_ops_mcp/mcp/prompts.py,sha256=6opN4ZweQxfSdtoK0gL6wTrlxkRvxTQvH1VTmAuhoBE,1645
  airbyte_ops_mcp/mcp/registry.py,sha256=PW-VYUj42qx2pQ_apUkVaoUFq7VgB9zEU7-aGrkSCCw,290
  airbyte_ops_mcp/mcp/server.py,sha256=oJOrwZP7hAwx2pjChZUZYv-aA4NITWiCRiXFiKVOf8E,2843
- airbyte_ops_mcp/mcp/server_info.py,sha256=VcNLn6CSy2UcgM2V1ep9-JGwV63p3cK1G14BFHXEn0U,2402
- airbyte_internal_ops-0.1.4.dist-info/METADATA,sha256=5vi9tCrTgfHK0NnN7hG2jkdRsuBalql3pCVO8nVh2RE,2866
- airbyte_internal_ops-0.1.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- airbyte_internal_ops-0.1.4.dist-info/entry_points.txt,sha256=eUgJ9xIy9PlR-CgRbqRMsh1NVp6jz08v9bul9vCYlU4,111
- airbyte_internal_ops-0.1.4.dist-info/RECORD,,
+ airbyte_ops_mcp/mcp/server_info.py,sha256=4yNBA_N_vUyLwVJqp7abyFuzZkcnv6-ck_Beb2SXqTE,2426
+ airbyte_internal_ops-0.1.5.dist-info/METADATA,sha256=XmVAvFOU6d8fO_atjptKKYgjPPQeBYspi2pupKXHxiY,2866
+ airbyte_internal_ops-0.1.5.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ airbyte_internal_ops-0.1.5.dist-info/entry_points.txt,sha256=eUgJ9xIy9PlR-CgRbqRMsh1NVp6jz08v9bul9vCYlU4,111
+ airbyte_internal_ops-0.1.5.dist-info/RECORD,,
@@ -7,15 +7,22 @@ by comparing git diffs between branches.

  from __future__ import annotations

+ import logging
  import re
  import subprocess
  from dataclasses import dataclass
  from enum import StrEnum
  from functools import lru_cache
  from pathlib import Path
+ from typing import Any
+
+ import yaml

  CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
  METADATA_FILE_NAME = "metadata.yaml"
+ GIT_DEFAULT_BRANCH = "origin/master"
+
+ logger = logging.getLogger(__name__)


  class ConnectorLanguage(StrEnum):
@@ -29,7 +36,7 @@ class ConnectorLanguage(StrEnum):

  def get_modified_connectors(
      repo_path: str | Path,
-     base_ref: str = "origin/main",
+     base_ref: str = GIT_DEFAULT_BRANCH,
      head_ref: str = "HEAD",
  ) -> list[str]:
      """Get list of connector IDs that have been modified.
@@ -40,14 +47,14 @@ def get_modified_connectors(

      Args:
          repo_path: Path to the Airbyte monorepo
-         base_ref: Base git reference to compare against (default: "origin/main")
+         base_ref: Base git reference to compare against (default: "origin/master")
          head_ref: Head git reference to compare (default: "HEAD")

      Returns:
          List of connector technical names (e.g., ["source-faker", "destination-postgres"])

      Example:
-         >>> connectors = get_changed_connectors("/path/to/airbyte", "origin/main")
+         >>> connectors = get_changed_connectors("/path/to/airbyte", "origin/master")
          >>> print(connectors)
          ['source-faker', 'destination-postgres']
      """
@@ -158,6 +165,39 @@ def get_all_connectors(repo_path: str | Path) -> set[str]:
      return {p.name for p in connectors_dir.iterdir() if p.is_dir()}


+ def get_connector_metadata(
+     repo_path: str | Path,
+     connector_name: str,
+ ) -> dict[str, Any] | None:
+     """Get metadata for a specific connector.
+
+     Args:
+         repo_path: Path to the Airbyte monorepo
+         connector_name: Technical name of the connector (e.g., "source-faker")
+
+     Returns:
+         The connector's metadata dict (the 'data' section), or None if not found
+
+     Example:
+         >>> metadata = get_connector_metadata("/path/to/airbyte", "source-faker")
+         >>> metadata.get("supportLevel")
+         'certified'
+     """
+     repo_path = Path(repo_path)
+     connector_dir = repo_path / CONNECTOR_PATH_PREFIX / connector_name
+     metadata_file = connector_dir / METADATA_FILE_NAME
+
+     if not metadata_file.exists():
+         return None
+
+     try:
+         with open(metadata_file) as f:
+             metadata = yaml.safe_load(f)
+         return metadata.get("data", {})
+     except Exception:
+         return None
+
+
  def get_connectors_by_language(
      repo_path: str | Path,
      language: ConnectorLanguage,
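
The new get_connector_metadata helper loads a connector's metadata.yaml and returns its 'data' section, or None when the file is missing or unparseable. A minimal usage sketch, assuming both helpers live in airbyte_ops_mcp.airbyte_repo.list_connectors (as the surrounding hunks and RECORD entries suggest) and that an Airbyte monorepo checkout exists at ~/airbyte; the support-level tally is illustrative and not part of the package:

    from pathlib import Path

    from airbyte_ops_mcp.airbyte_repo.list_connectors import (
        get_all_connectors,
        get_connector_metadata,
    )

    repo = Path.home() / "airbyte"  # assumed location of the monorepo checkout

    # Tally supportLevel across every connector that has a readable metadata.yaml.
    support_levels: dict[str, str] = {}
    for name in sorted(get_all_connectors(repo)):
        metadata = get_connector_metadata(repo, name)
        if metadata is None:
            continue  # no metadata.yaml, or it failed to parse
        support_levels[name] = metadata.get("supportLevel", "unknown")

    certified = [n for n, lvl in support_levels.items() if lvl == "certified"]
    print(f"{len(certified)} certified connectors out of {len(support_levels)}")
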
@@ -408,7 +448,7 @@ def list_connectors(

      # Apply modified filter
      if modified is not None:
-         base = base_ref if base_ref is not None else "origin/main"
+         base = base_ref if base_ref is not None else GIT_DEFAULT_BRANCH
          head = head_ref if head_ref is not None else "HEAD"
          changed_set = set(get_modified_connectors(repo_path, base, head))
          if modified:
@@ -81,9 +81,11 @@ def resolve_diff_range(

      # Determine base_ref and head_ref based on PR detection
      if pr_number is not None:
-         # PR detected - use origin/main vs HEAD (assumes CI checked out the PR)
-         # TODO: In future, use GitHub API to get PR diff directly
-         base_ref = "origin/main"
+         # PR detected - use origin/{base_branch} vs HEAD (assumes CI checked out the PR)
+         # Use GITHUB_BASE_REF if available (set by GitHub Actions for PRs)
+         # This handles repos with different default branches (main, master, etc.)
+         base_branch = os.getenv("GITHUB_BASE_REF", "master")
+         base_ref = f"origin/{base_branch}"
          head_ref = "HEAD"
      else:
          # No PR detected - fallback to HEAD~1 vs HEAD (post-merge use case)
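
This resolve_diff_range change stops hard-coding origin/main as the PR base: it reads GITHUB_BASE_REF, which GitHub Actions sets to the PR's base branch for pull_request events, and falls back to master when the variable is absent. A standalone sketch of the same resolution logic; the function name is hypothetical, and the HEAD~1 fallback mirrors the post-merge branch shown above:

    import os


    def resolve_base_ref(pr_number: int | None) -> str:
        """Hypothetical helper mirroring the base-ref resolution in this diff."""
        if pr_number is not None:
            # GitHub Actions exports GITHUB_BASE_REF for pull_request-triggered jobs.
            base_branch = os.getenv("GITHUB_BASE_REF", "master")
            return f"origin/{base_branch}"
        # No PR detected: compare against the previous commit (post-merge use case).
        return "HEAD~1"


    # In a PR job targeting master: resolve_base_ref(123) == "origin/master"
    # In a post-merge run:          resolve_base_ref(None) == "HEAD~1"
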
@@ -23,7 +23,12 @@ from airbyte_protocol.models import ConfiguredAirbyteCatalog
  from cyclopts import App, Parameter

  from airbyte_ops_mcp.cli._base import app
- from airbyte_ops_mcp.cli._shared import print_error, print_json, print_success
+ from airbyte_ops_mcp.cli._shared import (
+     exit_with_error,
+     print_error,
+     print_json,
+     print_success,
+ )
  from airbyte_ops_mcp.cloud_admin.connection_config import fetch_connection_config
  from airbyte_ops_mcp.live_tests.ci_output import (
      generate_regression_report,
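
The remaining changes to this CLI module swap the print_error(...) plus bare return pattern for exit_with_error(...), so validation and image-pull failures in live_test and regression_test terminate the process with a non-zero exit code that CI can detect, instead of returning cleanly after logging. exit_with_error is defined in airbyte_ops_mcp/cli/_shared.py and its body is not part of this diff; a plausible minimal shape, assuming it simply prints the message and exits with status 1:

    import sys
    from typing import NoReturn


    def exit_with_error(message: str) -> NoReturn:
        # Sketch only: the real helper in airbyte_ops_mcp/cli/_shared.py may format
        # output differently, but the contract the callers rely on is the same:
        # report the error and terminate with a failing exit code.
        print(f"Error: {message}", file=sys.stderr)
        sys.exit(1)
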
@@ -391,12 +396,11 @@ def live_test(
      # If connector_name is provided, build the image from source
      if connector_name:
          if connector_image:
-             print_error("Cannot specify both connector_image and connector_name")
              write_github_output("success", False)
              write_github_output(
                  "error", "Cannot specify both connector_image and connector_name"
              )
-             return
+             exit_with_error("Cannot specify both connector_image and connector_name")

          repo_root_path = Path(repo_root) if repo_root else None
          built_image = _build_connector_image_from_source(
@@ -407,19 +411,18 @@ def live_test(
          if not built_image:
              write_github_output("success", False)
              write_github_output("error", f"Failed to build image for {connector_name}")
-             return
+             exit_with_error(f"Failed to build image for {connector_name}")
          resolved_connector_image = built_image

      if connection_id:
          if config_path or catalog_path:
-             print_error(
-                 "Cannot specify both connection_id and config_path/catalog_path"
-             )
              write_github_output("success", False)
              write_github_output(
                  "error", "Cannot specify both connection_id and file paths"
              )
-             return
+             exit_with_error(
+                 "Cannot specify both connection_id and config_path/catalog_path"
+             )

          print_success(f"Fetching config/catalog from connection: {connection_id}")
          connection_data = fetch_connection_data(connection_id)
@@ -439,25 +442,23 @@ def live_test(
      catalog_file = Path(catalog_path) if catalog_path else None

      if not resolved_connector_image:
-         print_error(
+         write_github_output("success", False)
+         write_github_output("error", "Missing connector image")
+         exit_with_error(
              "You must provide one of the following: a connector_image, a connector_name, "
              "or a connection_id for a connection that has an associated connector image. "
              "If using connection_id, ensure the connection has a connector image configured."
          )
-         write_github_output("success", False)
-         write_github_output("error", "Missing connector image")
-         return

      # If connector_name was provided, we just built the image locally and it is already
      # available in Docker, so we skip the image availability check/pull. Only try to pull
      # if we didn't just build it (i.e., using a pre-built image from registry).
      if not connector_name and not ensure_image_available(resolved_connector_image):
-         print_error(f"Failed to pull connector image: {resolved_connector_image}")
          write_github_output("success", False)
          write_github_output(
              "error", f"Failed to pull image: {resolved_connector_image}"
          )
-         return
+         exit_with_error(f"Failed to pull connector image: {resolved_connector_image}")

      result = _run_connector_command(
          connector_image=resolved_connector_image,
@@ -494,7 +495,7 @@ def live_test(
      if result["success"]:
          print_success(f"Live test passed for {resolved_connector_image}")
      else:
-         print_error(f"Live test failed for {resolved_connector_image}")
+         exit_with_error(f"Live test failed for {resolved_connector_image}")


  def _run_with_optional_http_metrics(
@@ -673,12 +674,11 @@ def regression_test(
      # If connector_name is provided, build the target image from source
      if connector_name:
          if target_image:
-             print_error("Cannot specify both target_image and connector_name")
              write_github_output("success", False)
              write_github_output(
                  "error", "Cannot specify both target_image and connector_name"
              )
-             return
+             exit_with_error("Cannot specify both target_image and connector_name")

          repo_root_path = Path(repo_root) if repo_root else None
          built_image = _build_connector_image_from_source(
@@ -689,19 +689,18 @@ def regression_test(
          if not built_image:
              write_github_output("success", False)
              write_github_output("error", f"Failed to build image for {connector_name}")
-             return
+             exit_with_error(f"Failed to build image for {connector_name}")
          resolved_target_image = built_image

      if connection_id:
          if config_path or catalog_path:
-             print_error(
-                 "Cannot specify both connection_id and config_path/catalog_path"
-             )
              write_github_output("success", False)
              write_github_output(
                  "error", "Cannot specify both connection_id and file paths"
              )
-             return
+             exit_with_error(
+                 "Cannot specify both connection_id and config_path/catalog_path"
+             )

          print_success(f"Fetching config/catalog from connection: {connection_id}")
          connection_data = fetch_connection_data(connection_id)
@@ -723,36 +722,36 @@ def regression_test(

      # Validate that we have both images
      if not resolved_target_image:
-         print_error(
+         write_github_output("success", False)
+         write_github_output("error", "No target image specified")
+         exit_with_error(
              "You must provide one of the following: a target_image or a connector_name "
              "to build the target image from source."
          )
-         write_github_output("success", False)
-         write_github_output("error", "No target image specified")
-         return

      if not resolved_control_image:
-         print_error(
+         write_github_output("success", False)
+         write_github_output("error", "No control image specified")
+         exit_with_error(
              "You must provide one of the following: a control_image or a connection_id "
              "for a connection that has an associated connector image."
          )
-         write_github_output("success", False)
-         write_github_output("error", "No control image specified")
-         return

      # Pull images if they weren't just built locally
      # If connector_name was provided, we just built the target image locally
      if not connector_name and not ensure_image_available(resolved_target_image):
-         print_error(f"Failed to pull target connector image: {resolved_target_image}")
          write_github_output("success", False)
          write_github_output("error", f"Failed to pull image: {resolved_target_image}")
-         return
+         exit_with_error(
+             f"Failed to pull target connector image: {resolved_target_image}"
+         )

      if not ensure_image_available(resolved_control_image):
-         print_error(f"Failed to pull control connector image: {resolved_control_image}")
          write_github_output("success", False)
          write_github_output("error", f"Failed to pull image: {resolved_control_image}")
-         return
+         exit_with_error(
+             f"Failed to pull control connector image: {resolved_control_image}"
+         )

      target_output = output_path / "target"
      control_output = output_path / "control"
@@ -819,7 +818,7 @@ def regression_test(
      write_github_summary(summary)

      if regression_detected:
-         print_error(
+         exit_with_error(
              f"Regression detected between {resolved_target_image} and {resolved_control_image}"
          )
      elif both_succeeded:
@@ -827,7 +826,7 @@ def regression_test(
              f"Regression test passed for {resolved_target_image} vs {resolved_control_image}"
          )
      else:
-         print_error(
+         exit_with_error(
              f"Both versions failed for {resolved_target_image} vs {resolved_control_image}"
          )

@@ -27,6 +27,7 @@ from airbyte_ops_mcp.airbyte_repo.list_connectors import (
      CONNECTOR_PATH_PREFIX,
      METADATA_FILE_NAME,
      _detect_connector_language,
+     get_connectors_with_local_cdk,
  )
  from airbyte_ops_mcp.cli._base import app
  from airbyte_ops_mcp.cli._shared import exit_with_error, print_json
@@ -160,6 +161,15 @@ def list_connectors(
          bool,
          Parameter(help="Include only modified connectors (requires PR context)."),
      ] = False,
+     local_cdk: Annotated[
+         bool,
+         Parameter(
+             help=(
+                 "Include connectors using local CDK reference. "
+                 "When combined with --modified-only, adds local-CDK connectors to the modified set."
+             )
+         ),
+     ] = False,
      language: Annotated[
          list[str] | None,
          Parameter(help="Languages to include (python, java, low-code, manifest-only)."),
@@ -286,6 +296,11 @@ def list_connectors(
      connectors = list(result.connectors)
      repo_path_obj = Path(repo_path)

+     # Add connectors with local CDK reference if --local-cdk flag is set
+     if local_cdk:
+         local_cdk_connectors = get_connectors_with_local_cdk(repo_path)
+         connectors = sorted(set(connectors) | local_cdk_connectors)
+
      # Apply connector type filter
      if connector_type_filter:
          connectors = [
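
With the new --local-cdk flag, connectors whose builds reference a local CDK are merged into the listing via a set union, so they are included even when --modified-only would otherwise filter them out. An illustrative sketch of that merge, assuming get_connectors_with_local_cdk returns a set of connector names; the values here are hypothetical:

    # Hypothetical values for illustration only.
    modified = ["source-faker"]
    local_cdk_connectors = {"source-faker", "source-pokeapi"}

    connectors = sorted(set(modified) | local_cdk_connectors)
    # -> ['source-faker', 'source-pokeapi']
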
@@ -17,6 +17,7 @@ from typing import Annotated, Any

  import requests
  from airbyte.cloud import CloudWorkspace
+ from airbyte.cloud.auth import resolve_cloud_client_id, resolve_cloud_client_secret
  from fastmcp import FastMCP
  from pydantic import BaseModel, Field

@@ -388,15 +389,29 @@ def run_live_connection_tests(

      # Validate workspace membership if workspace_id is provided
      if workspace_id:
-         try:
-             workspace = CloudWorkspace(workspace_id=workspace_id)
-             # This will raise an exception if the connection doesn't belong to the workspace
-             workspace.get_connection(connection_id)
-         except Exception as e:
+         client_id = resolve_cloud_client_id()
+         client_secret = resolve_cloud_client_secret()
+         if not client_id or not client_secret:
+             return RunLiveConnectionTestsResponse(
+                 run_id=run_id,
+                 status=TestRunStatus.FAILED,
+                 message=(
+                     "Missing Airbyte Cloud credentials. "
+                     "Set AIRBYTE_CLOUD_CLIENT_ID and AIRBYTE_CLOUD_CLIENT_SECRET env vars."
+                 ),
+                 workflow_url=None,
+             )
+         workspace = CloudWorkspace(
+             workspace_id=workspace_id,
+             client_id=client_id,
+             client_secret=client_secret,
+         )
+         connection = workspace.get_connection(connection_id)
+         if connection is None:
              return RunLiveConnectionTestsResponse(
                  run_id=run_id,
                  status=TestRunStatus.FAILED,
-                 message=f"Connection {connection_id} validation failed for workspace {workspace_id}: {e}",
+                 message=f"Connection {connection_id} not found in workspace {workspace_id}",
                  workflow_url=None,
              )

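The MCP live-tests change resolves Airbyte Cloud credentials explicitly via resolve_cloud_client_id / resolve_cloud_client_secret and passes them to CloudWorkspace, returning a FAILED response with an actionable message when they are missing, and a "not found" message when the connection is not in the workspace. A minimal sketch of the same validation flow outside the MCP server, mirroring the calls used in the diff (including its treatment of a None return from get_connection); the workspace and connection IDs are placeholders, and the AIRBYTE_CLOUD_CLIENT_ID / AIRBYTE_CLOUD_CLIENT_SECRET env vars are assumed to be set:

    from airbyte.cloud import CloudWorkspace
    from airbyte.cloud.auth import resolve_cloud_client_id, resolve_cloud_client_secret

    client_id = resolve_cloud_client_id()
    client_secret = resolve_cloud_client_secret()
    if not client_id or not client_secret:
        raise SystemExit(
            "Set AIRBYTE_CLOUD_CLIENT_ID and AIRBYTE_CLOUD_CLIENT_SECRET env vars."
        )

    workspace = CloudWorkspace(
        workspace_id="00000000-0000-0000-0000-000000000000",  # placeholder
        client_id=client_id,
        client_secret=client_secret,
    )

    # The diff treats a None return as "connection not found in this workspace".
    connection = workspace.get_connection("11111111-1111-1111-1111-111111111111")  # placeholder
    if connection is None:
        raise SystemExit("Connection not found in workspace.")
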
@@ -19,7 +19,7 @@ from airbyte_ops_mcp.mcp._mcp_utils import (


  @lru_cache(maxsize=1)
- def _get_version_info() -> dict[str, str | None]:
+ def _get_version_info() -> dict[str, str | list[str] | None]:
      """Get version information for the MCP server.

      Returns:
@@ -66,7 +66,7 @@ def _get_version_info() -> dict[str, str | None]:
      mime_type="application/json",
      domain=ToolDomain.SERVER_INFO,
  )
- def mcp_server_info() -> dict[str, str | None]:
+ def mcp_server_info() -> dict[str, str | list[str] | None]:
      """Resource that returns information for the MCP server.

      This includes package version, release history, help URLs, as well as other information.