airbyte-internal-ops 0.6.1__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {airbyte_internal_ops-0.6.1.dist-info → airbyte_internal_ops-0.7.1.dist-info}/METADATA +6 -1
  2. {airbyte_internal_ops-0.6.1.dist-info → airbyte_internal_ops-0.7.1.dist-info}/RECORD +33 -30
  3. airbyte_ops_mcp/_sentry.py +101 -0
  4. airbyte_ops_mcp/cli/app.py +1 -1
  5. airbyte_ops_mcp/cli/{repo.py → local.py} +131 -8
  6. airbyte_ops_mcp/connector_ops/__init__.py +17 -0
  7. airbyte_ops_mcp/connector_ops/utils.py +859 -0
  8. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/assets.py +4 -5
  9. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/__init__.py +1 -1
  10. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/documentation.py +23 -22
  11. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/models.py +7 -7
  12. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/metadata.py +15 -15
  13. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/packaging.py +11 -9
  14. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/security.py +16 -20
  15. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/version.py +94 -18
  16. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/cli.py +6 -8
  17. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/models.py +7 -8
  18. airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/utils.py +2 -2
  19. airbyte_ops_mcp/mcp/_guidance.py +37 -0
  20. airbyte_ops_mcp/mcp/cloud_connector_versions.py +46 -9
  21. airbyte_ops_mcp/mcp/server.py +5 -0
  22. {airbyte_internal_ops-0.6.1.dist-info → airbyte_internal_ops-0.7.1.dist-info}/WHEEL +0 -0
  23. {airbyte_internal_ops-0.6.1.dist-info → airbyte_internal_ops-0.7.1.dist-info}/entry_points.txt +0 -0
  24. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/README.md +0 -0
  25. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/__init__.py +0 -0
  26. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/__init__.py +0 -0
  27. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/helpers.py +0 -0
  28. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/templates/documentation_headers_check_description.md.j2 +0 -0
  29. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/templates/section_content_description.md.j2 +0 -0
  30. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/checks/documentation/templates/template.md.j2 +0 -0
  31. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/consts.py +0 -0
  32. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/templates/__init__.py +0 -0
  33. /airbyte_ops_mcp/{_legacy/airbyte_ci/connector_qa → connector_qa}/templates/qa_checks.md.j2 +0 -0
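The notable changes in 0.7.1 are the promotion of the `connector_qa` package out of `_legacy/airbyte_ci/`, a new `connector_ops` package whose `utils.py` is shown in full below, new Sentry instrumentation (`_sentry.py`), and the `cli/repo.py` → `cli/local.py` rename. A minimal sketch of how the relocated modules would be imported after upgrading, with paths inferred from the file listing above (the exported names are not verified here):

```python
# Import paths inferred from the 0.7.1 file listing; exact public names are assumptions.
from airbyte_ops_mcp.connector_ops.utils import Connector, get_all_connectors_in_repo
from airbyte_ops_mcp.connector_qa import checks  # previously under airbyte_ops_mcp._legacy.airbyte_ci.connector_qa
```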
@@ -0,0 +1,859 @@
+ #
+ # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+ #
+
+ import functools
+ import json
+ import logging
+ import os
+ import re
+ from dataclasses import dataclass
+ from enum import Enum
+ from glob import glob
+ from pathlib import Path
+ from typing import List, Optional, Set, Tuple, Union
+
+ import git
+ import requests
+ import yaml
+ from pydash.collections import find
+ from pydash.objects import get
+ from rich.console import Console
+ from simpleeval import simple_eval
+
+ console = Console()
+
+ DIFFED_BRANCH = os.environ.get("DIFFED_BRANCH", "origin/master")
+ OSS_CATALOG_URL = "https://connectors.airbyte.com/files/registries/v0/oss_registry.json"
+ CLOUD_CATALOG_URL = (
+     "https://connectors.airbyte.com/files/registries/v0/cloud_registry.json"
+ )
+ BASE_AIRBYTE_DOCS_URL = "https://docs.airbyte.com"
+ CONNECTOR_PATH_PREFIX = "airbyte-integrations/connectors"
+ SOURCE_CONNECTOR_PATH_PREFIX = CONNECTOR_PATH_PREFIX + "/source-"
+ DESTINATION_CONNECTOR_PATH_PREFIX = CONNECTOR_PATH_PREFIX + "/destination-"
+
+ THIRD_PARTY_GLOB = "third-party"
+ THIRD_PARTY_CONNECTOR_PATH_PREFIX = CONNECTOR_PATH_PREFIX + f"/{THIRD_PARTY_GLOB}/"
+ SCAFFOLD_CONNECTOR_GLOB = "-scaffold-"
+
+
+ ACCEPTANCE_TEST_CONFIG_FILE_NAME = "acceptance-test-config.yml"
+ METADATA_FILE_NAME = "metadata.yaml"
+ AIRBYTE_DOCKER_REPO = "airbyte"
+ AIRBYTE_REPO_DIRECTORY_NAME = "airbyte"
+ GRADLE_PROJECT_RE_PATTERN = r"project\((['\"])(.+?)\1\)"
+ TEST_GRADLE_DEPENDENCIES = [
+     "testImplementation",
+     "testCompileOnly",
+     "integrationTestJavaImplementation",
+     "performanceTestJavaImplementation",
+     "testFixturesCompileOnly",
+     "testFixturesImplementation",
+ ]
+
+
+ def download_catalog(catalog_url):
+     response = requests.get(catalog_url)
+     response.raise_for_status()
+     return response.json()
+
+
+ OSS_CATALOG = download_catalog(OSS_CATALOG_URL)
+ MANIFEST_FILE_NAME = "manifest.yaml"
+ COMPONENTS_FILE_NAME = "components.py"
+ DOCKERFILE_FILE_NAME = "Dockerfile"
+ PYPROJECT_FILE_NAME = "pyproject.toml"
+ ICON_FILE_NAME = "icon.svg"
+ POETRY_LOCK_FILE_NAME = "poetry.lock"
+
+ STRATEGIC_CONNECTOR_THRESHOLDS = {
+     "sl": 200,
+     "ql": 400,
+ }
+
+ ALLOWED_HOST_THRESHOLD = {
+     "ql": 300,
+ }
+
+
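Note that the module eagerly downloads the OSS registry at import time (`OSS_CATALOG` above), so importing it needs network access. A small usage sketch for `download_catalog`, assuming the import path from the file listing; the `sources`/`destinations` keys mirror how the module itself indexes the registry (`registry[f"{self.connector_type}s"]`):

```python
from airbyte_ops_mcp.connector_ops.utils import OSS_CATALOG_URL, download_catalog

registry = download_catalog(OSS_CATALOG_URL)
# The registry JSON groups definitions by connector type, e.g. "sources" and "destinations".
print(len(registry["sources"]), "source definitions")
print(len(registry["destinations"]), "destination definitions")
```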
+ class ConnectorInvalidNameError(Exception):
+     pass
+
+
+ class ConnectorVersionNotFound(Exception):
+     pass
+
+
+ def get_connector_name_from_path(path):
+     return path.split("/")[2]
+
+
+ def get_changed_metadata(diff_regex: Optional[str] = None) -> Set["Connector"]:
+     """Retrieve the set of connectors for which the metadata file was changed in the current branch (compared to master).
+
+     Args:
+         diff_regex (Optional[str]): Only include files whose diff matches this regex.
+
+     Returns:
+         Set[Connector]: Set of connectors that were changed.
+     """
+     return get_changed_file(METADATA_FILE_NAME, diff_regex)
+
+
+ def get_changed_file(file_name: str, diff_regex: Optional[str] = None) -> Set["Connector"]:
+     """Retrieve the set of connectors for which the given file was changed in the current branch (compared to master).
+
+     Args:
+         file_name (str): Name of the file to look for.
+         diff_regex (Optional[str]): Only include files whose diff matches this regex.
+
+     Returns:
+         Set[Connector]: Set of connectors that were changed.
+     """
+     airbyte_repo = git.Repo(search_parent_directories=True)
+
+     if diff_regex is None:
+         diff_command_args = ("--name-only", DIFFED_BRANCH)
+     else:
+         diff_command_args = ("--name-only", f"-G{diff_regex}", DIFFED_BRANCH)
+
+     changed_acceptance_test_config_paths = {
+         file_path
+         for file_path in airbyte_repo.git.diff(*diff_command_args).split("\n")
+         if file_path.startswith(SOURCE_CONNECTOR_PATH_PREFIX)
+         and file_path.endswith(file_name)
+     }
+     return {
+         Connector(get_connector_name_from_path(changed_file))
+         for changed_file in changed_acceptance_test_config_paths
+     }
+
+
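A usage sketch for the diff helpers above, assuming it runs inside a checkout of the airbyte monorepo with `origin/master` (or `$DIFFED_BRANCH`) available locally; the regex is illustrative:

```python
from airbyte_ops_mcp.connector_ops.utils import get_changed_metadata

# Connectors whose metadata.yaml changed vs. origin/master (or $DIFFED_BRANCH).
changed = get_changed_metadata()
# Restrict to changes whose diff touches the docker image tag.
version_bumps = get_changed_metadata(diff_regex="dockerImageTag")
for connector in changed:
    print(connector.technical_name)
```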
+ def has_local_cdk_ref(build_file: Path) -> bool:
+     """Return true if the build file uses the local CDK.
+
+     Args:
+         build_file (Path): Path to the build.gradle/build.gradle.kts file of the project.
+
+     Returns:
+         bool: True if using local CDK.
+     """
+     contents = "\n".join(
+         [
+             # Return contents without inline code comments
+             line.split("//")[0]
+             for line in build_file.read_text().split("\n")
+         ]
+     )
+     contents = contents.replace(" ", "")
+     return "useLocalCdk=true" in contents
+
+
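A minimal check of the comment-stripping behaviour in `has_local_cdk_ref`, using a throwaway build file (the Gradle line is illustrative, not taken from a real connector):

```python
from pathlib import Path
from tempfile import TemporaryDirectory

from airbyte_ops_mcp.connector_ops.utils import has_local_cdk_ref

with TemporaryDirectory() as tmp:
    build_file = Path(tmp) / "build.gradle"
    # Inline '//' comments are stripped and whitespace removed before matching "useLocalCdk=true".
    build_file.write_text("useLocalCdk = true  // toggle for local CDK development\n")
    assert has_local_cdk_ref(build_file)
```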
+ def get_gradle_dependencies_block(build_file: Path) -> str:
+     """Get the dependencies block of a Gradle file.
+
+     Args:
+         build_file (Path): Path to the build.gradle/build.gradle.kts file of the project.
+
+     Returns:
+         str: The dependencies block of the Gradle file.
+     """
+     contents = build_file.read_text().split("\n")
+     dependency_block = []
+     in_dependencies_block = False
+     for line in contents:
+         if line.strip().startswith("dependencies"):
+             in_dependencies_block = True
+             continue
+         if in_dependencies_block:
+             if line.startswith("}"):
+                 in_dependencies_block = False
+                 break
+             else:
+                 dependency_block.append(line)
+     dependencies_block = "\n".join(dependency_block)
+     return dependencies_block
+
+
+ def parse_gradle_dependencies(build_file: Path) -> Tuple[List[Path], List[Path]]:
+     """Parse the dependencies block of a Gradle file and return the list of project dependencies and test dependencies.
+
+     Args:
+         build_file (Path): Path to the build.gradle/build.gradle.kts file of the project.
+
+     Returns:
+         Tuple[List[Path], List[Path]]: The project dependencies and the test dependencies.
+     """
+
+     dependencies_block = get_gradle_dependencies_block(build_file)
+
+     project_dependencies: List[Path] = []
+     test_dependencies: List[Path] = []
+
+     # Find all matches for test dependencies and regular dependencies
+     matches = re.findall(
+         r"(compileOnly|testCompileOnly|testFixturesCompileOnly|testFixturesImplementation|testImplementation|integrationTestJavaImplementation|performanceTestJavaImplementation|implementation|api).*?project\(['\"](.*?)['\"]\)",
+         dependencies_block,
+     )
+     if matches:
+         # Iterate through each match
+         for match in matches:
+             dependency_type, project_path = match
+             path_parts = project_path.split(":")
+             path = Path(*path_parts)
+
+             if dependency_type in TEST_GRADLE_DEPENDENCIES:
+                 test_dependencies.append(path)
+             else:
+                 project_dependencies.append(path)
+
+     # Dedupe dependencies:
+     project_dependencies = list(set(project_dependencies))
+     test_dependencies = list(set(test_dependencies))
+
+     return project_dependencies, test_dependencies
+
+
+ def get_all_gradle_dependencies(
+     build_file: Path,
+     with_test_dependencies: bool = True,
+     found_dependencies: Optional[List[Path]] = None,
+ ) -> List[Path]:
+     """Recursively retrieve all transitive dependencies of a Gradle project.
+
+     Args:
+         build_file (Path): Path to the build.gradle/build.gradle.kts file of the project.
+         found_dependencies (List[Path]): List of dependencies that have already been found. Defaults to None.
+
+     Returns:
+         List[Path]: All dependencies of the project.
+     """
+     if found_dependencies is None:
+         found_dependencies = []
+     project_dependencies, test_dependencies = parse_gradle_dependencies(build_file)
+     all_dependencies = (
+         project_dependencies + test_dependencies
+         if with_test_dependencies
+         else project_dependencies
+     )
+     valid_build_files = ["build.gradle", "build.gradle.kts"]
+     for dependency_path in all_dependencies:
+         for build_file_name in valid_build_files:
+             if (
+                 dependency_path not in found_dependencies
+                 and (dependency_path / build_file_name).exists()
+             ):
+                 found_dependencies.append(dependency_path)
+                 get_all_gradle_dependencies(
+                     dependency_path / build_file_name,
+                     with_test_dependencies,
+                     found_dependencies,
+                 )
+
+     return found_dependencies
+
+
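A sketch of what `parse_gradle_dependencies` extracts from a dependencies block; the Gradle project paths are illustrative, not taken from the airbyte repo:

```python
from pathlib import Path
from tempfile import TemporaryDirectory

from airbyte_ops_mcp.connector_ops.utils import parse_gradle_dependencies

BUILD_GRADLE = """
dependencies {
    implementation project(':airbyte-cdk:java:airbyte-cdk:core')
    testImplementation project(':some:test:fixture')
}
"""

with TemporaryDirectory() as tmp:
    build_file = Path(tmp) / "build.gradle"
    build_file.write_text(BUILD_GRADLE)
    project_deps, test_deps = parse_gradle_dependencies(build_file)

# Colon-separated Gradle project paths become filesystem paths; testImplementation lands in test_deps.
print(project_deps)  # e.g. [PosixPath('airbyte-cdk/java/airbyte-cdk/core')]
print(test_deps)     # e.g. [PosixPath('some/test/fixture')]
```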
+ class ConnectorLanguage(str, Enum):
+     PYTHON = "python"
+     JAVA = "java"
+     LOW_CODE = "low-code"
+     MANIFEST_ONLY = "manifest-only"
+
+
+ class ConnectorLanguageError(Exception):
+     pass
+
+
+ @dataclass(frozen=True)
+ class Connector:
+     """Utility class to gather metadata about a connector."""
+
+     # Path to the connector directory relative to the CONNECTOR_PATH_PREFIX
+     # e.g. source-google-sheets or third-party/farosai/airbyte-pagerduty-source
+     relative_connector_path: str
+
+     def _get_type_and_name_from_technical_name(self) -> Tuple[str, str]:
+         if "-" not in self.technical_name:
+             raise ConnectorInvalidNameError(
+                 f"Connector type and name could not be inferred from {self.technical_name}"
+             )
+         _type = self.technical_name.split("-")[0]
+         name = self.technical_name[len(_type) + 1 :]
+         return _type, name
+
+     @property
+     def technical_name(self) -> str:
+         """
+         Return the technical name of the connector from the given relative_connector_path
+         e.g. source-google-sheets -> source-google-sheets or third-party/farosai/airbyte-pagerduty-source -> airbyte-pagerduty-source
+         """
+         return self.relative_connector_path.split("/")[-1]
+
+     @property
+     def name(self):
+         return self._get_type_and_name_from_technical_name()[1]
+
+     @property
+     def connector_type(self) -> Optional[str]:
+         return self.metadata["connectorType"] if self.metadata else None
+
+     @property
+     def is_third_party(self) -> bool:
+         return THIRD_PARTY_GLOB in self.relative_connector_path
+
+     @property
+     def has_airbyte_docs(self) -> bool:
+         return (
+             self.metadata
+             and self.metadata.get("documentationUrl") is not None
+             and BASE_AIRBYTE_DOCS_URL in str(self.metadata.get("documentationUrl"))
+         )
+
+     @property
+     def local_connector_documentation_directory(self) -> Path:
+         return Path(f"./docs/integrations/{self.connector_type}s")
+
+     @property
+     def relative_documentation_path_str(self) -> str:
+         documentation_url = self.metadata["documentationUrl"]
+         relative_documentation_path = documentation_url.replace(
+             BASE_AIRBYTE_DOCS_URL, ""
+         )
+
+         # strip leading and trailing slashes
+         relative_documentation_path = relative_documentation_path.strip("/")
+
+         return f"./docs/{relative_documentation_path}"
+
+     @property
+     def documentation_file_name(self) -> str:
+         return self.metadata.get("documentationUrl").split("/")[-1] + ".md"
+
+     @property
+     def documentation_file_path(self) -> Optional[Path]:
+         return (
+             Path(f"{self.relative_documentation_path_str}.md")
+             if self.has_airbyte_docs
+             else None
+         )
+
+     @property
+     def inapp_documentation_file_path(self) -> Optional[Path]:
+         if not self.has_airbyte_docs:
+             return None
+
+         return Path(f"{self.relative_documentation_path_str}.inapp.md")
+
+     @property
+     def migration_guide_file_name(self) -> str:
+         return f"{self.name}-migrations.md"
+
+     @property
+     def migration_guide_file_path(self) -> Path:
+         return (
+             self.local_connector_documentation_directory
+             / self.migration_guide_file_name
+         )
+
+     @property
+     def icon_path(self) -> Path:
+         file_path = self.code_directory / ICON_FILE_NAME
+         return file_path
+
+     @property
+     def code_directory(self) -> Path:
+         return Path(f"./{CONNECTOR_PATH_PREFIX}/{self.relative_connector_path}")
+
+     @property
+     def python_source_dir_path(self) -> Path:
+         return self.code_directory / self.technical_name.replace("-", "_")
+
+     @property
+     def _manifest_only_path(self) -> Path:
+         return self.code_directory / MANIFEST_FILE_NAME
+
+     @property
+     def _manifest_low_code_path(self) -> Path:
+         return self.python_source_dir_path / MANIFEST_FILE_NAME
+
+     @property
+     def manifest_path(self) -> Path:
+         if self._manifest_only_path.is_file():
+             return self._manifest_only_path
+
+         return self._manifest_low_code_path
+
+     @property
+     def manifest_only_components_path(self) -> Path:
+         """Return the path to the components.py file of a manifest-only connector."""
+         return self.code_directory / COMPONENTS_FILE_NAME
+
+     @property
+     def has_dockerfile(self) -> bool:
+         return self.dockerfile_file_path.is_file()
+
+     @property
+     def dockerfile_file_path(self) -> Path:
+         return self.code_directory / DOCKERFILE_FILE_NAME
+
+     @property
+     def pyproject_file_path(self) -> Path:
+         return self.code_directory / PYPROJECT_FILE_NAME
+
+     @property
+     def metadata_file_path(self) -> Path:
+         return self.code_directory / METADATA_FILE_NAME
+
+     @property
+     def metadata(self) -> Optional[dict]:
+         file_path = self.metadata_file_path
+         if not file_path.is_file():
+             return None
+         return yaml.safe_load((self.code_directory / METADATA_FILE_NAME).read_text())[
+             "data"
+         ]
+
+     @property
+     def connector_spec_file_content(self) -> Optional[dict]:
+         """
+         The source of truth for the spec is the actual output of the spec command, as connectors can mutate their spec.
+         This is a best-effort attempt at statically fetching the spec without running the command on the connector,
+         which is "good enough" in some cases.
+         """
+         yaml_spec = Path(self.python_source_dir_path / "spec.yaml")
+         json_spec = Path(self.python_source_dir_path / "spec.json")
+
+         if yaml_spec.exists():
+             return yaml.safe_load(yaml_spec.read_text())
+         elif json_spec.exists():
+             with open(json_spec) as f:
+                 return json.load(f)
+         elif self.manifest_path.exists():
+             return yaml.safe_load(self.manifest_path.read_text())["spec"]
+
+         return None
+
+     @property
+     def language(self) -> Optional[ConnectorLanguage]:
+         if Path(self.code_directory / "manifest.yaml").is_file():
+             return ConnectorLanguage.MANIFEST_ONLY
+         if Path(
+             self.code_directory
+             / self.technical_name.replace("-", "_")
+             / "manifest.yaml"
+         ).is_file():
+             return ConnectorLanguage.LOW_CODE
+         if (
+             Path(self.code_directory / "setup.py").is_file()
+             or Path(self.code_directory / "pyproject.toml").is_file()
+         ):
+             return ConnectorLanguage.PYTHON
+         if (
+             Path(self.code_directory / "src" / "main" / "java").exists()
+             or Path(self.code_directory / "src" / "main" / "kotlin").exists()
+         ):
+             return ConnectorLanguage.JAVA
+         return None
+
+     @property
+     def version(self) -> Optional[str]:
+         if self.metadata is None:
+             return self.version_in_dockerfile_label
+         return self.metadata["dockerImageTag"]
+
+     @property
+     def version_in_dockerfile_label(self) -> Optional[str]:
+         if not self.has_dockerfile:
+             return None
+         with open(self.code_directory / "Dockerfile") as f:
+             for line in f:
+                 if "io.airbyte.version" in line:
+                     return line.split("=")[1].strip()
+         raise ConnectorVersionNotFound(
+             """
+             Could not find the connector version from its Dockerfile.
+             The io.airbyte.version tag is missing.
+             """
+         )
+
+     @property
+     def name_from_metadata(self) -> Optional[str]:
+         return self.metadata.get("name") if self.metadata else None
+
+     @property
+     def support_level(self) -> Optional[str]:
+         return self.metadata.get("supportLevel") if self.metadata else None
+
+     def metadata_query_match(self, query_string: str) -> bool:
+         """Evaluate a query string against the connector metadata.
+
+         Based on the simpleeval library:
+         https://github.com/danthedeckie/simpleeval
+
+         Examples
+         --------
+         >>> connector.metadata_query_match("'s3' in data.name")
+         True
+
+         >>> connector.metadata_query_match("data.supportLevel == 'certified'")
+         False
+
+         >>> connector.metadata_query_match("data.ab_internal.ql >= 100")
+         True
+
+         Args:
+             query_string (str): The query string to evaluate.
+
+         Returns:
+             bool: True if the query string matches the connector metadata, False otherwise.
+         """
+         try:
+             matches = simple_eval(query_string, names={"data": self.metadata})
+             return bool(matches)
+         except Exception as e:
+             # Skip on error, as not all fields are present in all connectors.
+             logging.debug(
+                 f"Failed to evaluate query string {query_string} for connector {self.technical_name}, error: {e}"
+             )
+             return False
+
+     @property
+     def ab_internal_sl(self) -> int:
+         """Airbyte Internal Field.
+
+         More info can be found here: https://www.notion.so/Internal-Metadata-Fields-32b02037e7b244b7934214019d0b7cc9
+
+         Returns:
+             int: The value
+         """
+         default_value = 100
+         sl_value = get(self.metadata, "ab_internal.sl")
+
+         if sl_value is None:
+             logging.warning(
+                 f"Connector {self.technical_name} does not have an `ab_internal.sl` defined in metadata.yaml. Defaulting to {default_value}"
+             )
+             return default_value
+
+         return sl_value
+
+     @property
+     def ab_internal_ql(self) -> int:
+         """Airbyte Internal Field.
+
+         More info can be found here: https://www.notion.so/Internal-Metadata-Fields-32b02037e7b244b7934214019d0b7cc9
+
+         Returns:
+             int: The value
+         """
+         default_value = 100
+         ql_value = get(self.metadata, "ab_internal.ql")
+
+         if ql_value is None:
+             logging.warning(
+                 f"Connector {self.technical_name} does not have an `ab_internal.ql` defined in metadata.yaml. Defaulting to {default_value}"
+             )
+             return default_value
+
+         return ql_value
+
+     @property
+     def is_strategic_connector(self) -> bool:
+         """Check if a connector qualifies as a strategic connector.
+
+         Returns:
+             bool: True if the connector is a high-value connector, False otherwise.
+         """
+         if self.ab_internal_sl >= STRATEGIC_CONNECTOR_THRESHOLDS["sl"]:
+             return True
+
+         if self.ab_internal_ql >= STRATEGIC_CONNECTOR_THRESHOLDS["ql"]:
+             return True
+
+         return False
+
+     @property
+     def requires_high_test_strictness_level(self) -> bool:
+         """Check if a connector requires high strictness CAT tests.
+
+         Returns:
+             bool: True if the connector requires high test strictness level, False otherwise.
+         """
+         return self.ab_internal_ql >= STRATEGIC_CONNECTOR_THRESHOLDS["ql"]
+
+     @property
+     def requires_allowed_hosts_check(self) -> bool:
+         """Check if a connector requires allowed hosts.
+
+         Returns:
+             bool: True if the connector requires allowed hosts, False otherwise.
+         """
+         return self.ab_internal_ql >= ALLOWED_HOST_THRESHOLD["ql"]
+
+     @property
+     def allowed_hosts(self) -> Optional[List[str]]:
+         return self.metadata.get("allowedHosts") if self.metadata else None
+
+     @property
+     def suggested_streams(self) -> Optional[List[str]]:
+         return self.metadata.get("suggestedStreams") if self.metadata else None
+
+     @property
+     def acceptance_test_config_path(self) -> Path:
+         return self.code_directory / ACCEPTANCE_TEST_CONFIG_FILE_NAME
+
+     @property
+     def acceptance_test_config(self) -> Optional[dict]:
+         try:
+             with open(self.acceptance_test_config_path) as acceptance_test_config_file:
+                 return yaml.safe_load(acceptance_test_config_file)
+         except FileNotFoundError:
+             logging.warning(
+                 f"No {ACCEPTANCE_TEST_CONFIG_FILE_NAME} file found for {self.technical_name}"
+             )
+             return None
+
+     @property
+     def supports_normalization(self) -> bool:
+         return self.metadata and self.metadata.get("normalizationConfig") is not None
+
+     @property
+     def normalization_repository(self) -> Optional[str]:
+         if self.supports_normalization:
+             return f"{self.metadata['normalizationConfig']['normalizationRepository']}"
+
+     @property
+     def normalization_tag(self) -> Optional[str]:
+         if self.supports_normalization:
+             return f"{self.metadata['normalizationConfig']['normalizationTag']}"
+
+     @property
+     def is_using_poetry(self) -> bool:
+         return Path(self.code_directory / "pyproject.toml").exists()
+
+     @property
+     def registry_primary_key_field(self) -> str:
+         """
+         The primary key field of the connector in the registry.
+
+         example:
+         - source -> sourceDefinitionId
+         - destination -> destinationDefinitionId
+         """
+         return f"{self.connector_type}DefinitionId"
+
+     @property
+     def is_enabled_in_any_registry(self) -> bool:
+         """Check if the connector is enabled in any registry.
+
+         Example:
+         - {registries: null} -> false
+         - {registries: {oss: {enabled: false }}} -> false
+         - {registries: {oss: {enabled: true }}} -> true
+         - {registries: {cloud: {enabled: true }}} -> true
+
+         Returns:
+             bool: True if the connector is enabled, False otherwise.
+         """
+         registries = self.metadata.get("registryOverrides")
+         if not registries:
+             return False
+
+         for registry in registries.values():
+             if registry.get("enabled"):
+                 return True
+
+         return False
+
+     @property
+     def is_released(self) -> bool:
+         """Pull the OSS registry and check if the current definition ID and docker image tag are in the registry.
+         If there is a match it means the connector is released.
+         We use the OSS registry as the source of truth for released connectors as the cloud registry can be a subset of the OSS registry.
+
+         Returns:
+             bool: True if the connector is released, False otherwise.
+         """
+         metadata = self.metadata
+         registry = download_catalog(OSS_CATALOG_URL)
+         for connector in registry[f"{self.connector_type}s"]:
+             if (
+                 connector[self.registry_primary_key_field] == metadata["definitionId"]
+                 and connector["dockerImageTag"] == metadata["dockerImageTag"]
+             ):
+                 return True
+         return False
+
+     @property
+     def cloud_usage(self) -> Optional[str]:
+         """Pull the cloud registry, check if the connector is in the registry and return the usage metrics.
+
+         Returns:
+             Optional[str]: The usage metrics of the connector, could be one of ["low", "medium", "high"] or None if the connector is not in the registry.
+         """
+         metadata = self.metadata
+         definition_id = metadata.get("definitionId")
+         cloud_registry = download_catalog(CLOUD_CATALOG_URL)
+
+         all_connectors_of_type = cloud_registry[f"{self.connector_type}s"]
+         connector_entry = find(
+             all_connectors_of_type, {self.registry_primary_key_field: definition_id}
+         )
+         if not connector_entry:
+             return None
+
+         return get(connector_entry, "generated.metrics.cloud.usage")
+
+     @property
+     def sbom_url(self) -> Optional[str]:
+         """
+         Fetch the SBOM URL from the connector definition in the OSS registry if it exists, None otherwise.
+         """
+         metadata = self.metadata
+         definition_id = metadata.get("definitionId")
+         # We use the OSS registry as the source of truth for released connectors as the cloud registry can be a subset of the OSS registry.
+         oss_registry = download_catalog(OSS_CATALOG_URL)
+
+         all_connectors_of_type = oss_registry[f"{self.connector_type}s"]
+         connector_entry = find(
+             all_connectors_of_type, {self.registry_primary_key_field: definition_id}
+         )
+         if not connector_entry:
+             return None
+
+         return get(connector_entry, "generated.sbomUrl")
+
+     @property
+     def image_address(self) -> str:
+         return f"{self.metadata['dockerRepository']}:{self.metadata['dockerImageTag']}"
+
+     @property
+     def cdk_name(self) -> str | None:
+         try:
+             return [
+                 tag.split(":")[-1]
+                 for tag in self.metadata["tags"]
+                 if tag.startswith("cdk:")
+             ][0]
+         except IndexError:
+             return None
+
+     @property
+     def base_image_address(self) -> str | None:
+         return self.metadata.get("connectorBuildOptions", {}).get("baseImage")
+
+     @property
+     def uses_base_image(self) -> bool:
+         return self.base_image_address is not None
+
+     @property
+     def base_image_version(self) -> str | None:
+         if not self.uses_base_image:
+             return None
+         return self.base_image_address.split(":")[1].split("@")[0]
+
+     def __repr__(self) -> str:
+         return self.technical_name
+
+     @functools.lru_cache(maxsize=2)
+     def get_local_dependency_paths(
+         self, with_test_dependencies: bool = True
+     ) -> List[Path]:
+         dependencies_paths = []
+         build_script = "build.gradle"
+         if Path(self.code_directory / "build.gradle.kts").exists():
+             build_script = "build.gradle.kts"
+
+         if self.language == ConnectorLanguage.JAVA:
+             dependencies_paths += [
+                 Path("./airbyte-cdk/java/airbyte-cdk"),
+                 Path("./airbyte-cdk/bulk"),
+             ]
+             dependencies_paths += get_all_gradle_dependencies(
+                 self.code_directory / build_script,
+                 with_test_dependencies=with_test_dependencies,
+             )
+         return sorted(list(set(dependencies_paths)))
+
+
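A usage sketch for the Connector dataclass, assuming the working directory is the root of an airbyte monorepo checkout (paths such as `code_directory` are resolved relative to it) and that a `source-faker` connector exists there:

```python
from airbyte_ops_mcp.connector_ops.utils import Connector

connector = Connector("source-faker")
print(connector.technical_name)  # "source-faker"
print(connector.connector_type)  # "source", read from metadata.yaml (None if the file is missing)
print(connector.version)         # dockerImageTag from metadata.yaml
print(connector.language)        # a ConnectorLanguage member, or None if it cannot be inferred
```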
+ def get_changed_connectors(
+     modified_files: Optional[Set[Union[str, Path]]] = None,
+     source: bool = True,
+     destination: bool = True,
+     third_party: bool = True,
+ ) -> Set[Connector]:
+     """Retrieve a set of Connectors that were changed in the current branch (compared to master)."""
+     if modified_files is None:
+         airbyte_repo = git.Repo(search_parent_directories=True)
+         modified_files = airbyte_repo.git.diff("--name-only", DIFFED_BRANCH).split("\n")
+
+     prefix_to_check = []
+     if source:
+         prefix_to_check.append(SOURCE_CONNECTOR_PATH_PREFIX)
+     if destination:
+         prefix_to_check.append(DESTINATION_CONNECTOR_PATH_PREFIX)
+     if third_party:
+         prefix_to_check.append(THIRD_PARTY_CONNECTOR_PATH_PREFIX)
+
+     changed_source_connector_files = {
+         file_path
+         for file_path in modified_files
+         if any(file_path.startswith(prefix) for prefix in prefix_to_check)
+         and SCAFFOLD_CONNECTOR_GLOB not in file_path
+     }
+     return {
+         Connector(get_connector_name_from_path(changed_file))
+         for changed_file in changed_source_connector_files
+     }
+
+
+ def _get_relative_connector_folder_name_from_metadata_path(
+     metadata_file_path: str,
+ ) -> str:
+     """Get the relative connector folder name from the metadata file path.
+
+     Args:
+         metadata_file_path (str): Path to the metadata file.
+
+     Returns:
+         str: The relative connector folder name.
+     """
+     # remove CONNECTOR_PATH_PREFIX and anything before
+     metadata_file_path = metadata_file_path.split(CONNECTOR_PATH_PREFIX)[-1]
+
+     # remove metadata.yaml
+     metadata_file_path = metadata_file_path.replace(METADATA_FILE_NAME, "")
+
+     # remove leading and trailing slashes
+     metadata_file_path = metadata_file_path.strip("/")
+     return metadata_file_path
+
+
+ def get_all_connectors_in_repo() -> Set[Connector]:
+     """Retrieve a set of all Connectors in the repo.
+     We glob the connectors folder for metadata.yaml files and construct Connectors from the directory name.
+
+     Returns:
+         A set of Connectors.
+     """
+     repo = git.Repo(search_parent_directories=True)
+     repo_path = repo.working_tree_dir
+
+     return {
+         Connector(_get_relative_connector_folder_name_from_metadata_path(metadata_file))
+         for metadata_file in glob(
+             f"{repo_path}/{CONNECTOR_PATH_PREFIX}/**/metadata.yaml", recursive=True
+         )
+         if SCAFFOLD_CONNECTOR_GLOB not in metadata_file
+     }
+
+
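A sketch combining `get_all_connectors_in_repo` with `metadata_query_match` (the query syntax follows the examples in that method's docstring); it assumes an airbyte monorepo checkout:

```python
from airbyte_ops_mcp.connector_ops.utils import get_all_connectors_in_repo

connectors = get_all_connectors_in_repo()
certified = [
    c for c in connectors if c.metadata_query_match("data.supportLevel == 'certified'")
]
print(f"{len(certified)} certified connectors out of {len(connectors)}")
```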
+ class ConnectorTypeEnum(str, Enum):
+     source = "source"
+     destination = "destination"
+
+
+ class SupportLevelEnum(str, Enum):
+     certified = "certified"
+     community = "community"
+     archived = "archived"