airbyte-internal-ops 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
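A comparable wheel-to-wheel diff can be reproduced locally with only the Python standard library. This is a minimal sketch, assuming both wheels have already been downloaded (for example with `pip download airbyte-internal-ops==0.4.1 --no-deps` and the same for 0.5.0); the wheel paths below are illustrative, not the registry's exact filenames.

import difflib
import zipfile

# Hypothetical local paths; actual wheel filenames depend on how they were fetched.
OLD_WHEEL = "airbyte_internal_ops-0.4.1-py3-none-any.whl"
NEW_WHEEL = "airbyte_internal_ops-0.5.0-py3-none-any.whl"

with zipfile.ZipFile(OLD_WHEEL) as old, zipfile.ZipFile(NEW_WHEEL) as new:
    old_names, new_names = set(old.namelist()), set(new.namelist())
    for name in sorted(old_names | new_names):
        # Treat a file missing on either side as empty so it renders as fully added/removed.
        a = old.read(name).decode("utf-8", "replace").splitlines() if name in old_names else []
        b = new.read(name).decode("utf-8", "replace").splitlines() if name in new_names else []
        for line in difflib.unified_diff(a, b, f"0.4.1/{name}", f"0.5.0/{name}", lineterm=""):
            print(line)

The per-file change summary for 0.4.1 → 0.5.0: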
- {airbyte_internal_ops-0.4.1.dist-info → airbyte_internal_ops-0.5.0.dist-info}/METADATA +1 -1
- {airbyte_internal_ops-0.4.1.dist-info → airbyte_internal_ops-0.5.0.dist-info}/RECORD +13 -52
- airbyte_ops_mcp/cli/cloud.py +42 -3
- airbyte_ops_mcp/cloud_admin/api_client.py +473 -0
- airbyte_ops_mcp/cloud_admin/models.py +56 -0
- airbyte_ops_mcp/mcp/cloud_connector_versions.py +460 -0
- airbyte_ops_mcp/mcp/prerelease.py +6 -46
- airbyte_ops_mcp/regression_tests/ci_output.py +151 -71
- airbyte_ops_mcp/regression_tests/http_metrics.py +21 -2
- airbyte_ops_mcp/regression_tests/models.py +6 -0
- airbyte_ops_mcp/telemetry.py +162 -0
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/.gitignore +0 -1
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/README.md +0 -420
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/__init__.py +0 -2
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/__init__.py +0 -1
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/backends/__init__.py +0 -8
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/backends/base_backend.py +0 -16
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/backends/duckdb_backend.py +0 -87
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/backends/file_backend.py +0 -165
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/connection_objects_retrieval.py +0 -377
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/connector_runner.py +0 -247
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/errors.py +0 -7
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/evaluation_modes.py +0 -25
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/hacks.py +0 -23
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/json_schema_helper.py +0 -384
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/mitm_addons.py +0 -37
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/models.py +0 -595
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/proxy.py +0 -207
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/secret_access.py +0 -47
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/segment_tracking.py +0 -45
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/commons/utils.py +0 -214
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/conftest.py.disabled +0 -751
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/consts.py +0 -4
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/poetry.lock +0 -4480
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/pytest.ini +0 -9
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/regression_tests/__init__.py +0 -1
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/regression_tests/test_check.py +0 -61
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/regression_tests/test_discover.py +0 -117
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/regression_tests/test_read.py +0 -627
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/regression_tests/test_spec.py +0 -43
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/report.py +0 -542
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/stash_keys.py +0 -38
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/templates/__init__.py +0 -0
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/templates/private_details.html.j2 +0 -305
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/templates/report.html.j2 +0 -515
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/utils.py +0 -187
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/validation_tests/__init__.py +0 -0
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/validation_tests/test_check.py +0 -61
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/validation_tests/test_discover.py +0 -217
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/validation_tests/test_read.py +0 -177
- airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/validation_tests/test_spec.py +0 -631
- {airbyte_internal_ops-0.4.1.dist-info → airbyte_internal_ops-0.5.0.dist-info}/WHEEL +0 -0
- {airbyte_internal_ops-0.4.1.dist-info → airbyte_internal_ops-0.5.0.dist-info}/entry_points.txt +0 -0

airbyte_ops_mcp/_legacy/airbyte_ci/connector_live_tests/validation_tests/test_spec.py
DELETED
@@ -1,631 +0,0 @@
-#
-# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
-#
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Set, Tuple
-
-import dpath.util
-import jsonschema
-import pytest
-from airbyte_protocol.models import ConnectorSpecification
-from live_tests.commons.json_schema_helper import (
-    JsonSchemaHelper,
-    get_expected_schema_structure,
-    get_paths_in_connector_config,
-)
-from live_tests.commons.models import ExecutionResult
-from live_tests.utils import (
-    fail_test_on_failing_execution_results,
-    find_all_values_for_key_in_schema,
-    get_spec,
-    get_test_logger,
-)
-
-pytestmark = [
-    pytest.mark.anyio,
-]
-
-if TYPE_CHECKING:
-    from _pytest.fixtures import SubRequest
-
-
-@pytest.fixture(name="secret_property_names")
-def secret_property_names_fixture():
-    return (
-        "client_token",
-        "access_token",
-        "api_token",
-        "token",
-        "secret",
-        "client_secret",
-        "password",
-        "key",
-        "service_account_info",
-        "service_account",
-        "tenant_id",
-        "certificate",
-        "jwt",
-        "credentials",
-        "app_id",
-        "appid",
-        "refresh_token",
-    )
-
-
-DATE_PATTERN = "^[0-9]{2}-[0-9]{2}-[0-9]{4}$"
-DATETIME_PATTERN = "^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2})?$"
-
-
-@pytest.fixture
-def target_spec(
-    spec_target_execution_result: ExecutionResult,
-) -> ConnectorSpecification:
-    return get_spec(spec_target_execution_result)
-
-
-@pytest.fixture
-def connector_config(spec_target_execution_result: ExecutionResult) -> Dict[str, any]:
-    return spec_target_execution_result.config
-
-
-async def test_spec(
-    record_property: Callable,
-    spec_target_execution_result: ExecutionResult,
-):
-    """Check that the spec call succeeds"""
-    fail_test_on_failing_execution_results(
-        record_property, [spec_target_execution_result]
-    )
-
-
-@pytest.mark.allow_diagnostic_mode
-async def test_config_match_spec(
-    target_spec: ConnectorSpecification,
-    connector_config: Dict[str, any],
-):
-    """Check that config matches the actual schema from the spec call"""
-    # Getting rid of technical variables that start with an underscore
-    config = {
-        key: value
-        for key, value in connector_config.data.items()
-        if not key.startswith("_")
-    }
-    try:
-        jsonschema.validate(instance=config, schema=target_spec.connectionSpecification)
-    except jsonschema.exceptions.ValidationError as err:
-        pytest.fail(f"Config invalid: {err}")
-    except jsonschema.exceptions.SchemaError as err:
-        pytest.fail(f"Spec is invalid: {err}")
-
-
-async def test_enum_usage(target_spec: ConnectorSpecification):
-    """Check that enum lists in specs contain distinct values."""
-    docs_url = "https://docs.airbyte.io/connector-development/connector-specification-reference"
-    docs_msg = f"See specification reference at {docs_url}."
-
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    enum_paths = schema_helper.find_nodes(keys=["enum"])
-
-    for path in enum_paths:
-        enum_list = schema_helper.get_node(path)
-        assert len(set(enum_list)) == len(enum_list), (
-            f"Enum lists should not contain duplicate values. Misconfigured enum array: {enum_list}. {docs_msg}"
-        )
-
-
-async def test_oneof_usage(target_spec: ConnectorSpecification):
-    """Check that if spec contains oneOf it follows the rules according to reference
-    https://docs.airbyte.io/connector-development/connector-specification-reference
-    """
-    docs_url = "https://docs.airbyte.io/connector-development/connector-specification-reference"
-    docs_msg = f"See specification reference at {docs_url}."
-
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    variant_paths = schema_helper.find_nodes(keys=["oneOf", "anyOf"])
-
-    for variant_path in variant_paths:
-        top_level_obj = schema_helper.get_node(variant_path[:-1])
-        assert top_level_obj.get("type") == "object", (
-            f"The top-level definition in a `oneOf` block should have type: object. misconfigured object: {top_level_obj}. {docs_msg}"
-        )
-
-        variants = schema_helper.get_node(variant_path)
-        for variant in variants:
-            assert "properties" in variant, (
-                f"Each item in the oneOf array should be a property with type object. {docs_msg}"
-            )
-
-        oneof_path = ".".join(map(str, variant_path))
-        variant_props = [set(v["properties"].keys()) for v in variants]
-        common_props = set.intersection(*variant_props)
-        assert common_props, (
-            f"There should be at least one common property for {oneof_path} subobjects. {docs_msg}"
-        )
-
-        const_common_props = set()
-        enum_common_props = set()
-        for common_prop in common_props:
-            if all(
-                ["const" in variant["properties"][common_prop] for variant in variants]
-            ):
-                const_common_props.add(common_prop)
-            if all(
-                ["enum" in variant["properties"][common_prop] for variant in variants]
-            ):
-                enum_common_props.add(common_prop)
-        assert len(const_common_props) == 1 or (
-            len(const_common_props) == 0 and len(enum_common_props) == 1
-        ), (
-            f"There should be exactly one common property with 'const' keyword (or equivalent) for {oneof_path} subobjects. {docs_msg}"
-        )
-
-        const_common_prop = (
-            const_common_props.pop() if const_common_props else enum_common_props.pop()
-        )
-        for n, variant in enumerate(variants):
-            prop_obj = variant["properties"][const_common_prop]
-            prop_info = f"common property {oneof_path}[{n}].{const_common_prop}. It's recommended to just use `const`."
-            if "const" in prop_obj:
-                const_value = prop_obj["const"]
-                assert (
-                    "default" not in prop_obj or prop_obj["default"] == const_value
-                ), (
-                    f"'default' needs to be identical to 'const' in {prop_info}. {docs_msg}"
-                )
-                assert "enum" not in prop_obj or prop_obj["enum"] == [const_value], (
-                    f"'enum' needs to be an array with a single item identical to 'const' in {prop_info}. {docs_msg}"
-                )
-            else:
-                assert (
-                    "enum" in prop_obj
-                    and "default" in prop_obj
-                    and prop_obj["enum"] == [prop_obj["default"]]
-                ), (
-                    f"'enum' needs to be an array with a single item identical to 'default' in {prop_info}. {docs_msg}"
-                )
-
-
-def _is_spec_property_name_secret(
-    path: str, secret_property_names
-) -> Tuple[Optional[str], bool]:
-    """
-    Given a path to a type field, extract a field name and decide whether it is a name of secret or not
-    based on a provided list of secret names.
-    Split the path by `/`, drop the last item and make list reversed.
-    Then iterate over it and find the first item that's not a reserved keyword or an index.
-    Example:
-    properties/credentials/oneOf/1/properties/api_key/type -> [api_key, properties, 1, oneOf, credentials, properties] -> api_key
-    """
-    reserved_keywords = (
-        "anyOf",
-        "oneOf",
-        "allOf",
-        "not",
-        "properties",
-        "items",
-        "type",
-        "prefixItems",
-    )
-    for part in reversed(path.split("/")[:-1]):
-        if part.isdigit() or part in reserved_keywords:
-            continue
-        return part, part.lower() in secret_property_names
-    return None, False
-
-
-def _property_can_store_secret(prop: dict) -> bool:
-    """
-    Some fields can not hold a secret by design, others can.
-    Null type as well as boolean can not hold a secret value.
-    A string, a number or an integer type can always store secrets.
-    Secret objects and arrays can not be rendered correctly in the UI:
-    A field with a constant value can not hold a secret as well.
-    """
-    unsecure_types = {"string", "integer", "number"}
-    type_ = prop["type"]
-    is_property_constant_value = bool(prop.get("const"))
-    can_store_secret = any(
-        [
-            isinstance(type_, str) and type_ in unsecure_types,
-            isinstance(type_, list) and (set(type_) & unsecure_types),
-        ]
-    )
-    if not can_store_secret:
-        return False
-    # if a property can store a secret, additional check should be done if it's a constant value
-    return not is_property_constant_value
-
-
-async def test_secret_is_properly_marked(
-    target_spec: ConnectorSpecification, secret_property_names
-):
-    """
-    Each field has a type, therefore we can make a flat list of fields from the returned specification.
-    Iterate over the list, check if a field name is a secret name, can potentially hold a secret value
-    and make sure it is marked as `airbyte_secret`.
-    """
-    secrets_exposed = []
-    non_secrets_hidden = []
-    spec_properties = target_spec.connectionSpecification["properties"]
-    for type_path, type_value in dpath.util.search(
-        spec_properties, "**/type", yielded=True
-    ):
-        _, is_property_name_secret = _is_spec_property_name_secret(
-            type_path, secret_property_names
-        )
-        if not is_property_name_secret:
-            continue
-        absolute_path = f"/{type_path}"
-        property_path, _ = absolute_path.rsplit(sep="/", maxsplit=1)
-        property_definition = dpath.util.get(spec_properties, property_path)
-        marked_as_secret = property_definition.get("airbyte_secret", False)
-        possibly_a_secret = _property_can_store_secret(property_definition)
-        if marked_as_secret and not possibly_a_secret:
-            non_secrets_hidden.append(property_path)
-        if not marked_as_secret and possibly_a_secret:
-            secrets_exposed.append(property_path)
-
-    if non_secrets_hidden:
-        properties = "\n".join(non_secrets_hidden)
-        pytest.fail(
-            f"""Some properties are marked with `airbyte_secret` although they probably should not be.
-            Please double check them. If they're okay, please fix this test.
-            {properties}"""
-        )
-    if secrets_exposed:
-        properties = "\n".join(secrets_exposed)
-        pytest.fail(
-            f"""The following properties should be marked with `airbyte_secret!`
-            {properties}"""
-        )
-
-
-def _fail_on_errors(errors: List[str]):
-    if len(errors) > 0:
-        pytest.fail("\n".join(errors))
-
-
-def test_property_type_is_not_array(target_spec: ConnectorSpecification):
-    """
-    Each field has one or multiple types, but the UI only supports a single type and optionally "null" as a second type.
-    """
-    errors = []
-    for type_path, type_value in dpath.util.search(
-        target_spec.connectionSpecification, "**/properties/*/type", yielded=True
-    ):
-        if isinstance(type_value, List):
-            number_of_types = len(type_value)
-            if number_of_types != 2 and number_of_types != 1:
-                errors.append(
-                    f"{type_path} is not either a simple type or an array of a simple type plus null: {type_value} (for example: type: [string, null])"
-                )
-            if number_of_types == 2 and type_value[1] != "null":
-                errors.append(
-                    f"Second type of {type_path} is not null: {type_value}. Type can either be a simple type or an array of a simple type plus null (for example: type: [string, null])"
-                )
-    _fail_on_errors(errors)
-
-
-def test_object_not_empty(target_spec: ConnectorSpecification):
-    """
-    Each object field needs to have at least one property as the UI won't be able to show them otherwise.
-    If the whole spec is empty, it's allowed to have a single empty object at the top level
-    """
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    errors = []
-    for type_path, type_value in dpath.util.search(
-        target_spec.connectionSpecification, "**/type", yielded=True
-    ):
-        if type_path == "type":
-            # allow empty root object
-            continue
-        if type_value == "object":
-            property = schema_helper.get_parent(type_path)
-            if "oneOf" not in property and (
-                "properties" not in property or len(property["properties"]) == 0
-            ):
-                errors.append(
-                    f"{type_path} is an empty object which will not be represented correctly in the UI. Either remove or add specific properties"
-                )
-    _fail_on_errors(errors)
-
-
-async def test_array_type(target_spec: ConnectorSpecification):
-    """
-    Each array has one or multiple types for its items, but the UI only supports a single type which can either be object, string or an enum
-    """
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    errors = []
-    for type_path, type_type in dpath.util.search(
-        target_spec.connectionSpecification, "**/type", yielded=True
-    ):
-        property_definition = schema_helper.get_parent(type_path)
-        if type_type != "array":
-            # unrelated "items", not an array definition
-            continue
-        items_value = property_definition.get("items", None)
-        if items_value is None:
-            continue
-        elif isinstance(items_value, List):
-            errors.append(f"{type_path} is not just a single item type: {items_value}")
-        elif (
-            items_value.get("type") not in ["object", "string", "number", "integer"]
-            and "enum" not in items_value
-        ):
-            errors.append(
-                f"Items of {type_path} has to be either object or string or define an enum"
-            )
-    _fail_on_errors(errors)
-
-
-async def test_forbidden_complex_types(target_spec: ConnectorSpecification):
-    """
-    not, anyOf, patternProperties, prefixItems, allOf, if, then, else, dependentSchemas and dependentRequired are not allowed
-    """
-    forbidden_keys = [
-        "not",
-        "anyOf",
-        "patternProperties",
-        "prefixItems",
-        "allOf",
-        "if",
-        "then",
-        "else",
-        "dependentSchemas",
-        "dependentRequired",
-    ]
-    found_keys = set()
-    for forbidden_key in forbidden_keys:
-        for path, value in dpath.util.search(
-            target_spec.connectionSpecification, f"**/{forbidden_key}", yielded=True
-        ):
-            found_keys.add(path)
-
-    for forbidden_key in forbidden_keys:
-        # remove forbidden keys if they are used as properties directly
-        for path, _value in dpath.util.search(
-            target_spec.connectionSpecification,
-            f"**/properties/{forbidden_key}",
-            yielded=True,
-        ):
-            found_keys.remove(path)
-
-    if len(found_keys) > 0:
-        key_list = ", ".join(found_keys)
-        pytest.fail(f"Found the following disallowed JSON schema features: {key_list}")
-
-
-async def test_date_pattern(request: SubRequest, target_spec: ConnectorSpecification):
-    """
-    Properties with format date or date-time should always have a pattern defined how the date/date-time should be formatted
-    that corresponds with the format the datepicker component is creating.
-    """
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    for format_path, format in dpath.util.search(
-        target_spec.connectionSpecification, "**/format", yielded=True
-    ):
-        if not isinstance(format, str):
-            # format is not a format definition here but a property named format
-            continue
-        property_definition = schema_helper.get_parent(format_path)
-        pattern = property_definition.get("pattern")
-        logger = get_test_logger(request)
-        if format == "date" and not pattern == DATE_PATTERN:
-            logger.warning(
-                f"{format_path} is defining a date format without the corresponding pattern. Consider setting the pattern to {DATE_PATTERN} to make it easier for users to edit this field in the UI."
-            )
-        if format == "date-time" and not pattern == DATETIME_PATTERN:
-            logger.warning(
-                f"{format_path} is defining a date-time format without the corresponding pattern Consider setting the pattern to {DATETIME_PATTERN} to make it easier for users to edit this field in the UI."
-            )
-
-
-async def test_date_format(request: SubRequest, target_spec: ConnectorSpecification):
-    """
-    Properties with a pattern that looks like a date should have their format set to date or date-time.
-    """
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    for pattern_path, pattern in dpath.util.search(
-        target_spec.connectionSpecification, "**/pattern", yielded=True
-    ):
-        if not isinstance(pattern, str):
-            # pattern is not a pattern definition here but a property named pattern
-            continue
-        if pattern == DATE_PATTERN or pattern == DATETIME_PATTERN:
-            property_definition = schema_helper.get_parent(pattern_path)
-            format = property_definition.get("format")
-            logger = get_test_logger(request)
-            if not format == "date" and pattern == DATE_PATTERN:
-                logger.warning(
-                    f"{pattern_path} is defining a pattern that looks like a date without setting the format to `date`. Consider specifying the format to make it easier for users to edit this field in the UI."
-                )
-            if not format == "date-time" and pattern == DATETIME_PATTERN:
-                logger.warning(
-                    f"{pattern_path} is defining a pattern that looks like a date-time without setting the format to `date-time`. Consider specifying the format to make it easier for users to edit this field in the UI."
-                )
-
-
-async def test_duplicate_order(target_spec: ConnectorSpecification):
-    """
-    Custom ordering of field (via the "order" property defined in the field) is not allowed to have duplicates within the same group.
-    `{ "a": { "order": 1 }, "b": { "order": 1 } }` is invalid because there are two fields with order 1
-    `{ "a": { "order": 1 }, "b": { "order": 1, "group": "x" } }` is valid because the fields with the same order are in different groups
-    """
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    errors = []
-    for properties_path, properties in dpath.util.search(
-        target_spec.connectionSpecification, "**/properties", yielded=True
-    ):
-        definition = schema_helper.get_parent(properties_path)
-        if definition.get("type") != "object":
-            # unrelated "properties", not an actual object definition
-            continue
-        used_orders: Dict[str, Set[int]] = {}
-        for property in properties.values():
-            if "order" not in property:
-                continue
-            order = property.get("order")
-            group = property.get("group", "")
-            if group not in used_orders:
-                used_orders[group] = set()
-            orders_for_group = used_orders[group]
-            if order in orders_for_group:
-                errors.append(f"{properties_path} has duplicate order: {order}")
-            orders_for_group.add(order)
-    _fail_on_errors(errors)
-
-
-async def test_nested_group(target_spec: ConnectorSpecification):
-    """
-    Groups can only be defined on the top level properties
-    `{ "a": { "group": "x" }}` is valid because field "a" is a top level field
-    `{ "a": { "oneOf": [{ "type": "object", "properties": { "b": { "group": "x" } } }] }}` is invalid because field "b" is nested in a oneOf
-    """
-    errors = []
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    for result in dpath.util.search(
-        target_spec.connectionSpecification, "/properties/**/group", yielded=True
-    ):
-        group_path = result[0]
-        parent_path = schema_helper.get_parent_path(group_path)
-        is_property_named_group = parent_path.endswith("properties")
-        grandparent_path = schema_helper.get_parent_path(parent_path)
-        if grandparent_path != "/properties" and not is_property_named_group:
-            errors.append(
-                f"Groups can only be defined on top level, is defined at {group_path}"
-            )
-    _fail_on_errors(errors)
-
-
-async def test_display_type(target_spec: ConnectorSpecification):
-    """
-    The display_type property can only be set on fields which have a oneOf property, and must be either "dropdown" or "radio"
-    """
-    errors = []
-    schema_helper = JsonSchemaHelper(target_spec.connectionSpecification)
-    for result in dpath.util.search(
-        target_spec.connectionSpecification, "/properties/**/display_type", yielded=True
-    ):
-        display_type_path = result[0]
-        parent_path = schema_helper.get_parent_path(display_type_path)
-        is_property_named_display_type = parent_path.endswith("properties")
-        if is_property_named_display_type:
-            continue
-        parent_object = schema_helper.get_parent(display_type_path)
-        if "oneOf" not in parent_object:
-            errors.append(
-                f"display_type is only allowed on fields which have a oneOf property, but is set on {parent_path}"
-            )
-        display_type_value = parent_object.get("display_type")
-        if display_type_value != "dropdown" and display_type_value != "radio":
-            errors.append(
-                f"display_type must be either 'dropdown' or 'radio', but is set to '{display_type_value}' at {display_type_path}"
-            )
-    _fail_on_errors(errors)
-
-
-async def test_defined_refs_exist_in_json_spec_file(
-    target_spec: ConnectorSpecification,
-):
-    """Checking for the presence of unresolved `$ref`s values within each json spec file"""
-    check_result = list(
-        find_all_values_for_key_in_schema(
-            target_spec.connectionSpecification["properties"], "$ref"
-        )
-    )
-    assert not check_result, "Found unresolved `$refs` value in spec.json file"
-
-
-async def test_oauth_flow_parameters(target_spec: ConnectorSpecification):
-    """Check if connector has correct oauth flow parameters according to
-    https://docs.airbyte.io/connector-development/connector-specification-reference
-    """
-    advanced_auth = target_spec.advanced_auth
-    if not advanced_auth:
-        return
-    spec_schema = target_spec.connectionSpecification
-    paths_to_validate = set()
-    if advanced_auth.predicate_key:
-        paths_to_validate.add("/" + "/".join(advanced_auth.predicate_key))
-    oauth_config_specification = advanced_auth.oauth_config_specification
-    if oauth_config_specification:
-        if oauth_config_specification.oauth_user_input_from_connector_config_specification:
-            paths_to_validate.update(
-                get_paths_in_connector_config(
-                    oauth_config_specification.oauth_user_input_from_connector_config_specification[
-                        "properties"
-                    ]
-                )
-            )
-        if oauth_config_specification.complete_oauth_output_specification:
-            paths_to_validate.update(
-                get_paths_in_connector_config(
-                    oauth_config_specification.complete_oauth_output_specification[
-                        "properties"
-                    ]
-                )
-            )
-        if oauth_config_specification.complete_oauth_server_output_specification:
-            paths_to_validate.update(
-                get_paths_in_connector_config(
-                    oauth_config_specification.complete_oauth_server_output_specification[
-                        "properties"
-                    ]
-                )
-            )
-
-    diff = paths_to_validate - set(get_expected_schema_structure(spec_schema))
-    assert diff == set(), f"Specified oauth fields are missed from spec schema: {diff}"
-
-
-async def test_oauth_is_default_method(target_spec: ConnectorSpecification):
-    """
-    OAuth is default check.
-    If credentials do have oneOf: we check that the OAuth is listed at first.
-    If there is no oneOf and Oauth: OAuth is only option to authenticate the source and no check is needed.
-    """
-    advanced_auth = target_spec.advanced_auth
-    if not advanced_auth:
-        pytest.skip("Source does not have OAuth method.")
-    if not advanced_auth.predicate_key:
-        pytest.skip(
-            "Advanced Auth object does not have predicate_key, only one option to authenticate."
-        )
-
-    spec_schema = target_spec.connectionSpecification
-    credentials = advanced_auth.predicate_key[0]
-    try:
-        one_of_default_method = dpath.util.get(
-            spec_schema, f"/**/{credentials}/oneOf/0"
-        )
-    except KeyError: # Key Error when oneOf is not in credentials object
-        pytest.skip("Credentials object does not have oneOf option.")
-
-    path_in_credentials = "/".join(advanced_auth.predicate_key[1:])
-    auth_method_predicate_const = dpath.util.get(
-        one_of_default_method, f"/**/{path_in_credentials}/const"
-    )
-    assert auth_method_predicate_const == advanced_auth.predicate_value, (
-        f"Oauth method should be a default option. Current default method is {auth_method_predicate_const}."
-    )
-
-
-async def test_additional_properties_is_true(target_spec: ConnectorSpecification):
-    """Check that value of the "additionalProperties" field is always true.
-    A spec declaring "additionalProperties": false introduces the risk of accidental breaking changes.
-    Specifically, when removing a property from the spec, existing connector configs will no longer be valid.
-    False value introduces the risk of accidental breaking changes.
-    Read https://github.com/airbytehq/airbyte/issues/14196 for more details"""
-    additional_properties_values = find_all_values_for_key_in_schema(
-        target_spec.connectionSpecification, "additionalProperties"
-    )
-    if additional_properties_values:
-        assert all(
-            [
-                additional_properties_value is True
-                for additional_properties_value in additional_properties_values
-            ]
-        ), (
-            "When set, additionalProperties field value must be true for backward compatibility."
-        )
{airbyte_internal_ops-0.4.1.dist-info → airbyte_internal_ops-0.5.0.dist-info}/WHEEL
RENAMED
File without changes

{airbyte_internal_ops-0.4.1.dist-info → airbyte_internal_ops-0.5.0.dist-info}/entry_points.txt
RENAMED
File without changes
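As an aside for readers unfamiliar with the removed validation suite: the core check in `test_config_match_spec` above is plain JSON Schema validation of a connector config against the `connectionSpecification` returned by the spec call, after dropping keys that start with an underscore. A standalone sketch of that check follows; the toy spec and config here are illustrative and not taken from the package.

import jsonschema

spec = {
    "type": "object",
    "required": ["api_key"],
    "properties": {
        "api_key": {"type": "string", "airbyte_secret": True},
        # DATE_PATTERN from the removed test module, shown here for illustration.
        "start_date": {"type": "string", "format": "date", "pattern": "^[0-9]{2}-[0-9]{2}-[0-9]{4}$"},
    },
}

config = {"api_key": "***", "start_date": "01-01-2024", "_internal": "dropped"}

# Mirror the test: drop technical keys that start with an underscore, then validate.
public_config = {k: v for k, v in config.items() if not k.startswith("_")}
jsonschema.validate(instance=public_config, schema=spec)  # raises ValidationError on mismatch
print("config matches spec")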