dagster-airbyte 0.25.1__py3-none-any.whl → 0.25.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dagster-airbyte might be problematic.
- dagster_airbyte/__init__.py +10 -1
- dagster_airbyte/asset_decorator.py +113 -0
- dagster_airbyte/asset_defs.py +152 -47
- dagster_airbyte/managed/generated/sources.py +33 -33
- dagster_airbyte/managed/reconciliation.py +22 -34
- dagster_airbyte/managed/types.py +8 -7
- dagster_airbyte/ops.py +3 -2
- dagster_airbyte/resources.py +626 -36
- dagster_airbyte/translator.py +236 -0
- dagster_airbyte/types.py +2 -1
- dagster_airbyte/utils.py +38 -2
- dagster_airbyte/version.py +1 -1
- {dagster_airbyte-0.25.1.dist-info → dagster_airbyte-0.25.10.dist-info}/METADATA +4 -3
- dagster_airbyte-0.25.10.dist-info/RECORD +23 -0
- dagster_airbyte-0.25.1.dist-info/RECORD +0 -21
- {dagster_airbyte-0.25.1.dist-info → dagster_airbyte-0.25.10.dist-info}/LICENSE +0 -0
- {dagster_airbyte-0.25.1.dist-info → dagster_airbyte-0.25.10.dist-info}/WHEEL +0 -0
- {dagster_airbyte-0.25.1.dist-info → dagster_airbyte-0.25.10.dist-info}/entry_points.txt +0 -0
- {dagster_airbyte-0.25.1.dist-info → dagster_airbyte-0.25.10.dist-info}/top_level.txt +0 -0
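The largest additions in this release are dagster_airbyte/asset_decorator.py, dagster_airbyte/translator.py, and the much-expanded dagster_airbyte/resources.py, which back the asset-oriented Airbyte Cloud API documented for the dagster-airbyte 0.25.x line. The sketch below shows how that surface is typically wired; the names AirbyteCloudWorkspace, airbyte_assets, and sync_and_poll are taken from the public documentation for this release line and should be treated as assumptions rather than as confirmed by this diff, and the identifiers and environment-variable names are placeholders.

from dagster import Definitions, EnvVar
from dagster_airbyte import AirbyteCloudWorkspace, airbyte_assets

# Placeholder credentials; AirbyteCloudWorkspace/airbyte_assets are assumed
# from the 0.25.x documentation, not confirmed by this diff.
workspace = AirbyteCloudWorkspace(
    workspace_id=EnvVar("AIRBYTE_WORKSPACE_ID"),
    client_id=EnvVar("AIRBYTE_CLIENT_ID"),
    client_secret=EnvVar("AIRBYTE_CLIENT_SECRET"),
)

@airbyte_assets(connection_id="<your-connection-id>", workspace=workspace)
def airbyte_connection_assets(context, airbyte: AirbyteCloudWorkspace):
    # Kick off the Airbyte sync and yield one result per synced table.
    yield from airbyte.sync_and_poll(context=context)

defs = Definitions(
    assets=[airbyte_connection_assets],
    resources={"airbyte": workspace},
)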
dagster_airbyte/managed/generated/sources.py
CHANGED
@@ -1,5 +1,5 @@
 # ruff: noqa: A001, A002
-from typing import List, Optional, Union
+from typing import Optional, Union
 
 import dagster._check as check
 from dagster._annotations import public
@@ -154,7 +154,7 @@ class LinkedinAdsSource(GeneratedAirbyteSource):
         name: str,
         credentials: Union["LinkedinAdsSource.OAuth20", "LinkedinAdsSource.AccessToken"],
         start_date: str,
-        account_ids: Optional[List[int]] = None,
+        account_ids: Optional[list[int]] = None,
     ):
         """Airbyte Source for Linkedin Ads.
 
@@ -826,9 +826,9 @@ class JiraSource(GeneratedAirbyteSource):
         api_token: str,
         domain: str,
         email: str,
-        projects: Optional[List[str]] = None,
+        projects: Optional[list[str]] = None,
         start_date: Optional[str] = None,
-        additional_fields: Optional[List[str]] = None,
+        additional_fields: Optional[list[str]] = None,
         expand_issue_changelog: Optional[bool] = None,
         render_fields: Optional[bool] = None,
         enable_experimental_streams: Optional[bool] = None,
@@ -1078,7 +1078,7 @@ class QualarooSource(GeneratedAirbyteSource):
         token: str,
         key: str,
         start_date: str,
-        survey_ids: Optional[List[str]] = None,
+        survey_ids: Optional[list[str]] = None,
     ):
         """Airbyte Source for Qualaroo.
 
@@ -1405,7 +1405,7 @@ class OracleSource(GeneratedAirbyteSource):
             "OracleSource.TLSEncryptedVerifyCertificate",
         ],
         password: Optional[str] = None,
-        schemas: Optional[List[str]] = None,
+        schemas: Optional[list[str]] = None,
         jdbc_url_params: Optional[str] = None,
     ):
         """Airbyte Source for Oracle.
@@ -1694,7 +1694,7 @@ class LookerSource(GeneratedAirbyteSource):
         domain: str,
         client_id: str,
         client_secret: str,
-        run_look_ids: Optional[List[str]] = None,
+        run_look_ids: Optional[list[str]] = None,
     ):
         """Airbyte Source for Looker.
 
@@ -1786,8 +1786,8 @@ class AmazonAdsSource(GeneratedAirbyteSource):
         report_wait_timeout: Optional[int] = None,
         report_generation_max_retries: Optional[int] = None,
         start_date: Optional[str] = None,
-        profiles: Optional[List[int]] = None,
-        state_filter: Optional[List[str]] = None,
+        profiles: Optional[list[int]] = None,
+        state_filter: Optional[list[str]] = None,
     ):
         """Airbyte Source for Amazon Ads.
 
@@ -2192,7 +2192,7 @@ class SearchMetricsSource(GeneratedAirbyteSource):
 class TypeformSource(GeneratedAirbyteSource):
     @public
     def __init__(
-        self, name: str, start_date: str, token: str, form_ids: Optional[List[str]] = None
+        self, name: str, start_date: str, token: str, form_ids: Optional[list[str]] = None
     ):
         """Airbyte Source for Typeform.
 
@@ -2355,10 +2355,10 @@ class AdjustSource(GeneratedAirbyteSource):
         self,
         name: str,
         api_token: str,
-        dimensions: List[str],
+        dimensions: list[str],
         ingest_start: str,
-        metrics: List[str],
-        additional_metrics: Optional[List[str]] = None,
+        metrics: list[str],
+        additional_metrics: Optional[list[str]] = None,
         until_today: Optional[bool] = None,
     ):
         """Airbyte Source for Adjust.
@@ -2448,7 +2448,7 @@ class GoogleAdsSource(GeneratedAirbyteSource):
         customer_id: str,
         start_date: str,
         end_date: Optional[str] = None,
-        custom_queries: Optional[List[CustomGAQLQueriesEntry]] = None,
+        custom_queries: Optional[list[CustomGAQLQueriesEntry]] = None,
         login_customer_id: Optional[str] = None,
         conversion_window_days: Optional[int] = None,
     ):
@@ -2590,7 +2590,7 @@ class SalesforceSource(GeneratedAirbyteSource):
         is_sandbox: Optional[bool] = None,
         auth_type: Optional[str] = None,
         start_date: Optional[str] = None,
-        streams_criteria: Optional[List[FilterSalesforceObjectsEntry]] = None,
+        streams_criteria: Optional[list[FilterSalesforceObjectsEntry]] = None,
     ):
         """Airbyte Source for Salesforce.
 
@@ -2836,8 +2836,8 @@ class OrbSource(GeneratedAirbyteSource):
         api_key: str,
         start_date: Optional[str] = None,
         lookback_window_days: Optional[int] = None,
-        string_event_properties_keys: Optional[List[str]] = None,
-        numeric_event_properties_keys: Optional[List[str]] = None,
+        string_event_properties_keys: Optional[list[str]] = None,
+        numeric_event_properties_keys: Optional[list[str]] = None,
     ):
         """Airbyte Source for Orb.
 
@@ -3230,7 +3230,7 @@ class SlackSource(GeneratedAirbyteSource):
         credentials: Union[
             "SlackSource.DefaultOAuth20Authorization", "SlackSource.APITokenCredentials"
         ],
-        channel_filter: Optional[List[str]] = None,
+        channel_filter: Optional[list[str]] = None,
     ):
         """Airbyte Source for Slack.
 
@@ -3575,7 +3575,7 @@ class PostgresSource(GeneratedAirbyteSource):
             "PostgresSource.SSHKeyAuthentication",
             "PostgresSource.PasswordAuthentication",
         ],
-        schemas: Optional[List[str]] = None,
+        schemas: Optional[list[str]] = None,
         password: Optional[str] = None,
         jdbc_url_params: Optional[str] = None,
         ssl: Optional[bool] = None,
@@ -3643,7 +3643,7 @@ class TrelloSource(GeneratedAirbyteSource):
         token: str,
         key: str,
         start_date: str,
-        board_ids: Optional[List[str]] = None,
+        board_ids: Optional[list[str]] = None,
     ):
         """Airbyte Source for Trello.
 
@@ -3741,7 +3741,7 @@ class S3Source(GeneratedAirbyteSource):
     def __init__(
         self,
         filetype: Optional[str] = None,
-        columns: Optional[List[str]] = None,
+        columns: Optional[list[str]] = None,
         batch_size: Optional[int] = None,
         buffer_size: Optional[int] = None,
     ):
@@ -4005,7 +4005,7 @@ class MssqlSource(GeneratedAirbyteSource):
             "MssqlSource.EncryptedVerifyCertificate",
         ],
         replication_method: Union["MssqlSource.Standard", "MssqlSource.LogicalReplicationCDC"],
-        schemas: Optional[List[str]] = None,
+        schemas: Optional[list[str]] = None,
         password: Optional[str] = None,
         jdbc_url_params: Optional[str] = None,
     ):
@@ -4096,7 +4096,7 @@ class RedshiftSource(GeneratedAirbyteSource):
         database: str,
         username: str,
         password: str,
-        schemas: Optional[List[str]] = None,
+        schemas: Optional[list[str]] = None,
         jdbc_url_params: Optional[str] = None,
     ):
         """Airbyte Source for Redshift.
@@ -4228,7 +4228,7 @@ class SentrySource(GeneratedAirbyteSource):
         organization: str,
         project: str,
         hostname: Optional[str] = None,
-        discover_fields: Optional[List[str]] = None,
+        discover_fields: Optional[list[str]] = None,
     ):
         """Airbyte Source for Sentry.
 
@@ -4317,7 +4317,7 @@ class PythonHttpTutorialSource(GeneratedAirbyteSource):
 
 class AirtableSource(GeneratedAirbyteSource):
     @public
-    def __init__(self, name: str, api_key: str, base_id: str, tables: List[str]):
+    def __init__(self, name: str, api_key: str, base_id: str, tables: list[str]):
         """Airbyte Source for Airtable.
 
         Documentation can be found at https://docs.airbyte.com/integrations/sources/airtable
@@ -5123,7 +5123,7 @@ class NetsuiteSource(GeneratedAirbyteSource):
         token_key: str,
         token_secret: str,
         start_datetime: str,
-        object_types: Optional[List[str]] = None,
+        object_types: Optional[list[str]] = None,
         window_in_days: Optional[int] = None,
     ):
         """Airbyte Source for Netsuite.
@@ -5190,7 +5190,7 @@ class Dv360Source(GeneratedAirbyteSource):
         partner_id: int,
         start_date: str,
         end_date: Optional[str] = None,
-        filters: Optional[List[str]] = None,
+        filters: Optional[list[str]] = None,
     ):
         """Airbyte Source for Dv 360.
 
@@ -5775,7 +5775,7 @@ class GoogleSearchConsoleSource(GeneratedAirbyteSource):
     def __init__(
         self,
         name: str,
-        site_urls: List[str],
+        site_urls: list[str],
         start_date: str,
         authorization: Union[
             "GoogleSearchConsoleSource.OAuth",
@@ -5816,9 +5816,9 @@ class FacebookMarketingSource(GeneratedAirbyteSource):
     def __init__(
         self,
         name: str,
-        fields: Optional[List[str]] = None,
-        breakdowns: Optional[List[str]] = None,
-        action_breakdowns: Optional[List[str]] = None,
+        fields: Optional[list[str]] = None,
+        breakdowns: Optional[list[str]] = None,
+        action_breakdowns: Optional[list[str]] = None,
         time_increment: Optional[int] = None,
         start_date: Optional[str] = None,
         end_date: Optional[str] = None,
@@ -5847,7 +5847,7 @@ class FacebookMarketingSource(GeneratedAirbyteSource):
         end_date: Optional[str] = None,
         include_deleted: Optional[bool] = None,
         fetch_thumbnail_images: Optional[bool] = None,
-        custom_insights: Optional[List[InsightConfig]] = None,
+        custom_insights: Optional[list[InsightConfig]] = None,
         page_size: Optional[int] = None,
         insights_lookback_window: Optional[int] = None,
         max_batch_size: Optional[int] = None,
@@ -5891,7 +5891,7 @@ class FacebookMarketingSource(GeneratedAirbyteSource):
 class SurveymonkeySource(GeneratedAirbyteSource):
     @public
     def __init__(
-        self, name: str, access_token: str, start_date: str, survey_ids: Optional[List[str]] = None
+        self, name: str, access_token: str, start_date: str, survey_ids: Optional[list[str]] = None
     ):
         """Airbyte Source for Surveymonkey.
 
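Every hunk above follows the same pattern: the capitalized typing generics (List[...]) in the generated source constructors are replaced by the PEP 585 builtin generics (list[...]) available on Python 3.9+. A minimal, hypothetical constructor in the new style looks like this (ExampleSource is illustrative, not a class from the package):

from typing import Optional

class ExampleSource:
    # Builtin list[str] replaces typing.List[str]; Optional is still imported
    # from typing, matching the updated import at the top of sources.py.
    def __init__(self, name: str, stream_ids: Optional[list[str]] = None):
        self.name = name
        self.stream_ids = stream_ids or []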
dagster_airbyte/managed/reconciliation.py
CHANGED
@@ -1,16 +1,5 @@
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Iterable,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Tuple,
-    Union,
-    cast,
-)
+from collections.abc import Iterable, Mapping, Sequence
+from typing import Any, Callable, Optional, Union, cast
 
 import dagster._check as check
 from dagster import AssetKey
@@ -36,7 +25,6 @@ from dagster_managed_elements.utils import UNSET, diff_dicts
 from dagster_airbyte.asset_defs import (
     AirbyteConnectionMetadata,
     AirbyteInstanceCacheableAssetsDefinition,
-    _clean_name,
 )
 from dagster_airbyte.managed.types import (
     MANAGED_ELEMENTS_DEPRECATION_MSG,
@@ -50,7 +38,7 @@ from dagster_airbyte.managed.types import (
     InitializedAirbyteSource,
 )
 from dagster_airbyte.resources import AirbyteResource
-from dagster_airbyte.utils import is_basic_normalization_operation
+from dagster_airbyte.utils import clean_name, is_basic_normalization_operation
 
 
 def gen_configured_stream_json(
@@ -170,13 +158,13 @@ def reconcile_sources(
     dry_run: bool,
     should_delete: bool,
     ignore_secrets: bool,
-) -> Tuple[Mapping[str, InitializedAirbyteSource], ManagedElementCheckResult]:
+) -> tuple[Mapping[str, InitializedAirbyteSource], ManagedElementCheckResult]:
     """Generates a diff of the configured and existing sources and reconciles them to match the
     configured state if dry_run is False.
     """
     diff = ManagedElementDiff()
 
-    initialized_sources: Dict[str, InitializedAirbyteSource] = {}
+    initialized_sources: dict[str, InitializedAirbyteSource] = {}
     for source_name in set(config_sources.keys()).union(existing_sources.keys()):
         configured_source = config_sources.get(source_name)
         existing_source = existing_sources.get(source_name)
@@ -224,7 +212,7 @@ def reconcile_sources(
         else:
             if not dry_run:
                 create_result = cast(
-                    Dict[str, str],
+                    dict[str, str],
                     check.not_none(
                         res.make_request(
                             endpoint="/sources/create",
@@ -257,13 +245,13 @@ def reconcile_destinations(
     dry_run: bool,
     should_delete: bool,
     ignore_secrets: bool,
-) -> Tuple[Mapping[str, InitializedAirbyteDestination], ManagedElementCheckResult]:
+) -> tuple[Mapping[str, InitializedAirbyteDestination], ManagedElementCheckResult]:
     """Generates a diff of the configured and existing destinations and reconciles them to match the
     configured state if dry_run is False.
     """
     diff = ManagedElementDiff()
 
-    initialized_destinations: Dict[str, InitializedAirbyteDestination] = {}
+    initialized_destinations: dict[str, InitializedAirbyteDestination] = {}
     for destination_name in set(config_destinations.keys()).union(existing_destinations.keys()):
         configured_destination = config_destinations.get(destination_name)
         existing_destination = existing_destinations.get(destination_name)
@@ -312,7 +300,7 @@ def reconcile_destinations(
         else:
             if not dry_run:
                 create_result = cast(
-                    Dict[str, str],
+                    dict[str, str],
                     check.not_none(
                         res.make_request(
                             endpoint="/destinations/create",
@@ -358,23 +346,23 @@ def reconcile_config(
     workspace_id = res.get_default_workspace()
 
     existing_sources_raw = cast(
-        Dict[str, List[Dict[str, Any]]],
+        dict[str, list[dict[str, Any]]],
         check.not_none(
            res.make_request(endpoint="/sources/list", data={"workspaceId": workspace_id})
        ),
    )
     existing_dests_raw = cast(
-        Dict[str, List[Dict[str, Any]]],
+        dict[str, list[dict[str, Any]]],
         check.not_none(
            res.make_request(endpoint="/destinations/list", data={"workspaceId": workspace_id})
        ),
    )
 
-    existing_sources: Dict[str, InitializedAirbyteSource] = {
+    existing_sources: dict[str, InitializedAirbyteSource] = {
         source_json["name"]: InitializedAirbyteSource.from_api_json(source_json)
         for source_json in existing_sources_raw.get("sources", [])
     }
-    existing_dests: Dict[str, InitializedAirbyteDestination] = {
+    existing_dests: dict[str, InitializedAirbyteDestination] = {
         destination_json["name"]: InitializedAirbyteDestination.from_api_json(destination_json)
         for destination_json in existing_dests_raw.get("destinations", [])
     }
@@ -434,7 +422,7 @@ def reconcile_normalization(
     existing_basic_norm_op_id = None
     if existing_connection_id:
         operations = cast(
-            Dict[str, List[Dict[str, str]]],
+            dict[str, list[dict[str, str]]],
             check.not_none(
                 res.make_request(
                     endpoint="/operations/list",
@@ -462,7 +450,7 @@ def reconcile_normalization(
         return existing_basic_norm_op_id
     else:
         return cast(
-            Dict[str, str],
+            dict[str, str],
             check.not_none(
                 res.make_request(
                     endpoint="/operations/create",
@@ -504,12 +492,12 @@ def reconcile_connections_pre(
     diff = ManagedElementDiff()
 
     existing_connections_raw = cast(
-        Dict[str, List[Dict[str, Any]]],
+        dict[str, list[dict[str, Any]]],
         check.not_none(
             res.make_request(endpoint="/connections/list", data={"workspaceId": workspace_id})
         ),
     )
-    existing_connections: Dict[str, InitializedAirbyteConnection] = {
+    existing_connections: dict[str, InitializedAirbyteConnection] = {
         connection_json["name"]: InitializedAirbyteConnection.from_api_json(
             connection_json, existing_sources, existing_destinations
         )
@@ -549,7 +537,7 @@ def reconcile_connections_post(
 ) -> None:
     """Creates new and modifies existing connections based on the config if dry_run is False."""
     existing_connections_raw = cast(
-        Dict[str, List[Dict[str, Any]]],
+        dict[str, list[dict[str, Any]]],
         check.not_none(
             res.make_request(endpoint="/connections/list", data={"workspaceId": workspace_id})
         ),
@@ -725,9 +713,9 @@ class AirbyteManagedElementCacheableAssetsDefinition(AirbyteInstanceCacheableAss
             connection_to_asset_key_fn=connection_to_asset_key_fn,
             connection_to_freshness_policy_fn=connection_to_freshness_policy_fn,
         )
-        self._connections: List[AirbyteConnection] = list(connections)
+        self._connections: list[AirbyteConnection] = list(connections)
 
-    def _get_connections(self) -> Sequence[Tuple[str, AirbyteConnectionMetadata]]:
+    def _get_connections(self) -> Sequence[tuple[str, AirbyteConnectionMetadata]]:
         diff = reconcile_config(self._airbyte_instance, self._connections, dry_run=True)
         if isinstance(diff, ManagedElementDiff) and not diff.is_empty():
             raise ValueError(
@@ -746,7 +734,7 @@ def load_assets_from_connections(
     connections: Iterable[AirbyteConnection],
     key_prefix: Optional[CoercibleToAssetKeyPrefix] = None,
    create_assets_for_normalization_tables: bool = True,
-    connection_to_group_fn: Optional[Callable[[str], Optional[str]]] = _clean_name,
+    connection_to_group_fn: Optional[Callable[[str], Optional[str]]] = clean_name,
     connection_meta_to_group_fn: Optional[
         Callable[[AirbyteConnectionMetadata], Optional[str]]
     ] = None,
@@ -821,7 +809,7 @@ def load_assets_from_connections(
     check.invariant(
         not connection_meta_to_group_fn
         or not connection_to_group_fn
-        or connection_to_group_fn == _clean_name,
+        or connection_to_group_fn == clean_name,
         "Cannot specify both connection_meta_to_group_fn and connection_to_group_fn",
     )
 
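The default connection_to_group_fn is now the public clean_name helper from dagster_airbyte.utils, replacing the private _clean_name previously imported from asset_defs. The sketch below shows roughly what such a sanitizer does (lowercasing and collapsing characters that are not valid in Dagster group names); it is an illustrative assumption, not the packaged implementation.

import re

def clean_name_sketch(name: str) -> str:
    # Keep lowercase alphanumerics and underscores; collapse everything else.
    return re.sub(r"[^a-z0-9_]+", "_", name.lower())

# e.g. clean_name_sketch("My Postgres -> Snowflake") == "my_postgres_snowflake"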
dagster_airbyte/managed/types.py
CHANGED
@@ -1,7 +1,8 @@
 import json
 from abc import ABC
+from collections.abc import Mapping
 from enum import Enum
-from typing import Any, Dict, List, Mapping, Optional, Union
+from typing import Any, Optional, Union
 
 import dagster._check as check
 from dagster._annotations import deprecated, public
@@ -24,14 +25,14 @@ class AirbyteSyncMode(ABC):
     def __eq__(self, other: Any) -> bool:
         return isinstance(other, AirbyteSyncMode) and self.to_json() == other.to_json()
 
-    def __init__(self, json_repr: Dict[str, Any]):
+    def __init__(self, json_repr: dict[str, Any]):
         self.json_repr = json_repr
 
-    def to_json(self) -> Dict[str, Any]:
+    def to_json(self) -> dict[str, Any]:
         return self.json_repr
 
     @classmethod
-    def from_json(cls, json_repr: Dict[str, Any]) -> "AirbyteSyncMode":
+    def from_json(cls, json_repr: dict[str, Any]) -> "AirbyteSyncMode":
         return cls(
             {
                 k: v
@@ -86,7 +87,7 @@ class AirbyteSyncMode(ABC):
     def incremental_append_dedup(
         cls,
         cursor_field: Optional[str] = None,
-        primary_key: Optional[Union[str, List[str]]] = None,
+        primary_key: Optional[Union[str, list[str]]] = None,
     ) -> "AirbyteSyncMode":
         """Syncs new records from the source, appending to an append-only history
         table in the destination. Also generates a deduplicated view mirroring the
@@ -349,11 +350,11 @@ class InitializedAirbyteConnection:
     )
 
 
-def _remove_none_values(obj: Dict[str, Any]) -> Dict[str, Any]:
+def _remove_none_values(obj: dict[str, Any]) -> dict[str, Any]:
     return {k: v for k, v in obj.items() if v is not None}
 
 
-def _dump_class(obj: Any) -> Dict[str, Any]:
+def _dump_class(obj: Any) -> dict[str, Any]:
     return json.loads(json.dumps(obj, default=lambda o: _remove_none_values(o.__dict__)))
 
 
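The AirbyteSyncMode constructors shown above round-trip through plain dictionaries via to_json/from_json. A short usage sketch follows; the cursor and primary-key field names are placeholders.

from dagster_airbyte.managed.types import AirbyteSyncMode

# Placeholder cursor/primary-key fields for an incremental, deduplicated sync.
mode = AirbyteSyncMode.incremental_append_dedup(
    cursor_field="updated_at",
    primary_key="id",
)

mode_json = mode.to_json()                      # plain dict representation
rebuilt = AirbyteSyncMode.from_json(mode_json)  # reconstructs an equivalent mode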
dagster_airbyte/ops.py
CHANGED
@@ -1,4 +1,5 @@
-from typing import Any, Iterable, List, Optional
+from collections.abc import Iterable
+from typing import Any, Optional
 
 from dagster import Config, In, Nothing, Out, Output, op
 from dagster._core.storage.tags import COMPUTE_KIND_TAG
@@ -40,7 +41,7 @@ class AirbyteSyncConfig(Config):
             "be yielded when the op executes."
         ),
     )
-    asset_key_prefix: List[str] = Field(
+    asset_key_prefix: list[str] = Field(
         ["airbyte"],
         description=(
             "If provided and yield_materializations is True, these components will be used to "