dagster-airbyte 0.24.3__py3-none-any.whl → 0.28.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
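Read as a whole, the changes follow a few recurring patterns: relative imports are replaced with absolute `dagster_airbyte.*` imports, `typing.List`/`Dict`/`Tuple` annotations move to the built-in `list`/`dict`/`tuple` generics (PEP 585), ABCs such as `Mapping`, `Iterable`, and `Callable` are imported from `collections.abc` instead of `typing`, `cast()` calls switch to string type arguments, and the `@experimental` annotation is renamed to `@beta`. A minimal sketch of the typing-related patterns follows; the function and its field names are hypothetical, not part of dagster-airbyte:

```python
# Illustrative only -- mirrors the annotation style adopted in this release,
# not actual dagster-airbyte code.
from collections.abc import Iterable, Mapping  # was: from typing import Iterable, Mapping
from typing import Any, Optional, cast


def summarize_connections(
    connections: Iterable[Mapping[str, Any]],
    name_filter: Optional[list[str]] = None,  # was: Optional[List[str]]
) -> dict[str, int]:  # was: Dict[str, int]
    """Count connections per status, keeping only the given names when a filter is set."""
    counts: dict[str, int] = {}
    for conn in connections:
        name = cast("str", conn.get("name", ""))  # string form of cast(), as in the diff
        if name_filter is not None and name not in name_filter:
            continue
        status = cast("str", conn.get("status", "unknown"))
        counts[status] = counts.get(status, 0) + 1
    return counts


print(summarize_connections([{"name": "orders", "status": "active"}]))  # {'active': 1}
```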
@@ -1,8 +1,8 @@
- from .reconciliation import (
+ from dagster_airbyte.managed.reconciliation import (
  AirbyteManagedElementReconciler as AirbyteManagedElementReconciler,
  load_assets_from_connections as load_assets_from_connections,
  )
- from .types import (
+ from dagster_airbyte.managed.types import (
  AirbyteConnection as AirbyteConnection,
  AirbyteDestination as AirbyteDestination,
  AirbyteDestinationNamespace as AirbyteDestinationNamespace,
@@ -1,4 +1,4 @@
- from . import (
+ from dagster_airbyte.managed.generated import (
  destinations as destinations,
  sources as sources,
  )
@@ -1,5 +1,5 @@
  # ruff: noqa: A001, A002
- from typing import List, Optional, Union
+ from typing import Optional, Union

  import dagster._check as check
  from dagster._annotations import public
@@ -154,7 +154,7 @@ class LinkedinAdsSource(GeneratedAirbyteSource):
  name: str,
  credentials: Union["LinkedinAdsSource.OAuth20", "LinkedinAdsSource.AccessToken"],
  start_date: str,
- account_ids: Optional[List[int]] = None,
+ account_ids: Optional[list[int]] = None,
  ):
  """Airbyte Source for Linkedin Ads.

@@ -826,9 +826,9 @@ class JiraSource(GeneratedAirbyteSource):
  api_token: str,
  domain: str,
  email: str,
- projects: Optional[List[str]] = None,
+ projects: Optional[list[str]] = None,
  start_date: Optional[str] = None,
- additional_fields: Optional[List[str]] = None,
+ additional_fields: Optional[list[str]] = None,
  expand_issue_changelog: Optional[bool] = None,
  render_fields: Optional[bool] = None,
  enable_experimental_streams: Optional[bool] = None,
@@ -1078,7 +1078,7 @@ class QualarooSource(GeneratedAirbyteSource):
  token: str,
  key: str,
  start_date: str,
- survey_ids: Optional[List[str]] = None,
+ survey_ids: Optional[list[str]] = None,
  ):
  """Airbyte Source for Qualaroo.

@@ -1405,7 +1405,7 @@ class OracleSource(GeneratedAirbyteSource):
  "OracleSource.TLSEncryptedVerifyCertificate",
  ],
  password: Optional[str] = None,
- schemas: Optional[List[str]] = None,
+ schemas: Optional[list[str]] = None,
  jdbc_url_params: Optional[str] = None,
  ):
  """Airbyte Source for Oracle.
@@ -1694,7 +1694,7 @@ class LookerSource(GeneratedAirbyteSource):
  domain: str,
  client_id: str,
  client_secret: str,
- run_look_ids: Optional[List[str]] = None,
+ run_look_ids: Optional[list[str]] = None,
  ):
  """Airbyte Source for Looker.

@@ -1786,8 +1786,8 @@ class AmazonAdsSource(GeneratedAirbyteSource):
  report_wait_timeout: Optional[int] = None,
  report_generation_max_retries: Optional[int] = None,
  start_date: Optional[str] = None,
- profiles: Optional[List[int]] = None,
- state_filter: Optional[List[str]] = None,
+ profiles: Optional[list[int]] = None,
+ state_filter: Optional[list[str]] = None,
  ):
  """Airbyte Source for Amazon Ads.

@@ -2192,7 +2192,7 @@ class SearchMetricsSource(GeneratedAirbyteSource):
  class TypeformSource(GeneratedAirbyteSource):
  @public
  def __init__(
- self, name: str, start_date: str, token: str, form_ids: Optional[List[str]] = None
+ self, name: str, start_date: str, token: str, form_ids: Optional[list[str]] = None
  ):
  """Airbyte Source for Typeform.

@@ -2355,10 +2355,10 @@ class AdjustSource(GeneratedAirbyteSource):
  self,
  name: str,
  api_token: str,
- dimensions: List[str],
+ dimensions: list[str],
  ingest_start: str,
- metrics: List[str],
- additional_metrics: Optional[List[str]] = None,
+ metrics: list[str],
+ additional_metrics: Optional[list[str]] = None,
  until_today: Optional[bool] = None,
  ):
  """Airbyte Source for Adjust.
@@ -2448,7 +2448,7 @@ class GoogleAdsSource(GeneratedAirbyteSource):
  customer_id: str,
  start_date: str,
  end_date: Optional[str] = None,
- custom_queries: Optional[List[CustomGAQLQueriesEntry]] = None,
+ custom_queries: Optional[list[CustomGAQLQueriesEntry]] = None,
  login_customer_id: Optional[str] = None,
  conversion_window_days: Optional[int] = None,
  ):
@@ -2590,7 +2590,7 @@ class SalesforceSource(GeneratedAirbyteSource):
  is_sandbox: Optional[bool] = None,
  auth_type: Optional[str] = None,
  start_date: Optional[str] = None,
- streams_criteria: Optional[List[FilterSalesforceObjectsEntry]] = None,
+ streams_criteria: Optional[list[FilterSalesforceObjectsEntry]] = None,
  ):
  """Airbyte Source for Salesforce.

@@ -2836,8 +2836,8 @@ class OrbSource(GeneratedAirbyteSource):
  api_key: str,
  start_date: Optional[str] = None,
  lookback_window_days: Optional[int] = None,
- string_event_properties_keys: Optional[List[str]] = None,
- numeric_event_properties_keys: Optional[List[str]] = None,
+ string_event_properties_keys: Optional[list[str]] = None,
+ numeric_event_properties_keys: Optional[list[str]] = None,
  ):
  """Airbyte Source for Orb.

@@ -3230,7 +3230,7 @@ class SlackSource(GeneratedAirbyteSource):
  credentials: Union[
  "SlackSource.DefaultOAuth20Authorization", "SlackSource.APITokenCredentials"
  ],
- channel_filter: Optional[List[str]] = None,
+ channel_filter: Optional[list[str]] = None,
  ):
  """Airbyte Source for Slack.

@@ -3575,7 +3575,7 @@ class PostgresSource(GeneratedAirbyteSource):
  "PostgresSource.SSHKeyAuthentication",
  "PostgresSource.PasswordAuthentication",
  ],
- schemas: Optional[List[str]] = None,
+ schemas: Optional[list[str]] = None,
  password: Optional[str] = None,
  jdbc_url_params: Optional[str] = None,
  ssl: Optional[bool] = None,
@@ -3643,7 +3643,7 @@ class TrelloSource(GeneratedAirbyteSource):
  token: str,
  key: str,
  start_date: str,
- board_ids: Optional[List[str]] = None,
+ board_ids: Optional[list[str]] = None,
  ):
  """Airbyte Source for Trello.

@@ -3741,7 +3741,7 @@ class S3Source(GeneratedAirbyteSource):
  def __init__(
  self,
  filetype: Optional[str] = None,
- columns: Optional[List[str]] = None,
+ columns: Optional[list[str]] = None,
  batch_size: Optional[int] = None,
  buffer_size: Optional[int] = None,
  ):
@@ -4005,7 +4005,7 @@ class MssqlSource(GeneratedAirbyteSource):
  "MssqlSource.EncryptedVerifyCertificate",
  ],
  replication_method: Union["MssqlSource.Standard", "MssqlSource.LogicalReplicationCDC"],
- schemas: Optional[List[str]] = None,
+ schemas: Optional[list[str]] = None,
  password: Optional[str] = None,
  jdbc_url_params: Optional[str] = None,
  ):
@@ -4096,7 +4096,7 @@ class RedshiftSource(GeneratedAirbyteSource):
  database: str,
  username: str,
  password: str,
- schemas: Optional[List[str]] = None,
+ schemas: Optional[list[str]] = None,
  jdbc_url_params: Optional[str] = None,
  ):
  """Airbyte Source for Redshift.
@@ -4228,7 +4228,7 @@ class SentrySource(GeneratedAirbyteSource):
  organization: str,
  project: str,
  hostname: Optional[str] = None,
- discover_fields: Optional[List[str]] = None,
+ discover_fields: Optional[list[str]] = None,
  ):
  """Airbyte Source for Sentry.

@@ -4317,7 +4317,7 @@ class PythonHttpTutorialSource(GeneratedAirbyteSource):

  class AirtableSource(GeneratedAirbyteSource):
  @public
- def __init__(self, name: str, api_key: str, base_id: str, tables: List[str]):
+ def __init__(self, name: str, api_key: str, base_id: str, tables: list[str]):
  """Airbyte Source for Airtable.

  Documentation can be found at https://docs.airbyte.com/integrations/sources/airtable
@@ -4551,7 +4551,7 @@ class ZendeskSupportSource(GeneratedAirbyteSource):
  Args:
  name (str): The name of the destination.
  start_date (str): The date from which you'd like to replicate data for Zendesk Support API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- subdomain (str): This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.
+ subdomain (str): This is your Zendesk subdomain that can be found in your account URL. For example, in {my_subdomain}.zendesk.com, where my_subdomain is the value of your subdomain.
  credentials (Union[ZendeskSupportSource.OAuth20, ZendeskSupportSource.APIToken]): Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
  """
  self.start_date = check.str_param(start_date, "start_date")
@@ -4859,7 +4859,7 @@ class ZendeskTalkSource(GeneratedAirbyteSource):

  Args:
  name (str): The name of the destination.
- subdomain (str): This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.
+ subdomain (str): This is your Zendesk subdomain that can be found in your account URL. For example, in {my_subdomain}.zendesk.com, where my_subdomain is the value of your subdomain.
  credentials (Union[ZendeskTalkSource.APIToken, ZendeskTalkSource.OAuth20]): Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
  start_date (str): The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
  """
@@ -5123,7 +5123,7 @@ class NetsuiteSource(GeneratedAirbyteSource):
  token_key: str,
  token_secret: str,
  start_datetime: str,
- object_types: Optional[List[str]] = None,
+ object_types: Optional[list[str]] = None,
  window_in_days: Optional[int] = None,
  ):
  """Airbyte Source for Netsuite.
@@ -5190,7 +5190,7 @@ class Dv360Source(GeneratedAirbyteSource):
  partner_id: int,
  start_date: str,
  end_date: Optional[str] = None,
- filters: Optional[List[str]] = None,
+ filters: Optional[list[str]] = None,
  ):
  """Airbyte Source for Dv 360.

@@ -5775,7 +5775,7 @@ class GoogleSearchConsoleSource(GeneratedAirbyteSource):
  def __init__(
  self,
  name: str,
- site_urls: List[str],
+ site_urls: list[str],
  start_date: str,
  authorization: Union[
  "GoogleSearchConsoleSource.OAuth",
@@ -5816,9 +5816,9 @@ class FacebookMarketingSource(GeneratedAirbyteSource):
  def __init__(
  self,
  name: str,
- fields: Optional[List[str]] = None,
- breakdowns: Optional[List[str]] = None,
- action_breakdowns: Optional[List[str]] = None,
+ fields: Optional[list[str]] = None,
+ breakdowns: Optional[list[str]] = None,
+ action_breakdowns: Optional[list[str]] = None,
  time_increment: Optional[int] = None,
  start_date: Optional[str] = None,
  end_date: Optional[str] = None,
@@ -5847,7 +5847,7 @@ class FacebookMarketingSource(GeneratedAirbyteSource):
  end_date: Optional[str] = None,
  include_deleted: Optional[bool] = None,
  fetch_thumbnail_images: Optional[bool] = None,
- custom_insights: Optional[List[InsightConfig]] = None,
+ custom_insights: Optional[list[InsightConfig]] = None,
  page_size: Optional[int] = None,
  insights_lookback_window: Optional[int] = None,
  max_batch_size: Optional[int] = None,
@@ -5891,7 +5891,7 @@ class FacebookMarketingSource(GeneratedAirbyteSource):
  class SurveymonkeySource(GeneratedAirbyteSource):
  @public
  def __init__(
- self, name: str, access_token: str, start_date: str, survey_ids: Optional[List[str]] = None
+ self, name: str, access_token: str, start_date: str, survey_ids: Optional[list[str]] = None
  ):
  """Airbyte Source for Surveymonkey.

@@ -1,23 +1,14 @@
- from typing import (
- Any,
- Callable,
- Dict,
- Iterable,
- List,
- Mapping,
- Optional,
- Sequence,
- Tuple,
- Union,
- cast,
- )
+ from collections.abc import Callable, Iterable, Mapping, Sequence
+ from typing import Any, Optional, Union, cast

  import dagster._check as check
  from dagster import AssetKey
- from dagster._annotations import deprecated, experimental, public
- from dagster._core.definitions.cacheable_assets import CacheableAssetsDefinition
+ from dagster._annotations import beta, deprecated, public
+ from dagster._core.definitions.assets.definition.cacheable_assets_definition import (
+ CacheableAssetsDefinition,
+ )
  from dagster._core.definitions.events import CoercibleToAssetKeyPrefix
- from dagster._core.definitions.freshness_policy import FreshnessPolicy
+ from dagster._core.definitions.freshness_policy import LegacyFreshnessPolicy
  from dagster._core.definitions.resource_definition import ResourceDefinition
  from dagster._core.execution.context.init import build_init_resource_context
  from dagster._utils.merger import deep_merge_dicts
@@ -36,8 +27,8 @@ from dagster_managed_elements.utils import UNSET, diff_dicts
  from dagster_airbyte.asset_defs import (
  AirbyteConnectionMetadata,
  AirbyteInstanceCacheableAssetsDefinition,
- _clean_name,
  )
+ from dagster_airbyte.legacy_resources import AirbyteResource
  from dagster_airbyte.managed.types import (
  MANAGED_ELEMENTS_DEPRECATION_MSG,
  AirbyteConnection,
@@ -49,8 +40,7 @@ from dagster_airbyte.managed.types import (
  InitializedAirbyteDestination,
  InitializedAirbyteSource,
  )
- from dagster_airbyte.resources import AirbyteResource
- from dagster_airbyte.utils import is_basic_normalization_operation
+ from dagster_airbyte.utils import clean_name, is_basic_normalization_operation


  def gen_configured_stream_json(
@@ -170,13 +160,13 @@ def reconcile_sources(
  dry_run: bool,
  should_delete: bool,
  ignore_secrets: bool,
- ) -> Tuple[Mapping[str, InitializedAirbyteSource], ManagedElementCheckResult]:
+ ) -> tuple[Mapping[str, InitializedAirbyteSource], ManagedElementCheckResult]:
  """Generates a diff of the configured and existing sources and reconciles them to match the
  configured state if dry_run is False.
  """
  diff = ManagedElementDiff()

- initialized_sources: Dict[str, InitializedAirbyteSource] = {}
+ initialized_sources: dict[str, InitializedAirbyteSource] = {}
  for source_name in set(config_sources.keys()).union(existing_sources.keys()):
  configured_source = config_sources.get(source_name)
  existing_source = existing_sources.get(source_name)
@@ -224,7 +214,7 @@ def reconcile_sources(
  else:
  if not dry_run:
  create_result = cast(
- Dict[str, str],
+ "dict[str, str]",
  check.not_none(
  res.make_request(
  endpoint="/sources/create",
@@ -257,13 +247,13 @@ def reconcile_destinations(
  dry_run: bool,
  should_delete: bool,
  ignore_secrets: bool,
- ) -> Tuple[Mapping[str, InitializedAirbyteDestination], ManagedElementCheckResult]:
+ ) -> tuple[Mapping[str, InitializedAirbyteDestination], ManagedElementCheckResult]:
  """Generates a diff of the configured and existing destinations and reconciles them to match the
  configured state if dry_run is False.
  """
  diff = ManagedElementDiff()

- initialized_destinations: Dict[str, InitializedAirbyteDestination] = {}
+ initialized_destinations: dict[str, InitializedAirbyteDestination] = {}
  for destination_name in set(config_destinations.keys()).union(existing_destinations.keys()):
  configured_destination = config_destinations.get(destination_name)
  existing_destination = existing_destinations.get(destination_name)
@@ -312,7 +302,7 @@ def reconcile_destinations(
  else:
  if not dry_run:
  create_result = cast(
- Dict[str, str],
+ "dict[str, str]",
  check.not_none(
  res.make_request(
  endpoint="/destinations/create",
@@ -358,23 +348,23 @@ def reconcile_config(
  workspace_id = res.get_default_workspace()

  existing_sources_raw = cast(
- Dict[str, List[Dict[str, Any]]],
+ "dict[str, list[dict[str, Any]]]",
  check.not_none(
  res.make_request(endpoint="/sources/list", data={"workspaceId": workspace_id})
  ),
  )
  existing_dests_raw = cast(
- Dict[str, List[Dict[str, Any]]],
+ "dict[str, list[dict[str, Any]]]",
  check.not_none(
  res.make_request(endpoint="/destinations/list", data={"workspaceId": workspace_id})
  ),
  )

- existing_sources: Dict[str, InitializedAirbyteSource] = {
+ existing_sources: dict[str, InitializedAirbyteSource] = {
  source_json["name"]: InitializedAirbyteSource.from_api_json(source_json)
  for source_json in existing_sources_raw.get("sources", [])
  }
- existing_dests: Dict[str, InitializedAirbyteDestination] = {
+ existing_dests: dict[str, InitializedAirbyteDestination] = {
  destination_json["name"]: InitializedAirbyteDestination.from_api_json(destination_json)
  for destination_json in existing_dests_raw.get("destinations", [])
  }
@@ -434,7 +424,7 @@ def reconcile_normalization(
  existing_basic_norm_op_id = None
  if existing_connection_id:
  operations = cast(
- Dict[str, List[Dict[str, str]]],
+ "dict[str, list[dict[str, str]]]",
  check.not_none(
  res.make_request(
  endpoint="/operations/list",
@@ -462,7 +452,7 @@ def reconcile_normalization(
  return existing_basic_norm_op_id
  else:
  return cast(
- Dict[str, str],
+ "dict[str, str]",
  check.not_none(
  res.make_request(
  endpoint="/operations/create",
@@ -504,12 +494,12 @@ def reconcile_connections_pre(
  diff = ManagedElementDiff()

  existing_connections_raw = cast(
- Dict[str, List[Dict[str, Any]]],
+ "dict[str, list[dict[str, Any]]]",
  check.not_none(
  res.make_request(endpoint="/connections/list", data={"workspaceId": workspace_id})
  ),
  )
- existing_connections: Dict[str, InitializedAirbyteConnection] = {
+ existing_connections: dict[str, InitializedAirbyteConnection] = {
  connection_json["name"]: InitializedAirbyteConnection.from_api_json(
  connection_json, existing_sources, existing_destinations
  )
@@ -549,7 +539,7 @@ def reconcile_connections_post(
  ) -> None:
  """Creates new and modifies existing connections based on the config if dry_run is False."""
  existing_connections_raw = cast(
- Dict[str, List[Dict[str, Any]]],
+ "dict[str, list[dict[str, Any]]]",
  check.not_none(
  res.make_request(endpoint="/connections/list", data={"workspaceId": workspace_id})
  ),
@@ -604,7 +594,7 @@ def reconcile_connections_post(
  connection_base_json["namespaceDefinition"] = config_conn.destination_namespace.value
  else:
  connection_base_json["namespaceDefinition"] = "customformat"
- connection_base_json["namespaceFormat"] = cast(str, config_conn.destination_namespace)
+ connection_base_json["namespaceFormat"] = cast("str", config_conn.destination_namespace)

  if config_conn.prefix:
  connection_base_json["prefix"] = config_conn.prefix
@@ -636,7 +626,7 @@ def reconcile_connections_post(
  )


- @experimental
+ @beta
  @deprecated(breaking_version="2.0", additional_warn_text=MANAGED_ELEMENTS_DEPRECATION_MSG)
  class AirbyteManagedElementReconciler(ManagedElementReconciler):
  """Reconciles Python-specified Airbyte connections with an Airbyte instance.
@@ -645,7 +635,7 @@ class AirbyteManagedElementReconciler(ManagedElementReconciler):
  CLI will allow you to check the state of your Python-code-specified Airbyte connections
  against an Airbyte instance, and reconcile them if necessary.

- This functionality is experimental and subject to change.
+ This functionality is in beta and subject to change.
  """

  @public
@@ -710,7 +700,7 @@ class AirbyteManagedElementCacheableAssetsDefinition(AirbyteInstanceCacheableAss
  connection_to_io_manager_key_fn: Optional[Callable[[str], Optional[str]]],
  connection_to_asset_key_fn: Optional[Callable[[AirbyteConnectionMetadata, str], AssetKey]],
  connection_to_freshness_policy_fn: Optional[
- Callable[[AirbyteConnectionMetadata], Optional[FreshnessPolicy]]
+ Callable[[AirbyteConnectionMetadata], Optional[LegacyFreshnessPolicy]]
  ],
  ):
  defined_conn_names = {conn.name for conn in connections}
@@ -725,9 +715,9 @@ class AirbyteManagedElementCacheableAssetsDefinition(AirbyteInstanceCacheableAss
  connection_to_asset_key_fn=connection_to_asset_key_fn,
  connection_to_freshness_policy_fn=connection_to_freshness_policy_fn,
  )
- self._connections: List[AirbyteConnection] = list(connections)
+ self._connections: list[AirbyteConnection] = list(connections)

- def _get_connections(self) -> Sequence[Tuple[str, AirbyteConnectionMetadata]]:
+ def _get_connections(self) -> Sequence[tuple[str, AirbyteConnectionMetadata]]:
  diff = reconcile_config(self._airbyte_instance, self._connections, dry_run=True)
  if isinstance(diff, ManagedElementDiff) and not diff.is_empty():
  raise ValueError(
@@ -739,14 +729,14 @@ class AirbyteManagedElementCacheableAssetsDefinition(AirbyteInstanceCacheableAss
  return super()._get_connections()


- @experimental
+ @beta
  @deprecated(breaking_version="2.0", additional_warn_text=MANAGED_ELEMENTS_DEPRECATION_MSG)
  def load_assets_from_connections(
  airbyte: Union[AirbyteResource, ResourceDefinition],
  connections: Iterable[AirbyteConnection],
  key_prefix: Optional[CoercibleToAssetKeyPrefix] = None,
  create_assets_for_normalization_tables: bool = True,
- connection_to_group_fn: Optional[Callable[[str], Optional[str]]] = _clean_name,
+ connection_to_group_fn: Optional[Callable[[str], Optional[str]]] = clean_name,
  connection_meta_to_group_fn: Optional[
  Callable[[AirbyteConnectionMetadata], Optional[str]]
  ] = None,
@@ -756,7 +746,7 @@ def load_assets_from_connections(
  Callable[[AirbyteConnectionMetadata, str], AssetKey]
  ] = None,
  connection_to_freshness_policy_fn: Optional[
- Callable[[AirbyteConnectionMetadata], Optional[FreshnessPolicy]]
+ Callable[[AirbyteConnectionMetadata], Optional[LegacyFreshnessPolicy]]
  ] = None,
  ) -> CacheableAssetsDefinition:
  """Loads Airbyte connection assets from a configured AirbyteResource instance, checking against a list of AirbyteConnection objects.
@@ -821,7 +811,7 @@ def load_assets_from_connections(
  check.invariant(
  not connection_meta_to_group_fn
  or not connection_to_group_fn
- or connection_to_group_fn == _clean_name,
+ or connection_to_group_fn == clean_name,
  "Cannot specify both connection_meta_to_group_fn and connection_to_group_fn",
  )

@@ -1,7 +1,8 @@
  import json
  from abc import ABC
+ from collections.abc import Mapping
  from enum import Enum
- from typing import Any, Dict, List, Mapping, Optional, Union
+ from typing import Any, Optional, Union

  import dagster._check as check
  from dagster._annotations import deprecated, public
@@ -24,14 +25,14 @@ class AirbyteSyncMode(ABC):
  def __eq__(self, other: Any) -> bool:
  return isinstance(other, AirbyteSyncMode) and self.to_json() == other.to_json()

- def __init__(self, json_repr: Dict[str, Any]):
+ def __init__(self, json_repr: dict[str, Any]):
  self.json_repr = json_repr

- def to_json(self) -> Dict[str, Any]:
+ def to_json(self) -> dict[str, Any]:
  return self.json_repr

  @classmethod
- def from_json(cls, json_repr: Dict[str, Any]) -> "AirbyteSyncMode":
+ def from_json(cls, json_repr: dict[str, Any]) -> "AirbyteSyncMode":
  return cls(
  {
  k: v
@@ -86,7 +87,7 @@ class AirbyteSyncMode(ABC):
  def incremental_append_dedup(
  cls,
  cursor_field: Optional[str] = None,
- primary_key: Optional[Union[str, List[str]]] = None,
+ primary_key: Optional[Union[str, list[str]]] = None,
  ) -> "AirbyteSyncMode":
  """Syncs new records from the source, appending to an append-only history
  table in the destination. Also generates a deduplicated view mirroring the
@@ -349,11 +350,11 @@ class InitializedAirbyteConnection:
  )


- def _remove_none_values(obj: Dict[str, Any]) -> Dict[str, Any]:
+ def _remove_none_values(obj: dict[str, Any]) -> dict[str, Any]:
  return {k: v for k, v in obj.items() if v is not None}


- def _dump_class(obj: Any) -> Dict[str, Any]:
+ def _dump_class(obj: Any) -> dict[str, Any]:
  return json.loads(json.dumps(obj, default=lambda o: _remove_none_values(o.__dict__)))


dagster_airbyte/ops.py CHANGED
@@ -1,14 +1,15 @@
- from typing import Any, Iterable, List, Optional
+ from collections.abc import Iterable
+ from typing import Any, Optional

  from dagster import Config, In, Nothing, Out, Output, op
  from dagster._core.storage.tags import COMPUTE_KIND_TAG
  from pydantic import Field

+ from dagster_airbyte.legacy_resources import BaseAirbyteResource
+ from dagster_airbyte.resources import DEFAULT_POLL_INTERVAL_SECONDS
  from dagster_airbyte.types import AirbyteOutput
  from dagster_airbyte.utils import _get_attempt, generate_materializations

- from .resources import DEFAULT_POLL_INTERVAL_SECONDS, BaseAirbyteResource
-

  class AirbyteSyncConfig(Config):
  connection_id: str = Field(
@@ -41,7 +42,7 @@ class AirbyteSyncConfig(Config):
  "be yielded when the op executes."
  ),
  )
- asset_key_prefix: List[str] = Field(
+ asset_key_prefix: list[str] = Field(
  ["airbyte"],
  description=(
  "If provided and yield_materializations is True, these components will be used to "