dagster-airbyte 0.26.9__tar.gz → 0.26.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dagster-airbyte might be problematic.

Files changed (32)
  1. {dagster-airbyte-0.26.9/dagster_airbyte.egg-info → dagster-airbyte-0.26.11}/PKG-INFO +1 -1
  2. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/asset_defs.py +17 -17
  3. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/managed/reconciliation.py +9 -9
  4. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/resources.py +18 -16
  5. dagster-airbyte-0.26.11/dagster_airbyte/version.py +1 -0
  6. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11/dagster_airbyte.egg-info}/PKG-INFO +1 -1
  7. dagster-airbyte-0.26.11/dagster_airbyte.egg-info/requires.txt +9 -0
  8. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/setup.py +2 -2
  9. dagster-airbyte-0.26.9/dagster_airbyte/version.py +0 -1
  10. dagster-airbyte-0.26.9/dagster_airbyte.egg-info/requires.txt +0 -9
  11. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/LICENSE +0 -0
  12. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/MANIFEST.in +0 -0
  13. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/README.md +0 -0
  14. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/__init__.py +0 -0
  15. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/asset_decorator.py +0 -0
  16. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/cli.py +0 -0
  17. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/managed/__init__.py +0 -0
  18. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/managed/generated/__init__.py +0 -0
  19. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/managed/generated/destinations.py +0 -0
  20. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/managed/generated/sources.py +0 -0
  21. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/managed/types.py +0 -0
  22. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/ops.py +0 -0
  23. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/py.typed +0 -0
  24. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/translator.py +0 -0
  25. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/types.py +0 -0
  26. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte/utils.py +0 -0
  27. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte.egg-info/SOURCES.txt +0 -0
  28. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte.egg-info/dependency_links.txt +0 -0
  29. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte.egg-info/entry_points.txt +0 -0
  30. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte.egg-info/not-zip-safe +0 -0
  31. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/dagster_airbyte.egg-info/top_level.txt +0 -0
  32. {dagster-airbyte-0.26.9 → dagster-airbyte-0.26.11}/setup.cfg +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagster-airbyte
-Version: 0.26.9
+Version: 0.26.11
 Summary: Package for integrating Airbyte with Dagster.
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-airbyte
 Author: Dagster Labs
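
The three Python modules below all receive the same mechanical change: the type argument passed to typing.cast() is wrapped in quotes. cast() accepts a string forward reference and simply returns its second argument at runtime, so the quoted form avoids evaluating subscripted generics such as list[dict[str, Any]] on every call while type checkers still see the identical annotation. A minimal sketch of the pattern follows; the workspace_ids helper and the payload are illustrative only, not code from the package.

from collections.abc import Mapping
from typing import Any, cast


def workspace_ids(payload: Mapping[str, Any]) -> list[str]:
    # Quoted form: type checkers read the string as a forward reference,
    # while at runtime cast() just returns its second argument, so the
    # expression list[dict[str, Any]] is never built as a runtime object.
    workspaces = cast("list[dict[str, Any]]", payload.get("workspaces", []))
    return [w["workspaceId"] for w in workspaces]


# Hypothetical payload shaped like an Airbyte /workspaces/list response.
print(workspace_ids({"workspaces": [{"workspaceId": "abc-123"}]}))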

dagster_airbyte/asset_defs.py
@@ -179,12 +179,12 @@ def _build_airbyte_assets_from_metadata(
     assets_defn_meta: AssetsDefinitionCacheableData,
     resource_defs: Optional[Mapping[str, ResourceDefinition]],
 ) -> AssetsDefinition:
-    metadata = cast(Mapping[str, Any], assets_defn_meta.extra_metadata)
-    connection_id = cast(str, metadata["connection_id"])
-    group_name = cast(Optional[str], metadata["group_name"])
-    destination_tables = cast(list[str], metadata["destination_tables"])
-    normalization_tables = cast(Mapping[str, list[str]], metadata["normalization_tables"])
-    io_manager_key = cast(Optional[str], metadata["io_manager_key"])
+    metadata = cast("Mapping[str, Any]", assets_defn_meta.extra_metadata)
+    connection_id = cast("str", metadata["connection_id"])
+    group_name = cast("Optional[str]", metadata["group_name"])
+    destination_tables = cast("list[str]", metadata["destination_tables"])
+    normalization_tables = cast("Mapping[str, list[str]]", metadata["normalization_tables"])
+    io_manager_key = cast("Optional[str]", metadata["io_manager_key"])
 
     @multi_asset(
         name=f"airbyte_sync_{connection_id.replace('-', '_')}",
@@ -501,7 +501,7 @@ class AirbyteConnectionMetadata(
     def from_config(
         cls, contents: Mapping[str, Any], destination: Mapping[str, Any]
     ) -> "AirbyteConnectionMetadata":
-        config_contents = cast(Mapping[str, Any], contents.get("configuration"))
+        config_contents = cast("Mapping[str, Any]", contents.get("configuration"))
         check.invariant(
             config_contents is not None, "Airbyte connection config is missing 'configuration' key"
         )
@@ -531,7 +531,7 @@ class AirbyteConnectionMetadata(
         ]
 
         for stream in enabled_streams:
-            name = cast(str, stream.get("stream", {}).get("name"))
+            name = cast("str", stream.get("stream", {}).get("name"))
             prefixed_name = f"{self.stream_prefix}{name}"
 
             schema = (
@@ -567,7 +567,7 @@ def _get_schema_by_table_name(
            [
                (k, v.schema)
                for k, v in cast(
-                    dict[str, AirbyteTableMetadata], meta.normalization_tables
+                    "dict[str, AirbyteTableMetadata]", meta.normalization_tables
                ).items()
            ]
            for meta in stream_table_metadata.values()
@@ -740,7 +740,7 @@ class AirbyteInstanceCacheableAssetsDefinition(AirbyteCoreCacheableAssetsDefinit
         workspace_id = self._workspace_id
         if not workspace_id:
             workspaces = cast(
-                list[dict[str, Any]],
+                "list[dict[str, Any]]",
                 check.not_none(
                     self._airbyte_instance.make_request(endpoint="/workspaces/list", data={})
                 ).get("workspaces", []),
@@ -752,7 +752,7 @@ class AirbyteInstanceCacheableAssetsDefinition(AirbyteCoreCacheableAssetsDefinit
             workspace_id = workspaces[0].get("workspaceId")
 
         connections = cast(
-            list[dict[str, Any]],
+            "list[dict[str, Any]]",
             check.not_none(
                 self._airbyte_instance.make_request(
                     endpoint="/connections/list", data={"workspaceId": workspace_id}
@@ -762,10 +762,10 @@ class AirbyteInstanceCacheableAssetsDefinition(AirbyteCoreCacheableAssetsDefinit
 
         output_connections: list[tuple[str, AirbyteConnectionMetadata]] = []
         for connection_json in connections:
-            connection_id = cast(str, connection_json.get("connectionId"))
+            connection_id = cast("str", connection_json.get("connectionId"))
 
             operations_json = cast(
-                dict[str, Any],
+                "dict[str, Any]",
                 check.not_none(
                     self._airbyte_instance.make_request(
                         endpoint="/operations/list",
@@ -774,9 +774,9 @@ class AirbyteInstanceCacheableAssetsDefinition(AirbyteCoreCacheableAssetsDefinit
                 ),
             )
 
-            destination_id = cast(str, connection_json.get("destinationId"))
+            destination_id = cast("str", connection_json.get("destinationId"))
             destination_json = cast(
-                dict[str, Any],
+                "dict[str, Any]",
                 check.not_none(
                     self._airbyte_instance.make_request(
                         endpoint="/destinations/get",
@@ -850,7 +850,7 @@ class AirbyteYAMLCacheableAssetsDefinition(AirbyteCoreCacheableAssetsDefinition)
             connection_data = yaml.safe_load(f.read())
 
         destination_configuration_path = cast(
-            str, connection_data.get("destination_configuration_path")
+            "str", connection_data.get("destination_configuration_path")
         )
         with open(
             os.path.join(self._project_dir, destination_configuration_path), encoding="utf-8"
@@ -886,7 +886,7 @@ class AirbyteYAMLCacheableAssetsDefinition(AirbyteCoreCacheableAssetsDefinition)
             )
             state_file = state_files[0]
 
-            with open(os.path.join(connection_dir, cast(str, state_file)), encoding="utf-8") as f:
+            with open(os.path.join(connection_dir, cast("str", state_file)), encoding="utf-8") as f:
                 state = yaml.safe_load(f.read())
                 connection_id = state.get("resource_id")
 

dagster_airbyte/managed/reconciliation.py
@@ -212,7 +212,7 @@ def reconcile_sources(
         else:
             if not dry_run:
                 create_result = cast(
-                    dict[str, str],
+                    "dict[str, str]",
                     check.not_none(
                         res.make_request(
                             endpoint="/sources/create",
@@ -300,7 +300,7 @@ def reconcile_destinations(
         else:
             if not dry_run:
                 create_result = cast(
-                    dict[str, str],
+                    "dict[str, str]",
                     check.not_none(
                         res.make_request(
                             endpoint="/destinations/create",
@@ -346,13 +346,13 @@ def reconcile_config(
     workspace_id = res.get_default_workspace()
 
     existing_sources_raw = cast(
-        dict[str, list[dict[str, Any]]],
+        "dict[str, list[dict[str, Any]]]",
         check.not_none(
             res.make_request(endpoint="/sources/list", data={"workspaceId": workspace_id})
         ),
     )
     existing_dests_raw = cast(
-        dict[str, list[dict[str, Any]]],
+        "dict[str, list[dict[str, Any]]]",
         check.not_none(
             res.make_request(endpoint="/destinations/list", data={"workspaceId": workspace_id})
         ),
@@ -422,7 +422,7 @@ def reconcile_normalization(
     existing_basic_norm_op_id = None
     if existing_connection_id:
         operations = cast(
-            dict[str, list[dict[str, str]]],
+            "dict[str, list[dict[str, str]]]",
             check.not_none(
                 res.make_request(
                     endpoint="/operations/list",
@@ -450,7 +450,7 @@
            return existing_basic_norm_op_id
        else:
            return cast(
-                dict[str, str],
+                "dict[str, str]",
                check.not_none(
                    res.make_request(
                        endpoint="/operations/create",
@@ -492,7 +492,7 @@ def reconcile_connections_pre(
     diff = ManagedElementDiff()
 
     existing_connections_raw = cast(
-        dict[str, list[dict[str, Any]]],
+        "dict[str, list[dict[str, Any]]]",
         check.not_none(
             res.make_request(endpoint="/connections/list", data={"workspaceId": workspace_id})
         ),
@@ -537,7 +537,7 @@ def reconcile_connections_post(
 ) -> None:
     """Creates new and modifies existing connections based on the config if dry_run is False."""
     existing_connections_raw = cast(
-        dict[str, list[dict[str, Any]]],
+        "dict[str, list[dict[str, Any]]]",
         check.not_none(
             res.make_request(endpoint="/connections/list", data={"workspaceId": workspace_id})
         ),
@@ -592,7 +592,7 @@ def reconcile_connections_post(
             connection_base_json["namespaceDefinition"] = config_conn.destination_namespace.value
         else:
             connection_base_json["namespaceDefinition"] = "customformat"
-            connection_base_json["namespaceFormat"] = cast(str, config_conn.destination_namespace)
+            connection_base_json["namespaceFormat"] = cast("str", config_conn.destination_namespace)
 
         if config_conn.prefix:
             connection_base_json["prefix"] = config_conn.prefix

dagster_airbyte/resources.py
@@ -217,8 +217,8 @@ class BaseAirbyteResource(ConfigurableResource):
         """
         connection_details = self.get_connection_details(connection_id)
         job_details = self.start_sync(connection_id)
-        job_info = cast(dict[str, object], job_details.get("job", {}))
-        job_id = cast(int, job_info.get("id"))
+        job_info = cast("dict[str, object]", job_details.get("job", {}))
+        job_id = cast("int", job_info.get("id"))
 
         self._log.info(f"Job {job_id} initialized for connection_id={connection_id}.")
         start = time.monotonic()
@@ -235,7 +235,7 @@ class BaseAirbyteResource(ConfigurableResource):
                    )
                time.sleep(poll_interval or self.poll_interval)
                job_details = self.get_job_status(connection_id, job_id)
-                attempts = cast(list, job_details.get("attempts", []))
+                attempts = cast("list", job_details.get("attempts", []))
                cur_attempt = len(attempts)
                # spit out the available Airbyte log info
                if cur_attempt:
@@ -252,7 +252,7 @@ class BaseAirbyteResource(ConfigurableResource):
                        logged_lines = 0
                        logged_attempts += 1
 
-                job_info = cast(dict[str, object], job_details.get("job", {}))
+                job_info = cast("dict[str, object]", job_details.get("job", {}))
                state = job_info.get("status")
 
                if state in (
@@ -577,7 +577,7 @@ class AirbyteResource(BaseAirbyteResource):
 
     def get_default_workspace(self) -> str:
         workspaces = cast(
-            list[dict[str, Any]],
+            "list[dict[str, Any]]",
             check.not_none(self.make_request_cached(endpoint="/workspaces/list", data={})).get(
                 "workspaces", []
             ),
@@ -589,7 +589,7 @@
         definitions = check.not_none(
             self.make_request_cached(endpoint="/source_definitions/list", data={})
         )
-        source_definitions = cast(list[dict[str, Any]], definitions["sourceDefinitions"])
+        source_definitions = cast("list[dict[str, Any]]", definitions["sourceDefinitions"])
 
         return next(
             (
@@ -603,7 +603,7 @@
     def get_destination_definition_by_name(self, name: str):
         name_lower = name.lower()
         definitions = cast(
-            dict[str, list[dict[str, str]]],
+            "dict[str, list[dict[str, str]]]",
             check.not_none(
                 self.make_request_cached(endpoint="/destination_definitions/list", data={})
             ),
@@ -619,7 +619,7 @@
 
     def get_source_catalog_id(self, source_id: str):
         result = cast(
-            dict[str, Any],
+            "dict[str, Any]",
             check.not_none(
                 self.make_request(endpoint="/sources/discover_schema", data={"sourceId": source_id})
             ),
@@ -628,7 +628,7 @@
 
     def get_source_schema(self, source_id: str) -> Mapping[str, Any]:
         return cast(
-            dict[str, Any],
+            "dict[str, Any]",
             check.not_none(
                 self.make_request(endpoint="/sources/discover_schema", data={"sourceId": source_id})
             ),
@@ -640,7 +640,7 @@
        # Airbyte API changed source of truth for normalization in PR
        # https://github.com/airbytehq/airbyte/pull/21005
        norm_dest_def_spec: bool = cast(
-            dict[str, Any],
+            "dict[str, Any]",
            check.not_none(
                self.make_request_cached(
                    endpoint="/destination_definition_specifications/get",
@@ -654,7 +654,7 @@
 
        norm_dest_def: bool = (
            cast(
-                dict[str, Any],
+                "dict[str, Any]",
                check.not_none(
                    self.make_request_cached(
                        endpoint="/destination_definitions/get",
@@ -687,7 +687,9 @@
                },
            )
        )
-        job = next((job for job in cast(list, out["jobs"]) if job["job"]["id"] == job_id), None)
+        job = next(
+            (job for job in cast("list", out["jobs"]) if job["job"]["id"] == job_id), None
+        )
 
        return check.not_none(job)
 
@@ -722,8 +724,8 @@
         """
         connection_details = self.get_connection_details(connection_id)
         job_details = self.start_sync(connection_id)
-        job_info = cast(dict[str, object], job_details.get("job", {}))
-        job_id = cast(int, job_info.get("id"))
+        job_info = cast("dict[str, object]", job_details.get("job", {}))
+        job_id = cast("int", job_info.get("id"))
 
         self._log.info(f"Job {job_id} initialized for connection_id={connection_id}.")
         start = time.monotonic()
@@ -740,7 +742,7 @@
                    )
                time.sleep(poll_interval or self.poll_interval)
                job_details = self.get_job_status(connection_id, job_id)
-                attempts = cast(list, job_details.get("attempts", []))
+                attempts = cast("list", job_details.get("attempts", []))
                cur_attempt = len(attempts)
                # spit out the available Airbyte log info
                if cur_attempt:
@@ -757,7 +759,7 @@
                        logged_lines = 0
                        logged_attempts += 1
 
-                job_info = cast(dict[str, object], job_details.get("job", {}))
+                job_info = cast("dict[str, object]", job_details.get("job", {}))
                state = job_info.get("status")
 
                if state in (
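
For orientation, the resources.py hunks above sit inside the resource's sync-and-poll logic, which starts a job, then repeatedly fetches its status and sleeps between requests until a terminal state (or an optional timeout) is reached. A stripped-down sketch of that loop shape, with simplified names and illustrative terminal states rather than the package's actual implementation:

import time
from collections.abc import Callable, Mapping
from typing import Any, Optional


def poll_until_done(
    get_status: Callable[[int], Mapping[str, Any]],
    job_id: int,
    poll_interval: float = 10.0,
    poll_timeout: Optional[float] = None,
) -> Mapping[str, Any]:
    # Repeatedly ask for the job status, sleeping between requests, and
    # stop once the job reports a terminal status or the timeout expires.
    start = time.monotonic()
    while True:
        if poll_timeout is not None and time.monotonic() - start > poll_timeout:
            raise TimeoutError(f"Job {job_id} did not finish within {poll_timeout}s")
        details = get_status(job_id)
        status = details.get("job", {}).get("status")
        if status in ("succeeded", "failed", "cancelled"):  # illustrative terminal states
            return details
        time.sleep(poll_interval)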

dagster-airbyte-0.26.11/dagster_airbyte/version.py (new file)
@@ -0,0 +1 @@
+__version__ = "0.26.11"

dagster_airbyte.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagster-airbyte
-Version: 0.26.9
+Version: 0.26.11
 Summary: Package for integrating Airbyte with Dagster.
 Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-airbyte
 Author: Dagster Labs

dagster-airbyte-0.26.11/dagster_airbyte.egg-info/requires.txt (new file)
@@ -0,0 +1,9 @@
+dagster==1.10.11
+requests
+
+[managed]
+dagster-managed-elements==0.26.11
+
+[test]
+requests-mock
+flaky

setup.py
@@ -36,7 +36,7 @@ setup(
     include_package_data=True,
     python_requires=">=3.9,<3.13",
     install_requires=[
-        "dagster==1.10.9",
+        "dagster==1.10.11",
         "requests",
     ],
     zip_safe=False,
@@ -51,7 +51,7 @@ setup(
             "flaky",
         ],
         "managed": [
-            "dagster-managed-elements==0.26.9",
+            "dagster-managed-elements==0.26.11",
         ],
     },
 )

dagster-airbyte-0.26.9/dagster_airbyte/version.py (deleted)
@@ -1 +0,0 @@
-__version__ = "0.26.9"

dagster-airbyte-0.26.9/dagster_airbyte.egg-info/requires.txt (deleted)
@@ -1,9 +0,0 @@
-dagster==1.10.9
-requests
-
-[managed]
-dagster-managed-elements==0.26.9
-
-[test]
-requests-mock
-flaky