apache-airflow-providers-postgres 6.3.0__py3-none-any.whl → 6.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-postgres might be problematic. See the release advisory for more details.

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "6.3.0"
32
+ __version__ = "6.4.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -41,24 +41,21 @@ class PostgresDialect(Dialect):
41
41
  """
42
42
  if schema is None:
43
43
  table, schema = self.extract_schema_from_table(table)
44
- pk_columns = [
45
- row[0]
46
- for row in self.get_records(
47
- """
48
- select kcu.column_name
49
- from information_schema.table_constraints tco
50
- join information_schema.key_column_usage kcu
51
- on kcu.constraint_name = tco.constraint_name
52
- and kcu.constraint_schema = tco.constraint_schema
53
- and kcu.constraint_name = tco.constraint_name
54
- where tco.constraint_type = 'PRIMARY KEY'
55
- and kcu.table_schema = %s
56
- and kcu.table_name = %s
57
- order by kcu.ordinal_position
58
- """,
59
- (self.unescape_word(schema), self.unescape_word(table)),
60
- )
61
- ]
44
+ table = self.unescape_word(table)
45
+ schema = self.unescape_word(schema) if schema else None
46
+ query = """
47
+ select kcu.column_name
48
+ from information_schema.table_constraints tco
49
+ join information_schema.key_column_usage kcu
50
+ on kcu.constraint_name = tco.constraint_name
51
+ and kcu.constraint_schema = tco.constraint_schema
52
+ and kcu.constraint_name = tco.constraint_name
53
+ where tco.constraint_type = 'PRIMARY KEY'
54
+ and kcu.table_schema = %s
55
+ and kcu.table_name = %s
56
+ order by kcu.ordinal_position
57
+ """
58
+ pk_columns = [row[0] for row in self.get_records(query, (schema, table))]
62
59
  return pk_columns or None
63
60
 
64
61
  @staticmethod
@@ -78,31 +75,27 @@ class PostgresDialect(Dialect):
78
75
  ) -> list[str] | None:
79
76
  if schema is None:
80
77
  table, schema = self.extract_schema_from_table(table)
81
-
82
- column_names = list(
83
- row["name"]
84
- for row in filter(
85
- predicate,
86
- map(
87
- self._to_row,
88
- self.get_records(
89
- """
90
- select column_name,
91
- data_type,
92
- is_nullable,
93
- column_default,
94
- is_generated,
95
- is_identity
96
- from information_schema.columns
97
- where table_schema = %s
98
- and table_name = %s
99
- order by ordinal_position
100
- """,
101
- (self.unescape_word(schema), self.unescape_word(table)),
102
- ),
103
- ),
104
- )
105
- )
78
+ table = self.unescape_word(table)
79
+ schema = self.unescape_word(schema) if schema else None
80
+ query = """
81
+ select column_name,
82
+ data_type,
83
+ is_nullable,
84
+ column_default,
85
+ is_generated,
86
+ is_identity
87
+ from information_schema.columns
88
+ where table_schema = %s
89
+ and table_name = %s
90
+ order by ordinal_position
91
+ """
92
+ column_names = []
93
+ for row in map(
94
+ self._to_row,
95
+ self.get_records(query, (schema, table)),
96
+ ):
97
+ if predicate(row):
98
+ column_names.append(row["name"])
106
99
  self.log.debug("Column names for table '%s': %s", table, column_names)
107
100
  return column_names
108
101
 
@@ -65,4 +65,18 @@ def get_provider_info():
65
65
  "handler": "airflow.providers.postgres.assets.postgres.sanitize_uri",
66
66
  }
67
67
  ],
68
+ "config": {
69
+ "postgres": {
70
+ "description": "Configuration for Postgres hooks and operators.\n",
71
+ "options": {
72
+ "azure_oauth_scope": {
73
+ "description": "The scope to use while retrieving Oauth token for Postgres Flexible Server\nfrom Azure Entra authentication.\n",
74
+ "version_added": "6.4.0",
75
+ "type": "string",
76
+ "example": None,
77
+ "default": "https://ossrdbms-aad.database.windows.net/.default",
78
+ }
79
+ },
80
+ }
81
+ },
68
82
  }
@@ -30,10 +30,12 @@ from more_itertools import chunked
30
30
  from psycopg2.extras import DictCursor, NamedTupleCursor, RealDictCursor, execute_batch
31
31
  from sqlalchemy.engine import URL
32
32
 
33
+ from airflow.configuration import conf
33
34
  from airflow.exceptions import (
34
35
  AirflowException,
35
36
  AirflowOptionalProviderFeatureException,
36
37
  )
38
+ from airflow.providers.common.compat.sdk import Connection
37
39
  from airflow.providers.common.sql.hooks.sql import DbApiHook
38
40
  from airflow.providers.postgres.dialects.postgres import PostgresDialect
39
41
 
@@ -64,11 +66,6 @@ if TYPE_CHECKING:
64
66
  if USE_PSYCOPG3:
65
67
  from psycopg.errors import Diagnostic
66
68
 
67
- try:
68
- from airflow.sdk import Connection
69
- except ImportError:
70
- from airflow.models.connection import Connection # type: ignore[assignment]
71
-
72
69
  CursorType: TypeAlias = DictCursor | RealDictCursor | NamedTupleCursor
73
70
  CursorRow: TypeAlias = dict[str, Any] | tuple[Any, ...]
74
71
 
@@ -156,7 +153,9 @@ class PostgresHook(DbApiHook):
156
153
  "aws_conn_id",
157
154
  "sqlalchemy_scheme",
158
155
  "sqlalchemy_query",
156
+ "azure_conn_id",
159
157
  }
158
+ default_azure_oauth_scope = "https://ossrdbms-aad.database.windows.net/.default"
160
159
 
161
160
  def __init__(
162
161
  self, *args, options: str | None = None, enable_log_db_messages: bool = False, **kwargs
@@ -177,6 +176,8 @@ class PostgresHook(DbApiHook):
177
176
  query = conn.extra_dejson.get("sqlalchemy_query", {})
178
177
  if not isinstance(query, dict):
179
178
  raise AirflowException("The parameter 'sqlalchemy_query' must be of type dict!")
179
+ if conn.extra_dejson.get("iam", False):
180
+ conn.login, conn.password, conn.port = self.get_iam_token(conn)
180
181
  return URL.create(
181
182
  drivername="postgresql+psycopg" if USE_PSYCOPG3 else "postgresql",
182
183
  username=self.__cast_nullable(conn.login, str),
@@ -441,8 +442,14 @@ class PostgresHook(DbApiHook):
441
442
  return PostgresHook._serialize_cell_ppg2(cell, conn)
442
443
 
443
444
  def get_iam_token(self, conn: Connection) -> tuple[str, str, int]:
445
+ """Get the IAM token from different identity providers."""
446
+ if conn.extra_dejson.get("azure_conn_id"):
447
+ return self.get_azure_iam_token(conn)
448
+ return self.get_aws_iam_token(conn)
449
+
450
+ def get_aws_iam_token(self, conn: Connection) -> tuple[str, str, int]:
444
451
  """
445
- Get the IAM token.
452
+ Get the AWS IAM token.
446
453
 
447
454
  This uses AWSHook to retrieve a temporary password to connect to
448
455
  Postgres or Redshift. Port is required. If none is provided, the default
@@ -500,6 +507,36 @@ class PostgresHook(DbApiHook):
500
507
  token = rds_client.generate_db_auth_token(conn.host, port, conn.login)
501
508
  return cast("str", login), cast("str", token), port
502
509
 
510
+ def get_azure_iam_token(self, conn: Connection) -> tuple[str, str, int]:
511
+ """
512
+ Get the Azure IAM token.
513
+
514
+ This uses AzureBaseHook to retrieve an OAUTH token to connect to Postgres.
515
+ Scope for the OAuth token can be set in the config option ``azure_oauth_scope`` under the section ``[postgres]``.
516
+ """
517
+ if TYPE_CHECKING:
518
+ from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
519
+
520
+ azure_conn_id = conn.extra_dejson.get("azure_conn_id", "azure_default")
521
+ try:
522
+ azure_conn = Connection.get(azure_conn_id)
523
+ except AttributeError:
524
+ azure_conn = Connection.get_connection_from_secrets(azure_conn_id) # type: ignore[attr-defined]
525
+ azure_base_hook: AzureBaseHook = azure_conn.get_hook()
526
+ scope = conf.get("postgres", "azure_oauth_scope", fallback=self.default_azure_oauth_scope)
527
+ try:
528
+ token = azure_base_hook.get_token(scope).token
529
+ except AttributeError as e:
530
+ if e.name == "get_token" and e.obj == azure_base_hook:
531
+ raise AttributeError(
532
+ "'AzureBaseHook' object has no attribute 'get_token'. "
533
+ "Please upgrade apache-airflow-providers-microsoft-azure>=12.8.0",
534
+ name=e.name,
535
+ obj=e.obj,
536
+ ) from e
537
+ raise
538
+ return cast("str", conn.login or azure_conn.login), token, conn.port or 5432
539
+
503
540
  def get_table_primary_key(self, table: str, schema: str | None = "public") -> list[str] | None:
504
541
  """
505
542
  Get the table's primary key.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-postgres
3
- Version: 6.3.0
3
+ Version: 6.4.0
4
4
  Summary: Provider package apache-airflow-providers-postgres for Apache Airflow
5
5
  Keywords: airflow-provider,postgres,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,24 +21,27 @@ Classifier: Programming Language :: Python :: 3.12
21
21
  Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
23
  Requires-Dist: apache-airflow>=2.10.0
24
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
24
25
  Requires-Dist: apache-airflow-providers-common-sql>=1.23.0
25
26
  Requires-Dist: psycopg2-binary>=2.9.9; python_version < '3.13'
26
27
  Requires-Dist: psycopg2-binary>=2.9.10; python_version >= '3.13'
27
28
  Requires-Dist: asyncpg>=0.30.0
28
29
  Requires-Dist: apache-airflow-providers-amazon>=2.6.0 ; extra == "amazon"
30
+ Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
29
31
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
30
32
  Requires-Dist: pandas>=2.1.2 ; extra == "pandas" and ( python_version <"3.13")
31
33
  Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
32
34
  Requires-Dist: polars>=1.26.0 ; extra == "polars"
33
35
  Requires-Dist: psycopg[binary]>=3.2.9 ; extra == "psycopg"
34
36
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
35
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/changelog.html
36
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0
37
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.4.0/changelog.html
38
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.4.0
37
39
  Project-URL: Mastodon, https://fosstodon.org/@airflow
38
40
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
39
41
  Project-URL: Source Code, https://github.com/apache/airflow
40
42
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
41
43
  Provides-Extra: amazon
44
+ Provides-Extra: microsoft-azure
42
45
  Provides-Extra: openlineage
43
46
  Provides-Extra: pandas
44
47
  Provides-Extra: polars
@@ -69,7 +72,7 @@ Provides-Extra: psycopg
69
72
 
70
73
  Package ``apache-airflow-providers-postgres``
71
74
 
72
- Release: ``6.3.0``
75
+ Release: ``6.4.0``
73
76
 
74
77
 
75
78
  `PostgreSQL <https://www.postgresql.org/>`__
@@ -82,7 +85,7 @@ This is a provider package for ``postgres`` provider. All classes for this provi
82
85
  are in ``airflow.providers.postgres`` python package.
83
86
 
84
87
  You can find package information and changelog for the provider
85
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/>`_.
88
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.4.0/>`_.
86
89
 
87
90
  Installation
88
91
  ------------
@@ -96,15 +99,16 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
96
99
  Requirements
97
100
  ------------
98
101
 
99
- ======================================= ======================================
100
- PIP package Version required
101
- ======================================= ======================================
102
- ``apache-airflow`` ``>=2.10.0``
103
- ``apache-airflow-providers-common-sql`` ``>=1.23.0``
104
- ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
105
- ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
106
- ``asyncpg`` ``>=0.30.0``
107
- ======================================= ======================================
102
+ ========================================== ======================================
103
+ PIP package Version required
104
+ ========================================== ======================================
105
+ ``apache-airflow`` ``>=2.10.0``
106
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
107
+ ``apache-airflow-providers-common-sql`` ``>=1.23.0``
108
+ ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
109
+ ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
110
+ ``asyncpg`` ``>=0.30.0``
111
+ ========================================== ======================================
108
112
 
109
113
  Cross provider package dependencies
110
114
  -----------------------------------
@@ -119,14 +123,30 @@ You can install such cross-provider dependencies when installing from PyPI. For
119
123
  pip install apache-airflow-providers-postgres[amazon]
120
124
 
121
125
 
122
- ============================================================================================================== ===============
123
- Dependent package Extra
124
- ============================================================================================================== ===============
125
- `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
126
- `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
127
- `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
128
- ============================================================================================================== ===============
126
+ ====================================================================================================================== ===================
127
+ Dependent package Extra
128
+ ====================================================================================================================== ===================
129
+ `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
130
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
131
+ `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
132
+ `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
133
+ `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
134
+ ====================================================================================================================== ===================
135
+
136
+ Optional dependencies
137
+ ----------------------
138
+
139
+ =================== =====================================================================================
140
+ Extra Dependencies
141
+ =================== =====================================================================================
142
+ ``amazon`` ``apache-airflow-providers-amazon>=2.6.0``
143
+ ``microsoft.azure`` ``apache-airflow-providers-microsoft-azure``
144
+ ``openlineage`` ``apache-airflow-providers-openlineage``
145
+ ``pandas`` ``pandas>=2.1.2; python_version <"3.13"``, ``pandas>=2.2.3; python_version >="3.13"``
146
+ ``polars`` ``polars>=1.26.0``
147
+ ``psycopg`` ``psycopg[binary]>=3.2.9``
148
+ =================== =====================================================================================
129
149
 
130
150
  The changelog for the provider package can be found in the
131
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/changelog.html>`_.
151
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.4.0/changelog.html>`_.
132
152
 
@@ -0,0 +1,13 @@
1
+ airflow/providers/postgres/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
+ airflow/providers/postgres/__init__.py,sha256=UitRehE_s7O23y-mxHZndABZfxCBajA-IG0m46lPqUQ,1497
3
+ airflow/providers/postgres/get_provider_info.py,sha256=tLd8Kw5q9bE0XewMKJ4zzwYQoPh_G-E3mmFxK8QzfB0,3269
4
+ airflow/providers/postgres/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
5
+ airflow/providers/postgres/assets/postgres.py,sha256=XNhOJCbOA_soaaiS73JjULMqAM_7PBryhToe8FJREA0,1522
6
+ airflow/providers/postgres/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
+ airflow/providers/postgres/dialects/postgres.py,sha256=qBw5MQngxLJQu0pxeHOKlguPePnAg0-PftxvMnqPZPM,5314
8
+ airflow/providers/postgres/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
9
+ airflow/providers/postgres/hooks/postgres.py,sha256=s-tU1m3UMDojNscsR05X6RI55Pk0KxyKcqFr0vYKXlI,28845
10
+ apache_airflow_providers_postgres-6.4.0.dist-info/entry_points.txt,sha256=dhtJi6PTWHd6BwKhmI4OtSPvQVI_p0yYWI0eba83HqY,104
11
+ apache_airflow_providers_postgres-6.4.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
12
+ apache_airflow_providers_postgres-6.4.0.dist-info/METADATA,sha256=WxXXuHgRst253lVwJh-ySgAkK7NHYIFuMpad6bEvmBI,7679
13
+ apache_airflow_providers_postgres-6.4.0.dist-info/RECORD,,
@@ -1,13 +0,0 @@
1
- airflow/providers/postgres/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/postgres/__init__.py,sha256=EBlBQkd4wsb32h6i--UVow1UxLY3oTKzALQLc1L5W7s,1497
3
- airflow/providers/postgres/get_provider_info.py,sha256=qEEYbClLY3-NH40dBk2u_nOIfvfEIHdXaWsIJ8J5Z68,2626
4
- airflow/providers/postgres/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
5
- airflow/providers/postgres/assets/postgres.py,sha256=XNhOJCbOA_soaaiS73JjULMqAM_7PBryhToe8FJREA0,1522
6
- airflow/providers/postgres/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
- airflow/providers/postgres/dialects/postgres.py,sha256=WhRMbSQv2u373R62e3buE5J1L3j-XGHhZxrWc8Gtc5s,5551
8
- airflow/providers/postgres/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
9
- airflow/providers/postgres/hooks/postgres.py,sha256=YwA99YPz_ZpCvBn5yyw4ykhJ9hMGNvEsMmAf5nsfVE8,26908
10
- apache_airflow_providers_postgres-6.3.0.dist-info/entry_points.txt,sha256=dhtJi6PTWHd6BwKhmI4OtSPvQVI_p0yYWI0eba83HqY,104
11
- apache_airflow_providers_postgres-6.3.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
12
- apache_airflow_providers_postgres-6.3.0.dist-info/METADATA,sha256=ndnpXUFB9vCAeT4wp6MVzJI4mxgr4KxmCO8NUA4qkIk,6283
13
- apache_airflow_providers_postgres-6.3.0.dist-info/RECORD,,