apache-airflow-providers-postgres 6.3.0rc1__tar.gz → 6.5.2rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. apache_airflow_providers_postgres-6.5.2rc1/NOTICE +5 -0
  2. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/PKG-INFO +49 -24
  3. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/README.rst +37 -19
  4. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/changelog.rst +83 -7
  5. apache_airflow_providers_postgres-6.5.2rc1/docs/configurations-ref.rst +19 -0
  6. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/connections/postgres.rst +14 -1
  7. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/index.rst +24 -20
  8. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/provider.yaml +20 -1
  9. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/pyproject.toml +15 -5
  10. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/__init__.py +3 -3
  11. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/dialects/postgres.py +36 -43
  12. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/get_provider_info.py +14 -0
  13. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/hooks/postgres.py +52 -11
  14. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/assets/test_postgres.py +8 -3
  15. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/hooks/test_postgres.py +62 -20
  16. {apache_airflow_providers_postgres-6.3.0rc1/src/airflow/providers/postgres → apache_airflow_providers_postgres-6.5.2rc1}/LICENSE +0 -0
  17. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/.latest-doc-only-change.txt +0 -0
  18. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/commits.rst +0 -0
  19. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/conf.py +0 -0
  20. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/dialects.rst +0 -0
  21. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/installing-providers-from-sources.rst +0 -0
  22. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/integration-logos/Postgres.png +0 -0
  23. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/operators.rst +0 -0
  24. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/redirects.txt +0 -0
  25. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/docs/security.rst +0 -0
  26. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/__init__.py +0 -0
  27. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/__init__.py +0 -0
  28. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/assets/__init__.py +0 -0
  29. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/assets/postgres.py +0 -0
  30. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/dialects/__init__.py +0 -0
  31. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/src/airflow/providers/postgres/hooks/__init__.py +0 -0
  32. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/conftest.py +0 -0
  33. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/system/__init__.py +0 -0
  34. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/system/postgres/__init__.py +0 -0
  35. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/system/postgres/example_postgres.py +0 -0
  36. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/__init__.py +0 -0
  37. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/__init__.py +0 -0
  38. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/assets/__init__.py +0 -0
  39. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/dialects/__init__.py +0 -0
  40. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/dialects/test_postgres.py +0 -0
  41. {apache_airflow_providers_postgres-6.3.0rc1 → apache_airflow_providers_postgres-6.5.2rc1}/tests/unit/postgres/hooks/__init__.py +0 -0
@@ -0,0 +1,5 @@
1
+ Apache Airflow
2
+ Copyright 2016-2026 The Apache Software Foundation
3
+
4
+ This product includes software developed at
5
+ The Apache Software Foundation (http://www.apache.org/).
@@ -1,12 +1,13 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-postgres
3
- Version: 6.3.0rc1
3
+ Version: 6.5.2rc1
4
4
  Summary: Provider package apache-airflow-providers-postgres for Apache Airflow
5
5
  Keywords: airflow-provider,postgres,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
8
  Requires-Python: >=3.10
9
9
  Description-Content-Type: text/x-rst
10
+ License-Expression: Apache-2.0
10
11
  Classifier: Development Status :: 5 - Production/Stable
11
12
  Classifier: Environment :: Console
12
13
  Classifier: Environment :: Web Environment
@@ -14,35 +15,41 @@ Classifier: Intended Audience :: Developers
14
15
  Classifier: Intended Audience :: System Administrators
15
16
  Classifier: Framework :: Apache Airflow
16
17
  Classifier: Framework :: Apache Airflow :: Provider
17
- Classifier: License :: OSI Approved :: Apache Software License
18
18
  Classifier: Programming Language :: Python :: 3.10
19
19
  Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
21
  Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
- Requires-Dist: apache-airflow>=2.10.0rc1
23
+ License-File: LICENSE
24
+ License-File: NOTICE
25
+ Requires-Dist: apache-airflow>=2.11.0rc1
26
+ Requires-Dist: apache-airflow-providers-common-compat>=1.12.0rc1
24
27
  Requires-Dist: apache-airflow-providers-common-sql>=1.23.0rc1
25
28
  Requires-Dist: psycopg2-binary>=2.9.9; python_version < '3.13'
26
29
  Requires-Dist: psycopg2-binary>=2.9.10; python_version >= '3.13'
27
30
  Requires-Dist: asyncpg>=0.30.0
28
31
  Requires-Dist: apache-airflow-providers-amazon>=2.6.0rc1 ; extra == "amazon"
32
+ Requires-Dist: apache-airflow-providers-microsoft-azure>=12.8.0rc1 ; extra == "microsoft-azure"
29
33
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
30
34
  Requires-Dist: pandas>=2.1.2 ; extra == "pandas" and ( python_version <"3.13")
31
35
  Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
32
36
  Requires-Dist: polars>=1.26.0 ; extra == "polars"
33
37
  Requires-Dist: psycopg[binary]>=3.2.9 ; extra == "psycopg"
38
+ Requires-Dist: sqlalchemy>=1.4.49 ; extra == "sqlalchemy"
34
39
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
35
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.3.0/changelog.html
36
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.3.0
40
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.5.2/changelog.html
41
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.5.2
37
42
  Project-URL: Mastodon, https://fosstodon.org/@airflow
38
43
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
39
44
  Project-URL: Source Code, https://github.com/apache/airflow
40
45
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
41
46
  Provides-Extra: amazon
47
+ Provides-Extra: microsoft-azure
42
48
  Provides-Extra: openlineage
43
49
  Provides-Extra: pandas
44
50
  Provides-Extra: polars
45
51
  Provides-Extra: psycopg
52
+ Provides-Extra: sqlalchemy
46
53
 
47
54
 
48
55
  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -69,7 +76,7 @@ Provides-Extra: psycopg
69
76
 
70
77
  Package ``apache-airflow-providers-postgres``
71
78
 
72
- Release: ``6.3.0``
79
+ Release: ``6.5.2``
73
80
 
74
81
 
75
82
  `PostgreSQL <https://www.postgresql.org/>`__
@@ -82,7 +89,7 @@ This is a provider package for ``postgres`` provider. All classes for this provi
82
89
  are in ``airflow.providers.postgres`` python package.
83
90
 
84
91
  You can find package information and changelog for the provider
85
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/>`_.
92
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.5.2/>`_.
86
93
 
87
94
  Installation
88
95
  ------------
@@ -96,15 +103,16 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
96
103
  Requirements
97
104
  ------------
98
105
 
99
- ======================================= ======================================
100
- PIP package Version required
101
- ======================================= ======================================
102
- ``apache-airflow`` ``>=2.10.0``
103
- ``apache-airflow-providers-common-sql`` ``>=1.23.0``
104
- ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
105
- ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
106
- ``asyncpg`` ``>=0.30.0``
107
- ======================================= ======================================
106
+ ========================================== ======================================
107
+ PIP package Version required
108
+ ========================================== ======================================
109
+ ``apache-airflow`` ``>=2.11.0``
110
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
111
+ ``apache-airflow-providers-common-sql`` ``>=1.23.0``
112
+ ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
113
+ ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
114
+ ``asyncpg`` ``>=0.30.0``
115
+ ========================================== ======================================
108
116
 
109
117
  Cross provider package dependencies
110
118
  -----------------------------------
@@ -119,14 +127,31 @@ You can install such cross-provider dependencies when installing from PyPI. For
119
127
  pip install apache-airflow-providers-postgres[amazon]
120
128
 
121
129
 
122
- ============================================================================================================== ===============
123
- Dependent package Extra
124
- ============================================================================================================== ===============
125
- `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
126
- `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
127
- `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
128
- ============================================================================================================== ===============
130
+ ====================================================================================================================== ===================
131
+ Dependent package Extra
132
+ ====================================================================================================================== ===================
133
+ `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
134
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
135
+ `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
136
+ `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
137
+ `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
138
+ ====================================================================================================================== ===================
139
+
140
+ Optional dependencies
141
+ ----------------------
142
+
143
+ =================== =====================================================================================
144
+ Extra Dependencies
145
+ =================== =====================================================================================
146
+ ``sqlalchemy`` ``sqlalchemy>=1.4.49``
147
+ ``amazon`` ``apache-airflow-providers-amazon>=2.6.0``
148
+ ``microsoft.azure`` ``apache-airflow-providers-microsoft-azure>=12.8.0``
149
+ ``openlineage`` ``apache-airflow-providers-openlineage``
150
+ ``pandas`` ``pandas>=2.1.2; python_version <"3.13"``, ``pandas>=2.2.3; python_version >="3.13"``
151
+ ``polars`` ``polars>=1.26.0``
152
+ ``psycopg`` ``psycopg[binary]>=3.2.9``
153
+ =================== =====================================================================================
129
154
 
130
155
  The changelog for the provider package can be found in the
131
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/changelog.html>`_.
156
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.5.2/changelog.html>`_.
132
157
 
@@ -23,7 +23,7 @@
23
23
 
24
24
  Package ``apache-airflow-providers-postgres``
25
25
 
26
- Release: ``6.3.0``
26
+ Release: ``6.5.2``
27
27
 
28
28
 
29
29
  `PostgreSQL <https://www.postgresql.org/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``postgres`` provider. All classes for this provi
36
36
  are in ``airflow.providers.postgres`` python package.
37
37
 
38
38
  You can find package information and changelog for the provider
39
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/>`_.
39
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.5.2/>`_.
40
40
 
41
41
  Installation
42
42
  ------------
@@ -50,15 +50,16 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
50
50
  Requirements
51
51
  ------------
52
52
 
53
- ======================================= ======================================
54
- PIP package Version required
55
- ======================================= ======================================
56
- ``apache-airflow`` ``>=2.10.0``
57
- ``apache-airflow-providers-common-sql`` ``>=1.23.0``
58
- ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
59
- ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
60
- ``asyncpg`` ``>=0.30.0``
61
- ======================================= ======================================
53
+ ========================================== ======================================
54
+ PIP package Version required
55
+ ========================================== ======================================
56
+ ``apache-airflow`` ``>=2.11.0``
57
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
58
+ ``apache-airflow-providers-common-sql`` ``>=1.23.0``
59
+ ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
60
+ ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
61
+ ``asyncpg`` ``>=0.30.0``
62
+ ========================================== ======================================
62
63
 
63
64
  Cross provider package dependencies
64
65
  -----------------------------------
@@ -73,13 +74,30 @@ You can install such cross-provider dependencies when installing from PyPI. For
73
74
  pip install apache-airflow-providers-postgres[amazon]
74
75
 
75
76
 
76
- ============================================================================================================== ===============
77
- Dependent package Extra
78
- ============================================================================================================== ===============
79
- `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
80
- `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
81
- `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
82
- ============================================================================================================== ===============
77
+ ====================================================================================================================== ===================
78
+ Dependent package Extra
79
+ ====================================================================================================================== ===================
80
+ `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
81
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
82
+ `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
83
+ `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
84
+ `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
85
+ ====================================================================================================================== ===================
86
+
87
+ Optional dependencies
88
+ ----------------------
89
+
90
+ =================== =====================================================================================
91
+ Extra Dependencies
92
+ =================== =====================================================================================
93
+ ``sqlalchemy`` ``sqlalchemy>=1.4.49``
94
+ ``amazon`` ``apache-airflow-providers-amazon>=2.6.0``
95
+ ``microsoft.azure`` ``apache-airflow-providers-microsoft-azure>=12.8.0``
96
+ ``openlineage`` ``apache-airflow-providers-openlineage``
97
+ ``pandas`` ``pandas>=2.1.2; python_version <"3.13"``, ``pandas>=2.2.3; python_version >="3.13"``
98
+ ``polars`` ``polars>=1.26.0``
99
+ ``psycopg`` ``psycopg[binary]>=3.2.9``
100
+ =================== =====================================================================================
83
101
 
84
102
  The changelog for the provider package can be found in the
85
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.3.0/changelog.html>`_.
103
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-postgres/6.5.2/changelog.html>`_.
@@ -27,11 +27,90 @@
27
27
  Changelog
28
28
  ---------
29
29
 
30
- 6.3.0
30
+ 6.5.2
31
+ .....
32
+
33
+ Misc
34
+ ~~~~
35
+
36
+ * ``Consume ''AirflowOptionalProviderFeatureException'' from compat sdk in providers (#60335)``
37
+ * ``New year means updated Copyright notices (#60344)``
38
+ * ``Make SQLAlchemy optional for Postgres provider (#60257)``
39
+ * ``Migrate postgres provider to use airflow.sdk.configuration.conf (#59984)``
40
+
41
+ .. Below changes are excluded from the changelog. Move them to
42
+ appropriate section above if needed. Do not delete the lines(!):
43
+
44
+ 6.5.1
31
45
  .....
32
46
 
47
+ Misc
48
+ ~~~~
49
+
50
+ * ``Add backcompat for exceptions in providers (#58727)``
51
+
52
+ .. Below changes are excluded from the changelog. Move them to
53
+ appropriate section above if needed. Do not delete the lines(!):
54
+
55
+ 6.5.0
56
+ .....
57
+
58
+ .. note::
59
+ This release of provider is only available for Airflow 2.11+ as explained in the
60
+ `Apache Airflow providers support policy <https://github.com/apache/airflow/blob/main/PROVIDERS.rst#minimum-supported-version-of-airflow-for-community-managed-providers>`_.
61
+
62
+ Misc
63
+ ~~~~
64
+
65
+ * ``Bump minimum Airflow version in providers to Airflow 2.11.0 (#58612)``
66
+
67
+ .. Below changes are excluded from the changelog. Move them to
68
+ appropriate section above if needed. Do not delete the lines(!):
69
+ * ``Updates to release process of providers (#58316)``
70
+ * ``Remove SDK reference for NOTSET in Airflow Core (#58258)``
71
+
72
+ 6.4.1
73
+ .....
74
+
75
+ Misc
76
+ ~~~~
77
+
78
+ * ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
79
+ * ``better error handling in SnowflakeHook and PostgresHook when old version of AzureBaseHook (#57184)``
80
+
81
+ .. Below changes are excluded from the changelog. Move them to
82
+ appropriate section above if needed. Do not delete the lines(!):
83
+ * ``Delete all unnecessary LICENSE Files (#58191)``
84
+ * ``Enable ruff PLW2101,PLW2901,PLW3301 rule (#57700)``
85
+ * ``Enable PT006 rule to postgres Provider test (#57934)``
86
+ * ``Fix code formatting via ruff preview (#57641)``
87
+
88
+ 6.4.0
89
+ .....
90
+
91
+ Features
92
+ ~~~~~~~~
93
+
94
+ * ``Add Azure IAM/Entra ID support for PostgresHook (#55729)``
95
+
96
+ Misc
97
+ ~~~~
98
+
99
+ * ``fix mypy type errors in common/sql provider for sqlalchemy 2 upgrade (#56824)``
100
+ * ``Migrate postgres provider to ''common.compat'' (#57022)``
101
+
102
+ Doc-only
103
+ ~~~~~~~~
104
+
105
+ * ``Remove placeholder Release Date in changelog and index files (#56056)``
106
+
107
+ .. Below changes are excluded from the changelog. Move them to
108
+ appropriate section above if needed. Do not delete the lines(!):
109
+ * ``Enable PT011 rule to prvoider tests (#55980)``
110
+
111
+ 6.3.0
112
+ .....
33
113
 
34
- Release Date: ``|PypiReleaseDate|``
35
114
 
36
115
  Features
37
116
  ~~~~~~~~
@@ -58,8 +137,6 @@ Doc-only
58
137
  .. Below changes are excluded from the changelog. Move them to
59
138
  appropriate section above if needed. Do not delete the lines(!):
60
139
  * ``Switch pre-commit to prek (#54258)``
61
-
62
- .. Review and move the new changes to one of the sections above:
63
140
  * ``Fix Airflow 2 reference in README/index of providers (#55240)``
64
141
 
65
142
  6.2.3
@@ -235,8 +312,6 @@ Misc
235
312
  .. Below changes are excluded from the changelog. Move them to
236
313
  appropriate section above if needed. Do not delete the lines(!):
237
314
  * ``Use Python 3.9 as target version for Ruff & Black rules (#44298)``
238
-
239
- .. Review and move the new changes to one of the sections above:
240
315
  * ``Update path of example dags in docs (#45069)``
241
316
  * ``Allow configuration of sqlalchemy query parameter for JdbcHook and PostgresHook through extras (#44910)``
242
317
 
@@ -607,7 +682,8 @@ Misc
607
682
 
608
683
  * ``Add common-sql lower bound for common-sql (#25789)``
609
684
 
610
- .. Review and move the new changes to one of the sections above:
685
+ .. Below changes are excluded from the changelog. Move them to
686
+ appropriate section above if needed. Do not delete the lines(!):
611
687
  * ``Rename schema to database in 'PostgresHook' (#26436)``
612
688
  * ``Revert "Rename schema to database in 'PostgresHook' (#26436)" (#26734)``
613
689
  * ``Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)``
@@ -0,0 +1,19 @@
1
+ .. Licensed to the Apache Software Foundation (ASF) under one
2
+ or more contributor license agreements. See the NOTICE file
3
+ distributed with this work for additional information
4
+ regarding copyright ownership. The ASF licenses this file
5
+ to you under the Apache License, Version 2.0 (the
6
+ "License"); you may not use this file except in compliance
7
+ with the License. You may obtain a copy of the License at
8
+
9
+ .. http://www.apache.org/licenses/LICENSE-2.0
10
+
11
+ .. Unless required by applicable law or agreed to in writing,
12
+ software distributed under the License is distributed on an
13
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ KIND, either express or implied. See the License for the
15
+ specific language governing permissions and limitations
16
+ under the License.
17
+
18
+ .. include:: /../../../devel-common/src/sphinx_exts/includes/providers-configurations-ref.rst
19
+ .. include:: /../../../devel-common/src/sphinx_exts/includes/sections-and-options.rst
@@ -96,7 +96,9 @@ Extra (optional)
96
96
  * ``iam`` - If set to ``True`` then use AWS IAM database authentication for
97
97
  `Amazon RDS <https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.IAMDBAuth.html>`__,
98
98
  `Amazon Aurora <https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/UsingWithRDS.IAMDBAuth.html>`__
99
- or `Amazon Redshift <https://docs.aws.amazon.com/redshift/latest/mgmt/generating-user-credentials.html>`__.
99
+ `Amazon Redshift <https://docs.aws.amazon.com/redshift/latest/mgmt/generating-user-credentials.html>`__
100
+ or use Microsoft Entra Authentication for
101
+ `Azure Postgres Flexible Server <https://learn.microsoft.com/en-us/azure/postgresql/flexible-server/security-entra-concepts>`__.
100
102
  * ``aws_conn_id`` - AWS Connection ID which use for authentication via AWS IAM,
101
103
  if not specified then **aws_default** is used.
102
104
  * ``redshift`` - Used when AWS IAM database authentication enabled.
@@ -104,6 +106,8 @@ Extra (optional)
104
106
  * ``cluster-identifier`` - The unique identifier of the Amazon Redshift Cluster that contains the database
105
107
  for which you are requesting credentials. This parameter is case sensitive.
106
108
  If not specified then hostname from **Connection Host** is used.
109
+ * ``azure_conn_id`` - Azure Connection ID to be used for authentication via Azure Entra ID. Azure Oauth token
110
+ is retrieved from the Azure connection, which is used as the password for the PostgreSQL connection. The scope for the Azure OAuth token can be set in the config option ``azure_oauth_scope`` under the section ``[postgres]``. Requires ``apache-airflow-providers-microsoft-azure>=12.8.0``.
107
111
 
108
112
  Example "extras" field (Amazon RDS PostgreSQL or Amazon Aurora PostgreSQL):
109
113
 
@@ -125,6 +129,15 @@ Extra (optional)
125
129
  "cluster-identifier": "awesome-redshift-identifier"
126
130
  }
127
131
 
132
+ Example "extras" field (to use Azure Entra Authentication for Postgres Flexible Server):
133
+
134
+ .. code-block:: json
135
+
136
+ {
137
+ "iam": true,
138
+ "azure_conn_id": "azure_default_conn"
139
+ }
140
+
128
141
  When specifying the connection as URI (in :envvar:`AIRFLOW_CONN_{CONN_ID}` variable) you should specify it
129
142
  following the standard syntax of DB connections, where extras are passed as parameters
130
143
  of the URI (note that all components of the URI should be URL-encoded).
@@ -41,6 +41,7 @@
41
41
  :maxdepth: 1
42
42
  :caption: References
43
43
 
44
+ Configuration <configurations-ref>
44
45
  Python API <_api/airflow/providers/postgres/index>
45
46
  Dialects <dialects>
46
47
 
@@ -77,7 +78,7 @@ apache-airflow-providers-postgres package
77
78
  `PostgreSQL <https://www.postgresql.org/>`__
78
79
 
79
80
 
80
- Release: 6.3.0
81
+ Release: 6.5.2
81
82
 
82
83
  Provider package
83
84
  ----------------
@@ -95,17 +96,18 @@ For the minimum Airflow version supported, see ``Requirements`` below.
95
96
  Requirements
96
97
  ------------
97
98
 
98
- The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
99
+ The minimum Apache Airflow version supported by this provider distribution is ``2.11.0``.
99
100
 
100
- ======================================= ======================================
101
- PIP package Version required
102
- ======================================= ======================================
103
- ``apache-airflow`` ``>=2.10.0``
104
- ``apache-airflow-providers-common-sql`` ``>=1.23.0``
105
- ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
106
- ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
107
- ``asyncpg`` ``>=0.30.0``
108
- ======================================= ======================================
101
+ ========================================== ======================================
102
+ PIP package Version required
103
+ ========================================== ======================================
104
+ ``apache-airflow`` ``>=2.11.0``
105
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
106
+ ``apache-airflow-providers-common-sql`` ``>=1.23.0``
107
+ ``psycopg2-binary`` ``>=2.9.9; python_version < "3.13"``
108
+ ``psycopg2-binary`` ``>=2.9.10; python_version >= "3.13"``
109
+ ``asyncpg`` ``>=0.30.0``
110
+ ========================================== ======================================
109
111
 
110
112
  Cross provider package dependencies
111
113
  -----------------------------------
@@ -120,13 +122,15 @@ You can install such cross-provider dependencies when installing from PyPI. For
120
122
  pip install apache-airflow-providers-postgres[amazon]
121
123
 
122
124
 
123
- ============================================================================================================== ===============
124
- Dependent package Extra
125
- ============================================================================================================== ===============
126
- `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
127
- `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
128
- `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
129
- ============================================================================================================== ===============
125
+ ====================================================================================================================== ===================
126
+ Dependent package Extra
127
+ ====================================================================================================================== ===================
128
+ `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
129
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
130
+ `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
131
+ `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
132
+ `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
133
+ ====================================================================================================================== ===================
130
134
 
131
135
  Downloading official packages
132
136
  -----------------------------
@@ -134,5 +138,5 @@ Downloading official packages
134
138
  You can download officially released packages and verify their checksums and signatures from the
135
139
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
136
140
 
137
- * `The apache-airflow-providers-postgres 6.3.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.3.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.3.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.3.0.tar.gz.sha512>`__)
138
- * `The apache-airflow-providers-postgres 6.3.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.3.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.3.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.3.0-py3-none-any.whl.sha512>`__)
141
+ * `The apache-airflow-providers-postgres 6.5.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.5.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.5.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.5.2.tar.gz.sha512>`__)
142
+ * `The apache-airflow-providers-postgres 6.5.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.5.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.5.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_postgres-6.5.2-py3-none-any.whl.sha512>`__)
@@ -22,12 +22,17 @@ description: |
22
22
  `PostgreSQL <https://www.postgresql.org/>`__
23
23
 
24
24
  state: ready
25
- source-date-epoch: 1756877337
25
+ source-date-epoch: 1768335516
26
26
  # Note that those versions are maintained by release manager - do not update them manually
27
27
  # with the exception of case where other provider in sources has >= new provider version.
28
28
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
29
29
  # to be done in the same PR
30
30
  versions:
31
+ - 6.5.2
32
+ - 6.5.1
33
+ - 6.5.0
34
+ - 6.4.1
35
+ - 6.4.0
31
36
  - 6.3.0
32
37
  - 6.2.3
33
38
  - 6.2.2
@@ -109,3 +114,17 @@ asset-uris:
109
114
  dataset-uris:
110
115
  - schemes: [postgres, postgresql]
111
116
  handler: airflow.providers.postgres.assets.postgres.sanitize_uri
117
+
118
+ config:
119
+ postgres:
120
+ description: |
121
+ Configuration for Postgres hooks and operators.
122
+ options:
123
+ azure_oauth_scope:
124
+ description: |
125
+ The scope to use while retrieving Oauth token for Postgres Flexible Server
126
+ from Azure Entra authentication.
127
+ version_added: 6.4.0
128
+ type: string
129
+ example: ~
130
+ default: "https://ossrdbms-aad.database.windows.net/.default"
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-postgres"
28
- version = "6.3.0rc1"
28
+ version = "6.5.2rc1"
29
29
  description = "Provider package apache-airflow-providers-postgres for Apache Airflow"
30
30
  readme = "README.rst"
31
+ license = "Apache-2.0"
32
+ license-files = ['LICENSE', 'NOTICE']
31
33
  authors = [
32
34
  {name="Apache Software Foundation", email="dev@airflow.apache.org"},
33
35
  ]
@@ -43,7 +45,6 @@ classifiers = [
43
45
  "Intended Audience :: System Administrators",
44
46
  "Framework :: Apache Airflow",
45
47
  "Framework :: Apache Airflow :: Provider",
46
- "License :: OSI Approved :: Apache Software License",
47
48
  "Programming Language :: Python :: 3.10",
48
49
  "Programming Language :: Python :: 3.11",
49
50
  "Programming Language :: Python :: 3.12",
@@ -57,7 +58,8 @@ requires-python = ">=3.10"
57
58
  # Make sure to run ``prek update-providers-dependencies --all-files``
58
59
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
59
60
  dependencies = [
60
- "apache-airflow>=2.10.0rc1",
61
+ "apache-airflow>=2.11.0rc1",
62
+ "apache-airflow-providers-common-compat>=1.12.0rc1",
61
63
  "apache-airflow-providers-common-sql>=1.23.0rc1",
62
64
  "psycopg2-binary>=2.9.9; python_version < '3.13'",
63
65
  "psycopg2-binary>=2.9.10; python_version >= '3.13'",
@@ -67,9 +69,15 @@ dependencies = [
67
69
  # The optional dependencies should be modified in place in the generated file
68
70
  # Any change in the dependencies is preserved when the file is regenerated
69
71
  [project.optional-dependencies]
72
+ sqlalchemy = [
73
+ "sqlalchemy>=1.4.49"
74
+ ]
70
75
  "amazon" = [
71
76
  "apache-airflow-providers-amazon>=2.6.0rc1",
72
77
  ]
78
+ "microsoft.azure" = [
79
+ "apache-airflow-providers-microsoft-azure>=12.8.0rc1"
80
+ ]
73
81
  "openlineage" = [
74
82
  "apache-airflow-providers-openlineage"
75
83
  ]
@@ -90,7 +98,9 @@ dev = [
90
98
  "apache-airflow-task-sdk",
91
99
  "apache-airflow-devel-common",
92
100
  "apache-airflow-providers-amazon",
101
+ "apache-airflow-providers-common-compat",
93
102
  "apache-airflow-providers-common-sql",
103
+ "apache-airflow-providers-microsoft-azure",
94
104
  "apache-airflow-providers-openlineage",
95
105
  # Additional devel dependencies (do not remove this line and add extra development dependencies)
96
106
  "apache-airflow-providers-common-sql[pandas]",
@@ -124,8 +134,8 @@ apache-airflow-providers-common-sql = {workspace = true}
124
134
  apache-airflow-providers-standard = {workspace = true}
125
135
 
126
136
  [project.urls]
127
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.3.0"
128
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.3.0/changelog.html"
137
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.5.2"
138
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-postgres/6.5.2/changelog.html"
129
139
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
130
140
  "Source Code" = "https://github.com/apache/airflow"
131
141
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "6.3.0"
32
+ __version__ = "6.5.2"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.10.0"
35
+ "2.11.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-postgres:{__version__}` needs Apache Airflow 2.10.0+"
38
+ f"The package `apache-airflow-providers-postgres:{__version__}` needs Apache Airflow 2.11.0+"
39
39
  )
@@ -41,24 +41,21 @@ class PostgresDialect(Dialect):
41
41
  """
42
42
  if schema is None:
43
43
  table, schema = self.extract_schema_from_table(table)
44
- pk_columns = [
45
- row[0]
46
- for row in self.get_records(
47
- """
48
- select kcu.column_name
49
- from information_schema.table_constraints tco
50
- join information_schema.key_column_usage kcu
51
- on kcu.constraint_name = tco.constraint_name
52
- and kcu.constraint_schema = tco.constraint_schema
53
- and kcu.constraint_name = tco.constraint_name
54
- where tco.constraint_type = 'PRIMARY KEY'
55
- and kcu.table_schema = %s
56
- and kcu.table_name = %s
57
- order by kcu.ordinal_position
58
- """,
59
- (self.unescape_word(schema), self.unescape_word(table)),
60
- )
61
- ]
44
+ table = self.unescape_word(table) or table
45
+ schema = self.unescape_word(schema) if schema else None
46
+ query = """
47
+ select kcu.column_name
48
+ from information_schema.table_constraints tco
49
+ join information_schema.key_column_usage kcu
50
+ on kcu.constraint_name = tco.constraint_name
51
+ and kcu.constraint_schema = tco.constraint_schema
52
+ and kcu.constraint_name = tco.constraint_name
53
+ where tco.constraint_type = 'PRIMARY KEY'
54
+ and kcu.table_schema = %s
55
+ and kcu.table_name = %s
56
+ order by kcu.ordinal_position
57
+ """
58
+ pk_columns = [row[0] for row in self.get_records(query, (schema, table))]
62
59
  return pk_columns or None
63
60
 
64
61
  @staticmethod
@@ -78,31 +75,27 @@ class PostgresDialect(Dialect):
78
75
  ) -> list[str] | None:
79
76
  if schema is None:
80
77
  table, schema = self.extract_schema_from_table(table)
81
-
82
- column_names = list(
83
- row["name"]
84
- for row in filter(
85
- predicate,
86
- map(
87
- self._to_row,
88
- self.get_records(
89
- """
90
- select column_name,
91
- data_type,
92
- is_nullable,
93
- column_default,
94
- is_generated,
95
- is_identity
96
- from information_schema.columns
97
- where table_schema = %s
98
- and table_name = %s
99
- order by ordinal_position
100
- """,
101
- (self.unescape_word(schema), self.unescape_word(table)),
102
- ),
103
- ),
104
- )
105
- )
78
+ table = self.unescape_word(table) or table
79
+ schema = self.unescape_word(schema) if schema else None
80
+ query = """
81
+ select column_name,
82
+ data_type,
83
+ is_nullable,
84
+ column_default,
85
+ is_generated,
86
+ is_identity
87
+ from information_schema.columns
88
+ where table_schema = %s
89
+ and table_name = %s
90
+ order by ordinal_position
91
+ """
92
+ column_names = []
93
+ for row in map(
94
+ self._to_row,
95
+ self.get_records(query, (schema, table)),
96
+ ):
97
+ if predicate(row):
98
+ column_names.append(row["name"])
106
99
  self.log.debug("Column names for table '%s': %s", table, column_names)
107
100
  return column_names
108
101
 
@@ -65,4 +65,18 @@ def get_provider_info():
65
65
  "handler": "airflow.providers.postgres.assets.postgres.sanitize_uri",
66
66
  }
67
67
  ],
68
+ "config": {
69
+ "postgres": {
70
+ "description": "Configuration for Postgres hooks and operators.\n",
71
+ "options": {
72
+ "azure_oauth_scope": {
73
+ "description": "The scope to use while retrieving Oauth token for Postgres Flexible Server\nfrom Azure Entra authentication.\n",
74
+ "version_added": "6.4.0",
75
+ "type": "string",
76
+ "example": None,
77
+ "default": "https://ossrdbms-aad.database.windows.net/.default",
78
+ }
79
+ },
80
+ }
81
+ },
68
82
  }
@@ -24,15 +24,15 @@ from copy import deepcopy
24
24
  from typing import TYPE_CHECKING, Any, Literal, Protocol, TypeAlias, cast, overload
25
25
 
26
26
  import psycopg2
27
- import psycopg2.extensions
28
27
  import psycopg2.extras
29
28
  from more_itertools import chunked
30
29
  from psycopg2.extras import DictCursor, NamedTupleCursor, RealDictCursor, execute_batch
31
- from sqlalchemy.engine import URL
32
30
 
33
- from airflow.exceptions import (
31
+ from airflow.providers.common.compat.sdk import (
34
32
  AirflowException,
35
33
  AirflowOptionalProviderFeatureException,
34
+ Connection,
35
+ conf,
36
36
  )
37
37
  from airflow.providers.common.sql.hooks.sql import DbApiHook
38
38
  from airflow.providers.postgres.dialects.postgres import PostgresDialect
@@ -57,6 +57,7 @@ if USE_PSYCOPG3:
57
57
  if TYPE_CHECKING:
58
58
  from pandas import DataFrame as PandasDataFrame
59
59
  from polars import DataFrame as PolarsDataFrame
60
+ from sqlalchemy.engine import URL
60
61
 
61
62
  from airflow.providers.common.sql.dialects.dialect import Dialect
62
63
  from airflow.providers.openlineage.sqlparser import DatabaseInfo
@@ -64,11 +65,6 @@ if TYPE_CHECKING:
64
65
  if USE_PSYCOPG3:
65
66
  from psycopg.errors import Diagnostic
66
67
 
67
- try:
68
- from airflow.sdk import Connection
69
- except ImportError:
70
- from airflow.models.connection import Connection # type: ignore[assignment]
71
-
72
68
  CursorType: TypeAlias = DictCursor | RealDictCursor | NamedTupleCursor
73
69
  CursorRow: TypeAlias = dict[str, Any] | tuple[Any, ...]
74
70
 
@@ -156,7 +152,9 @@ class PostgresHook(DbApiHook):
156
152
  "aws_conn_id",
157
153
  "sqlalchemy_scheme",
158
154
  "sqlalchemy_query",
155
+ "azure_conn_id",
159
156
  }
157
+ default_azure_oauth_scope = "https://ossrdbms-aad.database.windows.net/.default"
160
158
 
161
159
  def __init__(
162
160
  self, *args, options: str | None = None, enable_log_db_messages: bool = False, **kwargs
@@ -173,10 +171,19 @@ class PostgresHook(DbApiHook):
173
171
 
174
172
  @property
175
173
  def sqlalchemy_url(self) -> URL:
174
+ try:
175
+ from sqlalchemy.engine import URL
176
+ except (ImportError, ModuleNotFoundError) as err:
177
+ raise AirflowOptionalProviderFeatureException(
178
+ "SQLAlchemy is not installed. Please install it with "
179
+ "`pip install apache-airflow-providers-postgres[sqlalchemy]`."
180
+ ) from err
176
181
  conn = self.connection
177
182
  query = conn.extra_dejson.get("sqlalchemy_query", {})
178
183
  if not isinstance(query, dict):
179
184
  raise AirflowException("The parameter 'sqlalchemy_query' must be of type dict!")
185
+ if conn.extra_dejson.get("iam", False):
186
+ conn.login, conn.password, conn.port = self.get_iam_token(conn)
180
187
  return URL.create(
181
188
  drivername="postgresql+psycopg" if USE_PSYCOPG3 else "postgresql",
182
189
  username=self.__cast_nullable(conn.login, str),
@@ -441,8 +448,14 @@ class PostgresHook(DbApiHook):
441
448
  return PostgresHook._serialize_cell_ppg2(cell, conn)
442
449
 
443
450
  def get_iam_token(self, conn: Connection) -> tuple[str, str, int]:
451
+ """Get the IAM token from different identity providers."""
452
+ if conn.extra_dejson.get("azure_conn_id"):
453
+ return self.get_azure_iam_token(conn)
454
+ return self.get_aws_iam_token(conn)
455
+
456
+ def get_aws_iam_token(self, conn: Connection) -> tuple[str, str, int]:
444
457
  """
445
- Get the IAM token.
458
+ Get the AWS IAM token.
446
459
 
447
460
  This uses AWSHook to retrieve a temporary password to connect to
448
461
  Postgres or Redshift. Port is required. If none is provided, the default
@@ -451,7 +464,7 @@ class PostgresHook(DbApiHook):
451
464
  try:
452
465
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
453
466
  except ImportError:
454
- from airflow.exceptions import AirflowOptionalProviderFeatureException
467
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
455
468
 
456
469
  raise AirflowOptionalProviderFeatureException(
457
470
  "apache-airflow-providers-amazon not installed, run: "
@@ -500,6 +513,34 @@ class PostgresHook(DbApiHook):
500
513
  token = rds_client.generate_db_auth_token(conn.host, port, conn.login)
501
514
  return cast("str", login), cast("str", token), port
502
515
 
516
+ def get_azure_iam_token(self, conn: Connection) -> tuple[str, str, int]:
517
+ """
518
+ Get the Azure IAM token.
519
+
520
+ This uses AzureBaseHook to retrieve an OAUTH token to connect to Postgres.
521
+ Scope for the OAuth token can be set in the config option ``azure_oauth_scope`` under the section ``[postgres]``.
522
+ """
523
+ if TYPE_CHECKING:
524
+ from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
525
+
526
+ azure_conn_id = conn.extra_dejson.get("azure_conn_id", "azure_default")
527
+ try:
528
+ azure_conn = Connection.get(azure_conn_id)
529
+ except AttributeError:
530
+ azure_conn = Connection.get_connection_from_secrets(azure_conn_id) # type: ignore[attr-defined]
531
+ try:
532
+ azure_base_hook: AzureBaseHook = azure_conn.get_hook()
533
+ except TypeError as e:
534
+ if "required positional argument: 'sdk_client'" in str(e):
535
+ raise AirflowOptionalProviderFeatureException(
536
+ "Getting azure token is not supported by current version of 'AzureBaseHook'. "
537
+ "Please upgrade apache-airflow-providers-microsoft-azure>=12.8.0"
538
+ ) from e
539
+ raise
540
+ scope = conf.get("postgres", "azure_oauth_scope", fallback=self.default_azure_oauth_scope)
541
+ token = azure_base_hook.get_token(scope).token
542
+ return cast("str", conn.login or azure_conn.login), token, conn.port or 5432
543
+
503
544
  def get_table_primary_key(self, table: str, schema: str | None = "public") -> list[str] | None:
504
545
  """
505
546
  Get the table's primary key.
@@ -531,7 +572,7 @@ class PostgresHook(DbApiHook):
531
572
  try:
532
573
  from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
533
574
  except ImportError:
534
- from airflow.exceptions import AirflowOptionalProviderFeatureException
575
+ from airflow.providers.common.compat.sdk import AirflowOptionalProviderFeatureException
535
576
 
536
577
  raise AirflowOptionalProviderFeatureException(
537
578
  "apache-airflow-providers-amazon not installed, run: "
@@ -25,7 +25,7 @@ from airflow.providers.postgres.assets.postgres import sanitize_uri
25
25
 
26
26
 
27
27
  @pytest.mark.parametrize(
28
- "original, normalized",
28
+ ("original", "normalized"),
29
29
  [
30
30
  pytest.param(
31
31
  "postgres://example.com:1234/database/schema/table",
@@ -56,11 +56,16 @@ def test_sanitize_uri_pass(original: str, normalized: str) -> None:
56
56
  pytest.param("postgres://", id="blank"),
57
57
  pytest.param("postgres:///database/schema/table", id="no-host"),
58
58
  pytest.param("postgres://example.com/database/table", id="missing-component"),
59
- pytest.param("postgres://example.com:abcd/database/schema/table", id="non-port"),
60
59
  pytest.param("postgres://example.com/database/schema/table/column", id="extra-component"),
61
60
  ],
62
61
  )
63
62
  def test_sanitize_uri_fail(value: str) -> None:
64
63
  uri_i = urllib.parse.urlsplit(value)
65
- with pytest.raises(ValueError):
64
+ with pytest.raises(ValueError, match="URI format postgres:// must contain"):
65
+ sanitize_uri(uri_i)
66
+
67
+
68
+ def test_sanitize_uri_fail_non_port() -> None:
69
+ uri_i = urllib.parse.urlsplit("postgres://example.com:abcd/database/schema/table")
70
+ with pytest.raises(ValueError, match="Port could not be cast to integer value as 'abcd'"):
66
71
  sanitize_uri(uri_i)
@@ -27,14 +27,13 @@ import polars as pl
27
27
  import pytest
28
28
  import sqlalchemy
29
29
 
30
- from airflow.exceptions import AirflowException
31
30
  from airflow.models import Connection
31
+ from airflow.providers.common.compat.sdk import AirflowException, AirflowOptionalProviderFeatureException
32
32
  from airflow.providers.postgres.dialects.postgres import PostgresDialect
33
33
  from airflow.providers.postgres.hooks.postgres import CompatConnection, PostgresHook
34
- from airflow.utils.types import NOTSET
35
34
 
36
35
  from tests_common.test_utils.common_sql import mock_db_hook
37
- from tests_common.test_utils.version_compat import SQLALCHEMY_V_1_4
36
+ from tests_common.test_utils.version_compat import NOTSET, SQLALCHEMY_V_1_4
38
37
 
39
38
  INSERT_SQL_STATEMENT = "INSERT INTO connection (id, conn_id, conn_type, description, host, {}, login, password, port, is_encrypted, is_extra_encrypted, extra) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
40
39
 
@@ -127,7 +126,7 @@ class TestPostgresHookConn:
127
126
  @pytest.mark.parametrize("aws_conn_id", [NOTSET, None, "mock_aws_conn"])
128
127
  @pytest.mark.parametrize("port", [5432, 5439, None])
129
128
  @pytest.mark.parametrize(
130
- "host,conn_cluster_identifier,expected_host",
129
+ ("host", "conn_cluster_identifier", "expected_host"),
131
130
  [
132
131
  (
133
132
  "cluster-identifier.ccdfre4hpd39h.us-east-1.redshift.amazonaws.com",
@@ -209,7 +208,7 @@ class TestPostgresHookConn:
209
208
  @pytest.mark.usefixtures("mock_connect")
210
209
  def test_get_conn_with_invalid_cursor(self):
211
210
  self.connection.extra = '{"cursor": "mycursor"}'
212
- with pytest.raises(ValueError):
211
+ with pytest.raises(ValueError, match="Invalid cursor passed mycursor."):
213
212
  self.db_hook.get_conn()
214
213
 
215
214
  def test_get_conn_from_connection(self, mock_connect):
@@ -297,7 +296,7 @@ class TestPostgresHookConn:
297
296
  @pytest.mark.parametrize("aws_conn_id", [NOTSET, None, "mock_aws_conn"])
298
297
  @pytest.mark.parametrize("port", [5432, 5439, None])
299
298
  @pytest.mark.parametrize(
300
- "host,conn_cluster_identifier,expected_cluster_identifier",
299
+ ("host", "conn_cluster_identifier", "expected_cluster_identifier"),
301
300
  [
302
301
  (
303
302
  "cluster-identifier.ccdfre4hpd39h.us-east-1.redshift.amazonaws.com",
@@ -373,7 +372,7 @@ class TestPostgresHookConn:
373
372
  @pytest.mark.parametrize("aws_conn_id", [NOTSET, None, "mock_aws_conn"])
374
373
  @pytest.mark.parametrize("port", [5432, 5439, None])
375
374
  @pytest.mark.parametrize(
376
- "host,conn_workgroup_name,expected_workgroup_name",
375
+ ("host", "conn_workgroup_name", "expected_workgroup_name"),
377
376
  [
378
377
  (
379
378
  "serverless-workgroup.ccdfre4hpd39h.us-east-1.redshift.amazonaws.com",
@@ -444,6 +443,57 @@ class TestPostgresHookConn:
444
443
  port=(port or 5439),
445
444
  )
446
445
 
446
+ def test_get_conn_azure_iam(self, mocker, mock_connect):
447
+ mock_azure_conn_id = "azure_conn1"
448
+ mock_db_token = "azure_token1"
449
+ mock_conn_extra = {"iam": True, "azure_conn_id": mock_azure_conn_id}
450
+ self.connection.extra = json.dumps(mock_conn_extra)
451
+
452
+ mock_connection_class = mocker.patch("airflow.providers.postgres.hooks.postgres.Connection")
453
+ mock_azure_base_hook = mock_connection_class.get.return_value.get_hook.return_value
454
+ mock_azure_base_hook.get_token.return_value.token = mock_db_token
455
+
456
+ self.db_hook.get_conn()
457
+
458
+ # Check AzureBaseHook initialization and get_token call args
459
+ mock_connection_class.get.assert_called_once_with(mock_azure_conn_id)
460
+ mock_azure_base_hook.get_token.assert_called_once_with(PostgresHook.default_azure_oauth_scope)
461
+
462
+ # Check expected psycopg2 connection call args
463
+ mock_connect.assert_called_once_with(
464
+ user=self.connection.login,
465
+ password=mock_db_token,
466
+ host=self.connection.host,
467
+ dbname=self.connection.schema,
468
+ port=(self.connection.port or 5432),
469
+ )
470
+
471
+ assert mock_db_token in self.db_hook.sqlalchemy_url
472
+
473
+ def test_get_azure_iam_token_expect_failure_on_older_azure_provider_package(self, mocker):
474
+ class MockAzureBaseHookOldVersion:
475
+ """Simulate an old version of AzureBaseHook where sdk_client is required."""
476
+
477
+ def __init__(self, sdk_client, conn_id="azure_default"):
478
+ pass
479
+
480
+ azure_conn_id = "azure_test_conn"
481
+ mock_connection_class = mocker.patch("airflow.providers.postgres.hooks.postgres.Connection")
482
+ mock_connection_class.get.return_value.get_hook = MockAzureBaseHookOldVersion
483
+
484
+ self.connection.extra = json.dumps({"iam": True, "azure_conn_id": azure_conn_id})
485
+ with pytest.raises(
486
+ AirflowOptionalProviderFeatureException,
487
+ match=(
488
+ "Getting azure token is not supported.*"
489
+ "Please upgrade apache-airflow-providers-microsoft-azure>="
490
+ ),
491
+ ):
492
+ self.db_hook.get_azure_iam_token(self.connection)
493
+
494
+ # Check AzureBaseHook initialization
495
+ mock_connection_class.get.assert_called_once_with(azure_conn_id)
496
+
447
497
  def test_get_uri_from_connection_without_database_override(self, mocker):
448
498
  expected: str = f"postgresql{'+psycopg' if USE_PSYCOPG3 else ''}://login:password@host:1/database"
449
499
  self.db_hook.get_connection = mocker.MagicMock(
@@ -639,7 +689,7 @@ class TestPostgresHook:
639
689
  assert sorted(input_data) == sorted(results)
640
690
 
641
691
  @pytest.mark.parametrize(
642
- "df_type, expected_type",
692
+ ("df_type", "expected_type"),
643
693
  [
644
694
  ("pandas", pd.DataFrame),
645
695
  ("polars", pl.DataFrame),
@@ -879,11 +929,9 @@ class TestPostgresHookPPG2:
879
929
  ),
880
930
  ]
881
931
  fields = ("id", "value")
882
- with pytest.raises(ValueError) as ctx:
932
+ with pytest.raises(ValueError, match="PostgreSQL ON CONFLICT upsert syntax requires column names"):
883
933
  setup.db_hook.insert_rows(table, rows, replace=True, replace_index=fields[0])
884
934
 
885
- assert str(ctx.value) == "PostgreSQL ON CONFLICT upsert syntax requires column names"
886
-
887
935
  def test_insert_rows_replace_missing_replace_index_arg(self, postgres_hook_setup):
888
936
  setup = postgres_hook_setup
889
937
  table = "table"
@@ -898,11 +946,9 @@ class TestPostgresHookPPG2:
898
946
  ),
899
947
  ]
900
948
  fields = ("id", "value")
901
- with pytest.raises(ValueError) as ctx:
949
+ with pytest.raises(ValueError, match="PostgreSQL ON CONFLICT upsert syntax requires an unique index"):
902
950
  setup.db_hook.insert_rows(table, rows, fields, replace=True)
903
951
 
904
- assert str(ctx.value) == "PostgreSQL ON CONFLICT upsert syntax requires an unique index"
905
-
906
952
  def test_insert_rows_replace_all_index(self, postgres_hook_setup):
907
953
  setup = postgres_hook_setup
908
954
  table = "table"
@@ -1145,11 +1191,9 @@ class TestPostgresHookPPG3:
1145
1191
  ),
1146
1192
  ]
1147
1193
  fields = ("id", "value")
1148
- with pytest.raises(ValueError) as ctx:
1194
+ with pytest.raises(ValueError, match="PostgreSQL ON CONFLICT upsert syntax requires column names"):
1149
1195
  self.db_hook.insert_rows(table, rows, replace=True, replace_index=fields[0])
1150
1196
 
1151
- assert str(ctx.value) == "PostgreSQL ON CONFLICT upsert syntax requires column names"
1152
-
1153
1197
  def test_insert_rows_replace_missing_replace_index_arg(self):
1154
1198
  table = "table"
1155
1199
  rows = [
@@ -1163,11 +1207,9 @@ class TestPostgresHookPPG3:
1163
1207
  ),
1164
1208
  ]
1165
1209
  fields = ("id", "value")
1166
- with pytest.raises(ValueError) as ctx:
1210
+ with pytest.raises(ValueError, match="PostgreSQL ON CONFLICT upsert syntax requires an unique index"):
1167
1211
  self.db_hook.insert_rows(table, rows, fields, replace=True)
1168
1212
 
1169
- assert str(ctx.value) == "PostgreSQL ON CONFLICT upsert syntax requires an unique index"
1170
-
1171
1213
  def test_insert_rows_replace_all_index(self):
1172
1214
  table = "table"
1173
1215
  rows = [