apache-airflow-providers-common-sql 1.27.5__tar.gz → 1.28.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apache-airflow-providers-common-sql might be problematic.
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/PKG-INFO +7 -8
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/README.rst +4 -5
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/changelog.rst +38 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/index.rst +5 -7
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/operators.rst +31 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/provider.yaml +2 -1
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/pyproject.toml +4 -4
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/README_API.md +9 -9
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/__init__.py +1 -1
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/dialects/dialect.pyi +3 -3
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/get_provider_info.pyi +3 -3
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/handlers.pyi +3 -3
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/sql.py +4 -1
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/sql.pyi +3 -3
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/generic_transfer.py +6 -4
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/generic_transfer.pyi +2 -5
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/sql.py +133 -1
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/sensors/sql.pyi +3 -3
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/triggers/sql.pyi +2 -5
- apache_airflow_providers_common_sql-1.28.0/tests/system/common/sql/example_sql_insert_rows.py +90 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/operators/test_generic_transfer.py +6 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/operators/test_sql.py +41 -43
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/sensors/test_sql.py +2 -2
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/commits.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/conf.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/connections.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/dataframes.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/dialects.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/integration-logos/sql.png +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/security.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/supported-database-types.rst +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/LICENSE +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/dialects/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/dialects/dialect.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/get_provider_info.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/handlers.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/sensors/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/sensors/sql.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/triggers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/triggers/sql.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/version_compat.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/conftest.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/common/sql/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/common/sql/example_generic_transfer.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/common/sql/example_sql_column_table_check.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/common/sql/example_sql_execute_query.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/system/common/sql/example_sql_threshold_check.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/dialects/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/dialects/test_dialect.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/hooks/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/hooks/test_dbapi.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/hooks/test_handlers.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/hooks/test_sql.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/hooks/test_sqlparse.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/operators/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/operators/test_sql_execute.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/sensors/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/triggers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/tests/unit/common/sql/triggers/test_sql.py +0 -0
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-sql
-Version: 1.
+Version: 1.28.0
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -29,8 +29,8 @@ Requires-Dist: pandas[sql-other]>=2.1.2 ; extra == "pandas" and ( python_version
 Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
 Requires-Dist: polars>=1.26.0 ; extra == "polars"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -64,9 +64,8 @@ Provides-Extra: polars

 Package ``apache-airflow-providers-common-sql``

-Release: ``1.
+Release: ``1.28.0``

-Release Date: ``|PypiReleaseDate|``

 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__

@@ -78,12 +77,12 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/>`_.

 Installation
 ------------

-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-common-sql``

@@ -121,5 +120,5 @@ Dependent package
 ============================================================================================================== ===============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/changelog.html>`_.

{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/README.rst
RENAMED
@@ -23,9 +23,8 @@

 Package ``apache-airflow-providers-common-sql``

-Release: ``1.
+Release: ``1.28.0``

-Release Date: ``|PypiReleaseDate|``

 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__

@@ -37,12 +36,12 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/>`_.

 Installation
 ------------

-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-common-sql``

@@ -80,4 +79,4 @@ Dependent package
 ============================================================================================================== ===============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/changelog.html>`_.
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/changelog.rst
RENAMED
@@ -25,6 +25,44 @@
 Changelog
 ---------

+1.28.0
+......
+
+
+Release Date: ``|PypiReleaseDate|``
+
+Features
+~~~~~~~~
+
+* ``Add SQLInsertRowsOperator in common.sql provider (#55031)``
+* ``Added paginated_sql_statement_clause parameter to GenericTransfer (#48866)``
+* ``Add rudimentary support for psycopg3 (#52976)``
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Fixed resolving of dialect name when host of JdbcHook is an JDBC URL (#54446)``
+
+Misc
+~~~~
+
+* ``Remove unneeded isort skip comments (#54979)``
+
+Doc-only
+~~~~~~~~
+
+* ``Make term Dag consistent in providers docs (#55101)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Remove airflow.models.DAG (#54383)``
+   * ``Replace API server's direct Connection access workaround in BaseHook (#54083)``
+   * ``Switch pre-commit to prek (#54258)``
+   * ``make bundle_name not nullable (#47592)``
+
+.. Review and move the new changes to one of the sections above:
+   * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+
 1.27.5
 ......

{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/index.rst
RENAMED
@@ -58,7 +58,7 @@
    :maxdepth: 1
    :caption: Resources

-   Example
+   Example Dags <https://github.com/apache/airflow/tree/providers-common-sql/|version|/providers/common/sql/tests/system/common/sql>
    PyPI Repository <https://pypi.org/project/apache-airflow-providers-common-sql/>
    Installing from sources <installing-providers-from-sources>

@@ -79,9 +79,7 @@ apache-airflow-providers-common-sql package
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__


-Release: 1.
-
-Release Date: ``|PypiReleaseDate|``
+Release: 1.28.0

 Provider package
 ----------------
@@ -92,7 +90,7 @@ All classes for this package are included in the ``airflow.providers.common.sql`
 Installation
 ------------

-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation via
 ``pip install apache-airflow-providers-common-sql``.
 For the minimum Airflow version supported, see ``Requirements`` below.

@@ -135,5 +133,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

-* `The apache-airflow-providers-common-sql 1.
-* `The apache-airflow-providers-common-sql 1.
+* `The apache-airflow-providers-common-sql 1.28.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.28.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.28.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.28.0.tar.gz.sha512>`__)
+* `The apache-airflow-providers-common-sql 1.28.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.28.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.28.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.28.0-py3-none-any.whl.sha512>`__)
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/docs/operators.rst
RENAMED
@@ -174,6 +174,37 @@ The below example demonstrates how to instantiate the SQLThresholdCheckOperator

 If the value returned by the query, is within the thresholds, the task passes. Otherwise, it fails.

+.. _howto/operator:SQLInsertRowsOperator:
+
+Insert rows into Table
+~~~~~~~~~~~~~~~~~~~~~~
+
+Use the :class:`~airflow.providers.common.sql.operators.sql.SQLInsertRowsOperator` to insert rows into a database table
+directly from Python data structures or an XCom. Parameters of the operator are:
+
+- ``table_name`` - name of the table in which the rows will be inserted (templated).
+- ``conn_id`` - the Airflow connection ID used to connect to the database.
+- ``schema`` (optional) - the schema in which the table is defined.
+- ``database`` (optional) - name of the database which overrides the one defined in the connection.
+- ``columns`` (optional) - list of columns to use for the insert when passing a list of dictionaries.
+- ``ignored_columns`` (optional) - list of columns to ignore for the insert, if no columns are specified,
+  columns will be dynamically resolved from the metadata.
+- ``rows`` - rows to insert, a list of tuples.
+- ``rows_processor`` (optional) - a function applied to the rows before inserting them.
+- ``preoperator`` (optional) - SQL statement or list of statements to execute before inserting data (templated).
+- ``postoperator`` (optional) - SQL statement or list of statements to execute after inserting data (templated).
+- ``hook_params`` (optional) - dictionary of additional parameters passed to the underlying hook.
+- ``insert_args`` (optional) - dictionary of additional arguments passed to the hook's ``insert_rows`` method,
+  can include ``replace``, ``executemany``, ``fast_executemany``, ``autocommit``, and others supported by the hook.
+
+The example below shows how to instantiate the SQLInsertRowsOperator task.
+
+.. exampleinclude:: /../tests/system/common/sql/example_sql_insert_rows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_sql_insert_rows]
+    :end-before: [END howto_operator_sql_insert_rows]
+
 .. _howto/operator:GenericTransfer:

 Generic Transfer
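The ``exampleinclude`` above points at the packaged system test, which is the authoritative example for this release. As a quick orientation only, here is a hedged sketch of how the parameters documented in this section could be wired together; the DAG ID, the ``my_postgres`` connection, the ``ace_data`` table, and the column names are placeholders, not values shipped with the provider.

```python
import datetime

from airflow import DAG
from airflow.providers.common.sql.operators.sql import SQLInsertRowsOperator

with DAG(
    dag_id="example_sql_insert_rows_sketch",  # hypothetical DAG id
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
    catchup=False,
):
    insert_rows = SQLInsertRowsOperator(
        task_id="insert_rows",
        conn_id="my_postgres",  # placeholder connection id
        table_name="ace_data",  # placeholder table name (templated)
        schema="public",
        columns=["name", "firstname", "age"],
        rows=[
            ("Doe", "John", 42),
            ("Smith", "Jane", 38),
        ],
        # Extra keyword arguments forwarded to the hook's insert_rows(),
        # e.g. the options listed in the parameter description above.
        insert_args={"executemany": True},
    )
```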
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/provider.yaml
RENAMED
@@ -22,12 +22,13 @@ description: |
   `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__

 state: ready
-source-date-epoch:
+source-date-epoch: 1756876759
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 1.28.0
   - 1.27.5
   - 1.27.4
   - 1.27.3
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/pyproject.toml
RENAMED
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-common-sql"
-version = "1.
+version = "1.28.0"
 description = "Provider package apache-airflow-providers-common-sql for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -54,7 +54,7 @@ requires-python = ">=3.10"

 # The dependencies should be modified in place in the generated file.
 # Any change in the dependencies is preserved when the file is regenerated
-# Make sure to run ``
+# Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.10.0",
@@ -121,8 +121,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}

 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/README_API.md
RENAMED
@@ -30,7 +30,7 @@ The approach we take is similar to one that has been applied by Android OS team
 and it is based on storing the current version of API and flagging changes that are potentially breaking.
 This is done by comparing the previous API (store in stub files) and the upcoming API from the PR.
 The upcoming API is automatically extracted from `common.sql` Python files using `update-common-sql-api-stubs`
-
+prek hook using mypy `stubgen` and stored as `.pyi` files in the `airflow.providers.common.sql` package.
 We also post-process the `.pyi` files to add some historically exposed methods that should be also
 considered as public API.

@@ -40,22 +40,22 @@ to review the changes and manually regenerate the stub files.
 The details of the workflow are as follows:

 1) The previous API is stored in the (committed to repository) stub files.
-2)
+2) Whenever common.sql Python files are modified the `update-common-sql-api-stubs` prek hook
    regenerates the stubs (including post-processing it) and looks for potentially breaking changes
    (removals or updates of the existing classes/methods).
-3) If the check reveals there are no changes to the API, nothing happens,
-4) If there are only additions, the
-   asks the contributor to commit resulting updates and fails the
+3) If the check reveals there are no changes to the API, nothing happens, prek hook succeeds.
+4) If there are only additions, the prek hook automatically updates the stub files,
+   asks the contributor to commit resulting updates and fails the prek hook. This is very similar to
    other static checks that automatically modify/fix source code.
-5) If the
-   contributor. The
+5) If the prek hook detects potentially breaking changes, the process is a bit more involved for the
+   contributor. The prek hook flags such changes to the contributor by failing the prek hook and
    asks the contributor to review the change looking specifically for breaking compatibility with previous
    providers (and fix any backwards compatibility). Once this is completed, the contributor is asked to
-   manually and explicitly regenerate and commit the new version of the stubs by running the
+   manually and explicitly regenerate and commit the new version of the stubs by running the prek hook
    with manually added environment variable:

 ```shell
-UPDATE_COMMON_SQL_API=1
+UPDATE_COMMON_SQL_API=1 prek update-common-sql-api-stubs
 ```

 # Verifying other providers to use only public API of the `common.sql` provider
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/__init__.py
RENAMED
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "1.
+__version__ = "1.28.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/dialects/dialect.pyi
RENAMED
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs`
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for
-
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.dialects.dialect.
 """

 from collections.abc import Callable, Iterable, Mapping
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/get_provider_info.pyi
RENAMED
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs`
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for
-
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.get_provider_info.
 """

 def get_provider_info() -> None: ...
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/handlers.pyi
RENAMED
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs`
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for
-
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.handlers.
 """

 from collections.abc import Iterable
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/sql.py
RENAMED
@@ -332,7 +332,7 @@ class DbApiHook(BaseHook):
     def dialect_name(self) -> str:
         try:
             return make_url(self.get_uri()).get_dialect().name
-        except (ArgumentError, NoSuchModuleError):
+        except (ArgumentError, NoSuchModuleError, ValueError):
             config = self.connection_extra
             sqlalchemy_scheme = config.get("sqlalchemy_scheme")
             if sqlalchemy_scheme:
@@ -810,6 +810,9 @@ class DbApiHook(BaseHook):
         self.log.info("Running statement: %s, parameters: %s", sql_statement, parameters)

         if parameters:
+            # If we're using psycopg3, we might need to handle parameters differently
+            if hasattr(cur, "__module__") and "psycopg" in cur.__module__ and isinstance(parameters, list):
+                parameters = tuple(parameters)
             cur.execute(sql_statement, parameters)
         else:
             cur.execute(sql_statement)
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/hooks/sql.pyi
RENAMED
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs`
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for
-
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.sql.
 """

 from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/generic_transfer.py
RENAMED
@@ -56,6 +56,7 @@ class GenericTransfer(BaseOperator):
         executed prior to loading the data. (templated)
     :param insert_args: extra params for `insert_rows` method.
     :param page_size: number of records to be read in paginated mode (optional).
+    :param paginated_sql_statement_clause: SQL statement clause to be used for pagination (optional).
     """

     template_fields: Sequence[str] = (
@@ -65,6 +66,8 @@ class GenericTransfer(BaseOperator):
         "destination_table",
         "preoperator",
         "insert_args",
+        "page_size",
+        "paginated_sql_statement_clause",
     )
     template_ext: Sequence[str] = (
         ".sql",
@@ -85,6 +88,7 @@ class GenericTransfer(BaseOperator):
         preoperator: str | list[str] | None = None,
         insert_args: dict | None = None,
         page_size: int | None = None,
+        paginated_sql_statement_clause: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -97,9 +101,7 @@ class GenericTransfer(BaseOperator):
         self.preoperator = preoperator
         self.insert_args = insert_args or {}
         self.page_size = page_size
-        self.
-            "paginated_sql_statement_format", "{} LIMIT {} OFFSET {}"
-        )
+        self.paginated_sql_statement_clause = paginated_sql_statement_clause or "{} LIMIT {} OFFSET {}"

     @classmethod
     def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> DbApiHook:
@@ -126,7 +128,7 @@ class GenericTransfer(BaseOperator):

     def get_paginated_sql(self, offset: int) -> str:
         """Format the paginated SQL statement using the current format."""
-        return self.
+        return self.paginated_sql_statement_clause.format(self.sql, self.page_size, offset)

     def render_template_fields(
         self,
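As context for the ``paginated_sql_statement_clause`` parameter added above, the sketch below shows one way it might be used to override the default ``"{} LIMIT {} OFFSET {}"`` clause. It is an assumption-laden illustration, not part of the diff: the DAG ID, connection IDs, query, table, and the OFFSET/FETCH syntax are placeholders, and the clause is filled positionally via ``str.format(sql, page_size, offset)`` as shown in ``get_paginated_sql`` above.

```python
import datetime

from airflow import DAG
from airflow.providers.common.sql.operators.generic_transfer import GenericTransfer

with DAG(
    dag_id="example_generic_transfer_paginated_sketch",  # hypothetical DAG id
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
    catchup=False,
):
    copy_orders = GenericTransfer(
        task_id="copy_orders",
        source_conn_id="source_db",  # placeholder connection id
        destination_conn_id="destination_db",  # placeholder connection id
        sql="SELECT id, amount FROM orders",  # placeholder query
        destination_table="orders_copy",  # placeholder table
        page_size=1000,
        # Format arguments are (sql, page_size, offset); explicit indexes are
        # used because page size and offset swap positions in this syntax.
        paginated_sql_statement_clause="{0} OFFSET {2} ROWS FETCH NEXT {1} ROWS ONLY",
    )
```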
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/generic_transfer.pyi
RENAMED
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs`
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -27,10 +27,7 @@
 #
 # You can read more in the README_API.md file
 #
-"""
-Definition of the public interface for airflow.providers.common.sql.operators.generic_transfer
-isort:skip_file
-"""
+"""Definition of the public interface for airflow.providers.common.sql.operators.generic_transfer."""

 from collections.abc import Sequence
 from functools import cached_property as cached_property
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/operators/sql.py
RENAMED
@@ -23,7 +23,8 @@ from collections.abc import Callable, Iterable, Mapping, Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs

-from airflow
+from airflow import XComArg
+from airflow.exceptions import AirflowException, AirflowFailException, AirflowSkipException
 from airflow.models import SkipMixin
 from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
 from airflow.providers.common.sql.hooks.sql import DbApiHook
@@ -31,6 +32,8 @@ from airflow.providers.common.sql.version_compat import BaseHook, BaseOperator
 from airflow.utils.helpers import merge_dicts

 if TYPE_CHECKING:
+    import jinja2
+
     from airflow.providers.openlineage.extractors import OperatorLineage
     from airflow.utils.context import Context

@@ -1252,6 +1255,135 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
         self.skip_all_except(context["ti"], follow_branch)


+class SQLInsertRowsOperator(BaseSQLOperator):
+    """
+    Insert rows (e.g. a collection of tuples) into a database table directly from an XCom or Python data structure.
+
+    :param table: the name of the table in which the rows will be inserted (templated).
+    :param conn_id: the connection ID used to connect to the database
+    :param schema: (optional) the name of schema in which the table is defined
+    :param database: name of database (e.g. schema) which overwrite the defined one in connection
+    :param columns: (optional) specify a list of columns being used for the insert when passing a list of
+        dictionaries.
+    :param ignore_columns: (optional) specify a list of columns being ignored for the insert. If no columns
+        where specified, the columns will be resolved dynamically from the metadata.
+    :param rows: the rows to insert into the table. Rows can be a list of tuples or a list of dictionaries.
+        When a list of dictionaries is provided, the column names are inferred from the dictionary keys and
+        will be matched with the column names, ignored columns will be filtered out.
+    :rows_processor: (optional) a function that will be applied to the rows before inserting them into the table.
+    :param preoperator: sql statement or list of statements to be executed prior to loading the data. (templated)
+    :param postoperator: sql statement or list of statements to be executed after loading the data. (templated)
+    :param insert_args: (optional) dictionary of additional arguments passed to the underlying hook's
+        `insert_rows` method. This allows you to configure options such as `replace`, `executemany`,
+        `fast_executemany`, and `autocommit`.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:SQLInsertRowsOperator`
+    """
+
+    template_fields: Sequence[str] = (
+        "table_name",
+        "conn_id",
+        "schema",
+        "database",
+        "_columns",
+        "ignored_columns",
+        "preoperator",
+        "postoperator",
+        "insert_args",
+    )
+    template_ext: Sequence[str] = (".sql",)
+    template_fields_renderers = {"preoperator": "sql"}
+
+    def __init__(
+        self,
+        *,
+        table_name: str,
+        conn_id: str | None = None,
+        schema: str | None = None,
+        database: str | None = None,
+        columns: Iterable[str] | None = None,
+        ignored_columns: Iterable[str] | None = None,
+        rows: list[Any] | XComArg | None = None,
+        rows_processor: Callable[[Any, Context], Any] = lambda rows, **context: rows,
+        preoperator: str | list[str] | None = None,
+        postoperator: str | list[str] | None = None,
+        hook_params: dict | None = None,
+        insert_args: dict | None = None,
+        **kwargs,
+    ):
+        super().__init__(
+            conn_id=conn_id,
+            database=database,
+            hook_params=hook_params,
+            **kwargs,
+        )
+        self.table_name = table_name
+        self.schema = schema
+        self._columns: list | None = list(columns) if columns else None
+        self.ignored_columns = set(ignored_columns or {})
+        self.rows = rows or []
+        self._rows_processor = rows_processor
+        self.preoperator = preoperator
+        self.postoperator = postoperator
+        self.insert_args = insert_args or {}
+        self.do_xcom_push = False
+
+    def render_template_fields(
+        self,
+        context: Context,
+        jinja_env: jinja2.Environment | None = None,
+    ) -> None:
+        super().render_template_fields(context=context, jinja_env=jinja_env)
+
+        if isinstance(self.rows, XComArg):
+            self.rows = self.rows.resolve(context=context)
+
+    @property
+    def table_name_with_schema(self) -> str:
+        if self.schema is not None:
+            return f"{self.schema}.{self.table_name}"
+        return self.table_name
+
+    @cached_property
+    def columns(self):
+        if self._columns is None:
+            self._columns = self.get_db_hook().dialect.get_column_names(self.table_name_with_schema)
+        return self._columns
+
+    @property
+    def column_names(self) -> list[str]:
+        if self.ignored_columns:
+            return [column for column in self.columns if column not in self.ignored_columns]
+        return self.columns
+
+    def _process_rows(self, context: Context):
+        return self._rows_processor(context, self.rows)  # type: ignore
+
+    def execute(self, context: Context) -> Any:
+        if not self.rows:
+            raise AirflowSkipException(f"Skipping task {self.task_id} because no rows.")
+
+        self.log.debug("Table: %s", self.table_name_with_schema)
+        self.log.debug("Column names: %s", self.column_names)
+        if self.preoperator:
+            self.log.debug("Running preoperator")
+            self.log.debug(self.preoperator)
+            self.get_db_hook().run(self.preoperator)
+        rows = self._process_rows(context=context)
+        self.get_db_hook().insert_rows(
+            table=self.table_name_with_schema,
+            rows=rows,
+            target_fields=self.column_names,
+            **self.insert_args,
+        )
+        if self.postoperator:
+            self.log.debug("Running postoperator")
+            self.log.debug(self.postoperator)
+            self.get_db_hook().run(self.postoperator)
+
+
 def _initialize_partition_clause(clause: str | None) -> str | None:
     """Ensure the partition_clause contains only valid patterns."""
     if clause is None:
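Because ``render_template_fields`` above resolves an ``XComArg`` passed as ``rows``, the operator can be fed directly from an upstream task's return value. A hedged sketch of that pattern follows; the DAG ID, connection ID, table, columns, and the ``extract_rows`` task are hypothetical, not part of the release.

```python
import datetime

from airflow import DAG
from airflow.decorators import task
from airflow.providers.common.sql.operators.sql import SQLInsertRowsOperator

with DAG(
    dag_id="example_insert_rows_from_xcom_sketch",  # hypothetical DAG id
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
    catchup=False,
):

    @task
    def extract_rows() -> list[tuple]:
        # Placeholder extraction step; in practice this could call an API or read a file.
        return [("Doe", "John", 42), ("Smith", "Jane", 38)]

    SQLInsertRowsOperator(
        task_id="load_rows",
        conn_id="my_postgres",  # placeholder connection id
        table_name="ace_data",  # placeholder table name
        columns=["name", "firstname", "age"],
        rows=extract_rows(),  # XComArg, resolved to the returned list at runtime
    )
```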
{apache_airflow_providers_common_sql-1.27.5 → apache_airflow_providers_common_sql-1.28.0}/src/airflow/providers/common/sql/sensors/sql.pyi
RENAMED
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs`
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for
-
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.sensors.sql.
 """

 from collections.abc import Callable, Mapping, Sequence