apache-airflow-providers-common-sql 1.23.0__tar.gz → 1.24.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-common-sql might be problematic; see the package registry's release page for more details.

Files changed (29) hide show
  1. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/PKG-INFO +26 -25
  2. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/README.rst +21 -21
  3. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/pyproject.toml +29 -8
  4. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/__init__.py +1 -1
  5. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/get_provider_info.py +19 -3
  6. apache_airflow_providers_common_sql-1.24.0/src/airflow/providers/common/sql/operators/generic_transfer.py +219 -0
  7. apache_airflow_providers_common_sql-1.24.0/src/airflow/providers/common/sql/operators/generic_transfer.pyi +85 -0
  8. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/operators/sql.py +2 -1
  9. apache_airflow_providers_common_sql-1.24.0/src/airflow/providers/common/sql/triggers/__init__.py +16 -0
  10. apache_airflow_providers_common_sql-1.24.0/src/airflow/providers/common/sql/triggers/sql.py +87 -0
  11. apache_airflow_providers_common_sql-1.24.0/src/airflow/providers/common/sql/triggers/sql.pyi +45 -0
  12. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/LICENSE +0 -0
  13. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/README_API.md +0 -0
  14. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/dialects/__init__.py +0 -0
  15. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/dialects/dialect.py +0 -0
  16. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/dialects/dialect.pyi +0 -0
  17. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md +0 -0
  18. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md +0 -0
  19. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md +0 -0
  20. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/get_provider_info.pyi +0 -0
  21. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/hooks/__init__.py +0 -0
  22. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/hooks/handlers.py +0 -0
  23. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/hooks/handlers.pyi +0 -0
  24. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/hooks/sql.py +0 -0
  25. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/hooks/sql.pyi +0 -0
  26. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/operators/__init__.py +0 -0
  27. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/sensors/__init__.py +0 -0
  28. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/sensors/sql.py +0 -0
  29. {apache_airflow_providers_common_sql-1.23.0 → apache_airflow_providers_common_sql-1.24.0}/src/airflow/providers/common/sql/sensors/sql.pyi +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-common-sql
3
- Version: 1.23.0
3
+ Version: 1.24.0
4
4
  Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
5
5
  Keywords: airflow-provider,common.sql,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,11 +23,12 @@ Classifier: Topic :: System :: Monitoring
23
23
  Requires-Dist: apache-airflow>=2.9.0
24
24
  Requires-Dist: sqlparse>=0.5.1
25
25
  Requires-Dist: more-itertools>=9.0.0
26
+ Requires-Dist: methodtools>=0.4.7
26
27
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
27
28
  Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas"
28
29
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
29
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html
30
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0
30
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/changelog.html
31
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0
31
32
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
32
33
  Project-URL: Source Code, https://github.com/apache/airflow
33
34
  Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -36,32 +37,31 @@ Provides-Extra: openlineage
36
37
  Provides-Extra: pandas
37
38
 
38
39
 
39
- .. Licensed to the Apache Software Foundation (ASF) under one
40
- or more contributor license agreements. See the NOTICE file
41
- distributed with this work for additional information
42
- regarding copyright ownership. The ASF licenses this file
43
- to you under the Apache License, Version 2.0 (the
44
- "License"); you may not use this file except in compliance
45
- with the License. You may obtain a copy of the License at
40
+ .. Licensed to the Apache Software Foundation (ASF) under one
41
+ or more contributor license agreements. See the NOTICE file
42
+ distributed with this work for additional information
43
+ regarding copyright ownership. The ASF licenses this file
44
+ to you under the Apache License, Version 2.0 (the
45
+ "License"); you may not use this file except in compliance
46
+ with the License. You may obtain a copy of the License at
46
47
 
47
- .. http://www.apache.org/licenses/LICENSE-2.0
48
+ .. http://www.apache.org/licenses/LICENSE-2.0
48
49
 
49
- .. Unless required by applicable law or agreed to in writing,
50
- software distributed under the License is distributed on an
51
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
52
- KIND, either express or implied. See the License for the
53
- specific language governing permissions and limitations
54
- under the License.
50
+ .. Unless required by applicable law or agreed to in writing,
51
+ software distributed under the License is distributed on an
52
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
53
+ KIND, either express or implied. See the License for the
54
+ specific language governing permissions and limitations
55
+ under the License.
55
56
 
56
- .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
57
-
58
- .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
59
- `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
57
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
60
58
 
59
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
60
+ ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY
61
61
 
62
62
  Package ``apache-airflow-providers-common-sql``
63
63
 
64
- Release: ``1.23.0``
64
+ Release: ``1.24.0``
65
65
 
66
66
 
67
67
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -74,7 +74,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
74
74
  are in ``airflow.providers.common.sql`` python package.
75
75
 
76
76
  You can find package information and changelog for the provider
77
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/>`_.
77
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/>`_.
78
78
 
79
79
  Installation
80
80
  ------------
@@ -94,6 +94,7 @@ PIP package Version required
94
94
  ``apache-airflow`` ``>=2.9.0``
95
95
  ``sqlparse`` ``>=0.5.1``
96
96
  ``more-itertools`` ``>=9.0.0``
97
+ ``methodtools`` ``>=0.4.7``
97
98
  ================== ==================
98
99
 
99
100
  Cross provider package dependencies
@@ -116,5 +117,5 @@ Dependent package
116
117
  ============================================================================================================== ===============
117
118
 
118
119
  The changelog for the provider package can be found in the
119
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html>`_.
120
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/changelog.html>`_.
120
121
 
@@ -1,30 +1,29 @@
1
1
 
2
- .. Licensed to the Apache Software Foundation (ASF) under one
3
- or more contributor license agreements. See the NOTICE file
4
- distributed with this work for additional information
5
- regarding copyright ownership. The ASF licenses this file
6
- to you under the Apache License, Version 2.0 (the
7
- "License"); you may not use this file except in compliance
8
- with the License. You may obtain a copy of the License at
2
+ .. Licensed to the Apache Software Foundation (ASF) under one
3
+ or more contributor license agreements. See the NOTICE file
4
+ distributed with this work for additional information
5
+ regarding copyright ownership. The ASF licenses this file
6
+ to you under the Apache License, Version 2.0 (the
7
+ "License"); you may not use this file except in compliance
8
+ with the License. You may obtain a copy of the License at
9
9
 
10
- .. http://www.apache.org/licenses/LICENSE-2.0
10
+ .. http://www.apache.org/licenses/LICENSE-2.0
11
11
 
12
- .. Unless required by applicable law or agreed to in writing,
13
- software distributed under the License is distributed on an
14
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
- KIND, either express or implied. See the License for the
16
- specific language governing permissions and limitations
17
- under the License.
12
+ .. Unless required by applicable law or agreed to in writing,
13
+ software distributed under the License is distributed on an
14
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ KIND, either express or implied. See the License for the
16
+ specific language governing permissions and limitations
17
+ under the License.
18
18
 
19
- .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
20
-
21
- .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
22
- `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
19
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
23
20
 
21
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
22
+ ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY
24
23
 
25
24
  Package ``apache-airflow-providers-common-sql``
26
25
 
27
- Release: ``1.23.0``
26
+ Release: ``1.24.0``
28
27
 
29
28
 
30
29
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -37,7 +36,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
37
36
  are in ``airflow.providers.common.sql`` python package.
38
37
 
39
38
  You can find package information and changelog for the provider
40
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/>`_.
39
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/>`_.
41
40
 
42
41
  Installation
43
42
  ------------
@@ -57,6 +56,7 @@ PIP package Version required
57
56
  ``apache-airflow`` ``>=2.9.0``
58
57
  ``sqlparse`` ``>=0.5.1``
59
58
  ``more-itertools`` ``>=9.0.0``
59
+ ``methodtools`` ``>=0.4.7``
60
60
  ================== ==================
61
61
 
62
62
  Cross provider package dependencies
@@ -79,4 +79,4 @@ Dependent package
79
79
  ============================================================================================================== ===============
80
80
 
81
81
  The changelog for the provider package can be found in the
82
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html>`_.
82
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/changelog.html>`_.
@@ -20,12 +20,12 @@
20
20
  # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
21
21
  # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
22
22
  [build-system]
23
- requires = ["flit_core==3.10.1"]
23
+ requires = ["flit_core==3.11.0"]
24
24
  build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-common-sql"
28
- version = "1.23.0"
28
+ version = "1.24.0"
29
29
  description = "Provider package apache-airflow-providers-common-sql for Apache Airflow"
30
30
  readme = "README.rst"
31
31
  authors = [
@@ -52,12 +52,17 @@ classifiers = [
52
52
  ]
53
53
  requires-python = "~=3.9"
54
54
 
55
- # The dependencies should be modified in place in the generated file
55
+ # The dependencies should be modified in place in the generated file.
56
56
  # Any change in the dependencies is preserved when the file is regenerated
57
+ # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
58
+ # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
57
59
  dependencies = [
58
60
  "apache-airflow>=2.9.0",
59
61
  "sqlparse>=0.5.1",
60
62
  "more-itertools>=9.0.0",
63
+ # The methodtools dependency is necessary since the introduction of dialects:
64
+ # https://github.com/apache/airflow/pull/41327/files
65
+ "methodtools>=0.4.7"
61
66
  ]
62
67
 
63
68
  # The optional dependencies should be modified in place in the generated file
@@ -74,9 +79,28 @@ dependencies = [
74
79
  "apache-airflow-providers-openlineage"
75
80
  ]
76
81
 
82
+ [dependency-groups]
83
+ dev = [
84
+ "apache-airflow",
85
+ "apache-airflow-task-sdk",
86
+ "apache-airflow-devel-common",
87
+ "apache-airflow-providers-openlineage",
88
+ # Additional devel dependencies (do not remove this line and add extra development dependencies)
89
+ ]
90
+
91
+ [tool.uv.sources]
92
+ # These names must match the names as defined in the pyproject.toml of the workspace items,
93
+ # *not* the workspace folder paths
94
+ apache-airflow = {workspace = true}
95
+ apache-airflow-devel-common = {workspace = true}
96
+ apache-airflow-task-sdk = {workspace = true}
97
+ apache-airflow-providers-common-sql = {workspace = true}
98
+ apache-airflow-providers-fab = {workspace = true}
99
+ apache-airflow-providers-standard = {workspace = true}
100
+
77
101
  [project.urls]
78
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0"
79
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html"
102
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0"
103
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/changelog.html"
80
104
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
81
105
  "Source Code" = "https://github.com/apache/airflow"
82
106
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -88,6 +112,3 @@ provider_info = "airflow.providers.common.sql.get_provider_info:get_provider_inf
88
112
 
89
113
  [tool.flit.module]
90
114
  name = "airflow.providers.common.sql"
91
-
92
- [tool.pytest.ini_options]
93
- ignore = "tests/system/"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "1.23.0"
32
+ __version__ = "1.24.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.9.0"
@@ -27,8 +27,9 @@ def get_provider_info():
27
27
  "name": "Common SQL",
28
28
  "description": "`Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__\n",
29
29
  "state": "ready",
30
- "source-date-epoch": 1739959337,
30
+ "source-date-epoch": 1741508670,
31
31
  "versions": [
32
+ "1.24.0",
32
33
  "1.23.0",
33
34
  "1.21.0",
34
35
  "1.20.0",
@@ -81,7 +82,10 @@ def get_provider_info():
81
82
  "operators": [
82
83
  {
83
84
  "integration-name": "Common SQL",
84
- "python-modules": ["airflow.providers.common.sql.operators.sql"],
85
+ "python-modules": [
86
+ "airflow.providers.common.sql.operators.sql",
87
+ "airflow.providers.common.sql.operators.generic_transfer",
88
+ ],
85
89
  }
86
90
  ],
87
91
  "dialects": [
@@ -99,12 +103,24 @@ def get_provider_info():
99
103
  ],
100
104
  }
101
105
  ],
106
+ "triggers": [
107
+ {
108
+ "integration-name": "Common SQL",
109
+ "python-modules": ["airflow.providers.common.sql.triggers.sql"],
110
+ }
111
+ ],
102
112
  "sensors": [
103
113
  {"integration-name": "Common SQL", "python-modules": ["airflow.providers.common.sql.sensors.sql"]}
104
114
  ],
105
- "dependencies": ["apache-airflow>=2.9.0", "sqlparse>=0.5.1", "more-itertools>=9.0.0"],
115
+ "dependencies": [
116
+ "apache-airflow>=2.9.0",
117
+ "sqlparse>=0.5.1",
118
+ "more-itertools>=9.0.0",
119
+ "methodtools>=0.4.7",
120
+ ],
106
121
  "optional-dependencies": {
107
122
  "pandas": ["pandas>=2.1.2,<2.2"],
108
123
  "openlineage": ["apache-airflow-providers-openlineage"],
109
124
  },
125
+ "devel-dependencies": [],
110
126
  }
@@ -0,0 +1,219 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+ from __future__ import annotations
19
+
20
+ from collections.abc import Sequence
21
+ from functools import cached_property
22
+ from typing import TYPE_CHECKING, Any
23
+
24
+ from airflow.exceptions import AirflowException
25
+ from airflow.hooks.base import BaseHook
26
+ from airflow.models import BaseOperator
27
+ from airflow.providers.common.sql.hooks.sql import DbApiHook
28
+ from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
29
+
30
+ if TYPE_CHECKING:
31
+ import jinja2
32
+
33
+ try:
34
+ from airflow.sdk.definitions.context import Context
35
+ except ImportError:
36
+ # TODO: Remove once provider drops support for Airflow 2
37
+ from airflow.utils.context import Context
38
+
39
+
40
+ class GenericTransfer(BaseOperator):
41
+ """
42
+ Moves data from a connection to another.
43
+
44
+ Assuming that they both provide the required methods in their respective hooks.
45
+ The source hook needs to expose a `get_records` method, and the destination a
46
+ `insert_rows` method.
47
+
48
+ This is meant to be used on small-ish datasets that fit in memory.
49
+
50
+ :param sql: SQL query to execute against the source database. (templated)
51
+ :param destination_table: target table. (templated)
52
+ :param source_conn_id: source connection. (templated)
53
+ :param source_hook_params: source hook parameters.
54
+ :param destination_conn_id: destination connection. (templated)
55
+ :param destination_hook_params: destination hook parameters.
56
+ :param preoperator: sql statement or list of statements to be
57
+ executed prior to loading the data. (templated)
58
+ :param insert_args: extra params for `insert_rows` method.
59
+ :param page_size: number of records to be read in paginated mode (optional).
60
+ """
61
+
62
+ template_fields: Sequence[str] = (
63
+ "source_conn_id",
64
+ "destination_conn_id",
65
+ "sql",
66
+ "destination_table",
67
+ "preoperator",
68
+ "insert_args",
69
+ )
70
+ template_ext: Sequence[str] = (
71
+ ".sql",
72
+ ".hql",
73
+ )
74
+ template_fields_renderers = {"preoperator": "sql"}
75
+ ui_color = "#b0f07c"
76
+
77
+ def __init__(
78
+ self,
79
+ *,
80
+ sql: str,
81
+ destination_table: str,
82
+ source_conn_id: str,
83
+ source_hook_params: dict | None = None,
84
+ destination_conn_id: str,
85
+ destination_hook_params: dict | None = None,
86
+ preoperator: str | list[str] | None = None,
87
+ insert_args: dict | None = None,
88
+ page_size: int | None = None,
89
+ **kwargs,
90
+ ) -> None:
91
+ super().__init__(**kwargs)
92
+ self.sql = sql
93
+ self.destination_table = destination_table
94
+ self.source_conn_id = source_conn_id
95
+ self.source_hook_params = source_hook_params
96
+ self.destination_conn_id = destination_conn_id
97
+ self.destination_hook_params = destination_hook_params
98
+ self.preoperator = preoperator
99
+ self.insert_args = insert_args or {}
100
+ self.page_size = page_size
101
+ self._paginated_sql_statement_format = kwargs.get(
102
+ "paginated_sql_statement_format", "{} LIMIT {} OFFSET {}"
103
+ )
104
+
105
+ @classmethod
106
+ def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> DbApiHook:
107
+ """
108
+ Return DbApiHook for this connection id.
109
+
110
+ :param conn_id: connection id
111
+ :param hook_params: hook parameters
112
+ :return: DbApiHook for this connection
113
+ """
114
+ connection = BaseHook.get_connection(conn_id)
115
+ hook = connection.get_hook(hook_params=hook_params)
116
+ if not isinstance(hook, DbApiHook):
117
+ raise RuntimeError(f"Hook for connection {conn_id!r} must be of type {DbApiHook.__name__}")
118
+ return hook
119
+
120
+ @cached_property
121
+ def source_hook(self) -> DbApiHook:
122
+ return self.get_hook(conn_id=self.source_conn_id, hook_params=self.source_hook_params)
123
+
124
+ @cached_property
125
+ def destination_hook(self) -> DbApiHook:
126
+ return self.get_hook(conn_id=self.destination_conn_id, hook_params=self.destination_hook_params)
127
+
128
+ def get_paginated_sql(self, offset: int) -> str:
129
+ """Format the paginated SQL statement using the current format."""
130
+ return self._paginated_sql_statement_format.format(self.sql, self.page_size, offset)
131
+
132
+ def render_template_fields(
133
+ self,
134
+ context: Context,
135
+ jinja_env: jinja2.Environment | None = None,
136
+ ) -> None:
137
+ super().render_template_fields(context=context, jinja_env=jinja_env)
138
+
139
+ # Make sure string are converted to integers
140
+ if isinstance(self.page_size, str):
141
+ self.page_size = int(self.page_size)
142
+ commit_every = self.insert_args.get("commit_every")
143
+ if isinstance(commit_every, str):
144
+ self.insert_args["commit_every"] = int(commit_every)
145
+
146
+ def execute(self, context: Context):
147
+ if self.preoperator:
148
+ self.log.info("Running preoperator")
149
+ self.log.info(self.preoperator)
150
+ self.destination_hook.run(self.preoperator)
151
+
152
+ if self.page_size and isinstance(self.sql, str):
153
+ self.defer(
154
+ trigger=SQLExecuteQueryTrigger(
155
+ conn_id=self.source_conn_id,
156
+ hook_params=self.source_hook_params,
157
+ sql=self.get_paginated_sql(0),
158
+ ),
159
+ method_name=self.execute_complete.__name__,
160
+ )
161
+ else:
162
+ self.log.info("Extracting data from %s", self.source_conn_id)
163
+ self.log.info("Executing: \n %s", self.sql)
164
+
165
+ results = self.destination_hook.get_records(self.sql)
166
+
167
+ self.log.info("Inserting rows into %s", self.destination_conn_id)
168
+ self.destination_hook.insert_rows(table=self.destination_table, rows=results, **self.insert_args)
169
+
170
+ def execute_complete(
171
+ self,
172
+ context: Context,
173
+ event: dict[Any, Any] | None = None,
174
+ ) -> Any:
175
+ if event:
176
+ if event.get("status") == "failure":
177
+ raise AirflowException(event.get("message"))
178
+
179
+ results = event.get("results")
180
+
181
+ if results:
182
+ map_index = context["ti"].map_index
183
+ offset = (
184
+ context["ti"].xcom_pull(
185
+ key="offset",
186
+ task_ids=self.task_id,
187
+ dag_id=self.dag_id,
188
+ map_indexes=map_index,
189
+ default=0,
190
+ )
191
+ + self.page_size
192
+ )
193
+
194
+ self.log.info("Offset increased to %d", offset)
195
+ self.xcom_push(context=context, key="offset", value=offset)
196
+
197
+ self.log.info("Inserting %d rows into %s", len(results), self.destination_conn_id)
198
+ self.destination_hook.insert_rows(
199
+ table=self.destination_table, rows=results, **self.insert_args
200
+ )
201
+ self.log.info(
202
+ "Inserting %d rows into %s done!",
203
+ len(results),
204
+ self.destination_conn_id,
205
+ )
206
+
207
+ self.defer(
208
+ trigger=SQLExecuteQueryTrigger(
209
+ conn_id=self.source_conn_id,
210
+ hook_params=self.source_hook_params,
211
+ sql=self.get_paginated_sql(offset),
212
+ ),
213
+ method_name=self.execute_complete.__name__,
214
+ )
215
+ else:
216
+ self.log.info(
217
+ "No more rows to fetch into %s; ending transfer.",
218
+ self.destination_table,
219
+ )
@@ -0,0 +1,85 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ #
18
+ # This is automatically generated stub for the `common.sql` provider
19
+ #
20
+ # This file is generated automatically by the `update-common-sql-api stubs` pre-commit
21
+ # and the .pyi file represents part of the "public" API that the
22
+ # `common.sql` provider exposes to other providers.
23
+ #
24
+ # Any, potentially breaking change in the stubs will require deliberate manual action from the contributor
25
+ # making a change to the `common.sql` provider. Those stubs are also used by MyPy automatically when checking
26
+ # if only public API of the common.sql provider is used by all the other providers.
27
+ #
28
+ # You can read more in the README_API.md file
29
+ #
30
+ """
31
+ Definition of the public interface for airflow.providers.common.sql.operators.generic_transfer
32
+ isort:skip_file
33
+ """
34
+
35
+ from collections.abc import Sequence
36
+ from functools import cached_property as cached_property
37
+ from typing import Any, ClassVar
38
+
39
+ import jinja2
40
+ from _typeshed import Incomplete as Incomplete
41
+
42
+ from airflow.models import BaseOperator
43
+ from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
44
+ from airflow.utils.context import Context as Context
45
+
46
# Auto-generated public-API stub for
# airflow.providers.common.sql.operators.generic_transfer.GenericTransfer;
# signatures here must stay in sync with the runtime class.
class GenericTransfer(BaseOperator):
    template_fields: Sequence[str]
    template_ext: Sequence[str]
    template_fields_renderers: ClassVar[dict]
    ui_color: str
    sql: Incomplete
    destination_table: Incomplete
    source_conn_id: Incomplete
    source_hook_params: Incomplete
    destination_conn_id: Incomplete
    destination_hook_params: Incomplete
    preoperator: Incomplete
    insert_args: Incomplete
    page_size: Incomplete
    def __init__(
        self,
        *,
        sql: str,
        destination_table: str,
        source_conn_id: str,
        source_hook_params: dict | None = None,
        destination_conn_id: str,
        destination_hook_params: dict | None = None,
        preoperator: str | list[str] | None = None,
        insert_args: dict | None = None,
        page_size: int | None = None,
        **kwargs,
    ) -> None: ...
    @classmethod
    def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> DbApiHook: ...
    @cached_property
    def source_hook(self) -> DbApiHook: ...
    @cached_property
    def destination_hook(self) -> DbApiHook: ...
    def get_paginated_sql(self, offset: int) -> str: ...
    def render_template_fields(
        self, context: Context, jinja_env: jinja2.Environment | None = None
    ) -> None: ...
    def execute(self, context: Context): ...
    def execute_complete(self, context: Context, event: dict[Any, Any] | None = None) -> Any: ...
@@ -26,7 +26,8 @@ from typing import TYPE_CHECKING, Any, Callable, ClassVar, NoReturn, SupportsAbs
26
26
  from airflow.exceptions import AirflowException, AirflowFailException
27
27
  from airflow.hooks.base import BaseHook
28
28
  from airflow.models import BaseOperator, SkipMixin
29
- from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, return_single_query_results
29
+ from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
30
+ from airflow.providers.common.sql.hooks.sql import DbApiHook
30
31
  from airflow.utils.helpers import merge_dicts
31
32
 
32
33
  if TYPE_CHECKING:
@@ -0,0 +1,16 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
@@ -0,0 +1,87 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+ from __future__ import annotations
19
+
20
+ from typing import TYPE_CHECKING
21
+
22
+ from airflow.exceptions import AirflowException
23
+ from airflow.hooks.base import BaseHook
24
+ from airflow.providers.common.sql.hooks.sql import DbApiHook
25
+ from airflow.triggers.base import BaseTrigger, TriggerEvent
26
+
27
+ if TYPE_CHECKING:
28
+ from collections.abc import AsyncIterator
29
+ from typing import Any
30
+
31
+
32
class SQLExecuteQueryTrigger(BaseTrigger):
    """
    A trigger that executes SQL code in async mode.

    :param sql: the sql statement to be executed (str) or a list of sql statements to execute
    :param conn_id: the connection ID used to connect to the database
    :param hook_params: hook parameters
    """

    def __init__(
        self,
        sql: str | list[str],
        conn_id: str,
        hook_params: dict | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.sql = sql
        self.conn_id = conn_id
        self.hook_params = hook_params

    def serialize(self) -> tuple[str, dict[str, Any]]:
        """Serialize the SQLExecuteQueryTrigger arguments and classpath."""
        return (
            f"{self.__class__.__module__}.{self.__class__.__name__}",
            {
                "sql": self.sql,
                "conn_id": self.conn_id,
                "hook_params": self.hook_params,
            },
        )

    async def run(self) -> AsyncIterator[TriggerEvent]:
        """
        Run the query via the connection's hook and yield exactly one TriggerEvent.

        Yields ``{"status": "success", "results": ...}`` on success, or
        ``{"status": "failure", "message": ...}`` on any exception.
        """
        try:
            hook = BaseHook.get_hook(self.conn_id, hook_params=self.hook_params)

            if not isinstance(hook, DbApiHook):
                # BUG FIX: the original message told users the hook should be a
                # subclass of the rejected hook's *own* class, which is circular;
                # it must name DbApiHook, the type checked by isinstance above.
                raise AirflowException(
                    f"You are trying to use `common-sql` with {hook.__class__.__name__},"
                    " but its provider does not support it. Please upgrade the provider to a version that"
                    " supports `common-sql`. The hook class should be a subclass of"
                    f" `{DbApiHook.__module__}.{DbApiHook.__name__}`."
                    f" Got {hook.__class__.__name__} hook with class hierarchy: {hook.__class__.mro()}"
                )

            self.log.info("Extracting data from %s", self.conn_id)
            self.log.info("Executing: \n %s", self.sql)

            # NOTE(review): get_records is a blocking DB call running on the
            # triggerer's event loop; consider offloading to a thread for
            # long-running queries — confirm whether this matters in practice.
            self.log.info("Reading records from %s", self.conn_id)
            results = hook.get_records(self.sql)
            self.log.info("Reading records from %s done!", self.conn_id)

            self.log.debug("results: %s", results)
            yield TriggerEvent({"status": "success", "results": results})
        except Exception as e:
            self.log.exception("An error occurred: %s", e)
            yield TriggerEvent({"status": "failure", "message": str(e)})
@@ -0,0 +1,45 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ #
18
+ # This is automatically generated stub for the `common.sql` provider
19
+ #
20
+ # This file is generated automatically by the `update-common-sql-api stubs` pre-commit
21
+ # and the .pyi file represents part of the "public" API that the
22
+ # `common.sql` provider exposes to other providers.
23
+ #
24
+ # Any, potentially breaking change in the stubs will require deliberate manual action from the contributor
25
+ # making a change to the `common.sql` provider. Those stubs are also used by MyPy automatically when checking
26
+ # if only public API of the common.sql provider is used by all the other providers.
27
+ #
28
+ # You can read more in the README_API.md file
29
+ #
30
+ """
31
+ Definition of the public interface for airflow.providers.common.sql.triggers.sql
32
+ isort:skip_file
33
+ """
34
+
35
+ from collections.abc import AsyncIterator
36
+ from typing import Any
37
+
38
+ from airflow.triggers.base import BaseTrigger as BaseTrigger, TriggerEvent as TriggerEvent
39
+
40
+ # Public API stub for the trigger that executes SQL asynchronously; generated by
+ # the `update-common-sql-api stubs` pre-commit (see banner above in this file).
+ class SQLExecuteQueryTrigger(BaseTrigger):
41
+ def __init__(
42
+ self, sql: str | list[str], conn_id: str, hook_params: dict | None = None, **kwargs
43
+ ) -> None: ...
44
+ def serialize(self) -> tuple[str, dict[str, Any]]: ...
45
+ async def run(self) -> AsyncIterator[TriggerEvent]: ... # type: ignore