apache-airflow-providers-common-sql 1.20.0rc1__py3-none-any.whl → 1.21.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apache-airflow-providers-common-sql might be problematic.
- airflow/providers/common/sql/__init__.py +3 -3
- airflow/providers/common/sql/get_provider_info.py +10 -3
- airflow/providers/common/sql/hooks/handlers.py +75 -0
- airflow/providers/common/sql/hooks/handlers.pyi +38 -0
- airflow/providers/common/sql/hooks/sql.py +20 -62
- airflow/providers/common/sql/operators/sql.py +27 -4
- airflow/providers/common/sql/operators/sql.pyi +7 -0
- airflow/providers/common/sql/sensors/sql.py +2 -1
- {apache_airflow_providers_common_sql-1.20.0rc1.dist-info → apache_airflow_providers_common_sql-1.21.0.dist-info}/METADATA +9 -9
- apache_airflow_providers_common_sql-1.21.0.dist-info/RECORD +21 -0
- apache_airflow_providers_common_sql-1.20.0rc1.dist-info/RECORD +0 -19
- {apache_airflow_providers_common_sql-1.20.0rc1.dist-info → apache_airflow_providers_common_sql-1.21.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_common_sql-1.20.0rc1.dist-info → apache_airflow_providers_common_sql-1.21.0.dist-info}/entry_points.txt +0 -0
airflow/providers/common/sql/__init__.py

```diff
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.
+__version__ = "1.21.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.9.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-common-sql:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-common-sql:{__version__}` needs Apache Airflow 2.9.0+"
     )
```
airflow/providers/common/sql/get_provider_info.py

```diff
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Common SQL",
         "description": "`Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1734529664,
         "versions": [
+            "1.21.0",
             "1.20.0",
             "1.19.0",
             "1.18.0",
@@ -68,7 +69,7 @@ def get_provider_info():
             "1.1.0",
             "1.0.0",
         ],
-        "dependencies": ["apache-airflow>=2.
+        "dependencies": ["apache-airflow>=2.9.0", "sqlparse>=0.5.1", "more-itertools>=9.0.0"],
         "additional-extras": [
             {
                 "name": "pandas",
@@ -94,7 +95,13 @@ def get_provider_info():
             }
         ],
         "hooks": [
-            {
+            {
+                "integration-name": "Common SQL",
+                "python-modules": [
+                    "airflow.providers.common.sql.hooks.handlers",
+                    "airflow.providers.common.sql.hooks.sql",
+                ],
+            }
         ],
         "sensors": [
             {"integration-name": "Common SQL", "python-modules": ["airflow.providers.common.sql.sensors.sql"]}
```
airflow/providers/common/sql/hooks/handlers.py (new file)

```diff
@@ -0,0 +1,75 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from collections.abc import Iterable
+
+
+def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool | None):
+    """
+    Determine when results of single query only should be returned.
+
+    For compatibility reasons, the behaviour of the DBAPIHook is somewhat confusing.
+    In some cases, when multiple queries are run, the return value will be an iterable (list) of results
+    -- one for each query. However, in other cases, when single query is run, the return value will be just
+    the result of that single query without wrapping the results in a list.
+
+    The cases when single query results are returned without wrapping them in a list are as follows:
+
+    a) sql is string and ``return_last`` is True (regardless what ``split_statements`` value is)
+    b) sql is string and ``split_statements`` is False
+
+    In all other cases, the results are wrapped in a list, even if there is only one statement to process.
+    In particular, the return value will be a list of query results in the following circumstances:
+
+    a) when ``sql`` is an iterable of string statements (regardless what ``return_last`` value is)
+    b) when ``sql`` is string, ``split_statements`` is True and ``return_last`` is False
+
+    :param sql: sql to run (either string or list of strings)
+    :param return_last: whether last statement output should only be returned
+    :param split_statements: whether to split string statements.
+    :return: True if the hook should return single query results
+    """
+    if split_statements is not None:
+        return isinstance(sql, str) and (return_last or not split_statements)
+    return isinstance(sql, str) and return_last
+
+
+def fetch_all_handler(cursor) -> list[tuple] | None:
+    """Return results for DbApiHook.run()."""
+    if not hasattr(cursor, "description"):
+        raise RuntimeError(
+            "The database we interact with does not support DBAPI 2.0. Use operator and "
+            "handlers that are specifically designed for your database."
+        )
+    if cursor.description is not None:
+        return cursor.fetchall()
+    else:
+        return None
+
+
+def fetch_one_handler(cursor) -> list[tuple] | None:
+    """Return first result for DbApiHook.run()."""
+    if not hasattr(cursor, "description"):
+        raise RuntimeError(
+            "The database we interact with does not support DBAPI 2.0. Use operator and "
+            "handlers that are specifically designed for your database."
+        )
+    if cursor.description is not None:
+        return cursor.fetchone()
+    else:
+        return None
```
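The new `handlers` module above is the canonical home for these helpers going forward. As a quick orientation, here is a minimal sketch of how their semantics line up, exercised against Python's built-in `sqlite3`; the in-memory database and the sample queries are illustrative assumptions, not part of the provider:

```python
import sqlite3

from airflow.providers.common.sql.hooks.handlers import (
    fetch_all_handler,
    fetch_one_handler,
    return_single_query_results,
)

# A single SQL string with return_last=True yields an unwrapped result ...
assert return_single_query_results("SELECT 1", return_last=True, split_statements=None)
# ... while an iterable of statements always produces a list of results.
assert not return_single_query_results(["SELECT 1", "SELECT 2"], return_last=True, split_statements=None)

# The fetch handlers accept any DBAPI 2.0 cursor; sqlite3 is used here only for illustration.
conn = sqlite3.connect(":memory:")
cur = conn.execute("SELECT 1 UNION ALL SELECT 2")
print(fetch_all_handler(cur))  # [(1,), (2,)]

cur = conn.execute("SELECT 1 UNION ALL SELECT 2")
print(fetch_one_handler(cur))  # (1,)
```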
airflow/providers/common/sql/hooks/handlers.pyi (new file)

```diff
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# This is automatically generated stub for the `common.sql` provider
+#
+# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# and the .pyi file represents part of the "public" API that the
+# `common.sql` provider exposes to other providers.
+#
+# Any, potentially breaking change in the stubs will require deliberate manual action from the contributor
+# making a change to the `common.sql` provider. Those stubs are also used by MyPy automatically when checking
+# if only public API of the common.sql provider is used by all the other providers.
+#
+# You can read more in the README_API.md file
+#
+"""
+Definition of the public interface for airflow.providers.common.sql.hooks.handlers
+isort:skip_file
+"""
+from typing import Iterable
+
+def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool): ...
+def fetch_all_handler(cursor) -> list[tuple] | None: ...
+def fetch_one_handler(cursor) -> list[tuple] | None: ...
```
airflow/providers/common/sql/hooks/sql.py

```diff
@@ -18,6 +18,7 @@ from __future__ import annotations
 
 import contextlib
 import warnings
+from collections.abc import Generator, Iterable, Mapping, Sequence
 from contextlib import closing, contextmanager
 from datetime import datetime
 from functools import cached_property
@@ -25,12 +26,7 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
-    Generator,
-    Iterable,
-    List,
-    Mapping,
     Protocol,
-    Sequence,
     TypeVar,
     cast,
     overload,
@@ -45,7 +41,6 @@ from sqlalchemy.engine import Inspector
 from airflow.exceptions import (
     AirflowException,
     AirflowOptionalProviderFeatureException,
-    AirflowProviderDeprecationWarning,
 )
 from airflow.hooks.base import BaseHook
 
@@ -60,60 +55,32 @@ if TYPE_CHECKING:
 
 T = TypeVar("T")
 SQL_PLACEHOLDERS = frozenset({"%s", "?"})
+WARNING_MESSAGE = """Import of {} from the 'airflow.providers.common.sql.hooks' module is deprecated and will
+be removed in the future. Please import it from 'airflow.providers.common.sql.hooks.handlers'."""
 
 
-def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool):
-    ""
-    Determine when results of single query only should be returned.
-
-    In some cases, when multiple queries are run, the return value will be an iterable (list) of results
-    -- one for each query. However, in other cases, when single query is run, the return value will be just
-    the result of that single query without wrapping the results in a list.
-
-    a) sql is string and ``return_last`` is True (regardless what ``split_statements`` value is)
-    b) sql is string and ``split_statements`` is False
-
-    b) when ``sql`` is string, ``split_statements`` is True and ``return_last`` is False
-
-    :param return_last: whether last statement output should only be returned
-    :param split_statements: whether to split string statements.
-    :return: True if the hook should return single query results
-    """
-    return isinstance(sql, str) and (return_last or not split_statements)
+def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool | None):
+    warnings.warn(WARNING_MESSAGE.format("return_single_query_results"), DeprecationWarning, stacklevel=2)
+
+    from airflow.providers.common.sql.hooks import handlers
+
+    return handlers.return_single_query_results(sql, return_last, split_statements)
 
 
-def
-    ""
-    if not hasattr(cursor, "description"):
-        raise RuntimeError(
-            "The database we interact with does not support DBAPI 2.0. Use operator and "
-            "handlers that are specifically designed for your database."
-        )
-    if cursor.description is not None:
-        return cursor.fetchall()
-    else:
-        return None
+def fetch_all_handler(cursor) -> list[tuple] | None:
+    warnings.warn(WARNING_MESSAGE.format("fetch_all_handler"), DeprecationWarning, stacklevel=2)
+
+    from airflow.providers.common.sql.hooks import handlers
+
+    return handlers.fetch_all_handler(cursor)
 
 
-    """Return first result for DbApiHook.run()."""
-    if not hasattr(cursor, "description"):
-        raise RuntimeError(
-            "The database we interact with does not support DBAPI 2.0. Use operator and "
-            "handlers that are specifically designed for your database."
-        )
-    if cursor.description is not None:
-        return cursor.fetchone()
-    else:
-        return None
+def fetch_one_handler(cursor) -> list[tuple] | None:
+    warnings.warn(WARNING_MESSAGE.format("fetch_one_handler"), DeprecationWarning, stacklevel=2)
+
+    from airflow.providers.common.sql.hooks import handlers
+
+    return handlers.fetch_one_handler(cursor)
 
 
 class ConnectorProtocol(Protocol):
@@ -193,6 +160,7 @@ class DbApiHook(BaseHook):
 
     @cached_property
     def placeholder(self) -> str:
+        """Return SQL placeholder."""
        placeholder = self.connection_extra.get("placeholder")
        if placeholder:
            if placeholder in SQL_PLACEHOLDERS:
@@ -250,8 +218,9 @@ class DbApiHook(BaseHook):
 
         :return: the extracted uri.
         """
-        conn = self.
-
+        conn = self.connection
+        if self.__schema:
+            conn.schema = self.__schema
         return conn.get_uri()
 
     @property
@@ -532,19 +501,8 @@ class DbApiHook(BaseHook):
         If this method is not overridden, the result data is returned as-is. If the output of the cursor
         is already a common data structure, this method should be ignored.
         """
-        # Back-compatibility call for providers implementing old ´_make_serializable' method.
-        with contextlib.suppress(AttributeError):
-            result = self._make_serializable(result=result)  # type: ignore[attr-defined]
-            warnings.warn(
-                "The `_make_serializable` method is deprecated and support will be removed in a future "
-                f"version of the common.sql provider. Please update the {self.__class__.__name__}'s provider "
-                "to a version based on common.sql >= 1.9.1.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
-
         if isinstance(result, Sequence):
-            return cast(
+            return cast(list[tuple], result)
         return cast(tuple, result)
 
     def _run_command(self, cur, sql_statement, parameters):
```
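The wrappers retained in `hooks/sql.py` mean existing imports keep working in 1.21.0 but emit a `DeprecationWarning` when called. A minimal sketch of the migration this nudges callers toward, assuming 1.21.0 is installed (the `catch_warnings` harness is only for demonstration):

```python
import warnings

from airflow.providers.common.sql.hooks import sql as deprecated_location

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Calling through the old location still works, but warns at call time.
    deprecated_location.return_single_query_results("SELECT 1", return_last=True, split_statements=None)

assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# The new canonical import path does not warn:
from airflow.providers.common.sql.hooks.handlers import return_single_query_results

return_single_query_results("SELECT 1", return_last=True, split_statements=None)
```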
airflow/providers/common/sql/operators/sql.py

```diff
@@ -19,8 +19,9 @@ from __future__ import annotations
 
 import ast
 import re
+from collections.abc import Iterable, Mapping, Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable, ClassVar,
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, NoReturn, SupportsAbs
 
 from airflow.exceptions import AirflowException, AirflowFailException
 from airflow.hooks.base import BaseHook
@@ -115,6 +116,10 @@ _MIN_SUPPORTED_PROVIDERS_VERSION = {
 }
 
 
+def default_output_processor(results: list[Any], descriptions: list[Sequence[Sequence] | None]) -> list[Any]:
+    return results
+
+
 class BaseSQLOperator(BaseOperator):
     """
     This is a base class for generic SQL Operator to get a DB Hook.
@@ -209,6 +214,8 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
     :param autocommit: (optional) if True, each command is automatically committed (default: False).
     :param parameters: (optional) the parameters to render the SQL query with.
     :param handler: (optional) the function that will be applied to the cursor (default: fetch_all_handler).
+    :param output_processor: (optional) the function that will be applied to the result
+        (default: default_output_processor).
     :param split_statements: (optional) if split single SQL string into statements. By default, defers
         to the default value in the ``run`` method of the configured hook.
     :param conn_id: the connection ID used to connect to the database
@@ -234,6 +241,13 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
         autocommit: bool = False,
         parameters: Mapping | Iterable | None = None,
         handler: Callable[[Any], list[tuple] | None] = fetch_all_handler,
+        output_processor: (
+            Callable[
+                [list[Any], list[Sequence[Sequence] | None]],
+                list[Any] | tuple[list[Sequence[Sequence] | None], list],
+            ]
+            | None
+        ) = None,
         conn_id: str | None = None,
         database: str | None = None,
         split_statements: bool | None = None,
@@ -246,11 +260,14 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
         self.autocommit = autocommit
         self.parameters = parameters
         self.handler = handler
+        self._output_processor = output_processor or default_output_processor
         self.split_statements = split_statements
         self.return_last = return_last
         self.show_return_value_in_logs = show_return_value_in_logs
 
-    def _process_output(
+    def _process_output(
+        self, results: list[Any], descriptions: list[Sequence[Sequence] | None]
+    ) -> list[Any] | tuple[list[Sequence[Sequence] | None], list]:
         """
         Process output before it is returned by the operator.
 
@@ -269,7 +286,7 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
         """
         if self.show_return_value_in_logs:
             self.log.info("Operator output is: %s", results)
-        return results
+        return self._output_processor(results, descriptions)
 
     def _should_run_output_processing(self) -> bool:
         return self.do_xcom_push
@@ -296,7 +313,9 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
             # single query results are going to be returned, and we return the first element
             # of the list in this case from the (always) list returned by _process_output
             return self._process_output([output], hook.descriptions)[-1]
-
+        result = self._process_output(output, hook.descriptions)
+        self.log.info("result: %s", result)
+        return result
 
     def prepare_template(self) -> None:
         """Parse template file for attribute parameters."""
@@ -1040,6 +1059,10 @@ class SQLThresholdCheckOperator(BaseSQLOperator):
     :param database: name of database which overwrite the defined one in connection
     :param min_threshold: numerical value or min threshold sql to be executed (templated)
     :param max_threshold: numerical value or max threshold sql to be executed (templated)
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:SQLThresholdCheckOperator`
     """
 
     template_fields: Sequence[str] = (
```
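The new `output_processor` argument lets callers transform the raw `(results, descriptions)` pair before `SQLExecuteQueryOperator` pushes the value to XCom; by default, `default_output_processor` returns the results untouched. A sketch of a custom processor that maps rows to dicts keyed by column name follows; the `rows_as_dicts` helper, the `my_db` connection id, and the `users` table are illustrative placeholders, not part of the provider:

```python
from __future__ import annotations

from collections.abc import Sequence
from typing import Any

from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator


def rows_as_dicts(results: list[Any], descriptions: list[Sequence[Sequence] | None]) -> list[Any]:
    """Pair every statement's rows with the column names from its cursor description."""
    processed = []
    for rows, description in zip(results, descriptions):
        if description is None:
            processed.append(rows)
        else:
            columns = [col[0] for col in description]
            processed.append([dict(zip(columns, row)) for row in rows])
    return processed


# "my_db" and the users table are placeholders for an actual connection and schema.
fetch_users = SQLExecuteQueryOperator(
    task_id="fetch_users",
    conn_id="my_db",
    sql="SELECT id, name FROM users",
    output_processor=rows_as_dicts,
)
```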
airflow/providers/common/sql/operators/sql.pyi

```diff
@@ -78,6 +78,13 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
         autocommit: bool = False,
         parameters: Mapping | Iterable | None = None,
         handler: Callable[[Any], list[tuple] | None] = ...,
+        output_processor: (
+            Callable[
+                [list[Any], list[Sequence[Sequence] | None]],
+                list[Any] | tuple[list[Sequence[Sequence] | None], list],
+            ]
+            | None
+        ) = None,
         conn_id: str | None = None,
         database: str | None = None,
         split_statements: bool | None = None,
```
airflow/providers/common/sql/sensors/sql.py

```diff
@@ -16,8 +16,9 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Mapping, Sequence
 from operator import itemgetter
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
```
{apache_airflow_providers_common_sql-1.20.0rc1.dist-info → apache_airflow_providers_common_sql-1.21.0.dist-info}/METADATA

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-common-sql
-Version: 1.20.0rc1
+Version: 1.21.0
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,18 +20,18 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.
+Requires-Dist: apache-airflow>=2.9.0
 Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: sqlparse>=0.5.1
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas" and (python_version>="3.9")
 Requires-Dist: pandas>=1.5.3,<2.2 ; extra == "pandas" and (python_version<"3.9")
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://
+Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: openlineage
 Provides-Extra: pandas
@@ -80,7 +80,7 @@ Provides-Extra: pandas
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.
+Release: ``1.21.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -93,7 +93,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0/>`_.
 
 Installation
 ------------
@@ -110,7 +110,7 @@ Requirements
 ================== ==================
 PIP package        Version required
 ================== ==================
-``apache-airflow`` ``>=2.
+``apache-airflow`` ``>=2.9.0``
 ``sqlparse``       ``>=0.5.1``
 ``more-itertools`` ``>=9.0.0``
 ================== ==================
@@ -135,4 +135,4 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0/changelog.html>`_.
```
apache_airflow_providers_common_sql-1.21.0.dist-info/RECORD (new file)

```diff
@@ -0,0 +1,21 @@
+airflow/providers/common/sql/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
+airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
+airflow/providers/common/sql/__init__.py,sha256=epCDxgCEfDK8S3SdIHnitpPFOA_Ac4x8u0iDFaxlKdU,1498
+airflow/providers/common/sql/get_provider_info.py,sha256=m1EUPSNe446ycNo82p4O5FGUt4awASnsZxRO7LV4CJM,3584
+airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
+airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
+airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/hooks/handlers.py,sha256=pzZg6gDWQa_bIpoFCs3J_o_07Eb9EpbJIiNCk2e07e4,3413
+airflow/providers/common/sql/hooks/handlers.pyi,sha256=EKYi9Qonia4T06GHm62BJjOF2D7UR3WG5xfD1r7-JGE,1771
+airflow/providers/common/sql/hooks/sql.py,sha256=vIR_HkgWQ0b22Nknu5EKm_v11XM3IPH4ACnSQbqrE_c,30992
+airflow/providers/common/sql/hooks/sql.pyi,sha256=OTtj9D5LE8mMHCZnN4yTStWTn8Iz5XpEsTiFpUgt10w,6010
+airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/operators/sql.py,sha256=Y3yjeYvsPJtz7RCf8rkkispvszbAufIqd8GSrOVqXvo,49124
+airflow/providers/common/sql/operators/sql.pyi,sha256=NQD2Q0S0R4L1rmJWvORij9ej6SOBca04oUQZu5Oo6AU,8443
+airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/sensors/sql.py,sha256=vJqxvDpwmGbq_6kMMqU4YjhHhcbhDhHYjJ2ufhLDmGc,5519
+airflow/providers/common/sql/sensors/sql.pyi,sha256=3agRcNF_FT3_6QnD-sZOI8pV0lBjEalOAIKRINfQ0MI,2467
+apache_airflow_providers_common_sql-1.21.0.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.21.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+apache_airflow_providers_common_sql-1.21.0.dist-info/METADATA,sha256=oBWQ2mt3oEGvB2odVf8EhyD1Z61VZSeTuA-yaCuKoUs,6157
+apache_airflow_providers_common_sql-1.21.0.dist-info/RECORD,,
```
apache_airflow_providers_common_sql-1.20.0rc1.dist-info/RECORD (deleted file)

```diff
@@ -1,19 +0,0 @@
-airflow/providers/common/sql/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
-airflow/providers/common/sql/__init__.py,sha256=Aylh6G8o4C_uXE9dxyD0Bb4KdcsNz1VQrTsfwt9WlPA,1498
-airflow/providers/common/sql/get_provider_info.py,sha256=_sSA2HGwIdVzzerTlWq7iM_DcAVZdgR-Mg-0jGPh-hA,3409
-airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
-airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
-airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/hooks/sql.py,sha256=1FwVmPnhqbhrt3SZjrmUDFMjtM0eE8Q1dWfAWysCql8,33004
-airflow/providers/common/sql/hooks/sql.pyi,sha256=OTtj9D5LE8mMHCZnN4yTStWTn8Iz5XpEsTiFpUgt10w,6010
-airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/operators/sql.py,sha256=SQ-jGNPHCw7qohR20DB0W9l3kmS6KEiFRsdQM3Kf-E8,48193
-airflow/providers/common/sql/operators/sql.pyi,sha256=xCPJN4IQffaPc4-SU1pcLkYHDF4hr1c_L2YhhMpqqqg,8206
-airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/sensors/sql.py,sha256=UYOQficcLJlumJcZwKIBYMcnCe5PkGPiU2ZaYta858Q,5492
-airflow/providers/common/sql/sensors/sql.pyi,sha256=3agRcNF_FT3_6QnD-sZOI8pV0lBjEalOAIKRINfQ0MI,2467
-apache_airflow_providers_common_sql-1.20.0rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
-apache_airflow_providers_common_sql-1.20.0rc1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-apache_airflow_providers_common_sql-1.20.0rc1.dist-info/METADATA,sha256=oduJ07aan8ebAPDKxYCrf6-TjigxBHCAHePPigKz9Wk,6173
-apache_airflow_providers_common_sql-1.20.0rc1.dist-info/RECORD,,
```
The WHEEL and entry_points.txt files are unchanged between the two versions.