apache-airflow-providers-common-sql 1.12.0rc1__py3-none-any.whl → 1.13.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear there.
- airflow/providers/common/sql/__init__.py +3 -3
- airflow/providers/common/sql/get_provider_info.py +3 -2
- airflow/providers/common/sql/hooks/sql.py +50 -22
- airflow/providers/common/sql/hooks/sql.pyi +60 -29
- airflow/providers/common/sql/operators/sql.py +9 -0
- airflow/providers/common/sql/operators/sql.pyi +59 -41
- airflow/providers/common/sql/sensors/sql.pyi +12 -6
- {apache_airflow_providers_common_sql-1.12.0rc1.dist-info → apache_airflow_providers_common_sql-1.13.0.dist-info}/METADATA +8 -8
- apache_airflow_providers_common_sql-1.13.0.dist-info/RECORD +19 -0
- apache_airflow_providers_common_sql-1.12.0rc1.dist-info/RECORD +0 -19
- {apache_airflow_providers_common_sql-1.12.0rc1.dist-info → apache_airflow_providers_common_sql-1.13.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_common_sql-1.12.0rc1.dist-info → apache_airflow_providers_common_sql-1.13.0.dist-info}/entry_points.txt +0 -0

airflow/providers/common/sql/__init__.py

@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "1.12.0"
+__version__ = "1.13.0"
 
 try:
     from airflow import __version__ as airflow_version
@@ -35,8 +35,8 @@ except ImportError:
     from airflow.version import version as airflow_version
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.6.0"
+    "2.7.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-common-sql:{__version__}` needs Apache Airflow 2.6.0+"
+        f"The package `apache-airflow-providers-common-sql:{__version__}` needs Apache Airflow 2.7.0+"
     )
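
A note on the double `packaging.version.parse` in the gate above: taking `base_version` strips any pre-release suffix from the installed Airflow version, so a release candidate of a new-enough Airflow still passes the `>= 2.7.0` check. A minimal standalone illustration (plain `packaging` usage, not part of the diff):

    import packaging.version

    installed = packaging.version.parse("2.7.0rc1")
    floor = packaging.version.parse("2.7.0")

    # PEP 440 orders pre-releases before the final release:
    print(installed < floor)  # True - the rc would fail a naive check
    # base_version drops the "rc1" suffix, so the gate accepts it:
    print(packaging.version.parse(installed.base_version) < floor)  # False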

airflow/providers/common/sql/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Common SQL",
         "description": "`Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__\n",
         "state": "ready",
-        "source-date-epoch": …,
+        "source-date-epoch": 1714476113,
         "versions": [
+            "1.13.0",
             "1.12.0",
             "1.11.1",
             "1.11.0",
@@ -57,7 +58,7 @@ def get_provider_info():
             "1.1.0",
             "1.0.0",
         ],
-        "dependencies": ["apache-airflow>=2.6.0", "sqlparse>=0.4.2", "more-itertools>=9.0.0"],
+        "dependencies": ["apache-airflow>=2.7.0", "sqlparse>=0.4.2", "more-itertools>=9.0.0"],
         "additional-extras": [{"name": "pandas", "dependencies": ["pandas>=1.2.5,<2.2"]}],
         "integrations": [
             {

airflow/providers/common/sql/hooks/sql.py

@@ -18,7 +18,7 @@ from __future__ import annotations
 
 import contextlib
 import warnings
-from contextlib import closing
+from contextlib import closing, contextmanager
 from datetime import datetime
 from typing import (
     TYPE_CHECKING,
@@ -49,6 +49,7 @@ from airflow.hooks.base import BaseHook
 
 if TYPE_CHECKING:
     from pandas import DataFrame
+    from sqlalchemy.engine import URL
 
     from airflow.providers.openlineage.extractors import OperatorLineage
     from airflow.providers.openlineage.sqlparser import DatabaseInfo
@@ -147,6 +148,8 @@ class DbApiHook(BaseHook):
     default_conn_name = "default_conn_id"
     # Override if this db supports autocommit.
    supports_autocommit = False
+    # Override if this db supports executemany.
+    supports_executemany = False
     # Override with the object that exposes the connect method
     connector: ConnectorProtocol | None = None
     # Override with db-specific query to check connection
@@ -207,6 +210,22 @@ class DbApiHook(BaseHook):
             conn.schema = self.__schema or conn.schema
         return conn.get_uri()
 
+    @property
+    def sqlalchemy_url(self) -> URL:
+        """
+        Return a Sqlalchemy.engine.URL object from the connection.
+
+        Needs to be implemented in the provider subclass to return the sqlalchemy.engine.URL object.
+
+        :return: the extracted sqlalchemy.engine.URL object.
+        """
+        qualname = f"{self.__class__.__module__}.{self.__class__.__qualname__}"
+        if qualname != "airflow.providers.common.sql.hooks.sql.DbApiHook":
+            msg = f"{qualname!r} does not implement/support built SQLAlchemy URL."
+        else:
+            msg = "`sqlalchemy_url` property should be implemented in the provider subclass."
+        raise NotImplementedError(msg)
+
     def get_sqlalchemy_engine(self, engine_kwargs=None):
         """
         Get an sqlalchemy_engine object.
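
The base `sqlalchemy_url` property only raises `NotImplementedError`; concrete provider hooks are expected to override it. A minimal sketch of such an override, assuming a hypothetical `MyDbHook` backed by a `postgresql` driver (`URL.create` is standard SQLAlchemy 1.4+ API):

    from sqlalchemy.engine import URL

    from airflow.providers.common.sql.hooks.sql import DbApiHook


    class MyDbHook(DbApiHook):  # hypothetical provider hook, for illustration only
        conn_name_attr = "my_db_conn_id"
        default_conn_name = "my_db_default"

        @property
        def sqlalchemy_url(self) -> URL:
            # Build the URL from the Airflow connection instead of raising.
            conn = self.get_connection(getattr(self, self.conn_name_attr))
            return URL.create(
                drivername="postgresql",
                username=conn.login,
                password=conn.password,
                host=conn.host,
                port=conn.port,
                database=conn.schema,
            )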
@@ -408,10 +427,7 @@ class DbApiHook(BaseHook):
         else:
             raise ValueError("List of SQL statements is empty")
         _last_result = None
-        with closing(self.get_conn()) as conn:
-            if self.supports_autocommit:
-                self.set_autocommit(conn, autocommit)
-
+        with self._create_autocommit_connection(autocommit) as conn:
             with closing(conn.cursor()) as cur:
                 results = []
                 for sql_statement in sql_list:
@@ -528,6 +544,14 @@ class DbApiHook(BaseHook):
 
         return self._replace_statement_format.format(table, target_fields, ",".join(placeholders))
 
+    @contextmanager
+    def _create_autocommit_connection(self, autocommit: bool = False):
+        """Context manager that closes the connection after use and detects if autocommit is supported."""
+        with closing(self.get_conn()) as conn:
+            if self.supports_autocommit:
+                self.set_autocommit(conn, autocommit)
+            yield conn
+
     def insert_rows(
         self,
         table,
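
This helper centralizes the pattern that previously lived inline in `run()`: open a connection, enable autocommit only when the dialect supports it, and guarantee the connection is closed on exit. Roughly the caller-side shape, sketched against the hypothetical `MyDbHook` from above (it would need a concrete `get_conn()` to actually execute):

    from contextlib import closing

    hook = MyDbHook()  # hypothetical hook from the earlier sketch

    # What run() now does internally: set_autocommit() is called only if
    # supports_autocommit is True; closing() runs even on exceptions.
    with hook._create_autocommit_connection(autocommit=True) as conn:
        with closing(conn.cursor()) as cur:
            cur.execute("SELECT 1")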
@@ -550,47 +574,51 @@ class DbApiHook(BaseHook):
         :param commit_every: The maximum number of rows to insert in one
             transaction. Set to 0 to insert all rows in one transaction.
         :param replace: Whether to replace instead of insert
-        :param executemany: …
-            …
+        :param executemany: (Deprecated) If True, all rows are inserted at once in
+            chunks defined by the commit_every parameter. This only works if all rows
+            have same number of column names, but leads to better performance.
         """
-        i = 0
-        with closing(self.get_conn()) as conn:
-            if self.supports_autocommit:
-                self.set_autocommit(conn, autocommit)
+        if executemany:
+            warnings.warn(
+                "executemany parameter is deprecated, override supports_executemany instead.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
 
+        nb_rows = 0
+        with self._create_autocommit_connection() as conn:
             conn.commit()
-
             with closing(conn.cursor()) as cur:
-                if executemany:
+                if self.supports_executemany or executemany:
                     for chunked_rows in chunked(rows, commit_every):
                         values = list(
                             map(
-                                lambda row: …
+                                lambda row: self._serialize_cells(row, conn),
                                 chunked_rows,
                             )
                         )
                         sql = self._generate_insert_sql(table, values[0], target_fields, replace, **kwargs)
                         self.log.debug("Generated sql: %s", sql)
-                        cur.fast_executemany = True
                         cur.executemany(sql, values)
                         conn.commit()
                         self.log.info("Loaded %s rows into %s so far", len(chunked_rows), table)
+                        nb_rows += len(chunked_rows)
                 else:
                     for i, row in enumerate(rows, 1):
-                        lst = []
-                        for cell in row:
-                            lst.append(self._serialize_cell(cell, conn))
-                        values = tuple(lst)
+                        values = self._serialize_cells(row, conn)
                         sql = self._generate_insert_sql(table, values, target_fields, replace, **kwargs)
                         self.log.debug("Generated sql: %s", sql)
                         cur.execute(sql, values)
                         if commit_every and i % commit_every == 0:
                             conn.commit()
                             self.log.info("Loaded %s rows into %s so far", i, table)
+                        nb_rows += 1
+                conn.commit()
+        self.log.info("Done loading. Loaded a total of %s rows into %s", nb_rows, table)
 
-            conn.commit()
-        self.log.info("Done loading. Loaded a total of %s rows into %s", i, table)
-
+    @classmethod
+    def _serialize_cells(cls, row, conn=None):
+        return tuple(cls._serialize_cell(cell, conn) for cell in row)
 
     @staticmethod
     def _serialize_cell(cell, conn=None) -> str | None:
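
Net effect for callers of `insert_rows()`: passing `executemany=True` still batches but now emits a deprecation warning; the supported spelling is the class-level `supports_executemany` flag, which opts the whole dialect in once. A hedged before/after sketch (hook class and table name are hypothetical):

    from airflow.providers.common.sql.hooks.sql import DbApiHook


    class MyBatchingHook(DbApiHook):  # hypothetical subclass, for illustration
        conn_name_attr = "my_db_conn_id"
        supports_executemany = True   # every insert_rows() call now batches


    hook = MyBatchingHook()
    rows = [(1, "a"), (2, "b")]

    hook.insert_rows("my_table", rows)                    # batched via cursor.executemany()
    hook.insert_rows("my_table", rows, executemany=True)  # still works, but warns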

airflow/providers/common/sql/hooks/sql.pyi

@@ -32,57 +32,82 @@ Definition of the public interface for airflow.providers.common.sql.hooks.sql
 isort:skip_file
 """
 from _typeshed import Incomplete
-from airflow.…
-…
-…
+from airflow.exceptions import (
+    AirflowException as AirflowException,
+    AirflowOptionalProviderFeatureException as AirflowOptionalProviderFeatureException,
+    AirflowProviderDeprecationWarning as AirflowProviderDeprecationWarning,
+)
+from airflow.hooks.base import BaseHook as BaseHook
+from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage
+from airflow.providers.openlineage.sqlparser import DatabaseInfo as DatabaseInfo
+from pandas import DataFrame as DataFrame
+from sqlalchemy.engine import URL as URL
+from typing import Any, Callable, Generator, Iterable, Mapping, Protocol, Sequence, TypeVar, overload
 
-…
-…
-…
-def …
-def …
+T = TypeVar("T")
+SQL_PLACEHOLDERS: Incomplete
+
+def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool): ...
+def fetch_all_handler(cursor) -> list[tuple] | None: ...
+def fetch_one_handler(cursor) -> list[tuple] | None: ...
 
 class ConnectorProtocol(Protocol):
     def connect(self, host: str, port: int, username: str, schema: str) -> Any: ...
 
-class DbApiHook(BaseForDbApiHook):
+class DbApiHook(BaseHook):
     conn_name_attr: str
     default_conn_name: str
     supports_autocommit: bool
-    …
-    …
+    supports_executemany: bool
+    connector: ConnectorProtocol | None
     log_sql: Incomplete
     descriptions: Incomplete
-    …
-    …
+    def __init__(self, *args, schema: str | None = None, log_sql: bool = True, **kwargs) -> None: ...
+    @property
+    def placeholder(self): ...
     def get_conn(self): ...
     def get_uri(self) -> str: ...
-    …
-    def …
+    @property
+    def sqlalchemy_url(self) -> URL: ...
+    def get_sqlalchemy_engine(self, engine_kwargs: Incomplete | None = None): ...
+    def get_pandas_df(
+        self, sql, parameters: list | tuple | Mapping[str, Any] | None = None, **kwargs
+    ) -> DataFrame: ...
     def get_pandas_df_by_chunks(
-        self, sql, parameters: …
-    ) -> None: ...
+        self, sql, parameters: list | tuple | Mapping[str, Any] | None = None, *, chunksize: int, **kwargs
+    ) -> Generator[DataFrame, None, None]: ...
     def get_records(
-        self, sql: …
+        self, sql: str | list[str], parameters: Iterable | Mapping[str, Any] | None = None
     ) -> Any: ...
     def get_first(
-        self, sql: …
+        self, sql: str | list[str], parameters: Iterable | Mapping[str, Any] | None = None
     ) -> Any: ...
     @staticmethod
     def strip_sql_string(sql: str) -> str: ...
     @staticmethod
     def split_sql_string(sql: str) -> list[str]: ...
     @property
-    def last_description(self) -> …
+    def last_description(self) -> Sequence[Sequence] | None: ...
+    @overload
+    def run(
+        self,
+        sql: str | Iterable[str],
+        autocommit: bool = ...,
+        parameters: Iterable | Mapping[str, Any] | None = ...,
+        handler: None = ...,
+        split_statements: bool = ...,
+        return_last: bool = ...,
+    ) -> None: ...
+    @overload
     def run(
         self,
-        sql: …
+        sql: str | Iterable[str],
         autocommit: bool = ...,
-        parameters: …
-        handler: …
+        parameters: Iterable | Mapping[str, Any] | None = ...,
+        handler: Callable[[Any], T] = ...,
         split_statements: bool = ...,
         return_last: bool = ...,
-    ) -> …
+    ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None: ...
     def set_autocommit(self, conn, autocommit) -> None: ...
     def get_autocommit(self, conn) -> bool: ...
     def get_cursor(self): ...
@@ -90,13 +115,19 @@ class DbApiHook(BaseForDbApiHook):
         self,
         table,
         rows,
-        target_fields: Incomplete | None = …
-        commit_every: int = …
-        replace: bool = …
+        target_fields: Incomplete | None = None,
+        commit_every: int = 1000,
+        replace: bool = False,
         *,
-        executemany: bool = …
+        executemany: bool = False,
         **kwargs,
-    )…
+    ): ...
     def bulk_dump(self, table, tmp_file) -> None: ...
     def bulk_load(self, table, tmp_file) -> None: ...
     def test_connection(self): ...
+    def get_openlineage_database_info(self, connection) -> DatabaseInfo | None: ...
+    def get_openlineage_database_dialect(self, connection) -> str: ...
+    def get_openlineage_default_schema(self) -> str | None: ...
+    def get_openlineage_database_specific_lineage(self, task_instance) -> OperatorLineage | None: ...
+    @staticmethod
+    def get_openlineage_authority_part(connection, default_port: int | None = None) -> str: ...
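
The new `@overload` pair encodes the runtime contract of `run()` for type checkers: with `handler` omitted (or `None`) nothing is returned; with a handler, the processed results are. A small sketch of both call shapes (`fetch_all_handler` is the real helper from this module; the hook instance is the hypothetical one from earlier):

    from airflow.providers.common.sql.hooks.sql import fetch_all_handler

    hook = MyDbHook()  # hypothetical hook from the earlier sketch

    hook.run("DELETE FROM staging_orders")  # first overload: no handler -> None

    rows = hook.run(                        # second overload: handler -> results
        "SELECT id, total FROM orders",
        handler=fetch_all_handler,
    )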

airflow/providers/common/sql/operators/sql.py

@@ -309,6 +309,14 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
 
         hook = self.get_db_hook()
 
+        try:
+            from airflow.providers.openlineage.utils.utils import should_use_external_connection
+
+            use_external_connection = should_use_external_connection(hook)
+        except ImportError:
+            # OpenLineage provider release < 1.8.0 - we always use connection
+            use_external_connection = True
+
         connection = hook.get_connection(getattr(hook, hook.conn_name_attr))
         try:
             database_info = hook.get_openlineage_database_info(connection)
@@ -334,6 +342,7 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
             database_info=database_info,
             database=self.database,
             sqlalchemy_engine=hook.get_sqlalchemy_engine(),
+            use_connection=use_external_connection,
         )
 
         return operator_lineage
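
The `try`/`except ImportError` keeps lineage extraction compatible with OpenLineage provider releases older than 1.8.0, where `should_use_external_connection` does not exist. The same guard can be written as a module-level shim; a sketch (the fallback body is an assumption that mirrors the comment in the diff):

    try:
        from airflow.providers.openlineage.utils.utils import should_use_external_connection
    except ImportError:
        def should_use_external_connection(hook) -> bool:
            # Fallback for openlineage provider < 1.8.0: keep the old
            # behaviour and always build lineage from the live connection.
            return True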

airflow/providers/common/sql/operators/sql.pyi

@@ -31,16 +31,29 @@
 Definition of the public interface for airflow.providers.common.sql.operators.sql
 isort:skip_file
 """
-from _typeshed import Incomplete
-from airflow.…
-…
-…
-…
+from _typeshed import Incomplete
+from airflow.exceptions import (
+    AirflowException as AirflowException,
+    AirflowFailException as AirflowFailException,
+)
+from airflow.hooks.base import BaseHook as BaseHook
+from airflow.models import BaseOperator as BaseOperator, SkipMixin as SkipMixin
+from airflow.providers.common.sql.hooks.sql import (
+    DbApiHook as DbApiHook,
+    fetch_all_handler as fetch_all_handler,
+    return_single_query_results as return_single_query_results,
+)
+from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage
+from airflow.utils.context import Context as Context
+from airflow.utils.helpers import merge_dicts as merge_dicts
+from functools import cached_property as cached_property
+from typing import Any, Callable, Iterable, Mapping, Sequence, SupportsAbs
 
 def _parse_boolean(val: str) -> str | bool: ...
 def parse_boolean(val: str) -> str | bool: ...
 
 class BaseSQLOperator(BaseOperator):
+    conn_id_field: str
     conn_id: Incomplete
     database: Incomplete
     hook_params: Incomplete
@@ -48,10 +61,10 @@ class BaseSQLOperator(BaseOperator):
     def __init__(
         self,
         *,
-        conn_id: …
-        database: …
-        hook_params: …
-        retry_on_failure: bool = …
+        conn_id: str | None = None,
+        database: str | None = None,
+        hook_params: dict | None = None,
+        retry_on_failure: bool = True,
         **kwargs,
     ) -> None: ...
     def get_db_hook(self) -> DbApiHook: ...
@@ -72,20 +85,24 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
     def __init__(
         self,
         *,
-        sql: …
-        autocommit: bool = …
-        parameters: …
+        sql: str | list[str],
+        autocommit: bool = False,
+        parameters: Mapping | Iterable | None = None,
         handler: Callable[[Any], Any] = ...,
-        …
-        …
-        …
+        conn_id: str | None = None,
+        database: str | None = None,
+        split_statements: bool | None = None,
+        return_last: bool = True,
+        show_return_value_in_logs: bool = False,
         **kwargs,
     ) -> None: ...
     def execute(self, context): ...
     def prepare_template(self) -> None: ...
+    def get_openlineage_facets_on_start(self) -> OperatorLineage | None: ...
+    def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None: ...
 
 class SQLColumnCheckOperator(BaseSQLOperator):
-    template_fields: …
+    template_fields: Sequence[str]
     template_fields_renderers: Incomplete
     sql_check_template: str
     column_checks: Incomplete
@@ -99,16 +116,16 @@ class SQLColumnCheckOperator(BaseSQLOperator):
         *,
         table: str,
         column_mapping: dict[str, dict[str, Any]],
-        partition_clause: …
-        conn_id: …
-        database: …
-        accept_none: bool = …
+        partition_clause: str | None = None,
+        conn_id: str | None = None,
+        database: str | None = None,
+        accept_none: bool = True,
         **kwargs,
     ) -> None: ...
     def execute(self, context: Context): ...
 
 class SQLTableCheckOperator(BaseSQLOperator):
-    template_fields: …
+    template_fields: Sequence[str]
     template_fields_renderers: Incomplete
     sql_check_template: str
     table: Incomplete
@@ -120,9 +137,9 @@ class SQLTableCheckOperator(BaseSQLOperator):
         *,
         table: str,
         checks: dict[str, dict[str, Any]],
-        partition_clause: …
-        conn_id: …
-        database: …
+        partition_clause: str | None = None,
+        conn_id: str | None = None,
+        database: str | None = None,
         **kwargs,
     ) -> None: ...
     def execute(self, context: Context): ...
@@ -138,9 +155,9 @@ class SQLCheckOperator(BaseSQLOperator):
         self,
         *,
         sql: str,
-        conn_id: …
-        database: …
-        parameters: …
+        conn_id: str | None = None,
+        database: str | None = None,
+        parameters: Iterable | Mapping[str, Any] | None = None,
         **kwargs,
     ) -> None: ...
     def execute(self, context: Context): ...
@@ -160,11 +177,12 @@ class SQLValueCheckOperator(BaseSQLOperator):
         *,
         sql: str,
         pass_value: Any,
-        tolerance: Any = …
-        conn_id: …
-        database: …
+        tolerance: Any = None,
+        conn_id: str | None = None,
+        database: str | None = None,
         **kwargs,
     ) -> None: ...
+    def check_value(self, records) -> None: ...
     def execute(self, context: Context): ...
 
 class SQLIntervalCheckOperator(BaseSQLOperator):
@@ -188,12 +206,12 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
         *,
         table: str,
         metrics_thresholds: dict[str, int],
-        date_filter_column: …
-        days_back: SupportsAbs[int] = …
-        ratio_formula: …
-        ignore_zero: bool = …
-        conn_id: …
-        database: …
+        date_filter_column: str | None = "ds",
+        days_back: SupportsAbs[int] = -7,
+        ratio_formula: str | None = "max_over_min",
+        ignore_zero: bool = True,
+        conn_id: str | None = None,
+        database: str | None = None,
         **kwargs,
     ) -> None: ...
     def execute(self, context: Context): ...
@@ -211,8 +229,8 @@ class SQLThresholdCheckOperator(BaseSQLOperator):
         sql: str,
         min_threshold: Any,
         max_threshold: Any,
-        conn_id: …
-        database: …
+        conn_id: str | None = None,
+        database: str | None = None,
         **kwargs,
     ) -> None: ...
     def execute(self, context: Context): ...
@@ -234,9 +252,9 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
         sql: str,
         follow_task_ids_if_true: list[str],
         follow_task_ids_if_false: list[str],
-        conn_id: str = …
-        database: …
-        parameters: …
+        conn_id: str = "default_conn_id",
+        database: str | None = None,
+        parameters: Iterable | Mapping[str, Any] | None = None,
         **kwargs,
     ) -> None: ...
     def execute(self, context: Context): ...
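
For reference, a task definition exercising the now explicitly typed `SQLExecuteQueryOperator` parameters (task id, connection id and SQL are placeholders):

    from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

    count_orders = SQLExecuteQueryOperator(
        task_id="count_orders",             # hypothetical task id
        conn_id="my_postgres",              # hypothetical connection id
        sql="SELECT COUNT(*) FROM orders",
        return_last=True,                   # default, shown for clarity
        show_return_value_in_logs=True,
    )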

airflow/providers/common/sql/sensors/sql.pyi

@@ -32,7 +32,13 @@ Definition of the public interface for airflow.providers.common.sql.sensors.sql
 isort:skip_file
 """
 from _typeshed import Incomplete
-from airflow.…
+from airflow.exceptions import (
+    AirflowException as AirflowException,
+    AirflowSkipException as AirflowSkipException,
+)
+from airflow.hooks.base import BaseHook as BaseHook
+from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
+from airflow.sensors.base import BaseSensorOperator as BaseSensorOperator
 from typing import Any, Sequence
 
 class SqlSensor(BaseSensorOperator):
@@ -51,11 +57,11 @@ class SqlSensor(BaseSensorOperator):
         *,
         conn_id,
         sql,
-        parameters: Incomplete | None = …
-        success: Incomplete | None = …
-        failure: Incomplete | None = …
-        fail_on_empty: bool = …
-        hook_params: Incomplete | None = …
+        parameters: Incomplete | None = None,
+        success: Incomplete | None = None,
+        failure: Incomplete | None = None,
+        fail_on_empty: bool = False,
+        hook_params: Incomplete | None = None,
         **kwargs,
     ) -> None: ...
     def poke(self, context: Any): ...
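
And the sensor stub in use: `success` and `failure` are optional callables evaluated against the first cell of the first returned row (task and connection ids are placeholders):

    from airflow.providers.common.sql.sensors.sql import SqlSensor

    wait_for_rows = SqlSensor(
        task_id="wait_for_rows",                      # hypothetical task id
        conn_id="my_db",                              # hypothetical connection id
        sql="SELECT COUNT(*) FROM staging_orders",
        success=lambda count: count > 0,              # poke succeeds once rows exist
        fail_on_empty=False,
    )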

{apache_airflow_providers_common_sql-1.12.0rc1.dist-info → apache_airflow_providers_common_sql-1.13.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-common-sql
-Version: 1.12.0rc1
+Version: 1.13.0
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,14 +21,14 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.6.0
+Requires-Dist: apache-airflow>=2.7.0
 Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: sqlparse>=0.4.2
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=1.2.5,<2.2 ; extra == "pandas"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.12.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.12.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.13.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.13.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -80,7 +80,7 @@ Provides-Extra: pandas
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.12.0rc1``
+Release: ``1.13.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -93,7 +93,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.12.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.13.0/>`_.
 
 Installation
 ------------
@@ -110,7 +110,7 @@ Requirements
 ================== ==================
 PIP package        Version required
 ================== ==================
-``apache-airflow`` ``>=2.6.0``
+``apache-airflow`` ``>=2.7.0``
 ``sqlparse``       ``>=0.4.2``
 ``more-itertools`` ``>=9.0.0``
 ================== ==================
@@ -135,4 +135,4 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.12.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.13.0/changelog.html>`_.

apache_airflow_providers_common_sql-1.13.0.dist-info/RECORD (added)

@@ -0,0 +1,19 @@
+airflow/providers/common/sql/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
+airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
+airflow/providers/common/sql/__init__.py,sha256=4rYZjNENBZoNjNv7RMVPssgnMxnlrnMMgvBWNOa6wd8,1586
+airflow/providers/common/sql/get_provider_info.py,sha256=NxIIRHQakcE1UEaY4Tks8C0_3ovRxMXAyXR7DFUseDk,2996
+airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
+airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
+airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/hooks/sql.py,sha256=Yp3xs08lMq8s7KVUXvNOyT5XiIFYDbki1mUcyZGijKQ,29781
+airflow/providers/common/sql/hooks/sql.pyi,sha256=iwsV-yalhYz5aBdPqaNl1x0YYYqjP75N18moKXbVYQU,5570
+airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/operators/sql.py,sha256=QuM-Dqsg1ytyYl26_-pN-cKuIQEAiwvmDpbcAemLFnI,48231
+airflow/providers/common/sql/operators/sql.pyi,sha256=f66uuQmDvj6rsX36tGTbEpm6zK3R4Opmy5i6OmdSI7w,8554
+airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/sensors/sql.py,sha256=t4RiXUZGrr_FgcRo5QV-rIwNgo7qNuyafqP-GzWJuFY,5683
+airflow/providers/common/sql/sensors/sql.pyi,sha256=4tj_m6r4ropcZYKokcVeHf4lx5fZFLExlj7QBKjfo8o,2576
+apache_airflow_providers_common_sql-1.13.0.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.13.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_common_sql-1.13.0.dist-info/METADATA,sha256=_r9Y-ot1X90bvoODAklmtJFUVUxvfls-jfa3syRsMA0,6108
+apache_airflow_providers_common_sql-1.13.0.dist-info/RECORD,,

apache_airflow_providers_common_sql-1.12.0rc1.dist-info/RECORD (removed)

@@ -1,19 +0,0 @@
-airflow/providers/common/sql/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
-airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
-airflow/providers/common/sql/__init__.py,sha256=klMx41vmt09tJSbH0p_K4neG58LhZ-sXr4Y6t9tpYEs,1586
-airflow/providers/common/sql/get_provider_info.py,sha256=H9lLcgafXyHuzteDB8Jl_yjKx2D1GRt2Fyh1ws1jcg0,2974
-airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
-airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
-airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/hooks/sql.py,sha256=aCDIgO1wmWWJjluSBiHc5rBaBBqQHHRlSozPgctEOdY,28425
-airflow/providers/common/sql/hooks/sql.pyi,sha256=dqxjIq0JLrsPiC65wvOntDpXpEzWdEuv80abfqq2yFU,4138
-airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/operators/sql.py,sha256=C7w4xxXQdrkJCuqQqgUxYqL_pxkXhI65YAKtYMrO5WA,47843
-airflow/providers/common/sql/operators/sql.pyi,sha256=-Wa-4uMtRPvUowYgSDnfH98Joe3Uakzvof4F4G4mgMM,7769
-airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/sensors/sql.py,sha256=t4RiXUZGrr_FgcRo5QV-rIwNgo7qNuyafqP-GzWJuFY,5683
-airflow/providers/common/sql/sensors/sql.pyi,sha256=ZwVia3SUHrW7eB98r3vHYT_jhgkSWHRZqA2srYDHVbc,2295
-apache_airflow_providers_common_sql-1.12.0rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
-apache_airflow_providers_common_sql-1.12.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_common_sql-1.12.0rc1.dist-info/METADATA,sha256=dWCb63rDgZyHzMdykloxRc6SLZuRbBINj1-u-2flqnk,6118
-apache_airflow_providers_common_sql-1.12.0rc1.dist-info/RECORD,,

WHEEL and entry_points.txt: file without changes (only the dist-info directory was renamed).