apache-airflow-providers-common-sql 1.23.0__py3-none-any.whl → 1.24.0rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
airflow/providers/common/sql/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.23.0"
+__version__ = "1.24.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
airflow/providers/common/sql/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Common SQL",
         "description": "`Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__\n",
         "state": "ready",
-        "source-date-epoch": 1739959337,
+        "source-date-epoch": 1741508670,
         "versions": [
+            "1.24.0",
             "1.23.0",
             "1.21.0",
             "1.20.0",
@@ -81,7 +82,10 @@ def get_provider_info():
         "operators": [
             {
                 "integration-name": "Common SQL",
-                "python-modules": ["airflow.providers.common.sql.operators.sql"],
+                "python-modules": [
+                    "airflow.providers.common.sql.operators.sql",
+                    "airflow.providers.common.sql.operators.generic_transfer",
+                ],
             }
         ],
         "dialects": [
@@ -99,12 +103,24 @@ def get_provider_info():
                 ],
             }
         ],
+        "triggers": [
+            {
+                "integration-name": "Common SQL",
+                "python-modules": ["airflow.providers.common.sql.triggers.sql"],
+            }
+        ],
         "sensors": [
             {"integration-name": "Common SQL", "python-modules": ["airflow.providers.common.sql.sensors.sql"]}
         ],
-        "dependencies": ["apache-airflow>=2.9.0", "sqlparse>=0.5.1", "more-itertools>=9.0.0"],
+        "dependencies": [
+            "apache-airflow>=2.9.0",
+            "sqlparse>=0.5.1",
+            "more-itertools>=9.0.0",
+            "methodtools>=0.4.7",
+        ],
         "optional-dependencies": {
             "pandas": ["pandas>=2.1.2,<2.2"],
             "openlineage": ["apache-airflow-providers-openlineage"],
         },
+        "devel-dependencies": [],
     }
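After upgrading, the new metadata is easy to sanity-check from a Python shell. A minimal sketch; the expected values follow directly from the hunks above:

    from airflow.providers.common.sql.get_provider_info import get_provider_info

    info = get_provider_info()
    assert info["versions"][0] == "1.24.0"               # new version heads the list
    assert "methodtools>=0.4.7" in info["dependencies"]  # new runtime dependency
    assert info["triggers"][0]["python-modules"] == ["airflow.providers.common.sql.triggers.sql"]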
airflow/providers/common/sql/operators/generic_transfer.py (new file)
@@ -0,0 +1,219 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from collections.abc import Sequence
+from functools import cached_property
+from typing import TYPE_CHECKING, Any
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.models import BaseOperator
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
+
+if TYPE_CHECKING:
+    import jinja2
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
+
+
+class GenericTransfer(BaseOperator):
+    """
+    Moves data from one connection to another.
+
+    Assumes that both connections provide the required methods in their
+    respective hooks: the source hook needs to expose a `get_records` method,
+    and the destination hook an `insert_rows` method.
+
+    This is meant to be used on small-ish datasets that fit in memory.
+
+    :param sql: SQL query to execute against the source database. (templated)
+    :param destination_table: target table. (templated)
+    :param source_conn_id: source connection. (templated)
+    :param source_hook_params: source hook parameters.
+    :param destination_conn_id: destination connection. (templated)
+    :param destination_hook_params: destination hook parameters.
+    :param preoperator: sql statement or list of statements to be
+        executed prior to loading the data. (templated)
+    :param insert_args: extra params for `insert_rows` method.
+    :param page_size: number of records to be read in paginated mode (optional).
+    """
+
+    template_fields: Sequence[str] = (
+        "source_conn_id",
+        "destination_conn_id",
+        "sql",
+        "destination_table",
+        "preoperator",
+        "insert_args",
+    )
+    template_ext: Sequence[str] = (
+        ".sql",
+        ".hql",
+    )
+    template_fields_renderers = {"preoperator": "sql"}
+    ui_color = "#b0f07c"
+
+    def __init__(
+        self,
+        *,
+        sql: str,
+        destination_table: str,
+        source_conn_id: str,
+        source_hook_params: dict | None = None,
+        destination_conn_id: str,
+        destination_hook_params: dict | None = None,
+        preoperator: str | list[str] | None = None,
+        insert_args: dict | None = None,
+        page_size: int | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.sql = sql
+        self.destination_table = destination_table
+        self.source_conn_id = source_conn_id
+        self.source_hook_params = source_hook_params
+        self.destination_conn_id = destination_conn_id
+        self.destination_hook_params = destination_hook_params
+        self.preoperator = preoperator
+        self.insert_args = insert_args or {}
+        self.page_size = page_size
+        self._paginated_sql_statement_format = kwargs.get(
+            "paginated_sql_statement_format", "{} LIMIT {} OFFSET {}"
+        )
+
+    @classmethod
+    def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> DbApiHook:
+        """
+        Return DbApiHook for this connection id.
+
+        :param conn_id: connection id
+        :param hook_params: hook parameters
+        :return: DbApiHook for this connection
+        """
+        connection = BaseHook.get_connection(conn_id)
+        hook = connection.get_hook(hook_params=hook_params)
+        if not isinstance(hook, DbApiHook):
+            raise RuntimeError(f"Hook for connection {conn_id!r} must be of type {DbApiHook.__name__}")
+        return hook
+
+    @cached_property
+    def source_hook(self) -> DbApiHook:
+        return self.get_hook(conn_id=self.source_conn_id, hook_params=self.source_hook_params)
+
+    @cached_property
+    def destination_hook(self) -> DbApiHook:
+        return self.get_hook(conn_id=self.destination_conn_id, hook_params=self.destination_hook_params)
+
+    def get_paginated_sql(self, offset: int) -> str:
+        """Format the paginated SQL statement using the current format."""
+        return self._paginated_sql_statement_format.format(self.sql, self.page_size, offset)
+
+    def render_template_fields(
+        self,
+        context: Context,
+        jinja_env: jinja2.Environment | None = None,
+    ) -> None:
+        super().render_template_fields(context=context, jinja_env=jinja_env)
+
+        # Make sure strings are converted to integers
+        if isinstance(self.page_size, str):
+            self.page_size = int(self.page_size)
+        commit_every = self.insert_args.get("commit_every")
+        if isinstance(commit_every, str):
+            self.insert_args["commit_every"] = int(commit_every)
+
+    def execute(self, context: Context):
+        if self.preoperator:
+            self.log.info("Running preoperator")
+            self.log.info(self.preoperator)
+            self.destination_hook.run(self.preoperator)
+
+        if self.page_size and isinstance(self.sql, str):
+            self.defer(
+                trigger=SQLExecuteQueryTrigger(
+                    conn_id=self.source_conn_id,
+                    hook_params=self.source_hook_params,
+                    sql=self.get_paginated_sql(0),
+                ),
+                method_name=self.execute_complete.__name__,
+            )
+        else:
+            self.log.info("Extracting data from %s", self.source_conn_id)
+            self.log.info("Executing: \n %s", self.sql)
+
+            results = self.source_hook.get_records(self.sql)
+
+            self.log.info("Inserting rows into %s", self.destination_conn_id)
+            self.destination_hook.insert_rows(table=self.destination_table, rows=results, **self.insert_args)
+
+    def execute_complete(
+        self,
+        context: Context,
+        event: dict[Any, Any] | None = None,
+    ) -> Any:
+        if event:
+            if event.get("status") == "failure":
+                raise AirflowException(event.get("message"))
+
+            results = event.get("results")
+
+            if results:
+                map_index = context["ti"].map_index
+                offset = (
+                    context["ti"].xcom_pull(
+                        key="offset",
+                        task_ids=self.task_id,
+                        dag_id=self.dag_id,
+                        map_indexes=map_index,
+                        default=0,
+                    )
+                    + self.page_size
+                )
+
+                self.log.info("Offset increased to %d", offset)
+                self.xcom_push(context=context, key="offset", value=offset)
+
+                self.log.info("Inserting %d rows into %s", len(results), self.destination_conn_id)
+                self.destination_hook.insert_rows(
+                    table=self.destination_table, rows=results, **self.insert_args
+                )
+                self.log.info(
+                    "Inserting %d rows into %s done!",
+                    len(results),
+                    self.destination_conn_id,
+                )
+
+                self.defer(
+                    trigger=SQLExecuteQueryTrigger(
+                        conn_id=self.source_conn_id,
+                        hook_params=self.source_hook_params,
+                        sql=self.get_paginated_sql(offset),
+                    ),
+                    method_name=self.execute_complete.__name__,
+                )
+            else:
+                self.log.info(
+                    "No more rows to fetch into %s; ending transfer.",
+                    self.destination_table,
+                )
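For context, a usage sketch of the new operator. The DAG ID, connection IDs, and table names below are hypothetical; setting `page_size` is what switches the transfer onto the deferrable, trigger-based path added in this release:

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.common.sql.operators.generic_transfer import GenericTransfer

    # Hypothetical DAG and connection IDs, for illustration only.
    with DAG(dag_id="example_generic_transfer", start_date=datetime(2025, 1, 1), schedule=None):
        GenericTransfer(
            task_id="copy_orders",
            sql="SELECT * FROM orders",                # executed against the source connection
            destination_table="orders_copy",
            source_conn_id="source_db",
            destination_conn_id="dest_db",
            preoperator="TRUNCATE TABLE orders_copy",  # runs on the destination first
            insert_args={"commit_every": 1000},
            page_size=1000,  # non-None page_size enables the deferrable, paginated mode
        )

With `page_size` set, the first deferral executes `SELECT * FROM orders LIMIT 1000 OFFSET 0` (via the default `"{} LIMIT {} OFFSET {}"` statement format), and `execute_complete` keeps deferring with an increased offset, tracked in XCom, until the trigger returns no rows.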
airflow/providers/common/sql/operators/generic_transfer.pyi (new file)
@@ -0,0 +1,85 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# This is automatically generated stub for the `common.sql` provider
+#
+# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# and the .pyi file represents part of the "public" API that the
+# `common.sql` provider exposes to other providers.
+#
+# Any, potentially breaking change in the stubs will require deliberate manual action from the contributor
+# making a change to the `common.sql` provider. Those stubs are also used by MyPy automatically when checking
+# if only public API of the common.sql provider is used by all the other providers.
+#
+# You can read more in the README_API.md file
+#
+"""
+Definition of the public interface for airflow.providers.common.sql.operators.generic_transfer
+isort:skip_file
+"""
+
+from collections.abc import Sequence
+from functools import cached_property as cached_property
+from typing import Any, ClassVar
+
+import jinja2
+from _typeshed import Incomplete as Incomplete
+
+from airflow.models import BaseOperator
+from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
+from airflow.utils.context import Context as Context
+
+class GenericTransfer(BaseOperator):
+    template_fields: Sequence[str]
+    template_ext: Sequence[str]
+    template_fields_renderers: ClassVar[dict]
+    ui_color: str
+    sql: Incomplete
+    destination_table: Incomplete
+    source_conn_id: Incomplete
+    source_hook_params: Incomplete
+    destination_conn_id: Incomplete
+    destination_hook_params: Incomplete
+    preoperator: Incomplete
+    insert_args: Incomplete
+    page_size: Incomplete
+    def __init__(
+        self,
+        *,
+        sql: str,
+        destination_table: str,
+        source_conn_id: str,
+        source_hook_params: dict | None = None,
+        destination_conn_id: str,
+        destination_hook_params: dict | None = None,
+        preoperator: str | list[str] | None = None,
+        insert_args: dict | None = None,
+        page_size: int | None = None,
+        **kwargs,
+    ) -> None: ...
+    @classmethod
+    def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> DbApiHook: ...
+    @cached_property
+    def source_hook(self) -> DbApiHook: ...
+    @cached_property
+    def destination_hook(self) -> DbApiHook: ...
+    def get_paginated_sql(self, offset: int) -> str: ...
+    def render_template_fields(
+        self, context: Context, jinja_env: jinja2.Environment | None = None
+    ) -> None: ...
+    def execute(self, context: Context): ...
+    def execute_complete(self, context: Context, event: dict[Any, Any] | None = None) -> Any: ...
airflow/providers/common/sql/operators/sql.py
@@ -26,7 +26,8 @@ from typing import TYPE_CHECKING, Any, Callable, ClassVar, NoReturn, SupportsAbs
 from airflow.exceptions import AirflowException, AirflowFailException
 from airflow.hooks.base import BaseHook
 from airflow.models import BaseOperator, SkipMixin
-from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, return_single_query_results
+from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
+from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
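For downstream users, the practical effect of the split above is only the import path; `fetch_all_handler` remains a handler you pass to `DbApiHook.run`. A sketch assuming a Postgres source (the connection ID is hypothetical):

    from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
    from airflow.providers.postgres.hooks.postgres import PostgresHook

    # Fetch all rows from the query via the relocated handler.
    rows = PostgresHook(postgres_conn_id="my_db").run("SELECT 1", handler=fetch_all_handler)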
airflow/providers/common/sql/triggers/__init__.py (new file)
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
airflow/providers/common/sql/triggers/sql.py (new file)
@@ -0,0 +1,87 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+from airflow.triggers.base import BaseTrigger, TriggerEvent
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncIterator
+    from typing import Any
+
+
+class SQLExecuteQueryTrigger(BaseTrigger):
+    """
+    A trigger that executes SQL code in async mode.
+
+    :param sql: the sql statement to be executed (str) or a list of sql statements to execute
+    :param conn_id: the connection ID used to connect to the database
+    :param hook_params: hook parameters
+    """
+
+    def __init__(
+        self,
+        sql: str | list[str],
+        conn_id: str,
+        hook_params: dict | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.sql = sql
+        self.conn_id = conn_id
+        self.hook_params = hook_params
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize the SQLExecuteQueryTrigger arguments and classpath."""
+        return (
+            f"{self.__class__.__module__}.{self.__class__.__name__}",
+            {
+                "sql": self.sql,
+                "conn_id": self.conn_id,
+                "hook_params": self.hook_params,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        try:
+            hook = BaseHook.get_hook(self.conn_id, hook_params=self.hook_params)
+
+            if not isinstance(hook, DbApiHook):
+                raise AirflowException(
+                    f"You are trying to use `common-sql` with {hook.__class__.__name__},"
+                    " but its provider does not support it. Please upgrade the provider to a version that"
+                    " supports `common-sql`. The hook class should be a subclass of"
+                    f" `{DbApiHook.__module__}.{DbApiHook.__name__}`."
+                    f" Got {hook.__class__.__name__} hook with class hierarchy: {hook.__class__.mro()}"
+                )
+
+            self.log.info("Extracting data from %s", self.conn_id)
+            self.log.info("Executing: \n %s", self.sql)
+            self.log.info("Reading records from %s", self.conn_id)
+            results = hook.get_records(self.sql)
+            self.log.info("Reading records from %s done!", self.conn_id)
+
+            self.log.debug("results: %s", results)
+            yield TriggerEvent({"status": "success", "results": results})
+        except Exception as e:
+            self.log.exception("An error occurred: %s", e)
+            yield TriggerEvent({"status": "failure", "message": str(e)})
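Because the trigger does all its work in `run()`, it can be exercised outside a deferral, for example in a local test. A sketch; the connection ID is hypothetical, and `TriggerEvent.payload` carries the `status`/`results` dict yielded above:

    import asyncio

    from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger

    async def main() -> None:
        # "my_db" is a hypothetical connection ID for illustration.
        trigger = SQLExecuteQueryTrigger(sql="SELECT 1", conn_id="my_db")
        async for event in trigger.run():
            print(event.payload)  # e.g. {"status": "success", "results": [(1,)]}

    asyncio.run(main())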
airflow/providers/common/sql/triggers/sql.pyi (new file)
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# This is automatically generated stub for the `common.sql` provider
+#
+# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# and the .pyi file represents part of the "public" API that the
+# `common.sql` provider exposes to other providers.
+#
+# Any, potentially breaking change in the stubs will require deliberate manual action from the contributor
+# making a change to the `common.sql` provider. Those stubs are also used by MyPy automatically when checking
+# if only public API of the common.sql provider is used by all the other providers.
+#
+# You can read more in the README_API.md file
+#
+"""
+Definition of the public interface for airflow.providers.common.sql.triggers.sql
+isort:skip_file
+"""
+
+from collections.abc import AsyncIterator
+from typing import Any
+
+from airflow.triggers.base import BaseTrigger as BaseTrigger, TriggerEvent as TriggerEvent
+
+class SQLExecuteQueryTrigger(BaseTrigger):
+    def __init__(
+        self, sql: str | list[str], conn_id: str, hook_params: dict | None = None, **kwargs
+    ) -> None: ...
+    def serialize(self) -> tuple[str, dict[str, Any]]: ...
+    async def run(self) -> AsyncIterator[TriggerEvent]: ...  # type: ignore
apache_airflow_providers_common_sql-1.24.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: apache-airflow-providers-common-sql
-Version: 1.23.0
+Version: 1.24.0rc1
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,14 +20,15 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow>=2.9.0rc0
 Requires-Dist: sqlparse>=0.5.1
 Requires-Dist: more-itertools>=9.0.0
+Requires-Dist: methodtools>=0.4.7
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -36,32 +37,31 @@ Provides-Extra: openlineage
 Provides-Extra: pandas
 
 
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
 
-.. http://www.apache.org/licenses/LICENSE-2.0
+ .. http://www.apache.org/licenses/LICENSE-2.0
 
-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied.  See the License for the
-   specific language governing permissions and limitations
-   under the License.
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
 
-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
-
-.. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-   `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+    ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.23.0``
+Release: ``1.24.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -74,7 +74,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/>`_.
 
 Installation
 ------------
@@ -94,6 +94,7 @@ PIP package Version required
 ``apache-airflow``  ``>=2.9.0``
 ``sqlparse``        ``>=0.5.1``
 ``more-itertools``  ``>=9.0.0``
+``methodtools``     ``>=0.4.7``
 ==================  ==================
 
 
@@ -116,5 +117,5 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.24.0/changelog.html>`_.
 
apache_airflow_providers_common_sql-1.24.0rc1.dist-info/RECORD
@@ -1,7 +1,7 @@
 airflow/providers/common/sql/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
 airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
-airflow/providers/common/sql/__init__.py,sha256=AYm77GCPhnDJ6AMc3fi1FzUszcGkMWApE2HhQ-W5hR4,1498
-airflow/providers/common/sql/get_provider_info.py,sha256=mObPyvz4mfh6C0xry0r7XPQ3QZt4HFf5Ud2ZTWpzneQ,3650
+airflow/providers/common/sql/__init__.py,sha256=uJwbcNerg4dH470OLlPqVvDojLnTcIZiXBBSKIEK41g,1498
+airflow/providers/common/sql/get_provider_info.py,sha256=6nDmSiEuk71yJHaEz5LW9rCazSUZ95Gr4bZex0_hBL8,4097
 airflow/providers/common/sql/get_provider_info.pyi,sha256=0mydJPGQScnPpoa9-ohHVJFngFH6Lsk22KS243PE-gw,1596
 airflow/providers/common/sql/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/dialects/dialect.py,sha256=rV4frwvKeU25AydRFA0iJEzSDAjHBAQQl4a7OLpE2Tg,7698
@@ -15,11 +15,16 @@ airflow/providers/common/sql/hooks/handlers.pyi,sha256=8meEoDd_lUHgJ8PCPrmhRX00x
 airflow/providers/common/sql/hooks/sql.py,sha256=K7U6LVbglKTXBIw34MBoFWTN3o1o82x1zw3iKyiZdu8,36130
 airflow/providers/common/sql/hooks/sql.pyi,sha256=K0B4HOgI-pczVG3K8PRSlF0tJ7FcLwz-nHveSpIydno,6751
 airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/operators/sql.py,sha256=c5nERbSxUTg8438Hq7MnM4vuFOWZLJfN70o7FIDmNVQ,49245
+airflow/providers/common/sql/operators/generic_transfer.py,sha256=EEKKMcOOLl0vyjXRHBI6UuoFLqpHpeHEj9zwMTrNAA4,8336
+airflow/providers/common/sql/operators/generic_transfer.pyi,sha256=rhuCB7KSm_NutW8m3BNQmaoiUPDXp1fTrSeoR0Jr4dU,3330
+airflow/providers/common/sql/operators/sql.py,sha256=o7k8jlEMcubjKBiOIsoAEERTacZRdfPIGdvEIe8xwMg,49300
 airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/sensors/sql.py,sha256=vJqxvDpwmGbq_6kMMqU4YjhHhcbhDhHYjJ2ufhLDmGc,5519
 airflow/providers/common/sql/sensors/sql.pyi,sha256=GiOk2qD0PO5HWISgTTdOJQLC9b2ItzvQr68adXIbjGQ,2530
-apache_airflow_providers_common_sql-1.23.0.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
-apache_airflow_providers_common_sql-1.23.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-apache_airflow_providers_common_sql-1.23.0.dist-info/METADATA,sha256=XYL-MD-Pvo02D0YiahqtW5oBZIua_8OyoUSkS9zT15I,5223
-apache_airflow_providers_common_sql-1.23.0.dist-info/RECORD,,
+airflow/providers/common/sql/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/triggers/sql.py,sha256=ugrQsMY3svOOdUHbksBTDNmI5qKCPwdO_AxTPlxrD9I,3463
+airflow/providers/common/sql/triggers/sql.pyi,sha256=7wVgfqUPJB7egsWwbZtwZV3TFm7DuKLclWetNInCM5w,1986
+apache_airflow_providers_common_sql-1.24.0rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.24.0rc1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+apache_airflow_providers_common_sql-1.24.0rc1.dist-info/METADATA,sha256=33in-6D9nkjsAbjMjV1rTVURMwZ9tm-sZFEG736vkTQ,5281
+apache_airflow_providers_common_sql-1.24.0rc1.dist-info/RECORD,,
apache_airflow_providers_common_sql-1.24.0rc1.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.10.1
+Generator: flit 3.11.0
 Root-Is-Purelib: true
 Tag: py3-none-any