apache-airflow-providers-common-sql 1.3.4__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-common-sql might be problematic. See the release details below for more information.

@@ -27,7 +27,7 @@ def get_provider_info():
27
27
  "package-name": "apache-airflow-providers-common-sql",
28
28
  "name": "Common SQL",
29
29
  "description": "`Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__\n",
30
- "versions": ["1.3.4", "1.3.3", "1.3.2", "1.3.1", "1.3.0", "1.2.0", "1.1.0", "1.0.0"],
30
+ "versions": ["1.4.0", "1.3.4", "1.3.3", "1.3.2", "1.3.1", "1.3.0", "1.2.0", "1.1.0", "1.0.0"],
31
31
  "dependencies": ["sqlparse>=0.4.2"],
32
32
  "additional-extras": [{"name": "pandas", "dependencies": ["pandas>=0.17.1"]}],
33
33
  "integrations": [
@@ -300,10 +300,10 @@ class DbApiHook(BaseForDbApiHook):
300
300
 
301
301
  After ``run`` is called, you may access the following properties on the hook object:
302
302
 
303
- * ``descriptions``: an array of cursor descriptions. If ``return_last`` is True, this will be
304
- a one-element array containing the cursor ``description`` for the last statement.
305
- Otherwise, it will contain the cursor description for each statement executed.
306
- * ``last_description``: the description for the last statement executed
303
+ * ``descriptions``: an array of cursor descriptions. If ``return_last`` is True, this will be
304
+ a one-element array containing the cursor ``description`` for the last statement.
305
+ Otherwise, it will contain the cursor description for each statement executed.
306
+ * ``last_description``: the description for the last statement executed
307
307
 
308
308
  Note that query result will ONLY be actually returned when a handler is provided; if
309
309
  ``handler`` is None, this method will return None.
@@ -311,7 +311,7 @@ class DbApiHook(BaseForDbApiHook):
311
311
  Handler is a way to process the rows from cursor (Iterator) into a value that is suitable to be
312
312
  returned to XCom and generally fit in memory.
313
313
 
314
- You can use pre-defined handles (`fetch_all_handler``, ''fetch_one_handler``) or implement your
314
+ You can use pre-defined handles (``fetch_all_handler``, ``fetch_one_handler``) or implement your
315
315
  own handler.
316
316
 
317
317
  :param sql: the sql statement to be executed (str) or a list of
@@ -73,6 +73,8 @@ def _get_failed_checks(checks, col=None):
73
73
 
74
74
  parse_boolean = _parse_boolean
75
75
  """
76
+ :sphinx-autoapi-skip:
77
+
76
78
  IMPORTANT!!! Keep it for compatibility with released 8.4.0 version of google provider.
77
79
 
78
80
  Unfortunately the provider used _get_failed_checks and parse_boolean as imports and we should
@@ -187,20 +189,22 @@ class BaseSQLOperator(BaseOperator):
187
189
  class SQLExecuteQueryOperator(BaseSQLOperator):
188
190
  """
189
191
  Executes SQL code in a specific database
190
- :param sql: the SQL code or string pointing to a template file to be executed (templated).
191
- File must have a '.sql' extensions.
192
192
 
193
193
  When implementing a specific Operator, you can also implement `_process_output` method in the
194
194
  hook to perform additional processing of values returned by the DB Hook of yours. For example, you
195
195
  can join description retrieved from the cursors of your statements with returned values, or save
196
196
  the output of your operator to a file.
197
197
 
198
+ :param sql: the SQL code or string pointing to a template file to be executed (templated).
199
+ File must have a '.sql' extension.
198
200
  :param autocommit: (optional) if True, each command is automatically committed (default: False).
199
201
  :param parameters: (optional) the parameters to render the SQL query with.
200
202
  :param handler: (optional) the function that will be applied to the cursor (default: fetch_all_handler).
201
203
  :param split_statements: (optional) if split single SQL string into statements. By default, defers
202
204
  to the default value in the ``run`` method of the configured hook.
203
205
  :param return_last: (optional) return the result of only last statement (default: True).
206
+ :param show_return_value_in_logs: (optional) if true operator output will be printed to the task log.
207
+ Use with caution. It's not recommended to dump large datasets to the log. (default: False).
204
208
 
205
209
  .. seealso::
206
210
  For more information on how to use this operator, take a look at the guide:
@@ -221,6 +225,7 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
221
225
  handler: Callable[[Any], Any] = fetch_all_handler,
222
226
  split_statements: bool | None = None,
223
227
  return_last: bool = True,
228
+ show_return_value_in_logs: bool = False,
224
229
  **kwargs,
225
230
  ) -> None:
226
231
  super().__init__(**kwargs)
@@ -230,6 +235,7 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
230
235
  self.handler = handler
231
236
  self.split_statements = split_statements
232
237
  self.return_last = return_last
238
+ self.show_return_value_in_logs = show_return_value_in_logs
233
239
 
234
240
  def _process_output(self, results: list[Any], descriptions: list[Sequence[Sequence] | None]) -> list[Any]:
235
241
  """
@@ -248,6 +254,8 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
248
254
  :param results: results in the form of list of rows.
249
255
  :param descriptions: list of descriptions returned by ``cur.description`` in the Python DBAPI
250
256
  """
257
+ if self.show_return_value_in_logs:
258
+ self.log.info("Operator output is: %s", results)
251
259
  return results
252
260
 
253
261
  def execute(self, context):
@@ -286,40 +294,41 @@ class SQLColumnCheckOperator(BaseSQLOperator):
286
294
  Checks are performed on a per-column basis specified by the column_mapping.
287
295
 
288
296
  Each check can take one or more of the following options:
289
- - equal_to: an exact value to equal, cannot be used with other comparison options
290
- - greater_than: value that result should be strictly greater than
291
- - less_than: value that results should be strictly less than
292
- - geq_to: value that results should be greater than or equal to
293
- - leq_to: value that results should be less than or equal to
294
- - tolerance: the percentage that the result may be off from the expected value
295
- - partition_clause: an extra clause passed into a WHERE statement to partition data
297
+
298
+ * ``equal_to``: an exact value to equal, cannot be used with other comparison options
299
+ * ``greater_than``: value that result should be strictly greater than
300
+ * ``less_than``: value that results should be strictly less than
301
+ * ``geq_to``: value that results should be greater than or equal to
302
+ * ``leq_to``: value that results should be less than or equal to
303
+ * ``tolerance``: the percentage that the result may be off from the expected value
304
+ * ``partition_clause``: an extra clause passed into a WHERE statement to partition data
296
305
 
297
306
  :param table: the table to run checks on
298
307
  :param column_mapping: the dictionary of columns and their associated checks, e.g.
299
308
 
300
- .. code-block:: python
301
-
302
- {
303
- "col_name": {
304
- "null_check": {
305
- "equal_to": 0,
306
- "partition_clause": "foreign_key IS NOT NULL",
307
- },
308
- "min": {
309
- "greater_than": 5,
310
- "leq_to": 10,
311
- "tolerance": 0.2,
312
- },
313
- "max": {"less_than": 1000, "geq_to": 10, "tolerance": 0.01},
309
+ .. code-block:: python
310
+
311
+ {
312
+ "col_name": {
313
+ "null_check": {
314
+ "equal_to": 0,
315
+ "partition_clause": "foreign_key IS NOT NULL",
316
+ },
317
+ "min": {
318
+ "greater_than": 5,
319
+ "leq_to": 10,
320
+ "tolerance": 0.2,
321
+ },
322
+ "max": {"less_than": 1000, "geq_to": 10, "tolerance": 0.01},
323
+ }
314
324
  }
315
- }
316
325
 
317
326
  :param partition_clause: a partial SQL statement that is added to a WHERE clause in the query built by
318
327
  the operator that creates partition_clauses for the checks to run on, e.g.
319
328
 
320
- .. code-block:: python
329
+ .. code-block:: python
321
330
 
322
- "date = '1970-01-01'"
331
+ "date = '1970-01-01'"
323
332
 
324
333
  :param conn_id: the connection ID used to connect to the database
325
334
  :param database: name of database which overwrite the defined one in connection
@@ -534,21 +543,21 @@ class SQLTableCheckOperator(BaseSQLOperator):
534
543
  :param checks: the dictionary of checks, where check names are followed by a dictionary containing at
535
544
  least a check statement, and optionally a partition clause, e.g.:
536
545
 
537
- .. code-block:: python
546
+ .. code-block:: python
538
547
 
539
- {
540
- "row_count_check": {"check_statement": "COUNT(*) = 1000"},
541
- "column_sum_check": {"check_statement": "col_a + col_b < col_c"},
542
- "third_check": {"check_statement": "MIN(col) = 1", "partition_clause": "col IS NOT NULL"},
543
- }
548
+ {
549
+ "row_count_check": {"check_statement": "COUNT(*) = 1000"},
550
+ "column_sum_check": {"check_statement": "col_a + col_b < col_c"},
551
+ "third_check": {"check_statement": "MIN(col) = 1", "partition_clause": "col IS NOT NULL"},
552
+ }
544
553
 
545
554
 
546
555
  :param partition_clause: a partial SQL statement that is added to a WHERE clause in the query built by
547
556
  the operator that creates partition_clauses for the checks to run on, e.g.
548
557
 
549
- .. code-block:: python
558
+ .. code-block:: python
550
559
 
551
- "date = '1970-01-01'"
560
+ "date = '1970-01-01'"
552
561
 
553
562
  :param conn_id: the connection ID used to connect to the database
554
563
  :param database: name of database which overwrite the defined one in connection
@@ -807,12 +816,11 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
807
816
  :param date_filter_column: The column name for the dates to filter on. Defaults to 'ds'
808
817
  :param ratio_formula: which formula to use to compute the ratio between
809
818
  the two metrics. Assuming cur is the metric of today and ref is
810
- the metric to today - days_back.
819
+ the metric to today - days_back. Default: 'max_over_min'
811
820
 
812
- max_over_min: computes max(cur, ref) / min(cur, ref)
813
- relative_diff: computes abs(cur-ref) / ref
821
+ * ``max_over_min``: computes max(cur, ref) / min(cur, ref)
822
+ * ``relative_diff``: computes abs(cur-ref) / ref
814
823
 
815
- Default: 'max_over_min'
816
824
  :param ignore_zero: whether we should ignore zero metrics
817
825
  :param metrics_thresholds: a dictionary of ratios indexed by metrics
818
826
  """
@@ -1029,7 +1037,7 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
1029
1037
 
1030
1038
  :param sql: The SQL code to be executed, should return true or false (templated)
1031
1039
  Template reference are recognized by str ending in '.sql'.
1032
- Expected SQL query to return Boolean (True/False), integer (0 = False, Otherwise = 1)
1040
+ Expected SQL query to return a boolean (True/False), integer (0 = False, Otherwise = 1)
1033
1041
  or string (true/y/yes/1/on/false/n/no/0/off).
1034
1042
  :param follow_task_ids_if_true: task id or task ids to follow if query returns true
1035
1043
  :param follow_task_ids_if_false: task id or task ids to follow if query returns false
@@ -1,19 +1,18 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: apache-airflow-providers-common-sql
3
- Version: 1.3.4
3
+ Version: 1.4.0
4
4
  Summary: Provider for Apache Airflow. Implements apache-airflow-providers-common-sql package
5
5
  Home-page: https://airflow.apache.org/
6
+ Download-URL: https://archive.apache.org/dist/airflow/providers
6
7
  Author: Apache Software Foundation
7
8
  Author-email: dev@airflow.apache.org
8
9
  License: Apache License 2.0
9
- Download-URL: https://archive.apache.org/dist/airflow/providers
10
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.3.4/
10
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.4.0/
11
11
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
12
12
  Project-URL: Source Code, https://github.com/apache/airflow
13
13
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
14
14
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
15
15
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
16
- Platform: UNKNOWN
17
16
  Classifier: Development Status :: 5 - Production/Stable
18
17
  Classifier: Environment :: Console
19
18
  Classifier: Environment :: Web Environment
@@ -56,7 +55,7 @@ Requires-Dist: pandas (>=0.17.1) ; extra == 'pandas'
56
55
 
57
56
  Package ``apache-airflow-providers-common-sql``
58
57
 
59
- Release: ``1.3.4``
58
+ Release: ``1.4.0``
60
59
 
61
60
 
62
61
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -69,7 +68,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
69
68
  are in ``airflow.providers.common.sql`` python package.
70
69
 
71
70
  You can find package information and changelog for the provider
72
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.3.4/>`_.
71
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.4.0/>`_.
73
72
 
74
73
 
75
74
  Installation
@@ -116,6 +115,19 @@ PIP package Version required
116
115
  Changelog
117
116
  ---------
118
117
 
118
+ 1.4.0
119
+ .....
120
+
121
+ Features
122
+ ~~~~~~~~
123
+
124
+ * ``Add option to show output of 'SQLExecuteQueryOperator' in the log (#29954)``
125
+
126
+ Misc
127
+ ~~~~
128
+
129
+ * ``Fix Python API docs formatting for Common SQL provider (#29863)``
130
+
119
131
  1.3.4
120
132
  .....
121
133
 
@@ -260,5 +272,3 @@ Bug Fixes
260
272
  Initial version of the provider.
261
273
  Adds ``SQLColumnCheckOperator`` and ``SQLTableCheckOperator``.
262
274
  Moves ``DBApiHook``, ``SQLSensor`` and ``ConnectorProtocol`` to the provider.
263
-
264
-
@@ -0,0 +1,15 @@
1
+ airflow/providers/common/sql/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
2
+ airflow/providers/common/sql/get_provider_info.py,sha256=EdtCMM7YWFHw2NRwYch1dWOWGVcndF9ED9iqctui_Ho,2343
3
+ airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
4
+ airflow/providers/common/sql/hooks/sql.py,sha256=UoN_xliZoGVZAP2Spw-qNNsIBrAnEVXoRyl03U2zqMI,21739
5
+ airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
6
+ airflow/providers/common/sql/operators/sql.py,sha256=cZvSFT0Wm5poN6MLwhhAvIlSfMRz21_lRH11ySe7x6g,44390
7
+ airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
8
+ airflow/providers/common/sql/sensors/sql.py,sha256=iMJVBO-meJjrfnwD6c5kxgyCZ0ZzoWe7mj4vmCeoo5Y,4691
9
+ apache_airflow_providers_common_sql-1.4.0.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
10
+ apache_airflow_providers_common_sql-1.4.0.dist-info/METADATA,sha256=S119TKCjpcde1bkhZJ5BpHb9_BYpYAqp5bmkCL0mVa8,9482
11
+ apache_airflow_providers_common_sql-1.4.0.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
12
+ apache_airflow_providers_common_sql-1.4.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
13
+ apache_airflow_providers_common_sql-1.4.0.dist-info/entry_points.txt,sha256=tmKZSanpZW-c-LmV95ou0uu9LqJ9MGXvO18XZNC5KsY,107
14
+ apache_airflow_providers_common_sql-1.4.0.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
15
+ apache_airflow_providers_common_sql-1.4.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.38.4)
2
+ Generator: bdist_wheel (0.40.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,3 +1,2 @@
1
1
  [apache_airflow_provider]
2
2
  provider_info = airflow.providers.common.sql.get_provider_info:get_provider_info
3
-
@@ -1,15 +0,0 @@
1
- airflow/providers/common/sql/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
2
- airflow/providers/common/sql/get_provider_info.py,sha256=LYEf16XL7V3NF9vPhOqDyNPWwrLLGL6oJovMjegruhA,2334
3
- airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
4
- airflow/providers/common/sql/hooks/sql.py,sha256=DQzqSqX4fO5pGxFqEZhn1KmdYTP-5zZmcMhCHKTqYWg,21748
5
- airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
6
- airflow/providers/common/sql/operators/sql.py,sha256=UHZJvjn8jNYpWGDh3ZBwCevsb17leOzramCzmFM4Eig,43803
7
- airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
8
- airflow/providers/common/sql/sensors/sql.py,sha256=iMJVBO-meJjrfnwD6c5kxgyCZ0ZzoWe7mj4vmCeoo5Y,4691
9
- apache_airflow_providers_common_sql-1.3.4.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
10
- apache_airflow_providers_common_sql-1.3.4.dist-info/METADATA,sha256=B3uOTdW_lEGLkzEQU74Yp61o2YKVK_5ZU0NUbOaimbY,9306
11
- apache_airflow_providers_common_sql-1.3.4.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
12
- apache_airflow_providers_common_sql-1.3.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
13
- apache_airflow_providers_common_sql-1.3.4.dist-info/entry_points.txt,sha256=cp5O8pmTdaXOHe20cn7orc8OPd05YFN2NWZ05KLd2QU,108
14
- apache_airflow_providers_common_sql-1.3.4.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
15
- apache_airflow_providers_common_sql-1.3.4.dist-info/RECORD,,