apache-airflow-providers-snowflake 6.5.0__tar.gz → 6.5.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of apache-airflow-providers-snowflake might be problematic. Click here for more details.
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/PKG-INFO +19 -15
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/README.rst +11 -9
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/changelog.rst +21 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/index.rst +11 -8
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/provider.yaml +1 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/pyproject.toml +10 -6
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/__init__.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0/tests/system → apache_airflow_providers_snowflake-6.5.1/src/airflow/providers}/__init__.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/__init__.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/hooks/snowflake.py +5 -5
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/hooks/snowflake_sql_api.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/operators/snowflake.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/utils/openlineage.py +19 -7
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/version_compat.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0/src/airflow/providers → apache_airflow_providers_snowflake-6.5.1/tests/system}/__init__.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/system/snowflake/example_snowpark_decorator.py +6 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/__init__.py +1 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/decorators/test_snowpark.py +9 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/operators/test_snowflake.py +9 -1
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/operators/test_snowpark.py +2 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/utils/test_openlineage.py +22 -25
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/utils/test_snowpark.py +2 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/commits.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/conf.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/connections/snowflake.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/decorators/index.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/decorators/snowpark.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/integration-logos/Snowflake.png +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/operators/copy_into_snowflake.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/operators/index.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/operators/snowflake.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/operators/snowpark.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/security.rst +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/LICENSE +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/decorators/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/decorators/snowpark.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/get_provider_info.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/hooks/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/operators/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/operators/snowpark.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/transfers/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/transfers/copy_into_snowflake.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/triggers/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/triggers/snowflake_trigger.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/utils/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/utils/common.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/utils/snowpark.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/src/airflow/providers/snowflake/utils/sql_api_generate_jwt.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/conftest.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/system/snowflake/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/system/snowflake/example_copy_into_snowflake.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/system/snowflake/example_snowflake.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/system/snowflake/example_snowflake_snowflake_op_template_file.sql +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/system/snowflake/example_snowpark_operator.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/decorators/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/hooks/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/hooks/test_snowflake.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/hooks/test_snowflake_sql_api.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/hooks/test_sql.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/operators/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/operators/test_snowflake_sql.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/transfers/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/transfers/test_copy_into_snowflake.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/triggers/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/triggers/test_snowflake.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/utils/__init__.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/utils/test_common.py +0 -0
- {apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/tests/unit/snowflake/utils/test_sql_api_generate_jwt.py +0 -0
{apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/PKG-INFO
RENAMED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: apache-airflow-providers-snowflake
|
|
3
|
-
Version: 6.5.0
|
|
3
|
+
Version: 6.5.1
|
|
4
4
|
Summary: Provider package apache-airflow-providers-snowflake for Apache Airflow
|
|
5
5
|
Keywords: airflow-provider,snowflake,airflow,integration
|
|
6
6
|
Author-email: Apache Software Foundation <dev@airflow.apache.org>
|
|
7
7
|
Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
|
|
8
|
-
Requires-Python:
|
|
8
|
+
Requires-Python: >=3.10
|
|
9
9
|
Description-Content-Type: text/x-rst
|
|
10
10
|
Classifier: Development Status :: 5 - Production/Stable
|
|
11
11
|
Classifier: Environment :: Console
|
|
@@ -18,21 +18,23 @@ Classifier: License :: OSI Approved :: Apache Software License
|
|
|
18
18
|
Classifier: Programming Language :: Python :: 3.10
|
|
19
19
|
Classifier: Programming Language :: Python :: 3.11
|
|
20
20
|
Classifier: Programming Language :: Python :: 3.12
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
21
22
|
Classifier: Topic :: System :: Monitoring
|
|
22
23
|
Requires-Dist: apache-airflow>=2.10.0
|
|
23
24
|
Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
|
|
24
25
|
Requires-Dist: apache-airflow-providers-common-sql>=1.21.0
|
|
25
26
|
Requires-Dist: pandas>=2.1.2; python_version <"3.13"
|
|
26
27
|
Requires-Dist: pandas>=2.2.3; python_version >="3.13"
|
|
27
|
-
Requires-Dist: pyarrow>=16.1.0
|
|
28
|
+
Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
|
|
29
|
+
Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
|
|
28
30
|
Requires-Dist: snowflake-connector-python>=3.7.1
|
|
29
31
|
Requires-Dist: snowflake-sqlalchemy>=1.4.0
|
|
30
32
|
Requires-Dist: snowflake-snowpark-python>=1.17.0;python_version<'3.12'
|
|
31
|
-
Requires-Dist: snowflake-snowpark-python>=1.27.0;python_version>='3.12'
|
|
33
|
+
Requires-Dist: snowflake-snowpark-python>=1.27.0,<9999;python_version>='3.12' and python_version<'3.13'
|
|
32
34
|
Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
|
|
33
35
|
Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
|
|
34
|
-
Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/changelog.html
|
|
35
|
-
Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0
|
|
36
|
+
Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1/changelog.html
|
|
37
|
+
Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1
|
|
36
38
|
Project-URL: Mastodon, https://fosstodon.org/@airflow
|
|
37
39
|
Project-URL: Slack Chat, https://s.apache.org/airflow-slack
|
|
38
40
|
Project-URL: Source Code, https://github.com/apache/airflow
|
|
@@ -64,8 +66,9 @@ Provides-Extra: openlineage
|
|
|
64
66
|
|
|
65
67
|
Package ``apache-airflow-providers-snowflake``
|
|
66
68
|
|
|
67
|
-
Release: ``6.5.0``
|
|
69
|
+
Release: ``6.5.1``
|
|
68
70
|
|
|
71
|
+
Release Date: ``|PypiReleaseDate|``
|
|
69
72
|
|
|
70
73
|
`Snowflake <https://www.snowflake.com/>`__
|
|
71
74
|
|
|
@@ -77,7 +80,7 @@ This is a provider package for ``snowflake`` provider. All classes for this prov
|
|
|
77
80
|
are in ``airflow.providers.snowflake`` python package.
|
|
78
81
|
|
|
79
82
|
You can find package information and changelog for the provider
|
|
80
|
-
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/>`_.
|
|
83
|
+
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1/>`_.
|
|
81
84
|
|
|
82
85
|
Installation
|
|
83
86
|
------------
|
|
@@ -86,25 +89,26 @@ You can install this package on top of an existing Airflow 2 installation (see `
|
|
|
86
89
|
for the minimum Airflow version supported) via
|
|
87
90
|
``pip install apache-airflow-providers-snowflake``
|
|
88
91
|
|
|
89
|
-
The package supports the following python versions: 3.10,3.11,3.12
|
|
92
|
+
The package supports the following python versions: 3.10,3.11,3.12,3.13
|
|
90
93
|
|
|
91
94
|
Requirements
|
|
92
95
|
------------
|
|
93
96
|
|
|
94
|
-
==========================================
|
|
97
|
+
========================================== ========================================================================
|
|
95
98
|
PIP package Version required
|
|
96
|
-
==========================================
|
|
99
|
+
========================================== ========================================================================
|
|
97
100
|
``apache-airflow`` ``>=2.10.0``
|
|
98
101
|
``apache-airflow-providers-common-compat`` ``>=1.6.0``
|
|
99
102
|
``apache-airflow-providers-common-sql`` ``>=1.21.0``
|
|
100
103
|
``pandas`` ``>=2.1.2; python_version < "3.13"``
|
|
101
104
|
``pandas`` ``>=2.2.3; python_version >= "3.13"``
|
|
102
|
-
``pyarrow`` ``>=16.1.0``
|
|
105
|
+
``pyarrow`` ``>=16.1.0; python_version < "3.13"``
|
|
106
|
+
``pyarrow`` ``>=18.0.0; python_version >= "3.13"``
|
|
103
107
|
``snowflake-connector-python`` ``>=3.7.1``
|
|
104
108
|
``snowflake-sqlalchemy`` ``>=1.4.0``
|
|
105
109
|
``snowflake-snowpark-python`` ``>=1.17.0; python_version < "3.12"``
|
|
106
|
-
``snowflake-snowpark-python`` ``>=1.27.0; python_version >= "3.12"``
|
|
107
|
-
==========================================
|
|
110
|
+
``snowflake-snowpark-python`` ``>=1.27.0,<9999; python_version >= "3.12" and python_version < "3.13"``
|
|
111
|
+
========================================== ========================================================================
|
|
108
112
|
|
|
109
113
|
Cross provider package dependencies
|
|
110
114
|
-----------------------------------
|
|
@@ -128,5 +132,5 @@ Dependent package
|
|
|
128
132
|
================================================================================================================== =================
|
|
129
133
|
|
|
130
134
|
The changelog for the provider package can be found in the
|
|
131
|
-
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/changelog.html>`_.
|
|
135
|
+
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1/changelog.html>`_.
|
|
132
136
|
|
{apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/README.rst
RENAMED
|
@@ -23,8 +23,9 @@
|
|
|
23
23
|
|
|
24
24
|
Package ``apache-airflow-providers-snowflake``
|
|
25
25
|
|
|
26
|
-
Release: ``6.5.0``
|
|
26
|
+
Release: ``6.5.1``
|
|
27
27
|
|
|
28
|
+
Release Date: ``|PypiReleaseDate|``
|
|
28
29
|
|
|
29
30
|
`Snowflake <https://www.snowflake.com/>`__
|
|
30
31
|
|
|
@@ -36,7 +37,7 @@ This is a provider package for ``snowflake`` provider. All classes for this prov
|
|
|
36
37
|
are in ``airflow.providers.snowflake`` python package.
|
|
37
38
|
|
|
38
39
|
You can find package information and changelog for the provider
|
|
39
|
-
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/>`_.
|
|
40
|
+
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1/>`_.
|
|
40
41
|
|
|
41
42
|
Installation
|
|
42
43
|
------------
|
|
@@ -45,25 +46,26 @@ You can install this package on top of an existing Airflow 2 installation (see `
|
|
|
45
46
|
for the minimum Airflow version supported) via
|
|
46
47
|
``pip install apache-airflow-providers-snowflake``
|
|
47
48
|
|
|
48
|
-
The package supports the following python versions: 3.10,3.11,3.12
|
|
49
|
+
The package supports the following python versions: 3.10,3.11,3.12,3.13
|
|
49
50
|
|
|
50
51
|
Requirements
|
|
51
52
|
------------
|
|
52
53
|
|
|
53
|
-
==========================================
|
|
54
|
+
========================================== ========================================================================
|
|
54
55
|
PIP package Version required
|
|
55
|
-
==========================================
|
|
56
|
+
========================================== ========================================================================
|
|
56
57
|
``apache-airflow`` ``>=2.10.0``
|
|
57
58
|
``apache-airflow-providers-common-compat`` ``>=1.6.0``
|
|
58
59
|
``apache-airflow-providers-common-sql`` ``>=1.21.0``
|
|
59
60
|
``pandas`` ``>=2.1.2; python_version < "3.13"``
|
|
60
61
|
``pandas`` ``>=2.2.3; python_version >= "3.13"``
|
|
61
|
-
``pyarrow`` ``>=16.1.0``
|
|
62
|
+
``pyarrow`` ``>=16.1.0; python_version < "3.13"``
|
|
63
|
+
``pyarrow`` ``>=18.0.0; python_version >= "3.13"``
|
|
62
64
|
``snowflake-connector-python`` ``>=3.7.1``
|
|
63
65
|
``snowflake-sqlalchemy`` ``>=1.4.0``
|
|
64
66
|
``snowflake-snowpark-python`` ``>=1.17.0; python_version < "3.12"``
|
|
65
|
-
``snowflake-snowpark-python`` ``>=1.27.0; python_version >= "3.12"``
|
|
66
|
-
==========================================
|
|
67
|
+
``snowflake-snowpark-python`` ``>=1.27.0,<9999; python_version >= "3.12" and python_version < "3.13"``
|
|
68
|
+
========================================== ========================================================================
|
|
67
69
|
|
|
68
70
|
Cross provider package dependencies
|
|
69
71
|
-----------------------------------
|
|
@@ -87,4 +89,4 @@ Dependent package
|
|
|
87
89
|
================================================================================================================== =================
|
|
88
90
|
|
|
89
91
|
The changelog for the provider package can be found in the
|
|
90
|
-
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/changelog.html>`_.
|
|
92
|
+
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1/changelog.html>`_.
|
|
@@ -27,6 +27,27 @@
|
|
|
27
27
|
Changelog
|
|
28
28
|
---------
|
|
29
29
|
|
|
30
|
+
6.5.1
|
|
31
|
+
.....
|
|
32
|
+
|
|
33
|
+
Misc
|
|
34
|
+
~~~~
|
|
35
|
+
|
|
36
|
+
* ``Add Python 3.13 support for Airflow. (#46891)``
|
|
37
|
+
* ``another magic pip resolver hint (#53329)``
|
|
38
|
+
* ``fix: Improve logging and timeouts in OL helpers (#53139)``
|
|
39
|
+
* ``Remove upper-binding for "python-requires" (#52980)``
|
|
40
|
+
* ``Cleanup type ignores in snowflake provider where possible (#53258)``
|
|
41
|
+
* ``Remove type ignore across codebase after mypy upgrade (#53243)``
|
|
42
|
+
* ``Make snowpark optional for snowflake provider and disable it for Python 3.13 (#53489)``
|
|
43
|
+
* ``Deprecate decorators from Core (#53629)``
|
|
44
|
+
|
|
45
|
+
.. Below changes are excluded from the changelog. Move them to
|
|
46
|
+
appropriate section above if needed. Do not delete the lines(!):
|
|
47
|
+
* ``Make dag_version_id in TI non-nullable (#50825)``
|
|
48
|
+
* ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
|
|
49
|
+
* ``Replace 'mock.patch("utcnow")' with time_machine. (#53642)``
|
|
50
|
+
|
|
30
51
|
6.5.0
|
|
31
52
|
.....
|
|
32
53
|
|
{apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/index.rst
RENAMED
|
@@ -78,7 +78,9 @@ apache-airflow-providers-snowflake package
|
|
|
78
78
|
`Snowflake <https://www.snowflake.com/>`__
|
|
79
79
|
|
|
80
80
|
|
|
81
|
-
Release: 6.5.0
|
|
81
|
+
Release: 6.5.1
|
|
82
|
+
|
|
83
|
+
Release Date: ``|PypiReleaseDate|``
|
|
82
84
|
|
|
83
85
|
Provider package
|
|
84
86
|
----------------
|
|
@@ -98,20 +100,21 @@ Requirements
|
|
|
98
100
|
|
|
99
101
|
The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
|
|
100
102
|
|
|
101
|
-
==========================================
|
|
103
|
+
========================================== ========================================================================
|
|
102
104
|
PIP package Version required
|
|
103
|
-
==========================================
|
|
105
|
+
========================================== ========================================================================
|
|
104
106
|
``apache-airflow`` ``>=2.10.0``
|
|
105
107
|
``apache-airflow-providers-common-compat`` ``>=1.6.0``
|
|
106
108
|
``apache-airflow-providers-common-sql`` ``>=1.21.0``
|
|
107
109
|
``pandas`` ``>=2.1.2; python_version < "3.13"``
|
|
108
110
|
``pandas`` ``>=2.2.3; python_version >= "3.13"``
|
|
109
|
-
``pyarrow`` ``>=16.1.0``
|
|
111
|
+
``pyarrow`` ``>=16.1.0; python_version < "3.13"``
|
|
112
|
+
``pyarrow`` ``>=18.0.0; python_version >= "3.13"``
|
|
110
113
|
``snowflake-connector-python`` ``>=3.7.1``
|
|
111
114
|
``snowflake-sqlalchemy`` ``>=1.4.0``
|
|
112
115
|
``snowflake-snowpark-python`` ``>=1.17.0; python_version < "3.12"``
|
|
113
|
-
``snowflake-snowpark-python`` ``>=1.27.0; python_version >= "3.12"``
|
|
114
|
-
==========================================
|
|
116
|
+
``snowflake-snowpark-python`` ``>=1.27.0,<9999; python_version >= "3.12" and python_version < "3.13"``
|
|
117
|
+
========================================== ========================================================================
|
|
115
118
|
|
|
116
119
|
Cross provider package dependencies
|
|
117
120
|
-----------------------------------
|
|
@@ -140,5 +143,5 @@ Downloading official packages
|
|
|
140
143
|
You can download officially released packages and verify their checksums and signatures from the
|
|
141
144
|
`Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
|
|
142
145
|
|
|
143
|
-
* `The apache-airflow-providers-snowflake 6.5.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.0.tar.gz.sha512>`__)
|
|
144
|
-
* `The apache-airflow-providers-snowflake 6.5.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.0-py3-none-any.whl.sha512>`__)
|
|
146
|
+
* `The apache-airflow-providers-snowflake 6.5.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.1.tar.gz.sha512>`__)
|
|
147
|
+
* `The apache-airflow-providers-snowflake 6.5.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_snowflake-6.5.1-py3-none-any.whl.sha512>`__)
|
{apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/pyproject.toml
RENAMED
|
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
|
|
|
25
25
|
|
|
26
26
|
[project]
|
|
27
27
|
name = "apache-airflow-providers-snowflake"
|
|
28
|
-
version = "6.5.0"
|
|
28
|
+
version = "6.5.1"
|
|
29
29
|
description = "Provider package apache-airflow-providers-snowflake for Apache Airflow"
|
|
30
30
|
readme = "README.rst"
|
|
31
31
|
authors = [
|
|
@@ -47,9 +47,10 @@ classifiers = [
|
|
|
47
47
|
"Programming Language :: Python :: 3.10",
|
|
48
48
|
"Programming Language :: Python :: 3.11",
|
|
49
49
|
"Programming Language :: Python :: 3.12",
|
|
50
|
+
"Programming Language :: Python :: 3.13",
|
|
50
51
|
"Topic :: System :: Monitoring",
|
|
51
52
|
]
|
|
52
|
-
requires-python = "
|
|
53
|
+
requires-python = ">=3.10"
|
|
53
54
|
|
|
54
55
|
# The dependencies should be modified in place in the generated file.
|
|
55
56
|
# Any change in the dependencies is preserved when the file is regenerated
|
|
@@ -61,11 +62,14 @@ dependencies = [
|
|
|
61
62
|
"apache-airflow-providers-common-sql>=1.21.0",
|
|
62
63
|
'pandas>=2.1.2; python_version <"3.13"',
|
|
63
64
|
'pandas>=2.2.3; python_version >="3.13"',
|
|
64
|
-
"pyarrow>=16.1.0",
|
|
65
|
+
"pyarrow>=16.1.0; python_version < '3.13'",
|
|
66
|
+
"pyarrow>=18.0.0; python_version >= '3.13'",
|
|
65
67
|
"snowflake-connector-python>=3.7.1",
|
|
66
68
|
"snowflake-sqlalchemy>=1.4.0",
|
|
67
69
|
"snowflake-snowpark-python>=1.17.0;python_version<'3.12'",
|
|
68
|
-
"
|
|
70
|
+
# The "<9999" is a hint to the pip resolver to resolve this requirement early,
|
|
71
|
+
# can be removed when the pip resolver is improved
|
|
72
|
+
"snowflake-snowpark-python>=1.27.0,<9999;python_version>='3.12' and python_version<'3.13'",
|
|
69
73
|
]
|
|
70
74
|
|
|
71
75
|
# The optional dependencies should be modified in place in the generated file
|
|
@@ -113,8 +117,8 @@ apache-airflow-providers-common-sql = {workspace = true}
|
|
|
113
117
|
apache-airflow-providers-standard = {workspace = true}
|
|
114
118
|
|
|
115
119
|
[project.urls]
|
|
116
|
-
"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0"
|
|
117
|
-
"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/changelog.html"
|
|
120
|
+
"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1"
|
|
121
|
+
"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.1/changelog.html"
|
|
118
122
|
"Bug Tracker" = "https://github.com/apache/airflow/issues"
|
|
119
123
|
"Source Code" = "https://github.com/apache/airflow"
|
|
120
124
|
"Slack Chat" = "https://s.apache.org/airflow-slack"
|
|
@@ -14,4 +14,4 @@
|
|
|
14
14
|
# KIND, either express or implied. See the License for the
|
|
15
15
|
# specific language governing permissions and limitations
|
|
16
16
|
# under the License.
|
|
17
|
-
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
17
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
@@ -14,4 +14,4 @@
|
|
|
14
14
|
# KIND, either express or implied. See the License for the
|
|
15
15
|
# specific language governing permissions and limitations
|
|
16
16
|
# under the License.
|
|
17
|
-
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
17
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
|
|
|
29
29
|
|
|
30
30
|
__all__ = ["__version__"]
|
|
31
31
|
|
|
32
|
-
__version__ = "6.5.0"
|
|
32
|
+
__version__ = "6.5.1"
|
|
33
33
|
|
|
34
34
|
if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
|
|
35
35
|
"2.10.0"
|
|
@@ -253,7 +253,7 @@ class SnowflakeHook(DbApiHook):
|
|
|
253
253
|
|
|
254
254
|
This is used in ``get_uri()`` and ``get_connection()``.
|
|
255
255
|
"""
|
|
256
|
-
conn = self.get_connection(self.
|
|
256
|
+
conn = self.get_connection(self.get_conn_id())
|
|
257
257
|
extra_dict = conn.extra_dejson
|
|
258
258
|
account = self._get_field(extra_dict, "account") or ""
|
|
259
259
|
warehouse = self._get_field(extra_dict, "warehouse") or ""
|
|
@@ -461,7 +461,7 @@ class SnowflakeHook(DbApiHook):
|
|
|
461
461
|
def get_autocommit(self, conn):
|
|
462
462
|
return getattr(conn, "autocommit_mode", False)
|
|
463
463
|
|
|
464
|
-
@overload
|
|
464
|
+
@overload
|
|
465
465
|
def run(
|
|
466
466
|
self,
|
|
467
467
|
sql: str | Iterable[str],
|
|
@@ -544,16 +544,16 @@ class SnowflakeHook(DbApiHook):
|
|
|
544
544
|
results = []
|
|
545
545
|
for sql_statement in sql_list:
|
|
546
546
|
self.log.info("Running statement: %s, parameters: %s", sql_statement, parameters)
|
|
547
|
-
self._run_command(cur, sql_statement, parameters)
|
|
547
|
+
self._run_command(cur, sql_statement, parameters)
|
|
548
548
|
|
|
549
549
|
if handler is not None:
|
|
550
|
-
result = self._make_common_data_structure(handler(cur))
|
|
550
|
+
result = self._make_common_data_structure(handler(cur))
|
|
551
551
|
if return_single_query_results(sql, return_last, split_statements):
|
|
552
552
|
_last_result = result
|
|
553
553
|
_last_description = cur.description
|
|
554
554
|
else:
|
|
555
555
|
results.append(result)
|
|
556
|
-
self.descriptions.append(cur.description)
|
|
556
|
+
self.descriptions.append(cur.description)
|
|
557
557
|
|
|
558
558
|
query_id = cur.sfqid
|
|
559
559
|
self.log.info("Rows affected: %s", cur.rowcount)
|
|
@@ -464,7 +464,7 @@ class SnowflakeSqlApiHook(SnowflakeHook):
|
|
|
464
464
|
:return: The response object from the API call.
|
|
465
465
|
"""
|
|
466
466
|
async with aiohttp.ClientSession(headers=headers) as session:
|
|
467
|
-
async for attempt in AsyncRetrying(**self.retry_config):
|
|
467
|
+
async for attempt in AsyncRetrying(**self.retry_config):
|
|
468
468
|
with attempt:
|
|
469
469
|
if method.upper() == "GET":
|
|
470
470
|
async with session.request(method=method.lower(), url=url, params=params) as response:
|
|
@@ -427,7 +427,7 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
|
|
|
427
427
|
"""
|
|
428
428
|
self.log.info("Executing: %s", self.sql)
|
|
429
429
|
self.query_ids = self._hook.execute_query(
|
|
430
|
-
self.sql,
|
|
430
|
+
self.sql,
|
|
431
431
|
statement_count=self.statement_count,
|
|
432
432
|
bindings=self.bindings,
|
|
433
433
|
)
|
|
@@ -199,6 +199,7 @@ def _run_single_query_with_hook(hook: SnowflakeHook, sql: str) -> list[dict]:
|
|
|
199
199
|
with closing(hook.get_conn()) as conn:
|
|
200
200
|
hook.set_autocommit(conn, False)
|
|
201
201
|
with hook._get_cursor(conn, return_dictionaries=True) as cur:
|
|
202
|
+
cur.execute("ALTER SESSION SET STATEMENT_TIMEOUT_IN_SECONDS = 3;") # only for this session
|
|
202
203
|
cur.execute(sql)
|
|
203
204
|
result = cur.fetchall()
|
|
204
205
|
conn.commit()
|
|
@@ -232,25 +233,36 @@ def _get_queries_details_from_snowflake(
|
|
|
232
233
|
if not query_ids:
|
|
233
234
|
return {}
|
|
234
235
|
query_condition = f"IN {tuple(query_ids)}" if len(query_ids) > 1 else f"= '{query_ids[0]}'"
|
|
236
|
+
# https://docs.snowflake.com/en/sql-reference/account-usage#differences-between-account-usage-and-information-schema
|
|
237
|
+
# INFORMATION_SCHEMA.QUERY_HISTORY has no latency, so it's better than ACCOUNT_USAGE.QUERY_HISTORY
|
|
238
|
+
# https://docs.snowflake.com/en/sql-reference/functions/query_history
|
|
239
|
+
# SNOWFLAKE.INFORMATION_SCHEMA.QUERY_HISTORY() function seems the most suitable function for the job,
|
|
240
|
+
# we get history of queries executed by the user, and we're using the same credentials.
|
|
235
241
|
query = (
|
|
236
242
|
"SELECT "
|
|
237
243
|
"QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
238
244
|
"FROM "
|
|
239
|
-
"table(information_schema.query_history()) "
|
|
245
|
+
"table(snowflake.information_schema.query_history()) "
|
|
240
246
|
f"WHERE "
|
|
241
247
|
f"QUERY_ID {query_condition}"
|
|
242
248
|
f";"
|
|
243
249
|
)
|
|
244
250
|
|
|
245
251
|
try:
|
|
246
|
-
#
|
|
247
|
-
|
|
248
|
-
|
|
252
|
+
# Note: need to lazy import here to avoid circular imports
|
|
253
|
+
from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook
|
|
254
|
+
|
|
255
|
+
if isinstance(hook, SnowflakeSqlApiHook):
|
|
256
|
+
result = _run_single_query_with_api_hook(hook=hook, sql=query)
|
|
249
257
|
result = _process_data_from_api(data=result)
|
|
250
258
|
else:
|
|
251
259
|
result = _run_single_query_with_hook(hook=hook, sql=query)
|
|
252
260
|
except Exception as e:
|
|
253
|
-
log.
|
|
261
|
+
log.info(
|
|
262
|
+
"OpenLineage encountered an error while retrieving additional metadata about SQL queries"
|
|
263
|
+
" from Snowflake. The process will continue with default values. Error details: %s",
|
|
264
|
+
e,
|
|
265
|
+
)
|
|
254
266
|
result = []
|
|
255
267
|
|
|
256
268
|
return {row["QUERY_ID"]: row for row in result} if result else {}
|
|
@@ -416,8 +428,8 @@ def emit_openlineage_events_for_snowflake_queries(
|
|
|
416
428
|
event_batch = _create_snowflake_event_pair(
|
|
417
429
|
job_namespace=namespace(),
|
|
418
430
|
job_name=f"{task_instance.dag_id}.{task_instance.task_id}.query.{counter}",
|
|
419
|
-
start_time=query_metadata.get("START_TIME", default_event_time),
|
|
420
|
-
end_time=query_metadata.get("END_TIME", default_event_time),
|
|
431
|
+
start_time=query_metadata.get("START_TIME", default_event_time),
|
|
432
|
+
end_time=query_metadata.get("END_TIME", default_event_time),
|
|
421
433
|
# `EXECUTION_STATUS` can be `success`, `fail` or `incident` (Snowflake outage, so still failure)
|
|
422
434
|
is_successful=query_metadata.get("EXECUTION_STATUS", default_state).lower() == "success",
|
|
423
435
|
run_facets={**query_specific_run_facets, **common_run_facets, **additional_run_facets},
|
|
@@ -37,7 +37,7 @@ AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
|
|
|
37
37
|
if AIRFLOW_V_3_0_PLUS:
|
|
38
38
|
from airflow.sdk import BaseOperator
|
|
39
39
|
else:
|
|
40
|
-
from airflow.models import BaseOperator
|
|
40
|
+
from airflow.models import BaseOperator
|
|
41
41
|
|
|
42
42
|
__all__ = [
|
|
43
43
|
"AIRFLOW_V_3_0_PLUS",
|
|
@@ -14,4 +14,4 @@
|
|
|
14
14
|
# KIND, either express or implied. See the License for the
|
|
15
15
|
# specific language governing permissions and limitations
|
|
16
16
|
# under the License.
|
|
17
|
-
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
17
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
@@ -28,7 +28,12 @@ if TYPE_CHECKING:
|
|
|
28
28
|
from snowflake.snowpark import Session
|
|
29
29
|
|
|
30
30
|
from airflow import DAG
|
|
31
|
-
|
|
31
|
+
|
|
32
|
+
try:
|
|
33
|
+
from airflow.sdk import task
|
|
34
|
+
except ImportError:
|
|
35
|
+
# Airflow 2 path
|
|
36
|
+
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
|
|
32
37
|
|
|
33
38
|
SNOWFLAKE_CONN_ID = "my_snowflake_conn"
|
|
34
39
|
DAG_ID = "example_snowpark_decorator"
|
|
@@ -14,4 +14,4 @@
|
|
|
14
14
|
# KIND, either express or implied. See the License for the
|
|
15
15
|
# specific language governing permissions and limitations
|
|
16
16
|
# under the License.
|
|
17
|
-
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
17
|
+
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
|
@@ -23,7 +23,15 @@ from unittest import mock
|
|
|
23
23
|
|
|
24
24
|
import pytest
|
|
25
25
|
|
|
26
|
-
|
|
26
|
+
pytest.importorskip("snowflake-snowpark-python")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
|
|
30
|
+
|
|
31
|
+
if AIRFLOW_V_3_0_PLUS:
|
|
32
|
+
from airflow.sdk import task
|
|
33
|
+
else:
|
|
34
|
+
from airflow.decorators import task # type: ignore[attr-defined,no-redef]
|
|
27
35
|
from airflow.utils import timezone
|
|
28
36
|
|
|
29
37
|
from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
|
|
@@ -177,6 +177,13 @@ def create_context(task, dag=None):
|
|
|
177
177
|
tzinfo = pendulum.timezone("UTC")
|
|
178
178
|
logical_date = timezone.datetime(2022, 1, 1, 1, 0, 0, tzinfo=tzinfo)
|
|
179
179
|
if AIRFLOW_V_3_0_PLUS:
|
|
180
|
+
from airflow.models.dag_version import DagVersion
|
|
181
|
+
from airflow.models.serialized_dag import SerializedDagModel
|
|
182
|
+
|
|
183
|
+
dag.sync_to_db()
|
|
184
|
+
SerializedDagModel.write_dag(dag, bundle_name="testing")
|
|
185
|
+
dag_version = DagVersion.get_latest_version(dag.dag_id)
|
|
186
|
+
task_instance = TaskInstance(task=task, run_id="test_run_id", dag_version_id=dag_version.id)
|
|
180
187
|
dag_run = DagRun(
|
|
181
188
|
dag_id=dag.dag_id,
|
|
182
189
|
logical_date=logical_date,
|
|
@@ -191,7 +198,7 @@ def create_context(task, dag=None):
|
|
|
191
198
|
run_id=DagRun.generate_run_id(DagRunType.MANUAL, logical_date),
|
|
192
199
|
)
|
|
193
200
|
|
|
194
|
-
|
|
201
|
+
task_instance = TaskInstance(task=task)
|
|
195
202
|
task_instance.dag_run = dag_run
|
|
196
203
|
task_instance.xcom_push = mock.Mock()
|
|
197
204
|
date_key = "logical_date" if AIRFLOW_V_3_0_PLUS else "execution_date"
|
|
@@ -208,6 +215,7 @@ def create_context(task, dag=None):
|
|
|
208
215
|
}
|
|
209
216
|
|
|
210
217
|
|
|
218
|
+
@pytest.mark.db_test
|
|
211
219
|
class TestSnowflakeSqlApiOperator:
|
|
212
220
|
@pytest.fixture
|
|
213
221
|
def mock_execute_query(self):
|
|
@@ -221,7 +221,9 @@ def test_run_single_query_with_hook(mock_get_cursor, mock_set_autocommit, mock_g
|
|
|
221
221
|
sql_query = "SELECT * FROM test_table;"
|
|
222
222
|
result = _run_single_query_with_hook(hook, sql_query)
|
|
223
223
|
|
|
224
|
-
mock_cursor.execute.
|
|
224
|
+
mock_cursor.execute.assert_has_calls(
|
|
225
|
+
[mock.call("ALTER SESSION SET STATEMENT_TIMEOUT_IN_SECONDS = 3;"), mock.call(sql_query)]
|
|
226
|
+
)
|
|
225
227
|
assert result == [{"col1": "value1"}, {"col2": "value2"}]
|
|
226
228
|
|
|
227
229
|
|
|
@@ -302,7 +304,7 @@ def test_get_queries_details_from_snowflake_single_query(mock_run_single_query):
|
|
|
302
304
|
details = _get_queries_details_from_snowflake(hook, query_ids)
|
|
303
305
|
expected_query = (
|
|
304
306
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
305
|
-
"FROM table(information_schema.query_history()) "
|
|
307
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
306
308
|
"WHERE QUERY_ID = 'ABC';"
|
|
307
309
|
)
|
|
308
310
|
mock_run_single_query.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -330,7 +332,7 @@ def test_get_queries_details_from_snowflake_single_query_api_hook(mock_run_singl
|
|
|
330
332
|
|
|
331
333
|
expected_query = (
|
|
332
334
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
333
|
-
"FROM table(information_schema.query_history()) "
|
|
335
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
334
336
|
"WHERE QUERY_ID = 'ABC';"
|
|
335
337
|
)
|
|
336
338
|
expected_details = {
|
|
@@ -377,7 +379,7 @@ def test_get_queries_details_from_snowflake_multiple_queries(mock_run_single_que
|
|
|
377
379
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
378
380
|
expected_query = (
|
|
379
381
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
380
|
-
"FROM table(information_schema.query_history()) "
|
|
382
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
381
383
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
382
384
|
)
|
|
383
385
|
mock_run_single_query.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -415,7 +417,7 @@ def test_get_queries_details_from_snowflake_multiple_queries_api_hook(mock_run_s
|
|
|
415
417
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
416
418
|
expected_query = (
|
|
417
419
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
418
|
-
"FROM table(information_schema.query_history()) "
|
|
420
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
419
421
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
420
422
|
)
|
|
421
423
|
expected_details = [
|
|
@@ -453,7 +455,7 @@ def test_get_queries_details_from_snowflake_no_data_found(mock_run_single_query)
|
|
|
453
455
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
454
456
|
expected_query = (
|
|
455
457
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
456
|
-
"FROM table(information_schema.query_history()) "
|
|
458
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
457
459
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
458
460
|
)
|
|
459
461
|
mock_run_single_query.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -471,7 +473,7 @@ def test_get_queries_details_from_snowflake_no_data_found_api_hook(mock_run_sing
|
|
|
471
473
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
472
474
|
expected_query = (
|
|
473
475
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
474
|
-
"FROM table(information_schema.query_history()) "
|
|
476
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
475
477
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
476
478
|
)
|
|
477
479
|
mock_run_single_query_api.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -489,7 +491,7 @@ def test_get_queries_details_from_snowflake_error(mock_run_single_query):
|
|
|
489
491
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
490
492
|
expected_query = (
|
|
491
493
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
492
|
-
"FROM table(information_schema.query_history()) "
|
|
494
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
493
495
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
494
496
|
)
|
|
495
497
|
mock_run_single_query.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -507,7 +509,7 @@ def test_get_queries_details_from_snowflake_error_api_hook(mock_run_single_query
|
|
|
507
509
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
508
510
|
expected_query = (
|
|
509
511
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
510
|
-
"FROM table(information_schema.query_history()) "
|
|
512
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
511
513
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
512
514
|
)
|
|
513
515
|
mock_run_single_query_api.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -529,7 +531,7 @@ def test_get_queries_details_from_snowflake_error_api_hook_process_data(
|
|
|
529
531
|
expected_query_condition = f"IN {tuple(query_ids)}"
|
|
530
532
|
expected_query = (
|
|
531
533
|
"SELECT QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
|
|
532
|
-
"FROM table(information_schema.query_history()) "
|
|
534
|
+
"FROM table(snowflake.information_schema.query_history()) "
|
|
533
535
|
f"WHERE QUERY_ID {expected_query_condition};"
|
|
534
536
|
)
|
|
535
537
|
mock_run_single_query_api.assert_called_once_with(hook=hook, sql=expected_query)
|
|
@@ -578,15 +580,14 @@ def test_create_snowflake_event_pair_success(mock_generate_uuid, is_successful):
|
|
|
578
580
|
|
|
579
581
|
@mock.patch("importlib.metadata.version", return_value="2.3.0")
|
|
580
582
|
@mock.patch("openlineage.client.uuid.generate_new_uuid")
|
|
581
|
-
@mock.patch("airflow.utils.timezone.utcnow")
|
|
582
583
|
def test_emit_openlineage_events_for_snowflake_queries_with_extra_metadata(
|
|
583
|
-
|
|
584
|
+
mock_generate_uuid, mock_version, time_machine
|
|
584
585
|
):
|
|
585
586
|
fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
|
|
586
587
|
mock_generate_uuid.return_value = fake_uuid
|
|
587
588
|
|
|
588
589
|
default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
|
|
589
|
-
|
|
590
|
+
time_machine.move_to(default_event_time, tick=False)
|
|
590
591
|
|
|
591
592
|
query_ids = ["query1", "query2", "query3"]
|
|
592
593
|
original_query_ids = copy.deepcopy(query_ids)
|
|
@@ -819,15 +820,14 @@ def test_emit_openlineage_events_for_snowflake_queries_with_extra_metadata(
|
|
|
819
820
|
|
|
820
821
|
@mock.patch("importlib.metadata.version", return_value="2.3.0")
|
|
821
822
|
@mock.patch("openlineage.client.uuid.generate_new_uuid")
|
|
822
|
-
@mock.patch("airflow.utils.timezone.utcnow")
|
|
823
823
|
def test_emit_openlineage_events_for_snowflake_queries_without_extra_metadata(
|
|
824
|
-
|
|
824
|
+
mock_generate_uuid, mock_version, time_machine
|
|
825
825
|
):
|
|
826
826
|
fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
|
|
827
827
|
mock_generate_uuid.return_value = fake_uuid
|
|
828
828
|
|
|
829
829
|
default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
|
|
830
|
-
|
|
830
|
+
time_machine.move_to(default_event_time, tick=False)
|
|
831
831
|
|
|
832
832
|
query_ids = ["query1"]
|
|
833
833
|
original_query_ids = copy.deepcopy(query_ids)
|
|
@@ -938,15 +938,14 @@ def test_emit_openlineage_events_for_snowflake_queries_without_extra_metadata(
|
|
|
938
938
|
|
|
939
939
|
@mock.patch("importlib.metadata.version", return_value="2.3.0")
|
|
940
940
|
@mock.patch("openlineage.client.uuid.generate_new_uuid")
|
|
941
|
-
@mock.patch("airflow.utils.timezone.utcnow")
|
|
942
941
|
def test_emit_openlineage_events_for_snowflake_queries_without_query_ids(
|
|
943
|
-
|
|
942
|
+
mock_generate_uuid, mock_version, time_machine
|
|
944
943
|
):
|
|
945
944
|
fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
|
|
946
945
|
mock_generate_uuid.return_value = fake_uuid
|
|
947
946
|
|
|
948
947
|
default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
|
|
949
|
-
|
|
948
|
+
time_machine.move_to(default_event_time, tick=False)
|
|
950
949
|
|
|
951
950
|
hook = mock.MagicMock()
|
|
952
951
|
hook.query_ids = ["query1"]
|
|
@@ -1059,15 +1058,14 @@ def test_emit_openlineage_events_for_snowflake_queries_without_query_ids(
|
|
|
1059
1058
|
@mock.patch("airflow.providers.openlineage.sqlparser.SQLParser.create_namespace", return_value="snowflake_ns")
|
|
1060
1059
|
@mock.patch("importlib.metadata.version", return_value="2.3.0")
|
|
1061
1060
|
@mock.patch("openlineage.client.uuid.generate_new_uuid")
|
|
1062
|
-
@mock.patch("airflow.utils.timezone.utcnow")
|
|
1063
1061
|
def test_emit_openlineage_events_for_snowflake_queries_without_query_ids_and_namespace(
|
|
1064
|
-
|
|
1062
|
+
mock_generate_uuid, mock_version, mock_parser, time_machine
|
|
1065
1063
|
):
|
|
1066
1064
|
fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
|
|
1067
1065
|
mock_generate_uuid.return_value = fake_uuid
|
|
1068
1066
|
|
|
1069
1067
|
default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
|
|
1070
|
-
|
|
1068
|
+
time_machine.move_to(default_event_time, tick=False)
|
|
1071
1069
|
|
|
1072
1070
|
hook = mock.MagicMock()
|
|
1073
1071
|
hook.query_ids = ["query1"]
|
|
@@ -1179,15 +1177,14 @@ def test_emit_openlineage_events_for_snowflake_queries_without_query_ids_and_nam
|
|
|
1179
1177
|
|
|
1180
1178
|
@mock.patch("importlib.metadata.version", return_value="2.3.0")
|
|
1181
1179
|
@mock.patch("openlineage.client.uuid.generate_new_uuid")
|
|
1182
|
-
@mock.patch("airflow.utils.timezone.utcnow")
|
|
1183
1180
|
def test_emit_openlineage_events_for_snowflake_queries_with_query_ids_and_hook_query_ids(
|
|
1184
|
-
|
|
1181
|
+
mock_generate_uuid, mock_version, time_machine
|
|
1185
1182
|
):
|
|
1186
1183
|
fake_uuid = "01958e68-03a2-79e3-9ae9-26865cc40e2f"
|
|
1187
1184
|
mock_generate_uuid.return_value = fake_uuid
|
|
1188
1185
|
|
|
1189
1186
|
default_event_time = timezone.datetime(2025, 1, 5, 0, 0, 0)
|
|
1190
|
-
|
|
1187
|
+
time_machine.move_to(default_event_time, tick=False)
|
|
1191
1188
|
|
|
1192
1189
|
hook = mock.MagicMock()
|
|
1193
1190
|
hook.query_ids = ["query1"]
|
|
File without changes
|
|
File without changes
|
{apache_airflow_providers_snowflake-6.5.0 → apache_airflow_providers_snowflake-6.5.1}/docs/conf.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|