apache-airflow-providers-yandex 4.1.1rc1.tar.gz → 4.3.2rc1.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (67)
  1. apache_airflow_providers_yandex-4.3.2rc1/NOTICE +5 -0
  2. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/PKG-INFO +32 -21
  3. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/README.rst +21 -11
  4. apache_airflow_providers_yandex-4.3.2rc1/docs/.latest-doc-only-change.txt +1 -0
  5. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/changelog.rst +87 -2
  6. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/index.rst +14 -13
  7. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/operators/dataproc.rst +1 -1
  8. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/operators/yq.rst +1 -1
  9. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/provider.yaml +9 -1
  10. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/pyproject.toml +13 -16
  11. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/__init__.py +1 -1
  12. {apache_airflow_providers_yandex-4.1.1rc1/tests/system → apache_airflow_providers_yandex-4.3.2rc1/src/airflow/providers}/__init__.py +1 -1
  13. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/__init__.py +3 -3
  14. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/hooks/yandex.py +1 -5
  15. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/links/yq.py +2 -2
  16. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/operators/dataproc.py +18 -6
  17. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/operators/yq.py +2 -2
  18. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/secrets/lockbox.py +4 -2
  19. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/utils/user_agent.py +1 -1
  20. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/version_compat.py +2 -14
  21. {apache_airflow_providers_yandex-4.1.1rc1/tests/unit → apache_airflow_providers_yandex-4.3.2rc1/tests/system}/__init__.py +1 -1
  22. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/system/yandex/example_yandexcloud.py +6 -1
  23. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/system/yandex/example_yandexcloud_dataproc.py +5 -1
  24. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/system/yandex/example_yandexcloud_dataproc_lightweight.py +5 -1
  25. {apache_airflow_providers_yandex-4.1.1rc1/src/airflow/providers → apache_airflow_providers_yandex-4.3.2rc1/tests/unit}/__init__.py +1 -1
  26. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/hooks/test_dataproc.py +4 -10
  27. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/hooks/test_yandex.py +3 -10
  28. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/hooks/test_yq.py +3 -9
  29. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/links/test_yq.py +12 -27
  30. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/operators/test_dataproc.py +71 -11
  31. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/operators/test_yq.py +4 -11
  32. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/secrets/test_lockbox.py +3 -2
  33. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/utils/test_fields.py +1 -1
  34. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/utils/test_user_agent.py +1 -1
  35. apache_airflow_providers_yandex-4.1.1rc1/docs/.latest-doc-only-change.txt +0 -1
  36. {apache_airflow_providers_yandex-4.1.1rc1/src/airflow/providers/yandex → apache_airflow_providers_yandex-4.3.2rc1}/LICENSE +0 -0
  37. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/commits.rst +0 -0
  38. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/conf.py +0 -0
  39. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/configurations-ref.rst +0 -0
  40. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/connections/yandexcloud.rst +0 -0
  41. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/installing-providers-from-sources.rst +0 -0
  42. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/integration-logos/Yandex-Cloud.png +0 -0
  43. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/operators/index.rst +0 -0
  44. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/secrets-backends/yandex-cloud-lockbox-secret-backend.rst +0 -0
  45. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/docs/security.rst +0 -0
  46. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/get_provider_info.py +0 -0
  47. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/hooks/__init__.py +0 -0
  48. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/hooks/dataproc.py +0 -0
  49. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/hooks/yq.py +0 -0
  50. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/links/__init__.py +0 -0
  51. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/operators/__init__.py +0 -0
  52. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/secrets/__init__.py +0 -0
  53. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/utils/__init__.py +0 -0
  54. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/utils/credentials.py +0 -0
  55. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/utils/defaults.py +0 -0
  56. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/src/airflow/providers/yandex/utils/fields.py +0 -0
  57. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/conftest.py +0 -0
  58. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/system/yandex/__init__.py +0 -0
  59. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/system/yandex/example_yandexcloud_yq.py +0 -0
  60. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/__init__.py +0 -0
  61. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/hooks/__init__.py +0 -0
  62. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/links/__init__.py +0 -0
  63. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/operators/__init__.py +0 -0
  64. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/secrets/__init__.py +0 -0
  65. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/utils/__init__.py +0 -0
  66. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/utils/test_credentials.py +0 -0
  67. {apache_airflow_providers_yandex-4.1.1rc1 → apache_airflow_providers_yandex-4.3.2rc1}/tests/unit/yandex/utils/test_defaults.py +0 -0
@@ -0,0 +1,5 @@
1
+ Apache Airflow
2
+ Copyright 2016-2026 The Apache Software Foundation
3
+
4
+ This product includes software developed at
5
+ The Apache Software Foundation (http://www.apache.org/).
@@ -1,12 +1,13 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-yandex
3
- Version: 4.1.1rc1
3
+ Version: 4.3.2rc1
4
4
  Summary: Provider package apache-airflow-providers-yandex for Apache Airflow
5
5
  Keywords: airflow-provider,yandex,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
- Requires-Python: ~=3.10
8
+ Requires-Python: >=3.10,!=3.13
9
9
  Description-Content-Type: text/x-rst
10
+ License-Expression: Apache-2.0
10
11
  Classifier: Development Status :: 5 - Production/Stable
11
12
  Classifier: Environment :: Console
12
13
  Classifier: Environment :: Web Environment
@@ -14,23 +15,23 @@ Classifier: Intended Audience :: Developers
14
15
  Classifier: Intended Audience :: System Administrators
15
16
  Classifier: Framework :: Apache Airflow
16
17
  Classifier: Framework :: Apache Airflow :: Provider
17
- Classifier: License :: OSI Approved :: Apache Software License
18
18
  Classifier: Programming Language :: Python :: 3.10
19
19
  Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
21
  Classifier: Topic :: System :: Monitoring
22
- Requires-Dist: apache-airflow>=2.10.0rc1
23
- Requires-Dist: yandexcloud>=0.308.0
24
- Requires-Dist: yandex-query-client>=0.1.4
25
- Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
22
+ License-File: LICENSE
23
+ License-File: NOTICE
24
+ Requires-Dist: apache-airflow>=2.11.0rc1
25
+ Requires-Dist: yandexcloud>=0.308.0; python_version < '3.13'
26
+ Requires-Dist: yandex-query-client>=0.1.4; python_version < '3.13'
27
+ Requires-Dist: apache-airflow-providers-common-compat>=1.12.0rc1
26
28
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
27
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.1.1/changelog.html
28
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.1.1
29
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.3.2/changelog.html
30
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.3.2
29
31
  Project-URL: Mastodon, https://fosstodon.org/@airflow
30
32
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
31
33
  Project-URL: Source Code, https://github.com/apache/airflow
32
34
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
33
- Provides-Extra: common-compat
34
35
 
35
36
 
36
37
  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -57,7 +58,7 @@ Provides-Extra: common-compat
57
58
 
58
59
  Package ``apache-airflow-providers-yandex``
59
60
 
60
- Release: ``4.1.1``
61
+ Release: ``4.3.2``
61
62
 
62
63
 
63
64
  This package is for Yandex, including:
@@ -72,12 +73,12 @@ This is a provider package for ``yandex`` provider. All classes for this provide
72
73
  are in ``airflow.providers.yandex`` python package.
73
74
 
74
75
  You can find package information and changelog for the provider
75
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.1.1/>`_.
76
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.3.2/>`_.
76
77
 
77
78
  Installation
78
79
  ------------
79
80
 
80
- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
81
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
81
82
  for the minimum Airflow version supported) via
82
83
  ``pip install apache-airflow-providers-yandex``
83
84
 
@@ -86,13 +87,14 @@ The package supports the following python versions: 3.10,3.11,3.12
86
87
  Requirements
87
88
  ------------
88
89
 
89
- ======================= ==================
90
- PIP package Version required
91
- ======================= ==================
92
- ``apache-airflow`` ``>=2.10.0``
93
- ``yandexcloud`` ``>=0.308.0``
94
- ``yandex-query-client`` ``>=0.1.4``
95
- ======================= ==================
90
+ ========================================== ======================================
91
+ PIP package Version required
92
+ ========================================== ======================================
93
+ ``apache-airflow`` ``>=2.11.0``
94
+ ``yandexcloud`` ``>=0.308.0; python_version < "3.13"``
95
+ ``yandex-query-client`` ``>=0.1.4; python_version < "3.13"``
96
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
97
+ ========================================== ======================================
96
98
 
97
99
  Cross provider package dependencies
98
100
  -----------------------------------
@@ -113,6 +115,15 @@ Dependent package
113
115
  `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
114
116
  ================================================================================================================== =================
115
117
 
118
+ Optional dependencies
119
+ ----------------------
120
+
121
+ ================= ==========================================
122
+ Extra Dependencies
123
+ ================= ==========================================
124
+ ``common.compat`` ``apache-airflow-providers-common-compat``
125
+ ================= ==========================================
126
+
116
127
  The changelog for the provider package can be found in the
117
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.1.1/changelog.html>`_.
128
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.3.2/changelog.html>`_.
118
129
 
@@ -23,7 +23,7 @@
23
23
 
24
24
  Package ``apache-airflow-providers-yandex``
25
25
 
26
- Release: ``4.1.1``
26
+ Release: ``4.3.2``
27
27
 
28
28
 
29
29
  This package is for Yandex, including:
@@ -38,12 +38,12 @@ This is a provider package for ``yandex`` provider. All classes for this provide
38
38
  are in ``airflow.providers.yandex`` python package.
39
39
 
40
40
  You can find package information and changelog for the provider
41
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.1.1/>`_.
41
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.3.2/>`_.
42
42
 
43
43
  Installation
44
44
  ------------
45
45
 
46
- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
46
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
47
47
  for the minimum Airflow version supported) via
48
48
  ``pip install apache-airflow-providers-yandex``
49
49
 
@@ -52,13 +52,14 @@ The package supports the following python versions: 3.10,3.11,3.12
52
52
  Requirements
53
53
  ------------
54
54
 
55
- ======================= ==================
56
- PIP package Version required
57
- ======================= ==================
58
- ``apache-airflow`` ``>=2.10.0``
59
- ``yandexcloud`` ``>=0.308.0``
60
- ``yandex-query-client`` ``>=0.1.4``
61
- ======================= ==================
55
+ ========================================== ======================================
56
+ PIP package Version required
57
+ ========================================== ======================================
58
+ ``apache-airflow`` ``>=2.11.0``
59
+ ``yandexcloud`` ``>=0.308.0; python_version < "3.13"``
60
+ ``yandex-query-client`` ``>=0.1.4; python_version < "3.13"``
61
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
62
+ ========================================== ======================================
62
63
 
63
64
  Cross provider package dependencies
64
65
  -----------------------------------
@@ -79,5 +80,14 @@ Dependent package
79
80
  `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
80
81
  ================================================================================================================== =================
81
82
 
83
+ Optional dependencies
84
+ ----------------------
85
+
86
+ ================= ==========================================
87
+ Extra Dependencies
88
+ ================= ==========================================
89
+ ``common.compat`` ``apache-airflow-providers-common-compat``
90
+ ================= ==========================================
91
+
82
92
  The changelog for the provider package can be found in the
83
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.1.1/changelog.html>`_.
93
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-yandex/4.3.2/changelog.html>`_.
@@ -0,0 +1 @@
1
+ 134348e1895ad54cfa4d3a75a78bafe872328b11
@@ -27,6 +27,93 @@
27
27
  Changelog
28
28
  ---------
29
29
 
30
+ 4.3.2
31
+ .....
32
+
33
+ Misc
34
+ ~~~~
35
+
36
+ * ``New year means updated Copyright notices (#60344)``
37
+ * ``Check team boundaries in connections (#59476)``
38
+ * ``Migrate yandex provider to use airflow.sdk.configuration.conf (#59992)``
39
+
40
+ .. Below changes are excluded from the changelog. Move them to
41
+ appropriate section above if needed. Do not delete the lines(!):
42
+
43
+ 4.3.1
44
+ .....
45
+
46
+ Misc
47
+ ~~~~
48
+
49
+ * ``Check team boundaries in variables (#58905)``
50
+
51
+ .. Below changes are excluded from the changelog. Move them to
52
+ appropriate section above if needed. Do not delete the lines(!):
53
+ * ``TaskInstance unused method cleanup (#59835)``
54
+
55
+ 4.3.0
56
+ .....
57
+
58
+ .. note::
59
+ This release of provider is only available for Airflow 2.11+ as explained in the
60
+ Apache Airflow providers support policy <https://github.com/apache/airflow/blob/main/PROVIDERS.rst#minimum-supported-version-of-airflow-for-community-managed-providers>_.
61
+
62
+ Misc
63
+ ~~~~
64
+
65
+ * ``Bump minimum Airflow version in providers to Airflow 2.11.0 (#58612)``
66
+
67
+ .. Below changes are excluded from the changelog. Move them to
68
+ appropriate section above if needed. Do not delete the lines(!):
69
+ * ``Updates to release process of providers (#58316)``
70
+
71
+ 4.2.1
72
+ .....
73
+
74
+ Misc
75
+ ~~~~
76
+
77
+ * ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
78
+ * ``Migrate 'yandex' provider to 'common.compat' (#57116)``
79
+
80
+ .. Below changes are excluded from the changelog. Move them to
81
+ appropriate section above if needed. Do not delete the lines(!):
82
+ * ``Delete all unnecessary LICENSE Files (#58191)``
83
+ * ``Prepare release for Oct 2025 wave of providers (#57029)``
84
+ * ``Remove placeholder Release Date in changelog and index files (#56056)``
85
+ * ``Prepare release for Sep 2025 2nd wave of providers (#55688)``
86
+ * ``Enable pt011 rule 1 (#55706)``
87
+ * ``Prepare release for Sep 2025 1st wave of providers (#55203)``
88
+ * ``Fix Airflow 2 reference in README/index of providers (#55240)``
89
+ * ``Make term Dag consistent in providers docs (#55101)``
90
+ * ``Move trigger_rule utils from 'airflow/utils' to 'airflow.task'and integrate with Execution API spec (#53389)``
91
+ * ``Switch pre-commit to prek (#54258)``
92
+
93
+ 4.2.0
94
+ .....
95
+
96
+ Features
97
+ ~~~~~~~~
98
+
99
+ * ``Add environment and oslogin for yandex dataproc create cluster (#52973)``
100
+
101
+ Misc
102
+ ~~~~
103
+
104
+ * ``Fix unreachable code errors in yandex provider (#53453)``
105
+ * ``Add Python 3.13 support for Airflow. (#46891)``
106
+ * ``Remove type ignore across codebase after mypy upgrade (#53243)``
107
+ * ``Remove upper-binding for "python-requires" (#52980)``
108
+ * ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
109
+ * ``Moving BaseHook usages to version_compat for yandex (#52963)``
110
+
111
+ .. Below changes are excluded from the changelog. Move them to
112
+ appropriate section above if needed. Do not delete the lines(!):
113
+ * ``Deprecate decorators from Core (#53629)``
114
+ * ``Cleanup type ignores in yandex provider where possible (#53251)``
115
+ * ``Make dag_version_id in TI non-nullable (#50825)``
116
+
30
117
  4.1.1
31
118
  .....
32
119
 
@@ -145,8 +232,6 @@ Misc
145
232
  * ``Use Python 3.9 as target version for Ruff & Black rules (#44298)``
146
233
  * ``Prepare docs for Nov 1st wave of providers (#44011)``
147
234
  * ``Split providers out of the main "airflow/" tree into a UV workspace project (#42505)``
148
-
149
- .. Review and move the new changes to one of the sections above:
150
235
  * ``Update path of example dags in docs (#45069)``
151
236
 
152
237
  3.12.0
@@ -58,7 +58,7 @@
58
58
  :maxdepth: 1
59
59
  :caption: Resources
60
60
 
61
- Example DAGs <https://github.com/apache/airflow/tree/providers-yandex/|version|/providers/yandex/tests/system/yandex/example_yandexcloud_dataproc.py>
61
+ Example Dags <https://github.com/apache/airflow/tree/providers-yandex/|version|/providers/yandex/tests/system/yandex/example_yandexcloud_dataproc.py>
62
62
  PyPI Repository <https://pypi.org/project/apache-airflow-providers-yandex/>
63
63
  Installing from sources <installing-providers-from-sources>
64
64
 
@@ -81,7 +81,7 @@ This package is for Yandex, including:
81
81
  - `Yandex.Cloud <https://cloud.yandex.com/>`__
82
82
 
83
83
 
84
- Release: 4.1.1
84
+ Release: 4.3.2
85
85
 
86
86
  Provider package
87
87
  ----------------
@@ -92,22 +92,23 @@ All classes for this package are included in the ``airflow.providers.yandex`` py
92
92
  Installation
93
93
  ------------
94
94
 
95
- You can install this package on top of an existing Airflow 2 installation via
95
+ You can install this package on top of an existing Airflow installation via
96
96
  ``pip install apache-airflow-providers-yandex``.
97
97
  For the minimum Airflow version supported, see ``Requirements`` below.
98
98
 
99
99
  Requirements
100
100
  ------------
101
101
 
102
- The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
102
+ The minimum Apache Airflow version supported by this provider distribution is ``2.11.0``.
103
103
 
104
- ======================= ==================
105
- PIP package Version required
106
- ======================= ==================
107
- ``apache-airflow`` ``>=2.10.0``
108
- ``yandexcloud`` ``>=0.308.0``
109
- ``yandex-query-client`` ``>=0.1.4``
110
- ======================= ==================
104
+ ========================================== ======================================
105
+ PIP package Version required
106
+ ========================================== ======================================
107
+ ``apache-airflow`` ``>=2.11.0``
108
+ ``yandexcloud`` ``>=0.308.0; python_version < "3.13"``
109
+ ``yandex-query-client`` ``>=0.1.4; python_version < "3.13"``
110
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
111
+ ========================================== ======================================
111
112
 
112
113
  Cross provider package dependencies
113
114
  -----------------------------------
@@ -134,5 +135,5 @@ Downloading official packages
134
135
  You can download officially released packages and verify their checksums and signatures from the
135
136
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
136
137
 
137
- * `The apache-airflow-providers-yandex 4.1.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.1.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.1.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.1.1.tar.gz.sha512>`__)
138
- * `The apache-airflow-providers-yandex 4.1.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.1.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.1.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.1.1-py3-none-any.whl.sha512>`__)
138
+ * `The apache-airflow-providers-yandex 4.3.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.3.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.3.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.3.2.tar.gz.sha512>`__)
139
+ * `The apache-airflow-providers-yandex 4.3.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.3.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.3.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_yandex-4.3.2-py3-none-any.whl.sha512>`__)
@@ -34,4 +34,4 @@ that can be integrated with Apache Hadoop and other storage systems.
34
34
  Using the operators
35
35
  ^^^^^^^^^^^^^^^^^^^
36
36
  To learn how to use Data Proc operators,
37
- see `example DAGs <https://github.com/apache/airflow/tree/providers-yandex/|version|/providers/yandex/tests/system/yandex/example_yandexcloud_dataproc.py>`_.
37
+ see `example Dags <https://github.com/apache/airflow/tree/providers-yandex/|version|/providers/yandex/tests/system/yandex/example_yandexcloud_dataproc.py>`_.
@@ -25,4 +25,4 @@ Yandex Query Operators
25
25
  Using the operators
26
26
  ^^^^^^^^^^^^^^^^^^^
27
27
  To learn how to use Yandex Query operator,
28
- see `example DAG <https://github.com/apache/airflow/tree/providers-yandex/|version|/providers/yandex/tests/system/yandex/example_yandexcloud_yq.py>`__.
28
+ see `example Dag <https://github.com/apache/airflow/tree/providers-yandex/|version|/providers/yandex/tests/system/yandex/example_yandexcloud_yq.py>`__.
@@ -23,12 +23,17 @@ description: |
23
23
 
24
24
  - `Yandex.Cloud <https://cloud.yandex.com/>`__
25
25
  state: ready
26
- source-date-epoch: 1751474658
26
+ source-date-epoch: 1768335746
27
27
  # Note that those versions are maintained by release manager - do not update them manually
28
28
  # with the exception of case where other provider in sources has >= new provider version.
29
29
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
30
30
  # to be done in the same PR
31
31
  versions:
32
+ - 4.3.2
33
+ - 4.3.1
34
+ - 4.3.0
35
+ - 4.2.1
36
+ - 4.2.0
32
37
  - 4.1.1
33
38
  - 4.1.0
34
39
  - 4.0.3
@@ -60,6 +65,9 @@ versions:
60
65
  - 1.0.1
61
66
  - 1.0.0
62
67
 
68
+ excluded-python-versions:
69
+ - "3.13"
70
+
63
71
  integrations:
64
72
  - integration-name: Yandex.Cloud
65
73
  external-doc-url: https://cloud.yandex.com/
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-yandex"
28
- version = "4.1.1rc1"
28
+ version = "4.3.2rc1"
29
29
  description = "Provider package apache-airflow-providers-yandex for Apache Airflow"
30
30
  readme = "README.rst"
31
+ license = "Apache-2.0"
32
+ license-files = ['LICENSE', 'NOTICE']
31
33
  authors = [
32
34
  {name="Apache Software Foundation", email="dev@airflow.apache.org"},
33
35
  ]
@@ -43,29 +45,22 @@ classifiers = [
43
45
  "Intended Audience :: System Administrators",
44
46
  "Framework :: Apache Airflow",
45
47
  "Framework :: Apache Airflow :: Provider",
46
- "License :: OSI Approved :: Apache Software License",
47
48
  "Programming Language :: Python :: 3.10",
48
49
  "Programming Language :: Python :: 3.11",
49
50
  "Programming Language :: Python :: 3.12",
50
51
  "Topic :: System :: Monitoring",
51
52
  ]
52
- requires-python = "~=3.10"
53
+ requires-python = ">=3.10,!=3.13"
53
54
 
54
55
  # The dependencies should be modified in place in the generated file.
55
56
  # Any change in the dependencies is preserved when the file is regenerated
56
- # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
57
+ # Make sure to run ``prek update-providers-dependencies --all-files``
57
58
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
58
59
  dependencies = [
59
- "apache-airflow>=2.10.0rc1",
60
- "yandexcloud>=0.308.0",
61
- "yandex-query-client>=0.1.4",
62
- ]
63
-
64
- # The optional dependencies should be modified in place in the generated file
65
- # Any change in the dependencies is preserved when the file is regenerated
66
- [project.optional-dependencies]
67
- "common.compat" = [
68
- "apache-airflow-providers-common-compat"
60
+ "apache-airflow>=2.11.0rc1",
61
+ "yandexcloud>=0.308.0; python_version < '3.13'",
62
+ "yandex-query-client>=0.1.4; python_version < '3.13'",
63
+ "apache-airflow-providers-common-compat>=1.12.0rc1",
69
64
  ]
70
65
 
71
66
  [dependency-groups]
@@ -73,8 +68,10 @@ dev = [
73
68
  "apache-airflow",
74
69
  "apache-airflow-task-sdk",
75
70
  "apache-airflow-devel-common",
71
+ "apache-airflow-providers-common-compat",
76
72
  # Additional devel dependencies (do not remove this line and add extra development dependencies)
77
73
  "responses>=0.25.0",
74
+
78
75
  ]
79
76
 
80
77
  # To build docs:
@@ -103,8 +100,8 @@ apache-airflow-providers-common-sql = {workspace = true}
103
100
  apache-airflow-providers-standard = {workspace = true}
104
101
 
105
102
  [project.urls]
106
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.1.1"
107
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.1.1/changelog.html"
103
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.3.2"
104
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-yandex/4.3.2/changelog.html"
108
105
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
109
106
  "Source Code" = "https://github.com/apache/airflow"
110
107
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -14,4 +14,4 @@
14
14
  # KIND, either express or implied. See the License for the
15
15
  # specific language governing permissions and limitations
16
16
  # under the License.
17
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
17
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -14,4 +14,4 @@
14
14
  # KIND, either express or implied. See the License for the
15
15
  # specific language governing permissions and limitations
16
16
  # under the License.
17
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
17
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "4.1.1"
32
+ __version__ = "4.3.2"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.10.0"
35
+ "2.11.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-yandex:{__version__}` needs Apache Airflow 2.10.0+"
38
+ f"The package `apache-airflow-providers-yandex:{__version__}` needs Apache Airflow 2.11.0+"
39
39
  )
@@ -20,6 +20,7 @@ from typing import Any
20
20
 
21
21
  import yandexcloud
22
22
 
23
+ from airflow.providers.common.compat.sdk import BaseHook
23
24
  from airflow.providers.yandex.utils.credentials import (
24
25
  CredentialsType,
25
26
  get_credentials,
@@ -29,11 +30,6 @@ from airflow.providers.yandex.utils.defaults import conn_name_attr, conn_type, d
29
30
  from airflow.providers.yandex.utils.fields import get_field_from_extras
30
31
  from airflow.providers.yandex.utils.user_agent import provider_user_agent
31
32
 
32
- try:
33
- from airflow.sdk import BaseHook
34
- except ImportError:
35
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
36
-
37
33
 
38
34
  class YandexCloudBaseHook(BaseHook):
39
35
  """
@@ -18,11 +18,11 @@ from __future__ import annotations
18
18
 
19
19
  from typing import TYPE_CHECKING
20
20
 
21
- from airflow.providers.yandex.version_compat import BaseOperatorLink, XCom
21
+ from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom
22
22
 
23
23
  if TYPE_CHECKING:
24
24
  from airflow.models.taskinstancekey import TaskInstanceKey
25
- from airflow.providers.yandex.version_compat import BaseOperator, Context
25
+ from airflow.providers.common.compat.sdk import BaseOperator, Context
26
26
 
27
27
  XCOM_WEBLINK_KEY = "web_link"
28
28
 
@@ -14,17 +14,20 @@
14
14
  # KIND, either express or implied. See the License for the
15
15
  # specific language governing permissions and limitations
16
16
  # under the License.
17
+
17
18
  from __future__ import annotations
18
19
 
19
20
  from collections.abc import Iterable, Sequence
20
21
  from dataclasses import dataclass
21
22
  from typing import TYPE_CHECKING
22
23
 
24
+ import yandexcloud
25
+
26
+ from airflow.providers.common.compat.sdk import BaseOperator
23
27
  from airflow.providers.yandex.hooks.dataproc import DataprocHook
24
- from airflow.providers.yandex.version_compat import BaseOperator
25
28
 
26
29
  if TYPE_CHECKING:
27
- from airflow.providers.yandex.version_compat import Context
30
+ from airflow.providers.common.compat.sdk import Context
28
31
 
29
32
 
30
33
  @dataclass
@@ -54,6 +57,7 @@ class DataprocCreateClusterOperator(BaseOperator):
54
57
  Currently there are ru-central1-a, ru-central1-b and ru-central1-c.
55
58
  :param service_account_id: Service account id for the cluster.
56
59
  Service account can be created inside the folder.
60
+ :param environment: Environment for the cluster. Possible options: PRODUCTION, PRESTABLE.
57
61
  :param masternode_resource_preset: Resources preset (CPU+RAM configuration)
58
62
  for the primary node of the cluster.
59
63
  :param masternode_disk_size: Masternode storage size in GiB.
@@ -96,6 +100,7 @@ class DataprocCreateClusterOperator(BaseOperator):
96
100
  Docs: https://cloud.yandex.com/docs/data-proc/concepts/logs
97
101
  :param initialization_actions: Set of init-actions to run when cluster starts.
98
102
  Docs: https://cloud.yandex.com/docs/data-proc/concepts/init-action
103
+ :param oslogin_enabled: Enable authorization via OS Login for cluster.
99
104
  :param labels: Cluster labels as key:value pairs. No more than 64 per resource.
100
105
  Docs: https://cloud.yandex.com/docs/resource-manager/concepts/labels
101
106
  """
@@ -109,10 +114,11 @@ class DataprocCreateClusterOperator(BaseOperator):
109
114
  cluster_image_version: str | None = None,
110
115
  ssh_public_keys: str | Iterable[str] | None = None,
111
116
  subnet_id: str | None = None,
112
- services: Iterable[str] = ("HDFS", "YARN", "MAPREDUCE", "HIVE", "SPARK"),
117
+ services: Iterable[str] | None = ("HDFS", "YARN", "MAPREDUCE", "HIVE", "SPARK"),
113
118
  s3_bucket: str | None = None,
114
119
  zone: str = "ru-central1-b",
115
120
  service_account_id: str | None = None,
121
+ environment: str | None = None,
116
122
  masternode_resource_preset: str | None = None,
117
123
  masternode_disk_size: int | None = None,
118
124
  masternode_disk_type: str | None = None,
@@ -138,6 +144,7 @@ class DataprocCreateClusterOperator(BaseOperator):
138
144
  security_group_ids: Iterable[str] | None = None,
139
145
  log_group_id: str | None = None,
140
146
  initialization_actions: Iterable[InitializationAction] | None = None,
147
+ oslogin_enabled: bool = False,
141
148
  labels: dict[str, str] | None = None,
142
149
  **kwargs,
143
150
  ) -> None:
@@ -145,9 +152,6 @@ class DataprocCreateClusterOperator(BaseOperator):
145
152
  if ssh_public_keys is None:
146
153
  ssh_public_keys = []
147
154
 
148
- if services is None:
149
- services = []
150
-
151
155
  self.folder_id = folder_id
152
156
  self.yandex_conn_id = connection_id
153
157
  self.cluster_name = cluster_name
@@ -159,6 +163,7 @@ class DataprocCreateClusterOperator(BaseOperator):
159
163
  self.s3_bucket = s3_bucket
160
164
  self.zone = zone
161
165
  self.service_account_id = service_account_id
166
+ self.environment = environment
162
167
  self.masternode_resource_preset = masternode_resource_preset
163
168
  self.masternode_disk_size = masternode_disk_size
164
169
  self.masternode_disk_type = masternode_disk_type
@@ -183,6 +188,7 @@ class DataprocCreateClusterOperator(BaseOperator):
183
188
  self.security_group_ids = security_group_ids
184
189
  self.log_group_id = log_group_id
185
190
  self.initialization_actions = initialization_actions
191
+ self.oslogin_enabled = oslogin_enabled
186
192
  self.labels = labels
187
193
 
188
194
  self.hook: DataprocHook | None = None
@@ -191,6 +197,11 @@ class DataprocCreateClusterOperator(BaseOperator):
191
197
  self.hook = DataprocHook(
192
198
  yandex_conn_id=self.yandex_conn_id,
193
199
  )
200
+ kwargs_depends_on_version = {}
201
+ if yandexcloud.__version__ >= "0.350.0":
202
+ kwargs_depends_on_version.update(
203
+ {"oslogin_enabled": self.oslogin_enabled, "environment": self.environment}
204
+ )
194
205
  operation_result = self.hook.dataproc_client.create_cluster(
195
206
  folder_id=self.folder_id,
196
207
  cluster_name=self.cluster_name,
@@ -236,6 +247,7 @@ class DataprocCreateClusterOperator(BaseOperator):
236
247
  ]
237
248
  if self.initialization_actions
238
249
  else None,
250
+ **kwargs_depends_on_version,
239
251
  )
240
252
  cluster_id = operation_result.response.id
241
253
 
@@ -20,12 +20,12 @@ from collections.abc import Sequence
20
20
  from functools import cached_property
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
+ from airflow.providers.common.compat.sdk import BaseOperator
23
24
  from airflow.providers.yandex.hooks.yq import YQHook
24
25
  from airflow.providers.yandex.links.yq import YQLink
25
- from airflow.providers.yandex.version_compat import BaseOperator
26
26
 
27
27
  if TYPE_CHECKING:
28
- from airflow.providers.yandex.version_compat import Context
28
+ from airflow.providers.common.compat.sdk import Context
29
29
 
30
30
 
31
31
  class YQExecuteQueryOperator(BaseOperator):
@@ -145,11 +145,12 @@ class LockboxSecretBackend(BaseSecretsBackend, LoggingMixin):
145
145
  self.sep = sep
146
146
  self.endpoint = endpoint
147
147
 
148
- def get_conn_value(self, conn_id: str) -> str | None:
148
+ def get_conn_value(self, conn_id: str, team_name: str | None = None) -> str | None:
149
149
  """
150
150
  Retrieve from Secrets Backend a string value representing the Connection object.
151
151
 
152
152
  :param conn_id: Connection ID
153
+ :param team_name: Team name associated to the task trying to access the connection (if any)
153
154
  :return: Connection Value
154
155
  """
155
156
  if self.connections_prefix is None:
@@ -160,11 +161,12 @@ class LockboxSecretBackend(BaseSecretsBackend, LoggingMixin):
160
161
 
161
162
  return self._get_secret_value(self.connections_prefix, conn_id)
162
163
 
163
- def get_variable(self, key: str) -> str | None:
164
+ def get_variable(self, key: str, team_name: str | None = None) -> str | None:
164
165
  """
165
166
  Return value for Airflow Variable.
166
167
 
167
168
  :param key: Variable Key
169
+ :param team_name: Team name associated to the task trying to access the variable (if any)
168
170
  :return: Variable Value
169
171
  """
170
172
  if self.variables_prefix is None:
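get_conn_value and get_variable now accept an optional team_name keyword, matching the team-boundary checks noted in the 4.3.x changelog entries above; as far as this diff shows, the Lockbox backend accepts the argument but does not use it for lookups. A hypothetical subclass sketch (not part of the provider) showing how an override should mirror the new signature:

    # Hypothetical subclass: keep team_name in the signature so calls from
    # newer Airflow versions continue to work.
    from airflow.providers.yandex.secrets.lockbox import LockboxSecretBackend

    class LoggingLockboxBackend(LockboxSecretBackend):
        def get_variable(self, key: str, team_name: str | None = None) -> str | None:
            self.log.debug("Resolving variable %s (team=%s)", key, team_name)
            return super().get_variable(key, team_name=team_name)
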
@@ -24,7 +24,7 @@ from airflow.providers.yandex.utils.defaults import conn_type, hook_name
24
24
  def provider_user_agent() -> str | None:
25
25
  """Construct User-Agent from Airflow core & provider package versions."""
26
26
  from airflow import __version__ as airflow_version
27
- from airflow.configuration import conf
27
+ from airflow.providers.common.compat.sdk import conf
28
28
  from airflow.providers_manager import ProvidersManager
29
29
 
30
30
  try:
@@ -28,21 +28,9 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
28
28
 
29
29
 
30
30
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
31
-
32
- if AIRFLOW_V_3_0_PLUS:
33
- from airflow.sdk import BaseOperator, BaseOperatorLink
34
- from airflow.sdk.definitions.context import Context
35
- from airflow.sdk.execution_time.xcom import XCom
36
- else:
37
- from airflow.models import BaseOperator, XCom
38
- from airflow.models.baseoperatorlink import BaseOperatorLink # type: ignore[no-redef]
39
- from airflow.utils.context import Context
40
-
31
+ AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
41
32
 
42
33
  __all__ = [
43
34
  "AIRFLOW_V_3_0_PLUS",
44
- "BaseOperator",
45
- "BaseOperatorLink",
46
- "Context",
47
- "XCom",
35
+ "AIRFLOW_V_3_1_PLUS",
48
36
  ]
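version_compat.py now exports only the Airflow version flags (AIRFLOW_V_3_0_PLUS and the new AIRFLOW_V_3_1_PLUS); the BaseOperator/BaseOperatorLink/Context/XCom re-exports moved to airflow.providers.common.compat.sdk, as the operator and link hunks above show. A small sketch of the resulting import split (the gated branch is a placeholder, not provider code):

    # Sketch: class re-exports come from common.compat.sdk, version flags from version_compat.
    from airflow.providers.common.compat.sdk import BaseOperator
    from airflow.providers.yandex.version_compat import AIRFLOW_V_3_1_PLUS

    class ExampleOperator(BaseOperator):
        def execute(self, context):
            if AIRFLOW_V_3_1_PLUS:
                self.log.info("Running on Airflow 3.1 or newer")
            return None
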
@@ -14,4 +14,4 @@
14
14
  # KIND, either express or implied. See the License for the
15
15
  # specific language governing permissions and limitations
16
16
  # under the License.
17
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
17
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -30,7 +30,12 @@ from google.protobuf.json_format import MessageToDict
30
30
  from yandexcloud.operations import OperationError
31
31
 
32
32
  from airflow import DAG
33
- from airflow.decorators import task
33
+
34
+ try:
35
+ from airflow.sdk import task
36
+ except ImportError:
37
+ # Airflow 2 path
38
+ from airflow.decorators import task # type: ignore[attr-defined,no-redef]
34
39
  from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook
35
40
 
36
41
  from tests_common.test_utils.system_tests import get_test_env_id
@@ -30,7 +30,11 @@ from airflow.providers.yandex.operators.dataproc import (
30
30
  )
31
31
 
32
32
  # Name of the datacenter where Dataproc cluster will be created
33
- from airflow.utils.trigger_rule import TriggerRule
33
+ try:
34
+ from airflow.sdk import TriggerRule
35
+ except ImportError:
36
+ # Compatibility for Airflow < 3.1
37
+ from airflow.utils.trigger_rule import TriggerRule # type: ignore[no-redef,attr-defined]
34
38
 
35
39
  from tests_common.test_utils.system_tests import get_test_env_id
36
40
 
@@ -26,7 +26,11 @@ from airflow.providers.yandex.operators.dataproc import (
26
26
  )
27
27
 
28
28
  # Name of the datacenter where Dataproc cluster will be created
29
- from airflow.utils.trigger_rule import TriggerRule
29
+ try:
30
+ from airflow.sdk import TriggerRule
31
+ except ImportError:
32
+ # Compatibility for Airflow < 3.1
33
+ from airflow.utils.trigger_rule import TriggerRule # type: ignore[no-redef,attr-defined]
30
34
 
31
35
  from tests_common.test_utils.system_tests import get_test_env_id
32
36
 
@@ -14,4 +14,4 @@
14
14
  # KIND, either express or implied. See the License for the
15
15
  # specific language governing permissions and limitations
16
16
  # under the License.
17
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
17
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -21,19 +21,13 @@ from unittest import mock
21
21
 
22
22
  import pytest
23
23
 
24
- try:
25
- import importlib.util
24
+ pytest.importorskip("yandexcloud")
26
25
 
27
- if not importlib.util.find_spec("airflow.sdk.bases.hook"):
28
- raise ImportError
29
26
 
30
- BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
31
- except ImportError:
32
- BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
33
- yandexlcloud = pytest.importorskip("yandexcloud")
27
+ from airflow.models import Connection
28
+ from airflow.providers.yandex.hooks.dataproc import DataprocHook
34
29
 
35
- from airflow.models import Connection # noqa: E402
36
- from airflow.providers.yandex.hooks.dataproc import DataprocHook # noqa: E402
30
+ BASEHOOK_PATCH_PATH = "airflow.providers.common.compat.sdk.BaseHook"
37
31
 
38
32
  # Airflow connection with type "yandexcloud" must be created
39
33
  CONNECTION_ID = "yandexcloud_default"
@@ -21,20 +21,13 @@ from unittest import mock
21
21
 
22
22
  import pytest
23
23
 
24
+ pytest.importorskip("yandexcloud")
25
+
24
26
  from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook
25
27
 
26
28
  from tests_common.test_utils.config import conf_vars
27
29
 
28
- try:
29
- import importlib.util
30
-
31
- if not importlib.util.find_spec("airflow.sdk.bases.hook"):
32
- raise ImportError
33
-
34
- BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
35
- except ImportError:
36
- BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
37
- yandexcloud = pytest.importorskip("yandexcloud")
30
+ BASEHOOK_PATCH_PATH = "airflow.providers.common.compat.sdk.BaseHook"
38
31
 
39
32
 
40
33
  class TestYandexHook:
@@ -25,19 +25,13 @@ import responses
25
25
  from responses import matchers
26
26
 
27
27
  from airflow.models import Connection
28
- from airflow.providers.yandex.hooks.yq import YQHook
29
-
30
- try:
31
- import importlib.util
32
28
 
33
- if not importlib.util.find_spec("airflow.sdk.bases.hook"):
34
- raise ImportError
29
+ BASEHOOK_PATCH_PATH = "airflow.providers.common.compat.sdk.BaseHook"
35
30
 
36
- BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
37
- except ImportError:
38
- BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
39
31
  yandexcloud = pytest.importorskip("yandexcloud")
40
32
 
33
+ from airflow.providers.yandex.hooks.yq import YQHook
34
+
41
35
  OAUTH_TOKEN = "my_oauth_token"
42
36
  IAM_TOKEN = "my_iam_token"
43
37
  SERVICE_ACCOUNT_AUTH_KEY_JSON = """{"id":"my_id", "service_account_id":"my_sa1", "private_key":"my_pk"}"""
@@ -20,47 +20,32 @@ from unittest import mock
20
20
 
21
21
  import pytest
22
22
 
23
- from airflow.models.taskinstance import TaskInstance
23
+ from airflow.providers.common.compat.sdk import XCom
24
24
  from airflow.providers.yandex.links.yq import YQLink
25
25
 
26
26
  from tests_common.test_utils.mock_operators import MockOperator
27
- from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
28
-
29
- if AIRFLOW_V_3_0_PLUS:
30
- from airflow.sdk.execution_time.xcom import XCom
31
- else:
32
- from airflow.models import XCom # type: ignore[no-redef]
27
+ from tests_common.test_utils.taskinstance import create_task_instance
33
28
 
34
29
  yandexcloud = pytest.importorskip("yandexcloud")
35
30
 
36
31
 
37
32
  def test_persist():
38
33
  mock_ti = mock.MagicMock()
39
- mock_context = {"ti": mock_ti}
40
- if not AIRFLOW_V_3_0_PLUS:
41
- mock_context["task_instance"] = mock_ti
42
-
43
- YQLink.persist(context=mock_context, web_link="g.com")
44
-
45
- ti = mock_context["ti"]
46
- ti.xcom_push.assert_called_once_with(key="web_link", value="g.com")
34
+ YQLink.persist(context={"ti": mock_ti, "task_instance": mock_ti}, web_link="g.com")
35
+ mock_ti.xcom_push.assert_called_once_with(key="web_link", value="g.com")
47
36
 
48
37
 
49
38
  def test_default_link():
50
- with mock.patch.object(XCom, "get_value") as m:
51
- m.return_value = None
52
- link = YQLink()
53
-
54
- op = MockOperator(task_id="test_task_id")
55
- ti = TaskInstance(task=op, run_id="run_id1")
39
+ link = YQLink()
40
+ op = MockOperator(task_id="test_task_id")
41
+ ti = create_task_instance(task=op, run_id="run_id1", dag_version_id=mock.MagicMock())
42
+ with mock.patch.object(XCom, "get_value", return_value=None):
56
43
  assert link.get_link(op, ti_key=ti.key) == "https://yq.cloud.yandex.ru"
57
44
 
58
45
 
59
46
  def test_link():
60
- with mock.patch.object(XCom, "get_value") as m:
61
- m.return_value = "https://g.com"
62
- link = YQLink()
63
-
64
- op = MockOperator(task_id="test_task_id")
65
- ti = TaskInstance(task=op, run_id="run_id1")
47
+ link = YQLink()
48
+ op = MockOperator(task_id="test_task_id")
49
+ ti = create_task_instance(task=op, run_id="run_id1", dag_version_id=mock.MagicMock())
50
+ with mock.patch.object(XCom, "get_value", return_value="https://g.com"):
66
51
  assert link.get_link(op, ti_key=ti.key) == "https://g.com"
@@ -21,6 +21,8 @@ from unittest.mock import MagicMock, call, patch
21
21
 
22
22
  import pytest
23
23
 
24
+ yandexcloud = pytest.importorskip("yandexcloud")
25
+
24
26
  from airflow.models.dag import DAG
25
27
  from airflow.providers.yandex.operators.dataproc import (
26
28
  DataprocCreateClusterOperator,
@@ -31,8 +33,6 @@ from airflow.providers.yandex.operators.dataproc import (
31
33
  DataprocDeleteClusterOperator,
32
34
  )
33
35
 
34
- yandexcloud = pytest.importorskip("yandexcloud")
35
-
36
36
  # Airflow connection with type "yandexcloud"
37
37
  CONNECTION_ID = "yandexcloud_default"
38
38
 
@@ -66,15 +66,7 @@ SSH_PUBLIC_KEYS = [
66
66
  # https://cloud.yandex.com/docs/logging/concepts/log-group
67
67
  LOG_GROUP_ID = "my_log_group_id"
68
68
 
69
- try:
70
- import importlib.util
71
-
72
- if not importlib.util.find_spec("airflow.sdk.bases.hook"):
73
- raise ImportError
74
-
75
- BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
76
- except ImportError:
77
- BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
69
+ BASEHOOK_PATCH_PATH = "airflow.providers.common.compat.sdk.BaseHook"
78
70
 
79
71
 
80
72
  class TestDataprocClusterCreateOperator:
@@ -93,6 +85,7 @@ class TestDataprocClusterCreateOperator:
93
85
  @patch("airflow.providers.yandex.utils.credentials.get_credentials")
94
86
  @patch(f"{BASEHOOK_PATCH_PATH}.get_connection")
95
87
  @patch("yandexcloud._wrappers.dataproc.Dataproc.create_cluster")
88
+ @patch("yandexcloud.__version__", "0.308.0")
96
89
  def test_create_cluster(self, mock_create_cluster, *_):
97
90
  operator = DataprocCreateClusterOperator(
98
91
  task_id="create_cluster",
@@ -154,6 +147,73 @@ class TestDataprocClusterCreateOperator:
154
147
  ]
155
148
  )
156
149
 
150
+ @patch("airflow.providers.yandex.utils.credentials.get_credentials")
151
+ @patch(f"{BASEHOOK_PATCH_PATH}.get_connection")
152
+ @patch("yandexcloud._wrappers.dataproc.Dataproc.create_cluster")
153
+ @patch("yandexcloud.__version__", "0.350.0")
154
+ def test_create_cluster_with_350_sdk(self, mock_create_cluster, *_):
155
+ operator = DataprocCreateClusterOperator(
156
+ task_id="create_cluster",
157
+ ssh_public_keys=SSH_PUBLIC_KEYS,
158
+ folder_id=FOLDER_ID,
159
+ subnet_id=SUBNET_ID,
160
+ zone=AVAILABILITY_ZONE_ID,
161
+ connection_id=CONNECTION_ID,
162
+ s3_bucket=S3_BUCKET_NAME_FOR_LOGS,
163
+ cluster_image_version=CLUSTER_IMAGE_VERSION,
164
+ log_group_id=LOG_GROUP_ID,
165
+ )
166
+ context = {"task_instance": MagicMock()}
167
+ operator.execute(context)
168
+ mock_create_cluster.assert_called_once_with(
169
+ cluster_description="",
170
+ cluster_image_version="1.4",
171
+ cluster_name=None,
172
+ computenode_count=0,
173
+ computenode_disk_size=None,
174
+ computenode_disk_type=None,
175
+ computenode_resource_preset=None,
176
+ computenode_max_hosts_count=None,
177
+ computenode_measurement_duration=None,
178
+ computenode_warmup_duration=None,
179
+ computenode_stabilization_duration=None,
180
+ computenode_preemptible=False,
181
+ computenode_cpu_utilization_target=None,
182
+ computenode_decommission_timeout=None,
183
+ datanode_count=1,
184
+ datanode_disk_size=None,
185
+ datanode_disk_type=None,
186
+ datanode_resource_preset=None,
187
+ folder_id="my_folder_id",
188
+ masternode_disk_size=None,
189
+ masternode_disk_type=None,
190
+ masternode_resource_preset=None,
191
+ s3_bucket="my_bucket_name",
192
+ service_account_id=None,
193
+ services=("HDFS", "YARN", "MAPREDUCE", "HIVE", "SPARK"),
194
+ ssh_public_keys=[
195
+ "ssh-rsa AAA5B3NzaC1yc2EAA1ADA2ABA3AA4QCxO38tKA0XIs9ivPxt7AYdf3bgtAR1ow3Qkb9GPQ6wkFHQq"
196
+ "cFDe6faKCxH6iDRt2o4D8L8Bx6zN42uZSB0nf8jkIxFTcEU3mFSXEbWByg78ao3dMrAAj1tyr1H1pON6P0="
197
+ ],
198
+ subnet_id="my_subnet_id",
199
+ zone="ru-central1-c",
200
+ log_group_id=LOG_GROUP_ID,
201
+ properties=None,
202
+ enable_ui_proxy=False,
203
+ host_group_ids=None,
204
+ security_group_ids=None,
205
+ labels=None,
206
+ initialization_actions=None,
207
+ environment=None,
208
+ oslogin_enabled=False,
209
+ )
210
+ context["task_instance"].xcom_push.assert_has_calls(
211
+ [
212
+ call(key="cluster_id", value=mock_create_cluster().response.id),
213
+ call(key="yandexcloud_connection_id", value=CONNECTION_ID),
214
+ ]
215
+ )
216
+
157
217
  @patch("airflow.providers.yandex.utils.credentials.get_credentials")
158
218
  @patch(f"{BASEHOOK_PATCH_PATH}.get_connection")
159
219
  @patch("yandexcloud._wrappers.dataproc.Dataproc.delete_cluster")
@@ -21,6 +21,9 @@ from datetime import datetime, timedelta
21
21
  from unittest.mock import MagicMock, call, patch
22
22
 
23
23
  import pytest
24
+
25
+ pytest.importorskip("yandexcloud")
26
+
24
27
  import responses
25
28
  from responses import matchers
26
29
 
@@ -30,17 +33,7 @@ from airflow.providers.yandex.operators.yq import YQExecuteQueryOperator
30
33
 
31
34
  from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
32
35
 
33
- yandexcloud = pytest.importorskip("yandexcloud")
34
-
35
- try:
36
- import importlib.util
37
-
38
- if not importlib.util.find_spec("airflow.sdk.bases.hook"):
39
- raise ImportError
40
-
41
- BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
42
- except ImportError:
43
- BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
36
+ BASEHOOK_PATCH_PATH = "airflow.providers.common.compat.sdk.BaseHook"
44
37
 
45
38
  OAUTH_TOKEN = "my_oauth_token"
46
39
  FOLDER_ID = "my_folder_id"
@@ -20,6 +20,9 @@ import json
20
20
  from unittest.mock import MagicMock, Mock, patch
21
21
 
22
22
  import pytest
23
+
24
+ yandexcloud = pytest.importorskip("yandexcloud")
25
+
23
26
  import yandex.cloud.lockbox.v1.payload_pb2 as payload_pb
24
27
  import yandex.cloud.lockbox.v1.secret_pb2 as secret_pb
25
28
  import yandex.cloud.lockbox.v1.secret_service_pb2 as secret_service_pb
@@ -27,8 +30,6 @@ import yandex.cloud.lockbox.v1.secret_service_pb2 as secret_service_pb
27
30
  from airflow.providers.yandex.secrets.lockbox import LockboxSecretBackend
28
31
  from airflow.providers.yandex.utils.defaults import default_conn_name
29
32
 
30
- yandexcloud = pytest.importorskip("yandexcloud")
31
-
32
33
 
33
34
  class TestLockboxSecretBackend:
34
35
  @patch("airflow.providers.yandex.secrets.lockbox.LockboxSecretBackend._get_secret_value")
@@ -75,7 +75,7 @@ def test_get_field_from_extras_field_name_with_extra_raise_exception():
75
75
  default = None
76
76
  extras = {}
77
77
 
78
- with pytest.raises(ValueError):
78
+ with pytest.raises(ValueError, match="extra__yandexcloud__field"):
79
79
  get_field_from_extras(
80
80
  extras=extras,
81
81
  field_name=field_name,
@@ -41,7 +41,7 @@ def test_provider_user_agent():
41
41
  user_agent_provider = f"{provider_name}/{provider.version}"
42
42
  assert user_agent_provider in user_agent
43
43
 
44
- from airflow.configuration import conf
44
+ from airflow.providers.common.compat.sdk import conf
45
45
 
46
46
  user_agent_prefix = conf.get("yandex", "sdk_user_agent_prefix", fallback="")
47
47
  assert user_agent_prefix in user_agent
@@ -1 +0,0 @@
1
- 7b2ec33c7ad4998d9c9735b79593fcdcd3b9dd1f