apache-airflow-providers-yandex 3.11.2rc1__py3-none-any.whl → 3.12.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
--- a/airflow/providers/yandex/__init__.py
+++ b/airflow/providers/yandex/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "3.11.2"
+__version__ = "3.12.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.7.0"
+    "2.8.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-yandex:{__version__}` needs Apache Airflow 2.7.0+"
+        f"The package `apache-airflow-providers-yandex:{__version__}` needs Apache Airflow 2.8.0+"
     )
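The raised floor is enforced at import time by the gate above. A minimal standalone sketch of its logic (the helper name is ours, for illustration only): `base_version` strips pre-release and dev suffixes before comparing, so Airflow `2.8.0rc1` already satisfies the new `2.8.0` floor.

```python
# Sketch of the import-time version gate; the helper name is ours.
import packaging.version


def airflow_is_supported(airflow_version: str, minimum: str = "2.8.0") -> bool:
    # base_version drops rc/dev/post suffixes, e.g. "2.8.0rc1" -> "2.8.0"
    base = packaging.version.parse(airflow_version).base_version
    return packaging.version.parse(base) >= packaging.version.parse(minimum)


assert airflow_is_supported("2.8.0rc1")   # pre-release of the floor passes
assert not airflow_is_supported("2.7.3")  # would raise RuntimeError above
```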
--- a/airflow/providers/yandex/get_provider_info.py
+++ b/airflow/providers/yandex/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Yandex",
         "description": "This package is for Yandex, including:\n\n - `Yandex.Cloud <https://cloud.yandex.com/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1718605606,
+        "source-date-epoch": 1723970668,
         "versions": [
+            "3.12.0",
             "3.11.2",
             "3.11.1",
             "3.11.0",
@@ -54,11 +55,7 @@ def get_provider_info():
             "1.0.1",
             "1.0.0",
         ],
-        "dependencies": [
-            "apache-airflow>=2.7.0",
-            "yandexcloud>=0.278.0,!=0.289.0,!=0.290.0,<0.292.0",
-            "yandex-query-client>=0.1.4",
-        ],
+        "dependencies": ["apache-airflow>=2.8.0", "yandexcloud>=0.308.0", "yandex-query-client>=0.1.4"],
         "integrations": [
             {
                 "integration-name": "Yandex.Cloud",
--- a/airflow/providers/yandex/hooks/dataproc.py
+++ b/airflow/providers/yandex/hooks/dataproc.py
@@ -16,8 +16,16 @@
 # under the License.
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
+from deprecated import deprecated
+
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.yandex.hooks.yandex import YandexCloudBaseHook
 
+if TYPE_CHECKING:
+    from yandexcloud._wrappers.dataproc import Dataproc
+
 
 class DataprocHook(YandexCloudBaseHook):
     """
@@ -29,7 +37,19 @@ class DataprocHook(YandexCloudBaseHook):
     def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.cluster_id = None
-        self.client = self.sdk.wrappers.Dataproc(
+        self.dataproc_client: Dataproc = self.sdk.wrappers.Dataproc(
             default_folder_id=self.default_folder_id,
             default_public_ssh_key=self.default_public_ssh_key,
         )
+
+    @property
+    @deprecated(
+        reason="`client` deprecated and will be removed in the future. Use `dataproc_client` instead",
+        category=AirflowProviderDeprecationWarning,
+    )
+    def client(self):
+        return self.dataproc_client
+
+    @client.setter
+    def client(self, value):
+        self.dataproc_client = value
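A hedged sketch of what the shim means for existing callers: `hook.client` still resolves, but now emits an `AirflowProviderDeprecationWarning`. This assumes a working Airflow environment with a configured connection (`yandexcloud_default` is the provider's default connection id); treat it as illustrative, not a test shipped with the package.

```python
# Illustrative only: requires a configured Yandex Cloud connection.
import warnings

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.yandex.hooks.dataproc import DataprocHook

hook = DataprocHook(yandex_conn_id="yandexcloud_default")

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    client = hook.client  # deprecated alias still works
assert client is hook.dataproc_client
assert any(issubclass(w.category, AirflowProviderDeprecationWarning) for w in caught)
```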
--- a/airflow/providers/yandex/hooks/yandex.py
+++ b/airflow/providers/yandex/hooks/yandex.py
@@ -24,6 +24,7 @@ import yandexcloud
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
 from airflow.providers.yandex.utils.credentials import (
+    CredentialsType,
     get_credentials,
     get_service_account_id,
 )
@@ -132,13 +133,18 @@ class YandexCloudBaseHook(BaseHook):
         self.connection_id = yandex_conn_id or connection_id or default_conn_name
         self.connection = self.get_connection(self.connection_id)
         self.extras = self.connection.extra_dejson
-        self.credentials = get_credentials(
+        self.credentials: CredentialsType = get_credentials(
            oauth_token=self._get_field("oauth"),
            service_account_json=self._get_field("service_account_json"),
            service_account_json_path=self._get_field("service_account_json_path"),
        )
         sdk_config = self._get_endpoint()
-        self.sdk = yandexcloud.SDK(user_agent=provider_user_agent(), **sdk_config, **self.credentials)
+        self.sdk = yandexcloud.SDK(
+            user_agent=provider_user_agent(),
+            token=self.credentials.get("token"),
+            service_account_key=self.credentials.get("service_account_key"),
+            endpoint=sdk_config.get("endpoint"),
+        )
         self.default_folder_id = default_folder_id or self._get_field("folder_id")
         self.default_public_ssh_key = default_public_ssh_key or self._get_field("public_ssh_key")
         self.default_service_account_id = default_service_account_id or get_service_account_id(
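The SDK is now constructed with explicit keyword arguments instead of splatting `sdk_config` and the credentials dict. With the new `total=False` credentials type (see the `credentials.py` hunk below), absent keys arrive as explicit `None` values rather than being omitted. A minimal stand-alone illustration with fake values:

```python
# Fake values; shows how .get() turns missing keys into explicit None.
credentials = {"token": "fake-iam-token"}  # no service_account_key entry
sdk_config: dict = {}                      # no custom endpoint configured

explicit_kwargs = {
    "token": credentials.get("token"),                              # "fake-iam-token"
    "service_account_key": credentials.get("service_account_key"),  # None
    "endpoint": sdk_config.get("endpoint"),                         # None
}
print(explicit_kwargs)
```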
--- a/airflow/providers/yandex/hooks/yq.py
+++ b/airflow/providers/yandex/hooks/yq.py
@@ -43,7 +43,8 @@ class YQHook(YandexCloudBaseHook):
         self.client.close()
 
     def create_query(self, query_text: str | None, name: str | None = None) -> str:
-        """Create and run query.
+        """
+        Create and run query.
 
         :param query_text: SQL text.
         :param name: name for the query
@@ -54,7 +55,8 @@ class YQHook(YandexCloudBaseHook):
         )
 
     def wait_results(self, query_id: str, execution_timeout: timedelta = timedelta(minutes=30)) -> Any:
-        """Wait for query complete and get results.
+        """
+        Wait for query complete and get results.
 
         :param query_id: ID of query.
         :param execution_timeout: how long to wait for the query to complete.
@@ -66,36 +68,39 @@ class YQHook(YandexCloudBaseHook):
         return self.client.get_query_all_result_sets(query_id=query_id, result_set_count=result_set_count)
 
     def stop_query(self, query_id: str) -> None:
-        """Stop the query.
+        """
+        Stop the query.
 
         :param query_id: ID of the query.
         """
         self.client.stop_query(query_id)
 
     def get_query(self, query_id: str) -> Any:
-        """Get query info.
+        """
+        Get query info.
 
         :param query_id: ID of the query.
         """
         return self.client.get_query(query_id)
 
     def get_query_status(self, query_id: str) -> str:
-        """Get status of the query.
+        """
+        Get status of the query.
 
         :param query_id: ID of query.
         """
         return self.client.get_query_status(query_id)
 
     def compose_query_web_link(self, query_id: str):
-        """Compose web link to query in Yandex Query UI.
+        """
+        Compose web link to query in Yandex Query UI.
 
         :param query_id: ID of query.
         """
         return self.client.compose_query_web_link(query_id)
 
     def _get_iam_token(self) -> str:
-        iam_token = self.credentials.get("token")
-        if iam_token is not None:
-            return iam_token
+        if "token" in self.credentials:
+            return self.credentials["token"]
 
         return yc_auth.get_auth_token(service_account_key=self.credentials.get("service_account_key"))
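The `_get_iam_token` rewrite pairs with the new `CredentialsType` TypedDict (defined in the `credentials.py` hunk below): after an `in` check, indexing yields a plain `str` for type checkers, whereas `.get()` yields `str | None`. A self-contained sketch of the pattern (function name and values are ours):

```python
# Self-contained sketch; CredentialsType mirrors the credentials.py hunk.
from __future__ import annotations

from typing import TypedDict


class CredentialsType(TypedDict, total=False):
    token: str
    service_account_key: dict[str, str]


def iam_token_or_none(credentials: CredentialsType) -> str | None:
    if "token" in credentials:
        return credentials["token"]  # type checkers narrow this to str
    return None  # the real hook falls back to yc_auth.get_auth_token(...)


print(iam_token_or_none({"token": "fake-iam-token"}))  # fake-iam-token
print(iam_token_or_none({}))                           # None
```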
--- a/airflow/providers/yandex/operators/dataproc.py
+++ b/airflow/providers/yandex/operators/dataproc.py
@@ -33,12 +33,13 @@ class InitializationAction:
     """Data for initialization action to be run at start of DataProc cluster."""
 
     uri: str  # Uri of the executable file
-    args: Sequence[str]  # Arguments to the initialization action
+    args: Iterable[str]  # Arguments to the initialization action
     timeout: int  # Execution timeout
 
 
 class DataprocCreateClusterOperator(BaseOperator):
-    """Creates Yandex.Cloud Data Proc cluster.
+    """
+    Creates Yandex.Cloud Data Proc cluster.
 
     :param folder_id: ID of the folder in which cluster should be created.
     :param cluster_name: Cluster name. Must be unique inside the folder.
@@ -142,6 +143,12 @@ class DataprocCreateClusterOperator(BaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
+        if ssh_public_keys is None:
+            ssh_public_keys = []
+
+        if services is None:
+            services = []
+
         self.folder_id = folder_id
         self.yandex_conn_id = connection_id
         self.cluster_name = cluster_name
@@ -185,7 +192,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         self.hook = DataprocHook(
             yandex_conn_id=self.yandex_conn_id,
         )
-        operation_result = self.hook.client.create_cluster(
+        operation_result = self.hook.dataproc_client.create_cluster(
             folder_id=self.folder_id,
             cluster_name=self.cluster_name,
             cluster_description=self.cluster_description,
@@ -220,15 +227,16 @@ class DataprocCreateClusterOperator(BaseOperator):
             security_group_ids=self.security_group_ids,
             log_group_id=self.log_group_id,
             labels=self.labels,
-            initialization_actions=self.initialization_actions
-            and [
+            initialization_actions=[
                 self.hook.sdk.wrappers.InitializationAction(
                     uri=init_action.uri,
                     args=init_action.args,
                     timeout=init_action.timeout,
                 )
                 for init_action in self.initialization_actions
-            ],
+            ]
+            if self.initialization_actions
+            else None,
         )
         cluster_id = operation_result.response.id
 
@@ -243,7 +251,8 @@ class DataprocCreateClusterOperator(BaseOperator):
 
 
 class DataprocBaseOperator(BaseOperator):
-    """Base class for DataProc operators working with given cluster.
+    """
+    Base class for DataProc operators working with given cluster.
 
     :param connection_id: ID of the Yandex.Cloud Airflow connection.
     :param cluster_id: ID of the cluster to remove. (templated)
@@ -276,7 +285,8 @@ class DataprocBaseOperator(BaseOperator):
 
 
 class DataprocDeleteClusterOperator(DataprocBaseOperator):
-    """Deletes Yandex.Cloud Data Proc cluster.
+    """
+    Deletes Yandex.Cloud Data Proc cluster.
 
     :param connection_id: ID of the Yandex.Cloud Airflow connection.
     :param cluster_id: ID of the cluster to remove. (templated)
@@ -287,11 +297,12 @@ class DataprocDeleteClusterOperator(DataprocBaseOperator):
 
     def execute(self, context: Context) -> None:
         hook = self._setup(context)
-        hook.client.delete_cluster(self.cluster_id)
+        hook.dataproc_client.delete_cluster(self.cluster_id)
 
 
 class DataprocCreateHiveJobOperator(DataprocBaseOperator):
-    """Runs Hive job in Data Proc cluster.
+    """
+    Runs Hive job in Data Proc cluster.
 
     :param query: Hive query.
     :param query_file_uri: URI of the script that contains Hive queries. Can be placed in HDFS or S3.
@@ -327,7 +338,7 @@ class DataprocCreateHiveJobOperator(DataprocBaseOperator):
 
     def execute(self, context: Context) -> None:
         hook = self._setup(context)
-        hook.client.create_hive_job(
+        hook.dataproc_client.create_hive_job(
             query=self.query,
             query_file_uri=self.query_file_uri,
             script_variables=self.script_variables,
@@ -339,7 +350,8 @@ class DataprocCreateHiveJobOperator(DataprocBaseOperator):
 
 
 class DataprocCreateMapReduceJobOperator(DataprocBaseOperator):
-    """Runs Mapreduce job in Data Proc cluster.
+    """
+    Runs Mapreduce job in Data Proc cluster.
 
     :param main_jar_file_uri: URI of jar file with job.
         Can be placed in HDFS or S3. Can be specified instead of main_class.
@@ -382,7 +394,7 @@ class DataprocCreateMapReduceJobOperator(DataprocBaseOperator):
 
     def execute(self, context: Context) -> None:
         hook = self._setup(context)
-        hook.client.create_mapreduce_job(
+        hook.dataproc_client.create_mapreduce_job(
             main_class=self.main_class,
             main_jar_file_uri=self.main_jar_file_uri,
             jar_file_uris=self.jar_file_uris,
@@ -396,7 +408,8 @@ class DataprocCreateMapReduceJobOperator(DataprocBaseOperator):
 
 
 class DataprocCreateSparkJobOperator(DataprocBaseOperator):
-    """Runs Spark job in Data Proc cluster.
+    """
+    Runs Spark job in Data Proc cluster.
 
     :param main_jar_file_uri: URI of jar file with job. Can be placed in HDFS or S3.
     :param main_class: Name of the main class of the job.
@@ -449,7 +462,7 @@ class DataprocCreateSparkJobOperator(DataprocBaseOperator):
 
     def execute(self, context: Context) -> None:
         hook = self._setup(context)
-        hook.client.create_spark_job(
+        hook.dataproc_client.create_spark_job(
             main_class=self.main_class,
             main_jar_file_uri=self.main_jar_file_uri,
             jar_file_uris=self.jar_file_uris,
@@ -466,7 +479,8 @@ class DataprocCreateSparkJobOperator(DataprocBaseOperator):
 
 
 class DataprocCreatePysparkJobOperator(DataprocBaseOperator):
-    """Runs Pyspark job in Data Proc cluster.
+    """
+    Runs Pyspark job in Data Proc cluster.
 
     :param main_python_file_uri: URI of python file with job. Can be placed in HDFS or S3.
     :param python_file_uris: URIs of python files used in the job. Can be placed in HDFS or S3.
@@ -519,7 +533,7 @@ class DataprocCreatePysparkJobOperator(DataprocBaseOperator):
 
     def execute(self, context: Context) -> None:
         hook = self._setup(context)
-        hook.client.create_pyspark_job(
+        hook.dataproc_client.create_pyspark_job(
             main_python_file_uri=self.main_python_file_uri,
             python_file_uris=self.python_file_uris,
             jar_file_uris=self.jar_file_uris,
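Beyond the `client` to `dataproc_client` renames, note the `initialization_actions` expression change in `create_cluster`: the old `x and [...]` form returned whatever falsy value `x` happened to be (`None` or `[]`), while the new conditional normalizes every falsy input to `None`. A tiny demonstration (`str.upper` stands in for wrapping into an SDK object):

```python
def old_form(actions):
    return actions and [a.upper() for a in actions]


def new_form(actions):
    return [a.upper() for a in actions] if actions else None


print(old_form([]), new_form([]))        # [] None  <- the behavioral difference
print(old_form(None), new_form(None))    # None None
print(old_form(["x"]), new_form(["x"]))  # ['X'] ['X']
```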
--- a/airflow/providers/yandex/secrets/lockbox.py
+++ b/airflow/providers/yandex/secrets/lockbox.py
@@ -242,7 +242,7 @@ class LockboxSecretBackend(BaseSecretsBackend, LoggingMixin):
         return f"{prefix}{self.sep}{key}"
 
     def _get_secret_value(self, prefix: str, key: str) -> str | None:
-        secret: secret_pb.Secret = None
+        secret: secret_pb.Secret | None = None
         for s in self._get_secrets():
             if s.name == self._build_secret_name(prefix=prefix, key=key):
                 secret = s
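This one is purely a typing fix: assigning `None` to a variable annotated as a bare `secret_pb.Secret` is rejected by type checkers. A stand-in illustration (the `Secret` class below fakes the protobuf message):

```python
from __future__ import annotations


class Secret:  # stand-in for the Lockbox protobuf Secret message
    name: str = ""


# secret: Secret = None        # flagged by mypy/pyright before the fix
secret: Secret | None = None   # accepted: the None sentinel is explicit
```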
--- a/airflow/providers/yandex/utils/credentials.py
+++ b/airflow/providers/yandex/utils/credentials.py
@@ -18,16 +18,23 @@ from __future__ import annotations
 
 import json
 import logging
-from typing import Any
+from typing import TypedDict
 
 log = logging.getLogger(__name__)
 
 
+class CredentialsType(TypedDict, total=False):
+    """Credentials dict description."""
+
+    token: str
+    service_account_key: dict[str, str]
+
+
 def get_credentials(
     oauth_token: str | None = None,
     service_account_json: dict | str | None = None,
     service_account_json_path: str | None = None,
-) -> dict[str, Any]:
+) -> CredentialsType:
     """
     Return credentials JSON for Yandex Cloud SDK based on credentials.
 
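`total=False` makes both keys optional, so the empty dict, a token-only dict, and a service-account-only dict are all valid `CredentialsType` values, while a misspelled key becomes a static error. A sketch with fake values:

```python
from __future__ import annotations

from typing import TypedDict


class CredentialsType(TypedDict, total=False):
    token: str
    service_account_key: dict[str, str]


anonymous: CredentialsType = {}
oauth: CredentialsType = {"token": "fake-token"}
service_account: CredentialsType = {
    "service_account_key": {"id": "fake-id", "private_key": "fake-key"}
}
# bad: CredentialsType = {"oauth": "fake"}  # static error: unknown key
```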
--- a/apache_airflow_providers_yandex-3.11.2rc1.dist-info/METADATA
+++ b/apache_airflow_providers_yandex-3.12.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-yandex
-Version: 3.11.2rc1
+Version: 3.12.0
 Summary: Provider package apache-airflow-providers-yandex for Apache Airflow
 Keywords: airflow-provider,yandex,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,12 +21,12 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.7.0rc0
+Requires-Dist: apache-airflow>=2.8.0
 Requires-Dist: yandex-query-client>=0.1.4
-Requires-Dist: yandexcloud>=0.278.0,!=0.289.0,!=0.290.0,<0.292.0
+Requires-Dist: yandexcloud>=0.308.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.11.2/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.11.2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.12.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.12.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -76,7 +76,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-yandex``
 
-Release: ``3.11.2.rc1``
+Release: ``3.12.0``
 
 
 This package is for Yandex, including:
@@ -91,7 +91,7 @@ This is a provider package for ``yandex`` provider. All classes for this provider package
 are in ``airflow.providers.yandex`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.11.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.12.0/>`_.
 
 Installation
 ------------
@@ -105,13 +105,13 @@ The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 Requirements
 ------------
 
-=======================  ==========================================
+=======================  ==================
 PIP package              Version required
-=======================  ==========================================
-``apache-airflow``       ``>=2.7.0``
-``yandexcloud``          ``>=0.278.0,!=0.289.0,!=0.290.0,<0.292.0``
+=======================  ==================
+``apache-airflow``       ``>=2.8.0``
+``yandexcloud``          ``>=0.308.0``
 ``yandex-query-client``  ``>=0.1.4``
-=======================  ==========================================
+=======================  ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.11.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-yandex/3.12.0/changelog.html>`_.
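Once the 3.12.0 wheel is installed, the bumped metadata can be confirmed from Python (assumes the package is present in the current environment):

```python
from importlib.metadata import version

print(version("apache-airflow-providers-yandex"))  # expected: 3.12.0
```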
--- a/apache_airflow_providers_yandex-3.11.2rc1.dist-info/RECORD
+++ b/apache_airflow_providers_yandex-3.12.0.dist-info/RECORD
@@ -1,25 +1,25 @@
 airflow/providers/yandex/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/yandex/__init__.py,sha256=G8AzvXtrAVvJmVp2TEsaHPbRLEvfzmZILZfeTHipHuc,1494
-airflow/providers/yandex/get_provider_info.py,sha256=wO1VKFNN4YeAGx_bF_LcbrDlBi_WPqQi02RD4nK0144,4815
+airflow/providers/yandex/__init__.py,sha256=icjIfW_ya1vvqiD0HpHgBf7Jq1HuS_li4-8Kd7GKwck,1494
+airflow/providers/yandex/get_provider_info.py,sha256=Bf4RoWGLV7qisIZH-_J9bi2MQLnCxziXdMGxpN3siwg,4761
 airflow/providers/yandex/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/yandex/hooks/dataproc.py,sha256=1UdqxDMI7uL6fNkG6oU6l2tFITF_nHXiV1VUgRqF7KY,1379
-airflow/providers/yandex/hooks/yandex.py,sha256=xJMUzGo0sNpb5-LQvgq6jDxWHK3XkNzlpoeEELREeow,7097
+airflow/providers/yandex/hooks/dataproc.py,sha256=8ykGvlZUAjAKHpypiOYcWVJk7u-WNjfEohuyOy1Okss,1944
+airflow/providers/yandex/hooks/yandex.py,sha256=z8HYXIUkBxhDL2vQUUrNu4xkAPf3rjMv0SkP187odTk,7299
 airflow/providers/yandex/hooks/yandexcloud_dataproc.py,sha256=-JVJm3YLkDbJZKauCR1oCnWNkdLUJa1Fj_5HmZq1f44,1243
-airflow/providers/yandex/hooks/yq.py,sha256=KPOXEIfjW6m-MYZMjg0J852t-gVxXZNqRQtIgVaGZO0,3477
+airflow/providers/yandex/hooks/yq.py,sha256=Qh1ZTp8OVKvQ6sFzmKUMe3kbkYT5v7D4qEq6VsKtB2k,3503
 airflow/providers/yandex/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/yandex/links/yq.py,sha256=jsy3liqQFk1eSSdK9YDbor0Epp7ng_q2ueVIwsD2i-8,1578
 airflow/providers/yandex/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/yandex/operators/dataproc.py,sha256=QJc7UvBNPhAUBsuYQ4H8Wf0LpZP_-kCw7RdI0n3P_Bs,25945
+airflow/providers/yandex/operators/dataproc.py,sha256=JGBwX_ae3OZ4XtPVdvYi_XFC4sibI94mhBMbDDn0KT4,26181
 airflow/providers/yandex/operators/yandexcloud_dataproc.py,sha256=bDLMwevS5spRfVEtixdKhQTC9gqDMm9himLrRohJwKQ,1255
 airflow/providers/yandex/operators/yq.py,sha256=lGqbogakylV4s5D5movQRL4v3IU2Qt1JHH8ygo3Hd2Q,3223
 airflow/providers/yandex/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/yandex/secrets/lockbox.py,sha256=9Vi95RXd6VT36Rh1PVMUfFzm42oyrlMl77DoL9ivxVc,12161
+airflow/providers/yandex/secrets/lockbox.py,sha256=gUYWJE2qEvAZRz35qv0iKYK33BGnWesahZFuQgK5_kM,12168
 airflow/providers/yandex/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/yandex/utils/credentials.py,sha256=6McJIitAuTROJRUSKTdWChfcZ9o4WthP6GmBJ4uz-j8,3335
+airflow/providers/yandex/utils/credentials.py,sha256=l-8lIkQaIXTsNP_hMfP_tVADM54ZdV70J4563Ig7r4M,3487
 airflow/providers/yandex/utils/defaults.py,sha256=CXt75MhGJe8echoDpl1vR4VG5bEvYDDjIHmFqckDh2w,950
 airflow/providers/yandex/utils/fields.py,sha256=1D8SDWH8h0djj5Hnk50w6BpPeNJyP-689Qfjpkr-yCg,1728
 airflow/providers/yandex/utils/user_agent.py,sha256=AC-WEzhjxkgUYOy4LdX2-nnUZdMhKRRUCJ2_TjfNm6k,1839
-apache_airflow_providers_yandex-3.11.2rc1.dist-info/entry_points.txt,sha256=ApXKRkvdgU2QNSQovjewC0b-LptwfBGBnJB3LTgBNx8,102
-apache_airflow_providers_yandex-3.11.2rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_yandex-3.11.2rc1.dist-info/METADATA,sha256=eUUr7WsQMyJuXCZ-2hI9sW5XJF5kCCVb710yno_SHG8,5049
-apache_airflow_providers_yandex-3.11.2rc1.dist-info/RECORD,,
+apache_airflow_providers_yandex-3.12.0.dist-info/entry_points.txt,sha256=ApXKRkvdgU2QNSQovjewC0b-LptwfBGBnJB3LTgBNx8,102
+apache_airflow_providers_yandex-3.12.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_yandex-3.12.0.dist-info/METADATA,sha256=RZ18gEpioyAj1TYgomG-Qt1hNRCC6hw6Mt76GqOS9lw,4909
+apache_airflow_providers_yandex-3.12.0.dist-info/RECORD,,