apache-airflow-providers-snowflake 6.5.0rc1__py3-none-any.whl → 6.8.0rc1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, exactly as they appear in their public registry. It is provided for informational purposes only.
- airflow/providers/snowflake/__init__.py +3 -3
- airflow/providers/snowflake/decorators/snowpark.py +1 -11
- airflow/providers/snowflake/get_provider_info.py +16 -0
- airflow/providers/snowflake/hooks/snowflake.py +60 -14
- airflow/providers/snowflake/hooks/snowflake_sql_api.py +36 -6
- airflow/providers/snowflake/operators/snowflake.py +16 -17
- airflow/providers/snowflake/transfers/copy_into_snowflake.py +1 -1
- airflow/providers/snowflake/utils/openlineage.py +44 -70
- airflow/providers/snowflake/version_compat.py +0 -6
- {apache_airflow_providers_snowflake-6.5.0rc1.dist-info → apache_airflow_providers_snowflake-6.8.0rc1.dist-info}/METADATA +56 -36
- apache_airflow_providers_snowflake-6.8.0rc1.dist-info/RECORD +26 -0
- apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses/NOTICE +5 -0
- apache_airflow_providers_snowflake-6.5.0rc1.dist-info/RECORD +0 -25
- {apache_airflow_providers_snowflake-6.5.0rc1.dist-info → apache_airflow_providers_snowflake-6.8.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_snowflake-6.5.0rc1.dist-info → apache_airflow_providers_snowflake-6.8.0rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/snowflake → apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses}/LICENSE +0 -0

airflow/providers/snowflake/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "6.5.0"
+__version__ = "6.8.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-snowflake:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-snowflake:{__version__}` needs Apache Airflow 2.11.0+"
     )
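
The guard above compares ``base_version`` values, so a pre-release build of the floor version still passes the check. A minimal illustration using only ``packaging`` (the version strings are just examples):

    from packaging import version

    # base_version strips pre-release/dev suffixes: "2.11.0rc1" -> "2.11.0",
    # so an rc of the minimum Airflow version satisfies the check above.
    parsed = version.parse(version.parse("2.11.0rc1").base_version)
    assert parsed >= version.parse("2.11.0")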

airflow/providers/snowflake/decorators/snowpark.py
@@ -18,21 +18,11 @@
 from __future__ import annotations
 
 from collections.abc import Callable, Sequence
-from typing import TYPE_CHECKING
-
-from airflow.providers.snowflake.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.bases.decorator import DecoratedOperator, task_decorator_factory
-else:
-    from airflow.decorators.base import DecoratedOperator, task_decorator_factory  # type: ignore[no-redef]
 
+from airflow.providers.common.compat.sdk import DecoratedOperator, TaskDecorator, task_decorator_factory
 from airflow.providers.snowflake.operators.snowpark import SnowparkOperator
 from airflow.providers.snowflake.utils.snowpark import inject_session_into_op_kwargs
 
-if TYPE_CHECKING:
-    from airflow.sdk.bases.decorator import TaskDecorator
-
 
 class _SnowparkDecoratedOperator(DecoratedOperator, SnowparkOperator):
     """

airflow/providers/snowflake/get_provider_info.py
@@ -73,11 +73,13 @@ def get_provider_info():
             "source-integration-name": "Google Cloud Storage (GCS)",
             "target-integration-name": "Snowflake",
             "python-module": "airflow.providers.snowflake.transfers.copy_into_snowflake",
+            "how-to-guide": "/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst",
         },
         {
             "source-integration-name": "Microsoft Azure Blob Storage",
             "target-integration-name": "Snowflake",
             "python-module": "airflow.providers.snowflake.transfers.copy_into_snowflake",
+            "how-to-guide": "/docs/apache-airflow-providers-snowflake/operators/copy_into_snowflake.rst",
         },
     ],
     "connection-types": [
@@ -92,4 +94,18 @@ def get_provider_info():
             "python-modules": ["airflow.providers.snowflake.triggers.snowflake_trigger"],
         }
     ],
+    "config": {
+        "snowflake": {
+            "description": "Configuration for Snowflake hooks and operators.\n",
+            "options": {
+                "azure_oauth_scope": {
+                    "description": "The scope to use while retrieving OAuth token for Snowflake from Azure Entra authentication.\n",
+                    "version_added": "6.6.0",
+                    "type": "string",
+                    "example": None,
+                    "default": "api://snowflake_oauth_server/.default",
+                }
+            },
+        }
+    },
 }
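
The new ``azure_oauth_scope`` option can be set like any other Airflow config value. A hedged sketch of the two usual ways to override it (the scope value is a placeholder, not something the provider prescribes):

    import os

    # Option 1: environment variable, following Airflow's AIRFLOW__<SECTION>__<KEY> convention
    os.environ["AIRFLOW__SNOWFLAKE__AZURE_OAUTH_SCOPE"] = "api://my-snowflake-app/.default"

    # Option 2: read it back the same way the hook does (see get_azure_oauth_token below)
    from airflow.configuration import conf

    scope = conf.get("snowflake", "azure_oauth_scope", fallback="api://snowflake_oauth_server/.default")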

airflow/providers/snowflake/hooks/snowflake.py
@@ -36,7 +36,9 @@ from snowflake.connector import DictCursor, SnowflakeConnection, util_text
 from snowflake.sqlalchemy import URL
 from sqlalchemy import create_engine
 
-from airflow.
+from airflow.configuration import conf
+from airflow.exceptions import AirflowOptionalProviderFeatureException
+from airflow.providers.common.compat.sdk import AirflowException, Connection
 from airflow.providers.common.sql.hooks.handlers import return_single_query_results
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.snowflake.utils.openlineage import fix_snowflake_sqlalchemy_uri
@@ -94,6 +96,7 @@ class SnowflakeHook(DbApiHook):
     hook_name = "Snowflake"
     supports_autocommit = True
     _test_connection_sql = "select 1"
+    default_azure_oauth_scope = "api://snowflake_oauth_server/.default"
 
     @classmethod
     def get_connection_form_widgets(cls) -> dict[str, Any]:
@@ -139,6 +142,7 @@ class SnowflakeHook(DbApiHook):
                 "grant_type": "refresh_token client_credentials",
                 "token_endpoint": "token endpoint",
                 "refresh_token": "refresh token",
+                "scope": "scope",
             },
             indent=1,
         ),
@@ -220,6 +224,11 @@ class SnowflakeHook(DbApiHook):
             "redirect_uri": conn_config.get("redirect_uri", "https://localhost.com"),
         }
 
+        scope = conn_config.get("scope")
+
+        if scope:
+            data["scope"] = scope
+
         if grant_type == "refresh_token":
             data |= {
                 "refresh_token": conn_config["refresh_token"],
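
With this change, a ``scope`` stored in the connection extras is forwarded in the token request body. A sketch of the relevant extras for a ``client_credentials`` flow (all ids and URLs are placeholders):

    import json

    from airflow.models.connection import Connection

    conn = Connection(
        conn_id="snowflake_oauth_scoped",  # placeholder id
        conn_type="snowflake",
        login="my-client-id",              # used as client_id
        password="my-client-secret",       # used as client_secret
        extra=json.dumps(
            {
                "account": "my_account",
                "authenticator": "oauth",
                "grant_type": "client_credentials",
                "token_endpoint": "https://login.example.com/oauth2/v2.0/token",
                "scope": "session:role:ANALYST",  # ends up in data["scope"] above
            }
        ),
    )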
@@ -246,6 +255,38 @@ class SnowflakeHook(DbApiHook):
         token = response.json()["access_token"]
         return token
 
+    def get_azure_oauth_token(self, azure_conn_id: str) -> str:
+        """
+        Generate OAuth access token using Azure connection id.
+
+        This uses AzureBaseHook on the connection id to retrieve the token. Scope for the OAuth token can be
+        set in the config option ``azure_oauth_scope`` under the section ``[snowflake]``.
+
+        :param azure_conn_id: The connection id for the Azure connection that will be used to fetch the token.
+        :raises AttributeError: If AzureBaseHook does not have a get_token method which happens when
+            package apache-airflow-providers-microsoft-azure<12.8.0.
+        :returns: The OAuth access token string.
+        """
+        if TYPE_CHECKING:
+            from airflow.providers.microsoft.azure.hooks.azure_base import AzureBaseHook
+
+        try:
+            azure_conn = Connection.get(azure_conn_id)
+        except AttributeError:
+            azure_conn = Connection.get_connection_from_secrets(azure_conn_id)  # type: ignore[attr-defined]
+        try:
+            azure_base_hook: AzureBaseHook = azure_conn.get_hook()
+        except TypeError as e:
+            if "required positional argument: 'sdk_client'" in str(e):
+                raise AirflowOptionalProviderFeatureException(
+                    "Getting azure token is not supported by current version of 'AzureBaseHook'. "
+                    "Please upgrade apache-airflow-providers-microsoft-azure>=12.8.0"
+                ) from e
+            raise
+        scope = conf.get("snowflake", "azure_oauth_scope", fallback=self.default_azure_oauth_scope)
+        token = azure_base_hook.get_token(scope).token
+        return token
+
     @cached_property
     def _get_conn_params(self) -> dict[str, str | None]:
         """
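
Usage is a one-liner once a Microsoft Azure provider connection exists; a sketch (connection ids are placeholders):

    from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook

    hook = SnowflakeHook(snowflake_conn_id="snowflake_default")
    # Resolves "azure_default" via AzureBaseHook and returns the bearer token string
    token = hook.get_azure_oauth_token(azure_conn_id="azure_default")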
@@ -253,7 +294,7 @@ class SnowflakeHook(DbApiHook):
 
         This is used in ``get_uri()`` and ``get_connection()``.
         """
-        conn = self.get_connection(self.
+        conn = self.get_connection(self.get_conn_id())
         extra_dict = conn.extra_dejson
         account = self._get_field(extra_dict, "account") or ""
         warehouse = self._get_field(extra_dict, "warehouse") or ""
@@ -349,14 +390,19 @@ class SnowflakeHook(DbApiHook):
             conn_config["authenticator"] = "oauth"
 
         if conn_config.get("authenticator") == "oauth":
-
-
-
-
-            conn_config=
-
-
-
+            if extra_dict.get("azure_conn_id"):
+                conn_config["token"] = self.get_azure_oauth_token(extra_dict["azure_conn_id"])
+            else:
+                token_endpoint = self._get_field(extra_dict, "token_endpoint") or ""
+                conn_config["scope"] = self._get_field(extra_dict, "scope")
+                conn_config["client_id"] = conn.login
+                conn_config["client_secret"] = conn.password
+
+                conn_config["token"] = self.get_oauth_token(
+                    conn_config=conn_config,
+                    token_endpoint=token_endpoint,
+                    grant_type=extra_dict.get("grant_type", "refresh_token"),
+                )
 
         conn_config.pop("login", None)
         conn_config.pop("user", None)
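
Putting the pieces together, a Snowflake connection that delegates OAuth token retrieval to Azure only needs ``authenticator: oauth`` plus an ``azure_conn_id`` in its extras. A hedged sketch (ids and account are placeholders):

    import json

    from airflow.models.connection import Connection

    snowflake_conn = Connection(
        conn_id="snowflake_azure_oauth",
        conn_type="snowflake",
        extra=json.dumps(
            {
                "account": "my_account",
                "authenticator": "oauth",
                "azure_conn_id": "azure_default",  # routes through get_azure_oauth_token()
            }
        ),
    )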
@@ -461,7 +507,7 @@ class SnowflakeHook(DbApiHook):
     def get_autocommit(self, conn):
         return getattr(conn, "autocommit_mode", False)
 
-    @overload
+    @overload
     def run(
         self,
         sql: str | Iterable[str],
@@ -544,16 +590,16 @@ class SnowflakeHook(DbApiHook):
         results = []
         for sql_statement in sql_list:
             self.log.info("Running statement: %s, parameters: %s", sql_statement, parameters)
-            self._run_command(cur, sql_statement, parameters)
+            self._run_command(cur, sql_statement, parameters)
 
             if handler is not None:
-                result = self._make_common_data_structure(handler(cur))
+                result = self._make_common_data_structure(handler(cur))
                 if return_single_query_results(sql, return_last, split_statements):
                     _last_result = result
                     _last_description = cur.description
                 else:
                     results.append(result)
-                    self.descriptions.append(cur.description)
+                    self.descriptions.append(cur.description)
 
         query_id = cur.sfqid
         self.log.info("Rows affected: %s", cur.rowcount)

airflow/providers/snowflake/hooks/snowflake_sql_api.py
@@ -39,7 +39,8 @@ from tenacity import (
     wait_exponential,
 )
 
-from airflow.exceptions import
+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
 from airflow.providers.snowflake.utils.sql_api_generate_jwt import JWTGenerator
@@ -102,7 +103,7 @@ class SnowflakeSqlApiHook(SnowflakeHook):
             "retry": retry_if_exception(self._should_retry_on_error),
             "wait": wait_exponential(multiplier=1, min=1, max=60),
             "stop": stop_after_attempt(5),
-            "before_sleep": before_sleep_log(self.log, log_level=20),  #
+            "before_sleep": before_sleep_log(self.log, log_level=20),  # type: ignore[arg-type]
             "reraise": True,
         }
         if api_retry_args:
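
For reference, ``log_level=20`` is ``logging.INFO``; the dict above is passed straight to tenacity. A self-contained sketch of an equivalent configuration (the predicate here is a stand-in for ``_should_retry_on_error``):

    import logging

    from tenacity import Retrying, before_sleep_log, retry_if_exception, stop_after_attempt, wait_exponential

    log = logging.getLogger(__name__)
    retry_config = {
        "retry": retry_if_exception(lambda exc: isinstance(exc, ConnectionError)),  # stand-in predicate
        "wait": wait_exponential(multiplier=1, min=1, max=60),
        "stop": stop_after_attempt(5),
        "before_sleep": before_sleep_log(log, log_level=logging.INFO),  # 20 == logging.INFO
        "reraise": True,
    }

    for attempt in Retrying(**retry_config):
        with attempt:
            pass  # the HTTP call would go here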
@@ -258,16 +259,21 @@ class SnowflakeSqlApiHook(SnowflakeHook):
             conn_config=conn_config, token_endpoint=token_endpoint, grant_type=grant_type
         )
 
-    def get_request_url_header_params(
+    def get_request_url_header_params(
+        self, query_id: str, url_suffix: str | None = None
+    ) -> tuple[dict[str, Any], dict[str, Any], str]:
         """
         Build the request header Url with account name identifier and query id from the connection params.
 
         :param query_id: statement handles query ids for the individual statements.
+        :param url_suffix: Optional path suffix to append to the URL. Must start with '/', e.g. '/cancel' or '/result'.
         """
         req_id = uuid.uuid4()
         header = self.get_headers()
         params = {"requestId": str(req_id)}
         url = f"{self.account_identifier}.snowflakecomputing.com/api/v2/statements/{query_id}"
+        if url_suffix:
+            url += url_suffix
         return header, params, url
 
     def check_query_output(self, query_ids: list[str]) -> None:
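
The suffix lets the same helper serve both the status endpoint and the new cancel endpoint; a sketch (the statement handle is a placeholder):

    from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook

    hook = SnowflakeSqlApiHook(snowflake_conn_id="snowflake_default")
    header, params, url = hook.get_request_url_header_params("01b2c3d4-0000-0000-0000-000000000000", "/cancel")
    # url now ends with /api/v2/statements/<query_id>/cancel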
@@ -288,7 +294,21 @@ class SnowflakeSqlApiHook(SnowflakeHook):
         if status_code == 202:
             return {"status": "running", "message": "Query statements are still running"}
         if status_code == 422:
-
+            error_message = resp.get("message", "Unknown error occurred")
+            error_details = []
+            if code := resp.get("code"):
+                error_details.append(f"Code: {code}")
+            if sql_state := resp.get("sqlState"):
+                error_details.append(f"SQL State: {sql_state}")
+            if statement_handle := resp.get("statementHandle"):
+                error_details.append(f"Statement Handle: {statement_handle}")
+
+            if error_details:
+                enhanced_message = f"{error_message} ({', '.join(error_details)})"
+            else:
+                enhanced_message = error_message
+
+            return {"status": "error", "message": enhanced_message}
         if status_code == 200:
             if resp_statement_handles := resp.get("statementHandles"):
                 statement_handles = resp_statement_handles
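
The effect of the 422 branch is easiest to see on a sample payload (all values are illustrative):

    resp = {
        "message": "SQL compilation error",
        "code": "001003",
        "sqlState": "42000",
        "statementHandle": "01b2c3d4-0000-0000-0000-000000000000",
    }
    # _process_response(422, resp) now yields:
    # {"status": "error",
    #  "message": "SQL compilation error (Code: 001003, SQL State: 42000, "
    #             "Statement Handle: 01b2c3d4-0000-0000-0000-000000000000)"}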
@@ -399,6 +419,16 @@ class SnowflakeSqlApiHook(SnowflakeHook):
         status_code, resp = await self._make_api_call_with_retries_async("GET", url, header, params)
         return self._process_response(status_code, resp)
 
+    def _cancel_sql_api_query_execution(self, query_id: str) -> dict[str, str | list[str]]:
+        self.log.info("Cancelling query id %s", query_id)
+        header, params, url = self.get_request_url_header_params(query_id, "/cancel")
+        status_code, resp = self._make_api_call_with_retries("POST", url, header, params)
+        return self._process_response(status_code, resp)
+
+    def cancel_queries(self, query_ids: list[str]) -> None:
+        for query_id in query_ids:
+            self._cancel_sql_api_query_execution(query_id)
+
     @staticmethod
     def _should_retry_on_error(exception) -> bool:
         """
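
``cancel_queries`` is the public entry point; it cancels each statement handle via a POST to the ``/cancel`` endpoint built above. A sketch (handles are placeholders):

    from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook

    hook = SnowflakeSqlApiHook(snowflake_conn_id="snowflake_default")
    hook.cancel_queries(
        ["01b2c3d4-0000-0000-0000-000000000001", "01b2c3d4-0000-0000-0000-000000000002"]
    )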
@@ -435,7 +465,7 @@ class SnowflakeSqlApiHook(SnowflakeHook):
         :param url: The URL for the API endpoint.
         :param headers: The headers to include in the API call.
         :param params: (Optional) The query parameters to include in the API call.
-        :param
+        :param json: (Optional) The data to include in the API call.
         :return: The response object from the API call.
         """
         with requests.Session() as session:
@@ -464,7 +494,7 @@ class SnowflakeSqlApiHook(SnowflakeHook):
         :return: The response object from the API call.
         """
         async with aiohttp.ClientSession(headers=headers) as session:
-            async for attempt in AsyncRetrying(**self.retry_config):
+            async for attempt in AsyncRetrying(**self.retry_config):
                 with attempt:
                     if method.upper() == "GET":
                         async with session.request(method=method.lower(), url=url, params=params) as response:

airflow/providers/snowflake/operators/snowflake.py
@@ -24,7 +24,7 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, SupportsAbs, cast
 
 from airflow.configuration import conf
-from airflow.
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.common.sql.operators.sql import (
     SQLCheckOperator,
     SQLExecuteQueryOperator,
@@ -35,11 +35,7 @@ from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiH
 from airflow.providers.snowflake.triggers.snowflake_trigger import SnowflakeSqlApiTrigger
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class SnowflakeCheckOperator(SQLCheckOperator):
@@ -76,8 +72,6 @@ class SnowflakeCheckOperator(SQLCheckOperator):
         Template references are recognized by str ending in '.sql'
     :param snowflake_conn_id: Reference to
         :ref:`Snowflake connection id<howto/connection:snowflake>`
-    :param autocommit: if True, each command is automatically committed.
-        (default value: True)
     :param parameters: (optional) the parameters to render the SQL query with.
     :param warehouse: name of warehouse (will overwrite any warehouse
         defined in the connection's extra JSON)
@@ -109,8 +103,6 @@ class SnowflakeCheckOperator(SQLCheckOperator):
         sql: str,
         snowflake_conn_id: str = "snowflake_default",
         parameters: Iterable | Mapping[str, Any] | None = None,
-        autocommit: bool = True,
-        do_xcom_push: bool = True,
         warehouse: str | None = None,
         database: str | None = None,
         role: str | None = None,
@@ -179,8 +171,6 @@ class SnowflakeValueCheckOperator(SQLValueCheckOperator):
         tolerance: Any = None,
         snowflake_conn_id: str = "snowflake_default",
         parameters: Iterable | Mapping[str, Any] | None = None,
-        autocommit: bool = True,
-        do_xcom_push: bool = True,
         warehouse: str | None = None,
         database: str | None = None,
         role: str | None = None,
@@ -202,7 +192,12 @@ class SnowflakeValueCheckOperator(SQLValueCheckOperator):
             **hook_params,
         }
         super().__init__(
-            sql=sql,
+            sql=sql,
+            pass_value=pass_value,
+            tolerance=tolerance,
+            conn_id=snowflake_conn_id,
+            parameters=parameters,
+            **kwargs,
         )
         self.query_ids: list[str] = []
@@ -259,9 +254,6 @@ class SnowflakeIntervalCheckOperator(SQLIntervalCheckOperator):
         date_filter_column: str = "ds",
         days_back: SupportsAbs[int] = -7,
         snowflake_conn_id: str = "snowflake_default",
-        parameters: Iterable | Mapping[str, Any] | None = None,
-        autocommit: bool = True,
-        do_xcom_push: bool = True,
         warehouse: str | None = None,
         database: str | None = None,
         role: str | None = None,
@@ -427,7 +419,7 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
         """
         self.log.info("Executing: %s", self.sql)
         self.query_ids = self._hook.execute_query(
-            self.sql,
+            self.sql,
             statement_count=self.statement_count,
             bindings=self.bindings,
         )
@@ -521,3 +513,10 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
             self._hook.query_ids = self.query_ids
         else:
             self.log.info("%s completed successfully.", self.task_id)
+
+    def on_kill(self) -> None:
+        """Cancel the running query."""
+        if self.query_ids:
+            self.log.info("Cancelling the query ids %s", self.query_ids)
+            self._hook.cancel_queries(self.query_ids)
+            self.log.info("Query ids %s cancelled successfully", self.query_ids)
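
With ``on_kill`` in place, clearing or timing out a task now cancels its server-side statements instead of leaving them running. A hedged DAG-level sketch (connection id and SQL are placeholders):

    from airflow.providers.snowflake.operators.snowflake import SnowflakeSqlApiOperator

    run_proc = SnowflakeSqlApiOperator(
        task_id="run_proc",
        snowflake_conn_id="snowflake_default",
        sql="CALL my_long_running_proc();",
        statement_count=1,
    )
    # Killing this task (e.g. via the UI) now triggers on_kill(), which calls
    # hook.cancel_queries() for every tracked query id.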

airflow/providers/snowflake/transfers/copy_into_snowflake.py
@@ -22,9 +22,9 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import Any
 
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
 from airflow.providers.snowflake.utils.common import enclose_param
-from airflow.providers.snowflake.version_compat import BaseOperator
 
 
 def _validate_parameter(param_name: str, value: str | None) -> str | None:

airflow/providers/snowflake/utils/openlineage.py
@@ -23,7 +23,6 @@ from typing import TYPE_CHECKING, Any
 from urllib.parse import quote, urlparse, urlunparse
 
 from airflow.providers.common.compat.openlineage.check import require_openlineage_version
-from airflow.providers.snowflake.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone
 
 if TYPE_CHECKING:
@@ -109,60 +108,6 @@ def fix_snowflake_sqlalchemy_uri(uri: str) -> str:
     return urlunparse((parts.scheme, hostname, parts.path, parts.params, parts.query, parts.fragment))
 
 
-def _get_logical_date(task_instance):
-    # todo: remove when min airflow version >= 3.0
-    if AIRFLOW_V_3_0_PLUS:
-        dagrun = task_instance.get_template_context()["dag_run"]
-        return dagrun.logical_date or dagrun.run_after
-
-    if hasattr(task_instance, "logical_date"):
-        date = task_instance.logical_date
-    else:
-        date = task_instance.execution_date
-
-    return date
-
-
-def _get_dag_run_clear_number(task_instance):
-    # todo: remove when min airflow version >= 3.0
-    if AIRFLOW_V_3_0_PLUS:
-        dagrun = task_instance.get_template_context()["dag_run"]
-        return dagrun.clear_number
-    return task_instance.dag_run.clear_number
-
-
-# todo: move this run_id logic into OpenLineage's listener to avoid differences
-def _get_ol_run_id(task_instance) -> str:
-    """
-    Get OpenLineage run_id from TaskInstance.
-
-    It's crucial that the task_instance's run_id creation logic matches OpenLineage's listener implementation.
-    Only then can we ensure that the generated run_id aligns with the Airflow task,
-    enabling a proper connection between events.
-    """
-    from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-    # Generate same OL run id as is generated for current task instance
-    return OpenLineageAdapter.build_task_instance_run_id(
-        dag_id=task_instance.dag_id,
-        task_id=task_instance.task_id,
-        logical_date=_get_logical_date(task_instance),
-        try_number=task_instance.try_number,
-        map_index=task_instance.map_index,
-    )
-
-
-# todo: move this run_id logic into OpenLineage's listener to avoid differences
-def _get_ol_dag_run_id(task_instance) -> str:
-    from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-    return OpenLineageAdapter.build_dag_run_id(
-        dag_id=task_instance.dag_id,
-        logical_date=_get_logical_date(task_instance),
-        clear_number=_get_dag_run_clear_number(task_instance),
-    )
-
-
 def _get_parent_run_facet(task_instance):
     """
     Retrieve the ParentRunFacet associated with a specific Airflow task instance.
@@ -173,22 +118,39 @@ def _get_parent_run_facet(task_instance):
     """
     from openlineage.client.facet_v2 import parent_run
 
-    from airflow.providers.openlineage.
+    from airflow.providers.openlineage.plugins.macros import (
+        lineage_job_name,
+        lineage_job_namespace,
+        lineage_root_job_name,
+        lineage_root_run_id,
+        lineage_run_id,
+    )
+
+    parent_run_id = lineage_run_id(task_instance)
+    parent_job_name = lineage_job_name(task_instance)
+    parent_job_namespace = lineage_job_namespace()
+
+    root_parent_run_id = lineage_root_run_id(task_instance)
+    rot_parent_job_name = lineage_root_job_name(task_instance)
 
-
-
+    try:  # Added in OL provider 2.9.0, try to use it if possible
+        from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace
+
+        root_parent_job_namespace = lineage_root_job_namespace(task_instance)
+    except ImportError:
+        root_parent_job_namespace = lineage_job_namespace()
 
     return parent_run.ParentRunFacet(
         run=parent_run.Run(runId=parent_run_id),
         job=parent_run.Job(
-            namespace=
-            name=
+            namespace=parent_job_namespace,
+            name=parent_job_name,
         ),
         root=parent_run.Root(
             run=parent_run.RootRun(runId=root_parent_run_id),
             job=parent_run.RootJob(
-                name=
-                namespace=
+                name=rot_parent_job_name,
+                namespace=root_parent_job_namespace,
            ),
        ),
    )
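
The macros consumed here come from the OpenLineage provider and encode the identity of the emitting task; roughly (semantics paraphrased, ``ti`` stands for a running TaskInstance):

    from airflow.providers.openlineage.plugins.macros import (
        lineage_job_name,
        lineage_job_namespace,
        lineage_run_id,
    )

    parent_run_id = lineage_run_id(ti)          # deterministic OL run id for this task try
    parent_job_name = lineage_job_name(ti)      # "<dag_id>.<task_id>"
    parent_namespace = lineage_job_namespace()  # from the [openlineage] namespace setting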
@@ -199,6 +161,7 @@ def _run_single_query_with_hook(hook: SnowflakeHook, sql: str) -> list[dict]:
     with closing(hook.get_conn()) as conn:
         hook.set_autocommit(conn, False)
         with hook._get_cursor(conn, return_dictionaries=True) as cur:
+            cur.execute("ALTER SESSION SET STATEMENT_TIMEOUT_IN_SECONDS = 3;")  # only for this session
             cur.execute(sql)
             result = cur.fetchall()
     conn.commit()
@@ -232,25 +195,36 @@ def _get_queries_details_from_snowflake(
     if not query_ids:
         return {}
     query_condition = f"IN {tuple(query_ids)}" if len(query_ids) > 1 else f"= '{query_ids[0]}'"
+    # https://docs.snowflake.com/en/sql-reference/account-usage#differences-between-account-usage-and-information-schema
+    # INFORMATION_SCHEMA.QUERY_HISTORY has no latency, so it's better than ACCOUNT_USAGE.QUERY_HISTORY
+    # https://docs.snowflake.com/en/sql-reference/functions/query_history
+    # SNOWFLAKE.INFORMATION_SCHEMA.QUERY_HISTORY() function seems the most suitable function for the job,
+    # we get history of queries executed by the user, and we're using the same credentials.
     query = (
         "SELECT "
         "QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
         "FROM "
-        "table(information_schema.query_history()) "
+        "table(snowflake.information_schema.query_history()) "
         f"WHERE "
         f"QUERY_ID {query_condition}"
        f";"
    )
 
     try:
-        #
-
-
+        # Note: need to lazy import here to avoid circular imports
+        from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook
+
+        if isinstance(hook, SnowflakeSqlApiHook):
+            result = _run_single_query_with_api_hook(hook=hook, sql=query)
             result = _process_data_from_api(data=result)
         else:
             result = _run_single_query_with_hook(hook=hook, sql=query)
     except Exception as e:
-        log.
+        log.info(
+            "OpenLineage encountered an error while retrieving additional metadata about SQL queries"
+            " from Snowflake. The process will continue with default values. Error details: %s",
+            e,
+        )
         result = []
 
     return {row["QUERY_ID"]: row for row in result} if result else {}
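
The ``IN {tuple(query_ids)}`` trick relies on Python's tuple repr matching SQL syntax for two or more ids; a single id needs the ``= '...'`` branch because a one-element tuple would render with a trailing comma. For example:

    query_ids = ["01aa-1", "01aa-2"]  # placeholder handles
    condition = f"IN {tuple(query_ids)}" if len(query_ids) > 1 else f"= '{query_ids[0]}'"
    print(condition)               # IN ('01aa-1', '01aa-2')
    print(f"{tuple(['01aa-1'])}")  # ('01aa-1',) which is invalid SQL, hence the special case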
@@ -287,7 +261,7 @@ def _create_snowflake_event_pair(
     return start, end
 
 
-@require_openlineage_version(provider_min_version="2.
+@require_openlineage_version(provider_min_version="2.5.0")
 def emit_openlineage_events_for_snowflake_queries(
     task_instance,
     hook: SnowflakeHook | SnowflakeSqlApiHook | None = None,
@@ -416,8 +390,8 @@ def emit_openlineage_events_for_snowflake_queries(
         event_batch = _create_snowflake_event_pair(
             job_namespace=namespace(),
             job_name=f"{task_instance.dag_id}.{task_instance.task_id}.query.{counter}",
-            start_time=query_metadata.get("START_TIME", default_event_time),
-            end_time=query_metadata.get("END_TIME", default_event_time),
+            start_time=query_metadata.get("START_TIME", default_event_time),
+            end_time=query_metadata.get("END_TIME", default_event_time),
             # `EXECUTION_STATUS` can be `success`, `fail` or `incident` (Snowflake outage, so still failure)
             is_successful=query_metadata.get("EXECUTION_STATUS", default_state).lower() == "success",
             run_facets={**query_specific_run_facets, **common_run_facets, **additional_run_facets},

airflow/providers/snowflake/version_compat.py
@@ -34,12 +34,6 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator
-else:
-    from airflow.models import BaseOperator  # type: ignore[no-redef]
-
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
-    "BaseOperator",
 ]

{apache_airflow_providers_snowflake-6.5.0rc1.dist-info → apache_airflow_providers_snowflake-6.8.0rc1.dist-info}/METADATA
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-snowflake
-Version: 6.5.0rc1
+Version: 6.8.0rc1
 Summary: Provider package apache-airflow-providers-snowflake for Apache Airflow
 Keywords: airflow-provider,snowflake,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python:
+Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,29 +15,35 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-
-
-Requires-Dist: apache-airflow
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.5rc1
 Requires-Dist: pandas>=2.1.2; python_version <"3.13"
 Requires-Dist: pandas>=2.2.3; python_version >="3.13"
-Requires-Dist: pyarrow>=16.1.0
-Requires-Dist:
-Requires-Dist: snowflake-
-Requires-Dist: snowflake-
-Requires-Dist: snowflake-snowpark-python>=1.
+Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
+Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
+Requires-Dist: snowflake-connector-python>=3.16.0
+Requires-Dist: snowflake-sqlalchemy>=1.7.0
+Requires-Dist: snowflake-snowpark-python>=1.17.0,<9999;python_version<'3.12'
+Requires-Dist: snowflake-snowpark-python>=1.27.0,<9999;python_version>='3.12' and python_version<'3.14'
+Requires-Dist: setuptools>=80.0.0,<9999
+Requires-Dist: apache-airflow-providers-microsoft-azure>=12.8.0rc1 ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-snowflake/6.5.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-snowflake/6.8.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-snowflake/6.8.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+Provides-Extra: microsoft-azure
 Provides-Extra: openlineage
@@ -64,7 +71,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-snowflake``
 
-Release: ``6.5.0``
+Release: ``6.8.0``
 
 
 `Snowflake <https://www.snowflake.com/>`__
@@ -77,34 +84,36 @@ This is a provider package for ``snowflake`` provider. All classes for this prov
 are in ``airflow.providers.snowflake`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.8.0/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-snowflake``
 
-The package supports the following python versions: 3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12,3.13
 
 Requirements
 ------------
 
-==========================================
+==========================================  ========================================================================
 PIP package                                 Version required
-==========================================
-``apache-airflow``                          ``>=2.
-``apache-airflow-providers-common-compat``  ``>=1.
-``apache-airflow-providers-common-sql``     ``>=1.
+==========================================  ========================================================================
+``apache-airflow``                          ``>=2.11.0``
+``apache-airflow-providers-common-compat``  ``>=1.10.1``
+``apache-airflow-providers-common-sql``     ``>=1.27.5``
 ``pandas``                                  ``>=2.1.2; python_version < "3.13"``
 ``pandas``                                  ``>=2.2.3; python_version >= "3.13"``
-``pyarrow``                                 ``>=16.1.0``
-``
-``snowflake-
-``snowflake-
-``snowflake-snowpark-python``               ``>=1.
-
+``pyarrow``                                 ``>=16.1.0; python_version < "3.13"``
+``pyarrow``                                 ``>=18.0.0; python_version >= "3.13"``
+``snowflake-connector-python``              ``>=3.16.0``
+``snowflake-sqlalchemy``                    ``>=1.7.0``
+``snowflake-snowpark-python``               ``>=1.17.0,<9999; python_version < "3.12"``
+``snowflake-snowpark-python``               ``>=1.27.0,<9999; python_version >= "3.12" and python_version < "3.14"``
+``setuptools``                              ``>=80.0.0,<9999``
+==========================================  ========================================================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -119,14 +128,25 @@ You can install such cross-provider dependencies when installing from PyPI. For
     pip install apache-airflow-providers-snowflake[common.compat]
 
 
-
-Dependent package
-
-`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_
-`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_
-`apache-airflow-providers-
-
+====================================================================================================================== ===================
+Dependent package                                                                                                      Extra
+====================================================================================================================== ===================
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_     ``common.compat``
+`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_           ``common.sql``
+`apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
+`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_         ``openlineage``
+====================================================================================================================== ===================
+
+Optional dependencies
+----------------------
+
+===================  ====================================================
+Extra                Dependencies
+===================  ====================================================
+``microsoft.azure``  ``apache-airflow-providers-microsoft-azure>=12.8.0``
+``openlineage``      ``apache-airflow-providers-openlineage>=2.3.0``
+===================  ====================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.5.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.8.0/changelog.html>`_.

apache_airflow_providers_snowflake-6.8.0rc1.dist-info/RECORD (new file)
@@ -0,0 +1,26 @@
+airflow/providers/snowflake/__init__.py,sha256=pgddHY3z_6NgaHwG9T-htBh_SJFQSlUfWsNhnrWYOiQ,1498
+airflow/providers/snowflake/get_provider_info.py,sha256=UnvI6oVcI5LN6MAV21dUCGxU_TxHsAjctSIcFgMhLOg,4711
+airflow/providers/snowflake/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
+airflow/providers/snowflake/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/decorators/snowpark.py,sha256=Bxge_oCz_iGqgUeMlaY3GW741PAIwnLIeQO_OXBCwYY,5219
+airflow/providers/snowflake/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/hooks/snowflake.py,sha256=UjUHqVcWctjk0W8gyqdlMmR-TmHWx_nvwOSxW417ufI,30630
+airflow/providers/snowflake/hooks/snowflake_sql_api.py,sha256=D6jX_uvpBpzQ_4dXDQcmFROJEfRYj6ehxvEFRN41jQI,23726
+airflow/providers/snowflake/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/operators/snowflake.py,sha256=_ikG_JaU8NN__Jk18sdevEf835hF16Fm87hEmfV3Otw,23116
+airflow/providers/snowflake/operators/snowpark.py,sha256=Tfd31My6arGXKo0yfi46HyVfkHO3yeT085l3ymxtGpk,5815
+airflow/providers/snowflake/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/transfers/copy_into_snowflake.py,sha256=O1kV1_FoXUBxdX0UNlxJVqgcgutoHS6DI-Ipx9iDfvg,13611
+airflow/providers/snowflake/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/triggers/snowflake_trigger.py,sha256=QXNLijmtZI7NIdPtOwbvS-4ohgrm8RV_jaBKvekosHQ,4051
+airflow/providers/snowflake/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/utils/common.py,sha256=DG-KLy2KpZWAqZqm_XIECm8lmdoUlzwkXv9onmkQThc,1644
+airflow/providers/snowflake/utils/openlineage.py,sha256=HHMJvTPfCnxO1qMcS6rr9t0N8_jFPdMswEVFY25YoIc,17993
+airflow/providers/snowflake/utils/snowpark.py,sha256=-S6ltYiW-KooqUMGzY0OebmAzpUAu7GIjFWwuYERuk8,1629
+airflow/providers/snowflake/utils/sql_api_generate_jwt.py,sha256=9mR-vHIquv60tfAni87f6FAjKsiRHUDDrsVhzw4M9vM,6762
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/entry_points.txt,sha256=bCrl5J1PXUMzbgnrKYho61rkbL2gHRT4I6f_1jlxAX4,105
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/METADATA,sha256=qQXaINQc2tToeP3m4B3s0b_2f68C6v4ChGX-0Wtvmh8,7812
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/RECORD,,

apache_airflow_providers_snowflake-6.5.0rc1.dist-info/RECORD (removed)
@@ -1,25 +0,0 @@
-airflow/providers/snowflake/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/snowflake/__init__.py,sha256=adBe6ltW-rLtYQFNU8hpf6foAV8Pe60l_xFgAkGQCBg,1498
-airflow/providers/snowflake/get_provider_info.py,sha256=NdNRMfulBbpD-I4yFRr8U533m9djD18ijEMvuxOp4_g,3875
-airflow/providers/snowflake/version_compat.py,sha256=IlaClFJYi0uPxuC8cJt0Ro3Kl3tjIGc31ALoKDQbw5Q,1738
-airflow/providers/snowflake/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/decorators/snowpark.py,sha256=5ocPY8wrXvKbZJokefV4HDfX0WXzrHmcekXoZjkfHEw,5523
-airflow/providers/snowflake/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/hooks/snowflake.py,sha256=RUWO9j_gd26BbktGzavxvgihCbOaBTNKlxyDvTxIBig,28502
-airflow/providers/snowflake/hooks/snowflake_sql_api.py,sha256=bCY3lSar-k3XjiDnrrWVRWTRunJL6U0Kss7fCLxFqTM,22287
-airflow/providers/snowflake/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/operators/snowflake.py,sha256=Xfz_bv1Y0M8IWv67dOXcupeYWYlG91kPVgCe_vEqntY,23253
-airflow/providers/snowflake/operators/snowpark.py,sha256=Tfd31My6arGXKo0yfi46HyVfkHO3yeT085l3ymxtGpk,5815
-airflow/providers/snowflake/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/transfers/copy_into_snowflake.py,sha256=2WQDhD9U1l38ZoIv7FImsV6S3gT_rSisg_isNi4k08E,13618
-airflow/providers/snowflake/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/triggers/snowflake_trigger.py,sha256=QXNLijmtZI7NIdPtOwbvS-4ohgrm8RV_jaBKvekosHQ,4051
-airflow/providers/snowflake/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/utils/common.py,sha256=DG-KLy2KpZWAqZqm_XIECm8lmdoUlzwkXv9onmkQThc,1644
-airflow/providers/snowflake/utils/openlineage.py,sha256=5qpLU7D9EFolXGWEKI90py45iU0OPNyUEim7_Y7a2yw,18686
-airflow/providers/snowflake/utils/snowpark.py,sha256=-S6ltYiW-KooqUMGzY0OebmAzpUAu7GIjFWwuYERuk8,1629
-airflow/providers/snowflake/utils/sql_api_generate_jwt.py,sha256=9mR-vHIquv60tfAni87f6FAjKsiRHUDDrsVhzw4M9vM,6762
-apache_airflow_providers_snowflake-6.5.0rc1.dist-info/entry_points.txt,sha256=bCrl5J1PXUMzbgnrKYho61rkbL2gHRT4I6f_1jlxAX4,105
-apache_airflow_providers_snowflake-6.5.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_snowflake-6.5.0rc1.dist-info/METADATA,sha256=i3-l5gSqKajwJE54Yr6sbqERh3r8mXiNHfuzHB8T9R0,6522
-apache_airflow_providers_snowflake-6.5.0rc1.dist-info/RECORD,,

Files without changes:

- {apache_airflow_providers_snowflake-6.5.0rc1.dist-info → apache_airflow_providers_snowflake-6.8.0rc1.dist-info}/WHEEL
- {apache_airflow_providers_snowflake-6.5.0rc1.dist-info → apache_airflow_providers_snowflake-6.8.0rc1.dist-info}/entry_points.txt
- {airflow/providers/snowflake → apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses}/LICENSE