apache-airflow-providers-snowflake 6.3.0__py3-none-any.whl → 6.8.0rc1__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
@@ -19,11 +19,10 @@ from __future__ import annotations
 import datetime
 import logging
 from contextlib import closing
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 from urllib.parse import quote, urlparse, urlunparse

 from airflow.providers.common.compat.openlineage.check import require_openlineage_version
-from airflow.providers.snowflake.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import timezone

 if TYPE_CHECKING:
@@ -31,6 +30,7 @@ if TYPE_CHECKING:
     from openlineage.client.facet_v2 import JobFacet

     from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
+    from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook


 log = logging.getLogger(__name__)
@@ -52,7 +52,15 @@ def fix_account_name(name: str) -> str:
         account, region = spl
         cloud = "aws"
     else:
-        account, region, cloud = spl
+        # region can easily get duplicated without crashing Snowflake, so we need to handle that as well,
+        # e.g. account_locator.europe-west3.gcp.europe-west3.gcp will be accepted by Snowflake
+        account, region, cloud, *rest = spl
+        rest = [x for x in rest if x not in (region, cloud)]
+        if rest:  # Not sure what could be left here, but leaving this just in case
+            log.warning(
+                "Unexpected parts found in Snowflake URI hostname; they will be ignored by OpenLineage: %s",
+                rest,
+            )
     return f"{account}.{region}.{cloud}"

 # Check for existing accounts with cloud names
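For intuition, a minimal standalone sketch of the deduplication the new branch performs (the helper name and sample hostname are illustrative, not part of the provider):

```python
# Illustrative sketch of the dedup logic added above; not provider code.
def normalize_account_parts(hostname: str) -> str:
    account, region, cloud, *rest = hostname.split(".")
    # Snowflake silently accepts duplicated region/cloud parts, so drop them.
    rest = [x for x in rest if x not in (region, cloud)]
    if rest:
        print(f"ignoring unexpected parts: {rest}")
    return f"{account}.{region}.{cloud}"

# "account_locator.europe-west3.gcp.europe-west3.gcp" -> "account_locator.europe-west3.gcp"
print(normalize_account_parts("account_locator.europe-west3.gcp.europe-west3.gcp"))
```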
@@ -72,13 +80,16 @@ def fix_snowflake_sqlalchemy_uri(uri: str) -> str:
     """
     Fix snowflake sqlalchemy connection URI to OpenLineage structure.

-    Snowflake sqlalchemy connection URI has following structure:
+    Snowflake sqlalchemy connection URI has the following structure:
     'snowflake://<user_login_name>:<password>@<account_identifier>/<database_name>/<schema_name>?warehouse=<warehouse_name>&role=<role_name>'
     We want account identifier normalized. It can have two forms:
-    - newer, in form of <organization>-<id>. In this case we want to do nothing.
-    - older, composed of <id>-<region>-<cloud> where region and cloud can be
+    - newer, in form of <organization_id>-<account_id>. In this case we want to do nothing.
+    - older, composed of <account_locator>.<region>.<cloud> where region and cloud can be
     optional in some cases. If <cloud> is omitted, it's AWS.
     If region and cloud are omitted, it's AWS us-west-1
+
+    Current doc on Snowflake account identifiers:
+    https://docs.snowflake.com/en/user-guide/admin-account-identifier
     """
     try:
         parts = urlparse(uri)
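Given the docstring above, the normalization should behave roughly as follows (a hedged sketch; the expected output follows the docstring's defaulting rules, with the exact form produced by `fix_account_name`):

```python
# Hedged usage sketch; the sample URI and expected output are illustrative.
from airflow.providers.snowflake.utils.openlineage import fix_snowflake_sqlalchemy_uri

# Old-style account locator with no region/cloud: defaults to AWS us-west-1.
print(fix_snowflake_sqlalchemy_uri("snowflake://user:pw@xy12345/db/schema?warehouse=wh"))
# Expected (credentials are not carried into the OpenLineage namespace):
# snowflake://xy12345.us-west-1.aws/db/schema?warehouse=wh
```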
@@ -97,60 +108,6 @@ def fix_snowflake_sqlalchemy_uri(uri: str) -> str:
     return urlunparse((parts.scheme, hostname, parts.path, parts.params, parts.query, parts.fragment))


-def _get_logical_date(task_instance):
-    # todo: remove when min airflow version >= 3.0
-    if AIRFLOW_V_3_0_PLUS:
-        dagrun = task_instance.get_template_context()["dag_run"]
-        return dagrun.logical_date or dagrun.run_after
-
-    if hasattr(task_instance, "logical_date"):
-        date = task_instance.logical_date
-    else:
-        date = task_instance.execution_date
-
-    return date
-
-
-def _get_dag_run_clear_number(task_instance):
-    # todo: remove when min airflow version >= 3.0
-    if AIRFLOW_V_3_0_PLUS:
-        dagrun = task_instance.get_template_context()["dag_run"]
-        return dagrun.clear_number
-    return task_instance.dag_run.clear_number
-
-
-# todo: move this run_id logic into OpenLineage's listener to avoid differences
-def _get_ol_run_id(task_instance) -> str:
-    """
-    Get OpenLineage run_id from TaskInstance.
-
-    It's crucial that the task_instance's run_id creation logic matches OpenLineage's listener implementation.
-    Only then can we ensure that the generated run_id aligns with the Airflow task,
-    enabling a proper connection between events.
-    """
-    from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-    # Generate same OL run id as is generated for current task instance
-    return OpenLineageAdapter.build_task_instance_run_id(
-        dag_id=task_instance.dag_id,
-        task_id=task_instance.task_id,
-        logical_date=_get_logical_date(task_instance),
-        try_number=task_instance.try_number,
-        map_index=task_instance.map_index,
-    )
-
-
-# todo: move this run_id logic into OpenLineage's listener to avoid differences
-def _get_ol_dag_run_id(task_instance) -> str:
-    from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter
-
-    return OpenLineageAdapter.build_dag_run_id(
-        dag_id=task_instance.dag_id,
-        logical_date=_get_logical_date(task_instance),
-        clear_number=_get_dag_run_clear_number(task_instance),
-    )
-
-
 def _get_parent_run_facet(task_instance):
     """
     Retrieve the ParentRunFacet associated with a specific Airflow task instance.
@@ -161,22 +118,39 @@ def _get_parent_run_facet(task_instance):
     """
     from openlineage.client.facet_v2 import parent_run

-    from airflow.providers.openlineage.conf import namespace
+    from airflow.providers.openlineage.plugins.macros import (
+        lineage_job_name,
+        lineage_job_namespace,
+        lineage_root_job_name,
+        lineage_root_run_id,
+        lineage_run_id,
+    )

-    parent_run_id = _get_ol_run_id(task_instance)
-    root_parent_run_id = _get_ol_dag_run_id(task_instance)
+    parent_run_id = lineage_run_id(task_instance)
+    parent_job_name = lineage_job_name(task_instance)
+    parent_job_namespace = lineage_job_namespace()
+
+    root_parent_run_id = lineage_root_run_id(task_instance)
+    root_parent_job_name = lineage_root_job_name(task_instance)
+
+    try:  # Added in OL provider 2.9.0, try to use it if possible
+        from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace
+
+        root_parent_job_namespace = lineage_root_job_namespace(task_instance)
+    except ImportError:
+        root_parent_job_namespace = lineage_job_namespace()

     return parent_run.ParentRunFacet(
         run=parent_run.Run(runId=parent_run_id),
         job=parent_run.Job(
-            namespace=namespace(),
-            name=f"{task_instance.dag_id}.{task_instance.task_id}",
+            namespace=parent_job_namespace,
+            name=parent_job_name,
         ),
         root=parent_run.Root(
             run=parent_run.RootRun(runId=root_parent_run_id),
             job=parent_run.RootJob(
-                name=task_instance.dag_id,
-                namespace=namespace(),
+                name=root_parent_job_name,
+                namespace=root_parent_job_namespace,
             ),
         ),
     )
@@ -187,30 +161,71 @@ def _run_single_query_with_hook(hook: SnowflakeHook, sql: str) -> list[dict]:
     with closing(hook.get_conn()) as conn:
         hook.set_autocommit(conn, False)
         with hook._get_cursor(conn, return_dictionaries=True) as cur:
+            cur.execute("ALTER SESSION SET STATEMENT_TIMEOUT_IN_SECONDS = 3;")  # only for this session
             cur.execute(sql)
             result = cur.fetchall()
         conn.commit()
     return result


+def _run_single_query_with_api_hook(hook: SnowflakeSqlApiHook, sql: str) -> list[dict[str, Any]]:
+    """Execute a query against the Snowflake API without adding extra logging or instrumentation."""
+    # `hook.execute_query` resets the query_ids, so we need to save them and re-assign after we're done
+    query_ids_before_execution = list(hook.query_ids)
+    try:
+        _query_ids = hook.execute_query(sql=sql, statement_count=0)
+        hook.wait_for_query(query_id=_query_ids[0], raise_error=True, poll_interval=1, timeout=3)
+        return hook.get_result_from_successful_sql_api_query(query_id=_query_ids[0])
+    finally:
+        hook.query_ids = query_ids_before_execution
+
+
+def _process_data_from_api(data: list[dict[str, Any]]) -> list[dict[str, Any]]:
+    """Convert 'START_TIME' and 'END_TIME' fields to UTC datetime objects."""
+    for row in data:
+        for key in ("START_TIME", "END_TIME"):
+            row[key] = datetime.datetime.fromtimestamp(float(row[key]), timezone.utc)
+    return data
+
+
 def _get_queries_details_from_snowflake(
-    hook: SnowflakeHook, query_ids: list[str]
-) -> dict[str, dict[str, str]]:
+    hook: SnowflakeHook | SnowflakeSqlApiHook, query_ids: list[str]
+) -> dict[str, dict[str, Any]]:
     """Retrieve execution details for specific queries from Snowflake's query history."""
     if not query_ids:
         return {}
     query_condition = f"IN {tuple(query_ids)}" if len(query_ids) > 1 else f"= '{query_ids[0]}'"
+    # https://docs.snowflake.com/en/sql-reference/account-usage#differences-between-account-usage-and-information-schema
+    # INFORMATION_SCHEMA.QUERY_HISTORY has no latency, so it's better than ACCOUNT_USAGE.QUERY_HISTORY
+    # https://docs.snowflake.com/en/sql-reference/functions/query_history
+    # SNOWFLAKE.INFORMATION_SCHEMA.QUERY_HISTORY() seems the most suitable function for the job:
+    # we get the history of queries executed by the user, and we're using the same credentials.
     query = (
         "SELECT "
         "QUERY_ID, EXECUTION_STATUS, START_TIME, END_TIME, QUERY_TEXT, ERROR_CODE, ERROR_MESSAGE "
         "FROM "
-        "table(information_schema.query_history()) "
+        "table(snowflake.information_schema.query_history()) "
         f"WHERE "
         f"QUERY_ID {query_condition}"
         f";"
     )

-    result = _run_single_query_with_hook(hook=hook, sql=query)
+    try:
+        # Note: need to lazy import here to avoid circular imports
+        from airflow.providers.snowflake.hooks.snowflake_sql_api import SnowflakeSqlApiHook
+
+        if isinstance(hook, SnowflakeSqlApiHook):
+            result = _run_single_query_with_api_hook(hook=hook, sql=query)
+            result = _process_data_from_api(data=result)
+        else:
+            result = _run_single_query_with_hook(hook=hook, sql=query)
+    except Exception as e:
+        log.info(
+            "OpenLineage encountered an error while retrieving additional metadata about SQL queries"
+            " from Snowflake. The process will continue with default values. Error details: %s",
+            e,
+        )
+        result = []

     return {row["QUERY_ID"]: row for row in result} if result else {}

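For context, `_process_data_from_api` exists because the SQL API returns `START_TIME`/`END_TIME` as epoch-seconds strings rather than datetimes; a self-contained sketch of the same conversion (the sample row values are made up):

```python
import datetime

# Sample row shaped like a SQL API query-history result; values are made up.
row = {"QUERY_ID": "01a2b3c4", "START_TIME": "1700000000.123", "END_TIME": "1700000003.456"}
for key in ("START_TIME", "END_TIME"):
    # Mirrors the conversion above, using stdlib UTC instead of airflow.utils.timezone.
    row[key] = datetime.datetime.fromtimestamp(float(row[key]), datetime.timezone.utc)
print(row["START_TIME"])  # 2023-11-14 22:13:20.123000+00:00
```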
@@ -246,19 +261,20 @@ def _create_snowflake_event_pair(
     return start, end


-@require_openlineage_version(provider_min_version="2.3.0")
+@require_openlineage_version(provider_min_version="2.5.0")
 def emit_openlineage_events_for_snowflake_queries(
-    query_ids: list[str],
-    query_source_namespace: str,
     task_instance,
-    hook: SnowflakeHook | None = None,
+    hook: SnowflakeHook | SnowflakeSqlApiHook | None = None,
+    query_ids: list[str] | None = None,
+    query_source_namespace: str | None = None,
+    query_for_extra_metadata: bool = False,
     additional_run_facets: dict | None = None,
     additional_job_facets: dict | None = None,
 ) -> None:
     """
     Emit OpenLineage events for executed Snowflake queries.

-    Metadata retrieval from Snowflake is attempted only if a `SnowflakeHook` is provided.
+    Metadata retrieval from Snowflake is attempted only if `query_for_extra_metadata` is True and a hook is provided.
     If metadata is available, execution details such as start time, end time, execution status,
     error messages, and SQL text are included in the events. If no metadata is found, the function
     defaults to using the Airflow task instance's state and the current timestamp.
@@ -268,10 +284,16 @@ def emit_openlineage_events_for_snowflake_queries(
     will correspond to actual query execution times.

     Args:
-        query_ids: A list of Snowflake query IDs to emit events for.
-        query_source_namespace: The namespace to be included in ExternalQueryRunFacet.
         task_instance: The Airflow task instance that ran these queries.
-        hook: A SnowflakeHook instance used to retrieve query metadata if available.
+        hook: A supported Snowflake hook instance used to retrieve query metadata if available.
+            If omitted, `query_ids` and `query_source_namespace` must be provided explicitly and
+            `query_for_extra_metadata` must be `False`.
+        query_ids: A list of Snowflake query IDs to emit events for; can only be None if `hook` is provided
+            and `hook.query_ids` are present.
+        query_source_namespace: The namespace to be included in ExternalQueryRunFacet;
+            can be `None` only if `hook` is provided.
+        query_for_extra_metadata: Whether to query Snowflake for additional metadata about queries.
+            Must be `False` if `hook` is not provided.
         additional_run_facets: Additional run facets to include in OpenLineage events.
         additional_job_facets: Additional job facets to include in OpenLineage events.
     """
@@ -280,28 +302,55 @@ def emit_openlineage_events_for_snowflake_queries(
     from airflow.providers.common.compat.openlineage.facet import (
         ErrorMessageRunFacet,
         ExternalQueryRunFacet,
+        RunFacet,
         SQLJobFacet,
     )
     from airflow.providers.openlineage.conf import namespace
     from airflow.providers.openlineage.plugins.listener import get_openlineage_listener

-    if not query_ids:
-        log.debug("No Snowflake query IDs provided; skipping OpenLineage event emission.")
-        return
-
-    query_ids = [q for q in query_ids]  # Make a copy to make sure it does not change
+    log.info("OpenLineage will emit events for Snowflake queries.")

     if hook:
+        if not query_ids:
+            log.debug("No Snowflake query IDs provided; checking the `hook.query_ids` property.")
+            query_ids = getattr(hook, "query_ids", [])
+            if not query_ids:
+                raise ValueError("No Snowflake query IDs provided and `hook.query_ids` are not present.")
+
+        if not query_source_namespace:
+            log.debug("No Snowflake query namespace provided; creating one from scratch.")
+            from airflow.providers.openlineage.sqlparser import SQLParser
+
+            connection = hook.get_connection(hook.get_conn_id())
+            query_source_namespace = SQLParser.create_namespace(
+                hook.get_openlineage_database_info(connection)
+            )
+    else:
+        if not query_ids:
+            raise ValueError("If 'hook' is not provided, 'query_ids' must be set.")
+        if not query_source_namespace:
+            raise ValueError("If 'hook' is not provided, 'query_source_namespace' must be set.")
+        if query_for_extra_metadata:
+            raise ValueError("If 'hook' is not provided, 'query_for_extra_metadata' must be False.")
+
+    query_ids = [q for q in query_ids]  # Make a copy to make sure we do not change the hook's attribute
+
+    if query_for_extra_metadata and hook:
         log.debug("Retrieving metadata for %s queries from Snowflake.", len(query_ids))
         snowflake_metadata = _get_queries_details_from_snowflake(hook, query_ids)
     else:
-        log.debug("SnowflakeHook not provided. No extra metadata will be fetched from Snowflake.")
+        log.debug("`query_for_extra_metadata` is False. No extra metadata will be fetched from Snowflake.")
         snowflake_metadata = {}

     # If real metadata is unavailable, we send events with eventTime=now
     default_event_time = timezone.utcnow()
     # If no query metadata is provided, we use task_instance's state when checking for success
-    default_state = str(task_instance.state) if hasattr(task_instance, "state") else ""
+    # ti.state has no `value` attr (AF2) when the task is still running; in AF3 we get 'running'. In that
+    # case we assume it's a user call and the query succeeded, so we replace it with success.
+    default_state = (
+        getattr(task_instance.state, "value", "running") if hasattr(task_instance, "state") else ""
+    )
+    default_state = "success" if default_state == "running" else default_state

     common_run_facets = {"parent": _get_parent_run_facet(task_instance)}
     common_job_facets: dict[str, JobFacet] = {
@@ -324,12 +373,11 @@ def emit_openlineage_events_for_snowflake_queries(
             query_metadata if query_metadata else "not found",
         )

-        # TODO(potiuk): likely typing here needs to be fixed
-        query_specific_run_facets = {  # type : ignore[assignment]
+        query_specific_run_facets: dict[str, RunFacet] = {
             "externalQuery": ExternalQueryRunFacet(externalQueryId=query_id, source=query_source_namespace)
         }
         if query_metadata.get("ERROR_MESSAGE"):
-            query_specific_run_facets["error"] = ErrorMessageRunFacet(  # type: ignore[assignment]
+            query_specific_run_facets["error"] = ErrorMessageRunFacet(
                 message=f"{query_metadata.get('ERROR_CODE')} : {query_metadata['ERROR_MESSAGE']}",
                 programmingLanguage="SQL",
             )
@@ -342,8 +390,8 @@ def emit_openlineage_events_for_snowflake_queries(
         event_batch = _create_snowflake_event_pair(
             job_namespace=namespace(),
             job_name=f"{task_instance.dag_id}.{task_instance.task_id}.query.{counter}",
-            start_time=query_metadata.get("START_TIME", default_event_time),  # type: ignore[arg-type]
-            end_time=query_metadata.get("END_TIME", default_event_time),  # type: ignore[arg-type]
+            start_time=query_metadata.get("START_TIME", default_event_time),
+            end_time=query_metadata.get("END_TIME", default_event_time),
             # `EXECUTION_STATUS` can be `success`, `fail` or `incident` (Snowflake outage, so still failure)
             is_successful=query_metadata.get("EXECUTION_STATUS", default_state).lower() == "success",
             run_facets={**query_specific_run_facets, **common_run_facets, **additional_run_facets},
@@ -352,9 +400,9 @@ def emit_openlineage_events_for_snowflake_queries(
         events.extend(event_batch)

     log.debug("Generated %s OpenLineage events; emitting now.", len(events))
-    client = get_openlineage_listener().adapter.get_or_create_openlineage_client()
+    adapter = get_openlineage_listener().adapter
     for event in events:
-        client.emit(event)
+        adapter.emit(event)

     log.info("OpenLineage has successfully finished processing information about Snowflake queries.")
     return
@@ -18,7 +18,8 @@
 from __future__ import annotations

 import inspect
-from typing import TYPE_CHECKING, Callable
+from collections.abc import Callable
+from typing import TYPE_CHECKING

 if TYPE_CHECKING:
     from snowflake.snowpark import Session
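Context on the import move: since Python 3.9, `typing.Callable` is deprecated in favor of `collections.abc.Callable`, which is directly subscriptable in annotations (PEP 585) and matches the new `Requires-Python: >=3.10` floor below. A tiny sketch:

```python
from collections.abc import Callable

# Parameterized Callable works directly in annotations on Python 3.9+ (PEP 585).
def apply(fn: Callable[[str], int], value: str) -> int:
    return fn(value)

print(apply(int, "42"))  # 42
```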
@@ -33,3 +33,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 

 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+
+__all__ = [
+    "AIRFLOW_V_3_0_PLUS",
+]
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-snowflake
-Version: 6.3.0
+Version: 6.8.0rc1
 Summary: Provider package apache-airflow-providers-snowflake for Apache Airflow
 Keywords: airflow-provider,snowflake,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
-Requires-Python: ~=3.9
+Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,28 +15,35 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
-Requires-Dist: apache-airflow-providers-common-sql>=1.21.0
-Requires-Dist: pandas>=2.1.2,<2.2
-Requires-Dist: pyarrow>=14.0.1
-Requires-Dist: snowflake-connector-python>=3.7.1
-Requires-Dist: snowflake-sqlalchemy>=1.4.0
-Requires-Dist: snowflake-snowpark-python>=1.17.0;python_version<'3.12'
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.5rc1
+Requires-Dist: pandas>=2.1.2; python_version <"3.13"
+Requires-Dist: pandas>=2.2.3; python_version >="3.13"
+Requires-Dist: pyarrow>=16.1.0; python_version < '3.13'
+Requires-Dist: pyarrow>=18.0.0; python_version >= '3.13'
+Requires-Dist: snowflake-connector-python>=3.16.0
+Requires-Dist: snowflake-sqlalchemy>=1.7.0
+Requires-Dist: snowflake-snowpark-python>=1.17.0,<9999;python_version<'3.12'
+Requires-Dist: snowflake-snowpark-python>=1.27.0,<9999;python_version>='3.12' and python_version<'3.14'
+Requires-Dist: setuptools>=80.0.0,<9999
+Requires-Dist: apache-airflow-providers-microsoft-azure>=12.8.0rc1 ; extra == "microsoft-azure"
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.3.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.3.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-snowflake/6.8.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-snowflake/6.8.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+Provides-Extra: microsoft-azure
 Provides-Extra: openlineage


@@ -63,7 +71,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-snowflake``

-Release: ``6.3.0``
+Release: ``6.8.0``


 `Snowflake <https://www.snowflake.com/>`__
@@ -76,32 +84,36 @@ This is a provider package for ``snowflake`` provider. All classes for this prov
 are in ``airflow.providers.snowflake`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.3.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.8.0/>`_.

 Installation
 ------------

-You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-snowflake``

-The package supports the following python versions: 3.9,3.10,3.11,3.12
+The package supports the following python versions: 3.10,3.11,3.12,3.13

 Requirements
 ------------

-========================================== =====================================
+========================================== ========================================================================
 PIP package                                Version required
-========================================== =====================================
-``apache-airflow``                         ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.6.0``
-``apache-airflow-providers-common-sql``    ``>=1.21.0``
-``pandas``                                 ``>=2.1.2,<2.2``
-``pyarrow``                                ``>=14.0.1``
-``snowflake-connector-python``             ``>=3.7.1``
-``snowflake-sqlalchemy``                   ``>=1.4.0``
-``snowflake-snowpark-python``              ``>=1.17.0; python_version < "3.12"``
-========================================== =====================================
+========================================== ========================================================================
+``apache-airflow``                         ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
+``apache-airflow-providers-common-sql``    ``>=1.27.5``
+``pandas``                                 ``>=2.1.2; python_version < "3.13"``
+``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
+``pyarrow``                                ``>=16.1.0; python_version < "3.13"``
+``pyarrow``                                ``>=18.0.0; python_version >= "3.13"``
+``snowflake-connector-python``             ``>=3.16.0``
+``snowflake-sqlalchemy``                   ``>=1.7.0``
+``snowflake-snowpark-python``              ``>=1.17.0,<9999; python_version < "3.12"``
+``snowflake-snowpark-python``              ``>=1.27.0,<9999; python_version >= "3.12" and python_version < "3.14"``
+``setuptools``                             ``>=80.0.0,<9999``
+========================================== ========================================================================

 Cross provider package dependencies
 -----------------------------------
@@ -116,14 +128,25 @@ You can install such cross-provider dependencies when installing from PyPI. For
     pip install apache-airflow-providers-snowflake[common.compat]


-================================================================================================================== =================
-Dependent package                                                                                                  Extra
-================================================================================================================== =================
-`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
-`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_       ``common.sql``
-`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_     ``openlineage``
-================================================================================================================== =================
+====================================================================================================================== ===================
+Dependent package                                                                                                      Extra
+====================================================================================================================== ===================
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_     ``common.compat``
+`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_           ``common.sql``
+`apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
+`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_         ``openlineage``
+====================================================================================================================== ===================
+
+Optional dependencies
+----------------------
+
+=================== ====================================================
+Extra               Dependencies
+=================== ====================================================
+``microsoft.azure`` ``apache-airflow-providers-microsoft-azure>=12.8.0``
+``openlineage``     ``apache-airflow-providers-openlineage>=2.3.0``
+=================== ====================================================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.3.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.8.0/changelog.html>`_.

@@ -0,0 +1,26 @@
+airflow/providers/snowflake/__init__.py,sha256=pgddHY3z_6NgaHwG9T-htBh_SJFQSlUfWsNhnrWYOiQ,1498
+airflow/providers/snowflake/get_provider_info.py,sha256=UnvI6oVcI5LN6MAV21dUCGxU_TxHsAjctSIcFgMhLOg,4711
+airflow/providers/snowflake/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
+airflow/providers/snowflake/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/decorators/snowpark.py,sha256=Bxge_oCz_iGqgUeMlaY3GW741PAIwnLIeQO_OXBCwYY,5219
+airflow/providers/snowflake/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/hooks/snowflake.py,sha256=UjUHqVcWctjk0W8gyqdlMmR-TmHWx_nvwOSxW417ufI,30630
+airflow/providers/snowflake/hooks/snowflake_sql_api.py,sha256=D6jX_uvpBpzQ_4dXDQcmFROJEfRYj6ehxvEFRN41jQI,23726
+airflow/providers/snowflake/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/operators/snowflake.py,sha256=_ikG_JaU8NN__Jk18sdevEf835hF16Fm87hEmfV3Otw,23116
+airflow/providers/snowflake/operators/snowpark.py,sha256=Tfd31My6arGXKo0yfi46HyVfkHO3yeT085l3ymxtGpk,5815
+airflow/providers/snowflake/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/transfers/copy_into_snowflake.py,sha256=O1kV1_FoXUBxdX0UNlxJVqgcgutoHS6DI-Ipx9iDfvg,13611
+airflow/providers/snowflake/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/triggers/snowflake_trigger.py,sha256=QXNLijmtZI7NIdPtOwbvS-4ohgrm8RV_jaBKvekosHQ,4051
+airflow/providers/snowflake/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/snowflake/utils/common.py,sha256=DG-KLy2KpZWAqZqm_XIECm8lmdoUlzwkXv9onmkQThc,1644
+airflow/providers/snowflake/utils/openlineage.py,sha256=HHMJvTPfCnxO1qMcS6rr9t0N8_jFPdMswEVFY25YoIc,17993
+airflow/providers/snowflake/utils/snowpark.py,sha256=-S6ltYiW-KooqUMGzY0OebmAzpUAu7GIjFWwuYERuk8,1629
+airflow/providers/snowflake/utils/sql_api_generate_jwt.py,sha256=9mR-vHIquv60tfAni87f6FAjKsiRHUDDrsVhzw4M9vM,6762
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/entry_points.txt,sha256=bCrl5J1PXUMzbgnrKYho61rkbL2gHRT4I6f_1jlxAX4,105
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/METADATA,sha256=qQXaINQc2tToeP3m4B3s0b_2f68C6v4ChGX-0Wtvmh8,7812
+apache_airflow_providers_snowflake-6.8.0rc1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
+Apache Airflow
+Copyright 2016-2025 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
@@ -1,25 +0,0 @@
-airflow/providers/snowflake/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/snowflake/__init__.py,sha256=bV-absjYLO_SPfvPyRwm7orXAY2ssUaMnwtRxzd8frk,1498
-airflow/providers/snowflake/get_provider_info.py,sha256=NdNRMfulBbpD-I4yFRr8U533m9djD18ijEMvuxOp4_g,3875
-airflow/providers/snowflake/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
-airflow/providers/snowflake/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/decorators/snowpark.py,sha256=tKXOjP8m8SEIu0jx2KSrd0n3jGMaIKDOwG2lMkvk3cI,5523
-airflow/providers/snowflake/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/hooks/snowflake.py,sha256=9OH16CYnnJ0-ayAg1D7OdZusEf5lSGjQurWifptp97k,28025
-airflow/providers/snowflake/hooks/snowflake_sql_api.py,sha256=-J0mPcdDc9wbB7DcnZfnXJN7H62nbR_NK5WQJxeKZjE,14532
-airflow/providers/snowflake/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/operators/snowflake.py,sha256=5MisB-bKqUFM9t5Ky913UqewoHlq3k3mCv4bnc-VY7g,22657
-airflow/providers/snowflake/operators/snowpark.py,sha256=Wt3wzcsja0ed4q2KE9WyL74XH6mUVSPNZvcCHWEHQtc,5815
-airflow/providers/snowflake/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/transfers/copy_into_snowflake.py,sha256=UjbznjbK-QWN071ZFMvBHZXoFddMo0vQFK-7VLv3amo,13191
-airflow/providers/snowflake/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/triggers/snowflake_trigger.py,sha256=38tkByMyjbVbSt-69YL8EzRBQT4rhwuOKHgbwHfULL0,4250
-airflow/providers/snowflake/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/snowflake/utils/common.py,sha256=DG-KLy2KpZWAqZqm_XIECm8lmdoUlzwkXv9onmkQThc,1644
-airflow/providers/snowflake/utils/openlineage.py,sha256=mgpfWXAzz6EnUS5IsOSyi5_NlorkAaD1jGLpzH9Hs-Y,14483
-airflow/providers/snowflake/utils/snowpark.py,sha256=9kzWRkdgoNQ8f3Wnr92LdZylMpcpRasxefpOXrM30Cw,1602
-airflow/providers/snowflake/utils/sql_api_generate_jwt.py,sha256=9mR-vHIquv60tfAni87f6FAjKsiRHUDDrsVhzw4M9vM,6762
-apache_airflow_providers_snowflake-6.3.0.dist-info/entry_points.txt,sha256=bCrl5J1PXUMzbgnrKYho61rkbL2gHRT4I6f_1jlxAX4,105
-apache_airflow_providers_snowflake-6.3.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_snowflake-6.3.0.dist-info/METADATA,sha256=Yj1vxXvBWkLVV7-mxFohU_CodzoCJcLfTEEi_8ghgHs,6213
-apache_airflow_providers_snowflake-6.3.0.dist-info/RECORD,,