apache-airflow-providers-snowflake 6.1.0__py3-none-any.whl → 6.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-snowflake has been flagged as potentially problematic; consult the package registry's advisory page for details.

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "6.1.0"
32
+ __version__ = "6.1.1"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.9.0"
@@ -27,8 +27,9 @@ def get_provider_info():
27
27
  "name": "Snowflake",
28
28
  "description": "`Snowflake <https://www.snowflake.com/>`__\n",
29
29
  "state": "ready",
30
- "source-date-epoch": 1739964397,
30
+ "source-date-epoch": 1741509691,
31
31
  "versions": [
32
+ "6.1.1",
32
33
  "6.1.0",
33
34
  "6.0.0",
34
35
  "5.8.1",
@@ -162,4 +163,5 @@ def get_provider_info():
162
163
  "snowflake-snowpark-python>=1.17.0;python_version<'3.12'",
163
164
  ],
164
165
  "optional-dependencies": {"openlineage": ["apache-airflow-providers-openlineage"]},
166
+ "devel-dependencies": [],
165
167
  }
@@ -28,15 +28,15 @@ from urllib.parse import urlparse
28
28
 
29
29
  from cryptography.hazmat.backends import default_backend
30
30
  from cryptography.hazmat.primitives import serialization
31
+ from snowflake import connector
32
+ from snowflake.connector import DictCursor, SnowflakeConnection, util_text
33
+ from snowflake.sqlalchemy import URL
31
34
  from sqlalchemy import create_engine
32
35
 
33
36
  from airflow.exceptions import AirflowException
34
37
  from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
35
38
  from airflow.providers.snowflake.utils.openlineage import fix_snowflake_sqlalchemy_uri
36
39
  from airflow.utils.strings import to_boolean
37
- from snowflake import connector
38
- from snowflake.connector import DictCursor, SnowflakeConnection, util_text
39
- from snowflake.sqlalchemy import URL
40
40
 
41
41
  T = TypeVar("T")
42
42
  if TYPE_CHECKING:
@@ -367,9 +367,10 @@ class SnowflakeHook(DbApiHook):
367
367
 
368
368
  :return: the created session.
369
369
  """
370
+ from snowflake.snowpark import Session
371
+
370
372
  from airflow import __version__ as airflow_version
371
373
  from airflow.providers.snowflake import __version__ as provider_version
372
- from snowflake.snowpark import Session
373
374
 
374
375
  conn_config = self._get_conn_params
375
376
  session = Session.builder.configs(conn_config).create()
@@ -455,9 +455,13 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
455
455
  method_name="execute_complete",
456
456
  )
457
457
  else:
458
- statement_status = self.poll_on_queries()
459
- if statement_status["error"]:
460
- raise AirflowException(statement_status["error"])
458
+ while True:
459
+ statement_status = self.poll_on_queries()
460
+ if statement_status["error"]:
461
+ raise AirflowException(statement_status["error"])
462
+ if not statement_status["running"]:
463
+ break
464
+
461
465
  self._hook.check_query_output(self.query_ids)
462
466
 
463
467
  def poll_on_queries(self):
@@ -465,6 +469,7 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
465
469
  queries_in_progress = set(self.query_ids)
466
470
  statement_success_status = {}
467
471
  statement_error_status = {}
472
+ statement_running_status = {}
468
473
  for query_id in self.query_ids:
469
474
  if not len(queries_in_progress):
470
475
  break
@@ -479,8 +484,14 @@ class SnowflakeSqlApiOperator(SQLExecuteQueryOperator):
479
484
  if statement_status.get("status") == "success":
480
485
  statement_success_status[query_id] = statement_status
481
486
  queries_in_progress.remove(query_id)
487
+ if statement_status.get("status") == "running":
488
+ statement_running_status[query_id] = statement_status
482
489
  time.sleep(self.poll_interval)
483
- return {"success": statement_success_status, "error": statement_error_status}
490
+ return {
491
+ "success": statement_success_status,
492
+ "error": statement_error_status,
493
+ "running": statement_running_status,
494
+ }
484
495
 
485
496
  def execute_complete(self, context: Context, event: dict[str, str | list[str]] | None = None) -> None:
486
497
  """
@@ -175,7 +175,7 @@ class CopyFromExternalStageToSnowflakeOperator(BaseOperator):
175
175
 
176
176
  >>> results = [{"file": "azure://my_account.blob.core.windows.net/azure_container/dir3/file.csv"}]
177
177
  >>> method(results)
178
- ([('wasbs://azure_container@my_account', 'dir3')], [])
178
+ ([('wasbs://azure_container@my_account', 'dir3/file.csv')], [])
179
179
 
180
180
  >>> results = [{"file": "azure://my_account.blob.core.windows.net/azure_container"}]
181
181
  >>> method(results)
@@ -183,21 +183,28 @@ class CopyFromExternalStageToSnowflakeOperator(BaseOperator):
183
183
 
184
184
  >>> results = [{"file": "s3://bucket"}, {"file": "gcs://bucket/"}, {"file": "s3://bucket/a.csv"}]
185
185
  >>> method(results)
186
- ([('gcs://bucket', '/'), ('s3://bucket', '/')], [])
186
+ ([('gcs://bucket', '/'), ('s3://bucket', '/'), ('s3://bucket', 'a.csv')], [])
187
187
 
188
188
  >>> results = [{"file": "s3://bucket/dir/file.csv"}, {"file": "gcs://bucket/dir/dir2/a.txt"}]
189
189
  >>> method(results)
190
- ([('gcs://bucket', 'dir/dir2'), ('s3://bucket', 'dir')], [])
190
+ ([('gcs://bucket', 'dir/dir2/a.txt'), ('s3://bucket', 'dir/file.csv')], [])
191
191
 
192
192
  >>> results = [
193
193
  ... {"file": "s3://bucket/dir/file.csv"},
194
194
  ... {"file": "azure://my_account.something_new.windows.net/azure_container"},
195
195
  ... ]
196
196
  >>> method(results)
197
- ([('s3://bucket', 'dir')], ['azure://my_account.something_new.windows.net/azure_container'])
197
+ ([('s3://bucket', 'dir/file.csv')], ['azure://my_account.something_new.windows.net/azure_container'])
198
+
199
+ >>> results = [
200
+ ... {"file": "s3://bucket/dir/file.csv"},
201
+ ... {"file": "s3:/invalid-s3-uri"},
202
+ ... {"file": "gcs:invalid-gcs-uri"},
203
+ ... ]
204
+ >>> method(results)
205
+ ([('s3://bucket', 'dir/file.csv')], ['gcs:invalid-gcs-uri', 's3:/invalid-s3-uri'])
198
206
  """
199
207
  import re
200
- from pathlib import Path
201
208
  from urllib.parse import urlparse
202
209
 
203
210
  azure_regex = r"azure:\/\/(\w+)?\.blob.core.windows.net\/(\w+)\/?(.*)?"
@@ -205,23 +212,31 @@ class CopyFromExternalStageToSnowflakeOperator(BaseOperator):
205
212
  unique_dataset_paths = set()
206
213
 
207
214
  for row in query_result:
208
- uri = urlparse(row["file"])
209
- if uri.scheme == "azure":
210
- match = re.fullmatch(azure_regex, row["file"])
211
- if not match:
215
+ try:
216
+ uri = urlparse(row["file"])
217
+
218
+ # Check for valid URI structure
219
+ if not uri.scheme or not uri.netloc:
212
220
  extraction_error_files.append(row["file"])
213
221
  continue
214
- account_name, container_name, name = match.groups()
215
- namespace = f"wasbs://{container_name}@{account_name}"
216
- else:
217
- namespace = f"{uri.scheme}://{uri.netloc}"
218
- name = uri.path.lstrip("/")
219
222
 
220
- name = Path(name).parent.as_posix()
221
- if name in ("", "."):
222
- name = "/"
223
+ if uri.scheme == "azure":
224
+ match = re.fullmatch(azure_regex, row["file"])
225
+ if not match:
226
+ extraction_error_files.append(row["file"])
227
+ continue
228
+ account_name, container_name, name = match.groups()
229
+ namespace = f"wasbs://{container_name}@{account_name}"
230
+ else:
231
+ namespace = f"{uri.scheme}://{uri.netloc}"
232
+ name = uri.path.lstrip("/")
233
+
234
+ if name in ("", "."):
235
+ name = "/"
223
236
 
224
- unique_dataset_paths.add((namespace, name))
237
+ unique_dataset_paths.add((namespace, name))
238
+ except Exception:
239
+ extraction_error_files.append(row["file"])
225
240
 
226
241
  return sorted(unique_dataset_paths), sorted(extraction_error_files)
227
242
 
@@ -243,9 +258,11 @@ class CopyFromExternalStageToSnowflakeOperator(BaseOperator):
243
258
  return OperatorLineage()
244
259
 
245
260
  query_results = self._result or []
246
- # If no files were uploaded we get [{"status": "0 files were uploaded..."}]
247
- if len(query_results) == 1 and query_results[0].get("status"):
261
+
262
+ # This typically happens when no files were processed (empty directory)
263
+ if len(query_results) == 1 and ("file" not in query_results[0] or query_results[0]["file"] is None):
248
264
  query_results = []
265
+
249
266
  unique_dataset_paths, extraction_error_files = self._extract_openlineage_unique_dataset_paths(
250
267
  query_results
251
268
  )
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-snowflake
3
- Version: 6.1.0
3
+ Version: 6.1.1
4
4
  Summary: Provider package apache-airflow-providers-snowflake for Apache Airflow
5
5
  Keywords: airflow-provider,snowflake,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -30,8 +30,8 @@ Requires-Dist: snowflake-sqlalchemy>=1.4.0
30
30
  Requires-Dist: snowflake-snowpark-python>=1.17.0;python_version<'3.12'
31
31
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
32
32
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
33
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.0/changelog.html
34
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.0
33
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.1/changelog.html
34
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.1
35
35
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
36
36
  Project-URL: Source Code, https://github.com/apache/airflow
37
37
  Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -39,32 +39,31 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
39
39
  Provides-Extra: openlineage
40
40
 
41
41
 
42
- .. Licensed to the Apache Software Foundation (ASF) under one
43
- or more contributor license agreements. See the NOTICE file
44
- distributed with this work for additional information
45
- regarding copyright ownership. The ASF licenses this file
46
- to you under the Apache License, Version 2.0 (the
47
- "License"); you may not use this file except in compliance
48
- with the License. You may obtain a copy of the License at
42
+ .. Licensed to the Apache Software Foundation (ASF) under one
43
+ or more contributor license agreements. See the NOTICE file
44
+ distributed with this work for additional information
45
+ regarding copyright ownership. The ASF licenses this file
46
+ to you under the Apache License, Version 2.0 (the
47
+ "License"); you may not use this file except in compliance
48
+ with the License. You may obtain a copy of the License at
49
49
 
50
- .. http://www.apache.org/licenses/LICENSE-2.0
50
+ .. http://www.apache.org/licenses/LICENSE-2.0
51
51
 
52
- .. Unless required by applicable law or agreed to in writing,
53
- software distributed under the License is distributed on an
54
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
55
- KIND, either express or implied. See the License for the
56
- specific language governing permissions and limitations
57
- under the License.
52
+ .. Unless required by applicable law or agreed to in writing,
53
+ software distributed under the License is distributed on an
54
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
55
+ KIND, either express or implied. See the License for the
56
+ specific language governing permissions and limitations
57
+ under the License.
58
58
 
59
- .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
60
-
61
- .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
62
- `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
59
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
63
60
 
61
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
62
+ ``PROVIDER_README_TEMPLATE.rst.jinja2`` IN the ``dev/breeze/src/airflow_breeze/templates`` DIRECTORY
64
63
 
65
64
  Package ``apache-airflow-providers-snowflake``
66
65
 
67
- Release: ``6.1.0``
66
+ Release: ``6.1.1``
68
67
 
69
68
 
70
69
  `Snowflake <https://www.snowflake.com/>`__
@@ -77,7 +76,7 @@ This is a provider package for ``snowflake`` provider. All classes for this prov
77
76
  are in ``airflow.providers.snowflake`` python package.
78
77
 
79
78
  You can find package information and changelog for the provider
80
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.0/>`_.
79
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.1/>`_.
81
80
 
82
81
  Installation
83
82
  ------------
@@ -126,5 +125,5 @@ Dependent package
126
125
  ================================================================================================================== =================
127
126
 
128
127
  The changelog for the provider package can be found in the
129
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.0/changelog.html>`_.
128
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-snowflake/6.1.1/changelog.html>`_.
130
129
 
@@ -1,16 +1,16 @@
1
1
  airflow/providers/snowflake/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/snowflake/__init__.py,sha256=wbduNpHC6V1I9K4YVLh4ZhvnqMDn2Jl-HIR0Cf1eNOE,1496
3
- airflow/providers/snowflake/get_provider_info.py,sha256=x9uRV99oSHoTBm21ikl2MqVkthQs3bjiqwIJZZDsQT8,5644
2
+ airflow/providers/snowflake/__init__.py,sha256=a9Xtufn-eAD1dLTNyU--Hpz1_KPtC8TnIjkZCd69B5M,1496
3
+ airflow/providers/snowflake/get_provider_info.py,sha256=u5BI-sKjvF49I88JLTYA0XOr6R_GOSMe2hsaW67kKJg,5699
4
4
  airflow/providers/snowflake/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
5
5
  airflow/providers/snowflake/decorators/snowpark.py,sha256=IXAzhcf7lkim9wsb_7SZlk5JPMQ38KOsEtymQUr0Q68,5298
6
6
  airflow/providers/snowflake/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
- airflow/providers/snowflake/hooks/snowflake.py,sha256=cIJMMG76SUh0XrPw8uub-zqqrEG9oQt9_GSk4s4Jucc,24161
7
+ airflow/providers/snowflake/hooks/snowflake.py,sha256=z5d6enjm78OhMUZQwuNXU2yFsv6ikKRsxmwS1TYVKKs,24162
8
8
  airflow/providers/snowflake/hooks/snowflake_sql_api.py,sha256=3W3wGAWRUxu1K62qqw682vBrXrRzyEmoCYID7PlMfaA,15486
9
9
  airflow/providers/snowflake/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
10
- airflow/providers/snowflake/operators/snowflake.py,sha256=OgaX1Y-26LYG9MLv7vImcQTXR1tD-PtQVI_Ahdc3hCk,22292
10
+ airflow/providers/snowflake/operators/snowflake.py,sha256=KhC-t3N2N3QXhPFW-0ypBULQN_2z8h1-k7uDXc-Xku0,22659
11
11
  airflow/providers/snowflake/operators/snowpark.py,sha256=Wt3wzcsja0ed4q2KE9WyL74XH6mUVSPNZvcCHWEHQtc,5815
12
12
  airflow/providers/snowflake/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
13
- airflow/providers/snowflake/transfers/copy_into_snowflake.py,sha256=splXX_zn35NecxhWqbs0rAO6zJL0jzeIEdJCqBw5IOI,12511
13
+ airflow/providers/snowflake/transfers/copy_into_snowflake.py,sha256=UjbznjbK-QWN071ZFMvBHZXoFddMo0vQFK-7VLv3amo,13191
14
14
  airflow/providers/snowflake/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
15
15
  airflow/providers/snowflake/triggers/snowflake_trigger.py,sha256=38tkByMyjbVbSt-69YL8EzRBQT4rhwuOKHgbwHfULL0,4250
16
16
  airflow/providers/snowflake/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -18,7 +18,7 @@ airflow/providers/snowflake/utils/common.py,sha256=DG-KLy2KpZWAqZqm_XIECm8lmdoUl
18
18
  airflow/providers/snowflake/utils/openlineage.py,sha256=XkcYvb_cXG8tZQ6h1IwhfGCkOVf7MSBfOxW0WLGBW0s,3257
19
19
  airflow/providers/snowflake/utils/snowpark.py,sha256=lw_tleNGFJICtTw2qCJ3TjWFOwZK1t8ZCIfYumS2Q18,1616
20
20
  airflow/providers/snowflake/utils/sql_api_generate_jwt.py,sha256=9mR-vHIquv60tfAni87f6FAjKsiRHUDDrsVhzw4M9vM,6762
21
- apache_airflow_providers_snowflake-6.1.0.dist-info/entry_points.txt,sha256=bCrl5J1PXUMzbgnrKYho61rkbL2gHRT4I6f_1jlxAX4,105
22
- apache_airflow_providers_snowflake-6.1.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
23
- apache_airflow_providers_snowflake-6.1.0.dist-info/METADATA,sha256=5whcYkTWZjVZpDmsdFgM1tksqylxyncCThrKXMHdlrc,6222
24
- apache_airflow_providers_snowflake-6.1.0.dist-info/RECORD,,
21
+ apache_airflow_providers_snowflake-6.1.1.dist-info/entry_points.txt,sha256=bCrl5J1PXUMzbgnrKYho61rkbL2gHRT4I6f_1jlxAX4,105
22
+ apache_airflow_providers_snowflake-6.1.1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
23
+ apache_airflow_providers_snowflake-6.1.1.dist-info/METADATA,sha256=wRIICOXzLenMRO4U2evqycpr_-dhRmnhIxOdGnt0yAY,6208
24
+ apache_airflow_providers_snowflake-6.1.1.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: flit 3.10.1
2
+ Generator: flit 3.11.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any