apache-airflow-providers-sftp 5.5.1rc1__tar.gz → 5.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/NOTICE +1 -1
  2. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/PKG-INFO +18 -18
  3. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/README.rst +10 -10
  4. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/changelog.rst +34 -0
  5. apache_airflow_providers_sftp-5.7.0/docs/filesystems/index.rst +26 -0
  6. apache_airflow_providers_sftp-5.7.0/docs/filesystems/sftp.rst +63 -0
  7. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/index.rst +11 -4
  8. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/sensors/sftp_sensor.rst +18 -6
  9. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/provider.yaml +6 -1
  10. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/pyproject.toml +9 -9
  11. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/__init__.py +1 -1
  12. apache_airflow_providers_sftp-5.7.0/src/airflow/providers/sftp/fs/sftp.py +65 -0
  13. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/get_provider_info.py +1 -0
  14. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/hooks/sftp.py +2 -2
  15. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/sensors/sftp.py +10 -2
  16. apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/fs/test_sftp.py +222 -0
  17. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/unit/sftp/hooks/test_sftp.py +8 -8
  18. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/unit/sftp/operators/test_sftp.py +30 -43
  19. apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/sensors/__init__.py +16 -0
  20. apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/triggers/__init__.py +16 -0
  21. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/LICENSE +0 -0
  22. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/.latest-doc-only-change.txt +0 -0
  23. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/commits.rst +0 -0
  24. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/conf.py +0 -0
  25. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/connections/sftp.rst +0 -0
  26. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/installing-providers-from-sources.rst +0 -0
  27. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/integration-logos/SFTP.png +0 -0
  28. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/docs/security.rst +0 -0
  29. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/__init__.py +0 -0
  30. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/__init__.py +0 -0
  31. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/decorators/__init__.py +0 -0
  32. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/decorators/sensors/__init__.py +0 -0
  33. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/decorators/sensors/sftp.py +0 -0
  34. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/exceptions.py +0 -0
  35. {apache_airflow_providers_sftp-5.5.1rc1/src/airflow/providers/sftp/hooks → apache_airflow_providers_sftp-5.7.0/src/airflow/providers/sftp/fs}/__init__.py +0 -0
  36. {apache_airflow_providers_sftp-5.5.1rc1/src/airflow/providers/sftp/operators → apache_airflow_providers_sftp-5.7.0/src/airflow/providers/sftp/hooks}/__init__.py +0 -0
  37. {apache_airflow_providers_sftp-5.5.1rc1/src/airflow/providers/sftp/sensors → apache_airflow_providers_sftp-5.7.0/src/airflow/providers/sftp/operators}/__init__.py +0 -0
  38. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/operators/sftp.py +0 -0
  39. {apache_airflow_providers_sftp-5.5.1rc1/src/airflow/providers/sftp/triggers → apache_airflow_providers_sftp-5.7.0/src/airflow/providers/sftp/sensors}/__init__.py +0 -0
  40. {apache_airflow_providers_sftp-5.5.1rc1/tests/system/sftp → apache_airflow_providers_sftp-5.7.0/src/airflow/providers/sftp/triggers}/__init__.py +0 -0
  41. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/triggers/sftp.py +0 -0
  42. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/src/airflow/providers/sftp/version_compat.py +0 -0
  43. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/conftest.py +0 -0
  44. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/system/__init__.py +0 -0
  45. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit → apache_airflow_providers_sftp-5.7.0/tests/system}/sftp/__init__.py +0 -0
  46. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/system/sftp/example_sftp_sensor.py +0 -0
  47. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/unit/__init__.py +0 -0
  48. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit/sftp/decorators → apache_airflow_providers_sftp-5.7.0/tests/unit/sftp}/__init__.py +0 -0
  49. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit/sftp/decorators/sensors → apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/decorators}/__init__.py +0 -0
  50. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit/sftp/hooks → apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/decorators/sensors}/__init__.py +0 -0
  51. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/unit/sftp/decorators/sensors/test_sftp.py +0 -0
  52. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit/sftp/operators → apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/fs}/__init__.py +0 -0
  53. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit/sftp/sensors → apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/hooks}/__init__.py +0 -0
  54. {apache_airflow_providers_sftp-5.5.1rc1/tests/unit/sftp/triggers → apache_airflow_providers_sftp-5.7.0/tests/unit/sftp/operators}/__init__.py +0 -0
  55. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/unit/sftp/sensors/test_sftp.py +0 -0
  56. {apache_airflow_providers_sftp-5.5.1rc1 → apache_airflow_providers_sftp-5.7.0}/tests/unit/sftp/triggers/test_sftp.py +0 -0
@@ -1,5 +1,5 @@
1
1
  Apache Airflow
2
- Copyright 2016-2025 The Apache Software Foundation
2
+ Copyright 2016-2026 The Apache Software Foundation
3
3
 
4
4
  This product includes software developed at
5
5
  The Apache Software Foundation (http://www.apache.org/).
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-sftp
3
- Version: 5.5.1rc1
3
+ Version: 5.7.0
4
4
  Summary: Provider package apache-airflow-providers-sftp for Apache Airflow
5
5
  Keywords: airflow-provider,sftp,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,22 +22,22 @@ Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
23
  License-File: LICENSE
24
24
  License-File: NOTICE
25
- Requires-Dist: apache-airflow>=2.11.0rc1
26
- Requires-Dist: apache-airflow-providers-ssh>=4.0.0rc1
27
- Requires-Dist: apache-airflow-providers-common-compat>=1.10.1rc1
25
+ Requires-Dist: apache-airflow>=2.11.0
26
+ Requires-Dist: apache-airflow-providers-ssh>=4.0.0
27
+ Requires-Dist: apache-airflow-providers-common-compat>=1.12.0
28
28
  Requires-Dist: paramiko>=2.9.0,<4.0.0
29
29
  Requires-Dist: asyncssh>=2.12.0
30
- Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
31
30
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
31
+ Requires-Dist: sshfs>=2023.1.0 ; extra == "sshfs"
32
32
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
33
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-sftp/5.5.1/changelog.html
34
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-sftp/5.5.1
33
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0/changelog.html
34
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0
35
35
  Project-URL: Mastodon, https://fosstodon.org/@airflow
36
36
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
37
37
  Project-URL: Source Code, https://github.com/apache/airflow
38
38
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
39
- Provides-Extra: common-compat
40
39
  Provides-Extra: openlineage
40
+ Provides-Extra: sshfs
41
41
 
42
42
 
43
43
  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -64,7 +64,7 @@ Provides-Extra: openlineage
64
64
 
65
65
  Package ``apache-airflow-providers-sftp``
66
66
 
67
- Release: ``5.5.1``
67
+ Release: ``5.7.0``
68
68
 
69
69
 
70
70
  `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
@@ -77,7 +77,7 @@ This is a provider package for ``sftp`` provider. All classes for this provider
77
77
  are in ``airflow.providers.sftp`` python package.
78
78
 
79
79
  You can find package information and changelog for the provider
80
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.5.1/>`_.
80
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0/>`_.
81
81
 
82
82
  Installation
83
83
  ------------
@@ -96,7 +96,7 @@ PIP package Version required
96
96
  ========================================== ==================
97
97
  ``apache-airflow`` ``>=2.11.0``
98
98
  ``apache-airflow-providers-ssh`` ``>=4.0.0``
99
- ``apache-airflow-providers-common-compat`` ``>=1.10.1``
99
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
100
100
  ``paramiko`` ``>=2.9.0,<4.0.0``
101
101
  ``asyncssh`` ``>=2.12.0``
102
102
  ========================================== ==================
@@ -125,13 +125,13 @@ Dependent package
125
125
  Optional dependencies
126
126
  ----------------------
127
127
 
128
- ================= ==========================================
129
- Extra Dependencies
130
- ================= ==========================================
131
- ``common.compat`` ``apache-airflow-providers-common-compat``
132
- ``openlineage`` ``apache-airflow-providers-openlineage``
133
- ================= ==========================================
128
+ =============== ========================================
129
+ Extra Dependencies
130
+ =============== ========================================
131
+ ``openlineage`` ``apache-airflow-providers-openlineage``
132
+ ``sshfs`` ``sshfs>=2023.1.0``
133
+ =============== ========================================
134
134
 
135
135
  The changelog for the provider package can be found in the
136
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.5.1/changelog.html>`_.
136
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0/changelog.html>`_.
137
137
 
@@ -23,7 +23,7 @@
23
23
 
24
24
  Package ``apache-airflow-providers-sftp``
25
25
 
26
- Release: ``5.5.1``
26
+ Release: ``5.7.0``
27
27
 
28
28
 
29
29
  `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``sftp`` provider. All classes for this provider
36
36
  are in ``airflow.providers.sftp`` python package.
37
37
 
38
38
  You can find package information and changelog for the provider
39
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.5.1/>`_.
39
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0/>`_.
40
40
 
41
41
  Installation
42
42
  ------------
@@ -55,7 +55,7 @@ PIP package Version required
55
55
  ========================================== ==================
56
56
  ``apache-airflow`` ``>=2.11.0``
57
57
  ``apache-airflow-providers-ssh`` ``>=4.0.0``
58
- ``apache-airflow-providers-common-compat`` ``>=1.10.1``
58
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
59
59
  ``paramiko`` ``>=2.9.0,<4.0.0``
60
60
  ``asyncssh`` ``>=2.12.0``
61
61
  ========================================== ==================
@@ -84,12 +84,12 @@ Dependent package
84
84
  Optional dependencies
85
85
  ----------------------
86
86
 
87
- ================= ==========================================
88
- Extra Dependencies
89
- ================= ==========================================
90
- ``common.compat`` ``apache-airflow-providers-common-compat``
91
- ``openlineage`` ``apache-airflow-providers-openlineage``
92
- ================= ==========================================
87
+ =============== ========================================
88
+ Extra Dependencies
89
+ =============== ========================================
90
+ ``openlineage`` ``apache-airflow-providers-openlineage``
91
+ ``sshfs`` ``sshfs>=2023.1.0``
92
+ =============== ========================================
93
93
 
94
94
  The changelog for the provider package can be found in the
95
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.5.1/changelog.html>`_.
95
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0/changelog.html>`_.
@@ -27,6 +27,40 @@
27
27
  Changelog
28
28
  ---------
29
29
 
30
+ 5.7.0
31
+ .....
32
+
33
+ Features
34
+ ~~~~~~~~
35
+
36
+ * ``Add SSH/SFTP support for ObjectStoragePath (#60757)``
37
+
38
+ Misc
39
+ ~~~~
40
+
41
+ * ``Use common provider's get_async_connection in other providers (#56791)``
42
+
43
+ .. Below changes are excluded from the changelog. Move them to
44
+ appropriate section above if needed. Do not delete the lines(!):
45
+
46
+ 5.6.0
47
+ .....
48
+
49
+ Features
50
+ ~~~~~~~~
51
+
52
+ * ``Enhance SFTPSensor documentation and functionality (#60313)``
53
+
54
+ Misc
55
+ ~~~~
56
+
57
+ * ``New year means updated Copyright notices (#60344)``
58
+ * ``Migrate sftp provider to use airflow.sdk.configuration.conf (#59970)``
59
+
60
+ .. Below changes are excluded from the changelog. Move them to
61
+ appropriate section above if needed. Do not delete the lines(!):
62
+ * ``TaskInstance unused method cleanup (#59835)``
63
+
30
64
  5.5.1
31
65
  .....
32
66
 
@@ -0,0 +1,26 @@
1
+ .. Licensed to the Apache Software Foundation (ASF) under one
2
+ or more contributor license agreements. See the NOTICE file
3
+ distributed with this work for additional information
4
+ regarding copyright ownership. The ASF licenses this file
5
+ to you under the Apache License, Version 2.0 (the
6
+ "License"); you may not use this file except in compliance
7
+ with the License. You may obtain a copy of the License at
8
+
9
+ .. http://www.apache.org/licenses/LICENSE-2.0
10
+
11
+ .. Unless required by applicable law or agreed to in writing,
12
+ software distributed under the License is distributed on an
13
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ KIND, either express or implied. See the License for the
15
+ specific language governing permissions and limitations
16
+ under the License.
17
+
18
+ Filesystems
19
+ ===========
20
+
21
+ .. toctree::
22
+ :maxdepth: 1
23
+ :caption: Filesystem Providers
24
+ :glob:
25
+
26
+ *
@@ -0,0 +1,63 @@
1
+ .. Licensed to the Apache Software Foundation (ASF) under one
2
+ or more contributor license agreements. See the NOTICE file
3
+ distributed with this work for additional information
4
+ regarding copyright ownership. The ASF licenses this file
5
+ to you under the Apache License, Version 2.0 (the
6
+ "License"); you may not use this file except in compliance
7
+ with the License. You may obtain a copy of the License at
8
+
9
+ .. http://www.apache.org/licenses/LICENSE-2.0
10
+
11
+ .. Unless required by applicable law or agreed to in writing,
12
+ software distributed under the License is distributed on an
13
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ KIND, either express or implied. See the License for the
15
+ specific language governing permissions and limitations
16
+ under the License.
17
+
18
+ SFTP Filesystem
19
+ ===============
20
+
21
+ Use ``ObjectStoragePath`` with SFTP/SSH servers via the `sshfs <https://github.com/fsspec/sshfs>`__ library.
22
+
23
+ .. code-block:: bash
24
+
25
+ pip install apache-airflow-providers-sftp[sshfs]
26
+
27
+ URL format: ``sftp://connection_id@hostname/path/to/file`` (also supports ``ssh://``).
28
+
29
+ Configuration
30
+ -------------
31
+
32
+ Uses the standard SFTP connection. The following extras are supported:
33
+
34
+ * ``key_file`` - path to private key file
35
+ * ``private_key`` - private key content (PEM format)
36
+ * ``private_key_passphrase`` - passphrase for encrypted keys
37
+ * ``no_host_key_check`` - set to ``true`` to skip host key verification
38
+
39
+ See :doc:`/connections/sftp` for details.
40
+
41
+ Example
42
+ -------
43
+
44
+ .. code-block:: python
45
+
46
+ from airflow.sdk import ObjectStoragePath
47
+
48
+ path = ObjectStoragePath("sftp://my_conn@myserver/data/file.csv")
49
+
50
+ # read
51
+ with path.open() as f:
52
+ data = f.read()
53
+
54
+ # write
55
+ with path.open("w") as f:
56
+ f.write("content")
57
+
58
+ # list
59
+ for p in path.parent.iterdir():
60
+ print(p.name)
61
+
62
+ # copy
63
+ path.copy(ObjectStoragePath("file:///tmp/local.csv"))
@@ -29,6 +29,13 @@
29
29
  Changelog <changelog>
30
30
  Security <security>
31
31
 
32
+ .. toctree::
33
+ :hidden:
34
+ :maxdepth: 1
35
+ :caption: Guides
36
+
37
+ Filesystems <filesystems/index>
38
+
32
39
  .. toctree::
33
40
  :hidden:
34
41
  :maxdepth: 1
@@ -70,7 +77,7 @@ apache-airflow-providers-sftp package
70
77
  `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
71
78
 
72
79
 
73
- Release: 5.5.1
80
+ Release: 5.7.0
74
81
 
75
82
  Provider package
76
83
  ----------------
@@ -95,7 +102,7 @@ PIP package Version required
95
102
  ========================================== ==================
96
103
  ``apache-airflow`` ``>=2.11.0``
97
104
  ``apache-airflow-providers-ssh`` ``>=4.0.0``
98
- ``apache-airflow-providers-common-compat`` ``>=1.10.1``
105
+ ``apache-airflow-providers-common-compat`` ``>=1.12.0``
99
106
  ``paramiko`` ``>=2.9.0,<4.0.0``
100
107
  ``asyncssh`` ``>=2.12.0``
101
108
  ========================================== ==================
@@ -127,5 +134,5 @@ Downloading official packages
127
134
  You can download officially released packages and verify their checksums and signatures from the
128
135
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
129
136
 
130
- * `The apache-airflow-providers-sftp 5.5.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.5.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.5.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.5.1.tar.gz.sha512>`__)
131
- * `The apache-airflow-providers-sftp 5.5.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.5.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.5.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.5.1-py3-none-any.whl.sha512>`__)
137
+ * `The apache-airflow-providers-sftp 5.7.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.7.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.7.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.7.0.tar.gz.sha512>`__)
138
+ * `The apache-airflow-providers-sftp 5.7.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.7.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.7.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_sftp-5.7.0-py3-none-any.whl.sha512>`__)
@@ -30,14 +30,26 @@ To get more information about this sensor visit :class:`~airflow.providers.sftp.
30
30
 
31
31
  We can also use TaskFlow API. It takes the same arguments as the :class:`~airflow.providers.sftp.sensors.sftp.SFTPSensor` along with -
32
32
 
33
+ python_callable (optional)
34
+ A callable that will be executed after files matching the sensor criteria are found.
35
+ This allows you to process the found files with custom logic. The callable receives:
36
+
37
+ - Positional arguments from ``op_args``
38
+ - Keyword arguments from ``op_kwargs``, with ``files_found`` automatically added
39
+ (if ``op_kwargs`` is provided and not empty) containing the list of files that matched
40
+ the sensor criteria
41
+
42
+ The return value of the callable is stored in XCom along with the ``files_found`` list,
43
+ accessible via ``{"files_found": [...], "decorator_return_value": <callable_return_value>}``.
44
+
33
45
  op_args (optional)
34
- A list of positional arguments that will get unpacked when
35
- calling your callable (templated)
36
- op_kwargs (optional)
37
- A dictionary of keyword arguments that will get unpacked
38
- in your function (templated)
46
+ A list of positional arguments that will get unpacked when calling your callable (templated).
47
+ Only used when ``python_callable`` is provided.
39
48
 
40
- Whatever returned by the python callable is put into XCom.
49
+ op_kwargs (optional)
50
+ A dictionary of keyword arguments that will get unpacked in your function (templated).
51
+ If provided and not empty, the ``files_found`` list is automatically added to this dictionary
52
+ when the callable is invoked. Only used when ``python_callable`` is provided.
41
53
 
42
54
  .. exampleinclude:: /../../sftp/tests/system/sftp/example_sftp_sensor.py
43
55
  :language: python
@@ -22,12 +22,14 @@ description: |
22
22
  `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
23
23
 
24
24
  state: ready
25
- source-date-epoch: 1765299399
25
+ source-date-epoch: 1769461567
26
26
  # Note that those versions are maintained by release manager - do not update them manually
27
27
  # with the exception of case where other provider in sources has >= new provider version.
28
28
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
29
29
  # to be done in the same PR
30
30
  versions:
31
+ - 5.7.0
32
+ - 5.6.0
31
33
  - 5.5.1
32
34
  - 5.5.0
33
35
  - 5.4.2
@@ -122,3 +124,6 @@ triggers:
122
124
  - integration-name: SSH File Transfer Protocol (SFTP)
123
125
  python-modules:
124
126
  - airflow.providers.sftp.triggers.sftp
127
+
128
+ filesystems:
129
+ - airflow.providers.sftp.fs.sftp
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-sftp"
28
- version = "5.5.1rc1"
28
+ version = "5.7.0"
29
29
  description = "Provider package apache-airflow-providers-sftp for Apache Airflow"
30
30
  readme = "README.rst"
31
31
  license = "Apache-2.0"
@@ -58,9 +58,9 @@ requires-python = ">=3.10"
58
58
  # Make sure to run ``prek update-providers-dependencies --all-files``
59
59
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
60
60
  dependencies = [
61
- "apache-airflow>=2.11.0rc1",
62
- "apache-airflow-providers-ssh>=4.0.0rc1",
63
- "apache-airflow-providers-common-compat>=1.10.1rc1",
61
+ "apache-airflow>=2.11.0",
62
+ "apache-airflow-providers-ssh>=4.0.0",
63
+ "apache-airflow-providers-common-compat>=1.12.0",
64
64
  # TODO: Bump to >= 4.0.0 once https://github.com/apache/airflow/issues/54079 is handled
65
65
  "paramiko>=2.9.0,<4.0.0",
66
66
  "asyncssh>=2.12.0",
@@ -69,12 +69,12 @@ dependencies = [
69
69
  # The optional dependencies should be modified in place in the generated file
70
70
  # Any change in the dependencies is preserved when the file is regenerated
71
71
  [project.optional-dependencies]
72
- "common.compat" = [
73
- "apache-airflow-providers-common-compat"
74
- ]
75
72
  "openlineage" = [
76
73
  "apache-airflow-providers-openlineage"
77
74
  ]
75
+ "sshfs" = [
76
+ "sshfs>=2023.1.0",
77
+ ]
78
78
 
79
79
  [dependency-groups]
80
80
  dev = [
@@ -113,8 +113,8 @@ apache-airflow-providers-common-sql = {workspace = true}
113
113
  apache-airflow-providers-standard = {workspace = true}
114
114
 
115
115
  [project.urls]
116
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-sftp/5.5.1"
117
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-sftp/5.5.1/changelog.html"
116
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0"
117
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.7.0/changelog.html"
118
118
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
119
119
  "Source Code" = "https://github.com/apache/airflow"
120
120
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "5.5.1"
32
+ __version__ = "5.7.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.11.0"
@@ -0,0 +1,65 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ from __future__ import annotations
18
+
19
+ from typing import TYPE_CHECKING, Any
20
+
21
+ from airflow.sdk.bases.hook import BaseHook
22
+
23
+ if TYPE_CHECKING:
24
+ from fsspec import AbstractFileSystem
25
+
26
+ schemes = ["sftp", "ssh"]
27
+
28
+
29
+ def get_fs(conn_id: str | None, storage_options: dict[str, Any] | None = None) -> AbstractFileSystem:
30
+ try:
31
+ from sshfs import SSHFileSystem
32
+ except ImportError:
33
+ raise ImportError(
34
+ "Airflow FS SFTP/SSH protocol requires the sshfs library. "
35
+ "Install with: pip install apache-airflow-providers-sftp[sshfs]"
36
+ )
37
+
38
+ if conn_id is None:
39
+ return SSHFileSystem(**(storage_options or {}))
40
+
41
+ conn = BaseHook.get_connection(conn_id)
42
+ extras = conn.extra_dejson
43
+
44
+ options: dict[str, Any] = {
45
+ "host": conn.host,
46
+ "port": conn.port or 22,
47
+ "username": conn.login,
48
+ }
49
+
50
+ if conn.password:
51
+ options["password"] = conn.password
52
+
53
+ if key_file := extras.get("key_file"):
54
+ options["client_keys"] = [key_file]
55
+
56
+ if private_key := extras.get("private_key"):
57
+ options["client_keys"] = [private_key]
58
+ if passphrase := extras.get("private_key_passphrase"):
59
+ options["passphrase"] = passphrase
60
+
61
+ if str(extras.get("no_host_key_check", "")).lower() == "true":
62
+ options["known_hosts"] = None
63
+
64
+ options.update(storage_options or {})
65
+ return SSHFileSystem(**options)
@@ -71,4 +71,5 @@ def get_provider_info():
71
71
  "python-modules": ["airflow.providers.sftp.triggers.sftp"],
72
72
  }
73
73
  ],
74
+ "filesystems": ["airflow.providers.sftp.fs.sftp"],
74
75
  }
@@ -33,10 +33,10 @@ from pathlib import Path
33
33
  from typing import IO, TYPE_CHECKING, Any, cast
34
34
 
35
35
  import asyncssh
36
- from asgiref.sync import sync_to_async
37
36
  from paramiko.config import SSH_PORT
38
37
 
39
38
  from airflow.exceptions import AirflowProviderDeprecationWarning
39
+ from airflow.providers.common.compat.connection import get_async_connection
40
40
  from airflow.providers.common.compat.sdk import AirflowException, BaseHook, Connection
41
41
  from airflow.providers.sftp.exceptions import ConnectionNotOpenedException
42
42
  from airflow.providers.ssh.hooks.ssh import SSHHook
@@ -756,7 +756,7 @@ class SFTPHookAsync(BaseHook):
756
756
  - known_hosts
757
757
  - passphrase
758
758
  """
759
- conn = await sync_to_async(self.get_connection)(self.sftp_conn_id)
759
+ conn = await get_async_connection(self.sftp_conn_id)
760
760
  if conn.extra is not None:
761
761
  self._parse_extras(conn) # type: ignore[arg-type]
762
762
 
@@ -26,8 +26,7 @@ from typing import TYPE_CHECKING, Any
26
26
 
27
27
  from paramiko.sftp import SFTP_NO_SUCH_FILE
28
28
 
29
- from airflow.configuration import conf
30
- from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator, PokeReturnValue
29
+ from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator, PokeReturnValue, conf
31
30
  from airflow.providers.sftp.hooks.sftp import SFTPHook
32
31
  from airflow.providers.sftp.triggers.sftp import SFTPTrigger
33
32
  from airflow.utils.timezone import convert_to_utc, parse
@@ -44,6 +43,15 @@ class SFTPSensor(BaseSensorOperator):
44
43
  :param file_pattern: The pattern that will be used to match the file (fnmatch format)
45
44
  :param sftp_conn_id: The connection to run the sensor against
46
45
  :param newer_than: DateTime for which the file or file path should be newer than, comparison is inclusive
46
+ :param python_callable: Optional callable that will be called after files are found. The callable
47
+ will receive the found files list in ``op_kwargs['files_found']`` if ``op_kwargs`` is provided
48
+ and not empty. The return value of the callable will be stored in XCom along with the
49
+ files_found list.
50
+ :param op_args: A list of positional arguments that will get unpacked when calling your callable
51
+ (templated). Only used when ``python_callable`` is provided.
52
+ :param op_kwargs: A dictionary of keyword arguments that will get unpacked in your callable
53
+ (templated). If provided and not empty, the ``files_found`` list will be automatically added
54
+ to this dictionary. Only used when ``python_callable`` is provided.
47
55
  :param deferrable: If waiting for completion, whether to defer the task until done, default is ``False``.
48
56
  """
49
57
 
@@ -0,0 +1,222 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ from __future__ import annotations
18
+
19
+ from unittest.mock import patch
20
+
21
+ import pytest
22
+
23
+ pytest.importorskip("sshfs")
24
+
25
+ TEST_CONN_ID = "sftp_test_conn"
26
+
27
+
28
+ @pytest.fixture(scope="module", autouse=True)
29
+ def _setup_connections():
30
+ with pytest.MonkeyPatch.context() as mp_ctx:
31
+ mp_ctx.setenv(
32
+ f"AIRFLOW_CONN_{TEST_CONN_ID}".upper(),
33
+ "sftp://testuser:testpass@testhost:2222",
34
+ )
35
+ yield
36
+
37
+
38
+ class TestSftpFilesystem:
39
+ def test_schemes(self):
40
+ from airflow.providers.sftp.fs.sftp import schemes
41
+
42
+ assert "sftp" in schemes
43
+ assert "ssh" in schemes
44
+
45
+ @patch("sshfs.SSHFileSystem", autospec=True)
46
+ def test_get_fs_with_connection(self, mock_sshfs):
47
+ from airflow.providers.sftp.fs.sftp import get_fs
48
+
49
+ get_fs(conn_id=TEST_CONN_ID)
50
+
51
+ mock_sshfs.assert_called_once()
52
+ call_kwargs = mock_sshfs.call_args.kwargs
53
+ assert call_kwargs["host"] == "testhost"
54
+ assert call_kwargs["port"] == 2222
55
+ assert call_kwargs["username"] == "testuser"
56
+ assert call_kwargs["password"] == "testpass"
57
+
58
+ @patch("sshfs.SSHFileSystem", autospec=True)
59
+ def test_get_fs_without_connection(self, mock_sshfs):
60
+ from airflow.providers.sftp.fs.sftp import get_fs
61
+
62
+ # When conn_id is None, storage_options are passed directly to SSHFileSystem
63
+ storage_options = {"host": "manual-host", "username": "manual-user"}
64
+ get_fs(conn_id=None, storage_options=storage_options)
65
+
66
+ mock_sshfs.assert_called_once()
67
+ call_kwargs = mock_sshfs.call_args.kwargs
68
+ assert call_kwargs["host"] == "manual-host"
69
+ assert call_kwargs["username"] == "manual-user"
70
+
71
+ @patch("sshfs.SSHFileSystem", autospec=True)
72
+ def test_get_fs_storage_options_merge(self, mock_sshfs):
73
+ from airflow.providers.sftp.fs.sftp import get_fs
74
+
75
+ storage_options = {"custom_option": "custom_value"}
76
+ get_fs(conn_id=TEST_CONN_ID, storage_options=storage_options)
77
+
78
+ mock_sshfs.assert_called_once()
79
+ call_kwargs = mock_sshfs.call_args.kwargs
80
+ assert call_kwargs["custom_option"] == "custom_value"
81
+ assert call_kwargs["host"] == "testhost"
82
+
83
+ @patch("sshfs.SSHFileSystem", autospec=True)
84
+ def test_get_fs_storage_options_override(self, mock_sshfs):
85
+ from airflow.providers.sftp.fs.sftp import get_fs
86
+
87
+ storage_options = {"port": 3333}
88
+ get_fs(conn_id=TEST_CONN_ID, storage_options=storage_options)
89
+
90
+ mock_sshfs.assert_called_once()
91
+ call_kwargs = mock_sshfs.call_args.kwargs
92
+ assert call_kwargs["port"] == 3333
93
+
94
+ @patch("sshfs.SSHFileSystem", autospec=True)
95
+ def test_get_fs_with_key_file(self, mock_sshfs):
96
+ from airflow.providers.sftp.fs.sftp import get_fs
97
+
98
+ with pytest.MonkeyPatch.context() as mp_ctx:
99
+ mp_ctx.setenv(
100
+ "AIRFLOW_CONN_SFTP_KEY_FILE",
101
+ "sftp://testuser@testhost?key_file=%2Fpath%2Fto%2Fkey",
102
+ )
103
+
104
+ get_fs(conn_id="sftp_key_file")
105
+
106
+ mock_sshfs.assert_called_once()
107
+ call_kwargs = mock_sshfs.call_args.kwargs
108
+ assert call_kwargs["client_keys"] == ["/path/to/key"]
109
+
110
+ @patch("sshfs.SSHFileSystem", autospec=True)
111
+ def test_get_fs_with_private_key(self, mock_sshfs):
112
+ from airflow.providers.sftp.fs.sftp import get_fs
113
+
114
+ with pytest.MonkeyPatch.context() as mp_ctx:
115
+ mp_ctx.setenv(
116
+ "AIRFLOW_CONN_SFTP_PRIVATE_KEY",
117
+ "sftp://testuser@testhost?private_key=PRIVATE_KEY_CONTENT&private_key_passphrase=secret",
118
+ )
119
+
120
+ get_fs(conn_id="sftp_private_key")
121
+
122
+ mock_sshfs.assert_called_once()
123
+ call_kwargs = mock_sshfs.call_args.kwargs
124
+ assert call_kwargs["client_keys"] == ["PRIVATE_KEY_CONTENT"]
125
+ assert call_kwargs["passphrase"] == "secret"
126
+
127
+ @patch("sshfs.SSHFileSystem", autospec=True)
128
+ def test_get_fs_with_private_key_no_passphrase(self, mock_sshfs):
129
+ from airflow.providers.sftp.fs.sftp import get_fs
130
+
131
+ with pytest.MonkeyPatch.context() as mp_ctx:
132
+ mp_ctx.setenv(
133
+ "AIRFLOW_CONN_SFTP_PRIVATE_KEY_NO_PASS",
134
+ "sftp://testuser@testhost?private_key=PRIVATE_KEY_CONTENT",
135
+ )
136
+
137
+ get_fs(conn_id="sftp_private_key_no_pass")
138
+
139
+ mock_sshfs.assert_called_once()
140
+ call_kwargs = mock_sshfs.call_args.kwargs
141
+ assert call_kwargs["client_keys"] == ["PRIVATE_KEY_CONTENT"]
142
+ assert "passphrase" not in call_kwargs
143
+
144
+ @patch("sshfs.SSHFileSystem", autospec=True)
145
+ def test_get_fs_with_no_host_key_check(self, mock_sshfs):
146
+ from airflow.providers.sftp.fs.sftp import get_fs
147
+
148
+ with pytest.MonkeyPatch.context() as mp_ctx:
149
+ mp_ctx.setenv(
150
+ "AIRFLOW_CONN_SFTP_NO_HOST_CHECK",
151
+ "sftp://testuser@testhost?no_host_key_check=true",
152
+ )
153
+
154
+ get_fs(conn_id="sftp_no_host_check")
155
+
156
+ mock_sshfs.assert_called_once()
157
+ call_kwargs = mock_sshfs.call_args.kwargs
158
+ assert call_kwargs["known_hosts"] is None
159
+
160
+ @patch("sshfs.SSHFileSystem", autospec=True)
161
+ def test_get_fs_default_port(self, mock_sshfs):
162
+ from airflow.providers.sftp.fs.sftp import get_fs
163
+
164
+ with pytest.MonkeyPatch.context() as mp_ctx:
165
+ mp_ctx.setenv(
166
+ "AIRFLOW_CONN_SFTP_DEFAULT_PORT",
167
+ "sftp://testuser@testhost",
168
+ )
169
+
170
+ get_fs(conn_id="sftp_default_port")
171
+
172
+ mock_sshfs.assert_called_once()
173
+ call_kwargs = mock_sshfs.call_args.kwargs
174
+ assert call_kwargs["port"] == 22
175
+
176
+ @patch("sshfs.SSHFileSystem", autospec=True)
177
+ def test_get_fs_without_password(self, mock_sshfs):
178
+ from airflow.providers.sftp.fs.sftp import get_fs
179
+
180
+ with pytest.MonkeyPatch.context() as mp_ctx:
181
+ mp_ctx.setenv(
182
+ "AIRFLOW_CONN_SFTP_NO_PASSWORD",
183
+ "sftp://testuser@testhost",
184
+ )
185
+
186
+ get_fs(conn_id="sftp_no_password")
187
+
188
+ mock_sshfs.assert_called_once()
189
+ call_kwargs = mock_sshfs.call_args.kwargs
190
+ assert "password" not in call_kwargs
191
+
192
+ @patch("sshfs.SSHFileSystem", autospec=True)
193
+ def test_get_fs_host_key_check_enabled_by_default(self, mock_sshfs):
194
+ from airflow.providers.sftp.fs.sftp import get_fs
195
+
196
+ with pytest.MonkeyPatch.context() as mp_ctx:
197
+ mp_ctx.setenv(
198
+ "AIRFLOW_CONN_SFTP_HOST_CHECK_DEFAULT",
199
+ "sftp://testuser@testhost",
200
+ )
201
+
202
+ get_fs(conn_id="sftp_host_check_default")
203
+
204
+ mock_sshfs.assert_called_once()
205
+ call_kwargs = mock_sshfs.call_args.kwargs
206
+ assert "known_hosts" not in call_kwargs
207
+
208
+ @patch("sshfs.SSHFileSystem", autospec=True)
209
+ def test_get_fs_host_key_check_explicit_false(self, mock_sshfs):
210
+ from airflow.providers.sftp.fs.sftp import get_fs
211
+
212
+ with pytest.MonkeyPatch.context() as mp_ctx:
213
+ mp_ctx.setenv(
214
+ "AIRFLOW_CONN_SFTP_HOST_CHECK_FALSE",
215
+ "sftp://testuser@testhost?no_host_key_check=false",
216
+ )
217
+
218
+ get_fs(conn_id="sftp_host_check_false")
219
+
220
+ mock_sshfs.assert_called_once()
221
+ call_kwargs = mock_sshfs.call_args.kwargs
222
+ assert "known_hosts" not in call_kwargs
@@ -734,7 +734,7 @@ class MockAirflowConnectionWithPrivate:
734
734
 
735
735
  class TestSFTPHookAsync:
736
736
  @patch("asyncssh.connect", new_callable=AsyncMock)
737
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
737
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
738
738
  @pytest.mark.asyncio
739
739
  async def test_extra_dejson_fields_for_connection_building_known_hosts_none(
740
740
  self, mock_get_connection, mock_connect, caplog
@@ -775,7 +775,7 @@ class TestSFTPHookAsync:
775
775
  )
776
776
  @patch("asyncssh.connect", new_callable=AsyncMock)
777
777
  @patch("asyncssh.import_private_key")
778
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
778
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
779
779
  @pytest.mark.asyncio
780
780
  async def test_extra_dejson_fields_for_connection_with_host_key(
781
781
  self,
@@ -799,7 +799,7 @@ class TestSFTPHookAsync:
799
799
  assert hook.known_hosts == f"localhost {mock_host_key}".encode()
800
800
 
801
801
  @patch("asyncssh.connect", new_callable=AsyncMock)
802
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
802
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
803
803
  @pytest.mark.asyncio
804
804
  async def test_extra_dejson_fields_for_connection_raises_valuerror(
805
805
  self, mock_get_connection, mock_connect
@@ -820,7 +820,7 @@ class TestSFTPHookAsync:
820
820
  @patch("paramiko.SSHClient.connect")
821
821
  @patch("asyncssh.import_private_key")
822
822
  @patch("asyncssh.connect", new_callable=AsyncMock)
823
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
823
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
824
824
  @pytest.mark.asyncio
825
825
  async def test_no_host_key_check_set_logs_warning(
826
826
  self, mock_get_connection, mock_connect, mock_import_pkey, mock_ssh_connect, caplog
@@ -833,7 +833,7 @@ class TestSFTPHookAsync:
833
833
  assert "No Host Key Verification. This won't protect against Man-In-The-Middle attacks" in caplog.text
834
834
 
835
835
  @patch("asyncssh.connect", new_callable=AsyncMock)
836
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
836
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
837
837
  @pytest.mark.asyncio
838
838
  async def test_extra_dejson_fields_for_connection_building(self, mock_get_connection, mock_connect):
839
839
  """
@@ -861,7 +861,7 @@ class TestSFTPHookAsync:
861
861
  @pytest.mark.asyncio
862
862
  @patch("asyncssh.connect", new_callable=AsyncMock)
863
863
  @patch("asyncssh.import_private_key")
864
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
864
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
865
865
  async def test_connection_private(self, mock_get_connection, mock_import_private_key, mock_connect):
866
866
  """
867
867
  Assert that connection details with private key passed through the extra field in the Airflow connection
@@ -888,7 +888,7 @@ class TestSFTPHookAsync:
888
888
 
889
889
  @pytest.mark.asyncio
890
890
  @patch("asyncssh.connect", new_callable=AsyncMock)
891
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
891
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
892
892
  async def test_connection_port_default_to_22(self, mock_get_connection, mock_connect):
893
893
  from unittest.mock import Mock, call
894
894
 
@@ -917,7 +917,7 @@ class TestSFTPHookAsync:
917
917
 
918
918
  @pytest.mark.asyncio
919
919
  @patch("asyncssh.connect", new_callable=AsyncMock)
920
- @patch("airflow.providers.sftp.hooks.sftp.SFTPHookAsync.get_connection")
920
+ @patch("airflow.providers.sftp.hooks.sftp.get_async_connection")
921
921
  async def test_init_argument_not_ignored(self, mock_get_connection, mock_connect):
922
922
  from unittest.mock import Mock, call
923
923
 
@@ -138,16 +138,16 @@ class TestSFTPOperator:
138
138
  do_xcom_push=True,
139
139
  )
140
140
 
141
- tis = {ti.task_id: ti for ti in dag_maker.create_dagrun().task_instances}
142
- tis["put_test_task"].run()
143
- tis["check_file_task"].run()
141
+ dr = dag_maker.create_dagrun()
142
+ dag_maker.run_ti("put_test_task", dr)
143
+ check_file_task_ti = dag_maker.run_ti("check_file_task", dr)
144
144
 
145
- pulled = tis["check_file_task"].xcom_pull(task_ids="check_file_task", key="return_value")
145
+ pulled = check_file_task_ti.xcom_pull(task_ids="check_file_task", key="return_value")
146
146
  assert pulled.strip() == test_local_file_content
147
147
 
148
148
  @pytest.mark.skipif(AIRFLOW_V_3_0_PLUS, reason="Pickle support is removed in Airflow 3")
149
149
  @conf_vars({("core", "enable_xcom_pickling"): "True"})
150
- def test_file_transfer_no_intermediate_dir_error_put(self, create_task_instance_of_operator):
150
+ def test_file_transfer_no_intermediate_dir_error_put(self, dag_maker):
151
151
  test_local_file_content = (
152
152
  b"This is local file content \n which is multiline "
153
153
  b"continuing....with other character\nanother line here \n this is last line"
@@ -158,20 +158,17 @@ class TestSFTPOperator:
158
158
 
159
159
  # Try to put test file to remote. This should raise an error with
160
160
  # "No such file" as the directory does not exist.
161
- ti2 = create_task_instance_of_operator(
162
- SFTPOperator,
163
- dag_id="unit_tests_sftp_op_file_transfer_no_intermediate_dir_error_put",
164
- task_id="test_sftp",
165
- sftp_hook=self.sftp_hook,
166
- local_filepath=self.test_local_filepath,
167
- remote_filepath=self.test_remote_filepath_int_dir,
168
- operation=SFTPOperation.PUT,
169
- create_intermediate_dirs=False,
170
- )
171
- with (
172
- pytest.raises(AirflowException) as ctx,
173
- ):
174
- ti2.run()
161
+ with dag_maker(dag_id="unit_tests_sftp_op_file_transfer_no_intermediate_dir_error_put"):
162
+ SFTPOperator(
163
+ task_id="test_sftp",
164
+ sftp_hook=self.sftp_hook,
165
+ local_filepath=self.test_local_filepath,
166
+ remote_filepath=self.test_remote_filepath_int_dir,
167
+ operation=SFTPOperation.PUT,
168
+ create_intermediate_dirs=False,
169
+ )
170
+ with pytest.raises(AirflowException) as ctx:
171
+ dag_maker.run_ti("test_sftp")
175
172
  assert "No such file" in str(ctx.value)
176
173
 
177
174
  @pytest.mark.skipif(AIRFLOW_V_3_0_PLUS, reason="Pickle support is removed in Airflow 3")
@@ -201,10 +198,9 @@ class TestSFTPOperator:
201
198
  do_xcom_push=True,
202
199
  )
203
200
  dagrun = dag_maker.create_dagrun(logical_date=timezone.utcnow())
204
- tis = {ti.task_id: ti for ti in dagrun.task_instances}
205
- tis["test_sftp"].run()
206
- tis["test_check_file"].run()
207
- pulled = tis["test_check_file"].xcom_pull(task_ids="test_check_file", key="return_value")
201
+ dag_maker.run_ti("test_sftp", dagrun)
202
+ test_check_file_ti = dag_maker.run_ti("test_check_file", dagrun)
203
+ pulled = test_check_file_ti.xcom_pull(task_ids="test_check_file", key="return_value")
208
204
  assert pulled.strip() == test_local_file_content
209
205
 
210
206
  @conf_vars({("core", "enable_xcom_pickling"): "False"})
@@ -232,12 +228,10 @@ class TestSFTPOperator:
232
228
  do_xcom_push=True,
233
229
  )
234
230
  dagrun = dag_maker.create_dagrun(logical_date=timezone.utcnow())
235
- tis = {ti.task_id: ti for ti in dagrun.task_instances}
231
+ dag_maker.run_ti("put_test_task", dagrun)
232
+ check_file_task_ti = dag_maker.run_ti("check_file_task", dagrun)
236
233
 
237
- tis["put_test_task"].run()
238
- tis["check_file_task"].run()
239
-
240
- pulled = tis["check_file_task"].xcom_pull(task_ids="check_file_task", key="return_value")
234
+ pulled = check_file_task_ti.xcom_pull(task_ids="check_file_task", key="return_value")
241
235
  assert pulled.strip() == b64encode(test_local_file_content).decode("utf-8")
242
236
 
243
237
  @pytest.fixture
@@ -258,8 +252,7 @@ class TestSFTPOperator:
258
252
  remote_filepath=self.test_remote_filepath,
259
253
  operation=SFTPOperation.GET,
260
254
  )
261
- for ti in dag_maker.create_dagrun(logical_date=timezone.utcnow()).task_instances:
262
- ti.run()
255
+ dag_maker.run_ti("test_sftp")
263
256
 
264
257
  # Test the received content.
265
258
  with open(self.test_local_filepath, "rb") as file:
@@ -276,8 +269,7 @@ class TestSFTPOperator:
276
269
  remote_filepath=self.test_remote_filepath,
277
270
  operation=SFTPOperation.GET,
278
271
  )
279
- for ti in dag_maker.create_dagrun(logical_date=timezone.utcnow()).task_instances:
280
- ti.run()
272
+ dag_maker.run_ti("test_sftp")
281
273
 
282
274
  # Test the received content.
283
275
  content_received = None
@@ -297,14 +289,11 @@ class TestSFTPOperator:
297
289
  operation=SFTPOperation.GET,
298
290
  )
299
291
 
300
- for ti in dag_maker.create_dagrun(logical_date=timezone.utcnow()).task_instances:
301
- # This should raise an error with "No such file" as the directory
302
- # does not exist.
303
- with (
304
- pytest.raises(AirflowException) as ctx,
305
- ):
306
- ti.run()
307
- assert "No such file" in str(ctx.value)
292
+ # This should raise an error with "No such file" as the directory
293
+ # does not exist.
294
+ with pytest.raises(AirflowException) as ctx:
295
+ dag_maker.run_ti("test_sftp")
296
+ assert "No such file" in str(ctx.value)
308
297
 
309
298
  @pytest.mark.skipif(AIRFLOW_V_3_0_PLUS, reason="Pickle support is removed in Airflow 3")
310
299
  @conf_vars({("core", "enable_xcom_pickling"): "True"})
@@ -318,9 +307,7 @@ class TestSFTPOperator:
318
307
  operation=SFTPOperation.GET,
319
308
  create_intermediate_dirs=True,
320
309
  )
321
-
322
- for ti in dag_maker.create_dagrun(logical_date=timezone.utcnow()).task_instances:
323
- ti.run()
310
+ dag_maker.run_ti("test_sftp")
324
311
 
325
312
  # Test the received content.
326
313
  content_received = None
@@ -0,0 +1,16 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
@@ -0,0 +1,16 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.