apache-airflow-providers-sftp 5.1.2__py3-none-any.whl → 5.2.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/sftp/__init__.py +1 -1
- airflow/providers/sftp/decorators/sensors/sftp.py +10 -1
- airflow/providers/sftp/get_provider_info.py +0 -59
- airflow/providers/sftp/hooks/sftp.py +123 -0
- airflow/providers/sftp/operators/sftp.py +26 -4
- airflow/providers/sftp/version_compat.py +36 -0
- {apache_airflow_providers_sftp-5.1.2.dist-info → apache_airflow_providers_sftp-5.2.0rc1.dist-info}/METADATA +8 -8
- apache_airflow_providers_sftp-5.2.0rc1.dist-info/RECORD +19 -0
- {apache_airflow_providers_sftp-5.1.2.dist-info → apache_airflow_providers_sftp-5.2.0rc1.dist-info}/WHEEL +1 -1
- apache_airflow_providers_sftp-5.1.2.dist-info/RECORD +0 -18
- {apache_airflow_providers_sftp-5.1.2.dist-info → apache_airflow_providers_sftp-5.2.0rc1.dist-info}/entry_points.txt +0 -0

airflow/providers/sftp/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "5.1.2"
+__version__ = "5.2.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

airflow/providers/sftp/decorators/sensors/sftp.py
@@ -20,7 +20,16 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import Callable
 
-from airflow.decorators.base import TaskDecorator, get_unique_task_id, task_decorator_factory
+from airflow.providers.sftp.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.bases.decorator import TaskDecorator, get_unique_task_id, task_decorator_factory
+else:
+    from airflow.decorators.base import (  # type: ignore[no-redef]
+        TaskDecorator,
+        get_unique_task_id,
+        task_decorator_factory,
+    )
 from airflow.providers.sftp.sensors.sftp import SFTPSensor
 
 

airflow/providers/sftp/get_provider_info.py
@@ -26,54 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-sftp",
         "name": "SFTP",
         "description": "`SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__\n",
-        "state": "ready",
-        "source-date-epoch": 1743836705,
-        "versions": [
-            "5.1.2",
-            "5.1.1",
-            "5.1.0",
-            "5.0.0",
-            "4.11.1",
-            "4.11.0",
-            "4.10.3",
-            "4.10.2",
-            "4.10.1",
-            "4.10.0",
-            "4.9.1",
-            "4.9.0",
-            "4.8.1",
-            "4.8.0",
-            "4.7.0",
-            "4.6.1",
-            "4.6.0",
-            "4.5.0",
-            "4.4.0",
-            "4.3.1",
-            "4.3.0",
-            "4.2.4",
-            "4.2.3",
-            "4.2.2",
-            "4.2.1",
-            "4.2.0",
-            "4.1.0",
-            "4.0.0",
-            "3.0.0",
-            "2.6.0",
-            "2.5.2",
-            "2.5.1",
-            "2.5.0",
-            "2.4.1",
-            "2.4.0",
-            "2.3.0",
-            "2.2.0",
-            "2.1.1",
-            "2.1.0",
-            "2.0.0",
-            "1.2.0",
-            "1.1.1",
-            "1.1.0",
-            "1.0.0",
-        ],
         "integrations": [
             {
                 "integration-name": "SSH File Transfer Protocol (SFTP)",
@@ -118,15 +70,4 @@ def get_provider_info():
                 "python-modules": ["airflow.providers.sftp.triggers.sftp"],
             }
         ],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-ssh>=2.1.0",
-            "paramiko>=2.9.0",
-            "asyncssh>=2.12.0",
-        ],
-        "optional-dependencies": {
-            "common.compat": ["apache-airflow-providers-common-compat"],
-            "openlineage": ["apache-airflow-providers-openlineage"],
-        },
-        "devel-dependencies": [],
     }

airflow/providers/sftp/hooks/sftp.py
@@ -19,6 +19,7 @@
 
 from __future__ import annotations
 
+import concurrent.futures
 import datetime
 import os
 import stat
@@ -339,6 +340,63 @@ class SFTPHook(SSHHook):
             new_local_path = os.path.join(local_full_path, os.path.relpath(file_path, remote_full_path))
             self.retrieve_file(file_path, new_local_path, prefetch)
 
+    def retrieve_directory_concurrently(
+        self, remote_full_path: str, local_full_path: str, workers: int = os.cpu_count() or 2
+    ) -> None:
+        """
+        Transfer the remote directory to a local location concurrently.
+
+        If local_full_path is a string path, the directory will be put
+        at that location.
+
+        :param remote_full_path: full path to the remote directory
+        :param local_full_path: full path to the local directory
+        :param prefetch: controls whether prefetch is performed (default: True)
+        :param workers: number of workers to use for concurrent transfer (default: number of CPUs or 2 if undetermined)
+        """
+
+        def retrieve_file_chunk(
+            conn: SFTPClient, local_file_chunk: list[str], remote_file_chunk: list[str], prefetch: bool = True
+        ):
+            for local_file, remote_file in zip(local_file_chunk, remote_file_chunk):
+                conn.get(remote_file, local_file, prefetch=prefetch)
+
+        with self.get_managed_conn():
+            if Path(local_full_path).exists():
+                raise AirflowException(f"{local_full_path} already exists")
+            Path(local_full_path).mkdir(parents=True)
+            new_local_file_paths, remote_file_paths = [], []
+            files, dirs, _ = self.get_tree_map(remote_full_path)
+            for dir_path in dirs:
+                new_local_path = os.path.join(local_full_path, os.path.relpath(dir_path, remote_full_path))
+                Path(new_local_path).mkdir(parents=True, exist_ok=True)
+            for file in files:
+                remote_file_paths.append(file)
+                new_local_file_paths.append(
+                    os.path.join(local_full_path, os.path.relpath(file, remote_full_path))
+                )
+            remote_file_chunks = [remote_file_paths[i::workers] for i in range(workers)]
+            local_file_chunks = [new_local_file_paths[i::workers] for i in range(workers)]
+            self.log.info("Opening %s new SFTP connections", workers)
+            conns = [SFTPHook(ssh_conn_id=self.ssh_conn_id).get_conn() for _ in range(workers)]
+            try:
+                self.log.info("Retrieving files concurrently with %s threads", workers)
+                with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
+                    futures = [
+                        executor.submit(
+                            retrieve_file_chunk,
+                            conns[i],
+                            local_file_chunks[i],
+                            remote_file_chunks[i],
+                        )
+                        for i in range(workers)
+                    ]
+                    for future in concurrent.futures.as_completed(futures):
+                        future.result()
+            finally:
+                for conn in conns:
+                    conn.close()
+
     def store_directory(self, remote_full_path: str, local_full_path: str, confirm: bool = True) -> None:
         """
         Transfer a local directory to the remote location.
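The new method splits the file list across workers by stride slicing (`paths[i::workers]`), so each thread works through its own chunk on its own SFTP connection. A minimal standalone sketch of that splitting, using hypothetical file names:

```python
# Round-robin split of a hypothetical file list across 3 workers via stride slicing.
remote_file_paths = ["a.csv", "b.csv", "c.csv", "d.csv", "e.csv"]
workers = 3
chunks = [remote_file_paths[i::workers] for i in range(workers)]
print(chunks)  # [['a.csv', 'd.csv'], ['b.csv', 'e.csv'], ['c.csv']]
```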
@@ -367,6 +425,71 @@ class SFTPHook(SSHHook):
             )
             self.store_file(new_remote_path, file_path, confirm)
 
+    def store_directory_concurrently(
+        self,
+        remote_full_path: str,
+        local_full_path: str,
+        confirm: bool = True,
+        workers: int = os.cpu_count() or 2,
+    ) -> None:
+        """
+        Transfer a local directory to the remote location concurrently.
+
+        If local_full_path is a string path, the directory will be read
+        from that location.
+
+        :param remote_full_path: full path to the remote directory
+        :param local_full_path: full path to the local directory
+        :param confirm: whether to confirm the file size after transfer (default: True)
+        :param workers: number of workers to use for concurrent transfer (default: number of CPUs or 2 if undetermined)
+        """
+
+        def store_file_chunk(
+            conn: SFTPClient, local_file_chunk: list[str], remote_file_chunk: list[str], confirm: bool
+        ):
+            for local_file, remote_file in zip(local_file_chunk, remote_file_chunk):
+                conn.put(local_file, remote_file, confirm=confirm)
+
+        with self.get_managed_conn():
+            if self.path_exists(remote_full_path):
+                raise AirflowException(f"{remote_full_path} already exists")
+            self.create_directory(remote_full_path)
+
+            local_file_paths, new_remote_file_paths = [], []
+            for root, dirs, files in os.walk(local_full_path):
+                for dir_name in dirs:
+                    dir_path = os.path.join(root, dir_name)
+                    new_remote_path = os.path.join(
+                        remote_full_path, os.path.relpath(dir_path, local_full_path)
+                    )
+                    self.create_directory(new_remote_path)
+                for file_name in files:
+                    file_path = os.path.join(root, file_name)
+                    new_remote_path = os.path.join(
+                        remote_full_path, os.path.relpath(file_path, local_full_path)
+                    )
+                    local_file_paths.append(file_path)
+                    new_remote_file_paths.append(new_remote_path)
+
+            remote_file_chunks = [new_remote_file_paths[i::workers] for i in range(workers)]
+            local_file_chunks = [local_file_paths[i::workers] for i in range(workers)]
+            self.log.info("Opening %s new SFTP connections", workers)
+            conns = [SFTPHook(ssh_conn_id=self.ssh_conn_id).get_conn() for _ in range(workers)]
+            try:
+                self.log.info("Storing files concurrently with %s threads", workers)
+                with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
+                    futures = [
+                        executor.submit(
+                            store_file_chunk, conns[i], local_file_chunks[i], remote_file_chunks[i], confirm
+                        )
+                        for i in range(workers)
+                    ]
+                    for future in concurrent.futures.as_completed(futures):
+                        future.result()
+            finally:
+                for conn in conns:
+                    conn.close()
+
     def get_mod_time(self, path: str) -> str:
         """
         Get an entry's modification time.
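A hedged usage sketch of the two new hook methods; the connection id, paths, and worker count below are placeholders, not values taken from this release. Both methods refuse to overwrite an existing destination directory and open one extra SFTP connection per worker:

```python
from airflow.providers.sftp.hooks.sftp import SFTPHook

# Placeholder connection id and paths for illustration only.
hook = SFTPHook(ssh_conn_id="sftp_default")

# Download a remote tree into a not-yet-existing local directory using 4 connections.
hook.retrieve_directory_concurrently("/remote/exports", "/tmp/exports", workers=4)

# Upload a local tree to a not-yet-existing remote directory, confirming file sizes.
hook.store_directory_concurrently("/remote/uploads", "/tmp/uploads", confirm=True, workers=4)
```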

airflow/providers/sftp/operators/sftp.py
@@ -74,6 +74,8 @@ class SFTPOperator(BaseOperator):
             create_intermediate_dirs=True,
             dag=dag,
         )
+    :param concurrency: Number of threads when transferring directories. Each thread opens a new SFTP connection.
+        This parameter is used only when transferring directories, not individual files. (Default is 1)
 
     """
 
@@ -90,6 +92,7 @@ class SFTPOperator(BaseOperator):
         operation: str = SFTPOperation.PUT,
         confirm: bool = True,
         create_intermediate_dirs: bool = False,
+        concurrency: int = 1,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -101,6 +104,7 @@ class SFTPOperator(BaseOperator):
         self.create_intermediate_dirs = create_intermediate_dirs
         self.local_filepath = local_filepath
         self.remote_filepath = remote_filepath
+        self.concurrency = concurrency
 
     def execute(self, context: Any) -> str | list[str] | None:
         if self.local_filepath is None:
@@ -132,6 +136,9 @@ class SFTPOperator(BaseOperator):
                 f"expected {SFTPOperation.GET} or {SFTPOperation.PUT} or {SFTPOperation.DELETE}."
             )
 
+        if self.concurrency < 1:
+            raise ValueError(f"concurrency should be greater than 0, got {self.concurrency}")
+
         file_msg = None
         try:
             if self.ssh_conn_id:
@@ -161,7 +168,14 @@ class SFTPOperator(BaseOperator):
                     file_msg = f"from {_remote_filepath} to {_local_filepath}"
                     self.log.info("Starting to transfer %s", file_msg)
                     if self.sftp_hook.isdir(_remote_filepath):
-                        self.sftp_hook.retrieve_directory(_remote_filepath, _local_filepath)
+                        if self.concurrency > 1:
+                            self.sftp_hook.retrieve_directory_concurrently(
+                                _remote_filepath,
+                                _local_filepath,
+                                workers=self.concurrency,
+                            )
+                        elif self.concurrency == 1:
+                            self.sftp_hook.retrieve_directory(_remote_filepath, _local_filepath)
                     else:
                         self.sftp_hook.retrieve_file(_remote_filepath, _local_filepath)
                 elif self.operation.lower() == SFTPOperation.PUT:
@@ -171,9 +185,17 @@ class SFTPOperator(BaseOperator):
                     file_msg = f"from {_local_filepath} to {_remote_filepath}"
                     self.log.info("Starting to transfer file %s", file_msg)
                     if os.path.isdir(_local_filepath):
-                        self.sftp_hook.store_directory(
-                            _remote_filepath, _local_filepath, confirm=self.confirm
-                        )
+                        if self.concurrency > 1:
+                            self.sftp_hook.store_directory_concurrently(
+                                _remote_filepath,
+                                _local_filepath,
+                                confirm=self.confirm,
+                                workers=self.concurrency,
+                            )
+                        elif self.concurrency == 1:
+                            self.sftp_hook.store_directory(
+                                _remote_filepath, _local_filepath, confirm=self.confirm
+                            )
                     else:
                         self.sftp_hook.store_file(_remote_filepath, _local_filepath, confirm=self.confirm)
                 elif self.operation.lower() == SFTPOperation.DELETE:
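At the operator level, the new `concurrency` parameter only takes effect when the transferred path is a directory; with the default of 1 the existing single-connection `retrieve_directory`/`store_directory` code path is used. A hedged sketch of a task definition, with placeholder task id, connection id, and paths:

```python
from airflow.providers.sftp.operators.sftp import SFTPOperation, SFTPOperator

# Placeholder task: uploads a local directory over 4 parallel SFTP connections.
upload_reports = SFTPOperator(
    task_id="upload_reports",
    ssh_conn_id="sftp_default",
    local_filepath="/tmp/reports",
    remote_filepath="/remote/reports",
    operation=SFTPOperation.PUT,
    create_intermediate_dirs=True,
    concurrency=4,
)
```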

airflow/providers/sftp/version_compat.py
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
+# DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
+# ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
+# THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
+#
+from __future__ import annotations
+
+
+def get_base_airflow_version_tuple() -> tuple[int, int, int]:
+    from packaging.version import Version
+
+    from airflow import __version__
+
+    airflow_version = Version(__version__)
+    return airflow_version.major, airflow_version.minor, airflow_version.micro
+
+
+AIRFLOW_V_2_10_PLUS = get_base_airflow_version_tuple() >= (2, 10, 0)
+AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)

{apache_airflow_providers_sftp-5.1.2.dist-info → apache_airflow_providers_sftp-5.2.0rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-sftp
-Version: 5.1.2
+Version: 5.2.0rc1
 Summary: Provider package apache-airflow-providers-sftp for Apache Airflow
 Keywords: airflow-provider,sftp,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,15 +20,15 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
-Requires-Dist: apache-airflow-providers-ssh>=2.1.0
+Requires-Dist: apache-airflow>=2.9.0rc0
+Requires-Dist: apache-airflow-providers-ssh>=2.1.0rc0
 Requires-Dist: paramiko>=2.9.0
 Requires-Dist: asyncssh>=2.12.0
 Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.1.2/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.1.2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.2.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.2.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -61,7 +61,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-sftp``
 
-Release: ``5.1.2``
+Release: ``5.2.0``
 
 
 `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
@@ -74,7 +74,7 @@ This is a provider package for ``sftp`` provider. All classes for this provider
 are in ``airflow.providers.sftp`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.1.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.2.0/>`_.
 
 Installation
 ------------
@@ -119,5 +119,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.1.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-sftp/5.2.0/changelog.html>`_.
 

apache_airflow_providers_sftp-5.2.0rc1.dist-info/RECORD
@@ -0,0 +1,19 @@
+airflow/providers/sftp/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+airflow/providers/sftp/__init__.py,sha256=MgTmkrkMblXGjX5-oMRC3zs1BqvT-AiuyFUkdo0vxHQ,1491
+airflow/providers/sftp/get_provider_info.py,sha256=_IqUGQ-rKpZsSAsXdTsGYzfzJ3X57duhn-2b0-rFOz0,2905
+airflow/providers/sftp/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/sftp/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/sftp/decorators/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/sftp/decorators/sensors/sftp.py,sha256=htmylsF0h93Loova_rq7_ebJQAjBgJlbHgC9rSVpwww,3152
+airflow/providers/sftp/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/sftp/hooks/sftp.py,sha256=JlTt-JFP-2L8pGFARYDJXOJ-wEQuI0VZjK6ZQi9C3us,32315
+airflow/providers/sftp/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/sftp/operators/sftp.py,sha256=WSHPIGLUZD62knz_uBweVfMIi4tqMCDmUHBl9umKrbY,12940
+airflow/providers/sftp/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/sftp/sensors/sftp.py,sha256=WpotlHzsWRgtJnu44zEKU3hagKT1J634M-jQYvQ9Z3M,8301
+airflow/providers/sftp/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/sftp/triggers/sftp.py,sha256=fSi-I5FocNQblHt4GYfGispFgOOl8XQ9Vk9ZFLcv_Sw,6182
+apache_airflow_providers_sftp-5.2.0rc1.dist-info/entry_points.txt,sha256=Fa1IkUHV6qnIuwLd0U7tKoklbLXXVrbB2hhG6N7Q-zo,100
+apache_airflow_providers_sftp-5.2.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_sftp-5.2.0rc1.dist-info/METADATA,sha256=HLOKnf-O-20vDvA4v6_NK_3i6Fgos-v86fuhkirFCWM,5686
+apache_airflow_providers_sftp-5.2.0rc1.dist-info/RECORD,,

apache_airflow_providers_sftp-5.1.2.dist-info/RECORD
@@ -1,18 +0,0 @@
-airflow/providers/sftp/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/sftp/__init__.py,sha256=WdG_S0JGjH55B8nVQJCyjdE-82LLOGB5teA3VeIPUC8,1491
-airflow/providers/sftp/get_provider_info.py,sha256=RSPeKARVeBql8BB3Wt4PRAHxTvydeRz5Y46QsDzZlcY,4345
-airflow/providers/sftp/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/sftp/decorators/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/sftp/decorators/sensors/sftp.py,sha256=deps7xldfdLcO5mwxuZ9SJ7wFNrf2DZWMSt_KYMmGG4,2888
-airflow/providers/sftp/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/sftp/hooks/sftp.py,sha256=srBUSfeqxI37T7ytSfAQo8zX-GDWoDdWKFXHD1xwHCs,26457
-airflow/providers/sftp/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/sftp/operators/sftp.py,sha256=skNYBFzdZUnkU_SpDf_m6HxnUuTVd5KDwJDRJnly7NE,11670
-airflow/providers/sftp/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/sftp/sensors/sftp.py,sha256=WpotlHzsWRgtJnu44zEKU3hagKT1J634M-jQYvQ9Z3M,8301
-airflow/providers/sftp/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/sftp/triggers/sftp.py,sha256=fSi-I5FocNQblHt4GYfGispFgOOl8XQ9Vk9ZFLcv_Sw,6182
-apache_airflow_providers_sftp-5.1.2.dist-info/entry_points.txt,sha256=Fa1IkUHV6qnIuwLd0U7tKoklbLXXVrbB2hhG6N7Q-zo,100
-apache_airflow_providers_sftp-5.1.2.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
-apache_airflow_providers_sftp-5.1.2.dist-info/METADATA,sha256=5Ls4zFXTOZP0560ym6NzVQXRpWhPz1Sb8K9nJW0aRJQ,5677
-apache_airflow_providers_sftp-5.1.2.dist-info/RECORD,,

{apache_airflow_providers_sftp-5.1.2.dist-info → apache_airflow_providers_sftp-5.2.0rc1.dist-info}/entry_points.txt
File without changes