apache-airflow-providers-standard 1.1.0__py3-none-any.whl → 1.2.0rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.1.0"
+__version__ = "1.2.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.9.0"
+    "2.10.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.9.0+"
+        f"The package `apache-airflow-providers-standard:{__version__}` needs Apache Airflow 2.10.0+"
     )
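
Note on the version gate above: the check parses base_version, so a pre-release such as 2.10.0rc1 still satisfies the >= 2.10.0 floor. A quick illustration with packaging (not part of the package itself):

    import packaging.version as v

    rc = v.parse("2.10.0rc1")
    print(rc < v.parse("2.10.0"))                         # True: rc1 precedes the final release
    print(v.parse(rc.base_version) >= v.parse("2.10.0"))  # True: base_version drops the rc suffix
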
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Exceptions used by Standard Provider."""
+
+from __future__ import annotations
+
+from airflow.exceptions import AirflowException
+
+
+class AirflowExternalTaskSensorException(AirflowException):
+    """Base exception for all ExternalTaskSensor related errors."""
+
+
+class ExternalDagNotFoundError(AirflowExternalTaskSensorException):
+    """Raised when the external DAG does not exist."""
+
+
+class ExternalDagDeletedError(AirflowExternalTaskSensorException):
+    """Raised when the external DAG was deleted."""
+
+
+class ExternalTaskNotFoundError(AirflowExternalTaskSensorException):
+    """Raised when the external task does not exist."""
+
+
+class ExternalTaskGroupNotFoundError(AirflowExternalTaskSensorException):
+    """Raised when the external task group does not exist."""
+
+
+class ExternalTaskFailedError(AirflowExternalTaskSensorException):
+    """Raised when the external task failed."""
+
+
+class ExternalTaskGroupFailedError(AirflowExternalTaskSensorException):
+    """Raised when the external task group failed."""
+
+
+class ExternalDagFailedError(AirflowExternalTaskSensorException):
+    """Raised when the external DAG failed."""
+
+
+class DuplicateStateError(AirflowExternalTaskSensorException):
+    """Raised when duplicate states are provided across allowed, skipped and failed states."""
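
The new module gives each ExternalTaskSensor failure mode its own exception type where earlier releases raised a bare AirflowException. A minimal sketch of how calling code could now tell the cases apart (the classify helper is illustrative, not part of the package):

    from airflow.providers.standard.exceptions import (
        AirflowExternalTaskSensorException,
        ExternalDagFailedError,
        ExternalDagNotFoundError,
    )

    def classify(exc: Exception) -> str:
        # Map a sensor error to a coarse handling category.
        if isinstance(exc, ExternalDagNotFoundError):
            return "misconfiguration"  # wrong dag_id or DAG not deployed
        if isinstance(exc, ExternalDagFailedError):
            return "upstream-failure"  # the watched DAG actually failed
        if isinstance(exc, AirflowExternalTaskSensorException):
            return "sensor-error"      # any other sensor-specific error
        raise exc                      # not a sensor error: propagate

    print(classify(ExternalDagNotFoundError("The external DAG x does not exist.")))
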
@@ -21,11 +21,13 @@ import inspect
 import json
 import logging
 import os
+import re
 import shutil
 import subprocess
 import sys
 import textwrap
 import types
+import warnings
 from abc import ABCMeta, abstractmethod
 from collections.abc import Collection, Container, Iterable, Mapping, Sequence
 from functools import cache
@@ -34,29 +36,33 @@ from tempfile import TemporaryDirectory
 from typing import TYPE_CHECKING, Any, Callable, NamedTuple, cast
 
 import lazy_object_proxy
+from packaging.requirements import InvalidRequirement, Requirement
+from packaging.specifiers import InvalidSpecifier
+from packaging.version import InvalidVersion
 
 from airflow.exceptions import (
     AirflowConfigException,
     AirflowException,
+    AirflowProviderDeprecationWarning,
     AirflowSkipException,
     DeserializingResultError,
 )
 from airflow.models.baseoperator import BaseOperator
 from airflow.models.variable import Variable
 from airflow.providers.standard.utils.python_virtualenv import prepare_virtualenv, write_python_script
-from airflow.providers.standard.version_compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.utils import hashlib_wrapper
 from airflow.utils.context import context_copy_partial, context_merge
 from airflow.utils.file import get_unique_dag_module_name
 from airflow.utils.operator_helpers import KeywordParameters
-from airflow.utils.process_utils import execute_in_subprocess, execute_in_subprocess_with_kwargs
+from airflow.utils.process_utils import execute_in_subprocess
 
 if AIRFLOW_V_3_0_PLUS:
-    from airflow.providers.standard.operators.branch import BranchMixIn
+    from airflow.providers.standard.operators.branch import BaseBranchOperator
     from airflow.providers.standard.utils.skipmixin import SkipMixin
 else:
     from airflow.models.skipmixin import SkipMixin
-    from airflow.operators.branch import BranchMixIn  # type: ignore[no-redef]
+    from airflow.operators.branch import BaseBranchOperator  # type: ignore[no-redef]
 
 
 log = logging.getLogger(__name__)
@@ -200,12 +206,10 @@ class PythonOperator(BaseOperator):
                 from airflow.sdk.execution_time.context import context_get_outlet_events
 
                 return create_executable_runner, context_get_outlet_events(context)
-            if AIRFLOW_V_2_10_PLUS:
-                from airflow.utils.context import context_get_outlet_events  # type: ignore
-                from airflow.utils.operator_helpers import ExecutionCallableRunner  # type: ignore
+            from airflow.utils.context import context_get_outlet_events  # type: ignore
+            from airflow.utils.operator_helpers import ExecutionCallableRunner  # type: ignore
 
-                return ExecutionCallableRunner, context_get_outlet_events(context)
-            return None
+            return ExecutionCallableRunner, context_get_outlet_events(context)
 
         self.__prepare_execution = __prepare_execution
 
@@ -235,7 +239,7 @@ class PythonOperator(BaseOperator):
         return runner.run(*self.op_args, **self.op_kwargs)
 
 
-class BranchPythonOperator(PythonOperator, BranchMixIn):
+class BranchPythonOperator(BaseBranchOperator, PythonOperator):
     """
     A workflow can "branch" or follow a path after the execution of this task.
 
@@ -249,10 +253,8 @@ class BranchPythonOperator(PythonOperator, BranchMixIn):
     the DAG run's state to be inferred.
     """
 
-    inherits_from_skipmixin = True
-
-    def execute(self, context: Context) -> Any:
-        return self.do_branch(context, super().execute(context))
+    def choose_branch(self, context: Context) -> str | Iterable[str]:
+        return PythonOperator.execute(self, context)
 
 
 class ShortCircuitOperator(PythonOperator, SkipMixin):
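
With this change the branch operators stop overriding execute()/do_branch() and instead implement BaseBranchOperator's choose_branch() hook, which returns the task id(s) to follow while the base class skips the rest. A sketch of the same pattern for a custom operator, assuming only the BaseBranchOperator import shown in this diff (the task ids and context key are hypothetical):

    from collections.abc import Iterable

    from airflow.providers.standard.operators.branch import BaseBranchOperator

    class WeekdayBranchOperator(BaseBranchOperator):
        # Only the branching decision is implemented; BaseBranchOperator.execute()
        # runs choose_branch() and skips the downstream paths not returned here.
        def choose_branch(self, context) -> str | Iterable[str]:
            if context["logical_date"].weekday() < 5:
                return "weekday_task"
            return "weekend_task"
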
@@ -560,16 +562,10 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
                 os.fspath(termination_log_path),
                 os.fspath(airflow_context_path),
             ]
-            if AIRFLOW_V_2_10_PLUS:
-                execute_in_subprocess(
-                    cmd=cmd,
-                    env=env_vars,
-                )
-            else:
-                execute_in_subprocess_with_kwargs(
-                    cmd=cmd,
-                    env=env_vars,
-                )
+            execute_in_subprocess(
+                cmd=cmd,
+                env=env_vars,
+            )
         except subprocess.CalledProcessError as e:
             if e.returncode in self.skip_on_exit_code:
                 raise AirflowSkipException(f"Process exited with code {e.returncode}. Skipping.")
@@ -856,14 +852,49 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
 
     def _iter_serializable_context_keys(self):
         yield from self.BASE_SERIALIZABLE_CONTEXT_KEYS
-        if self.system_site_packages or "apache-airflow" in self.requirements:
+
+        found_airflow = found_pendulum = False
+
+        if self.system_site_packages:
+            # If we're using system packages, assume both are present
+            found_airflow = found_pendulum = True
+        else:
+            requirements_iterable = []
+            if isinstance(self.requirements, str):
+                requirements_iterable = self.requirements.splitlines()
+            else:
+                for item in self.requirements:
+                    requirements_iterable.extend(item.splitlines())
+
+            for raw_str in requirements_iterable:
+                line = raw_str.strip()
+                # Skip blank lines and full-line comments
+                if not line or line.startswith("#"):
+                    continue
+
+                # Strip off any inline comment
+                # e.g. turn "foo==1.2.3   # comment" → "foo==1.2.3"
+                req_str = re.sub(r"#.*$", "", line).strip()
+
+                try:
+                    req = Requirement(req_str)
+                except (InvalidRequirement, InvalidSpecifier, InvalidVersion) as e:
+                    raise ValueError(f"Invalid requirement '{raw_str}': {e}") from e
+
+                if req.name == "apache-airflow":
+                    found_airflow = found_pendulum = True
+                    break
+                elif req.name == "pendulum":
+                    found_pendulum = True
+
+        if found_airflow:
             yield from self.AIRFLOW_SERIALIZABLE_CONTEXT_KEYS
             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
-        elif "pendulum" in self.requirements:
+        elif found_pendulum:
             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
 
 
-class BranchPythonVirtualenvOperator(PythonVirtualenvOperator, BranchMixIn):
+class BranchPythonVirtualenvOperator(BaseBranchOperator, PythonVirtualenvOperator):
     """
     A workflow can "branch" or follow a path after the execution of this task in a virtual environment.
 
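
The rewritten _iter_serializable_context_keys parses each requirement line with packaging instead of the old substring test ("apache-airflow" in self.requirements), which never split multi-line strings and would match any name merely containing the text. A standalone sketch of the same detection technique (the mentions helper is illustrative, not part of the package):

    import re

    from packaging.requirements import InvalidRequirement, Requirement

    def mentions(requirements: list[str], dist_name: str) -> bool:
        for raw in requirements:
            line = re.sub(r"#.*$", "", raw).strip()  # drop inline comments and whitespace
            if not line:
                continue  # blank or comment-only line
            try:
                if Requirement(line).name == dist_name:
                    return True
            except InvalidRequirement:
                pass  # the operator raises ValueError here instead
        return False

    print(mentions(["pendulum==3.0.0  # pinned", "requests"], "pendulum"))  # True
    print(mentions(["apache-airflow-providers-http"], "apache-airflow"))    # False, unlike a substring test
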
@@ -881,10 +912,8 @@ class BranchPythonVirtualenvOperator(PythonVirtualenvOperator, BranchMixIn):
     :ref:`howto/operator:BranchPythonVirtualenvOperator`
     """
 
-    inherits_from_skipmixin = True
-
-    def execute(self, context: Context) -> Any:
-        return self.do_branch(context, super().execute(context))
+    def choose_branch(self, context: Context) -> str | Iterable[str]:
+        return PythonVirtualenvOperator.execute(self, context)
 
 
 class ExternalPythonOperator(_BasePythonVirtualenvOperator):
@@ -1080,7 +1109,7 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
         return None
 
 
-class BranchExternalPythonOperator(ExternalPythonOperator, BranchMixIn):
+class BranchExternalPythonOperator(BaseBranchOperator, ExternalPythonOperator):
     """
     A workflow can "branch" or follow a path after the execution of this task.
 
@@ -1093,8 +1122,8 @@ class BranchExternalPythonOperator(ExternalPythonOperator, BranchMixIn):
     :ref:`howto/operator:BranchExternalPythonOperator`
     """
 
-    def execute(self, context: Context) -> Any:
-        return self.do_branch(context, super().execute(context))
+    def choose_branch(self, context: Context) -> str | Iterable[str]:
+        return ExternalPythonOperator.execute(self, context)
 
 
 def get_current_context() -> Mapping[str, Any]:
@@ -1125,6 +1154,13 @@ def get_current_context() -> Mapping[str, Any]:
         was starting to execute.
     """
     if AIRFLOW_V_3_0_PLUS:
+        warnings.warn(
+            "Using get_current_context from standard provider is deprecated and will be removed."
+            "Please import `from airflow.sdk import get_current_context` and use it instead.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
+
         from airflow.sdk import get_current_context
 
         return get_current_context()
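
Since the provider's get_current_context now warns on Airflow 3, the Task SDK import it points to is the quiet path (the task body below is a hypothetical example):

    from airflow.sdk import get_current_context

    def my_python_callable():
        context = get_current_context()  # no AirflowProviderDeprecationWarning via this import
        return context["ti"].task_id
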
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements. See the NOTICE file
 # distributed with this work for additional information
@@ -24,9 +23,19 @@ from collections.abc import Collection, Iterable
 from typing import TYPE_CHECKING, Any, Callable, ClassVar
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowSkipException
+from airflow.exceptions import AirflowSkipException
 from airflow.models.dag import DagModel
 from airflow.models.dagbag import DagBag
+from airflow.providers.standard.exceptions import (
+    DuplicateStateError,
+    ExternalDagDeletedError,
+    ExternalDagFailedError,
+    ExternalDagNotFoundError,
+    ExternalTaskFailedError,
+    ExternalTaskGroupFailedError,
+    ExternalTaskGroupNotFoundError,
+    ExternalTaskNotFoundError,
+)
 from airflow.providers.standard.operators.empty import EmptyOperator
 from airflow.providers.standard.triggers.external_task import WorkflowTrigger
 from airflow.providers.standard.utils.sensor_helper import _get_count, _get_external_task_group_task_ids
@@ -190,7 +199,7 @@ class ExternalTaskSensor(BaseSensorOperator):
         total_states = set(self.allowed_states + self.skipped_states + self.failed_states)
 
         if len(total_states) != len(self.allowed_states) + len(self.skipped_states) + len(self.failed_states):
-            raise AirflowException(
+            raise DuplicateStateError(
                 "Duplicate values provided across allowed_states, skipped_states and failed_states."
             )
 
@@ -356,7 +365,7 @@ class ExternalTaskSensor(BaseSensorOperator):
                     f"Some of the external tasks {self.external_task_ids} "
                     f"in DAG {self.external_dag_id} failed. Skipping due to soft_fail."
                 )
-            raise AirflowException(
+            raise ExternalTaskFailedError(
                 f"Some of the external tasks {self.external_task_ids} "
                 f"in DAG {self.external_dag_id} failed."
             )
@@ -366,7 +375,7 @@ class ExternalTaskSensor(BaseSensorOperator):
                     f"The external task_group '{self.external_task_group_id}' "
                     f"in DAG '{self.external_dag_id}' failed. Skipping due to soft_fail."
                 )
-            raise AirflowException(
+            raise ExternalTaskGroupFailedError(
                 f"The external task_group '{self.external_task_group_id}' "
                 f"in DAG '{self.external_dag_id}' failed."
             )
@@ -374,7 +383,7 @@ class ExternalTaskSensor(BaseSensorOperator):
                 raise AirflowSkipException(
                     f"The external DAG {self.external_dag_id} failed. Skipping due to soft_fail."
                 )
-            raise AirflowException(f"The external DAG {self.external_dag_id} failed.")
+            raise ExternalDagFailedError(f"The external DAG {self.external_dag_id} failed.")
 
     def _handle_skipped_states(self, count_skipped: float | int) -> None:
         """Handle skipped states and raise appropriate exceptions."""
@@ -443,10 +452,14 @@ class ExternalTaskSensor(BaseSensorOperator):
             self.log.info("External tasks %s has executed successfully.", self.external_task_ids)
         elif event["status"] == "skipped":
             raise AirflowSkipException("External job has skipped skipping.")
+        elif event["status"] == "failed":
+            if self.soft_fail:
+                raise AirflowSkipException("External job has failed skipping.")
+            raise ExternalDagFailedError("External job has failed.")
         else:
             if self.soft_fail:
                 raise AirflowSkipException("External job has failed skipping.")
-            raise AirflowException(
+            raise ExternalTaskNotFoundError(
                 "Error occurred while trying to retrieve task status. Please, check the "
                 "name of executed task and Dag."
             )
@@ -455,23 +468,31 @@ class ExternalTaskSensor(BaseSensorOperator):
         dag_to_wait = DagModel.get_current(self.external_dag_id, session)
 
         if not dag_to_wait:
-            raise AirflowException(f"The external DAG {self.external_dag_id} does not exist.")
+            raise ExternalDagNotFoundError(f"The external DAG {self.external_dag_id} does not exist.")
 
         if not os.path.exists(correct_maybe_zipped(dag_to_wait.fileloc)):
-            raise AirflowException(f"The external DAG {self.external_dag_id} was deleted.")
+            raise ExternalDagDeletedError(f"The external DAG {self.external_dag_id} was deleted.")
 
         if self.external_task_ids:
             refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id)
+            if not refreshed_dag_info:
+                raise ExternalDagNotFoundError(
+                    f"The external DAG {self.external_dag_id} could not be loaded."
+                )
             for external_task_id in self.external_task_ids:
                 if not refreshed_dag_info.has_task(external_task_id):
-                    raise AirflowException(
+                    raise ExternalTaskNotFoundError(
                         f"The external task {external_task_id} in DAG {self.external_dag_id} does not exist."
                     )
 
         if self.external_task_group_id:
             refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id)
+            if not refreshed_dag_info:
+                raise ExternalDagNotFoundError(
+                    f"The external DAG {self.external_dag_id} could not be loaded."
+                )
             if not refreshed_dag_info.has_task_group(self.external_task_group_id):
-                raise AirflowException(
+                raise ExternalTaskGroupNotFoundError(
                     f"The external task group '{self.external_task_group_id}' in "
                     f"DAG '{self.external_dag_id}' does not exist."
                 )
@@ -22,7 +22,6 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, NoReturn
 
 from airflow.providers.standard.triggers.temporal import DateTimeTrigger
-from airflow.providers.standard.version_compat import AIRFLOW_V_2_10_PLUS
 from airflow.sensors.base import BaseSensorOperator
 
 try:
@@ -123,9 +122,7 @@ class TimeSensorAsync(BaseSensorOperator):
 
     def execute(self, context: Context) -> NoReturn:
         self.defer(
-            trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger)
-            if AIRFLOW_V_2_10_PLUS
-            else DateTimeTrigger(moment=self.target_datetime),
+            trigger=DateTimeTrigger(moment=self.target_datetime, end_from_trigger=self.end_from_trigger),
             method_name="execute_complete",
         )
 
@@ -115,9 +115,7 @@ class WorkflowTrigger(BaseTrigger):
                 if failed_count > 0:
                     yield TriggerEvent({"status": "failed"})
                     return
-                else:
-                    yield TriggerEvent({"status": "success"})
-                    return
+
             if self.skipped_states:
                 skipped_count = await get_count_func(self.skipped_states)
                 if skipped_count > 0:
@@ -23,14 +23,9 @@ from typing import Any
 
 import pendulum
 
-from airflow.exceptions import AirflowException
-from airflow.providers.standard.version_compat import AIRFLOW_V_2_10_PLUS
-from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.triggers.base import BaseTrigger, TaskSuccessEvent, TriggerEvent
 from airflow.utils import timezone
 
-if AIRFLOW_V_2_10_PLUS:
-    from airflow.triggers.base import TaskSuccessEvent
-
 
 class DateTimeTrigger(BaseTrigger):
     """
@@ -54,9 +49,6 @@ class DateTimeTrigger(BaseTrigger):
         if moment.tzinfo is None:
             raise ValueError("You cannot pass naive datetimes")
         self.moment: pendulum.DateTime = timezone.convert_to_utc(moment)
-        if not AIRFLOW_V_2_10_PLUS and end_from_trigger:
-            raise AirflowException("end_from_trigger is only supported in Airflow 2.10 and later. ")
-
         self.end_from_trigger = end_from_trigger
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
@@ -32,5 +32,4 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
     return airflow_version.major, airflow_version.minor, airflow_version.micro
 
 
-AIRFLOW_V_2_10_PLUS = get_base_airflow_version_tuple() >= (2, 10, 0)
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.1.0
+Version: 1.2.0rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,10 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow>=2.10.0rc1
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.2.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.2.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``1.1.0``
+Release: ``1.2.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.2.0/>`_.
 
 Installation
 ------------
@@ -84,9 +84,9 @@ Requirements
 ================== ==================
 PIP package        Version required
 ================== ==================
-``apache-airflow`` ``>=2.9.0``
+``apache-airflow`` ``>=2.10.0``
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.1.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.2.0/changelog.html>`_.
 
@@ -1,7 +1,8 @@
 airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/standard/__init__.py,sha256=J9d1-mgExKhv6Qbbqa9OWyGgbOVDssHxlU9VvluSS-U,1495
+airflow/providers/standard/__init__.py,sha256=w-ohvwgsE19GzHH1PMPQpVI6bMYmgFWm6aBLEj5TVBg,1497
+airflow/providers/standard/exceptions.py,sha256=MN4gEuNI8rFYx8zs5aAsUKDwyZQNuCig68h15CdwJ90,2075
 airflow/providers/standard/get_provider_info.py,sha256=9qlyfIHRu_d_jZyyE0SR8s2dN9HEjzV5P2EjyVCcbw4,7003
-airflow/providers/standard/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/standard/version_compat.py,sha256=j5PCtXvZ71aBjixu-EFTNtVDPsngzzs7os0ZQDgFVDk,1536
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/decorators/bash.py,sha256=J13t48yrRv7XpDV8_QWtI0IXbqNiqxW9Ct0ngmrQAdE,4396
 airflow/providers/standard/decorators/branch_external_python.py,sha256=M6JwUxKQj8KOHZL8cHtKmn37leEz4DKoGDrexNH2aAA,2557
@@ -22,30 +23,30 @@ airflow/providers/standard/operators/branch.py,sha256=C_AUd7TSo_U52GiWsrR7rJIsRU
 airflow/providers/standard/operators/datetime.py,sha256=bYDdbfAyAlEXRRHjOgB06UhgDum6SPdd5I3u-ylPSaw,5005
 airflow/providers/standard/operators/empty.py,sha256=C7_uLWJK6kExzlNc7xdMo8VAQ_ONWITvEQ2FImrMepM,1324
 airflow/providers/standard/operators/latest_only.py,sha256=1yJtpi6cK4TIjARQgcrf460as4V6uVBdoDtjJEUnbvs,4884
-airflow/providers/standard/operators/python.py,sha256=UrRPJP4ZP2-n5kx8U3ExiUVAYGu3LJUE7JoxBkWeLBU,50046
+airflow/providers/standard/operators/python.py,sha256=pyYeVlk2ZioYMFnA3ZICY8T7vEcWJZTmSyPmZp6hWVk,51544
 airflow/providers/standard/operators/smooth.py,sha256=d3OV38EzV_wlfMYN3JGWGwyzsFonx8VbqgGfXSw0_bM,1382
 airflow/providers/standard/operators/trigger_dagrun.py,sha256=isd9HVzYhAZS_Y3V1iQ3Gm2QlOV6KdGWHdq0PnhWxrU,16746
 airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB7zOXfe3R6vSGVa_rJk,5083
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=sNcII9aLzJhfdimOwsTggeZYk1TM_ulWDS5iKpB_9XE,4963
 airflow/providers/standard/sensors/date_time.py,sha256=hRUuLaNgqDh4jqaIaD8zdyq2BUXkpWM2NzJN5YkwTJI,6077
-airflow/providers/standard/sensors/external_task.py,sha256=-3PeSlX1CHb8yo9CgY0CbfKCNsmIkVbfNGBYva8SOzo,27298
+airflow/providers/standard/sensors/external_task.py,sha256=63w5wzM61MrfH6546rKnYEWcZqeL8APrULNLLue44gg,28249
 airflow/providers/standard/sensors/filesystem.py,sha256=rfupSeHtFGdAcL6cw3H6u6ttBxogSThYiPqsUKgABMU,6029
 airflow/providers/standard/sensors/python.py,sha256=2tCCQa4ynsBpIfRKZdXnq2-9qIk9odikvRj46jxvR24,3387
-airflow/providers/standard/sensors/time.py,sha256=Pc9BZqqTQy3Qqz7uME9yF4qmWsXYCzAoAlsmwgpAraY,5007
+airflow/providers/standard/sensors/time.py,sha256=q0cDO3hz7VYkNUOJS2UiTKY-IICCBAdEsw25l23DPgM,4836
 airflow/providers/standard/sensors/time_delta.py,sha256=1OlDMIwNYXhBeeE8TmfsAMIFIOur4BMlDWe0L_JScZc,6633
 airflow/providers/standard/sensors/weekday.py,sha256=XoEp5HYmxHQOFZlyBPuRJcPO2srjftguilJH33H1L4M,4449
 airflow/providers/standard/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/standard/triggers/external_task.py,sha256=LXJd9rbHuuVNM-ZD3G7uMVsQZ-pvk5S8tfpkJVnSnPE,11709
+airflow/providers/standard/triggers/external_task.py,sha256=cuIUvLeOC6Epf4JDvFZSqcWU9fdh2plMbwop_pceb98,11599
 airflow/providers/standard/triggers/file.py,sha256=2i8-RwSjEgdOwQNcHCqLmSdpE3Ehqg4GQJ8nE3-fHxo,4886
-airflow/providers/standard/triggers/temporal.py,sha256=SYdFNzUA-ujVMmjdm8Ykhv5lQ7GdalreyTXjMxQ1lx0,4789
+airflow/providers/standard/triggers/temporal.py,sha256=AlSdf3iNUMrdQmMlKHi0Ms-D_OU7hIkLFsqj23mpR3Q,4446
 airflow/providers/standard/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/utils/python_virtualenv.py,sha256=IhLLfE2xLQcEhsnhuWd-Y2D3eaM8TojFN6GjvtMFoNQ,7757
 airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq6hQ9EHkOoGnAHc2_XNkZQkOJGxZArDKLc-c,2770
 airflow/providers/standard/utils/sensor_helper.py,sha256=PNIETsl_a4BkmOypFfHdpP0VuTkC6eWKUDuwnNVaWsA,5000
 airflow/providers/standard/utils/skipmixin.py,sha256=XkhDozcXUHZ7C6AxzEW8ZYrqbra1oJGGR3ZieNQ-N0M,7791
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-1.1.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
-apache_airflow_providers_standard-1.1.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_standard-1.1.0.dist-info/METADATA,sha256=DlPpPeT7VKLNgroj8o6RqhMfWzD0FvjbxlYLcud_rkM,3786
-apache_airflow_providers_standard-1.1.0.dist-info/RECORD,,
+apache_airflow_providers_standard-1.2.0rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.2.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-1.2.0rc1.dist-info/METADATA,sha256=AjCFtk0rKlroDgALeGr2lDjM1VUyLgRnl2OXu0TbrSE,3808
+apache_airflow_providers_standard-1.2.0rc1.dist-info/RECORD,,