apache-airflow-providers-standard 0.0.3rc1__tar.gz → 0.1.0rc1__tar.gz

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.

Files changed (37)
  1. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1}/PKG-INFO +9 -26
  2. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1}/README.rst +4 -22
  3. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1}/pyproject.toml +12 -8
  4. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/LICENSE +0 -52
  5. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/__init__.py +1 -1
  6. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/get_provider_info.py +5 -5
  7. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/bash.py +7 -9
  8. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/datetime.py +5 -1
  9. apache_airflow_providers_standard-0.1.0rc1/src/airflow/providers/standard/operators/empty.py +39 -0
  10. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/generic_transfer.py +5 -1
  11. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/latest_only.py +7 -2
  12. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/python.py +38 -61
  13. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/trigger_dagrun.py +27 -31
  14. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/weekday.py +7 -3
  15. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/bash.py +5 -1
  16. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/date_time.py +5 -1
  17. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/external_task.py +13 -16
  18. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/filesystem.py +5 -1
  19. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/python.py +5 -1
  20. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/time.py +5 -1
  21. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/time_delta.py +19 -6
  22. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/weekday.py +5 -1
  23. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/triggers/external_task.py +41 -46
  24. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/triggers/file.py +57 -3
  25. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -24
  26. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/utils/sensor_helper.py +9 -13
  27. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/hooks/__init__.py +0 -0
  28. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/hooks/filesystem.py +0 -0
  29. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/hooks/package_index.py +0 -0
  30. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/hooks/subprocess.py +0 -0
  31. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/__init__.py +0 -0
  32. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/sensors/__init__.py +0 -0
  33. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/triggers/__init__.py +0 -0
  34. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/triggers/temporal.py +0 -0
  35. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/utils/__init__.py +0 -0
  36. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/utils/python_virtualenv.py +0 -0
  37. {apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/version_compat.py +0 -0

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-standard
-Version: 0.0.3rc1
+Version: 0.1.0rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,34 +20,17 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
 Requires-Dist: apache-airflow>=2.9.0rc0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/


-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
@@ -65,8 +48,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
    specific language governing permissions and limitations
    under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-   OVERWRITTEN WHEN PREPARING PACKAGES.
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -74,7 +56,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/

 Package ``apache-airflow-providers-standard``

-Release: ``0.0.3.rc1``
+Release: ``0.1.0``


 Airflow Standard Provider
@@ -87,7 +69,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/>`_.

 Installation
 ------------
@@ -109,4 +91,5 @@ PIP package Version required
 ======================================= ==================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/changelog.html>`_.
+
{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1}/README.rst

@@ -1,21 +1,4 @@

-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
@@ -33,8 +16,7 @@
    specific language governing permissions and limitations
    under the License.

-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-   OVERWRITTEN WHEN PREPARING PACKAGES.
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -42,7 +24,7 @@

 Package ``apache-airflow-providers-standard``

-Release: ``0.0.3.rc1``
+Release: ``0.1.0``


 Airflow Standard Provider
@@ -55,7 +37,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/>`_.

 Installation
 ------------
@@ -77,4 +59,4 @@ PIP package Version required
 ======================================= ==================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/changelog.html>`_.
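
For reference, the provider installs with pip as usual; pinning the release candidate in this diff would look like:

    pip install apache-airflow-providers-standard==0.1.0rc1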

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1}/pyproject.toml

@@ -1,4 +1,3 @@
-
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements. See the NOTICE file
 # distributed with this work for additional information
@@ -16,10 +15,9 @@
 # specific language governing permissions and limitations
 # under the License.

-# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-# OVERWRITTEN WHEN PREPARING PACKAGES.
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

-# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
 requires = ["flit_core==3.10.1"]
@@ -27,7 +25,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-standard"
-version = "0.0.3.rc1"
+version = "0.1.0.rc1"
 description = "Provider package apache-airflow-providers-standard for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -53,14 +51,17 @@ classifiers = [
     "Topic :: System :: Monitoring",
 ]
 requires-python = "~=3.9"
+
+# The dependencies should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
 dependencies = [
-    "apache-airflow-providers-common-sql>=1.20.0rc0",
     "apache-airflow>=2.9.0rc0",
+    "apache-airflow-providers-common-sql>=1.20.0rc0",
 ]

 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.0.3/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.1.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -72,3 +73,6 @@ provider_info = "airflow.providers.standard.get_provider_info:get_provider_info"

 [tool.flit.module]
 name = "airflow.providers.standard"
+
+[tool.pytest.ini_options]
+ignore = "tests/system/"
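
The `provider_info` entry point kept in the context lines above is how Airflow discovers installed providers. A minimal sketch of resolving it by hand, assuming the standard `apache_airflow_provider` entry-point group and Python 3.10+:

    from importlib.metadata import entry_points

    # Every provider distribution registers a "provider_info" entry point;
    # loading and calling it returns the same dict built by get_provider_info().
    for ep in entry_points(group="apache_airflow_provider"):
        if "providers.standard" in ep.value:
            info = ep.load()()
            print(info["versions"][0])  # expected: "0.1.0"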

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/LICENSE

@@ -199,55 +199,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-
-============================================================================
-   APACHE AIRFLOW SUBCOMPONENTS:
-
-   The Apache Airflow project contains subcomponents with separate copyright
-   notices and license terms. Your use of the source code for the these
-   subcomponents is subject to the terms and conditions of the following
-   licenses.
-
-
-========================================================================
-Third party Apache 2.0 licenses
-========================================================================
-
-The following components are provided under the Apache 2.0 License.
-See project link for details. The text of each license is also included
-at 3rd-party-licenses/LICENSE-[project].txt.
-
-(ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
-(ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
-(ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
-(ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
-
-========================================================================
-MIT licenses
-========================================================================
-
-The following components are provided under the MIT License. See project link for details.
-The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
-(MIT License) jquery v3.5.1 (https://jquery.org/license/)
-(MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
-(MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
-(MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
-(MIT License) dataTables v1.10.25 (https://datatables.net)
-(MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
-(MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
-(MIT License) MomentJS v2.24.0 (http://momentjs.com/)
-(MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
-
-========================================================================
-BSD 3-Clause licenses
-========================================================================
-The following components are provided under the BSD 3-Clause license. See project links for details.
-The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
-(BSD 3 License) d3 v5.16.0 (https://d3js.org)
-(BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
-(BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
-
-========================================================================
-See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "0.0.3"
+__version__ = "0.1.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/get_provider_info.py

@@ -15,8 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.

-# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-# OVERWRITTEN WHEN PREPARING PACKAGES.
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 #
 # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
 # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -28,9 +27,8 @@ def get_provider_info():
         "name": "Standard",
         "description": "Airflow Standard Provider\n",
         "state": "ready",
-        "source-date-epoch": 1734536895,
-        "versions": ["0.0.3", "0.0.2", "0.0.1"],
-        "dependencies": ["apache-airflow>=2.9.0", "apache-airflow-providers-common-sql>=1.20.0"],
+        "source-date-epoch": 1739964539,
+        "versions": ["0.1.0", "0.0.3", "0.0.2", "0.0.1"],
         "integrations": [
             {
                 "integration-name": "Standard",
@@ -51,6 +49,7 @@ def get_provider_info():
                 "airflow.providers.standard.operators.weekday",
                 "airflow.providers.standard.operators.bash",
                 "airflow.providers.standard.operators.python",
+                "airflow.providers.standard.operators.empty",
                 "airflow.providers.standard.operators.generic_transfer",
                 "airflow.providers.standard.operators.trigger_dagrun",
                 "airflow.providers.standard.operators.latest_only",
@@ -106,4 +105,5 @@ def get_provider_info():
             },
         }
     },
+    "dependencies": ["apache-airflow>=2.9.0", "apache-airflow-providers-common-sql>=1.20.0"],
 }

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/bash.py

@@ -34,8 +34,11 @@ from airflow.utils.types import ArgNotSet
 if TYPE_CHECKING:
     from sqlalchemy.orm import Session as SASession

-    from airflow.models.taskinstance import TaskInstance
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class BashOperator(BaseOperator):
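
The same Airflow 3 / Airflow 2 import shim recurs in most of the operator and sensor modules below; extracted on its own, the pattern is simply:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        try:
            # Airflow 3: Context is provided by the Task SDK.
            from airflow.sdk.definitions.context import Context
        except ImportError:
            # Airflow 2 fallback; dropped once Airflow 2 support ends.
            from airflow.utils.context import Context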

@@ -66,7 +69,7 @@ class BashOperator(BaseOperator):
     :param cwd: Working directory to execute the command in (templated).
         If None (default), the command is run in a temporary directory.
         To use current DAG folder as the working directory,
-        you might set template ``{{ dag_run.dag.folder }}``.
+        you might set template ``{{ task.dag.folder }}``.
         When bash_command is a '.sh' or '.bash' file, Airflow must have write
         access to the working directory. The script will be rendered (Jinja
         template) into a new temporary file in this directory.
@@ -198,7 +201,7 @@ class BashOperator(BaseOperator):
     # TODO: This should be replaced with Task SDK API call
     @staticmethod
     @provide_session
-    def refresh_bash_command(ti: TaskInstance, session: SASession = NEW_SESSION) -> None:
+    def refresh_bash_command(ti, session: SASession = NEW_SESSION) -> None:
         """
         Rewrite the underlying rendered bash_command value for a task instance in the metadatabase.
@@ -211,11 +214,6 @@ class BashOperator(BaseOperator):
         from airflow.models.renderedtifields import RenderedTaskInstanceFields

         """Update rendered task instance fields for cases where runtime evaluated, not templated."""
-        # Note: Need lazy import to break the partly loaded class loop
-        from airflow.models.taskinstance import TaskInstance
-
-        # If called via remote API the DAG needs to be re-loaded
-        TaskInstance.ensure_dag(ti, session=session)

         rtif = RenderedTaskInstanceFields(ti)
         RenderedTaskInstanceFields.write(rtif, session=session)

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/datetime.py

@@ -25,7 +25,11 @@ from airflow.operators.branch import BaseBranchOperator
 from airflow.utils import timezone

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class BranchDateTimeOperator(BaseBranchOperator):

apache_airflow_providers_standard-0.1.0rc1/src/airflow/providers/standard/operators/empty.py

@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.models.baseoperator import BaseOperator
+
+if TYPE_CHECKING:
+    from airflow.sdk.definitions.context import Context
+
+
+class EmptyOperator(BaseOperator):
+    """
+    Operator that does literally nothing.
+
+    It can be used to group tasks in a DAG.
+    The task is evaluated by the scheduler but never processed by the executor.
+    """
+
+    ui_color = "#e8f7e4"
+    inherits_from_empty_operator = True
+
+    def execute(self, context: Context):
+        pass
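
A minimal usage sketch for the newly added operator (DAG id and dates are illustrative):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.standard.operators.empty import EmptyOperator

    # EmptyOperator runs no user code: the scheduler marks it done without
    # ever handing it to the executor, making it a cheap fan-in/fan-out node.
    with DAG(dag_id="example_empty", start_date=datetime(2025, 1, 1), schedule=None):
        start = EmptyOperator(task_id="start")
        join = EmptyOperator(task_id="join")
        start >> join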

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/generic_transfer.py

@@ -24,7 +24,11 @@ from airflow.hooks.base import BaseHook
 from airflow.models import BaseOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class GenericTransfer(BaseOperator):

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/latest_only.py

@@ -28,7 +28,12 @@ from airflow.operators.branch import BaseBranchOperator

 if TYPE_CHECKING:
     from airflow.models import DAG, DagRun
-    from airflow.utils.context import Context
+
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context


 class LatestOnlyOperator(BaseBranchOperator):
@@ -52,7 +57,7 @@ class LatestOnlyOperator(BaseBranchOperator):
             self.log.info("Externally triggered DAG_Run: allowing execution to proceed.")
             return list(context["task"].get_direct_relative_ids(upstream=False))

-        dag: DAG = context["dag"]
+        dag: DAG = context["dag"]  # type: ignore[assignment]
         next_info = dag.next_dagrun_info(dag.get_run_data_interval(dag_run), restricted=False)
         now = pendulum.now("UTC")

{apache_airflow_providers_standard-0.0.3rc1 → apache_airflow_providers_standard-0.1.0rc1/src}/airflow/providers/standard/operators/python.py

@@ -43,15 +43,10 @@ from airflow.exceptions import (
 )
 from airflow.models.baseoperator import BaseOperator
 from airflow.models.skipmixin import SkipMixin
-from airflow.models.taskinstance import _CURRENT_CONTEXT
 from airflow.models.variable import Variable
 from airflow.operators.branch import BranchMixIn
 from airflow.providers.standard.utils.python_virtualenv import prepare_virtualenv, write_python_script
-from airflow.providers.standard.version_compat import (
-    AIRFLOW_V_2_10_PLUS,
-    AIRFLOW_V_3_0_PLUS,
-)
-from airflow.typing_compat import Literal
+from airflow.providers.standard.version_compat import AIRFLOW_V_2_10_PLUS, AIRFLOW_V_3_0_PLUS
 from airflow.utils import hashlib_wrapper
 from airflow.utils.context import context_copy_partial, context_merge
 from airflow.utils.file import get_unique_dag_module_name
@@ -61,9 +56,17 @@ from airflow.utils.process_utils import execute_in_subprocess, execute_in_subpro
 log = logging.getLogger(__name__)

 if TYPE_CHECKING:
+    from typing import Literal
+
     from pendulum.datetime import DateTime

-    from airflow.utils.context import Context
+    try:
+        from airflow.sdk.definitions.context import Context
+    except ImportError:
+        # TODO: Remove once provider drops support for Airflow 2
+        from airflow.utils.context import Context
+
+    _SerializerTypeDef = Literal["pickle", "cloudpickle", "dill"]


 @cache
@@ -299,7 +302,8 @@ class ShortCircuitOperator(PythonOperator, SkipMixin):
         self.log.info("Skipping downstream tasks")
         if AIRFLOW_V_3_0_PLUS:
             self.skip(
-                dag_run=dag_run,
+                dag_id=dag_run.dag_id,
+                run_id=dag_run.run_id,
                 tasks=to_skip,
                 map_index=context["ti"].map_index,
             )
@@ -343,7 +347,6 @@ def _load_cloudpickle():
     return cloudpickle


-_SerializerTypeDef = Literal["pickle", "cloudpickle", "dill"]
 _SERIALIZERS: dict[_SerializerTypeDef, Any] = {
     "pickle": lazy_object_proxy.Proxy(_load_pickle),
     "dill": lazy_object_proxy.Proxy(_load_dill),
@@ -374,6 +377,9 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         "yesterday_ds",
         "yesterday_ds_nodash",
     }
+    if AIRFLOW_V_3_0_PLUS:
+        BASE_SERIALIZABLE_CONTEXT_KEYS.add("task_reschedule_count")
+
     PENDULUM_SERIALIZABLE_CONTEXT_KEYS = {
         "data_interval_end",
         "data_interval_start",
@@ -388,6 +394,8 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         "prev_execution_date",
         "prev_execution_date_success",
     }
+    if AIRFLOW_V_3_0_PLUS:
+        PENDULUM_SERIALIZABLE_CONTEXT_KEYS.add("start_date")

     AIRFLOW_SERIALIZABLE_CONTEXT_KEYS = {
         "macros",
@@ -415,7 +423,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         skip_on_exit_code: int | Container[int] | None = None,
         env_vars: dict[str, str] | None = None,
         inherit_env: bool = True,
-        use_airflow_context: bool = False,
         **kwargs,
     ):
         if (
@@ -457,7 +464,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         )
         self.env_vars = env_vars
         self.inherit_env = inherit_env
-        self.use_airflow_context = use_airflow_context

     @abstractmethod
     def _iter_serializable_context_keys(self):
@@ -516,7 +522,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
             "pickling_library": self.serializer,
             "python_callable": self.python_callable.__name__,
             "python_callable_source": self.get_python_source(),
-            "use_airflow_context": self.use_airflow_context,
         }

         if inspect.getfile(self.python_callable) == self.dag.fileloc:
@@ -527,20 +532,6 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
             filename=os.fspath(script_path),
             render_template_as_native_obj=self.dag.render_template_as_native_obj,
         )
-        if self.use_airflow_context:
-            # TODO: replace with commented code when context serialization is implemented in AIP-72
-            raise AirflowException(
-                "The `use_airflow_context=True` is not yet implemented. "
-                "It will work in Airflow 3 after AIP-72 context "
-                "serialization is ready."
-            )
-            # context = get_current_context()
-            # with create_session() as session:
-            #     dag_run, task_instance = context["dag_run"], context["task_instance"]
-            #     session.add_all([dag_run, task_instance])
-            # serializable_context: dict[Encoding, Any] = # Get serializable context here
-            # with airflow_context_path.open("w+") as file:
-            #     json.dump(serializable_context, file)

         env_vars = dict(os.environ) if self.inherit_env else {}
         if self.env_vars:
@@ -583,7 +574,11 @@ class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
         return self._read_result(output_path)

     def determine_kwargs(self, context: Mapping[str, Any]) -> Mapping[str, Any]:
-        return KeywordParameters.determine(self.python_callable, self.op_args, context).serializing()
+        keyword_params = KeywordParameters.determine(self.python_callable, self.op_args, context)
+        if AIRFLOW_V_3_0_PLUS:
+            return keyword_params.unpacking()
+        else:
+            return keyword_params.serializing()  # type: ignore[attr-defined]


 class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
@@ -651,8 +646,6 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
         environment. If set to ``True``, the virtual environment will inherit the environment variables
         of the parent process (``os.environ``). If set to ``False``, the virtual environment will be
         executed with a clean environment.
-    :param use_airflow_context: Whether to provide ``get_current_context()`` to the python_callable.
-        NOT YET IMPLEMENTED - waits for AIP-72 context serialization.
     """

     template_fields: Sequence[str] = tuple(
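
Given the virtualenv behaviour described in the docstring above, a hedged usage sketch (package name and ids are illustrative):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.standard.operators.python import PythonVirtualenvOperator

    def render_table():
        # Imports resolve inside the freshly built virtualenv, not the worker env.
        import tabulate

        print(tabulate.tabulate([["ok", 1]]))

    with DAG(dag_id="example_venv", start_date=datetime(2025, 1, 1), schedule=None):
        PythonVirtualenvOperator(
            task_id="render_table",
            python_callable=render_table,
            requirements=["tabulate"],
            serializer="cloudpickle",  # one of the _SERIALIZERS registered above
        )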

@@ -680,7 +673,6 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
         venv_cache_path: None | os.PathLike[str] = None,
         env_vars: dict[str, str] | None = None,
         inherit_env: bool = True,
-        use_airflow_context: bool = False,
         **kwargs,
     ):
         if (
@@ -697,18 +689,6 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
             raise AirflowException(
                 "Passing non-string types (e.g. int or float) as python_version not supported"
             )
-        if use_airflow_context and (not expect_airflow and not system_site_packages):
-            raise AirflowException(
-                "The `use_airflow_context` parameter is set to True, but "
-                "expect_airflow and system_site_packages are set to False."
-            )
-        # TODO: remove when context serialization is implemented in AIP-72
-        if use_airflow_context and not AIRFLOW_V_3_0_PLUS:
-            raise AirflowException(
-                "The `use_airflow_context=True` is not yet implemented. "
-                "It will work in Airflow 3 after AIP-72 context "
-                "serialization is ready."
-            )
         if not requirements:
             self.requirements: list[str] = []
         elif isinstance(requirements, str):
@@ -737,7 +717,6 @@ class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
             skip_on_exit_code=skip_on_exit_code,
             env_vars=env_vars,
             inherit_env=inherit_env,
-            use_airflow_context=use_airflow_context,
             **kwargs,
         )
@@ -953,8 +932,6 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
         environment. If set to ``True``, the virtual environment will inherit the environment variables
         of the parent process (``os.environ``). If set to ``False``, the virtual environment will be
         executed with a clean environment.
-    :param use_airflow_context: Whether to provide ``get_current_context()`` to the python_callable.
-        NOT YET IMPLEMENTED - waits for AIP-72 context serialization.
     """

     template_fields: Sequence[str] = tuple({"python"}.union(PythonOperator.template_fields))
@@ -975,22 +952,10 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
         skip_on_exit_code: int | Container[int] | None = None,
         env_vars: dict[str, str] | None = None,
         inherit_env: bool = True,
-        use_airflow_context: bool = False,
         **kwargs,
     ):
         if not python:
             raise ValueError("Python Path must be defined in ExternalPythonOperator")
-        if use_airflow_context and not expect_airflow:
-            raise AirflowException(
-                "The `use_airflow_context` parameter is set to True, but expect_airflow is set to False."
-            )
-        # TODO: remove when context serialization is implemented in AIP-72
-        if use_airflow_context:
-            raise AirflowException(
-                "The `use_airflow_context=True` is not yet implemented. "
-                "It will work in Airflow 3 after AIP-72 context "
-                "serialization is ready."
-            )
         self.python = python
         self.expect_pendulum = expect_pendulum
         super().__init__(
@@ -1005,7 +970,6 @@ class ExternalPythonOperator(_BasePythonVirtualenvOperator):
             skip_on_exit_code=skip_on_exit_code,
             env_vars=env_vars,
             inherit_env=inherit_env,
-            use_airflow_context=use_airflow_context,
             **kwargs,
         )
@@ -1120,7 +1084,7 @@ class BranchExternalPythonOperator(ExternalPythonOperator, BranchMixIn):
         return self.do_branch(context, super().execute(context))


-def get_current_context() -> Context:
+def get_current_context() -> Mapping[str, Any]:
     """
     Retrieve the execution context dictionary without altering user method's signature.

@@ -1147,9 +1111,22 @@ def get_current_context() -> Context:
     Current context will only have value if this method was called after an operator
     was starting to execute.
     """
+    if AIRFLOW_V_3_0_PLUS:
+        from airflow.sdk import get_current_context
+
+        return get_current_context()
+    else:
+        return _get_current_context()
+
+
+def _get_current_context() -> Mapping[str, Any]:
+    # Airflow 2.x
+    # TODO: To be removed when Airflow 2 support is dropped
+    from airflow.models.taskinstance import _CURRENT_CONTEXT  # type: ignore[attr-defined]
+
     if not _CURRENT_CONTEXT:
-        raise AirflowException(
+        raise RuntimeError(
             "Current context was requested but no context was found! "
-            "Are you running within an airflow task?"
+            "Are you running within an Airflow task?"
         )
     return _CURRENT_CONTEXT[-1]
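
A minimal sketch of calling the relocated context accessor from a task, matching the docstring above (ids are illustrative):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.standard.operators.python import PythonOperator, get_current_context

    def print_run_id():
        # On Airflow 3 this delegates to airflow.sdk.get_current_context();
        # on Airflow 2 it falls back to the legacy _CURRENT_CONTEXT stack.
        context = get_current_context()
        print(context["run_id"])

    with DAG(dag_id="example_context", start_date=datetime(2025, 1, 1), schedule=None):
        PythonOperator(task_id="print_run_id", python_callable=print_run_id)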