apache-airflow-providers-standard 1.8.0rc1__py3-none-any.whl → 1.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "1.8.0"
32
+ __version__ = "1.9.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -0,0 +1,102 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from __future__ import annotations
19
+
20
+ import ast
21
+ from collections.abc import Callable
22
+ from typing import TYPE_CHECKING, Any
23
+
24
+ if TYPE_CHECKING:
25
+ from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
26
+ else:
27
+ try:
28
+ from airflow.sdk.bases.decorator import DecoratedOperator, TaskDecorator, task_decorator_factory
29
+ except ModuleNotFoundError:
30
+ from airflow.decorators.base import (
31
+ DecoratedOperator,
32
+ TaskDecorator,
33
+ task_decorator_factory,
34
+ )
35
+
36
+ if TYPE_CHECKING:
37
+ from airflow.sdk.definitions.context import Context
38
+
39
+
40
+ class _StubOperator(DecoratedOperator):
41
+ custom_operator_name: str = "@task.stub"
42
+
43
+ def __init__(
44
+ self,
45
+ *,
46
+ python_callable: Callable,
47
+ task_id: str,
48
+ **kwargs,
49
+ ) -> None:
50
+ super().__init__(
51
+ python_callable=python_callable,
52
+ task_id=task_id,
53
+ **kwargs,
54
+ )
55
+ # Validate python callable
56
+ module = ast.parse(self.get_python_source())
57
+
58
+ if len(module.body) != 1:
59
+ raise RuntimeError("Expected a single statement")
60
+ fn = module.body[0]
61
+ if not isinstance(fn, ast.FunctionDef):
62
+ raise RuntimeError("Expected a single sync function")
63
+ for stmt in fn.body:
64
+ if isinstance(stmt, ast.Pass):
65
+ continue
66
+ if isinstance(stmt, ast.Expr):
67
+ if isinstance(stmt.value, ast.Constant) and isinstance(stmt.value.value, (str, type(...))):
68
+ continue
69
+
70
+ raise ValueError(
71
+ f"Functions passed to @task.stub must be an empty function (`pass`, or `...` only) (got {stmt})"
72
+ )
73
+
74
+ ...
75
+
76
+ def execute(self, context: Context) -> Any:
77
+ raise RuntimeError(
78
+ "@task.stub should not be executed directly -- we expected this to go to a remote worker. "
79
+ "Check your pool and worker configs"
80
+ )
81
+
82
+
83
+ def stub(
84
+ python_callable: Callable | None = None,
85
+ queue: str | None = None,
86
+ executor: str | None = None,
87
+ **kwargs,
88
+ ) -> TaskDecorator:
89
+ """
90
+ Define a stub task in the DAG.
91
+
92
+ Stub tasks exist in the DAG graph only, but the execution must happen in an external
93
+ environment via the Task Execution Interface.
94
+
95
+ """
96
+ return task_decorator_factory(
97
+ decorated_operator_class=_StubOperator,
98
+ python_callable=python_callable,
99
+ queue=queue,
100
+ executor=executor,
101
+ **kwargs,
102
+ )
@@ -92,21 +92,14 @@ def example_bash_decorator():
92
92
  # [END howto_decorator_bash_parametrize]
93
93
 
94
94
  # [START howto_decorator_bash_build_cmd]
95
- def _get_files_in_cwd() -> list[str]:
96
- from pathlib import Path
97
-
98
- dir_contents = Path.cwd().glob("airflow-core/src/airflow/example_dags/*.py")
99
- files = [str(elem) for elem in dir_contents if elem.is_file()]
100
-
101
- return files
102
-
103
95
  @task.bash
104
96
  def get_file_stats() -> str:
97
+ from pathlib import Path
105
98
  from shlex import join
106
99
 
107
- files = _get_files_in_cwd()
108
- files = files if files else ["."]
109
- cmd = join(["stat", *files])
100
+ # Get stats of the current DAG file itself
101
+ current_file = str(Path(__file__))
102
+ cmd = join(["stat", current_file])
110
103
 
111
104
  return cmd
112
105
 
@@ -144,5 +144,6 @@ def get_provider_info():
144
144
  "class-name": "airflow.providers.standard.decorators.short_circuit.short_circuit_task",
145
145
  "name": "short_circuit",
146
146
  },
147
+ {"class-name": "airflow.providers.standard.decorators.stub.stub", "name": "stub"},
147
148
  ],
148
149
  }
@@ -25,7 +25,6 @@ from typing import TYPE_CHECKING, Any, ClassVar
25
25
  from airflow.configuration import conf
26
26
  from airflow.exceptions import AirflowSkipException
27
27
  from airflow.models.dag import DagModel
28
- from airflow.models.dagbag import DagBag
29
28
  from airflow.providers.standard.exceptions import (
30
29
  DuplicateStateError,
31
30
  ExternalDagDeletedError,
@@ -41,6 +40,7 @@ from airflow.providers.standard.triggers.external_task import WorkflowTrigger
41
40
  from airflow.providers.standard.utils.sensor_helper import _get_count, _get_external_task_group_task_ids
42
41
  from airflow.providers.standard.version_compat import (
43
42
  AIRFLOW_V_3_0_PLUS,
43
+ AIRFLOW_V_3_2_PLUS,
44
44
  BaseOperator,
45
45
  BaseOperatorLink,
46
46
  BaseSensorOperator,
@@ -51,6 +51,11 @@ from airflow.utils.state import State, TaskInstanceState
51
51
  if not AIRFLOW_V_3_0_PLUS:
52
52
  from airflow.utils.session import NEW_SESSION, provide_session
53
53
 
54
+ if AIRFLOW_V_3_2_PLUS:
55
+ from airflow.dag_processing.dagbag import DagBag
56
+ else:
57
+ from airflow.models.dagbag import DagBag # type: ignore[attr-defined, no-redef]
58
+
54
59
  if TYPE_CHECKING:
55
60
  from sqlalchemy.orm import Session
56
61
 
@@ -34,6 +34,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
34
34
 
35
35
  AIRFLOW_V_3_0_PLUS: bool = get_base_airflow_version_tuple() >= (3, 0, 0)
36
36
  AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
37
+ AIRFLOW_V_3_2_PLUS: bool = get_base_airflow_version_tuple() >= (3, 2, 0)
37
38
 
38
39
  # BaseOperator is not imported from SDK from 3.0 (and only done from 3.1) due to a bug with
39
40
  # DecoratedOperator -- where `DecoratedOperator._handle_output` needed `xcom_push` to exist on `BaseOperator`
@@ -57,6 +58,7 @@ else:
57
58
  __all__ = [
58
59
  "AIRFLOW_V_3_0_PLUS",
59
60
  "AIRFLOW_V_3_1_PLUS",
61
+ "AIRFLOW_V_3_2_PLUS",
60
62
  "BaseOperator",
61
63
  "BaseOperatorLink",
62
64
  "BaseHook",
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-standard
3
- Version: 1.8.0rc1
3
+ Version: 1.9.0
4
4
  Summary: Provider package apache-airflow-providers-standard for Apache Airflow
5
5
  Keywords: airflow-provider,standard,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,10 @@ Classifier: Programming Language :: Python :: 3.11
20
20
  Classifier: Programming Language :: Python :: 3.12
21
21
  Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
- Requires-Dist: apache-airflow>=2.10.0rc1
23
+ Requires-Dist: apache-airflow>=2.10.0
24
24
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
25
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.8.0/changelog.html
26
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-standard/1.8.0
25
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.0/changelog.html
26
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.0
27
27
  Project-URL: Mastodon, https://fosstodon.org/@airflow
28
28
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
29
29
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
54
54
 
55
55
  Package ``apache-airflow-providers-standard``
56
56
 
57
- Release: ``1.8.0``
57
+ Release: ``1.9.0``
58
58
 
59
59
 
60
60
  Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
67
67
  are in ``airflow.providers.standard`` python package.
68
68
 
69
69
  You can find package information and changelog for the provider
70
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.8.0/>`_.
70
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.0/>`_.
71
71
 
72
72
  Installation
73
73
  ------------
@@ -88,5 +88,5 @@ PIP package Version required
88
88
  ================== ==================
89
89
 
90
90
  The changelog for the provider package can be found in the
91
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.8.0/changelog.html>`_.
91
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.9.0/changelog.html>`_.
92
92
 
@@ -1,8 +1,8 @@
1
1
  airflow/providers/standard/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
- airflow/providers/standard/__init__.py,sha256=LOW3Un9JxsKQy3ZmcKa-xKq5MZnEhCNCBzws_pLaAeI,1497
2
+ airflow/providers/standard/__init__.py,sha256=hCZHSeeS05isy47esiFDswI-Z4GhWDXOlX-NU2xPjLc,1497
3
3
  airflow/providers/standard/exceptions.py,sha256=m2Ryv36yrzAk8xMIA4lhR11n1gA1CKPFwRok8ksl_tk,2416
4
- airflow/providers/standard/get_provider_info.py,sha256=jhENLvqCXj0mzBPmJeAvPj7XWaaNuxPLhHVVA-9rqzs,7132
5
- airflow/providers/standard/version_compat.py,sha256=pWRfOwwGUd_uIfch0eiT5vOgwaA5rvohtW5Q46orG98,2891
4
+ airflow/providers/standard/get_provider_info.py,sha256=NVstkG2ZeAiTZnvmbrMpxcYgJzcdITKdQvgDOZYX1Rk,7227
5
+ airflow/providers/standard/version_compat.py,sha256=b9e-bjiRWtIIhhQ-XRBg3mFVxWZIl27CkZQf5dtRsYA,2990
6
6
  airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
7
  airflow/providers/standard/decorators/bash.py,sha256=NuWB6q8AeTzWPeIIX-y3M-qN819BEcjNQwS32XTF6cs,4416
8
8
  airflow/providers/standard/decorators/branch_external_python.py,sha256=G1l3_sYM38wEWMRnzRuVGmAbd4uMN5D07zm3EM3-ZWo,2584
@@ -13,8 +13,9 @@ airflow/providers/standard/decorators/python.py,sha256=Y56NyfW5qtF-mLMv7yCNTnDRP
13
13
  airflow/providers/standard/decorators/python_virtualenv.py,sha256=_8Ir-9tt1Ru5rHfxW8nwqD7EAzw8UzQpXO7O14I2HTY,2592
14
14
  airflow/providers/standard/decorators/sensor.py,sha256=BEcTdFUrfiFnLLZN6Uqtiqqq6ScGRVg9JkZYZk4sc-U,3230
15
15
  airflow/providers/standard/decorators/short_circuit.py,sha256=zUZgcVDrZXXU_cOhvkxGJrSUsC0oD1HAGPNVDD3U2QM,2533
16
+ airflow/providers/standard/decorators/stub.py,sha256=WY6-h8zdlaxVN2hjly_P5GH3bKBEdX2UJA-_U9HKogs,3315
16
17
  airflow/providers/standard/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
17
- airflow/providers/standard/example_dags/example_bash_decorator.py,sha256=B2F_AHzdjucrm0zx7osc4JQjWklXgmY7jps8Y1r5fbI,4067
18
+ airflow/providers/standard/example_dags/example_bash_decorator.py,sha256=1HYpavjRq98k-Pg11hFhD8_fMJ_kYCGxpknrd7xF7oY,3867
18
19
  airflow/providers/standard/example_dags/example_bash_operator.py,sha256=tAS4cBsKW9B1nUukmYTpUw5Vf63476v_-tYjffyAtd4,2352
19
20
  airflow/providers/standard/example_dags/example_branch_datetime_operator.py,sha256=6sGzn1xlMaF3I-HMI7bvx78oyxZUw5WAF_Gja_ZUch0,3765
20
21
  airflow/providers/standard/example_dags/example_branch_day_of_week_operator.py,sha256=75ncMaGfkjxN0ULszqeXrSL5rHauUTNOhGiGAGPm3pw,2362
@@ -53,7 +54,7 @@ airflow/providers/standard/operators/weekday.py,sha256=Qg7LhXYtybVSGZn8uQqF-r7RB
53
54
  airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
54
55
  airflow/providers/standard/sensors/bash.py,sha256=jiysK84IwnVpQj1_lE65E_pSPE0FO82GGtqXf_a8STA,4984
55
56
  airflow/providers/standard/sensors/date_time.py,sha256=h1Y1E6AqDxvAFJZw56ni3R8TEeUgn4OBI04bd5L-vt0,6516
56
- airflow/providers/standard/sensors/external_task.py,sha256=lDEg2Zbwp79f6VV6uH3PXI-NiHbL4IMAO4z-1VDl4gA,28695
57
+ airflow/providers/standard/sensors/external_task.py,sha256=d3o7GRWCNgsczEPQ0FtyRRYESxFwwMYSnrT_gmdJXeY,28846
57
58
  airflow/providers/standard/sensors/filesystem.py,sha256=Z6Fiism8rMA8LzKUBnxOWL3ErUTtkKQBPp7_ipbGPAU,5902
58
59
  airflow/providers/standard/sensors/python.py,sha256=gFlZf3h60UtgJCarG9FN30cnPxKyLNVn46nNxu6O9aQ,3415
59
60
  airflow/providers/standard/sensors/time.py,sha256=tUDXe8-oOIoq7R_FXE62GPYTGngJqRBoS1X44Y6HR4E,5123
@@ -70,7 +71,7 @@ airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=3Z334hVq
70
71
  airflow/providers/standard/utils/sensor_helper.py,sha256=FwI36sJK-s3Wz0slypF1_tAikQpiXovtTiN__Md00Aw,5049
71
72
  airflow/providers/standard/utils/skipmixin.py,sha256=kHpui_Ag-bwbbwjQOfXEmE4zAWYrhO9H_qKGRkbtqdE,8349
72
73
  airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
73
- apache_airflow_providers_standard-1.8.0rc1.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
74
- apache_airflow_providers_standard-1.8.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
75
- apache_airflow_providers_standard-1.8.0rc1.dist-info/METADATA,sha256=CglBdGScFsDjWa4Ic7ikWkEVBMi_iL3vAESP6ftTVhg,3809
76
- apache_airflow_providers_standard-1.8.0rc1.dist-info/RECORD,,
74
+ apache_airflow_providers_standard-1.9.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
75
+ apache_airflow_providers_standard-1.9.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
76
+ apache_airflow_providers_standard-1.9.0.dist-info/METADATA,sha256=KPHPGVwh8uwf-ihZFTpc6ft5-w8bZlnnFHPoqpdHLZA,3789
77
+ apache_airflow_providers_standard-1.9.0.dist-info/RECORD,,