apache-airflow-providers-standard 0.2.0b1__tar.gz → 0.2.0rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/PKG-INFO +7 -7
  2. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/README.rst +3 -3
  3. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/pyproject.toml +5 -5
  4. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/__init__.py +1 -1
  5. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/get_provider_info.py +1 -1
  6. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/bash.py +11 -48
  7. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/python.py +23 -14
  8. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/utils/python_virtualenv_script.jinja2 +5 -0
  9. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/LICENSE +0 -0
  10. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/hooks/__init__.py +0 -0
  11. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/hooks/filesystem.py +0 -0
  12. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/hooks/package_index.py +0 -0
  13. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/hooks/subprocess.py +0 -0
  14. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/__init__.py +0 -0
  15. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/branch.py +0 -0
  16. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/datetime.py +0 -0
  17. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/empty.py +0 -0
  18. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/latest_only.py +0 -0
  19. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/smooth.py +0 -0
  20. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/trigger_dagrun.py +0 -0
  21. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/operators/weekday.py +0 -0
  22. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/__init__.py +0 -0
  23. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/bash.py +0 -0
  24. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/date_time.py +0 -0
  25. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/external_task.py +0 -0
  26. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/filesystem.py +0 -0
  27. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/python.py +0 -0
  28. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/time.py +0 -0
  29. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/time_delta.py +0 -0
  30. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/sensors/weekday.py +0 -0
  31. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/triggers/__init__.py +0 -0
  32. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/triggers/external_task.py +0 -0
  33. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/triggers/file.py +0 -0
  34. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/triggers/temporal.py +0 -0
  35. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/utils/__init__.py +0 -0
  36. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/utils/python_virtualenv.py +0 -0
  37. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/utils/sensor_helper.py +0 -0
  38. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/utils/skipmixin.py +0 -0
  39. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/utils/weekday.py +0 -0
  40. {apache_airflow_providers_standard-0.2.0b1 → apache_airflow_providers_standard-0.2.0rc1}/src/airflow/providers/standard/version_compat.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 0.2.0b1
+Version: 0.2.0rc1
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,10 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow>=2.9.0rc0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -54,7 +54,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.2.0b1``
+Release: ``0.2.0``
 
 
 Airflow Standard Provider
@@ -67,7 +67,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/>`_.
 
 Installation
 ------------
@@ -88,5 +88,5 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html>`_.
 

README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``0.2.0b1``
+Release: ``0.2.0``
 
 
 Airflow Standard Provider
@@ -36,7 +36,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/>`_.
 
 Installation
 ------------
@@ -57,4 +57,4 @@ PIP package Version required
 ================== ==================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html>`_.

pyproject.toml
@@ -20,12 +20,12 @@
 # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
-requires = ["flit_core==3.11.0"]
+requires = ["flit_core==3.12.0"]
 build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-standard"
-version = "0.2.0b1"
+version = "0.2.0.rc1"
 description = "Provider package apache-airflow-providers-standard for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,7 +57,7 @@ requires-python = "~=3.9"
 # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.9.0",
+    "apache-airflow>=2.9.0rc0",
 ]
 
 [dependency-groups]
@@ -79,8 +79,8 @@ apache-airflow-providers-fab = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0b1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-standard/0.2.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"

src/airflow/providers/standard/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "0.2.0b1"
+__version__ = "0.2.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

src/airflow/providers/standard/get_provider_info.py
@@ -28,7 +28,7 @@ def get_provider_info():
         "description": "Airflow Standard Provider\n",
         "state": "ready",
         "source-date-epoch": 1742480519,
-        "versions": ["0.2.0b1", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
+        "versions": ["0.2.0", "0.1.1", "0.1.0", "0.0.3", "0.0.2", "0.0.1"],
         "integrations": [
             {
                 "integration-name": "Standard",

src/airflow/providers/standard/operators/bash.py
@@ -27,12 +27,15 @@ from typing import TYPE_CHECKING, Any, Callable, cast
 from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.models.baseoperator import BaseOperator
 from airflow.providers.standard.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
-from airflow.utils.operator_helpers import context_to_airflow_vars
-from airflow.utils.session import NEW_SESSION, provide_session
-from airflow.utils.types import ArgNotSet
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.execution_time.context import context_to_airflow_vars
+else:
+    from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
 
 if TYPE_CHECKING:
-    from sqlalchemy.orm import Session as SASession
+    from airflow.utils.types import ArgNotSet
 
 try:
     from airflow.sdk.definitions.context import Context
@@ -182,43 +185,15 @@ class BashOperator(BaseOperator):
         self.cwd = cwd
         self.append_env = append_env
         self.output_processor = output_processor
-
-        # When using the @task.bash decorator, the Bash command is not known until the underlying Python
-        # callable is executed and therefore set to NOTSET initially. This flag is useful during execution to
-        # determine whether the bash_command value needs to re-rendered.
-        self._init_bash_command_not_set = isinstance(self.bash_command, ArgNotSet)
-
-        # Keep a copy of the original bash_command, without the Jinja template rendered.
-        # This is later used to determine if the bash_command is a script or an inline string command.
-        # We do this later, because the bash_command is not available in __init__ when using @task.bash.
-        self._unrendered_bash_command: str | ArgNotSet = bash_command
+        self._is_inline_cmd = None
+        if isinstance(bash_command, str):
+            self._is_inline_cmd = self._is_inline_command(bash_command=bash_command)
 
     @cached_property
     def subprocess_hook(self):
         """Returns hook for running the bash command."""
         return SubprocessHook()
 
-    # TODO: This should be replaced with Task SDK API call
-    @staticmethod
-    @provide_session
-    def refresh_bash_command(ti, session: SASession = NEW_SESSION) -> None:
-        """
-        Rewrite the underlying rendered bash_command value for a task instance in the metadatabase.
-
-        TaskInstance.get_rendered_template_fields() cannot be used because this will retrieve the
-        RenderedTaskInstanceFields from the metadatabase which doesn't have the runtime-evaluated bash_command
-        value.
-
-        :meta private:
-        """
-        from airflow.models.renderedtifields import RenderedTaskInstanceFields
-
-        """Update rendered task instance fields for cases where runtime evaluated, not templated."""
-
-        rtif = RenderedTaskInstanceFields(ti)
-        RenderedTaskInstanceFields.write(rtif, session=session)
-        RenderedTaskInstanceFields.delete_old_records(ti.task_id, ti.dag_id, session=session)
-
     def get_env(self, context) -> dict:
         """Build the set of environment variables to be exposed for the bash command."""
         system_env = os.environ.copy()
@@ -247,19 +222,7 @@
             raise AirflowException(f"The cwd {self.cwd} must be a directory")
         env = self.get_env(context)
 
-        # Because the bash_command value is evaluated at runtime using the @task.bash decorator, the
-        # RenderedTaskInstanceField data needs to be rewritten and the bash_command value re-rendered -- the
-        # latter because the returned command from the decorated callable could contain a Jinja expression.
-        # Both will ensure the correct Bash command is executed and that the Rendered Template view in the UI
-        # displays the executed command (otherwise it will display as an ArgNotSet type).
-        if self._init_bash_command_not_set:
-            is_inline_command = self._is_inline_command(bash_command=cast(str, self.bash_command))
-            ti = context["ti"]
-            self.refresh_bash_command(ti)
-        else:
-            is_inline_command = self._is_inline_command(bash_command=cast(str, self._unrendered_bash_command))
-
-        if is_inline_command:
+        if self._is_inline_cmd:
             result = self._run_inline_command(bash_path=bash_path, env=env)
         else:
             result = self._run_rendered_script_file(bash_path=bash_path, env=env)
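
Net effect of the hunks above: the inline-vs-script decision is made once in `__init__` whenever `bash_command` arrives as a plain string, and the runtime `refresh_bash_command` round-trip to the metadatabase is gone. A minimal sketch of the two forms the `_is_inline_cmd` flag distinguishes (task ids and the script path are illustrative; the `.sh` handling follows `BashOperator`'s documented template-extension behavior):

```python
from airflow.providers.standard.operators.bash import BashOperator

# A literal command string: detected as inline in __init__ and later
# executed directly via `bash -c`.
inline = BashOperator(task_id="run_inline", bash_command="echo {{ ds }}")

# A value ending in .sh is treated as a templated script file, not an
# inline command: the rendered file is executed instead.
script = BashOperator(task_id="run_script", bash_command="scripts/cleanup.sh")
```

With `@task.bash`, `bash_command` is not a string at construction time, so the flag is left as `None`; the falsy check in the new code then routes execution through `_run_rendered_script_file`.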

src/airflow/providers/standard/operators/python.py
@@ -66,10 +66,12 @@ if TYPE_CHECKING:
 
     from pendulum.datetime import DateTime
 
+    from airflow.sdk.execution_time.callback_runner import ExecutionCallableRunner
+    from airflow.sdk.execution_time.context import OutletEventAccessorsProtocol
+
 try:
     from airflow.sdk.definitions.context import Context
-except ImportError:
-    # TODO: Remove once provider drops support for Airflow 2
+except ImportError:  # TODO: Remove once provider drops support for Airflow 2
     from airflow.utils.context import Context
 
 _SerializerTypeDef = Literal["pickle", "cloudpickle", "dill"]
@@ -190,14 +192,22 @@ class PythonOperator(BaseOperator):
         context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
         self.op_kwargs = self.determine_kwargs(context)
 
-        if AIRFLOW_V_3_0_PLUS:
-            from airflow.utils.context import context_get_outlet_events
+        # This needs to be lazy because subclasses may implement execute_callable
+        # by running a separate process that can't use the eager result.
+        def __prepare_execution() -> tuple[ExecutionCallableRunner, OutletEventAccessorsProtocol] | None:
+            if AIRFLOW_V_3_0_PLUS:
+                from airflow.sdk.execution_time.callback_runner import create_executable_runner
+                from airflow.sdk.execution_time.context import context_get_outlet_events
+
+                return create_executable_runner, context_get_outlet_events(context)
+            if AIRFLOW_V_2_10_PLUS:
+                from airflow.utils.context import context_get_outlet_events  # type: ignore
+                from airflow.utils.operator_helpers import ExecutionCallableRunner  # type: ignore
 
-            self._asset_events = context_get_outlet_events(context)
-        elif AIRFLOW_V_2_10_PLUS:
-            from airflow.utils.context import context_get_outlet_events
+                return ExecutionCallableRunner, context_get_outlet_events(context)
+            return None
 
-            self._dataset_events = context_get_outlet_events(context)
+        self.__prepare_execution = __prepare_execution
 
         return_value = self.execute_callable()
         if self.show_return_value_in_logs:
@@ -210,19 +220,18 @@
     def determine_kwargs(self, context: Mapping[str, Any]) -> Mapping[str, Any]:
         return KeywordParameters.determine(self.python_callable, self.op_args, context).unpacking()
 
+    __prepare_execution: Callable[[], tuple[ExecutionCallableRunner, OutletEventAccessorsProtocol] | None]
+
     def execute_callable(self) -> Any:
         """
         Call the python callable with the given arguments.
 
         :return: the return value of the call.
         """
-        try:
-            from airflow.utils.operator_helpers import ExecutionCallableRunner
-        except ImportError:
-            # Handle Pre Airflow 2.10 case where ExecutionCallableRunner was not available
+        if (execution_preparation := self.__prepare_execution()) is None:
             return self.python_callable(*self.op_args, **self.op_kwargs)
-        asset_events = self._asset_events if AIRFLOW_V_3_0_PLUS else self._dataset_events
-        runner = ExecutionCallableRunner(self.python_callable, asset_events, logger=self.log)
+        create_execution_runner, asset_events = execution_preparation
+        runner = create_execution_runner(self.python_callable, asset_events, logger=self.log)
         return runner.run(*self.op_args, **self.op_kwargs)
 
 
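The pattern in this refactor: instead of eagerly stashing `_asset_events`/`_dataset_events` on the instance, `execute` now stores a closure, and only the default `execute_callable` evaluates it. Subclasses that run the callable out of process (the virtualenv/external-Python operators) never invoke the closure, so the version-dependent imports and context access are skipped entirely on that path. A generic sketch of this lazy-preparation shape (names are illustrative, not the provider's API):

```python
from __future__ import annotations

from typing import Any, Callable


class Base:
    # The stored closure: evaluated only if the default execute_callable runs.
    _prepare: Callable[[], tuple[Any, Any] | None]

    def execute(self, context: dict) -> Any:
        def prepare() -> tuple[Any, Any] | None:
            # Version-dependent imports and context access are deferred to
            # here, so they only happen on the in-process path.
            return ("runner-factory", context.get("outlet_events"))

        self._prepare = prepare
        return self.execute_callable()

    def execute_callable(self) -> Any:
        if (prepared := self._prepare()) is None:
            return "plain call"
        runner_factory, events = prepared
        return f"ran in-process with {runner_factory} and {events!r}"


class SubprocessVariant(Base):
    def execute_callable(self) -> Any:
        # Never calls self._prepare(); the callable runs out of process,
        # so none of the deferred setup happens here.
        return "ran out of process"


print(Base().execute({"outlet_events": []}))
print(SubprocessVariant().execute({}))
```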

src/airflow/providers/standard/utils/python_virtualenv_script.jinja2
@@ -20,7 +20,12 @@ from __future__ import annotations
 
 import {{ pickling_library }}
 import sys
+import os
+# Setting the PYTHON_OPERATORS_VIRTUAL_ENV_MODE environment variable to 1,
+# helps to avoid the issue of re creating the orm session in the settings file, otherwise
+# it fails with airflow-db-not-allowed
 
+os.environ["PYTHON_OPERATORS_VIRTUAL_ENV_MODE"] = "1"
 {% if expect_airflow %}
 {# Check whether Airflow is available in the environment.
 # If it is, we'll want to ensure that we integrate any macros that are being provided
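
For context, this header sits at the top of the wrapper script that `PythonVirtualenvOperator` generates and runs inside the throwaway environment, so the variable is exported before any Airflow import there can attempt to create an ORM session. A minimal usage sketch (task id and callable are illustrative):

```python
from airflow.providers.standard.operators.python import PythonVirtualenvOperator


def compute() -> int:
    # Executed inside the virtualenv by the generated wrapper script,
    # which now exports PYTHON_OPERATORS_VIRTUAL_ENV_MODE=1 first.
    return 42


venv_task = PythonVirtualenvOperator(
    task_id="venv_task",
    python_callable=compute,
    requirements=["dill"],
    serializer="dill",
)
```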