apache-airflow-providers-standard 0.0.3rc2__py3-none-any.whl → 1.0.0.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-standard might be problematic.

Files changed (35)
  1. airflow/providers/standard/__init__.py +1 -23
  2. airflow/providers/standard/get_provider_info.py +5 -52
  3. airflow/providers/standard/operators/datetime.py +2 -3
  4. airflow/providers/standard/operators/weekday.py +1 -4
  5. airflow/providers/standard/sensors/date_time.py +7 -27
  6. airflow/providers/standard/sensors/time.py +4 -23
  7. airflow/providers/standard/sensors/time_delta.py +4 -29
  8. airflow/providers/standard/sensors/weekday.py +1 -2
  9. {apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-1.0.0.dev0.dist-info}/METADATA +17 -18
  10. apache_airflow_providers_standard-1.0.0.dev0.dist-info/RECORD +15 -0
  11. {apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-1.0.0.dev0.dist-info}/WHEEL +1 -1
  12. airflow/providers/standard/hooks/__init__.py +0 -16
  13. airflow/providers/standard/hooks/filesystem.py +0 -89
  14. airflow/providers/standard/hooks/package_index.py +0 -95
  15. airflow/providers/standard/hooks/subprocess.py +0 -119
  16. airflow/providers/standard/operators/bash.py +0 -312
  17. airflow/providers/standard/operators/generic_transfer.py +0 -134
  18. airflow/providers/standard/operators/latest_only.py +0 -78
  19. airflow/providers/standard/operators/python.py +0 -1155
  20. airflow/providers/standard/operators/trigger_dagrun.py +0 -296
  21. airflow/providers/standard/sensors/bash.py +0 -116
  22. airflow/providers/standard/sensors/external_task.py +0 -512
  23. airflow/providers/standard/sensors/filesystem.py +0 -154
  24. airflow/providers/standard/sensors/python.py +0 -81
  25. airflow/providers/standard/triggers/__init__.py +0 -16
  26. airflow/providers/standard/triggers/external_task.py +0 -216
  27. airflow/providers/standard/triggers/file.py +0 -77
  28. airflow/providers/standard/triggers/temporal.py +0 -114
  29. airflow/providers/standard/utils/__init__.py +0 -16
  30. airflow/providers/standard/utils/python_virtualenv.py +0 -209
  31. airflow/providers/standard/utils/python_virtualenv_script.jinja2 +0 -101
  32. airflow/providers/standard/utils/sensor_helper.py +0 -123
  33. airflow/providers/standard/version_compat.py +0 -36
  34. apache_airflow_providers_standard-0.0.3rc2.dist-info/RECORD +0 -37
  35. {apache_airflow_providers_standard-0.0.3rc2.dist-info → apache_airflow_providers_standard-1.0.0.dev0.dist-info}/entry_points.txt +0 -0
airflow/providers/standard/operators/python.py
@@ -1,1155 +0,0 @@
- #
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- from __future__ import annotations
-
- import inspect
- import json
- import logging
- import os
- import shutil
- import subprocess
- import sys
- import textwrap
- import types
- from abc import ABCMeta, abstractmethod
- from collections.abc import Collection, Container, Iterable, Mapping, Sequence
- from functools import cache
- from pathlib import Path
- from tempfile import TemporaryDirectory
- from typing import TYPE_CHECKING, Any, Callable, NamedTuple, cast
-
- import lazy_object_proxy
-
- from airflow.exceptions import (
-     AirflowConfigException,
-     AirflowException,
-     AirflowSkipException,
-     DeserializingResultError,
- )
- from airflow.models.baseoperator import BaseOperator
- from airflow.models.skipmixin import SkipMixin
- from airflow.models.taskinstance import _CURRENT_CONTEXT
- from airflow.models.variable import Variable
- from airflow.operators.branch import BranchMixIn
- from airflow.providers.standard.utils.python_virtualenv import prepare_virtualenv, write_python_script
- from airflow.providers.standard.version_compat import (
-     AIRFLOW_V_2_10_PLUS,
-     AIRFLOW_V_3_0_PLUS,
- )
- from airflow.typing_compat import Literal
- from airflow.utils import hashlib_wrapper
- from airflow.utils.context import context_copy_partial, context_merge
- from airflow.utils.file import get_unique_dag_module_name
- from airflow.utils.operator_helpers import KeywordParameters
- from airflow.utils.process_utils import execute_in_subprocess, execute_in_subprocess_with_kwargs
-
- log = logging.getLogger(__name__)
-
- if TYPE_CHECKING:
-     from pendulum.datetime import DateTime
-
-     from airflow.utils.context import Context
-
-
- @cache
- def _parse_version_info(text: str) -> tuple[int, int, int, str, int]:
-     """Parse python version info from a text."""
-     parts = text.strip().split(".")
-     if len(parts) != 5:
-         msg = f"Invalid Python version info, expected 5 components separated by '.', but got {text!r}."
-         raise ValueError(msg)
-     try:
-         return int(parts[0]), int(parts[1]), int(parts[2]), parts[3], int(parts[4])
-     except ValueError:
-         msg = f"Unable to convert parts {parts} parsed from {text!r} to (int, int, int, str, int)."
-         raise ValueError(msg) from None
-
-
- class _PythonVersionInfo(NamedTuple):
-     """Provide the same interface as ``sys.version_info``."""
-
-     major: int
-     minor: int
-     micro: int
-     releaselevel: str
-     serial: int
-
-     @classmethod
-     def from_executable(cls, executable: str) -> _PythonVersionInfo:
-         """Parse python version info from an executable."""
-         cmd = [executable, "-c", 'import sys; print(".".join(map(str, sys.version_info)))']
-         try:
-             result = subprocess.check_output(cmd, text=True)
-         except Exception as e:
-             raise ValueError(f"Error while executing command {cmd}: {e}")
-         return cls(*_parse_version_info(result.strip()))
-
-
- class PythonOperator(BaseOperator):
-     """
-     Executes a Python callable.
-
-     .. seealso::
-         For more information on how to use this operator, take a look at the guide:
-         :ref:`howto/operator:PythonOperator`
-
-     When running your callable, Airflow will pass a set of keyword arguments that can be used in your
-     function. This set of kwargs corresponds exactly to what you can use in your jinja templates.
-     For this to work, you need to define ``**kwargs`` in your function header, or you can directly add
-     the keyword arguments you would like to get - for example, with the code below your callable will
-     get the values of ``ti`` context variables.
-
-     With explicit arguments:
-
-     .. code-block:: python
-
-         def my_python_callable(ti):
-             pass
-
-     With kwargs:
-
-     .. code-block:: python
-
-         def my_python_callable(**kwargs):
-             ti = kwargs["ti"]
-
-
-     :param python_callable: A reference to an object that is callable
-     :param op_args: a list of positional arguments that will get unpacked when
-         calling your callable
-     :param op_kwargs: a dictionary of keyword arguments that will get unpacked
-         in your function
-     :param templates_dict: a dictionary where the values are templates that
-         will get templated by the Airflow engine sometime between
-         ``__init__`` and ``execute`` takes place and are made available
-         in your callable's context after the template has been applied. (templated)
-     :param templates_exts: a list of file extensions to resolve while
-         processing templated fields, for example ``['.sql', '.hql']``
-     :param show_return_value_in_logs: a bool value whether to show return_value
-         logs. Defaults to True, which allows return value log output.
-         It can be set to False to prevent log output of the return value when you return huge data
-         such as transmitting a large amount of XCom to TaskAPI.
-     """
-
-     template_fields: Sequence[str] = ("templates_dict", "op_args", "op_kwargs")
-     template_fields_renderers = {"templates_dict": "json", "op_args": "py", "op_kwargs": "py"}
-     BLUE = "#ffefeb"
-     ui_color = BLUE
-
-     # since we won't mutate the arguments, we should just do the shallow copy
-     # there are some cases we can't deepcopy the objects (e.g. protobuf).
-     shallow_copy_attrs: Sequence[str] = ("python_callable", "op_kwargs")
-
-     def __init__(
-         self,
-         *,
-         python_callable: Callable,
-         op_args: Collection[Any] | None = None,
-         op_kwargs: Mapping[str, Any] | None = None,
-         templates_dict: dict[str, Any] | None = None,
-         templates_exts: Sequence[str] | None = None,
-         show_return_value_in_logs: bool = True,
-         **kwargs,
-     ) -> None:
-         super().__init__(**kwargs)
-         if not callable(python_callable):
-             raise AirflowException("`python_callable` param must be callable")
-         self.python_callable = python_callable
-         self.op_args = op_args or ()
-         self.op_kwargs = op_kwargs or {}
-         self.templates_dict = templates_dict
-         if templates_exts:
-             self.template_ext = templates_exts
-         self.show_return_value_in_logs = show_return_value_in_logs
-
-     def execute(self, context: Context) -> Any:
-         context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
-         self.op_kwargs = self.determine_kwargs(context)
-
-         if AIRFLOW_V_3_0_PLUS:
-             from airflow.utils.context import context_get_outlet_events
-
-             self._asset_events = context_get_outlet_events(context)
-         elif AIRFLOW_V_2_10_PLUS:
-             from airflow.utils.context import context_get_outlet_events
-
-             self._dataset_events = context_get_outlet_events(context)
-
-         return_value = self.execute_callable()
-         if self.show_return_value_in_logs:
-             self.log.info("Done. Returned value was: %s", return_value)
-         else:
-             self.log.info("Done. Returned value not shown")
-
-         return return_value
-
-     def determine_kwargs(self, context: Mapping[str, Any]) -> Mapping[str, Any]:
-         return KeywordParameters.determine(self.python_callable, self.op_args, context).unpacking()
-
-     def execute_callable(self) -> Any:
-         """
-         Call the python callable with the given arguments.
-
-         :return: the return value of the call.
-         """
-         try:
-             from airflow.utils.operator_helpers import ExecutionCallableRunner
-
-             asset_events = self._asset_events if AIRFLOW_V_3_0_PLUS else self._dataset_events
-
-             runner = ExecutionCallableRunner(self.python_callable, asset_events, logger=self.log)
-         except ImportError:
-             # Handle the pre-Airflow 2.10 case where ExecutionCallableRunner was not available
-             return self.python_callable(*self.op_args, **self.op_kwargs)
-         return runner.run(*self.op_args, **self.op_kwargs)
-
-
- class BranchPythonOperator(PythonOperator, BranchMixIn):
-     """
-     A workflow can "branch" or follow a path after the execution of this task.
-
-     It derives the PythonOperator and expects a Python function that returns
-     a single task_id, a single task_group_id, or a list of task_ids and/or
-     task_group_ids to follow. The task_id(s) and/or task_group_id(s) returned
-     should point to a task or task group directly downstream from {self}. All
-     other "branches" or directly downstream tasks are marked with a state of
-     ``skipped`` so that these paths can't move forward. The ``skipped`` states
-     are propagated downstream to allow for the DAG state to fill up and
-     the DAG run's state to be inferred.
-     """
-
-     def execute(self, context: Context) -> Any:
-         return self.do_branch(context, super().execute(context))
-
-
- class ShortCircuitOperator(PythonOperator, SkipMixin):
-     """
-     Allows a pipeline to continue based on the result of a ``python_callable``.
-
-     The ShortCircuitOperator is derived from the PythonOperator and evaluates the result of a
-     ``python_callable``. If the returned result is False or a falsy value, the pipeline will be
-     short-circuited. Downstream tasks will be marked with a state of "skipped" based on the short-circuiting
-     mode configured. If the returned result is True or a truthy value, downstream tasks proceed as normal and
-     an ``XCom`` of the returned result is pushed.
-
-     The short-circuiting can be configured to either respect or ignore the ``trigger_rule`` set for
-     downstream tasks. If ``ignore_downstream_trigger_rules`` is set to True, the default setting, all
-     downstream tasks are skipped without considering the ``trigger_rule`` defined for tasks. However, if this
-     parameter is set to False, the direct downstream tasks are skipped but the specified ``trigger_rule`` for
-     other subsequent downstream tasks is respected. In this mode, the operator assumes the direct downstream
-     tasks were purposely meant to be skipped but perhaps not other subsequent tasks.
-
-     .. seealso::
-         For more information on how to use this operator, take a look at the guide:
-         :ref:`howto/operator:ShortCircuitOperator`
-
-     :param ignore_downstream_trigger_rules: If set to True, all downstream tasks from this operator task will
-         be skipped. This is the default behavior. If set to False, the direct, downstream task(s) will be
-         skipped but the ``trigger_rule`` defined for all other downstream tasks will be respected.
-     """
-
-     def __init__(self, *, ignore_downstream_trigger_rules: bool = True, **kwargs) -> None:
-         super().__init__(**kwargs)
-         self.ignore_downstream_trigger_rules = ignore_downstream_trigger_rules
-
-     def execute(self, context: Context) -> Any:
-         condition = super().execute(context)
-         self.log.info("Condition result is %s", condition)
-
-         if condition:
-             self.log.info("Proceeding with downstream tasks...")
-             return condition
-
-         if not self.downstream_task_ids:
-             self.log.info("No downstream tasks; nothing to do.")
-             return condition
-
-         dag_run = context["dag_run"]
-
-         def get_tasks_to_skip():
-             if self.ignore_downstream_trigger_rules is True:
-                 tasks = context["task"].get_flat_relatives(upstream=False)
-             else:
-                 tasks = context["task"].get_direct_relatives(upstream=False)
-             for t in tasks:
-                 if not t.is_teardown:
-                     yield t
-
-         to_skip = get_tasks_to_skip()
-
-         # this lets us avoid an intermediate list unless debug logging
-         if self.log.getEffectiveLevel() <= logging.DEBUG:
-             self.log.debug("Downstream task IDs %s", to_skip := list(get_tasks_to_skip()))
-
-         self.log.info("Skipping downstream tasks")
-         if AIRFLOW_V_3_0_PLUS:
-             self.skip(
-                 dag_run=dag_run,
-                 tasks=to_skip,
-                 map_index=context["ti"].map_index,
-             )
-         else:
-             self.skip(
-                 dag_run=dag_run,
-                 tasks=to_skip,
-                 execution_date=cast("DateTime", dag_run.logical_date),  # type: ignore[call-arg, union-attr]
-                 map_index=context["ti"].map_index,
-             )
-
-         self.log.info("Done.")
-         # returns the result of the super execute method as it is instead of returning None
-         return condition
-
-
- def _load_pickle():
-     import pickle
-
-     return pickle
-
-
- def _load_dill():
-     try:
-         import dill
-     except ModuleNotFoundError:
-         log.error("Unable to import `dill` module. Please make sure that it is installed.")
-         raise
-     return dill
-
-
- def _load_cloudpickle():
-     try:
-         import cloudpickle
-     except ModuleNotFoundError:
-         log.error(
-             "Unable to import `cloudpickle` module. "
-             "Please install it with: pip install 'apache-airflow[cloudpickle]'"
-         )
-         raise
-     return cloudpickle
-
-
- _SerializerTypeDef = Literal["pickle", "cloudpickle", "dill"]
- _SERIALIZERS: dict[_SerializerTypeDef, Any] = {
-     "pickle": lazy_object_proxy.Proxy(_load_pickle),
-     "dill": lazy_object_proxy.Proxy(_load_dill),
-     "cloudpickle": lazy_object_proxy.Proxy(_load_cloudpickle),
- }
-
-
- class _BasePythonVirtualenvOperator(PythonOperator, metaclass=ABCMeta):
-     BASE_SERIALIZABLE_CONTEXT_KEYS = {
-         "ds",
-         "ds_nodash",
-         "expanded_ti_count",
-         "inlets",
-         "outlets",
-         "run_id",
-         "task_instance_key_str",
-         "test_mode",
-         "ts",
-         "ts_nodash",
-         "ts_nodash_with_tz",
-         # The following should be removed when Airflow 2 support is dropped.
-         "next_ds",
-         "next_ds_nodash",
-         "prev_ds",
-         "prev_ds_nodash",
-         "tomorrow_ds",
-         "tomorrow_ds_nodash",
-         "yesterday_ds",
-         "yesterday_ds_nodash",
-     }
-     PENDULUM_SERIALIZABLE_CONTEXT_KEYS = {
-         "data_interval_end",
-         "data_interval_start",
-         "logical_date",
-         "prev_data_interval_end_success",
-         "prev_data_interval_start_success",
-         "prev_start_date_success",
-         "prev_end_date_success",
-         # The following should be removed when Airflow 2 support is dropped.
-         "execution_date",
-         "next_execution_date",
-         "prev_execution_date",
-         "prev_execution_date_success",
-     }
-
-     AIRFLOW_SERIALIZABLE_CONTEXT_KEYS = {
-         "macros",
-         "conf",
-         "dag",
-         "dag_run",
-         "task",
-         "params",
-         "triggering_asset_events",
-         # The following should be removed when Airflow 2 support is dropped.
-         "triggering_dataset_events",
-     }
-
-     def __init__(
-         self,
-         *,
-         python_callable: Callable,
-         serializer: _SerializerTypeDef | None = None,
-         op_args: Collection[Any] | None = None,
-         op_kwargs: Mapping[str, Any] | None = None,
-         string_args: Iterable[str] | None = None,
-         templates_dict: dict | None = None,
-         templates_exts: list[str] | None = None,
-         expect_airflow: bool = True,
-         skip_on_exit_code: int | Container[int] | None = None,
-         env_vars: dict[str, str] | None = None,
-         inherit_env: bool = True,
-         use_airflow_context: bool = False,
-         **kwargs,
-     ):
-         if (
-             not isinstance(python_callable, types.FunctionType)
-             or isinstance(python_callable, types.LambdaType)
-             and python_callable.__name__ == "<lambda>"
-         ):
-             raise ValueError(f"{type(self).__name__} only supports functions for python_callable arg")
-         if inspect.isgeneratorfunction(python_callable):
-             raise ValueError(f"{type(self).__name__} does not support using 'yield' in python_callable")
-         super().__init__(
-             python_callable=python_callable,
-             op_args=op_args,
-             op_kwargs=op_kwargs,
-             templates_dict=templates_dict,
-             templates_exts=templates_exts,
-             **kwargs,
-         )
-         self.string_args = string_args or []
-
-         serializer = serializer or "pickle"
-         if serializer not in _SERIALIZERS:
-             msg = (
-                 f"Unsupported serializer {serializer!r}. "
-                 f"Expected one of {', '.join(map(repr, _SERIALIZERS))}"
-             )
-             raise AirflowException(msg)
-
-         self.pickling_library = _SERIALIZERS[serializer]
-         self.serializer: _SerializerTypeDef = serializer
-
-         self.expect_airflow = expect_airflow
-         self.skip_on_exit_code = (
-             skip_on_exit_code
-             if isinstance(skip_on_exit_code, Container)
-             else [skip_on_exit_code]
-             if skip_on_exit_code is not None
-             else []
-         )
-         self.env_vars = env_vars
-         self.inherit_env = inherit_env
-         self.use_airflow_context = use_airflow_context
-
-     @abstractmethod
-     def _iter_serializable_context_keys(self):
-         pass
-
-     def execute(self, context: Context) -> Any:
-         serializable_keys = set(self._iter_serializable_context_keys())
-         serializable_context = context_copy_partial(context, serializable_keys)
-         return super().execute(context=serializable_context)
-
-     def get_python_source(self):
-         """Return the source of self.python_callable."""
-         return textwrap.dedent(inspect.getsource(self.python_callable))
-
-     def _write_args(self, file: Path):
-         if self.op_args or self.op_kwargs:
-             self.log.info("Use %r as serializer.", self.serializer)
-             file.write_bytes(self.pickling_library.dumps({"args": self.op_args, "kwargs": self.op_kwargs}))
-
-     def _write_string_args(self, file: Path):
-         file.write_text("\n".join(map(str, self.string_args)))
-
-     def _read_result(self, path: Path):
-         if path.stat().st_size == 0:
-             return None
-         try:
-             return self.pickling_library.loads(path.read_bytes())
-         except ValueError as value_error:
-             raise DeserializingResultError() from value_error
-
-     def __deepcopy__(self, memo):
-         # module objects can't be copied at all
-         memo[id(self.pickling_library)] = self.pickling_library
-         return super().__deepcopy__(memo)
-
-     def _execute_python_callable_in_subprocess(self, python_path: Path):
-         with TemporaryDirectory(prefix="venv-call") as tmp:
-             tmp_dir = Path(tmp)
-             op_kwargs: dict[str, Any] = dict(self.op_kwargs)
-             if self.templates_dict:
-                 op_kwargs["templates_dict"] = self.templates_dict
-             input_path = tmp_dir / "script.in"
-             output_path = tmp_dir / "script.out"
-             string_args_path = tmp_dir / "string_args.txt"
-             script_path = tmp_dir / "script.py"
-             termination_log_path = tmp_dir / "termination.log"
-             airflow_context_path = tmp_dir / "airflow_context.json"
-
-             self._write_args(input_path)
-             self._write_string_args(string_args_path)
-
-             jinja_context = {
-                 "op_args": self.op_args,
-                 "op_kwargs": op_kwargs,
-                 "expect_airflow": self.expect_airflow,
-                 "pickling_library": self.serializer,
-                 "python_callable": self.python_callable.__name__,
-                 "python_callable_source": self.get_python_source(),
-                 "use_airflow_context": self.use_airflow_context,
-             }
-
-             if inspect.getfile(self.python_callable) == self.dag.fileloc:
-                 jinja_context["modified_dag_module_name"] = get_unique_dag_module_name(self.dag.fileloc)
-
-             write_python_script(
-                 jinja_context=jinja_context,
-                 filename=os.fspath(script_path),
-                 render_template_as_native_obj=self.dag.render_template_as_native_obj,
-             )
-             if self.use_airflow_context:
-                 # TODO: replace with commented code when context serialization is implemented in AIP-72
-                 raise AirflowException(
-                     "The `use_airflow_context=True` is not yet implemented. "
-                     "It will work in Airflow 3 after AIP-72 context "
-                     "serialization is ready."
-                 )
-                 # context = get_current_context()
-                 # with create_session() as session:
-                 #     dag_run, task_instance = context["dag_run"], context["task_instance"]
-                 #     session.add_all([dag_run, task_instance])
-                 # serializable_context: dict[Encoding, Any] = # Get serializable context here
-                 # with airflow_context_path.open("w+") as file:
-                 #     json.dump(serializable_context, file)
-
-             env_vars = dict(os.environ) if self.inherit_env else {}
-             if self.env_vars:
-                 env_vars.update(self.env_vars)
-
-             try:
-                 cmd: list[str] = [
-                     os.fspath(python_path),
-                     os.fspath(script_path),
-                     os.fspath(input_path),
-                     os.fspath(output_path),
-                     os.fspath(string_args_path),
-                     os.fspath(termination_log_path),
-                     os.fspath(airflow_context_path),
-                 ]
-                 if AIRFLOW_V_2_10_PLUS:
-                     execute_in_subprocess(
-                         cmd=cmd,
-                         env=env_vars,
-                     )
-                 else:
-                     execute_in_subprocess_with_kwargs(
-                         cmd=cmd,
-                         env=env_vars,
-                     )
-             except subprocess.CalledProcessError as e:
-                 if e.returncode in self.skip_on_exit_code:
-                     raise AirflowSkipException(f"Process exited with code {e.returncode}. Skipping.")
-                 elif termination_log_path.exists() and termination_log_path.stat().st_size > 0:
-                     error_msg = f"Process returned non-zero exit status {e.returncode}.\n"
-                     with open(termination_log_path) as file:
-                         error_msg += file.read()
-                     raise AirflowException(error_msg) from None
-                 else:
-                     raise
-
-             if 0 in self.skip_on_exit_code:
-                 raise AirflowSkipException("Process exited with code 0. Skipping.")
-
-             return self._read_result(output_path)
-
-     def determine_kwargs(self, context: Mapping[str, Any]) -> Mapping[str, Any]:
-         return KeywordParameters.determine(self.python_callable, self.op_args, context).serializing()
-
-
- class PythonVirtualenvOperator(_BasePythonVirtualenvOperator):
-     """
-     Run a function in a virtualenv that is created and destroyed automatically.
-
-     The function (which has certain caveats) must be defined using def, and not be
-     part of a class. All imports must happen inside the function
-     and no variables outside the scope may be referenced. A global scope
-     variable named virtualenv_string_args will be available (populated by
-     string_args). In addition, one can pass stuff through op_args and op_kwargs, and one
-     can use a return value.
-     Note that if your virtualenv runs in a different Python major version than Airflow,
-     you cannot use return values, op_args, op_kwargs, or use any macros that are being provided to
-     Airflow through plugins. You can use string_args though.
-
-     .. seealso::
-         For more information on how to use this operator, take a look at the guide:
-         :ref:`howto/operator:PythonVirtualenvOperator`
-
-     :param python_callable: A python function with no references to outside variables,
-         defined with def, which will be run in a virtual environment.
-     :param requirements: Either a list of requirement strings, or a (templated)
-         "requirements file" as specified by pip.
-     :param python_version: The Python version to run the virtual environment with. Note that
-         both 2 and 2.7 are acceptable forms.
-     :param serializer: Which serializer to use to serialize the args and result. It can be one of the following:
-
-         - ``"pickle"``: (default) Use pickle for serialization. Included in the Python Standard Library.
-         - ``"cloudpickle"``: Use cloudpickle to serialize more complex types;
-           this requires including cloudpickle in your requirements.
-         - ``"dill"``: Use dill to serialize more complex types;
-           this requires including dill in your requirements.
-     :param system_site_packages: Whether to include
-         system_site_packages in your virtual environment.
-         See virtualenv documentation for more information.
-     :param pip_install_options: a list of pip install options when installing requirements.
-         See 'pip install -h' for available options
-     :param op_args: A list of positional arguments to pass to python_callable.
-     :param op_kwargs: A dict of keyword arguments to pass to python_callable.
-     :param string_args: Strings that are present in the global var virtualenv_string_args,
-         available to python_callable at runtime as a list[str]. Note that args are split
-         by newline.
-     :param templates_dict: a dictionary where the values are templates that
-         will get templated by the Airflow engine sometime between
-         ``__init__`` and ``execute`` takes place and are made available
-         in your callable's context after the template has been applied
-     :param templates_exts: a list of file extensions to resolve while
-         processing templated fields, for example ``['.sql', '.hql']``
-     :param expect_airflow: expect Airflow to be installed in the target environment. If true, the operator
-         will raise a warning if Airflow is not installed, and it will attempt to load Airflow
-         macros when starting.
-     :param skip_on_exit_code: If python_callable exits with this exit code, leave the task
-         in ``skipped`` state (default: None). If set to ``None``, any non-zero
-         exit code will be treated as a failure.
-     :param index_urls: an optional list of index urls to load Python packages from.
-         If not provided, the system pip conf will be used to source packages from.
-     :param venv_cache_path: Optional path to the virtual environment parent folder in which the
-         virtual environment will be cached; creates a sub-folder venv-{hash} where {hash} is replaced
-         with a checksum of the requirements. If not provided, the virtual environment will be created and
-         deleted in a temp folder for every execution.
-     :param env_vars: A dictionary containing additional environment variables to set for the virtual
-         environment when it is executed.
-     :param inherit_env: Whether to inherit the current environment variables when executing the virtual
-         environment. If set to ``True``, the virtual environment will inherit the environment variables
-         of the parent process (``os.environ``). If set to ``False``, the virtual environment will be
-         executed with a clean environment.
-     :param use_airflow_context: Whether to provide ``get_current_context()`` to the python_callable.
-         NOT YET IMPLEMENTED - waits for AIP-72 context serialization.
-     """
-
-     template_fields: Sequence[str] = tuple(
-         {"requirements", "index_urls", "venv_cache_path"}.union(PythonOperator.template_fields)
-     )
-     template_ext: Sequence[str] = (".txt",)
-
-     def __init__(
-         self,
-         *,
-         python_callable: Callable,
-         requirements: None | Iterable[str] | str = None,
-         python_version: str | None = None,
-         serializer: _SerializerTypeDef | None = None,
-         system_site_packages: bool = True,
-         pip_install_options: list[str] | None = None,
-         op_args: Collection[Any] | None = None,
-         op_kwargs: Mapping[str, Any] | None = None,
-         string_args: Iterable[str] | None = None,
-         templates_dict: dict | None = None,
-         templates_exts: list[str] | None = None,
-         expect_airflow: bool = True,
-         skip_on_exit_code: int | Container[int] | None = None,
-         index_urls: None | Collection[str] | str = None,
-         venv_cache_path: None | os.PathLike[str] = None,
-         env_vars: dict[str, str] | None = None,
-         inherit_env: bool = True,
-         use_airflow_context: bool = False,
-         **kwargs,
-     ):
-         if (
-             python_version
-             and str(python_version)[0] != str(sys.version_info.major)
-             and (op_args or op_kwargs)
-         ):
-             raise AirflowException(
-                 "Passing op_args or op_kwargs is not supported across different Python "
-                 "major versions for PythonVirtualenvOperator. Please use string_args. "
-                 f"Sys version: {sys.version_info}. Virtual environment version: {python_version}"
-             )
-         if python_version is not None and not isinstance(python_version, str):
-             raise AirflowException(
-                 "Passing non-string types (e.g. int or float) as python_version not supported"
-             )
-         if use_airflow_context and (not expect_airflow and not system_site_packages):
-             raise AirflowException(
-                 "The `use_airflow_context` parameter is set to True, but "
-                 "expect_airflow and system_site_packages are set to False."
-             )
-         # TODO: remove when context serialization is implemented in AIP-72
-         if use_airflow_context and not AIRFLOW_V_3_0_PLUS:
-             raise AirflowException(
-                 "The `use_airflow_context=True` is not yet implemented. "
-                 "It will work in Airflow 3 after AIP-72 context "
-                 "serialization is ready."
-             )
-         if not requirements:
-             self.requirements: list[str] = []
-         elif isinstance(requirements, str):
-             self.requirements = [requirements]
-         else:
-             self.requirements = list(requirements)
-         self.python_version = python_version
-         self.system_site_packages = system_site_packages
-         self.pip_install_options = pip_install_options
-         if isinstance(index_urls, str):
-             self.index_urls: list[str] | None = [index_urls]
-         elif isinstance(index_urls, Collection):
-             self.index_urls = list(index_urls)
-         else:
-             self.index_urls = None
-         self.venv_cache_path = venv_cache_path
-         super().__init__(
-             python_callable=python_callable,
-             serializer=serializer,
-             op_args=op_args,
-             op_kwargs=op_kwargs,
-             string_args=string_args,
-             templates_dict=templates_dict,
-             templates_exts=templates_exts,
-             expect_airflow=expect_airflow,
-             skip_on_exit_code=skip_on_exit_code,
-             env_vars=env_vars,
-             inherit_env=inherit_env,
-             use_airflow_context=use_airflow_context,
-             **kwargs,
-         )
-
-     def _requirements_list(self, exclude_cloudpickle: bool = False) -> list[str]:
-         """Prepare a list of requirements that need to be installed for the virtual environment."""
-         requirements = [str(dependency) for dependency in self.requirements]
-         if not self.system_site_packages:
-             if (
-                 self.serializer == "cloudpickle"
-                 and not exclude_cloudpickle
-                 and "cloudpickle" not in requirements
-             ):
-                 requirements.append("cloudpickle")
-             elif self.serializer == "dill" and "dill" not in requirements:
-                 requirements.append("dill")
-         requirements.sort()  # Ensure a hash is stable
-         return requirements
-
-     def _prepare_venv(self, venv_path: Path) -> None:
-         """Prepare the requirements and install the virtual environment."""
-         requirements_file = venv_path / "requirements.txt"
-         requirements_file.write_text("\n".join(self._requirements_list()))
-         prepare_virtualenv(
-             venv_directory=str(venv_path),
-             python_bin=f"python{self.python_version}" if self.python_version else "python",
-             system_site_packages=self.system_site_packages,
-             requirements_file_path=str(requirements_file),
-             pip_install_options=self.pip_install_options,
-             index_urls=self.index_urls,
-         )
-
-     def _calculate_cache_hash(self, exclude_cloudpickle: bool = False) -> tuple[str, str]:
-         """
-         Generate the hash of the cache folder to use.
-
-         The following factors are used as input for the hash:
-         - (sorted) list of requirements
-         - pip install options
-         - flag of system site packages
-         - python version
-         - Variable to override the hash with a cache key
-         - Index URLs
-
-         Returns a hash and the data dict which is the base for the hash as text.
-         """
-         hash_dict = {
-             "requirements_list": self._requirements_list(exclude_cloudpickle=exclude_cloudpickle),
-             "pip_install_options": self.pip_install_options,
-             "index_urls": self.index_urls,
-             "cache_key": str(Variable.get("PythonVirtualenvOperator.cache_key", "")),
-             "python_version": self.python_version,
-             "system_site_packages": self.system_site_packages,
-         }
-         hash_text = json.dumps(hash_dict, sort_keys=True)
-         hash_object = hashlib_wrapper.md5(hash_text.encode())
-         requirements_hash = hash_object.hexdigest()
-         return requirements_hash[:8], hash_text
-
-     def _ensure_venv_cache_exists(self, venv_cache_path: Path) -> Path:
-         """Ensure a valid virtual environment is set up, creating it in place if needed."""
-         cache_hash, hash_data = self._calculate_cache_hash()
-         venv_path = venv_cache_path / f"venv-{cache_hash}"
-         self.log.info("Python virtual environment will be cached in %s", venv_path)
-         venv_path.parent.mkdir(parents=True, exist_ok=True)
-         with open(f"{venv_path}.lock", "w") as f:
-             # Ensure that cache is not built by parallel workers
-             import fcntl
-
-             fcntl.flock(f, fcntl.LOCK_EX)
-
-             hash_marker = venv_path / "install_complete_marker.json"
-             try:
-                 if venv_path.exists():
-                     if hash_marker.exists():
-                         previous_hash_data = hash_marker.read_text(encoding="utf8")
-                         if previous_hash_data == hash_data:
-                             self.log.info("Re-using cached Python virtual environment in %s", venv_path)
-                             return venv_path
-
-                         _, hash_data_before_upgrade = self._calculate_cache_hash(exclude_cloudpickle=True)
-                         if previous_hash_data == hash_data_before_upgrade:
-                             self.log.warning(
-                                 "Found a previous virtual environment with outdated dependencies in %s, "
-                                 "deleting and re-creating.",
-                                 venv_path,
-                             )
-                         else:
-                             self.log.error(
-                                 "Unicorn alert: Found a previous virtual environment in %s "
-                                 "with the same hash but different parameters. Previous setup: '%s' / "
-                                 "Requested venv setup: '%s'. Please report a bug to airflow!",
-                                 venv_path,
-                                 previous_hash_data,
-                                 hash_data,
-                             )
-                     else:
-                         self.log.warning(
-                             "Found a previous (probably partially installed) virtual environment in %s, "
-                             "deleting and re-creating.",
-                             venv_path,
-                         )
-
-                     shutil.rmtree(venv_path)
-
-                 venv_path.mkdir(parents=True)
-                 self._prepare_venv(venv_path)
-                 hash_marker.write_text(hash_data, encoding="utf8")
-             except Exception as e:
-                 shutil.rmtree(venv_path)
-                 raise AirflowException(f"Unable to create new virtual environment in {venv_path}") from e
-             self.log.info("New Python virtual environment created in %s", venv_path)
-             return venv_path
-
-     def execute_callable(self):
-         if self.venv_cache_path:
-             venv_path = self._ensure_venv_cache_exists(Path(self.venv_cache_path))
-             python_path = venv_path / "bin" / "python"
-             return self._execute_python_callable_in_subprocess(python_path)
-
-         with TemporaryDirectory(prefix="venv") as tmp_dir:
-             tmp_path = Path(tmp_dir)
-             self._prepare_venv(tmp_path)
-             python_path = tmp_path / "bin" / "python"
-             result = self._execute_python_callable_in_subprocess(python_path)
-             return result
-
-     def _iter_serializable_context_keys(self):
-         yield from self.BASE_SERIALIZABLE_CONTEXT_KEYS
-         if self.system_site_packages or "apache-airflow" in self.requirements:
-             yield from self.AIRFLOW_SERIALIZABLE_CONTEXT_KEYS
-             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
-         elif "pendulum" in self.requirements:
-             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
-
-
- class BranchPythonVirtualenvOperator(PythonVirtualenvOperator, BranchMixIn):
-     """
-     A workflow can "branch" or follow a path after the execution of this task in a virtual environment.
-
-     It derives the PythonVirtualenvOperator and expects a Python function that returns
-     a single task_id, a single task_group_id, or a list of task_ids and/or
-     task_group_ids to follow. The task_id(s) and/or task_group_id(s) returned
-     should point to a task or task group directly downstream from {self}. All
-     other "branches" or directly downstream tasks are marked with a state of
-     ``skipped`` so that these paths can't move forward. The ``skipped`` states
-     are propagated downstream to allow for the DAG state to fill up and
-     the DAG run's state to be inferred.
-
-     .. seealso::
-         For more information on how to use this operator, take a look at the guide:
-         :ref:`howto/operator:BranchPythonVirtualenvOperator`
-     """
-
-     def execute(self, context: Context) -> Any:
-         return self.do_branch(context, super().execute(context))
-
-
- class ExternalPythonOperator(_BasePythonVirtualenvOperator):
-     """
-     Run a function in a virtualenv that is not re-created.
-
-     Reused as is without the overhead of creating the virtual environment (with certain caveats).
-
-     The function must be defined using def, and not be
-     part of a class. All imports must happen inside the function
-     and no variables outside the scope may be referenced. A global scope
-     variable named virtualenv_string_args will be available (populated by
-     string_args). In addition, one can pass stuff through op_args and op_kwargs, and one
-     can use a return value.
-     Note that if your virtual environment runs in a different Python major version than Airflow,
-     you cannot use return values, op_args, op_kwargs, or use any macros that are being provided to
-     Airflow through plugins. You can use string_args though.
-
-     If Airflow is installed in the external environment in a different version than the version
-     used by the operator, the operator will fail.
-
-     .. seealso::
-         For more information on how to use this operator, take a look at the guide:
-         :ref:`howto/operator:ExternalPythonOperator`
-
-     :param python: Full path string (file-system specific) that points to a Python binary inside
-         a virtual environment that should be used (in ``VENV/bin`` folder). Should be an absolute path
-         (so it usually starts with "/" or "X:/" depending on the filesystem/os used).
-     :param python_callable: A python function with no references to outside variables,
-         defined with def, which will be run in a virtual environment.
-     :param serializer: Which serializer to use to serialize the args and result. It can be one of the following:
-
-         - ``"pickle"``: (default) Use pickle for serialization. Included in the Python Standard Library.
-         - ``"cloudpickle"``: Use cloudpickle to serialize more complex types;
-           this requires including cloudpickle in your requirements.
-         - ``"dill"``: Use dill to serialize more complex types;
-           this requires including dill in your requirements.
-     :param op_args: A list of positional arguments to pass to python_callable.
-     :param op_kwargs: A dict of keyword arguments to pass to python_callable.
-     :param string_args: Strings that are present in the global var virtualenv_string_args,
-         available to python_callable at runtime as a list[str]. Note that args are split
-         by newline.
-     :param templates_dict: a dictionary where the values are templates that
-         will get templated by the Airflow engine sometime between
-         ``__init__`` and ``execute`` takes place and are made available
-         in your callable's context after the template has been applied
-     :param templates_exts: a list of file extensions to resolve while
-         processing templated fields, for example ``['.sql', '.hql']``
-     :param expect_airflow: expect Airflow to be installed in the target environment. If true, the operator
-         will raise a warning if Airflow is not installed, and it will attempt to load Airflow
-         macros when starting.
-     :param skip_on_exit_code: If python_callable exits with this exit code, leave the task
-         in ``skipped`` state (default: None). If set to ``None``, any non-zero
-         exit code will be treated as a failure.
-     :param env_vars: A dictionary containing additional environment variables to set for the virtual
-         environment when it is executed.
-     :param inherit_env: Whether to inherit the current environment variables when executing the virtual
-         environment. If set to ``True``, the virtual environment will inherit the environment variables
-         of the parent process (``os.environ``). If set to ``False``, the virtual environment will be
-         executed with a clean environment.
-     :param use_airflow_context: Whether to provide ``get_current_context()`` to the python_callable.
-         NOT YET IMPLEMENTED - waits for AIP-72 context serialization.
-     """
-
-     template_fields: Sequence[str] = tuple({"python"}.union(PythonOperator.template_fields))
-
-     def __init__(
-         self,
-         *,
-         python: str,
-         python_callable: Callable,
-         serializer: _SerializerTypeDef | None = None,
-         op_args: Collection[Any] | None = None,
-         op_kwargs: Mapping[str, Any] | None = None,
-         string_args: Iterable[str] | None = None,
-         templates_dict: dict | None = None,
-         templates_exts: list[str] | None = None,
-         expect_airflow: bool = True,
-         expect_pendulum: bool = False,
-         skip_on_exit_code: int | Container[int] | None = None,
-         env_vars: dict[str, str] | None = None,
-         inherit_env: bool = True,
-         use_airflow_context: bool = False,
-         **kwargs,
-     ):
-         if not python:
-             raise ValueError("Python Path must be defined in ExternalPythonOperator")
-         if use_airflow_context and not expect_airflow:
-             raise AirflowException(
-                 "The `use_airflow_context` parameter is set to True, but expect_airflow is set to False."
-             )
-         # TODO: remove when context serialization is implemented in AIP-72
-         if use_airflow_context:
-             raise AirflowException(
-                 "The `use_airflow_context=True` is not yet implemented. "
-                 "It will work in Airflow 3 after AIP-72 context "
-                 "serialization is ready."
-             )
-         self.python = python
-         self.expect_pendulum = expect_pendulum
-         super().__init__(
-             python_callable=python_callable,
-             serializer=serializer,
-             op_args=op_args,
-             op_kwargs=op_kwargs,
-             string_args=string_args,
-             templates_dict=templates_dict,
-             templates_exts=templates_exts,
-             expect_airflow=expect_airflow,
-             skip_on_exit_code=skip_on_exit_code,
-             env_vars=env_vars,
-             inherit_env=inherit_env,
-             use_airflow_context=use_airflow_context,
-             **kwargs,
-         )
-
-     def execute_callable(self):
-         python_path = Path(self.python)
-         if not python_path.exists():
-             raise ValueError(f"Python Path '{python_path}' must exist")
-         if not python_path.is_file():
-             raise ValueError(f"Python Path '{python_path}' must be a file")
-         if not python_path.is_absolute():
-             raise ValueError(f"Python Path '{python_path}' must be an absolute path.")
-         python_version = _PythonVersionInfo.from_executable(self.python)
-         if python_version.major != sys.version_info.major and (self.op_args or self.op_kwargs):
-             raise AirflowException(
-                 "Passing op_args or op_kwargs is not supported across different Python "
-                 "major versions for ExternalPythonOperator. Please use string_args. "
-                 f"Sys version: {sys.version_info}. "
-                 f"Virtual environment version: {python_version}"
-             )
-         return self._execute_python_callable_in_subprocess(python_path)
-
-     def _iter_serializable_context_keys(self):
-         yield from self.BASE_SERIALIZABLE_CONTEXT_KEYS
-         if self._get_airflow_version_from_target_env():
-             yield from self.AIRFLOW_SERIALIZABLE_CONTEXT_KEYS
-             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
-         elif self._is_pendulum_installed_in_target_env():
-             yield from self.PENDULUM_SERIALIZABLE_CONTEXT_KEYS
-
-     def _is_pendulum_installed_in_target_env(self) -> bool:
-         try:
-             subprocess.check_call([self.python, "-c", "import pendulum"])
-             return True
-         except Exception as e:
-             if self.expect_pendulum:
-                 self.log.warning("When checking for Pendulum installed in virtual environment got %s", e)
-                 self.log.warning(
-                     "Pendulum is not properly installed in the virtual environment. "
-                     "Pendulum context keys will not be available. "
-                     "Please install Pendulum or Airflow in your virtual environment to access them."
-                 )
-             return False
-
-     @property
-     def _external_airflow_version_script(self):
-         """
-         Return a python script which determines the version of Apache Airflow.
-
-         Importing airflow as a module might take a while; as a result,
-         obtaining a version would take up to 1 second.
-         On the other hand, `importlib.metadata.version` will retrieve the package version pretty fast,
-         something below 100ms; this includes the new subprocess overhead.
-
-         Possible side effect: it might be that neither `importlib.metadata` (Python < 3.8) nor the
-         backport `importlib_metadata` is available, which might indicate that the venv doesn't contain
-         `apache-airflow` or that something is wrong with the environment.
-         """
-         return textwrap.dedent(
-             """
-             try:
-                 from importlib.metadata import version
-             except ImportError:
-                 from importlib_metadata import version
-             print(version("apache-airflow"))
-             """
-         )
-
-     def _get_airflow_version_from_target_env(self) -> str | None:
-         from airflow import __version__ as airflow_version
-
-         try:
-             result = subprocess.check_output(
-                 [self.python, "-c", self._external_airflow_version_script],
-                 text=True,
-             )
-             target_airflow_version = result.strip()
-             if target_airflow_version != airflow_version:
-                 raise AirflowConfigException(
-                     f"The version of Airflow installed for the {self.python} "
-                     f"({target_airflow_version}) is different than the runtime Airflow version: "
-                     f"{airflow_version}. Make sure your environment has the same Airflow version "
-                     f"installed as the Airflow runtime."
-                 )
-             return target_airflow_version
-         except Exception as e:
-             if self.expect_airflow:
-                 self.log.warning("When checking for Airflow installed in virtual environment got %s", e)
-                 self.log.warning(
-                     "This means that Airflow is not properly installed by %s. "
-                     "Airflow context keys will not be available. "
-                     "Please install Airflow %s in your environment to access them.",
-                     self.python,
-                     airflow_version,
-                 )
-             return None
-
-
- class BranchExternalPythonOperator(ExternalPythonOperator, BranchMixIn):
-     """
-     A workflow can "branch" or follow a path after the execution of this task.
-
-     Extends ExternalPythonOperator, so it expects the path to a Python binary inside a
-     virtual environment that should be used (in the ``VENV/bin`` folder). It should be an
-     absolute path, so the task can run in a separate virtual environment, similarly to
-     ExternalPythonOperator.
-
-     .. seealso::
-         For more information on how to use this operator, take a look at the guide:
-         :ref:`howto/operator:BranchExternalPythonOperator`
-     """
-
-     def execute(self, context: Context) -> Any:
-         return self.do_branch(context, super().execute(context))
-
-
- def get_current_context() -> Context:
-     """
-     Retrieve the execution context dictionary without altering the user method's signature.
-
-     This is the simplest method of retrieving the execution context dictionary.
-
-     **Old style:**
-
-     .. code:: python
-
-         def my_task(**context):
-             ti = context["ti"]
-
-     **New style:**
-
-     .. code:: python
-
-         from airflow.providers.standard.operators.python import get_current_context
-
-
-         def my_task():
-             context = get_current_context()
-             ti = context["ti"]
-
-     Current context will only have a value if this method was called after an operator
-     started to execute.
-     """
-     if not _CURRENT_CONTEXT:
-         raise AirflowException(
-             "Current context was requested but no context was found! "
-             "Are you running within an airflow task?"
-         )
-     return _CURRENT_CONTEXT[-1]
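
For orientation, the docstrings in the removed module above describe two ways a callable receives the Airflow context: via ``**kwargs`` and via ``get_current_context()``. The following is a minimal sketch of that usage, assuming an environment where this provider's module is importable (as it was in 0.0.3rc2); the DAG id, schedule, and callables are illustrative, not part of the package:

    # Minimal sketch; DAG id, schedule, and callable names are illustrative only.
    from __future__ import annotations

    import datetime

    from airflow import DAG
    from airflow.providers.standard.operators.python import PythonOperator, get_current_context


    def print_run_id(**kwargs):
        # "Old style": context entries such as "run_id" and "ti" arrive as kwargs,
        # per the PythonOperator docstring above.
        print(kwargs["run_id"])


    def use_current_context():
        # "New style": fetch the context explicitly, per the get_current_context() docstring.
        context = get_current_context()
        print(context["ti"].task_id)


    with DAG(dag_id="python_sketch", start_date=datetime.datetime(2024, 1, 1), schedule=None):
        first = PythonOperator(task_id="print_run_id", python_callable=print_run_id)
        second = PythonOperator(task_id="use_context", python_callable=use_current_context)
        first >> second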
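
The removed ``PythonVirtualenvOperator`` docstring also spells out the serializer choices and the rule that all imports must happen inside the callable. A minimal sketch under the same assumptions (the pinned requirement and callable body are illustrative):

    from airflow.providers.standard.operators.python import PythonVirtualenvOperator


    def callable_in_venv():
        # All imports must happen inside the function, per the docstring above.
        import colorama

        return colorama.__name__


    # Intended to be declared inside a "with DAG(...):" block, as in the sketch above.
    venv_task = PythonVirtualenvOperator(
        task_id="run_in_venv",
        python_callable=callable_in_venv,
        requirements=["colorama==0.4.6"],
        system_site_packages=False,
        serializer="pickle",  # the default; "dill"/"cloudpickle" must appear in requirements
    )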