apache-airflow-providers-openlineage 1.8.0rc1__py3-none-any.whl → 1.9.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-openlineage might be problematic.

@@ -20,20 +20,27 @@ from __future__ import annotations
 import datetime
 import json
 import logging
-from contextlib import suppress
+import re
+from contextlib import redirect_stdout, suppress
 from functools import wraps
+from io import StringIO
 from typing import TYPE_CHECKING, Any, Iterable
 
 import attrs
 from deprecated import deprecated
 from openlineage.client.utils import RedactMixin
+from packaging.version import Version
 
+from airflow import __version__ as AIRFLOW_VERSION
 from airflow.exceptions import AirflowProviderDeprecationWarning  # TODO: move this maybe to Airflow's logic?
 from airflow.models import DAG, BaseOperator, MappedOperator
 from airflow.providers.openlineage import conf
 from airflow.providers.openlineage.plugins.facets import (
+    AirflowJobFacet,
     AirflowMappedTaskRunFacet,
     AirflowRunFacet,
+    AirflowStateRunFacet,
+    BaseFacet,
     UnknownOperatorAttributeRunFacet,
     UnknownOperatorInstance,
 )
@@ -41,6 +48,7 @@ from airflow.providers.openlineage.utils.selective_enable import (
     is_dag_lineage_enabled,
     is_task_lineage_enabled,
 )
+from airflow.serialization.serialized_objects import SerializedBaseOperator
 from airflow.utils.context import AirflowContextDeprecationWarning
 from airflow.utils.log.secrets_masker import Redactable, Redacted, SecretsMasker, should_hide_value_for_key
 from airflow.utils.module_loading import import_string
@@ -51,6 +59,7 @@ if TYPE_CHECKING:
 
 log = logging.getLogger(__name__)
 _NOMINAL_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
+_IS_AIRFLOW_2_10_OR_HIGHER = Version(Version(AIRFLOW_VERSION).base_version) >= Version("2.10.0")
 
 
 def try_import_from_string(string: str) -> Any:
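
The ``_IS_AIRFLOW_2_10_OR_HIGHER`` flag added above leans on ``packaging``'s ``base_version``, which drops pre-release suffixes before comparing, so 2.10 release candidates already count as 2.10+. A minimal illustrative sketch (not part of the package)::

    from packaging.version import Version

    for raw in ("2.9.2", "2.10.0rc1", "2.10.1"):
        # base_version turns "2.10.0rc1" into "2.10.0"; a plain comparison would
        # treat the rc as older than 2.10.0 and the gate would stay off.
        print(raw, Version(Version(raw).base_version) >= Version("2.10.0"))
    # 2.9.2 False / 2.10.0rc1 True / 2.10.1 True
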
@@ -78,7 +87,11 @@ def get_custom_facets(task_instance: TaskInstance | None = None) -> dict[str, An
 
 
 def get_fully_qualified_class_name(operator: BaseOperator | MappedOperator) -> str:
-    return operator.__class__.__module__ + "." + operator.__class__.__name__
+    if isinstance(operator, (MappedOperator, SerializedBaseOperator)):
+        # as in airflow.api_connexion.schemas.common_schema.ClassReferenceSchema
+        return operator._task_module + "." + operator._task_type  # type: ignore
+    op_class = get_operator_class(operator)
+    return op_class.__module__ + "." + op_class.__name__
 
 
 def is_operator_disabled(operator: BaseOperator | MappedOperator) -> bool:
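
For a mapped task, ``type(operator)`` is ``MappedOperator`` rather than the wrapped operator class, which is why the new branch reads the serialized ``_task_module``/``_task_type`` attributes. A hedged sketch of the difference (assumes Airflow plus this provider are installed; the DAG and task ids below are made up for illustration)::

    import datetime

    from airflow.models.dag import DAG
    from airflow.operators.bash import BashOperator
    from airflow.providers.openlineage.utils.utils import get_fully_qualified_class_name

    with DAG(dag_id="example_dag", start_date=datetime.datetime(2024, 1, 1)):
        plain = BashOperator(task_id="plain", bash_command="echo 1")
        mapped = BashOperator.partial(task_id="mapped").expand(bash_command=["echo 1", "echo 2"])

    print(type(mapped).__name__)                   # MappedOperator
    print(get_fully_qualified_class_name(plain))   # airflow.operators.bash.BashOperator
    print(get_fully_qualified_class_name(mapped))  # airflow.operators.bash.BashOperator
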
@@ -135,6 +148,8 @@ class InfoJsonEncodable(dict):
     def _cast_basic_types(value):
         if isinstance(value, datetime.datetime):
             return value.isoformat()
+        if isinstance(value, datetime.timedelta):
+            return f"{value.total_seconds()} seconds"
         if isinstance(value, (set, list, tuple)):
             return str(list(value))
         return value
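
The new ``datetime.timedelta`` branch renders durations as a seconds string; a quick standalone illustration of the cast::

    import datetime

    value = datetime.timedelta(minutes=5, seconds=30)
    print(f"{value.total_seconds()} seconds")  # 330.0 seconds
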
@@ -168,7 +183,7 @@ class InfoJsonEncodable(dict):
 class DagInfo(InfoJsonEncodable):
     """Defines encoding DAG object to JSON."""
 
-    includes = ["dag_id", "schedule_interval", "tags", "start_date"]
+    includes = ["dag_id", "description", "owner", "schedule_interval", "start_date", "tags"]
     casts = {"timetable": lambda dag: dag.timetable.serialize() if getattr(dag, "timetable", None) else None}
     renames = {"_dag_id": "dag_id"}
 
@@ -191,11 +206,11 @@ class DagRunInfo(InfoJsonEncodable):
 class TaskInstanceInfo(InfoJsonEncodable):
     """Defines encoding TaskInstance object to JSON."""
 
-    includes = ["duration", "try_number", "pool"]
+    includes = ["duration", "try_number", "pool", "queued_dttm"]
     casts = {
-        "map_index": lambda ti: ti.map_index
-        if hasattr(ti, "map_index") and getattr(ti, "map_index") != -1
-        else None
+        "map_index": lambda ti: (
+            ti.map_index if hasattr(ti, "map_index") and getattr(ti, "map_index") != -1 else None
+        )
     }
 
 
@@ -225,6 +240,7 @@ class TaskInfo(InfoJsonEncodable):
         "retries",
         "retry_exponential_backoff",
         "run_as_user",
+        "sla",
         "task_id",
         "trigger_rule",
         "upstream_task_ids",
@@ -234,9 +250,11 @@ class TaskInfo(InfoJsonEncodable):
     ]
     casts = {
         "operator_class": lambda task: task.task_type,
-        "task_group": lambda task: TaskGroupInfo(task.task_group)
-        if hasattr(task, "task_group") and getattr(task.task_group, "_group_id", None)
-        else None,
+        "task_group": lambda task: (
+            TaskGroupInfo(task.task_group)
+            if hasattr(task, "task_group") and getattr(task.task_group, "_group_id", None)
+            else None
+        ),
     }
 
 
@@ -262,20 +280,158 @@ def get_airflow_run_facet(
     task_instance: TaskInstance,
     task: BaseOperator,
     task_uuid: str,
-):
+) -> dict[str, BaseFacet]:
     return {
-        "airflow": attrs.asdict(
-            AirflowRunFacet(
-                dag=DagInfo(dag),
-                dagRun=DagRunInfo(dag_run),
-                taskInstance=TaskInstanceInfo(task_instance),
-                task=TaskInfo(task),
-                taskUuid=task_uuid,
-            )
+        "airflow": AirflowRunFacet(
+            dag=DagInfo(dag),
+            dagRun=DagRunInfo(dag_run),
+            taskInstance=TaskInstanceInfo(task_instance),
+            task=TaskInfo(task),
+            taskUuid=task_uuid,
+        )
+    }
+
+
+def get_airflow_job_facet(dag_run: DagRun) -> dict[str, BaseFacet]:
+    if not dag_run.dag:
+        return {}
+    return {
+        "airflow": AirflowJobFacet(
+            taskTree=_get_parsed_dag_tree(dag_run.dag),
+            taskGroups=_get_task_groups_details(dag_run.dag),
+            tasks=_get_tasks_details(dag_run.dag),
+        )
+    }
+
+
+def get_airflow_state_run_facet(dag_run: DagRun) -> dict[str, BaseFacet]:
+    return {
+        "airflowState": AirflowStateRunFacet(
+            dagRunState=dag_run.get_state(),
+            tasksState={ti.task_id: ti.state for ti in dag_run.get_task_instances()},
         )
     }
 
 
+def _safe_get_dag_tree_view(dag: DAG) -> list[str]:
+    # get_tree_view() has been added in Airflow 2.8.2
+    if hasattr(dag, "get_tree_view"):
+        return dag.get_tree_view().splitlines()
+
+    with redirect_stdout(StringIO()) as stdout:
+        dag.tree_view()
+        return stdout.getvalue().splitlines()
+
+
+def _get_parsed_dag_tree(dag: DAG) -> dict:
+    """
+    Get DAG's tasks hierarchy representation.
+
+    While the task dependencies are defined as following:
+        task >> [task_2, task_4] >> task_7
+        task_3 >> task_5
+        task_6  # has no dependencies, it's a root and a leaf
+
+    The result of this function will look like:
+        {
+            "task": {
+                "task_2": {
+                    "task_7": {}
+                },
+                "task_4": {
+                    "task_7": {}
+                }
+            },
+            "task_3": {
+                "task_5": {}
+            },
+            "task_6": {}
+        }
+    """
+    lines = _safe_get_dag_tree_view(dag)
+    task_dict: dict[str, dict] = {}
+    parent_map: dict[int, tuple[str, dict]] = {}
+
+    for line in lines:
+        stripped_line = line.strip()
+        if not stripped_line:
+            continue
+
+        # Determine the level by counting the leading spaces, assuming 4 spaces per level
+        # as defined in airflow.models.dag.DAG._generate_tree_view()
+        level = (len(line) - len(stripped_line)) // 4
+        # airflow.models.baseoperator.BaseOperator.__repr__ is used in DAG tree
+        # <Task({op_class}): {task_id}>
+        match = re.match(r"^<Task\((.+)\): (.*?)>$", stripped_line)
+        if not match:
+            return {}
+        current_task_id = match[2]
+
+        if level == 0:  # It's a root task
+            task_dict[current_task_id] = {}
+            parent_map[level] = (current_task_id, task_dict[current_task_id])
+        else:
+            # Find the immediate parent task
+            parent_task, parent_dict = parent_map[(level - 1)]
+            # Create new dict for the current task
+            parent_dict[current_task_id] = {}
+            # Update this task in the parent map
+            parent_map[level] = (current_task_id, parent_dict[current_task_id])
+
+    return task_dict
+
+
+def _get_tasks_details(dag: DAG) -> dict:
+    tasks = {
+        single_task.task_id: {
+            "operator": get_fully_qualified_class_name(single_task),
+            "task_group": single_task.task_group.group_id if single_task.task_group else None,
+            "emits_ol_events": _emits_ol_events(single_task),
+            "ui_color": single_task.ui_color,
+            "ui_fgcolor": single_task.ui_fgcolor,
+            "ui_label": single_task.label,
+            "is_setup": single_task.is_setup,
+            "is_teardown": single_task.is_teardown,
+        }
+        for single_task in dag.tasks
+    }
+
+    return tasks
+
+
+def _get_task_groups_details(dag: DAG) -> dict:
+    return {
+        tg_id: {
+            "parent_group": tg.parent_group.group_id,
+            "tooltip": tg.tooltip,
+            "ui_color": tg.ui_color,
+            "ui_fgcolor": tg.ui_fgcolor,
+            "ui_label": tg.label,
+        }
+        for tg_id, tg in dag.task_group_dict.items()
+    }
+
+
+def _emits_ol_events(task: BaseOperator | MappedOperator) -> bool:
+    config_selective_enabled = is_selective_lineage_enabled(task)
+    config_disabled_for_operators = is_operator_disabled(task)
+    # empty operators without callbacks/outlets are skipped for optimization by Airflow
+    # in airflow.models.taskinstance.TaskInstance._schedule_downstream_tasks
+    is_skipped_as_empty_operator = all(
+        (
+            task.inherits_from_empty_operator,
+            not task.on_execute_callback,
+            not task.on_success_callback,
+            not task.outlets,
+        )
+    )
+
+    emits_ol_events = all(
+        (config_selective_enabled, not config_disabled_for_operators, not is_skipped_as_empty_operator)
+    )
+    return emits_ol_events
+
+
 def get_unknown_source_attribute_run_facet(task: BaseOperator, name: str | None = None):
     if not name:
         name = get_operator_class(task).__name__
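
``_get_parsed_dag_tree`` above parses the plain-text tree view Airflow prints for a DAG, where each task line renders as ``<Task({op_class}): {task_id}>`` indented by 4 spaces per nesting level. A standalone sketch of that parsing step on hand-written input (illustrative only, not part of the diff)::

    import re

    lines = [
        "<Task(BashOperator): task>",
        "    <Task(BashOperator): task_2>",
        "        <Task(BashOperator): task_7>",
        "<Task(EmptyOperator): task_6>",
    ]
    for line in lines:
        stripped = line.strip()
        level = (len(line) - len(stripped)) // 4  # 4 spaces of indent per level
        match = re.match(r"^<Task\((.+)\): (.*?)>$", stripped)
        print(level, match[1], match[2])
    # 0 BashOperator task / 1 BashOperator task_2 / 2 BashOperator task_7 / 0 EmptyOperator task_6
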
@@ -405,5 +561,7 @@ def normalize_sql(sql: str | Iterable[str]):
 
 
 def should_use_external_connection(hook) -> bool:
-    # TODO: Add checking overrides
-    return hook.__class__.__name__ not in ["SnowflakeHook", "SnowflakeSqlApiHook"]
+    # If we're at Airflow 2.10, the execution is process-isolated, so we can safely run those again.
+    if not _IS_AIRFLOW_2_10_OR_HIGHER:
+        return hook.__class__.__name__ not in ["SnowflakeHook", "SnowflakeSqlApiHook", "RedshiftSQLHook"]
+    return True
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-openlineage
-Version: 1.8.0rc1
+Version: 1.9.0rc1
 Summary: Provider package apache-airflow-providers-openlineage for Apache Airflow
 Keywords: airflow-provider,openlineage,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -24,12 +24,12 @@ Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow-providers-common-sql>=1.6.0rc0
 Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: attrs>=22.2
-Requires-Dist: openlineage-integration-common>=0.28.0
-Requires-Dist: openlineage-python>=0.28.0
+Requires-Dist: openlineage-integration-common>=1.16.0
+Requires-Dist: openlineage-python>=1.16.0
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.8.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.8.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.9.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.9.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -80,7 +80,7 @@ Provides-Extra: common.sql
 
 Package ``apache-airflow-providers-openlineage``
 
-Release: ``1.8.0.rc1``
+Release: ``1.9.0.rc1``
 
 
 `OpenLineage <https://openlineage.io/>`__
@@ -93,7 +93,7 @@ This is a provider package for ``openlineage`` provider. All classes for this pr
 are in ``airflow.providers.openlineage`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.8.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.9.0/>`_.
 
 Installation
 ------------
@@ -113,8 +113,8 @@ PIP package Version required
 ``apache-airflow``                      ``>=2.7.0``
 ``apache-airflow-providers-common-sql`` ``>=1.6.0``
 ``attrs``                               ``>=22.2``
-``openlineage-integration-common``      ``>=0.28.0``
-``openlineage-python``                  ``>=0.28.0``
+``openlineage-integration-common``      ``>=1.16.0``
+``openlineage-python``                  ``>=1.16.0``
 ======================================= ==================
 
 Cross provider package dependencies
@@ -137,4 +137,4 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.8.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/1.9.0/changelog.html>`_.
@@ -0,0 +1,28 @@
+airflow/providers/openlineage/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
+airflow/providers/openlineage/__init__.py,sha256=wyNzY61gCpS2r6Wx-q-JiM8TbOKgTbDVngFZmcVmqOc,1498
+airflow/providers/openlineage/conf.py,sha256=ZHUZF3wjFGupfFa40F1DjZSkOQJCUtWFyQM3nvz6Bq4,5099
+airflow/providers/openlineage/get_provider_info.py,sha256=v-KK3oIOhEr-LE1WE8J6MaduGde709qiUmbkOklREn4,7472
+airflow/providers/openlineage/sqlparser.py,sha256=nMUA7Ak2HNfnGZ1PybWbNPENZfQIcJ1eE8AMh9UD-O8,15653
+airflow/providers/openlineage/extractors/__init__.py,sha256=I0X4f6zUniclyD9zT0DFHRImpCpJVP4MkPJT3cd7X5I,1081
+airflow/providers/openlineage/extractors/base.py,sha256=o6z8bXwNor1hwcUzezJ8LIPynR_BqXkP-qtwEgLtD2Q,5476
+airflow/providers/openlineage/extractors/bash.py,sha256=m4hLvDV4-zX4gp8apRuhpAR3Uakr8UOUxf-thTWmOxw,2563
+airflow/providers/openlineage/extractors/manager.py,sha256=9TyszMLAsgPS9NETWq7fPJjxcbTFk47x4kd1NRGCvsw,10315
+airflow/providers/openlineage/extractors/python.py,sha256=EQXCj2aHr2XXw0pNxeX-ii8UQFCoqkdf40ozqmA5d58,3151
+airflow/providers/openlineage/facets/AirflowJobFacet.json,sha256=rS9PuPWOi1Jc5B4a5qLxS_Az7Q9Eb3jVYQnN41iXDC0,1187
+airflow/providers/openlineage/facets/AirflowRunFacet.json,sha256=Mk85_KKwBOEcn_ptiQuoo9ucP-coAHfkAnilws2XMl4,5867
+airflow/providers/openlineage/facets/AirflowStateRunFacet.json,sha256=xhHQEKD9Jopw-oqbkCCrrwFjfXnxvuJAritsmegKjuQ,937
+airflow/providers/openlineage/facets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/openlineage/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/openlineage/plugins/adapter.py,sha256=nm8muyX-CwBaCNy_YDYbz-iFNL4msVZX6gFb4EaI-YE,18552
+airflow/providers/openlineage/plugins/facets.py,sha256=t60uCyp-FZKAeO7PSom5eAbn0cilN_Mfciip4FWG22M,4445
+airflow/providers/openlineage/plugins/listener.py,sha256=dk7omQItGSLcVw6bK56ScxLTnYQyQa1bXz2Fw402cjA,17956
+airflow/providers/openlineage/plugins/macros.py,sha256=hgFA3ZdQibyn4KXIOsKYBm4WRKDLA5q6Asscx5rvNfM,3076
+airflow/providers/openlineage/plugins/openlineage.py,sha256=rsRUW_zpXVAglzsgQRv5T9VWYY7CMQl0qRWm8-3oqDA,1678
+airflow/providers/openlineage/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/openlineage/utils/selective_enable.py,sha256=JVTmXdQknBL-9N0drFDkVMf1HCf8C6nbITVaP4-5ba4,3072
+airflow/providers/openlineage/utils/sql.py,sha256=yNBGMnWMECHtbpTh5nlaQh01O__LWkeQaSqrOVRWz6Y,9524
+airflow/providers/openlineage/utils/utils.py,sha256=QBg4B-isDrizb53cQFOflFx8LVKiabwE08I70I0AxOY,19485
+apache_airflow_providers_openlineage-1.9.0rc1.dist-info/entry_points.txt,sha256=GAx0_i2OeZzqaiiiYuA-xchICDXiCT5kVqpKSxsOjt4,214
+apache_airflow_providers_openlineage-1.9.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_openlineage-1.9.0rc1.dist-info/METADATA,sha256=kamPdoPzxHZpb788jJ_RhWSQUgR1U76rKpRnraIm5Qg,6381
+apache_airflow_providers_openlineage-1.9.0rc1.dist-info/RECORD,,
@@ -1,24 +0,0 @@
-airflow/providers/openlineage/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
-airflow/providers/openlineage/__init__.py,sha256=Y_3EiIS_TiqaVpc68HfekILHQRlsSGQLhs72joO7THg,1498
-airflow/providers/openlineage/conf.py,sha256=wozXzU5Do9S0mtjjGc5ruF556G2-ZT4GJa3YLT_-Phg,4693
-airflow/providers/openlineage/get_provider_info.py,sha256=ypUFlQXsC6s-cA7OdslwUaxhjaYIslbP_OILdb9CVNQ,7072
-airflow/providers/openlineage/sqlparser.py,sha256=-FGWWK0Xu6XkGSXcfn7PXsWIe0Y0fwe-3hivHg7emLA,15308
-airflow/providers/openlineage/extractors/__init__.py,sha256=I0X4f6zUniclyD9zT0DFHRImpCpJVP4MkPJT3cd7X5I,1081
-airflow/providers/openlineage/extractors/base.py,sha256=o6z8bXwNor1hwcUzezJ8LIPynR_BqXkP-qtwEgLtD2Q,5476
-airflow/providers/openlineage/extractors/bash.py,sha256=m4hLvDV4-zX4gp8apRuhpAR3Uakr8UOUxf-thTWmOxw,2563
-airflow/providers/openlineage/extractors/manager.py,sha256=9TyszMLAsgPS9NETWq7fPJjxcbTFk47x4kd1NRGCvsw,10315
-airflow/providers/openlineage/extractors/python.py,sha256=EQXCj2aHr2XXw0pNxeX-ii8UQFCoqkdf40ozqmA5d58,3151
-airflow/providers/openlineage/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/openlineage/plugins/adapter.py,sha256=d_HczG_nXMwAHdGD-CO4baVfCc9ROcY0zQUWIZRF_Sw,16917
-airflow/providers/openlineage/plugins/facets.py,sha256=Z6dsz0rv-3VbRaGZJxW_T7Dak2k0ROGF0YrR_1awxZ0,2644
-airflow/providers/openlineage/plugins/listener.py,sha256=LrqvvGL4nzMNeha6PVbtylxp8gru3iKrkQltuQD2k8o,14092
-airflow/providers/openlineage/plugins/macros.py,sha256=QowPc9cc_unV-NLxBwm32OmWETA9pOucWguSeK92SSc,3076
-airflow/providers/openlineage/plugins/openlineage.py,sha256=rsRUW_zpXVAglzsgQRv5T9VWYY7CMQl0qRWm8-3oqDA,1678
-airflow/providers/openlineage/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/openlineage/utils/selective_enable.py,sha256=JVTmXdQknBL-9N0drFDkVMf1HCf8C6nbITVaP4-5ba4,3072
-airflow/providers/openlineage/utils/sql.py,sha256=7tEK0zVfIe7v3NI6oyv62x0KAS3sl8Ajfhqob8MdiX8,9366
-airflow/providers/openlineage/utils/utils.py,sha256=WFMdRsuArqqrgjsmFGb_ljIWV6ry-EGkTPZzy5aiG4Q,14036
-apache_airflow_providers_openlineage-1.8.0rc1.dist-info/entry_points.txt,sha256=GAx0_i2OeZzqaiiiYuA-xchICDXiCT5kVqpKSxsOjt4,214
-apache_airflow_providers_openlineage-1.8.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_openlineage-1.8.0rc1.dist-info/METADATA,sha256=ljCFvcM-Hj_8eX0og8NVnFDxfQHfPPZYnzEa85ov8tI,6381
-apache_airflow_providers_openlineage-1.8.0rc1.dist-info/RECORD,,