apache-airflow-providers-openlineage 2.1.3rc1__py3-none-any.whl → 2.2.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-openlineage might be problematic.

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "2.1.3"
+__version__ = "2.2.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
@@ -298,12 +298,11 @@ class ExtractorManager(LoggingMixin):
 
         if isinstance(obj, Dataset):
             return obj
-        elif isinstance(obj, Table):
+        if isinstance(obj, Table):
            return ExtractorManager.convert_to_ol_dataset_from_table(obj)
-        elif isinstance(obj, File):
+        if isinstance(obj, File):
            return ExtractorManager.convert_to_ol_dataset_from_object_storage_uri(obj.url)
-        else:
-            return None
+        return None
 
     def validate_task_metadata(self, task_metadata) -> OperatorLineage | None:
         try:
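
The release repeatedly applies the same small refactor seen above: an elif/else branch that follows a return is flattened into early returns. A generic sketch of the pattern (illustrative names only, not provider code):

    # Illustrative only. Once a branch returns, elif/else add nothing, so the
    # chain can be flattened into early returns with identical behaviour.
    def to_float(obj):
        if isinstance(obj, float):
            return obj
        if isinstance(obj, int):
            return float(obj)
        if isinstance(obj, str):
            return float(obj.strip())
        return None
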
@@ -16,6 +16,7 @@
 # under the License.
 from __future__ import annotations
 
+import os
 import traceback
 from contextlib import ExitStack
 from typing import TYPE_CHECKING

@@ -36,6 +37,7 @@ from openlineage.client.facet_v2 import (
 )
 from openlineage.client.uuid import generate_static_uuid
 
+from airflow.configuration import conf as airflow_conf
 from airflow.providers.openlineage import __version__ as OPENLINEAGE_PROVIDER_VERSION, conf
 from airflow.providers.openlineage.utils.utils import (
     OpenLineageRedactor,

@@ -81,6 +83,11 @@ class OpenLineageAdapter(LoggingMixin):
 
     def get_or_create_openlineage_client(self) -> OpenLineageClient:
         if not self._client:
+            # If not already set explicitly - propagate airflow logging level to OpenLineage client
+            airflow_core_log_level = airflow_conf.get("logging", "logging_level", fallback="INFO")
+            if not os.getenv("OPENLINEAGE_CLIENT_LOGGING") and airflow_core_log_level != "INFO":
+                os.environ["OPENLINEAGE_CLIENT_LOGGING"] = airflow_core_log_level
+
             config = self.get_openlineage_config()
             if config:
                 self.log.debug(
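
The block added above is new in 2.2.0: when building the client, the adapter mirrors Airflow's core logging level into the OPENLINEAGE_CLIENT_LOGGING environment variable unless that variable is already set. A minimal standalone sketch of the same decision, with the Airflow config lookup replaced by a plain variable so it runs outside Airflow:

    import os

    # Stand-in for airflow_conf.get("logging", "logging_level", fallback="INFO").
    airflow_core_log_level = "DEBUG"

    # Propagate only when the operator has not pinned the client level already
    # and the Airflow level differs from the default INFO.
    if not os.getenv("OPENLINEAGE_CLIENT_LOGGING") and airflow_core_log_level != "INFO":
        os.environ["OPENLINEAGE_CLIENT_LOGGING"] = airflow_core_log_level

An explicitly exported OPENLINEAGE_CLIENT_LOGGING therefore still takes precedence over the propagated value.
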
@@ -102,8 +109,7 @@ class OpenLineageAdapter(LoggingMixin):
         if openlineage_config_path:
             config = self._read_yaml_config(openlineage_config_path)
             return config
-        else:
-            self.log.debug("OpenLineage config_path configuration not found.")
+        self.log.debug("OpenLineage config_path configuration not found.")
 
         # Second, try to get transport config
         transport_config = conf.transport()
@@ -28,9 +28,9 @@ from setproctitle import getproctitle, setproctitle
 
 from airflow import settings
 from airflow.listeners import hookimpl
-from airflow.models import DagRun
+from airflow.models import DagRun, TaskInstance
 from airflow.providers.openlineage import conf
-from airflow.providers.openlineage.extractors import ExtractorManager
+from airflow.providers.openlineage.extractors import ExtractorManager, OperatorLineage
 from airflow.providers.openlineage.plugins.adapter import OpenLineageAdapter, RunState
 from airflow.providers.openlineage.utils.utils import (
     AIRFLOW_V_2_10_PLUS,

@@ -53,7 +53,6 @@ from airflow.utils.state import TaskInstanceState
 from airflow.utils.timeout import timeout
 
 if TYPE_CHECKING:
-    from airflow.models import TaskInstance
     from airflow.sdk.execution_time.task_runner import RuntimeTaskInstance
     from airflow.settings import Session
 
@@ -235,10 +234,18 @@ class OpenLineageListener:
 
     @hookimpl
     def on_task_instance_success(
-        self, previous_state: TaskInstanceState, task_instance: RuntimeTaskInstance
+        self, previous_state: TaskInstanceState, task_instance: RuntimeTaskInstance | TaskInstance
     ) -> None:
         self.log.debug("OpenLineage listener got notification about task instance success")
 
+        if isinstance(task_instance, TaskInstance):
+            self._on_task_instance_manual_state_change(
+                ti=task_instance,
+                dagrun=task_instance.dag_run,
+                ti_state=TaskInstanceState.SUCCESS,
+            )
+            return
+
         context = task_instance.get_template_context()
         task = context["task"]
         if TYPE_CHECKING:

@@ -338,10 +345,20 @@ class OpenLineageListener:
     def on_task_instance_failed(
         self,
         previous_state: TaskInstanceState,
-        task_instance: TaskInstance,
+        task_instance: RuntimeTaskInstance | TaskInstance,
         error: None | str | BaseException,
     ) -> None:
         self.log.debug("OpenLineage listener got notification about task instance failure")
+
+        if isinstance(task_instance, TaskInstance):
+            self._on_task_instance_manual_state_change(
+                ti=task_instance,
+                dagrun=task_instance.dag_run,
+                ti_state=TaskInstanceState.FAILED,
+                error=error,
+            )
+            return
+
         context = task_instance.get_template_context()
         task = context["task"]
         if TYPE_CHECKING:
@@ -457,6 +474,60 @@ class OpenLineageListener:
 
         self._execute(on_failure, "on_failure", use_fork=True)
 
+    def _on_task_instance_manual_state_change(
+        self,
+        ti: TaskInstance,
+        dagrun: DagRun,
+        ti_state: TaskInstanceState,
+        error: None | str | BaseException = None,
+    ) -> None:
+        self.log.debug("`_on_task_instance_manual_state_change` was called with state: `%s`.", ti_state)
+        end_date = timezone.utcnow()
+
+        @print_warning(self.log)
+        def on_state_change():
+            date = dagrun.logical_date or dagrun.run_after
+            parent_run_id = self.adapter.build_dag_run_id(
+                dag_id=dagrun.dag_id,
+                logical_date=date,
+                clear_number=dagrun.clear_number,
+            )
+
+            task_uuid = self.adapter.build_task_instance_run_id(
+                dag_id=dagrun.dag_id,
+                task_id=ti.task_id,
+                try_number=ti.try_number,
+                logical_date=date,
+                map_index=ti.map_index,
+            )
+
+            adapter_kwargs = {
+                "run_id": task_uuid,
+                "job_name": get_job_name(ti),
+                "parent_job_name": dagrun.dag_id,
+                "parent_run_id": parent_run_id,
+                "end_time": end_date.isoformat(),
+                "task": OperatorLineage(),
+                "run_facets": get_airflow_debug_facet(),
+            }
+
+            if ti_state == TaskInstanceState.FAILED:
+                event_type = RunState.FAIL.value.lower()
+                redacted_event = self.adapter.fail_task(**adapter_kwargs, error=error)
+            elif ti_state == TaskInstanceState.SUCCESS:
+                event_type = RunState.COMPLETE.value.lower()
+                redacted_event = self.adapter.complete_task(**adapter_kwargs)
+            else:
+                raise ValueError(f"Unsupported ti_state: `{ti_state}`.")
+
+            operator_name = ti.operator.lower()
+            Stats.gauge(
+                f"ol.event.size.{event_type}.{operator_name}",
+                len(Serde.to_json(redacted_event).encode("utf-8")),
+            )
+
+        self._execute(on_state_change, "on_state_change", use_fork=True)
+
     def _execute(self, callable, callable_name: str, use_fork: bool = False):
         if use_fork:
             self._fork_execute(callable, callable_name)
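
Taken together, the listener hunks above widen the success/failure hooks to accept either a worker-side RuntimeTaskInstance or a scheduler-side airflow.models.TaskInstance, and route the latter (for example a task manually marked success or failed) to the new _on_task_instance_manual_state_change, which emits a COMPLETE or FAIL event from scheduler-side metadata alone with an empty OperatorLineage. A reduced sketch of that dispatch; is_scheduler_side_ti, emit_manual_event and run_worker_path are hypothetical stand-ins for the listener internals:

    # Reduced sketch, not provider code.
    def handle_success(task_instance, is_scheduler_side_ti, emit_manual_event, run_worker_path):
        if is_scheduler_side_ti(task_instance):
            # Manual state change: no task is running, so the event is built
            # from DagRun/TaskInstance metadata only and no extractors run.
            emit_manual_event(ti=task_instance, dagrun=task_instance.dag_run, state="success")
            return
        # Normal execution path: the RuntimeTaskInstance still goes through
        # get_template_context() and the extractor machinery as before.
        run_worker_path(task_instance)
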
@@ -111,7 +111,7 @@ def _get_transport_information_as_spark_properties() -> dict:
             props = _format_transport(props, http_transport, name)
         return props
 
-    elif transport.kind == "http":
+    if transport.kind == "http":
         return _format_transport({}, _get_transport_information(transport), None)
 
     log.info(
@@ -54,7 +54,9 @@ from airflow.providers.openlineage.version_compat import AIRFLOW_V_2_10_PLUS, AI
 from airflow.sensors.base import BaseSensorOperator
 from airflow.serialization.serialized_objects import SerializedBaseOperator
 from airflow.utils.module_loading import import_string
-from airflow.utils.session import NEW_SESSION, provide_session
+
+if not AIRFLOW_V_3_0_PLUS:
+    from airflow.utils.session import NEW_SESSION, provide_session
 
 try:
     from airflow.sdk import BaseOperator as SdkBaseOperator
@@ -195,40 +197,41 @@ def is_selective_lineage_enabled(obj: DAG | BaseOperator | MappedOperator | SdkB
         return True
     if isinstance(obj, DAG):
         return is_dag_lineage_enabled(obj)
-    elif isinstance(obj, (BaseOperator, MappedOperator, SdkBaseOperator)):
+    if isinstance(obj, (BaseOperator, MappedOperator, SdkBaseOperator)):
         return is_task_lineage_enabled(obj)
-    else:
-        raise TypeError("is_selective_lineage_enabled can only be used on DAG or Operator objects")
+    raise TypeError("is_selective_lineage_enabled can only be used on DAG or Operator objects")
 
 
-@provide_session
-def is_ti_rescheduled_already(ti: TaskInstance, session=NEW_SESSION):
-    from sqlalchemy import exists
+if not AIRFLOW_V_3_0_PLUS:
 
-    if not isinstance(ti.task, BaseSensorOperator):
-        return False
+    @provide_session
+    def is_ti_rescheduled_already(ti: TaskInstance, session=NEW_SESSION):
+        from sqlalchemy import exists
 
-    if not ti.task.reschedule:
-        return False
-    if AIRFLOW_V_3_0_PLUS:
+        if not isinstance(ti.task, BaseSensorOperator):
+            return False
+
+        if not ti.task.reschedule:
+            return False
+        if AIRFLOW_V_3_0_PLUS:
+            return (
+                session.query(
+                    exists().where(TaskReschedule.ti_id == ti.id, TaskReschedule.try_number == ti.try_number)
+                ).scalar()
+                is True
+            )
         return (
             session.query(
-                exists().where(TaskReschedule.ti_id == ti.id, TaskReschedule.try_number == ti.try_number)
+                exists().where(
+                    TaskReschedule.dag_id == ti.dag_id,
+                    TaskReschedule.task_id == ti.task_id,
+                    TaskReschedule.run_id == ti.run_id,
+                    TaskReschedule.map_index == ti.map_index,
+                    TaskReschedule.try_number == ti.try_number,
+                )
             ).scalar()
             is True
         )
-    return (
-        session.query(
-            exists().where(
-                TaskReschedule.dag_id == ti.dag_id,
-                TaskReschedule.task_id == ti.task_id,
-                TaskReschedule.run_id == ti.run_id,
-                TaskReschedule.map_index == ti.map_index,
-                TaskReschedule.try_number == ti.try_number,
-            )
-        ).scalar()
-        is True
-    )
 
 
 class InfoJsonEncodable(dict):
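
The hunk above, together with the earlier import hunk, defines is_ti_rescheduled_already (and the provide_session/NEW_SESSION import it needs) only when not AIRFLOW_V_3_0_PLUS, so the module keeps importing cleanly on Airflow 3 where the session-based helper is not used. A generic sketch of that gating pattern, with a placeholder boolean instead of the real version_compat constant:

    # Generic sketch; AIRFLOW_V_3_0_PLUS is a placeholder here, not the real
    # airflow.providers.openlineage.version_compat import.
    AIRFLOW_V_3_0_PLUS = False

    if not AIRFLOW_V_3_0_PLUS:
        # Imports and helpers that only make sense on Airflow 2.x stay behind
        # the guard, so an Airflow 3 interpreter never evaluates them.
        def legacy_only_helper() -> str:
            return "defined only when running on Airflow 2.x"
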
@@ -322,6 +325,7 @@ class DagInfo(InfoJsonEncodable):
         "description",
         "fileloc",
         "owner",
+        "owner_links",
         "schedule_interval",  # For Airflow 2.
         "timetable_summary",  # For Airflow 3.
         "start_date",
@@ -713,7 +717,7 @@ class OpenLineageRedactor(SecretsMasker):
                             ),
                         )
                     return item
-                elif is_json_serializable(item) and hasattr(item, "__dict__"):
+                if is_json_serializable(item) and hasattr(item, "__dict__"):
                     for dict_key, subval in item.__dict__.items():
                         if type(subval).__name__ == "Proxy":
                             return "<<non-redactable: Proxy>>"

@@ -729,8 +733,7 @@ class OpenLineageRedactor(SecretsMasker):
                             ),
                         )
                     return item
-                else:
-                    return super()._redact(item, name, depth, max_depth)
+                return super()._redact(item, name, depth, max_depth)
         except Exception as exc:
             log.warning("Unable to redact %r. Error was: %s: %s", item, type(exc).__name__, exc)
             return item
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-openlineage
-Version: 2.1.3rc1
+Version: 2.2.0rc1
 Summary: Provider package apache-airflow-providers-openlineage for Apache Airflow
 Keywords: airflow-provider,openlineage,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>

@@ -24,11 +24,11 @@ Requires-Dist: apache-airflow>=2.9.0rc0
 Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
 Requires-Dist: apache-airflow-providers-common-compat>=1.4.0rc0
 Requires-Dist: attrs>=22.2
-Requires-Dist: openlineage-integration-common>=1.24.2
-Requires-Dist: openlineage-python>=1.24.2
+Requires-Dist: openlineage-integration-common>=1.31.0
+Requires-Dist: openlineage-python>=1.31.0
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.1.3/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.1.3
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.2.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.2.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +59,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-openlineage``
 
-Release: ``2.1.3``
+Release: ``2.2.0``
 
 
 `OpenLineage <https://openlineage.io/>`__

@@ -72,7 +72,7 @@ This is a provider package for ``openlineage`` provider. All classes for this pr
 are in ``airflow.providers.openlineage`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.1.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.2.0/>`_.
 
 Installation
 ------------
@@ -93,8 +93,8 @@ PIP package Version required
 ``apache-airflow-providers-common-sql``    ``>=1.20.0``
 ``apache-airflow-providers-common-compat`` ``>=1.4.0``
 ``attrs``                                  ``>=22.2``
-``openlineage-integration-common``         ``>=1.24.2``
-``openlineage-python``                     ``>=1.24.2``
+``openlineage-integration-common``         ``>=1.31.0``
+``openlineage-python``                     ``>=1.31.0``
 ========================================== ==================
 
 Cross provider package dependencies
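
The minimum pins for the OpenLineage client packages move from 1.24.2 to 1.31.0. An illustrative post-upgrade check that an environment satisfies the new floors (package names as published on PyPI):

    from importlib.metadata import version

    # Print installed versions against the minimums declared above.
    for dist, minimum in [
        ("apache-airflow-providers-openlineage", "2.2.0"),
        ("openlineage-python", "1.31.0"),
        ("openlineage-integration-common", "1.31.0"),
    ]:
        print(f"{dist}: installed {version(dist)}, requires >= {minimum}")
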
@@ -118,5 +118,5 @@ Dependent package
 ================================================================================================================== =================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.1.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-openlineage/2.2.0/changelog.html>`_.
 
@@ -1,5 +1,5 @@
 airflow/providers/openlineage/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/openlineage/__init__.py,sha256=R6VkEEY6inOTMg3aCbbydjy-qIpvgikyWt7OGFyLts4,1498
+airflow/providers/openlineage/__init__.py,sha256=HaGAryTFkC-tcSplPNX4LM4SyZ52yNLtPbPcaLgk2dE,1498
 airflow/providers/openlineage/conf.py,sha256=aYdLU7iHBdGIU8ZAC5iUiIDgXP9gvP9r_z5hTAbXPOU,5535
 airflow/providers/openlineage/get_provider_info.py,sha256=108mAg-tdcTBmPYwJ9M3wcmhc-i1bAs5OB2MLnSBA-4,9256
 airflow/providers/openlineage/sqlparser.py,sha256=N38XhkU-lxwxnYevQpq63JOBi4rzp0q56JjxO3H24W8,20340
@@ -7,7 +7,7 @@ airflow/providers/openlineage/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNl
 airflow/providers/openlineage/extractors/__init__.py,sha256=I0X4f6zUniclyD9zT0DFHRImpCpJVP4MkPJT3cd7X5I,1081
 airflow/providers/openlineage/extractors/base.py,sha256=ZXRlvMSak8kUfur-BxrgAxeylMQFG-iT-LusQguIFLc,6342
 airflow/providers/openlineage/extractors/bash.py,sha256=3aR0PXs8fzRLibRxXN1R8wMZnGzyCur7mjpy8e5GC4A,2583
-airflow/providers/openlineage/extractors/manager.py,sha256=g3WJRBR2-XZHTG7qAR4UEviwtymvDArhlPMVD3c4q_g,12862
+airflow/providers/openlineage/extractors/manager.py,sha256=kVo5OHkpUoYIiT2RvwUt128jC_Q4EosdLC0sP1YfIH0,12840
 airflow/providers/openlineage/extractors/python.py,sha256=hVWOplMlBimrpPKPeW6vm75a8OmAYMU1oJzqMz8Jh90,3171
 airflow/providers/openlineage/facets/AirflowDagRunFacet.json,sha256=ie6c-J3-wGgk80WDTGWePz18o6DbW--TNM7BMF4WfcU,2251
 airflow/providers/openlineage/facets/AirflowDebugRunFacet.json,sha256=_zA5gFqGje5MOH1SmdMeA5ViOHvW_pV4oijEAvkuBbY,768
@@ -16,17 +16,17 @@ airflow/providers/openlineage/facets/AirflowRunFacet.json,sha256=70mEaZShgSJp-2x
 airflow/providers/openlineage/facets/AirflowStateRunFacet.json,sha256=xhHQEKD9Jopw-oqbkCCrrwFjfXnxvuJAritsmegKjuQ,937
 airflow/providers/openlineage/facets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/openlineage/plugins/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/openlineage/plugins/adapter.py,sha256=wCGJ3rMDpiyFlPaonUQwGzs0hyNqU-4__e0Z9nAbcyI,20620
+airflow/providers/openlineage/plugins/adapter.py,sha256=wSqdbxNhDWzGd4J6SmNFI_Hj7uMN9I7VZMPTTu0u0-w,21046
 airflow/providers/openlineage/plugins/facets.py,sha256=VvyMYR6ONkC95q5FdNmohv0scbA1Ej_B5cQ97as5GvA,4161
-airflow/providers/openlineage/plugins/listener.py,sha256=KlBKT9VkdOrZxvQHsLZWWq_g4jPhaa2GdVxmHy_EVhM,26083
+airflow/providers/openlineage/plugins/listener.py,sha256=J3TB6o8TFYvLlK2um4tNWGPSjGoZHdXkDhbpzIQzBD0,28845
 airflow/providers/openlineage/plugins/macros.py,sha256=qrHLjE95Uq8H-W9CIkQe5Y9Pu1O-GErhpDV2olGaGQM,3730
 airflow/providers/openlineage/plugins/openlineage.py,sha256=HD3mYNPfXd-buZydEpuAY-naVBXhausU2LYUNhL48QA,1906
 airflow/providers/openlineage/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/openlineage/utils/selective_enable.py,sha256=YyrUQ7Djv5o46XdH83N_G8AXAZ9C_aKPa534pbNVp08,3441
-airflow/providers/openlineage/utils/spark.py,sha256=-2XfUaV0WISK6vHSBmB9E78xkuPjO3fM1tDQCZG7j9I,7303
+airflow/providers/openlineage/utils/spark.py,sha256=sOfEkw6OgFbVG8w-PlIlWqHdq8ti_ja7b3bJnEesWiM,7301
 airflow/providers/openlineage/utils/sql.py,sha256=vkKrrdENEMVG8gtzV6yuTXMa2Z9fBAEXmxDVIDaVncI,9571
-airflow/providers/openlineage/utils/utils.py,sha256=_TP49gO4GeyXqqqRgYY-7bgNaM1cczLTv-3lQFGLMfM,29329
-apache_airflow_providers_openlineage-2.1.3rc1.dist-info/entry_points.txt,sha256=GAx0_i2OeZzqaiiiYuA-xchICDXiCT5kVqpKSxsOjt4,214
-apache_airflow_providers_openlineage-2.1.3rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_openlineage-2.1.3rc1.dist-info/METADATA,sha256=mwWQRF94TgSsKPNgzcVQ4iUWmoCiCwDc-bVBMB87sKA,5697
-apache_airflow_providers_openlineage-2.1.3rc1.dist-info/RECORD,,
+airflow/providers/openlineage/utils/utils.py,sha256=qDAwGpYaTKQokkT40iQgPOjCuL2ZIPLufNuCAalH7-0,29472
+apache_airflow_providers_openlineage-2.2.0rc1.dist-info/entry_points.txt,sha256=GAx0_i2OeZzqaiiiYuA-xchICDXiCT5kVqpKSxsOjt4,214
+apache_airflow_providers_openlineage-2.2.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_openlineage-2.2.0rc1.dist-info/METADATA,sha256=kSjcqIg4fYpgx3m3e7UNp8mFWsFtq5HVqGGfRU2EC-E,5697
+apache_airflow_providers_openlineage-2.2.0rc1.dist-info/RECORD,,