apache-airflow-providers-standard 1.10.3__py3-none-any.whl → 1.11.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

airflow/providers/standard/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.10.3"
+__version__ = "1.11.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.11.0"

airflow/providers/standard/example_dags/example_python_decorator.py
@@ -22,6 +22,7 @@ virtual environment.
 
 from __future__ import annotations
 
+import asyncio
 import logging
 import sys
 import time
@@ -75,6 +76,22 @@ def example_python_decorator():
         run_this >> log_the_sql >> sleeping_task
     # [END howto_operator_python_kwargs]
 
+    # [START howto_async_operator_python_kwargs]
+    # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
+    # Asynchronous callables are natively supported since Airflow 3.2+
+    @task
+    async def my_async_sleeping_function(random_base):
+        """This is a function that will run within the DAG execution"""
+        await asyncio.sleep(random_base)
+
+    for i in range(5):
+        async_sleeping_task = my_async_sleeping_function.override(task_id=f"async_sleep_for_{i}")(
+            random_base=i / 10
+        )
+
+        run_this >> log_the_sql >> async_sleeping_task
+    # [END howto_async_operator_python_kwargs]
+
     # [START howto_operator_python_venv]
     @task.virtualenv(
         task_id="virtualenv_python", requirements=["colorama==0.4.0"], system_site_packages=False

airflow/providers/standard/example_dags/example_python_operator.py
@@ -22,6 +22,7 @@ within a virtual environment.
 
 from __future__ import annotations
 
+import asyncio
 import logging
 import sys
 import time
@@ -88,6 +89,23 @@ with DAG(
         run_this >> log_the_sql >> sleeping_task
     # [END howto_operator_python_kwargs]
 
+    # [START howto_async_operator_python_kwargs]
+    # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively
+    # Asynchronous callables are natively supported since Airflow 3.2+
+    async def my_async_sleeping_function(random_base):
+        """This is a function that will run within the DAG execution"""
+        await asyncio.sleep(random_base)
+
+    for i in range(5):
+        async_sleeping_task = PythonOperator(
+            task_id=f"async_sleep_for_{i}",
+            python_callable=my_async_sleeping_function,
+            op_kwargs={"random_base": i / 10},
+        )
+
+        run_this >> log_the_sql >> async_sleeping_task
+    # [END howto_async_operator_python_kwargs]
+
     # [START howto_operator_python_venv]
     def callable_virtualenv():
         """

airflow/providers/standard/get_provider_info.py
@@ -34,6 +34,7 @@ def get_provider_info():
                 "how-to-guide": [
                     "/docs/apache-airflow-providers-standard/operators/bash.rst",
                     "/docs/apache-airflow-providers-standard/operators/python.rst",
+                    "/docs/apache-airflow-providers-standard/operators/hitl.rst",
                     "/docs/apache-airflow-providers-standard/operators/datetime.rst",
                     "/docs/apache-airflow-providers-standard/operators/trigger_dag_run.rst",
                     "/docs/apache-airflow-providers-standard/operators/latest_only.rst",

airflow/providers/standard/operators/python.py
@@ -48,13 +48,17 @@ from airflow.exceptions import (
 )
 from airflow.models.variable import Variable
 from airflow.providers.common.compat.sdk import AirflowException, AirflowSkipException, context_merge
+from airflow.providers.common.compat.standard.operators import (
+    BaseAsyncOperator,
+    is_async_callable,
+)
 from airflow.providers.standard.hooks.package_index import PackageIndexHook
 from airflow.providers.standard.utils.python_virtualenv import (
     _execute_in_subprocess,
     prepare_virtualenv,
     write_python_script,
 )
-from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
+from airflow.providers.standard.version_compat import AIRFLOW_V_3_0_PLUS, AIRFLOW_V_3_2_PLUS
 from airflow.utils import hashlib_wrapper
 from airflow.utils.file import get_unique_dag_module_name
 from airflow.utils.operator_helpers import KeywordParameters
@@ -75,7 +79,10 @@ if TYPE_CHECKING:
     from pendulum.datetime import DateTime
 
     from airflow.providers.common.compat.sdk import Context
-    from airflow.sdk.execution_time.callback_runner import ExecutionCallableRunner
+    from airflow.sdk.execution_time.callback_runner import (
+        AsyncExecutionCallableRunner,
+        ExecutionCallableRunner,
+    )
     from airflow.sdk.execution_time.context import OutletEventAccessorsProtocol
 
 _SerializerTypeDef = Literal["pickle", "cloudpickle", "dill"]
@@ -115,9 +122,9 @@ class _PythonVersionInfo(NamedTuple):
         return cls(*_parse_version_info(result.strip()))
 
 
-class PythonOperator(BaseOperator):
+class PythonOperator(BaseAsyncOperator):
     """
-    Executes a Python callable.
+    Base class for all Python operators.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
@@ -192,7 +199,14 @@ class PythonOperator(BaseOperator):
         self.template_ext = templates_exts
         self.show_return_value_in_logs = show_return_value_in_logs
 
-    def execute(self, context: Context) -> Any:
+    @property
+    def is_async(self) -> bool:
+        return is_async_callable(self.python_callable)
+
+    def execute(self, context) -> Any:
+        if self.is_async:
+            return BaseAsyncOperator.execute(self, context)
+
         context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
         self.op_kwargs = self.determine_kwargs(context)
 
@@ -236,6 +250,47 @@ class PythonOperator(BaseOperator):
         runner = create_execution_runner(self.python_callable, asset_events, logger=self.log)
         return runner.run(*self.op_args, **self.op_kwargs)
 
+    if AIRFLOW_V_3_2_PLUS:
+
+        async def aexecute(self, context):
+            context_merge(context, self.op_kwargs, templates_dict=self.templates_dict)
+            self.op_kwargs = self.determine_kwargs(context)
+
+            # This needs to be lazy because subclasses may implement execute_callable
+            # by running a separate process that can't use the eager result.
+            def __prepare_execution() -> (
+                tuple[AsyncExecutionCallableRunner, OutletEventAccessorsProtocol] | None
+            ):
+                from airflow.sdk.execution_time.callback_runner import create_async_executable_runner
+                from airflow.sdk.execution_time.context import context_get_outlet_events
+
+                return (
+                    cast("AsyncExecutionCallableRunner", create_async_executable_runner),
+                    context_get_outlet_events(context),
+                )
+
+            self.__prepare_execution = __prepare_execution
+
+            return_value = await self.aexecute_callable()
+            if self.show_return_value_in_logs:
+                self.log.info("Done. Returned value was: %s", return_value)
+            else:
+                self.log.info("Done. Returned value not shown")
+
+            return return_value
+
+        async def aexecute_callable(self) -> Any:
+            """
+            Call the python callable with the given arguments.
+
+            :return: the return value of the call.
+            """
+            if (execution_preparation := self.__prepare_execution()) is None:
+                return await self.python_callable(*self.op_args, **self.op_kwargs)
+            create_execution_runner, asset_events = execution_preparation
+            runner = create_execution_runner(self.python_callable, asset_events, logger=self.log)
+            return await runner.run(*self.op_args, **self.op_kwargs)
+
 
 class BranchPythonOperator(BaseBranchOperator, PythonOperator):
     """

airflow/providers/standard/operators/trigger_dagrun.py
@@ -58,8 +58,7 @@ XCOM_RUN_ID = "trigger_run_id"
 if TYPE_CHECKING:
     from sqlalchemy.orm.session import Session
 
-    from airflow.models.taskinstancekey import TaskInstanceKey
-    from airflow.providers.common.compat.sdk import Context
+    from airflow.providers.common.compat.sdk import Context, TaskInstanceKey
 
 
 class DagIsPaused(AirflowException):
@@ -89,8 +88,17 @@ class TriggerDagRunLink(BaseOperatorLink):
         trigger_dag_id = operator.trigger_dag_id
         if not AIRFLOW_V_3_0_PLUS:
             from airflow.models.renderedtifields import RenderedTaskInstanceFields
+            from airflow.models.taskinstancekey import TaskInstanceKey as CoreTaskInstanceKey
+
+            core_ti_key = CoreTaskInstanceKey(
+                dag_id=ti_key.dag_id,
+                task_id=ti_key.task_id,
+                run_id=ti_key.run_id,
+                try_number=ti_key.try_number,
+                map_index=ti_key.map_index,
+            )
 
-            if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+            if template_fields := RenderedTaskInstanceFields.get_templated_fields(core_ti_key):
                 trigger_dag_id: str = template_fields.get("trigger_dag_id", operator.trigger_dag_id)  # type: ignore[no-redef]
 
         # Fetch the correct dag_run_id for the triggerED dag which is

airflow/providers/standard/sensors/external_task.py
@@ -62,8 +62,7 @@ else:
 if TYPE_CHECKING:
     from sqlalchemy.orm import Session
 
-    from airflow.models.taskinstancekey import TaskInstanceKey
-    from airflow.providers.common.compat.sdk import Context
+    from airflow.providers.common.compat.sdk import Context, TaskInstanceKey
 
 
 class ExternalDagLink(BaseOperatorLink):
@@ -83,8 +82,17 @@ class ExternalDagLink(BaseOperatorLink):
 
         if not AIRFLOW_V_3_0_PLUS:
             from airflow.models.renderedtifields import RenderedTaskInstanceFields
+            from airflow.models.taskinstancekey import TaskInstanceKey as CoreTaskInstanceKey
+
+            core_ti_key = CoreTaskInstanceKey(
+                dag_id=ti_key.dag_id,
+                task_id=ti_key.task_id,
+                run_id=ti_key.run_id,
+                try_number=ti_key.try_number,
+                map_index=ti_key.map_index,
+            )
 
-            if template_fields := RenderedTaskInstanceFields.get_templated_fields(ti_key):
+            if template_fields := RenderedTaskInstanceFields.get_templated_fields(core_ti_key):
                 external_dag_id: str = template_fields.get("external_dag_id", operator.external_dag_id)  # type: ignore[no-redef]
 
         if AIRFLOW_V_3_0_PLUS:
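
Both link changes above (TriggerDagRunLink and ExternalDagLink) apply the same compatibility pattern: on Airflow < 3.0 the key object handed to the link is rebuilt field by field into the core TaskInstanceKey before calling RenderedTaskInstanceFields.get_templated_fields. A minimal, self-contained sketch of that conversion, using stand-in types rather than the real Airflow classes:

from dataclasses import dataclass
from typing import NamedTuple


class CoreTaskInstanceKey(NamedTuple):
    # Stand-in for airflow.models.taskinstancekey.TaskInstanceKey.
    dag_id: str
    task_id: str
    run_id: str
    try_number: int
    map_index: int


@dataclass
class SdkTaskInstanceKey:
    # Stand-in for the key object the operator link receives at runtime.
    dag_id: str
    task_id: str
    run_id: str
    try_number: int
    map_index: int


def to_core_key(ti_key: SdkTaskInstanceKey) -> CoreTaskInstanceKey:
    # Field-by-field rebuild, mirroring the core_ti_key construction in the hunks above.
    return CoreTaskInstanceKey(
        dag_id=ti_key.dag_id,
        task_id=ti_key.task_id,
        run_id=ti_key.run_id,
        try_number=ti_key.try_number,
        map_index=ti_key.map_index,
    )


assert to_core_key(SdkTaskInstanceKey("my_dag", "my_task", "manual__2024-01-01", 1, -1)).map_index == -1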

apache_airflow_providers_standard-1.10.3.dist-info/METADATA → apache_airflow_providers_standard-1.11.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-standard
-Version: 1.10.3
+Version: 1.11.0
 Summary: Provider package apache-airflow-providers-standard for Apache Airflow
 Keywords: airflow-provider,standard,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -23,11 +23,11 @@ Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
 Requires-Dist: apache-airflow>=2.11.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.12.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.13.0
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.11.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-standard/1.11.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -59,7 +59,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-standard``
 
-Release: ``1.10.3``
+Release: ``1.11.0``
 
 
 Airflow Standard Provider
@@ -72,7 +72,7 @@ This is a provider package for ``standard`` provider. All classes for this provi
 are in ``airflow.providers.standard`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.11.0/>`_.
 
 Installation
 ------------
@@ -90,7 +90,7 @@ Requirements
 PIP package                                Version required
 ========================================== ==================
 ``apache-airflow``                         ``>=2.11.0``
-``apache-airflow-providers-common-compat`` ``>=1.10.1``
+``apache-airflow-providers-common-compat`` ``>=1.13.0``
 ========================================== ==================
 
 Cross provider package dependencies
@@ -123,5 +123,5 @@ Extra Dependencies
 =============== ========================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.10.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-standard/1.11.0/changelog.html>`_.
 

apache_airflow_providers_standard-1.10.3.dist-info/RECORD → apache_airflow_providers_standard-1.11.0.dist-info/RECORD
@@ -1,6 +1,6 @@
-airflow/providers/standard/__init__.py,sha256=9U1f1ln4A2DrS16rZ4_tF36RA70CDZaIGImbJjKbpsQ,1498
+airflow/providers/standard/__init__.py,sha256=cnnm9_0d8faHw6BVGMhW4_v_qqK8_eRCiXpWvc9CfCE,1498
 airflow/providers/standard/exceptions.py,sha256=d5BzPvWfKGMZbgvlkFQCFyVijh2Y2_CB2_2tOmU2djE,2433
-airflow/providers/standard/get_provider_info.py,sha256=NVstkG2ZeAiTZnvmbrMpxcYgJzcdITKdQvgDOZYX1Rk,7227
+airflow/providers/standard/get_provider_info.py,sha256=XP3pB1hX-5xAWLULorLNUB4upJusulXZdd-b_BUCy-Y,7309
 airflow/providers/standard/version_compat.py,sha256=wzS7qPjUGZp-zZeL2YnvAlxD-Sdtt7ff6KnlFk3xlUA,2166
 airflow/providers/standard/decorators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/decorators/bash.py,sha256=JFEC7uKb8LJfk2EeRnmo1P5s6Yp5onfqDm7l5jQne1Q,4105
@@ -25,8 +25,8 @@ airflow/providers/standard/example_dags/example_external_task_marker_dag.py,sha2
 airflow/providers/standard/example_dags/example_external_task_parent_deferrable.py,sha256=7mAZHndZyETYMIDfuh0YPJnIHWyGLSzeXGCIcWBP2Uw,2092
 airflow/providers/standard/example_dags/example_hitl_operator.py,sha256=nPlDpxVFD_9JMB_A_ryXP-rNHmtXZh_paBftohBfYWg,6025
 airflow/providers/standard/example_dags/example_latest_only.py,sha256=ac9WpLMWLzyuxZks74t3HojS7vRG2gynmQfGm13gwOI,1456
-airflow/providers/standard/example_dags/example_python_decorator.py,sha256=jveqPOw1GZzD3Z37_rYc8Q8hcyx8vCNjgetpO_P6qmg,4281
-airflow/providers/standard/example_dags/example_python_operator.py,sha256=3L6CZHK2Fb7zmA9tDhZ5QaEe38WJYlS4l35Gc7xJAoE,4761
+airflow/providers/standard/example_dags/example_python_decorator.py,sha256=S37ScXEgNRO-qJuZomHoUsyqhGyj5g7BEIwsKCXjz4A,4941
+airflow/providers/standard/example_dags/example_python_operator.py,sha256=nWh5w8PgaGhc1yMam1ZCRMTA4lERBE9rTdki0Gnm6xs,5474
 airflow/providers/standard/example_dags/example_sensor_decorator.py,sha256=zEZUh2YD17T8-bX-HGjar5cCqgi6qHIB0fXY29lboJA,1873
 airflow/providers/standard/example_dags/example_sensors.py,sha256=SoaLeoKK4r41EqJbF1UmcCNqMDnqwWd_qf211_VMUeM,4667
 airflow/providers/standard/example_dags/example_short_circuit_decorator.py,sha256=rtEOT4P_B8JRM0M6TAjmIstUJFHUkqJdWQhU9L3YD6I,2429
@@ -46,14 +46,14 @@ airflow/providers/standard/operators/datetime.py,sha256=G7wBc9bn0YSJqNQCgEBPiu_Q
 airflow/providers/standard/operators/empty.py,sha256=flxN2BhuHegEOiiAcJR9QOuMO8PcxQu353Q9p-Yk82s,1342
 airflow/providers/standard/operators/hitl.py,sha256=BRMrUsdDjc4snZdXdaMD6u15gvjn1AqDWb7A-JbVrIg,18669
 airflow/providers/standard/operators/latest_only.py,sha256=CjlffWV9bK2UZ8L5KTPLNq3EG_EF6tQl7CLT4uz-eBg,5292
-airflow/providers/standard/operators/python.py,sha256=AFxRb68rgLQbmJ-k5fruGSTNFQRM_mD4w5OD3gThreg,55071
+airflow/providers/standard/operators/python.py,sha256=Sq2OXgLK6v00CyaAP4AspMGEo87-EK-FUZZ_cqbPAek,57307
 airflow/providers/standard/operators/smooth.py,sha256=WamRqmeSm6BcGCCBAqBEVYIRk4ZXbeI_Q7OjPgLfnUI,1400
-airflow/providers/standard/operators/trigger_dagrun.py,sha256=MHejvbWj6QCK24-U9OsWM4ZS6iQ2L8voONpslluuLtE,19330
+airflow/providers/standard/operators/trigger_dagrun.py,sha256=ctmkJ5Yw5m_OuXuwRblgQ4LcSVHP5WCoiMes1A7OSNA,19651
 airflow/providers/standard/operators/weekday.py,sha256=s8C6T-x9Hvkj4YQWCguTRyhiAqdJYCBr12rPm2qYC3M,4957
 airflow/providers/standard/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/standard/sensors/bash.py,sha256=EkG1zl2yoUirWAg03BjEpdDTSJbo1ugE-AxhE5s40Ks,4792
 airflow/providers/standard/sensors/date_time.py,sha256=WEbbniUQZU1pGURDieEGWZcsEy0RRxjoKPIxCYdsObg,5838
-airflow/providers/standard/sensors/external_task.py,sha256=wIYceWOOpw5pePy_UdEvIGr1pGVF_fk71NF9pHlFShE,30825
+airflow/providers/standard/sensors/external_task.py,sha256=TfoZjv8DmhrAplPqMze3eEPLsqI_VjZaBa9j4meTty8,31146
 airflow/providers/standard/sensors/filesystem.py,sha256=fTI6FsBXOI9NE2zlipGAKJE1kvQ5gLaNJf5dOi1pgE8,5316
 airflow/providers/standard/sensors/python.py,sha256=eBfy0QRgsQHw4H4pZ4u7DNzu7ZdMH7TtPQWOdP4wWqA,3221
 airflow/providers/standard/sensors/time.py,sha256=qgHENTcppShQUtxGJrvPeFPGoDK41zCB3TbLJLpBJzQ,4436
@@ -71,9 +71,9 @@ airflow/providers/standard/utils/python_virtualenv_script.jinja2,sha256=yg4FngvJ
 airflow/providers/standard/utils/sensor_helper.py,sha256=ZcJeWAGymwUma7R6U7pQXhmQLC2UEeiyjQOrH4uFxt0,5407
 airflow/providers/standard/utils/skipmixin.py,sha256=gVmICO2CjH6faJPhzVC8_NkwwnhcEhnULifGJF1tVtg,8046
 airflow/providers/standard/utils/weekday.py,sha256=ySDrIkWv-lqqxURo9E98IGInDqERec2O4y9o2hQTGiQ,2685
-apache_airflow_providers_standard-1.10.3.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
-apache_airflow_providers_standard-1.10.3.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-apache_airflow_providers_standard-1.10.3.dist-info/licenses/NOTICE,sha256=_cWHznIoUSbLCY_KfmKqetlKlsoH0c2VBjmZjElAzuc,168
-apache_airflow_providers_standard-1.10.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_standard-1.10.3.dist-info/METADATA,sha256=ERiN8OnMbiovVzLR-NdKko5hbWglP7O7m89WsZ9bZ3I,5662
-apache_airflow_providers_standard-1.10.3.dist-info/RECORD,,
+apache_airflow_providers_standard-1.11.0.dist-info/entry_points.txt,sha256=mW2YRh3mVdZdaP5-iGSNgmcCh3YYdALIn28BCLBZZ40,104
+apache_airflow_providers_standard-1.11.0.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_standard-1.11.0.dist-info/licenses/NOTICE,sha256=_cWHznIoUSbLCY_KfmKqetlKlsoH0c2VBjmZjElAzuc,168
+apache_airflow_providers_standard-1.11.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_standard-1.11.0.dist-info/METADATA,sha256=NTh5yAEJkEyrs5gLiP4BswUHCBGmU0-dCRcGwVffhIo,5662
+apache_airflow_providers_standard-1.11.0.dist-info/RECORD,,