acryl-datahub-airflow-plugin 1.3.1.6rc1-py3-none-any.whl → 1.3.1.7-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: acryl-datahub-airflow-plugin
- Version: 1.3.1.6rc1
+ Version: 1.3.1.7
  Summary: Datahub Airflow plugin to capture executions and send to Datahub
  Home-page: https://docs.datahub.com/
  License: Apache-2.0
@@ -21,58 +21,58 @@ Classifier: Environment :: MacOS X
  Classifier: Topic :: Software Development
  Requires-Python: >=3.9
  Description-Content-Type: text/markdown
- Requires-Dist: acryl-datahub[datahub-rest,sql-parser]==1.3.1.6rc1
- Requires-Dist: pydantic>=2.4.0
  Requires-Dist: apache-airflow<4.0.0,>=2.5.0
- Requires-Dist: acryl-datahub[datahub-rest]==1.3.1.6rc1
+ Requires-Dist: pydantic>=2.4.0
+ Requires-Dist: acryl-datahub[datahub-rest]==1.3.1.7
+ Requires-Dist: acryl-datahub[datahub-rest,sql-parser]==1.3.1.7
  Provides-Extra: ignore
  Provides-Extra: datahub-rest
- Requires-Dist: acryl-datahub[datahub-rest]==1.3.1.6rc1; extra == "datahub-rest"
+ Requires-Dist: acryl-datahub[datahub-rest]==1.3.1.7; extra == "datahub-rest"
  Provides-Extra: datahub-kafka
- Requires-Dist: acryl-datahub[datahub-kafka]==1.3.1.6rc1; extra == "datahub-kafka"
+ Requires-Dist: acryl-datahub[datahub-kafka]==1.3.1.7; extra == "datahub-kafka"
  Provides-Extra: datahub-file
- Requires-Dist: acryl-datahub[sync-file-emitter]==1.3.1.6rc1; extra == "datahub-file"
+ Requires-Dist: acryl-datahub[sync-file-emitter]==1.3.1.7; extra == "datahub-file"
  Provides-Extra: airflow2
  Requires-Dist: openlineage-airflow>=1.2.0; extra == "airflow2"
  Provides-Extra: airflow3
  Requires-Dist: apache-airflow-providers-openlineage>=1.0.0; extra == "airflow3"
  Provides-Extra: dev
- Requires-Dist: mypy==1.17.1; extra == "dev"
- Requires-Dist: deepdiff!=8.0.0; extra == "dev"
- Requires-Dist: pytest-cov>=2.8.1; extra == "dev"
- Requires-Dist: types-cachetools; extra == "dev"
- Requires-Dist: coverage>=5.1; extra == "dev"
- Requires-Dist: types-setuptools; extra == "dev"
- Requires-Dist: apache-airflow<4.0.0,>=2.5.0; extra == "dev"
+ Requires-Dist: acryl-datahub[datahub-rest]==1.3.1.7; extra == "dev"
+ Requires-Dist: types-click==0.1.12; extra == "dev"
+ Requires-Dist: tox-uv; extra == "dev"
  Requires-Dist: tox; extra == "dev"
- Requires-Dist: types-six; extra == "dev"
- Requires-Dist: twine; extra == "dev"
- Requires-Dist: packaging; extra == "dev"
+ Requires-Dist: types-dataclasses; extra == "dev"
+ Requires-Dist: types-python-dateutil; extra == "dev"
  Requires-Dist: ruff==0.11.7; extra == "dev"
- Requires-Dist: acryl-datahub[datahub-rest]==1.3.1.6rc1; extra == "dev"
- Requires-Dist: types-click==0.1.12; extra == "dev"
+ Requires-Dist: types-toml; extra == "dev"
+ Requires-Dist: pydantic>=2.4.0; extra == "dev"
+ Requires-Dist: pytest>=6.2.2; extra == "dev"
+ Requires-Dist: sqlalchemy-stubs; extra == "dev"
+ Requires-Dist: twine; extra == "dev"
  Requires-Dist: build; extra == "dev"
- Requires-Dist: types-PyYAML; extra == "dev"
+ Requires-Dist: pytest-cov>=2.8.1; extra == "dev"
  Requires-Dist: types-requests; extra == "dev"
- Requires-Dist: types-toml; extra == "dev"
+ Requires-Dist: apache-airflow<4.0.0,>=2.5.0; extra == "dev"
+ Requires-Dist: types-setuptools; extra == "dev"
+ Requires-Dist: packaging; extra == "dev"
  Requires-Dist: tenacity; extra == "dev"
- Requires-Dist: sqlalchemy-stubs; extra == "dev"
- Requires-Dist: tox-uv; extra == "dev"
+ Requires-Dist: types-cachetools; extra == "dev"
+ Requires-Dist: types-six; extra == "dev"
+ Requires-Dist: acryl-datahub[datahub-rest,sql-parser]==1.3.1.7; extra == "dev"
  Requires-Dist: types-tabulate; extra == "dev"
- Requires-Dist: types-dataclasses; extra == "dev"
- Requires-Dist: pydantic>=2.4.0; extra == "dev"
- Requires-Dist: types-python-dateutil; extra == "dev"
- Requires-Dist: pytest>=6.2.2; extra == "dev"
- Requires-Dist: acryl-datahub[datahub-rest,sql-parser]==1.3.1.6rc1; extra == "dev"
+ Requires-Dist: coverage>=5.1; extra == "dev"
+ Requires-Dist: deepdiff!=8.0.0; extra == "dev"
+ Requires-Dist: types-PyYAML; extra == "dev"
+ Requires-Dist: mypy==1.17.1; extra == "dev"
  Provides-Extra: integration-tests
  Requires-Dist: apache-airflow[amazon,google,snowflake]>=2.0.2; extra == "integration-tests"
- Requires-Dist: acryl-datahub[testing-utils]==1.3.1.6rc1; extra == "integration-tests"
- Requires-Dist: snowflake-connector-python>=2.7.10; extra == "integration-tests"
+ Requires-Dist: acryl-datahub[testing-utils]==1.3.1.7; extra == "integration-tests"
+ Requires-Dist: acryl-datahub[sync-file-emitter]==1.3.1.7; extra == "integration-tests"
  Requires-Dist: apache-airflow-providers-teradata; extra == "integration-tests"
- Requires-Dist: acryl-datahub[datahub-kafka]==1.3.1.6rc1; extra == "integration-tests"
- Requires-Dist: virtualenv; extra == "integration-tests"
+ Requires-Dist: acryl-datahub[datahub-kafka]==1.3.1.7; extra == "integration-tests"
  Requires-Dist: apache-airflow-providers-sqlite; extra == "integration-tests"
- Requires-Dist: acryl-datahub[sync-file-emitter]==1.3.1.6rc1; extra == "integration-tests"
+ Requires-Dist: virtualenv; extra == "integration-tests"
+ Requires-Dist: snowflake-connector-python>=2.7.10; extra == "integration-tests"
  Dynamic: classifier
  Dynamic: description
  Dynamic: description-content-type
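
The dependency changes above replace every ==1.3.1.6rc1 pin on acryl-datahub with ==1.3.1.7 and reorder the dev and integration-tests extras without changing which packages they contain. A small sketch, assuming the new wheel is installed in the current environment, that reads these same METADATA fields back with the standard library (no DataHub-specific API involved):

from importlib.metadata import requires, version

# Should print the bumped Version field shown in the hunk above.
print(version("acryl-datahub-airflow-plugin"))  # expected: 1.3.1.7

# Every Requires-Dist pin on acryl-datahub should now reference 1.3.1.7.
for req in requires("acryl-datahub-airflow-plugin") or []:
    if req.startswith("acryl-datahub"):
        print(req)
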
@@ -5,7 +5,7 @@ datahub_airflow_plugin/_airflow_version_specific.py,sha256=q4iNyScNQTNZnoC9n_8wY
  datahub_airflow_plugin/_config.py,sha256=AAvxIGcG-wQWkFbpGCRurjnNXVCzqHDBmsFsSRHOEi4,9050
  datahub_airflow_plugin/_constants.py,sha256=HBrUztUwspK1mQ3iM9_Pdiu18pn-lPK3xhjmubqn_kI,627
  datahub_airflow_plugin/_datahub_ol_adapter.py,sha256=2YIhJDyLhzZGK3MMra4NGSIfG-az8sKt6ZXcbQGs2Yg,951
- datahub_airflow_plugin/_version.py,sha256=maE75AEc4HKbL1U_k9EV2r69enqswvTcB0udJLVrQL0,148
+ datahub_airflow_plugin/_version.py,sha256=cTYMyJFZEANDgJ1g-3V1k_EwQYvL_mP4GvcJOXjvCMw,145
  datahub_airflow_plugin/datahub_listener.py,sha256=9g-MBf14zFs-cQP0Jr_9gnCbzQVqIe_x6ZjgQs0onzE,881
  datahub_airflow_plugin/datahub_plugin.py,sha256=rbZhs7s5O3_MlkQw5aZToC2W5mMic_EpI3oybHB0ofw,1224
  datahub_airflow_plugin/entities.py,sha256=xDZ-mZH7hjUkZbatWYUwI43_9B40wGiotlyQhiO8rEM,1987
@@ -27,7 +27,7 @@ datahub_airflow_plugin/airflow3/_bigquery_openlineage_patch.py,sha256=ph9ZePU8ef
  datahub_airflow_plugin/airflow3/_shims.py,sha256=q2Nw5vS21R_y0cYg3aOm7AYylaNAXlN480ve5XItfNU,2553
  datahub_airflow_plugin/airflow3/_sqlite_openlineage_patch.py,sha256=QRgadtwS6a42k3J4zAGsIRjyExvRX7CvAM_U3DYivic,3552
  datahub_airflow_plugin/airflow3/_teradata_openlineage_patch.py,sha256=z-Udbz_7NahufipLOIgUlZGWRsIRF_qkV903rYxR_s4,12563
- datahub_airflow_plugin/airflow3/datahub_listener.py,sha256=Xonjn3SX7q9dmDEyW7uyiryvJ1AVJuQcByBbD6TFWTk,58791
+ datahub_airflow_plugin/airflow3/datahub_listener.py,sha256=7moB-1_iTBA3K90PDo5xkntHvZx2NDGMFlobmaAdi1o,59201
  datahub_airflow_plugin/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datahub_airflow_plugin/client/airflow_generator.py,sha256=7Cc9khsePHqjRAHZchaMKMKP0wCeT21wCtH8gUZeEEE,26135
  datahub_airflow_plugin/example_dags/__init__.py,sha256=nIAjilNA_uGtFYYmAakZOvJQjAeg8d7aR61MegkEjgU,1321
@@ -58,8 +58,8 @@ datahub_airflow_plugin/operators/datahub_assertion_operator.py,sha256=j_P9M1a5qM
  datahub_airflow_plugin/operators/datahub_assertion_sensor.py,sha256=QJIZZYQhqscj3bhBN5Sei-ABMRRAl2KiQxXTXcZQ51Q,2917
  datahub_airflow_plugin/operators/datahub_operation_operator.py,sha256=KJ8M8jJ7UWW6kNbiS-rELc-kqCPkZ3ck7z51oAXGPSI,3351
  datahub_airflow_plugin/operators/datahub_operation_sensor.py,sha256=U19fi5DpjBRWm_1ljXcjnspUzfa3mqYfOQZHjLk-ufI,3618
- acryl_datahub_airflow_plugin-1.3.1.6rc1.dist-info/METADATA,sha256=Rv6GKbyek-DE95xQiD-A7qWNToCVEjQXb2BVOx_ZWas,12333
- acryl_datahub_airflow_plugin-1.3.1.6rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- acryl_datahub_airflow_plugin-1.3.1.6rc1.dist-info/entry_points.txt,sha256=HqmajDHtrsz0b5Lswe1-eeuObxdtucd9YoxH77jJBA8,179
- acryl_datahub_airflow_plugin-1.3.1.6rc1.dist-info/top_level.txt,sha256=VBzisOQfzqL1WRbNyItaruf3kTigXltjzgqzbheaFp0,23
- acryl_datahub_airflow_plugin-1.3.1.6rc1.dist-info/RECORD,,
+ acryl_datahub_airflow_plugin-1.3.1.7.dist-info/METADATA,sha256=ciho0cw8fspfqB-YtBcpfEQcCQUd6vZN84ElM9tbz3E,12300
+ acryl_datahub_airflow_plugin-1.3.1.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ acryl_datahub_airflow_plugin-1.3.1.7.dist-info/entry_points.txt,sha256=HqmajDHtrsz0b5Lswe1-eeuObxdtucd9YoxH77jJBA8,179
+ acryl_datahub_airflow_plugin-1.3.1.7.dist-info/top_level.txt,sha256=VBzisOQfzqL1WRbNyItaruf3kTigXltjzgqzbheaFp0,23
+ acryl_datahub_airflow_plugin-1.3.1.7.dist-info/RECORD,,
@@ -1,3 +1,3 @@
  # Published at https://pypi.org/project/acryl-datahub-airflow-plugin/.
  __package_name__ = "acryl-datahub-airflow-plugin"
- __version__ = "1.3.1.6rc1"
+ __version__ = "1.3.1.7"
@@ -1059,9 +1059,13 @@ class DataHubListener:

  # Airflow 3.0+ doesn't need task holder

- # If we don't have the DAG listener API, emit DAG start event
- if not HAS_AIRFLOW_DAG_LISTENER_API:
-     self.on_dag_start(dagrun)
+ # Always emit DataFlow from task instance handler as a fallback.
+ # In distributed Airflow deployments (Kubernetes, Astronomer), the on_dag_run_running
+ # hook runs on the scheduler process, but the DataHub listener is only initialized
+ # on worker processes. This means the scheduler's on_dag_run_running never triggers
+ # DataFlow emission. By emitting here, we ensure the DataFlow exists before the
+ # DataJob references it. DataHub's UPSERT semantics make duplicate emissions safe.
+ self.on_dag_start(dagrun)

  # Generate and emit datajob
  # Task type can vary between Airflow versions (MappedOperator from different modules)
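
The added comment leans on DataHub's UPSERT behavior: emitting the same DataFlow aspect from both the scheduler hook and the task-instance fallback converges to a single entity rather than creating duplicates. A minimal sketch of that idempotence using the acryl-datahub emitter API; this is illustrative only, not the plugin's actual emission path, and the GMS endpoint and DAG id are placeholders:

from datahub.emitter.mce_builder import make_data_flow_urn
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.schema_classes import DataFlowInfoClass

# Placeholder GMS endpoint; point this at a real DataHub instance to try it.
emitter = DatahubRestEmitter(gms_server="http://localhost:8080")

# URN for a hypothetical Airflow DAG named "example_dag".
flow_urn = make_data_flow_urn(orchestrator="airflow", flow_id="example_dag", cluster="prod")

mcp = MetadataChangeProposalWrapper(
    entityUrn=flow_urn,
    aspect=DataFlowInfoClass(name="example_dag"),
)

# Emitting the identical aspect twice is an UPSERT both times: the second call
# overwrites the first with the same data, so no duplicate DataFlow is created.
emitter.emit_mcp(mcp)
emitter.emit_mcp(mcp)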