acryl-datahub-actions 1.2.0.11rc4__py3-none-any.whl → 1.3.0rc4__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
--- acryl_datahub_actions-1.2.0.11rc4.dist-info/METADATA
+++ acryl_datahub_actions-1.3.0rc4.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: acryl-datahub-actions
-Version: 1.2.0.11rc4
+Version: 1.3.0rc4
 Summary: An action framework to work with DataHub real time changes.
 Home-page: https://docs.datahub.com/
 License: Apache-2.0
@@ -21,201 +21,201 @@ Classifier: Environment :: MacOS X
 Classifier: Topic :: Software Development
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-Requires-Dist: acryl-datahub[datahub-kafka]==1.2.0.11rc4
-Requires-Dist: h11>=0.16
-Requires-Dist: toml>=0.10.0
+Requires-Dist: aws-msk-iam-sasl-signer-python==1.0.2
+Requires-Dist: acryl-datahub[datahub-kafka]==1.3.0rc4
+Requires-Dist: click-default-group
 Requires-Dist: python-dateutil>=2.8.0
-Requires-Dist: PyYAML
+Requires-Dist: toml>=0.10.0
+Requires-Dist: click>=6.0.0
+Requires-Dist: typing-inspect
+Requires-Dist: azure-identity==1.21.0
 Requires-Dist: progressbar2
 Requires-Dist: tenacity
+Requires-Dist: stackprinter
 Requires-Dist: entrypoints
-Requires-Dist: azure-identity==1.21.0
-Requires-Dist: aws-msk-iam-sasl-signer-python==1.0.2
+Requires-Dist: h11>=0.16
 Requires-Dist: httpcore>=1.0.9
-Requires-Dist: click>=6.0.0
-Requires-Dist: click-default-group
-Requires-Dist: prometheus-client
 Requires-Dist: pydantic<3.0.0,>=2.0.0
-Requires-Dist: stackprinter
-Requires-Dist: typing-inspect
 Requires-Dist: ratelimit
+Requires-Dist: PyYAML
+Requires-Dist: prometheus-client
 Provides-Extra: base
+Requires-Dist: click-default-group; extra == "base"
 Requires-Dist: toml>=0.10.0; extra == "base"
+Requires-Dist: click>=6.0.0; extra == "base"
 Requires-Dist: prometheus-client; extra == "base"
-Requires-Dist: PyYAML; extra == "base"
 Requires-Dist: progressbar2; extra == "base"
-Requires-Dist: click>=6.0.0; extra == "base"
-Requires-Dist: click-default-group; extra == "base"
 Requires-Dist: tenacity; extra == "base"
-Requires-Dist: entrypoints; extra == "base"
 Requires-Dist: stackprinter; extra == "base"
 Requires-Dist: python-dateutil>=2.8.0; extra == "base"
+Requires-Dist: PyYAML; extra == "base"
+Requires-Dist: entrypoints; extra == "base"
 Provides-Extra: kafka
+Requires-Dist: click-default-group; extra == "kafka"
 Requires-Dist: toml>=0.10.0; extra == "kafka"
+Requires-Dist: click>=6.0.0; extra == "kafka"
 Requires-Dist: prometheus-client; extra == "kafka"
-Requires-Dist: PyYAML; extra == "kafka"
-Requires-Dist: progressbar2; extra == "kafka"
 Requires-Dist: confluent-kafka[schemaregistry]; extra == "kafka"
-Requires-Dist: click>=6.0.0; extra == "kafka"
-Requires-Dist: click-default-group; extra == "kafka"
+Requires-Dist: progressbar2; extra == "kafka"
 Requires-Dist: tenacity; extra == "kafka"
-Requires-Dist: entrypoints; extra == "kafka"
 Requires-Dist: stackprinter; extra == "kafka"
 Requires-Dist: python-dateutil>=2.8.0; extra == "kafka"
+Requires-Dist: PyYAML; extra == "kafka"
+Requires-Dist: entrypoints; extra == "kafka"
 Provides-Extra: executor
+Requires-Dist: click-default-group; extra == "executor"
 Requires-Dist: toml>=0.10.0; extra == "executor"
+Requires-Dist: click>=6.0.0; extra == "executor"
 Requires-Dist: prometheus-client; extra == "executor"
-Requires-Dist: PyYAML; extra == "executor"
 Requires-Dist: progressbar2; extra == "executor"
-Requires-Dist: acryl-executor==0.2.6; extra == "executor"
-Requires-Dist: click>=6.0.0; extra == "executor"
-Requires-Dist: click-default-group; extra == "executor"
 Requires-Dist: tenacity; extra == "executor"
-Requires-Dist: entrypoints; extra == "executor"
 Requires-Dist: stackprinter; extra == "executor"
+Requires-Dist: acryl-executor==0.2.6; extra == "executor"
 Requires-Dist: python-dateutil>=2.8.0; extra == "executor"
+Requires-Dist: PyYAML; extra == "executor"
+Requires-Dist: entrypoints; extra == "executor"
 Provides-Extra: slack
+Requires-Dist: slack-bolt>=1.15.5; extra == "slack"
+Requires-Dist: click-default-group; extra == "slack"
 Requires-Dist: toml>=0.10.0; extra == "slack"
+Requires-Dist: click>=6.0.0; extra == "slack"
 Requires-Dist: prometheus-client; extra == "slack"
-Requires-Dist: PyYAML; extra == "slack"
 Requires-Dist: progressbar2; extra == "slack"
-Requires-Dist: slack-bolt>=1.15.5; extra == "slack"
-Requires-Dist: click>=6.0.0; extra == "slack"
-Requires-Dist: click-default-group; extra == "slack"
 Requires-Dist: tenacity; extra == "slack"
-Requires-Dist: entrypoints; extra == "slack"
 Requires-Dist: stackprinter; extra == "slack"
 Requires-Dist: python-dateutil>=2.8.0; extra == "slack"
+Requires-Dist: PyYAML; extra == "slack"
+Requires-Dist: entrypoints; extra == "slack"
 Provides-Extra: teams
-Requires-Dist: pymsteams>=0.2.2; extra == "teams"
+Requires-Dist: click-default-group; extra == "teams"
 Requires-Dist: toml>=0.10.0; extra == "teams"
+Requires-Dist: click>=6.0.0; extra == "teams"
 Requires-Dist: prometheus-client; extra == "teams"
-Requires-Dist: PyYAML; extra == "teams"
 Requires-Dist: progressbar2; extra == "teams"
-Requires-Dist: click>=6.0.0; extra == "teams"
-Requires-Dist: click-default-group; extra == "teams"
 Requires-Dist: tenacity; extra == "teams"
-Requires-Dist: entrypoints; extra == "teams"
 Requires-Dist: stackprinter; extra == "teams"
 Requires-Dist: python-dateutil>=2.8.0; extra == "teams"
+Requires-Dist: pymsteams>=0.2.2; extra == "teams"
+Requires-Dist: PyYAML; extra == "teams"
+Requires-Dist: entrypoints; extra == "teams"
 Provides-Extra: tag-propagation
+Requires-Dist: click-default-group; extra == "tag-propagation"
 Requires-Dist: toml>=0.10.0; extra == "tag-propagation"
+Requires-Dist: click>=6.0.0; extra == "tag-propagation"
 Requires-Dist: prometheus-client; extra == "tag-propagation"
-Requires-Dist: PyYAML; extra == "tag-propagation"
 Requires-Dist: progressbar2; extra == "tag-propagation"
-Requires-Dist: click>=6.0.0; extra == "tag-propagation"
-Requires-Dist: click-default-group; extra == "tag-propagation"
 Requires-Dist: tenacity; extra == "tag-propagation"
-Requires-Dist: entrypoints; extra == "tag-propagation"
 Requires-Dist: stackprinter; extra == "tag-propagation"
 Requires-Dist: python-dateutil>=2.8.0; extra == "tag-propagation"
+Requires-Dist: PyYAML; extra == "tag-propagation"
+Requires-Dist: entrypoints; extra == "tag-propagation"
 Provides-Extra: term-propagation
+Requires-Dist: click-default-group; extra == "term-propagation"
 Requires-Dist: toml>=0.10.0; extra == "term-propagation"
+Requires-Dist: click>=6.0.0; extra == "term-propagation"
 Requires-Dist: prometheus-client; extra == "term-propagation"
-Requires-Dist: PyYAML; extra == "term-propagation"
 Requires-Dist: progressbar2; extra == "term-propagation"
-Requires-Dist: click>=6.0.0; extra == "term-propagation"
-Requires-Dist: click-default-group; extra == "term-propagation"
 Requires-Dist: tenacity; extra == "term-propagation"
-Requires-Dist: entrypoints; extra == "term-propagation"
 Requires-Dist: stackprinter; extra == "term-propagation"
 Requires-Dist: python-dateutil>=2.8.0; extra == "term-propagation"
+Requires-Dist: PyYAML; extra == "term-propagation"
+Requires-Dist: entrypoints; extra == "term-propagation"
 Provides-Extra: snowflake-tag-propagation
-Requires-Dist: acryl-datahub[snowflake-slim]==1.2.0.11rc4; extra == "snowflake-tag-propagation"
+Requires-Dist: click-default-group; extra == "snowflake-tag-propagation"
 Requires-Dist: toml>=0.10.0; extra == "snowflake-tag-propagation"
+Requires-Dist: click>=6.0.0; extra == "snowflake-tag-propagation"
+Requires-Dist: acryl-datahub[snowflake-slim]==1.3.0rc4; extra == "snowflake-tag-propagation"
 Requires-Dist: prometheus-client; extra == "snowflake-tag-propagation"
-Requires-Dist: PyYAML; extra == "snowflake-tag-propagation"
 Requires-Dist: progressbar2; extra == "snowflake-tag-propagation"
-Requires-Dist: click>=6.0.0; extra == "snowflake-tag-propagation"
-Requires-Dist: click-default-group; extra == "snowflake-tag-propagation"
 Requires-Dist: tenacity; extra == "snowflake-tag-propagation"
-Requires-Dist: entrypoints; extra == "snowflake-tag-propagation"
 Requires-Dist: stackprinter; extra == "snowflake-tag-propagation"
 Requires-Dist: python-dateutil>=2.8.0; extra == "snowflake-tag-propagation"
+Requires-Dist: PyYAML; extra == "snowflake-tag-propagation"
+Requires-Dist: entrypoints; extra == "snowflake-tag-propagation"
 Provides-Extra: doc-propagation
+Requires-Dist: click-default-group; extra == "doc-propagation"
 Requires-Dist: toml>=0.10.0; extra == "doc-propagation"
+Requires-Dist: click>=6.0.0; extra == "doc-propagation"
 Requires-Dist: prometheus-client; extra == "doc-propagation"
-Requires-Dist: PyYAML; extra == "doc-propagation"
 Requires-Dist: progressbar2; extra == "doc-propagation"
-Requires-Dist: click>=6.0.0; extra == "doc-propagation"
-Requires-Dist: click-default-group; extra == "doc-propagation"
 Requires-Dist: tenacity; extra == "doc-propagation"
-Requires-Dist: entrypoints; extra == "doc-propagation"
 Requires-Dist: stackprinter; extra == "doc-propagation"
 Requires-Dist: python-dateutil>=2.8.0; extra == "doc-propagation"
+Requires-Dist: PyYAML; extra == "doc-propagation"
+Requires-Dist: entrypoints; extra == "doc-propagation"
 Provides-Extra: all
-Requires-Dist: acryl-datahub[snowflake-slim]==1.2.0.11rc4; extra == "all"
-Requires-Dist: pymsteams>=0.2.2; extra == "all"
+Requires-Dist: slack-bolt>=1.15.5; extra == "all"
+Requires-Dist: click-default-group; extra == "all"
 Requires-Dist: toml>=0.10.0; extra == "all"
+Requires-Dist: click>=6.0.0; extra == "all"
+Requires-Dist: acryl-datahub[snowflake-slim]==1.3.0rc4; extra == "all"
 Requires-Dist: prometheus-client; extra == "all"
-Requires-Dist: PyYAML; extra == "all"
-Requires-Dist: progressbar2; extra == "all"
 Requires-Dist: confluent-kafka[schemaregistry]; extra == "all"
-Requires-Dist: slack-bolt>=1.15.5; extra == "all"
-Requires-Dist: acryl-executor==0.2.6; extra == "all"
-Requires-Dist: click>=6.0.0; extra == "all"
-Requires-Dist: click-default-group; extra == "all"
+Requires-Dist: progressbar2; extra == "all"
 Requires-Dist: tenacity; extra == "all"
-Requires-Dist: entrypoints; extra == "all"
 Requires-Dist: stackprinter; extra == "all"
+Requires-Dist: acryl-executor==0.2.6; extra == "all"
 Requires-Dist: python-dateutil>=2.8.0; extra == "all"
+Requires-Dist: pymsteams>=0.2.2; extra == "all"
+Requires-Dist: PyYAML; extra == "all"
+Requires-Dist: entrypoints; extra == "all"
 Provides-Extra: dev
-Requires-Dist: pymsteams>=0.2.2; extra == "dev"
-Requires-Dist: types-python-dateutil; extra == "dev"
-Requires-Dist: click>=6.0.0; extra == "dev"
-Requires-Dist: pytest-dependency>=0.5.1; extra == "dev"
-Requires-Dist: build; extra == "dev"
-Requires-Dist: PyYAML; extra == "dev"
-Requires-Dist: jsonpickle; extra == "dev"
-Requires-Dist: confluent-kafka[schemaregistry]; extra == "dev"
-Requires-Dist: pytest-docker>=0.10.3; extra == "dev"
-Requires-Dist: types-six; extra == "dev"
-Requires-Dist: tenacity; extra == "dev"
-Requires-Dist: stackprinter; extra == "dev"
 Requires-Dist: aws-msk-iam-sasl-signer-python==1.0.2; extra == "dev"
-Requires-Dist: python-dateutil>=2.8.0; extra == "dev"
-Requires-Dist: ratelimit; extra == "dev"
-Requires-Dist: h11>=0.16; extra == "dev"
+Requires-Dist: types-toml; extra == "dev"
+Requires-Dist: types-PyYAML; extra == "dev"
+Requires-Dist: ruff==0.11.7; extra == "dev"
 Requires-Dist: toml>=0.10.0; extra == "dev"
-Requires-Dist: coverage>=5.1; extra == "dev"
-Requires-Dist: types-pytz; extra == "dev"
 Requires-Dist: freezegun; extra == "dev"
-Requires-Dist: azure-identity==1.21.0; extra == "dev"
-Requires-Dist: httpcore>=1.0.9; extra == "dev"
-Requires-Dist: pytest-cov>=2.8.1; extra == "dev"
-Requires-Dist: types-click==0.1.12; extra == "dev"
-Requires-Dist: twine; extra == "dev"
-Requires-Dist: prometheus-client; extra == "dev"
-Requires-Dist: acryl-executor==0.2.6; extra == "dev"
-Requires-Dist: deepdiff; extra == "dev"
+Requires-Dist: typing-inspect; extra == "dev"
 Requires-Dist: pytest>=6.2.2; extra == "dev"
+Requires-Dist: pytest-cov>=2.8.1; extra == "dev"
 Requires-Dist: types-freezegun; extra == "dev"
-Requires-Dist: types-PyMySQL; extra == "dev"
+Requires-Dist: azure-identity==1.21.0; extra == "dev"
 Requires-Dist: mypy==1.17.1; extra == "dev"
-Requires-Dist: requests-mock; extra == "dev"
-Requires-Dist: pydantic<3.0.0,>=2.0.0; extra == "dev"
+Requires-Dist: types-python-dateutil; extra == "dev"
+Requires-Dist: types-six; extra == "dev"
+Requires-Dist: pymsteams>=0.2.2; extra == "dev"
+Requires-Dist: ratelimit; extra == "dev"
 Requires-Dist: tox; extra == "dev"
-Requires-Dist: types-setuptools; extra == "dev"
-Requires-Dist: acryl-datahub[snowflake-slim]==1.2.0.11rc4; extra == "dev"
-Requires-Dist: click-default-group; extra == "dev"
+Requires-Dist: prometheus-client; extra == "dev"
+Requires-Dist: entrypoints; extra == "dev"
+Requires-Dist: PyYAML; extra == "dev"
+Requires-Dist: pytest-docker>=0.10.3; extra == "dev"
 Requires-Dist: sqlalchemy-stubs; extra == "dev"
+Requires-Dist: twine; extra == "dev"
+Requires-Dist: types-setuptools; extra == "dev"
+Requires-Dist: tenacity; extra == "dev"
+Requires-Dist: stackprinter; extra == "dev"
 Requires-Dist: types-cachetools; extra == "dev"
-Requires-Dist: typing-inspect; extra == "dev"
-Requires-Dist: acryl-datahub[datahub-kafka]==1.2.0.11rc4; extra == "dev"
-Requires-Dist: types-PyYAML; extra == "dev"
-Requires-Dist: types-requests; extra == "dev"
+Requires-Dist: h11>=0.16; extra == "dev"
+Requires-Dist: confluent-kafka[schemaregistry]; extra == "dev"
+Requires-Dist: acryl-datahub[snowflake-slim]==1.3.0rc4; extra == "dev"
+Requires-Dist: build; extra == "dev"
+Requires-Dist: acryl-datahub[datahub-kafka]==1.3.0rc4; extra == "dev"
+Requires-Dist: click-default-group; extra == "dev"
+Requires-Dist: types-dataclasses; extra == "dev"
+Requires-Dist: pytest-dependency>=0.5.1; extra == "dev"
 Requires-Dist: progressbar2; extra == "dev"
-Requires-Dist: types-toml; extra == "dev"
+Requires-Dist: jsonpickle; extra == "dev"
+Requires-Dist: httpcore>=1.0.9; extra == "dev"
+Requires-Dist: acryl-executor==0.2.6; extra == "dev"
+Requires-Dist: types-requests; extra == "dev"
+Requires-Dist: coverage>=5.1; extra == "dev"
+Requires-Dist: python-dateutil>=2.8.0; extra == "dev"
+Requires-Dist: types-PyMySQL; extra == "dev"
 Requires-Dist: slack-bolt>=1.15.5; extra == "dev"
-Requires-Dist: ruff==0.11.7; extra == "dev"
-Requires-Dist: types-dataclasses; extra == "dev"
-Requires-Dist: entrypoints; extra == "dev"
+Requires-Dist: click>=6.0.0; extra == "dev"
+Requires-Dist: types-click==0.1.12; extra == "dev"
+Requires-Dist: requests-mock; extra == "dev"
+Requires-Dist: types-pytz; extra == "dev"
+Requires-Dist: pydantic<3.0.0,>=2.0.0; extra == "dev"
+Requires-Dist: deepdiff; extra == "dev"
 Provides-Extra: integration-tests
-Requires-Dist: pymsteams>=0.2.2; extra == "integration-tests"
-Requires-Dist: confluent-kafka[schemaregistry]; extra == "integration-tests"
 Requires-Dist: slack-bolt>=1.15.5; extra == "integration-tests"
-Requires-Dist: acryl-datahub[snowflake-slim]==1.2.0.11rc4; extra == "integration-tests"
 Requires-Dist: acryl-executor==0.2.6; extra == "integration-tests"
+Requires-Dist: pymsteams>=0.2.2; extra == "integration-tests"
+Requires-Dist: confluent-kafka[schemaregistry]; extra == "integration-tests"
+Requires-Dist: acryl-datahub[snowflake-slim]==1.3.0rc4; extra == "integration-tests"
 Dynamic: classifier
 Dynamic: description
 Dynamic: description-content-type
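The core (non-extra) requirement set is effectively unchanged apart from the acryl-datahub pins moving to 1.3.0rc4; the rest of the churn above is ordering noise from metadata regeneration. A minimal sketch for confirming the resolved core pins in an environment where the new wheel is installed (only standard-library APIs are used; the expected output is an assumption, not taken from this diff):

```python
from importlib.metadata import requires, version

# Print the installed version and its core (non-extra) requirements.
print(version("acryl-datahub-actions"))  # expected: 1.3.0rc4
for req in requires("acryl-datahub-actions") or []:
    if "extra ==" not in req:
        print(req)
```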
--- acryl_datahub_actions-1.2.0.11rc4.dist-info/RECORD
+++ acryl_datahub_actions-1.3.0rc4.dist-info/RECORD
@@ -1,18 +1,18 @@
 datahub_actions/__init__.py,sha256=Pn9UTDbqYPt6jY_acE7MQIveX_Nzdfl5oGmi-Ze8CHs,647
-datahub_actions/_version.py,sha256=N7OVu9QyU67XaRb275wFdcInH1ZLpwD9HIF-rkLUOA4,340
+datahub_actions/_version.py,sha256=-YMaqnOeLCbGY7xnANY8_SezES-TMXqUrM1JLgg51I4,337
 datahub_actions/entrypoints.py,sha256=_6NOpKhlfXuSUdPhDpPya7d9kJmwoRGrunxcNPMQE9k,4743
 datahub_actions/action/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
 datahub_actions/action/action.py,sha256=ET1fpeRn6KVD9diJ9ZOObsojrN9y6Vfn4tK7jzBQKHg,1537
 datahub_actions/action/action_registry.py,sha256=RiPlpOrBlVEpmUTV9kipF7R5P2QW0TgeOWMXjotXhZE,947
 datahub_actions/action/action_stats.py,sha256=GcQrFdpSCmvCoCF51OpwjhaJ2bP4ushUWru5D7wkZmk,1346
 datahub_actions/api/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
-datahub_actions/api/action_graph.py,sha256=PsmJXpCqdnE8phhUQRGwWxkJNtwkARqIkzqNrHAh-Vg,13722
+datahub_actions/api/action_graph.py,sha256=gNRvShC8TuBMIR0IND7wSBxZ8pBSL8aEv4k0_3XL3ek,15230
 datahub_actions/cli/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
 datahub_actions/cli/actions.py,sha256=5NQHTKqqfib-UynUQgUm9dClBhxBPpmRUeiwUkEYiGA,6465
 datahub_actions/event/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
 datahub_actions/event/event.py,sha256=2Eyizwzbf3fXsUV4n5p7gsYZS_CjEE5y9m1YvkmKOKU,990
 datahub_actions/event/event_envelope.py,sha256=x1QfDetMM7k5SLecD0Nb-duxMxKWU0rmeLroScvkicY,2258
-datahub_actions/event/event_registry.py,sha256=yM1UHirXc1FjKH_8tsLpGfId-3vKNG-gl88nEGUnZ0o,3724
+datahub_actions/event/event_registry.py,sha256=bWV2n9u1n8p9Onu9G2AVgZIfOxCjaBT0pKg2eOQdaig,4663
 datahub_actions/pipeline/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
 datahub_actions/pipeline/pipeline.py,sha256=6Bod5W3QJNAV0kXymooBxxJVuvAYv3mpvAa6zp-9u5c,12194
 datahub_actions/pipeline/pipeline_config.py,sha256=ikJYdDpBv0PI0lpbtubseh8SsuK0032i4Gb6Uum2jck,2208
@@ -49,13 +49,13 @@ datahub_actions/plugin/action/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQ
 datahub_actions/plugin/action/utils/term_resolver.py,sha256=J3_u-iGFXxsGCPtaeV_phBOb58RzHznGNypfV5SCAgo,5397
 datahub_actions/plugin/source/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
 datahub_actions/plugin/source/acryl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datahub_actions/plugin/source/acryl/constants.py,sha256=gMNBQfXsre-dhK-i3vZsESVtvPj1EPiU9cTDFtO8QOI,248
-datahub_actions/plugin/source/acryl/datahub_cloud_event_source.py,sha256=LuNoueGRXzDPux8syT7sFvVrXvCjv8W-Lo3MlvHQw74,12226
+datahub_actions/plugin/source/acryl/constants.py,sha256=f6vEsF6SdD0B1Vs90lCRglRQOCCYjiIlGanNtC7OsnY,307
+datahub_actions/plugin/source/acryl/datahub_cloud_event_source.py,sha256=izoNCkZa3DZwW_2wPBfVgXrfqPIFCoBUYFzw6OyIU6w,12538
 datahub_actions/plugin/source/acryl/datahub_cloud_events_ack_manager.py,sha256=ky15ibq5lfYdPIwufv4w92XOpp9C6cRvlhPRbJ_cs10,993
 datahub_actions/plugin/source/acryl/datahub_cloud_events_consumer.py,sha256=T-Y8MB8b3KJXyr3ecEHl-bpNB8bxwy6yAAeIl5atuxI,6039
 datahub_actions/plugin/source/acryl/datahub_cloud_events_consumer_offsets_store.py,sha256=_aMsVToJ6a2wiKK2KJ_jOwNzUy0By7aVHsXGW4LPmAE,3948
 datahub_actions/plugin/source/kafka/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
-datahub_actions/plugin/source/kafka/kafka_event_source.py,sha256=c-5DjKcMzrfG9QxK2SnC55O_S0voOP0Ge422LNpHmIk,11153
+datahub_actions/plugin/source/kafka/kafka_event_source.py,sha256=7bfrtFYw0PZ6HrO0q-yrPiFY9IKi5psc7Mxoj60vZFE,11543
 datahub_actions/plugin/source/kafka/utils.py,sha256=EEqBnv8Zd05zSg9T3f2FHaARaStD2j2M_xiSeaQBplA,758
 datahub_actions/plugin/transform/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
 datahub_actions/plugin/transform/filter/__init__.py,sha256=KYWPHGi7sDM0DXrrXmhlR6_zhym1qNbtFhjYk1Ug6ss,579
@@ -72,11 +72,11 @@ datahub_actions/utils/collection_util.py,sha256=aXVNgT_bY1iIGTVqE_aQlfq5kc61UBZR
 datahub_actions/utils/datahub_util.py,sha256=2EdjdgtW6MzEUntlcugfjgIrvI0w9N5ONYlmTdMw_AA,2019
 datahub_actions/utils/delta_extractor_mcl.py,sha256=ighzDR3RA9d9rS1Isry7-QYMK9w3rTXgql_7QiAYlW0,5443
 datahub_actions/utils/event_util.py,sha256=VluTOeyFcot48moK9qLmYL1ADAjsau0346NgiGsIqOc,1523
-datahub_actions/utils/kafka_msk_iam.py,sha256=1BbJHxbwRxhkJrXS_qnl2SCdJVV7hFgVLT3gdWfbE0c,819
+datahub_actions/utils/kafka_msk_iam.py,sha256=JWg0MBEMcsG2AmW4yXiHvH_dnnsQDIRASdlvDXGTVcI,1013
 datahub_actions/utils/name_resolver.py,sha256=uXICSpy1IUe5uyFUiRk4vDQ9_G0JytPgKPSnqMA6fZk,10540
 datahub_actions/utils/social_util.py,sha256=FI_3qDjayX9LKlDjf43QHafnOznQk3v5Vp3Xyhq-lno,5271
-acryl_datahub_actions-1.2.0.11rc4.dist-info/METADATA,sha256=QpyNL4Vx_nT41im-XNWET_75PZxuvpuDSUtuYgKUgyk,18131
-acryl_datahub_actions-1.2.0.11rc4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-acryl_datahub_actions-1.2.0.11rc4.dist-info/entry_points.txt,sha256=Gbvj36kOFWrsJ1meJVFB7zYgrKbIGgufOpZDurJbehU,866
-acryl_datahub_actions-1.2.0.11rc4.dist-info/top_level.txt,sha256=93StcIqRM0PfcJoT06TFhcCjPnIw-CyFgBaF-4vqCKY,16
-acryl_datahub_actions-1.2.0.11rc4.dist-info/RECORD,,
+acryl_datahub_actions-1.3.0rc4.dist-info/METADATA,sha256=Lxq6MEZrBh4dBli7NKg8ow5ltUIk_tvtID9lyHVdG8w,18110
+acryl_datahub_actions-1.3.0rc4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+acryl_datahub_actions-1.3.0rc4.dist-info/entry_points.txt,sha256=Gbvj36kOFWrsJ1meJVFB7zYgrKbIGgufOpZDurJbehU,866
+acryl_datahub_actions-1.3.0rc4.dist-info/top_level.txt,sha256=93StcIqRM0PfcJoT06TFhcCjPnIw-CyFgBaF-4vqCKY,16
+acryl_datahub_actions-1.3.0rc4.dist-info/RECORD,,
--- datahub_actions/_version.py
+++ datahub_actions/_version.py
@@ -1,6 +1,6 @@
 # Published at https://pypi.org/project/acryl-datahub-actions/.
 __package_name__ = "acryl-datahub-actions"
-__version__ = "1.2.0.11rc4"
+__version__ = "1.3.0rc4"
 
 
 def is_dev_mode() -> bool:
--- datahub_actions/api/action_graph.py
+++ datahub_actions/api/action_graph.py
@@ -14,6 +14,7 @@
 
 import json
 import logging
+import time
 import urllib.parse
 from dataclasses import dataclass
 from typing import Any, Dict, List, Optional
@@ -22,6 +23,7 @@ from datahub.configuration.common import OperationalError
 from datahub.ingestion.graph.client import DataHubGraph
 from datahub.metadata.schema_classes import (
     GlossaryTermAssociationClass,
+    MetadataAttributionClass,
     TagAssociationClass,
 )
 from datahub.specific.dataset import DatasetPatchBuilder
@@ -250,20 +252,57 @@ query listIngestionSources($input: ListIngestionSourcesInput!, $execution_start:
             return target_urn in entities
         return False
 
+    def _create_attribution_from_context(
+        self, context: Optional[Dict]
+    ) -> Optional[MetadataAttributionClass]:
+        """Create MetadataAttributionClass from context if action source is present."""
+        if not context:
+            return None
+
+        # Extract action source from context if present
+        action_source = context.get("propagation_source") or context.get("source")
+        if not action_source:
+            return None
+
+        return MetadataAttributionClass(
+            source=action_source,
+            time=int(time.time() * 1000.0),
+            actor=context.get("actor", "urn:li:corpuser:__datahub_system"),
+            sourceDetail=context,
+        )
+
     def add_tags_to_dataset(
         self,
         entity_urn: str,
         dataset_tags: List[str],
         field_tags: Optional[Dict] = None,
         context: Optional[Dict] = None,
+        action_urn: Optional[str] = None,
     ) -> None:
         if field_tags is None:
             field_tags = {}
+
+        # Create attribution - prefer action_urn parameter, fallback to context
+        attribution = None
+        if action_urn:
+            attribution = MetadataAttributionClass(
+                source=action_urn,
+                time=int(time.time() * 1000.0),
+                actor=context.get("actor", "urn:li:corpuser:__datahub_system")
+                if context
+                else "urn:li:corpuser:__datahub_system",
+                sourceDetail=context if context else {},
+            )
+        else:
+            attribution = self._create_attribution_from_context(context)
+
         dataset = DatasetPatchBuilder(entity_urn)
         for t in dataset_tags:
             dataset.add_tag(
                 tag=TagAssociationClass(
-                    tag=t, context=json.dumps(context) if context else None
+                    tag=t,
+                    context=json.dumps(context) if context else None,
+                    attribution=attribution,
                )
            )
 
@@ -272,7 +311,9 @@ query listIngestionSources($input: ListIngestionSourcesInput!, $execution_start:
             for tag in tags:
                 field_builder.add_tag(
                     tag=TagAssociationClass(
-                        tag=tag, context=json.dumps(context) if context else None
+                        tag=tag,
+                        context=json.dumps(context) if context else None,
+                        attribution=attribution,
                     )
                 )
 
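The net effect of the action_graph.py change: add_tags_to_dataset grows an optional action_urn parameter and stamps a MetadataAttributionClass onto every TagAssociationClass it patches, preferring action_urn and otherwise deriving the source from the context's propagation_source/source keys. A hypothetical caller sketch, assuming the wrapper class in this module is AcrylDataHubGraph and that it wraps a DataHubGraph client (neither detail is shown in these hunks):

```python
from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph
from datahub_actions.api.action_graph import AcrylDataHubGraph

# Assumed wiring: AcrylDataHubGraph wrapping a DataHubGraph client.
graph = AcrylDataHubGraph(
    DataHubGraph(DatahubClientConfig(server="http://localhost:8080"))
)
graph.add_tags_to_dataset(
    entity_urn="urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
    dataset_tags=["urn:li:tag:pii"],
    # Hypothetical context/action values for illustration only.
    context={"propagation_source": "urn:li:dataHubAction:tag_propagation"},
    action_urn="urn:li:dataHubAction:tag_propagation",  # takes precedence over the context source
)
```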
--- datahub_actions/event/event_registry.py
+++ datahub_actions/event/event_registry.py
@@ -18,6 +18,7 @@ from datahub.ingestion.api.registry import PluginRegistry
 from datahub.metadata.schema_classes import (
     EntityChangeEventClass,
     MetadataChangeLogClass,
+    RelationshipChangeEventClass,
 )
 from datahub_actions.event.event import Event
 
@@ -80,10 +81,35 @@ class EntityChangeEvent(EntityChangeEventClass, Event):
             json_obj["parameters"] = self._inner_dict["__parameters_json"]
         return json.dumps(json_obj)
 
+    @property
+    def safe_parameters(self) -> dict:
+        return self.parameters or self.get("__parameters_json") or {}  # type: ignore
+
+
+class RelationshipChangeEvent(RelationshipChangeEventClass, Event):
+    @classmethod
+    def from_class(
+        cls, clazz: RelationshipChangeEventClass
+    ) -> "RelationshipChangeEvent":
+        instance = cls._construct({})
+        instance._restore_defaults()
+        # Shallow map inner dictionaries.
+        instance._inner_dict = clazz._inner_dict
+        return instance
+
+    @classmethod
+    def from_json(cls, json_str: str) -> "Event":
+        json_obj = json.loads(json_str)
+        return cls.from_class(cls.from_obj(json_obj))
+
+    def as_json(self) -> str:
+        return json.dumps(self.to_obj())
+
 
 # Standard Event Types for easy reference.
 ENTITY_CHANGE_EVENT_V1_TYPE = "EntityChangeEvent_v1"
 METADATA_CHANGE_LOG_EVENT_V1_TYPE = "MetadataChangeLogEvent_v1"
+RELATIONSHIP_CHANGE_EVENT_V1_TYPE = "RelationshipChangeEvent_v1"
 
 # Lightweight Event Registry
 event_registry = PluginRegistry[Event]()
@@ -91,3 +117,4 @@ event_registry = PluginRegistry[Event]()
 # Register standard event library. Each type can be considered a separate "stream" / "topic"
 event_registry.register(METADATA_CHANGE_LOG_EVENT_V1_TYPE, MetadataChangeLogEvent)
 event_registry.register(ENTITY_CHANGE_EVENT_V1_TYPE, EntityChangeEvent)
+event_registry.register(RELATIONSHIP_CHANGE_EVENT_V1_TYPE, RelationshipChangeEvent)
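event_registry.py now exposes a RelationshipChangeEvent wrapper, registered under RelationshipChangeEvent_v1, alongside a safe_parameters helper on EntityChangeEvent. A small sketch of how a consumer would resolve the new event class from the registry; the payload fields themselves are defined by RelationshipChangeEventClass in acryl-datahub and are not part of this diff:

```python
from datahub_actions.event.event_registry import (
    RELATIONSHIP_CHANGE_EVENT_V1_TYPE,
    event_registry,
)

# Resolve the class registered for the new stream, the same way the framework
# resolves EntityChangeEvent_v1 and MetadataChangeLogEvent_v1.
event_class = event_registry.get(RELATIONSHIP_CHANGE_EVENT_V1_TYPE)
print(event_class.__name__)  # RelationshipChangeEvent

# A source would then call event_class.from_json(raw_payload) and re-serialize
# with .as_json(); raw_payload is whatever JSON the platform event carries.
```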
--- datahub_actions/plugin/source/acryl/constants.py
+++ datahub_actions/plugin/source/acryl/constants.py
@@ -2,3 +2,4 @@ PLATFORM_EVENT_TOPIC_NAME = "PlatformEvent_v1"
 METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME = "MetadataChangeLog_Versioned_v1"
 METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME = "MetadataChangeLog_Timeseries_v1"
 ENTITY_CHANGE_EVENT_NAME = "entityChangeEvent"
+RELATIONSHIP_CHANGE_EVENT_NAME = "relationshipChangeEvent"
--- datahub_actions/plugin/source/acryl/datahub_cloud_event_source.py
+++ datahub_actions/plugin/source/acryl/datahub_cloud_event_source.py
@@ -14,8 +14,10 @@ from datahub_actions.event.event_envelope import EventEnvelope
 from datahub_actions.event.event_registry import (
     ENTITY_CHANGE_EVENT_V1_TYPE,
     METADATA_CHANGE_LOG_EVENT_V1_TYPE,
+    RELATIONSHIP_CHANGE_EVENT_V1_TYPE,
     EntityChangeEvent,
     MetadataChangeLogEvent,
+    RelationshipChangeEvent,
 )
 
 # May or may not need these.
@@ -25,6 +27,7 @@ from datahub_actions.plugin.source.acryl.constants import (
     METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME,
     METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME,
     PLATFORM_EVENT_TOPIC_NAME,
+    RELATIONSHIP_CHANGE_EVENT_NAME,
 )
 from datahub_actions.plugin.source.acryl.datahub_cloud_events_ack_manager import (
     AckManager,
@@ -261,8 +264,11 @@ class DataHubEventSource(EventSource):
             post_json_transform(value["payload"])
         )
         if ENTITY_CHANGE_EVENT_NAME == value["name"]:
-            event = build_entity_change_event(payload)
-            yield EventEnvelope(ENTITY_CHANGE_EVENT_V1_TYPE, event, {})
+            ece = build_entity_change_event(payload)
+            yield EventEnvelope(ENTITY_CHANGE_EVENT_V1_TYPE, ece, {})
+        elif RELATIONSHIP_CHANGE_EVENT_NAME == value["name"]:
+            rce = RelationshipChangeEvent.from_json(payload.get("value"))
+            yield EventEnvelope(RELATIONSHIP_CHANGE_EVENT_V1_TYPE, rce, {})
 
     @staticmethod
     def handle_mcl(msg: ExternalEvent) -> Iterable[EventEnvelope]:
--- datahub_actions/plugin/source/kafka/kafka_event_source.py
+++ datahub_actions/plugin/source/kafka/kafka_event_source.py
@@ -33,8 +33,10 @@ from datahub_actions.event.event_envelope import EventEnvelope
 from datahub_actions.event.event_registry import (
     ENTITY_CHANGE_EVENT_V1_TYPE,
     METADATA_CHANGE_LOG_EVENT_V1_TYPE,
+    RELATIONSHIP_CHANGE_EVENT_V1_TYPE,
     EntityChangeEvent,
     MetadataChangeLogEvent,
+    RelationshipChangeEvent,
 )
 
 # May or may not need these.
@@ -46,6 +48,7 @@ logger = logging.getLogger(__name__)
 
 
 ENTITY_CHANGE_EVENT_NAME = "entityChangeEvent"
+RELATIONSHIP_CHANGE_EVENT_NAME = "relationshipChangeEvent"
 DEFAULT_TOPIC_ROUTES = {
     "mcl": "MetadataChangeLog_Versioned_v1",
     "mcl_timeseries": "MetadataChangeLog_Timeseries_v1",
@@ -216,9 +219,13 @@ class KafkaEventSource(EventSource):
             post_json_transform(value["payload"])
         )
         if ENTITY_CHANGE_EVENT_NAME == value["name"]:
-            event = build_entity_change_event(payload)
+            ece = build_entity_change_event(payload)
             kafka_meta = build_kafka_meta(msg)
-            yield EventEnvelope(ENTITY_CHANGE_EVENT_V1_TYPE, event, kafka_meta)
+            yield EventEnvelope(ENTITY_CHANGE_EVENT_V1_TYPE, ece, kafka_meta)
+        elif RELATIONSHIP_CHANGE_EVENT_NAME == value["name"]:
+            rce = RelationshipChangeEvent.from_json(payload.get("value"))
+            kafka_meta = build_kafka_meta(msg)
+            yield EventEnvelope(RELATIONSHIP_CHANGE_EVENT_V1_TYPE, rce, kafka_meta)
 
     def close(self) -> None:
         if self.consumer:
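Both event sources now translate platform events named relationshipChangeEvent into RelationshipChangeEvent envelopes typed RelationshipChangeEvent_v1, so existing actions keep receiving only the event types they already handle. A hedged sketch of an action that opts into the new envelope type, following the framework's documented custom-action skeleton (the PipelineContext import path is assumed; it is not shown in this diff):

```python
from datahub_actions.action.action import Action
from datahub_actions.event.event_envelope import EventEnvelope
from datahub_actions.event.event_registry import RELATIONSHIP_CHANGE_EVENT_V1_TYPE
from datahub_actions.pipeline.pipeline_context import PipelineContext


class RelationshipLoggerAction(Action):
    """Logs relationship change events and ignores everything else."""

    @classmethod
    def create(cls, config_dict: dict, ctx: PipelineContext) -> "Action":
        return cls()

    def act(self, event: EventEnvelope) -> None:
        if event.event_type == RELATIONSHIP_CHANGE_EVENT_V1_TYPE:
            print(event.event.as_json())

    def close(self) -> None:
        pass
```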
--- datahub_actions/utils/kafka_msk_iam.py
+++ datahub_actions/utils/kafka_msk_iam.py
@@ -1,13 +1,14 @@
 """Module for AWS MSK IAM authentication."""
 
 import logging
+import os
 
-from aws_msk_iam_sasl_signer_python.msk_iam_sasl_signer import MSKAuthTokenProvider
+from aws_msk_iam_sasl_signer import MSKAuthTokenProvider
 
 logger = logging.getLogger(__name__)
 
 
-def oauth_cb(oauth_config):
+def oauth_cb(oauth_config: dict) -> tuple[str, float]:
     """
     OAuth callback function for AWS MSK IAM authentication.
 
@@ -15,12 +16,17 @@ def oauth_cb(oauth_config):
     for authentication with AWS MSK using IAM.
 
     Returns:
-        tuple: (auth_token, expiry_time_seconds)
+        tuple[str, float]: (auth_token, expiry_time_seconds)
     """
     try:
-        auth_token, expiry_ms = MSKAuthTokenProvider.generate_auth_token()
+        region = (
+            os.getenv("AWS_REGION") or os.getenv("AWS_DEFAULT_REGION") or "us-east-1"
+        )
+        auth_token, expiry_ms = MSKAuthTokenProvider.generate_auth_token(region=region)
         # Convert expiry from milliseconds to seconds as required by Kafka client
-        return auth_token, expiry_ms / 1000
+        return auth_token, float(expiry_ms) / 1000
     except Exception as e:
-        logger.error(f"Error generating AWS MSK IAM authentication token: {e}")
+        logger.error(
+            f"Error generating AWS MSK IAM authentication token: {e}", exc_info=True
+        )
         raise
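kafka_msk_iam.py now imports MSKAuthTokenProvider from the aws_msk_iam_sasl_signer module that the signer package actually installs, resolves the signing region from AWS_REGION or AWS_DEFAULT_REGION (falling back to us-east-1), and logs the full traceback on failure. A sketch of how such a callback is typically wired into a confluent-kafka consumer for MSK IAM auth; the broker address and group id are placeholders, and the package's actual wiring lives outside this diff:

```python
from confluent_kafka import Consumer

from datahub_actions.utils.kafka_msk_iam import oauth_cb

consumer = Consumer(
    {
        "bootstrap.servers": "b-1.example.kafka.us-east-1.amazonaws.com:9098",  # placeholder
        "group.id": "datahub-actions",  # placeholder
        "security.protocol": "SASL_SSL",
        "sasl.mechanism": "OAUTHBEARER",
        # librdkafka invokes this callback to fetch/refresh the MSK IAM token.
        "oauth_cb": oauth_cb,
    }
)
```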