acryl-datahub-cloud 0.3.13.2rc4__py3-none-any.whl → 0.3.14rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub-cloud might be problematic. Click here for more details.

Files changed (33) hide show
  1. acryl_datahub_cloud/_codegen_config.json +1 -1
  2. acryl_datahub_cloud/datahub_usage_reporting/usage_feature_reporter.py +443 -34
  3. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/identity/__init__.py +6 -0
  4. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/search/features/__init__.py +2 -0
  5. acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/platform/event/v1/__init__.py +4 -0
  6. acryl_datahub_cloud/metadata/schema.avsc +445 -107
  7. acryl_datahub_cloud/metadata/schema_classes.py +420 -19
  8. acryl_datahub_cloud/metadata/schemas/ActionRequestInfo.avsc +95 -0
  9. acryl_datahub_cloud/metadata/schemas/AssertionAnalyticsRunEvent.avsc +0 -21
  10. acryl_datahub_cloud/metadata/schemas/ChartInfo.avsc +2 -1
  11. acryl_datahub_cloud/metadata/schemas/CorpUserInvitationStatus.avsc +106 -0
  12. acryl_datahub_cloud/metadata/schemas/CorpUserKey.avsc +2 -0
  13. acryl_datahub_cloud/metadata/schemas/CorpUserUsageFeatures.avsc +93 -0
  14. acryl_datahub_cloud/metadata/schemas/DataHubPageModuleProperties.avsc +13 -2
  15. acryl_datahub_cloud/metadata/schemas/DatasetUsageStatistics.avsc +8 -0
  16. acryl_datahub_cloud/metadata/schemas/InferredMetadata.avsc +69 -0
  17. acryl_datahub_cloud/metadata/schemas/InviteToken.avsc +26 -0
  18. acryl_datahub_cloud/metadata/schemas/LogicalParent.avsc +104 -100
  19. acryl_datahub_cloud/metadata/schemas/MetadataChangeEvent.avsc +98 -45
  20. acryl_datahub_cloud/metadata/schemas/NotificationRequest.avsc +71 -0
  21. acryl_datahub_cloud/metadata/schemas/Ownership.avsc +69 -0
  22. acryl_datahub_cloud/metadata/schemas/RelationshipChangeEvent.avsc +215 -0
  23. acryl_datahub_cloud/metadata/schemas/SchemaFieldKey.avsc +2 -0
  24. acryl_datahub_cloud/metadata/schemas/StructuredProperties.avsc +69 -0
  25. acryl_datahub_cloud/metadata/schemas/StructuredPropertyDefinition.avsc +3 -0
  26. acryl_datahub_cloud/sdk/assertions_client.py +21 -7
  27. acryl_datahub_cloud/sdk/resolver_client.py +4 -1
  28. acryl_datahub_cloud/sdk/subscription_client.py +8 -3
  29. {acryl_datahub_cloud-0.3.13.2rc4.dist-info → acryl_datahub_cloud-0.3.14rc0.dist-info}/METADATA +48 -48
  30. {acryl_datahub_cloud-0.3.13.2rc4.dist-info → acryl_datahub_cloud-0.3.14rc0.dist-info}/RECORD +33 -30
  31. {acryl_datahub_cloud-0.3.13.2rc4.dist-info → acryl_datahub_cloud-0.3.14rc0.dist-info}/WHEEL +0 -0
  32. {acryl_datahub_cloud-0.3.13.2rc4.dist-info → acryl_datahub_cloud-0.3.14rc0.dist-info}/entry_points.txt +0 -0
  33. {acryl_datahub_cloud-0.3.13.2rc4.dist-info → acryl_datahub_cloud-0.3.14rc0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,215 @@
1
+ {
2
+ "type": "record",
3
+ "Event": {
4
+ "name": "relationshipChangeEvent"
5
+ },
6
+ "name": "RelationshipChangeEvent",
7
+ "namespace": "com.linkedin.pegasus2avro.platform.event.v1",
8
+ "fields": [
9
+ {
10
+ "type": [
11
+ "null",
12
+ {
13
+ "type": "record",
14
+ "name": "KafkaAuditHeader",
15
+ "namespace": "com.linkedin.events",
16
+ "fields": [
17
+ {
18
+ "compliance": [
19
+ {
20
+ "policy": "EVENT_TIME"
21
+ }
22
+ ],
23
+ "type": "long",
24
+ "name": "time",
25
+ "doc": "The time at which the event was emitted into kafka."
26
+ },
27
+ {
28
+ "compliance": "NONE",
29
+ "type": "string",
30
+ "name": "server",
31
+ "doc": "The fully qualified name of the host from which the event is being emitted."
32
+ },
33
+ {
34
+ "compliance": "NONE",
35
+ "type": [
36
+ "null",
37
+ "string"
38
+ ],
39
+ "name": "instance",
40
+ "default": null,
41
+ "doc": "The instance on the server from which the event is being emitted. e.g. i001"
42
+ },
43
+ {
44
+ "compliance": "NONE",
45
+ "type": "string",
46
+ "name": "appName",
47
+ "doc": "The name of the application from which the event is being emitted. see go/appname"
48
+ },
49
+ {
50
+ "compliance": "NONE",
51
+ "type": {
52
+ "type": "fixed",
53
+ "name": "UUID",
54
+ "namespace": "com.linkedin.events",
55
+ "size": 16
56
+ },
57
+ "name": "messageId",
58
+ "doc": "A unique identifier for the message"
59
+ },
60
+ {
61
+ "compliance": "NONE",
62
+ "type": [
63
+ "null",
64
+ "int"
65
+ ],
66
+ "name": "auditVersion",
67
+ "default": null,
68
+ "doc": "The version that is being used for auditing. In version 0, the audit trail buckets events into 10 minute audit windows based on the EventHeader timestamp. In version 1, the audit trail buckets events as follows: if the schema has an outer KafkaAuditHeader, use the outer audit header timestamp for bucketing; else if the EventHeader has an inner KafkaAuditHeader use that inner audit header's timestamp for bucketing"
69
+ },
70
+ {
71
+ "compliance": "NONE",
72
+ "type": [
73
+ "null",
74
+ "string"
75
+ ],
76
+ "name": "fabricUrn",
77
+ "default": null,
78
+ "doc": "The fabricUrn of the host from which the event is being emitted. Fabric Urn in the format of urn:li:fabric:{fabric_name}. See go/fabric."
79
+ },
80
+ {
81
+ "compliance": "NONE",
82
+ "type": [
83
+ "null",
84
+ "string"
85
+ ],
86
+ "name": "clusterConnectionString",
87
+ "default": null,
88
+ "doc": "This is a String that the client uses to establish some kind of connection with the Kafka cluster. The exact format of it depends on specific versions of clients and brokers. This information could potentially identify the fabric and cluster with which the client is producing to or consuming from."
89
+ }
90
+ ],
91
+ "doc": "This header records information about the context of an event as it is emitted into kafka and is intended to be used by the kafka audit application. For more information see go/kafkaauditheader"
92
+ }
93
+ ],
94
+ "name": "auditHeader",
95
+ "default": null,
96
+ "doc": "Kafka audit header containing metadata about the message itself.\nIncludes information like message ID, timestamp, and server details."
97
+ },
98
+ {
99
+ "java": {
100
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
101
+ },
102
+ "type": "string",
103
+ "name": "sourceUrn",
104
+ "doc": "The URN (Uniform Resource Name) of the source entity in the relationship.\nIn a downstream relationship example, this would be the URN of the upstream dataset.",
105
+ "Urn": "Urn"
106
+ },
107
+ {
108
+ "java": {
109
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
110
+ },
111
+ "type": "string",
112
+ "name": "destinationUrn",
113
+ "doc": "The URN of the destination entity in the relationship.\nIn a downstream relationship example, this would be the URN of the downstream dataset.",
114
+ "Urn": "Urn"
115
+ },
116
+ {
117
+ "type": {
118
+ "type": "enum",
119
+ "name": "RelationshipChangeOperation",
120
+ "namespace": "com.linkedin.pegasus2avro.platform.event.v1",
121
+ "symbols": [
122
+ "ADD",
123
+ "REMOVE",
124
+ "RESTATE"
125
+ ]
126
+ },
127
+ "name": "operation",
128
+ "doc": "The operation being performed on this relationship.\nTypically includes operations like ADD, REMOVE, or RESTATE."
129
+ },
130
+ {
131
+ "type": "string",
132
+ "name": "relationshipType",
133
+ "doc": "The type/category of relationship being established or modified.\nExamples: \"DownstreamOf\", \"Contains\", \"OwnedBy\", \"DerivedFrom\", etc."
134
+ },
135
+ {
136
+ "type": [
137
+ "null",
138
+ "string"
139
+ ],
140
+ "name": "lifecycleOwner",
141
+ "default": null,
142
+ "doc": "The system or service responsible for managing the lifecycle of this relationship.\nThis helps identify which component has authority over the relationship."
143
+ },
144
+ {
145
+ "type": [
146
+ "null",
147
+ "string"
148
+ ],
149
+ "name": "via",
150
+ "default": null,
151
+ "doc": "Information about how or through what means this relationship was established.\nCould indicate a specific pipeline, process, or tool that discovered/created the relationship."
152
+ },
153
+ {
154
+ "type": [
155
+ "null",
156
+ {
157
+ "type": "map",
158
+ "values": "string"
159
+ }
160
+ ],
161
+ "name": "properties",
162
+ "default": null,
163
+ "doc": "Additional custom properties associated with this relationship.\nAllows for flexible extension without changing the schema."
164
+ },
165
+ {
166
+ "type": {
167
+ "type": "record",
168
+ "name": "AuditStamp",
169
+ "namespace": "com.linkedin.pegasus2avro.common",
170
+ "fields": [
171
+ {
172
+ "type": "long",
173
+ "name": "time",
174
+ "doc": "When did the resource/association/sub-resource move into the specific lifecycle stage represented by this AuditEvent."
175
+ },
176
+ {
177
+ "java": {
178
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
179
+ },
180
+ "type": "string",
181
+ "name": "actor",
182
+ "doc": "The entity (e.g. a member URN) which will be credited for moving the resource/association/sub-resource into the specific lifecycle stage. It is also the one used to authorize the change.",
183
+ "Urn": "Urn"
184
+ },
185
+ {
186
+ "java": {
187
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
188
+ },
189
+ "type": [
190
+ "null",
191
+ "string"
192
+ ],
193
+ "name": "impersonator",
194
+ "default": null,
195
+ "doc": "The entity (e.g. a service URN) which performs the change on behalf of the Actor and must be authorized to act as the Actor.",
196
+ "Urn": "Urn"
197
+ },
198
+ {
199
+ "type": [
200
+ "null",
201
+ "string"
202
+ ],
203
+ "name": "message",
204
+ "default": null,
205
+ "doc": "Additional context around how DataHub was informed of the particular change. For example: was the change created by an automated process, or manually."
206
+ }
207
+ ],
208
+ "doc": "Data captured on a resource/association/sub-resource level giving insight into when that resource/association/sub-resource moved into a particular lifecycle stage, and who acted to move it into that specific lifecycle stage."
209
+ },
210
+ "name": "auditStamp",
211
+ "doc": "Stores information about who made this change and when.\nContains the actor (user or system) that performed the action and the timestamp."
212
+ }
213
+ ],
214
+ "doc": "Kafka event for proposing a relationship change between two entities.\nFor example, when dataset1 establishes a new downstream relationship with dataset2."
215
+ }
@@ -16,6 +16,8 @@
16
16
  "deprecation",
17
17
  "subTypes",
18
18
  "logicalParent",
19
+ "globalTags",
20
+ "glossaryTerms",
19
21
  "schemaFieldProfile",
20
22
  "lineageFeatures"
21
23
  ]
@@ -94,6 +94,75 @@
94
94
  "name": "lastModified",
95
95
  "default": null,
96
96
  "doc": "Audit stamp containing who last modified this relationship edge and when"
97
+ },
98
+ {
99
+ "Searchable": {
100
+ "/actor": {
101
+ "fieldName": "structuredPropertyAttributionActors",
102
+ "fieldType": "URN",
103
+ "queryByDefault": false
104
+ },
105
+ "/source": {
106
+ "fieldName": "structuredPropertyAttributionSources",
107
+ "fieldType": "URN",
108
+ "queryByDefault": false
109
+ },
110
+ "/time": {
111
+ "fieldName": "structuredPropertyAttributionDates",
112
+ "fieldType": "DATETIME",
113
+ "queryByDefault": false
114
+ }
115
+ },
116
+ "type": [
117
+ "null",
118
+ {
119
+ "type": "record",
120
+ "name": "MetadataAttribution",
121
+ "namespace": "com.linkedin.pegasus2avro.common",
122
+ "fields": [
123
+ {
124
+ "type": "long",
125
+ "name": "time",
126
+ "doc": "When this metadata was updated."
127
+ },
128
+ {
129
+ "java": {
130
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
131
+ },
132
+ "type": "string",
133
+ "name": "actor",
134
+ "doc": "The entity (e.g. a member URN) responsible for applying the associated metadata. This can\neither be a user (in case of UI edits) or the datahub system for automation.",
135
+ "Urn": "Urn"
136
+ },
137
+ {
138
+ "java": {
139
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
140
+ },
141
+ "type": [
142
+ "null",
143
+ "string"
144
+ ],
145
+ "name": "source",
146
+ "default": null,
147
+ "doc": "The DataHub source responsible for applying the associated metadata. This will only be filled out\nwhen a DataHub source is responsible. This includes the specific metadata test urn, the automation urn.",
148
+ "Urn": "Urn"
149
+ },
150
+ {
151
+ "type": {
152
+ "type": "map",
153
+ "values": "string"
154
+ },
155
+ "name": "sourceDetail",
156
+ "default": {},
157
+ "doc": "The details associated with why this metadata was applied. For example, this could include\nthe actual regex rule, sql statement, ingestion pipeline ID, etc.\nAlso can include flags like 'propagated'=true or 'inferred'=true."
158
+ }
159
+ ],
160
+ "doc": "Information about who, why, and how this metadata was applied"
161
+ }
162
+ ],
163
+ "name": "attribution",
164
+ "default": null,
165
+ "doc": "Information about who, why, and how this metadata was applied"
97
166
  }
98
167
  ]
99
168
  }
@@ -23,6 +23,9 @@
23
23
  "doc": "The display name of the property. This is the name that will be shown in the UI and can be used to look up the property id."
24
24
  },
25
25
  {
26
+ "Searchable": {
27
+ "fieldType": "URN"
28
+ },
26
29
  "UrnValidation": {
27
30
  "entityTypes": [
28
31
  "dataType"
@@ -312,6 +312,7 @@ class AssertionsClient:
312
312
  )
313
313
  return self._create_smart_freshness_assertion(
314
314
  dataset_urn=dataset_urn,
315
+ urn=urn,
315
316
  display_name=display_name,
316
317
  detection_mechanism=detection_mechanism,
317
318
  sensitivity=sensitivity,
@@ -396,6 +397,7 @@ class AssertionsClient:
396
397
  )
397
398
  return self._create_smart_volume_assertion(
398
399
  dataset_urn=dataset_urn,
400
+ urn=urn,
399
401
  display_name=display_name,
400
402
  detection_mechanism=detection_mechanism,
401
403
  sensitivity=sensitivity,
@@ -483,6 +485,7 @@ class AssertionsClient:
483
485
  )
484
486
  return self._create_freshness_assertion(
485
487
  dataset_urn=dataset_urn,
488
+ urn=urn,
486
489
  display_name=display_name,
487
490
  detection_mechanism=detection_mechanism,
488
491
  incident_behavior=incident_behavior,
@@ -571,6 +574,7 @@ class AssertionsClient:
571
574
  # Extract criteria from definition to call the new signature
572
575
  parsed_criteria = VolumeAssertionCriteria.parse(criteria)
573
576
  return self._create_volume_assertion(
577
+ urn=urn,
574
578
  dataset_urn=dataset_urn,
575
579
  display_name=display_name,
576
580
  detection_mechanism=detection_mechanism,
@@ -673,6 +677,7 @@ class AssertionsClient:
673
677
  )
674
678
  return self._create_sql_assertion(
675
679
  dataset_urn=dataset_urn,
680
+ urn=urn,
676
681
  display_name=display_name,
677
682
  criteria_condition=criteria.condition,
678
683
  criteria_parameters=criteria.parameters,
@@ -1362,6 +1367,7 @@ class AssertionsClient:
1362
1367
  self,
1363
1368
  *,
1364
1369
  dataset_urn: Union[str, DatasetUrn],
1370
+ urn: Optional[Union[str, AssertionUrn]] = None,
1365
1371
  display_name: Optional[str] = None,
1366
1372
  enabled: bool = True,
1367
1373
  detection_mechanism: DetectionMechanismInputTypes = None,
@@ -1439,7 +1445,7 @@ class AssertionsClient:
1439
1445
  )
1440
1446
  created_by = DEFAULT_CREATED_BY
1441
1447
  assertion_input = _SmartFreshnessAssertionInput(
1442
- urn=None,
1448
+ urn=urn,
1443
1449
  entity_client=self.client.entities,
1444
1450
  dataset_urn=dataset_urn,
1445
1451
  display_name=display_name,
@@ -1474,6 +1480,7 @@ class AssertionsClient:
1474
1480
  *,
1475
1481
  dataset_urn: Union[str, DatasetUrn],
1476
1482
  display_name: Optional[str] = None,
1483
+ urn: Optional[Union[str, AssertionUrn]] = None,
1477
1484
  enabled: bool = True,
1478
1485
  detection_mechanism: DetectionMechanismInputTypes = None,
1479
1486
  sensitivity: Optional[Union[str, InferenceSensitivity]] = None,
@@ -1560,7 +1567,7 @@ class AssertionsClient:
1560
1567
  )
1561
1568
  created_by = DEFAULT_CREATED_BY
1562
1569
  assertion_input = _SmartVolumeAssertionInput(
1563
- urn=None,
1570
+ urn=urn,
1564
1571
  entity_client=self.client.entities,
1565
1572
  dataset_urn=dataset_urn,
1566
1573
  display_name=display_name,
@@ -1595,6 +1602,7 @@ class AssertionsClient:
1595
1602
  self,
1596
1603
  *,
1597
1604
  dataset_urn: Union[str, DatasetUrn],
1605
+ urn: Optional[Union[str, AssertionUrn]] = None,
1598
1606
  display_name: Optional[str] = None,
1599
1607
  enabled: bool = True,
1600
1608
  freshness_schedule_check_type: Optional[
@@ -1667,7 +1675,7 @@ class AssertionsClient:
1667
1675
  )
1668
1676
  created_by = DEFAULT_CREATED_BY
1669
1677
  assertion_input = _FreshnessAssertionInput(
1670
- urn=None,
1678
+ urn=urn,
1671
1679
  entity_client=self.client.entities,
1672
1680
  dataset_urn=dataset_urn,
1673
1681
  display_name=display_name,
@@ -1701,6 +1709,7 @@ class AssertionsClient:
1701
1709
  self,
1702
1710
  *,
1703
1711
  dataset_urn: Union[str, DatasetUrn],
1712
+ urn: Optional[Union[str, AssertionUrn]] = None,
1704
1713
  display_name: Optional[str] = None,
1705
1714
  enabled: bool = True,
1706
1715
  detection_mechanism: DetectionMechanismInputTypes = None,
@@ -1784,7 +1793,7 @@ class AssertionsClient:
1784
1793
  }
1785
1794
 
1786
1795
  assertion_input = _VolumeAssertionInput(
1787
- urn=None,
1796
+ urn=urn,
1788
1797
  entity_client=self.client.entities,
1789
1798
  dataset_urn=dataset_urn,
1790
1799
  display_name=display_name,
@@ -1817,6 +1826,7 @@ class AssertionsClient:
1817
1826
  self,
1818
1827
  *,
1819
1828
  dataset_urn: Union[str, DatasetUrn],
1829
+ urn: Optional[Union[str, AssertionUrn]] = None,
1820
1830
  display_name: Optional[str] = None,
1821
1831
  enabled: bool = True,
1822
1832
  criteria_condition: Union[SqlAssertionCondition, str],
@@ -1884,7 +1894,7 @@ class AssertionsClient:
1884
1894
  parameters=criteria_parameters,
1885
1895
  )
1886
1896
  assertion_input = _SqlAssertionInput(
1887
- urn=None,
1897
+ urn=urn,
1888
1898
  entity_client=self.client.entities,
1889
1899
  dataset_urn=dataset_urn,
1890
1900
  display_name=display_name,
@@ -2555,6 +2565,7 @@ class AssertionsClient:
2555
2565
  self,
2556
2566
  *,
2557
2567
  dataset_urn: Union[str, DatasetUrn],
2568
+ urn: Optional[Union[str, AssertionUrn]] = None,
2558
2569
  column_name: str,
2559
2570
  metric_type: MetricInputType,
2560
2571
  display_name: Optional[str] = None,
@@ -2646,7 +2657,7 @@ class AssertionsClient:
2646
2657
  )
2647
2658
  created_by = DEFAULT_CREATED_BY
2648
2659
  assertion_input = _SmartColumnMetricAssertionInput(
2649
- urn=None,
2660
+ urn=urn,
2650
2661
  entity_client=self.client.entities,
2651
2662
  dataset_urn=dataset_urn,
2652
2663
  column_name=column_name,
@@ -2726,6 +2737,7 @@ class AssertionsClient:
2726
2737
  )
2727
2738
  return self._create_smart_column_metric_assertion(
2728
2739
  dataset_urn=dataset_urn,
2740
+ urn=urn,
2729
2741
  column_name=column_name,
2730
2742
  metric_type=metric_type,
2731
2743
  schedule=schedule,
@@ -3553,6 +3565,7 @@ class AssertionsClient:
3553
3565
  self,
3554
3566
  *,
3555
3567
  dataset_urn: Union[str, DatasetUrn],
3568
+ urn: Optional[Union[str, AssertionUrn]] = None,
3556
3569
  column_name: str,
3557
3570
  metric_type: MetricInputType,
3558
3571
  operator: OperatorInputType,
@@ -3594,7 +3607,7 @@ class AssertionsClient:
3594
3607
  )
3595
3608
  created_by = DEFAULT_CREATED_BY
3596
3609
  assertion_input = _ColumnMetricAssertionInput(
3597
- urn=None,
3610
+ urn=urn,
3598
3611
  entity_client=self.client.entities,
3599
3612
  dataset_urn=dataset_urn,
3600
3613
  column_name=column_name,
@@ -3680,6 +3693,7 @@ class AssertionsClient:
3680
3693
  )
3681
3694
  return self._create_column_metric_assertion(
3682
3695
  dataset_urn=dataset_urn,
3696
+ urn=urn,
3683
3697
  column_name=column_name,
3684
3698
  metric_type=metric_type,
3685
3699
  operator=operator,
@@ -17,6 +17,7 @@ class ResolverClient(OSSResolverClient):
17
17
  *,
18
18
  entity_urn: Optional[str] = None,
19
19
  actor_urn: Optional[str] = None,
20
+ skip_cache: bool = False,
20
21
  ) -> List[SubscriptionUrn]:
21
22
  """Retrieve subscriptions for a given entity or actor, or both if both are given.
22
23
  Args:
@@ -35,5 +36,7 @@ class ResolverClient(OSSResolverClient):
35
36
  filters.append(F.custom_filter("actorUrn", "EQUAL", [actor_urn]))
36
37
 
37
38
  filter = F.and_(*filters)
38
- subscriptions = list(self._client.search.get_urns(filter=filter))
39
+ subscriptions = list(
40
+ self._client.search.get_urns(filter=filter, skip_cache=skip_cache)
41
+ )
39
42
  return [SubscriptionUrn.from_string(urn) for urn in subscriptions]
@@ -9,6 +9,7 @@ from acryl_datahub_cloud.sdk.entities.assertion import Assertion
9
9
  from acryl_datahub_cloud.sdk.entities.subscription import Subscription
10
10
  from datahub.emitter.enum_helpers import get_enum_options
11
11
  from datahub.emitter.mce_builder import make_ts_millis
12
+ from datahub.emitter.rest_emitter import EmitMode
12
13
  from datahub.errors import SdkUsageError
13
14
  from datahub.metadata.urns import AssertionUrn, CorpGroupUrn, CorpUserUrn, DatasetUrn
14
15
  from datahub.sdk._utils import DEFAULT_ACTOR_URN
@@ -116,6 +117,7 @@ class SubscriptionClient:
116
117
  existing_subscriptions = self.client.resolve.subscription( # type: ignore[attr-defined]
117
118
  entity_urn=dataset_urn.urn(),
118
119
  actor_urn=parsed_subscriber_urn.urn(),
120
+ skip_cache=True,
119
121
  )
120
122
  if not existing_subscriptions:
121
123
  # new subscription
@@ -138,7 +140,7 @@ class SubscriptionClient:
138
140
  updatedOn=self._create_audit_stamp(),
139
141
  ),
140
142
  )
141
- self.client.entities.upsert(subscription)
143
+ self.client.entities.upsert(subscription, emit_mode=EmitMode.SYNC_WAIT)
142
144
  logger.info(f"Subscription created: {subscription.urn}")
143
145
  return
144
146
  elif len(existing_subscriptions) == 1:
@@ -157,7 +159,9 @@ class SubscriptionClient:
157
159
  new_assertion_urn=assertion_urn,
158
160
  )
159
161
  existing_subscription_entity.info.updatedOn = self._create_audit_stamp()
160
- self.client.entities.upsert(existing_subscription_entity)
162
+ self.client.entities.upsert(
163
+ existing_subscription_entity, emit_mode=EmitMode.SYNC_WAIT
164
+ )
161
165
  logger.info(f"Subscription updated: {existing_subscription_entity.urn}")
162
166
  return
163
167
  else:
@@ -273,6 +277,7 @@ class SubscriptionClient:
273
277
  existing_subscription_urns = self.client.resolve.subscription( # type: ignore[attr-defined]
274
278
  entity_urn=dataset_urn.urn(),
275
279
  actor_urn=parsed_subscriber_urn.urn(),
280
+ skip_cache=True,
276
281
  )
277
282
 
278
283
  if not existing_subscription_urns:
@@ -326,7 +331,7 @@ class SubscriptionClient:
326
331
  # Update the subscription with remaining change types
327
332
  subscription_entity.info.entityChangeTypes = updated_change_types
328
333
  subscription_entity.info.updatedOn = self._create_audit_stamp()
329
- self.client.entities.upsert(subscription_entity)
334
+ self.client.entities.upsert(subscription_entity, emit_mode=EmitMode.SYNC_WAIT)
330
335
  logger.info(f"Subscription updated: {subscription_entity.urn}")
331
336
 
332
337
  def _get_entity_change_types(