acryl-datahub 1.0.0.2rc2__py3-none-any.whl → 1.0.0.2rc3__py3-none-any.whl
This diff shows the contents of publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions.
Potentially problematic release: this version of acryl-datahub might be problematic.
- {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/METADATA +2556 -2556
- {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/RECORD +9 -9
- datahub/_version.py +1 -1
- datahub/api/entities/datajob/datajob.py +17 -0
- datahub/ingestion/source/sigma/sigma.py +6 -2
- {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/WHEEL +0 -0
- {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/top_level.txt +0 -0
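
Of the changed files, datahub/_version.py changes by a single line (+1 -1), presumably just the version-string bump for this release candidate. A minimal, standard-library-only sketch for confirming which build is installed after upgrading (the expected output assumes this rc3 wheel is the one installed):

from importlib.metadata import version

# Reports the installed distribution version; expected to print 1.0.0.2rc3
# after upgrading from 1.0.0.2rc2.
print(version("acryl-datahub"))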
{acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/RECORD CHANGED

@@ -1,7 +1,7 @@
-acryl_datahub-1.0.0.
+acryl_datahub-1.0.0.2rc3.dist-info/licenses/LICENSE,sha256=9xNHpsD0uYF5ONzXsKDCuHHB-xbiCrSbueWXqrTNsxk,11365
 datahub/__init__.py,sha256=aq_i5lVREmoLfYIqcx_pEQicO855YlhD19tWc1eZZNI,59
 datahub/__main__.py,sha256=pegIvQ9hzK7IhqVeUi1MeADSZ2QlP-D3K0OQdEg55RU,106
-datahub/_version.py,sha256=
+datahub/_version.py,sha256=zEgu-5D0-B51vKVharilRcGBBRiiNaqIfj9tNzWK38c,323
 datahub/entrypoints.py,sha256=2TYgHhs3sCxJlojIHjqfxzt3_ImPwPzq4vBtsUuMqu4,8885
 datahub/errors.py,sha256=BzKdcmYseHOt36zfjJXc17WNutFhp9Y23cU_L6cIkxc,612
 datahub/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -42,7 +42,7 @@ datahub/api/entities/datacontract/freshness_assertion.py,sha256=TpKqNeUzeGH9wrYq
 datahub/api/entities/datacontract/schema_assertion.py,sha256=24VQ0lm4LxWbpIrho2R6Atc80yqo76Vu7AN2LZiMOzc,2418
 datahub/api/entities/datajob/__init__.py,sha256=suzCs4cLphQ64oDG7meXsJVp6ya9_Glzqamoq_ti5GA,222
 datahub/api/entities/datajob/dataflow.py,sha256=Ajs6XP7-WMK4OwSm0JdFuGwggZhx-l5S-fwor68BBYk,6954
-datahub/api/entities/datajob/datajob.py,sha256=
+datahub/api/entities/datajob/datajob.py,sha256=tAxdxy8djDKD2THKIYEAsNi1Ly_eRpmBjT6fASCnUqg,7944
 datahub/api/entities/dataprocess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datahub/api/entities/dataprocess/dataprocess_instance.py,sha256=PIiGXTQLPWl_1rY43N1H-ygDXyzQrSQl0FOLhZ9-YX4,18853
 datahub/api/entities/dataproduct/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -441,7 +441,7 @@ datahub/ingestion/source/schema_inference/parquet.py,sha256=CdqsNuiabLLCulWbuPMs
 datahub/ingestion/source/sigma/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datahub/ingestion/source/sigma/config.py,sha256=yfdKQYvI5hKVl8gNAKIcJe-VW3klvdDqYbUP76gJQDI,3812
 datahub/ingestion/source/sigma/data_classes.py,sha256=YZkkzwftV34mq5c_4jlC2PCSiRKt4hvHjmqikLQhl1I,2012
-datahub/ingestion/source/sigma/sigma.py,sha256=
+datahub/ingestion/source/sigma/sigma.py,sha256=ucODIa5KUGr3WSoo7VgCt8uFaKRbSDlwsdVMAcjPLpQ,24378
 datahub/ingestion/source/sigma/sigma_api.py,sha256=SVvbUs2vjueUdDa-3FzeMsaX5pNpApVI192P7EZzPcI,17870
 datahub/ingestion/source/slack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datahub/ingestion/source/slack/slack.py,sha256=3N7Yp-u9DvBmo536Z6-pQTrJgSJ3i742GePSgjlBOUU,27616
@@ -1044,8 +1044,8 @@ datahub_provider/operators/datahub_assertion_operator.py,sha256=uvTQ-jk2F0sbqqxp
 datahub_provider/operators/datahub_assertion_sensor.py,sha256=lCBj_3x1cf5GMNpHdfkpHuyHfVxsm6ff5x2Z5iizcAo,140
 datahub_provider/operators/datahub_operation_operator.py,sha256=aevDp2FzX7FxGlXrR0khoHNbxbhKR2qPEX5e8O2Jyzw,174
 datahub_provider/operators/datahub_operation_sensor.py,sha256=8fcdVBCEPgqy1etTXgLoiHoJrRt_nzFZQMdSzHqSG7M,168
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
-acryl_datahub-1.0.0.
+acryl_datahub-1.0.0.2rc3.dist-info/METADATA,sha256=QM27NG1azmKgGpnS7CNZpX7fANAZeoQQe8etXepFIaQ,176849
+acryl_datahub-1.0.0.2rc3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+acryl_datahub-1.0.0.2rc3.dist-info/entry_points.txt,sha256=o3mDeJXSKhsy7XLkuogihraiabBdLn9HaizYXPrxmk0,9710
+acryl_datahub-1.0.0.2rc3.dist-info/top_level.txt,sha256=iLjSrLK5ox1YVYcglRUkcvfZPvKlobBWx7CTUXx8_GI,25
+acryl_datahub-1.0.0.2rc3.dist-info/RECORD,,
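
Each RECORD entry above is a path,sha256=<digest>,<size> triple, where the digest is the urlsafe base64-encoded SHA-256 of the file with trailing padding stripped (the standard wheel RECORD format). A short sketch for recomputing an entry from an unpacked wheel; the path passed in is hypothetical:

import base64
import hashlib
from pathlib import Path

def record_entry(relpath: str) -> str:
    # Rebuild one "path,sha256=<digest>,<size>" RECORD line for a local file.
    data = Path(relpath).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{relpath},sha256={digest},{len(data)}"

# Hypothetical usage from the root of an unpacked wheel:
# print(record_entry("datahub/_version.py"))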
datahub/_version.py CHANGED

datahub/api/entities/datajob/datajob.py CHANGED

@@ -10,6 +10,7 @@ from datahub.metadata.schema_classes import (
     AzkabanJobTypeClass,
     DataJobInfoClass,
     DataJobInputOutputClass,
+    DataPlatformInstanceClass,
     FineGrainedLineageClass,
     GlobalTagsClass,
     OwnerClass,
@@ -45,6 +46,7 @@ class DataJob:
         outlets (List[str]): List of urns the DataProcessInstance produces
         fine_grained_lineages: Column lineage for the inlets and outlets
         upstream_urns: List[DataJobUrn] = field(default_factory=list)
+        platform_instance (Optional[str]): The instance of the platform that all assets produced by this orchestrator belong to.
     """
 
     id: str
@@ -61,6 +63,7 @@ class DataJob:
     outlets: List[DatasetUrn] = field(default_factory=list)
     fine_grained_lineages: List[FineGrainedLineageClass] = field(default_factory=list)
     upstream_urns: List[DataJobUrn] = field(default_factory=list)
+    platform_instance: Optional[str] = None
 
     def __post_init__(self):
         job_flow_urn = DataFlowUrn.create_from_ids(
@@ -127,6 +130,20 @@ class DataJob:
         )
         yield mcp
 
+        if self.platform_instance:
+            instance = builder.make_dataplatform_instance_urn(
+                platform=self.flow_urn.orchestrator,
+                instance=self.platform_instance,
+            )
+            mcp = MetadataChangeProposalWrapper(
+                entityUrn=str(self.urn),
+                aspect=DataPlatformInstanceClass(
+                    platform=builder.make_data_platform_urn(self.flow_urn.orchestrator),
+                    instance=instance,
+                ),
+            )
+            yield mcp
+
         mcp = MetadataChangeProposalWrapper(
             entityUrn=str(self.urn),
             aspect=StatusClass(
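
The diff above adds a platform_instance field to DataJob and, when it is set, emits a dataPlatformInstance aspect whose platform is derived from the flow's orchestrator. A rough usage sketch follows; the orchestrator, flow and job ids, and the GMS URL are made-up examples, and the DataJob.emit() helper used at the end is not shown in this diff, so verify it against the installed version:

from datahub.api.entities.datajob import DataJob
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.utilities.urns.data_flow_urn import DataFlowUrn

# Hypothetical flow: an Airflow DAG named "daily_sales" running in PROD.
flow_urn = DataFlowUrn.create_from_ids(
    orchestrator="airflow",
    flow_id="daily_sales",
    env="PROD",
)

# New in this release: platform_instance is emitted as a dataPlatformInstance
# aspect pointing at the "emea" instance of the airflow platform.
job = DataJob(
    id="load_sales_fact",
    flow_urn=flow_urn,
    platform_instance="emea",
)

# Placeholder endpoint; emitting the job sends its aspects, including the new one.
emitter = DatahubRestEmitter(gms_server="http://localhost:8080")
job.emit(emitter)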
datahub/ingestion/source/sigma/sigma.py CHANGED

@@ -170,7 +170,9 @@ class SigmaSource(StatefulIngestionSourceBase, TestableSource):
             if self.config.workspace_pattern.allowed(workspace.name):
                 allowed_workspaces.append(workspace)
             else:
-                self.reporter.workspaces.dropped(
+                self.reporter.workspaces.dropped(
+                    f"{workspace.name} ({workspace.workspaceId})"
+                )
         logger.info(f"Number of allowed workspaces = {len(allowed_workspaces)}")
 
         return allowed_workspaces
@@ -661,7 +663,9 @@ class SigmaSource(StatefulIngestionSourceBase, TestableSource):
             yield from self._gen_workbook_workunit(workbook)
 
         for workspace in self._get_allowed_workspaces():
-            self.reporter.workspaces.processed(
+            self.reporter.workspaces.processed(
+                f"{workspace.name} ({workspace.workspaceId})"
+            )
             yield from self._gen_workspace_workunit(workspace)
         yield from self._gen_sigma_dataset_upstream_lineage_workunit()
 
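
Both reporter calls shown above are gated by config.workspace_pattern, which filters Sigma workspaces by name before any workunits are generated; the report strings now include the workspaceId alongside the name. A small sketch of that filtering, assuming workspace_pattern is DataHub's usual AllowDenyPattern and using made-up workspace names:

from datahub.configuration.common import AllowDenyPattern

# Deny rules win over allow rules; names that fail the check end up in the
# "dropped" bucket of the report, the rest in "processed".
workspace_pattern = AllowDenyPattern(allow=["Finance.*"], deny=["Finance Sandbox"])

for name in ["Finance Reporting", "Finance Sandbox", "Marketing"]:
    print(name, workspace_pattern.allowed(name))
# Finance Reporting True
# Finance Sandbox False
# Marketing False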
Files without changes: {acryl_datahub-1.0.0.2rc2.dist-info → acryl_datahub-1.0.0.2rc3.dist-info}/WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt