acryl-datahub 1.0.0.1rc7__py3-none-any.whl → 1.0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of acryl-datahub might be problematic.

Files changed (76)
  1. {acryl_datahub-1.0.0.1rc7.dist-info → acryl_datahub-1.0.0.2.dist-info}/METADATA +2561 -2561
  2. {acryl_datahub-1.0.0.1rc7.dist-info → acryl_datahub-1.0.0.2.dist-info}/RECORD +75 -73
  3. datahub/_version.py +1 -1
  4. datahub/api/entities/datajob/dataflow.py +15 -0
  5. datahub/api/entities/datajob/datajob.py +17 -0
  6. datahub/api/entities/dataprocess/dataprocess_instance.py +4 -0
  7. datahub/api/entities/dataset/dataset.py +2 -2
  8. datahub/api/entities/structuredproperties/structuredproperties.py +1 -1
  9. datahub/cli/ingest_cli.py +4 -4
  10. datahub/cli/migrate.py +6 -6
  11. datahub/configuration/common.py +1 -1
  12. datahub/emitter/mcp_builder.py +4 -0
  13. datahub/ingestion/api/common.py +9 -0
  14. datahub/ingestion/api/source.py +4 -1
  15. datahub/ingestion/api/source_helpers.py +26 -1
  16. datahub/ingestion/graph/client.py +104 -0
  17. datahub/ingestion/run/pipeline.py +0 -6
  18. datahub/ingestion/source/aws/sagemaker_processors/models.py +4 -4
  19. datahub/ingestion/source/bigquery_v2/lineage.py +1 -1
  20. datahub/ingestion/source/dynamodb/dynamodb.py +1 -1
  21. datahub/ingestion/source/fivetran/fivetran.py +1 -0
  22. datahub/ingestion/source/fivetran/fivetran_log_api.py +1 -1
  23. datahub/ingestion/source/hex/constants.py +5 -0
  24. datahub/ingestion/source/hex/hex.py +150 -22
  25. datahub/ingestion/source/hex/mapper.py +28 -2
  26. datahub/ingestion/source/hex/model.py +10 -2
  27. datahub/ingestion/source/hex/query_fetcher.py +300 -0
  28. datahub/ingestion/source/iceberg/iceberg.py +106 -18
  29. datahub/ingestion/source/kafka/kafka.py +1 -4
  30. datahub/ingestion/source/kafka_connect/sink_connectors.py +1 -1
  31. datahub/ingestion/source/kafka_connect/source_connectors.py +1 -1
  32. datahub/ingestion/source/looker/looker_source.py +2 -3
  33. datahub/ingestion/source/mlflow.py +6 -7
  34. datahub/ingestion/source/mode.py +2 -2
  35. datahub/ingestion/source/nifi.py +3 -3
  36. datahub/ingestion/source/openapi.py +3 -3
  37. datahub/ingestion/source/openapi_parser.py +8 -8
  38. datahub/ingestion/source/powerbi/config.py +1 -1
  39. datahub/ingestion/source/powerbi/powerbi.py +16 -3
  40. datahub/ingestion/source/redshift/profile.py +2 -2
  41. datahub/ingestion/source/sigma/sigma.py +6 -2
  42. datahub/ingestion/source/snowflake/snowflake_utils.py +1 -1
  43. datahub/ingestion/source/sql/stored_procedures/base.py +12 -1
  44. datahub/ingestion/source/sql/trino.py +4 -3
  45. datahub/ingestion/source/state/stale_entity_removal_handler.py +0 -1
  46. datahub/ingestion/source/superset.py +108 -81
  47. datahub/ingestion/source/tableau/tableau.py +4 -4
  48. datahub/ingestion/source/tableau/tableau_common.py +2 -2
  49. datahub/ingestion/source/unity/source.py +1 -1
  50. datahub/ingestion/source/vertexai/vertexai.py +7 -7
  51. datahub/ingestion/transformer/add_dataset_dataproduct.py +1 -1
  52. datahub/ingestion/transformer/add_dataset_ownership.py +1 -1
  53. datahub/ingestion/transformer/dataset_domain.py +1 -1
  54. datahub/lite/lite_util.py +2 -2
  55. datahub/metadata/_schema_classes.py +47 -2
  56. datahub/metadata/_urns/urn_defs.py +56 -0
  57. datahub/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py +2 -0
  58. datahub/metadata/schema.avsc +121 -85
  59. datahub/metadata/schemas/DataHubOpenAPISchemaKey.avsc +22 -0
  60. datahub/metadata/schemas/DataTransformLogic.avsc +4 -2
  61. datahub/metadata/schemas/FormInfo.avsc +5 -0
  62. datahub/metadata/schemas/MLModelDeploymentProperties.avsc +3 -0
  63. datahub/metadata/schemas/MetadataChangeEvent.avsc +6 -0
  64. datahub/metadata/schemas/MetadataChangeLog.avsc +3 -0
  65. datahub/metadata/schemas/MetadataChangeProposal.avsc +3 -0
  66. datahub/metadata/schemas/QueryProperties.avsc +4 -2
  67. datahub/metadata/schemas/SystemMetadata.avsc +86 -0
  68. datahub/testing/mcp_diff.py +1 -1
  69. datahub/utilities/file_backed_collections.py +6 -6
  70. datahub/utilities/hive_schema_to_avro.py +2 -2
  71. datahub/utilities/ingest_utils.py +2 -2
  72. datahub/ingestion/transformer/system_metadata_transformer.py +0 -45
  73. {acryl_datahub-1.0.0.1rc7.dist-info → acryl_datahub-1.0.0.2.dist-info}/WHEEL +0 -0
  74. {acryl_datahub-1.0.0.1rc7.dist-info → acryl_datahub-1.0.0.2.dist-info}/entry_points.txt +0 -0
  75. {acryl_datahub-1.0.0.1rc7.dist-info → acryl_datahub-1.0.0.2.dist-info}/licenses/LICENSE +0 -0
  76. {acryl_datahub-1.0.0.1rc7.dist-info → acryl_datahub-1.0.0.2.dist-info}/top_level.txt +0 -0
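
Of the 76 files, the datahub/_version.py entry is the version bump itself (1.0.0.1rc7 → 1.0.0.2). A minimal, non-authoritative way to confirm which build is installed after upgrading:

import datahub

# The package exposes its version string at the top level.
print(datahub.__version__)  # expect "1.0.0.2" after upgrading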
datahub/ingestion/source/tableau/tableau_common.py CHANGED
@@ -774,7 +774,7 @@ def get_overridden_info(
     if (
         lineage_overrides is not None
         and lineage_overrides.platform_override_map is not None
-        and original_platform in lineage_overrides.platform_override_map.keys()
+        and original_platform in lineage_overrides.platform_override_map
     ):
         platform = lineage_overrides.platform_override_map[original_platform]
 
@@ -782,7 +782,7 @@ def get_overridden_info(
         lineage_overrides is not None
         and lineage_overrides.database_override_map is not None
         and upstream_db is not None
-        and upstream_db in lineage_overrides.database_override_map.keys()
+        and upstream_db in lineage_overrides.database_override_map
     ):
         upstream_db = lineage_overrides.database_override_map[upstream_db]
 
datahub/ingestion/source/unity/source.py CHANGED
@@ -1003,7 +1003,7 @@ class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource):
             generate_usage_statistics=False,
             generate_operations=False,
         )
-        for dataset_name in self.view_definitions.keys():
+        for dataset_name in self.view_definitions:
            view_ref, view_definition = self.view_definitions[dataset_name]
            result = self._run_sql_parser(
                view_ref,
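
The `.keys()` cleanups above (and the similar ones in the transformer and lite_util diffs below) are behavior-preserving: membership tests and iteration on a Python dict already operate on its keys. A minimal illustration with a throwaway dict, not DataHub-specific code:

# `key in d` and `for key in d` already go through the dict's keys,
# so dropping `.keys()` changes nothing except readability.
override_map = {"presto": "trino", "oldmysql": "mysql"}

assert ("presto" in override_map) == ("presto" in override_map.keys())
assert list(override_map) == list(override_map.keys())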
datahub/ingestion/source/vertexai/vertexai.py CHANGED
@@ -22,7 +22,11 @@ from google.oauth2 import service_account
 
 import datahub.emitter.mce_builder as builder
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
-from datahub.emitter.mcp_builder import ContainerKey, ProjectIdKey, gen_containers
+from datahub.emitter.mcp_builder import (
+    ExperimentKey,
+    ProjectIdKey,
+    gen_containers,
+)
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SupportStatus,
@@ -96,10 +100,6 @@ class ModelMetadata:
     endpoints: Optional[List[Endpoint]] = None
 
 
-class ContainerKeyWithId(ContainerKey):
-    id: str
-
-
 @platform_name("Vertex AI", id="vertexai")
 @config_class(VertexAIConfig)
 @support_status(SupportStatus.TESTING)
@@ -173,7 +173,7 @@ class VertexAISource(Source):
     ) -> Iterable[MetadataWorkUnit]:
         yield from gen_containers(
             parent_container_key=self._get_project_container(),
-            container_key=ContainerKeyWithId(
+            container_key=ExperimentKey(
                 platform=self.platform,
                 id=self._make_vertexai_experiment_name(experiment.name),
             ),
@@ -309,7 +309,7 @@ class VertexAISource(Source):
     def _gen_experiment_run_mcps(
         self, experiment: Experiment, run: ExperimentRun
     ) -> Iterable[MetadataChangeProposalWrapper]:
-        experiment_key = ContainerKeyWithId(
+        experiment_key = ExperimentKey(
             platform=self.platform,
             id=self._make_vertexai_experiment_name(experiment.name),
         )
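
The net effect in vertexai.py is that the source's private ContainerKeyWithId subclass is replaced by the shared ExperimentKey from datahub.emitter.mcp_builder. A rough sketch of what such a container key does, assuming only the ContainerKey base class visible in the removed import (the subclass name below is an illustrative stand-in, not the real ExperimentKey definition):

from datahub.emitter.mcp_builder import ContainerKey

class ExperimentLikeKey(ContainerKey):
    # Mirrors the removed ContainerKeyWithId: one extra `id` field on top of ContainerKey.
    id: str

key = ExperimentLikeKey(platform="vertexai", id="projects.123.experiments.demo")
print(key.guid())  # stable guid that gen_containers uses to build the container urn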
datahub/ingestion/transformer/add_dataset_dataproduct.py CHANGED
@@ -54,7 +54,7 @@ class AddDatasetDataProduct(DatasetDataproductTransformer):
         data_products_container: Dict[str, DataProductPatchBuilder] = {}
         logger.debug("Generating dataproducts")
         is_container = self.config.is_container
-        for entity_urn in self.entity_map.keys():
+        for entity_urn in self.entity_map:
             data_product_urn = self.config.get_data_product_to_add(entity_urn)
             if data_product_urn:
                 if data_product_urn not in data_products:
datahub/ingestion/transformer/add_dataset_ownership.py CHANGED
@@ -86,7 +86,7 @@ class AddDatasetOwnership(OwnershipTransformer):
         logger.debug("Generating Ownership for containers")
         ownership_container_mapping: Dict[str, List[OwnerClass]] = {}
         for entity_urn, data_ownerships in (
-            (urn, self.config.get_owners_to_add(urn)) for urn in self.entity_map.keys()
+            (urn, self.config.get_owners_to_add(urn)) for urn in self.entity_map
         ):
             if not data_ownerships:
                 continue
datahub/ingestion/transformer/dataset_domain.py CHANGED
@@ -125,7 +125,7 @@ class AddDatasetDomain(DatasetDomainTransformer):
             return domain_mcps
 
         for entity_urn, domain_to_add in (
-            (urn, self.config.get_domains_to_add(urn)) for urn in self.entity_map.keys()
+            (urn, self.config.get_domains_to_add(urn)) for urn in self.entity_map
         ):
             if not domain_to_add or not domain_to_add.domains:
                 continue
datahub/lite/lite_util.py CHANGED
@@ -99,7 +99,7 @@ def get_datahub_lite(config_dict: dict, read_only: bool = False) -> "DataHubLite
         lite_class = lite_registry.get(lite_type)
     except KeyError as e:
         raise Exception(
-            f"Failed to find a registered lite implementation for {lite_type}. Valid values are {[k for k in lite_registry.mapping.keys()]}"
+            f"Failed to find a registered lite implementation for {lite_type}. Valid values are {[k for k in lite_registry.mapping]}"
         ) from e
 
     lite_specific_config = lite_class.get_config_class().parse_obj(
@@ -127,7 +127,7 @@ def get_datahub_lite(config_dict: dict, read_only: bool = False) -> "DataHubLite
         return lite
     else:
         raise Exception(
-            f"Failed to find a registered forwarding sink for type {lite_local_config.forward_to.type}. Valid values are {[k for k in sink_registry.mapping.keys()]}"
+            f"Failed to find a registered forwarding sink for type {lite_local_config.forward_to.type}. Valid values are {[k for k in sink_registry.mapping]}"
         )
     else:
         return lite
datahub/metadata/_schema_classes.py CHANGED
@@ -15442,6 +15442,35 @@ class DataHubIngestionSourceKeyClass(_Aspect):
         self._inner_dict['id'] = value
 
 
+class DataHubOpenAPISchemaKeyClass(_Aspect):
+    """Key for a Query"""
+
+
+    ASPECT_NAME = 'dataHubOpenAPISchemaKey'
+    ASPECT_INFO = {'keyForEntity': 'dataHubOpenAPISchema', 'entityCategory': 'internal', 'entityAspects': ['systemMetadata'], 'entityDoc': 'Contains aspects which are used in OpenAPI requests/responses which are not otherwise present in the data model.'}
+    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubOpenAPISchemaKey")
+
+    def __init__(self,
+        id: str,
+    ):
+        super().__init__()
+
+        self.id = id
+
+    def _restore_defaults(self) -> None:
+        self.id = str()
+
+
+    @property
+    def id(self) -> str:
+        """A unique id for the DataHub OpenAPI schema."""
+        return self._inner_dict.get('id')  # type: ignore
+
+    @id.setter
+    def id(self, value: str) -> None:
+        self._inner_dict['id'] = value
+
+
 class DataHubPersonaKeyClass(_Aspect):
     """Key for a persona type"""
 
@@ -20128,10 +20157,14 @@ class PlatformEventHeaderClass(DictWrapper):
         self._inner_dict['timestampMillis'] = value
 
 
-class SystemMetadataClass(DictWrapper):
+class SystemMetadataClass(_Aspect):
     """Metadata associated with each metadata change that is processed by the system"""
-
+
+
+    ASPECT_NAME = 'systemMetadata'
+    ASPECT_INFO = {}
     RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.mxe.SystemMetadata")
+
     def __init__(self,
                  lastObserved: Optional[Union[int, None]]=None,
                  runId: Optional[Union[str, None]]=None,
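
SystemMetadataClass switches its base from DictWrapper to _Aspect and registers itself under ASPECT_NAME = 'systemMetadata' (matching the new schema.avsc entries further down), so system metadata can now travel as a first-class aspect while still being attached to proposals the usual way. A rough sketch, assuming the long-standing MetadataChangeProposalWrapper fields and using StatusClass purely as an example aspect:

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.metadata.schema_classes import StatusClass, SystemMetadataClass

system_metadata = SystemMetadataClass(
    lastObserved=1718000000000,    # example timestamp in ms
    runId="manual-backfill-2024",  # illustrative run id
)

mcp = MetadataChangeProposalWrapper(
    entityUrn="urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
    aspect=StatusClass(removed=False),
    systemMetadata=system_metadata,
)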
@@ -21738,6 +21771,9 @@ class QueryLanguageClass(object):
     SQL = "SQL"
     """A SQL Query"""
 
+    UNKNOWN = "UNKNOWN"
+    """Unknown query language"""
+
 
 
 class QueryPropertiesClass(_Aspect):
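
QueryLanguage gains an UNKNOWN symbol, so query aspects no longer have to claim SQL when the language cannot be determined. A minimal sketch, assuming QueryStatementClass keeps its usual value/language fields (the statement string here is made up):

from datahub.metadata.schema_classes import QueryLanguageClass, QueryStatementClass

# Statements that are not SQL (or cannot be classified) can now be tagged explicitly.
statement = QueryStatementClass(
    value="df.groupBy('region').count()",   # illustrative non-SQL statement
    language=QueryLanguageClass.UNKNOWN,
)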
@@ -26135,6 +26171,7 @@ __SCHEMA_TYPES = {
     'com.linkedin.pegasus2avro.metadata.key.DataHubActionKey': DataHubActionKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey': DataHubConnectionKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
+    'com.linkedin.pegasus2avro.metadata.key.DataHubOpenAPISchemaKey': DataHubOpenAPISchemaKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubPersonaKey': DataHubPersonaKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubPolicyKey': DataHubPolicyKeyClass,
     'com.linkedin.pegasus2avro.metadata.key.DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -26620,6 +26657,7 @@ __SCHEMA_TYPES = {
     'DataHubActionKey': DataHubActionKeyClass,
     'DataHubConnectionKey': DataHubConnectionKeyClass,
     'DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
+    'DataHubOpenAPISchemaKey': DataHubOpenAPISchemaKeyClass,
     'DataHubPersonaKey': DataHubPersonaKeyClass,
     'DataHubPolicyKey': DataHubPolicyKeyClass,
     'DataHubRetentionKey': DataHubRetentionKeyClass,
@@ -26879,6 +26917,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     ContainerClass,
     ContainerPropertiesClass,
     EditableContainerPropertiesClass,
+    SystemMetadataClass,
     DataHubSecretValueClass,
     DataHubUpgradeRequestClass,
     DataHubUpgradeResultClass,
@@ -26935,6 +26974,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
     MLModelKeyClass,
     NotebookKeyClass,
     RoleKeyClass,
+    DataHubOpenAPISchemaKeyClass,
     GlobalSettingsKeyClass,
     DatasetKeyClass,
     ChartKeyClass,
@@ -27102,6 +27142,7 @@ class AspectBag(TypedDict, total=False):
     container: ContainerClass
     containerProperties: ContainerPropertiesClass
     editableContainerProperties: EditableContainerPropertiesClass
+    systemMetadata: SystemMetadataClass
     dataHubSecretValue: DataHubSecretValueClass
     dataHubUpgradeRequest: DataHubUpgradeRequestClass
     dataHubUpgradeResult: DataHubUpgradeResultClass
@@ -27158,6 +27199,7 @@ class AspectBag(TypedDict, total=False):
     mlModelKey: MLModelKeyClass
     notebookKey: NotebookKeyClass
     roleKey: RoleKeyClass
+    dataHubOpenAPISchemaKey: DataHubOpenAPISchemaKeyClass
     globalSettingsKey: GlobalSettingsKeyClass
     datasetKey: DatasetKeyClass
     chartKey: ChartKeyClass
@@ -27292,6 +27334,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
     'mlModel': MLModelKeyClass,
     'notebook': NotebookKeyClass,
     'role': RoleKeyClass,
+    'dataHubOpenAPISchema': DataHubOpenAPISchemaKeyClass,
     'globalSettings': GlobalSettingsKeyClass,
     'dataset': DatasetKeyClass,
     'chart': ChartKeyClass,
@@ -27352,6 +27395,7 @@ ENTITY_TYPE_NAMES: List[str] = [
     'mlModel',
     'notebook',
     'role',
+    'dataHubOpenAPISchema',
     'globalSettings',
     'dataset',
     'chart',
@@ -27411,6 +27455,7 @@ EntityTypeName = Literal[
     'mlModel',
     'notebook',
     'role',
+    'dataHubOpenAPISchema',
     'globalSettings',
     'dataset',
     'chart',
datahub/metadata/_urns/urn_defs.py CHANGED
@@ -594,6 +594,62 @@ class RoleUrn(_SpecificUrn):
     def id(self) -> str:
         return self._entity_ids[0]
 
+if TYPE_CHECKING:
+    from datahub.metadata.schema_classes import DataHubOpenAPISchemaKeyClass
+
+class DataHubOpenAPISchemaUrn(_SpecificUrn):
+    ENTITY_TYPE: ClassVar[Literal["dataHubOpenAPISchema"]] = "dataHubOpenAPISchema"
+    _URN_PARTS: ClassVar[int] = 1
+
+    def __init__(self, id: Union["DataHubOpenAPISchemaUrn", str], *, _allow_coercion: bool = True) -> None:
+        if _allow_coercion:
+            # Field coercion logic (if any is required).
+            if isinstance(id, str):
+                if id.startswith('urn:li:'):
+                    try:
+                        id = DataHubOpenAPISchemaUrn.from_string(id)
+                    except InvalidUrnError:
+                        raise InvalidUrnError(f'Expecting a DataHubOpenAPISchemaUrn but got {id}')
+                else:
+                    id = UrnEncoder.encode_string(id)
+
+        # Validation logic.
+        if not id:
+            raise InvalidUrnError("DataHubOpenAPISchemaUrn id cannot be empty")
+        if isinstance(id, DataHubOpenAPISchemaUrn):
+            id = id.id
+        elif isinstance(id, Urn):
+            raise InvalidUrnError(f'Expecting a DataHubOpenAPISchemaUrn but got {id}')
+        if UrnEncoder.contains_reserved_char(id):
+            raise InvalidUrnError(f'DataHubOpenAPISchemaUrn id contains reserved characters')
+
+        super().__init__(self.ENTITY_TYPE, [id])
+
+    @classmethod
+    def _parse_ids(cls, entity_ids: List[str]) -> "DataHubOpenAPISchemaUrn":
+        if len(entity_ids) != cls._URN_PARTS:
+            raise InvalidUrnError(f"DataHubOpenAPISchemaUrn should have {cls._URN_PARTS} parts, got {len(entity_ids)}: {entity_ids}")
+        return cls(id=entity_ids[0], _allow_coercion=False)
+
+    @classmethod
+    def underlying_key_aspect_type(cls) -> Type["DataHubOpenAPISchemaKeyClass"]:
+        from datahub.metadata.schema_classes import DataHubOpenAPISchemaKeyClass
+
+        return DataHubOpenAPISchemaKeyClass
+
+    def to_key_aspect(self) -> "DataHubOpenAPISchemaKeyClass":
+        from datahub.metadata.schema_classes import DataHubOpenAPISchemaKeyClass
+
+        return DataHubOpenAPISchemaKeyClass(id=self.id)
+
+    @classmethod
+    def from_key_aspect(cls, key_aspect: "DataHubOpenAPISchemaKeyClass") -> "DataHubOpenAPISchemaUrn":
+        return cls(id=key_aspect.id)
+
+    @property
+    def id(self) -> str:
+        return self._entity_ids[0]
+
 if TYPE_CHECKING:
     from datahub.metadata.schema_classes import GlobalSettingsKeyClass
 
datahub/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py CHANGED
@@ -19,6 +19,7 @@ from ......schema_classes import DataHubAccessTokenKeyClass
 from ......schema_classes import DataHubActionKeyClass
 from ......schema_classes import DataHubConnectionKeyClass
 from ......schema_classes import DataHubIngestionSourceKeyClass
+from ......schema_classes import DataHubOpenAPISchemaKeyClass
 from ......schema_classes import DataHubPersonaKeyClass
 from ......schema_classes import DataHubPolicyKeyClass
 from ......schema_classes import DataHubRetentionKeyClass
@@ -72,6 +73,7 @@ DataHubAccessTokenKey = DataHubAccessTokenKeyClass
 DataHubActionKey = DataHubActionKeyClass
 DataHubConnectionKey = DataHubConnectionKeyClass
 DataHubIngestionSourceKey = DataHubIngestionSourceKeyClass
+DataHubOpenAPISchemaKey = DataHubOpenAPISchemaKeyClass
 DataHubPersonaKey = DataHubPersonaKeyClass
 DataHubPolicyKey = DataHubPolicyKeyClass
 DataHubRetentionKey = DataHubRetentionKeyClass
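
Based on the DataHubOpenAPISchemaUrn class added in urn_defs.py above, the new urn type round-trips between its string form and the key aspect. A small sketch (the id is illustrative, and it assumes the class is re-exported from datahub.metadata.urns like the other generated urns):

from datahub.metadata.urns import DataHubOpenAPISchemaUrn

schema_urn = DataHubOpenAPISchemaUrn("example-schema")
assert schema_urn.urn() == "urn:li:dataHubOpenAPISchema:example-schema"

# Parse back from the string form and recover the key aspect.
parsed = DataHubOpenAPISchemaUrn.from_string(schema_urn.urn())
assert parsed.to_key_aspect().id == "example-schema"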
datahub/metadata/schema.avsc CHANGED
@@ -2343,12 +2343,14 @@
                 "type": {
                     "type": "enum",
                     "symbolDocs": {
-                        "SQL": "A SQL Query"
+                        "SQL": "A SQL Query",
+                        "UNKNOWN": "Unknown query language"
                     },
                     "name": "QueryLanguage",
                     "namespace": "com.linkedin.pegasus2avro.query",
                     "symbols": [
-                        "SQL"
+                        "SQL",
+                        "UNKNOWN"
                     ]
                 },
                 "name": "language",
@@ -2887,6 +2889,11 @@
             "namespace": "com.linkedin.pegasus2avro.form",
             "fields": [
                 {
+                    "Searchable": {
+                        "fieldName": "promptId",
+                        "fieldType": "KEYWORD",
+                        "queryByDefault": false
+                    },
                     "type": "string",
                     "name": "id",
                     "doc": "The unique id for this prompt. This must be GLOBALLY unique."
@@ -4130,6 +4137,92 @@
         ],
         "doc": "Editable information about an Asset Container as defined on the DataHub Platform"
     },
+    {
+        "type": "record",
+        "Aspect": {
+            "name": "systemMetadata"
+        },
+        "name": "SystemMetadata",
+        "namespace": "com.linkedin.pegasus2avro.mxe",
+        "fields": [
+            {
+                "type": [
+                    "long",
+                    "null"
+                ],
+                "name": "lastObserved",
+                "default": 0,
+                "doc": "The timestamp the metadata was observed at"
+            },
+            {
+                "type": [
+                    "string",
+                    "null"
+                ],
+                "name": "runId",
+                "default": "no-run-id-provided",
+                "doc": "The original run id that produced the metadata. Populated in case of batch-ingestion."
+            },
+            {
+                "type": [
+                    "string",
+                    "null"
+                ],
+                "name": "lastRunId",
+                "default": "no-run-id-provided",
+                "doc": "The last run id that produced the metadata. Populated in case of batch-ingestion."
+            },
+            {
+                "type": [
+                    "null",
+                    "string"
+                ],
+                "name": "pipelineName",
+                "default": null,
+                "doc": "The ingestion pipeline id that produced the metadata. Populated in case of batch ingestion."
+            },
+            {
+                "type": [
+                    "null",
+                    "string"
+                ],
+                "name": "registryName",
+                "default": null,
+                "doc": "The model registry name that was used to process this event"
+            },
+            {
+                "type": [
+                    "null",
+                    "string"
+                ],
+                "name": "registryVersion",
+                "default": null,
+                "doc": "The model registry version that was used to process this event"
+            },
+            {
+                "type": [
+                    "null",
+                    {
+                        "type": "map",
+                        "values": "string"
+                    }
+                ],
+                "name": "properties",
+                "default": null,
+                "doc": "Additional properties"
+            },
+            {
+                "type": [
+                    "null",
+                    "string"
+                ],
+                "name": "version",
+                "default": null,
+                "doc": "Aspect version\n Initial implementation will use the aspect version's number, however stored as\n a string in the case where a different aspect versioning scheme is later adopted."
+            }
+        ],
+        "doc": "Metadata associated with each metadata change that is processed by the system"
+    },
     {
         "type": "record",
         "name": "PlatformEvent",
@@ -4369,89 +4462,7 @@
         {
             "type": [
                 "null",
-                {
-                    "type": "record",
-                    "name": "SystemMetadata",
-                    "namespace": "com.linkedin.pegasus2avro.mxe",
-                    "fields": [
-                        {
-                            "type": [
-                                "long",
-                                "null"
-                            ],
-                            "name": "lastObserved",
-                            "default": 0,
-                            "doc": "The timestamp the metadata was observed at"
-                        },
-                        {
-                            "type": [
-                                "string",
-                                "null"
-                            ],
-                            "name": "runId",
-                            "default": "no-run-id-provided",
-                            "doc": "The original run id that produced the metadata. Populated in case of batch-ingestion."
-                        },
-                        {
-                            "type": [
-                                "string",
-                                "null"
-                            ],
-                            "name": "lastRunId",
-                            "default": "no-run-id-provided",
-                            "doc": "The last run id that produced the metadata. Populated in case of batch-ingestion."
-                        },
-                        {
-                            "type": [
-                                "null",
-                                "string"
-                            ],
-                            "name": "pipelineName",
-                            "default": null,
-                            "doc": "The ingestion pipeline id that produced the metadata. Populated in case of batch ingestion."
-                        },
-                        {
-                            "type": [
-                                "null",
-                                "string"
-                            ],
-                            "name": "registryName",
-                            "default": null,
-                            "doc": "The model registry name that was used to process this event"
-                        },
-                        {
-                            "type": [
-                                "null",
-                                "string"
-                            ],
-                            "name": "registryVersion",
-                            "default": null,
-                            "doc": "The model registry version that was used to process this event"
-                        },
-                        {
-                            "type": [
-                                "null",
-                                {
-                                    "type": "map",
-                                    "values": "string"
-                                }
-                            ],
-                            "name": "properties",
-                            "default": null,
-                            "doc": "Additional properties"
-                        },
-                        {
-                            "type": [
-                                "null",
-                                "string"
-                            ],
-                            "name": "version",
-                            "default": null,
-                            "doc": "Aspect version\n Initial implementation will use the aspect version's number, however stored as\n a string in the case where a different aspect versioning scheme is later adopted."
-                        }
-                    ],
-                    "doc": "Metadata associated with each metadata change that is processed by the system"
-                }
+                "com.linkedin.pegasus2avro.mxe.SystemMetadata"
             ],
             "name": "systemMetadata",
             "default": null,
@@ -10156,6 +10167,9 @@
             "doc": "Version of the MLModelDeployment"
         },
         {
+            "Searchable": {
+                "fieldName": "deploymentStatus"
+            },
             "type": [
                 "null",
                 {
@@ -15705,6 +15719,28 @@
         ],
         "doc": "Key for a External AccessManagement"
     },
+    {
+        "type": "record",
+        "Aspect": {
+            "name": "dataHubOpenAPISchemaKey",
+            "keyForEntity": "dataHubOpenAPISchema",
+            "entityCategory": "internal",
+            "entityAspects": [
+                "systemMetadata"
+            ],
+            "entityDoc": "Contains aspects which are used in OpenAPI requests/responses which are not otherwise present in the data model."
+        },
+        "name": "DataHubOpenAPISchemaKey",
+        "namespace": "com.linkedin.pegasus2avro.metadata.key",
+        "fields": [
+            {
+                "type": "string",
+                "name": "id",
+                "doc": "A unique id for the DataHub OpenAPI schema."
+            }
+        ],
+        "doc": "Key for a Query"
+    },
     {
         "type": "record",
         "Aspect": {
datahub/metadata/schemas/DataHubOpenAPISchemaKey.avsc ADDED
@@ -0,0 +1,22 @@
+{
+    "type": "record",
+    "Aspect": {
+        "name": "dataHubOpenAPISchemaKey",
+        "keyForEntity": "dataHubOpenAPISchema",
+        "entityCategory": "internal",
+        "entityAspects": [
+            "systemMetadata"
+        ],
+        "entityDoc": "Contains aspects which are used in OpenAPI requests/responses which are not otherwise present in the data model."
+    },
+    "name": "DataHubOpenAPISchemaKey",
+    "namespace": "com.linkedin.pegasus2avro.metadata.key",
+    "fields": [
+        {
+            "type": "string",
+            "name": "id",
+            "doc": "A unique id for the DataHub OpenAPI schema."
+        }
+    ],
+    "doc": "Key for a Query"
+}
datahub/metadata/schemas/DataTransformLogic.avsc CHANGED
@@ -31,12 +31,14 @@
         "type": {
             "type": "enum",
             "symbolDocs": {
-                "SQL": "A SQL Query"
+                "SQL": "A SQL Query",
+                "UNKNOWN": "Unknown query language"
             },
             "name": "QueryLanguage",
             "namespace": "com.linkedin.pegasus2avro.query",
             "symbols": [
-                "SQL"
+                "SQL",
+                "UNKNOWN"
             ]
         },
         "name": "language",
datahub/metadata/schemas/FormInfo.avsc CHANGED
@@ -53,6 +53,11 @@
     "namespace": "com.linkedin.pegasus2avro.form",
     "fields": [
         {
+            "Searchable": {
+                "fieldName": "promptId",
+                "fieldType": "KEYWORD",
+                "queryByDefault": false
+            },
             "type": "string",
             "name": "id",
             "doc": "The unique id for this prompt. This must be GLOBALLY unique."
datahub/metadata/schemas/MLModelDeploymentProperties.avsc CHANGED
@@ -135,6 +135,9 @@
         "doc": "Version of the MLModelDeployment"
     },
     {
+        "Searchable": {
+            "fieldName": "deploymentStatus"
+        },
         "type": [
             "null",
             {
datahub/metadata/schemas/MetadataChangeEvent.avsc CHANGED
@@ -6755,6 +6755,9 @@
             "doc": "Version of the MLModelDeployment"
         },
         {
+            "Searchable": {
+                "fieldName": "deploymentStatus"
+            },
             "type": [
                 "null",
                 {
@@ -8116,6 +8119,9 @@
         "null",
         {
             "type": "record",
+            "Aspect": {
+                "name": "systemMetadata"
+            },
             "name": "SystemMetadata",
             "namespace": "com.linkedin.pegasus2avro.mxe",
             "fields": [