acryl-datahub-cloud 0.3.15rc1__py3-none-any.whl → 0.3.15rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub-cloud might be problematic; review the changes below for more details.

@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "acryl-datahub-cloud",
3
- "version": "0.3.15rc1",
3
+ "version": "0.3.15rc3",
4
4
  "install_requires": [
5
5
  "avro-gen3==0.7.16",
6
6
  "acryl-datahub"
@@ -2,10 +2,10 @@ import json
2
2
  import logging
3
3
  from datetime import date, datetime, timezone
4
4
  from enum import Enum
5
- from typing import Any, Callable, Dict, Iterable, List, Optional
5
+ from typing import Any, Callable, Dict, Iterable, List, Optional, Union
6
6
 
7
7
  import pandas as pd
8
- from pydantic import BaseModel
8
+ from pydantic import BaseModel, field_validator
9
9
 
10
10
  from acryl_datahub_cloud.elasticsearch.graph_service import BaseModelRow
11
11
  from datahub.emitter.mcp import MetadataChangeProposalWrapper
@@ -130,6 +130,22 @@ class DataHubFormReportingData(FormData):
130
130
  platformInstance: Optional[str] = None
131
131
  domains: List[str] = []
132
132
 
133
+ @field_validator(
134
+ "completedFormsIncompletePromptResponseTimes",
135
+ "completedFormsCompletedPromptResponseTimes",
136
+ "incompleteFormsIncompletePromptResponseTimes",
137
+ "incompleteFormsCompletedPromptResponseTimes",
138
+ mode="before",
139
+ )
140
+ @classmethod
141
+ def convert_timestamps_to_strings(
142
+ cls, v: Union[List[int], List[str]]
143
+ ) -> List[str]:
144
+ """Convert timestamp integers to strings for compatibility with GMS data."""
145
+ if not isinstance(v, list):
146
+ return v
147
+ return [str(item) for item in v]
148
+
133
149
  def __init__(self, graph: DataHubGraph, allowed_forms: Optional[List[str]] = None):
134
150
  self.graph: DataHubGraph = graph
135
151
  self.form_registry = FormRegistry(graph)
@@ -430,7 +446,7 @@ class DataHubFormReportingData(FormData):
430
446
  question_status=QuestionStatus.COMPLETED,
431
447
  question_completed_date=datetime.fromtimestamp(
432
448
  float(prompt_response_time) / 1000, tz=timezone.utc
433
- ),
449
+ ).date(),
434
450
  snapshot_date=self.snapshot_date,
435
451
  )
436
452
  complete_forms = (
@@ -532,7 +548,7 @@ class DataHubFormReportingData(FormData):
532
548
  question_status=QuestionStatus.COMPLETED,
533
549
  question_completed_date=datetime.fromtimestamp(
534
550
  float(prompt_response_time) / 1000, tz=timezone.utc
535
- ),
551
+ ).date(),
536
552
  snapshot_date=self.snapshot_date,
537
553
  )
538
554
 
@@ -1,5 +1,5 @@
1
1
  import datetime
2
- from typing import Any, Dict, List, Optional
2
+ from typing import Any, Dict, List, Optional, get_args, get_origin
3
3
 
4
4
  import pyarrow as pa
5
5
  from pydantic import BaseModel
@@ -15,7 +15,37 @@ class BaseModelRow(BaseModel):
15
15
  return self.__dict__
16
16
 
17
17
  @staticmethod
18
- def pydantic_type_to_pyarrow(type_: Any) -> pa.DataType:
18
+ def pydantic_type_to_pyarrow(type_: Any) -> pa.DataType: # noqa: C901
19
+ # Handle generic types (List, Optional, Union, etc.)
20
+ origin = get_origin(type_)
21
+
22
+ if origin is list:
23
+ # List[X] -> list(X)
24
+ args = get_args(type_)
25
+ if args:
26
+ inner_type = BaseModelRow.pydantic_type_to_pyarrow(args[0])
27
+ return pa.list_(inner_type)
28
+ return pa.list_(pa.string()) # Default to list of strings
29
+
30
+ if origin is type(None):
31
+ # Just None type
32
+ return pa.null()
33
+
34
+ # Optional[X] is Union[X, None]
35
+ args = get_args(type_)
36
+ if args:
37
+ # Check if this is Optional (Union with None)
38
+ non_none_types = [arg for arg in args if arg is not type(None)]
39
+ if len(non_none_types) < len(args): # Had None in the union
40
+ if non_none_types:
41
+ return BaseModelRow.pydantic_type_to_pyarrow(non_none_types[0])
42
+ return pa.null()
43
+
44
+ # Handle simple types - check if it's actually a class first
45
+ if not isinstance(type_, type):
46
+ # If it's not a type/class, default to string
47
+ return pa.string()
48
+
19
49
  if issubclass(type_, bool):
20
50
  return pa.bool_()
21
51
  elif issubclass(type_, int):
@@ -50,16 +80,34 @@ class BaseModelRow(BaseModel):
50
80
  ) # Default to str for unknown types
51
81
  return BaseModelRow.pydantic_type_to_pyarrow(python_type)
52
82
 
83
+ @staticmethod
84
+ def _is_optional_type(type_: Any) -> bool:
85
+ """Check if a type annotation is Optional (i.e., Union with None)."""
86
+ origin = get_origin(type_)
87
+ if origin is type(None):
88
+ return True
89
+
90
+ args = get_args(type_)
91
+ if args:
92
+ # Check if None is in the Union args (Optional is Union[X, None])
93
+ return type(None) in args
94
+
95
+ return False
96
+
53
97
  @classmethod
54
98
  def arrow_schema(cls) -> pa.Schema:
55
99
  fields = []
56
100
  for field_name, field_model in cls.model_fields.items():
57
101
  pyarrow_type = BaseModelRow.pydantic_type_to_pyarrow(field_model.annotation)
58
102
  pyarrow_field = pa.field(field_name, pyarrow_type)
59
- if not field_model.is_required():
60
- pyarrow_field = pyarrow_field.with_nullable(True)
61
- else:
62
- pyarrow_field = pyarrow_field.with_nullable(False)
103
+
104
+ # Check if the type is Optional (Union with None) OR if the field has a default value
105
+ is_nullable = (
106
+ BaseModelRow._is_optional_type(field_model.annotation)
107
+ or not field_model.is_required()
108
+ )
109
+
110
+ pyarrow_field = pyarrow_field.with_nullable(is_nullable)
63
111
  fields.append(pyarrow_field)
64
112
  return pa.schema(fields)
65
113
 
@@ -1511,6 +1511,62 @@ class MlPrimaryKeyUrn(_SpecificUrn):
1511
1511
  def name(self) -> str:
1512
1512
  return self._entity_ids[1]
1513
1513
 
1514
+ if TYPE_CHECKING:
1515
+ from datahub.metadata.schema_classes import DataHubAiConversationKeyClass
1516
+
1517
+ class DataHubAiConversationUrn(_SpecificUrn):
1518
+ ENTITY_TYPE: ClassVar[Literal["dataHubAiConversation"]] = "dataHubAiConversation"
1519
+ _URN_PARTS: ClassVar[int] = 1
1520
+
1521
+ def __init__(self, id: Union["DataHubAiConversationUrn", str], *, _allow_coercion: bool = True) -> None:
1522
+ if _allow_coercion:
1523
+ # Field coercion logic (if any is required).
1524
+ if isinstance(id, str):
1525
+ if id.startswith('urn:li:'):
1526
+ try:
1527
+ id = DataHubAiConversationUrn.from_string(id)
1528
+ except InvalidUrnError:
1529
+ raise InvalidUrnError(f'Expecting a DataHubAiConversationUrn but got {id}')
1530
+ else:
1531
+ id = UrnEncoder.encode_string(id)
1532
+
1533
+ # Validation logic.
1534
+ if not id:
1535
+ raise InvalidUrnError("DataHubAiConversationUrn id cannot be empty")
1536
+ if isinstance(id, DataHubAiConversationUrn):
1537
+ id = id.id
1538
+ elif isinstance(id, Urn):
1539
+ raise InvalidUrnError(f'Expecting a DataHubAiConversationUrn but got {id}')
1540
+ if UrnEncoder.contains_reserved_char(id):
1541
+ raise InvalidUrnError(f'DataHubAiConversationUrn id contains reserved characters')
1542
+
1543
+ super().__init__(self.ENTITY_TYPE, [id])
1544
+
1545
+ @classmethod
1546
+ def _parse_ids(cls, entity_ids: List[str]) -> "DataHubAiConversationUrn":
1547
+ if len(entity_ids) != cls._URN_PARTS:
1548
+ raise InvalidUrnError(f"DataHubAiConversationUrn should have {cls._URN_PARTS} parts, got {len(entity_ids)}: {entity_ids}")
1549
+ return cls(id=entity_ids[0], _allow_coercion=False)
1550
+
1551
+ @classmethod
1552
+ def underlying_key_aspect_type(cls) -> Type["DataHubAiConversationKeyClass"]:
1553
+ from datahub.metadata.schema_classes import DataHubAiConversationKeyClass
1554
+
1555
+ return DataHubAiConversationKeyClass
1556
+
1557
+ def to_key_aspect(self) -> "DataHubAiConversationKeyClass":
1558
+ from datahub.metadata.schema_classes import DataHubAiConversationKeyClass
1559
+
1560
+ return DataHubAiConversationKeyClass(id=self.id)
1561
+
1562
+ @classmethod
1563
+ def from_key_aspect(cls, key_aspect: "DataHubAiConversationKeyClass") -> "DataHubAiConversationUrn":
1564
+ return cls(id=key_aspect.id)
1565
+
1566
+ @property
1567
+ def id(self) -> str:
1568
+ return self._entity_ids[0]
1569
+
1514
1570
  if TYPE_CHECKING:
1515
1571
  from datahub.metadata.schema_classes import TestKeyClass
1516
1572
 
@@ -0,0 +1,27 @@
1
+ # mypy: ignore-errors
2
+ # flake8: noqa
3
+
4
+ # This file is autogenerated by /metadata-ingestion/scripts/avro_codegen.py
5
+ # Do not modify manually!
6
+
7
+ # pylint: skip-file
8
+ # fmt: off
9
+ # isort: skip_file
10
+ from .....schema_classes import DataHubAiConversationActorClass
11
+ from .....schema_classes import DataHubAiConversationActorTypeClass
12
+ from .....schema_classes import DataHubAiConversationInfoClass
13
+ from .....schema_classes import DataHubAiConversationMessageClass
14
+ from .....schema_classes import DataHubAiConversationMessageContentClass
15
+ from .....schema_classes import DataHubAiConversationMessageTypeClass
16
+ from .....schema_classes import DataHubAiConversationOriginTypeClass
17
+
18
+
19
+ DataHubAiConversationActor = DataHubAiConversationActorClass
20
+ DataHubAiConversationActorType = DataHubAiConversationActorTypeClass
21
+ DataHubAiConversationInfo = DataHubAiConversationInfoClass
22
+ DataHubAiConversationMessage = DataHubAiConversationMessageClass
23
+ DataHubAiConversationMessageContent = DataHubAiConversationMessageContentClass
24
+ DataHubAiConversationMessageType = DataHubAiConversationMessageTypeClass
25
+ DataHubAiConversationOriginType = DataHubAiConversationOriginTypeClass
26
+
27
+ # fmt: on
@@ -21,6 +21,7 @@ from ......schema_classes import DataContractKeyClass
21
21
  from ......schema_classes import DataFlowKeyClass
22
22
  from ......schema_classes import DataHubAccessTokenKeyClass
23
23
  from ......schema_classes import DataHubActionKeyClass
24
+ from ......schema_classes import DataHubAiConversationKeyClass
24
25
  from ......schema_classes import DataHubConnectionKeyClass
25
26
  from ......schema_classes import DataHubFileKeyClass
26
27
  from ......schema_classes import DataHubIngestionSourceKeyClass
@@ -92,6 +93,7 @@ DataContractKey = DataContractKeyClass
92
93
  DataFlowKey = DataFlowKeyClass
93
94
  DataHubAccessTokenKey = DataHubAccessTokenKeyClass
94
95
  DataHubActionKey = DataHubActionKeyClass
96
+ DataHubAiConversationKey = DataHubAiConversationKeyClass
95
97
  DataHubConnectionKey = DataHubConnectionKeyClass
96
98
  DataHubFileKey = DataHubFileKeyClass
97
99
  DataHubIngestionSourceKey = DataHubIngestionSourceKeyClass
@@ -18807,6 +18807,161 @@
18807
18807
  "doc": "Properties associated with a MLPrimaryKey editable from the UI"
18808
18808
  },
18809
18809
  "com.linkedin.pegasus2avro.ml.metadata.EvaluationData",
18810
+ {
18811
+ "type": "record",
18812
+ "Aspect": {
18813
+ "name": "dataHubAiConversationInfo"
18814
+ },
18815
+ "name": "DataHubAiConversationInfo",
18816
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18817
+ "fields": [
18818
+ {
18819
+ "type": [
18820
+ "null",
18821
+ "string"
18822
+ ],
18823
+ "name": "title",
18824
+ "default": null,
18825
+ "doc": "Optional title for the conversation. "
18826
+ },
18827
+ {
18828
+ "type": {
18829
+ "type": "array",
18830
+ "items": {
18831
+ "type": "record",
18832
+ "name": "DataHubAiConversationMessage",
18833
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18834
+ "fields": [
18835
+ {
18836
+ "type": {
18837
+ "type": "enum",
18838
+ "name": "DataHubAiConversationMessageType",
18839
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18840
+ "symbols": [
18841
+ "TEXT",
18842
+ "TOOL_CALL",
18843
+ "TOOL_RESULT",
18844
+ "THINKING"
18845
+ ]
18846
+ },
18847
+ "name": "type",
18848
+ "doc": "The type of message"
18849
+ },
18850
+ {
18851
+ "type": "long",
18852
+ "name": "time",
18853
+ "doc": "Timestamp in milliseconds"
18854
+ },
18855
+ {
18856
+ "type": {
18857
+ "type": "record",
18858
+ "name": "DataHubAiConversationActor",
18859
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18860
+ "fields": [
18861
+ {
18862
+ "type": {
18863
+ "type": "enum",
18864
+ "symbolDocs": {
18865
+ "AGENT": "An agent initiated message. ",
18866
+ "USER": "A user initiated message. "
18867
+ },
18868
+ "name": "DataHubAiConversationActorType",
18869
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18870
+ "symbols": [
18871
+ "USER",
18872
+ "AGENT"
18873
+ ]
18874
+ },
18875
+ "name": "type",
18876
+ "doc": "The type of actor "
18877
+ },
18878
+ {
18879
+ "Relationship": {
18880
+ "entityTypes": [
18881
+ "corpuser",
18882
+ "corpGroup"
18883
+ ],
18884
+ "name": "AuthoredBy"
18885
+ },
18886
+ "java": {
18887
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
18888
+ },
18889
+ "Urn": "Urn",
18890
+ "entityTypes": [
18891
+ "corpuser",
18892
+ "corpGroup"
18893
+ ],
18894
+ "type": [
18895
+ "null",
18896
+ "string"
18897
+ ],
18898
+ "name": "actor",
18899
+ "default": null,
18900
+ "doc": "The actor that has sent the message.\nRequired if type is not AGENT."
18901
+ }
18902
+ ]
18903
+ },
18904
+ "name": "actor",
18905
+ "doc": "Messages in the conversation"
18906
+ },
18907
+ {
18908
+ "type": {
18909
+ "type": "record",
18910
+ "name": "DataHubAiConversationMessageContent",
18911
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18912
+ "fields": [
18913
+ {
18914
+ "type": "string",
18915
+ "name": "text",
18916
+ "doc": "Raw message text"
18917
+ }
18918
+ ],
18919
+ "doc": "Conversation message content info "
18920
+ },
18921
+ "name": "content",
18922
+ "doc": "The contents of the message. "
18923
+ }
18924
+ ],
18925
+ "doc": "Conversation info "
18926
+ }
18927
+ },
18928
+ "name": "messages",
18929
+ "doc": "Messages in the conversation"
18930
+ },
18931
+ {
18932
+ "Searchable": {
18933
+ "/actor": {
18934
+ "fieldName": "creator",
18935
+ "fieldType": "URN"
18936
+ },
18937
+ "/time": {
18938
+ "fieldName": "createdAt",
18939
+ "fieldType": "DATETIME"
18940
+ }
18941
+ },
18942
+ "type": "com.linkedin.pegasus2avro.common.AuditStamp",
18943
+ "name": "created",
18944
+ "doc": "The time and actor who initiated the conversation"
18945
+ },
18946
+ {
18947
+ "type": {
18948
+ "type": "enum",
18949
+ "symbolDocs": {
18950
+ "DATAHUB_UI": "Chat session originated in DataHub UI "
18951
+ },
18952
+ "name": "DataHubAiConversationOriginType",
18953
+ "namespace": "com.linkedin.pegasus2avro.conversation",
18954
+ "symbols": [
18955
+ "DATAHUB_UI"
18956
+ ]
18957
+ },
18958
+ "name": "originType",
18959
+ "default": "DATAHUB_UI",
18960
+ "doc": "Origin type for the chat session"
18961
+ }
18962
+ ],
18963
+ "doc": "Conversation info "
18964
+ },
18810
18965
  "com.linkedin.pegasus2avro.datajob.DataJobInfo",
18811
18966
  "com.linkedin.pegasus2avro.datajob.DataFlowInfo",
18812
18967
  {
@@ -25172,6 +25327,28 @@
25172
25327
  "doc": "Key for a Version Set entity"
25173
25328
  },
25174
25329
  "com.linkedin.pegasus2avro.metadata.key.MLPrimaryKeyKey",
25330
+ {
25331
+ "type": "record",
25332
+ "Aspect": {
25333
+ "name": "dataHubAiConversationKey",
25334
+ "keyForEntity": "dataHubAiConversation",
25335
+ "entityCategory": "core",
25336
+ "entityAspects": [
25337
+ "dataHubAiConversationInfo"
25338
+ ]
25339
+ },
25340
+ "name": "DataHubAiConversationKey",
25341
+ "namespace": "com.linkedin.pegasus2avro.metadata.key",
25342
+ "fields": [
25343
+ {
25344
+ "Searchable": {},
25345
+ "type": "string",
25346
+ "name": "id",
25347
+ "doc": "Unique identifier for the conversation"
25348
+ }
25349
+ ],
25350
+ "doc": "Key for a DataHub conversation"
25351
+ },
25175
25352
  {
25176
25353
  "type": "record",
25177
25354
  "Aspect": {
@@ -12088,6 +12088,234 @@ class EditableContainerPropertiesClass(_Aspect):
12088
12088
  self._inner_dict['description'] = value
12089
12089
 
12090
12090
 
12091
+ class DataHubAiConversationActorClass(DictWrapper):
12092
+ # No docs available.
12093
+
12094
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.conversation.DataHubAiConversationActor")
12095
+ def __init__(self,
12096
+ type: Union[str, "DataHubAiConversationActorTypeClass"],
12097
+ actor: Union[None, str]=None,
12098
+ ):
12099
+ super().__init__()
12100
+
12101
+ self.type = type
12102
+ self.actor = actor
12103
+
12104
+ def _restore_defaults(self) -> None:
12105
+ self.type = DataHubAiConversationActorTypeClass.USER
12106
+ self.actor = self.RECORD_SCHEMA.fields_dict["actor"].default
12107
+
12108
+
12109
+ @property
12110
+ def type(self) -> Union[str, "DataHubAiConversationActorTypeClass"]:
12111
+ """The type of actor """
12112
+ return self._inner_dict.get('type') # type: ignore
12113
+
12114
+ @type.setter
12115
+ def type(self, value: Union[str, "DataHubAiConversationActorTypeClass"]) -> None:
12116
+ self._inner_dict['type'] = value
12117
+
12118
+
12119
+ @property
12120
+ def actor(self) -> Union[None, str]:
12121
+ """The actor that has sent the message.
12122
+ Required if type is not AGENT."""
12123
+ return self._inner_dict.get('actor') # type: ignore
12124
+
12125
+ @actor.setter
12126
+ def actor(self, value: Union[None, str]) -> None:
12127
+ self._inner_dict['actor'] = value
12128
+
12129
+
12130
+ class DataHubAiConversationActorTypeClass(object):
12131
+ # No docs available.
12132
+
12133
+ USER = "USER"
12134
+ """A user initiated message. """
12135
+
12136
+ AGENT = "AGENT"
12137
+ """An agent initiated message. """
12138
+
12139
+
12140
+
12141
+ class DataHubAiConversationInfoClass(_Aspect):
12142
+ """Conversation info """
12143
+
12144
+
12145
+ ASPECT_NAME = 'dataHubAiConversationInfo'
12146
+ ASPECT_INFO = {}
12147
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.conversation.DataHubAiConversationInfo")
12148
+
12149
+ def __init__(self,
12150
+ messages: List["DataHubAiConversationMessageClass"],
12151
+ created: "AuditStampClass",
12152
+ title: Union[None, str]=None,
12153
+ originType: Optional[Union[str, "DataHubAiConversationOriginTypeClass"]]=None,
12154
+ ):
12155
+ super().__init__()
12156
+
12157
+ self.title = title
12158
+ self.messages = messages
12159
+ self.created = created
12160
+ if originType is None:
12161
+ # default: 'DATAHUB_UI'
12162
+ self.originType = self.RECORD_SCHEMA.fields_dict["originType"].default
12163
+ else:
12164
+ self.originType = originType
12165
+
12166
+ def _restore_defaults(self) -> None:
12167
+ self.title = self.RECORD_SCHEMA.fields_dict["title"].default
12168
+ self.messages = list()
12169
+ self.created = AuditStampClass._construct_with_defaults()
12170
+ self.originType = self.RECORD_SCHEMA.fields_dict["originType"].default
12171
+
12172
+
12173
+ @property
12174
+ def title(self) -> Union[None, str]:
12175
+ """Optional title for the conversation. """
12176
+ return self._inner_dict.get('title') # type: ignore
12177
+
12178
+ @title.setter
12179
+ def title(self, value: Union[None, str]) -> None:
12180
+ self._inner_dict['title'] = value
12181
+
12182
+
12183
+ @property
12184
+ def messages(self) -> List["DataHubAiConversationMessageClass"]:
12185
+ """Messages in the conversation"""
12186
+ return self._inner_dict.get('messages') # type: ignore
12187
+
12188
+ @messages.setter
12189
+ def messages(self, value: List["DataHubAiConversationMessageClass"]) -> None:
12190
+ self._inner_dict['messages'] = value
12191
+
12192
+
12193
+ @property
12194
+ def created(self) -> "AuditStampClass":
12195
+ """The time and actor who initiated the conversation"""
12196
+ return self._inner_dict.get('created') # type: ignore
12197
+
12198
+ @created.setter
12199
+ def created(self, value: "AuditStampClass") -> None:
12200
+ self._inner_dict['created'] = value
12201
+
12202
+
12203
+ @property
12204
+ def originType(self) -> Union[str, "DataHubAiConversationOriginTypeClass"]:
12205
+ """Origin type for the chat session"""
12206
+ return self._inner_dict.get('originType') # type: ignore
12207
+
12208
+ @originType.setter
12209
+ def originType(self, value: Union[str, "DataHubAiConversationOriginTypeClass"]) -> None:
12210
+ self._inner_dict['originType'] = value
12211
+
12212
+
12213
+ class DataHubAiConversationMessageClass(DictWrapper):
12214
+ """Conversation info """
12215
+
12216
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.conversation.DataHubAiConversationMessage")
12217
+ def __init__(self,
12218
+ type: Union[str, "DataHubAiConversationMessageTypeClass"],
12219
+ time: int,
12220
+ actor: "DataHubAiConversationActorClass",
12221
+ content: "DataHubAiConversationMessageContentClass",
12222
+ ):
12223
+ super().__init__()
12224
+
12225
+ self.type = type
12226
+ self.time = time
12227
+ self.actor = actor
12228
+ self.content = content
12229
+
12230
+ def _restore_defaults(self) -> None:
12231
+ self.type = DataHubAiConversationMessageTypeClass.TEXT
12232
+ self.time = int()
12233
+ self.actor = DataHubAiConversationActorClass._construct_with_defaults()
12234
+ self.content = DataHubAiConversationMessageContentClass._construct_with_defaults()
12235
+
12236
+
12237
+ @property
12238
+ def type(self) -> Union[str, "DataHubAiConversationMessageTypeClass"]:
12239
+ """The type of message"""
12240
+ return self._inner_dict.get('type') # type: ignore
12241
+
12242
+ @type.setter
12243
+ def type(self, value: Union[str, "DataHubAiConversationMessageTypeClass"]) -> None:
12244
+ self._inner_dict['type'] = value
12245
+
12246
+
12247
+ @property
12248
+ def time(self) -> int:
12249
+ """Timestamp in milliseconds"""
12250
+ return self._inner_dict.get('time') # type: ignore
12251
+
12252
+ @time.setter
12253
+ def time(self, value: int) -> None:
12254
+ self._inner_dict['time'] = value
12255
+
12256
+
12257
+ @property
12258
+ def actor(self) -> "DataHubAiConversationActorClass":
12259
+ """Messages in the conversation"""
12260
+ return self._inner_dict.get('actor') # type: ignore
12261
+
12262
+ @actor.setter
12263
+ def actor(self, value: "DataHubAiConversationActorClass") -> None:
12264
+ self._inner_dict['actor'] = value
12265
+
12266
+
12267
+ @property
12268
+ def content(self) -> "DataHubAiConversationMessageContentClass":
12269
+ """The contents of the message. """
12270
+ return self._inner_dict.get('content') # type: ignore
12271
+
12272
+ @content.setter
12273
+ def content(self, value: "DataHubAiConversationMessageContentClass") -> None:
12274
+ self._inner_dict['content'] = value
12275
+
12276
+
12277
+ class DataHubAiConversationMessageContentClass(DictWrapper):
12278
+ """Conversation message content info """
12279
+
12280
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.conversation.DataHubAiConversationMessageContent")
12281
+ def __init__(self,
12282
+ text: str,
12283
+ ):
12284
+ super().__init__()
12285
+
12286
+ self.text = text
12287
+
12288
+ def _restore_defaults(self) -> None:
12289
+ self.text = str()
12290
+
12291
+
12292
+ @property
12293
+ def text(self) -> str:
12294
+ """Raw message text"""
12295
+ return self._inner_dict.get('text') # type: ignore
12296
+
12297
+ @text.setter
12298
+ def text(self, value: str) -> None:
12299
+ self._inner_dict['text'] = value
12300
+
12301
+
12302
+ class DataHubAiConversationMessageTypeClass(object):
12303
+ # No docs available.
12304
+
12305
+ TEXT = "TEXT"
12306
+ TOOL_CALL = "TOOL_CALL"
12307
+ TOOL_RESULT = "TOOL_RESULT"
12308
+ THINKING = "THINKING"
12309
+
12310
+
12311
+ class DataHubAiConversationOriginTypeClass(object):
12312
+ # No docs available.
12313
+
12314
+ DATAHUB_UI = "DATAHUB_UI"
12315
+ """Chat session originated in DataHub UI """
12316
+
12317
+
12318
+
12091
12319
  class DashboardInfoClass(_Aspect):
12092
12320
  """Information about a dashboard"""
12093
12321
 
@@ -23968,6 +24196,35 @@ class DataHubActionKeyClass(_Aspect):
23968
24196
  self._inner_dict['id'] = value
23969
24197
 
23970
24198
 
24199
+ class DataHubAiConversationKeyClass(_Aspect):
24200
+ """Key for a DataHub conversation"""
24201
+
24202
+
24203
+ ASPECT_NAME = 'dataHubAiConversationKey'
24204
+ ASPECT_INFO = {'keyForEntity': 'dataHubAiConversation', 'entityCategory': 'core', 'entityAspects': ['dataHubAiConversationInfo']}
24205
+ RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.metadata.key.DataHubAiConversationKey")
24206
+
24207
+ def __init__(self,
24208
+ id: str,
24209
+ ):
24210
+ super().__init__()
24211
+
24212
+ self.id = id
24213
+
24214
+ def _restore_defaults(self) -> None:
24215
+ self.id = str()
24216
+
24217
+
24218
+ @property
24219
+ def id(self) -> str:
24220
+ """Unique identifier for the conversation"""
24221
+ return self._inner_dict.get('id') # type: ignore
24222
+
24223
+ @id.setter
24224
+ def id(self, value: str) -> None:
24225
+ self._inner_dict['id'] = value
24226
+
24227
+
23971
24228
  class DataHubConnectionKeyClass(_Aspect):
23972
24229
  """Key for a Connection"""
23973
24230
 
@@ -41303,6 +41560,13 @@ __SCHEMA_TYPES = {
41303
41560
  'com.linkedin.pegasus2avro.container.Container': ContainerClass,
41304
41561
  'com.linkedin.pegasus2avro.container.ContainerProperties': ContainerPropertiesClass,
41305
41562
  'com.linkedin.pegasus2avro.container.EditableContainerProperties': EditableContainerPropertiesClass,
41563
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationActor': DataHubAiConversationActorClass,
41564
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationActorType': DataHubAiConversationActorTypeClass,
41565
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationInfo': DataHubAiConversationInfoClass,
41566
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationMessage': DataHubAiConversationMessageClass,
41567
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationMessageContent': DataHubAiConversationMessageContentClass,
41568
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationMessageType': DataHubAiConversationMessageTypeClass,
41569
+ 'com.linkedin.pegasus2avro.conversation.DataHubAiConversationOriginType': DataHubAiConversationOriginTypeClass,
41306
41570
  'com.linkedin.pegasus2avro.dashboard.DashboardInfo': DashboardInfoClass,
41307
41571
  'com.linkedin.pegasus2avro.dashboard.DashboardUsageStatistics': DashboardUsageStatisticsClass,
41308
41572
  'com.linkedin.pegasus2avro.dashboard.DashboardUserUsageCounts': DashboardUserUsageCountsClass,
@@ -41499,6 +41763,7 @@ __SCHEMA_TYPES = {
41499
41763
  'com.linkedin.pegasus2avro.metadata.key.DataFlowKey': DataFlowKeyClass,
41500
41764
  'com.linkedin.pegasus2avro.metadata.key.DataHubAccessTokenKey': DataHubAccessTokenKeyClass,
41501
41765
  'com.linkedin.pegasus2avro.metadata.key.DataHubActionKey': DataHubActionKeyClass,
41766
+ 'com.linkedin.pegasus2avro.metadata.key.DataHubAiConversationKey': DataHubAiConversationKeyClass,
41502
41767
  'com.linkedin.pegasus2avro.metadata.key.DataHubConnectionKey': DataHubConnectionKeyClass,
41503
41768
  'com.linkedin.pegasus2avro.metadata.key.DataHubFileKey': DataHubFileKeyClass,
41504
41769
  'com.linkedin.pegasus2avro.metadata.key.DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
@@ -42091,6 +42356,13 @@ __SCHEMA_TYPES = {
42091
42356
  'Container': ContainerClass,
42092
42357
  'ContainerProperties': ContainerPropertiesClass,
42093
42358
  'EditableContainerProperties': EditableContainerPropertiesClass,
42359
+ 'DataHubAiConversationActor': DataHubAiConversationActorClass,
42360
+ 'DataHubAiConversationActorType': DataHubAiConversationActorTypeClass,
42361
+ 'DataHubAiConversationInfo': DataHubAiConversationInfoClass,
42362
+ 'DataHubAiConversationMessage': DataHubAiConversationMessageClass,
42363
+ 'DataHubAiConversationMessageContent': DataHubAiConversationMessageContentClass,
42364
+ 'DataHubAiConversationMessageType': DataHubAiConversationMessageTypeClass,
42365
+ 'DataHubAiConversationOriginType': DataHubAiConversationOriginTypeClass,
42094
42366
  'DashboardInfo': DashboardInfoClass,
42095
42367
  'DashboardUsageStatistics': DashboardUsageStatisticsClass,
42096
42368
  'DashboardUserUsageCounts': DashboardUserUsageCountsClass,
@@ -42287,6 +42559,7 @@ __SCHEMA_TYPES = {
42287
42559
  'DataFlowKey': DataFlowKeyClass,
42288
42560
  'DataHubAccessTokenKey': DataHubAccessTokenKeyClass,
42289
42561
  'DataHubActionKey': DataHubActionKeyClass,
42562
+ 'DataHubAiConversationKey': DataHubAiConversationKeyClass,
42290
42563
  'DataHubConnectionKey': DataHubConnectionKeyClass,
42291
42564
  'DataHubFileKey': DataHubFileKeyClass,
42292
42565
  'DataHubIngestionSourceKey': DataHubIngestionSourceKeyClass,
@@ -42770,6 +43043,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
42770
43043
  MLTrainingRunPropertiesClass,
42771
43044
  EditableMLPrimaryKeyPropertiesClass,
42772
43045
  EvaluationDataClass,
43046
+ DataHubAiConversationInfoClass,
42773
43047
  DataJobInfoClass,
42774
43048
  DataFlowInfoClass,
42775
43049
  VersionInfoClass,
@@ -42832,6 +43106,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
42832
43106
  ConstraintKeyClass,
42833
43107
  VersionSetKeyClass,
42834
43108
  MLPrimaryKeyKeyClass,
43109
+ DataHubAiConversationKeyClass,
42835
43110
  AnomalyKeyClass,
42836
43111
  TestKeyClass,
42837
43112
  MLModelGroupKeyClass,
@@ -43070,6 +43345,7 @@ class AspectBag(TypedDict, total=False):
43070
43345
  mlTrainingRunProperties: MLTrainingRunPropertiesClass
43071
43346
  editableMlPrimaryKeyProperties: EditableMLPrimaryKeyPropertiesClass
43072
43347
  mlModelEvaluationData: EvaluationDataClass
43348
+ dataHubAiConversationInfo: DataHubAiConversationInfoClass
43073
43349
  dataJobInfo: DataJobInfoClass
43074
43350
  dataFlowInfo: DataFlowInfoClass
43075
43351
  versionInfo: VersionInfoClass
@@ -43132,6 +43408,7 @@ class AspectBag(TypedDict, total=False):
43132
43408
  constraintKey: ConstraintKeyClass
43133
43409
  versionSetKey: VersionSetKeyClass
43134
43410
  mlPrimaryKeyKey: MLPrimaryKeyKeyClass
43411
+ dataHubAiConversationKey: DataHubAiConversationKeyClass
43135
43412
  anomalyKey: AnomalyKeyClass
43136
43413
  testKey: TestKeyClass
43137
43414
  mlModelGroupKey: MLModelGroupKeyClass
@@ -43275,6 +43552,7 @@ KEY_ASPECTS: Dict[str, Type[_Aspect]] = {
43275
43552
  'constraint': ConstraintKeyClass,
43276
43553
  'versionSet': VersionSetKeyClass,
43277
43554
  'mlPrimaryKey': MLPrimaryKeyKeyClass,
43555
+ 'dataHubAiConversation': DataHubAiConversationKeyClass,
43278
43556
  'test': TestKeyClass,
43279
43557
  'mlModelGroup': MLModelGroupKeyClass,
43280
43558
  'dataHubSecret': DataHubSecretKeyClass,
@@ -43354,6 +43632,7 @@ ENTITY_TYPE_NAMES: List[str] = [
43354
43632
  'constraint',
43355
43633
  'versionSet',
43356
43634
  'mlPrimaryKey',
43635
+ 'dataHubAiConversation',
43357
43636
  'test',
43358
43637
  'mlModelGroup',
43359
43638
  'dataHubSecret',
@@ -43430,6 +43709,7 @@ EntityTypeName = Literal[
43430
43709
  'constraint',
43431
43710
  'versionSet',
43432
43711
  'mlPrimaryKey',
43712
+ 'dataHubAiConversation',
43433
43713
  'test',
43434
43714
  'mlModelGroup',
43435
43715
  'dataHubSecret',
@@ -0,0 +1,198 @@
1
+ {
2
+ "type": "record",
3
+ "Aspect": {
4
+ "name": "dataHubAiConversationInfo"
5
+ },
6
+ "name": "DataHubAiConversationInfo",
7
+ "namespace": "com.linkedin.pegasus2avro.conversation",
8
+ "fields": [
9
+ {
10
+ "type": [
11
+ "null",
12
+ "string"
13
+ ],
14
+ "name": "title",
15
+ "default": null,
16
+ "doc": "Optional title for the conversation. "
17
+ },
18
+ {
19
+ "type": {
20
+ "type": "array",
21
+ "items": {
22
+ "type": "record",
23
+ "name": "DataHubAiConversationMessage",
24
+ "namespace": "com.linkedin.pegasus2avro.conversation",
25
+ "fields": [
26
+ {
27
+ "type": {
28
+ "type": "enum",
29
+ "name": "DataHubAiConversationMessageType",
30
+ "namespace": "com.linkedin.pegasus2avro.conversation",
31
+ "symbols": [
32
+ "TEXT",
33
+ "TOOL_CALL",
34
+ "TOOL_RESULT",
35
+ "THINKING"
36
+ ]
37
+ },
38
+ "name": "type",
39
+ "doc": "The type of message"
40
+ },
41
+ {
42
+ "type": "long",
43
+ "name": "time",
44
+ "doc": "Timestamp in milliseconds"
45
+ },
46
+ {
47
+ "type": {
48
+ "type": "record",
49
+ "name": "DataHubAiConversationActor",
50
+ "namespace": "com.linkedin.pegasus2avro.conversation",
51
+ "fields": [
52
+ {
53
+ "type": {
54
+ "type": "enum",
55
+ "symbolDocs": {
56
+ "AGENT": "An agent initiated message. ",
57
+ "USER": "A user initiated message. "
58
+ },
59
+ "name": "DataHubAiConversationActorType",
60
+ "namespace": "com.linkedin.pegasus2avro.conversation",
61
+ "symbols": [
62
+ "USER",
63
+ "AGENT"
64
+ ]
65
+ },
66
+ "name": "type",
67
+ "doc": "The type of actor "
68
+ },
69
+ {
70
+ "Relationship": {
71
+ "entityTypes": [
72
+ "corpuser",
73
+ "corpGroup"
74
+ ],
75
+ "name": "AuthoredBy"
76
+ },
77
+ "java": {
78
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
79
+ },
80
+ "type": [
81
+ "null",
82
+ "string"
83
+ ],
84
+ "name": "actor",
85
+ "default": null,
86
+ "doc": "The actor that has sent the message.\nRequired if type is not AGENT.",
87
+ "Urn": "Urn",
88
+ "entityTypes": [
89
+ "corpuser",
90
+ "corpGroup"
91
+ ]
92
+ }
93
+ ]
94
+ },
95
+ "name": "actor",
96
+ "doc": "Messages in the conversation"
97
+ },
98
+ {
99
+ "type": {
100
+ "type": "record",
101
+ "name": "DataHubAiConversationMessageContent",
102
+ "namespace": "com.linkedin.pegasus2avro.conversation",
103
+ "fields": [
104
+ {
105
+ "type": "string",
106
+ "name": "text",
107
+ "doc": "Raw message text"
108
+ }
109
+ ],
110
+ "doc": "Conversation message content info "
111
+ },
112
+ "name": "content",
113
+ "doc": "The contents of the message. "
114
+ }
115
+ ],
116
+ "doc": "Conversation info "
117
+ }
118
+ },
119
+ "name": "messages",
120
+ "doc": "Messages in the conversation"
121
+ },
122
+ {
123
+ "Searchable": {
124
+ "/actor": {
125
+ "fieldName": "creator",
126
+ "fieldType": "URN"
127
+ },
128
+ "/time": {
129
+ "fieldName": "createdAt",
130
+ "fieldType": "DATETIME"
131
+ }
132
+ },
133
+ "type": {
134
+ "type": "record",
135
+ "name": "AuditStamp",
136
+ "namespace": "com.linkedin.pegasus2avro.common",
137
+ "fields": [
138
+ {
139
+ "type": "long",
140
+ "name": "time",
141
+ "doc": "When did the resource/association/sub-resource move into the specific lifecycle stage represented by this AuditEvent."
142
+ },
143
+ {
144
+ "java": {
145
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
146
+ },
147
+ "type": "string",
148
+ "name": "actor",
149
+ "doc": "The entity (e.g. a member URN) which will be credited for moving the resource/association/sub-resource into the specific lifecycle stage. It is also the one used to authorize the change.",
150
+ "Urn": "Urn"
151
+ },
152
+ {
153
+ "java": {
154
+ "class": "com.linkedin.pegasus2avro.common.urn.Urn"
155
+ },
156
+ "type": [
157
+ "null",
158
+ "string"
159
+ ],
160
+ "name": "impersonator",
161
+ "default": null,
162
+ "doc": "The entity (e.g. a service URN) which performs the change on behalf of the Actor and must be authorized to act as the Actor.",
163
+ "Urn": "Urn"
164
+ },
165
+ {
166
+ "type": [
167
+ "null",
168
+ "string"
169
+ ],
170
+ "name": "message",
171
+ "default": null,
172
+ "doc": "Additional context around how DataHub was informed of the particular change. For example: was the change created by an automated process, or manually."
173
+ }
174
+ ],
175
+ "doc": "Data captured on a resource/association/sub-resource level giving insight into when that resource/association/sub-resource moved into a particular lifecycle stage, and who acted to move it into that specific lifecycle stage."
176
+ },
177
+ "name": "created",
178
+ "doc": "The time and actor who initiated the conversation"
179
+ },
180
+ {
181
+ "type": {
182
+ "type": "enum",
183
+ "symbolDocs": {
184
+ "DATAHUB_UI": "Chat session originated in DataHub UI "
185
+ },
186
+ "name": "DataHubAiConversationOriginType",
187
+ "namespace": "com.linkedin.pegasus2avro.conversation",
188
+ "symbols": [
189
+ "DATAHUB_UI"
190
+ ]
191
+ },
192
+ "name": "originType",
193
+ "default": "DATAHUB_UI",
194
+ "doc": "Origin type for the chat session"
195
+ }
196
+ ],
197
+ "doc": "Conversation info "
198
+ }
@@ -0,0 +1,22 @@
1
+ {
2
+ "type": "record",
3
+ "Aspect": {
4
+ "name": "dataHubAiConversationKey",
5
+ "keyForEntity": "dataHubAiConversation",
6
+ "entityCategory": "core",
7
+ "entityAspects": [
8
+ "dataHubAiConversationInfo"
9
+ ]
10
+ },
11
+ "name": "DataHubAiConversationKey",
12
+ "namespace": "com.linkedin.pegasus2avro.metadata.key",
13
+ "fields": [
14
+ {
15
+ "Searchable": {},
16
+ "type": "string",
17
+ "name": "id",
18
+ "doc": "Unique identifier for the conversation"
19
+ }
20
+ ],
21
+ "doc": "Key for a DataHub conversation"
22
+ }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: acryl-datahub-cloud
3
- Version: 0.3.15rc1
3
+ Version: 0.3.15rc3
4
4
  Requires-Python: >=3.10
5
5
  Requires-Dist: avro-gen3==0.7.16
6
6
  Requires-Dist: acryl-datahub
@@ -10,89 +10,89 @@ Requires-Dist: pytz
10
10
  Requires-Dist: types-croniter
11
11
  Requires-Dist: tzlocal
12
12
  Provides-Extra: datahub-lineage-features
13
- Requires-Dist: pandas; extra == "datahub-lineage-features"
14
13
  Requires-Dist: tenacity; extra == "datahub-lineage-features"
15
- Requires-Dist: opensearch-py==2.4.2; extra == "datahub-lineage-features"
16
- Requires-Dist: pyarrow; extra == "datahub-lineage-features"
14
+ Requires-Dist: pandas; extra == "datahub-lineage-features"
17
15
  Requires-Dist: duckdb; extra == "datahub-lineage-features"
16
+ Requires-Dist: pyarrow; extra == "datahub-lineage-features"
17
+ Requires-Dist: opensearch-py==2.4.2; extra == "datahub-lineage-features"
18
18
  Provides-Extra: datahub-reporting-forms
19
19
  Requires-Dist: boto3; extra == "datahub-reporting-forms"
20
- Requires-Dist: pandas; extra == "datahub-reporting-forms"
21
20
  Requires-Dist: termcolor==2.5.0; extra == "datahub-reporting-forms"
22
- Requires-Dist: pyarrow; extra == "datahub-reporting-forms"
21
+ Requires-Dist: pandas; extra == "datahub-reporting-forms"
23
22
  Requires-Dist: duckdb; extra == "datahub-reporting-forms"
23
+ Requires-Dist: pyarrow; extra == "datahub-reporting-forms"
24
24
  Provides-Extra: datahub-reporting-extract-graph
25
25
  Requires-Dist: boto3; extra == "datahub-reporting-extract-graph"
26
26
  Requires-Dist: pandas; extra == "datahub-reporting-extract-graph"
27
- Requires-Dist: opensearch-py==2.4.2; extra == "datahub-reporting-extract-graph"
28
- Requires-Dist: pyarrow; extra == "datahub-reporting-extract-graph"
29
27
  Requires-Dist: duckdb; extra == "datahub-reporting-extract-graph"
28
+ Requires-Dist: pyarrow; extra == "datahub-reporting-extract-graph"
29
+ Requires-Dist: opensearch-py==2.4.2; extra == "datahub-reporting-extract-graph"
30
30
  Provides-Extra: datahub-reporting-extract-sql
31
- Requires-Dist: pyarrow; extra == "datahub-reporting-extract-sql"
32
- Requires-Dist: boto3; extra == "datahub-reporting-extract-sql"
33
31
  Requires-Dist: pandas; extra == "datahub-reporting-extract-sql"
34
32
  Requires-Dist: duckdb; extra == "datahub-reporting-extract-sql"
33
+ Requires-Dist: boto3; extra == "datahub-reporting-extract-sql"
34
+ Requires-Dist: pyarrow; extra == "datahub-reporting-extract-sql"
35
35
  Provides-Extra: datahub-usage-reporting
36
- Requires-Dist: elasticsearch==7.13.4; extra == "datahub-usage-reporting"
36
+ Requires-Dist: numpy<2; extra == "datahub-usage-reporting"
37
37
  Requires-Dist: termcolor==2.5.0; extra == "datahub-usage-reporting"
38
+ Requires-Dist: pyarrow<=18.0.0; extra == "datahub-usage-reporting"
39
+ Requires-Dist: duckdb; extra == "datahub-usage-reporting"
40
+ Requires-Dist: elasticsearch==7.13.4; extra == "datahub-usage-reporting"
41
+ Requires-Dist: boto3; extra == "datahub-usage-reporting"
38
42
  Requires-Dist: polars==1.34.0; extra == "datahub-usage-reporting"
39
43
  Requires-Dist: scipy<=1.14.1; extra == "datahub-usage-reporting"
40
- Requires-Dist: pyarrow; extra == "datahub-usage-reporting"
41
- Requires-Dist: boto3; extra == "datahub-usage-reporting"
42
- Requires-Dist: duckdb; extra == "datahub-usage-reporting"
43
- Requires-Dist: pyarrow<=18.0.0; extra == "datahub-usage-reporting"
44
44
  Requires-Dist: pandas; extra == "datahub-usage-reporting"
45
+ Requires-Dist: pyarrow; extra == "datahub-usage-reporting"
45
46
  Requires-Dist: opensearch-py==2.4.2; extra == "datahub-usage-reporting"
46
- Requires-Dist: numpy<2; extra == "datahub-usage-reporting"
47
47
  Provides-Extra: datahub-metadata-sharing
48
48
  Requires-Dist: tenacity; extra == "datahub-metadata-sharing"
49
49
  Provides-Extra: datahub-action-request-owner
50
50
  Requires-Dist: tenacity; extra == "datahub-action-request-owner"
51
51
  Provides-Extra: acryl-cs-issues
52
- Requires-Dist: jinja2; extra == "acryl-cs-issues"
53
- Requires-Dist: openai; extra == "acryl-cs-issues"
54
52
  Requires-Dist: zenpy; extra == "acryl-cs-issues"
53
+ Requires-Dist: openai; extra == "acryl-cs-issues"
54
+ Requires-Dist: jinja2; extra == "acryl-cs-issues"
55
55
  Requires-Dist: slack-sdk; extra == "acryl-cs-issues"
56
56
  Provides-Extra: datahub-forms-notifications
57
57
  Requires-Dist: tenacity; extra == "datahub-forms-notifications"
58
58
  Provides-Extra: all
59
- Requires-Dist: elasticsearch==7.13.4; extra == "all"
60
- Requires-Dist: tenacity; extra == "all"
61
- Requires-Dist: openai; extra == "all"
59
+ Requires-Dist: zenpy; extra == "all"
62
60
  Requires-Dist: termcolor==2.5.0; extra == "all"
61
+ Requires-Dist: numpy<2; extra == "all"
62
+ Requires-Dist: pyarrow<=18.0.0; extra == "all"
63
+ Requires-Dist: tenacity; extra == "all"
64
+ Requires-Dist: duckdb; extra == "all"
65
+ Requires-Dist: slack-sdk; extra == "all"
66
+ Requires-Dist: elasticsearch==7.13.4; extra == "all"
67
+ Requires-Dist: boto3; extra == "all"
63
68
  Requires-Dist: polars==1.34.0; extra == "all"
64
69
  Requires-Dist: scipy<=1.14.1; extra == "all"
65
- Requires-Dist: pyarrow; extra == "all"
66
- Requires-Dist: boto3; extra == "all"
67
70
  Requires-Dist: jinja2; extra == "all"
68
- Requires-Dist: duckdb; extra == "all"
69
- Requires-Dist: pyarrow<=18.0.0; extra == "all"
70
71
  Requires-Dist: pandas; extra == "all"
72
+ Requires-Dist: openai; extra == "all"
73
+ Requires-Dist: pyarrow; extra == "all"
71
74
  Requires-Dist: opensearch-py==2.4.2; extra == "all"
72
- Requires-Dist: zenpy; extra == "all"
73
- Requires-Dist: slack-sdk; extra == "all"
74
- Requires-Dist: numpy<2; extra == "all"
75
75
  Provides-Extra: dev
76
- Requires-Dist: openai; extra == "dev"
77
- Requires-Dist: polars==1.34.0; extra == "dev"
78
76
  Requires-Dist: pyarrow-stubs; extra == "dev"
77
+ Requires-Dist: openai; extra == "dev"
78
+ Requires-Dist: zenpy; extra == "dev"
79
79
  Requires-Dist: numpy<2; extra == "dev"
80
- Requires-Dist: duckdb; extra == "dev"
80
+ Requires-Dist: polars==1.34.0; extra == "dev"
81
+ Requires-Dist: pyarrow; extra == "dev"
82
+ Requires-Dist: termcolor==2.5.0; extra == "dev"
83
+ Requires-Dist: acryl-datahub[dev]; extra == "dev"
81
84
  Requires-Dist: slack-sdk; extra == "dev"
82
85
  Requires-Dist: tenacity; extra == "dev"
83
- Requires-Dist: scipy<=1.14.1; extra == "dev"
86
+ Requires-Dist: duckdb; extra == "dev"
87
+ Requires-Dist: scipy-stubs; extra == "dev"
84
88
  Requires-Dist: pandas-stubs; extra == "dev"
89
+ Requires-Dist: jinja2; extra == "dev"
90
+ Requires-Dist: scipy<=1.14.1; extra == "dev"
85
91
  Requires-Dist: pyarrow<=18.0.0; extra == "dev"
86
- Requires-Dist: pandas; extra == "dev"
87
- Requires-Dist: scipy-stubs; extra == "dev"
88
- Requires-Dist: zenpy; extra == "dev"
89
92
  Requires-Dist: elasticsearch==7.13.4; extra == "dev"
90
- Requires-Dist: termcolor==2.5.0; extra == "dev"
91
- Requires-Dist: jinja2; extra == "dev"
92
93
  Requires-Dist: boto3; extra == "dev"
93
- Requires-Dist: pyarrow; extra == "dev"
94
+ Requires-Dist: pandas; extra == "dev"
94
95
  Requires-Dist: opensearch-py==2.4.2; extra == "dev"
95
- Requires-Dist: acryl-datahub[dev]; extra == "dev"
96
96
  Dynamic: provides-extra
97
97
  Dynamic: requires-dist
98
98
  Dynamic: requires-python
@@ -1,5 +1,5 @@
1
1
  acryl_datahub_cloud/__init__.py,sha256=axrMXkn0RW80YmuZgwUP_YQImcv6L28duZLWnW-gaNM,521
2
- acryl_datahub_cloud/_codegen_config.json,sha256=9_Hr77yGGuoyJhwT9Ov6lXlvk-nkSkYDXIkLswDvKlA,556
2
+ acryl_datahub_cloud/_codegen_config.json,sha256=utyHcTdJiMn_sLpkOpUH4dYx5Oxb6oVZJ-Gy1ecdsRo,556
3
3
  acryl_datahub_cloud/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
4
  acryl_datahub_cloud/acryl_cs_issues/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
5
  acryl_datahub_cloud/acryl_cs_issues/acryl_customer.py,sha256=uqYPmluXYdlgyq3C09gxIU5nEkKiqHoZ53h2oN5etj0,25227
@@ -26,7 +26,7 @@ acryl_datahub_cloud/datahub_metadata_sharing/scroll_shared_entities.gql,sha256=N
26
26
  acryl_datahub_cloud/datahub_metadata_sharing/share_entity.gql,sha256=tJ0VkAekRQCxZ3TkaC0nVqMHQoILqf2J6J-HfbSRL0U,286
27
27
  acryl_datahub_cloud/datahub_reporting/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
28
  acryl_datahub_cloud/datahub_reporting/datahub_dataset.py,sha256=y3mlLbtg9VxrfyzUUxwRXIYFQ-2K0YUlcosNEFWsMeE,20015
29
- acryl_datahub_cloud/datahub_reporting/datahub_form_reporting.py,sha256=qXUgJHH2n85K7BPBDCPOoXuWCExYeEZcxLmILJk_MKQ,22608
29
+ acryl_datahub_cloud/datahub_reporting/datahub_form_reporting.py,sha256=lXjEGAswg5SVHFxIYz2m0NU_tF-6_tpWpRTBsrQ3ZFA,23281
30
30
  acryl_datahub_cloud/datahub_reporting/extract_graph.py,sha256=n8DXMbGlAllWh5FcQ4bnUf3HYOiNYsSodWomigtmfWA,7896
31
31
  acryl_datahub_cloud/datahub_reporting/extract_sql.py,sha256=H5y3e35RuwXk23trhcCKsjFkStGlXZiYzdIYf0213Hw,9537
32
32
  acryl_datahub_cloud/datahub_reporting/forms.py,sha256=WUmJ3DLcKn4VcSplZFxmzdPrxQY2mYKqKc7hk8XZ9_Q,5939
@@ -41,14 +41,14 @@ acryl_datahub_cloud/datahub_usage_reporting/usage_feature_patch_builder.py,sha25
41
41
  acryl_datahub_cloud/datahub_usage_reporting/usage_feature_reporter.py,sha256=H9bs9Yssa90eMAC1Qlg5lzwkbddaVVdjKgsWsKsxbIU,83289
42
42
  acryl_datahub_cloud/elasticsearch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
43
43
  acryl_datahub_cloud/elasticsearch/config.py,sha256=6QNBOmoQZu1cJrDIBZyvZgdQt0QLfP82hdQkPtP-4HE,1220
44
- acryl_datahub_cloud/elasticsearch/graph_service.py,sha256=Bpx_BaMcNSzsLtDes6Jej9VZuFF2iJRQBcBUCVE6sxg,3404
44
+ acryl_datahub_cloud/elasticsearch/graph_service.py,sha256=nvs8rfjT-3wMr193X9b98tUxQYh2IXh_pWeO6NcERmw,5141
45
45
  acryl_datahub_cloud/lineage_features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
46
  acryl_datahub_cloud/lineage_features/source.py,sha256=fGtnIM4PsablJleNPZA_sz0uHEy0Ij2jfm1Le2t8E0Q,16638
47
47
  acryl_datahub_cloud/metadata/__init__.py,sha256=AjhXPjI6cnpdcrBRrE5gOWo15vv2TTl2ctU4UAnUN7A,238
48
- acryl_datahub_cloud/metadata/schema.avsc,sha256=VXKV0OCp7Lx8jSyqwQ7HFHzBAcOVxZ5xUpeGXrxnR80,1239850
49
- acryl_datahub_cloud/metadata/schema_classes.py,sha256=A6sP_k_qZ5nENsjisYjonm-zayNN3yWID0c5c7Kw8FQ,1631525
48
+ acryl_datahub_cloud/metadata/schema.avsc,sha256=Ynlk6QU6E9Z9j_N26dnSg56xDKqvyCOjiO80VWGyLWM,1245399
49
+ acryl_datahub_cloud/metadata/schema_classes.py,sha256=Z3-DXjtNo3xqRIxvxnSKPrKy-iDcLk8KYXFkOICvyks,1641283
50
50
  acryl_datahub_cloud/metadata/_urns/__init__.py,sha256=cOF3GHMDgPhmbLKbN02NPpuLGHSu0qNgQyBRv08eqF0,243
51
- acryl_datahub_cloud/metadata/_urns/urn_defs.py,sha256=k2fYG8RGGpwyuy-Xb8E_ZDm4T6H1T3aVDIiY5sFIpbM,172971
51
+ acryl_datahub_cloud/metadata/_urns/urn_defs.py,sha256=Bij7q4iXp0EugcF3xZg-JrrSlvpavUBogUdCeCDJKWs,175353
52
52
  acryl_datahub_cloud/metadata/com/__init__.py,sha256=gsAIuTxzfJdI7a9ybZlgMIHMAYksM1SxGxXjtySgKSc,202
53
53
  acryl_datahub_cloud/metadata/com/linkedin/__init__.py,sha256=gsAIuTxzfJdI7a9ybZlgMIHMAYksM1SxGxXjtySgKSc,202
54
54
  acryl_datahub_cloud/metadata/com/linkedin/events/__init__.py,sha256=s_dR0plZF-rOxxIbE8ojekJqwiHzl2WYR-Z3kW6kKS0,298
@@ -69,6 +69,7 @@ acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/common/fieldtransformer/_
69
69
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/connection/__init__.py,sha256=qRtw-dB14pzVzgQ0pDK8kyBplNdpRxVKNj4D70e_FqI,564
70
70
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/constraint/__init__.py,sha256=cTdQMR-vfPXYcvlp1nqIQEL-YWhxkzKyGJXjte3fR5I,519
71
71
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/container/__init__.py,sha256=3yWt36KqDKFhRc9pzvt0AMnbMTlhKurGvT3BUvc25QU,510
72
+ acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/conversation/__init__.py,sha256=pDMO-76YFHGpn2dee2kyHUS0G2grvf_RzQylmGiBQ-8,1154
72
73
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/dashboard/__init__.py,sha256=spcGVIipGkPWr6orwFXsOMBkAo3xo5eNiPxtA-Amhgw,656
73
74
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/datacontract/__init__.py,sha256=zgCDXvT8RXLBGJTO6WA4nPMRaHYNQnqjYdPaR9CGQdI,887
74
75
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/datahub/__init__.py,sha256=BnjtqbqyQIbROlo77oNsl5uZLjmBSQ6mVz9wQyf9otU,400
@@ -105,7 +106,7 @@ acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/ingestion/__init__.py,sha
105
106
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/link/__init__.py,sha256=4DfT4T_I6dh-iGk9LXYjrp98L9D66xZzM7Boqc7jmNg,388
106
107
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/logical/__init__.py,sha256=7SHiR-KzV1CkAimFy94SkcY0Xg0RlsIlLTUTGmGAW_U,290
107
108
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/__init__.py,sha256=gsAIuTxzfJdI7a9ybZlgMIHMAYksM1SxGxXjtySgKSc,202
108
- acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py,sha256=7N620d7c-BPAXnIsVI5xQ5zpfjXFIJ65XIy2QRZ0ns0,6621
109
+ acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/key/__init__.py,sha256=gJaTiK7iyh-WQ9vGGeShII4_fvjC7hgOc8dbzCNpu6I,6741
109
110
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/query/__init__.py,sha256=gsAIuTxzfJdI7a9ybZlgMIHMAYksM1SxGxXjtySgKSc,202
110
111
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/query/filter/__init__.py,sha256=DBP_QtxkFmC5q_kuk4dGjb4uOKbB4xKgqTWXGxmNbBQ,532
111
112
  acryl_datahub_cloud/metadata/com/linkedin/pegasus2avro/metadata/recommendation/__init__.py,sha256=6XhFJ-Qf_H8RkEG_kZV6TcUWa0z-RXNlze6MLhV85l4,927
@@ -216,6 +217,8 @@ acryl_datahub_cloud/metadata/schemas/DataHubAccessTokenKey.avsc,sha256=3EspNIxgb
216
217
  acryl_datahub_cloud/metadata/schemas/DataHubActionInfo.avsc,sha256=4sJNBSRJKTXxaofgDGHR04juUFmPy40Y8mbfgkwqAts,3168
217
218
  acryl_datahub_cloud/metadata/schemas/DataHubActionKey.avsc,sha256=7WGnI_vOvRKzwEASLRz_zltOwyOtLL-rsXkyToBPz30,508
218
219
  acryl_datahub_cloud/metadata/schemas/DataHubActionStatus.avsc,sha256=9OrdSvY6lUGr_srXyBynFk4JYguFbckbX0xJv6QH4Jo,6854
220
+ acryl_datahub_cloud/metadata/schemas/DataHubAiConversationInfo.avsc,sha256=88eOu52rIleUrUxkx08K4nKgNhv9k3HFi6dbolCY5f8,6457
221
+ acryl_datahub_cloud/metadata/schemas/DataHubAiConversationKey.avsc,sha256=kxnJInOOD_bLMFf76w_oqvDWuzo5uCv0Wo-_nIQI7hg,510
219
222
  acryl_datahub_cloud/metadata/schemas/DataHubConnectionDetails.avsc,sha256=IvZj6OA7HRvy-ZIIn0UbXdJNnyt_oTn16XIe5ZlcqGk,1661
220
223
  acryl_datahub_cloud/metadata/schemas/DataHubConnectionKey.avsc,sha256=n9pykqL4EBTLuFoVasxF-4a1v_5g578IrgXLPSLDxGQ,538
221
224
  acryl_datahub_cloud/metadata/schemas/DataHubFileInfo.avsc,sha256=tylUV_qx3kmoqEjQX747FUD0ZP-65kXBM7VidtJ9km8,6374
@@ -479,8 +482,8 @@ acryl_datahub_cloud/sdk/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRk
479
482
  acryl_datahub_cloud/sdk/entities/assertion.py,sha256=-OILvHyKAI4-5mS2bb_P44Fvk6rBOOcvaxSMXfEYvRw,15077
480
483
  acryl_datahub_cloud/sdk/entities/monitor.py,sha256=NMrhJrWYNPvorxA33S_5FOl8YCtSmmeAavTzFLtWcOo,9665
481
484
  acryl_datahub_cloud/sdk/entities/subscription.py,sha256=0zl3LxkFM_hXBE8iCy8g6Nen-udJF784RmtPZ73QCb8,2801
482
- acryl_datahub_cloud-0.3.15rc1.dist-info/METADATA,sha256=FQ2yNJVQ-8ijeV0jqpOvn2hdrkG-naIqWQ7USKAWKeU,4615
483
- acryl_datahub_cloud-0.3.15rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
484
- acryl_datahub_cloud-0.3.15rc1.dist-info/entry_points.txt,sha256=veuyIaEzm7JF2q-C8Q-RcSV6V5Y9LvnVvIhTjiT5WUs,1342
485
- acryl_datahub_cloud-0.3.15rc1.dist-info/top_level.txt,sha256=EwgCxfX-DzJANwxj-Mx_j4TOfAFhmc_FgMbRPzWsoZs,20
486
- acryl_datahub_cloud-0.3.15rc1.dist-info/RECORD,,
485
+ acryl_datahub_cloud-0.3.15rc3.dist-info/METADATA,sha256=K695K8adiqaXWDZOSxWIz1xJ0f2X_nyWQ3AEGCsmzvg,4615
486
+ acryl_datahub_cloud-0.3.15rc3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
487
+ acryl_datahub_cloud-0.3.15rc3.dist-info/entry_points.txt,sha256=veuyIaEzm7JF2q-C8Q-RcSV6V5Y9LvnVvIhTjiT5WUs,1342
488
+ acryl_datahub_cloud-0.3.15rc3.dist-info/top_level.txt,sha256=EwgCxfX-DzJANwxj-Mx_j4TOfAFhmc_FgMbRPzWsoZs,20
489
+ acryl_datahub_cloud-0.3.15rc3.dist-info/RECORD,,