atlan-application-sdk 0.1.1rc30__py3-none-any.whl → 0.1.1rc32__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional, Tuple, Type
 
 from application_sdk.activities import ActivitiesInterface
 from application_sdk.clients.utils import get_workflow_client
+from application_sdk.events.models import EventRegistration
 from application_sdk.observability.logger_adaptor import get_logger
-from application_sdk.outputs.eventstore import EventRegistration
 from application_sdk.server import ServerInterface
 from application_sdk.server.fastapi import APIServer, HttpWorkflowTrigger
 from application_sdk.server.fastapi.models import EventWorkflowTrigger
@@ -1,10 +1,12 @@
 import asyncio
 import uuid
 from concurrent.futures import ThreadPoolExecutor
+from datetime import timedelta
 from typing import Any, Dict, Optional, Sequence, Type
 
 from temporalio import activity, workflow
 from temporalio.client import Client, WorkflowExecutionStatus, WorkflowFailureError
+from temporalio.common import RetryPolicy
 from temporalio.types import CallableType, ClassType
 from temporalio.worker import (
     ActivityInboundInterceptor,
@@ -33,17 +35,17 @@ from application_sdk.constants import (
     WORKFLOW_PORT,
     WORKFLOW_TLS_ENABLED_KEY,
 )
-from application_sdk.inputs.secretstore import SecretStoreInput
-from application_sdk.inputs.statestore import StateType
-from application_sdk.observability.logger_adaptor import get_logger
-from application_sdk.outputs.eventstore import (
+from application_sdk.events.models import (
     ApplicationEventNames,
     Event,
     EventMetadata,
-    EventStore,
     EventTypes,
     WorkflowStates,
 )
+from application_sdk.inputs.secretstore import SecretStoreInput
+from application_sdk.inputs.statestore import StateType
+from application_sdk.observability.logger_adaptor import get_logger
+from application_sdk.outputs.eventstore import EventStore
 from application_sdk.outputs.secretstore import SecretStoreOutput
 from application_sdk.outputs.statestore import StateStoreOutput
 from application_sdk.workflows import WorkflowInterface
@@ -55,11 +57,30 @@ TEMPORAL_NOT_FOUND_FAILURE = (
 )
 
 
+# Activity for publishing events (runs outside sandbox)
+@activity.defn
+async def publish_event(event_data: dict) -> None:
+    """Activity to publish events outside the workflow sandbox.
+
+    Args:
+        event_data (dict): Event data to publish containing event_type, event_name,
+            metadata, and data fields.
+    """
+    try:
+        event = Event(**event_data)
+        await EventStore.publish_event(event)
+        activity.logger.info(f"Published event: {event_data.get('event_name','')}")
+    except Exception as e:
+        activity.logger.error(f"Failed to publish event: {e}")
+        raise
+
+
 class EventActivityInboundInterceptor(ActivityInboundInterceptor):
     """Interceptor for tracking activity execution events.
 
     This interceptor captures the start and end of activity executions,
     creating events that can be used for monitoring and tracking.
+    Activities run outside the sandbox so they can directly call EventStore.
     """
 
     async def execute_activity(self, input: ExecuteActivityInput) -> Any:
@@ -71,31 +92,28 @@ class EventActivityInboundInterceptor(ActivityInboundInterceptor):
         Returns:
             Any: The result of the activity execution.
         """
-        event = Event(
+        # Extract activity information for tracking
+
+        start_event = Event(
             event_type=EventTypes.APPLICATION_EVENT.value,
             event_name=ApplicationEventNames.ACTIVITY_START.value,
             data={},
         )
-        EventStore.publish_event(event)
+        await EventStore.publish_event(start_event)
 
         output = None
         try:
             output = await super().execute_activity(input)
-        except Exception as e:
+        except Exception:
+            raise
+        finally:
             end_event = Event(
                 event_type=EventTypes.APPLICATION_EVENT.value,
                 event_name=ApplicationEventNames.ACTIVITY_END.value,
                 data={},
             )
-            EventStore.publish_event(end_event)
-            raise e
+            await EventStore.publish_event(end_event)
 
-        end_event = Event(
-            event_type=EventTypes.APPLICATION_EVENT.value,
-            event_name=ApplicationEventNames.ACTIVITY_END.value,
-            data={},
-        )
-        EventStore.publish_event(end_event)
         return output
 
 
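Note: the rewrite above folds the previously duplicated ACTIVITY_END block into a single try/finally, so the end event is published exactly once whether the activity returns or raises. A minimal, dependency-free sketch of that pattern (illustrative names, not part of the package):

async def run_with_end_event(run, publish):
    # finally executes on both the success and the exception path,
    # so the end marker is emitted exactly once either way.
    output = None
    try:
        output = await run()
    finally:
        await publish("activity_end")
    return output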
@@ -104,6 +122,7 @@ class EventWorkflowInboundInterceptor(WorkflowInboundInterceptor):
 
     This interceptor captures the start and end of workflow executions,
     creating events that can be used for monitoring and tracking.
+    Uses activities to publish events to avoid sandbox restrictions.
     """
 
     async def execute_workflow(self, input: ExecuteWorkflowInput) -> Any:
@@ -115,47 +134,56 @@ class EventWorkflowInboundInterceptor(WorkflowInboundInterceptor):
         Returns:
             Any: The result of the workflow execution.
         """
-        with workflow.unsafe.sandbox_unrestricted():
-            EventStore.publish_event(
-                Event(
-                    metadata=EventMetadata(workflow_state=WorkflowStates.RUNNING.value),
-                    event_type=EventTypes.APPLICATION_EVENT.value,
-                    event_name=ApplicationEventNames.WORKFLOW_START.value,
-                    data={},
-                )
+
+        # Publish workflow start event via activity
+        try:
+            await workflow.execute_activity(
+                publish_event,
+                {
+                    "metadata": EventMetadata(
+                        workflow_state=WorkflowStates.RUNNING.value
+                    ),
+                    "event_type": EventTypes.APPLICATION_EVENT.value,
+                    "event_name": ApplicationEventNames.WORKFLOW_START.value,
+                    "data": {},
+                },
+                schedule_to_close_timeout=timedelta(seconds=30),
+                retry_policy=RetryPolicy(maximum_attempts=3),
             )
+        except Exception as e:
+            workflow.logger.warning(f"Failed to publish workflow start event: {e}")
+            # Don't fail the workflow if event publishing fails
 
         output = None
+        workflow_state = WorkflowStates.FAILED.value  # Default to failed
+
         try:
             output = await super().execute_workflow(input)
-        except Exception as e:
-            with workflow.unsafe.sandbox_unrestricted():
-                EventStore.publish_event(
-                    Event(
-                        metadata=EventMetadata(
-                            workflow_state=WorkflowStates.FAILED.value
-                        ),
-                        event_type=EventTypes.APPLICATION_EVENT.value,
-                        event_name=ApplicationEventNames.WORKFLOW_END.value,
-                        data={},
-                    ),
+            workflow_state = (
+                WorkflowStates.COMPLETED.value
+            )  # Update to completed on success
+        except Exception:
+            workflow_state = WorkflowStates.FAILED.value  # Keep as failed
+            raise
+        finally:
+            # Always publish workflow end event
+            try:
+                await workflow.execute_activity(
+                    publish_event,
+                    {
+                        "metadata": EventMetadata(workflow_state=workflow_state),
+                        "event_type": EventTypes.APPLICATION_EVENT.value,
+                        "event_name": ApplicationEventNames.WORKFLOW_END.value,
+                        "data": {},
+                    },
+                    schedule_to_close_timeout=timedelta(seconds=30),
+                    retry_policy=RetryPolicy(maximum_attempts=3),
+                )
+            except Exception as publish_error:
+                workflow.logger.warning(
+                    f"Failed to publish workflow end event: {publish_error}"
                 )
-            raise e
 
-        with workflow.unsafe.sandbox_unrestricted():
-            EventStore.publish_event(
-                Event(
-                    metadata=EventMetadata(
-                        workflow_state=WorkflowStates.COMPLETED.value
-                    ),
-                    event_type=EventTypes.APPLICATION_EVENT.value,
-                    event_name=ApplicationEventNames.WORKFLOW_END.value,
-                    data={
-                        "workflow_id": workflow.info().workflow_id,
-                        "workflow_run_id": workflow.info().run_id,
-                    },
-                ),
-            )
         return output
 
 
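Note: the interceptor now tracks the terminal state in a local variable — defaulting to FAILED and flipping to COMPLETED only after execute_workflow returns — so the finally block always publishes WORKFLOW_END with the correct state. A self-contained sketch of that bookkeeping (illustrative only; the real code publishes through the publish_event activity rather than returning a string):

import asyncio

async def body(succeed: bool) -> None:
    if not succeed:
        raise RuntimeError("boom")

async def run_once(succeed: bool) -> str:
    state = "failed"  # default, as in the interceptor above
    try:
        await body(succeed)
        state = "completed"
    except Exception:
        pass  # the real interceptor re-raises after recording the state
    return state  # stands in for the WORKFLOW_END publication in finally

assert asyncio.run(run_once(True)) == "completed"
assert asyncio.run(run_once(False)) == "failed"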
@@ -513,11 +541,14 @@ class TemporalWorkflowClient(WorkflowClient):
             f"Started token refresh loop with dynamic interval (initial: {self._token_refresh_interval}s)"
         )
 
+        # Add the publish_event to the activities list
+        extended_activities = list(activities) + [publish_event]
+
         return Worker(
             self.client,
             task_queue=self.worker_task_queue,
             workflows=workflow_classes,
-            activities=activities,
+            activities=extended_activities,  # Use extended activities list
             workflow_runner=SandboxedWorkflowRunner(
                 restrictions=SandboxRestrictions.default.with_passthrough_modules(
                     *passthrough_modules
@@ -36,7 +36,7 @@ APPLICATION_NAME = os.getenv("ATLAN_APPLICATION_NAME", "default")
 #: Name of the deployment, used to distinguish between different deployments of the same application
 DEPLOYMENT_NAME = os.getenv("ATLAN_DEPLOYMENT_NAME", LOCAL_ENVIRONMENT)
 #: Host address for the application's HTTP server
-APP_HOST = str(os.getenv("ATLAN_APP_HTTP_HOST", "localhost"))
+APP_HOST = str(os.getenv("ATLAN_APP_HTTP_HOST", "0.0.0.0"))
 #: Port number for the application's HTTP server
 APP_PORT = int(os.getenv("ATLAN_APP_HTTP_PORT", "8000"))
 #: Tenant ID for multi-tenant applications
@@ -128,6 +128,11 @@ DEPLOYMENT_OBJECT_STORE_NAME = os.getenv("DEPLOYMENT_OBJECT_STORE_NAME", "object
 UPSTREAM_OBJECT_STORE_NAME = os.getenv("UPSTREAM_OBJECT_STORE_NAME", "objectstore")
 #: Name of the pubsub component in DAPR
 EVENT_STORE_NAME = os.getenv("EVENT_STORE_NAME", "eventstore")
+#: DAPR binding operation for creating resources
+DAPR_BINDING_OPERATION_CREATE = "create"
+#: Version of worker start events used in the application
+WORKER_START_EVENT_VERSION = "v1"
+
 #: Whether to enable Atlan storage upload
 ENABLE_ATLAN_UPLOAD = os.getenv("ENABLE_ATLAN_UPLOAD", "false").lower() == "true"
 # Dapr Client Configuration
@@ -135,6 +140,7 @@ ENABLE_ATLAN_UPLOAD = os.getenv("ENABLE_ATLAN_UPLOAD", "false").lower() == "true
 DAPR_MAX_GRPC_MESSAGE_LENGTH = int(
     os.getenv("DAPR_MAX_GRPC_MESSAGE_LENGTH", "104857600")
 )
+
 #: Name of the deployment secret store component in DAPR
 DEPLOYMENT_SECRET_STORE_NAME = os.getenv(
     "DEPLOYMENT_SECRET_STORE_NAME", "deployment-secret-store"
@@ -0,0 +1,5 @@
+"""Events module for handling application events.
+
+This module provides classes and utilities for handling various types of events
+in the application, including workflow, activity, and worker events.
+"""
@@ -0,0 +1,176 @@
+"""Base event classes and models.
+
+This module contains the base classes and models for all events in the application.
+"""
+
+from abc import ABC
+from enum import Enum
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+from application_sdk.constants import APPLICATION_NAME, WORKER_START_EVENT_VERSION
+
+
+class EventTypes(Enum):
+    """Enumeration of event types."""
+
+    APPLICATION_EVENT = "application_events"
+
+
+class ApplicationEventNames(Enum):
+    """Enumeration of application event names."""
+
+    WORKFLOW_END = "workflow_end"
+    WORKFLOW_START = "workflow_start"
+    ACTIVITY_START = "activity_start"
+    ACTIVITY_END = "activity_end"
+    WORKER_START = "worker_start"
+    WORKER_END = "worker_end"
+    APPLICATION_START = "application_start"
+    APPLICATION_END = "application_end"
+
+
+class WorkflowStates(Enum):
+    """Enumeration of workflow states."""
+
+    UNKNOWN = "unknown"
+    RUNNING = "running"
+    COMPLETED = "completed"
+    FAILED = "failed"
+
+
+class EventMetadata(BaseModel):
+    """Metadata for events.
+
+    Attributes:
+        application_name: Name of the application the event belongs to.
+        created_timestamp: Timestamp when the event was created.
+        workflow_type: Type of the workflow.
+        workflow_id: ID of the workflow.
+        workflow_run_id: Run ID of the workflow.
+        workflow_state: State of the workflow.
+        activity_type: Type of the activity.
+        activity_id: ID of the activity.
+        attempt: Attempt number for the activity.
+        topic_name: Name of the topic for the event.
+    """
+
+    application_name: str = Field(init=True, default=APPLICATION_NAME)
+    created_timestamp: int = Field(init=True, default=0)
+
+    # Workflow information
+    workflow_type: str | None = Field(init=True, default=None)
+    workflow_id: str | None = Field(init=True, default=None)
+    workflow_run_id: str | None = Field(init=True, default=None)
+    workflow_state: str | None = Field(init=True, default=WorkflowStates.UNKNOWN.value)
+
+    # Activity information (Only when in an activity flow)
+    activity_type: str | None = Field(init=True, default=None)
+    activity_id: str | None = Field(init=True, default=None)
+    attempt: int | None = Field(init=True, default=None)
+
+    topic_name: str | None = Field(init=False, default=None)
+
+
+class EventFilter(BaseModel):
+    """Filter for events.
+
+    Attributes:
+        path: Path to filter on.
+        operator: Operator to use for filtering.
+        value: Value to filter by.
+    """
+
+    path: str
+    operator: str
+    value: str
+
+
+class Consumes(BaseModel):
+    """Model for event consumption configuration.
+
+    Attributes:
+        event_id: ID of the event.
+        event_type: Type of the event.
+        event_name: Name of the event.
+        version: Version of the event.
+        filters: List of filters to apply.
+    """
+
+    event_id: str = Field(alias="eventId")
+    event_type: str = Field(alias="eventType")
+    event_name: str = Field(alias="eventName")
+    version: str = Field()
+    filters: List[EventFilter] = Field(init=True, default=[])
+
+
+class EventRegistration(BaseModel):
+    """Model for event registration.
+
+    Attributes:
+        consumes: List of events to consume.
+        produces: List of events to produce.
+    """
+
+    consumes: List[Consumes] = Field(init=True, default=[])
+    produces: List[Dict[str, Any]] = Field(init=True, default=[])
+
+
+class Event(BaseModel, ABC):
+    """Base class for all events.
+
+    Attributes:
+        metadata: Metadata for the event.
+        event_type: Type of the event.
+        event_name: Name of the event.
+        data: Data payload of the event.
+    """
+
+    metadata: EventMetadata = Field(init=True, default_factory=EventMetadata)
+
+    event_type: str
+    event_name: str
+
+    data: Dict[str, Any]
+
+    def get_topic_name(self):
+        """Get the topic name for this event.
+
+        Returns:
+            str: The topic name.
+        """
+        return self.event_type
+
+    class Config:
+        extra = "allow"
+
+
+class WorkerStartEventData(BaseModel):
+    """Model for worker creation event data.
+
+    This model represents the data structure used when publishing worker creation events.
+    It contains information about the worker configuration and environment.
+
+    Attributes:
+        application_name: Name of the application the worker belongs to.
+        task_queue: Task queue name for the worker.
+        namespace: Temporal namespace for the worker.
+        host: Host address of the Temporal server.
+        port: Port number of the Temporal server.
+        connection_string: Connection string for the Temporal server.
+        max_concurrent_activities: Maximum number of concurrent activities.
+        workflow_count: Number of workflow classes registered.
+        activity_count: Number of activity functions registered.
+    """
+
+    version: str = WORKER_START_EVENT_VERSION
+    application_name: str
+    task_queue: str
+    namespace: str
+    host: str
+    port: str
+    connection_string: str
+    max_concurrent_activities: Optional[int]
+    workflow_count: int
+    activity_count: int
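For orientation, a usage sketch assembled only from the names defined in the new file above (not part of the diff itself); note that get_topic_name() now returns the event type directly, where the old eventstore model appended "_topic":

from application_sdk.events.models import (
    ApplicationEventNames,
    Event,
    EventMetadata,
    EventTypes,
    WorkflowStates,
)

# Construct an application event the same way the interceptors above do.
event = Event(
    metadata=EventMetadata(workflow_state=WorkflowStates.RUNNING.value),
    event_type=EventTypes.APPLICATION_EVENT.value,
    event_name=ApplicationEventNames.WORKFLOW_START.value,
    data={},
)

# Topic routing is derived straight from the event type.
assert event.get_topic_name() == "application_events"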
@@ -51,7 +51,12 @@ class ParquetInput(Input):
         Returns:
             Optional[str]: Path to the downloaded local file.
         """
-        parquet_files = glob.glob(local_file_path)
+        # if the path is a directory, then check if the directory has any parquet files
+        parquet_files = []
+        if os.path.isdir(local_file_path):
+            parquet_files = glob.glob(os.path.join(local_file_path, "*.parquet"))
+        else:
+            parquet_files = glob.glob(local_file_path)
         if not parquet_files:
             if self.input_prefix:
                 logger.info(
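This fix addresses a glob.glob subtlety: a bare directory path contains no wildcard, so globbing it matches the directory itself — a truthy result that skipped the "no parquet files" fallback. A small runnable demonstration (temporary paths are illustrative):

import glob
import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    open(os.path.join(d, "part-0.parquet"), "w").close()

    # Old behaviour: globbing the directory path returns the directory
    # itself, so parquet_files was non-empty but held no parquet files.
    assert glob.glob(d) == [d]

    # New behaviour: a directory is expanded to its *.parquet members.
    assert len(glob.glob(os.path.join(d, "*.parquet"))) == 1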
@@ -162,10 +167,9 @@ class ParquetInput(Input):
                 await self.download_files(path)
                 yield daft.read_parquet(path)
             else:
-                path = f"{self.path}/*.parquet"
-                if self.input_prefix and path:
-                    await self.download_files(path)
-                yield daft.read_parquet(path)
+                if self.path and self.input_prefix:
+                    await self.download_files(self.path)
+                yield daft.read_parquet(f"{self.path}/*.parquet")
 
         except Exception as error:
             logger.error(
@@ -7,6 +7,7 @@ from typing import Any, Dict
 
 from dapr.clients import DaprClient
 
+from application_sdk.common.dapr_utils import is_component_registered
 from application_sdk.constants import (
     DEPLOYMENT_NAME,
     DEPLOYMENT_SECRET_PATH,
22
23
  class SecretStoreInput:
23
24
  @classmethod
24
25
  def get_deployment_secret(cls) -> Dict[str, Any]:
25
- """Get deployment config with caching."""
26
+ """Get deployment config from the deployment secret store.
27
+
28
+ Validates that the deployment secret store component is registered
29
+ before attempting to fetch secrets to prevent errors.
30
+
31
+ Returns:
32
+ Dict[str, Any]: Deployment configuration data, or empty dict if
33
+ component is unavailable or fetch fails.
34
+ """
35
+ if not is_component_registered(DEPLOYMENT_SECRET_STORE_NAME):
36
+ logger.warning(
37
+ f"Deployment secret store component '{DEPLOYMENT_SECRET_STORE_NAME}' is not registered"
38
+ )
39
+ return {}
40
+
26
41
  try:
27
42
  return cls.get_secret(DEPLOYMENT_SECRET_PATH, DEPLOYMENT_SECRET_STORE_NAME)
28
43
  except Exception as e:
@@ -16,6 +16,7 @@ from dapr.clients import DaprClient
 from pydantic import BaseModel
 
 from application_sdk.constants import (
+    DAPR_BINDING_OPERATION_CREATE,
     DEPLOYMENT_OBJECT_STORE_NAME,
     ENABLE_OBSERVABILITY_DAPR_SINK,
     LOG_FILE_NAME,
@@ -449,7 +450,7 @@ class AtlanObservability(Generic[T], ABC):
         with DaprClient() as client:
             client.invoke_binding(
                 binding_name=DEPLOYMENT_OBJECT_STORE_NAME,
-                operation="create",
+                operation=DAPR_BINDING_OPERATION_CREATE,
                 data=file_content,
                 binding_metadata=metadata,
             )
@@ -1,103 +1,29 @@
 """Event store module for handling application events.
 
-This module provides classes and utilities for handling various types of events
-in the application, including workflow and activity events.
+This module provides the EventStore class for publishing application events
+to a pub/sub system with automatic fallback to HTTP binding.
 """
 
 import json
-from abc import ABC
 from datetime import datetime
-from enum import Enum
-from typing import Any, Dict, List
 
 from dapr import clients
-from pydantic import BaseModel, Field
 from temporalio import activity, workflow
 
+from application_sdk.clients.atlan_auth import AtlanAuthClient
 from application_sdk.common.dapr_utils import is_component_registered
-from application_sdk.constants import APPLICATION_NAME, EVENT_STORE_NAME
+from application_sdk.constants import (
+    APPLICATION_NAME,
+    DAPR_BINDING_OPERATION_CREATE,
+    EVENT_STORE_NAME,
+)
+from application_sdk.events.models import Event, EventMetadata, WorkflowStates
 from application_sdk.observability.logger_adaptor import get_logger
 
 logger = get_logger(__name__)
 activity.logger = logger
 
 
-class EventTypes(Enum):
-    APPLICATION_EVENT = "application_event"
-
-
-class ApplicationEventNames(Enum):
-    WORKFLOW_END = "workflow_end"
-    WORKFLOW_START = "workflow_start"
-    ACTIVITY_START = "activity_start"
-    ACTIVITY_END = "activity_end"
-
-
-class WorkflowStates(Enum):
-    UNKNOWN = "unknown"
-    RUNNING = "running"
-    COMPLETED = "completed"
-    FAILED = "failed"
-
-
-class EventMetadata(BaseModel):
-    application_name: str = Field(init=True, default=APPLICATION_NAME)
-    event_published_client_timestamp: int = Field(init=True, default=0)
-
-    # Workflow information
-    workflow_type: str | None = Field(init=True, default=None)
-    workflow_id: str | None = Field(init=True, default=None)
-    workflow_run_id: str | None = Field(init=True, default=None)
-    workflow_state: str | None = Field(init=True, default=WorkflowStates.UNKNOWN.value)
-
-    # Activity information (Only when in an activity flow)
-    activity_type: str | None = Field(init=True, default=None)
-    activity_id: str | None = Field(init=True, default=None)
-    attempt: int | None = Field(init=True, default=None)
-
-    topic_name: str | None = Field(init=False, default=None)
-
-
-class EventFilter(BaseModel):
-    path: str
-    operator: str
-    value: str
-
-
-class Consumes(BaseModel):
-    event_id: str = Field(alias="eventId")
-    event_type: str = Field(alias="eventType")
-    event_name: str = Field(alias="eventName")
-    version: str = Field()
-    filters: List[EventFilter] = Field(init=True, default=[])
-
-
-class EventRegistration(BaseModel):
-    consumes: List[Consumes] = Field(init=True, default=[])
-    produces: List[Dict[str, Any]] = Field(init=True, default=[])
-
-
-class Event(BaseModel, ABC):
-    """Base class for all events.
-
-    Attributes:
-        event_type (str): Type of the event.
-    """
-
-    metadata: EventMetadata = Field(init=True, default_factory=EventMetadata)
-
-    event_type: str
-    event_name: str
-
-    data: Dict[str, Any]
-
-    def get_topic_name(self):
-        return self.event_type + "_topic"
-
-    class Config:
-        extra = "allow"
-
-
 class EventStore:
     """Event store for publishing application events.
 
@@ -110,15 +36,12 @@ class EventStore:
 
         Args:
             event (Event): Event data.
-
         """
         if not event.metadata:
             event.metadata = EventMetadata()
 
         event.metadata.application_name = APPLICATION_NAME
-        event.metadata.event_published_client_timestamp = int(
-            datetime.now().timestamp()
-        )
+        event.metadata.created_timestamp = int(datetime.now().timestamp())
         event.metadata.topic_name = event.get_topic_name()
 
         try:
@@ -146,15 +69,11 @@
         return event
 
     @classmethod
-    def publish_event(cls, event: Event, enrich_metadata: bool = True):
-        """Create a new generic event.
+    async def publish_event(cls, event: Event):
+        """Publish event with automatic fallback between pub/sub and HTTP binding.
 
         Args:
-            event (Event): Event data.
-            topic_name (str, optional): Topic name to publish the event to. Defaults to TOPIC_NAME.
-
-        Example:
-            >>> EventStore.create_generic_event(Event(event_type="test", data={"test": "test"}))
+            event (Event): Event data to publish.
         """
         if not is_component_registered(EVENT_STORE_NAME):
             logger.warning(
@@ -162,16 +81,29 @@
             )
             return
         try:
-            if enrich_metadata:
-                event = cls.enrich_event_metadata(event)
+            event = cls.enrich_event_metadata(event)
+
+            payload = json.dumps(event.model_dump(mode="json"))
+
+            # Prepare binding metadata with auth token for HTTP bindings
+            binding_metadata = {"content-type": "application/json"}
+
+            # Add auth token - HTTP bindings will use it, others will ignore it
+            auth_client = AtlanAuthClient()
+            binding_metadata.update(await auth_client.get_authenticated_headers())
 
             with clients.DaprClient() as client:
-                client.publish_event(
-                    pubsub_name=EVENT_STORE_NAME,
-                    topic_name=event.get_topic_name(),
-                    data=json.dumps(event.model_dump(mode="json")),
-                    data_content_type="application/json",
+                client.invoke_binding(
+                    binding_name=EVENT_STORE_NAME,
+                    operation=DAPR_BINDING_OPERATION_CREATE,
+                    data=payload,
+                    binding_metadata=binding_metadata,
+                )
+                logger.info(
+                    f"Published event via binding on topic: {event.get_topic_name()}"
                 )
-            logger.info(f"Published event to {event.get_topic_name()}")
         except Exception as e:
-            logger.error(f"Error publishing event to {event.get_topic_name()}: {e}")
+            logger.error(
+                f"Failed to publish event on topic {event.get_topic_name()}: {e}",
+                exc_info=True,
+            )
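Callers should note that publish_event is now a coroutine and the enrich_metadata flag is gone (enrichment always runs). A hedged usage sketch; actually running it requires a Dapr sidecar with the eventstore component registered, otherwise the call logs a warning and returns:

import asyncio

from application_sdk.events.models import ApplicationEventNames, Event, EventTypes
from application_sdk.outputs.eventstore import EventStore

async def main() -> None:
    # publish_event must now be awaited.
    await EventStore.publish_event(
        Event(
            event_type=EventTypes.APPLICATION_EVENT.value,
            event_name=ApplicationEventNames.APPLICATION_START.value,
            data={},
        )
    )

# asyncio.run(main())  # no-op with a warning if the component is absent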
@@ -417,7 +417,7 @@ class APIServer(ServerInterface):
         subscriptions.append(
             {
                 "pubsubname": EVENT_STORE_NAME,
-                "topic": event_trigger.event_type + "_topic",
+                "topic": event_trigger.event_type,
                 "routes": {
                     "rules": [
                         {
@@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional, Type
 
 from pydantic import BaseModel, Field, RootModel
 
-from application_sdk.outputs.eventstore import Event, EventFilter
+from application_sdk.events.models import Event, EventFilter
 from application_sdk.workflows import WorkflowInterface
 
 
@@ -0,0 +1,28 @@
+from hypothesis import strategies as st
+
+# Strategy for generating safe file path components
+safe_path_strategy = st.text(
+    alphabet=st.characters(),
+)
+
+# Strategy for generating file names
+file_name_strategy = st.builds(lambda name: f"{name}.parquet", name=safe_path_strategy)
+
+# Strategy for generating lists of file names
+file_names_strategy = st.lists(file_name_strategy, unique=True)
+
+# Strategy for generating input prefixes
+input_prefix_strategy = safe_path_strategy
+
+# Strategy for generating chunk sizes
+chunk_size_strategy = st.one_of(st.none(), st.integers(min_value=1, max_value=1000000))
+
+# Strategy for generating complete ParquetInput configurations
+parquet_input_config_strategy = st.fixed_dictionaries(
+    {
+        "path": safe_path_strategy,
+        "chunk_size": chunk_size_strategy,
+        "input_prefix": st.one_of(st.none(), input_prefix_strategy),
+        "file_names": st.one_of(st.none(), file_names_strategy),
+    }
+)
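A hedged sketch of how these strategies might be consumed in a property-based test (the test itself is illustrative, not part of the package; st.fixed_dictionaries guarantees exactly the four keys asserted below):

from hypothesis import given

from application_sdk.test_utils.hypothesis.strategies.inputs.parquet_input import (
    parquet_input_config_strategy,
)

@given(config=parquet_input_config_strategy)
def test_parquet_input_config_shape(config):
    # Every generated config carries exactly the four expected keys.
    assert set(config) == {"path", "chunk_size", "input_prefix", "file_names"}
    assert config["chunk_size"] is None or config["chunk_size"] >= 1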
@@ -2,7 +2,7 @@ import json
 
 from hypothesis import strategies as st
 
-from application_sdk.outputs.eventstore import Event
+from application_sdk.events.models import Event
 
 # Strategy for generating auth credentials
 auth_credentials_strategy = st.fixed_dictionaries(
@@ -2,4 +2,4 @@
 Version information for the application_sdk package.
 """
 
-__version__ = "0.1.1rc30"
+__version__ = "0.1.1rc32"
application_sdk/worker.py CHANGED
@@ -15,10 +15,18 @@ from temporalio.worker import Worker as TemporalWorker
 
 from application_sdk.clients.workflow import WorkflowClient
 from application_sdk.constants import MAX_CONCURRENT_ACTIVITIES
+from application_sdk.events.models import (
+    ApplicationEventNames,
+    Event,
+    EventTypes,
+    WorkerStartEventData,
+)
 from application_sdk.observability.logger_adaptor import get_logger
+from application_sdk.outputs.eventstore import EventStore
 
 logger = get_logger(__name__)
 
+
 if sys.platform not in ("win32", "cygwin"):
     try:
         import uvloop
@@ -109,6 +117,21 @@ class Worker:
             thread_name_prefix="activity-pool-",
         )
 
+        # Store event data for later publishing
+        self._worker_creation_event_data = None
+        if self.workflow_client:
+            self._worker_creation_event_data = WorkerStartEventData(
+                application_name=self.workflow_client.application_name,
+                task_queue=self.workflow_client.worker_task_queue,
+                namespace=self.workflow_client.namespace,
+                host=self.workflow_client.host,
+                port=self.workflow_client.port,
+                connection_string=self.workflow_client.get_connection_string(),
+                max_concurrent_activities=max_concurrent_activities,
+                workflow_count=len(workflow_classes),
+                activity_count=len(workflow_activities),
+            )
+
     async def start(self, daemon: bool = True, *args: Any, **kwargs: Any) -> None:
         """Start the Temporal worker.
 
@@ -143,6 +166,15 @@ class Worker:
         if not self.workflow_client:
             raise ValueError("Workflow client is not set")
 
+        if self._worker_creation_event_data:
+            worker_creation_event = Event(
+                event_type=EventTypes.APPLICATION_EVENT.value,
+                event_name=ApplicationEventNames.WORKER_START.value,
+                data=self._worker_creation_event_data.model_dump(),
+            )
+
+            await EventStore.publish_event(worker_creation_event)
+
         try:
             worker = self.workflow_client.create_worker(
                 activities=self.workflow_activities,
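For reference, a sketch of the payload the new WORKER_START event carries. The field values below are illustrative, but the field names and the "v1" version default come from events/models.py above:

from application_sdk.events.models import WorkerStartEventData

data = WorkerStartEventData(
    application_name="default",
    task_queue="atlan-task-queue",
    namespace="default",
    host="localhost",
    port="7233",
    connection_string="localhost:7233",
    max_concurrent_activities=5,
    workflow_count=1,
    activity_count=3,
)

assert data.version == "v1"  # WORKER_START_EVENT_VERSION
payload = data.model_dump()  # becomes the event's data field, as in start()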
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: atlan-application-sdk
-Version: 0.1.1rc30
+Version: 0.1.1rc32
 Summary: Atlan Application SDK is a Python library for developing applications on the Atlan Platform
 Project-URL: Repository, https://github.com/atlanhq/application-sdk
 Project-URL: Documentation, https://github.com/atlanhq/application-sdk/README.md
@@ -1,7 +1,7 @@
 application_sdk/__init__.py,sha256=2e2mvmLJ5dxmJGPELtb33xwP-j6JMdoIuqKycEn7hjg,151
-application_sdk/constants.py,sha256=JMJdTVjIJZX1XrvxISAcPh3YQ3aYKBTSQ2thqiF8BR4,9304
-application_sdk/version.py,sha256=JWzagPnLJJesFA2rkpWvghXzlW-fdaQlCWDoL0vAsJ4,88
-application_sdk/worker.py,sha256=2fLjuKNJafhaQXrHzmxXYO22F4ZSc0igMjoxXVNBFfk,6167
+application_sdk/constants.py,sha256=_Fmk9PgpM68chPDHHkgrs4Zg2KK4UCqqg7Oj9_u3WVo,9486
+application_sdk/version.py,sha256=ifHNi-zFUduvn5OEE1VTXtr426-rLzTEQ6lB9WnnQbk,88
+application_sdk/worker.py,sha256=dZLxPkAieCSw7XEWxzL-FRk0QAZm7vXQBICjZODy3B4,7488
 application_sdk/activities/__init__.py,sha256=EH5VTHcfGykIX7V1HsG0J1Z-1FbJEXTQOET0HdzFDjU,9519
 application_sdk/activities/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/activities/common/models.py,sha256=305WdrZB7EAtCOAU_q9hMw81XowUdCeuFs9zfzb-MHQ,1196
@@ -11,13 +11,13 @@ application_sdk/activities/metadata_extraction/rest.py,sha256=47DEQpj8HBSa-_TImW
 application_sdk/activities/metadata_extraction/sql.py,sha256=3G9_KGKyS0kTBN-nOKdvNSpTqwFv0nUectsqhMewpnU,22594
 application_sdk/activities/query_extraction/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/activities/query_extraction/sql.py,sha256=xC-dC_so3D9yY88lSL2W8Q8CfDRjiIrF-OHKbITFgd0,21271
-application_sdk/application/__init__.py,sha256=wCaAsoQ-wIYtfeG6hNwwLi8PcufjwMBjmfp4AxWqSyw,7558
+application_sdk/application/__init__.py,sha256=sUmng7msiUCPmQu1YY9gRvzXWviVKABVjKT3diPC6GM,7553
 application_sdk/application/metadata_extraction/sql.py,sha256=ohpV4qZ92uKRlH7I_8G67ocnWkZJAZCU_7XdvqYPiN4,7966
 application_sdk/clients/__init__.py,sha256=C9T84J7V6ZumcoWJPAxdd3tqSmbyciaGBJn-CaCCny0,1341
+application_sdk/clients/atlan.py,sha256=f2-Uk5KiPIDJEhGkfYctA_f3CwoVB_mWNBMVvxeLuY4,2684
 application_sdk/clients/atlan_auth.py,sha256=MQznmvVKrlOT_Tp232W4UrOupRrx9Dx9zQm3n1R7kD8,8938
-application_sdk/clients/atlan_client.py,sha256=f2-Uk5KiPIDJEhGkfYctA_f3CwoVB_mWNBMVvxeLuY4,2684
 application_sdk/clients/sql.py,sha256=tW89SHuuWdU5jv8lDUP5AUCEpR2CF_5TyUvYDCBHses,17880
-application_sdk/clients/temporal.py,sha256=kiB9W2e3x6gGtFC9e7vqTIBNGbt7nyS6p7j2hLKdsgI,22364
+application_sdk/clients/temporal.py,sha256=jyU2MYGZXkTn0Gqy_qvYg0iSc2bKz4snflsU_XcDsfk,23662
 application_sdk/clients/utils.py,sha256=zLFOJbTr_6TOqnjfVFGY85OtIXZ4FQy_rquzjaydkbY,779
 application_sdk/clients/workflow.py,sha256=6bSqmA3sNCk9oY68dOjBUDZ9DhNKQxPD75qqE0cfldc,6104
 application_sdk/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -42,26 +42,28 @@ application_sdk/docgen/models/manifest/section.py,sha256=4-cqJjSojQ31QiNXI-3koHc
 application_sdk/docgen/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/docgen/parsers/directory.py,sha256=8Kk2sjb-0l2wLO_njdlcuHjv5akoNgmf-FmaDSaE4WM,7751
 application_sdk/docgen/parsers/manifest.py,sha256=3NP-dBTpHAUQa477usMIDaKSb_9xfLE8G3RX0T1Bq2s,3318
+application_sdk/events/__init__.py,sha256=OcbVWDF4ZKRTJXK9UaFVtYEwu-3DHE77S-Sn6jNafUs,204
+application_sdk/events/models.py,sha256=7Esqp3WlbriT2EqT4kNiY_sHtRXRPLj27b8SbeC5Sb0,5121
 application_sdk/handlers/__init__.py,sha256=U7kKwVWK0FZz1uIJ2ANN0C5tD83k_9Nyz0ns6ttr92g,1152
 application_sdk/handlers/sql.py,sha256=oeB-sgWwPYo31xaD87TyMc0h51Sary1F-CmhExt9_Pk,16100
 application_sdk/inputs/__init__.py,sha256=_d-cUhcDyoJTJR3PdQkC831go6VDw9AM6Bg7-qm3NHI,1900
 application_sdk/inputs/iceberg.py,sha256=xiv1kNtVx1k0h3ZJbJeXjZwdfBGSy9j9orYP_AyCYlI,2756
 application_sdk/inputs/json.py,sha256=J1CVz0YGQHDyq840TyoBHE7Baua2yIFHzsrybiZbeWk,6079
 application_sdk/inputs/objectstore.py,sha256=uOJW0uB3FrDmnyHFhAd23QOq3MKrAzhYdKCszdpMeF8,8219
-application_sdk/inputs/parquet.py,sha256=Hsi6Nz_KwxFMB6DcHSQov5y_hRkoeN7e4xfpYwogveo,6346
-application_sdk/inputs/secretstore.py,sha256=WPGvsPMLUaARvXA6JSa4uHp7tQcCW4NMIIUDQyM3F-I,3946
+application_sdk/inputs/parquet.py,sha256=j1O5uYZ2ok28iPYFpMoltKzJTDGmht7qoEjRKQ48P8E,6590
+application_sdk/inputs/secretstore.py,sha256=0H81CkbKj1KTU_wlPofpsAvPHs9YaOuKbRBmLRdiiNw,4583
 application_sdk/inputs/sql_query.py,sha256=1EREgea6kKNaMIyX2HLJgbJ07rtAgLasd9NyvDcdZok,10636
 application_sdk/inputs/statestore.py,sha256=ORWnv8ZCqC1wT4vlW4v5EemJT4oQ3t_DlpjKDAgTRTs,3117
 application_sdk/observability/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/observability/logger_adaptor.py,sha256=WTqnNg78W2SRGOQVhELVLn6KMRsurkG1kc7essL08Lk,29529
 application_sdk/observability/metrics_adaptor.py,sha256=4TYPNn38zLeqxwf7cUbe8wh_zwQlr-nyiXjJsiEhTEM,16445
-application_sdk/observability/observability.py,sha256=jEascguTQMFLzEfofvwIgmj6Mv_OWc7WeaSTyR4ftkY,24474
+application_sdk/observability/observability.py,sha256=DP0I4bHyg3TA4hxCqDFy2IiRmBGOpZ7449m7BUoc_RA,24530
 application_sdk/observability/traces_adaptor.py,sha256=0eQJPN-tYA_dV8D3uEa5ZiX9g12NDuLnPaFuQMVDdL0,18242
 application_sdk/observability/utils.py,sha256=MKEpT0WYtpATUgLgJDkGQaAP_t-jpDYMUKDfEvr8Phg,2448
 application_sdk/observability/decorators/observability_decorator.py,sha256=JNrWNXT5W4klmlAc5b8C3_VBjDu0PI64W2ptr7LMzk4,8110
 application_sdk/outputs/__init__.py,sha256=HX8VcN22xyrkoRWdjQj8TrC5dEUG7cPzOcvJhlprqAs,8415
 application_sdk/outputs/atlan_storage.py,sha256=HQLbuyOZQC-GxYAiCVJakIJizTWy926tdMGOHvaBlD8,6029
-application_sdk/outputs/eventstore.py,sha256=MY9x_2A8Gempa1B3GQJ-C-B4dVrqO5u4Sfpla-2u91Q,5827
+application_sdk/outputs/eventstore.py,sha256=1Im6dTQmIqyz6x5tExLFQpwdLrlReYZ1pktHk-tyLd8,4094
 application_sdk/outputs/iceberg.py,sha256=IGtj5WDgqLu6vzDEvw5DLsKsjm29Krto3AHvWpemr0A,5311
 application_sdk/outputs/json.py,sha256=xF-8mY3BZRRejip4s9npIUuFaAxgFmBQVaLMkrI_iCI,14117
 application_sdk/outputs/objectstore.py,sha256=TJvgfkJpGRK129ttxY7qRYJ7ASKZA4R6-0BUA3Lk7mc,4450
@@ -69,8 +71,8 @@ application_sdk/outputs/parquet.py,sha256=A2EnEx1zWjaXk10u3eJusmWxGxt8WR7CHXDaJg
 application_sdk/outputs/secretstore.py,sha256=JS9vUzb11leDpcMQSCnLJuE9Ww-9G3wMvCdUKBPaw9I,1342
 application_sdk/outputs/statestore.py,sha256=XiEag2e9WW3_D3xbWQGoNrHiFJz9916qcIvhrROX8_8,3999
 application_sdk/server/__init__.py,sha256=KTqE1YPw_3WDVMWatJUuf9OOiobLM2K5SMaBrI62sCo,1568
-application_sdk/server/fastapi/__init__.py,sha256=MnyLA75XP0_k3_yGp9nw9CTxw4Qa1DIZIs__BgzqJWI,27781
-application_sdk/server/fastapi/models.py,sha256=mFFxteDS3ZYXaq7Apor_Meo5WNxTCxqdrMkmTKQjvP0,6687
+application_sdk/server/fastapi/__init__.py,sha256=1RNP3170Es_GcxVSweqkl_iz4Sd0Evi8bs6fuVuVTiA,27770
+application_sdk/server/fastapi/models.py,sha256=QarZthIq-ileNua-SCh6AldRuEjRCZk-C7QQZ5K6STE,6682
 application_sdk/server/fastapi/utils.py,sha256=2XI4DylhRQsukhX67lpAzRNCHeFCSpbuNd7TlE2IBJA,1164
 application_sdk/server/fastapi/middleware/logmiddleware.py,sha256=CxcPtDmCbSfSZ8RyI09nIshVIbCokyyA9bByQJ2G_ns,2545
 application_sdk/server/fastapi/middleware/metrics.py,sha256=5ddHAIg5sT-u9tB_HHMGL3Cfu2g1rm9z7ksienIr9ks,1563
@@ -97,11 +99,12 @@ application_sdk/test_utils/hypothesis/strategies/handlers/sql/sql_metadata.py,sh
 application_sdk/test_utils/hypothesis/strategies/handlers/sql/sql_preflight.py,sha256=e9uo6Bx5w_ZAEu6bDTWbMbmzqB0MYl2dH-JlXg3bkV8,2648
 application_sdk/test_utils/hypothesis/strategies/inputs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/test_utils/hypothesis/strategies/inputs/json_input.py,sha256=9mPI585J4tfQTZiM_uoFXCq7qrLML7jwH5eDm_mzvm4,749
+application_sdk/test_utils/hypothesis/strategies/inputs/parquet_input.py,sha256=qOi5En011KiOWAusP6dCvzr1rtC_sc2IkWfj7hWvSh0,950
 application_sdk/test_utils/hypothesis/strategies/outputs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/test_utils/hypothesis/strategies/outputs/json_output.py,sha256=p9wotUJwc-Wmm54_qVG5Ivp_mgl7YTeAcQfC6RXlxCc,1835
 application_sdk/test_utils/hypothesis/strategies/outputs/statestore.py,sha256=gmYBwePNoSI_pl2WTXOClgkruzRwkOX_1SmBaUTha0c,2903
 application_sdk/test_utils/hypothesis/strategies/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-application_sdk/test_utils/hypothesis/strategies/server/fastapi/__init__.py,sha256=Zmk39gSxpnOifKqBtCvaYlUOjslE2vRARUx8dMsAIrs,2653
+application_sdk/test_utils/hypothesis/strategies/server/fastapi/__init__.py,sha256=1SOg5LNYAfE4xBj49_m530cktlpG09t6v-Shah6JqEA,2648
 application_sdk/test_utils/scale_data_generator/README.md,sha256=s3oNDuKOKNUT9WxM-r0Ve1VPBl0cRM3Po7I3Vi-PGxM,1693
 application_sdk/test_utils/scale_data_generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/test_utils/scale_data_generator/config_loader.py,sha256=ISk-fEIDlAJozhoNWwnl3Gp3IYnDDrRS-5SdiLYta24,2528
@@ -131,8 +134,8 @@ application_sdk/workflows/metadata_extraction/__init__.py,sha256=jHUe_ZBQ66jx8bg
 application_sdk/workflows/metadata_extraction/sql.py,sha256=_NhszxIgmcQI6lVpjJoyJRFLwPYvJw1Dyqox_m9K2RA,11947
 application_sdk/workflows/query_extraction/__init__.py,sha256=n066_CX5RpJz6DIxGMkKS3eGSRg03ilaCtsqfJWQb7Q,117
 application_sdk/workflows/query_extraction/sql.py,sha256=kT_JQkLCRZ44ZpaC4QvPL6DxnRIIVh8gYHLqRbMI-hA,4826
-atlan_application_sdk-0.1.1rc30.dist-info/METADATA,sha256=iT4sH5JQ7eYpgokSzDzPqZIuMVJwMZxmUPxNK7iSGWo,5473
-atlan_application_sdk-0.1.1rc30.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-atlan_application_sdk-0.1.1rc30.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-atlan_application_sdk-0.1.1rc30.dist-info/licenses/NOTICE,sha256=A-XVVGt3KOYuuMmvSMIFkg534F1vHiCggEBp4Ez3wGk,1041
-atlan_application_sdk-0.1.1rc30.dist-info/RECORD,,
+atlan_application_sdk-0.1.1rc32.dist-info/METADATA,sha256=bN4buUiRRKto73NmQ8JsMnKQrWS3CqJMr5m1huc-4u8,5473
+atlan_application_sdk-0.1.1rc32.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+atlan_application_sdk-0.1.1rc32.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+atlan_application_sdk-0.1.1rc32.dist-info/licenses/NOTICE,sha256=A-XVVGt3KOYuuMmvSMIFkg534F1vHiCggEBp4Ez3wGk,1041
+atlan_application_sdk-0.1.1rc32.dist-info/RECORD,,