atlan-application-sdk 2.1.0__py3-none-any.whl → 2.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,6 +15,7 @@ Example:
15
15
 
16
16
  import os
17
17
  from abc import ABC
18
+ from datetime import datetime, timedelta
18
19
  from typing import Any, Dict, Generic, Optional, TypeVar
19
20
 
20
21
  from pydantic import BaseModel
@@ -62,6 +63,7 @@ class ActivitiesState(BaseModel, Generic[HandlerType]):
62
63
  model_config = {"arbitrary_types_allowed": True}
63
64
  handler: Optional[HandlerType] = None
64
65
  workflow_args: Optional[Dict[str, Any]] = None
66
+ last_updated_timestamp: Optional[datetime] = None
65
67
 
66
68
 
67
69
  ActivitiesStateType = TypeVar("ActivitiesStateType", bound=ActivitiesState)
@@ -113,12 +115,15 @@ class ActivitiesInterface(ABC, Generic[ActivitiesStateType]):
113
115
  Note:
114
116
  The workflow ID is automatically retrieved from the current activity context.
115
117
  If no state exists for the current workflow, a new one will be created.
118
+ This method also updates the last_updated_timestamp to enable time-based
119
+ state refresh functionality.
116
120
  """
117
121
  workflow_id = get_workflow_id()
118
122
  if not self._state.get(workflow_id):
119
123
  self._state[workflow_id] = ActivitiesState()
120
124
 
121
125
  self._state[workflow_id].workflow_args = workflow_args
126
+ self._state[workflow_id].last_updated_timestamp = datetime.now()
122
127
 
123
128
  async def _get_state(self, workflow_args: Dict[str, Any]) -> ActivitiesStateType:
124
129
  """Retrieve the state for the current workflow.
@@ -142,6 +147,15 @@ class ActivitiesInterface(ABC, Generic[ActivitiesStateType]):
142
147
  workflow_id = get_workflow_id()
143
148
  if workflow_id not in self._state:
144
149
  await self._set_state(workflow_args)
150
+
151
+ else:
152
+ current_timestamp = datetime.now()
153
+ # Refresh the cached state via _set_state() if more than 15 minutes have elapsed since the last update
154
+ last_updated = self._state[workflow_id].last_updated_timestamp
155
+ if last_updated and current_timestamp - last_updated > timedelta(
156
+ minutes=15
157
+ ):
158
+ await self._set_state(workflow_args)
145
159
  return self._state[workflow_id]
146
160
  except OrchestratorError as e:
147
161
  logger.error(
@@ -1,4 +1,5 @@
1
1
  import os
2
+ from datetime import datetime
2
3
  from typing import (
3
4
  TYPE_CHECKING,
4
5
  Any,
@@ -60,6 +61,7 @@ class BaseSQLMetadataExtractionActivitiesState(ActivitiesState):
60
61
  sql_client: Optional[BaseSQLClient] = None
61
62
  handler: Optional[BaseSQLHandler] = None
62
63
  transformer: Optional[TransformerInterface] = None
64
+ last_updated_timestamp: Optional[datetime] = None
63
65
 
64
66
 
65
67
  class BaseSQLMetadataExtractionActivities(ActivitiesInterface):
@@ -149,13 +151,30 @@ class BaseSQLMetadataExtractionActivities(ActivitiesInterface):
149
151
 
150
152
  Args:
151
153
  workflow_args (Dict[str, Any]): Arguments passed to the workflow.
154
+
155
+ Note:
156
+ This method creates and configures the new SQL client before closing
157
+ the old one to ensure state is never left with a closed client if
158
+ initialization fails. The timestamp is only updated after the new
159
+ client is successfully created and assigned.
152
160
  """
153
161
  workflow_id = get_workflow_id()
154
162
  if not self._state.get(workflow_id):
155
163
  self._state[workflow_id] = BaseSQLMetadataExtractionActivitiesState()
156
164
 
157
- await super()._set_state(workflow_args)
165
+ existing_state = self._state[workflow_id]
166
+
167
+ # Update workflow_args early, but preserve old timestamp until new client is ready
168
+ # This ensures that if initialization fails, the state can still be refreshed
169
+ existing_state.workflow_args = workflow_args
170
+
171
+ # Store reference to old client for cleanup after new client is ready
172
+ old_sql_client = None
173
+ if existing_state and existing_state.sql_client is not None:
174
+ old_sql_client = existing_state.sql_client
158
175
 
176
+ # Create and configure new client BEFORE closing old one
177
+ # This ensures state is never left with a closed client if initialization fails
159
178
  sql_client = self.sql_client_class()
160
179
 
161
180
  # Load credentials BEFORE creating handler to avoid race condition
@@ -165,10 +184,29 @@ class BaseSQLMetadataExtractionActivities(ActivitiesInterface):
165
184
  )
166
185
  await sql_client.load(credentials)
167
186
 
168
- # Assign sql_client and handler to state AFTER credentials are loaded
187
+ # Only after new client is successfully created and configured,
188
+ # close old client and assign new one to state
189
+ if old_sql_client is not None:
190
+ try:
191
+ await old_sql_client.close()
192
+ logger.debug(
193
+ f"Closed existing SQL client for workflow {workflow_id} during state refresh"
194
+ )
195
+ except Exception as e:
196
+ logger.warning(
197
+ f"Failed to close existing SQL client for workflow {workflow_id}: {e}",
198
+ exc_info=True,
199
+ )
200
+ # Continue even if close fails - new client is already ready
201
+
202
+ # Assign sql_client and handler to state AFTER new client is ready
169
203
  self._state[workflow_id].sql_client = sql_client
170
204
  handler = self.handler_class(sql_client)
171
205
  self._state[workflow_id].handler = handler
206
+ # Update timestamp only after successful client creation and assignment
207
+ # This ensures that if initialization fails, the old timestamp remains
208
+ # and the state can be refreshed again immediately
209
+ self._state[workflow_id].last_updated_timestamp = datetime.now()
172
210
 
173
211
  # Create transformer with required parameters from ApplicationConstants
174
212
  transformer_params = {
@@ -0,0 +1,6 @@
1
+ """Azure client module for the application-sdk framework."""
2
+
3
+ # Azure Management API endpoint for token acquisition
4
+ AZURE_MANAGEMENT_API_ENDPOINT = "https://management.azure.com/.default"
5
+
6
+ __all__ = ["AZURE_MANAGEMENT_API_ENDPOINT"]
@@ -0,0 +1,288 @@
1
+ """
2
+ Azure authentication provider for the application-sdk framework.
3
+
4
+ This module provides the AzureAuthProvider class that handles Azure
5
+ Service Principal authentication for the application-sdk framework.
6
+
7
+ Example:
8
+ >>> from application_sdk.clients.azure.auth import AzureAuthProvider
9
+ >>> import asyncio
10
+ >>>
11
+ >>> # Create authentication provider
12
+ >>> auth_provider = AzureAuthProvider()
13
+ >>>
14
+ >>> # Authenticate with Service Principal credentials
15
+ >>> credentials = {
16
+ ... "tenant_id": "your-tenant-id",
17
+ ... "client_id": "your-client-id",
18
+ ... "client_secret": "your-client-secret"
19
+ ... }
20
+ >>>
21
+ >>> # Create credential
22
+ >>> credential = await auth_provider.create_credential(
23
+ ... auth_type="service_principal",
24
+ ... credentials=credentials
25
+ ... )
26
+ >>>
27
+ >>> # Alternative credential key formats are also supported
28
+ >>> alt_credentials = {
29
+ ... "tenantId": "your-tenant-id", # camelCase
30
+ ... "clientId": "your-client-id", # camelCase
31
+ ... "clientSecret": "your-client-secret" # camelCase
32
+ ... }
33
+ >>>
34
+ >>> credential = await auth_provider.create_credential(
35
+ ... auth_type="service_principal",
36
+ ... credentials=alt_credentials
37
+ ... )
38
+ >>>
39
+ >>> # Error handling for missing credentials
40
+ >>> try:
41
+ ... await auth_provider.create_credential(
42
+ ... auth_type="service_principal",
43
+ ... credentials={"tenant_id": "only-tenant"} # Missing client_id and client_secret
44
+ ... )
45
+ ... except CommonError as e:
46
+ ... print(f"Authentication failed: {e}")
47
+ ... # Output: Authentication failed: Missing required credential keys: client_id, client_secret
48
+ >>>
49
+ >>> # Unsupported authentication type
50
+ >>> try:
51
+ ... await auth_provider.create_credential(
52
+ ... auth_type="unsupported_type",
53
+ ... credentials=credentials
54
+ ... )
55
+ ... except CommonError as e:
56
+ ... print(f"Authentication failed: {e}")
57
+ ... # Output: Authentication failed: Only 'service_principal' authentication is supported. Received: unsupported_type
58
+ """
59
+
60
+ from typing import Any, Dict, Optional
61
+
62
+ from azure.core.credentials import TokenCredential
63
+ from azure.core.exceptions import ClientAuthenticationError
64
+ from azure.identity import ClientSecretCredential
65
+ from pydantic import BaseModel, ConfigDict, Field, ValidationError
66
+
67
+ from application_sdk.clients.azure import AZURE_MANAGEMENT_API_ENDPOINT
68
+ from application_sdk.common.error_codes import CommonError
69
+ from application_sdk.common.utils import run_sync
70
+ from application_sdk.observability.logger_adaptor import get_logger
71
+
72
+ logger = get_logger(__name__)
73
+
74
+
75
class ServicePrincipalCredentials(BaseModel):
    """Validated container for Azure Service Principal credentials.

    Field aliases let callers supply either snake_case keys
    (``tenant_id``) or camelCase keys (``tenantId``). Every field is
    mandatory for service-principal authentication.

    Attributes:
        tenant_id: Azure tenant ID (also accepts 'tenantId').
        client_id: Azure client ID (also accepts 'clientId').
        client_secret: Azure client secret (also accepts 'clientSecret').
    """

    model_config = ConfigDict(
        # Accept both the declared field name and its alias.
        populate_by_name=True,
        # Tolerate extra keys (Azure client may need extra fields like
        # storage_account_name, network_config, etc.).
        extra="ignore",
        # Re-validate whenever an attribute is reassigned.
        validate_assignment=True,
    )

    tenant_id: str = Field(
        ...,
        alias="tenantId",
        description="Azure tenant ID for service principal authentication",
    )
    client_id: str = Field(
        ...,
        alias="clientId",
        description="Azure client ID for service principal authentication",
    )
    client_secret: str = Field(
        ...,
        alias="clientSecret",
        description="Azure client secret for service principal authentication",
    )
109
+
110
+
111
class AzureAuthProvider:
    """
    Azure authentication provider for handling Service Principal authentication.

    This class provides a unified interface for creating Azure credentials
    using Service Principal authentication with the Azure SDK.

    Supported authentication method:
        - service_principal: Using client ID, client secret, and tenant ID
    """

    def __init__(self):
        """Initialize the Azure authentication provider."""
        pass

    async def create_credential(
        self,
        auth_type: str = "service_principal",
        credentials: Optional[Dict[str, Any]] = None,
    ) -> TokenCredential:
        """
        Create an Azure credential using Service Principal authentication.

        Args:
            auth_type (str): Type of authentication to use.
                Currently only supports 'service_principal'.
            credentials (Optional[Dict[str, Any]]): Service Principal credentials.
                Required fields: tenant_id, client_id, client_secret.

        Returns:
            TokenCredential: Azure credential instance.

        Raises:
            CommonError: If the authentication type is not supported, the
                credentials are missing or invalid, or credential creation fails.
        """
        try:
            logger.debug(f"Creating Azure credential with auth type: {auth_type}")

            if auth_type.lower() != "service_principal":
                raise CommonError(
                    f"{CommonError.CREDENTIALS_PARSE_ERROR}: "
                    f"Only 'service_principal' authentication is supported. "
                    f"Received: {auth_type}"
                )

            if not credentials:
                raise CommonError(
                    f"{CommonError.CREDENTIALS_PARSE_ERROR}: "
                    "Credentials required for service principal authentication"
                )

            return await self._create_service_principal_credential(credentials)

        except CommonError:
            # BUGFIX: CommonError raised above (unsupported auth type, missing
            # credentials) or by _create_service_principal_credential is already
            # a well-formed framework error. Without this passthrough the
            # generic `except Exception` below would catch it and re-wrap it as
            # "Unexpected error", double-prefixing the message and obscuring
            # the original error code.
            raise
        except ClientAuthenticationError as e:
            logger.error(f"Azure authentication failed: {str(e)}")
            raise CommonError(f"{CommonError.AZURE_CREDENTIAL_ERROR}: {str(e)}")
        except ValueError as e:
            logger.error(f"Invalid Azure credential parameters: {str(e)}")
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: Invalid credential parameters - {str(e)}"
            )
        except TypeError as e:
            logger.error(f"Wrong Azure credential parameter types: {str(e)}")
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: Invalid credential parameter types - {str(e)}"
            )
        except Exception as e:
            logger.error(f"Unexpected error creating Azure credential: {str(e)}")
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: Unexpected error - {str(e)}"
            )

    async def _create_service_principal_credential(
        self, credentials: Dict[str, Any]
    ) -> ClientSecretCredential:
        """
        Create a service principal credential.

        Args:
            credentials (Dict[str, Any]): Service principal credentials.
                Must include tenant_id, client_id, and client_secret
                (camelCase aliases are accepted as well).

        Returns:
            ClientSecretCredential: Service principal credential.

        Raises:
            CommonError: If required credentials are missing or invalid.
        """
        if not credentials:
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: "
                "Credentials required for service principal authentication"
            )

        try:
            # Validate credentials using the Pydantic model (supports both
            # snake_case and camelCase keys via field aliases).
            validated_credentials = ServicePrincipalCredentials(**credentials)
        except ValidationError as e:
            # Pydantic provides detailed error messages for all validation errors.
            # Format them into a single user-friendly message.
            error_details = "; ".join(
                [
                    f"{'.'.join(str(loc) for loc in err['loc'])}: {err['msg']}"
                    for err in e.errors()
                ]
            )
            error_message = f"Invalid credential parameters: {error_details}"
            logger.error(f"Azure credential validation failed: {error_message}")
            raise CommonError(f"{CommonError.CREDENTIALS_PARSE_ERROR}: {error_message}")

        logger.debug(
            f"Creating service principal credential for tenant: {validated_credentials.tenant_id}"
        )

        try:
            # ClientSecretCredential construction is synchronous; run it off
            # the event loop via run_sync.
            return await run_sync(ClientSecretCredential)(
                validated_credentials.tenant_id,
                validated_credentials.client_id,
                validated_credentials.client_secret,
            )
        except ValueError as e:
            logger.error(f"Invalid Azure credential parameters: {str(e)}")
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: Invalid credential parameters - {str(e)}"
            )
        except TypeError as e:
            logger.error(f"Wrong Azure credential parameter types: {str(e)}")
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: Invalid credential parameter types - {str(e)}"
            )
        except ClientAuthenticationError as e:
            logger.error(f"Azure authentication failed: {str(e)}")
            raise CommonError(f"{CommonError.AZURE_CREDENTIAL_ERROR}: {str(e)}")
        except Exception as e:
            logger.error(f"Unexpected error creating Azure credential: {str(e)}")
            raise CommonError(
                f"{CommonError.CREDENTIALS_PARSE_ERROR}: Unexpected error - {str(e)}"
            )

    async def validate_credential(self, credential: TokenCredential) -> bool:
        """
        Validate an Azure credential by attempting to get a token.

        Args:
            credential (TokenCredential): Azure credential to validate.

        Returns:
            bool: True if the credential is valid, False otherwise.
        """
        try:
            logger.debug("Validating Azure credential")

            # Try to get a token for the Azure Management API scope.
            token = await run_sync(credential.get_token)(AZURE_MANAGEMENT_API_ENDPOINT)

            if token and hasattr(token, "token"):
                logger.debug("Azure credential validation successful")
                return True
            else:
                logger.warning("Azure credential validation failed: No token received")
                return False

        except ClientAuthenticationError as e:
            logger.error(
                f"Azure credential validation failed - authentication error: {str(e)}"
            )
            return False
        except ValueError as e:
            logger.error(
                f"Azure credential validation failed - invalid parameters: {str(e)}"
            )
            return False
        except Exception as e:
            logger.error(
                f"Azure credential validation failed - unexpected error: {str(e)}"
            )
            return False
@@ -0,0 +1,336 @@
1
+ """
2
+ Azure client implementation for the application-sdk framework.
3
+
4
+ This module provides the main AzureClient class that serves as a unified interface
5
+ for connecting to and interacting with Azure Storage services. It supports Service Principal
6
+ authentication and provides service-specific subclients.
7
+
8
+ Example:
9
+ >>> from application_sdk.clients.azure.client import AzureClient
10
+ >>> from application_sdk.clients.azure.auth import AzureAuthProvider
11
+ >>>
12
+ >>> # Create Azure client with Service Principal credentials
13
+ >>> credentials = {
14
+ ... "tenant_id": "your-tenant-id",
15
+ ... "client_id": "your-client-id",
16
+ ... "client_secret": "your-client-secret"
17
+ ... }
18
+ >>>
19
+ >>> client = AzureClient(credentials)
20
+ >>> await client.load()
21
+ >>>
22
+ >>> # Check client health
23
+ >>> health_status = await client.health_check()
24
+ >>> print(f"Overall health: {health_status.overall_health}")
25
+ >>> print(f"Connection health: {health_status.connection_health}")
26
+ >>>
27
+ >>> # Access health status details
28
+ >>> for service_name, service_health in health_status.services.items():
29
+ ... print(f"{service_name}: {service_health.status}")
30
+ ... if service_health.error:
31
+ ... print(f" Error: {service_health.error}")
32
+ """
33
+
34
+ import asyncio
35
+ from concurrent.futures import ThreadPoolExecutor
36
+ from typing import Any, Dict, Optional
37
+
38
+ from azure.core.credentials import TokenCredential
39
+ from azure.core.exceptions import AzureError, ClientAuthenticationError
40
+ from pydantic import BaseModel
41
+
42
+ from application_sdk.clients import ClientInterface
43
+ from application_sdk.clients.azure import AZURE_MANAGEMENT_API_ENDPOINT
44
+ from application_sdk.clients.azure.auth import AzureAuthProvider
45
+ from application_sdk.common.error_codes import ClientError
46
+ from application_sdk.common.utils import run_sync
47
+ from application_sdk.observability.logger_adaptor import get_logger
48
+
49
+ logger = get_logger(__name__)
50
+
51
+
52
+ class ServiceHealth(BaseModel):
53
+ """Model for individual service health status.
54
+
55
+ Attributes:
56
+ status: The health status of the service (e.g., "healthy", "error", "unknown")
57
+ error: Optional error message if the service is unhealthy
58
+ """
59
+
60
+ status: str
61
+ error: Optional[str] = None
62
+
63
+
64
+ class HealthStatus(BaseModel):
65
+ """Model for overall Azure client health status.
66
+
67
+ Attributes:
68
+ connection_health: Whether the Azure connection is healthy
69
+ services: Dictionary mapping service names to their health status
70
+ overall_health: Overall health status considering connection and services
71
+ """
72
+
73
+ connection_health: bool
74
+ services: Dict[str, ServiceHealth]
75
+ overall_health: bool
76
+
77
+
78
class AzureClient(ClientInterface):
    """
    Main Azure client for the application-sdk framework.

    This client provides a unified interface for connecting to and interacting
    with Azure services. It supports Service Principal authentication
    and provides service-specific subclients for different Azure services.

    Attributes:
        credentials (Dict[str, Any]): Azure connection credentials
        resolved_credentials (Dict[str, Any]): Resolved credentials after processing
        credential (Optional[TokenCredential]): Azure credential instance
        auth_provider (AzureAuthProvider): Authentication provider instance
        _services (Dict[str, Any]): Cache of service clients
        _executor (ThreadPoolExecutor): Thread pool for async operations
        _connection_health (bool): Connection health status
    """

    def __init__(
        self,
        credentials: Optional[Dict[str, Any]] = None,
        max_workers: int = 10,
        **kwargs: Any,
    ):
        """
        Initialize the Azure client.

        Args:
            credentials (Optional[Dict[str, Any]]): Azure Service Principal credentials.
                Must include tenant_id, client_id, and client_secret.
            max_workers (int): Maximum number of worker threads for async operations.
            **kwargs: Additional keyword arguments passed to service clients.
        """
        self.credentials = credentials or {}
        self.resolved_credentials: Dict[str, Any] = {}
        self.credential: Optional[TokenCredential] = None
        self.auth_provider = AzureAuthProvider()
        self._services: Dict[str, Any] = {}
        self._executor = ThreadPoolExecutor(max_workers=max_workers)
        self._connection_health = False
        self._kwargs = kwargs

    async def load(self, credentials: Optional[Dict[str, Any]] = None) -> None:
        """
        Load and establish the Azure connection using Service Principal authentication.

        Args:
            credentials (Optional[Dict[str, Any]]): Azure Service Principal credentials.
                If provided, will override the credentials passed to __init__.
                Must include tenant_id, client_id, and client_secret.

        Raises:
            ClientError: If connection fails due to authentication or connection issues
        """
        if credentials:
            self.credentials = credentials

        try:
            logger.info("Loading Azure client...")

            # Handle credential resolution
            if "credential_guid" in self.credentials:
                # If we have a credential_guid, resolve the real credentials
                # through the secret store.
                from application_sdk.services.secretstore import SecretStore

                self.resolved_credentials = await SecretStore.get_credentials(
                    self.credentials["credential_guid"]
                )
            else:
                # Credentials are assumed to already be in direct format.
                # Warn when they look like they still need resolution.
                if (
                    "secret-path" in self.credentials
                    or "credentialSource" in self.credentials
                ):
                    logger.warning(
                        "Credentials appear to need resolution but no credential_guid provided. Using as-is."
                    )
                self.resolved_credentials = self.credentials

            # Create Azure credential using Service Principal authentication
            self.credential = await self.auth_provider.create_credential(
                auth_type="service_principal", credentials=self.resolved_credentials
            )

            # Test the connection
            await self._test_connection()

            self._connection_health = True
            logger.info("Azure client loaded successfully")

        except ClientError:
            # BUGFIX: _test_connection already raises a fully-classified
            # ClientError; without this passthrough the generic
            # `except Exception` below would re-wrap it as "Unexpected error",
            # double-prefixing the message and hiding the original error code.
            raise
        except ClientAuthenticationError as e:
            logger.error(f"Azure authentication failed: {str(e)}")
            raise ClientError(f"{ClientError.CLIENT_AUTH_ERROR}: {str(e)}")
        except AzureError as e:
            logger.error(f"Azure connection error: {str(e)}")
            raise ClientError(f"{ClientError.CLIENT_AUTH_ERROR}: {str(e)}")
        except ValueError as e:
            logger.error(f"Invalid Azure client parameters: {str(e)}")
            raise ClientError(
                f"{ClientError.INPUT_VALIDATION_ERROR}: Invalid parameters - {str(e)}"
            )
        except TypeError as e:
            logger.error(f"Wrong Azure client parameter types: {str(e)}")
            raise ClientError(
                f"{ClientError.INPUT_VALIDATION_ERROR}: Invalid parameter types - {str(e)}"
            )
        except Exception as e:
            logger.error(f"Unexpected error loading Azure client: {str(e)}")
            raise ClientError(
                f"{ClientError.CLIENT_AUTH_ERROR}: Unexpected error - {str(e)}"
            )

    async def close(self) -> None:
        """Close Azure connections and clean up resources."""
        try:
            logger.info("Closing Azure client...")

            # Close all cached service clients; tolerate individual failures
            # so one bad client does not prevent the rest from closing.
            for service_name, service_client in self._services.items():
                try:
                    if hasattr(service_client, "close"):
                        await service_client.close()
                    elif hasattr(service_client, "disconnect"):
                        await service_client.disconnect()
                except Exception as e:
                    logger.warning(f"Error closing {service_name} client: {str(e)}")

            # Clear service cache
            self._services.clear()

            # Shutdown executor.
            # NOTE(review): shutdown(wait=True) blocks the running event loop
            # until worker threads drain — acceptable at teardown, but confirm
            # no long-running tasks are submitted to this executor.
            self._executor.shutdown(wait=True)

            # Reset connection health
            self._connection_health = False

            logger.info("Azure client closed successfully")

        except Exception as e:
            logger.error(f"Error closing Azure client: {str(e)}")

    async def health_check(self) -> HealthStatus:
        """
        Perform a health check on the Azure connection and cached services.

        Returns:
            HealthStatus: Health status information.
        """
        health_status = HealthStatus(
            connection_health=self._connection_health,
            services={},
            overall_health=False,
        )

        if not self._connection_health:
            return health_status

        # Check each cached service client
        for service_name, service_client in self._services.items():
            try:
                if hasattr(service_client, "health_check"):
                    service_health = await service_client.health_check()
                    # Handle different return types from service health checks
                    if isinstance(service_health, dict):
                        status = service_health.get("status", "unknown")
                        error = service_health.get("error")
                    elif hasattr(service_health, "status"):
                        # Handle Pydantic models or objects with status attribute
                        status = getattr(service_health, "status", "unknown")
                        error = getattr(service_health, "error", None)
                    else:
                        # Fallback for unexpected return types
                        status = "unknown"
                        error = f"Unexpected health check return type: {type(service_health)}"
                else:
                    status = "unknown"
                    error = None

                health_status.services[service_name] = ServiceHealth(
                    status=status,
                    error=error,
                )
            except Exception as e:
                health_status.services[service_name] = ServiceHealth(
                    status="error", error=str(e)
                )

        # Overall health is True if connection is healthy and at least one service is available
        health_status.overall_health = (
            self._connection_health and len(health_status.services) > 0
        )

        return health_status

    async def _test_connection(self) -> None:
        """
        Test the Azure connection by attempting to get a token.

        Raises:
            ClientError: If no credential is available or the connection test fails.
        """
        if not self.credential:
            raise ClientError(
                f"{ClientError.AUTH_CREDENTIALS_ERROR}: No credential available for connection test"
            )

        try:
            # Test the credential by getting a token (get_token is
            # synchronous, so run it off the event loop).
            await run_sync(self.credential.get_token)(AZURE_MANAGEMENT_API_ENDPOINT)
        except ClientAuthenticationError as e:
            logger.error(
                f"Azure connection test failed - authentication error: {str(e)}"
            )
            raise ClientError(f"{ClientError.CLIENT_AUTH_ERROR}: {str(e)}")
        except AzureError as e:
            logger.error(f"Azure connection test failed - service error: {str(e)}")
            raise ClientError(f"{ClientError.CLIENT_AUTH_ERROR}: {str(e)}")
        except ValueError as e:
            logger.error(f"Azure connection test failed - invalid parameters: {str(e)}")
            raise ClientError(
                f"{ClientError.INPUT_VALIDATION_ERROR}: Invalid parameters - {str(e)}"
            )
        except Exception as e:
            logger.error(f"Azure connection test failed - unexpected error: {str(e)}")
            raise ClientError(
                f"{ClientError.CLIENT_AUTH_ERROR}: Unexpected error - {str(e)}"
            )

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit."""
        # Note: This is a synchronous context manager.
        # For proper async cleanup, use the async context manager instead.
        # This method is kept for backward compatibility but doesn't guarantee cleanup.
        logger.warning(
            "Using synchronous context manager. For proper async cleanup, "
            "use 'async with AzureClient() as client:' instead."
        )
        # Schedule cleanup but don't wait for it.
        # BUGFIX: use get_running_loop() instead of the deprecated
        # get_event_loop(), which may create a brand-new (never-run) loop
        # when called with no loop running instead of raising RuntimeError.
        try:
            loop = asyncio.get_running_loop()
            loop.create_task(self.close())
        except RuntimeError:
            # No event loop running, can't schedule async cleanup
            logger.warning("No event loop running, async cleanup not possible")

    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        await self.close()
@@ -288,6 +288,15 @@ class CommonError(AtlanError):
288
288
  CREDENTIALS_RESOLUTION_ERROR = ErrorCode(
289
289
  ErrorComponent.COMMON, "401", "03", "Credentials resolution error"
290
290
  )
291
+ AZURE_CREDENTIAL_ERROR = ErrorCode(
292
+ ErrorComponent.COMMON, "401", "04", "Azure credential error"
293
+ )
294
+ AZURE_CONNECTION_ERROR = ErrorCode(
295
+ ErrorComponent.COMMON, "503", "01", "Azure connection error"
296
+ )
297
+ AZURE_SERVICE_ERROR = ErrorCode(
298
+ ErrorComponent.COMMON, "500", "01", "Azure service error"
299
+ )
291
300
 
292
301
 
293
302
  class DocGenError(AtlanError):
@@ -42,6 +42,7 @@ from application_sdk.server.fastapi.models import (
42
42
  HttpWorkflowTrigger,
43
43
  PreflightCheckRequest,
44
44
  PreflightCheckResponse,
45
+ Subscription,
45
46
  TestAuthRequest,
46
47
  TestAuthResponse,
47
48
  WorkflowConfigRequest,
@@ -90,12 +91,15 @@ class APIServer(ServerInterface):
90
91
  workflow_router: APIRouter
91
92
  dapr_router: APIRouter
92
93
  events_router: APIRouter
94
+ subscription_router: APIRouter
93
95
  handler: Optional[HandlerInterface]
94
96
  templates: Jinja2Templates
95
97
  duckdb_ui: DuckDBUI
96
98
 
97
99
  docs_directory_path: str = "docs"
98
100
  docs_export_path: str = "dist"
101
+ # List of subscriptions to be registered
102
+ subscriptions: List[Subscription] = []
99
103
 
100
104
  frontend_assets_path: str = "frontend/static"
101
105
 
@@ -112,6 +116,7 @@ class APIServer(ServerInterface):
112
116
  frontend_templates_path: str = "frontend/templates",
113
117
  ui_enabled: bool = True,
114
118
  has_configmap: bool = False,
119
+ subscriptions: List[Subscription] = [],
115
120
  ):
116
121
  """Initialize the FastAPI application.
117
122
 
@@ -138,7 +143,7 @@ class APIServer(ServerInterface):
138
143
  self.workflow_router = APIRouter()
139
144
  self.dapr_router = APIRouter()
140
145
  self.events_router = APIRouter()
141
-
146
+ self.subscriptions = subscriptions
142
147
  # Set up the application
143
148
  error_handler = internal_server_error_handler # Store as local variable
144
149
  self.app.add_exception_handler(
@@ -205,6 +210,7 @@ class APIServer(ServerInterface):
205
210
  - Workflow router (/workflows/v1)
206
211
  - Pubsub router (/dapr)
207
212
  - Events router (/events/v1)
213
+ - Subscription router (/subscriptions/v1)
208
214
  """
209
215
  # Register all routes first
210
216
  self.register_routes()
@@ -215,6 +221,16 @@ class APIServer(ServerInterface):
215
221
  self.app.include_router(self.dapr_router, prefix="/dapr")
216
222
  self.app.include_router(self.events_router, prefix="/events/v1")
217
223
 
224
+ # Register subscription routes from subscriptions with handler callbacks
225
+ subscription_router = APIRouter()
226
+ for subscription in self.subscriptions:
227
+ subscription_router.add_api_route(
228
+ f"/{subscription.route}",
229
+ subscription.handler,
230
+ methods=["POST"],
231
+ )
232
+ self.app.include_router(subscription_router, prefix="/subscriptions/v1")
233
+
218
234
  def fallback_home(self, request: Request) -> HTMLResponse:
219
235
  return self.templates.TemplateResponse(
220
236
  "index.html",
@@ -432,6 +448,19 @@ class APIServer(ServerInterface):
432
448
  """
433
449
 
434
450
  subscriptions: List[dict[str, Any]] = []
451
+ for subscription in self.subscriptions:
452
+ subscription_dict: dict[str, Any] = {
453
+ "pubsubname": subscription.component_name,
454
+ "topic": subscription.topic,
455
+ "route": f"/subscriptions/v1/{subscription.route}",
456
+ }
457
+ if subscription.bulk_config:
458
+ subscription_dict["bulkSubscribe"] = (
459
+ subscription.bulk_config.model_dump(by_alias=True)
460
+ )
461
+ if subscription.dead_letter_topic:
462
+ subscription_dict["deadLetterTopic"] = subscription.dead_letter_topic
463
+ subscriptions.append(subscription_dict)
435
464
  for event_trigger in self.event_triggers:
436
465
  filters = [
437
466
  f"({event_filter.path} {event_filter.operator} '{event_filter.value}')"
@@ -1,7 +1,7 @@
1
1
  # Request/Response DTOs for workflows
2
2
 
3
3
  from enum import Enum
4
- from typing import Any, Dict, List, Optional, Type
4
+ from typing import Any, Callable, Coroutine, Dict, List, Optional, Type, Union
5
5
 
6
6
  from pydantic import BaseModel, Field, RootModel
7
7
 
@@ -240,3 +240,64 @@ class EventWorkflowTrigger(WorkflowTrigger):
240
240
 
241
241
  def should_trigger_workflow(self, event: Event) -> bool:
242
242
  return True
243
+
244
+
245
+ class Subscription(BaseModel):
246
+ """Subscription configuration for Dapr messaging.
247
+
248
+ Attributes:
249
+ component_name: Name of the Dapr pubsub component
250
+ topic: Topic to subscribe to
251
+ route: Route path for the message handler endpoint
252
+ handler: Required callback function to handle incoming messages
253
+ bulk_config: Optional bulk subscribe configuration
254
+ dead_letter_topic: Optional dead letter topic for failed messages
255
+
256
+ Nested Classes:
257
+ BulkConfig: Configuration for bulk message processing
258
+ MessageStatus: Status codes for handler responses (SUCCESS, RETRY, DROP)
259
+ """
260
+
261
+ class BulkConfig(BaseModel):
262
+ """Bulk configuration for Dapr messaging.
263
+
264
+ Attributes:
265
+ enabled: Whether bulk subscribe is enabled
266
+ max_messages_count: Maximum number of messages to receive in a batch
267
+ max_await_duration_ms: Maximum time to wait for messages in milliseconds
268
+ """
269
+
270
+ enabled: bool = False
271
+ max_messages_count: int = Field(
272
+ default=100, serialization_alias="maxMessagesCount"
273
+ )
274
+ max_await_duration_ms: int = Field(
275
+ default=40, serialization_alias="maxAwaitDurationMs"
276
+ )
277
+
278
+ class MessageStatus(str, Enum):
279
+ """Status codes for Dapr pub/sub subscription message handler responses.
280
+
281
+ Used in subscription handler responses to indicate how Dapr should handle the message.
282
+ Based on Dapr docs: https://docs.dapr.io/reference/api/pubsub_api/#expected-http-response
283
+
284
+ Attributes:
285
+ SUCCESS: Message was processed successfully.
286
+ RETRY: Message processing failed, should be retried.
287
+ DROP: Message should be dropped (sent to dead letter topic if configured).
288
+ """
289
+
290
+ SUCCESS = "SUCCESS"
291
+ RETRY = "RETRY"
292
+ DROP = "DROP"
293
+
294
+ model_config = {"arbitrary_types_allowed": True}
295
+
296
+ component_name: str
297
+ topic: str
298
+ route: str
299
+ handler: Union[
300
+ Callable[[Any], Any], Callable[[Any], Coroutine[Any, Any, Any]]
301
+ ] # Required callback function (sync or async)
302
+ bulk_config: Optional[BulkConfig] = None
303
+ dead_letter_topic: Optional[str] = None
@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, Tuple, Type
4
4
 
5
5
  import daft
6
6
  import yaml
7
+ from daft.functions import to_struct, when
7
8
  from pyatlan.model.enums import AtlanConnectorType
8
9
 
9
10
  from application_sdk.observability.logger_adaptor import get_logger
@@ -204,12 +205,25 @@ class QueryBasedTransformer(TransformerInterface):
204
205
  Returns:
205
206
  Optional[daft.Expression]: The constructed struct expression or None if all fields are null
206
207
  """
208
+
209
+ # Check if level is None
210
+ if level is None:
211
+ logger.error("ERROR: level is None in _build_struct!")
212
+ raise ValueError("level cannot be None in _build_struct")
213
+
214
+ # Check if prefix is None
215
+ if prefix is None:
216
+ logger.error("ERROR: prefix is None in _build_struct!")
217
+ raise ValueError("prefix cannot be None in _build_struct")
218
+
207
219
  struct_fields = []
208
220
  non_null_fields = []
209
221
 
210
222
  # Handle columns at this level
211
223
  if "columns" in level:
224
+ logger.debug(f"Processing columns at level: {level['columns']}")
212
225
  for full_col, suffix in level["columns"]:
226
+ logger.debug(f"Processing column: {full_col} -> {suffix}")
213
227
  field = daft.col(full_col).alias(suffix)
214
228
  struct_fields.append(field)
215
229
  # Add to non_null check by negating is_null()
@@ -218,6 +232,7 @@ class QueryBasedTransformer(TransformerInterface):
218
232
  # Handle nested levels
219
233
  for component, sub_level in level.items():
220
234
  if component != "columns": # Skip the columns key
235
+ logger.debug(f"Processing nested component: {component}")
221
236
  nested_struct = self._build_struct(sub_level, component)
222
237
  if nested_struct is not None:
223
238
  struct_fields.append(nested_struct)
@@ -226,8 +241,9 @@ class QueryBasedTransformer(TransformerInterface):
226
241
 
227
242
  # Only create a struct if we have fields
228
243
  if struct_fields:
244
+ logger.debug(f"Creating struct with {len(struct_fields)} fields")
229
245
  # Create the struct first
230
- struct = daft.struct(*struct_fields)
246
+ struct = to_struct(*struct_fields)
231
247
 
232
248
  # If we have non-null checks, apply them
233
249
  if non_null_fields:
@@ -236,11 +252,12 @@ class QueryBasedTransformer(TransformerInterface):
236
252
  for check in non_null_fields[1:]:
237
253
  any_non_null = any_non_null | check
238
254
 
239
- # Use if_else on the any_non_null Expression
240
- return any_non_null.if_else(struct, None).alias(prefix)
255
+ # Use when().otherwise() for conditional expression (replaces if_else in daft 0.7+)
256
+ return when(any_non_null, struct).otherwise(None).alias(prefix)
241
257
 
242
258
  return struct.alias(prefix)
243
259
 
260
+ logger.warning(f"No fields found for level {level}")
244
261
  return None
245
262
 
246
263
  def get_grouped_dataframe_by_prefix(
@@ -281,12 +298,18 @@ class QueryBasedTransformer(TransformerInterface):
281
298
  try:
282
299
  # Get all column names
283
300
  columns = dataframe.column_names
301
+ logger.debug("=== DEBUG: get_grouped_dataframe_by_prefix ===")
302
+ logger.debug(f"Input DataFrame columns: {columns}")
284
303
 
285
304
  # Group columns by their path components
286
305
  path_groups = {}
287
306
  standalone_columns = []
288
307
 
289
308
  for col in columns:
309
+ if col is None:
310
+ logger.error(f"Found None column in DataFrame columns: {columns}")
311
+ continue
312
+
290
313
  if "." in col:
291
314
  # Split the full path into components
292
315
  path_components = col.split(".")
@@ -312,8 +335,12 @@ class QueryBasedTransformer(TransformerInterface):
312
335
  for col in standalone_columns:
313
336
  new_columns.append(daft.col(col))
314
337
 
338
+ logger.debug(f"path_groups: {path_groups}")
339
+ logger.debug(f"standalone_columns: {standalone_columns}")
340
+
315
341
  # Build nested structs starting from the root level
316
342
  for prefix, level in path_groups.items():
343
+ logger.debug(f"Building struct for prefix: {prefix}, level: {level}")
317
344
  struct_expr = self._build_struct(level, prefix)
318
345
  new_columns.append(struct_expr)
319
346
 
@@ -2,4 +2,4 @@
2
2
  Version information for the application_sdk package.
3
3
  """
4
4
 
5
- __version__ = "2.1.0"
5
+ __version__ = "2.2.0"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: atlan-application-sdk
3
- Version: 2.1.0
3
+ Version: 2.2.0
4
4
  Summary: Atlan Application SDK is a Python library for developing applications on the Atlan Platform
5
5
  Project-URL: Repository, https://github.com/atlanhq/application-sdk
6
6
  Project-URL: Documentation, https://github.com/atlanhq/application-sdk/README.md
@@ -30,8 +30,12 @@ Requires-Dist: pyatlan<8.5.0,>=8.0.2
30
30
  Requires-Dist: pydantic<2.13.0,>=2.10.6
31
31
  Requires-Dist: python-dotenv<1.3.0,>=1.1.0
32
32
  Requires-Dist: uvloop<0.23.0,>=0.21.0; sys_platform != 'win32'
33
+ Provides-Extra: azure
34
+ Requires-Dist: azure-identity>=1.15.0; extra == 'azure'
35
+ Requires-Dist: azure-storage-blob>=12.19.0; extra == 'azure'
36
+ Requires-Dist: azure-storage-file-datalake>=12.19.0; extra == 'azure'
33
37
  Provides-Extra: daft
34
- Requires-Dist: daft<0.8.0,>=0.4.12; extra == 'daft'
38
+ Requires-Dist: daft<0.8.0,>=0.7.1; extra == 'daft'
35
39
  Provides-Extra: distributed-lock
36
40
  Requires-Dist: redis[hiredis]<7.2.0,>=5.2.0; extra == 'distributed-lock'
37
41
  Provides-Extra: iam-auth
@@ -1,8 +1,8 @@
1
1
  application_sdk/__init__.py,sha256=2e2mvmLJ5dxmJGPELtb33xwP-j6JMdoIuqKycEn7hjg,151
2
2
  application_sdk/constants.py,sha256=TvdmKQShVWBNQZdVF2y-fxuE31FmeraTnqQ9jT_n5XY,11567
3
- application_sdk/version.py,sha256=b8rehaDy9Q_sCU-sBPVls3ZFH8_ZNacFO5aQrIaO8ts,84
3
+ application_sdk/version.py,sha256=q2hQsJUqlYmQIe9BcRqShBDGfZcI_1e30HROiiS-LdU,84
4
4
  application_sdk/worker.py,sha256=DLMocpHvvwpdAopyXhxwM7ftaNlKvZMQfkgy1MFyiik,7561
5
- application_sdk/activities/__init__.py,sha256=6SiefuOPUDGfN3z6oPY4RkQLiUmkHpoDy5xadzpDzAw,11588
5
+ application_sdk/activities/__init__.py,sha256=i7iY6aL1VFg185n2rLLvD_sI2BA9zJ33jL5rD_sY__U,12350
6
6
  application_sdk/activities/lock_management.py,sha256=6Wdf3jMKitoarHQP91PIJOoGFz4aaOLS_40c7n1yAOA,3902
7
7
  application_sdk/activities/.cursor/BUGBOT.md,sha256=FNykX5aMkdOhzgpiGqstOnSp9JN63iR2XP3onU4AGh8,15843
8
8
  application_sdk/activities/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -12,7 +12,7 @@ application_sdk/activities/common/utils.py,sha256=ngyFmiZnMCAQtyu6vGeAlkzwNkM29M
12
12
  application_sdk/activities/metadata_extraction/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
13
13
  application_sdk/activities/metadata_extraction/base.py,sha256=ENFojpxqKdN_eVSL4iet3cGfylPOfcl1jnflfo4zhs8,3920
14
14
  application_sdk/activities/metadata_extraction/rest.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
- application_sdk/activities/metadata_extraction/sql.py,sha256=IkI1ZhOKyoSwosRT-g8c8IDBuFBq7mwyHLpDvwYO_B4,25451
15
+ application_sdk/activities/metadata_extraction/sql.py,sha256=CmE77EsgbOuDL5AKaRCnq1jApJnDWNVxx-RZ49cJwus,27415
16
16
  application_sdk/activities/query_extraction/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
17
  application_sdk/activities/query_extraction/sql.py,sha256=Gsa79R8CYY0uyt3rA2nLMfQs8-C4_zg1pJ_yYSF2cZw,21193
18
18
  application_sdk/application/__init__.py,sha256=vcrQsqlfmGvKcCZuOtHHaNRqHSGdXlEDftkb8Tv_shI,9867
@@ -28,9 +28,12 @@ application_sdk/clients/temporal.py,sha256=7ZkQSwSSZTFkBkhwFlqmLBFxlD2-jLS8QWpSi
28
28
  application_sdk/clients/utils.py,sha256=zLFOJbTr_6TOqnjfVFGY85OtIXZ4FQy_rquzjaydkbY,779
29
29
  application_sdk/clients/workflow.py,sha256=6bSqmA3sNCk9oY68dOjBUDZ9DhNKQxPD75qqE0cfldc,6104
30
30
  application_sdk/clients/.cursor/BUGBOT.md,sha256=7nEDUqWBEMI_uU6eK1jCSZGeXoQtLQcKwOrDn8AIDWo,10595
31
+ application_sdk/clients/azure/__init__.py,sha256=koHM4dpsLDf6NQfBWh1axIwF8DGFePcc_nu8_4a-D1g,233
32
+ application_sdk/clients/azure/auth.py,sha256=lvdyNt0V099XlSuTapprNx9xmtpr1El5I_2pKtEnDX0,11129
33
+ application_sdk/clients/azure/client.py,sha256=7f2kanMWgF61xM3WXeFnsgrZaG3Pdl9a1Vt8818e8HQ,13538
31
34
  application_sdk/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
32
35
  application_sdk/common/aws_utils.py,sha256=xlSMIQyjvQ-CydEXaxXrnPUygv7AAbCLsxhZ2wtKnzg,11219
33
- application_sdk/common/error_codes.py,sha256=bxgvugN_0H5b8VXfJw-44mybgX5I9lRJbRdYjtPjqDI,14561
36
+ application_sdk/common/error_codes.py,sha256=BGdOSfhKe4pd9oM9T2N8GASZeIJU2Be1NcThL8LkKBQ,14900
34
37
  application_sdk/common/file_converter.py,sha256=ta0PVh7uIEGJg0BTPUJnSjj55ve2iVAOkqwAeg96_-g,3079
35
38
  application_sdk/common/types.py,sha256=qkVr3SAR1zn5_0w9hFt18vGtcnaPLKlMJLSBhTSKePU,134
36
39
  application_sdk/common/utils.py,sha256=czcWvqoe2PawDvHOahk_AI88Zqth-CM3KzdGmehHQJ4,19286
@@ -78,8 +81,8 @@ application_sdk/observability/utils.py,sha256=-02GAFom8Bg4SNyCTNYySmen2dzvLfTu43
78
81
  application_sdk/observability/decorators/observability_decorator.py,sha256=yd6qfrg1MmH5KcZ5Ydzb0RaBzmxx5FrmiI9qwvZx3EU,8963
79
82
  application_sdk/server/__init__.py,sha256=KTqE1YPw_3WDVMWatJUuf9OOiobLM2K5SMaBrI62sCo,1568
80
83
  application_sdk/server/.cursor/BUGBOT.md,sha256=p_MMoWUW5G1894WfOKYReZKWCuyJT_OJz3rL5g21NbI,16566
81
- application_sdk/server/fastapi/__init__.py,sha256=BVqf63z1hxEdpJqLU4LXpFTbk5q8dVkjEbWbu_vbW_Y,29578
82
- application_sdk/server/fastapi/models.py,sha256=h0hMtMg_p5G0Ug2MBkmBcT94W025VKCLMFyh0FciNoQ,7559
84
+ application_sdk/server/fastapi/__init__.py,sha256=Ce1eXW9Y_8icuEL3RawYfoqcJbjpyfEK2nHq9y-o04k,30941
85
+ application_sdk/server/fastapi/models.py,sha256=ZKz0RWISrJKQ50kepPiPsVqZ6li44I_hFIy5GXochLc,9796
83
86
  application_sdk/server/fastapi/utils.py,sha256=WoDGDmq6E1kwS2FN5pjIuzygKNTHpA-tg2SQJZgJOOI,1415
84
87
  application_sdk/server/fastapi/middleware/logmiddleware.py,sha256=sKKi-ysI9XQDT0uKW3sfw2r3XQq2iJT4_XrNGXZWdjI,2907
85
88
  application_sdk/server/fastapi/middleware/metrics.py,sha256=F_EhayzNgEn9KJvITv8VFxwXhBZVaOQkDptyfa9nQK4,1820
@@ -138,7 +141,7 @@ application_sdk/transformers/atlas/__init__.py,sha256=fw3D8bBtt61SseAfYut3JZddpX
138
141
  application_sdk/transformers/atlas/sql.py,sha256=rkQXNZ7oebts5oF5E_Bw8NpcHHKScU0TmKciH_1l_k4,50419
139
142
  application_sdk/transformers/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
140
143
  application_sdk/transformers/common/utils.py,sha256=4ISMIQ0Gzghmi31p51FOFm5KLF7XF-fmH9PVT7i0DFE,4899
141
- application_sdk/transformers/query/__init__.py,sha256=yG1dGP3NhUizwkCgyFAzsr9SV9uWYZKjXoCWPrsIxVw,17358
144
+ application_sdk/transformers/query/__init__.py,sha256=asp3IC0zLjrwM8wyUxI8FSFeXmBT5IoFZ23IHn67Muw,18711
142
145
  application_sdk/transformers/query/templates/column.yaml,sha256=EXLYwGXN7LKT-v51n2EZnY99o6vHucyFaVSpM-sUSXw,7679
143
146
  application_sdk/transformers/query/templates/database.yaml,sha256=SD1hJg5LI7gsBHQL5mW341sa51EkhcsIDDFlIOi9zdk,1374
144
147
  application_sdk/transformers/query/templates/extras-procedure.yaml,sha256=XhAfVY4zm99K8fcgkYA1XPLv4ks-SA6SzMO3SMtQ60s,2298
@@ -152,8 +155,8 @@ application_sdk/workflows/metadata_extraction/__init__.py,sha256=jHUe_ZBQ66jx8bg
152
155
  application_sdk/workflows/metadata_extraction/sql.py,sha256=6ZaVt84n-8U2ZvR9GR7uIJKv5v8CuyQjhlnoRJvDszc,12435
153
156
  application_sdk/workflows/query_extraction/__init__.py,sha256=n066_CX5RpJz6DIxGMkKS3eGSRg03ilaCtsqfJWQb7Q,117
154
157
  application_sdk/workflows/query_extraction/sql.py,sha256=kT_JQkLCRZ44ZpaC4QvPL6DxnRIIVh8gYHLqRbMI-hA,4826
155
- atlan_application_sdk-2.1.0.dist-info/METADATA,sha256=XTa1eREHTJv_zkHQhKdCtxe-txAkf9pmIqSEch_zbUA,5806
156
- atlan_application_sdk-2.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
157
- atlan_application_sdk-2.1.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
158
- atlan_application_sdk-2.1.0.dist-info/licenses/NOTICE,sha256=A-XVVGt3KOYuuMmvSMIFkg534F1vHiCggEBp4Ez3wGk,1041
159
- atlan_application_sdk-2.1.0.dist-info/RECORD,,
158
+ atlan_application_sdk-2.2.0.dist-info/METADATA,sha256=PiV9n04V1F27b28MfIUEsL62baglFRHbi8gxQ4yeG_k,6014
159
+ atlan_application_sdk-2.2.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
160
+ atlan_application_sdk-2.2.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
161
+ atlan_application_sdk-2.2.0.dist-info/licenses/NOTICE,sha256=A-XVVGt3KOYuuMmvSMIFkg534F1vHiCggEBp4Ez3wGk,1041
162
+ atlan_application_sdk-2.2.0.dist-info/RECORD,,